/* Scanning of rtl for dataflow analysis.
   Copyright (C) 1999-2014 Free Software Foundation, Inc.
   Originally contributed by Michael P. Hayes
             (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
   and Kenneth Zadeck (zadeck@naturalbridge.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "regs.h"
#include "alloc-pool.h"
#include "flags.h"
#include "basic-block.h"
#include "sbitmap.h"
#include "bitmap.h"
#include "dumpfile.h"
#include "tree.h"
#include "target.h"
#include "target-def.h"
#include "df.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */

typedef struct df_mw_hardreg *df_mw_hardreg_ptr;


#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif

/* The set of hard registers in eliminables[i].from.  */

static HARD_REG_SET elim_reg_set;

/* The refs collected from one insn or basic block, held here until
   they are sorted, de-duplicated and installed into the permanent df
   structures.  */

struct df_collection_rec
{
  auto_vec<df_ref, 128> def_vec;
  auto_vec<df_ref, 32> use_vec;
  auto_vec<df_ref, 32> eq_use_vec;
  auto_vec<df_mw_hardreg_ptr, 32> mw_vec;
};
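
#if 0
/* Illustrative sketch only (kept out of the build, like the other
   #if 0 blocks in this file): the typical life cycle of a
   df_collection_rec.  The static scanners below fill its vectors for
   one insn and df_refs_add_to_chains installs the result.  The
   function name here is hypothetical; df_insn_rescan follows
   essentially this sequence.  */
static void
example_scan_one_insn (basic_block bb, rtx_insn *insn)
{
  struct df_collection_rec collection_rec;
  df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
  df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
}
#endif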

static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
                           rtx, rtx *,
                           basic_block, struct df_insn_info *,
                           enum df_ref_type, int ref_flags);
static void df_def_record_1 (struct df_collection_rec *, rtx *,
                             basic_block, struct df_insn_info *,
                             int ref_flags);
static void df_defs_record (struct df_collection_rec *, rtx,
                            basic_block, struct df_insn_info *,
                            int ref_flags);
static void df_uses_record (struct df_collection_rec *,
                            rtx *, enum df_ref_type,
                            basic_block, struct df_insn_info *,
                            int ref_flags);

static void df_install_ref_incremental (df_ref);
static void df_insn_refs_collect (struct df_collection_rec *,
                                  basic_block, struct df_insn_info *);
static void df_canonize_collection_rec (struct df_collection_rec *);

static void df_get_regular_block_artificial_uses (bitmap);
static void df_get_eh_block_artificial_uses (bitmap);

static void df_record_entry_block_defs (bitmap);
static void df_record_exit_block_uses (bitmap);
static void df_get_exit_block_use_set (bitmap);
static void df_get_entry_block_def_set (bitmap);
static void df_grow_ref_info (struct df_ref_info *, unsigned int);
static void df_ref_chain_delete_du_chain (df_ref);
static void df_ref_chain_delete (df_ref);

static void df_refs_add_to_chains (struct df_collection_rec *,
                                   basic_block, rtx_insn *, unsigned int);

static bool df_insn_refs_verify (struct df_collection_rec *, basic_block,
                                 rtx_insn *, bool);
static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
static void df_install_ref (df_ref, struct df_reg_info *,
                            struct df_ref_info *, bool);

static int df_ref_compare (df_ref, df_ref);
static int df_ref_ptr_compare (const void *, const void *);
static int df_mw_compare (const df_mw_hardreg *, const df_mw_hardreg *);
static int df_mw_ptr_compare (const void *, const void *);

static void df_insn_info_delete (unsigned int);

/* Indexed by hardware reg number, is true if that register is ever
   used in the current function.

   In df-scan.c, this is set up to record the hard regs used
   explicitly.  Reload adds in the hard regs used for holding pseudo
   regs.  Final uses it to generate the code in the function prologue
   and epilogue to save and restore registers as needed.  */

static bool regs_ever_live[FIRST_PSEUDO_REGISTER];

/* Flags used to tell df_refs_add_to_chains() which vectors it should
   copy.  */
static const unsigned int copy_defs = 0x1;
static const unsigned int copy_uses = 0x2;
static const unsigned int copy_eq_uses = 0x4;
static const unsigned int copy_mw = 0x8;
static const unsigned int copy_all = copy_defs | copy_uses | copy_eq_uses
  | copy_mw;

/*----------------------------------------------------------------------------
   SCANNING DATAFLOW PROBLEM

   There are several ways in which scanning looks just like the other
   dataflow problems.  It shares all the mechanisms for local info
   as well as basic block info.  Where it differs is when and how often
   it gets run.  It also has no need for the iterative solver.
----------------------------------------------------------------------------*/

/* Problem data for the scanning dataflow function.  */
struct df_scan_problem_data
{
  alloc_pool ref_base_pool;
  alloc_pool ref_artificial_pool;
  alloc_pool ref_regular_pool;
  alloc_pool insn_pool;
  alloc_pool reg_pool;
  alloc_pool mw_reg_pool;
  bitmap_obstack reg_bitmaps;
  bitmap_obstack insn_bitmaps;
};

typedef struct df_scan_bb_info *df_scan_bb_info_t;

/* Internal function to shut down the scanning problem.  */
static void
df_scan_free_internal (void)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  free (df->def_info.refs);
  free (df->def_info.begin);
  free (df->def_info.count);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.refs);
  free (df->use_info.begin);
  free (df->use_info.count);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  free (df->def_regs);
  df->def_regs = NULL;
  free (df->use_regs);
  df->use_regs = NULL;
  free (df->eq_use_regs);
  df->eq_use_regs = NULL;
  df->regs_size = 0;
  DF_REG_SIZE (df) = 0;

  free (df->insns);
  df->insns = NULL;
  DF_INSN_SIZE () = 0;

  free (df_scan->block_info);
  df_scan->block_info = NULL;
  df_scan->block_info_size = 0;

  bitmap_clear (&df->hardware_regs_used);
  bitmap_clear (&df->regular_block_artificial_uses);
  bitmap_clear (&df->eh_block_artificial_uses);
  BITMAP_FREE (df->entry_block_defs);
  BITMAP_FREE (df->exit_block_uses);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  free_alloc_pool (problem_data->ref_base_pool);
  free_alloc_pool (problem_data->ref_artificial_pool);
  free_alloc_pool (problem_data->ref_regular_pool);
  free_alloc_pool (problem_data->insn_pool);
  free_alloc_pool (problem_data->reg_pool);
  free_alloc_pool (problem_data->mw_reg_pool);
  bitmap_obstack_release (&problem_data->reg_bitmaps);
  bitmap_obstack_release (&problem_data->insn_bitmaps);
  free (df_scan->problem_data);
}

/* Free basic block info.  */

static void
df_scan_free_bb_info (basic_block bb, void *vbb_info)
{
  struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
  unsigned int bb_index = bb->index;
  rtx_insn *insn;

  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      df_insn_info_delete (INSN_UID (insn));

  if (bb_index < df_scan->block_info_size)
    bb_info = df_scan_get_bb_info (bb_index);

  /* Get rid of any artificial uses or defs.  */
  df_ref_chain_delete_du_chain (bb_info->artificial_defs);
  df_ref_chain_delete_du_chain (bb_info->artificial_uses);
  df_ref_chain_delete (bb_info->artificial_defs);
  df_ref_chain_delete (bb_info->artificial_uses);
  bb_info->artificial_defs = NULL;
  bb_info->artificial_uses = NULL;
}

/* Allocate the problem data for the scanning problem.  This should be
   called when the problem is created or when the entire function is to
   be rescanned.  */
void
df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
  struct df_scan_problem_data *problem_data;
  unsigned int insn_num = get_max_uid () + 1;
  unsigned int block_size = 512;
  basic_block bb;

  /* Given the number of pools, this is really faster than tearing
     everything apart.  */
  if (df_scan->problem_data)
    df_scan_free_internal ();

  problem_data = XNEW (struct df_scan_problem_data);
  df_scan->problem_data = problem_data;
  df_scan->computed = true;

  problem_data->ref_base_pool
    = create_alloc_pool ("df_scan ref base",
                         sizeof (struct df_base_ref), block_size);
  problem_data->ref_artificial_pool
    = create_alloc_pool ("df_scan ref artificial",
                         sizeof (struct df_artificial_ref), block_size);
  problem_data->ref_regular_pool
    = create_alloc_pool ("df_scan ref regular",
                         sizeof (struct df_regular_ref), block_size);
  problem_data->insn_pool
    = create_alloc_pool ("df_scan insn",
                         sizeof (struct df_insn_info), block_size);
  problem_data->reg_pool
    = create_alloc_pool ("df_scan reg",
                         sizeof (struct df_reg_info), block_size);
  problem_data->mw_reg_pool
    = create_alloc_pool ("df_scan mw_reg",
                         sizeof (struct df_mw_hardreg), block_size / 16);

  bitmap_obstack_initialize (&problem_data->reg_bitmaps);
  bitmap_obstack_initialize (&problem_data->insn_bitmaps);

  insn_num += insn_num / 4;
  df_grow_reg_info ();

  df_grow_insn_info ();
  df_grow_bb_info (df_scan);

  FOR_ALL_BB_FN (bb, cfun)
    {
      unsigned int bb_index = bb->index;
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  bitmap_initialize (&df->hardware_regs_used, &problem_data->reg_bitmaps);
  bitmap_initialize (&df->regular_block_artificial_uses,
                     &problem_data->reg_bitmaps);
  bitmap_initialize (&df->eh_block_artificial_uses,
                     &problem_data->reg_bitmaps);
  df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  bitmap_initialize (&df->insns_to_delete, &problem_data->insn_bitmaps);
  bitmap_initialize (&df->insns_to_rescan, &problem_data->insn_bitmaps);
  bitmap_initialize (&df->insns_to_notes_rescan, &problem_data->insn_bitmaps);
  df_scan->optional_p = false;
}

/* Free all of the data associated with the scan problem.  */

static void
df_scan_free (void)
{
  if (df_scan->problem_data)
    df_scan_free_internal ();

  if (df->blocks_to_analyze)
    {
      BITMAP_FREE (df->blocks_to_analyze);
      df->blocks_to_analyze = NULL;
    }

  free (df_scan);
}

/* Dump the preamble for DF_SCAN dump.  */
static void
df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
{
  int i;
  int dcount = 0;
  int ucount = 0;
  int ecount = 0;
  int icount = 0;
  int ccount = 0;
  basic_block bb;
  rtx_insn *insn;

  fprintf (file, ";; invalidated by call \t");
  df_print_regset (file, regs_invalidated_by_call_regset);
  fprintf (file, ";; hardware regs used \t");
  df_print_regset (file, &df->hardware_regs_used);
  fprintf (file, ";; regular block artificial uses \t");
  df_print_regset (file, &df->regular_block_artificial_uses);
  fprintf (file, ";; eh block artificial uses \t");
  df_print_regset (file, &df->eh_block_artificial_uses);
  fprintf (file, ";; entry block defs \t");
  df_print_regset (file, df->entry_block_defs);
  fprintf (file, ";; exit block uses \t");
  df_print_regset (file, df->exit_block_uses);
  fprintf (file, ";; regs ever live \t");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (df_regs_ever_live_p (i))
      fprintf (file, " %d[%s]", i, reg_names[i]);
  fprintf (file, "\n;; ref usage \t");

  for (i = 0; i < (int) df->regs_inited; i++)
    if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
      {
        const char *sep = "";

        fprintf (file, "r%d={", i);
        if (DF_REG_DEF_COUNT (i))
          {
            fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
            sep = ",";
            dcount += DF_REG_DEF_COUNT (i);
          }
        if (DF_REG_USE_COUNT (i))
          {
            fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
            sep = ",";
            ucount += DF_REG_USE_COUNT (i);
          }
        if (DF_REG_EQ_USE_COUNT (i))
          {
            fprintf (file, "%s%de", sep, DF_REG_EQ_USE_COUNT (i));
            ecount += DF_REG_EQ_USE_COUNT (i);
          }
        fprintf (file, "} ");
      }

  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        {
          if (CALL_P (insn))
            ccount++;
          else
            icount++;
        }

  fprintf (file, "\n;; total ref usage %d{%dd,%du,%de}"
                 " in %d{%d regular + %d call} insns.\n",
           dcount + ucount + ecount, dcount, ucount, ecount,
           icount + ccount, icount, ccount);
}

/* Dump the bb_info for a given basic block.  */
static void
df_scan_start_block (basic_block bb, FILE *file)
{
  struct df_scan_bb_info *bb_info
    = df_scan_get_bb_info (bb->index);

  if (bb_info)
    {
      fprintf (file, ";; bb %d artificial_defs: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_defs, true, file);
      fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_uses, true, file);
      fprintf (file, "\n");
    }
#if 0
  {
    rtx_insn *insn;
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        df_insn_debug (insn, false, file);
  }
#endif
}

static struct df_problem problem_SCAN =
{
  DF_SCAN,                    /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_scan_alloc,              /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_scan_free_bb_info,       /* Free basic block info.  */
  NULL,                       /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_scan_free,               /* Free all of the problem information.  */
  NULL,                       /* Remove this problem from the stack of dataflow problems.  */
  df_scan_start_dump,         /* Debugging.  */
  df_scan_start_block,        /* Debugging start block.  */
  NULL,                       /* Debugging end block.  */
  NULL,                       /* Debugging start insn.  */
  NULL,                       /* Debugging end insn.  */
  NULL,                       /* Incremental solution verify start.  */
  NULL,                       /* Incremental solution verify end.  */
  NULL,                       /* Dependent problem.  */
  sizeof (struct df_scan_bb_info), /* Size of entry of block_info array.  */
  TV_DF_SCAN,                 /* Timing variable.  */
  false                       /* Reset blocks on dropping out of blocks_to_analyze.  */
};

/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

void
df_scan_add_problem (void)
{
  df_add_problem (&problem_SCAN);
}
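
#if 0
/* Sketch of how this problem is instantiated during dataflow
   initialization (the call sequence mirrors what df-core.c does in
   rest_of_handle_df_initialize; treat it as illustrative, not
   authoritative):  */
df_scan_add_problem ();   /* Register the scanning problem.  */
df_scan_alloc (NULL);     /* Allocate its problem data.  */
df_scan_blocks ();        /* Scan every insn and record refs.  */
#endif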


/*----------------------------------------------------------------------------
   Storage Allocation Utilities
----------------------------------------------------------------------------*/

/* First, grow the reg_info information.  If the current size is less than
   the number of pseudos, grow to 25% more than the number of
   pseudos.

   Second, assure that all of the slots up to max_reg_num have been
   filled with reg_info structures.  */

void
df_grow_reg_info (void)
{
  unsigned int max_reg = max_reg_num ();
  unsigned int new_size = max_reg;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;

  if (df->regs_size < new_size)
    {
      new_size += new_size / 4;
      df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
      df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
      df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
                                    new_size);
      df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
      df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
      df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
      df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
      df->regs_size = new_size;
    }

  for (i = df->regs_inited; i < max_reg; i++)
    {
      struct df_reg_info *reg_info;

      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->def_regs[i] = reg_info;
      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->use_regs[i] = reg_info;
      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->eq_use_regs[i] = reg_info;
      df->def_info.begin[i] = 0;
      df->def_info.count[i] = 0;
      df->use_info.begin[i] = 0;
      df->use_info.count[i] = 0;
    }

  df->regs_inited = max_reg;
}

/* Grow the ref information.  */

static void
df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
{
  if (ref_info->refs_size < new_size)
    {
      ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
      memset (ref_info->refs + ref_info->refs_size, 0,
              (new_size - ref_info->refs_size) * sizeof (df_ref));
      ref_info->refs_size = new_size;
    }
}

/* Check and grow the ref information if necessary.  This routine
   guarantees that the refs array has at least total_size +
   BITMAP_ADDEND entries.  It updates ref_info->refs_size only and
   does not change ref_info->total_size.  */

static void
df_check_and_grow_ref_info (struct df_ref_info *ref_info,
                            unsigned bitmap_addend)
{
  if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
    {
      int new_size = ref_info->total_size + bitmap_addend;
      new_size += ref_info->total_size / 4;
      df_grow_ref_info (ref_info, new_size);
    }
}

/* Grow the insn info.  If the current size is less than the
   number of instructions, grow to 25% more than the number of
   instructions.  */

void
df_grow_insn_info (void)
{
  unsigned int new_size = get_max_uid () + 1;
  if (DF_INSN_SIZE () < new_size)
    {
      new_size += new_size / 4;
      df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
      memset (df->insns + df->insns_size, 0,
              (new_size - DF_INSN_SIZE ()) * sizeof (struct df_insn_info *));
      DF_INSN_SIZE () = new_size;
    }
}


/*----------------------------------------------------------------------------
   PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
----------------------------------------------------------------------------*/

/* Rescan all of the blocks_to_analyze, or all of the blocks in the
   function if blocks_to_analyze is NULL.  */

void
df_scan_blocks (void)
{
  basic_block bb;

  df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
  df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;

  df_get_regular_block_artificial_uses (&df->regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (&df->eh_block_artificial_uses);

  bitmap_ior_into (&df->eh_block_artificial_uses,
                   &df->regular_block_artificial_uses);

  /* ENTRY and EXIT blocks have special defs/uses.  */
  df_get_entry_block_def_set (df->entry_block_defs);
  df_record_entry_block_defs (df->entry_block_defs);
  df_get_exit_block_use_set (df->exit_block_uses);
  df_record_exit_block_uses (df->exit_block_uses);
  df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
  df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));

  /* Regular blocks.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      unsigned int bb_index = bb->index;
      df_bb_refs_record (bb_index, true);
    }
}

/* Create new refs under address LOC within INSN.  This function is
   only used externally.  REF_FLAGS must be either 0 or DF_REF_IN_NOTE,
   depending on whether LOC is inside PATTERN (INSN) or a note.  */

void
df_uses_create (rtx *loc, rtx_insn *insn, int ref_flags)
{
  gcc_assert (!(ref_flags & ~DF_REF_IN_NOTE));
  df_uses_record (NULL, loc, DF_REF_REG_USE,
                  BLOCK_FOR_INSN (insn),
                  DF_INSN_INFO_GET (insn),
                  ref_flags);
}
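
#if 0
/* Hypothetical example: a pass that has just attached a REG_EQUAL
   note can record the uses inside the note without a full insn
   rescan.  NOTE here stands for the newly created note rtx.  */
df_uses_create (&XEXP (note, 0), insn, DF_REF_IN_NOTE);
#endif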

static void
df_install_ref_incremental (df_ref ref)
{
  struct df_reg_info **reg_info;
  struct df_ref_info *ref_info;
  df_ref *ref_ptr;
  bool add_to_table;

  rtx_insn *insn = DF_REF_INSN (ref);
  basic_block bb = BLOCK_FOR_INSN (insn);

  if (DF_REF_REG_DEF_P (ref))
    {
      reg_info = df->def_regs;
      ref_info = &df->def_info;
      ref_ptr = &DF_INSN_DEFS (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }
  else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
    {
      reg_info = df->eq_use_regs;
      ref_info = &df->use_info;
      ref_ptr = &DF_INSN_EQ_USES (insn);
      switch (ref_info->ref_order)
        {
        case DF_REF_ORDER_UNORDERED_WITH_NOTES:
        case DF_REF_ORDER_BY_REG_WITH_NOTES:
        case DF_REF_ORDER_BY_INSN_WITH_NOTES:
          add_to_table = true;
          break;
        default:
          add_to_table = false;
          break;
        }
    }
  else
    {
      reg_info = df->use_regs;
      ref_info = &df->use_info;
      ref_ptr = &DF_INSN_USES (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }

  /* Do not add if ref is not in the right blocks.  */
  if (add_to_table && df->analyze_subset)
    add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

  df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);

  if (add_to_table)
    switch (ref_info->ref_order)
      {
      case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      case DF_REF_ORDER_BY_REG_WITH_NOTES:
      case DF_REF_ORDER_BY_INSN_WITH_NOTES:
        ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
        break;
      default:
        ref_info->ref_order = DF_REF_ORDER_UNORDERED;
        break;
      }

  while (*ref_ptr && df_ref_compare (*ref_ptr, ref) < 0)
    ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);

  DF_REF_NEXT_LOC (ref) = *ref_ptr;
  *ref_ptr = ref;

#if 0
  if (dump_file)
    {
      fprintf (dump_file, "adding ref ");
      df_ref_debug (ref, dump_file);
    }
#endif
  /* By adding the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
    df_set_bb_dirty (bb);
}


/*----------------------------------------------------------------------------
   UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
----------------------------------------------------------------------------*/

static void
df_free_ref (df_ref ref)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  switch (DF_REF_CLASS (ref))
    {
    case DF_REF_BASE:
      pool_free (problem_data->ref_base_pool, ref);
      break;

    case DF_REF_ARTIFICIAL:
      pool_free (problem_data->ref_artificial_pool, ref);
      break;

    case DF_REF_REGULAR:
      pool_free (problem_data->ref_regular_pool, ref);
      break;
    }
}

/* Unlink and delete REF from the reg_use, reg_eq_use or reg_def chain.
   Also delete the def-use or use-def chain if it exists.  */

static void
df_reg_chain_unlink (df_ref ref)
{
  df_ref next = DF_REF_NEXT_REG (ref);
  df_ref prev = DF_REF_PREV_REG (ref);
  int id = DF_REF_ID (ref);
  struct df_reg_info *reg_info;
  df_ref *refs = NULL;

  if (DF_REF_REG_DEF_P (ref))
    {
      int regno = DF_REF_REGNO (ref);
      reg_info = DF_REG_DEF_GET (regno);
      refs = df->def_info.refs;
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
        {
          reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
          switch (df->use_info.ref_order)
            {
            case DF_REF_ORDER_UNORDERED_WITH_NOTES:
            case DF_REF_ORDER_BY_REG_WITH_NOTES:
            case DF_REF_ORDER_BY_INSN_WITH_NOTES:
              refs = df->use_info.refs;
              break;
            default:
              break;
            }
        }
      else
        {
          reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
          refs = df->use_info.refs;
        }
    }

  if (refs)
    {
      if (df->analyze_subset)
        {
          if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
            refs[id] = NULL;
        }
      else
        refs[id] = NULL;
    }

  /* Delete any def-use or use-def chains that start here.  It is
     possible that there is trash in this field.  This happens for
     insns that have been deleted when rescanning has been deferred
     and the chain problem has also been deleted.  The chain tear down
     code skips deleted insns.  */
  if (df_chain && DF_REF_CHAIN (ref))
    df_chain_unlink (ref);

  reg_info->n_refs--;
  if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
    {
      gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
    }

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    DF_REF_NEXT_REG (prev) = next;
  else
    {
      gcc_assert (reg_info->reg_chain == ref);
      reg_info->reg_chain = next;
    }
  if (next)
    DF_REF_PREV_REG (next) = prev;

  df_free_ref (ref);
}

/* Create the insn record for INSN.  If there was one there, zero it
   out.  */

struct df_insn_info *
df_insn_create_insn_record (rtx_insn *insn)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  struct df_insn_info *insn_rec;

  df_grow_insn_info ();
  insn_rec = DF_INSN_INFO_GET (insn);
  if (!insn_rec)
    {
      insn_rec = (struct df_insn_info *) pool_alloc (problem_data->insn_pool);
      DF_INSN_INFO_SET (insn, insn_rec);
    }
  memset (insn_rec, 0, sizeof (struct df_insn_info));
  insn_rec->insn = insn;
  return insn_rec;
}

/* Delete the du-chain (DF_REF_CHAIN ()) of each ref in the ref chain.  */

static void
df_ref_chain_delete_du_chain (df_ref ref)
{
  for (; ref; ref = DF_REF_NEXT_LOC (ref))
    /* CHAIN is allocated by DF_CHAIN.  So make sure to
       pass the df_scan instance for the problem.  */
    if (DF_REF_CHAIN (ref))
      df_chain_unlink (ref);
}

/* Delete all refs in the ref chain.  */

static void
df_ref_chain_delete (df_ref ref)
{
  df_ref next;
  for (; ref; ref = next)
    {
      next = DF_REF_NEXT_LOC (ref);
      df_reg_chain_unlink (ref);
    }
}

/* Delete the hardreg chain.  */

static void
df_mw_hardreg_chain_delete (struct df_mw_hardreg *hardregs)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  df_mw_hardreg *next;

  for (; hardregs; hardregs = next)
    {
      next = DF_MWS_NEXT (hardregs);
      pool_free (problem_data->mw_reg_pool, hardregs);
    }
}

/* Delete all of the refs information from the insn with UID.
   Internal helper for df_insn_delete, df_insn_rescan, and other
   df-scan routines that don't have to work in deferred mode
   and do not have to mark basic blocks for re-processing.  */

static void
df_insn_info_delete (unsigned int uid)
{
  struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      struct df_scan_problem_data *problem_data
        = (struct df_scan_problem_data *) df_scan->problem_data;

      /* In general, notes do not have the insn_info fields
         initialized.  However, combine deletes insns by changing them
         to notes.  How clever.  So we cannot just check if it is a
         valid insn before short circuiting this code, we need to see
         if we actually initialized it.  */
      df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

      if (df_chain)
        {
          df_ref_chain_delete_du_chain (insn_info->defs);
          df_ref_chain_delete_du_chain (insn_info->uses);
          df_ref_chain_delete_du_chain (insn_info->eq_uses);
        }

      df_ref_chain_delete (insn_info->defs);
      df_ref_chain_delete (insn_info->uses);
      df_ref_chain_delete (insn_info->eq_uses);

      pool_free (problem_data->insn_pool, insn_info);
      DF_INSN_UID_SET (uid, NULL);
    }
}

/* Delete all of the refs information from INSN, either right now
   or marked for later in deferred mode.  */

void
df_insn_delete (rtx_insn *insn)
{
  unsigned int uid;
  basic_block bb;

  gcc_checking_assert (INSN_P (insn));

  if (!df)
    return;

  uid = INSN_UID (insn);
  bb = BLOCK_FOR_INSN (insn);

  /* ??? bb can be NULL after pass_free_cfg.  At that point, DF should
     not exist anymore (as mentioned in df-core.c: "The only requirement
     [for DF] is that there be a correct control flow graph."  Clearly
     that isn't the case after pass_free_cfg.  But DF is freed much later
     because some back-ends want to use DF info even though the CFG is
     already gone.  It's not clear to me whether that is safe, actually.
     In any case, we expect BB to be non-NULL at least up to register
     allocation, so disallow a NULL BB up to there.  Not perfect
     but better than nothing...  */
  gcc_checking_assert (bb != NULL || reload_completed);

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  /* The block must be marked as dirty now, rather than later as in
     df_insn_rescan and df_notes_rescan because it may not be there at
     rescanning time and the mark would blow up.
     DEBUG_INSNs do not make a block's data flow solution dirty (at
     worst the LUIDs are no longer contiguous).  */
  if (bb != NULL && NONDEBUG_INSN_P (insn))
    df_set_bb_dirty (bb);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        {
          bitmap_clear_bit (&df->insns_to_rescan, uid);
          bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
          bitmap_set_bit (&df->insns_to_delete, uid);
        }
      if (dump_file)
        fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "deleting insn with uid = %d.\n", uid);

  df_insn_info_delete (uid);
}
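
#if 0
/* Sketch of the deferred-mode protocol referred to above (assumed
   client code, illustrative only): with DF_DEFER_INSN_RESCAN set,
   deletions land in df->insns_to_delete and are only carried out by
   df_process_deferred_rescans.  */
df_set_flags (DF_DEFER_INSN_RESCAN);
delete_insn (insn);             /* Deletion is merely queued here.  */
/* ... more insn stream editing ...  */
df_process_deferred_rescans (); /* Queued work happens here.  */
df_clear_flags (DF_DEFER_INSN_RESCAN);
#endif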

/* Free all of the refs and the mw_hardregs in COLLECTION_REC.  */

static void
df_free_collection_rec (struct df_collection_rec *collection_rec)
{
  unsigned int ix;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  df_ref ref;
  struct df_mw_hardreg *mw;

  FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
    df_free_ref (ref);
  FOR_EACH_VEC_ELT (collection_rec->use_vec, ix, ref)
    df_free_ref (ref);
  FOR_EACH_VEC_ELT (collection_rec->eq_use_vec, ix, ref)
    df_free_ref (ref);
  FOR_EACH_VEC_ELT (collection_rec->mw_vec, ix, mw)
    pool_free (problem_data->mw_reg_pool, mw);

  collection_rec->def_vec.release ();
  collection_rec->use_vec.release ();
  collection_rec->eq_use_vec.release ();
  collection_rec->mw_vec.release ();
}

/* Rescan INSN.  Return TRUE if the rescanning produced any changes.  */

bool
df_insn_rescan (rtx_insn *insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  basic_block bb = BLOCK_FOR_INSN (insn);
  struct df_collection_rec collection_rec;

  if ((!df) || (!INSN_P (insn)))
    return false;

  if (!bb)
    {
      if (dump_file)
        fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
      return false;
    }

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return false;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
        {
          insn_info = df_insn_create_insn_record (insn);
          insn_info->defs = 0;
          insn_info->uses = 0;
          insn_info->eq_uses = 0;
          insn_info->mw_hardregs = 0;
        }
      if (dump_file)
        fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);

      bitmap_clear_bit (&df->insns_to_delete, uid);
      bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
      bitmap_set_bit (&df->insns_to_rescan, INSN_UID (insn));
      return false;
    }

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      int luid;
      bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
      /* If there's no change, return false.  */
      if (the_same)
        {
          df_free_collection_rec (&collection_rec);
          if (dump_file)
            fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
          return false;
        }
      if (dump_file)
        fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);

      /* There's change - we need to delete the existing info.
         Since the insn isn't moved, we can salvage its LUID.  */
      luid = DF_INSN_LUID (insn);
      df_insn_info_delete (uid);
      df_insn_create_insn_record (insn);
      DF_INSN_LUID (insn) = luid;
    }
  else
    {
      struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
      df_insn_refs_collect (&collection_rec, bb, insn_info);
      if (dump_file)
        fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
    }

  df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
  if (!DEBUG_INSN_P (insn))
    df_set_bb_dirty (bb);

  return true;
}
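
#if 0
/* Sketch (assumed client code): after editing an insn pattern in
   place, one rescan call resynchronizes the scanned refs; the return
   value reports whether anything actually changed.  NEW_SRC is
   hypothetical.  */
SET_SRC (PATTERN (insn)) = new_src;
if (df_insn_rescan (insn))
  /* The def/use information for INSN was rebuilt.  */;
#endif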

/* Same as df_insn_rescan, but don't mark the basic block as
   dirty.  */

bool
df_insn_rescan_debug_internal (rtx_insn *insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info;

  gcc_assert (DEBUG_INSN_P (insn)
              && VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));

  if (!df)
    return false;

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
  if (!insn_info)
    return false;

  if (dump_file)
    fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);

  if (insn_info->defs == 0
      && insn_info->uses == 0
      && insn_info->eq_uses == 0
      && insn_info->mw_hardregs == 0)
    return false;

  df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

  if (df_chain)
    {
      df_ref_chain_delete_du_chain (insn_info->defs);
      df_ref_chain_delete_du_chain (insn_info->uses);
      df_ref_chain_delete_du_chain (insn_info->eq_uses);
    }

  df_ref_chain_delete (insn_info->defs);
  df_ref_chain_delete (insn_info->uses);
  df_ref_chain_delete (insn_info->eq_uses);

  insn_info->defs = 0;
  insn_info->uses = 0;
  insn_info->eq_uses = 0;
  insn_info->mw_hardregs = 0;

  return true;
}

/* Rescan all of the insns in the function.  Note that the artificial
   uses and defs are not touched.  This function will destroy def-use
   or use-def chains.  */

void
df_insn_rescan_all (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  basic_block bb;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap_head tmp;

  bitmap_initialize (&tmp, &df_bitmap_obstack);

  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  bitmap_copy (&tmp, &df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_info_delete (uid);
    }

  bitmap_clear (&tmp);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;
      FOR_BB_INSNS (bb, insn)
        {
          df_insn_rescan (insn);
        }
    }

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);
}

/* Process all of the deferred rescans or deletions.  */

void
df_process_deferred_rescans (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap_head tmp;

  bitmap_initialize (&tmp, &df_bitmap_obstack);

  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  if (dump_file)
    fprintf (dump_file, "starting the processing of deferred insns\n");

  bitmap_copy (&tmp, &df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_info_delete (uid);
    }

  bitmap_copy (&tmp, &df->insns_to_rescan);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_rescan (insn_info->insn);
    }

  bitmap_copy (&tmp, &df->insns_to_notes_rescan);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_notes_rescan (insn_info->insn);
    }

  if (dump_file)
    fprintf (dump_file, "ending the processing of deferred insns\n");

  bitmap_clear (&tmp);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);

  /* If someone changed regs_ever_live during this pass, fix up the
     entry and exit blocks.  */
  if (df->redo_entry_and_exit)
    {
      df_update_entry_exit_and_calls ();
      df->redo_entry_and_exit = false;
    }
}

/* Count the number of refs.  Include the defs if INCLUDE_DEFS.
   Include the uses if INCLUDE_USES.  Include the eq_uses if
   INCLUDE_EQ_USES.  */

static unsigned int
df_count_refs (bool include_defs, bool include_uses,
               bool include_eq_uses)
{
  unsigned int regno;
  int size = 0;
  unsigned int m = df->regs_inited;

  for (regno = 0; regno < m; regno++)
    {
      if (include_defs)
        size += DF_REG_DEF_COUNT (regno);
      if (include_uses)
        size += DF_REG_USE_COUNT (regno);
      if (include_eq_uses)
        size += DF_REG_EQ_USE_COUNT (regno);
    }
  return size;
}

/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in reg order,
   which is likely to be best if processing the whole function.  */

static void
df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
                                  bool include_defs,
                                  bool include_uses,
                                  bool include_eq_uses)
{
  unsigned int m = df->regs_inited;
  unsigned int regno;
  unsigned int offset = 0;
  unsigned int start;

  if (df->changeable_flags & DF_NO_HARD_REGS)
    {
      start = FIRST_PSEUDO_REGISTER;
      memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
      memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
    }
  else
    start = 0;

  ref_info->total_size
    = df_count_refs (include_defs, include_uses, include_eq_uses);

  df_check_and_grow_ref_info (ref_info, 1);

  for (regno = start; regno < m; regno++)
    {
      int count = 0;
      ref_info->begin[regno] = offset;
      if (include_defs)
        {
          df_ref ref = DF_REG_DEF_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_checking_assert (offset < ref_info->refs_size);
            }
        }
      if (include_uses)
        {
          df_ref ref = DF_REG_USE_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_checking_assert (offset < ref_info->refs_size);
            }
        }
      if (include_eq_uses)
        {
          df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_checking_assert (offset < ref_info->refs_size);
            }
        }
      ref_info->count[regno] = count;
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->table_size = offset;
}

/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in insn order,
   which is likely to be best if processing some segment of the
   function.  */

static void
df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
                                   bool include_defs,
                                   bool include_uses,
                                   bool include_eq_uses)
{
  bitmap_iterator bi;
  unsigned int bb_index;
  unsigned int m = df->regs_inited;
  unsigned int offset = 0;
  unsigned int r;
  unsigned int start
    = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;

  memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
  memset (ref_info->count, 0, sizeof (int) * df->regs_inited);

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);

  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
      rtx_insn *insn;
      df_ref def, use;

      if (include_defs)
        FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
          {
            unsigned int regno = DF_REF_REGNO (def);
            ref_info->count[regno]++;
          }
      if (include_uses)
        FOR_EACH_ARTIFICIAL_USE (use, bb_index)
          {
            unsigned int regno = DF_REF_REGNO (use);
            ref_info->count[regno]++;
          }

      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            {
              struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);

              if (include_defs)
                FOR_EACH_INSN_INFO_DEF (def, insn_info)
                  {
                    unsigned int regno = DF_REF_REGNO (def);
                    ref_info->count[regno]++;
                  }
              if (include_uses)
                FOR_EACH_INSN_INFO_USE (use, insn_info)
                  {
                    unsigned int regno = DF_REF_REGNO (use);
                    ref_info->count[regno]++;
                  }
              if (include_eq_uses)
                FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
                  {
                    unsigned int regno = DF_REF_REGNO (use);
                    ref_info->count[regno]++;
                  }
            }
        }
    }

  for (r = start; r < m; r++)
    {
      ref_info->begin[r] = offset;
      offset += ref_info->count[r];
      ref_info->count[r] = 0;
    }

  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
      rtx_insn *insn;
      df_ref def, use;

      if (include_defs)
        FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
          {
            unsigned int regno = DF_REF_REGNO (def);
            if (regno >= start)
              {
                unsigned int id
                  = ref_info->begin[regno] + ref_info->count[regno]++;
                DF_REF_ID (def) = id;
                ref_info->refs[id] = def;
              }
          }
      if (include_uses)
        FOR_EACH_ARTIFICIAL_USE (use, bb_index)
          {
            /* Was DF_REF_REGNO (def); the loop variable here is USE.  */
            unsigned int regno = DF_REF_REGNO (use);
            if (regno >= start)
              {
                unsigned int id
                  = ref_info->begin[regno] + ref_info->count[regno]++;
                DF_REF_ID (use) = id;
                ref_info->refs[id] = use;
              }
          }

      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            {
              struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);

              if (include_defs)
                FOR_EACH_INSN_INFO_DEF (def, insn_info)
                  {
                    unsigned int regno = DF_REF_REGNO (def);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (def) = id;
                        ref_info->refs[id] = def;
                      }
                  }
              if (include_uses)
                FOR_EACH_INSN_INFO_USE (use, insn_info)
                  {
                    unsigned int regno = DF_REF_REGNO (use);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (use) = id;
                        ref_info->refs[id] = use;
                      }
                  }
              if (include_eq_uses)
                FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
                  {
                    unsigned int regno = DF_REF_REGNO (use);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (use) = id;
                        ref_info->refs[id] = use;
                      }
                  }
            }
        }
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->table_size = offset;
}

/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  */

static void
df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
                           bool include_defs,
                           bool include_uses,
                           bool include_eq_uses)
{
  if (df->analyze_subset)
    df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
                                       include_uses, include_eq_uses);
  else
    df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
                                      include_uses, include_eq_uses);
}

/* Add the refs in the chain starting at REF to the table in REF_INFO,
   starting at OFFSET.  */
static unsigned int
df_add_refs_to_table (unsigned int offset,
                      struct df_ref_info *ref_info,
                      df_ref ref)
{
  for (; ref; ref = DF_REF_NEXT_LOC (ref))
    if (!(df->changeable_flags & DF_NO_HARD_REGS)
        || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
      {
        ref_info->refs[offset] = ref;
        DF_REF_ID (ref) = offset++;
      }
  return offset;
}

/* Add the refs from all of the insns of BB to the table in REF_INFO,
   starting at OFFSET, and return the new offset.  Include the defs if
   INCLUDE_DEFS.  Include the uses if INCLUDE_USES.  Include the
   eq_uses if INCLUDE_EQ_USES.  */

static unsigned int
df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
                               struct df_ref_info *ref_info,
                               bool include_defs, bool include_uses,
                               bool include_eq_uses)
{
  rtx_insn *insn;

  if (include_defs)
    offset = df_add_refs_to_table (offset, ref_info,
                                   df_get_artificial_defs (bb->index));
  if (include_uses)
    offset = df_add_refs_to_table (offset, ref_info,
                                   df_get_artificial_uses (bb->index));

  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
        unsigned int uid = INSN_UID (insn);
        if (include_defs)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_DEFS (uid));
        if (include_uses)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_USES (uid));
        if (include_eq_uses)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_EQ_USES (uid));
      }
  return offset;
}

/* Organize the refs by insn into the table in REF_INFO.  If
   blocks_to_analyze is defined, use that set, otherwise the entire
   program.  Include the defs if INCLUDE_DEFS.  Include the uses if
   INCLUDE_USES.  Include the eq_uses if INCLUDE_EQ_USES.  */

static void
df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
                            bool include_defs, bool include_uses,
                            bool include_eq_uses)
{
  basic_block bb;
  unsigned int offset = 0;

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);
  if (df->blocks_to_analyze)
    {
      bitmap_iterator bi;
      unsigned int index;

      EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
        {
          offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK_FOR_FN (cfun,
                                                                      index),
                                                  offset, ref_info,
                                                  include_defs, include_uses,
                                                  include_eq_uses);
        }

      ref_info->table_size = offset;
    }
  else
    {
      FOR_ALL_BB_FN (bb, cfun)
        offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
                                                include_defs, include_uses,
                                                include_eq_uses);
      ref_info->table_size = offset;
    }
}

/* If the use refs in DF are not organized, reorganize them.  */

void
df_maybe_reorganize_use_refs (enum df_ref_order order)
{
  if (order == df->use_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_REG_WITH_NOTES:
      df_reorganize_refs_by_reg (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
      df_reorganize_refs_by_insn (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_NO_TABLE:
      free (df->use_info.refs);
      df->use_info.refs = NULL;
      df->use_info.refs_size = 0;
      break;

    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->use_info.ref_order = order;
}
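
#if 0
/* Sketch (assumed client code): materialize the use table in insn
   order, then walk it with the DF_USES_* accessors from df.h.  */
df_maybe_reorganize_use_refs (DF_REF_ORDER_BY_INSN);
for (unsigned int i = 0; i < DF_USES_TABLE_SIZE (); i++)
  {
    df_ref use = DF_USES_GET (i);
    if (use)
      /* Process USE.  */;
  }
#endif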

/* If the def refs in DF are not organized, reorganize them.  */

void
df_maybe_reorganize_def_refs (enum df_ref_order order)
{
  if (order == df->def_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_NO_TABLE:
      free (df->def_info.refs);
      df->def_info.refs = NULL;
      df->def_info.refs_size = 0;
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
    case DF_REF_ORDER_BY_REG_WITH_NOTES:
    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->def_info.ref_order = order;
}

/* Change all of the basic block references in INSN to use the insn's
   current basic block.  This function is called from routines that move
   instructions from one block to another.  */

void
df_insn_change_bb (rtx_insn *insn, basic_block new_bb)
{
  basic_block old_bb = BLOCK_FOR_INSN (insn);
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (old_bb == new_bb)
    return;

  set_block_for_insn (insn, new_bb);

  if (!df)
    return;

  if (dump_file)
    fprintf (dump_file, "changing bb of uid %d\n", uid);

  insn_info = DF_INSN_UID_SAFE_GET (uid);
  if (insn_info == NULL)
    {
      if (dump_file)
        fprintf (dump_file, "  unscanned insn\n");
      df_insn_rescan (insn);
      return;
    }

  if (!INSN_P (insn))
    return;

  df_set_bb_dirty (new_bb);
  if (old_bb)
    {
      if (dump_file)
        fprintf (dump_file, "  from %d to %d\n",
                 old_bb->index, new_bb->index);
      df_set_bb_dirty (old_bb);
    }
  else
    if (dump_file)
      fprintf (dump_file, "  to %d\n", new_bb->index);
}

/* Helper function for df_ref_change_reg_with_loc.  */

static void
df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
                              struct df_reg_info *new_df,
                              int new_regno, rtx loc)
{
  df_ref the_ref = old_df->reg_chain;

  while (the_ref)
    {
      if ((!DF_REF_IS_ARTIFICIAL (the_ref))
          && DF_REF_LOC (the_ref)
          && (*DF_REF_LOC (the_ref) == loc))
        {
          df_ref next_ref = DF_REF_NEXT_REG (the_ref);
          df_ref prev_ref = DF_REF_PREV_REG (the_ref);
          df_ref *ref_ptr;
          struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);

          DF_REF_REGNO (the_ref) = new_regno;
          DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];

          /* Pull the_ref out of the old regno chain.  */
          if (prev_ref)
            DF_REF_NEXT_REG (prev_ref) = next_ref;
          else
            old_df->reg_chain = next_ref;
          if (next_ref)
            DF_REF_PREV_REG (next_ref) = prev_ref;
          old_df->n_refs--;

          /* Put the ref into the new regno chain.  */
          DF_REF_PREV_REG (the_ref) = NULL;
          DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
          if (new_df->reg_chain)
            DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
          new_df->reg_chain = the_ref;
          new_df->n_refs++;
          if (DF_REF_BB (the_ref))
            df_set_bb_dirty (DF_REF_BB (the_ref));

          /* The record the ref was in needs to be re-sorted, because
             the regno is a sorting key.  First, find the right
             record.  */
          if (DF_REF_REG_DEF_P (the_ref))
            ref_ptr = &insn_info->defs;
          else if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
            ref_ptr = &insn_info->eq_uses;
          else
            ref_ptr = &insn_info->uses;
          if (dump_file)
            fprintf (dump_file, "changing reg in insn %d\n",
                     DF_REF_INSN_UID (the_ref));

          /* Stop if we find the current reference or where the reference
             needs to be.  */
          while (*ref_ptr != the_ref && df_ref_compare (*ref_ptr, the_ref) < 0)
            ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
          if (*ref_ptr != the_ref)
            {
              /* The reference needs to be promoted up the list.  */
              df_ref next = DF_REF_NEXT_LOC (the_ref);
              DF_REF_NEXT_LOC (the_ref) = *ref_ptr;
              *ref_ptr = the_ref;
              do
                ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
              while (*ref_ptr != the_ref);
              *ref_ptr = next;
            }
          else if (DF_REF_NEXT_LOC (the_ref)
                   && df_ref_compare (the_ref, DF_REF_NEXT_LOC (the_ref)) > 0)
            {
              /* The reference needs to be demoted down the list.  */
              *ref_ptr = DF_REF_NEXT_LOC (the_ref);
              do
                ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
              while (*ref_ptr && df_ref_compare (the_ref, *ref_ptr) > 0);
              DF_REF_NEXT_LOC (the_ref) = *ref_ptr;
              *ref_ptr = the_ref;
            }

          the_ref = next_ref;
        }
      else
        the_ref = DF_REF_NEXT_REG (the_ref);
    }
}

/* Change the regno of all refs that contained LOC from OLD_REGNO to
   NEW_REGNO.  Refs that do not match LOC are not changed, which means
   that artificial refs are not changed since they have no loc.  This
   call is to support the SET_REGNO macro.  */

void
df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
{
  if ((!df) || (old_regno == -1) || (old_regno == new_regno))
    return;

  df_grow_reg_info ();

  df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
                                DF_REG_DEF_GET (new_regno), new_regno, loc);
  df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
                                DF_REG_USE_GET (new_regno), new_regno, loc);
  df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
                                DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
}
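
#if 0
/* The SET_REGNO macro in rtl.h is the intended caller; renaming a
   register rtx in place keeps the df chains consistent, roughly:  */
SET_REGNO (reg, new_regno);   /* Expands to a call to
                                 df_ref_change_reg_with_loc.  */
#endif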

/* Delete the mw_hardregs that point into the eq_notes.  */

static void
df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
{
  struct df_mw_hardreg **mw_ptr = &insn_info->mw_hardregs;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  while (*mw_ptr)
    {
      df_mw_hardreg *mw = *mw_ptr;
      if (mw->flags & DF_REF_IN_NOTE)
        {
          *mw_ptr = DF_MWS_NEXT (mw);
          pool_free (problem_data->mw_reg_pool, mw);
        }
      else
        mw_ptr = &DF_MWS_NEXT (mw);
    }
}

/* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN.  */

void
df_notes_rescan (rtx_insn *insn)
{
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (!df)
    return;

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return;

  /* Do nothing if the insn hasn't been emitted yet.  */
  if (!BLOCK_FOR_INSN (insn))
    return;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
        {
          insn_info = df_insn_create_insn_record (insn);
          insn_info->defs = 0;
          insn_info->uses = 0;
          insn_info->eq_uses = 0;
          insn_info->mw_hardregs = 0;
        }

      bitmap_clear_bit (&df->insns_to_delete, uid);
      /* If the insn is set to be rescanned, it does not need to also
         be notes rescanned.  */
      if (!bitmap_bit_p (&df->insns_to_rescan, uid))
        bitmap_set_bit (&df->insns_to_notes_rescan, INSN_UID (insn));
      return;
    }

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);

  if (insn_info)
    {
      basic_block bb = BLOCK_FOR_INSN (insn);
      rtx note;
      struct df_collection_rec collection_rec;
      unsigned int i;

      df_mw_hardreg_chain_delete_eq_uses (insn_info);
      df_ref_chain_delete (insn_info->eq_uses);
      insn_info->eq_uses = NULL;

      /* Process REG_EQUIV/REG_EQUAL notes.  */
      for (note = REG_NOTES (insn); note;
           note = XEXP (note, 1))
        {
          switch (REG_NOTE_KIND (note))
            {
            case REG_EQUIV:
            case REG_EQUAL:
              df_uses_record (&collection_rec,
                              &XEXP (note, 0), DF_REF_REG_USE,
                              bb, insn_info, DF_REF_IN_NOTE);
            default:
              break;
            }
        }

      /* Find some place to put any new mw_hardregs.  */
      df_canonize_collection_rec (&collection_rec);
      struct df_mw_hardreg **mw_ptr = &insn_info->mw_hardregs, *mw;
      FOR_EACH_VEC_ELT (collection_rec.mw_vec, i, mw)
        {
          while (*mw_ptr && df_mw_compare (*mw_ptr, mw) < 0)
            mw_ptr = &DF_MWS_NEXT (*mw_ptr);
          DF_MWS_NEXT (mw) = *mw_ptr;
          *mw_ptr = mw;
          mw_ptr = &DF_MWS_NEXT (mw);
        }
      df_refs_add_to_chains (&collection_rec, bb, insn, copy_eq_uses);
    }
  else
    df_insn_rescan (insn);
}
2043 /*----------------------------------------------------------------------------
2044 Hard core instruction scanning code. No external interfaces here,
2045 just a lot of routines that look inside insns.
2046 ----------------------------------------------------------------------------*/
2049 /* Return true if the contents of two df_ref's are identical.
2050 It ignores DF_REF_REG_MARKER and DF_REF_MW_HARDREG. */
2052 static bool
2053 df_ref_equal_p (df_ref ref1, df_ref ref2)
2055 if (!ref2)
2056 return false;
2058 if (ref1 == ref2)
2059 return true;
2061 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
2062 || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
2063 || DF_REF_REG (ref1) != DF_REF_REG (ref2)
2064 || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
2065 || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2066 != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2067 || DF_REF_BB (ref1) != DF_REF_BB (ref2)
2068 || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
2069 return false;
2071 switch (DF_REF_CLASS (ref1))
2073 case DF_REF_ARTIFICIAL:
2074 case DF_REF_BASE:
2075 return true;
2077 case DF_REF_REGULAR:
2078 return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);
2080 default:
2081 gcc_unreachable ();
2083 return false;
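
/* Illustrative sketch, compiled out; not part of df-scan.c proper.  It
   shows the "compare everything except transient flag bits" idiom that
   df_ref_equal_p uses, on a toy struct.  The toy_* names and the
   TOY_MARKER bit are hypothetical.  */
#if 0
struct toy_ref { unsigned int regno; unsigned int flags; };
#define TOY_MARKER 0x1	/* Transient bit, ignored for equality.  */

static bool
toy_ref_equal_p (const struct toy_ref *a, const struct toy_ref *b)
{
  /* Mask the marker away before comparing flags, just as
     df_ref_equal_p masks DF_REF_REG_MARKER and DF_REF_MW_HARDREG.  */
  return a->regno == b->regno
	 && (a->flags & ~TOY_MARKER) == (b->flags & ~TOY_MARKER);
}
#endif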
2087 /* Compare REF1 and REF2 for sorting. This is only called from places
2088 where all of the refs are of the same type, in the same insn, and
2089 have the same bb. So these fields are not checked. */
2091 static int
2092 df_ref_compare (df_ref ref1, df_ref ref2)
2094 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
2095 return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);
2097 if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
2098 return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
2100 if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
2101 return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
2103 if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
2104 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2106 /* Cannot look at the LOC field on artificial refs. */
2107 if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
2108 && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
2109 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2111 if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
2113 /* If two refs are identical except that one of them is from
2114 a mw and one is not, we need to have the one with the mw
2115 first. */
2116 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
2117 DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
2118 return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
2119 else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
2120 return -1;
2121 else
2122 return 1;
2125 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
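
/* Illustrative sketch, compiled out: the multi-key comparator shape
   used by df_ref_compare.  Keys are compared in priority order and a
   monotonically increasing creation stamp breaks the remaining ties,
   which makes the subsequent qsort deterministic.  toy_key is
   hypothetical.  */
#if 0
struct toy_key { int klass; int regno; int order; };

static int
toy_key_compare (const struct toy_key *a, const struct toy_key *b)
{
  if (a->klass != b->klass)
    return a->klass - b->klass;
  if (a->regno != b->regno)
    return a->regno - b->regno;
  /* Creation order is the final tie-breaker, mirroring DF_REF_ORDER
     above.  */
  return a->order - b->order;
}
#endif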
2128 /* Like df_ref_compare, but compare two df_ref* pointers R1 and R2. */
2130 static int
2131 df_ref_ptr_compare (const void *r1, const void *r2)
2133 return df_ref_compare (*(const df_ref *) r1, *(const df_ref *) r2);
2136 static void
2137 df_swap_refs (vec<df_ref, va_heap> *ref_vec, int i, int j)
2139 df_ref tmp = (*ref_vec)[i];
2140 (*ref_vec)[i] = (*ref_vec)[j];
2141 (*ref_vec)[j] = tmp;
2144 /* Sort and compress a set of refs. */
2146 static void
2147 df_sort_and_compress_refs (vec<df_ref, va_heap> *ref_vec)
2149 unsigned int count;
2150 unsigned int i;
2151 unsigned int dist = 0;
2153 count = ref_vec->length ();
2155 /* If there are 1 or 0 elements, there is nothing to do. */
2156 if (count < 2)
2157 return;
2158 else if (count == 2)
2160 df_ref r0 = (*ref_vec)[0];
2161 df_ref r1 = (*ref_vec)[1];
2162 if (df_ref_compare (r0, r1) > 0)
2163 df_swap_refs (ref_vec, 0, 1);
2165 else
2167 for (i = 0; i < count - 1; i++)
2169 df_ref r0 = (*ref_vec)[i];
2170 df_ref r1 = (*ref_vec)[i + 1];
2171 if (df_ref_compare (r0, r1) >= 0)
2172 break;
2174 /* If the array is already strictly ordered,
2175 which is the most common case when COUNT is large
2176 (as happens for CALL insns),
2177 there is no need to sort and filter out duplicates;
2178 simply return.
2179 Make sure DF_GET_ADD_REFS adds refs in the increasing order
2180 of DF_REF_COMPARE. */
2181 if (i == count - 1)
2182 return;
2183 ref_vec->qsort (df_ref_ptr_compare);
2186 for (i = 0; i < count - dist; i++)
2188 /* Find the next ref that is not equal to the current ref. */
2189 while (i + dist + 1 < count
2190 && df_ref_equal_p ((*ref_vec)[i],
2191 (*ref_vec)[i + dist + 1]))
2193 df_free_ref ((*ref_vec)[i + dist + 1]);
2194 dist++;
2196 /* Copy it down to the next position. */
2197 if (dist && i + dist + 1 < count)
2198 (*ref_vec)[i + 1] = (*ref_vec)[i + dist + 1];
2201 count -= dist;
2202 ref_vec->truncate (count);
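
/* Illustrative sketch, compiled out: the DIST-based compaction used
   above, on a plain int array.  Duplicates in the sorted input are
   skipped (the real code also frees them) and each surviving element
   is slid DIST slots to the left in a single pass.  */
#if 0
static unsigned int
toy_compress_sorted (int *v, unsigned int count)
{
  unsigned int i;
  unsigned int dist = 0;
  for (i = 0; i < count - dist; i++)
    {
      /* Skip the run of elements equal to v[i].  */
      while (i + dist + 1 < count && v[i] == v[i + dist + 1])
	dist++;
      /* Copy the next distinct element down to its final slot.  */
      if (dist && i + dist + 1 < count)
	v[i + 1] = v[i + dist + 1];
    }
  return count - dist;	/* New length, as in ref_vec->truncate.  */
}
/* E.g. {1, 1, 2, 2, 3} compresses to {1, 2, 3} and returns 3.  */
#endif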
2206 /* Return true if the contents of two df_mw_hardregs are identical.
2207 Unlike df_ref_equal_p, all of the flags are significant here. */
2209 static bool
2210 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2212 if (!mw2)
2213 return false;
2214 return (mw1 == mw2) ||
2215 (mw1->mw_reg == mw2->mw_reg
2216 && mw1->type == mw2->type
2217 && mw1->flags == mw2->flags
2218 && mw1->start_regno == mw2->start_regno
2219 && mw1->end_regno == mw2->end_regno);
2223 /* Compare MW1 and MW2 for sorting. */
2225 static int
2226 df_mw_compare (const df_mw_hardreg *mw1, const df_mw_hardreg *mw2)
2228 if (mw1->type != mw2->type)
2229 return mw1->type - mw2->type;
2231 if (mw1->flags != mw2->flags)
2232 return mw1->flags - mw2->flags;
2234 if (mw1->start_regno != mw2->start_regno)
2235 return mw1->start_regno - mw2->start_regno;
2237 if (mw1->end_regno != mw2->end_regno)
2238 return mw1->end_regno - mw2->end_regno;
2240 if (mw1->mw_reg != mw2->mw_reg)
2241 return mw1->mw_order - mw2->mw_order;
2243 return 0;
2246 /* Like df_mw_compare, but compare two df_mw_hardreg** pointers R1 and R2. */
2248 static int
2249 df_mw_ptr_compare (const void *m1, const void *m2)
2251 return df_mw_compare (*(const df_mw_hardreg *const *) m1,
2252 *(const df_mw_hardreg *const *) m2);
2255 /* Sort and compress a set of mw_hardregs. */
2257 static void
2258 df_sort_and_compress_mws (vec<df_mw_hardreg_ptr, va_heap> *mw_vec)
2260 unsigned int count;
2261 struct df_scan_problem_data *problem_data
2262 = (struct df_scan_problem_data *) df_scan->problem_data;
2263 unsigned int i;
2264 unsigned int dist = 0;
2266 count = mw_vec->length ();
2267 if (count < 2)
2268 return;
2269 else if (count == 2)
2271 struct df_mw_hardreg *m0 = (*mw_vec)[0];
2272 struct df_mw_hardreg *m1 = (*mw_vec)[1];
2273 if (df_mw_compare (m0, m1) > 0)
2275 struct df_mw_hardreg *tmp = (*mw_vec)[0];
2276 (*mw_vec)[0] = (*mw_vec)[1];
2277 (*mw_vec)[1] = tmp;
2280 else
2281 mw_vec->qsort (df_mw_ptr_compare);
2283 for (i = 0; i < count - dist; i++)
2285 /* Find the next ref that is not equal to the current ref. */
2286 while (i + dist + 1 < count
2287 && df_mw_equal_p ((*mw_vec)[i], (*mw_vec)[i + dist + 1]))
2289 pool_free (problem_data->mw_reg_pool,
2290 (*mw_vec)[i + dist + 1]);
2291 dist++;
2293 /* Copy it down to the next position. */
2294 if (dist && i + dist + 1 < count)
2295 (*mw_vec)[i + 1] = (*mw_vec)[i + dist + 1];
2298 count -= dist;
2299 mw_vec->truncate (count);
2303 /* Sort and remove duplicates from the COLLECTION_REC. */
2305 static void
2306 df_canonize_collection_rec (struct df_collection_rec *collection_rec)
2308 df_sort_and_compress_refs (&collection_rec->def_vec);
2309 df_sort_and_compress_refs (&collection_rec->use_vec);
2310 df_sort_and_compress_refs (&collection_rec->eq_use_vec);
2311 df_sort_and_compress_mws (&collection_rec->mw_vec);
2315 /* Add the new df_ref to appropriate reg_info/ref_info chains. */
2317 static void
2318 df_install_ref (df_ref this_ref,
2319 struct df_reg_info *reg_info,
2320 struct df_ref_info *ref_info,
2321 bool add_to_table)
2323 unsigned int regno = DF_REF_REGNO (this_ref);
2324 /* Add the ref to the reg_{def,use,eq_use} chain. */
2325 df_ref head = reg_info->reg_chain;
2327 reg_info->reg_chain = this_ref;
2328 reg_info->n_refs++;
2330 if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
2332 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
2333 df->hard_regs_live_count[regno]++;
2336 gcc_checking_assert (DF_REF_NEXT_REG (this_ref) == NULL
2337 && DF_REF_PREV_REG (this_ref) == NULL);
2339 DF_REF_NEXT_REG (this_ref) = head;
2341 /* We cannot actually link to the head of the chain. */
2342 DF_REF_PREV_REG (this_ref) = NULL;
2344 if (head)
2345 DF_REF_PREV_REG (head) = this_ref;
2347 if (add_to_table)
2349 gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
2350 df_check_and_grow_ref_info (ref_info, 1);
2351 DF_REF_ID (this_ref) = ref_info->table_size;
2352 /* Add the ref to the big array of defs. */
2353 ref_info->refs[ref_info->table_size] = this_ref;
2354 ref_info->table_size++;
2356 else
2357 DF_REF_ID (this_ref) = -1;
2359 ref_info->total_size++;
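
/* Illustrative sketch, compiled out: pushing a node onto the head of
   a doubly-linked chain, the same shape as the DF_REF_NEXT_REG /
   DF_REF_PREV_REG updates above.  toy_node is hypothetical.  */
#if 0
struct toy_node { struct toy_node *next, *prev; };

static void
toy_chain_push (struct toy_node **head, struct toy_node *n)
{
  n->next = *head;
  n->prev = NULL;	/* The new head has no predecessor.  */
  if (*head)
    (*head)->prev = n;
  *head = n;
}
#endif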
2363 /* This function takes one of the groups of refs (defs, uses or
2364 eq_uses) and installs the entire group into the insn. It also adds
2365 each of these refs into the appropriate chains. */
2367 static df_ref
2368 df_install_refs (basic_block bb,
2369 const vec<df_ref, va_heap> *old_vec,
2370 struct df_reg_info **reg_info,
2371 struct df_ref_info *ref_info,
2372 bool is_notes)
2374 unsigned int count = old_vec->length ();
2375 if (count)
2377 bool add_to_table;
2378 df_ref this_ref;
2379 unsigned int ix;
2381 switch (ref_info->ref_order)
2383 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
2384 case DF_REF_ORDER_BY_REG_WITH_NOTES:
2385 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
2386 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
2387 add_to_table = true;
2388 break;
2389 case DF_REF_ORDER_UNORDERED:
2390 case DF_REF_ORDER_BY_REG:
2391 case DF_REF_ORDER_BY_INSN:
2392 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
2393 add_to_table = !is_notes;
2394 break;
2395 default:
2396 add_to_table = false;
2397 break;
2400 /* Do not add if ref is not in the right blocks. */
2401 if (add_to_table && df->analyze_subset)
2402 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
2404 FOR_EACH_VEC_ELT (*old_vec, ix, this_ref)
2406 DF_REF_NEXT_LOC (this_ref) = (ix + 1 < old_vec->length ()
2407 ? (*old_vec)[ix + 1]
2408 : NULL);
2409 df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
2410 ref_info, add_to_table);
2412 return (*old_vec)[0];
2414 else
2415 return 0;
2419 /* This function takes the mws and installs the entire group into
2420 the insn. */
2422 static struct df_mw_hardreg *
2423 df_install_mws (const vec<df_mw_hardreg_ptr, va_heap> *old_vec)
2425 unsigned int count = old_vec->length ();
2426 if (count)
2428 for (unsigned int i = 0; i < count - 1; i++)
2429 DF_MWS_NEXT ((*old_vec)[i]) = (*old_vec)[i + 1];
2430 DF_MWS_NEXT ((*old_vec)[count - 1]) = 0;
2431 return (*old_vec)[0];
2433 else
2434 return 0;
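
/* Illustrative sketch, compiled out: threading an array of nodes into
   a NULL-terminated singly linked chain and returning its head, as
   df_install_mws does through DF_MWS_NEXT.  */
#if 0
struct toy_mw { struct toy_mw *next; };

static struct toy_mw *
toy_thread_chain (struct toy_mw **vec, unsigned int count)
{
  unsigned int i;
  if (count == 0)
    return NULL;
  for (i = 0; i < count - 1; i++)
    vec[i]->next = vec[i + 1];
  vec[count - 1]->next = NULL;	/* Terminate the chain.  */
  return vec[0];
}
#endif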
2438 /* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
2439 chains and update other necessary information. */
2441 static void
2442 df_refs_add_to_chains (struct df_collection_rec *collection_rec,
2443 basic_block bb, rtx_insn *insn, unsigned int flags)
2445 if (insn)
2447 struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
2448 /* If there is a vector in the collection rec, add it to the
2449 insn. A null rec is a signal that the caller will handle the
2450 chain specially. */
2451 if (flags & copy_defs)
2453 gcc_checking_assert (!insn_rec->defs);
2454 insn_rec->defs
2455 = df_install_refs (bb, &collection_rec->def_vec,
2456 df->def_regs,
2457 &df->def_info, false);
2459 if (flags & copy_uses)
2461 gcc_checking_assert (!insn_rec->uses);
2462 insn_rec->uses
2463 = df_install_refs (bb, &collection_rec->use_vec,
2464 df->use_regs,
2465 &df->use_info, false);
2467 if (flags & copy_eq_uses)
2469 gcc_checking_assert (!insn_rec->eq_uses);
2470 insn_rec->eq_uses
2471 = df_install_refs (bb, &collection_rec->eq_use_vec,
2472 df->eq_use_regs,
2473 &df->use_info, true);
2475 if (flags & copy_mw)
2477 gcc_checking_assert (!insn_rec->mw_hardregs);
2478 insn_rec->mw_hardregs
2479 = df_install_mws (&collection_rec->mw_vec);
2482 else
2484 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
2486 gcc_checking_assert (!bb_info->artificial_defs);
2487 bb_info->artificial_defs
2488 = df_install_refs (bb, &collection_rec->def_vec,
2489 df->def_regs,
2490 &df->def_info, false);
2491 gcc_checking_assert (!bb_info->artificial_uses);
2492 bb_info->artificial_uses
2493 = df_install_refs (bb, &collection_rec->use_vec,
2494 df->use_regs,
2495 &df->use_info, false);
2500 /* Allocate a ref and initialize its fields. */
2502 static df_ref
2503 df_ref_create_structure (enum df_ref_class cl,
2504 struct df_collection_rec *collection_rec,
2505 rtx reg, rtx *loc,
2506 basic_block bb, struct df_insn_info *info,
2507 enum df_ref_type ref_type,
2508 int ref_flags)
2510 df_ref this_ref = NULL;
2511 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2512 struct df_scan_problem_data *problem_data
2513 = (struct df_scan_problem_data *) df_scan->problem_data;
2515 switch (cl)
2517 case DF_REF_BASE:
2518 this_ref = (df_ref) pool_alloc (problem_data->ref_base_pool);
2519 gcc_checking_assert (loc == NULL);
2520 break;
2522 case DF_REF_ARTIFICIAL:
2523 this_ref = (df_ref) pool_alloc (problem_data->ref_artificial_pool);
2524 this_ref->artificial_ref.bb = bb;
2525 gcc_checking_assert (loc == NULL);
2526 break;
2528 case DF_REF_REGULAR:
2529 this_ref = (df_ref) pool_alloc (problem_data->ref_regular_pool);
2530 this_ref->regular_ref.loc = loc;
2531 gcc_checking_assert (loc);
2532 break;
2535 DF_REF_CLASS (this_ref) = cl;
2536 DF_REF_ID (this_ref) = -1;
2537 DF_REF_REG (this_ref) = reg;
2538 DF_REF_REGNO (this_ref) = regno;
2539 DF_REF_TYPE (this_ref) = ref_type;
2540 DF_REF_INSN_INFO (this_ref) = info;
2541 DF_REF_CHAIN (this_ref) = NULL;
2542 DF_REF_FLAGS (this_ref) = ref_flags;
2543 DF_REF_NEXT_REG (this_ref) = NULL;
2544 DF_REF_PREV_REG (this_ref) = NULL;
2545 DF_REF_ORDER (this_ref) = df->ref_order++;
2547 /* We need to clear this bit because fwprop (and in the future
2548 possibly other optimizations) sometimes creates new refs using old
2549 refs as the model. */
2550 DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
2552 /* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
2553 if (regno < FIRST_PSEUDO_REGISTER
2554 && !DF_REF_IS_ARTIFICIAL (this_ref)
2555 && !DEBUG_INSN_P (DF_REF_INSN (this_ref)))
2557 if (DF_REF_REG_DEF_P (this_ref))
2559 if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
2560 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2562 else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
2563 && (regno == FRAME_POINTER_REGNUM
2564 || regno == ARG_POINTER_REGNUM)))
2565 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2568 if (collection_rec)
2570 if (DF_REF_REG_DEF_P (this_ref))
2571 collection_rec->def_vec.safe_push (this_ref);
2572 else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
2573 collection_rec->eq_use_vec.safe_push (this_ref);
2574 else
2575 collection_rec->use_vec.safe_push (this_ref);
2577 else
2578 df_install_ref_incremental (this_ref);
2580 return this_ref;
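
/* Illustrative sketch, compiled out: the pool-per-class allocation
   idea above, reduced to plain malloc (needs <stdlib.h> if compiled
   standalone).  Each ref class pays only for the fields it actually
   uses; the toy_* layouts are hypothetical.  */
#if 0
struct toy_base_ref { unsigned int regno; };
struct toy_regular_ref { struct toy_base_ref base; void *loc; };

static struct toy_base_ref *
toy_ref_alloc (bool regular_p)
{
  /* Only regular refs carry a LOC, so the common base refs can come
     from a smaller allocation.  */
  size_t size = regular_p ? sizeof (struct toy_regular_ref)
			  : sizeof (struct toy_base_ref);
  return (struct toy_base_ref *) malloc (size);
}
#endif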
2584 /* Create new references of type DF_REF_TYPE for each part of register REG
2585 at address LOC within INSN of BB. */
2588 static void
2589 df_ref_record (enum df_ref_class cl,
2590 struct df_collection_rec *collection_rec,
2591 rtx reg, rtx *loc,
2592 basic_block bb, struct df_insn_info *insn_info,
2593 enum df_ref_type ref_type,
2594 int ref_flags)
2596 unsigned int regno;
2598 gcc_checking_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
2600 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2601 if (regno < FIRST_PSEUDO_REGISTER)
2603 struct df_mw_hardreg *hardreg = NULL;
2604 struct df_scan_problem_data *problem_data
2605 = (struct df_scan_problem_data *) df_scan->problem_data;
2606 unsigned int i;
2607 unsigned int endregno;
2608 df_ref ref;
2610 if (GET_CODE (reg) == SUBREG)
2612 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
2613 SUBREG_BYTE (reg), GET_MODE (reg));
2614 endregno = regno + subreg_nregs (reg);
2616 else
2617 endregno = END_HARD_REGNO (reg);
2619 /* If this is a multiword hardreg, we create some extra
2620 data structures that will enable us to easily build REG_DEAD
2621 and REG_UNUSED notes. */
2622 if (collection_rec
2623 && (endregno != regno + 1) && insn_info)
2625 /* Sets to a subreg of a multiword register are partial.
2626 Sets to a non-subreg of a multiword register are not. */
2627 if (GET_CODE (reg) == SUBREG)
2628 ref_flags |= DF_REF_PARTIAL;
2629 ref_flags |= DF_REF_MW_HARDREG;
2631 hardreg = (struct df_mw_hardreg *) pool_alloc (problem_data->mw_reg_pool);
2632 hardreg->type = ref_type;
2633 hardreg->flags = ref_flags;
2634 hardreg->mw_reg = reg;
2635 hardreg->start_regno = regno;
2636 hardreg->end_regno = endregno - 1;
2637 hardreg->mw_order = df->ref_order++;
2638 collection_rec->mw_vec.safe_push (hardreg);
2641 for (i = regno; i < endregno; i++)
2643 ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
2644 bb, insn_info, ref_type, ref_flags);
2646 gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
2649 else
2651 df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
2652 ref_type, ref_flags);
2657 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
2658 covered by the outer mode is smaller than that covered by the inner mode
2659 is a read-modify-write operation.
2660 This function returns true iff the SUBREG X is such a SUBREG. */
2662 bool
2663 df_read_modify_subreg_p (rtx x)
2665 unsigned int isize, osize;
2666 if (GET_CODE (x) != SUBREG)
2667 return false;
2668 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2669 osize = GET_MODE_SIZE (GET_MODE (x));
2670 return isize > osize
2671 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
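
/* Illustrative sketch, compiled out: the size test above on made-up
   numbers.  With an inner mode of 8 bytes, an outer mode of 4 bytes
   and a natural register size of 4 bytes, only half of the inner
   register is written, so the set is a read-modify-write.  */
#if 0
static bool
toy_read_modify_p (unsigned int isize, unsigned int osize,
		   unsigned int natural)
{
  return isize > osize && isize > natural;
}
/* toy_read_modify_p (8, 4, 4) is true: e.g. an SImode subreg of a
   DImode register on a target with 4-byte words.  */
#endif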
2675 /* Process all the registers defined in the rtx pointed to by LOC.
2676 Autoincrement/decrement definitions will be picked up by df_uses_record.
2677 Any change here has to be matched in df_find_hard_reg_defs_1. */
2679 static void
2680 df_def_record_1 (struct df_collection_rec *collection_rec,
2681 rtx *loc, basic_block bb, struct df_insn_info *insn_info,
2682 int flags)
2684 rtx dst = *loc;
2686 /* It is legal to have a set destination be a parallel. */
2687 if (GET_CODE (dst) == PARALLEL)
2689 int i;
2690 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2692 rtx temp = XVECEXP (dst, 0, i);
2693 gcc_assert (GET_CODE (temp) == EXPR_LIST);
2694 df_def_record_1 (collection_rec, &XEXP (temp, 0),
2695 bb, insn_info, flags);
2697 return;
2700 if (GET_CODE (dst) == STRICT_LOW_PART)
2702 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;
2704 loc = &XEXP (dst, 0);
2705 dst = *loc;
2708 if (GET_CODE (dst) == ZERO_EXTRACT)
2710 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;
2712 loc = &XEXP (dst, 0);
2713 dst = *loc;
2716 /* At this point if we do not have a reg or a subreg, just return. */
2717 if (REG_P (dst))
2719 df_ref_record (DF_REF_REGULAR, collection_rec,
2720 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
2722 /* We want to keep sp alive everywhere, by making all
2723 writes to sp also uses of sp. */
2724 if (REGNO (dst) == STACK_POINTER_REGNUM)
2725 df_ref_record (DF_REF_BASE, collection_rec,
2726 dst, NULL, bb, insn_info, DF_REF_REG_USE, flags);
2728 else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
2730 if (df_read_modify_subreg_p (dst))
2731 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
2733 flags |= DF_REF_SUBREG;
2735 df_ref_record (DF_REF_REGULAR, collection_rec,
2736 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
2741 /* Process all the registers defined in the pattern rtx, X. Any change
2742 here has to be matched in df_find_hard_reg_defs. */
2744 static void
2745 df_defs_record (struct df_collection_rec *collection_rec,
2746 rtx x, basic_block bb, struct df_insn_info *insn_info,
2747 int flags)
2749 RTX_CODE code = GET_CODE (x);
2750 int i;
2752 switch (code)
2754 case SET:
2755 df_def_record_1 (collection_rec, &SET_DEST (x), bb, insn_info, flags);
2756 break;
2758 case CLOBBER:
2759 flags |= DF_REF_MUST_CLOBBER;
2760 df_def_record_1 (collection_rec, &XEXP (x, 0), bb, insn_info, flags);
2761 break;
2763 case COND_EXEC:
2764 df_defs_record (collection_rec, COND_EXEC_CODE (x),
2765 bb, insn_info, DF_REF_CONDITIONAL);
2766 break;
2768 case PARALLEL:
2769 for (i = 0; i < XVECLEN (x, 0); i++)
2770 df_defs_record (collection_rec, XVECEXP (x, 0, i),
2771 bb, insn_info, flags);
2772 break;
2773 default:
2774 /* No DEFs to record in other cases */
2775 break;
2779 /* Set bits in *DEFS for hard registers found in the rtx DST, which is the
2780 destination of a set or clobber. This has to match the logic in
2781 df_def_record_1. */
2783 static void
2784 df_find_hard_reg_defs_1 (rtx dst, HARD_REG_SET *defs)
2786 /* It is legal to have a set destination be a parallel. */
2787 if (GET_CODE (dst) == PARALLEL)
2789 int i;
2790 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2792 rtx temp = XVECEXP (dst, 0, i);
2793 gcc_assert (GET_CODE (temp) == EXPR_LIST);
2794 df_find_hard_reg_defs_1 (XEXP (temp, 0), defs);
2796 return;
2799 if (GET_CODE (dst) == STRICT_LOW_PART)
2800 dst = XEXP (dst, 0);
2802 if (GET_CODE (dst) == ZERO_EXTRACT)
2803 dst = XEXP (dst, 0);
2805 /* At this point if we do not have a reg or a subreg, just return. */
2806 if (REG_P (dst) && HARD_REGISTER_P (dst))
2807 SET_HARD_REG_BIT (*defs, REGNO (dst));
2808 else if (GET_CODE (dst) == SUBREG
2809 && REG_P (SUBREG_REG (dst)) && HARD_REGISTER_P (dst))
2810 SET_HARD_REG_BIT (*defs, REGNO (SUBREG_REG (dst)));
2813 /* Set bits in *DEFS for hard registers defined in the pattern X. This
2814 has to match the logic in df_defs_record. */
2816 static void
2817 df_find_hard_reg_defs (rtx x, HARD_REG_SET *defs)
2819 RTX_CODE code = GET_CODE (x);
2820 int i;
2822 switch (code)
2824 case SET:
2825 df_find_hard_reg_defs_1 (SET_DEST (x), defs);
2826 break;
2828 case CLOBBER:
2829 df_find_hard_reg_defs_1 (XEXP (x, 0), defs);
2830 break;
2832 case COND_EXEC:
2833 df_find_hard_reg_defs (COND_EXEC_CODE (x), defs);
2834 break;
2836 case PARALLEL:
2837 for (i = 0; i < XVECLEN (x, 0); i++)
2838 df_find_hard_reg_defs (XVECEXP (x, 0, i), defs);
2839 break;
2840 default:
2841 /* No DEFs to record in other cases */
2842 break;
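
/* Illustrative sketch, compiled out: the walk shape shared by
   df_defs_record and df_find_hard_reg_defs.  Dispatch on the node
   code, recurse through wrapper and vector nodes, and ignore
   everything else.  All toy_* names are hypothetical; the "set" is a
   single word with one bit per register.  */
#if 0
enum toy_code { TOY_SET, TOY_WRAP, TOY_VEC, TOY_OTHER };
struct toy_pat
{
  enum toy_code code;
  int dest;			/* For TOY_SET; assumed < 64.  */
  struct toy_pat *inner;	/* For TOY_WRAP.  */
  struct toy_pat **elts;	/* For TOY_VEC.  */
  int nelts;
};

static void
toy_find_defs (const struct toy_pat *x, unsigned long long *defs)
{
  int i;
  switch (x->code)
    {
    case TOY_SET:
      *defs |= 1ull << x->dest;
      break;
    case TOY_WRAP:			/* Like COND_EXEC.  */
      toy_find_defs (x->inner, defs);
      break;
    case TOY_VEC:			/* Like PARALLEL.  */
      for (i = 0; i < x->nelts; i++)
	toy_find_defs (x->elts[i], defs);
      break;
    default:
      break;				/* No DEFs elsewhere.  */
    }
}
#endif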
2847 /* Process all the registers used in the rtx at address LOC. */
2849 static void
2850 df_uses_record (struct df_collection_rec *collection_rec,
2851 rtx *loc, enum df_ref_type ref_type,
2852 basic_block bb, struct df_insn_info *insn_info,
2853 int flags)
2855 RTX_CODE code;
2856 rtx x;
2858 retry:
2859 x = *loc;
2860 if (!x)
2861 return;
2862 code = GET_CODE (x);
2863 switch (code)
2865 case LABEL_REF:
2866 case SYMBOL_REF:
2867 case CONST:
2868 CASE_CONST_ANY:
2869 case PC:
2870 case CC0:
2871 case ADDR_VEC:
2872 case ADDR_DIFF_VEC:
2873 return;
2875 case CLOBBER:
2876 /* If we are clobbering a MEM, mark any registers inside the address
2877 as being used. */
2878 if (MEM_P (XEXP (x, 0)))
2879 df_uses_record (collection_rec,
2880 &XEXP (XEXP (x, 0), 0),
2881 DF_REF_REG_MEM_STORE,
2882 bb, insn_info,
2883 flags);
2885 /* If we're clobbering a REG then we have a def so ignore. */
2886 return;
2888 case MEM:
2889 df_uses_record (collection_rec,
2890 &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
2891 bb, insn_info, flags & DF_REF_IN_NOTE);
2892 return;
2894 case SUBREG:
2895 /* While we're here, optimize this case. */
2896 flags |= DF_REF_PARTIAL;
2897 /* In case the SUBREG is not of a REG, do not optimize. */
2898 if (!REG_P (SUBREG_REG (x)))
2900 loc = &SUBREG_REG (x);
2901 df_uses_record (collection_rec, loc, ref_type, bb, insn_info, flags);
2902 return;
2904 /* ... Fall through ... */
2906 case REG:
2907 df_ref_record (DF_REF_REGULAR, collection_rec,
2908 x, loc, bb, insn_info,
2909 ref_type, flags);
2910 return;
2912 case SIGN_EXTRACT:
2913 case ZERO_EXTRACT:
2915 df_uses_record (collection_rec,
2916 &XEXP (x, 1), ref_type, bb, insn_info, flags);
2917 df_uses_record (collection_rec,
2918 &XEXP (x, 2), ref_type, bb, insn_info, flags);
2920 /* If the parameters to the zero or sign extract are
2921 constants, strip them off and recurse, otherwise there is
2922 no information that we can gain from this operation. */
2923 if (code == ZERO_EXTRACT)
2924 flags |= DF_REF_ZERO_EXTRACT;
2925 else
2926 flags |= DF_REF_SIGN_EXTRACT;
2928 df_uses_record (collection_rec,
2929 &XEXP (x, 0), ref_type, bb, insn_info, flags);
2930 return;
2932 break;
2934 case SET:
2936 rtx dst = SET_DEST (x);
2937 gcc_assert (!(flags & DF_REF_IN_NOTE));
2938 df_uses_record (collection_rec,
2939 &SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags);
2941 switch (GET_CODE (dst))
2943 case SUBREG:
2944 if (df_read_modify_subreg_p (dst))
2946 df_uses_record (collection_rec, &SUBREG_REG (dst),
2947 DF_REF_REG_USE, bb, insn_info,
2948 flags | DF_REF_READ_WRITE | DF_REF_SUBREG);
2949 break;
2951 /* Fall through. */
2952 case REG:
2953 case PARALLEL:
2954 case SCRATCH:
2955 case PC:
2956 case CC0:
2957 break;
2958 case MEM:
2959 df_uses_record (collection_rec, &XEXP (dst, 0),
2960 DF_REF_REG_MEM_STORE, bb, insn_info, flags);
2961 break;
2962 case STRICT_LOW_PART:
2964 rtx *temp = &XEXP (dst, 0);
2965 /* A strict_low_part uses the whole REG and not just the
2966 SUBREG. */
2967 dst = XEXP (dst, 0);
2968 df_uses_record (collection_rec,
2969 (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
2970 DF_REF_REG_USE, bb, insn_info,
2971 DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART);
2973 break;
2974 case ZERO_EXTRACT:
2976 df_uses_record (collection_rec, &XEXP (dst, 1),
2977 DF_REF_REG_USE, bb, insn_info, flags);
2978 df_uses_record (collection_rec, &XEXP (dst, 2),
2979 DF_REF_REG_USE, bb, insn_info, flags);
2980 if (GET_CODE (XEXP (dst, 0)) == MEM
2981 df_uses_record (collection_rec, &XEXP (dst, 0),
2982 DF_REF_REG_USE, bb, insn_info,
2983 flags);
2984 else
2985 df_uses_record (collection_rec, &XEXP (dst, 0),
2986 DF_REF_REG_USE, bb, insn_info,
2987 DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT);
2989 break;
2991 default:
2992 gcc_unreachable ();
2994 return;
2997 case RETURN:
2998 case SIMPLE_RETURN:
2999 break;
3001 case ASM_OPERANDS:
3002 case UNSPEC_VOLATILE:
3003 case TRAP_IF:
3004 case ASM_INPUT:
3006 /* Traditional and volatile asm instructions must be
3007 considered to use and clobber all hard registers, all
3008 pseudo-registers and all of memory. So must TRAP_IF and
3009 UNSPEC_VOLATILE operations.
3011 Consider for instance a volatile asm that changes the fpu
3012 rounding mode. An insn should not be moved across this
3013 even if it only uses pseudo-regs because it might give an
3014 incorrectly rounded result.
3016 However, flow.c's liveness computation did *not* do this,
3017 giving the reasoning as " ?!? Unfortunately, marking all
3018 hard registers as live causes massive problems for the
3019 register allocator and marking all pseudos as live creates
3020 mountains of uninitialized variable warnings."
3022 In order to maintain the status quo with regard to liveness
3023 and uses, we do what flow.c did and just mark any regs we
3024 can find in ASM_OPERANDS as used. In global.c, asm insns are
3025 scanned and regs_asm_clobbered is filled out.
3027 For all ASM_OPERANDS, we must traverse the vector of input
3028 operands. We cannot just fall through here since then we
3029 would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
3030 which do not indicate traditional asms unlike their normal
3031 usage. */
3032 if (code == ASM_OPERANDS)
3034 int j;
3036 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
3037 df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
3038 DF_REF_REG_USE, bb, insn_info, flags);
3039 return;
3041 break;
3044 case VAR_LOCATION:
3045 df_uses_record (collection_rec,
3046 &PAT_VAR_LOCATION_LOC (x),
3047 DF_REF_REG_USE, bb, insn_info, flags);
3048 return;
3050 case PRE_DEC:
3051 case POST_DEC:
3052 case PRE_INC:
3053 case POST_INC:
3054 case PRE_MODIFY:
3055 case POST_MODIFY:
3056 gcc_assert (!DEBUG_INSN_P (insn_info->insn));
3057 /* Catch the def of the register being modified. */
3058 df_ref_record (DF_REF_REGULAR, collection_rec, XEXP (x, 0), &XEXP (x, 0),
3059 bb, insn_info,
3060 DF_REF_REG_DEF,
3061 flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);
3063 /* ... Fall through to handle uses ... */
3065 default:
3066 break;
3069 /* Recursively scan the operands of this expression. */
3071 const char *fmt = GET_RTX_FORMAT (code);
3072 int i;
3074 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3076 if (fmt[i] == 'e')
3078 /* Tail recursive case: save a function call level. */
3079 if (i == 0)
3081 loc = &XEXP (x, 0);
3082 goto retry;
3084 df_uses_record (collection_rec, &XEXP (x, i), ref_type,
3085 bb, insn_info, flags);
3087 else if (fmt[i] == 'E')
3089 int j;
3090 for (j = 0; j < XVECLEN (x, i); j++)
3091 df_uses_record (collection_rec,
3092 &XVECEXP (x, i, j), ref_type,
3093 bb, insn_info, flags);
3098 return;
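
/* Illustrative sketch, compiled out: the "goto retry" idiom used in
   df_uses_record above.  Recursing on operand 0 in tail position is
   replaced by iteration, so a deeply nested expression costs one
   stack frame instead of one frame per nesting level.  toy_expr is
   hypothetical.  */
#if 0
struct toy_expr { int code; struct toy_expr *op0; };

static void
toy_walk (struct toy_expr *x)
{
 retry:
  if (!x)
    return;
  /* ... process X here ...  */
  if (x->op0)
    {
      /* Tail position: loop instead of recursing.  */
      x = x->op0;
      goto retry;
    }
}
#endif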
3102 /* For all DF_REF_CONDITIONAL defs, add a corresponding use. */
3104 static void
3105 df_get_conditional_uses (struct df_collection_rec *collection_rec)
3107 unsigned int ix;
3108 df_ref ref;
3110 FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
3112 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3114 df_ref use;
3116 use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
3117 DF_REF_LOC (ref), DF_REF_BB (ref),
3118 DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
3119 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
3120 DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3126 /* Get call's extra defs and uses (track caller-saved registers). */
3128 static void
3129 df_get_call_refs (struct df_collection_rec *collection_rec,
3130 basic_block bb,
3131 struct df_insn_info *insn_info,
3132 int flags)
3134 rtx note;
3135 bool is_sibling_call;
3136 unsigned int i;
3137 HARD_REG_SET defs_generated;
3138 HARD_REG_SET fn_reg_set_usage;
3140 CLEAR_HARD_REG_SET (defs_generated);
3141 df_find_hard_reg_defs (PATTERN (insn_info->insn), &defs_generated);
3142 is_sibling_call = SIBLING_CALL_P (insn_info->insn);
3143 get_call_reg_set_usage (insn_info->insn, &fn_reg_set_usage,
3144 regs_invalidated_by_call);
3146 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3148 if (i == STACK_POINTER_REGNUM)
3149 /* The stack ptr is used (honorarily) by a CALL insn. */
3150 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3151 NULL, bb, insn_info, DF_REF_REG_USE,
3152 DF_REF_CALL_STACK_USAGE | flags);
3153 else if (global_regs[i])
3155 /* Calls to const functions cannot access any global registers and
3156 calls to pure functions cannot set them. All other calls may
3157 reference any of the global registers, so they are recorded as
3158 used. */
3159 if (!RTL_CONST_CALL_P (insn_info->insn))
3161 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3162 NULL, bb, insn_info, DF_REF_REG_USE, flags);
3163 if (!RTL_PURE_CALL_P (insn_info->insn))
3164 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3165 NULL, bb, insn_info, DF_REF_REG_DEF, flags);
3168 else if (TEST_HARD_REG_BIT (fn_reg_set_usage, i)
3169 /* no clobbers for regs that are the result of the call */
3170 && !TEST_HARD_REG_BIT (defs_generated, i)
3171 && (!is_sibling_call
3172 || !bitmap_bit_p (df->exit_block_uses, i)
3173 || refers_to_regno_p (i, i+1,
3174 crtl->return_rtx, NULL)))
3175 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3176 NULL, bb, insn_info, DF_REF_REG_DEF,
3177 DF_REF_MAY_CLOBBER | flags);
3180 /* Record the registers used to pass arguments, and those explicitly
3181 noted as clobbered. */
3182 for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
3183 note = XEXP (note, 1))
3185 if (GET_CODE (XEXP (note, 0)) == USE)
3186 df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
3187 DF_REF_REG_USE, bb, insn_info, flags);
3188 else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3190 if (REG_P (XEXP (XEXP (note, 0), 0)))
3192 unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
3193 if (!TEST_HARD_REG_BIT (defs_generated, regno))
3194 df_defs_record (collection_rec, XEXP (note, 0), bb,
3195 insn_info, flags);
3197 else
3198 df_uses_record (collection_rec, &XEXP (note, 0),
3199 DF_REF_REG_USE, bb, insn_info, flags);
3203 return;
3206 /* Collect all refs in the INSN. This function is free of any
3207 side effects; it will create and return a list of df_refs in the
3208 COLLECTION_REC without putting those refs into the existing ref chains
3209 and reg chains. */
3211 static void
3212 df_insn_refs_collect (struct df_collection_rec *collection_rec,
3213 basic_block bb, struct df_insn_info *insn_info)
3215 rtx note;
3216 bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);
3218 /* Clear out the collection record. */
3219 collection_rec->def_vec.truncate (0);
3220 collection_rec->use_vec.truncate (0);
3221 collection_rec->eq_use_vec.truncate (0);
3222 collection_rec->mw_vec.truncate (0);
3224 /* Process REG_EQUIV/REG_EQUAL notes. */
3225 for (note = REG_NOTES (insn_info->insn); note;
3226 note = XEXP (note, 1))
3228 switch (REG_NOTE_KIND (note))
3230 case REG_EQUIV:
3231 case REG_EQUAL:
3232 df_uses_record (collection_rec,
3233 &XEXP (note, 0), DF_REF_REG_USE,
3234 bb, insn_info, DF_REF_IN_NOTE);
3235 break;
3236 case REG_NON_LOCAL_GOTO:
3237 /* The frame ptr is used by a non-local goto. */
3238 df_ref_record (DF_REF_BASE, collection_rec,
3239 regno_reg_rtx[FRAME_POINTER_REGNUM],
3240 NULL, bb, insn_info,
3241 DF_REF_REG_USE, 0);
3242 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3243 df_ref_record (DF_REF_BASE, collection_rec,
3244 regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
3245 NULL, bb, insn_info,
3246 DF_REF_REG_USE, 0);
3247 #endif
3248 break;
3249 default:
3250 break;
3254 /* For CALL_INSNs, first record DF_REF_BASE register defs, as well as
3255 uses from CALL_INSN_FUNCTION_USAGE. */
3256 if (CALL_P (insn_info->insn))
3257 df_get_call_refs (collection_rec, bb, insn_info,
3258 (is_cond_exec) ? DF_REF_CONDITIONAL : 0);
3260 /* Record other defs. These should be mostly for DF_REF_REGULAR, so
3261 that a qsort on the defs is unnecessary in most cases. */
3262 df_defs_record (collection_rec,
3263 PATTERN (insn_info->insn), bb, insn_info, 0);
3265 /* Record the register uses. */
3266 df_uses_record (collection_rec,
3267 &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0);
3269 /* DF_REF_CONDITIONAL needs corresponding USES. */
3270 if (is_cond_exec)
3271 df_get_conditional_uses (collection_rec);
3273 df_canonize_collection_rec (collection_rec);
3276 /* Recompute the luids for the insns in BB. */
3278 void
3279 df_recompute_luids (basic_block bb)
3281 rtx_insn *insn;
3282 int luid = 0;
3284 df_grow_insn_info ();
3286 /* Scan the block an insn at a time from beginning to end. */
3287 FOR_BB_INSNS (bb, insn)
3289 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3290 /* Inserting labels does not always trigger the incremental
3291 rescanning. */
3292 if (!insn_info)
3294 gcc_assert (!INSN_P (insn));
3295 insn_info = df_insn_create_insn_record (insn);
3298 DF_INSN_INFO_LUID (insn_info) = luid;
3299 if (INSN_P (insn))
3300 luid++;
3305 /* Collect all artificial refs at the block level for BB and add them
3306 to COLLECTION_REC. */
3308 static void
3309 df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
3311 collection_rec->def_vec.truncate (0);
3312 collection_rec->use_vec.truncate (0);
3313 collection_rec->eq_use_vec.truncate (0);
3314 collection_rec->mw_vec.truncate (0);
3316 if (bb->index == ENTRY_BLOCK)
3318 df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
3319 return;
3321 else if (bb->index == EXIT_BLOCK)
3323 df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
3324 return;
3327 #ifdef EH_RETURN_DATA_REGNO
3328 if (bb_has_eh_pred (bb))
3330 unsigned int i;
3331 /* Mark the registers that will contain data for the handler. */
3332 for (i = 0; ; ++i)
3334 unsigned regno = EH_RETURN_DATA_REGNO (i);
3335 if (regno == INVALID_REGNUM)
3336 break;
3337 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3338 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3341 #endif
3343 /* Add the hard_frame_pointer if this block is the target of a
3344 non-local goto. */
3345 if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
3346 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
3347 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3349 /* Add the artificial uses. */
3350 if (bb->index >= NUM_FIXED_BLOCKS)
3352 bitmap_iterator bi;
3353 unsigned int regno;
3354 bitmap au = bb_has_eh_pred (bb)
3355 ? &df->eh_block_artificial_uses
3356 : &df->regular_block_artificial_uses;
3358 EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
3360 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3361 bb, NULL, DF_REF_REG_USE, 0);
3365 df_canonize_collection_rec (collection_rec);
3369 /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS. */
3371 void
3372 df_bb_refs_record (int bb_index, bool scan_insns)
3374 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
3375 rtx_insn *insn;
3376 int luid = 0;
3378 if (!df)
3379 return;
3381 df_collection_rec collection_rec;
3382 df_grow_bb_info (df_scan);
3383 if (scan_insns)
3384 /* Scan the block an insn at a time from beginning to end. */
3385 FOR_BB_INSNS (bb, insn)
3387 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3388 gcc_assert (!insn_info);
3390 insn_info = df_insn_create_insn_record (insn);
3391 if (INSN_P (insn))
3393 /* Record refs within INSN. */
3394 DF_INSN_INFO_LUID (insn_info) = luid++;
3395 df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
3396 df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
3398 DF_INSN_INFO_LUID (insn_info) = luid;
3401 /* Other block level artificial refs */
3402 df_bb_refs_collect (&collection_rec, bb);
3403 df_refs_add_to_chains (&collection_rec, bb, NULL, copy_all);
3405 /* Now that the block has been processed, set the block as dirty so
3406 LR and LIVE will get it processed. */
3407 df_set_bb_dirty (bb);
3411 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3412 block. */
3414 static void
3415 df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
3417 #ifdef EH_USES
3418 unsigned int i;
3419 #endif
3421 bitmap_clear (regular_block_artificial_uses);
3423 if (reload_completed)
3425 if (frame_pointer_needed)
3426 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3428 else
3429 /* Before reload, there are a few registers that must be forced
3430 live everywhere -- which might not already be the case for
3431 blocks within infinite loops. */
3433 unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
3435 /* Any reference to any pseudo before reload is a potential
3436 reference of the frame pointer. */
3437 bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
3439 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3440 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3441 #endif
3443 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3444 /* Pseudos with argument area equivalences may require
3445 reloading via the argument pointer. */
3446 if (fixed_regs[ARG_POINTER_REGNUM])
3447 bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
3448 #endif
3450 /* Any constant, or pseudo with constant equivalences, may
3451 require reloading from memory using the pic register. */
3452 if (picreg != INVALID_REGNUM
3453 && fixed_regs[picreg])
3454 bitmap_set_bit (regular_block_artificial_uses, picreg);
3456 /* The all-important stack pointer must always be live. */
3457 bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
3459 #ifdef EH_USES
3460 /* EH_USES registers are used:
3461 1) at all insns that might throw (calls or with -fnon-call-exceptions
3462 trapping insns)
3463 2) in all EH edges
3464 3) to support backtraces and/or debugging, anywhere between their
3465 initialization and where the saved registers are restored
3466 from them, including the cases where we don't reach the epilogue
3467 (noreturn call or infinite loop). */
3468 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3469 if (EH_USES (i))
3470 bitmap_set_bit (regular_block_artificial_uses, i);
3471 #endif
3475 /* Get the artificial use set for an eh block. */
3477 static void
3478 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3480 bitmap_clear (eh_block_artificial_uses);
3482 /* The following code (down through the arg_pointer setting) APPEARS
3483 to be necessary because there is nothing that actually
3484 describes what the exception handling code may actually need
3485 to keep alive. */
3486 if (reload_completed)
3488 if (frame_pointer_needed)
3490 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3491 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3492 bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3493 #endif
3495 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3496 if (fixed_regs[ARG_POINTER_REGNUM])
3497 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3498 #endif
3504 /*----------------------------------------------------------------------------
3505 Specialized hard register scanning functions.
3506 ----------------------------------------------------------------------------*/
3509 /* Mark a register in SET. Hard registers in large modes get all
3510 of their component registers set as well. */
3512 static void
3513 df_mark_reg (rtx reg, void *vset)
3515 bitmap set = (bitmap) vset;
3516 int regno = REGNO (reg);
3518 gcc_assert (GET_MODE (reg) != BLKmode);
3520 if (regno < FIRST_PSEUDO_REGISTER)
3522 int n = hard_regno_nregs[regno][GET_MODE (reg)];
3523 bitmap_set_range (set, regno, n);
3525 else
3526 bitmap_set_bit (set, regno);
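
/* Illustrative sketch, compiled out: marking every component register
   of a multi-register value, as df_mark_reg does via hard_regno_nregs
   and bitmap_set_range.  A word-sized set and an explicit NREGS
   parameter stand in for the real bitmap and mode lookup.  */
#if 0
static void
toy_mark_reg (unsigned long long *set, int regno, int nregs)
{
  int i;
  /* One bit per component hard register; assumes regno + nregs <= 64.  */
  for (i = 0; i < nregs; i++)
    *set |= 1ull << (regno + i);
}
#endif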
3530 /* Set the bit for regs that are considered being defined at the entry. */
3532 static void
3533 df_get_entry_block_def_set (bitmap entry_block_defs)
3535 rtx r;
3536 int i;
3538 bitmap_clear (entry_block_defs);
3540 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3542 if (global_regs[i])
3543 bitmap_set_bit (entry_block_defs, i);
3544 if (FUNCTION_ARG_REGNO_P (i))
3545 bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
3548 /* The always important stack pointer. */
3549 bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
3551 /* Once the prologue has been generated, all of these registers
3552 should just show up in the first regular block. */
3553 if (HAVE_prologue && epilogue_completed)
3555 /* Defs for the callee saved registers are inserted so that the
3556 pushes have some defining location. */
3557 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3558 if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
3559 bitmap_set_bit (entry_block_defs, i);
3562 r = targetm.calls.struct_value_rtx (current_function_decl, true);
3563 if (r && REG_P (r))
3564 bitmap_set_bit (entry_block_defs, REGNO (r));
3566 /* If the function has an incoming STATIC_CHAIN, it has to show up
3567 in the entry def set. */
3568 r = targetm.calls.static_chain (current_function_decl, true);
3569 if (r && REG_P (r))
3570 bitmap_set_bit (entry_block_defs, REGNO (r));
3572 if ((!reload_completed) || frame_pointer_needed)
3574 /* Any reference to any pseudo before reload is a potential
3575 reference of the frame pointer. */
3576 bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
3577 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3578 /* If they are different, also mark the hard frame pointer as live. */
3579 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3580 bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
3581 #endif
3584 /* These registers are live everywhere. */
3585 if (!reload_completed)
3587 #ifdef PIC_OFFSET_TABLE_REGNUM
3588 unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
3589 #endif
3591 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3592 /* Pseudos with argument area equivalences may require
3593 reloading via the argument pointer. */
3594 if (fixed_regs[ARG_POINTER_REGNUM])
3595 bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
3596 #endif
3598 #ifdef PIC_OFFSET_TABLE_REGNUM
3599 /* Any constant, or pseudo with constant equivalences, may
3600 require reloading from memory using the pic register. */
3601 if (picreg != INVALID_REGNUM
3602 && fixed_regs[picreg])
3603 bitmap_set_bit (entry_block_defs, picreg);
3604 #endif
3607 #ifdef INCOMING_RETURN_ADDR_RTX
3608 if (REG_P (INCOMING_RETURN_ADDR_RTX))
3609 bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
3610 #endif
3612 targetm.extra_live_on_entry (entry_block_defs);
3616 /* Return the (conservative) set of hard registers that are defined on
3617 entry to the function.
3618 It uses df->entry_block_defs to determine which register
3619 references to include. */
3621 static void
3622 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3623 bitmap entry_block_defs)
3625 unsigned int i;
3626 bitmap_iterator bi;
3628 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3630 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3631 ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_DEF, 0);
3634 df_canonize_collection_rec (collection_rec);
3638 /* Record the (conservative) set of hard registers that are defined on
3639 entry to the function. */
3641 static void
3642 df_record_entry_block_defs (bitmap entry_block_defs)
3644 struct df_collection_rec collection_rec;
3645 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3647 /* Process bb_refs chain. */
3648 df_refs_add_to_chains (&collection_rec,
3649 BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK),
3650 NULL,
3651 copy_defs);
3655 /* Update the defs in the entry block. */
3657 void
3658 df_update_entry_block_defs (void)
3660 bitmap_head refs;
3661 bool changed = false;
3663 bitmap_initialize (&refs, &df_bitmap_obstack);
3664 df_get_entry_block_def_set (&refs);
3665 if (df->entry_block_defs)
3667 if (!bitmap_equal_p (df->entry_block_defs, &refs))
3669 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
3670 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3671 df_ref_chain_delete (bb_info->artificial_defs);
3672 bb_info->artificial_defs = NULL;
3673 changed = true;
3676 else
3678 struct df_scan_problem_data *problem_data
3679 = (struct df_scan_problem_data *) df_scan->problem_data;
3680 gcc_unreachable ();
3681 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3682 changed = true;
3685 if (changed)
3687 df_record_entry_block_defs (&refs);
3688 bitmap_copy (df->entry_block_defs, &refs);
3689 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
3691 bitmap_clear (&refs);
3695 /* Set the bit for regs that are considered being used at the exit. */
3697 static void
3698 df_get_exit_block_use_set (bitmap exit_block_uses)
3700 unsigned int i;
3701 unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
3703 bitmap_clear (exit_block_uses);
3705 /* Stack pointer is always live at the exit. */
3706 bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
3708 /* Mark the frame pointer if needed at the end of the function.
3709 If we end up eliminating it, it will be removed from the live
3710 list of each basic block by reload. */
3712 if ((!reload_completed) || frame_pointer_needed)
3714 bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
3715 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3716 /* If they are different, also mark the hard frame pointer as live. */
3717 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3718 bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
3719 #endif
3722 /* Many architectures have a GP register even without flag_pic.
3723 Assume the pic register is not in use, or will be handled by
3724 other means, if it is not fixed. */
3725 if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3726 && picreg != INVALID_REGNUM
3727 && fixed_regs[picreg])
3728 bitmap_set_bit (exit_block_uses, picreg);
3730 /* Mark all global registers, and all registers used by the
3731 epilogue as being live at the end of the function since they
3732 may be referenced by our caller. */
3733 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3734 if (global_regs[i] || EPILOGUE_USES (i))
3735 bitmap_set_bit (exit_block_uses, i);
3737 if (HAVE_epilogue && epilogue_completed)
3739 /* Mark all call-saved registers that we actually used. */
3740 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3741 if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
3742 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3743 bitmap_set_bit (exit_block_uses, i);
3746 #ifdef EH_RETURN_DATA_REGNO
3747 /* Mark the registers that will contain data for the handler. */
3748 if (reload_completed && crtl->calls_eh_return)
3749 for (i = 0; ; ++i)
3751 unsigned regno = EH_RETURN_DATA_REGNO (i);
3752 if (regno == INVALID_REGNUM)
3753 break;
3754 bitmap_set_bit (exit_block_uses, regno);
3756 #endif
3758 #ifdef EH_RETURN_STACKADJ_RTX
3759 if ((!HAVE_epilogue || ! epilogue_completed)
3760 && crtl->calls_eh_return)
3762 rtx tmp = EH_RETURN_STACKADJ_RTX;
3763 if (tmp && REG_P (tmp))
3764 df_mark_reg (tmp, exit_block_uses);
3766 #endif
3768 #ifdef EH_RETURN_HANDLER_RTX
3769 if ((!HAVE_epilogue || ! epilogue_completed)
3770 && crtl->calls_eh_return)
3772 rtx tmp = EH_RETURN_HANDLER_RTX;
3773 if (tmp && REG_P (tmp))
3774 df_mark_reg (tmp, exit_block_uses);
3776 #endif
3778 /* Mark function return value. */
3779 diddle_return_value (df_mark_reg, (void*) exit_block_uses);
3783 /* Return the refs of hard registers that are used in the exit block.
3784 It uses df->exit_block_uses to determine which registers to include. */
3786 static void
3787 df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
3789 unsigned int i;
3790 bitmap_iterator bi;
3792 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
3793 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3794 EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);
3796 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3797 /* It is deliberate that this is not put in the exit block uses but
3798 I do not know why. */
3799 if (reload_completed
3800 && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
3801 && bb_has_eh_pred (EXIT_BLOCK_PTR_FOR_FN (cfun))
3802 && fixed_regs[ARG_POINTER_REGNUM])
3803 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
3804 EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);
3805 #endif
3807 df_canonize_collection_rec (collection_rec);
3811 /* Record the set of hard registers that are used in the exit block.
3812 It uses df->exit_block_uses to determine which bit to include. */
3814 static void
3815 df_record_exit_block_uses (bitmap exit_block_uses)
3817 struct df_collection_rec collection_rec;
3818 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
3820 /* Process bb_refs chain. */
3821 df_refs_add_to_chains (&collection_rec,
3822 BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK),
3823 NULL,
3824 copy_uses);
3828 /* Update the uses in the exit block. */
3830 void
3831 df_update_exit_block_uses (void)
3833 bitmap_head refs;
3834 bool changed = false;
3836 bitmap_initialize (&refs, &df_bitmap_obstack);
3837 df_get_exit_block_use_set (&refs);
3838 if (df->exit_block_uses)
3840 if (!bitmap_equal_p (df->exit_block_uses, &refs))
3842 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
3843 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
3844 df_ref_chain_delete (bb_info->artificial_uses);
3845 bb_info->artificial_uses = NULL;
3846 changed = true;
3849 else
3851 struct df_scan_problem_data *problem_data
3852 = (struct df_scan_problem_data *) df_scan->problem_data;
3853 gcc_unreachable ();
3854 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3855 changed = true;
3858 if (changed)
3860 df_record_exit_block_uses (&refs);
3861 bitmap_copy (df->exit_block_uses, &refs);
3862 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));
3864 bitmap_clear (&refs);
3867 static bool initialized = false;
3870 /* Initialize some platform specific structures. */
3872 void
3873 df_hard_reg_init (void)
3875 #ifdef ELIMINABLE_REGS
3876 int i;
3877 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
3878 #endif
3879 if (initialized)
3880 return;
3882 /* Record which registers will be eliminated. We use this in
3883 mark_used_regs. */
3884 CLEAR_HARD_REG_SET (elim_reg_set);
3886 #ifdef ELIMINABLE_REGS
3887 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
3888 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
3889 #else
3890 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
3891 #endif
3893 initialized = true;
3897 /* Recompute the parts of scanning that are based on regs_ever_live
3898 because something changed in that array. */
3900 void
3901 df_update_entry_exit_and_calls (void)
3903 basic_block bb;
3905 df_update_entry_block_defs ();
3906 df_update_exit_block_uses ();
3908 /* The call insns need to be rescanned because there may be changes
3909 in the set of registers clobbered across the call. */
3910 FOR_EACH_BB_FN (bb, cfun)
3912 rtx_insn *insn;
3913 FOR_BB_INSNS (bb, insn)
3915 if (INSN_P (insn) && CALL_P (insn))
3916 df_insn_rescan (insn);
3922 /* Return true if hard REG is actually used in some instruction.
3923 There are a fair number of conditions that affect the setting of
3924 this array. See the comment in df.h for df->hard_regs_live_count
3925 for the conditions under which this array is set. */
3927 bool
3928 df_hard_reg_used_p (unsigned int reg)
3930 return df->hard_regs_live_count[reg] != 0;
3934 /* A count of the number of times REG is actually used in some
3935 instruction. There are a fair number of conditions that affect the
3936 setting of this array. See the comment in df.h for
3937 df->hard_regs_live_count for the conditions under which this array
3938 is set. */
3941 unsigned int
3942 df_hard_reg_used_count (unsigned int reg)
3944 return df->hard_regs_live_count[reg];
3948 /* Get the value of regs_ever_live[REGNO]. */
3950 bool
3951 df_regs_ever_live_p (unsigned int regno)
3953 return regs_ever_live[regno];
3957 /* Set regs_ever_live[REGNO] to VALUE. If this causes regs_ever_live
3958 to change, schedule that change for the next update. */
3960 void
3961 df_set_regs_ever_live (unsigned int regno, bool value)
3963 if (regs_ever_live[regno] == value)
3964 return;
3966 regs_ever_live[regno] = value;
3967 if (df)
3968 df->redo_entry_and_exit = true;
3972 /* Compute "regs_ever_live" information from the underlying df
3973 information. Set the vector to all false if RESET. */
3975 void
3976 df_compute_regs_ever_live (bool reset)
3978 unsigned int i;
3979 bool changed = df->redo_entry_and_exit;
3981 if (reset)
3982 memset (regs_ever_live, 0, sizeof (regs_ever_live));
3984 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3985 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
3987 regs_ever_live[i] = true;
3988 changed = true;
3990 if (changed)
3991 df_update_entry_exit_and_calls ();
3992 df->redo_entry_and_exit = false;
3996 /*----------------------------------------------------------------------------
3997 Dataflow ref information verification functions.
3999 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
4000 df_reg_chain_verify_unmarked (refs)
4001 df_refs_verify (vec<df_ref, va_heap> *, ref *, bool)
4002 df_mws_verify (mw*, mw*, bool)
4003 df_insn_refs_verify (collection_rec, bb, insn, bool)
4004 df_bb_refs_verify (bb, refs, bool)
4005 df_bb_verify (bb)
4006 df_exit_block_bitmap_verify (bool)
4007 df_entry_block_bitmap_verify (bool)
4008 df_scan_verify ()
4009 ----------------------------------------------------------------------------*/
/* Mark all refs in the reg chain.  Verify that all of the registers
   are in the correct chain.  */

static unsigned int
df_reg_chain_mark (df_ref refs, unsigned int regno,
		   bool is_def, bool is_eq_use)
{
  unsigned int count = 0;
  df_ref ref;
  for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
    {
      gcc_assert (!DF_REF_IS_REG_MARKED (ref));

      /* If there are no def-use or use-def chains, make sure that all
	 of the chains are clear.  */
      if (!df_chain)
	gcc_assert (!DF_REF_CHAIN (ref));

      /* Check to make sure the ref is in the correct chain.  */
      gcc_assert (DF_REF_REGNO (ref) == regno);
      if (is_def)
	gcc_assert (DF_REF_REG_DEF_P (ref));
      else
	gcc_assert (!DF_REF_REG_DEF_P (ref));

      if (is_eq_use)
	gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
      else
	gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);

      if (DF_REF_NEXT_REG (ref))
	gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
      count++;
      DF_REF_REG_MARK (ref);
    }
  return count;
}
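
/* Illustrative sketch, not part of the original source: the same
   DF_REF_NEXT_REG traversal that df_reg_chain_mark performs, used
   here only to count the uses of REGNO instead of marking them.  The
   function name is hypothetical.  */

static unsigned int ATTRIBUTE_UNUSED
df_example_count_reg_uses (unsigned int regno)
{
  unsigned int count = 0;
  df_ref ref;
  for (ref = DF_REG_USE_CHAIN (regno); ref; ref = DF_REF_NEXT_REG (ref))
    count++;
  /* The result should agree with the cached DF_REG_USE_COUNT (regno).  */
  return count;
}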
/* Verify that all of the registers in the chain are unmarked.  */

static void
df_reg_chain_verify_unmarked (df_ref refs)
{
  df_ref ref;
  for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
    gcc_assert (!DF_REF_IS_REG_MARKED (ref));
}
/* Verify that NEW_REC and OLD_REC have exactly the same members.  */

static bool
df_refs_verify (const vec<df_ref, va_heap> *new_rec, df_ref old_rec,
		bool abort_if_fail)
{
  unsigned int ix;
  df_ref new_ref;

  FOR_EACH_VEC_ELT (*new_rec, ix, new_ref)
    {
      if (old_rec == NULL || !df_ref_equal_p (new_ref, old_rec))
	{
	  if (abort_if_fail)
	    gcc_assert (0);
	  else
	    return false;
	}

      /* ABORT_IF_FAIL is set when we are called from the function-level
	 verifier.  In that context, record that this ref has been seen
	 by clearing its mark.  */
      if (abort_if_fail)
	{
	  gcc_assert (DF_REF_IS_REG_MARKED (old_rec));
	  DF_REF_REG_UNMARK (old_rec);
	}

      old_rec = DF_REF_NEXT_LOC (old_rec);
    }

  if (abort_if_fail)
    gcc_assert (old_rec == NULL);
  else
    return old_rec == NULL;
  return false;
}
/* Verify that NEW_REC and OLD_REC have exactly the same members.  */

static bool
df_mws_verify (const vec<df_mw_hardreg_ptr, va_heap> *new_rec,
	       struct df_mw_hardreg *old_rec,
	       bool abort_if_fail)
{
  unsigned int ix;
  struct df_mw_hardreg *new_reg;

  FOR_EACH_VEC_ELT (*new_rec, ix, new_reg)
    {
      if (old_rec == NULL || !df_mw_equal_p (new_reg, old_rec))
	{
	  if (abort_if_fail)
	    gcc_assert (0);
	  else
	    return false;
	}
      old_rec = DF_MWS_NEXT (old_rec);
    }

  if (abort_if_fail)
    gcc_assert (old_rec == NULL);
  else
    return old_rec == NULL;
  return false;
}
/* Return true if the existing insn refs information is complete and
   correct.  Otherwise (i.e. if there are any missing or extra refs),
   return false; the recomputed refs are left in COLLECTION_REC.

   If ABORT_IF_FAIL, leave the refs that are verified (already in the
   ref chain) as DF_REF_MARKED().  If it's false, then this is the
   per-insn verification mode rather than the whole-function mode, so
   unmark everything.

   If ABORT_IF_FAIL is set, this function never returns false.  */

static bool
df_insn_refs_verify (struct df_collection_rec *collection_rec,
		     basic_block bb,
		     rtx_insn *insn,
		     bool abort_if_fail)
{
  bool ret1, ret2, ret3, ret4;
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);

  df_insn_refs_collect (collection_rec, bb, insn_info);

  /* Unfortunately we cannot opt out early if one of these is not
     right because the marks will not get cleared.  */
  ret1 = df_refs_verify (&collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
			 abort_if_fail);
  ret2 = df_refs_verify (&collection_rec->use_vec, DF_INSN_UID_USES (uid),
			 abort_if_fail);
  ret3 = df_refs_verify (&collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
			 abort_if_fail);
  ret4 = df_mws_verify (&collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
			abort_if_fail);
  return (ret1 && ret2 && ret3 && ret4);
}
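
/* Illustrative sketch, not part of the original source: the
   non-aborting mode of df_insn_refs_verify can be used to ask whether
   a single insn's cached refs are still accurate.  With ABORT_IF_FAIL
   false no marks are involved, so the freshly collected refs are
   simply discarded after the comparison.  Assumes INSN has already
   been scanned.  */

static bool ATTRIBUTE_UNUSED
df_example_insn_refs_up_to_date_p (basic_block bb, rtx_insn *insn)
{
  struct df_collection_rec collection_rec;
  bool ok = df_insn_refs_verify (&collection_rec, bb, insn, false);
  df_free_collection_rec (&collection_rec);
  return ok;
}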
/* Return true if all refs in the basic block are correct and complete.
   As a side effect of df_refs_verify, the DF_REF_MARK bit is cleared
   on every ref that is verified.  */

static bool
df_bb_verify (basic_block bb)
{
  rtx_insn *insn;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
  struct df_collection_rec collection_rec;

  gcc_assert (bb_info);

  /* Scan the block, one insn at a time, from end to beginning.  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      if (!INSN_P (insn))
	continue;
      df_insn_refs_verify (&collection_rec, bb, insn, true);
      df_free_collection_rec (&collection_rec);
    }

  /* Do the artificial defs and uses.  */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_verify (&collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
  df_refs_verify (&collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
  df_free_collection_rec (&collection_rec);

  return true;
}
/* Return true if the entry block has a correct and complete df_ref set.
   If not, either abort (if ABORT_IF_FAIL is true) or return false.  */

static bool
df_entry_block_bitmap_verify (bool abort_if_fail)
{
  bitmap_head entry_block_defs;
  bool is_eq;

  bitmap_initialize (&entry_block_defs, &df_bitmap_obstack);
  df_get_entry_block_def_set (&entry_block_defs);

  is_eq = bitmap_equal_p (&entry_block_defs, df->entry_block_defs);

  if (!is_eq && abort_if_fail)
    {
      fprintf (stderr, "entry_block_defs = ");
      df_print_regset (stderr, &entry_block_defs);
      fprintf (stderr, "df->entry_block_defs = ");
      df_print_regset (stderr, df->entry_block_defs);
      gcc_assert (0);
    }

  bitmap_clear (&entry_block_defs);

  return is_eq;
}
/* Return true if the exit block has a correct and complete df_ref set.
   If not, either abort (if ABORT_IF_FAIL is true) or return false.  */

static bool
df_exit_block_bitmap_verify (bool abort_if_fail)
{
  bitmap_head exit_block_uses;
  bool is_eq;

  bitmap_initialize (&exit_block_uses, &df_bitmap_obstack);
  df_get_exit_block_use_set (&exit_block_uses);

  is_eq = bitmap_equal_p (&exit_block_uses, df->exit_block_uses);

  if (!is_eq && abort_if_fail)
    {
      fprintf (stderr, "exit_block_uses = ");
      df_print_regset (stderr, &exit_block_uses);
      fprintf (stderr, "df->exit_block_uses = ");
      df_print_regset (stderr, df->exit_block_uses);
      gcc_assert (0);
    }

  bitmap_clear (&exit_block_uses);

  return is_eq;
}
/* Verify that the df_ref information for all insns in all blocks is
   correct and complete.  */

void
df_scan_verify (void)
{
  unsigned int i;
  basic_block bb;
  bitmap_head regular_block_artificial_uses;
  bitmap_head eh_block_artificial_uses;

  if (!df)
    return;

  /* Verification is a 4 step process.  */

  /* (1) All of the refs are marked by going through the reg chains.  */
  for (i = 0; i < DF_REG_SIZE (df); i++)
    {
      gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
		  == DF_REG_DEF_COUNT (i));
      gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
		  == DF_REG_USE_COUNT (i));
      gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
		  == DF_REG_EQ_USE_COUNT (i));
    }

  /* (2) There are various bitmaps whose value may change over the
     course of the compilation.  This step recomputes them to make
     sure that they have not slipped out of date.  */
  bitmap_initialize (&regular_block_artificial_uses, &df_bitmap_obstack);
  bitmap_initialize (&eh_block_artificial_uses, &df_bitmap_obstack);

  df_get_regular_block_artificial_uses (&regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (&eh_block_artificial_uses);

  bitmap_ior_into (&eh_block_artificial_uses,
		   &regular_block_artificial_uses);

  /* Check that the artificial_uses bitmaps didn't change.  */
  gcc_assert (bitmap_equal_p (&regular_block_artificial_uses,
			      &df->regular_block_artificial_uses));
  gcc_assert (bitmap_equal_p (&eh_block_artificial_uses,
			      &df->eh_block_artificial_uses));

  bitmap_clear (&regular_block_artificial_uses);
  bitmap_clear (&eh_block_artificial_uses);

  /* Verify the entry block and exit block.  These only verify the
     bitmaps; the refs are verified in df_bb_verify.  */
  df_entry_block_bitmap_verify (true);
  df_exit_block_bitmap_verify (true);

  /* (3) All of the insns in all of the blocks are traversed and the
     marks are cleared both in the artificial refs attached to the
     blocks and the real refs inside the insns.  It is a failure to
     clear a mark that has not been set as this means that the ref in
     the block or insn was not in the reg chain.  */

  FOR_ALL_BB_FN (bb, cfun)
    df_bb_verify (bb);

  /* (4) All reg chains are traversed a second time.  This time a
     check is made that the marks are clear.  A mark that is still set
     would come from a ref that is not in any insn or basic block.  */

  for (i = 0; i < DF_REG_SIZE (df); i++)
    {
      df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
      df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
      df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));
    }
}
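
/* Illustrative sketch, not part of the original source: a checking
   build could rebuild all refs and then run the four-step
   verification above.  df_scan_verify returns immediately when no df
   information exists, so the call is safe at any point.  */

static void ATTRIBUTE_UNUSED
df_example_checked_rescan_all (void)
{
  df_insn_rescan_all ();	/* Rebuild refs for every insn.  */
  df_scan_verify ();		/* Assert chains and bitmaps agree.  */
}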