/* Scanning of rtl for dataflow analysis.
   Copyright (C) 1999-2015 Free Software Foundation, Inc.
   Originally contributed by Michael P. Hayes
             (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
             and Kenneth Zadeck (zadeck@naturalbridge.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "regs.h"
#include "alloc-pool.h"
#include "flags.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "sbitmap.h"
#include "bitmap.h"
#include "dumpfile.h"
#include "tree.h"
#include "target.h"
#include "target-def.h"
#include "df.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
typedef struct df_mw_hardreg *df_mw_hardreg_ptr;


#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

/* The set of hard registers in eliminables[i].from.  */

static HARD_REG_SET elim_reg_set;

/* Initialize ur_in and ur_out as if all hard registers were partially
   available.  */
struct df_collection_rec
{
  auto_vec<df_ref, 128> def_vec;
  auto_vec<df_ref, 32> use_vec;
  auto_vec<df_ref, 32> eq_use_vec;
  auto_vec<df_mw_hardreg_ptr, 32> mw_vec;
};
static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
			   rtx, rtx *,
			   basic_block, struct df_insn_info *,
			   enum df_ref_type, int ref_flags);
static void df_def_record_1 (struct df_collection_rec *, rtx *,
			     basic_block, struct df_insn_info *,
			     int ref_flags);
static void df_defs_record (struct df_collection_rec *, rtx,
			    basic_block, struct df_insn_info *,
			    int ref_flags);
static void df_uses_record (struct df_collection_rec *,
			    rtx *, enum df_ref_type,
			    basic_block, struct df_insn_info *,
			    int ref_flags);

static void df_install_ref_incremental (df_ref);
static void df_insn_refs_collect (struct df_collection_rec*,
				  basic_block, struct df_insn_info *);
static void df_canonize_collection_rec (struct df_collection_rec *);

static void df_get_regular_block_artificial_uses (bitmap);
static void df_get_eh_block_artificial_uses (bitmap);

static void df_record_entry_block_defs (bitmap);
static void df_record_exit_block_uses (bitmap);
static void df_get_exit_block_use_set (bitmap);
static void df_get_entry_block_def_set (bitmap);
static void df_grow_ref_info (struct df_ref_info *, unsigned int);
static void df_ref_chain_delete_du_chain (df_ref);
static void df_ref_chain_delete (df_ref);

static void df_refs_add_to_chains (struct df_collection_rec *,
				   basic_block, rtx_insn *, unsigned int);

static bool df_insn_refs_verify (struct df_collection_rec *, basic_block,
				 rtx_insn *, bool);
static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
static void df_install_ref (df_ref, struct df_reg_info *,
			    struct df_ref_info *, bool);

static int df_ref_compare (df_ref, df_ref);
static int df_ref_ptr_compare (const void *, const void *);
static int df_mw_compare (const df_mw_hardreg *, const df_mw_hardreg *);
static int df_mw_ptr_compare (const void *, const void *);

static void df_insn_info_delete (unsigned int);
/* Indexed by hardware reg number, is true if that register is ever
   used in the current function.

   In df-scan.c, this is set up to record the hard regs used
   explicitly.  Reload adds in the hard regs used for holding pseudo
   regs.  Final uses it to generate the code in the function prologue
   and epilogue to save and restore registers as needed.  */

static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
/* Flags used to tell df_refs_add_to_chains() which vectors it should copy.  */
static const unsigned int copy_defs = 0x1;
static const unsigned int copy_uses = 0x2;
static const unsigned int copy_eq_uses = 0x4;
static const unsigned int copy_mw = 0x8;
static const unsigned int copy_all = copy_defs | copy_uses | copy_eq_uses
				     | copy_mw;
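
/* A minimal usage sketch (illustrative only, not a call site from this
   file): a caller that wants only the collected defs and uses copied
   into an insn's chains, leaving eq_uses and multiword hardregs alone,
   would pass the union of the corresponding bits, e.g.

     df_refs_add_to_chains (&collection_rec, bb, insn,
                            copy_defs | copy_uses);

   copy_all is simply the union of all four flag bits.  */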
/*----------------------------------------------------------------------------
   SCANNING DATAFLOW PROBLEM

   There are several ways in which scanning looks just like the other
   dataflow problems.  It shares all the mechanisms for local info
   as well as basic block info.  Where it differs is when and how often
   it gets run.  It also has no need for the iterative solver.
----------------------------------------------------------------------------*/
#define SCAN_PROBLEM_DATA_BLOCK_SIZE 512


/* Problem data for the scanning dataflow function.  */
struct df_scan_problem_data
{
  pool_allocator<df_base_ref> *ref_base_pool;
  pool_allocator<df_artificial_ref> *ref_artificial_pool;
  pool_allocator<df_regular_ref> *ref_regular_pool;
  pool_allocator<df_insn_info> *insn_pool;
  pool_allocator<df_reg_info> *reg_pool;
  pool_allocator<df_mw_hardreg> *mw_reg_pool;

  bitmap_obstack reg_bitmaps;
  bitmap_obstack insn_bitmaps;
};

typedef struct df_scan_bb_info *df_scan_bb_info_t;
/* Internal function to shut down the scanning problem.  */
static void
df_scan_free_internal (void)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  free (df->def_info.refs);
  free (df->def_info.begin);
  free (df->def_info.count);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.refs);
  free (df->use_info.begin);
  free (df->use_info.count);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  free (df->def_regs);
  df->def_regs = NULL;
  free (df->use_regs);
  df->use_regs = NULL;
  free (df->eq_use_regs);
  df->eq_use_regs = NULL;
  df->regs_size = 0;
  DF_REG_SIZE (df) = 0;

  free (df->insns);
  df->insns = NULL;
  DF_INSN_SIZE () = 0;

  free (df_scan->block_info);
  df_scan->block_info = NULL;
  df_scan->block_info_size = 0;

  bitmap_clear (&df->hardware_regs_used);
  bitmap_clear (&df->regular_block_artificial_uses);
  bitmap_clear (&df->eh_block_artificial_uses);
  BITMAP_FREE (df->entry_block_defs);
  BITMAP_FREE (df->exit_block_uses);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  delete problem_data->ref_base_pool;
  delete problem_data->ref_artificial_pool;
  delete problem_data->ref_regular_pool;
  delete problem_data->insn_pool;
  delete problem_data->reg_pool;
  delete problem_data->mw_reg_pool;
  bitmap_obstack_release (&problem_data->reg_bitmaps);
  bitmap_obstack_release (&problem_data->insn_bitmaps);
  free (df_scan->problem_data);
}
/* Free basic block info.  */

static void
df_scan_free_bb_info (basic_block bb, void *vbb_info)
{
  struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
  unsigned int bb_index = bb->index;
  rtx_insn *insn;

  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      df_insn_info_delete (INSN_UID (insn));

  if (bb_index < df_scan->block_info_size)
    bb_info = df_scan_get_bb_info (bb_index);

  /* Get rid of any artificial uses or defs.  */
  df_ref_chain_delete_du_chain (bb_info->artificial_defs);
  df_ref_chain_delete_du_chain (bb_info->artificial_uses);
  df_ref_chain_delete (bb_info->artificial_defs);
  df_ref_chain_delete (bb_info->artificial_uses);
  bb_info->artificial_defs = NULL;
  bb_info->artificial_uses = NULL;
}
/* Allocate the problem data for the scanning problem.  This should be
   called when the problem is created or when the entire function is to
   be rescanned.  */
void
df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
  struct df_scan_problem_data *problem_data;
  unsigned int insn_num = get_max_uid () + 1;
  basic_block bb;

  /* Given the number of pools, this is really faster than tearing
     everything apart.  */
  if (df_scan->problem_data)
    df_scan_free_internal ();

  problem_data = XNEW (struct df_scan_problem_data);
  df_scan->problem_data = problem_data;
  df_scan->computed = true;

  problem_data->ref_base_pool = new pool_allocator<df_base_ref>
    ("df_scan ref base", SCAN_PROBLEM_DATA_BLOCK_SIZE);
  problem_data->ref_artificial_pool = new pool_allocator<df_artificial_ref>
    ("df_scan ref artificial", SCAN_PROBLEM_DATA_BLOCK_SIZE);
  problem_data->ref_regular_pool = new pool_allocator<df_regular_ref>
    ("df_scan ref regular", SCAN_PROBLEM_DATA_BLOCK_SIZE);
  problem_data->insn_pool = new pool_allocator<df_insn_info>
    ("df_scan insn", SCAN_PROBLEM_DATA_BLOCK_SIZE);
  problem_data->reg_pool = new pool_allocator<df_reg_info>
    ("df_scan reg", SCAN_PROBLEM_DATA_BLOCK_SIZE);
  problem_data->mw_reg_pool = new pool_allocator<df_mw_hardreg>
    ("df_scan mw_reg", SCAN_PROBLEM_DATA_BLOCK_SIZE / 16);

  bitmap_obstack_initialize (&problem_data->reg_bitmaps);
  bitmap_obstack_initialize (&problem_data->insn_bitmaps);

  insn_num += insn_num / 4;
  df_grow_reg_info ();

  df_grow_insn_info ();
  df_grow_bb_info (df_scan);

  FOR_ALL_BB_FN (bb, cfun)
    {
      unsigned int bb_index = bb->index;
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  bitmap_initialize (&df->hardware_regs_used, &problem_data->reg_bitmaps);
  bitmap_initialize (&df->regular_block_artificial_uses, &problem_data->reg_bitmaps);
  bitmap_initialize (&df->eh_block_artificial_uses, &problem_data->reg_bitmaps);
  df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  bitmap_initialize (&df->insns_to_delete, &problem_data->insn_bitmaps);
  bitmap_initialize (&df->insns_to_rescan, &problem_data->insn_bitmaps);
  bitmap_initialize (&df->insns_to_notes_rescan, &problem_data->insn_bitmaps);
  df_scan->optional_p = false;
}
/* Free all of the data associated with the scan problem.  */

static void
df_scan_free (void)
{
  if (df_scan->problem_data)
    df_scan_free_internal ();

  if (df->blocks_to_analyze)
    {
      BITMAP_FREE (df->blocks_to_analyze);
      df->blocks_to_analyze = NULL;
    }

  free (df_scan);
}
/* Dump the preamble for DF_SCAN dump.  */
static void
df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
{
  int i;
  int dcount = 0;
  int ucount = 0;
  int ecount = 0;
  int icount = 0;
  int ccount = 0;
  basic_block bb;
  rtx_insn *insn;

  fprintf (file, ";; invalidated by call \t");
  df_print_regset (file, regs_invalidated_by_call_regset);
  fprintf (file, ";; hardware regs used \t");
  df_print_regset (file, &df->hardware_regs_used);
  fprintf (file, ";; regular block artificial uses \t");
  df_print_regset (file, &df->regular_block_artificial_uses);
  fprintf (file, ";; eh block artificial uses \t");
  df_print_regset (file, &df->eh_block_artificial_uses);
  fprintf (file, ";; entry block defs \t");
  df_print_regset (file, df->entry_block_defs);
  fprintf (file, ";; exit block uses \t");
  df_print_regset (file, df->exit_block_uses);
  fprintf (file, ";; regs ever live \t");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (df_regs_ever_live_p (i))
      fprintf (file, " %d [%s]", i, reg_names[i]);
  fprintf (file, "\n;; ref usage \t");

  for (i = 0; i < (int)df->regs_inited; i++)
    if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
      {
	const char * sep = "";

	fprintf (file, "r%d={", i);
	if (DF_REG_DEF_COUNT (i))
	  {
	    fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
	    sep = ",";
	    dcount += DF_REG_DEF_COUNT (i);
	  }
	if (DF_REG_USE_COUNT (i))
	  {
	    fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
	    sep = ",";
	    ucount += DF_REG_USE_COUNT (i);
	  }
	if (DF_REG_EQ_USE_COUNT (i))
	  {
	    fprintf (file, "%s%de", sep, DF_REG_EQ_USE_COUNT (i));
	    ecount += DF_REG_EQ_USE_COUNT (i);
	  }
	fprintf (file, "} ");
      }

  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
	{
	  if (CALL_P (insn))
	    ccount++;
	  else
	    icount++;
	}

  fprintf (file, "\n;; total ref usage %d{%dd,%du,%de}"
		 " in %d{%d regular + %d call} insns.\n",
	   dcount + ucount + ecount, dcount, ucount, ecount,
	   icount + ccount, icount, ccount);
}
/* Dump the bb_info for a given basic block.  */
static void
df_scan_start_block (basic_block bb, FILE *file)
{
  struct df_scan_bb_info *bb_info
    = df_scan_get_bb_info (bb->index);

  if (bb_info)
    {
      fprintf (file, ";; bb %d artificial_defs: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_defs, true, file);
      fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_uses, true, file);
      fprintf (file, "\n");
    }
#if 0
  {
    rtx_insn *insn;
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
	df_insn_debug (insn, false, file);
  }
#endif
}
static struct df_problem problem_SCAN =
{
  DF_SCAN,			/* Problem id.  */
  DF_NONE,			/* Direction.  */
  df_scan_alloc,		/* Allocate the problem specific data.  */
  NULL,				/* Reset global information.  */
  df_scan_free_bb_info,		/* Free basic block info.  */
  NULL,				/* Local compute function.  */
  NULL,				/* Init the solution specific data.  */
  NULL,				/* Iterative solver.  */
  NULL,				/* Confluence operator 0.  */
  NULL,				/* Confluence operator n.  */
  NULL,				/* Transfer function.  */
  NULL,				/* Finalize function.  */
  df_scan_free,			/* Free all of the problem information.  */
  NULL,				/* Remove this problem from the stack of dataflow problems.  */
  df_scan_start_dump,		/* Debugging.  */
  df_scan_start_block,		/* Debugging start block.  */
  NULL,				/* Debugging end block.  */
  NULL,				/* Debugging start insn.  */
  NULL,				/* Debugging end insn.  */
  NULL,				/* Incremental solution verify start.  */
  NULL,				/* Incremental solution verify end.  */
  NULL,				/* Dependent problem.  */
  sizeof (struct df_scan_bb_info),/* Size of entry of block_info array.  */
  TV_DF_SCAN,			/* Timing variable.  */
  false				/* Reset blocks on dropping out of blocks_to_analyze.  */
};
/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

void
df_scan_add_problem (void)
{
  df_add_problem (&problem_SCAN);
}
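
/* A minimal usage sketch (illustrative only, not code from this file):
   the scan problem is normally installed once per function by the df
   core (see rest_of_handle_df_initialize in df-core.c), roughly

     df_scan_add_problem ();   -- push the scan problem
     df_scan_alloc (NULL);     -- allocate its problem data
     df_scan_blocks ();        -- scan every insn in the function

   Ordinary passes never call this themselves; they just use df_analyze
   and the ref information that scanning maintains.  */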
/*----------------------------------------------------------------------------
   Storage Allocation Utilities
----------------------------------------------------------------------------*/


/* First, grow the reg_info information.  If the current size is less than
   the number of pseudos, grow to 25% more than the number of
   pseudos.

   Second, assure that all of the slots up to max_reg_num have been
   filled with reg_info structures.  */
void
df_grow_reg_info (void)
{
  unsigned int max_reg = max_reg_num ();
  unsigned int new_size = max_reg;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;

  if (df->regs_size < new_size)
    {
      new_size += new_size / 4;
      df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
      df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
      df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
				    new_size);
      df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
      df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
      df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
      df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
      df->regs_size = new_size;
    }

  for (i = df->regs_inited; i < max_reg; i++)
    {
      struct df_reg_info *reg_info;

      // TODO
      reg_info = problem_data->reg_pool->allocate ();
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->def_regs[i] = reg_info;
      reg_info = problem_data->reg_pool->allocate ();
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->use_regs[i] = reg_info;
      reg_info = problem_data->reg_pool->allocate ();
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->eq_use_regs[i] = reg_info;
      df->def_info.begin[i] = 0;
      df->def_info.count[i] = 0;
      df->use_info.begin[i] = 0;
      df->use_info.count[i] = 0;
    }

  df->regs_inited = max_reg;
}
/* Grow the ref information.  */

static void
df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
{
  if (ref_info->refs_size < new_size)
    {
      ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
      memset (ref_info->refs + ref_info->refs_size, 0,
	      (new_size - ref_info->refs_size) * sizeof (df_ref));
      ref_info->refs_size = new_size;
    }
}
/* Check and grow the ref information if necessary.  This routine
   guarantees total_size + BITMAP_ADDEND amount of entries in refs
   array.  It updates ref_info->refs_size only and does not change
   ref_info->total_size.  */

static void
df_check_and_grow_ref_info (struct df_ref_info *ref_info,
			    unsigned bitmap_addend)
{
  if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
    {
      int new_size = ref_info->total_size + bitmap_addend;
      new_size += ref_info->total_size / 4;
      df_grow_ref_info (ref_info, new_size);
    }
}
/* Grow the insn information.  If the current size is less than the
   number of instructions, grow to 25% more than the number of
   instructions.  */

void
df_grow_insn_info (void)
{
  unsigned int new_size = get_max_uid () + 1;
  if (DF_INSN_SIZE () < new_size)
    {
      new_size += new_size / 4;
      df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
      memset (df->insns + df->insns_size, 0,
	      (new_size - DF_INSN_SIZE ()) * sizeof (struct df_insn_info *));
      DF_INSN_SIZE () = new_size;
    }
}
/*----------------------------------------------------------------------------
   PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
----------------------------------------------------------------------------*/

/* Rescan all of the blocks_to_analyze, or all of the blocks in the
   function if blocks_to_analyze is NULL.  */
void
df_scan_blocks (void)
{
  basic_block bb;

  df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
  df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;

  df_get_regular_block_artificial_uses (&df->regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (&df->eh_block_artificial_uses);

  bitmap_ior_into (&df->eh_block_artificial_uses,
		   &df->regular_block_artificial_uses);

  /* ENTRY and EXIT blocks have special defs/uses.  */
  df_get_entry_block_def_set (df->entry_block_defs);
  df_record_entry_block_defs (df->entry_block_defs);
  df_get_exit_block_use_set (df->exit_block_uses);
  df_record_exit_block_uses (df->exit_block_uses);
  df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
  df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));

  /* Regular blocks */
  FOR_EACH_BB_FN (bb, cfun)
    {
      unsigned int bb_index = bb->index;
      df_bb_refs_record (bb_index, true);
    }
}
/* Create new refs under address LOC within INSN.  This function is
   only used externally.  REF_FLAGS must be either 0 or DF_REF_IN_NOTE,
   depending on whether LOC is inside PATTERN (INSN) or a note.  */

void
df_uses_create (rtx *loc, rtx_insn *insn, int ref_flags)
{
  gcc_assert (!(ref_flags & ~DF_REF_IN_NOTE));
  df_uses_record (NULL, loc, DF_REF_REG_USE,
		  BLOCK_FOR_INSN (insn),
		  DF_INSN_INFO_GET (insn),
		  ref_flags);
}
static void
df_install_ref_incremental (df_ref ref)
{
  struct df_reg_info **reg_info;
  struct df_ref_info *ref_info;
  df_ref *ref_ptr;
  bool add_to_table;

  rtx_insn *insn = DF_REF_INSN (ref);
  basic_block bb = BLOCK_FOR_INSN (insn);

  if (DF_REF_REG_DEF_P (ref))
    {
      reg_info = df->def_regs;
      ref_info = &df->def_info;
      ref_ptr = &DF_INSN_DEFS (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }
  else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
    {
      reg_info = df->eq_use_regs;
      ref_info = &df->use_info;
      ref_ptr = &DF_INSN_EQ_USES (insn);
      switch (ref_info->ref_order)
	{
	case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	case DF_REF_ORDER_BY_REG_WITH_NOTES:
	case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	  add_to_table = true;
	  break;
	default:
	  add_to_table = false;
	  break;
	}
    }
  else
    {
      reg_info = df->use_regs;
      ref_info = &df->use_info;
      ref_ptr = &DF_INSN_USES (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }

  /* Do not add if ref is not in the right blocks.  */
  if (add_to_table && df->analyze_subset)
    add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

  df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);

  if (add_to_table)
    switch (ref_info->ref_order)
      {
      case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      case DF_REF_ORDER_BY_REG_WITH_NOTES:
      case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
	break;
      default:
	ref_info->ref_order = DF_REF_ORDER_UNORDERED;
	break;
      }

  while (*ref_ptr && df_ref_compare (*ref_ptr, ref) < 0)
    ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);

  DF_REF_NEXT_LOC (ref) = *ref_ptr;
  *ref_ptr = ref;

#if 0
  if (dump_file)
    {
      fprintf (dump_file, "adding ref ");
      df_ref_debug (ref, dump_file);
    }
#endif

  /* By adding the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
    df_set_bb_dirty (bb);
}
/*----------------------------------------------------------------------------
   UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
----------------------------------------------------------------------------*/


static void
df_free_ref (df_ref ref)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  switch (DF_REF_CLASS (ref))
    {
    case DF_REF_BASE:
      problem_data->ref_base_pool->remove ((df_base_ref *) (ref));
      break;

    case DF_REF_ARTIFICIAL:
      problem_data->ref_artificial_pool->remove
	((df_artificial_ref *) (ref));
      break;

    case DF_REF_REGULAR:
      problem_data->ref_regular_pool->remove
	((df_regular_ref *) (ref));
      break;
    }
}
/* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
   Also delete the def-use or use-def chain if it exists.  */

static void
df_reg_chain_unlink (df_ref ref)
{
  df_ref next = DF_REF_NEXT_REG (ref);
  df_ref prev = DF_REF_PREV_REG (ref);
  int id = DF_REF_ID (ref);
  struct df_reg_info *reg_info;
  df_ref *refs = NULL;

  if (DF_REF_REG_DEF_P (ref))
    {
      int regno = DF_REF_REGNO (ref);
      reg_info = DF_REG_DEF_GET (regno);
      refs = df->def_info.refs;
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
	{
	  reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
	  switch (df->use_info.ref_order)
	    {
	    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	    case DF_REF_ORDER_BY_REG_WITH_NOTES:
	    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	      refs = df->use_info.refs;
	      break;
	    default:
	      break;
	    }
	}
      else
	{
	  reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
	  refs = df->use_info.refs;
	}
    }

  if (refs)
    {
      if (df->analyze_subset)
	{
	  if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
	    refs[id] = NULL;
	}
      else
	refs[id] = NULL;
    }

  /* Delete any def-use or use-def chains that start here.  It is
     possible that there is trash in this field.  This happens for
     insns that have been deleted when rescanning has been deferred
     and the chain problem has also been deleted.  The chain tear down
     code skips deleted insns.  */
  if (df_chain && DF_REF_CHAIN (ref))
    df_chain_unlink (ref);

  reg_info->n_refs--;
  if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
    {
      gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
    }

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    DF_REF_NEXT_REG (prev) = next;
  else
    {
      gcc_assert (reg_info->reg_chain == ref);
      reg_info->reg_chain = next;
    }
  if (next)
    DF_REF_PREV_REG (next) = prev;

  df_free_ref (ref);
}
/* Create the insn record for INSN.  If there was one there, zero it
   out.  */

struct df_insn_info *
df_insn_create_insn_record (rtx_insn *insn)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  struct df_insn_info *insn_rec;

  df_grow_insn_info ();
  insn_rec = DF_INSN_INFO_GET (insn);
  if (!insn_rec)
    {
      insn_rec = problem_data->insn_pool->allocate ();
      DF_INSN_INFO_SET (insn, insn_rec);
    }
  memset (insn_rec, 0, sizeof (struct df_insn_info));
  insn_rec->insn = insn;
  return insn_rec;
}
/* Delete the du chain (DF_REF_CHAIN ()) of each ref in the ref chain.  */

static void
df_ref_chain_delete_du_chain (df_ref ref)
{
  for (; ref; ref = DF_REF_NEXT_LOC (ref))
    /* CHAIN is allocated by DF_CHAIN.  So make sure to
       pass df_scan instance for the problem.  */
    if (DF_REF_CHAIN (ref))
      df_chain_unlink (ref);
}
/* Delete all refs in the ref chain.  */

static void
df_ref_chain_delete (df_ref ref)
{
  df_ref next;
  for (; ref; ref = next)
    {
      next = DF_REF_NEXT_LOC (ref);
      df_reg_chain_unlink (ref);
    }
}
/* Delete the hardreg chain.  */

static void
df_mw_hardreg_chain_delete (struct df_mw_hardreg *hardregs)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  df_mw_hardreg *next;

  for (; hardregs; hardregs = next)
    {
      next = DF_MWS_NEXT (hardregs);
      problem_data->mw_reg_pool->remove (hardregs);
    }
}
/* Delete all of the refs information from the insn with UID.
   Internal helper for df_insn_delete, df_insn_rescan, and other
   df-scan routines that don't have to work in deferred mode
   and do not have to mark basic blocks for re-processing.  */

static void
df_insn_info_delete (unsigned int uid)
{
  struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      struct df_scan_problem_data *problem_data
	= (struct df_scan_problem_data *) df_scan->problem_data;

      /* In general, notes do not have the insn_info fields
	 initialized.  However, combine deletes insns by changing them
	 to notes.  How clever.  So we cannot just check if it is a
	 valid insn before short circuiting this code, we need to see
	 if we actually initialized it.  */
      df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

      if (df_chain)
	{
	  df_ref_chain_delete_du_chain (insn_info->defs);
	  df_ref_chain_delete_du_chain (insn_info->uses);
	  df_ref_chain_delete_du_chain (insn_info->eq_uses);
	}

      df_ref_chain_delete (insn_info->defs);
      df_ref_chain_delete (insn_info->uses);
      df_ref_chain_delete (insn_info->eq_uses);

      problem_data->insn_pool->remove (insn_info);
      DF_INSN_UID_SET (uid, NULL);
    }
}
/* Delete all of the refs information from INSN, either right now
   or marked for later in deferred mode.  */

void
df_insn_delete (rtx_insn *insn)
{
  unsigned int uid;
  basic_block bb;

  gcc_checking_assert (INSN_P (insn));

  if (!df)
    return;

  uid = INSN_UID (insn);
  bb = BLOCK_FOR_INSN (insn);

  /* ??? bb can be NULL after pass_free_cfg.  At that point, DF should
     not exist anymore (as mentioned in df-core.c: "The only requirement
     [for DF] is that there be a correct control flow graph."  Clearly
     that isn't the case after pass_free_cfg.  But DF is freed much later
     because some back-ends want to use DF info even though the CFG is
     already gone.  It's not clear to me whether that is safe, actually.
     In any case, we expect BB to be non-NULL at least up to register
     allocation, so disallow a NULL BB up to there.  Not perfect
     but better than nothing...  */
  gcc_checking_assert (bb != NULL || reload_completed);

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  /* The block must be marked as dirty now, rather than later as in
     df_insn_rescan and df_notes_rescan because it may not be there at
     rescanning time and the mark would blow up.
     DEBUG_INSNs do not make a block's data flow solution dirty (at
     worst the LUIDs are no longer contiguous).  */
  if (bb != NULL && NONDEBUG_INSN_P (insn))
    df_set_bb_dirty (bb);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	{
	  bitmap_clear_bit (&df->insns_to_rescan, uid);
	  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
	  bitmap_set_bit (&df->insns_to_delete, uid);
	}
      if (dump_file)
	fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "deleting insn with uid = %d.\n", uid);

  df_insn_info_delete (uid);
}
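
/* A minimal usage sketch (illustrative only, not code from this file):
   passes normally do not call df_insn_delete directly; it is invoked
   from the emit/cfg layer when an insn is removed from the stream,
   roughly

     delete_insn (insn);   -- cfg layer unlinks the insn and, with df
                            -- live, df_insn_delete drops its refs

   so that the scan information never points at a dead insn.  */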
/* Free all of the refs and the mw_hardregs in COLLECTION_REC.  */

static void
df_free_collection_rec (struct df_collection_rec *collection_rec)
{
  unsigned int ix;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  df_ref ref;
  struct df_mw_hardreg *mw;

  FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
    df_free_ref (ref);
  FOR_EACH_VEC_ELT (collection_rec->use_vec, ix, ref)
    df_free_ref (ref);
  FOR_EACH_VEC_ELT (collection_rec->eq_use_vec, ix, ref)
    df_free_ref (ref);
  FOR_EACH_VEC_ELT (collection_rec->mw_vec, ix, mw)
    problem_data->mw_reg_pool->remove (mw);

  collection_rec->def_vec.release ();
  collection_rec->use_vec.release ();
  collection_rec->eq_use_vec.release ();
  collection_rec->mw_vec.release ();
}
/* Rescan INSN.  Return TRUE if the rescanning produced any changes.  */

bool
df_insn_rescan (rtx_insn *insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  basic_block bb = BLOCK_FOR_INSN (insn);
  struct df_collection_rec collection_rec;

  if ((!df) || (!INSN_P (insn)))
    return false;

  if (!bb)
    {
      if (dump_file)
	fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
      return false;
    }

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return false;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
	{
	  insn_info = df_insn_create_insn_record (insn);
	  insn_info->defs = 0;
	  insn_info->uses = 0;
	  insn_info->eq_uses = 0;
	  insn_info->mw_hardregs = 0;
	}
      if (dump_file)
	fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);

      bitmap_clear_bit (&df->insns_to_delete, uid);
      bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
      bitmap_set_bit (&df->insns_to_rescan, INSN_UID (insn));
      return false;
    }

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      int luid;
      bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
      /* If there's no change, return false.  */
      if (the_same)
	{
	  df_free_collection_rec (&collection_rec);
	  if (dump_file)
	    fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
	  return false;
	}
      if (dump_file)
	fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);

      /* There's change - we need to delete the existing info.
	 Since the insn isn't moved, we can salvage its LUID.  */
      luid = DF_INSN_LUID (insn);
      df_insn_info_delete (uid);
      df_insn_create_insn_record (insn);
      DF_INSN_LUID (insn) = luid;
    }
  else
    {
      struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
      df_insn_refs_collect (&collection_rec, bb, insn_info);
      if (dump_file)
	fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
    }

  df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
  if (!DEBUG_INSN_P (insn))
    df_set_bb_dirty (bb);

  return true;
}
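
/* A minimal usage sketch (hypothetical pass code, not part of this
   file): after changing an insn in place, a pass keeps df consistent
   by rescanning just that insn, e.g.

     if (validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, false))
       df_insn_rescan (insn);

   With DF_DEFER_INSN_RESCAN set, the call above only queues the insn;
   the real work happens later in df_process_deferred_rescans.  */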
/* Same as df_insn_rescan, but don't mark the basic block as
   dirty.  */

bool
df_insn_rescan_debug_internal (rtx_insn *insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info;

  gcc_assert (DEBUG_INSN_P (insn)
	      && VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));

  if (!df)
    return false;

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
  if (!insn_info)
    return false;

  if (dump_file)
    fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);

  if (insn_info->defs == 0
      && insn_info->uses == 0
      && insn_info->eq_uses == 0
      && insn_info->mw_hardregs == 0)
    return false;

  df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

  if (df_chain)
    {
      df_ref_chain_delete_du_chain (insn_info->defs);
      df_ref_chain_delete_du_chain (insn_info->uses);
      df_ref_chain_delete_du_chain (insn_info->eq_uses);
    }

  df_ref_chain_delete (insn_info->defs);
  df_ref_chain_delete (insn_info->uses);
  df_ref_chain_delete (insn_info->eq_uses);

  insn_info->defs = 0;
  insn_info->uses = 0;
  insn_info->eq_uses = 0;
  insn_info->mw_hardregs = 0;

  return true;
}
/* Rescan all of the insns in the function.  Note that the artificial
   uses and defs are not touched.  This function will destroy def-use
   or use-def chains.  */

void
df_insn_rescan_all (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  basic_block bb;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap_head tmp;

  bitmap_initialize (&tmp, &df_bitmap_obstack);

  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  bitmap_copy (&tmp, &df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_insn_info_delete (uid);
    }

  bitmap_clear (&tmp);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;
      FOR_BB_INSNS (bb, insn)
	{
	  df_insn_rescan (insn);
	}
    }

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);
}
/* Process all of the deferred rescans or deletions.  */

void
df_process_deferred_rescans (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap_head tmp;

  bitmap_initialize (&tmp, &df_bitmap_obstack);

  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  if (dump_file)
    fprintf (dump_file, "starting the processing of deferred insns\n");

  bitmap_copy (&tmp, &df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_insn_info_delete (uid);
    }

  bitmap_copy (&tmp, &df->insns_to_rescan);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_insn_rescan (insn_info->insn);
    }

  bitmap_copy (&tmp, &df->insns_to_notes_rescan);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_notes_rescan (insn_info->insn);
    }

  if (dump_file)
    fprintf (dump_file, "ending the processing of deferred insns\n");

  bitmap_clear (&tmp);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);

  /* If someone changed regs_ever_live during this pass, fix up the
     entry and exit blocks.  */
  if (df->redo_entry_and_exit)
    {
      df_update_entry_exit_and_calls ();
      df->redo_entry_and_exit = false;
    }
}
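
/* A minimal usage sketch (hypothetical pass code, not part of this
   file): a pass that edits many insns can batch the scanning work by
   deferring rescans and flushing them once at the end:

     int old_flags = df_set_flags (DF_DEFER_INSN_RESCAN);
     ... edit insns; df_insn_rescan calls merely queue the uids ...
     df_process_deferred_rescans ();
     if (!(old_flags & DF_DEFER_INSN_RESCAN))
       df_clear_flags (DF_DEFER_INSN_RESCAN);   -- restore old mode

   df_set_flags returns the previous changeable flags, so the caller
   can restore whatever mode was in effect before.  */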
/* Count the number of refs.  Include the defs if INCLUDE_DEFS.  Include
   the uses if INCLUDE_USES.  Include the eq_uses if
   INCLUDE_EQ_USES.  */

static unsigned int
df_count_refs (bool include_defs, bool include_uses,
	       bool include_eq_uses)
{
  unsigned int regno;
  int size = 0;
  unsigned int m = df->regs_inited;

  for (regno = 0; regno < m; regno++)
    {
      if (include_defs)
	size += DF_REG_DEF_COUNT (regno);
      if (include_uses)
	size += DF_REG_USE_COUNT (regno);
      if (include_eq_uses)
	size += DF_REG_EQ_USE_COUNT (regno);
    }
  return size;
}
/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in reg order
   which is likely to be best if processing the whole function.  */

static void
df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
				  bool include_defs,
				  bool include_uses,
				  bool include_eq_uses)
{
  unsigned int m = df->regs_inited;
  unsigned int regno;
  unsigned int offset = 0;
  unsigned int start;

  if (df->changeable_flags & DF_NO_HARD_REGS)
    {
      start = FIRST_PSEUDO_REGISTER;
      memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
      memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
    }
  else
    start = 0;

  ref_info->total_size
    = df_count_refs (include_defs, include_uses, include_eq_uses);

  df_check_and_grow_ref_info (ref_info, 1);

  for (regno = start; regno < m; regno++)
    {
      int count = 0;
      ref_info->begin[regno] = offset;
      if (include_defs)
	{
	  df_ref ref = DF_REG_DEF_CHAIN (regno);
	  while (ref)
	    {
	      ref_info->refs[offset] = ref;
	      DF_REF_ID (ref) = offset++;
	      count++;
	      ref = DF_REF_NEXT_REG (ref);
	      gcc_checking_assert (offset < ref_info->refs_size);
	    }
	}
      if (include_uses)
	{
	  df_ref ref = DF_REG_USE_CHAIN (regno);
	  while (ref)
	    {
	      ref_info->refs[offset] = ref;
	      DF_REF_ID (ref) = offset++;
	      count++;
	      ref = DF_REF_NEXT_REG (ref);
	      gcc_checking_assert (offset < ref_info->refs_size);
	    }
	}
      if (include_eq_uses)
	{
	  df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
	  while (ref)
	    {
	      ref_info->refs[offset] = ref;
	      DF_REF_ID (ref) = offset++;
	      count++;
	      ref = DF_REF_NEXT_REG (ref);
	      gcc_checking_assert (offset < ref_info->refs_size);
	    }
	}
      ref_info->count[regno] = count;
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->table_size = offset;
}
/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in insn order
   which is likely to be best if processing some segment of the
   function.  */

static void
df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
				   bool include_defs,
				   bool include_uses,
				   bool include_eq_uses)
{
  bitmap_iterator bi;
  unsigned int bb_index;
  unsigned int m = df->regs_inited;
  unsigned int offset = 0;
  unsigned int r;
  unsigned int start
    = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;

  memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
  memset (ref_info->count, 0, sizeof (int) * df->regs_inited);

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);

  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
      rtx_insn *insn;
      df_ref def, use;

      if (include_defs)
	FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
	  {
	    unsigned int regno = DF_REF_REGNO (def);
	    ref_info->count[regno]++;
	  }
      if (include_uses)
	FOR_EACH_ARTIFICIAL_USE (use, bb_index)
	  {
	    unsigned int regno = DF_REF_REGNO (use);
	    ref_info->count[regno]++;
	  }

      FOR_BB_INSNS (bb, insn)
	if (INSN_P (insn))
	  {
	    struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);

	    if (include_defs)
	      FOR_EACH_INSN_INFO_DEF (def, insn_info)
		{
		  unsigned int regno = DF_REF_REGNO (def);
		  ref_info->count[regno]++;
		}
	    if (include_uses)
	      FOR_EACH_INSN_INFO_USE (use, insn_info)
		{
		  unsigned int regno = DF_REF_REGNO (use);
		  ref_info->count[regno]++;
		}
	    if (include_eq_uses)
	      FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
		{
		  unsigned int regno = DF_REF_REGNO (use);
		  ref_info->count[regno]++;
		}
	  }
    }

  for (r = start; r < m; r++)
    {
      ref_info->begin[r] = offset;
      offset += ref_info->count[r];
      ref_info->count[r] = 0;
    }

  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
      rtx_insn *insn;
      df_ref def, use;

      if (include_defs)
	FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
	  {
	    unsigned int regno = DF_REF_REGNO (def);
	    if (regno >= start)
	      {
		unsigned int id
		  = ref_info->begin[regno] + ref_info->count[regno]++;
		DF_REF_ID (def) = id;
		ref_info->refs[id] = def;
	      }
	  }
      if (include_uses)
	FOR_EACH_ARTIFICIAL_USE (use, bb_index)
	  {
	    unsigned int regno = DF_REF_REGNO (use);
	    if (regno >= start)
	      {
		unsigned int id
		  = ref_info->begin[regno] + ref_info->count[regno]++;
		DF_REF_ID (use) = id;
		ref_info->refs[id] = use;
	      }
	  }

      FOR_BB_INSNS (bb, insn)
	if (INSN_P (insn))
	  {
	    struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);

	    if (include_defs)
	      FOR_EACH_INSN_INFO_DEF (def, insn_info)
		{
		  unsigned int regno = DF_REF_REGNO (def);
		  if (regno >= start)
		    {
		      unsigned int id
			= ref_info->begin[regno] + ref_info->count[regno]++;
		      DF_REF_ID (def) = id;
		      ref_info->refs[id] = def;
		    }
		}
	    if (include_uses)
	      FOR_EACH_INSN_INFO_USE (use, insn_info)
		{
		  unsigned int regno = DF_REF_REGNO (use);
		  if (regno >= start)
		    {
		      unsigned int id
			= ref_info->begin[regno] + ref_info->count[regno]++;
		      DF_REF_ID (use) = id;
		      ref_info->refs[id] = use;
		    }
		}
	    if (include_eq_uses)
	      FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
		{
		  unsigned int regno = DF_REF_REGNO (use);
		  if (regno >= start)
		    {
		      unsigned int id
			= ref_info->begin[regno] + ref_info->count[regno]++;
		      DF_REF_ID (use) = id;
		      ref_info->refs[id] = use;
		    }
		}
	  }
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->table_size = offset;
}
/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  */

static void
df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
			   bool include_defs,
			   bool include_uses,
			   bool include_eq_uses)
{
  if (df->analyze_subset)
    df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
				       include_uses, include_eq_uses);
  else
    df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
				      include_uses, include_eq_uses);
}
/* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET.  */
static unsigned int
df_add_refs_to_table (unsigned int offset,
		      struct df_ref_info *ref_info,
		      df_ref ref)
{
  for (; ref; ref = DF_REF_NEXT_LOC (ref))
    if (!(df->changeable_flags & DF_NO_HARD_REGS)
	|| (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
      {
	ref_info->refs[offset] = ref;
	DF_REF_ID (ref) = offset++;
      }
  return offset;
}
/* Add the refs in all of the insns of BB to the table in REF_INFO
   starting at OFFSET.  Include the defs if INCLUDE_DEFS.  Include the
   uses if INCLUDE_USES.  Include the eq_uses if INCLUDE_EQ_USES.  */

static unsigned int
df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
			       struct df_ref_info *ref_info,
			       bool include_defs, bool include_uses,
			       bool include_eq_uses)
{
  rtx_insn *insn;

  if (include_defs)
    offset = df_add_refs_to_table (offset, ref_info,
				   df_get_artificial_defs (bb->index));
  if (include_uses)
    offset = df_add_refs_to_table (offset, ref_info,
				   df_get_artificial_uses (bb->index));

  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
	unsigned int uid = INSN_UID (insn);
	if (include_defs)
	  offset = df_add_refs_to_table (offset, ref_info,
					 DF_INSN_UID_DEFS (uid));
	if (include_uses)
	  offset = df_add_refs_to_table (offset, ref_info,
					 DF_INSN_UID_USES (uid));
	if (include_eq_uses)
	  offset = df_add_refs_to_table (offset, ref_info,
					 DF_INSN_UID_EQ_USES (uid));
      }
  return offset;
}
/* Organize the refs by insn into the table in REF_INFO.  If
   blocks_to_analyze is defined, use that set, otherwise the entire
   program.  Include the defs if INCLUDE_DEFS.  Include the uses if
   INCLUDE_USES.  Include the eq_uses if INCLUDE_EQ_USES.  */

static void
df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
			    bool include_defs, bool include_uses,
			    bool include_eq_uses)
{
  basic_block bb;
  unsigned int offset = 0;

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);
  if (df->blocks_to_analyze)
    {
      bitmap_iterator bi;
      unsigned int index;

      EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
	{
	  offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK_FOR_FN (cfun,
								      index),
						  offset, ref_info,
						  include_defs, include_uses,
						  include_eq_uses);
	}

      ref_info->table_size = offset;
    }
  else
    {
      FOR_ALL_BB_FN (bb, cfun)
	offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
						include_defs, include_uses,
						include_eq_uses);
      ref_info->table_size = offset;
    }
}
/* If the use refs in DF are not organized, reorganize them.  */

void
df_maybe_reorganize_use_refs (enum df_ref_order order)
{
  if (order == df->use_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_REG_WITH_NOTES:
      df_reorganize_refs_by_reg (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
      df_reorganize_refs_by_insn (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_NO_TABLE:
      free (df->use_info.refs);
      df->use_info.refs = NULL;
      df->use_info.refs_size = 0;
      break;

    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->use_info.ref_order = order;
}
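
/* A minimal usage sketch (hypothetical client code, not part of this
   file): a pass that wants to walk every use in the function through
   the flat table first requests an ordering, e.g.

     df_maybe_reorganize_use_refs (DF_REF_ORDER_BY_INSN);
     for (unsigned int i = 0; i < DF_USES_TABLE_SIZE (); i++)
       if (df_ref use = DF_USES_GET (i))
         { ... examine the use ... }

   DF_USES_TABLE_SIZE and DF_USES_GET are the df.h accessors for
   df->use_info; slots can be NULL where refs were deleted.  */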
/* If the def refs in DF are not organized, reorganize them.  */

void
df_maybe_reorganize_def_refs (enum df_ref_order order)
{
  if (order == df->def_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_NO_TABLE:
      free (df->def_info.refs);
      df->def_info.refs = NULL;
      df->def_info.refs_size = 0;
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
    case DF_REF_ORDER_BY_REG_WITH_NOTES:
    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->def_info.ref_order = order;
}
/* Change all of the basic block references in INSN to use the insn's
   current basic block.  This function is called from routines that move
   instructions from one block to another.  */

void
df_insn_change_bb (rtx_insn *insn, basic_block new_bb)
{
  basic_block old_bb = BLOCK_FOR_INSN (insn);
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (old_bb == new_bb)
    return;

  set_block_for_insn (insn, new_bb);

  if (!df)
    return;

  if (dump_file)
    fprintf (dump_file, "changing bb of uid %d\n", uid);

  insn_info = DF_INSN_UID_SAFE_GET (uid);
  if (insn_info == NULL)
    {
      if (dump_file)
	fprintf (dump_file, "  unscanned insn\n");
      df_insn_rescan (insn);
      return;
    }

  if (!INSN_P (insn))
    return;

  df_set_bb_dirty (new_bb);
  if (old_bb)
    {
      if (dump_file)
	fprintf (dump_file, "  from %d to %d\n",
		 old_bb->index, new_bb->index);
      df_set_bb_dirty (old_bb);
    }
  else
    if (dump_file)
      fprintf (dump_file, "  to %d\n", new_bb->index);
}
/* Helper function for df_ref_change_reg_with_loc.  */

static void
df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
			      struct df_reg_info *new_df,
			      unsigned int new_regno, rtx loc)
{
  df_ref the_ref = old_df->reg_chain;

  while (the_ref)
    {
      if ((!DF_REF_IS_ARTIFICIAL (the_ref))
	  && DF_REF_LOC (the_ref)
	  && (*DF_REF_LOC (the_ref) == loc))
	{
	  df_ref next_ref = DF_REF_NEXT_REG (the_ref);
	  df_ref prev_ref = DF_REF_PREV_REG (the_ref);
	  df_ref *ref_ptr;
	  struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);

	  DF_REF_REGNO (the_ref) = new_regno;
	  DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];

	  /* Pull the_ref out of the old regno chain.  */
	  if (prev_ref)
	    DF_REF_NEXT_REG (prev_ref) = next_ref;
	  else
	    old_df->reg_chain = next_ref;
	  if (next_ref)
	    DF_REF_PREV_REG (next_ref) = prev_ref;
	  old_df->n_refs--;

	  /* Put the ref into the new regno chain.  */
	  DF_REF_PREV_REG (the_ref) = NULL;
	  DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
	  if (new_df->reg_chain)
	    DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
	  new_df->reg_chain = the_ref;
	  new_df->n_refs++;
	  if (DF_REF_BB (the_ref))
	    df_set_bb_dirty (DF_REF_BB (the_ref));

	  /* Need to sort the record again that the ref was in because
	     the regno is a sorting key.  First, find the right
	     record.  */
	  if (DF_REF_REG_DEF_P (the_ref))
	    ref_ptr = &insn_info->defs;
	  else if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
	    ref_ptr = &insn_info->eq_uses;
	  else
	    ref_ptr = &insn_info->uses;
	  if (dump_file)
	    fprintf (dump_file, "changing reg in insn %d\n",
		     DF_REF_INSN_UID (the_ref));

	  /* Stop if we find the current reference or where the reference
	     needs to be.  */
	  while (*ref_ptr != the_ref && df_ref_compare (*ref_ptr, the_ref) < 0)
	    ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
	  if (*ref_ptr != the_ref)
	    {
	      /* The reference needs to be promoted up the list.  */
	      df_ref next = DF_REF_NEXT_LOC (the_ref);
	      DF_REF_NEXT_LOC (the_ref) = *ref_ptr;
	      *ref_ptr = the_ref;
	      do
		ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
	      while (*ref_ptr != the_ref);
	      *ref_ptr = next;
	    }
	  else if (DF_REF_NEXT_LOC (the_ref)
		   && df_ref_compare (the_ref, DF_REF_NEXT_LOC (the_ref)) > 0)
	    {
	      /* The reference needs to be demoted down the list.  */
	      *ref_ptr = DF_REF_NEXT_LOC (the_ref);
	      do
		ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
	      while (*ref_ptr && df_ref_compare (the_ref, *ref_ptr) > 0);
	      DF_REF_NEXT_LOC (the_ref) = *ref_ptr;
	      *ref_ptr = the_ref;
	    }

	  the_ref = next_ref;
	}
      else
	the_ref = DF_REF_NEXT_REG (the_ref);
    }
}
/* Change the regno of register LOC to NEW_REGNO and update the df
   information accordingly.  Refs that do not match LOC are not changed
   which means that artificial refs are not changed since they have no loc.
   This call is to support the SET_REGNO macro.  */

void
df_ref_change_reg_with_loc (rtx loc, unsigned int new_regno)
{
  unsigned int old_regno = REGNO (loc);
  if (old_regno == new_regno)
    return;

  if (df)
    {
      df_grow_reg_info ();

      df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
				    DF_REG_DEF_GET (new_regno),
				    new_regno, loc);
      df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
				    DF_REG_USE_GET (new_regno),
				    new_regno, loc);
      df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
				    DF_REG_EQ_USE_GET (new_regno),
				    new_regno, loc);
    }

  set_mode_and_regno (loc, GET_MODE (loc), new_regno);
}
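
/* A minimal usage sketch (illustrative only): callers do not normally
   invoke df_ref_change_reg_with_loc directly; they renumber a register
   rtx through the SET_REGNO macro from rtl.h, e.g.

     SET_REGNO (reg_rtx, new_regno);

   which forwards here so that both the rtx itself and the per-regno
   ref chains stay consistent.  */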
/* Delete the mw_hardregs that point into the eq_notes.  */

static void
df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
{
  struct df_mw_hardreg **mw_ptr = &insn_info->mw_hardregs;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  while (*mw_ptr)
    {
      df_mw_hardreg *mw = *mw_ptr;
      if (mw->flags & DF_REF_IN_NOTE)
	{
	  *mw_ptr = DF_MWS_NEXT (mw);
	  problem_data->mw_reg_pool->remove (mw);
	}
      else
	mw_ptr = &DF_MWS_NEXT (mw);
    }
}
1959 /* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN. */
1961 void
1962 df_notes_rescan (rtx_insn *insn)
1964 struct df_insn_info *insn_info;
1965 unsigned int uid = INSN_UID (insn);
1967 if (!df)
1968 return;
1970 /* The client has disabled rescanning and plans to do it itself. */
1971 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1972 return;
1974 /* Do nothing if the insn hasn't been emitted yet. */
1975 if (!BLOCK_FOR_INSN (insn))
1976 return;
1978 df_grow_bb_info (df_scan);
1979 df_grow_reg_info ();
1981 insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
1983 /* The client has deferred rescanning. */
1984 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1986 if (!insn_info)
1988 insn_info = df_insn_create_insn_record (insn);
1989 insn_info->defs = 0;
1990 insn_info->uses = 0;
1991 insn_info->eq_uses = 0;
1992 insn_info->mw_hardregs = 0;
1995 bitmap_clear_bit (&df->insns_to_delete, uid);
1996 /* If the insn is set to be rescanned, it does not need to also
1997 be notes rescanned. */
1998 if (!bitmap_bit_p (&df->insns_to_rescan, uid))
1999 bitmap_set_bit (&df->insns_to_notes_rescan, INSN_UID (insn));
2000 return;
2003 bitmap_clear_bit (&df->insns_to_delete, uid);
2004 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
2006 if (insn_info)
2008 basic_block bb = BLOCK_FOR_INSN (insn);
2009 rtx note;
2010 struct df_collection_rec collection_rec;
2011 unsigned int i;
2013 df_mw_hardreg_chain_delete_eq_uses (insn_info);
2014 df_ref_chain_delete (insn_info->eq_uses);
2015 insn_info->eq_uses = NULL;
2017 /* Process REG_EQUIV/REG_EQUAL notes */
2018 for (note = REG_NOTES (insn); note;
2019 note = XEXP (note, 1))
2021 switch (REG_NOTE_KIND (note))
2023 case REG_EQUIV:
2024 case REG_EQUAL:
2025 df_uses_record (&collection_rec,
2026 &XEXP (note, 0), DF_REF_REG_USE,
2027 bb, insn_info, DF_REF_IN_NOTE);
2028 default:
2029 break;
2033 /* Find some place to put any new mw_hardregs. */
2034 df_canonize_collection_rec (&collection_rec);
2035 struct df_mw_hardreg **mw_ptr = &insn_info->mw_hardregs, *mw;
2036 FOR_EACH_VEC_ELT (collection_rec.mw_vec, i, mw)
2038 while (*mw_ptr && df_mw_compare (*mw_ptr, mw) < 0)
2039 mw_ptr = &DF_MWS_NEXT (*mw_ptr);
2040 DF_MWS_NEXT (mw) = *mw_ptr;
2041 *mw_ptr = mw;
2042 mw_ptr = &DF_MWS_NEXT (mw);
2044 df_refs_add_to_chains (&collection_rec, bb, insn, copy_eq_uses);
2046 else
2047 df_insn_rescan (insn);
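/* A sketch of a typical call site (hypothetical): a pass that attaches
   or rewrites a REG_EQUAL note only needs the note refs refreshed, not
   a full insn rescan:

       set_unique_reg_note (insn, REG_EQUAL, src);
       df_notes_rescan (insn);

   set_unique_reg_note is the emit-rtl.c helper; SRC stands for the
   equivalent expression.  */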
2052 /*----------------------------------------------------------------------------
2053 Hard core instruction scanning code. No external interfaces here,
2054 just a lot of routines that look inside insns.
2055 ----------------------------------------------------------------------------*/
2058 /* Return true if the contents of two df_ref's are identical.
2059 It ignores DF_REF_MARKER. */
2061 static bool
2062 df_ref_equal_p (df_ref ref1, df_ref ref2)
2064 if (!ref2)
2065 return false;
2067 if (ref1 == ref2)
2068 return true;
2070 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
2071 || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
2072 || DF_REF_REG (ref1) != DF_REF_REG (ref2)
2073 || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
2074 || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2075 != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2076 || DF_REF_BB (ref1) != DF_REF_BB (ref2)
2077 || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
2078 return false;
2080 switch (DF_REF_CLASS (ref1))
2082 case DF_REF_ARTIFICIAL:
2083 case DF_REF_BASE:
2084 return true;
2086 case DF_REF_REGULAR:
2087 return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);
2089 default:
2090 gcc_unreachable ();
2092 return false;
2096 /* Compare REF1 and REF2 for sorting. This is only called from places
2097 where all of the refs are of the same type, in the same insn, and
2098 have the same bb. So these fields are not checked. */
2100 static int
2101 df_ref_compare (df_ref ref1, df_ref ref2)
2103 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
2104 return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);
2106 if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
2107 return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
2109 if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
2110 return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
2112 if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
2113 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2115 /* Cannot look at the LOC field on artificial refs. */
2116 if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
2117 && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
2118 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2120 if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
2122 /* If two refs are identical except that one of them is from
2123 a mw and one is not, we need to have the one with the mw
2124 first. */
2125 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
2126 DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
2127 return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
2128 else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
2129 return -1;
2130 else
2131 return 1;
2134 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2137 /* Like df_ref_compare, but compare two df_ref* pointers R1 and R2. */
2139 static int
2140 df_ref_ptr_compare (const void *r1, const void *r2)
2142 return df_ref_compare (*(const df_ref *) r1, *(const df_ref *) r2);
2145 static void
2146 df_swap_refs (vec<df_ref, va_heap> *ref_vec, int i, int j)
2148 df_ref tmp = (*ref_vec)[i];
2149 (*ref_vec)[i] = (*ref_vec)[j];
2150 (*ref_vec)[j] = tmp;
2153 /* Sort and compress a set of refs. */
2155 static void
2156 df_sort_and_compress_refs (vec<df_ref, va_heap> *ref_vec)
2158 unsigned int count;
2159 unsigned int i;
2160 unsigned int dist = 0;
2162 count = ref_vec->length ();
2164 /* If there are 1 or 0 elements, there is nothing to do. */
2165 if (count < 2)
2166 return;
2167 else if (count == 2)
2169 df_ref r0 = (*ref_vec)[0];
2170 df_ref r1 = (*ref_vec)[1];
2171 if (df_ref_compare (r0, r1) > 0)
2172 df_swap_refs (ref_vec, 0, 1);
2174 else
2176 for (i = 0; i < count - 1; i++)
2178 df_ref r0 = (*ref_vec)[i];
2179 df_ref r1 = (*ref_vec)[i + 1];
2180 if (df_ref_compare (r0, r1) >= 0)
2181 break;
2183 /* If the array is already strictly ordered,
2184 which is the most common case for a large COUNT
2185 (as happens for CALL insns),
2186 there is no need to sort and filter out duplicates.
2187 Simply return.
2188 Make sure DF_GET_ADD_REFS adds refs in the increasing order
2189 of DF_REF_COMPARE. */
2190 if (i == count - 1)
2191 return;
2192 ref_vec->qsort (df_ref_ptr_compare);
2195 for (i = 0; i < count - dist; i++)
2197 /* Find the next ref that is not equal to the current ref. */
2198 while (i + dist + 1 < count
2199 && df_ref_equal_p ((*ref_vec)[i],
2200 (*ref_vec)[i + dist + 1]))
2202 df_free_ref ((*ref_vec)[i + dist + 1]);
2203 dist++;
2205 /* Copy it down to the next position. */
2206 if (dist && i + dist + 1 < count)
2207 (*ref_vec)[i + 1] = (*ref_vec)[i + dist + 1];
2210 count -= dist;
2211 ref_vec->truncate (count);
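/* A worked trace of the compression loop above: for a vector
   [A, A, B, C, C, C] with df_ref_equal_p grouping the equal refs, the
   loop frees the duplicate A and the two duplicate Cs, slides the
   survivors down as it goes, ends with DIST == 3, and the final
   truncate leaves [A, B, C].  */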
2215 /* Return true if the contents of two df_mw_hardregs are identical. */
2218 static bool
2219 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2221 if (!mw2)
2222 return false;
2223 return (mw1 == mw2) ||
2224 (mw1->mw_reg == mw2->mw_reg
2225 && mw1->type == mw2->type
2226 && mw1->flags == mw2->flags
2227 && mw1->start_regno == mw2->start_regno
2228 && mw1->end_regno == mw2->end_regno);
2232 /* Compare MW1 and MW2 for sorting. */
2234 static int
2235 df_mw_compare (const df_mw_hardreg *mw1, const df_mw_hardreg *mw2)
2237 if (mw1->type != mw2->type)
2238 return mw1->type - mw2->type;
2240 if (mw1->flags != mw2->flags)
2241 return mw1->flags - mw2->flags;
2243 if (mw1->start_regno != mw2->start_regno)
2244 return mw1->start_regno - mw2->start_regno;
2246 if (mw1->end_regno != mw2->end_regno)
2247 return mw1->end_regno - mw2->end_regno;
2249 if (mw1->mw_reg != mw2->mw_reg)
2250 return mw1->mw_order - mw2->mw_order;
2252 return 0;
2255 /* Like df_mw_compare, but compare two df_mw_hardreg** pointers R1 and R2. */
2257 static int
2258 df_mw_ptr_compare (const void *m1, const void *m2)
2260 return df_mw_compare (*(const df_mw_hardreg *const *) m1,
2261 *(const df_mw_hardreg *const *) m2);
2264 /* Sort and compress a set of mws. */
2266 static void
2267 df_sort_and_compress_mws (vec<df_mw_hardreg_ptr, va_heap> *mw_vec)
2269 unsigned int count;
2270 struct df_scan_problem_data *problem_data
2271 = (struct df_scan_problem_data *) df_scan->problem_data;
2272 unsigned int i;
2273 unsigned int dist = 0;
2275 count = mw_vec->length ();
2276 if (count < 2)
2277 return;
2278 else if (count == 2)
2280 struct df_mw_hardreg *m0 = (*mw_vec)[0];
2281 struct df_mw_hardreg *m1 = (*mw_vec)[1];
2282 if (df_mw_compare (m0, m1) > 0)
2284 struct df_mw_hardreg *tmp = (*mw_vec)[0];
2285 (*mw_vec)[0] = (*mw_vec)[1];
2286 (*mw_vec)[1] = tmp;
2289 else
2290 mw_vec->qsort (df_mw_ptr_compare);
2292 for (i = 0; i < count - dist; i++)
2294 /* Find the next ref that is not equal to the current ref. */
2295 while (i + dist + 1 < count
2296 && df_mw_equal_p ((*mw_vec)[i], (*mw_vec)[i + dist + 1]))
2298 problem_data->mw_reg_pool->remove ((*mw_vec)[i + dist + 1]);
2299 dist++;
2301 /* Copy it down to the next position. */
2302 if (dist && i + dist + 1 < count)
2303 (*mw_vec)[i + 1] = (*mw_vec)[i + dist + 1];
2306 count -= dist;
2307 mw_vec->truncate (count);
2311 /* Sort and remove duplicates from the COLLECTION_REC. */
2313 static void
2314 df_canonize_collection_rec (struct df_collection_rec *collection_rec)
2316 df_sort_and_compress_refs (&collection_rec->def_vec);
2317 df_sort_and_compress_refs (&collection_rec->use_vec);
2318 df_sort_and_compress_refs (&collection_rec->eq_use_vec);
2319 df_sort_and_compress_mws (&collection_rec->mw_vec);
2323 /* Add the new df_ref to appropriate reg_info/ref_info chains. */
2325 static void
2326 df_install_ref (df_ref this_ref,
2327 struct df_reg_info *reg_info,
2328 struct df_ref_info *ref_info,
2329 bool add_to_table)
2331 unsigned int regno = DF_REF_REGNO (this_ref);
2332 /* Add the ref to the reg_{def,use,eq_use} chain. */
2333 df_ref head = reg_info->reg_chain;
2335 reg_info->reg_chain = this_ref;
2336 reg_info->n_refs++;
2338 if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
2340 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
2341 df->hard_regs_live_count[regno]++;
2344 gcc_checking_assert (DF_REF_NEXT_REG (this_ref) == NULL
2345 && DF_REF_PREV_REG (this_ref) == NULL);
2347 DF_REF_NEXT_REG (this_ref) = head;
2349 /* We cannot actually link to the head of the chain. */
2350 DF_REF_PREV_REG (this_ref) = NULL;
2352 if (head)
2353 DF_REF_PREV_REG (head) = this_ref;
2355 if (add_to_table)
2357 gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
2358 df_check_and_grow_ref_info (ref_info, 1);
2359 DF_REF_ID (this_ref) = ref_info->table_size;
2360 /* Add the ref to the big array of refs. */
2361 ref_info->refs[ref_info->table_size] = this_ref;
2362 ref_info->table_size++;
2364 else
2365 DF_REF_ID (this_ref) = -1;
2367 ref_info->total_size++;
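/* The per-register chain maintained here is what clients walk, as in
   this sketch (REGNO hypothetical; the accessors are from df.h):

       df_ref ref;
       for (ref = DF_REG_USE_CHAIN (regno);
            ref;
            ref = DF_REF_NEXT_REG (ref))
         ...visit each recorded use of REGNO...  */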
2371 /* This function takes one of the groups of refs (defs, uses or
2372 eq_uses) and installs the entire group into the insn. It also adds
2373 each of these refs into the appropriate chains. */
2375 static df_ref
2376 df_install_refs (basic_block bb,
2377 const vec<df_ref, va_heap> *old_vec,
2378 struct df_reg_info **reg_info,
2379 struct df_ref_info *ref_info,
2380 bool is_notes)
2382 unsigned int count = old_vec->length ();
2383 if (count)
2385 bool add_to_table;
2386 df_ref this_ref;
2387 unsigned int ix;
2389 switch (ref_info->ref_order)
2391 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
2392 case DF_REF_ORDER_BY_REG_WITH_NOTES:
2393 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
2394 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
2395 add_to_table = true;
2396 break;
2397 case DF_REF_ORDER_UNORDERED:
2398 case DF_REF_ORDER_BY_REG:
2399 case DF_REF_ORDER_BY_INSN:
2400 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
2401 add_to_table = !is_notes;
2402 break;
2403 default:
2404 add_to_table = false;
2405 break;
2408 /* Do not add if ref is not in the right blocks. */
2409 if (add_to_table && df->analyze_subset)
2410 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
2412 FOR_EACH_VEC_ELT (*old_vec, ix, this_ref)
2414 DF_REF_NEXT_LOC (this_ref) = (ix + 1 < old_vec->length ()
2415 ? (*old_vec)[ix + 1]
2416 : NULL);
2417 df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
2418 ref_info, add_to_table);
2420 return (*old_vec)[0];
2422 else
2423 return 0;
2427 /* This function takes the mws and installs the entire group into
2428 the insn. */
2430 static struct df_mw_hardreg *
2431 df_install_mws (const vec<df_mw_hardreg_ptr, va_heap> *old_vec)
2433 unsigned int count = old_vec->length ();
2434 if (count)
2436 for (unsigned int i = 0; i < count - 1; i++)
2437 DF_MWS_NEXT ((*old_vec)[i]) = (*old_vec)[i + 1];
2438 DF_MWS_NEXT ((*old_vec)[count - 1]) = 0;
2439 return (*old_vec)[0];
2441 else
2442 return 0;
2446 /* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
2447 chains and update other necessary information. */
2449 static void
2450 df_refs_add_to_chains (struct df_collection_rec *collection_rec,
2451 basic_block bb, rtx_insn *insn, unsigned int flags)
2453 if (insn)
2455 struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
2456 /* If there is a vector in the collection rec, add it to the
2457 insn. A null rec is a signal that the caller will handle the
2458 chain specially. */
2459 if (flags & copy_defs)
2461 gcc_checking_assert (!insn_rec->defs);
2462 insn_rec->defs
2463 = df_install_refs (bb, &collection_rec->def_vec,
2464 df->def_regs,
2465 &df->def_info, false);
2467 if (flags & copy_uses)
2469 gcc_checking_assert (!insn_rec->uses);
2470 insn_rec->uses
2471 = df_install_refs (bb, &collection_rec->use_vec,
2472 df->use_regs,
2473 &df->use_info, false);
2475 if (flags & copy_eq_uses)
2477 gcc_checking_assert (!insn_rec->eq_uses);
2478 insn_rec->eq_uses
2479 = df_install_refs (bb, &collection_rec->eq_use_vec,
2480 df->eq_use_regs,
2481 &df->use_info, true);
2483 if (flags & copy_mw)
2485 gcc_checking_assert (!insn_rec->mw_hardregs);
2486 insn_rec->mw_hardregs
2487 = df_install_mws (&collection_rec->mw_vec);
2490 else
2492 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
2494 gcc_checking_assert (!bb_info->artificial_defs);
2495 bb_info->artificial_defs
2496 = df_install_refs (bb, &collection_rec->def_vec,
2497 df->def_regs,
2498 &df->def_info, false);
2499 gcc_checking_assert (!bb_info->artificial_uses);
2500 bb_info->artificial_uses
2501 = df_install_refs (bb, &collection_rec->use_vec,
2502 df->use_regs,
2503 &df->use_info, false);
2508 /* Allocate a ref and initialize its fields. */
2510 static df_ref
2511 df_ref_create_structure (enum df_ref_class cl,
2512 struct df_collection_rec *collection_rec,
2513 rtx reg, rtx *loc,
2514 basic_block bb, struct df_insn_info *info,
2515 enum df_ref_type ref_type,
2516 int ref_flags)
2518 df_ref this_ref = NULL;
2519 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2520 struct df_scan_problem_data *problem_data
2521 = (struct df_scan_problem_data *) df_scan->problem_data;
2523 switch (cl)
2525 case DF_REF_BASE:
2526 this_ref = (df_ref) (problem_data->ref_base_pool->allocate ());
2527 gcc_checking_assert (loc == NULL);
2528 break;
2530 case DF_REF_ARTIFICIAL:
2531 this_ref = (df_ref) (problem_data->ref_artificial_pool->allocate ());
2532 this_ref->artificial_ref.bb = bb;
2533 gcc_checking_assert (loc == NULL);
2534 break;
2536 case DF_REF_REGULAR:
2537 this_ref = (df_ref) (problem_data->ref_regular_pool->allocate ());
2538 this_ref->regular_ref.loc = loc;
2539 gcc_checking_assert (loc);
2540 break;
2543 DF_REF_CLASS (this_ref) = cl;
2544 DF_REF_ID (this_ref) = -1;
2545 DF_REF_REG (this_ref) = reg;
2546 DF_REF_REGNO (this_ref) = regno;
2547 DF_REF_TYPE (this_ref) = ref_type;
2548 DF_REF_INSN_INFO (this_ref) = info;
2549 DF_REF_CHAIN (this_ref) = NULL;
2550 DF_REF_FLAGS (this_ref) = ref_flags;
2551 DF_REF_NEXT_REG (this_ref) = NULL;
2552 DF_REF_PREV_REG (this_ref) = NULL;
2553 DF_REF_ORDER (this_ref) = df->ref_order++;
2555 /* We need to clear this bit because fwprop, and in the future
2556 possibly other optimizations, sometimes create new refs using old
2557 refs as the model. */
2558 DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
2560 /* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
2561 if (regno < FIRST_PSEUDO_REGISTER
2562 && !DF_REF_IS_ARTIFICIAL (this_ref)
2563 && !DEBUG_INSN_P (DF_REF_INSN (this_ref)))
2565 if (DF_REF_REG_DEF_P (this_ref))
2567 if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
2568 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2570 else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
2571 && (regno == FRAME_POINTER_REGNUM
2572 || regno == ARG_POINTER_REGNUM)))
2573 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2576 if (collection_rec)
2578 if (DF_REF_REG_DEF_P (this_ref))
2579 collection_rec->def_vec.safe_push (this_ref);
2580 else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
2581 collection_rec->eq_use_vec.safe_push (this_ref);
2582 else
2583 collection_rec->use_vec.safe_push (this_ref);
2585 else
2586 df_install_ref_incremental (this_ref);
2588 return this_ref;
2592 /* Create new references of type DF_REF_TYPE for each part of register REG
2593 at address LOC within INSN of BB. */
2596 static void
2597 df_ref_record (enum df_ref_class cl,
2598 struct df_collection_rec *collection_rec,
2599 rtx reg, rtx *loc,
2600 basic_block bb, struct df_insn_info *insn_info,
2601 enum df_ref_type ref_type,
2602 int ref_flags)
2604 unsigned int regno;
2606 gcc_checking_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
2608 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2609 if (regno < FIRST_PSEUDO_REGISTER)
2611 struct df_mw_hardreg *hardreg = NULL;
2612 struct df_scan_problem_data *problem_data
2613 = (struct df_scan_problem_data *) df_scan->problem_data;
2614 unsigned int i;
2615 unsigned int endregno;
2616 df_ref ref;
2618 if (GET_CODE (reg) == SUBREG)
2620 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
2621 SUBREG_BYTE (reg), GET_MODE (reg));
2622 endregno = regno + subreg_nregs (reg);
2624 else
2625 endregno = END_REGNO (reg);
2627 /* If this is a multiword hardreg, we create some extra
2628 data structures that will enable us to easily build REG_DEAD
2629 and REG_UNUSED notes. */
2630 if (collection_rec
2631 && (endregno != regno + 1) && insn_info)
2633 /* Sets to a subreg of a multiword register are partial.
2634 Sets to a non-subreg of a multiword register are not. */
2635 if (GET_CODE (reg) == SUBREG)
2636 ref_flags |= DF_REF_PARTIAL;
2637 ref_flags |= DF_REF_MW_HARDREG;
2639 hardreg = problem_data->mw_reg_pool->allocate ();
2640 hardreg->type = ref_type;
2641 hardreg->flags = ref_flags;
2642 hardreg->mw_reg = reg;
2643 hardreg->start_regno = regno;
2644 hardreg->end_regno = endregno - 1;
2645 hardreg->mw_order = df->ref_order++;
2646 collection_rec->mw_vec.safe_push (hardreg);
2649 for (i = regno; i < endregno; i++)
2651 ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
2652 bb, insn_info, ref_type, ref_flags);
2654 gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
2657 else
2659 df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
2660 ref_type, ref_flags);
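/* For example, on a hypothetical 32-bit target where (reg:DI 2) spans
   hard regs 2 and 3, a def of that register creates two refs, one for
   regno_reg_rtx[2] and one for regno_reg_rtx[3], each flagged
   DF_REF_MW_HARDREG, plus one df_mw_hardreg with start_regno 2 and
   end_regno 3 describing the whole group.  */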
2665 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
2666 covered by the outer mode is smaller than that covered by the inner mode,
2667 is a read-modify-write operation.
2668 This function returns true iff the SUBREG X is such a SUBREG. */
2670 bool
2671 df_read_modify_subreg_p (rtx x)
2673 unsigned int isize, osize;
2674 if (GET_CODE (x) != SUBREG)
2675 return false;
2676 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2677 osize = GET_MODE_SIZE (GET_MODE (x));
2678 return isize > osize
2679 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
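/* Illustrative cases, assuming a 32-bit target where
   REGMODE_NATURAL_SIZE of these registers is 4:

       (set (subreg:SI (reg:DI x) 0) ...)   isize 8 > osize 4: true
       (set (subreg:HI (reg:SI y) 0) ...)   isize 4 is not > 4: false

   i.e. only a partial write of a multiword value is read-modify-write;
   a subreg write within a single word clobbers the whole register.  */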
2683 /* Process all the registers defined in the rtx pointed to by LOC.
2684 Autoincrement/decrement definitions will be picked up by df_uses_record.
2685 Any change here has to be matched in df_find_hard_reg_defs_1. */
2687 static void
2688 df_def_record_1 (struct df_collection_rec *collection_rec,
2689 rtx *loc, basic_block bb, struct df_insn_info *insn_info,
2690 int flags)
2692 rtx dst = *loc;
2694 /* It is legal to have a set destination be a parallel. */
2695 if (GET_CODE (dst) == PARALLEL)
2697 int i;
2698 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2700 rtx temp = XVECEXP (dst, 0, i);
2701 gcc_assert (GET_CODE (temp) == EXPR_LIST);
2702 df_def_record_1 (collection_rec, &XEXP (temp, 0),
2703 bb, insn_info, flags);
2705 return;
2708 if (GET_CODE (dst) == STRICT_LOW_PART)
2710 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;
2712 loc = &XEXP (dst, 0);
2713 dst = *loc;
2716 if (GET_CODE (dst) == ZERO_EXTRACT)
2718 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;
2720 loc = &XEXP (dst, 0);
2721 dst = *loc;
2724 /* At this point if we do not have a reg or a subreg, just return. */
2725 if (REG_P (dst))
2727 df_ref_record (DF_REF_REGULAR, collection_rec,
2728 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
2730 /* We want to keep sp alive everywhere - by making all
2731 writes to sp also uses of sp. */
2732 if (REGNO (dst) == STACK_POINTER_REGNUM)
2733 df_ref_record (DF_REF_BASE, collection_rec,
2734 dst, NULL, bb, insn_info, DF_REF_REG_USE, flags);
2736 else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
2738 if (df_read_modify_subreg_p (dst))
2739 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
2741 flags |= DF_REF_SUBREG;
2743 df_ref_record (DF_REF_REGULAR, collection_rec,
2744 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
2749 /* Process all the registers defined in the pattern rtx, X. Any change
2750 here has to be matched in df_find_hard_reg_defs. */
2752 static void
2753 df_defs_record (struct df_collection_rec *collection_rec,
2754 rtx x, basic_block bb, struct df_insn_info *insn_info,
2755 int flags)
2757 RTX_CODE code = GET_CODE (x);
2758 int i;
2760 switch (code)
2762 case SET:
2763 df_def_record_1 (collection_rec, &SET_DEST (x), bb, insn_info, flags);
2764 break;
2766 case CLOBBER:
2767 flags |= DF_REF_MUST_CLOBBER;
2768 df_def_record_1 (collection_rec, &XEXP (x, 0), bb, insn_info, flags);
2769 break;
2771 case COND_EXEC:
2772 df_defs_record (collection_rec, COND_EXEC_CODE (x),
2773 bb, insn_info, DF_REF_CONDITIONAL);
2774 break;
2776 case PARALLEL:
2777 for (i = 0; i < XVECLEN (x, 0); i++)
2778 df_defs_record (collection_rec, XVECEXP (x, 0, i),
2779 bb, insn_info, flags);
2780 break;
2781 default:
2782 /* No DEFs to record in other cases. */
2783 break;
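/* For instance, a pattern such as (register numbers hypothetical)

       (parallel [(set (reg:SI 0) (reg:SI 1))
                  (clobber (reg:CC 17))])

   goes through the PARALLEL case above: the SET arm records a plain
   def of reg 0 and the CLOBBER arm records a def of reg 17 carrying
   DF_REF_MUST_CLOBBER.  */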
2787 /* Set bits in *DEFS for hard registers found in the rtx DST, which is the
2788 destination of a set or clobber. This has to match the logic in
2789 df_def_record_1. */
2791 static void
2792 df_find_hard_reg_defs_1 (rtx dst, HARD_REG_SET *defs)
2794 /* It is legal to have a set destination be a parallel. */
2795 if (GET_CODE (dst) == PARALLEL)
2797 int i;
2798 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2800 rtx temp = XVECEXP (dst, 0, i);
2801 gcc_assert (GET_CODE (temp) == EXPR_LIST);
2802 df_find_hard_reg_defs_1 (XEXP (temp, 0), defs);
2804 return;
2807 if (GET_CODE (dst) == STRICT_LOW_PART)
2808 dst = XEXP (dst, 0);
2810 if (GET_CODE (dst) == ZERO_EXTRACT)
2811 dst = XEXP (dst, 0);
2813 /* At this point if we do not have a reg or a subreg, just return. */
2814 if (REG_P (dst) && HARD_REGISTER_P (dst))
2815 SET_HARD_REG_BIT (*defs, REGNO (dst));
2816 else if (GET_CODE (dst) == SUBREG
2817 && REG_P (SUBREG_REG (dst)) && HARD_REGISTER_P (dst))
2818 SET_HARD_REG_BIT (*defs, REGNO (SUBREG_REG (dst)));
2821 /* Set bits in *DEFS for hard registers defined in the pattern X. This
2822 has to match the logic in df_defs_record. */
2824 static void
2825 df_find_hard_reg_defs (rtx x, HARD_REG_SET *defs)
2827 RTX_CODE code = GET_CODE (x);
2828 int i;
2830 switch (code)
2832 case SET:
2833 df_find_hard_reg_defs_1 (SET_DEST (x), defs);
2834 break;
2836 case CLOBBER:
2837 df_find_hard_reg_defs_1 (XEXP (x, 0), defs);
2838 break;
2840 case COND_EXEC:
2841 df_find_hard_reg_defs (COND_EXEC_CODE (x), defs);
2842 break;
2844 case PARALLEL:
2845 for (i = 0; i < XVECLEN (x, 0); i++)
2846 df_find_hard_reg_defs (XVECEXP (x, 0, i), defs);
2847 break;
2848 default:
2849 /* No DEFs to record in other cases. */
2850 break;
2855 /* Process all the registers used in the rtx at address LOC. */
2857 static void
2858 df_uses_record (struct df_collection_rec *collection_rec,
2859 rtx *loc, enum df_ref_type ref_type,
2860 basic_block bb, struct df_insn_info *insn_info,
2861 int flags)
2863 RTX_CODE code;
2864 rtx x;
2866 retry:
2867 x = *loc;
2868 if (!x)
2869 return;
2870 code = GET_CODE (x);
2871 switch (code)
2873 case LABEL_REF:
2874 case SYMBOL_REF:
2875 case CONST:
2876 CASE_CONST_ANY:
2877 case PC:
2878 case CC0:
2879 case ADDR_VEC:
2880 case ADDR_DIFF_VEC:
2881 return;
2883 case CLOBBER:
2884 /* If we are clobbering a MEM, mark any registers inside the address
2885 as being used. */
2886 if (MEM_P (XEXP (x, 0)))
2887 df_uses_record (collection_rec,
2888 &XEXP (XEXP (x, 0), 0),
2889 DF_REF_REG_MEM_STORE,
2890 bb, insn_info,
2891 flags);
2893 /* If we're clobbering a REG then we have a def so ignore. */
2894 return;
2896 case MEM:
2897 df_uses_record (collection_rec,
2898 &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
2899 bb, insn_info, flags & DF_REF_IN_NOTE);
2900 return;
2902 case SUBREG:
2903 /* While we're here, optimize this case. */
2904 flags |= DF_REF_PARTIAL;
2905 /* In case the SUBREG is not of a REG, do not optimize. */
2906 if (!REG_P (SUBREG_REG (x)))
2908 loc = &SUBREG_REG (x);
2909 df_uses_record (collection_rec, loc, ref_type, bb, insn_info, flags);
2910 return;
2912 /* ... Fall through ... */
2914 case REG:
2915 df_ref_record (DF_REF_REGULAR, collection_rec,
2916 x, loc, bb, insn_info,
2917 ref_type, flags);
2918 return;
2920 case SIGN_EXTRACT:
2921 case ZERO_EXTRACT:
2923 df_uses_record (collection_rec,
2924 &XEXP (x, 1), ref_type, bb, insn_info, flags);
2925 df_uses_record (collection_rec,
2926 &XEXP (x, 2), ref_type, bb, insn_info, flags);
2928 /* If the parameters to the zero or sign extract are
2929 constants, strip them off and recurse; otherwise there is
2930 no information that we can gain from this operation. */
2931 if (code == ZERO_EXTRACT)
2932 flags |= DF_REF_ZERO_EXTRACT;
2933 else
2934 flags |= DF_REF_SIGN_EXTRACT;
2936 df_uses_record (collection_rec,
2937 &XEXP (x, 0), ref_type, bb, insn_info, flags);
2938 return;
2940 break;
2942 case SET:
2944 rtx dst = SET_DEST (x);
2945 gcc_assert (!(flags & DF_REF_IN_NOTE));
2946 df_uses_record (collection_rec,
2947 &SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags);
2949 switch (GET_CODE (dst))
2951 case SUBREG:
2952 if (df_read_modify_subreg_p (dst))
2954 df_uses_record (collection_rec, &SUBREG_REG (dst),
2955 DF_REF_REG_USE, bb, insn_info,
2956 flags | DF_REF_READ_WRITE | DF_REF_SUBREG);
2957 break;
2959 /* Fall through. */
2960 case REG:
2961 case PARALLEL:
2962 case SCRATCH:
2963 case PC:
2964 case CC0:
2965 break;
2966 case MEM:
2967 df_uses_record (collection_rec, &XEXP (dst, 0),
2968 DF_REF_REG_MEM_STORE, bb, insn_info, flags);
2969 break;
2970 case STRICT_LOW_PART:
2972 rtx *temp = &XEXP (dst, 0);
2973 /* A strict_low_part uses the whole REG and not just the
2974 SUBREG. */
2975 dst = XEXP (dst, 0);
2976 df_uses_record (collection_rec,
2977 (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
2978 DF_REF_REG_USE, bb, insn_info,
2979 DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART);
2981 break;
2982 case ZERO_EXTRACT:
2984 df_uses_record (collection_rec, &XEXP (dst, 1),
2985 DF_REF_REG_USE, bb, insn_info, flags);
2986 df_uses_record (collection_rec, &XEXP (dst, 2),
2987 DF_REF_REG_USE, bb, insn_info, flags);
2988 if (GET_CODE (XEXP (dst, 0)) == MEM)
2989 df_uses_record (collection_rec, &XEXP (dst, 0),
2990 DF_REF_REG_USE, bb, insn_info,
2991 flags);
2992 else
2993 df_uses_record (collection_rec, &XEXP (dst, 0),
2994 DF_REF_REG_USE, bb, insn_info,
2995 DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT);
2997 break;
2999 default:
3000 gcc_unreachable ();
3002 return;
3005 case RETURN:
3006 case SIMPLE_RETURN:
3007 break;
3009 case ASM_OPERANDS:
3010 case UNSPEC_VOLATILE:
3011 case TRAP_IF:
3012 case ASM_INPUT:
3014 /* Traditional and volatile asm instructions must be
3015 considered to use and clobber all hard registers, all
3016 pseudo-registers and all of memory. So must TRAP_IF and
3017 UNSPEC_VOLATILE operations.
3019 Consider for instance a volatile asm that changes the fpu
3020 rounding mode. An insn should not be moved across this
3021 even if it only uses pseudo-regs because it might give an
3022 incorrectly rounded result.
3024 However, flow.c's liveness computation did *not* do this,
3025 giving the reasoning as " ?!? Unfortunately, marking all
3026 hard registers as live causes massive problems for the
3027 register allocator and marking all pseudos as live creates
3028 mountains of uninitialized variable warnings."
3030 In order to maintain the status quo with regard to liveness
3031 and uses, we do what flow.c did and just mark any regs we
3032 can find in ASM_OPERANDS as used. Global asm insns are
3033 scanned and regs_asm_clobbered is filled out.
3035 For all ASM_OPERANDS, we must traverse the vector of input
3036 operands. We cannot just fall through here, since then we
3037 would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
3038 which does not indicate a traditional asm, unlike its normal
3039 usage. */
3040 if (code == ASM_OPERANDS)
3042 int j;
3044 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
3045 df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
3046 DF_REF_REG_USE, bb, insn_info, flags);
3047 return;
3049 break;
3052 case VAR_LOCATION:
3053 df_uses_record (collection_rec,
3054 &PAT_VAR_LOCATION_LOC (x),
3055 DF_REF_REG_USE, bb, insn_info, flags);
3056 return;
3058 case PRE_DEC:
3059 case POST_DEC:
3060 case PRE_INC:
3061 case POST_INC:
3062 case PRE_MODIFY:
3063 case POST_MODIFY:
3064 gcc_assert (!DEBUG_INSN_P (insn_info->insn));
3065 /* Catch the def of the register being modified. */
3066 df_ref_record (DF_REF_REGULAR, collection_rec, XEXP (x, 0), &XEXP (x, 0),
3067 bb, insn_info,
3068 DF_REF_REG_DEF,
3069 flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);
3071 /* ... Fall through to handle uses ... */
3073 default:
3074 break;
3077 /* Recursively scan the operands of this expression. */
3079 const char *fmt = GET_RTX_FORMAT (code);
3080 int i;
3082 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3084 if (fmt[i] == 'e')
3086 /* Tail recursive case: save a function call level. */
3087 if (i == 0)
3089 loc = &XEXP (x, 0);
3090 goto retry;
3092 df_uses_record (collection_rec, &XEXP (x, i), ref_type,
3093 bb, insn_info, flags);
3095 else if (fmt[i] == 'E')
3097 int j;
3098 for (j = 0; j < XVECLEN (x, i); j++)
3099 df_uses_record (collection_rec,
3100 &XVECEXP (x, i, j), ref_type,
3101 bb, insn_info, flags);
3106 return;
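/* As an example, an address such as (mem:SI (pre_inc:SI (reg:SI 3)))
   (register number hypothetical) reaches the PRE_INC case above: the
   register gets a DF_REF_REG_DEF ref flagged DF_REF_READ_WRITE and
   DF_REF_PRE_POST_MODIFY, and the fall-through then records the
   ordinary use of the same register.  */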
3110 /* For all DF_REF_CONDITIONAL defs, add corresponding uses. */
3112 static void
3113 df_get_conditional_uses (struct df_collection_rec *collection_rec)
3115 unsigned int ix;
3116 df_ref ref;
3118 FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
3120 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3122 df_ref use;
3124 use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
3125 DF_REF_LOC (ref), DF_REF_BB (ref),
3126 DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
3127 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
3128 DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3134 /* Get call's extra defs and uses (track caller-saved registers). */
3136 static void
3137 df_get_call_refs (struct df_collection_rec *collection_rec,
3138 basic_block bb,
3139 struct df_insn_info *insn_info,
3140 int flags)
3142 rtx note;
3143 bool is_sibling_call;
3144 unsigned int i;
3145 HARD_REG_SET defs_generated;
3146 HARD_REG_SET fn_reg_set_usage;
3148 CLEAR_HARD_REG_SET (defs_generated);
3149 df_find_hard_reg_defs (PATTERN (insn_info->insn), &defs_generated);
3150 is_sibling_call = SIBLING_CALL_P (insn_info->insn);
3151 get_call_reg_set_usage (insn_info->insn, &fn_reg_set_usage,
3152 regs_invalidated_by_call);
3154 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3156 if (i == STACK_POINTER_REGNUM)
3157 /* The stack ptr is used (honorarily) by a CALL insn. */
3158 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3159 NULL, bb, insn_info, DF_REF_REG_USE,
3160 DF_REF_CALL_STACK_USAGE | flags);
3161 else if (global_regs[i])
3163 /* Calls to const functions cannot access any global registers and
3164 calls to pure functions cannot set them. All other calls may
3165 reference any of the global registers, so they are recorded as
3166 used. */
3167 if (!RTL_CONST_CALL_P (insn_info->insn))
3169 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3170 NULL, bb, insn_info, DF_REF_REG_USE, flags);
3171 if (!RTL_PURE_CALL_P (insn_info->insn))
3172 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3173 NULL, bb, insn_info, DF_REF_REG_DEF, flags);
3176 else if (TEST_HARD_REG_BIT (fn_reg_set_usage, i)
3177 /* No clobbers for regs that are the result of the call. */
3178 && !TEST_HARD_REG_BIT (defs_generated, i)
3179 && (!is_sibling_call
3180 || !bitmap_bit_p (df->exit_block_uses, i)
3181 || refers_to_regno_p (i, crtl->return_rtx)))
3182 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3183 NULL, bb, insn_info, DF_REF_REG_DEF,
3184 DF_REF_MAY_CLOBBER | flags);
3187 /* Record the registers used to pass arguments, and those explicitly
3188 noted as clobbered. */
3189 for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
3190 note = XEXP (note, 1))
3192 if (GET_CODE (XEXP (note, 0)) == USE)
3193 df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
3194 DF_REF_REG_USE, bb, insn_info, flags);
3195 else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3197 if (REG_P (XEXP (XEXP (note, 0), 0)))
3199 unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
3200 if (!TEST_HARD_REG_BIT (defs_generated, regno))
3201 df_defs_record (collection_rec, XEXP (note, 0), bb,
3202 insn_info, flags);
3204 else
3205 df_uses_record (collection_rec, &XEXP (note, 0),
3206 DF_REF_REG_USE, bb, insn_info, flags);
3210 return;
3213 /* Collect all refs in the INSN. This function is free of any
3214 side effects - it will create and return lists of df_refs in the
3215 COLLECTION_REC without putting those refs into existing ref chains
3216 and reg chains. */
3218 static void
3219 df_insn_refs_collect (struct df_collection_rec *collection_rec,
3220 basic_block bb, struct df_insn_info *insn_info)
3222 rtx note;
3223 bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);
3225 /* Clear out the collection record. */
3226 collection_rec->def_vec.truncate (0);
3227 collection_rec->use_vec.truncate (0);
3228 collection_rec->eq_use_vec.truncate (0);
3229 collection_rec->mw_vec.truncate (0);
3231 /* Process REG_EQUIV/REG_EQUAL notes. */
3232 for (note = REG_NOTES (insn_info->insn); note;
3233 note = XEXP (note, 1))
3235 switch (REG_NOTE_KIND (note))
3237 case REG_EQUIV:
3238 case REG_EQUAL:
3239 df_uses_record (collection_rec,
3240 &XEXP (note, 0), DF_REF_REG_USE,
3241 bb, insn_info, DF_REF_IN_NOTE);
3242 break;
3243 case REG_NON_LOCAL_GOTO:
3244 /* The frame ptr is used by a non-local goto. */
3245 df_ref_record (DF_REF_BASE, collection_rec,
3246 regno_reg_rtx[FRAME_POINTER_REGNUM],
3247 NULL, bb, insn_info,
3248 DF_REF_REG_USE, 0);
3249 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
3250 df_ref_record (DF_REF_BASE, collection_rec,
3251 regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
3252 NULL, bb, insn_info,
3253 DF_REF_REG_USE, 0);
3254 break;
3255 default:
3256 break;
3260 /* For CALL_INSNs, first record DF_REF_BASE register defs, as well as
3261 uses from CALL_INSN_FUNCTION_USAGE. */
3262 if (CALL_P (insn_info->insn))
3263 df_get_call_refs (collection_rec, bb, insn_info,
3264 (is_cond_exec) ? DF_REF_CONDITIONAL : 0);
3266 /* Record other defs. These should be mostly for DF_REF_REGULAR, so
3267 that a qsort on the defs is unnecessary in most cases. */
3268 df_defs_record (collection_rec,
3269 PATTERN (insn_info->insn), bb, insn_info, 0);
3271 /* Record the register uses. */
3272 df_uses_record (collection_rec,
3273 &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0);
3275 /* DF_REF_CONDITIONAL needs corresponding USES. */
3276 if (is_cond_exec)
3277 df_get_conditional_uses (collection_rec);
3279 df_canonize_collection_rec (collection_rec);
3282 /* Recompute the luids for the insns in BB. */
3284 void
3285 df_recompute_luids (basic_block bb)
3287 rtx_insn *insn;
3288 int luid = 0;
3290 df_grow_insn_info ();
3292 /* Scan the block an insn at a time from beginning to end. */
3293 FOR_BB_INSNS (bb, insn)
3295 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3296 /* Inserting labels does not always trigger the incremental
3297 rescanning. */
3298 if (!insn_info)
3300 gcc_assert (!INSN_P (insn));
3301 insn_info = df_insn_create_insn_record (insn);
3304 DF_INSN_INFO_LUID (insn_info) = luid;
3305 if (INSN_P (insn))
3306 luid++;
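/* A sketch of the intended use (hypothetical caller): a pass that
   emits new insns into a block without a full rescan refreshes the
   logical uids in one sweep:

       emit_insn_after (pat, insn);
       df_recompute_luids (BLOCK_FOR_INSN (insn));  */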
3311 /* Collect all artificial refs at the block level for BB and add them
3312 to COLLECTION_REC. */
3314 static void
3315 df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
3317 collection_rec->def_vec.truncate (0);
3318 collection_rec->use_vec.truncate (0);
3319 collection_rec->eq_use_vec.truncate (0);
3320 collection_rec->mw_vec.truncate (0);
3322 if (bb->index == ENTRY_BLOCK)
3324 df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
3325 return;
3327 else if (bb->index == EXIT_BLOCK)
3329 df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
3330 return;
3333 if (bb_has_eh_pred (bb))
3335 unsigned int i;
3336 /* Mark the registers that will contain data for the handler. */
3337 for (i = 0; ; ++i)
3339 unsigned regno = EH_RETURN_DATA_REGNO (i);
3340 if (regno == INVALID_REGNUM)
3341 break;
3342 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3343 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3347 /* Add the hard_frame_pointer if this block is the target of a
3348 non-local goto. */
3349 if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
3350 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
3351 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3353 /* Add the artificial uses. */
3354 if (bb->index >= NUM_FIXED_BLOCKS)
3356 bitmap_iterator bi;
3357 unsigned int regno;
3358 bitmap au = bb_has_eh_pred (bb)
3359 ? &df->eh_block_artificial_uses
3360 : &df->regular_block_artificial_uses;
3362 EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
3364 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3365 bb, NULL, DF_REF_REG_USE, 0);
3369 df_canonize_collection_rec (collection_rec);
3373 /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS. */
3375 void
3376 df_bb_refs_record (int bb_index, bool scan_insns)
3378 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
3379 rtx_insn *insn;
3380 int luid = 0;
3382 if (!df)
3383 return;
3385 df_collection_rec collection_rec;
3386 df_grow_bb_info (df_scan);
3387 if (scan_insns)
3388 /* Scan the block an insn at a time from beginning to end. */
3389 FOR_BB_INSNS (bb, insn)
3391 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3392 gcc_assert (!insn_info);
3394 insn_info = df_insn_create_insn_record (insn);
3395 if (INSN_P (insn))
3397 /* Record refs within INSN. */
3398 DF_INSN_INFO_LUID (insn_info) = luid++;
3399 df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
3400 df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
3402 DF_INSN_INFO_LUID (insn_info) = luid;
3405 /* Other block level artificial refs */
3406 df_bb_refs_collect (&collection_rec, bb);
3407 df_refs_add_to_chains (&collection_rec, bb, NULL, copy_all);
3409 /* Now that the block has been processed, set the block as dirty so
3410 LR and LIVE will get it processed. */
3411 df_set_bb_dirty (bb);
3415 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3416 block. */
3418 static void
3419 df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
3421 #ifdef EH_USES
3422 unsigned int i;
3423 #endif
3425 bitmap_clear (regular_block_artificial_uses);
3427 if (reload_completed)
3429 if (frame_pointer_needed)
3430 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3432 else
3433 /* Before reload, there are a few registers that must be forced
3434 live everywhere -- which might not already be the case for
3435 blocks within infinite loops. */
3437 unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
3439 /* Any reference to any pseudo before reload is a potential
3440 reference of the frame pointer. */
3441 bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
3443 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
3444 bitmap_set_bit (regular_block_artificial_uses,
3445 HARD_FRAME_POINTER_REGNUM);
3447 /* Pseudos with argument area equivalences may require
3448 reloading via the argument pointer. */
3449 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3450 && fixed_regs[ARG_POINTER_REGNUM])
3451 bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
3453 /* Any constant, or pseudo with constant equivalences, may
3454 require reloading from memory using the pic register. */
3455 if (picreg != INVALID_REGNUM
3456 && fixed_regs[picreg])
3457 bitmap_set_bit (regular_block_artificial_uses, picreg);
3459 /* The all-important stack pointer must always be live. */
3460 bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
3462 #ifdef EH_USES
3463 /* EH_USES registers are used:
3464 1) at all insns that might throw (calls or with -fnon-call-exceptions
3465 trapping insns)
3466 2) in all EH edges
3467 3) to support backtraces and/or debugging, anywhere between their
3468 initialization and where the saved registers are restored
3469 from them, including the cases where we don't reach the epilogue
3470 (noreturn call or infinite loop). */
3471 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3472 if (EH_USES (i))
3473 bitmap_set_bit (regular_block_artificial_uses, i);
3474 #endif
3478 /* Get the artificial use set for an eh block. */
3480 static void
3481 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3483 bitmap_clear (eh_block_artificial_uses);
3485 /* The following code (down through the arg_pointer setting) APPEARS
3486 to be necessary because there is nothing that actually
3487 describes what the exception handling code may need
3488 to keep alive. */
3489 if (reload_completed)
3491 if (frame_pointer_needed)
3493 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3494 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
3495 bitmap_set_bit (eh_block_artificial_uses,
3496 HARD_FRAME_POINTER_REGNUM);
3498 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3499 && fixed_regs[ARG_POINTER_REGNUM])
3500 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3506 /*----------------------------------------------------------------------------
3507 Specialized hard register scanning functions.
3508 ----------------------------------------------------------------------------*/
3511 /* Mark a register in SET. Hard registers in large modes get all
3512 of their component registers set as well. */
3514 static void
3515 df_mark_reg (rtx reg, void *vset)
3517 bitmap_set_range ((bitmap) vset, REGNO (reg), REG_NREGS (reg));
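/* The callback shape matches diddle_return_value, as used at the end
   of df_get_exit_block_use_set below:

       diddle_return_value (df_mark_reg, (void *) some_bitmap);

   (SOME_BITMAP hypothetical) -- each return value register is handed
   to df_mark_reg, which sets all of its component regnos.  */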
3521 /* Set the bit for regs that are considered being defined at the entry. */
3523 static void
3524 df_get_entry_block_def_set (bitmap entry_block_defs)
3526 rtx r;
3527 int i;
3529 bitmap_clear (entry_block_defs);
3531 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3533 if (global_regs[i])
3534 bitmap_set_bit (entry_block_defs, i);
3535 if (FUNCTION_ARG_REGNO_P (i))
3536 bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
3539 /* The always important stack pointer. */
3540 bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
3542 /* Once the prologue has been generated, all of these registers
3543 should just show up in the first regular block. */
3544 if (HAVE_prologue && epilogue_completed)
3546 /* Defs for the callee saved registers are inserted so that the
3547 pushes have some defining location. */
3548 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3549 if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
3550 bitmap_set_bit (entry_block_defs, i);
3553 r = targetm.calls.struct_value_rtx (current_function_decl, true);
3554 if (r && REG_P (r))
3555 bitmap_set_bit (entry_block_defs, REGNO (r));
3557 /* If the function has an incoming STATIC_CHAIN, it has to show up
3558 in the entry def set. */
3559 r = targetm.calls.static_chain (current_function_decl, true);
3560 if (r && REG_P (r))
3561 bitmap_set_bit (entry_block_defs, REGNO (r));
3563 if ((!reload_completed) || frame_pointer_needed)
3565 /* Any reference to any pseudo before reload is a potential
3566 reference of the frame pointer. */
3567 bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
3569 /* If they are different, also mark the hard frame pointer as live. */
3570 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
3571 && !LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3572 bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
3575 /* These registers are live everywhere. */
3576 if (!reload_completed)
3578 /* Pseudos with argument area equivalences may require
3579 reloading via the argument pointer. */
3580 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3581 && fixed_regs[ARG_POINTER_REGNUM])
3582 bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
3584 /* Any constant, or pseudo with constant equivalences, may
3585 require reloading from memory using the pic register. */
3586 unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
3587 if (picreg != INVALID_REGNUM
3588 && fixed_regs[picreg])
3589 bitmap_set_bit (entry_block_defs, picreg);
3592 #ifdef INCOMING_RETURN_ADDR_RTX
3593 if (REG_P (INCOMING_RETURN_ADDR_RTX))
3594 bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
3595 #endif
3597 targetm.extra_live_on_entry (entry_block_defs);
3601 /* Return the (conservative) set of hard registers that are defined on
3602 entry to the function.
3603 It uses df->entry_block_defs to determine which register
3604 reference to include. */
3606 static void
3607 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3608 bitmap entry_block_defs)
3610 unsigned int i;
3611 bitmap_iterator bi;
3613 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3615 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3616 ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_DEF, 0);
3619 df_canonize_collection_rec (collection_rec);
3623 /* Record the (conservative) set of hard registers that are defined on
3624 entry to the function. */
3626 static void
3627 df_record_entry_block_defs (bitmap entry_block_defs)
3629 struct df_collection_rec collection_rec;
3630 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3632 /* Process bb_refs chain */
3633 df_refs_add_to_chains (&collection_rec,
3634 BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK),
3635 NULL,
3636 copy_defs);
3640 /* Update the defs in the entry block. */
3642 void
3643 df_update_entry_block_defs (void)
3645 bitmap_head refs;
3646 bool changed = false;
3648 bitmap_initialize (&refs, &df_bitmap_obstack);
3649 df_get_entry_block_def_set (&refs);
3650 if (df->entry_block_defs)
3652 if (!bitmap_equal_p (df->entry_block_defs, &refs))
3654 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
3655 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3656 df_ref_chain_delete (bb_info->artificial_defs);
3657 bb_info->artificial_defs = NULL;
3658 changed = true;
3661 else
3663 struct df_scan_problem_data *problem_data
3664 = (struct df_scan_problem_data *) df_scan->problem_data;
3665 gcc_unreachable ();
3666 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3667 changed = true;
3670 if (changed)
3672 df_record_entry_block_defs (&refs);
3673 bitmap_copy (df->entry_block_defs, &refs);
3674 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
3676 bitmap_clear (&refs);
3680 /* Set the bit for regs that are considered being used at the exit. */
3682 static void
3683 df_get_exit_block_use_set (bitmap exit_block_uses)
3685 unsigned int i;
3686 unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
3688 bitmap_clear (exit_block_uses);
3690 /* Stack pointer is always live at the exit. */
3691 bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
3693 /* Mark the frame pointer if needed at the end of the function.
3694 If we end up eliminating it, it will be removed from the live
3695 list of each basic block by reload. */
3697 if ((!reload_completed) || frame_pointer_needed)
3699 bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
3701 /* If they are different, also mark the hard frame pointer as live. */
3702 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
3703 && !LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3704 bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
3707 /* Many architectures have a GP register even without flag_pic.
3708 Assume the pic register is not in use, or will be handled by
3709 other means, if it is not fixed. */
3710 if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3711 && picreg != INVALID_REGNUM
3712 && fixed_regs[picreg])
3713 bitmap_set_bit (exit_block_uses, picreg);
3715 /* Mark all global registers, and all registers used by the
3716 epilogue as being live at the end of the function since they
3717 may be referenced by our caller. */
3718 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3719 if (global_regs[i] || EPILOGUE_USES (i))
3720 bitmap_set_bit (exit_block_uses, i);
3722 if (HAVE_epilogue && epilogue_completed)
3724 /* Mark all call-saved registers that we actually used. */
3725 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3726 if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
3727 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3728 bitmap_set_bit (exit_block_uses, i);
3731 /* Mark the registers that will contain data for the handler. */
3732 if (reload_completed && crtl->calls_eh_return)
3733 for (i = 0; ; ++i)
3735 unsigned regno = EH_RETURN_DATA_REGNO (i);
3736 if (regno == INVALID_REGNUM)
3737 break;
3738 bitmap_set_bit (exit_block_uses, regno);
3741 #ifdef EH_RETURN_STACKADJ_RTX
3742 if ((!HAVE_epilogue || ! epilogue_completed)
3743 && crtl->calls_eh_return)
3745 rtx tmp = EH_RETURN_STACKADJ_RTX;
3746 if (tmp && REG_P (tmp))
3747 df_mark_reg (tmp, exit_block_uses);
3749 #endif
3751 #ifdef EH_RETURN_HANDLER_RTX
3752 if ((!HAVE_epilogue || ! epilogue_completed)
3753 && crtl->calls_eh_return)
3755 rtx tmp = EH_RETURN_HANDLER_RTX;
3756 if (tmp && REG_P (tmp))
3757 df_mark_reg (tmp, exit_block_uses);
3759 #endif
3761 /* Mark function return value. */
3762 diddle_return_value (df_mark_reg, (void*) exit_block_uses);
3766 /* Return the refs of hard registers that are used in the exit block.
3767 It uses df->exit_block_uses to determine register to include. */
3769 static void
3770 df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
3772 unsigned int i;
3773 bitmap_iterator bi;
3775 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
3776 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3777 EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);
3779 /* It is deliberate that this is not put in the exit block uses but
3780 I do not know why. */
3781 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3782 && reload_completed
3783 && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
3784 && bb_has_eh_pred (EXIT_BLOCK_PTR_FOR_FN (cfun))
3785 && fixed_regs[ARG_POINTER_REGNUM])
3786 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
3787 EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);
3789 df_canonize_collection_rec (collection_rec);
3793 /* Record the set of hard registers that are used in the exit block.
3794 It uses df->exit_block_uses to determine which bit to include. */
3796 static void
3797 df_record_exit_block_uses (bitmap exit_block_uses)
3799 struct df_collection_rec collection_rec;
3800 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
3802 /* Process bb_refs chain */
3803 df_refs_add_to_chains (&collection_rec,
3804 BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK),
3805 NULL,
3806 copy_uses);
3810 /* Update the uses in the exit block. */
3812 void
3813 df_update_exit_block_uses (void)
3815 bitmap_head refs;
3816 bool changed = false;
3818 bitmap_initialize (&refs, &df_bitmap_obstack);
3819 df_get_exit_block_use_set (&refs);
3820 if (df->exit_block_uses)
3822 if (!bitmap_equal_p (df->exit_block_uses, &refs))
3824 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
3825 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
3826 df_ref_chain_delete (bb_info->artificial_uses);
3827 bb_info->artificial_uses = NULL;
3828 changed = true;
3831 else
3833 struct df_scan_problem_data *problem_data
3834 = (struct df_scan_problem_data *) df_scan->problem_data;
3835 gcc_unreachable ();
3836 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3837 changed = true;
3840 if (changed)
3842 df_record_exit_block_uses (&refs);
3843 bitmap_copy (df->exit_block_uses, &refs);
3844 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));
3846 bitmap_clear (&refs);
3849 static bool initialized = false;
3852 /* Initialize some platform specific structures. */
3854 void
3855 df_hard_reg_init (void)
3857 #ifdef ELIMINABLE_REGS
3858 int i;
3859 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
3860 #endif
3861 if (initialized)
3862 return;
3864 /* Record which registers will be eliminated. We use this in
3865 mark_used_regs. */
3866 CLEAR_HARD_REG_SET (elim_reg_set);
3868 #ifdef ELIMINABLE_REGS
3869 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
3870 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
3871 #else
3872 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
3873 #endif
3875 initialized = true;
3879 /* Recompute the parts of scanning that are based on regs_ever_live
3880 because something changed in that array. */
3882 void
3883 df_update_entry_exit_and_calls (void)
3885 basic_block bb;
3887 df_update_entry_block_defs ();
3888 df_update_exit_block_uses ();
3890 /* The call insns need to be rescanned because there may be changes
3891 in the set of registers clobbered across the call. */
3892 FOR_EACH_BB_FN (bb, cfun)
3894 rtx_insn *insn;
3895 FOR_BB_INSNS (bb, insn)
3897 if (INSN_P (insn) && CALL_P (insn))
3898 df_insn_rescan (insn);
3904 /* Return true if hard REG is actually used in some instruction.
3905 There are a fair number of conditions that affect the setting of
3906 this array. See the comment in df.h for df->hard_regs_live_count
3907 for the conditions under which this array is set. */
3909 bool
3910 df_hard_reg_used_p (unsigned int reg)
3912 return df->hard_regs_live_count[reg] != 0;
3916 /* A count of the number of times REG is actually used in some
3917 instruction. There are a fair number of conditions that affect the
3918 setting of this array. See the comment in df.h for
3919 df->hard_regs_live_count for the conditions under which this array
3920 is set. */
3923 unsigned int
3924 df_hard_reg_used_count (unsigned int reg)
3926 return df->hard_regs_live_count[reg];
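/* A sketch of a possible client (hypothetical): a heuristic that wants
   to know whether a hard register is never touched by a real insn in
   the current function can test

       if (df_hard_reg_used_count (regno) == 0)
         ...

   subject to the df.h conditions mentioned above.  */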
3930 /* Get the value of regs_ever_live[REGNO]. */
3932 bool
3933 df_regs_ever_live_p (unsigned int regno)
3935 return regs_ever_live[regno];
3939 /* Set regs_ever_live[REGNO] to VALUE. If this causes regs_ever_live
3940 to change, schedule that change for the next update. */
3942 void
3943 df_set_regs_ever_live (unsigned int regno, bool value)
3945 if (regs_ever_live[regno] == value)
3946 return;
3948 regs_ever_live[regno] = value;
3949 if (df)
3950 df->redo_entry_and_exit = true;
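/* A sketch of the deferred-update contract (hypothetical caller): a
   target or pass that starts using a call-saved register marks it

       df_set_regs_ever_live (regno, true);

   and the scheduled change is consumed by the next call to
   df_compute_regs_ever_live below, which reruns
   df_update_entry_exit_and_calls if anything changed.  */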
3954 /* Compute "regs_ever_live" information from the underlying df
3955 information. Set the vector to all false if RESET. */
3957 void
3958 df_compute_regs_ever_live (bool reset)
3960 unsigned int i;
3961 bool changed = df->redo_entry_and_exit;
3963 if (reset)
3964 memset (regs_ever_live, 0, sizeof (regs_ever_live));
3966 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3967 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
3969 regs_ever_live[i] = true;
3970 changed = true;
3972 if (changed)
3973 df_update_entry_exit_and_calls ();
3974 df->redo_entry_and_exit = false;
3978 /*----------------------------------------------------------------------------
3979 Dataflow ref information verification functions.
3981 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
3982 df_reg_chain_verify_unmarked (refs)
3983 df_refs_verify (vec<df_ref, va_heap> *, ref*, bool)
3984 df_mws_verify (mw*, mw*, bool)
3985 df_insn_refs_verify (collection_rec, bb, insn, bool)
3986 df_bb_refs_verify (bb, refs, bool)
3987 df_bb_verify (bb)
3988 df_exit_block_bitmap_verify (bool)
3989 df_entry_block_bitmap_verify (bool)
3990 df_scan_verify ()
3991 ----------------------------------------------------------------------------*/
3994 /* Mark all refs in the reg chain. Verify that all of the registers
3995 are in the correct chain. */
3997 static unsigned int
3998 df_reg_chain_mark (df_ref refs, unsigned int regno,
3999 bool is_def, bool is_eq_use)
4001 unsigned int count = 0;
4002 df_ref ref;
4003 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4005 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4007 /* If there are no def-use or use-def chains, make sure that all
4008 of the chains are clear. */
4009 if (!df_chain)
4010 gcc_assert (!DF_REF_CHAIN (ref));
4012 /* Check to make sure the ref is in the correct chain. */
4013 gcc_assert (DF_REF_REGNO (ref) == regno);
4014 if (is_def)
4015 gcc_assert (DF_REF_REG_DEF_P (ref));
4016 else
4017 gcc_assert (!DF_REF_REG_DEF_P (ref));
4019 if (is_eq_use)
4020 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
4021 else
4022 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
4024 if (DF_REF_NEXT_REG (ref))
4025 gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
4026 count++;
4027 DF_REF_REG_MARK (ref);
4029 return count;
/* Verify that all of the registers in the chain are unmarked.  */

static void
df_reg_chain_verify_unmarked (df_ref refs)
{
  df_ref ref;
  for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
    gcc_assert (!DF_REF_IS_REG_MARKED (ref));
}
/* Verify that NEW_REC and OLD_REC have exactly the same members.  */

static bool
df_refs_verify (const vec<df_ref, va_heap> *new_rec, df_ref old_rec,
                bool abort_if_fail)
{
  unsigned int ix;
  df_ref new_ref;

  FOR_EACH_VEC_ELT (*new_rec, ix, new_ref)
    {
      if (old_rec == NULL || !df_ref_equal_p (new_ref, old_rec))
        {
          if (abort_if_fail)
            gcc_assert (0);
          else
            return false;
        }

      /* ABORT_IF_FAIL is set when we are called from the function
         level verifier.  In that context, record this ref as seen by
         clearing its mark.  */
      if (abort_if_fail)
        {
          gcc_assert (DF_REF_IS_REG_MARKED (old_rec));
          DF_REF_REG_UNMARK (old_rec);
        }

      old_rec = DF_REF_NEXT_LOC (old_rec);
    }

  if (abort_if_fail)
    gcc_assert (old_rec == NULL);
  else
    return old_rec == NULL;
  return false;
}
/* Verify that NEW_REC and OLD_REC have exactly the same members.  */

static bool
df_mws_verify (const vec<df_mw_hardreg_ptr, va_heap> *new_rec,
               struct df_mw_hardreg *old_rec,
               bool abort_if_fail)
{
  unsigned int ix;
  struct df_mw_hardreg *new_reg;

  FOR_EACH_VEC_ELT (*new_rec, ix, new_reg)
    {
      if (old_rec == NULL || !df_mw_equal_p (new_reg, old_rec))
        {
          if (abort_if_fail)
            gcc_assert (0);
          else
            return false;
        }
      old_rec = DF_MWS_NEXT (old_rec);
    }

  if (abort_if_fail)
    gcc_assert (old_rec == NULL);
  else
    return old_rec == NULL;
  return false;
}
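
/* Usage sketch (illustrative only): the OLD_REC side above is an
   ordinary multiword-hardreg list hanging off an insn, which can be
   walked like any other mw chain:

     struct df_mw_hardreg *mw;
     for (mw = DF_INSN_UID_MWS (uid); mw; mw = DF_MWS_NEXT (mw))
       ...
*/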
/* Return true if the existing insn refs information for INSN is
   complete and correct.  Otherwise (i.e. if there are any missing or
   extra refs) return false; the freshly collected refs are left in
   COLLECTION_REC for the caller to inspect or install.

   If ABORT_IF_FAIL, we are being called from the function level
   verifier: each ref that checks out has its DF_REF_REG_MARK bit
   cleared again, and a mismatch aborts.  If it's false, this is a
   per-insn verification, so nothing was marked beforehand and there
   is nothing to unmark.

   If ABORT_IF_FAIL is set, this function never returns false.  */

static bool
df_insn_refs_verify (struct df_collection_rec *collection_rec,
                     basic_block bb,
                     rtx_insn *insn,
                     bool abort_if_fail)
{
  bool ret1, ret2, ret3, ret4;
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);

  df_insn_refs_collect (collection_rec, bb, insn_info);

  /* Unfortunately we cannot opt out early if one of these is not
     right because the marks will not get cleared.  */
  ret1 = df_refs_verify (&collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
                         abort_if_fail);
  ret2 = df_refs_verify (&collection_rec->use_vec, DF_INSN_UID_USES (uid),
                         abort_if_fail);
  ret3 = df_refs_verify (&collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
                         abort_if_fail);
  ret4 = df_mws_verify (&collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
                        abort_if_fail);
  return (ret1 && ret2 && ret3 && ret4);
}
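
/* Usage sketch (illustrative only): a non-fatal, per-insn check
   passes ABORT_IF_FAIL as false, roughly the way an insn-rescan path
   would decide whether the cached refs for an insn are still valid:

     struct df_collection_rec collection_rec;
     if (!df_insn_refs_verify (&collection_rec, bb, insn, false))
       ...the cached refs are stale; reinstall from COLLECTION_REC...
*/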
/* Return true if all refs in the basic block are correct and complete.
   Because df_refs_verify is called with ABORT_IF_FAIL set, every ref
   that is verified here also has its DF_REF_MARK bit cleared.  */

static bool
df_bb_verify (basic_block bb)
{
  rtx_insn *insn;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
  struct df_collection_rec collection_rec;

  gcc_assert (bb_info);

  /* Scan the block, one insn at a time, from end to beginning.  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      if (!INSN_P (insn))
        continue;
      df_insn_refs_verify (&collection_rec, bb, insn, true);
      df_free_collection_rec (&collection_rec);
    }

  /* Do the artificial defs and uses.  */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_verify (&collection_rec.def_vec, df_get_artificial_defs (bb->index),
                  true);
  df_refs_verify (&collection_rec.use_vec, df_get_artificial_uses (bb->index),
                  true);
  df_free_collection_rec (&collection_rec);

  return true;
}
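
/* Usage sketch (illustrative only): the artificial defs and uses
   verified above are attached per block and can be inspected with the
   same accessors, e.g.

     df_ref ref;
     for (ref = df_get_artificial_uses (bb->index); ref;
          ref = DF_REF_NEXT_LOC (ref))
       ...
*/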
/* Return true if the entry block has a correct and complete df_ref
   set.  If not, abort if ABORT_IF_FAIL is true, else return false.  */

static bool
df_entry_block_bitmap_verify (bool abort_if_fail)
{
  bitmap_head entry_block_defs;
  bool is_eq;

  bitmap_initialize (&entry_block_defs, &df_bitmap_obstack);
  df_get_entry_block_def_set (&entry_block_defs);

  is_eq = bitmap_equal_p (&entry_block_defs, df->entry_block_defs);

  if (!is_eq && abort_if_fail)
    {
      fprintf (stderr, "entry_block_defs = ");
      df_print_regset (stderr, &entry_block_defs);
      fprintf (stderr, "df->entry_block_defs = ");
      df_print_regset (stderr, df->entry_block_defs);
      gcc_assert (0);
    }

  bitmap_clear (&entry_block_defs);

  return is_eq;
}
/* Return true if the exit block has a correct and complete df_ref
   set.  If not, abort if ABORT_IF_FAIL is true, else return false.  */

static bool
df_exit_block_bitmap_verify (bool abort_if_fail)
{
  bitmap_head exit_block_uses;
  bool is_eq;

  bitmap_initialize (&exit_block_uses, &df_bitmap_obstack);
  df_get_exit_block_use_set (&exit_block_uses);

  is_eq = bitmap_equal_p (&exit_block_uses, df->exit_block_uses);

  if (!is_eq && abort_if_fail)
    {
      fprintf (stderr, "exit_block_uses = ");
      df_print_regset (stderr, &exit_block_uses);
      fprintf (stderr, "df->exit_block_uses = ");
      df_print_regset (stderr, df->exit_block_uses);
      gcc_assert (0);
    }

  bitmap_clear (&exit_block_uses);

  return is_eq;
}
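
/* Debugging sketch (illustrative only): the df_print_regset dumps
   used on failure above are also convenient interactively, e.g. from
   gdb:

     (gdb) call df_print_regset (stderr, df->exit_block_uses)
*/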
/* Verify that the df_ref information for all insns in all blocks is
   correct and complete.  */

void
df_scan_verify (void)
{
  unsigned int i;
  basic_block bb;
  bitmap_head regular_block_artificial_uses;
  bitmap_head eh_block_artificial_uses;

  if (!df)
    return;

  /* Verification is a 4 step process.  */

  /* (1) All of the refs are marked by going through the reg chains.  */
  for (i = 0; i < DF_REG_SIZE (df); i++)
    {
      gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
                  == DF_REG_DEF_COUNT (i));
      gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
                  == DF_REG_USE_COUNT (i));
      gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
                  == DF_REG_EQ_USE_COUNT (i));
    }
  /* (2) There are various bitmaps whose value may change over the
     course of the compilation.  This step recomputes them to make
     sure that they have not slipped out of date.  */
  bitmap_initialize (&regular_block_artificial_uses, &df_bitmap_obstack);
  bitmap_initialize (&eh_block_artificial_uses, &df_bitmap_obstack);

  df_get_regular_block_artificial_uses (&regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (&eh_block_artificial_uses);

  bitmap_ior_into (&eh_block_artificial_uses,
                   &regular_block_artificial_uses);

  /* Check that the artificial_uses bitmaps didn't change.  */
  gcc_assert (bitmap_equal_p (&regular_block_artificial_uses,
                              &df->regular_block_artificial_uses));
  gcc_assert (bitmap_equal_p (&eh_block_artificial_uses,
                              &df->eh_block_artificial_uses));

  bitmap_clear (&regular_block_artificial_uses);
  bitmap_clear (&eh_block_artificial_uses);

  /* Verify the entry and exit blocks.  This only verifies the bitmaps;
     the refs are verified in df_bb_verify.  */
  df_entry_block_bitmap_verify (true);
  df_exit_block_bitmap_verify (true);

  /* (3) All of the insns in all of the blocks are traversed and the
     marks are cleared both in the artificial refs attached to the
     blocks and the real refs inside the insns.  It is a failure to
     clear a mark that has not been set, as this means that the ref in
     the block or insn was not in the reg chain.  */

  FOR_ALL_BB_FN (bb, cfun)
    df_bb_verify (bb);
  /* (4) All reg chains are traversed a second time.  This time a
     check is made that the marks are clear.  A set mark would come
     from a ref that is not in any insn or basic block.  */

  for (i = 0; i < DF_REG_SIZE (df); i++)
    {
      df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
      df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
      df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));
    }
}
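
/* Usage sketch (illustrative only): a pass doing heavy incremental
   updates can request a full consistency check; guarding it with
   ENABLE_DF_CHECKING (an assumption here, mirroring how other df
   consumers limit expensive checks to checking builds) keeps the
   walk out of release compilers:

     #ifdef ENABLE_DF_CHECKING
       df_scan_verify ();
     #endif
*/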