/* Extraction artifact from a git web viewer — not source code.
   Origin: official-gcc.git / gcc / df-scan.c
   blob 0fb1f640d9b9fce1c5906c2f61397184f65a39f5 (2012-10-06).  */
/* Scanning of rtl for dataflow analysis.
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
   2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
   Originally contributed by Michael P. Hayes
             (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
             and Kenneth Zadeck (zadeck@naturalbridge.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "function.h"
34 #include "regs.h"
35 #include "alloc-pool.h"
36 #include "flags.h"
37 #include "hard-reg-set.h"
38 #include "basic-block.h"
39 #include "sbitmap.h"
40 #include "bitmap.h"
41 #include "dumpfile.h"
42 #include "tree.h"
43 #include "target.h"
44 #include "target-def.h"
45 #include "df.h"
46 #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
/* Stack-allocated VEC support for df_ref and df_mw_hardreg pointers,
   used by the transient df_collection_rec below.  */
DEF_VEC_P(df_ref);
DEF_VEC_ALLOC_P_STACK(df_ref);

#define VEC_df_ref_stack_alloc(alloc) VEC_stack_alloc (df_ref, alloc)

typedef struct df_mw_hardreg *df_mw_hardreg_ptr;

DEF_VEC_P(df_mw_hardreg_ptr);
DEF_VEC_ALLOC_P_STACK(df_mw_hardreg_ptr);

#define VEC_df_mw_hardreg_ptr_stack_alloc(alloc) \
  VEC_stack_alloc (df_mw_hardreg_ptr, alloc)

/* Targets without prologue/epilogue expanders get zero defaults so the
   scanning code can test the HAVE_* macros unconditionally.  */
#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif

/* The following two macros free the vecs that hold either the refs or
   the mw refs.  They are a little tricky because the vec has 0
   elements is special and is not to be freed.  */
#define df_scan_free_ref_vec(V) \
  do { \
    if (V && *V) \
      free (V);  \
  } while (0)

#define df_scan_free_mws_vec(V) \
  do { \
    if (V && *V) \
      free (V);  \
  } while (0)
90 /* The set of hard registers in eliminables[i].from. */
92 static HARD_REG_SET elim_reg_set;
94 /* Initialize ur_in and ur_out as if all hard registers were partially
95 available. */
97 struct df_collection_rec
99 VEC(df_ref,stack) *def_vec;
100 VEC(df_ref,stack) *use_vec;
101 VEC(df_ref,stack) *eq_use_vec;
102 VEC(df_mw_hardreg_ptr,stack) *mw_vec;
105 static df_ref df_null_ref_rec[1];
106 static struct df_mw_hardreg * df_null_mw_rec[1];
108 static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
109 rtx, rtx *,
110 basic_block, struct df_insn_info *,
111 enum df_ref_type, int ref_flags);
112 static void df_def_record_1 (struct df_collection_rec *, rtx *,
113 basic_block, struct df_insn_info *,
114 int ref_flags);
115 static void df_defs_record (struct df_collection_rec *, rtx,
116 basic_block, struct df_insn_info *,
117 int ref_flags);
118 static void df_uses_record (struct df_collection_rec *,
119 rtx *, enum df_ref_type,
120 basic_block, struct df_insn_info *,
121 int ref_flags);
123 static void df_install_ref_incremental (df_ref);
124 static df_ref df_ref_create_structure (enum df_ref_class,
125 struct df_collection_rec *, rtx, rtx *,
126 basic_block, struct df_insn_info *,
127 enum df_ref_type, int ref_flags);
128 static void df_insn_refs_collect (struct df_collection_rec*,
129 basic_block, struct df_insn_info *);
130 static void df_canonize_collection_rec (struct df_collection_rec *);
132 static void df_get_regular_block_artificial_uses (bitmap);
133 static void df_get_eh_block_artificial_uses (bitmap);
135 static void df_record_entry_block_defs (bitmap);
136 static void df_record_exit_block_uses (bitmap);
137 static void df_get_exit_block_use_set (bitmap);
138 static void df_get_entry_block_def_set (bitmap);
139 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
140 static void df_ref_chain_delete_du_chain (df_ref *);
141 static void df_ref_chain_delete (df_ref *);
143 static void df_refs_add_to_chains (struct df_collection_rec *,
144 basic_block, rtx);
146 static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
147 static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
148 static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
149 static void df_install_ref (df_ref, struct df_reg_info *,
150 struct df_ref_info *, bool);
152 static int df_ref_compare (const void *, const void *);
153 static int df_mw_compare (const void *, const void *);
155 /* Indexed by hardware reg number, is true if that register is ever
156 used in the current function.
158 In df-scan.c, this is set up to record the hard regs used
159 explicitly. Reload adds in the hard regs used for holding pseudo
160 regs. Final uses it to generate the code in the function prologue
161 and epilogue to save and restore registers as needed. */
163 static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
165 /*----------------------------------------------------------------------------
166 SCANNING DATAFLOW PROBLEM
168 There are several ways in which scanning looks just like the other
169 dataflow problems. It shares the all the mechanisms for local info
170 as well as basic block info. Where it differs is when and how often
171 it gets run. It also has no need for the iterative solver.
172 ----------------------------------------------------------------------------*/
174 /* Problem data for the scanning dataflow function. */
175 struct df_scan_problem_data
177 alloc_pool ref_base_pool;
178 alloc_pool ref_artificial_pool;
179 alloc_pool ref_regular_pool;
180 alloc_pool insn_pool;
181 alloc_pool reg_pool;
182 alloc_pool mw_reg_pool;
183 bitmap_obstack reg_bitmaps;
184 bitmap_obstack insn_bitmaps;
187 typedef struct df_scan_bb_info *df_scan_bb_info_t;
190 /* Internal function to shut down the scanning problem. */
191 static void
192 df_scan_free_internal (void)
194 struct df_scan_problem_data *problem_data
195 = (struct df_scan_problem_data *) df_scan->problem_data;
196 unsigned int i;
197 basic_block bb;
199 /* The vectors that hold the refs are not pool allocated because
200 they come in many sizes. This makes them impossible to delete
201 all at once. */
202 for (i = 0; i < DF_INSN_SIZE(); i++)
204 struct df_insn_info *insn_info = DF_INSN_UID_GET(i);
205 /* Skip the insns that have no insn_info or have been
206 deleted. */
207 if (insn_info)
209 df_scan_free_ref_vec (insn_info->defs);
210 df_scan_free_ref_vec (insn_info->uses);
211 df_scan_free_ref_vec (insn_info->eq_uses);
212 df_scan_free_mws_vec (insn_info->mw_hardregs);
216 FOR_ALL_BB (bb)
218 unsigned int bb_index = bb->index;
219 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
220 if (bb_info)
222 df_scan_free_ref_vec (bb_info->artificial_defs);
223 df_scan_free_ref_vec (bb_info->artificial_uses);
227 free (df->def_info.refs);
228 free (df->def_info.begin);
229 free (df->def_info.count);
230 memset (&df->def_info, 0, (sizeof (struct df_ref_info)));
232 free (df->use_info.refs);
233 free (df->use_info.begin);
234 free (df->use_info.count);
235 memset (&df->use_info, 0, (sizeof (struct df_ref_info)));
237 free (df->def_regs);
238 df->def_regs = NULL;
239 free (df->use_regs);
240 df->use_regs = NULL;
241 free (df->eq_use_regs);
242 df->eq_use_regs = NULL;
243 df->regs_size = 0;
244 DF_REG_SIZE(df) = 0;
246 free (df->insns);
247 df->insns = NULL;
248 DF_INSN_SIZE () = 0;
250 free (df_scan->block_info);
251 df_scan->block_info = NULL;
252 df_scan->block_info_size = 0;
254 bitmap_clear (&df->hardware_regs_used);
255 bitmap_clear (&df->regular_block_artificial_uses);
256 bitmap_clear (&df->eh_block_artificial_uses);
257 BITMAP_FREE (df->entry_block_defs);
258 BITMAP_FREE (df->exit_block_uses);
259 bitmap_clear (&df->insns_to_delete);
260 bitmap_clear (&df->insns_to_rescan);
261 bitmap_clear (&df->insns_to_notes_rescan);
263 free_alloc_pool (problem_data->ref_base_pool);
264 free_alloc_pool (problem_data->ref_artificial_pool);
265 free_alloc_pool (problem_data->ref_regular_pool);
266 free_alloc_pool (problem_data->insn_pool);
267 free_alloc_pool (problem_data->reg_pool);
268 free_alloc_pool (problem_data->mw_reg_pool);
269 bitmap_obstack_release (&problem_data->reg_bitmaps);
270 bitmap_obstack_release (&problem_data->insn_bitmaps);
271 free (df_scan->problem_data);
275 /* Free basic block info. */
277 static void
278 df_scan_free_bb_info (basic_block bb, void *vbb_info)
280 struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
281 unsigned int bb_index = bb->index;
283 /* See if bb_info is initialized. */
284 if (bb_info->artificial_defs)
286 rtx insn;
287 FOR_BB_INSNS (bb, insn)
289 if (INSN_P (insn))
290 /* Record defs within INSN. */
291 df_insn_delete (bb, INSN_UID (insn));
294 if (bb_index < df_scan->block_info_size)
295 bb_info = df_scan_get_bb_info (bb_index);
297 /* Get rid of any artificial uses or defs. */
298 if (bb_info->artificial_defs)
300 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
301 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
302 df_ref_chain_delete (bb_info->artificial_defs);
303 df_ref_chain_delete (bb_info->artificial_uses);
304 bb_info->artificial_defs = NULL;
305 bb_info->artificial_uses = NULL;
311 /* Allocate the problem data for the scanning problem. This should be
312 called when the problem is created or when the entire function is to
313 be rescanned. */
314 void
315 df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
317 struct df_scan_problem_data *problem_data;
318 unsigned int insn_num = get_max_uid () + 1;
319 unsigned int block_size = 512;
320 basic_block bb;
322 /* Given the number of pools, this is really faster than tearing
323 everything apart. */
324 if (df_scan->problem_data)
325 df_scan_free_internal ();
327 problem_data = XNEW (struct df_scan_problem_data);
328 df_scan->problem_data = problem_data;
329 df_scan->computed = true;
331 problem_data->ref_base_pool
332 = create_alloc_pool ("df_scan ref base",
333 sizeof (struct df_base_ref), block_size);
334 problem_data->ref_artificial_pool
335 = create_alloc_pool ("df_scan ref artificial",
336 sizeof (struct df_artificial_ref), block_size);
337 problem_data->ref_regular_pool
338 = create_alloc_pool ("df_scan ref regular",
339 sizeof (struct df_regular_ref), block_size);
340 problem_data->insn_pool
341 = create_alloc_pool ("df_scan insn",
342 sizeof (struct df_insn_info), block_size);
343 problem_data->reg_pool
344 = create_alloc_pool ("df_scan reg",
345 sizeof (struct df_reg_info), block_size);
346 problem_data->mw_reg_pool
347 = create_alloc_pool ("df_scan mw_reg",
348 sizeof (struct df_mw_hardreg), block_size / 16);
350 bitmap_obstack_initialize (&problem_data->reg_bitmaps);
351 bitmap_obstack_initialize (&problem_data->insn_bitmaps);
353 insn_num += insn_num / 4;
354 df_grow_reg_info ();
356 df_grow_insn_info ();
357 df_grow_bb_info (df_scan);
359 FOR_ALL_BB (bb)
361 unsigned int bb_index = bb->index;
362 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
363 bb_info->artificial_defs = NULL;
364 bb_info->artificial_uses = NULL;
367 bitmap_initialize (&df->hardware_regs_used, &problem_data->reg_bitmaps);
368 bitmap_initialize (&df->regular_block_artificial_uses, &problem_data->reg_bitmaps);
369 bitmap_initialize (&df->eh_block_artificial_uses, &problem_data->reg_bitmaps);
370 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
371 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
372 bitmap_initialize (&df->insns_to_delete, &problem_data->insn_bitmaps);
373 bitmap_initialize (&df->insns_to_rescan, &problem_data->insn_bitmaps);
374 bitmap_initialize (&df->insns_to_notes_rescan, &problem_data->insn_bitmaps);
375 df_scan->optional_p = false;
379 /* Free all of the data associated with the scan problem. */
381 static void
382 df_scan_free (void)
384 if (df_scan->problem_data)
385 df_scan_free_internal ();
387 if (df->blocks_to_analyze)
389 BITMAP_FREE (df->blocks_to_analyze);
390 df->blocks_to_analyze = NULL;
393 free (df_scan);
396 /* Dump the preamble for DF_SCAN dump. */
397 static void
398 df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
400 int i;
401 int dcount = 0;
402 int ucount = 0;
403 int ecount = 0;
404 int icount = 0;
405 int ccount = 0;
406 basic_block bb;
407 rtx insn;
409 fprintf (file, ";; invalidated by call \t");
410 df_print_regset (file, regs_invalidated_by_call_regset);
411 fprintf (file, ";; hardware regs used \t");
412 df_print_regset (file, &df->hardware_regs_used);
413 fprintf (file, ";; regular block artificial uses \t");
414 df_print_regset (file, &df->regular_block_artificial_uses);
415 fprintf (file, ";; eh block artificial uses \t");
416 df_print_regset (file, &df->eh_block_artificial_uses);
417 fprintf (file, ";; entry block defs \t");
418 df_print_regset (file, df->entry_block_defs);
419 fprintf (file, ";; exit block uses \t");
420 df_print_regset (file, df->exit_block_uses);
421 fprintf (file, ";; regs ever live \t");
422 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
423 if (df_regs_ever_live_p (i))
424 fprintf (file, " %d[%s]", i, reg_names[i]);
425 fprintf (file, "\n;; ref usage \t");
427 for (i = 0; i < (int)df->regs_inited; i++)
428 if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
430 const char * sep = "";
432 fprintf (file, "r%d={", i);
433 if (DF_REG_DEF_COUNT (i))
435 fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
436 sep = ",";
437 dcount += DF_REG_DEF_COUNT (i);
439 if (DF_REG_USE_COUNT (i))
441 fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
442 sep = ",";
443 ucount += DF_REG_USE_COUNT (i);
445 if (DF_REG_EQ_USE_COUNT (i))
447 fprintf (file, "%s%de", sep, DF_REG_EQ_USE_COUNT (i));
448 ecount += DF_REG_EQ_USE_COUNT (i);
450 fprintf (file, "} ");
453 FOR_EACH_BB (bb)
454 FOR_BB_INSNS (bb, insn)
455 if (INSN_P (insn))
457 if (CALL_P (insn))
458 ccount++;
459 else
460 icount++;
463 fprintf (file, "\n;; total ref usage %d{%dd,%du,%de}"
464 " in %d{%d regular + %d call} insns.\n",
465 dcount + ucount + ecount, dcount, ucount, ecount,
466 icount + ccount, icount, ccount);
469 /* Dump the bb_info for a given basic block. */
470 static void
471 df_scan_start_block (basic_block bb, FILE *file)
473 struct df_scan_bb_info *bb_info
474 = df_scan_get_bb_info (bb->index);
476 if (bb_info)
478 fprintf (file, ";; bb %d artificial_defs: ", bb->index);
479 df_refs_chain_dump (bb_info->artificial_defs, true, file);
480 fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
481 df_refs_chain_dump (bb_info->artificial_uses, true, file);
482 fprintf (file, "\n");
484 #if 0
486 rtx insn;
487 FOR_BB_INSNS (bb, insn)
488 if (INSN_P (insn))
489 df_insn_debug (insn, false, file);
491 #endif
494 static struct df_problem problem_SCAN =
496 DF_SCAN, /* Problem id. */
497 DF_NONE, /* Direction. */
498 df_scan_alloc, /* Allocate the problem specific data. */
499 NULL, /* Reset global information. */
500 df_scan_free_bb_info, /* Free basic block info. */
501 NULL, /* Local compute function. */
502 NULL, /* Init the solution specific data. */
503 NULL, /* Iterative solver. */
504 NULL, /* Confluence operator 0. */
505 NULL, /* Confluence operator n. */
506 NULL, /* Transfer function. */
507 NULL, /* Finalize function. */
508 df_scan_free, /* Free all of the problem information. */
509 NULL, /* Remove this problem from the stack of dataflow problems. */
510 df_scan_start_dump, /* Debugging. */
511 df_scan_start_block, /* Debugging start block. */
512 NULL, /* Debugging end block. */
513 NULL, /* Incremental solution verify start. */
514 NULL, /* Incremental solution verify end. */
515 NULL, /* Dependent problem. */
516 sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
517 TV_DF_SCAN, /* Timing variable. */
518 false /* Reset blocks on dropping out of blocks_to_analyze. */
522 /* Create a new DATAFLOW instance and add it to an existing instance
523 of DF. The returned structure is what is used to get at the
524 solution. */
526 void
527 df_scan_add_problem (void)
529 df_add_problem (&problem_SCAN);
533 /*----------------------------------------------------------------------------
534 Storage Allocation Utilities
535 ----------------------------------------------------------------------------*/
538 /* First, grow the reg_info information. If the current size is less than
539 the number of pseudos, grow to 25% more than the number of
540 pseudos.
542 Second, assure that all of the slots up to max_reg_num have been
543 filled with reg_info structures. */
545 void
546 df_grow_reg_info (void)
548 unsigned int max_reg = max_reg_num ();
549 unsigned int new_size = max_reg;
550 struct df_scan_problem_data *problem_data
551 = (struct df_scan_problem_data *) df_scan->problem_data;
552 unsigned int i;
554 if (df->regs_size < new_size)
556 new_size += new_size / 4;
557 df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
558 df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
559 df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
560 new_size);
561 df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
562 df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
563 df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
564 df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
565 df->regs_size = new_size;
568 for (i = df->regs_inited; i < max_reg; i++)
570 struct df_reg_info *reg_info;
572 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
573 memset (reg_info, 0, sizeof (struct df_reg_info));
574 df->def_regs[i] = reg_info;
575 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
576 memset (reg_info, 0, sizeof (struct df_reg_info));
577 df->use_regs[i] = reg_info;
578 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
579 memset (reg_info, 0, sizeof (struct df_reg_info));
580 df->eq_use_regs[i] = reg_info;
581 df->def_info.begin[i] = 0;
582 df->def_info.count[i] = 0;
583 df->use_info.begin[i] = 0;
584 df->use_info.count[i] = 0;
587 df->regs_inited = max_reg;
591 /* Grow the ref information. */
593 static void
594 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
596 if (ref_info->refs_size < new_size)
598 ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
599 memset (ref_info->refs + ref_info->refs_size, 0,
600 (new_size - ref_info->refs_size) *sizeof (df_ref));
601 ref_info->refs_size = new_size;
606 /* Check and grow the ref information if necessary. This routine
607 guarantees total_size + BITMAP_ADDEND amount of entries in refs
608 array. It updates ref_info->refs_size only and does not change
609 ref_info->total_size. */
611 static void
612 df_check_and_grow_ref_info (struct df_ref_info *ref_info,
613 unsigned bitmap_addend)
615 if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
617 int new_size = ref_info->total_size + bitmap_addend;
618 new_size += ref_info->total_size / 4;
619 df_grow_ref_info (ref_info, new_size);
624 /* Grow the ref information. If the current size is less than the
625 number of instructions, grow to 25% more than the number of
626 instructions. */
628 void
629 df_grow_insn_info (void)
631 unsigned int new_size = get_max_uid () + 1;
632 if (DF_INSN_SIZE () < new_size)
634 new_size += new_size / 4;
635 df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
636 memset (df->insns + df->insns_size, 0,
637 (new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
638 DF_INSN_SIZE () = new_size;
645 /*----------------------------------------------------------------------------
646 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
647 ----------------------------------------------------------------------------*/
649 /* Rescan all of the block_to_analyze or all of the blocks in the
650 function if df_set_blocks if blocks_to_analyze is NULL; */
652 void
653 df_scan_blocks (void)
655 basic_block bb;
657 df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
658 df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;
660 df_get_regular_block_artificial_uses (&df->regular_block_artificial_uses);
661 df_get_eh_block_artificial_uses (&df->eh_block_artificial_uses);
663 bitmap_ior_into (&df->eh_block_artificial_uses,
664 &df->regular_block_artificial_uses);
666 /* ENTRY and EXIT blocks have special defs/uses. */
667 df_get_entry_block_def_set (df->entry_block_defs);
668 df_record_entry_block_defs (df->entry_block_defs);
669 df_get_exit_block_use_set (df->exit_block_uses);
670 df_record_exit_block_uses (df->exit_block_uses);
671 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
672 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
674 /* Regular blocks */
675 FOR_EACH_BB (bb)
677 unsigned int bb_index = bb->index;
678 df_bb_refs_record (bb_index, true);
682 /* Create new refs under address LOC within INSN. This function is
683 only used externally. REF_FLAGS must be either 0 or DF_REF_IN_NOTE,
684 depending on whether LOC is inside PATTERN (INSN) or a note. */
686 void
687 df_uses_create (rtx *loc, rtx insn, int ref_flags)
689 gcc_assert (!(ref_flags & ~DF_REF_IN_NOTE));
690 df_uses_record (NULL, loc, DF_REF_REG_USE,
691 BLOCK_FOR_INSN (insn),
692 DF_INSN_INFO_GET (insn),
693 ref_flags);
696 /* Create a new ref of type DF_REF_TYPE for register REG at address
697 LOC within INSN of BB. This function is only used externally. */
699 df_ref
700 df_ref_create (rtx reg, rtx *loc, rtx insn,
701 basic_block bb,
702 enum df_ref_type ref_type,
703 int ref_flags)
705 enum df_ref_class cl;
707 df_grow_reg_info ();
709 /* You cannot hack artificial refs. */
710 gcc_assert (insn);
712 if (loc)
713 cl = DF_REF_REGULAR;
714 else
715 cl = DF_REF_BASE;
717 return df_ref_create_structure (cl, NULL, reg, loc, bb,
718 DF_INSN_INFO_GET (insn),
719 ref_type, ref_flags);
722 static void
723 df_install_ref_incremental (df_ref ref)
725 struct df_reg_info **reg_info;
726 struct df_ref_info *ref_info;
727 df_ref *ref_rec;
728 df_ref **ref_rec_ptr;
729 unsigned int count = 0;
730 bool add_to_table;
732 rtx insn = DF_REF_INSN (ref);
733 basic_block bb = BLOCK_FOR_INSN (insn);
735 if (DF_REF_REG_DEF_P (ref))
737 reg_info = df->def_regs;
738 ref_info = &df->def_info;
739 ref_rec_ptr = &DF_INSN_DEFS (insn);
740 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
742 else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
744 reg_info = df->eq_use_regs;
745 ref_info = &df->use_info;
746 ref_rec_ptr = &DF_INSN_EQ_USES (insn);
747 switch (ref_info->ref_order)
749 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
750 case DF_REF_ORDER_BY_REG_WITH_NOTES:
751 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
752 add_to_table = true;
753 break;
754 default:
755 add_to_table = false;
756 break;
759 else
761 reg_info = df->use_regs;
762 ref_info = &df->use_info;
763 ref_rec_ptr = &DF_INSN_USES (insn);
764 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
767 /* Do not add if ref is not in the right blocks. */
768 if (add_to_table && df->analyze_subset)
769 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
771 df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);
773 if (add_to_table)
774 switch (ref_info->ref_order)
776 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
777 case DF_REF_ORDER_BY_REG_WITH_NOTES:
778 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
779 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
780 break;
781 default:
782 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
783 break;
786 ref_rec = *ref_rec_ptr;
787 while (*ref_rec)
789 count++;
790 ref_rec++;
793 ref_rec = *ref_rec_ptr;
794 if (count)
796 ref_rec = XRESIZEVEC (df_ref, ref_rec, count+2);
797 *ref_rec_ptr = ref_rec;
798 ref_rec[count] = ref;
799 ref_rec[count+1] = NULL;
800 qsort (ref_rec, count + 1, sizeof (df_ref), df_ref_compare);
802 else
804 df_ref *ref_rec = XNEWVEC (df_ref, 2);
805 ref_rec[0] = ref;
806 ref_rec[1] = NULL;
807 *ref_rec_ptr = ref_rec;
810 #if 0
811 if (dump_file)
813 fprintf (dump_file, "adding ref ");
814 df_ref_debug (ref, dump_file);
816 #endif
817 /* By adding the ref directly, df_insn_rescan my not find any
818 differences even though the block will have changed. So we need
819 to mark the block dirty ourselves. */
820 if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
821 df_set_bb_dirty (bb);
826 /*----------------------------------------------------------------------------
827 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
828 ----------------------------------------------------------------------------*/
830 static void
831 df_free_ref (df_ref ref)
833 struct df_scan_problem_data *problem_data
834 = (struct df_scan_problem_data *) df_scan->problem_data;
836 switch (DF_REF_CLASS (ref))
838 case DF_REF_BASE:
839 pool_free (problem_data->ref_base_pool, ref);
840 break;
842 case DF_REF_ARTIFICIAL:
843 pool_free (problem_data->ref_artificial_pool, ref);
844 break;
846 case DF_REF_REGULAR:
847 pool_free (problem_data->ref_regular_pool, ref);
848 break;
853 /* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
854 Also delete the def-use or use-def chain if it exists. */
856 static void
857 df_reg_chain_unlink (df_ref ref)
859 df_ref next = DF_REF_NEXT_REG (ref);
860 df_ref prev = DF_REF_PREV_REG (ref);
861 int id = DF_REF_ID (ref);
862 struct df_reg_info *reg_info;
863 df_ref *refs = NULL;
865 if (DF_REF_REG_DEF_P (ref))
867 int regno = DF_REF_REGNO (ref);
868 reg_info = DF_REG_DEF_GET (regno);
869 refs = df->def_info.refs;
871 else
873 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
875 reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
876 switch (df->use_info.ref_order)
878 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
879 case DF_REF_ORDER_BY_REG_WITH_NOTES:
880 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
881 refs = df->use_info.refs;
882 break;
883 default:
884 break;
887 else
889 reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
890 refs = df->use_info.refs;
894 if (refs)
896 if (df->analyze_subset)
898 if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
899 refs[id] = NULL;
901 else
902 refs[id] = NULL;
905 /* Delete any def-use or use-def chains that start here. It is
906 possible that there is trash in this field. This happens for
907 insns that have been deleted when rescanning has been deferred
908 and the chain problem has also been deleted. The chain tear down
909 code skips deleted insns. */
910 if (df_chain && DF_REF_CHAIN (ref))
911 df_chain_unlink (ref);
913 reg_info->n_refs--;
914 if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
916 gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
917 df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
920 /* Unlink from the reg chain. If there is no prev, this is the
921 first of the list. If not, just join the next and prev. */
922 if (prev)
923 DF_REF_NEXT_REG (prev) = next;
924 else
926 gcc_assert (reg_info->reg_chain == ref);
927 reg_info->reg_chain = next;
929 if (next)
930 DF_REF_PREV_REG (next) = prev;
932 df_free_ref (ref);
936 /* Remove REF from VEC. */
938 static void
939 df_ref_compress_rec (df_ref **vec_ptr, df_ref ref)
941 df_ref *vec = *vec_ptr;
943 if (vec[1])
945 while (*vec && *vec != ref)
946 vec++;
948 while (*vec)
950 *vec = *(vec+1);
951 vec++;
954 else
956 free (vec);
957 *vec_ptr = df_null_ref_rec;
962 /* Unlink REF from all def-use/use-def chains, etc. */
964 void
965 df_ref_remove (df_ref ref)
967 #if 0
968 if (dump_file)
970 fprintf (dump_file, "removing ref ");
971 df_ref_debug (ref, dump_file);
973 #endif
975 if (DF_REF_REG_DEF_P (ref))
977 if (DF_REF_IS_ARTIFICIAL (ref))
979 struct df_scan_bb_info *bb_info
980 = df_scan_get_bb_info (DF_REF_BBNO (ref));
981 df_ref_compress_rec (&bb_info->artificial_defs, ref);
983 else
985 unsigned int uid = DF_REF_INSN_UID (ref);
986 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
987 df_ref_compress_rec (&insn_rec->defs, ref);
990 else
992 if (DF_REF_IS_ARTIFICIAL (ref))
994 struct df_scan_bb_info *bb_info
995 = df_scan_get_bb_info (DF_REF_BBNO (ref));
996 df_ref_compress_rec (&bb_info->artificial_uses, ref);
998 else
1000 unsigned int uid = DF_REF_INSN_UID (ref);
1001 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
1003 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
1004 df_ref_compress_rec (&insn_rec->eq_uses, ref);
1005 else
1006 df_ref_compress_rec (&insn_rec->uses, ref);
1010 /* By deleting the ref directly, df_insn_rescan my not find any
1011 differences even though the block will have changed. So we need
1012 to mark the block dirty ourselves. */
1013 if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
1014 df_set_bb_dirty (DF_REF_BB (ref));
1015 df_reg_chain_unlink (ref);
1019 /* Create the insn record for INSN. If there was one there, zero it
1020 out. */
1022 struct df_insn_info *
1023 df_insn_create_insn_record (rtx insn)
1025 struct df_scan_problem_data *problem_data
1026 = (struct df_scan_problem_data *) df_scan->problem_data;
1027 struct df_insn_info *insn_rec;
1029 df_grow_insn_info ();
1030 insn_rec = DF_INSN_INFO_GET (insn);
1031 if (!insn_rec)
1033 insn_rec = (struct df_insn_info *) pool_alloc (problem_data->insn_pool);
1034 DF_INSN_INFO_SET (insn, insn_rec);
1036 memset (insn_rec, 0, sizeof (struct df_insn_info));
1037 insn_rec->insn = insn;
1038 return insn_rec;
1042 /* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain. */
1044 static void
1045 df_ref_chain_delete_du_chain (df_ref *ref_rec)
1047 while (*ref_rec)
1049 df_ref ref = *ref_rec;
1050 /* CHAIN is allocated by DF_CHAIN. So make sure to
1051 pass df_scan instance for the problem. */
1052 if (DF_REF_CHAIN (ref))
1053 df_chain_unlink (ref);
1054 ref_rec++;
1059 /* Delete all refs in the ref chain. */
1061 static void
1062 df_ref_chain_delete (df_ref *ref_rec)
1064 df_ref *start = ref_rec;
1065 while (*ref_rec)
1067 df_reg_chain_unlink (*ref_rec);
1068 ref_rec++;
1071 /* If the list is empty, it has a special shared element that is not
1072 to be deleted. */
1073 if (*start)
1074 free (start);
1078 /* Delete the hardreg chain. */
1080 static void
1081 df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
1083 struct df_scan_problem_data *problem_data;
1085 if (!hardregs)
1086 return;
1088 problem_data = (struct df_scan_problem_data *) df_scan->problem_data;
1090 while (*hardregs)
1092 pool_free (problem_data->mw_reg_pool, *hardregs);
1093 hardregs++;
1098 /* Delete all of the refs information from INSN. BB must be passed in
1099 except when called from df_process_deferred_rescans to mark the block
1100 as dirty. */
1102 void
1103 df_insn_delete (basic_block bb, unsigned int uid)
1105 struct df_insn_info *insn_info = NULL;
/* Nothing to do when dataflow is not initialized at all.  */
1106 if (!df)
1107 return;
1109 df_grow_bb_info (df_scan);
1110 df_grow_reg_info ();
1112 /* The block must be marked as dirty now, rather than later as in
1113 df_insn_rescan and df_notes_rescan because it may not be there at
1114 rescanning time and the mark would blow up. */
1115 if (bb)
1116 df_set_bb_dirty (bb);
1118 insn_info = DF_INSN_UID_SAFE_GET (uid);
1120 /* The client has deferred rescanning. */
/* Under deferred rescanning, only record the deletion request; the
   real teardown is done later by df_process_deferred_rescans.  */
1121 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1123 if (insn_info)
1125 bitmap_clear_bit (&df->insns_to_rescan, uid);
1126 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
1127 bitmap_set_bit (&df->insns_to_delete, uid);
1129 if (dump_file)
1130 fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
1131 return;
1134 if (dump_file)
1135 fprintf (dump_file, "deleting insn with uid = %d.\n", uid);
/* The insn is going away for real: drop any pending deferred work on
   this uid before tearing the record down.  */
1137 bitmap_clear_bit (&df->insns_to_delete, uid);
1138 bitmap_clear_bit (&df->insns_to_rescan, uid);
1139 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
1140 if (insn_info)
1142 struct df_scan_problem_data *problem_data
1143 = (struct df_scan_problem_data *) df_scan->problem_data;
1145 /* In general, notes do not have the insn_info fields
1146 initialized. However, combine deletes insns by changing them
1147 to notes. How clever. So we cannot just check if it is a
1148 valid insn before short circuiting this code, we need to see
1149 if we actually initialized it. */
1150 if (insn_info->defs)
1152 df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
/* du/ud chains are owned by the chain problem, so only delete
   them when that problem is active.  */
1154 if (df_chain)
1156 df_ref_chain_delete_du_chain (insn_info->defs);
1157 df_ref_chain_delete_du_chain (insn_info->uses);
1158 df_ref_chain_delete_du_chain (insn_info->eq_uses);
1161 df_ref_chain_delete (insn_info->defs);
1162 df_ref_chain_delete (insn_info->uses);
1163 df_ref_chain_delete (insn_info->eq_uses);
/* Return the record to the pool and clear the uid -> insn_info map.  */
1165 pool_free (problem_data->insn_pool, insn_info);
1166 DF_INSN_UID_SET (uid, NULL);
1171 /* Free all of the refs and the mw_hardregs in COLLECTION_REC. */
/* Return every ref in COLLECTION_REC's def/use/eq_use vectors via
   df_free_ref, every mw_hardreg to the scan problem's pool, and then
   free the four stack vectors themselves.  */
1173 static void
1174 df_free_collection_rec (struct df_collection_rec *collection_rec)
1176 unsigned int ix;
1177 struct df_scan_problem_data *problem_data
1178 = (struct df_scan_problem_data *) df_scan->problem_data;
1179 df_ref ref;
1180 struct df_mw_hardreg *mw;
1182 FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, ix, ref)
1183 df_free_ref (ref);
1184 FOR_EACH_VEC_ELT (df_ref, collection_rec->use_vec, ix, ref)
1185 df_free_ref (ref);
1186 FOR_EACH_VEC_ELT (df_ref, collection_rec->eq_use_vec, ix, ref)
1187 df_free_ref (ref);
1188 FOR_EACH_VEC_ELT (df_mw_hardreg_ptr, collection_rec->mw_vec, ix, mw)
1189 pool_free (problem_data->mw_reg_pool, mw);
1191 VEC_free (df_ref, stack, collection_rec->def_vec)
1192 VEC_free (df_ref, stack, collection_rec->use_vec);
1193 VEC_free (df_ref, stack, collection_rec->eq_use_vec);
1194 VEC_free (df_mw_hardreg_ptr, stack, collection_rec->mw_vec);
1197 /* Rescan INSN. Return TRUE if the rescanning produced any changes. */
1199 bool
1200 df_insn_rescan (rtx insn)
1202 unsigned int uid = INSN_UID (insn);
1203 struct df_insn_info *insn_info = NULL;
1204 basic_block bb = BLOCK_FOR_INSN (insn);
1205 struct df_collection_rec collection_rec;
/* Bail out when dataflow is off or this is not a real insn.  */
1207 if ((!df) || (!INSN_P (insn)))
1208 return false;
1210 if (!bb)
1212 if (dump_file)
1213 fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
1214 return false;
1217 /* The client has disabled rescanning and plans to do it itself. */
1218 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1219 return false;
1221 df_grow_bb_info (df_scan);
1222 df_grow_reg_info ();
1224 insn_info = DF_INSN_UID_SAFE_GET (uid);
1226 /* The client has deferred rescanning. */
/* Under deferral, make sure a (possibly empty) record exists, queue
   the uid for a later rescan, and report "no change" for now.  */
1227 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1229 if (!insn_info)
1231 insn_info = df_insn_create_insn_record (insn);
1232 insn_info->defs = df_null_ref_rec;
1233 insn_info->uses = df_null_ref_rec;
1234 insn_info->eq_uses = df_null_ref_rec;
1235 insn_info->mw_hardregs = df_null_mw_rec;
1237 if (dump_file)
1238 fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);
1240 bitmap_clear_bit (&df->insns_to_delete, uid);
1241 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
1242 bitmap_set_bit (&df->insns_to_rescan, INSN_UID (insn));
1243 return false;
/* Collect the insn's refs into freshly allocated stack vectors.  */
1246 collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
1247 collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
1248 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
1249 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
1251 bitmap_clear_bit (&df->insns_to_delete, uid);
1252 bitmap_clear_bit (&df->insns_to_rescan, uid);
1253 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
/* Previously scanned insn: only redo work if the refs changed.  */
1254 if (insn_info)
1256 int luid;
1257 bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
1258 /* If there's no change, return false. */
1259 if (the_same)
1261 df_free_collection_rec (&collection_rec);
1262 if (dump_file)
1263 fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
1264 return false;
1266 if (dump_file)
1267 fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);
1269 /* There's change - we need to delete the existing info.
1270 Since the insn isn't moved, we can salvage its LUID. */
1271 luid = DF_INSN_LUID (insn);
1272 df_insn_delete (NULL, uid);
1273 df_insn_create_insn_record (insn);
1274 DF_INSN_LUID (insn) = luid;
1276 else
1278 struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
1279 df_insn_refs_collect (&collection_rec, bb, insn_info);
1280 if (dump_file)
1281 fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
/* Install the collected refs; debug insns never dirty the block.  */
1284 df_refs_add_to_chains (&collection_rec, bb, insn);
1285 if (!DEBUG_INSN_P (insn))
1286 df_set_bb_dirty (bb);
1288 VEC_free (df_ref, stack, collection_rec.def_vec);
1289 VEC_free (df_ref, stack, collection_rec.use_vec);
1290 VEC_free (df_ref, stack, collection_rec.eq_use_vec);
1291 VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
1293 return true;
1296 /* Same as df_insn_rescan, but don't mark the basic block as
1297 dirty. */
1299 bool
1300 df_insn_rescan_debug_internal (rtx insn)
1302 unsigned int uid = INSN_UID (insn);
1303 struct df_insn_info *insn_info;
/* Only valid for debug insns whose location is already unknown.  */
1305 gcc_assert (DEBUG_INSN_P (insn)
1306 && VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));
1308 if (!df)
1309 return false;
1311 insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
1312 if (!insn_info)
1313 return false;
1315 if (dump_file)
1316 fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);
1318 bitmap_clear_bit (&df->insns_to_delete, uid);
1319 bitmap_clear_bit (&df->insns_to_rescan, uid);
1320 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
/* Record never initialized: nothing to tear down.  */
1322 if (!insn_info->defs)
1323 return false;
/* Record already empty (all four point at the shared sentinels):
   also nothing to do.  */
1325 if (insn_info->defs == df_null_ref_rec
1326 && insn_info->uses == df_null_ref_rec
1327 && insn_info->eq_uses == df_null_ref_rec
1328 && insn_info->mw_hardregs == df_null_mw_rec)
1329 return false;
1331 df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
1333 if (df_chain)
1335 df_ref_chain_delete_du_chain (insn_info->defs);
1336 df_ref_chain_delete_du_chain (insn_info->uses);
1337 df_ref_chain_delete_du_chain (insn_info->eq_uses);
1340 df_ref_chain_delete (insn_info->defs);
1341 df_ref_chain_delete (insn_info->uses);
1342 df_ref_chain_delete (insn_info->eq_uses);
/* Leave the record in place but pointing at the shared empty
   sentinels; returns true to signal that refs were removed.  */
1344 insn_info->defs = df_null_ref_rec;
1345 insn_info->uses = df_null_ref_rec;
1346 insn_info->eq_uses = df_null_ref_rec;
1347 insn_info->mw_hardregs = df_null_mw_rec;
1349 return true;
1353 /* Rescan all of the insns in the function. Note that the artificial
1354 uses and defs are not touched. This function will destroy def-use
1355 or use-def chains. */
1357 void
1358 df_insn_rescan_all (void)
1360 bool no_insn_rescan = false;
1361 bool defer_insn_rescan = false;
1362 basic_block bb;
1363 bitmap_iterator bi;
1364 unsigned int uid;
1365 bitmap_head tmp;
1367 bitmap_initialize (&tmp, &df_bitmap_obstack);
/* Temporarily clear the no-rescan / defer-rescan modes so the loop
   below actually rescans; they are restored at the end.  */
1369 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1371 df_clear_flags (DF_NO_INSN_RESCAN);
1372 no_insn_rescan = true;
1375 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1377 df_clear_flags (DF_DEFER_INSN_RESCAN);
1378 defer_insn_rescan = true;
/* Iterate over a copy since df_insn_delete mutates insns_to_delete.  */
1381 bitmap_copy (&tmp, &df->insns_to_delete);
1382 EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
1384 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1385 if (insn_info)
1386 df_insn_delete (NULL, uid);
1389 bitmap_clear (&tmp);
1390 bitmap_clear (&df->insns_to_delete);
1391 bitmap_clear (&df->insns_to_rescan);
1392 bitmap_clear (&df->insns_to_notes_rescan);
1394 FOR_EACH_BB (bb)
1396 rtx insn;
1397 FOR_BB_INSNS (bb, insn)
1399 df_insn_rescan (insn);
/* Restore whichever modes were active on entry.  */
1403 if (no_insn_rescan)
1404 df_set_flags (DF_NO_INSN_RESCAN);
1405 if (defer_insn_rescan)
1406 df_set_flags (DF_DEFER_INSN_RESCAN);
1410 /* Process all of the deferred rescans or deletions. */
1412 void
1413 df_process_deferred_rescans (void)
1415 bool no_insn_rescan = false;
1416 bool defer_insn_rescan = false;
1417 bitmap_iterator bi;
1418 unsigned int uid;
1419 bitmap_head tmp;
1421 bitmap_initialize (&tmp, &df_bitmap_obstack);
/* Temporarily drop the no-rescan / defer-rescan modes so the deferred
   work can actually be carried out; both are restored below.  */
1423 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1425 df_clear_flags (DF_NO_INSN_RESCAN);
1426 no_insn_rescan = true;
1429 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1431 df_clear_flags (DF_DEFER_INSN_RESCAN);
1432 defer_insn_rescan = true;
1435 if (dump_file)
1436 fprintf (dump_file, "starting the processing of deferred insns\n");
/* Each queue is copied before iterating because the worker calls
   mutate the underlying bitmaps.  Deletions run first, then full
   rescans, then notes-only rescans.  */
1438 bitmap_copy (&tmp, &df->insns_to_delete);
1439 EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
1441 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1442 if (insn_info)
1443 df_insn_delete (NULL, uid);
1446 bitmap_copy (&tmp, &df->insns_to_rescan);
1447 EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
1449 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1450 if (insn_info)
1451 df_insn_rescan (insn_info->insn);
1454 bitmap_copy (&tmp, &df->insns_to_notes_rescan);
1455 EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
1457 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1458 if (insn_info)
1459 df_notes_rescan (insn_info->insn);
1462 if (dump_file)
1463 fprintf (dump_file, "ending the processing of deferred insns\n");
1465 bitmap_clear (&tmp);
1466 bitmap_clear (&df->insns_to_delete);
1467 bitmap_clear (&df->insns_to_rescan);
1468 bitmap_clear (&df->insns_to_notes_rescan);
1470 if (no_insn_rescan)
1471 df_set_flags (DF_NO_INSN_RESCAN);
1472 if (defer_insn_rescan)
1473 df_set_flags (DF_DEFER_INSN_RESCAN);
1475 /* If someone changed regs_ever_live during this pass, fix up the
1476 entry and exit blocks. */
1477 if (df->redo_entry_and_exit)
1479 df_update_entry_exit_and_calls ();
1480 df->redo_entry_and_exit = false;
1485 /* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1486 the uses if INCLUDE_USES. Include the eq_uses if
1487 INCLUDE_EQ_USES. */
1489 static unsigned int
1490 df_count_refs (bool include_defs, bool include_uses,
1491 bool include_eq_uses)
1493 unsigned int regno;
1494 int size = 0;
1495 unsigned int m = df->regs_inited;
1497 for (regno = 0; regno < m; regno++)
1499 if (include_defs)
1500 size += DF_REG_DEF_COUNT (regno);
1501 if (include_uses)
1502 size += DF_REG_USE_COUNT (regno);
1503 if (include_eq_uses)
1504 size += DF_REG_EQ_USE_COUNT (regno);
1506 return size;
1510 /* Build the ref table for either the uses or defs from the reg-use
1511 or reg-def chains. This version processes the refs in reg order
1512 which is likely to be best if processing the whole function. */
1514 static void
1515 df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
1516 bool include_defs,
1517 bool include_uses,
1518 bool include_eq_uses)
1520 unsigned int m = df->regs_inited;
1521 unsigned int regno;
1522 unsigned int offset = 0;
1523 unsigned int start;
/* When hard regs are excluded, skip them but leave their begin/count
   slots zeroed so the table indexing stays valid.  */
1525 if (df->changeable_flags & DF_NO_HARD_REGS)
1527 start = FIRST_PSEUDO_REGISTER;
1528 memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1529 memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1531 else
1532 start = 0;
1534 ref_info->total_size
1535 = df_count_refs (include_defs, include_uses, include_eq_uses);
1537 df_check_and_grow_ref_info (ref_info, 1);
/* Walk each register's def/use/eq_use chains in regno order, placing
   refs at consecutive offsets and stamping each with its table id.  */
1539 for (regno = start; regno < m; regno++)
1541 int count = 0;
1542 ref_info->begin[regno] = offset;
1543 if (include_defs)
1545 df_ref ref = DF_REG_DEF_CHAIN (regno);
1546 while (ref)
1548 ref_info->refs[offset] = ref;
1549 DF_REF_ID (ref) = offset++;
1550 count++;
1551 ref = DF_REF_NEXT_REG (ref);
1552 gcc_checking_assert (offset < ref_info->refs_size);
1555 if (include_uses)
1557 df_ref ref = DF_REG_USE_CHAIN (regno);
1558 while (ref)
1560 ref_info->refs[offset] = ref;
1561 DF_REF_ID (ref) = offset++;
1562 count++;
1563 ref = DF_REF_NEXT_REG (ref);
1564 gcc_checking_assert (offset < ref_info->refs_size);
1567 if (include_eq_uses)
1569 df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
1570 while (ref)
1572 ref_info->refs[offset] = ref;
1573 DF_REF_ID (ref) = offset++;
1574 count++;
1575 ref = DF_REF_NEXT_REG (ref);
1576 gcc_checking_assert (offset < ref_info->refs_size);
1579 ref_info->count[regno] = count;
1582 /* The bitmap size is not decremented when refs are deleted. So
1583 reset it now that we have squished out all of the empty
1584 slots. */
1585 ref_info->table_size = offset;
1589 /* Take build ref table for either the uses or defs from the reg-use
1590 or reg-def chains. This version processes the refs in insn order
1591 which is likely to be best if processing some segment of the
1592 function. */
1594 static void
1595 df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
1596 bool include_defs,
1597 bool include_uses,
1598 bool include_eq_uses)
1600 bitmap_iterator bi;
1601 unsigned int bb_index;
1602 unsigned int m = df->regs_inited;
1603 unsigned int offset = 0;
1604 unsigned int r;
1605 unsigned int start
1606 = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;
1608 memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
1609 memset (ref_info->count, 0, sizeof (int) * df->regs_inited);
1611 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1612 df_check_and_grow_ref_info (ref_info, 1);
/* Pass 1: count the refs of each regno across all analyzed blocks
   (artificial refs first, then every insn's refs).  */
1614 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1616 basic_block bb = BASIC_BLOCK (bb_index);
1617 rtx insn;
1618 df_ref *ref_rec;
1620 if (include_defs)
1621 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
1623 unsigned int regno = DF_REF_REGNO (*ref_rec);
1624 ref_info->count[regno]++;
1626 if (include_uses)
1627 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
1629 unsigned int regno = DF_REF_REGNO (*ref_rec);
1630 ref_info->count[regno]++;
1633 FOR_BB_INSNS (bb, insn)
1635 if (INSN_P (insn))
1637 unsigned int uid = INSN_UID (insn);
1639 if (include_defs)
1640 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1642 unsigned int regno = DF_REF_REGNO (*ref_rec);
1643 ref_info->count[regno]++;
1645 if (include_uses)
1646 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1648 unsigned int regno = DF_REF_REGNO (*ref_rec);
1649 ref_info->count[regno]++;
1651 if (include_eq_uses)
1652 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1654 unsigned int regno = DF_REF_REGNO (*ref_rec);
1655 ref_info->count[regno]++;
/* Convert per-regno counts into starting offsets (prefix sums),
   zeroing the counts so pass 2 can reuse them as cursors.  */
1661 for (r = start; r < m; r++)
1663 ref_info->begin[r] = offset;
1664 offset += ref_info->count[r];
1665 ref_info->count[r] = 0;
/* Pass 2: walk the same blocks/insns again and drop each ref into
   its regno's slice, assigning its table id as we go.  */
1668 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1670 basic_block bb = BASIC_BLOCK (bb_index);
1671 rtx insn;
1672 df_ref *ref_rec;
1674 if (include_defs)
1675 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
1677 df_ref ref = *ref_rec;
1678 unsigned int regno = DF_REF_REGNO (ref);
1679 if (regno >= start)
1681 unsigned int id
1682 = ref_info->begin[regno] + ref_info->count[regno]++;
1683 DF_REF_ID (ref) = id;
1684 ref_info->refs[id] = ref;
1687 if (include_uses)
1688 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
1690 df_ref ref = *ref_rec;
1691 unsigned int regno = DF_REF_REGNO (ref);
1692 if (regno >= start)
1694 unsigned int id
1695 = ref_info->begin[regno] + ref_info->count[regno]++;
1696 DF_REF_ID (ref) = id;
1697 ref_info->refs[id] = ref;
1701 FOR_BB_INSNS (bb, insn)
1703 if (INSN_P (insn))
1705 unsigned int uid = INSN_UID (insn);
1707 if (include_defs)
1708 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1710 df_ref ref = *ref_rec;
1711 unsigned int regno = DF_REF_REGNO (ref);
1712 if (regno >= start)
1714 unsigned int id
1715 = ref_info->begin[regno] + ref_info->count[regno]++;
1716 DF_REF_ID (ref) = id;
1717 ref_info->refs[id] = ref;
1720 if (include_uses)
1721 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1723 df_ref ref = *ref_rec;
1724 unsigned int regno = DF_REF_REGNO (ref);
1725 if (regno >= start)
1727 unsigned int id
1728 = ref_info->begin[regno] + ref_info->count[regno]++;
1729 DF_REF_ID (ref) = id;
1730 ref_info->refs[id] = ref;
1733 if (include_eq_uses)
1734 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1736 df_ref ref = *ref_rec;
1737 unsigned int regno = DF_REF_REGNO (ref);
1738 if (regno >= start)
1740 unsigned int id
1741 = ref_info->begin[regno] + ref_info->count[regno]++;
1742 DF_REF_ID (ref) = id;
1743 ref_info->refs[id] = ref;
1750 /* The bitmap size is not decremented when refs are deleted. So
1751 reset it now that we have squished out all of the empty
1752 slots. */
1754 ref_info->table_size = offset;
1757 /* Take build ref table for either the uses or defs from the reg-use
1758 or reg-def chains. */
/* Dispatch to the insn-walking variant when only a subset of blocks
   is being analyzed, otherwise to the cheaper regno-chain walk.
   NOTE(review): presumably the insn walk is needed because the
   per-reg chains also contain refs outside the subset -- confirm.  */
1760 static void
1761 df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
1762 bool include_defs,
1763 bool include_uses,
1764 bool include_eq_uses)
1766 if (df->analyze_subset)
1767 df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
1768 include_uses, include_eq_uses);
1769 else
1770 df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
1771 include_uses, include_eq_uses);
1775 /* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET. */
1776 static unsigned int
1777 df_add_refs_to_table (unsigned int offset,
1778 struct df_ref_info *ref_info,
1779 df_ref *ref_vec)
1781 while (*ref_vec)
1783 df_ref ref = *ref_vec;
1784 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
1785 || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
1787 ref_info->refs[offset] = ref;
1788 DF_REF_ID (*ref_vec) = offset++;
1790 ref_vec++;
1792 return offset;
1796 /* Count the number of refs in all of the insns of BB. Include the
1797 defs if INCLUDE_DEFS. Include the uses if INCLUDE_USES. Include the
1798 eq_uses if INCLUDE_EQ_USES. */
/* Append, starting at OFFSET, all refs of block BB into REF_INFO's
   table: the block's artificial defs/uses first, then each insn's
   refs in insn order.  Returns the next free table offset.  */
1800 static unsigned int
1801 df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
1802 struct df_ref_info *ref_info,
1803 bool include_defs, bool include_uses,
1804 bool include_eq_uses)
1806 rtx insn;
1808 if (include_defs)
1809 offset = df_add_refs_to_table (offset, ref_info,
1810 df_get_artificial_defs (bb->index));
1811 if (include_uses)
1812 offset = df_add_refs_to_table (offset, ref_info,
1813 df_get_artificial_uses (bb->index));
1815 FOR_BB_INSNS (bb, insn)
1816 if (INSN_P (insn))
1818 unsigned int uid = INSN_UID (insn);
1819 if (include_defs)
1820 offset = df_add_refs_to_table (offset, ref_info,
1821 DF_INSN_UID_DEFS (uid));
1822 if (include_uses)
1823 offset = df_add_refs_to_table (offset, ref_info,
1824 DF_INSN_UID_USES (uid));
1825 if (include_eq_uses)
1826 offset = df_add_refs_to_table (offset, ref_info,
1827 DF_INSN_UID_EQ_USES (uid));
1829 return offset;
1833 /* Organize the refs by insn into the table in REF_INFO. If
1834 blocks_to_analyze is defined, use that set, otherwise the entire
1835 program. Include the defs if INCLUDE_DEFS. Include the uses if
1836 INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES. */
1838 static void
1839 df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
1840 bool include_defs, bool include_uses,
1841 bool include_eq_uses)
1843 basic_block bb;
1844 unsigned int offset = 0;
1846 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1847 df_check_and_grow_ref_info (ref_info, 1);
/* Restrict to the analyzed subset of blocks when one is set,
   otherwise cover every block including entry/exit (FOR_ALL_BB).  */
1848 if (df->blocks_to_analyze)
1850 bitmap_iterator bi;
1851 unsigned int index;
1853 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
1855 offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
1856 include_defs, include_uses,
1857 include_eq_uses);
1860 ref_info->table_size = offset;
1862 else
1864 FOR_ALL_BB (bb)
1865 offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
1866 include_defs, include_uses,
1867 include_eq_uses);
1868 ref_info->table_size = offset;
1873 /* If the use refs in DF are not organized, reorganize them. */
1875 void
1876 df_maybe_reorganize_use_refs (enum df_ref_order order)
1878 if (order == df->use_info.ref_order)
1879 return;
1881 switch (order)
1883 case DF_REF_ORDER_BY_REG:
1884 df_reorganize_refs_by_reg (&df->use_info, false, true, false);
1885 break;
1887 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1888 df_reorganize_refs_by_reg (&df->use_info, false, true, true);
1889 break;
1891 case DF_REF_ORDER_BY_INSN:
1892 df_reorganize_refs_by_insn (&df->use_info, false, true, false);
1893 break;
1895 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1896 df_reorganize_refs_by_insn (&df->use_info, false, true, true);
1897 break;
1899 case DF_REF_ORDER_NO_TABLE:
1900 free (df->use_info.refs);
1901 df->use_info.refs = NULL;
1902 df->use_info.refs_size = 0;
1903 break;
1905 case DF_REF_ORDER_UNORDERED:
1906 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1907 gcc_unreachable ();
1908 break;
1911 df->use_info.ref_order = order;
1915 /* If the def refs in DF are not organized, reorganize them. */
1917 void
1918 df_maybe_reorganize_def_refs (enum df_ref_order order)
1920 if (order == df->def_info.ref_order)
1921 return;
1923 switch (order)
1925 case DF_REF_ORDER_BY_REG:
1926 df_reorganize_refs_by_reg (&df->def_info, true, false, false);
1927 break;
1929 case DF_REF_ORDER_BY_INSN:
1930 df_reorganize_refs_by_insn (&df->def_info, true, false, false);
1931 break;
1933 case DF_REF_ORDER_NO_TABLE:
1934 free (df->def_info.refs);
1935 df->def_info.refs = NULL;
1936 df->def_info.refs_size = 0;
1937 break;
1939 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1940 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1941 case DF_REF_ORDER_UNORDERED:
1942 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1943 gcc_unreachable ();
1944 break;
1947 df->def_info.ref_order = order;
1951 /* Change all of the basic block references in INSN to use the insn's
1952 current basic block. This function is called from routines that move
1953 instructions from one block to another. */
1955 void
1956 df_insn_change_bb (rtx insn, basic_block new_bb)
1958 basic_block old_bb = BLOCK_FOR_INSN (insn);
1959 struct df_insn_info *insn_info;
1960 unsigned int uid = INSN_UID (insn);
1962 if (old_bb == new_bb)
1963 return;
/* Update the insn's block even when df is not active -- the move
   itself must always happen.  */
1965 set_block_for_insn (insn, new_bb);
1967 if (!df)
1968 return;
1970 if (dump_file)
1971 fprintf (dump_file, "changing bb of uid %d\n", uid);
/* An insn never scanned before is simply scanned fresh.  */
1973 insn_info = DF_INSN_UID_SAFE_GET (uid);
1974 if (insn_info == NULL)
1976 if (dump_file)
1977 fprintf (dump_file, " unscanned insn\n");
1978 df_insn_rescan (insn);
1979 return;
1982 if (!INSN_P (insn))
1983 return;
/* Both the old and the new block must be recomputed.  */
1985 df_set_bb_dirty (new_bb);
1986 if (old_bb)
1988 if (dump_file)
1989 fprintf (dump_file, " from %d to %d\n",
1990 old_bb->index, new_bb->index);
1991 df_set_bb_dirty (old_bb);
1993 else
1994 if (dump_file)
1995 fprintf (dump_file, " to %d\n", new_bb->index);
1999 /* Helper function for df_ref_change_reg_with_loc. */
2001 static void
2002 df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
2003 struct df_reg_info *new_df,
2004 int new_regno, rtx loc)
2006 df_ref the_ref = old_df->reg_chain;
/* Scan OLD_DF's reg chain; every non-artificial ref whose location is
   exactly LOC is moved to NEW_DF's chain and renumbered.  */
2008 while (the_ref)
2010 if ((!DF_REF_IS_ARTIFICIAL (the_ref))
2011 && DF_REF_LOC (the_ref)
2012 && (*DF_REF_LOC (the_ref) == loc))
2014 df_ref next_ref = DF_REF_NEXT_REG (the_ref);
2015 df_ref prev_ref = DF_REF_PREV_REG (the_ref);
2016 df_ref *ref_vec, *ref_vec_t;
2017 struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);
2018 unsigned int count = 0;
2020 DF_REF_REGNO (the_ref) = new_regno;
2021 DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];
2023 /* Pull the_ref out of the old regno chain. */
2024 if (prev_ref)
2025 DF_REF_NEXT_REG (prev_ref) = next_ref;
2026 else
2027 old_df->reg_chain = next_ref;
2028 if (next_ref)
2029 DF_REF_PREV_REG (next_ref) = prev_ref;
2030 old_df->n_refs--;
2032 /* Put the ref into the new regno chain. */
2033 DF_REF_PREV_REG (the_ref) = NULL;
2034 DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
2035 if (new_df->reg_chain)
2036 DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
2037 new_df->reg_chain = the_ref;
2038 new_df->n_refs++;
2039 if (DF_REF_BB (the_ref))
2040 df_set_bb_dirty (DF_REF_BB (the_ref));
2042 /* Need to sort the record again that the ref was in because
2043 the regno is a sorting key. First, find the right
2044 record. */
2045 if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
2046 ref_vec = insn_info->eq_uses;
2047 else
2048 ref_vec = insn_info->uses;
2049 if (dump_file)
2050 fprintf (dump_file, "changing reg in insn %d\n",
2051 DF_REF_INSN_UID (the_ref));
2053 ref_vec_t = ref_vec;
2055 /* Find the length. */
2056 while (*ref_vec_t)
2058 count++;
2059 ref_vec_t++;
2061 qsort (ref_vec, count, sizeof (df_ref ), df_ref_compare);
/* next_ref was saved before the unlink, so iteration continues
   correctly in the OLD chain.  */
2063 the_ref = next_ref;
2065 else
2066 the_ref = DF_REF_NEXT_REG (the_ref);
2071 /* Change the regno of all refs that contained LOC from OLD_REGNO to
2072 NEW_REGNO. Refs that do not match LOC are not changed which means
2073 that artificial refs are not changed since they have no loc. This
2074 call is to support the SET_REGNO macro. */
2076 void
2077 df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
/* Nothing to do without df, with an invalid old regno, or when the
   regno is unchanged.  */
2079 if ((!df) || (old_regno == -1) || (old_regno == new_regno))
2080 return;
2082 df_grow_reg_info ();
/* The def, use and note-use chains are independent; migrate matching
   refs in each of the three.  */
2084 df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
2085 DF_REG_DEF_GET (new_regno), new_regno, loc);
2086 df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
2087 DF_REG_USE_GET (new_regno), new_regno, loc);
2088 df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
2089 DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
2093 /* Delete the mw_hardregs that point into the eq_notes. */
/* Remove from INSN_INFO's mw_hardregs vector every entry flagged
   DF_REF_IN_NOTE, compacting the vector in place.  Returns the number
   of entries deleted; when nothing survives, the vector is freed and
   replaced by the shared empty sentinel (returning 0).  */
2095 static unsigned int
2096 df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
2098 struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
2099 unsigned int deleted = 0;
2100 unsigned int count = 0;
2101 struct df_scan_problem_data *problem_data
2102 = (struct df_scan_problem_data *) df_scan->problem_data;
2104 if (!*mw_vec)
2105 return 0;
2107 while (*mw_vec)
2109 if ((*mw_vec)->flags & DF_REF_IN_NOTE)
2111 struct df_mw_hardreg **temp_vec = mw_vec;
2113 pool_free (problem_data->mw_reg_pool, *mw_vec);
/* NOTE(review): this reassignment is redundant -- temp_vec was
   already initialized to mw_vec two lines above.  */
2114 temp_vec = mw_vec;
2115 /* Shove the remaining ones down one to fill the gap. While
2116 this looks n**2, it is highly unusual to have any mw regs
2117 in eq_notes and the chances of more than one are almost
2118 non existent. */
2119 while (*temp_vec)
2121 *temp_vec = *(temp_vec + 1);
2122 temp_vec++;
2124 deleted++;
2126 else
2128 mw_vec++;
2129 count++;
/* All entries were deleted: release the vector and install the
   shared empty sentinel.  */
2133 if (count == 0)
2135 df_scan_free_mws_vec (insn_info->mw_hardregs);
2136 insn_info->mw_hardregs = df_null_mw_rec;
2137 return 0;
2139 return deleted;
2143 /* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN. */
2145 void
2146 df_notes_rescan (rtx insn)
2148 struct df_insn_info *insn_info;
2149 unsigned int uid = INSN_UID (insn);
2151 if (!df)
2152 return;
2154 /* The client has disabled rescanning and plans to do it itself. */
2155 if (df->changeable_flags & DF_NO_INSN_RESCAN)
2156 return;
2158 /* Do nothing if the insn hasn't been emitted yet. */
2159 if (!BLOCK_FOR_INSN (insn))
2160 return;
2162 df_grow_bb_info (df_scan);
2163 df_grow_reg_info ();
2165 insn_info = DF_INSN_UID_SAFE_GET (INSN_UID(insn));
2167 /* The client has deferred rescanning. */
/* Under deferral, just make sure an (empty) record exists and queue
   the notes-rescan for df_process_deferred_rescans.  */
2168 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
2170 if (!insn_info)
2172 insn_info = df_insn_create_insn_record (insn);
2173 insn_info->defs = df_null_ref_rec;
2174 insn_info->uses = df_null_ref_rec;
2175 insn_info->eq_uses = df_null_ref_rec;
2176 insn_info->mw_hardregs = df_null_mw_rec;
2179 bitmap_clear_bit (&df->insns_to_delete, uid);
2180 /* If the insn is set to be rescanned, it does not need to also
2181 be notes rescanned. */
2182 if (!bitmap_bit_p (&df->insns_to_rescan, uid))
2183 bitmap_set_bit (&df->insns_to_notes_rescan, INSN_UID (insn));
2184 return;
2187 bitmap_clear_bit (&df->insns_to_delete, uid);
2188 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
2190 if (insn_info)
2192 basic_block bb = BLOCK_FOR_INSN (insn);
2193 rtx note;
2194 struct df_collection_rec collection_rec;
2195 unsigned int num_deleted;
2196 unsigned int mw_len;
/* Only the eq_use and mw vectors participate in a notes rescan;
   defs/uses are left untouched.  */
2198 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
2199 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
2200 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
/* Throw away the old note-derived refs before collecting fresh
   ones from the current notes.  */
2202 num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
2203 df_ref_chain_delete (insn_info->eq_uses);
2204 insn_info->eq_uses = NULL;
2206 /* Process REG_EQUIV/REG_EQUAL notes */
2207 for (note = REG_NOTES (insn); note;
2208 note = XEXP (note, 1))
2210 switch (REG_NOTE_KIND (note))
2212 case REG_EQUIV:
2213 case REG_EQUAL:
2214 df_uses_record (&collection_rec,
2215 &XEXP (note, 0), DF_REF_REG_USE,
2216 bb, insn_info, DF_REF_IN_NOTE);
/* Fall through.  */
2217 default:
2218 break;
2222 /* Find some place to put any new mw_hardregs. */
2223 df_canonize_collection_rec (&collection_rec);
2224 mw_len = VEC_length (df_mw_hardreg_ptr, collection_rec.mw_vec);
2225 if (mw_len)
2227 unsigned int count = 0;
2228 struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
2229 while (*mw_rec)
2231 count++;
2232 mw_rec++;
2235 if (count)
2237 /* Append to the end of the existing record after
2238 expanding it if necessary. */
2239 if (mw_len > num_deleted)
2241 insn_info->mw_hardregs =
2242 XRESIZEVEC (struct df_mw_hardreg *,
2243 insn_info->mw_hardregs,
2244 count + 1 + mw_len);
2246 memcpy (&insn_info->mw_hardregs[count],
2247 VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
2248 mw_len * sizeof (struct df_mw_hardreg *));
2249 insn_info->mw_hardregs[count + mw_len] = NULL;
2250 qsort (insn_info->mw_hardregs, count + mw_len,
2251 sizeof (struct df_mw_hardreg *), df_mw_compare);
2253 else
2255 /* No vector there. */
2256 insn_info->mw_hardregs
2257 = XNEWVEC (struct df_mw_hardreg*, 1 + mw_len);
2258 memcpy (insn_info->mw_hardregs,
2259 VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
2260 mw_len * sizeof (struct df_mw_hardreg *));
2261 insn_info->mw_hardregs[mw_len] = NULL;
2264 /* Get rid of the mw_rec so that df_refs_add_to_chains will
2265 ignore it. */
2266 VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
2267 df_refs_add_to_chains (&collection_rec, bb, insn);
2268 VEC_free (df_ref, stack, collection_rec.eq_use_vec);
/* Never scanned before: do a full rescan instead.  */
2270 else
2271 df_insn_rescan (insn);
2276 /*----------------------------------------------------------------------------
2277 Hard core instruction scanning code. No external interfaces here,
2278 just a lot of routines that look inside insns.
2279 ----------------------------------------------------------------------------*/
2282 /* Return true if the contents of two df_ref's are identical.
2283 It ignores DF_REF_MARKER. */
2285 static bool
2286 df_ref_equal_p (df_ref ref1, df_ref ref2)
2288 if (!ref2)
2289 return false;
2291 if (ref1 == ref2)
2292 return true;
2294 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
2295 || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
2296 || DF_REF_REG (ref1) != DF_REF_REG (ref2)
2297 || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
2298 || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2299 != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2300 || DF_REF_BB (ref1) != DF_REF_BB (ref2)
2301 || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
2302 return false;
2304 switch (DF_REF_CLASS (ref1))
2306 case DF_REF_ARTIFICIAL:
2307 case DF_REF_BASE:
2308 return true;
2310 case DF_REF_REGULAR:
2311 return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);
2313 default:
2314 gcc_unreachable ();
2316 return false;
2320 /* Compare REF1 and REF2 for sorting. This is only called from places
2321 where all of the refs are of the same type, in the same insn, and
2322 have the same bb. So these fields are not checked. */
/* qsort comparator for df_ref vectors; callers guarantee all refs in
   one vector share the same type, insn and bb.  Sort keys, in order:
   class, regno, type, reg, loc, flags, then insertion order.  */
2324 static int
2325 df_ref_compare (const void *r1, const void *r2)
2327 const df_ref ref1 = *(const df_ref *)r1;
2328 const df_ref ref2 = *(const df_ref *)r2;
2330 if (ref1 == ref2)
2331 return 0;
2333 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
2334 return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);
2336 if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
2337 return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
2339 if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
2340 return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
/* Same regno but different REG rtx: fall back to insertion order so
   the sort stays deterministic.  */
2342 if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
2343 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2345 /* Cannot look at the LOC field on artificial refs. */
2346 if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
2347 && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
2348 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2350 if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
2352 /* If two refs are identical except that one of them is from
2353 a mw and one is not, we need to have the one with the mw
2354 first. */
2355 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
2356 DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
2357 return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
2358 else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
2359 return -1;
2360 else
2361 return 1;
2364 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2367 static void
2368 df_swap_refs (VEC(df_ref,stack) **ref_vec, int i, int j)
2370 df_ref tmp = VEC_index (df_ref, *ref_vec, i);
2371 VEC_replace (df_ref, *ref_vec, i, VEC_index (df_ref, *ref_vec, j));
2372 VEC_replace (df_ref, *ref_vec, j, tmp);
/* Sort *REF_VEC with df_ref_compare and remove duplicate entries (as
   judged by df_ref_equal_p), freeing each duplicate ref.  On return
   the vector is strictly ordered and duplicate-free.  */

static void
df_sort_and_compress_refs (VEC(df_ref,stack) **ref_vec)
{
  unsigned int count;
  unsigned int i;
  unsigned int dist = 0;	/* Number of duplicates removed so far.  */

  count = VEC_length (df_ref, *ref_vec);

  /* If there are 1 or 0 elements, there is nothing to do.  */
  if (count < 2)
    return;
  else if (count == 2)
    {
      /* Two elements: a single compare-and-swap sorts them.  */
      df_ref r0 = VEC_index (df_ref, *ref_vec, 0);
      df_ref r1 = VEC_index (df_ref, *ref_vec, 1);
      if (df_ref_compare (&r0, &r1) > 0)
	df_swap_refs (ref_vec, 0, 1);
    }
  else
    {
      /* Scan for the first out-of-order adjacent pair.  */
      for (i = 0; i < count - 1; i++)
	{
	  df_ref r0 = VEC_index (df_ref, *ref_vec, i);
	  df_ref r1 = VEC_index (df_ref, *ref_vec, i + 1);
	  if (df_ref_compare (&r0, &r1) >= 0)
	    break;
	}
      /* If the array is already strictly ordered,
	 which is the most common case for large COUNT case
	 (which happens for CALL INSNs),
	 no need to sort and filter out duplicate.
	 Simply return the count.
	 Make sure DF_GET_ADD_REFS adds refs in the increasing order
	 of DF_REF_COMPARE.  */
      if (i == count - 1)
	return;
      VEC_qsort (df_ref, *ref_vec, df_ref_compare);
    }

  /* Compress duplicates out in place; surviving elements slide down
     by DIST positions.  */
  for (i=0; i<count-dist; i++)
    {
      /* Find the next ref that is not equal to the current ref.  */
      while (i + dist + 1 < count
	     && df_ref_equal_p (VEC_index (df_ref, *ref_vec, i),
				VEC_index (df_ref, *ref_vec, i + dist + 1)))
	{
	  df_free_ref (VEC_index (df_ref, *ref_vec, i + dist + 1));
	  dist++;
	}
      /* Copy it down to the next position.  */
      if (dist && i + dist + 1 < count)
	VEC_replace (df_ref, *ref_vec, i + 1,
		     VEC_index (df_ref, *ref_vec, i + dist + 1));
    }

  count -= dist;
  VEC_truncate (df_ref, *ref_vec, count);
}
/* Return true if the contents of two df_mw_hardreg records are
   identical.  MW2 may be NULL, in which case the result is false.
   (The previous comment wrongly said "df_ref's"/"DF_REF_MARKER" —
   this compares multiword hardreg records.)  */

static bool
df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
{
  if (!mw2)
    return false;
  /* mw_order is deliberately not compared; it is only a sort
     tie-breaker.  */
  return (mw1 == mw2) ||
    (mw1->mw_reg == mw2->mw_reg
     && mw1->type == mw2->type
     && mw1->flags == mw2->flags
     && mw1->start_regno == mw2->start_regno
     && mw1->end_regno == mw2->end_regno);
}
2455 /* Compare MW1 and MW2 for sorting. */
2457 static int
2458 df_mw_compare (const void *m1, const void *m2)
2460 const struct df_mw_hardreg *const mw1 = *(const struct df_mw_hardreg *const*)m1;
2461 const struct df_mw_hardreg *const mw2 = *(const struct df_mw_hardreg *const*)m2;
2463 if (mw1 == mw2)
2464 return 0;
2466 if (mw1->type != mw2->type)
2467 return mw1->type - mw2->type;
2469 if (mw1->flags != mw2->flags)
2470 return mw1->flags - mw2->flags;
2472 if (mw1->start_regno != mw2->start_regno)
2473 return mw1->start_regno - mw2->start_regno;
2475 if (mw1->end_regno != mw2->end_regno)
2476 return mw1->end_regno - mw2->end_regno;
2478 if (mw1->mw_reg != mw2->mw_reg)
2479 return mw1->mw_order - mw2->mw_order;
2481 return 0;
/* Sort *MW_VEC with df_mw_compare and remove duplicate multiword
   hardreg records (as judged by df_mw_equal_p), returning each
   duplicate to the mw_reg allocation pool.  */

static void
df_sort_and_compress_mws (VEC(df_mw_hardreg_ptr,stack) **mw_vec)
{
  unsigned int count;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;
  unsigned int dist = 0;	/* Number of duplicates removed so far.  */

  count = VEC_length (df_mw_hardreg_ptr, *mw_vec);
  if (count < 2)
    return;
  else if (count == 2)
    {
      /* Two entries: a single compare-and-swap sorts them.  */
      struct df_mw_hardreg *m0 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 0);
      struct df_mw_hardreg *m1 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 1);
      if (df_mw_compare (&m0, &m1) > 0)
	{
	  struct df_mw_hardreg *tmp = VEC_index (df_mw_hardreg_ptr,
						 *mw_vec, 0);
	  VEC_replace (df_mw_hardreg_ptr, *mw_vec, 0,
		       VEC_index (df_mw_hardreg_ptr, *mw_vec, 1));
	  VEC_replace (df_mw_hardreg_ptr, *mw_vec, 1, tmp);
	}
    }
  else
    VEC_qsort (df_mw_hardreg_ptr, *mw_vec, df_mw_compare);

  /* Compress duplicates out in place; surviving elements slide down
     by DIST positions.  */
  for (i=0; i<count-dist; i++)
    {
      /* Find the next ref that is not equal to the current ref.  */
      while (i + dist + 1 < count
	     && df_mw_equal_p (VEC_index (df_mw_hardreg_ptr, *mw_vec, i),
			       VEC_index (df_mw_hardreg_ptr, *mw_vec,
					  i + dist + 1)))
	{
	  pool_free (problem_data->mw_reg_pool,
		     VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
	  dist++;
	}
      /* Copy it down to the next position.  */
      if (dist && i + dist + 1 < count)
	VEC_replace (df_mw_hardreg_ptr, *mw_vec, i + 1,
		     VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
    }

  count -= dist;
  VEC_truncate (df_mw_hardreg_ptr, *mw_vec, count);
}
/* Put COLLECTION_REC into canonical form: sort each of its ref
   vectors and remove duplicates from them.  */

static void
df_canonize_collection_rec (struct df_collection_rec *collection_rec)
{
  df_sort_and_compress_refs (&collection_rec->def_vec);
  df_sort_and_compress_refs (&collection_rec->use_vec);
  df_sort_and_compress_refs (&collection_rec->eq_use_vec);
  df_sort_and_compress_mws (&collection_rec->mw_vec);
}
/* Add THIS_REF to the appropriate reg_info/ref_info chains: push it on
   the front of REG_INFO's doubly-linked reg_chain and, when
   ADD_TO_TABLE, append it to REF_INFO's flat table (which assigns its
   DF_REF_ID; otherwise the id is set to -1).  */

static void
df_install_ref (df_ref this_ref,
		struct df_reg_info *reg_info,
		struct df_ref_info *ref_info,
		bool add_to_table)
{
  unsigned int regno = DF_REF_REGNO (this_ref);
  /* Add the ref to the reg_{def,use,eq_use} chain.  */
  df_ref head = reg_info->reg_chain;

  reg_info->reg_chain = this_ref;
  reg_info->n_refs++;

  if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
    {
      /* Only hard registers may carry DF_HARD_REG_LIVE.  */
      gcc_assert (regno < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[regno]++;
    }

  /* The ref must not already be on any reg chain.  */
  gcc_checking_assert (DF_REF_NEXT_REG (this_ref) == NULL
		       && DF_REF_PREV_REG (this_ref) == NULL);

  DF_REF_NEXT_REG (this_ref) = head;

  /* We cannot actually link to the head of the chain.  */
  DF_REF_PREV_REG (this_ref) = NULL;

  if (head)
    DF_REF_PREV_REG (head) = this_ref;

  if (add_to_table)
    {
      gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
      df_check_and_grow_ref_info (ref_info, 1);
      DF_REF_ID (this_ref) = ref_info->table_size;
      /* Add the ref to the big array of defs.  */
      ref_info->refs[ref_info->table_size] = this_ref;
      ref_info->table_size++;
    }
  else
    DF_REF_ID (this_ref) = -1;

  ref_info->total_size++;
}
/* This function takes one of the groups of refs (defs, uses or
   eq_uses) and installs the entire group into the insn.  It also adds
   each of these refs into the appropriate chains.  Returns a freshly
   allocated NULL-terminated array of the refs, or the shared
   df_null_ref_rec when OLD_VEC is empty.  */

static df_ref *
df_install_refs (basic_block bb,
		 VEC(df_ref,stack)* old_vec,
		 struct df_reg_info **reg_info,
		 struct df_ref_info *ref_info,
		 bool is_notes)
{
  unsigned int count;

  count = VEC_length (df_ref, old_vec);
  if (count)
    {
      df_ref *new_vec = XNEWVEC (df_ref, count + 1);
      bool add_to_table;
      df_ref this_ref;
      unsigned int ix;

      /* Appending refs in arbitrary order invalidates any BY_REG or
	 BY_INSN ordering the table may have had, so downgrade the
	 recorded ordering to the matching UNORDERED kind.  */
      switch (ref_info->ref_order)
	{
	case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	case DF_REF_ORDER_BY_REG_WITH_NOTES:
	case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	  ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
	  add_to_table = true;
	  break;
	case DF_REF_ORDER_UNORDERED:
	case DF_REF_ORDER_BY_REG:
	case DF_REF_ORDER_BY_INSN:
	  ref_info->ref_order = DF_REF_ORDER_UNORDERED;
	  /* Note refs are not tracked in a table without notes.  */
	  add_to_table = !is_notes;
	  break;
	default:
	  add_to_table = false;
	  break;
	}

      /* Do not add if ref is not in the right blocks.  */
      if (add_to_table && df->analyze_subset)
	add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

      FOR_EACH_VEC_ELT (df_ref, old_vec, ix, this_ref)
	{
	  new_vec[ix] = this_ref;
	  df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
			  ref_info, add_to_table);
	}

      new_vec[count] = NULL;
      return new_vec;
    }
  else
    return df_null_ref_rec;
}
2657 /* This function takes the mws installs the entire group into the
2658 insn. */
2660 static struct df_mw_hardreg **
2661 df_install_mws (VEC(df_mw_hardreg_ptr,stack) *old_vec)
2663 unsigned int count;
2665 count = VEC_length (df_mw_hardreg_ptr, old_vec);
2666 if (count)
2668 struct df_mw_hardreg **new_vec
2669 = XNEWVEC (struct df_mw_hardreg*, count + 1);
2670 memcpy (new_vec, VEC_address (df_mw_hardreg_ptr, old_vec),
2671 sizeof (struct df_mw_hardreg*) * count);
2672 new_vec[count] = NULL;
2673 return new_vec;
2675 else
2676 return df_null_mw_rec;
/* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
   chains and update other necessary information.  When INSN is
   non-NULL the vectors in COLLECTION_REC replace the insn's ref
   arrays; otherwise they become BB's artificial defs and uses.  */

static void
df_refs_add_to_chains (struct df_collection_rec *collection_rec,
		       basic_block bb, rtx insn)
{
  if (insn)
    {
      struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
      /* If there is a vector in the collection rec, add it to the
	 insn.  A null rec is a signal that the caller will handle the
	 chain specially.  */
      if (collection_rec->def_vec)
	{
	  df_scan_free_ref_vec (insn_rec->defs);
	  insn_rec->defs
	    = df_install_refs (bb, collection_rec->def_vec,
			       df->def_regs,
			       &df->def_info, false);
	}
      if (collection_rec->use_vec)
	{
	  df_scan_free_ref_vec (insn_rec->uses);
	  insn_rec->uses
	    = df_install_refs (bb, collection_rec->use_vec,
			       df->use_regs,
			       &df->use_info, false);
	}
      if (collection_rec->eq_use_vec)
	{
	  df_scan_free_ref_vec (insn_rec->eq_uses);
	  /* eq_uses share the use table but are installed as notes
	     (is_notes == true).  */
	  insn_rec->eq_uses
	    = df_install_refs (bb, collection_rec->eq_use_vec,
			       df->eq_use_regs,
			       &df->use_info, true);
	}
      if (collection_rec->mw_vec)
	{
	  df_scan_free_mws_vec (insn_rec->mw_hardregs);
	  insn_rec->mw_hardregs
	    = df_install_mws (collection_rec->mw_vec);
	}
    }
  else
    {
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);

      df_scan_free_ref_vec (bb_info->artificial_defs);
      bb_info->artificial_defs
	= df_install_refs (bb, collection_rec->def_vec,
			   df->def_regs,
			   &df->def_info, false);
      df_scan_free_ref_vec (bb_info->artificial_uses);
      bb_info->artificial_uses
	= df_install_refs (bb, collection_rec->use_vec,
			   df->use_regs,
			   &df->use_info, false);
    }
}
/* Allocate a ref of class CL from the matching pool and initialize its
   fields.  The new ref is pushed onto the appropriate vector of
   COLLECTION_REC when one is given, and installed incrementally
   otherwise.  LOC must be NULL except for DF_REF_REGULAR refs.  */

static df_ref
df_ref_create_structure (enum df_ref_class cl,
			 struct df_collection_rec *collection_rec,
			 rtx reg, rtx *loc,
			 basic_block bb, struct df_insn_info *info,
			 enum df_ref_type ref_type,
			 int ref_flags)
{
  df_ref this_ref = NULL;
  int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  /* Each ref class has its own allocation pool.  */
  switch (cl)
    {
    case DF_REF_BASE:
      this_ref = (df_ref) pool_alloc (problem_data->ref_base_pool);
      gcc_checking_assert (loc == NULL);
      break;

    case DF_REF_ARTIFICIAL:
      this_ref = (df_ref) pool_alloc (problem_data->ref_artificial_pool);
      this_ref->artificial_ref.bb = bb;
      gcc_checking_assert (loc == NULL);
      break;

    case DF_REF_REGULAR:
      this_ref = (df_ref) pool_alloc (problem_data->ref_regular_pool);
      this_ref->regular_ref.loc = loc;
      gcc_checking_assert (loc);
      break;
    }

  DF_REF_CLASS (this_ref) = cl;
  DF_REF_ID (this_ref) = -1;
  DF_REF_REG (this_ref) = reg;
  DF_REF_REGNO (this_ref) = regno;
  DF_REF_TYPE (this_ref) = ref_type;
  DF_REF_INSN_INFO (this_ref) = info;
  DF_REF_CHAIN (this_ref) = NULL;
  DF_REF_FLAGS (this_ref) = ref_flags;
  DF_REF_NEXT_REG (this_ref) = NULL;
  DF_REF_PREV_REG (this_ref) = NULL;
  DF_REF_ORDER (this_ref) = df->ref_order++;

  /* We need to clear this bit because fwprop, and in the future
     possibly other optimizations sometimes create new refs using old
     refs as the model.  */
  DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);

  /* See if this ref needs to have DF_HARD_REG_LIVE bit set.  */
  if (regno < FIRST_PSEUDO_REGISTER
      && !DF_REF_IS_ARTIFICIAL (this_ref)
      && !DEBUG_INSN_P (DF_REF_INSN (this_ref)))
    {
      if (DF_REF_REG_DEF_P (this_ref))
	{
	  if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
	    DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
	}
      /* Eliminable frame/arg pointer uses do not keep the hard reg
	 live.  */
      else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
		 && (regno == FRAME_POINTER_REGNUM
		     || regno == ARG_POINTER_REGNUM)))
	DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
    }

  if (collection_rec)
    {
      if (DF_REF_REG_DEF_P (this_ref))
	VEC_safe_push (df_ref, stack, collection_rec->def_vec, this_ref);
      else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
	VEC_safe_push (df_ref, stack, collection_rec->eq_use_vec, this_ref);
      else
	VEC_safe_push (df_ref, stack, collection_rec->use_vec, this_ref);
    }
  else
    df_install_ref_incremental (this_ref);

  return this_ref;
}
/* Create new references of type DF_REF_TYPE for each part of register REG
   at address LOC within INSN of BB.  A multi-register hard reg gets one
   ref per covered register plus a df_mw_hardreg record; a pseudo gets a
   single ref.  REG must be a REG or a SUBREG of a REG.  */

static void
df_ref_record (enum df_ref_class cl,
	       struct df_collection_rec *collection_rec,
	       rtx reg, rtx *loc,
	       basic_block bb, struct df_insn_info *insn_info,
	       enum df_ref_type ref_type,
	       int ref_flags)
{
  unsigned int regno;

  gcc_checking_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);

  regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      struct df_mw_hardreg *hardreg = NULL;
      struct df_scan_problem_data *problem_data
	= (struct df_scan_problem_data *) df_scan->problem_data;
      unsigned int i;
      unsigned int endregno;
      df_ref ref;

      /* Narrow [regno, endregno) to the registers the SUBREG actually
	 touches.  */
      if (GET_CODE (reg) == SUBREG)
	{
	  regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
					SUBREG_BYTE (reg), GET_MODE (reg));
	  endregno = regno + subreg_nregs (reg);
	}
      else
	endregno = END_HARD_REGNO (reg);

      /* If this is a multiword hardreg, we create some extra
	 datastructures that will enable us to easily build REG_DEAD
	 and REG_UNUSED notes.  */
      if (collection_rec
	  && (endregno != regno + 1) && insn_info)
	{
	  /* Sets to a subreg of a multiword register are partial.
	     Sets to a non-subreg of a multiword register are not.  */
	  if (GET_CODE (reg) == SUBREG)
	    ref_flags |= DF_REF_PARTIAL;
	  ref_flags |= DF_REF_MW_HARDREG;

	  hardreg = (struct df_mw_hardreg *) pool_alloc (problem_data->mw_reg_pool);
	  hardreg->type = ref_type;
	  hardreg->flags = ref_flags;
	  hardreg->mw_reg = reg;
	  hardreg->start_regno = regno;
	  hardreg->end_regno = endregno - 1;
	  hardreg->mw_order = df->ref_order++;
	  VEC_safe_push (df_mw_hardreg_ptr, stack, collection_rec->mw_vec,
			 hardreg);
	}

      /* One ref per hard register covered.  */
      for (i = regno; i < endregno; i++)
	{
	  ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
					 bb, insn_info, ref_type, ref_flags);

	  gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
	}
    }
  else
    {
      df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
			       ref_type, ref_flags);
    }
}
2900 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
2901 covered by the outer mode is smaller than that covered by the inner mode,
2902 is a read-modify-write operation.
2903 This function returns true iff the SUBREG X is such a SUBREG. */
2905 bool
2906 df_read_modify_subreg_p (rtx x)
2908 unsigned int isize, osize;
2909 if (GET_CODE (x) != SUBREG)
2910 return false;
2911 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2912 osize = GET_MODE_SIZE (GET_MODE (x));
2913 return isize > osize
2914 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
/* Process all the registers defined in the rtx pointed by LOC.
   Autoincrement/decrement definitions will be picked up by df_uses_record.
   Any change here has to be matched in df_find_hard_reg_defs_1.  */

static void
df_def_record_1 (struct df_collection_rec *collection_rec,
		 rtx *loc, basic_block bb, struct df_insn_info *insn_info,
		 int flags)
{
  rtx dst = *loc;

  /* It is legal to have a set destination be a parallel.  */
  if (GET_CODE (dst) == PARALLEL)
    {
      int i;
      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
	{
	  rtx temp = XVECEXP (dst, 0, i);
	  gcc_assert (GET_CODE (temp) == EXPR_LIST);
	  df_def_record_1 (collection_rec, &XEXP (temp, 0),
			   bb, insn_info, flags);
	}
      return;
    }

  /* Strip STRICT_LOW_PART / ZERO_EXTRACT wrappers, accumulating the
     flags that mark the def as partial.  */
  if (GET_CODE (dst) == STRICT_LOW_PART)
    {
      flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;

      loc = &XEXP (dst, 0);
      dst = *loc;
    }

  if (GET_CODE (dst) == ZERO_EXTRACT)
    {
      flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;

      loc = &XEXP (dst, 0);
      dst = *loc;
    }

  /* At this point if we do not have a reg or a subreg, just return.  */
  if (REG_P (dst))
    {
      df_ref_record (DF_REF_REGULAR, collection_rec,
		     dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);

      /* We want to keep sp alive everywhere - by making all
	 writes to sp also use of sp.  */
      if (REGNO (dst) == STACK_POINTER_REGNUM)
	df_ref_record (DF_REF_BASE, collection_rec,
		       dst, NULL, bb, insn_info, DF_REF_REG_USE, flags);
    }
  else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
    {
      if (df_read_modify_subreg_p (dst))
	flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;

      flags |= DF_REF_SUBREG;

      df_ref_record (DF_REF_REGULAR, collection_rec,
		     dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
    }
}
2984 /* Process all the registers defined in the pattern rtx, X. Any change
2985 here has to be matched in df_find_hard_reg_defs. */
2987 static void
2988 df_defs_record (struct df_collection_rec *collection_rec,
2989 rtx x, basic_block bb, struct df_insn_info *insn_info,
2990 int flags)
2992 RTX_CODE code = GET_CODE (x);
2993 int i;
2995 switch (code)
2997 case SET:
2998 df_def_record_1 (collection_rec, &SET_DEST (x), bb, insn_info, flags);
2999 break;
3001 case CLOBBER:
3002 flags |= DF_REF_MUST_CLOBBER;
3003 df_def_record_1 (collection_rec, &XEXP (x, 0), bb, insn_info, flags);
3004 break;
3006 case COND_EXEC:
3007 df_defs_record (collection_rec, COND_EXEC_CODE (x),
3008 bb, insn_info, DF_REF_CONDITIONAL);
3009 break;
3011 case PARALLEL:
3012 for (i = 0; i < XVECLEN (x, 0); i++)
3013 df_defs_record (collection_rec, XVECEXP (x, 0, i),
3014 bb, insn_info, flags);
3015 break;
3016 default:
3017 /* No DEFs to record in other cases */
3018 break;
3022 /* Set bits in *DEFS for hard registers found in the rtx DST, which is the
3023 destination of a set or clobber. This has to match the logic in
3024 df_defs_record_1. */
3026 static void
3027 df_find_hard_reg_defs_1 (rtx dst, HARD_REG_SET *defs)
3029 /* It is legal to have a set destination be a parallel. */
3030 if (GET_CODE (dst) == PARALLEL)
3032 int i;
3033 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
3035 rtx temp = XVECEXP (dst, 0, i);
3036 gcc_assert (GET_CODE (temp) == EXPR_LIST);
3037 df_find_hard_reg_defs_1 (XEXP (temp, 0), defs);
3039 return;
3042 if (GET_CODE (dst) == STRICT_LOW_PART)
3043 dst = XEXP (dst, 0);
3045 if (GET_CODE (dst) == ZERO_EXTRACT)
3046 dst = XEXP (dst, 0);
3048 /* At this point if we do not have a reg or a subreg, just return. */
3049 if (REG_P (dst) && HARD_REGISTER_P (dst))
3050 SET_HARD_REG_BIT (*defs, REGNO (dst));
3051 else if (GET_CODE (dst) == SUBREG
3052 && REG_P (SUBREG_REG (dst)) && HARD_REGISTER_P (dst))
3053 SET_HARD_REG_BIT (*defs, REGNO (SUBREG_REG (dst)));
3056 /* Set bits in *DEFS for hard registers defined in the pattern X. This
3057 has to match the logic in df_defs_record. */
3059 static void
3060 df_find_hard_reg_defs (rtx x, HARD_REG_SET *defs)
3062 RTX_CODE code = GET_CODE (x);
3063 int i;
3065 switch (code)
3067 case SET:
3068 df_find_hard_reg_defs_1 (SET_DEST (x), defs);
3069 break;
3071 case CLOBBER:
3072 df_find_hard_reg_defs_1 (XEXP (x, 0), defs);
3073 break;
3075 case COND_EXEC:
3076 df_find_hard_reg_defs (COND_EXEC_CODE (x), defs);
3077 break;
3079 case PARALLEL:
3080 for (i = 0; i < XVECLEN (x, 0); i++)
3081 df_find_hard_reg_defs (XVECEXP (x, 0, i), defs);
3082 break;
3083 default:
3084 /* No DEFs to record in other cases */
3085 break;
/* Process all the registers used in the rtx at address LOC, recording
   a ref of type REF_TYPE with FLAGS for each one.  Iterates with a
   goto-retry loop on operand 0 to limit recursion depth.  */

static void
df_uses_record (struct df_collection_rec *collection_rec,
		rtx *loc, enum df_ref_type ref_type,
		basic_block bb, struct df_insn_info *insn_info,
		int flags)
{
  RTX_CODE code;
  rtx x;

 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    /* Constants and label-like rtx contain no register uses.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (MEM_P (XEXP (x, 0)))
	df_uses_record (collection_rec,
			&XEXP (XEXP (x, 0), 0),
			DF_REF_REG_MEM_STORE,
			bb, insn_info,
			flags);

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case MEM:
      /* Only the IN_NOTE bit survives into the address scan.  */
      df_uses_record (collection_rec,
		      &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
		      bb, insn_info, flags & DF_REF_IN_NOTE);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */
      flags |= DF_REF_PARTIAL;
      /* In case the SUBREG is not of a REG, do not optimize.  */
      if (!REG_P (SUBREG_REG (x)))
	{
	  loc = &SUBREG_REG (x);
	  df_uses_record (collection_rec, loc, ref_type, bb, insn_info, flags);
	  return;
	}
      /* ... Fall through ... */

    case REG:
      df_ref_record (DF_REF_REGULAR, collection_rec,
		     x, loc, bb, insn_info,
		     ref_type, flags);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      {
	/* Position and width operands are uses in their own right.  */
	df_uses_record (collection_rec,
			&XEXP (x, 1), ref_type, bb, insn_info, flags);
	df_uses_record (collection_rec,
			&XEXP (x, 2), ref_type, bb, insn_info, flags);

	/* If the parameters to the zero or sign extract are
	   constants, strip them off and recurse, otherwise there is
	   no information that we can gain from this operation.  */
	if (code == ZERO_EXTRACT)
	  flags |= DF_REF_ZERO_EXTRACT;
	else
	  flags |= DF_REF_SIGN_EXTRACT;

	df_uses_record (collection_rec,
			&XEXP (x, 0), ref_type, bb, insn_info, flags);
	return;
      }
      break;

    case SET:
      {
	rtx dst = SET_DEST (x);
	gcc_assert (!(flags & DF_REF_IN_NOTE));
	df_uses_record (collection_rec,
			&SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags);

	/* The destination may also contain uses, depending on its
	   form.  */
	switch (GET_CODE (dst))
	  {
	  case SUBREG:
	    if (df_read_modify_subreg_p (dst))
	      {
		df_uses_record (collection_rec, &SUBREG_REG (dst),
				DF_REF_REG_USE, bb, insn_info,
				flags | DF_REF_READ_WRITE | DF_REF_SUBREG);
		break;
	      }
	    /* Fall through.  */
	  case REG:
	  case PARALLEL:
	  case SCRATCH:
	  case PC:
	  case CC0:
	    break;
	  case MEM:
	    df_uses_record (collection_rec, &XEXP (dst, 0),
			    DF_REF_REG_MEM_STORE, bb, insn_info, flags);
	    break;
	  case STRICT_LOW_PART:
	    {
	      rtx *temp = &XEXP (dst, 0);
	      /* A strict_low_part uses the whole REG and not just the
		 SUBREG.  */
	      dst = XEXP (dst, 0);
	      df_uses_record (collection_rec,
			      (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
			      DF_REF_REG_USE, bb, insn_info,
			      DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART);
	    }
	    break;
	  case ZERO_EXTRACT:
	    {
	      df_uses_record (collection_rec, &XEXP (dst, 1),
			      DF_REF_REG_USE, bb, insn_info, flags);
	      df_uses_record (collection_rec, &XEXP (dst, 2),
			      DF_REF_REG_USE, bb, insn_info, flags);
	      if (GET_CODE (XEXP (dst,0)) == MEM)
		df_uses_record (collection_rec, &XEXP (dst, 0),
				DF_REF_REG_USE, bb, insn_info,
				flags);
	      else
		df_uses_record (collection_rec, &XEXP (dst, 0),
				DF_REF_REG_USE, bb, insn_info,
				DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT);
	    }
	    break;
	  default:
	    gcc_unreachable ();
	  }
	return;
      }

    case RETURN:
    case SIMPLE_RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
	/* Traditional and volatile asm instructions must be
	   considered to use and clobber all hard registers, all
	   pseudo-registers and all of memory.  So must TRAP_IF and
	   UNSPEC_VOLATILE operations.

	   Consider for instance a volatile asm that changes the fpu
	   rounding mode.  An insn should not be moved across this
	   even if it only uses pseudo-regs because it might give an
	   incorrectly rounded result.

	   However, flow.c's liveness computation did *not* do this,
	   giving the reasoning as " ?!? Unfortunately, marking all
	   hard registers as live causes massive problems for the
	   register allocator and marking all pseudos as live creates
	   mountains of uninitialized variable warnings."

	   In order to maintain the status quo with regard to liveness
	   and uses, we do what flow.c did and just mark any regs we
	   can find in ASM_OPERANDS as used.  In global asm insns are
	   scanned and regs_asm_clobbered is filled out.

	   For all ASM_OPERANDS, we must traverse the vector of input
	   operands.  We can not just fall through here since then we
	   would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
	   which do not indicate traditional asms unlike their normal
	   usage.  */
	if (code == ASM_OPERANDS)
	  {
	    int j;

	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
	      df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
			      DF_REF_REG_USE, bb, insn_info, flags);
	    return;
	  }
	break;
      }

    case VAR_LOCATION:
      df_uses_record (collection_rec,
		      &PAT_VAR_LOCATION_LOC (x),
		      DF_REF_REG_USE, bb, insn_info, flags);
      return;

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      gcc_assert (!DEBUG_INSN_P (insn_info->insn));
      /* Catch the def of the register being modified.  */
      df_ref_record (DF_REF_REGULAR, collection_rec, XEXP (x, 0), &XEXP (x, 0),
		     bb, insn_info,
		     DF_REF_REG_DEF,
		     flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);

      /* ... Fall through to handle uses ... */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		loc = &XEXP (x, 0);
		goto retry;
	      }
	    df_uses_record (collection_rec, &XEXP (x, i), ref_type,
			    bb, insn_info, flags);
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      df_uses_record (collection_rec,
			      &XVECEXP (x, i, j), ref_type,
			      bb, insn_info, flags);
	  }
      }
  }

  return;
}
3345 /* For all DF_REF_CONDITIONAL defs, add a corresponding uses. */
3347 static void
3348 df_get_conditional_uses (struct df_collection_rec *collection_rec)
3350 unsigned int ix;
3351 df_ref ref;
3353 FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, ix, ref)
3355 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3357 df_ref use;
3359 use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
3360 DF_REF_LOC (ref), DF_REF_BB (ref),
3361 DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
3362 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
3363 DF_REF_REGNO (use) = DF_REF_REGNO (ref);
/* Get call's extra defs and uses (track caller-saved registers).
   Hard regs already defined by the call pattern itself (collected in
   DEFS_GENERATED) are not clobbered again here.  */

static void
df_get_call_refs (struct df_collection_rec *collection_rec,
		  basic_block bb,
		  struct df_insn_info *insn_info,
		  int flags)
{
  rtx note;
  bool is_sibling_call;
  unsigned int i;
  HARD_REG_SET defs_generated;

  CLEAR_HARD_REG_SET (defs_generated);
  df_find_hard_reg_defs (PATTERN (insn_info->insn), &defs_generated);
  is_sibling_call = SIBLING_CALL_P (insn_info->insn);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (i == STACK_POINTER_REGNUM)
	/* The stack ptr is used (honorarily) by a CALL insn.  */
	df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
		       NULL, bb, insn_info, DF_REF_REG_USE,
		       DF_REF_CALL_STACK_USAGE | flags);
      else if (global_regs[i])
	{
	  /* Calls to const functions cannot access any global registers and
	     calls to pure functions cannot set them.  All other calls may
	     reference any of the global registers, so they are recorded as
	     used.  */
	  if (!RTL_CONST_CALL_P (insn_info->insn))
	    {
	      df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
			     NULL, bb, insn_info, DF_REF_REG_USE, flags);
	      if (!RTL_PURE_CALL_P (insn_info->insn))
		df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
			       NULL, bb, insn_info, DF_REF_REG_DEF, flags);
	    }
	}
      else if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i)
	       /* no clobbers for regs that are the result of the call */
	       && !TEST_HARD_REG_BIT (defs_generated, i)
	       && (!is_sibling_call
		   || !bitmap_bit_p (df->exit_block_uses, i)
		   || refers_to_regno_p (i, i+1,
					 crtl->return_rtx, NULL)))
	df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
		       NULL, bb, insn_info, DF_REF_REG_DEF,
		       DF_REF_MAY_CLOBBER | flags);
    }

  /* Record the registers used to pass arguments, and explicitly
     noted as clobbered.  */
  for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
       note = XEXP (note, 1))
    {
      if (GET_CODE (XEXP (note, 0)) == USE)
	df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
			DF_REF_REG_USE, bb, insn_info, flags);
      else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
	{
	  if (REG_P (XEXP (XEXP (note, 0), 0)))
	    {
	      unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
	      /* Skip clobbers of regs the pattern already defines.  */
	      if (!TEST_HARD_REG_BIT (defs_generated, regno))
		df_defs_record (collection_rec, XEXP (note, 0), bb,
				insn_info, flags);
	    }
	  else
	    df_uses_record (collection_rec, &XEXP (note, 0),
			    DF_REF_REG_USE, bb, insn_info, flags);
	}
    }

  return;
}
/* Collect all refs in the INSN.  This function is free of any
   side-effect - it will create and return a lists of df_ref's in the
   COLLECTION_REC without putting those refs into existing ref chains
   and reg chains.  */

static void
df_insn_refs_collect (struct df_collection_rec *collection_rec,
		      basic_block bb, struct df_insn_info *insn_info)
{
  rtx note;
  bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);

  /* Clear out the collection record.  */
  VEC_truncate (df_ref, collection_rec->def_vec, 0);
  VEC_truncate (df_ref, collection_rec->use_vec, 0);
  VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
  VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);

  /* Process REG_EQUIV/REG_EQUAL notes.  */
  for (note = REG_NOTES (insn_info->insn); note;
       note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
	{
	case REG_EQUIV:
	case REG_EQUAL:
	  /* Uses inside equivalence notes go into the eq_use vector,
	     marked DF_REF_IN_NOTE.  */
	  df_uses_record (collection_rec,
			  &XEXP (note, 0), DF_REF_REG_USE,
			  bb, insn_info, DF_REF_IN_NOTE);
	  break;
	case REG_NON_LOCAL_GOTO:
	  /* The frame ptr is used by a non-local goto.  */
	  df_ref_record (DF_REF_BASE, collection_rec,
			 regno_reg_rtx[FRAME_POINTER_REGNUM],
			 NULL, bb, insn_info,
			 DF_REF_REG_USE, 0);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
	  df_ref_record (DF_REF_BASE, collection_rec,
			 regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
			 NULL, bb, insn_info,
			 DF_REF_REG_USE, 0);
#endif
	  break;
	default:
	  break;
	}
    }

  /* For CALL_INSNs, first record DF_REF_BASE register defs, as well as
     uses from CALL_INSN_FUNCTION_USAGE.  */
  if (CALL_P (insn_info->insn))
    df_get_call_refs (collection_rec, bb, insn_info,
		      (is_cond_exec) ? DF_REF_CONDITIONAL : 0);

  /* Record other defs.  These should be mostly for DF_REF_REGULAR, so
     that a qsort on the defs is unnecessary in most cases.  */
  df_defs_record (collection_rec,
		  PATTERN (insn_info->insn), bb, insn_info, 0);

  /* Record the register uses.  */
  df_uses_record (collection_rec,
		  &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0);

  /* DF_REF_CONDITIONAL needs corresponding USES.  */
  if (is_cond_exec)
    df_get_conditional_uses (collection_rec);

  /* Sort and dedup all four vectors before handing them back.  */
  df_canonize_collection_rec (collection_rec);
}
3516 /* Recompute the luids for the insns in BB. */
3518 void
3519 df_recompute_luids (basic_block bb)
3521 rtx insn;
3522 int luid = 0;
3524 df_grow_insn_info ();
3526 /* Scan the block an insn at a time from beginning to end. */
3527 FOR_BB_INSNS (bb, insn)
3529 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3530 /* Inserting labels does not always trigger the incremental
3531 rescanning. */
3532 if (!insn_info)
3534 gcc_assert (!INSN_P (insn));
3535 insn_info = df_insn_create_insn_record (insn);
/* Only real insns consume a luid; notes/labels share the luid that
   the next real insn will receive.  */
3538 DF_INSN_INFO_LUID (insn_info) = luid;
3539 if (INSN_P (insn))
3540 luid++;
3545 /* Collect all artificial refs at the block level for BB and add them
3546 to COLLECTION_REC. */
3548 static void
3549 df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
/* Start from an empty collection record.  */
3551 VEC_truncate (df_ref, collection_rec->def_vec, 0);
3552 VEC_truncate (df_ref, collection_rec->use_vec, 0);
3553 VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
3554 VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);
/* The entry and exit blocks are handled entirely by their dedicated
   bitmap-driven collectors.  */
3556 if (bb->index == ENTRY_BLOCK)
3558 df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
3559 return;
3561 else if (bb->index == EXIT_BLOCK)
3563 df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
3564 return;
3567 #ifdef EH_RETURN_DATA_REGNO
3568 if (bb_has_eh_pred (bb))
3570 unsigned int i;
3571 /* Mark the registers that will contain data for the handler. */
3572 for (i = 0; ; ++i)
3574 unsigned regno = EH_RETURN_DATA_REGNO (i);
3575 if (regno == INVALID_REGNUM)
3576 break;
/* DF_REF_AT_TOP: these defs logically occur before the first insn
   of the block.  */
3577 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3578 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3581 #endif
3583 /* Add the hard_frame_pointer if this block is the target of a
3584 non-local goto. */
3585 if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
3586 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
3587 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3589 /* Add the artificial uses. */
3590 if (bb->index >= NUM_FIXED_BLOCKS)
3592 bitmap_iterator bi;
3593 unsigned int regno;
/* EH blocks get the (larger) eh artificial-use set; all other
   regular blocks get the regular set.  */
3594 bitmap au = bb_has_eh_pred (bb)
3595 ? &df->eh_block_artificial_uses
3596 : &df->regular_block_artificial_uses;
3598 EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
3600 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3601 bb, NULL, DF_REF_REG_USE, 0);
3605 df_canonize_collection_rec (collection_rec);
3609 /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS. */
3611 void
3612 df_bb_refs_record (int bb_index, bool scan_insns)
3614 basic_block bb = BASIC_BLOCK (bb_index);
3615 rtx insn;
3616 int luid = 0;
3617 struct df_collection_rec collection_rec;
3619 if (!df)
3620 return;
3622 df_grow_bb_info (df_scan);
/* Stack-allocated scratch vectors; freed at the end of this function.  */
3623 collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
3624 collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
3625 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
3626 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
3628 if (scan_insns)
3629 /* Scan the block an insn at a time from beginning to end. */
3630 FOR_BB_INSNS (bb, insn)
3632 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
/* The insns of this block must not have been scanned already.  */
3633 gcc_assert (!insn_info);
3635 insn_info = df_insn_create_insn_record (insn);
3636 if (INSN_P (insn))
3638 /* Record refs within INSN. */
3639 DF_INSN_INFO_LUID (insn_info) = luid++;
3640 df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
3641 df_refs_add_to_chains (&collection_rec, bb, insn);
/* Notes/labels share the luid of the following real insn.  */
3643 DF_INSN_INFO_LUID (insn_info) = luid;
3646 /* Other block level artificial refs */
3647 df_bb_refs_collect (&collection_rec, bb);
3648 df_refs_add_to_chains (&collection_rec, bb, NULL);
3650 VEC_free (df_ref, stack, collection_rec.def_vec);
3651 VEC_free (df_ref, stack, collection_rec.use_vec);
3652 VEC_free (df_ref, stack, collection_rec.eq_use_vec);
3653 VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
3655 /* Now that the block has been processed, set the block as dirty so
3656 LR and LIVE will get it processed. */
3657 df_set_bb_dirty (bb);
3661 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3662 block. */
3664 static void
3665 df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
3667 #ifdef EH_USES
3668 unsigned int i;
3669 #endif
3671 bitmap_clear (regular_block_artificial_uses);
3673 if (reload_completed)
/* After reload only the hard frame pointer (when in use) needs to be
   forced live in every block.  */
3675 if (frame_pointer_needed)
3676 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3678 else
3679 /* Before reload, there are a few registers that must be forced
3680 live everywhere -- which might not already be the case for
3681 blocks within infinite loops. */
3683 unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
3685 /* Any reference to any pseudo before reload is a potential
3686 reference of the frame pointer. */
3687 bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
3689 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3690 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3691 #endif
3693 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3694 /* Pseudos with argument area equivalences may require
3695 reloading via the argument pointer. */
3696 if (fixed_regs[ARG_POINTER_REGNUM])
3697 bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
3698 #endif
3700 /* Any constant, or pseudo with constant equivalences, may
3701 require reloading from memory using the pic register. */
3702 if (picreg != INVALID_REGNUM
3703 && fixed_regs[picreg])
3704 bitmap_set_bit (regular_block_artificial_uses, picreg);
3706 /* The all-important stack pointer must always be live. */
3707 bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
3709 #ifdef EH_USES
3710 /* EH_USES registers are used:
3711 1) at all insns that might throw (calls or with -fnon-call-exceptions
3712 trapping insns)
3713 2) in all EH edges
3714 3) to support backtraces and/or debugging, anywhere between their
3715 initialization and where the saved registers are restored
3716 from them, including the cases where we don't reach the epilogue
3717 (noreturn call or infinite loop). */
3718 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3719 if (EH_USES (i))
3720 bitmap_set_bit (regular_block_artificial_uses, i);
3721 #endif
3725 /* Get the artificial use set for an eh block. */
3727 static void
3728 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3730 bitmap_clear (eh_block_artificial_uses);
3732 /* The following code (down through the arg_pointer setting) APPEARS
3733 to be necessary because there is nothing that actually
3734 describes what the exception handling code may actually need
3735 to keep alive. */
/* NOTE(review): only the post-reload case adds anything; before
   reload this set is left empty (callers OR in the regular set).  */
3736 if (reload_completed)
3738 if (frame_pointer_needed)
3740 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3741 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3742 bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3743 #endif
3745 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3746 if (fixed_regs[ARG_POINTER_REGNUM])
3747 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3748 #endif
3754 /*----------------------------------------------------------------------------
3755 Specialized hard register scanning functions.
3756 ----------------------------------------------------------------------------*/
3759 /* Mark a register in SET. Hard registers in large modes get all
3760 of their component registers set as well. */
3762 static void
3763 df_mark_reg (rtx reg, void *vset)
3765 bitmap set = (bitmap) vset;
3766 int regno = REGNO (reg);
3768 gcc_assert (GET_MODE (reg) != BLKmode);
3770 if (regno < FIRST_PSEUDO_REGISTER)
3772 int n = hard_regno_nregs[regno][GET_MODE (reg)];
3773 bitmap_set_range (set, regno, n);
3775 else
3776 bitmap_set_bit (set, regno);
3780 /* Set the bit for regs that are considered being defined at the entry. */
3782 static void
3783 df_get_entry_block_def_set (bitmap entry_block_defs)
3785 rtx r;
3786 int i;
3788 bitmap_clear (entry_block_defs);
/* Incoming argument registers are defined on entry.  */
3790 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3791 if (FUNCTION_ARG_REGNO_P (i))
3792 bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
3794 /* The always important stack pointer. */
3795 bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
3797 /* Once the prologue has been generated, all of these registers
3798 should just show up in the first regular block. */
/* NOTE(review): the condition tests epilogue_completed although the
   comment speaks of the prologue -- confirm intent against history.  */
3799 if (HAVE_prologue && epilogue_completed)
3801 /* Defs for the callee saved registers are inserted so that the
3802 pushes have some defining location. */
3803 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3804 if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
3805 bitmap_set_bit (entry_block_defs, i);
/* The register holding the address of a returned aggregate, if any.  */
3808 r = targetm.calls.struct_value_rtx (current_function_decl, true);
3809 if (r && REG_P (r))
3810 bitmap_set_bit (entry_block_defs, REGNO (r));
3812 /* If the function has an incoming STATIC_CHAIN, it has to show up
3813 in the entry def set. */
3814 r = targetm.calls.static_chain (current_function_decl, true);
3815 if (r && REG_P (r))
3816 bitmap_set_bit (entry_block_defs, REGNO (r));
3818 if ((!reload_completed) || frame_pointer_needed)
3820 /* Any reference to any pseudo before reload is a potential
3821 reference of the frame pointer. */
3822 bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
3823 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3824 /* If they are different, also mark the hard frame pointer as live. */
3825 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3826 bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
3827 #endif
3830 /* These registers are live everywhere. */
3831 if (!reload_completed)
3833 #ifdef PIC_OFFSET_TABLE_REGNUM
3834 unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
3835 #endif
3837 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3838 /* Pseudos with argument area equivalences may require
3839 reloading via the argument pointer. */
3840 if (fixed_regs[ARG_POINTER_REGNUM])
3841 bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
3842 #endif
3844 #ifdef PIC_OFFSET_TABLE_REGNUM
3845 /* Any constant, or pseudo with constant equivalences, may
3846 require reloading from memory using the pic register. */
3847 if (picreg != INVALID_REGNUM
3848 && fixed_regs[picreg])
3849 bitmap_set_bit (entry_block_defs, picreg);
3850 #endif
3853 #ifdef INCOMING_RETURN_ADDR_RTX
3854 if (REG_P (INCOMING_RETURN_ADDR_RTX))
3855 bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
3856 #endif
/* Let the target add any extra registers live on entry.  */
3858 targetm.extra_live_on_entry (entry_block_defs);
3862 /* Return the (conservative) set of hard registers that are defined on
3863 entry to the function.
3864 It uses df->entry_block_defs to determine which register
3865 reference to include. */
3867 static void
3868 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3869 bitmap entry_block_defs)
3871 unsigned int i;
3872 bitmap_iterator bi;
3874 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3876 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3877 ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0);
3880 df_canonize_collection_rec (collection_rec);
3884 /* Record the (conservative) set of hard registers that are defined on
3885 entry to the function. */
3887 static void
3888 df_record_entry_block_defs (bitmap entry_block_defs)
3890 struct df_collection_rec collection_rec;
3891 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3892 collection_rec.def_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
3893 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3895 /* Process bb_refs chain */
3896 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
3897 VEC_free (df_ref, stack, collection_rec.def_vec);
3901 /* Update the defs in the entry block. */
3903 void
3904 df_update_entry_block_defs (void)
3906 bitmap_head refs;
3907 bool changed = false;
3909 bitmap_initialize (&refs, &df_bitmap_obstack);
3910 df_get_entry_block_def_set (&refs);
3911 if (df->entry_block_defs)
/* Only rebuild the artificial defs if the set actually changed.  */
3913 if (!bitmap_equal_p (df->entry_block_defs, &refs))
3915 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
3916 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3917 df_ref_chain_delete (bb_info->artificial_defs);
3918 bb_info->artificial_defs = NULL;
3919 changed = true;
3922 else
/* NOTE(review): this arm aborts via gcc_unreachable; the allocation
   below it is dead code, presumably left over from an earlier
   version -- confirm against history before touching.  */
3924 struct df_scan_problem_data *problem_data
3925 = (struct df_scan_problem_data *) df_scan->problem_data;
3926 gcc_unreachable ();
3927 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3928 changed = true;
3931 if (changed)
3933 df_record_entry_block_defs (&refs);
3934 bitmap_copy (df->entry_block_defs, &refs);
3935 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
3937 bitmap_clear (&refs);
3941 /* Set the bit for regs that are considered being used at the exit. */
3943 static void
3944 df_get_exit_block_use_set (bitmap exit_block_uses)
3946 unsigned int i;
3947 unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
3949 bitmap_clear (exit_block_uses);
3951 /* Stack pointer is always live at the exit. */
3952 bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
3954 /* Mark the frame pointer if needed at the end of the function.
3955 If we end up eliminating it, it will be removed from the live
3956 list of each basic block by reload. */
3958 if ((!reload_completed) || frame_pointer_needed)
3960 bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
3961 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3962 /* If they are different, also mark the hard frame pointer as live. */
3963 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3964 bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
3965 #endif
3968 /* Many architectures have a GP register even without flag_pic.
3969 Assume the pic register is not in use, or will be handled by
3970 other means, if it is not fixed. */
3971 if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3972 && picreg != INVALID_REGNUM
3973 && fixed_regs[picreg])
3974 bitmap_set_bit (exit_block_uses, picreg);
3976 /* Mark all global registers, and all registers used by the
3977 epilogue as being live at the end of the function since they
3978 may be referenced by our caller. */
3979 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3980 if (global_regs[i] || EPILOGUE_USES (i))
3981 bitmap_set_bit (exit_block_uses, i);
3983 if (HAVE_epilogue && epilogue_completed)
3985 /* Mark all call-saved registers that we actually used. */
3986 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3987 if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
3988 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3989 bitmap_set_bit (exit_block_uses, i);
3992 #ifdef EH_RETURN_DATA_REGNO
3993 /* Mark the registers that will contain data for the handler. */
3994 if (reload_completed && crtl->calls_eh_return)
3995 for (i = 0; ; ++i)
3997 unsigned regno = EH_RETURN_DATA_REGNO (i);
3998 if (regno == INVALID_REGNUM)
3999 break;
4000 bitmap_set_bit (exit_block_uses, regno);
4002 #endif
/* Before the epilogue exists, the eh-return scratch registers must
   be kept live up to the exit.  */
4004 #ifdef EH_RETURN_STACKADJ_RTX
4005 if ((!HAVE_epilogue || ! epilogue_completed)
4006 && crtl->calls_eh_return)
4008 rtx tmp = EH_RETURN_STACKADJ_RTX;
4009 if (tmp && REG_P (tmp))
4010 df_mark_reg (tmp, exit_block_uses);
4012 #endif
4014 #ifdef EH_RETURN_HANDLER_RTX
4015 if ((!HAVE_epilogue || ! epilogue_completed)
4016 && crtl->calls_eh_return)
4018 rtx tmp = EH_RETURN_HANDLER_RTX;
4019 if (tmp && REG_P (tmp))
4020 df_mark_reg (tmp, exit_block_uses);
4022 #endif
4024 /* Mark function return value. */
4025 diddle_return_value (df_mark_reg, (void*) exit_block_uses);
4029 /* Return the refs of hard registers that are used in the exit block.
4030 It uses df->exit_block_uses to determine register to include. */
4032 static void
4033 df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
4035 unsigned int i;
4036 bitmap_iterator bi;
4038 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
4039 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
4040 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
4042 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4043 /* It is deliberate that this is not put in the exit block uses but
4044 I do not know why. */
4045 if (reload_completed
4046 && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
4047 && bb_has_eh_pred (EXIT_BLOCK_PTR)
4048 && fixed_regs[ARG_POINTER_REGNUM])
4049 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
4050 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
4051 #endif
4053 df_canonize_collection_rec (collection_rec);
4057 /* Record the set of hard registers that are used in the exit block.
4058 It uses df->exit_block_uses to determine which bit to include. */
4060 static void
4061 df_record_exit_block_uses (bitmap exit_block_uses)
4063 struct df_collection_rec collection_rec;
4064 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
4065 collection_rec.use_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
4067 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
4069 /* Process bb_refs chain */
4070 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
4071 VEC_free (df_ref, stack, collection_rec.use_vec);
4075 /* Update the uses in the exit block. */
4077 void
4078 df_update_exit_block_uses (void)
4080 bitmap_head refs;
4081 bool changed = false;
4083 bitmap_initialize (&refs, &df_bitmap_obstack);
4084 df_get_exit_block_use_set (&refs);
4085 if (df->exit_block_uses)
/* Only rebuild the artificial uses if the set actually changed.  */
4087 if (!bitmap_equal_p (df->exit_block_uses, &refs))
4089 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
4090 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
4091 df_ref_chain_delete (bb_info->artificial_uses);
4092 bb_info->artificial_uses = NULL;
4093 changed = true;
4096 else
/* NOTE(review): this arm aborts via gcc_unreachable; the allocation
   below it is dead code (mirrors df_update_entry_block_defs).  */
4098 struct df_scan_problem_data *problem_data
4099 = (struct df_scan_problem_data *) df_scan->problem_data;
4100 gcc_unreachable ();
4101 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
4102 changed = true;
4105 if (changed)
4107 df_record_exit_block_uses (&refs);
4108 bitmap_copy (df->exit_block_uses,& refs);
4109 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
4111 bitmap_clear (&refs);
/* True once df_hard_reg_init has run; the eliminable-register set is
   computed only once per compiler process.  */
4114 static bool initialized = false;
4117 /* Initialize some platform specific structures. */
4119 void
4120 df_hard_reg_init (void)
4122 #ifdef ELIMINABLE_REGS
4123 int i;
4124 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
4125 #endif
/* Idempotent: only the first call does any work.  */
4126 if (initialized)
4127 return;
4129 /* Record which registers will be eliminated. We use this in
4130 mark_used_regs. */
4131 CLEAR_HARD_REG_SET (elim_reg_set);
4133 #ifdef ELIMINABLE_REGS
4134 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
4135 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
4136 #else
4137 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
4138 #endif
4140 initialized = true;
4144 /* Recompute the parts of scanning that are based on regs_ever_live
4145 because something changed in that array. */
4147 void
4148 df_update_entry_exit_and_calls (void)
4150 basic_block bb;
4152 df_update_entry_block_defs ();
4153 df_update_exit_block_uses ();
4155 /* The call insns need to be rescanned because there may be changes
4156 in the set of registers clobbered across the call. */
4157 FOR_EACH_BB (bb)
4159 rtx insn;
4160 FOR_BB_INSNS (bb, insn)
4162 if (INSN_P (insn) && CALL_P (insn))
4163 df_insn_rescan (insn);
4169 /* Return true if hard REG is actually used in the some instruction.
4170 There are a fair number of conditions that affect the setting of
4171 this array. See the comment in df.h for df->hard_regs_live_count
4172 for the conditions that this array is set. */
4174 bool
4175 df_hard_reg_used_p (unsigned int reg)
4177 return df->hard_regs_live_count[reg] != 0;
4181 /* A count of the number of times REG is actually used in the some
4182 instruction. There are a fair number of conditions that affect the
4183 setting of this array. See the comment in df.h for
4184 df->hard_regs_live_count for the conditions that this array is
4185 set. */
4188 unsigned int
4189 df_hard_reg_used_count (unsigned int reg)
4191 return df->hard_regs_live_count[reg];
4195 /* Get the value of regs_ever_live[REGNO]. */
4197 bool
4198 df_regs_ever_live_p (unsigned int regno)
4200 return regs_ever_live[regno];
4204 /* Set regs_ever_live[REGNO] to VALUE. If this cause regs_ever_live
4205 to change, schedule that change for the next update. */
4207 void
4208 df_set_regs_ever_live (unsigned int regno, bool value)
4210 if (regs_ever_live[regno] == value)
4211 return;
4213 regs_ever_live[regno] = value;
4214 if (df)
4215 df->redo_entry_and_exit = true;
4219 /* Compute "regs_ever_live" information from the underlying df
4220 information. Set the vector to all false if RESET. */
4222 void
4223 df_compute_regs_ever_live (bool reset)
4225 unsigned int i;
4226 bool changed = df->redo_entry_and_exit;
4228 if (reset)
4229 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4231 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4232 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
4234 regs_ever_live[i] = true;
4235 changed = true;
4237 if (changed)
4238 df_update_entry_exit_and_calls ();
4239 df->redo_entry_and_exit = false;
4243 /*----------------------------------------------------------------------------
4244 Dataflow ref information verification functions.
4246 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
4247 df_reg_chain_verify_unmarked (refs)
4248 df_refs_verify (VEC(df_ref,stack)*, ref*, bool)
4249 df_mws_verify (mw*, mw*, bool)
4250 df_insn_refs_verify (collection_rec, bb, insn, bool)
4251 df_bb_refs_verify (bb, refs, bool)
4252 df_bb_verify (bb)
4253 df_exit_block_bitmap_verify (bool)
4254 df_entry_block_bitmap_verify (bool)
4255 df_scan_verify ()
4256 ----------------------------------------------------------------------------*/
4259 /* Mark all refs in the reg chain. Verify that all of the registers
4260 are in the correct chain. */
/* Returns the number of refs on the chain; callers compare this
   against the recorded DF_REG_*_COUNT values.  */
4262 static unsigned int
4263 df_reg_chain_mark (df_ref refs, unsigned int regno,
4264 bool is_def, bool is_eq_use)
4266 unsigned int count = 0;
4267 df_ref ref;
4268 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
/* A ref must appear on exactly one reg chain.  */
4270 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4272 /* If there are no def-use or use-def chains, make sure that all
4273 of the chains are clear. */
4274 if (!df_chain)
4275 gcc_assert (!DF_REF_CHAIN (ref));
4277 /* Check to make sure the ref is in the correct chain. */
4278 gcc_assert (DF_REF_REGNO (ref) == regno);
4279 if (is_def)
4280 gcc_assert (DF_REF_REG_DEF_P (ref));
4281 else
4282 gcc_assert (!DF_REF_REG_DEF_P (ref));
4284 if (is_eq_use)
4285 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
4286 else
4287 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
/* The reg chain is doubly linked; check prev/next consistency.  */
4289 if (DF_REF_NEXT_REG (ref))
4290 gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
4291 count++;
4292 DF_REF_REG_MARK (ref);
4294 return count;
4298 /* Verify that all of the registers in the chain are unmarked. */
4300 static void
4301 df_reg_chain_verify_unmarked (df_ref refs)
4303 df_ref ref;
4304 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4305 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4309 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4311 static bool
4312 df_refs_verify (VEC(df_ref,stack) *new_rec, df_ref *old_rec,
4313 bool abort_if_fail)
4315 unsigned int ix;
4316 df_ref new_ref;
/* Walk both sequences in lock step; any mismatch is a failure.  */
4318 FOR_EACH_VEC_ELT (df_ref, new_rec, ix, new_ref)
4320 if (*old_rec == NULL || !df_ref_equal_p (new_ref, *old_rec))
4322 if (abort_if_fail)
4323 gcc_assert (0);
4324 else
4325 return false;
4328 /* Abort if fail is called from the function level verifier. If
4329 that is the context, mark this reg as being seen. */
4330 if (abort_if_fail)
4332 gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
4333 DF_REF_REG_UNMARK (*old_rec);
4336 old_rec++;
/* OLD_REC must have been exhausted too.  */
4339 if (abort_if_fail)
4340 gcc_assert (*old_rec == NULL);
4341 else
4342 return *old_rec == NULL;
4343 return false;
4347 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4349 static bool
4350 df_mws_verify (VEC(df_mw_hardreg_ptr,stack) *new_rec,
4351 struct df_mw_hardreg **old_rec,
4352 bool abort_if_fail)
4354 unsigned int ix;
4355 struct df_mw_hardreg *new_reg;
4357 FOR_EACH_VEC_ELT (df_mw_hardreg_ptr, new_rec, ix, new_reg)
4359 if (*old_rec == NULL || !df_mw_equal_p (new_reg, *old_rec))
4361 if (abort_if_fail)
4362 gcc_assert (0);
4363 else
4364 return false;
4366 old_rec++;
4369 if (abort_if_fail)
4370 gcc_assert (*old_rec == NULL);
4371 else
4372 return *old_rec == NULL;
/* NOTE(review): reached only when ABORT_IF_FAIL and the assert above
   passed, so a successful abort-mode call returns false; callers in
   that mode ignore the return value.  */
4373 return false;
4377 /* Return true if the existing insn refs information is complete and
4378 correct. Otherwise (i.e. if there's any missing or extra refs),
4379 return the correct df_ref chain in REFS_RETURN.
4381 If ABORT_IF_FAIL, leave the refs that are verified (already in the
4382 ref chain) as DF_REF_MARKED(). If it's false, then it's a per-insn
4383 verification mode instead of the whole function, so unmark
4384 everything.
4386 If ABORT_IF_FAIL is set, this function never returns false. */
4388 static bool
4389 df_insn_refs_verify (struct df_collection_rec *collection_rec,
4390 basic_block bb,
4391 rtx insn,
4392 bool abort_if_fail)
4394 bool ret1, ret2, ret3, ret4;
4395 unsigned int uid = INSN_UID (insn);
4396 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
/* Recollect refs from scratch to compare against the stored ones.  */
4398 df_insn_refs_collect (collection_rec, bb, insn_info);
4400 if (!DF_INSN_UID_DEFS (uid))
4402 /* The insn_rec was created but it was never filled out. */
4403 if (abort_if_fail)
4404 gcc_assert (0);
4405 else
4406 return false;
4409 /* Unfortunately we cannot opt out early if one of these is not
4410 right because the marks will not get cleared. */
4411 ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
4412 abort_if_fail);
4413 ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
4414 abort_if_fail);
4415 ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
4416 abort_if_fail);
4417 ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
4418 abort_if_fail);
4419 return (ret1 && ret2 && ret3 && ret4);
4423 /* Return true if all refs in the basic block are correct and complete.
4424 Due to df_ref_chain_verify, it will cause all refs
4425 that are verified to have DF_REF_MARK bit set. */
4427 static bool
4428 df_bb_verify (basic_block bb)
4430 rtx insn;
4431 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
4432 struct df_collection_rec collection_rec;
4434 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
4435 collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
4436 collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
4437 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
4438 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
4440 gcc_assert (bb_info);
4442 /* Scan the block, one insn at a time, from beginning to end. */
4443 FOR_BB_INSNS_REVERSE (bb, insn)
4445 if (!INSN_P (insn))
4446 continue;
/* abort_if_fail is true: any mismatch aborts inside the call.  */
4447 df_insn_refs_verify (&collection_rec, bb, insn, true);
4448 df_free_collection_rec (&collection_rec);
4451 /* Do the artificial defs and uses. */
4452 df_bb_refs_collect (&collection_rec, bb);
4453 df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
4454 df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
4455 df_free_collection_rec (&collection_rec);
4457 return true;
4461 /* Returns true if the entry block has correct and complete df_ref set.
4462 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4464 static bool
4465 df_entry_block_bitmap_verify (bool abort_if_fail)
4467 bitmap_head entry_block_defs;
4468 bool is_eq;
/* Recompute the entry def set and compare it with the cached one.  */
4470 bitmap_initialize (&entry_block_defs, &df_bitmap_obstack);
4471 df_get_entry_block_def_set (&entry_block_defs);
4473 is_eq = bitmap_equal_p (&entry_block_defs, df->entry_block_defs);
4475 if (!is_eq && abort_if_fail)
/* Dump both sets to stderr before aborting, to ease debugging.  */
4477 fprintf (stderr, "entry_block_defs = ");
4478 df_print_regset (stderr, &entry_block_defs);
4479 fprintf (stderr, "df->entry_block_defs = ");
4480 df_print_regset (stderr, df->entry_block_defs);
4481 gcc_assert (0);
4484 bitmap_clear (&entry_block_defs);
4486 return is_eq;
4490 /* Returns true if the exit block has correct and complete df_ref set.
4491 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4493 static bool
4494 df_exit_block_bitmap_verify (bool abort_if_fail)
4496 bitmap_head exit_block_uses;
4497 bool is_eq;
/* Recompute the exit use set and compare it with the cached one.  */
4499 bitmap_initialize (&exit_block_uses, &df_bitmap_obstack);
4500 df_get_exit_block_use_set (&exit_block_uses);
4502 is_eq = bitmap_equal_p (&exit_block_uses, df->exit_block_uses);
4504 if (!is_eq && abort_if_fail)
/* Dump both sets to stderr before aborting, to ease debugging.  */
4506 fprintf (stderr, "exit_block_uses = ");
4507 df_print_regset (stderr, &exit_block_uses);
4508 fprintf (stderr, "df->exit_block_uses = ");
4509 df_print_regset (stderr, df->exit_block_uses);
4510 gcc_assert (0);
4513 bitmap_clear (&exit_block_uses);
4515 return is_eq;
4519 /* Verify that the df_ref information for all insns in all blocks is
4520 correct and complete; aborts via gcc_assert on any mismatch. */
4522 void
4523 df_scan_verify (void)
4525 unsigned int i;
4526 basic_block bb;
4527 bitmap_head regular_block_artificial_uses;
4528 bitmap_head eh_block_artificial_uses;
4530 if (!df)
4531 return;
4533 /* Verification is a 4 step process. */
4535 /* (1) All of the refs are marked by going through the reg chains. */
4536 for (i = 0; i < DF_REG_SIZE (df); i++)
4538 gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
4539 == DF_REG_DEF_COUNT(i));
4540 gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
4541 == DF_REG_USE_COUNT(i));
4542 gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
4543 == DF_REG_EQ_USE_COUNT(i));
4546 /* (2) There are various bitmaps whose value may change over the
4547 course of the compilation. This step recomputes them to make
4548 sure that they have not slipped out of date. */
4549 bitmap_initialize (&regular_block_artificial_uses, &df_bitmap_obstack);
4550 bitmap_initialize (&eh_block_artificial_uses, &df_bitmap_obstack);
4552 df_get_regular_block_artificial_uses (&regular_block_artificial_uses);
4553 df_get_eh_block_artificial_uses (&eh_block_artificial_uses);
4555 bitmap_ior_into (&eh_block_artificial_uses,
4556 &regular_block_artificial_uses);
4558 /* Check artificial_uses bitmaps didn't change. */
4559 gcc_assert (bitmap_equal_p (&regular_block_artificial_uses,
4560 &df->regular_block_artificial_uses));
4561 gcc_assert (bitmap_equal_p (&eh_block_artificial_uses,
4562 &df->eh_block_artificial_uses));
4564 bitmap_clear (&regular_block_artificial_uses);
4565 bitmap_clear (&eh_block_artificial_uses);
4567 /* Verify entry block and exit block. These only verify the bitmaps,
4568 the refs are verified in df_bb_verify. */
4569 df_entry_block_bitmap_verify (true);
4570 df_exit_block_bitmap_verify (true);
4572 /* (3) All of the insns in all of the blocks are traversed and the
4573 marks are cleared both in the artificial refs attached to the
4574 blocks and the real refs inside the insns. It is a failure to
4575 clear a mark that has not been set as this means that the ref in
4576 the block or insn was not in the reg chain. */
4578 FOR_ALL_BB (bb)
4579 df_bb_verify (bb);
4581 /* (4) See if all reg chains are traversed a second time. This time
4582 a check is made that the marks are clear. A set mark would be a
4583 from a reg that is not in any insn or basic block. */
4585 for (i = 0; i < DF_REG_SIZE (df); i++)
4587 df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
4588 df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
4589 df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));