Merge trunk version 190524 into gupc branch.
[official-gcc.git] / gcc / df-scan.c
blob55492fa81417deddd14a364de6ec9f62f720e158
1 /* Scanning of rtl for dataflow analysis.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
3 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "function.h"
34 #include "regs.h"
35 #include "alloc-pool.h"
36 #include "flags.h"
37 #include "hard-reg-set.h"
38 #include "basic-block.h"
39 #include "sbitmap.h"
40 #include "bitmap.h"
41 #include "dumpfile.h"
42 #include "tree.h"
43 #include "target.h"
44 #include "target-def.h"
45 #include "df.h"
46 #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
48 DEF_VEC_P(df_ref);
49 DEF_VEC_ALLOC_P_STACK(df_ref);
51 #define VEC_df_ref_stack_alloc(alloc) VEC_stack_alloc (df_ref, alloc)
53 typedef struct df_mw_hardreg *df_mw_hardreg_ptr;
55 DEF_VEC_P(df_mw_hardreg_ptr);
56 DEF_VEC_ALLOC_P_STACK(df_mw_hardreg_ptr);
58 #define VEC_df_mw_hardreg_ptr_stack_alloc(alloc) \
59 VEC_stack_alloc (df_mw_hardreg_ptr, alloc)
61 #ifndef HAVE_epilogue
62 #define HAVE_epilogue 0
63 #endif
64 #ifndef HAVE_prologue
65 #define HAVE_prologue 0
66 #endif
67 #ifndef HAVE_sibcall_epilogue
68 #define HAVE_sibcall_epilogue 0
69 #endif
71 #ifndef EPILOGUE_USES
72 #define EPILOGUE_USES(REGNO) 0
73 #endif
75 /* The following two macros free the vecs that hold either the refs or
76 the mw refs.  They are a little tricky because a vec that has 0
77 elements is special and is not to be freed.  */
78 #define df_scan_free_ref_vec(V) \
79 do { \
80 if (V && *V) \
81 free (V); \
82 } while (0)
84 #define df_scan_free_mws_vec(V) \
85 do { \
86 if (V && *V) \
87 free (V); \
88 } while (0)
90 /* The set of hard registers in eliminables[i].from. */
92 static HARD_REG_SET elim_reg_set;
94 /* Initialize ur_in and ur_out as if all hard registers were partially
95 available. */
/* Scratch record used while scanning a single insn or block: the refs
   found are collected here before being installed into the permanent
   df structures.  */
struct df_collection_rec
{
  VEC(df_ref,stack) *def_vec;            /* Defs found.  */
  VEC(df_ref,stack) *use_vec;            /* Uses found.  */
  VEC(df_ref,stack) *eq_use_vec;         /* Uses found inside REG_EQUAL/REG_EQUIV
                                            notes (DF_REF_IN_NOTE refs).  */
  VEC(df_mw_hardreg_ptr,stack) *mw_vec;  /* Multiword hardreg records found.  */
};
105 static df_ref df_null_ref_rec[1];
106 static struct df_mw_hardreg * df_null_mw_rec[1];
108 static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
109 rtx, rtx *,
110 basic_block, struct df_insn_info *,
111 enum df_ref_type, int ref_flags);
112 static void df_def_record_1 (struct df_collection_rec *, rtx *,
113 basic_block, struct df_insn_info *,
114 int ref_flags);
115 static void df_defs_record (struct df_collection_rec *, rtx,
116 basic_block, struct df_insn_info *,
117 int ref_flags);
118 static void df_uses_record (struct df_collection_rec *,
119 rtx *, enum df_ref_type,
120 basic_block, struct df_insn_info *,
121 int ref_flags);
123 static void df_install_ref_incremental (df_ref);
124 static df_ref df_ref_create_structure (enum df_ref_class,
125 struct df_collection_rec *, rtx, rtx *,
126 basic_block, struct df_insn_info *,
127 enum df_ref_type, int ref_flags);
128 static void df_insn_refs_collect (struct df_collection_rec*,
129 basic_block, struct df_insn_info *);
130 static void df_canonize_collection_rec (struct df_collection_rec *);
132 static void df_get_regular_block_artificial_uses (bitmap);
133 static void df_get_eh_block_artificial_uses (bitmap);
135 static void df_record_entry_block_defs (bitmap);
136 static void df_record_exit_block_uses (bitmap);
137 static void df_get_exit_block_use_set (bitmap);
138 static void df_get_entry_block_def_set (bitmap);
139 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
140 static void df_ref_chain_delete_du_chain (df_ref *);
141 static void df_ref_chain_delete (df_ref *);
143 static void df_refs_add_to_chains (struct df_collection_rec *,
144 basic_block, rtx);
146 static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
147 static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
148 static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
149 static void df_install_ref (df_ref, struct df_reg_info *,
150 struct df_ref_info *, bool);
152 static int df_ref_compare (const void *, const void *);
153 static int df_mw_compare (const void *, const void *);
155 /* Indexed by hardware reg number, is true if that register is ever
156 used in the current function.
158 In df-scan.c, this is set up to record the hard regs used
159 explicitly. Reload adds in the hard regs used for holding pseudo
160 regs. Final uses it to generate the code in the function prologue
161 and epilogue to save and restore registers as needed. */
163 static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
165 /*----------------------------------------------------------------------------
166 SCANNING DATAFLOW PROBLEM
168 There are several ways in which scanning looks just like the other
169 dataflow problems. It shares the all the mechanisms for local info
170 as well as basic block info. Where it differs is when and how often
171 it gets run. It also has no need for the iterative solver.
172 ----------------------------------------------------------------------------*/
/* Problem data for the scanning dataflow function.  Holds the alloc
   pools and bitmap obstacks from which all scanning allocations are
   made (see df_scan_alloc for the pool element types).  */
struct df_scan_problem_data
{
  alloc_pool ref_base_pool;        /* Pool of struct df_base_ref.  */
  alloc_pool ref_artificial_pool;  /* Pool of struct df_artificial_ref.  */
  alloc_pool ref_regular_pool;     /* Pool of struct df_regular_ref.  */
  alloc_pool insn_pool;            /* Pool of struct df_insn_info.  */
  alloc_pool reg_pool;             /* Pool of struct df_reg_info.  */
  alloc_pool mw_reg_pool;          /* Pool of struct df_mw_hardreg.  */
  bitmap_obstack reg_bitmaps;      /* Obstack backing register bitmaps.  */
  bitmap_obstack insn_bitmaps;     /* Obstack backing insn bitmaps.  */
};
187 typedef struct df_scan_bb_info *df_scan_bb_info_t;
/* Internal function to shut down the scanning problem.  Frees every
   allocation owned by scanning: the per-insn and per-block ref
   vectors, the organized ref tables, the per-register and per-insn
   info arrays, the block info, the bitmaps, and finally the alloc
   pools and obstacks themselves.  */
static void
df_scan_free_internal (void)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;
  basic_block bb;

  /* The vectors that hold the refs are not pool allocated because
     they come in many sizes.  This makes them impossible to delete
     all at once.  */
  for (i = 0; i < DF_INSN_SIZE(); i++)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_GET(i);
      /* Skip the insns that have no insn_info or have been
         deleted.  */
      if (insn_info)
        {
          df_scan_free_ref_vec (insn_info->defs);
          df_scan_free_ref_vec (insn_info->uses);
          df_scan_free_ref_vec (insn_info->eq_uses);
          df_scan_free_mws_vec (insn_info->mw_hardregs);
        }
    }

  /* Free the artificial def/use vectors of every block, including
     the entry and exit blocks (hence FOR_ALL_BB).  */
  FOR_ALL_BB (bb)
    {
      unsigned int bb_index = bb->index;
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
      if (bb_info)
        {
          df_scan_free_ref_vec (bb_info->artificial_defs);
          df_scan_free_ref_vec (bb_info->artificial_uses);
        }
    }

  /* Release the organized ref tables and reset their info records.  */
  free (df->def_info.refs);
  free (df->def_info.begin);
  free (df->def_info.count);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.refs);
  free (df->use_info.begin);
  free (df->use_info.count);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  /* Release the per-register info arrays.  */
  free (df->def_regs);
  df->def_regs = NULL;
  free (df->use_regs);
  df->use_regs = NULL;
  free (df->eq_use_regs);
  df->eq_use_regs = NULL;
  df->regs_size = 0;
  DF_REG_SIZE(df) = 0;

  /* Release the per-insn info array.  */
  free (df->insns);
  df->insns = NULL;
  DF_INSN_SIZE () = 0;

  free (df_scan->block_info);
  df_scan->block_info = NULL;
  df_scan->block_info_size = 0;

  /* Clear the bitmaps before their backing obstacks are released
     below.  */
  bitmap_clear (&df->hardware_regs_used);
  bitmap_clear (&df->regular_block_artificial_uses);
  bitmap_clear (&df->eh_block_artificial_uses);
  BITMAP_FREE (df->entry_block_defs);
  BITMAP_FREE (df->exit_block_uses);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  free_alloc_pool (problem_data->ref_base_pool);
  free_alloc_pool (problem_data->ref_artificial_pool);
  free_alloc_pool (problem_data->ref_regular_pool);
  free_alloc_pool (problem_data->insn_pool);
  free_alloc_pool (problem_data->reg_pool);
  free_alloc_pool (problem_data->mw_reg_pool);
  bitmap_obstack_release (&problem_data->reg_bitmaps);
  bitmap_obstack_release (&problem_data->insn_bitmaps);
  free (df_scan->problem_data);
}
/* Free basic block info.  VBB_INFO is the scanning problem's per-block
   data for BB.  Also deletes the refs of every insn in BB, since those
   belong to the scanning problem too.  */
static void
df_scan_free_bb_info (basic_block bb, void *vbb_info)
{
  struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
  unsigned int bb_index = bb->index;

  /* See if bb_info is initialized.  */
  if (bb_info->artificial_defs)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            /* Record defs within INSN.  */
            df_insn_delete (bb, INSN_UID (insn));
        }

      /* df_insn_delete may have grown the block info array, which can
         move it; re-fetch the pointer before touching it again.  */
      if (bb_index < df_scan->block_info_size)
        bb_info = df_scan_get_bb_info (bb_index);

      /* Get rid of any artificial uses or defs.  */
      if (bb_info->artificial_defs)
        {
          df_ref_chain_delete_du_chain (bb_info->artificial_defs);
          df_ref_chain_delete_du_chain (bb_info->artificial_uses);
          df_ref_chain_delete (bb_info->artificial_defs);
          df_ref_chain_delete (bb_info->artificial_uses);
          bb_info->artificial_defs = NULL;
          bb_info->artificial_uses = NULL;
        }
    }
}
/* Allocate the problem data for the scanning problem.  This should be
   called when the problem is created or when the entire function is to
   be rescanned.  ALL_BLOCKS is unused; scanning always covers the
   whole function.  */
void
df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
  struct df_scan_problem_data *problem_data;
  /* NOTE(review): insn_num is computed and scaled below but never
     read afterwards — presumably left over from an earlier version.  */
  unsigned int insn_num = get_max_uid () + 1;
  unsigned int block_size = 512;
  basic_block bb;

  /* Given the number of pools, this is really faster than tearing
     everything apart.  */
  if (df_scan->problem_data)
    df_scan_free_internal ();

  problem_data = XNEW (struct df_scan_problem_data);
  df_scan->problem_data = problem_data;
  df_scan->computed = true;

  /* One pool per ref class, plus pools for the insn, reg and
     multiword-hardreg records.  */
  problem_data->ref_base_pool
    = create_alloc_pool ("df_scan ref base",
                         sizeof (struct df_base_ref), block_size);
  problem_data->ref_artificial_pool
    = create_alloc_pool ("df_scan ref artificial",
                         sizeof (struct df_artificial_ref), block_size);
  problem_data->ref_regular_pool
    = create_alloc_pool ("df_scan ref regular",
                         sizeof (struct df_regular_ref), block_size);
  problem_data->insn_pool
    = create_alloc_pool ("df_scan insn",
                         sizeof (struct df_insn_info), block_size);
  problem_data->reg_pool
    = create_alloc_pool ("df_scan reg",
                         sizeof (struct df_reg_info), block_size);
  problem_data->mw_reg_pool
    = create_alloc_pool ("df_scan mw_reg",
                         sizeof (struct df_mw_hardreg), block_size / 16);

  bitmap_obstack_initialize (&problem_data->reg_bitmaps);
  bitmap_obstack_initialize (&problem_data->insn_bitmaps);

  insn_num += insn_num / 4;
  df_grow_reg_info ();

  df_grow_insn_info ();
  df_grow_bb_info (df_scan);

  /* Reset the artificial ref vectors of every block, including entry
     and exit.  */
  FOR_ALL_BB (bb)
    {
      unsigned int bb_index = bb->index;
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  /* All scanning bitmaps live on the freshly initialized obstacks.  */
  bitmap_initialize (&df->hardware_regs_used, &problem_data->reg_bitmaps);
  bitmap_initialize (&df->regular_block_artificial_uses, &problem_data->reg_bitmaps);
  bitmap_initialize (&df->eh_block_artificial_uses, &problem_data->reg_bitmaps);
  df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  bitmap_initialize (&df->insns_to_delete, &problem_data->insn_bitmaps);
  bitmap_initialize (&df->insns_to_rescan, &problem_data->insn_bitmaps);
  bitmap_initialize (&df->insns_to_notes_rescan, &problem_data->insn_bitmaps);
  df_scan->optional_p = false;
}
/* Free all of the data associated with the scan problem, including the
   df_scan instance itself.  */
static void
df_scan_free (void)
{
  /* Tear down the pools, vectors and bitmaps if they were ever
     allocated.  */
  if (df_scan->problem_data)
    df_scan_free_internal ();

  /* Drop the subset of blocks being analyzed, if one was set.  */
  if (df->blocks_to_analyze)
    {
      BITMAP_FREE (df->blocks_to_analyze);
      df->blocks_to_analyze = NULL;
    }

  free (df_scan);
}
/* Dump the preamble for DF_SCAN dump.  Prints the special register
   sets tracked by scanning, per-register ref counts in the form
   rN={Xd,Yu,Ze} (defs, uses, eq-note uses), and a summary line of
   total ref usage over regular and call insns.  */
static void
df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
{
  int i;
  int dcount = 0;   /* Total defs.  */
  int ucount = 0;   /* Total uses.  */
  int ecount = 0;   /* Total eq-note uses.  */
  int icount = 0;   /* Non-call insns.  */
  int ccount = 0;   /* Call insns.  */
  basic_block bb;
  rtx insn;

  fprintf (file, ";; invalidated by call \t");
  df_print_regset (file, regs_invalidated_by_call_regset);
  fprintf (file, ";; hardware regs used \t");
  df_print_regset (file, &df->hardware_regs_used);
  fprintf (file, ";; regular block artificial uses \t");
  df_print_regset (file, &df->regular_block_artificial_uses);
  fprintf (file, ";; eh block artificial uses \t");
  df_print_regset (file, &df->eh_block_artificial_uses);
  fprintf (file, ";; entry block defs \t");
  df_print_regset (file, df->entry_block_defs);
  fprintf (file, ";; exit block uses \t");
  df_print_regset (file, df->exit_block_uses);
  fprintf (file, ";; regs ever live \t");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (df_regs_ever_live_p (i))
      fprintf (file, " %d[%s]", i, reg_names[i]);
  fprintf (file, "\n;; ref usage \t");

  /* Emit per-register counts while accumulating the totals.  */
  for (i = 0; i < (int)df->regs_inited; i++)
    if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
      {
        const char * sep = "";

        fprintf (file, "r%d={", i);
        if (DF_REG_DEF_COUNT (i))
          {
            fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
            sep = ",";
            dcount += DF_REG_DEF_COUNT (i);
          }
        if (DF_REG_USE_COUNT (i))
          {
            fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
            sep = ",";
            ucount += DF_REG_USE_COUNT (i);
          }
        if (DF_REG_EQ_USE_COUNT (i))
          {
            fprintf (file, "%s%de", sep, DF_REG_EQ_USE_COUNT (i));
            ecount += DF_REG_EQ_USE_COUNT (i);
          }
        fprintf (file, "} ");
      }

  /* Count regular vs call insns for the summary line.  */
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        {
          if (CALL_P (insn))
            ccount++;
          else
            icount++;
        }

  fprintf (file, "\n;; total ref usage %d{%dd,%du,%de}"
                 " in %d{%d regular + %d call} insns.\n",
           dcount + ucount + ecount, dcount, ucount, ecount,
           icount + ccount, icount, ccount);
}
/* Dump the bb_info for a given basic block: its artificial def and
   use chains.  */
static void
df_scan_start_block (basic_block bb, FILE *file)
{
  struct df_scan_bb_info *bb_info
    = df_scan_get_bb_info (bb->index);

  if (bb_info)
    {
      fprintf (file, ";; bb %d artificial_defs: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_defs, true, file);
      fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_uses, true, file);
      fprintf (file, "\n");
    }
#if 0
  /* Disabled: per-insn debug dump of the block.  */
  {
    rtx insn;
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        df_insn_debug (insn, false, file);
  }
#endif
}
/* The scanning dataflow problem.  Scanning shares the df problem
   machinery but has no need for the iterative solver, so most hooks
   are NULL.  The entries must stay in struct df_problem field order.  */
static struct df_problem problem_SCAN =
{
  DF_SCAN,                        /* Problem id.  */
  DF_NONE,                        /* Direction.  */
  df_scan_alloc,                  /* Allocate the problem specific data.  */
  NULL,                           /* Reset global information.  */
  df_scan_free_bb_info,           /* Free basic block info.  */
  NULL,                           /* Local compute function.  */
  NULL,                           /* Init the solution specific data.  */
  NULL,                           /* Iterative solver.  */
  NULL,                           /* Confluence operator 0.  */
  NULL,                           /* Confluence operator n.  */
  NULL,                           /* Transfer function.  */
  NULL,                           /* Finalize function.  */
  df_scan_free,                   /* Free all of the problem information.  */
  NULL,                           /* Remove this problem from the stack of dataflow problems.  */
  df_scan_start_dump,             /* Debugging.  */
  df_scan_start_block,            /* Debugging start block.  */
  NULL,                           /* Debugging end block.  */
  NULL,                           /* Incremental solution verify start.  */
  NULL,                           /* Incremental solution verify end.  */
  NULL,                           /* Dependent problem.  */
  sizeof (struct df_scan_bb_info),/* Size of entry of block_info array.  */
  TV_DF_SCAN,                     /* Timing variable.  */
  false                           /* Reset blocks on dropping out of blocks_to_analyze.  */
};
/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  NOTE(review): despite the wording above, this function
   returns void; the scanning instance is reached through the global
   df machinery — confirm against df_add_problem.  */
void
df_scan_add_problem (void)
{
  df_add_problem (&problem_SCAN);
}
533 /*----------------------------------------------------------------------------
534 Storage Allocation Utilities
535 ----------------------------------------------------------------------------*/
/* First, grow the reg_info information.  If the current size is less than
   the number of pseudos, grow to 25% more than the number of
   pseudos.

   Second, assure that all of the slots up to max_reg_num have been
   filled with reg_info structures.  */
void
df_grow_reg_info (void)
{
  unsigned int max_reg = max_reg_num ();
  unsigned int new_size = max_reg;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;

  /* Resize all the parallel per-register arrays together.  */
  if (df->regs_size < new_size)
    {
      new_size += new_size / 4;
      df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
      df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
      df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
                                    new_size);
      df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
      df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
      df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
      df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
      df->regs_size = new_size;
    }

  /* Pool-allocate zeroed reg_info structures for every slot that has
     not been initialized yet.  */
  for (i = df->regs_inited; i < max_reg; i++)
    {
      struct df_reg_info *reg_info;

      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->def_regs[i] = reg_info;
      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->use_regs[i] = reg_info;
      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->eq_use_regs[i] = reg_info;
      df->def_info.begin[i] = 0;
      df->def_info.count[i] = 0;
      df->use_info.begin[i] = 0;
      df->use_info.count[i] = 0;
    }

  df->regs_inited = max_reg;
}
591 /* Grow the ref information. */
593 static void
594 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
596 if (ref_info->refs_size < new_size)
598 ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
599 memset (ref_info->refs + ref_info->refs_size, 0,
600 (new_size - ref_info->refs_size) *sizeof (df_ref));
601 ref_info->refs_size = new_size;
606 /* Check and grow the ref information if necessary. This routine
607 guarantees total_size + BITMAP_ADDEND amount of entries in refs
608 array. It updates ref_info->refs_size only and does not change
609 ref_info->total_size. */
611 static void
612 df_check_and_grow_ref_info (struct df_ref_info *ref_info,
613 unsigned bitmap_addend)
615 if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
617 int new_size = ref_info->total_size + bitmap_addend;
618 new_size += ref_info->total_size / 4;
619 df_grow_ref_info (ref_info, new_size);
624 /* Grow the ref information. If the current size is less than the
625 number of instructions, grow to 25% more than the number of
626 instructions. */
628 void
629 df_grow_insn_info (void)
631 unsigned int new_size = get_max_uid () + 1;
632 if (DF_INSN_SIZE () < new_size)
634 new_size += new_size / 4;
635 df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
636 memset (df->insns + df->insns_size, 0,
637 (new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
638 DF_INSN_SIZE () = new_size;
645 /*----------------------------------------------------------------------------
646 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
647 ----------------------------------------------------------------------------*/
/* Rescan all of the blocks_to_analyze, or all of the blocks in the
   function if blocks_to_analyze is NULL.  */
void
df_scan_blocks (void)
{
  basic_block bb;

  /* Incremental scanning does not maintain the organized tables.  */
  df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
  df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;

  df_get_regular_block_artificial_uses (&df->regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (&df->eh_block_artificial_uses);

  /* EH blocks use everything a regular block does, and more.  */
  bitmap_ior_into (&df->eh_block_artificial_uses,
                   &df->regular_block_artificial_uses);

  /* ENTRY and EXIT blocks have special defs/uses.  */
  df_get_entry_block_def_set (df->entry_block_defs);
  df_record_entry_block_defs (df->entry_block_defs);
  df_get_exit_block_use_set (df->exit_block_uses);
  df_record_exit_block_uses (df->exit_block_uses);
  df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
  df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));

  /* Regular blocks */
  FOR_EACH_BB (bb)
    {
      unsigned int bb_index = bb->index;
      df_bb_refs_record (bb_index, true);
    }
}
682 /* Create new refs under address LOC within INSN. This function is
683 only used externally. REF_FLAGS must be either 0 or DF_REF_IN_NOTE,
684 depending on whether LOC is inside PATTERN (INSN) or a note. */
686 void
687 df_uses_create (rtx *loc, rtx insn, int ref_flags)
689 gcc_assert (!(ref_flags & ~DF_REF_IN_NOTE));
690 df_uses_record (NULL, loc, DF_REF_REG_USE,
691 BLOCK_FOR_INSN (insn),
692 DF_INSN_INFO_GET (insn),
693 ref_flags);
696 /* Create a new ref of type DF_REF_TYPE for register REG at address
697 LOC within INSN of BB. This function is only used externally. */
699 df_ref
700 df_ref_create (rtx reg, rtx *loc, rtx insn,
701 basic_block bb,
702 enum df_ref_type ref_type,
703 int ref_flags)
705 enum df_ref_class cl;
707 df_grow_reg_info ();
709 /* You cannot hack artificial refs. */
710 gcc_assert (insn);
712 if (loc)
713 cl = DF_REF_REGULAR;
714 else
715 cl = DF_REF_BASE;
717 return df_ref_create_structure (cl, NULL, reg, loc, bb,
718 DF_INSN_INFO_GET (insn),
719 ref_type, ref_flags);
/* Install REF, a freshly created ref for an existing insn, into all of
   the df data structures: the proper per-register chain and (possibly)
   ref table via df_install_ref, plus the insn's own sorted
   defs/uses/eq_uses vector.  */
static void
df_install_ref_incremental (df_ref ref)
{
  struct df_reg_info **reg_info;
  struct df_ref_info *ref_info;
  df_ref *ref_rec;
  df_ref **ref_rec_ptr;
  unsigned int count = 0;
  bool add_to_table;

  rtx insn = DF_REF_INSN (ref);
  basic_block bb = BLOCK_FOR_INSN (insn);

  /* Pick the structures matching the kind of ref: def, eq-note use,
     or plain use.  */
  if (DF_REF_REG_DEF_P (ref))
    {
      reg_info = df->def_regs;
      ref_info = &df->def_info;
      ref_rec_ptr = &DF_INSN_DEFS (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }
  else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
    {
      reg_info = df->eq_use_regs;
      ref_info = &df->use_info;
      ref_rec_ptr = &DF_INSN_EQ_USES (insn);
      /* Eq-note uses only belong in the table when its ordering
         includes notes.  */
      switch (ref_info->ref_order)
        {
        case DF_REF_ORDER_UNORDERED_WITH_NOTES:
        case DF_REF_ORDER_BY_REG_WITH_NOTES:
        case DF_REF_ORDER_BY_INSN_WITH_NOTES:
          add_to_table = true;
          break;
        default:
          add_to_table = false;
          break;
        }
    }
  else
    {
      reg_info = df->use_regs;
      ref_info = &df->use_info;
      ref_rec_ptr = &DF_INSN_USES (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }

  /* Do not add if ref is not in the right blocks.  */
  if (add_to_table && df->analyze_subset)
    add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

  df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);

  /* An incremental append invalidates any sorted order the table had;
     downgrade the recorded ordering to unordered.  */
  if (add_to_table)
    switch (ref_info->ref_order)
      {
      case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      case DF_REF_ORDER_BY_REG_WITH_NOTES:
      case DF_REF_ORDER_BY_INSN_WITH_NOTES:
        ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
        break;
      default:
        ref_info->ref_order = DF_REF_ORDER_UNORDERED;
        break;
      }

  /* Count the existing entries of the insn's NULL-terminated
     vector.  */
  ref_rec = *ref_rec_ptr;
  while (*ref_rec)
    {
      count++;
      ref_rec++;
    }

  ref_rec = *ref_rec_ptr;
  if (count)
    {
      /* Grow by one slot, append REF, keep the vector sorted.  */
      ref_rec = XRESIZEVEC (df_ref, ref_rec, count+2);
      *ref_rec_ptr = ref_rec;
      ref_rec[count] = ref;
      ref_rec[count+1] = NULL;
      qsort (ref_rec, count + 1, sizeof (df_ref), df_ref_compare);
    }
  else
    {
      /* The empty vector is a shared sentinel and must not be
         resized; allocate a fresh two-element vector instead.  */
      df_ref *ref_rec = XNEWVEC (df_ref, 2);
      ref_rec[0] = ref;
      ref_rec[1] = NULL;
      *ref_rec_ptr = ref_rec;
    }

#if 0
  if (dump_file)
    {
      fprintf (dump_file, "adding ref ");
      df_ref_debug (ref, dump_file);
    }
#endif

  /* By adding the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
    df_set_bb_dirty (bb);
}
826 /*----------------------------------------------------------------------------
827 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
828 ----------------------------------------------------------------------------*/
830 static void
831 df_free_ref (df_ref ref)
833 struct df_scan_problem_data *problem_data
834 = (struct df_scan_problem_data *) df_scan->problem_data;
836 switch (DF_REF_CLASS (ref))
838 case DF_REF_BASE:
839 pool_free (problem_data->ref_base_pool, ref);
840 break;
842 case DF_REF_ARTIFICIAL:
843 pool_free (problem_data->ref_artificial_pool, ref);
844 break;
846 case DF_REF_REGULAR:
847 pool_free (problem_data->ref_regular_pool, ref);
848 break;
/* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
   Also delete the def-use or use-def chain if it exists.  */
static void
df_reg_chain_unlink (df_ref ref)
{
  df_ref next = DF_REF_NEXT_REG (ref);
  df_ref prev = DF_REF_PREV_REG (ref);
  int id = DF_REF_ID (ref);
  struct df_reg_info *reg_info;
  df_ref *refs = NULL;

  /* Find the reg_info for the register, and the organized ref table
     to clear REF from, if it is in one.  */
  if (DF_REF_REG_DEF_P (ref))
    {
      int regno = DF_REF_REGNO (ref);
      reg_info = DF_REG_DEF_GET (regno);
      refs = df->def_info.refs;
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
        {
          reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
          /* Eq-note uses are only in the table when its ordering
             includes notes.  */
          switch (df->use_info.ref_order)
            {
            case DF_REF_ORDER_UNORDERED_WITH_NOTES:
            case DF_REF_ORDER_BY_REG_WITH_NOTES:
            case DF_REF_ORDER_BY_INSN_WITH_NOTES:
              refs = df->use_info.refs;
              break;
            default:
              break;
            }
        }
      else
        {
          reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
          refs = df->use_info.refs;
        }
    }

  /* Clear the table slot, but only when the ref's block is actually
     being analyzed.  */
  if (refs)
    {
      if (df->analyze_subset)
        {
          if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
            refs[id] = NULL;
        }
      else
        refs[id] = NULL;
    }

  /* Delete any def-use or use-def chains that start here.  It is
     possible that there is trash in this field.  This happens for
     insns that have been deleted when rescanning has been deferred
     and the chain problem has also been deleted.  The chain tear down
     code skips deleted insns.  */
  if (df_chain && DF_REF_CHAIN (ref))
    df_chain_unlink (ref);

  reg_info->n_refs--;
  if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
    {
      gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
    }

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    DF_REF_NEXT_REG (prev) = next;
  else
    {
      gcc_assert (reg_info->reg_chain == ref);
      reg_info->reg_chain = next;
    }
  if (next)
    DF_REF_PREV_REG (next) = prev;

  df_free_ref (ref);
}
/* Remove REF from *VEC_PTR, a NULL-terminated vector of refs.  */
static void
df_ref_compress_rec (df_ref **vec_ptr, df_ref ref)
{
  df_ref *vec = *vec_ptr;

  if (vec[1])
    {
      /* More than one element: find REF, then shift everything after
         it down by one slot, carrying the NULL terminator along.  */
      while (*vec && *vec != ref)
        vec++;

      while (*vec)
        {
          *vec = *(vec+1);
          vec++;
        }
    }
  else
    {
      /* REF was the only element: free the vector and install the
         shared empty sentinel, which must never be freed.  */
      free (vec);
      *vec_ptr = df_null_ref_rec;
    }
}
/* Unlink REF from all def-use/use-def chains, etc.  */
void
df_ref_remove (df_ref ref)
{
#if 0
  if (dump_file)
    {
      fprintf (dump_file, "removing ref ");
      df_ref_debug (ref, dump_file);
    }
#endif

  /* Remove REF from whichever vector holds it: the block's artificial
     defs/uses for artificial refs, otherwise the insn's
     defs/uses/eq_uses.  */
  if (DF_REF_REG_DEF_P (ref))
    {
      if (DF_REF_IS_ARTIFICIAL (ref))
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (DF_REF_BBNO (ref));
          df_ref_compress_rec (&bb_info->artificial_defs, ref);
        }
      else
        {
          unsigned int uid = DF_REF_INSN_UID (ref);
          struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
          df_ref_compress_rec (&insn_rec->defs, ref);
        }
    }
  else
    {
      if (DF_REF_IS_ARTIFICIAL (ref))
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (DF_REF_BBNO (ref));
          df_ref_compress_rec (&bb_info->artificial_uses, ref);
        }
      else
        {
          unsigned int uid = DF_REF_INSN_UID (ref);
          struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);

          if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
            df_ref_compress_rec (&insn_rec->eq_uses, ref);
          else
            df_ref_compress_rec (&insn_rec->uses, ref);
        }
    }

  /* By deleting the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
    df_set_bb_dirty (DF_REF_BB (ref));
  df_reg_chain_unlink (ref);
}
1019 /* Create the insn record for INSN. If there was one there, zero it
1020 out. */
1022 struct df_insn_info *
1023 df_insn_create_insn_record (rtx insn)
1025 struct df_scan_problem_data *problem_data
1026 = (struct df_scan_problem_data *) df_scan->problem_data;
1027 struct df_insn_info *insn_rec;
1029 df_grow_insn_info ();
1030 insn_rec = DF_INSN_INFO_GET (insn);
1031 if (!insn_rec)
1033 insn_rec = (struct df_insn_info *) pool_alloc (problem_data->insn_pool);
1034 DF_INSN_INFO_SET (insn, insn_rec);
1036 memset (insn_rec, 0, sizeof (struct df_insn_info));
1037 insn_rec->insn = insn;
1038 return insn_rec;
1042 /* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain. */
1044 static void
1045 df_ref_chain_delete_du_chain (df_ref *ref_rec)
1047 while (*ref_rec)
1049 df_ref ref = *ref_rec;
1050 /* CHAIN is allocated by DF_CHAIN. So make sure to
1051 pass df_scan instance for the problem. */
1052 if (DF_REF_CHAIN (ref))
1053 df_chain_unlink (ref);
1054 ref_rec++;
1059 /* Delete all refs in the ref chain. */
1061 static void
1062 df_ref_chain_delete (df_ref *ref_rec)
1064 df_ref *start = ref_rec;
1065 while (*ref_rec)
1067 df_reg_chain_unlink (*ref_rec);
1068 ref_rec++;
1071 /* If the list is empty, it has a special shared element that is not
1072 to be deleted. */
1073 if (*start)
1074 free (start);
1078 /* Delete the hardreg chain. */
1080 static void
1081 df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
1083 struct df_scan_problem_data *problem_data;
1085 if (!hardregs)
1086 return;
1088 problem_data = (struct df_scan_problem_data *) df_scan->problem_data;
1090 while (*hardregs)
1092 pool_free (problem_data->mw_reg_pool, *hardregs);
1093 hardregs++;
/* Delete all of the refs information from the insn with uid UID.  BB
   must be passed in except when called from df_process_deferred_rescans
   to mark the block as dirty.  */
void
df_insn_delete (basic_block bb, unsigned int uid)
{
  struct df_insn_info *insn_info = NULL;
  if (!df)
    return;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  /* The block must be marked as dirty now, rather than later as in
     df_insn_rescan and df_notes_rescan because it may not be there at
     rescanning time and the mark would blow up.  */
  if (bb)
    df_set_bb_dirty (bb);

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      /* Queue the deletion and cancel any pending rescans for this
         insn; the real work happens later.  */
      if (insn_info)
        {
          bitmap_clear_bit (&df->insns_to_rescan, uid);
          bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
          bitmap_set_bit (&df->insns_to_delete, uid);
        }
      if (dump_file)
        fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "deleting insn with uid = %d.\n", uid);

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      struct df_scan_problem_data *problem_data
        = (struct df_scan_problem_data *) df_scan->problem_data;

      /* In general, notes do not have the insn_info fields
         initialized.  However, combine deletes insns by changing them
         to notes.  How clever.  So we cannot just check if it is a
         valid insn before short circuiting this code, we need to see
         if we actually initialized it.  */
      if (insn_info->defs)
        {
          df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

          if (df_chain)
            {
              df_ref_chain_delete_du_chain (insn_info->defs);
              df_ref_chain_delete_du_chain (insn_info->uses);
              df_ref_chain_delete_du_chain (insn_info->eq_uses);
            }

          df_ref_chain_delete (insn_info->defs);
          df_ref_chain_delete (insn_info->uses);
          df_ref_chain_delete (insn_info->eq_uses);
        }
      pool_free (problem_data->insn_pool, insn_info);
      DF_INSN_UID_SET (uid, NULL);
    }
}
/* Free all of the refs and the mw_hardregs in COLLECTION_REC, then
   release the stack vectors themselves.  The record is left with
   dangling vector pointers, so it must not be reused afterwards.  */

static void
df_free_collection_rec (struct df_collection_rec *collection_rec)
{
  unsigned int ix;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  df_ref ref;
  struct df_mw_hardreg *mw;

  FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, ix, ref)
    df_free_ref (ref);
  FOR_EACH_VEC_ELT (df_ref, collection_rec->use_vec, ix, ref)
    df_free_ref (ref);
  FOR_EACH_VEC_ELT (df_ref, collection_rec->eq_use_vec, ix, ref)
    df_free_ref (ref);
  FOR_EACH_VEC_ELT (df_mw_hardreg_ptr, collection_rec->mw_vec, ix, mw)
    pool_free (problem_data->mw_reg_pool, mw);

  VEC_free (df_ref, stack, collection_rec->def_vec);
  VEC_free (df_ref, stack, collection_rec->use_vec);
  VEC_free (df_ref, stack, collection_rec->eq_use_vec);
  VEC_free (df_mw_hardreg_ptr, stack, collection_rec->mw_vec);
}
/* Rescan INSN.  Return TRUE if the rescanning produced any changes.
   Returns FALSE when df is absent, INSN is not a real insn, INSN has
   no containing block, rescanning is disabled or deferred, or the
   refs verify as unchanged.  */

bool
df_insn_rescan (rtx insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  basic_block bb = BLOCK_FOR_INSN (insn);
  struct df_collection_rec collection_rec;

  if ((!df) || (!INSN_P (insn)))
    return false;

  if (!bb)
    {
      if (dump_file)
        fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
      return false;
    }

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return false;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
        {
          /* Create a placeholder record pointing at the shared empty
             chains so later passes can safely look the insn up.  */
          insn_info = df_insn_create_insn_record (insn);
          insn_info->defs = df_null_ref_rec;
          insn_info->uses = df_null_ref_rec;
          insn_info->eq_uses = df_null_ref_rec;
          insn_info->mw_hardregs = df_null_mw_rec;
        }
      if (dump_file)
        fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);

      bitmap_clear_bit (&df->insns_to_delete, uid);
      bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
      bitmap_set_bit (&df->insns_to_rescan, INSN_UID (insn));
      return false;
    }

  collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
  collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
  collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
  collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      int luid;
      bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
      /* If there's no change, return false.  */
      if (the_same)
        {
          df_free_collection_rec (&collection_rec);
          if (dump_file)
            fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
          return false;
        }
      if (dump_file)
        fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);

      /* There's change - we need to delete the existing info.
         Since the insn isn't moved, we can salvage its LUID.  */
      luid = DF_INSN_LUID (insn);
      df_insn_delete (NULL, uid);
      df_insn_create_insn_record (insn);
      DF_INSN_LUID (insn) = luid;
    }
  else
    {
      struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
      df_insn_refs_collect (&collection_rec, bb, insn_info);
      if (dump_file)
        fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
    }

  df_refs_add_to_chains (&collection_rec, bb, insn);
  /* Debug insns must not perturb the dirty/solve machinery.  */
  if (!DEBUG_INSN_P (insn))
    df_set_bb_dirty (bb);

  VEC_free (df_ref, stack, collection_rec.def_vec);
  VEC_free (df_ref, stack, collection_rec.use_vec);
  VEC_free (df_ref, stack, collection_rec.eq_use_vec);
  VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);

  return true;
}
/* Same as df_insn_rescan, but don't mark the basic block as
   dirty.  Only valid for debug insns whose location is already
   unknown; it simply drops the insn's existing refs.  Returns TRUE
   if any refs were actually released.  */

bool
df_insn_rescan_debug_internal (rtx insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info;

  gcc_assert (DEBUG_INSN_P (insn)
              && VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));

  if (!df)
    return false;

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
  if (!insn_info)
    return false;

  if (dump_file)
    fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);

  /* Uninitialized record: nothing to release.  */
  if (!insn_info->defs)
    return false;

  /* Already pointing at the shared empty chains: nothing to do.  */
  if (insn_info->defs == df_null_ref_rec
      && insn_info->uses == df_null_ref_rec
      && insn_info->eq_uses == df_null_ref_rec
      && insn_info->mw_hardregs == df_null_mw_rec)
    return false;

  df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

  if (df_chain)
    {
      df_ref_chain_delete_du_chain (insn_info->defs);
      df_ref_chain_delete_du_chain (insn_info->uses);
      df_ref_chain_delete_du_chain (insn_info->eq_uses);
    }

  df_ref_chain_delete (insn_info->defs);
  df_ref_chain_delete (insn_info->uses);
  df_ref_chain_delete (insn_info->eq_uses);

  insn_info->defs = df_null_ref_rec;
  insn_info->uses = df_null_ref_rec;
  insn_info->eq_uses = df_null_ref_rec;
  insn_info->mw_hardregs = df_null_mw_rec;

  return true;
}
/* Rescan all of the insns in the function.  Note that the artificial
   uses and defs are not touched.  This function will destroy def-use
   or use-def chains.  Any NO/DEFER rescan flags are temporarily
   cleared so the rescans actually run, then restored.  */

void
df_insn_rescan_all (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  basic_block bb;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap_head tmp;

  bitmap_initialize (&tmp, &df_bitmap_obstack);

  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  /* Flush pending deferred deletions first; iterate over a copy since
     df_insn_delete modifies df->insns_to_delete.  */
  bitmap_copy (&tmp, &df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_delete (NULL, uid);
    }

  bitmap_clear (&tmp);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  FOR_EACH_BB (bb)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
        {
          df_insn_rescan (insn);
        }
    }

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);
}
/* Process all of the deferred rescans or deletions: deletions first,
   then full rescans, then notes-only rescans.  The NO/DEFER rescan
   flags are temporarily cleared so the work actually happens, then
   restored; finally the entry/exit blocks are refreshed if
   regs_ever_live changed during the pass.  */

void
df_process_deferred_rescans (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap_head tmp;

  bitmap_initialize (&tmp, &df_bitmap_obstack);

  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  if (dump_file)
    fprintf (dump_file, "starting the processing of deferred insns\n");

  /* Iterate over copies throughout: the worker calls mutate the
     underlying df bitmaps.  */
  bitmap_copy (&tmp, &df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_delete (NULL, uid);
    }

  bitmap_copy (&tmp, &df->insns_to_rescan);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_rescan (insn_info->insn);
    }

  bitmap_copy (&tmp, &df->insns_to_notes_rescan);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_notes_rescan (insn_info->insn);
    }

  if (dump_file)
    fprintf (dump_file, "ending the processing of deferred insns\n");

  bitmap_clear (&tmp);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);

  /* If someone changed regs_ever_live during this pass, fix up the
     entry and exit blocks.  */
  if (df->redo_entry_and_exit)
    {
      df_update_entry_exit_and_calls ();
      df->redo_entry_and_exit = false;
    }
}
1485 /* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1486 the uses if INCLUDE_USES. Include the eq_uses if
1487 INCLUDE_EQ_USES. */
1489 static unsigned int
1490 df_count_refs (bool include_defs, bool include_uses,
1491 bool include_eq_uses)
1493 unsigned int regno;
1494 int size = 0;
1495 unsigned int m = df->regs_inited;
1497 for (regno = 0; regno < m; regno++)
1499 if (include_defs)
1500 size += DF_REG_DEF_COUNT (regno);
1501 if (include_uses)
1502 size += DF_REG_USE_COUNT (regno);
1503 if (include_eq_uses)
1504 size += DF_REG_EQ_USE_COUNT (regno);
1506 return size;
/* Take build ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in reg order
   which is likely to be best if processing the whole function.
   Fills REF_INFO->refs with every selected ref, assigns each ref a
   table id, and records per-regno begin/count offsets.  */

static void
df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
                                  bool include_defs,
                                  bool include_uses,
                                  bool include_eq_uses)
{
  unsigned int m = df->regs_inited;
  unsigned int regno;
  unsigned int offset = 0;
  unsigned int start;

  if (df->changeable_flags & DF_NO_HARD_REGS)
    {
      /* Hard-reg slots are skipped, so zero them explicitly.  */
      start = FIRST_PSEUDO_REGISTER;
      memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
      memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
    }
  else
    start = 0;

  ref_info->total_size
    = df_count_refs (include_defs, include_uses, include_eq_uses);

  df_check_and_grow_ref_info (ref_info, 1);

  for (regno = start; regno < m; regno++)
    {
      int count = 0;
      ref_info->begin[regno] = offset;
      if (include_defs)
        {
          df_ref ref = DF_REG_DEF_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_checking_assert (offset < ref_info->refs_size);
            }
        }
      if (include_uses)
        {
          df_ref ref = DF_REG_USE_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_checking_assert (offset < ref_info->refs_size);
            }
        }
      if (include_eq_uses)
        {
          df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_checking_assert (offset < ref_info->refs_size);
            }
        }
      ref_info->count[regno] = count;
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->table_size = offset;
}
/* Take build ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in insn order
   which is likely to be best if processing some segment of the
   function.  Two passes over df->blocks_to_analyze: the first counts
   refs per regno, the second assigns table ids and fills
   REF_INFO->refs using the begin[]/count[] offsets computed in
   between.  */

static void
df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
                                   bool include_defs,
                                   bool include_uses,
                                   bool include_eq_uses)
{
  bitmap_iterator bi;
  unsigned int bb_index;
  unsigned int m = df->regs_inited;
  unsigned int offset = 0;
  unsigned int r;
  unsigned int start
    = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;

  memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
  memset (ref_info->count, 0, sizeof (int) * df->regs_inited);

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);

  /* Pass 1: count refs per regno (artificial refs first, then insn
     refs, mirroring the placement pass below).  */
  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      rtx insn;
      df_ref *ref_rec;

      if (include_defs)
        for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
          {
            unsigned int regno = DF_REF_REGNO (*ref_rec);
            ref_info->count[regno]++;
          }
      if (include_uses)
        for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
          {
            unsigned int regno = DF_REF_REGNO (*ref_rec);
            ref_info->count[regno]++;
          }

      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            {
              unsigned int uid = INSN_UID (insn);

              if (include_defs)
                for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
              if (include_uses)
                for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
              if (include_eq_uses)
                for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
            }
        }
    }

  /* Turn the per-regno counts into begin offsets; count[] is reset so
     pass 2 can reuse it as a running cursor.  */
  for (r = start; r < m; r++)
    {
      ref_info->begin[r] = offset;
      offset += ref_info->count[r];
      ref_info->count[r] = 0;
    }

  /* Pass 2: place each ref at begin[regno] + count[regno]++ and record
     its table id.  Refs below START (hard regs when excluded) are
     skipped.  */
  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      rtx insn;
      df_ref *ref_rec;

      if (include_defs)
        for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
          {
            df_ref ref = *ref_rec;
            unsigned int regno = DF_REF_REGNO (ref);
            if (regno >= start)
              {
                unsigned int id
                  = ref_info->begin[regno] + ref_info->count[regno]++;
                DF_REF_ID (ref) = id;
                ref_info->refs[id] = ref;
              }
          }
      if (include_uses)
        for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
          {
            df_ref ref = *ref_rec;
            unsigned int regno = DF_REF_REGNO (ref);
            if (regno >= start)
              {
                unsigned int id
                  = ref_info->begin[regno] + ref_info->count[regno]++;
                DF_REF_ID (ref) = id;
                ref_info->refs[id] = ref;
              }
          }

      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            {
              unsigned int uid = INSN_UID (insn);

              if (include_defs)
                for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
                  {
                    df_ref ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
              if (include_uses)
                for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
                  {
                    df_ref ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
              if (include_eq_uses)
                for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
                  {
                    df_ref ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
            }
        }
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->table_size = offset;
}
1757 /* Take build ref table for either the uses or defs from the reg-use
1758 or reg-def chains. */
1760 static void
1761 df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
1762 bool include_defs,
1763 bool include_uses,
1764 bool include_eq_uses)
1766 if (df->analyze_subset)
1767 df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
1768 include_uses, include_eq_uses);
1769 else
1770 df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
1771 include_uses, include_eq_uses);
1775 /* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET. */
1776 static unsigned int
1777 df_add_refs_to_table (unsigned int offset,
1778 struct df_ref_info *ref_info,
1779 df_ref *ref_vec)
1781 while (*ref_vec)
1783 df_ref ref = *ref_vec;
1784 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
1785 || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
1787 ref_info->refs[offset] = ref;
1788 DF_REF_ID (*ref_vec) = offset++;
1790 ref_vec++;
1792 return offset;
/* Add to the table in REF_INFO all refs in the insns of BB, starting
   at OFFSET.  Artificial defs/uses of the block come first, then each
   insn's refs in insn order.  Include the defs if INCLUDE_DEFS,
   the uses if INCLUDE_USES, the eq_uses if INCLUDE_EQ_USES.
   Returns the offset one past the last ref added.  */

static unsigned int
df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
                               struct df_ref_info *ref_info,
                               bool include_defs, bool include_uses,
                               bool include_eq_uses)
{
  rtx insn;

  if (include_defs)
    offset = df_add_refs_to_table (offset, ref_info,
                                   df_get_artificial_defs (bb->index));
  if (include_uses)
    offset = df_add_refs_to_table (offset, ref_info,
                                   df_get_artificial_uses (bb->index));

  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
        unsigned int uid = INSN_UID (insn);
        if (include_defs)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_DEFS (uid));
        if (include_uses)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_USES (uid));
        if (include_eq_uses)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_EQ_USES (uid));
      }
  return offset;
}
/* Organize the refs by insn into the table in REF_INFO.  If
   blocks_to_analyze is defined, use that set, otherwise the entire
   program.  Include the defs if INCLUDE_DEFS.  Include the uses if
   INCLUDE_USES.  Include the eq_uses if INCLUDE_EQ_USES.  */

static void
df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
                            bool include_defs, bool include_uses,
                            bool include_eq_uses)
{
  basic_block bb;
  unsigned int offset = 0;

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);
  if (df->blocks_to_analyze)
    {
      bitmap_iterator bi;
      unsigned int index;

      EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
        {
          offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
                                                  include_defs, include_uses,
                                                  include_eq_uses);
        }

      ref_info->table_size = offset;
    }
  else
    {
      /* FOR_ALL_BB also covers the entry and exit blocks, whose
         artificial refs must end up in the table.  */
      FOR_ALL_BB (bb)
        offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
                                                include_defs, include_uses,
                                                include_eq_uses);
      ref_info->table_size = offset;
    }
}
/* If the use refs in DF are not organized as ORDER, reorganize them.
   DF_REF_ORDER_NO_TABLE discards the table entirely; the unordered
   orders are never requested explicitly.  */

void
df_maybe_reorganize_use_refs (enum df_ref_order order)
{
  if (order == df->use_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_REG_WITH_NOTES:
      df_reorganize_refs_by_reg (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
      df_reorganize_refs_by_insn (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_NO_TABLE:
      free (df->use_info.refs);
      df->use_info.refs = NULL;
      df->use_info.refs_size = 0;
      break;

    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->use_info.ref_order = order;
}
/* If the def refs in DF are not organized as ORDER, reorganize them.
   Defs never live in notes, so the WITH_NOTES orders (and the
   unordered ones) are invalid requests here.  */

void
df_maybe_reorganize_def_refs (enum df_ref_order order)
{
  if (order == df->def_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_NO_TABLE:
      free (df->def_info.refs);
      df->def_info.refs = NULL;
      df->def_info.refs_size = 0;
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
    case DF_REF_ORDER_BY_REG_WITH_NOTES:
    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->def_info.ref_order = order;
}
/* Change all of the basic block references in INSN to use the insn's
   current basic block.  This function is called from routines that move
   instructions from one block to another.  The rtl block pointer is
   updated even when df is not active.  */

void
df_insn_change_bb (rtx insn, basic_block new_bb)
{
  basic_block old_bb = BLOCK_FOR_INSN (insn);
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (old_bb == new_bb)
    return;

  set_block_for_insn (insn, new_bb);

  if (!df)
    return;

  if (dump_file)
    fprintf (dump_file, "changing bb of uid %d\n", uid);

  insn_info = DF_INSN_UID_SAFE_GET (uid);
  if (insn_info == NULL)
    {
      /* Never scanned before; a full rescan attaches it to NEW_BB.  */
      if (dump_file)
        fprintf (dump_file, "  unscanned insn\n");
      df_insn_rescan (insn);
      return;
    }

  if (!INSN_P (insn))
    return;

  df_set_bb_dirty (new_bb);
  if (old_bb)
    {
      if (dump_file)
        fprintf (dump_file, "  from %d to %d\n",
                 old_bb->index, new_bb->index);
      df_set_bb_dirty (old_bb);
    }
  else
    if (dump_file)
      fprintf (dump_file, "  to %d\n", new_bb->index);
}
/* Helper function for df_ref_change_reg_with_loc.  Move every ref on
   OLD_DF's reg_chain whose location is LOC over to NEW_DF's chain,
   retargeting it to NEW_REGNO, and re-sort the insn's use/eq_use
   vector since regno is a sort key.  Artificial refs have no loc and
   are never moved.  */

static void
df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
                              struct df_reg_info *new_df,
                              int new_regno, rtx loc)
{
  df_ref the_ref = old_df->reg_chain;

  while (the_ref)
    {
      if ((!DF_REF_IS_ARTIFICIAL (the_ref))
          && DF_REF_LOC (the_ref)
          && (*DF_REF_LOC (the_ref) == loc))
        {
          df_ref next_ref = DF_REF_NEXT_REG (the_ref);
          df_ref prev_ref = DF_REF_PREV_REG (the_ref);
          df_ref *ref_vec, *ref_vec_t;
          struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);
          unsigned int count = 0;

          DF_REF_REGNO (the_ref) = new_regno;
          DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];

          /* Pull the_ref out of the old regno chain.  */
          if (prev_ref)
            DF_REF_NEXT_REG (prev_ref) = next_ref;
          else
            old_df->reg_chain = next_ref;
          if (next_ref)
            DF_REF_PREV_REG (next_ref) = prev_ref;
          old_df->n_refs--;

          /* Put the ref into the new regno chain.  */
          DF_REF_PREV_REG (the_ref) = NULL;
          DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
          if (new_df->reg_chain)
            DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
          new_df->reg_chain = the_ref;
          new_df->n_refs++;
          if (DF_REF_BB (the_ref))
            df_set_bb_dirty (DF_REF_BB (the_ref));

          /* Need to sort the record again that the ref was in because
             the regno is a sorting key.  First, find the right
             record.  */
          if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
            ref_vec = insn_info->eq_uses;
          else
            ref_vec = insn_info->uses;
          if (dump_file)
            fprintf (dump_file, "changing reg in insn %d\n",
                     DF_REF_INSN_UID (the_ref));

          ref_vec_t = ref_vec;

          /* Find the length.  */
          while (*ref_vec_t)
            {
              count++;
              ref_vec_t++;
            }
          qsort (ref_vec, count, sizeof (df_ref ), df_ref_compare);

          /* NEXT_REF was saved before the unlink, so iteration
             continues on the old chain.  */
          the_ref = next_ref;
        }
      else
        the_ref = DF_REF_NEXT_REG (the_ref);
    }
}
/* Change the regno of all refs that contained LOC from OLD_REGNO to
   NEW_REGNO.  Refs that do not match LOC are not changed which means
   that artificial refs are not changed since they have no loc.  This
   call is to support the SET_REGNO macro.  All three ref kinds (defs,
   uses, eq_uses) are migrated.  */

void
df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
{
  if ((!df) || (old_regno == -1) || (old_regno == new_regno))
    return;

  df_grow_reg_info ();

  df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
                                DF_REG_DEF_GET (new_regno), new_regno, loc);
  df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
                                DF_REG_USE_GET (new_regno), new_regno, loc);
  df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
                                DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
}
2093 /* Delete the mw_hardregs that point into the eq_notes. */
2095 static unsigned int
2096 df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
2098 struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
2099 unsigned int deleted = 0;
2100 unsigned int count = 0;
2101 struct df_scan_problem_data *problem_data
2102 = (struct df_scan_problem_data *) df_scan->problem_data;
2104 if (!*mw_vec)
2105 return 0;
2107 while (*mw_vec)
2109 if ((*mw_vec)->flags & DF_REF_IN_NOTE)
2111 struct df_mw_hardreg **temp_vec = mw_vec;
2113 pool_free (problem_data->mw_reg_pool, *mw_vec);
2114 temp_vec = mw_vec;
2115 /* Shove the remaining ones down one to fill the gap. While
2116 this looks n**2, it is highly unusual to have any mw regs
2117 in eq_notes and the chances of more than one are almost
2118 non existent. */
2119 while (*temp_vec)
2121 *temp_vec = *(temp_vec + 1);
2122 temp_vec++;
2124 deleted++;
2126 else
2128 mw_vec++;
2129 count++;
2133 if (count == 0)
2135 df_scan_free_mws_vec (insn_info->mw_hardregs);
2136 insn_info->mw_hardregs = df_null_mw_rec;
2137 return 0;
2139 return deleted;
/* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN.  The
   existing eq_uses and note-originated mw_hardregs are discarded and
   re-collected from the current notes; defs and ordinary uses are
   untouched.  */

void
df_notes_rescan (rtx insn)
{
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (!df)
    return;

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return;

  /* Do nothing if the insn hasn't been emitted yet.  */
  if (!BLOCK_FOR_INSN (insn))
    return;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID(insn));

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
        {
          /* Placeholder record pointing at the shared empty chains.  */
          insn_info = df_insn_create_insn_record (insn);
          insn_info->defs = df_null_ref_rec;
          insn_info->uses = df_null_ref_rec;
          insn_info->eq_uses = df_null_ref_rec;
          insn_info->mw_hardregs = df_null_mw_rec;
        }

      bitmap_clear_bit (&df->insns_to_delete, uid);
      /* If the insn is set to be rescanned, it does not need to also
         be notes rescanned.  */
      if (!bitmap_bit_p (&df->insns_to_rescan, uid))
        bitmap_set_bit (&df->insns_to_notes_rescan, INSN_UID (insn));
      return;
    }

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);

  if (insn_info)
    {
      basic_block bb = BLOCK_FOR_INSN (insn);
      rtx note;
      struct df_collection_rec collection_rec;
      unsigned int num_deleted;
      unsigned int mw_len;

      memset (&collection_rec, 0, sizeof (struct df_collection_rec));
      collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
      collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);

      num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
      df_ref_chain_delete (insn_info->eq_uses);
      insn_info->eq_uses = NULL;

      /* Process REG_EQUIV/REG_EQUAL notes */
      for (note = REG_NOTES (insn); note;
           note = XEXP (note, 1))
        {
          switch (REG_NOTE_KIND (note))
            {
            case REG_EQUIV:
            case REG_EQUAL:
              df_uses_record (&collection_rec,
                              &XEXP (note, 0), DF_REF_REG_USE,
                              bb, insn_info, DF_REF_IN_NOTE);
            default:
              break;
            }
        }

      /* Find some place to put any new mw_hardregs.  */
      df_canonize_collection_rec (&collection_rec);
      mw_len = VEC_length (df_mw_hardreg_ptr, collection_rec.mw_vec);
      if (mw_len)
        {
          unsigned int count = 0;
          struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
          while (*mw_rec)
            {
              count++;
              mw_rec++;
            }

          if (count)
            {
              /* Append to the end of the existing record after
                 expanding it if necessary.  */
              if (mw_len > num_deleted)
                {
                  insn_info->mw_hardregs =
                    XRESIZEVEC (struct df_mw_hardreg *,
                                insn_info->mw_hardregs,
                                count + 1 + mw_len);
                }
              memcpy (&insn_info->mw_hardregs[count],
                      VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
                      mw_len * sizeof (struct df_mw_hardreg *));
              insn_info->mw_hardregs[count + mw_len] = NULL;
              qsort (insn_info->mw_hardregs, count + mw_len,
                     sizeof (struct df_mw_hardreg *), df_mw_compare);
            }
          else
            {
              /* No vector there. */
              insn_info->mw_hardregs
                = XNEWVEC (struct df_mw_hardreg*, 1 + mw_len);
              memcpy (insn_info->mw_hardregs,
                      VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
                      mw_len * sizeof (struct df_mw_hardreg *));
              insn_info->mw_hardregs[mw_len] = NULL;
            }
        }
      /* Get rid of the mw_rec so that df_refs_add_to_chains will
         ignore it.  */
      VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
      df_refs_add_to_chains (&collection_rec, bb, insn);
      VEC_free (df_ref, stack, collection_rec.eq_use_vec);
    }
  else
    /* No record at all: fall back to a full rescan.  */
    df_insn_rescan (insn);
}
2276 /*----------------------------------------------------------------------------
2277 Hard core instruction scanning code. No external interfaces here,
2278 just a lot of routines that look inside insns.
2279 ----------------------------------------------------------------------------*/
2282 /* Return true if the contents of two df_ref's are identical.
2283 It ignores DF_REF_MARKER. */
2285 static bool
2286 df_ref_equal_p (df_ref ref1, df_ref ref2)
2288 if (!ref2)
2289 return false;
2291 if (ref1 == ref2)
2292 return true;
2294 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
2295 || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
2296 || DF_REF_REG (ref1) != DF_REF_REG (ref2)
2297 || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
2298 || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2299 != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2300 || DF_REF_BB (ref1) != DF_REF_BB (ref2)
2301 || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
2302 return false;
2304 switch (DF_REF_CLASS (ref1))
2306 case DF_REF_ARTIFICIAL:
2307 case DF_REF_BASE:
2308 return true;
2310 case DF_REF_REGULAR:
2311 return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);
2313 default:
2314 gcc_unreachable ();
2316 return false;
/* Compare REF1 and REF2 for sorting.  This is only called from places
   where all of the refs are of the same type, in the same insn, and
   have the same bb.  So these fields are not checked.  Ties that
   cannot be broken by a field fall back to DF_REF_ORDER, keeping the
   sort deterministic.  */

static int
df_ref_compare (const void *r1, const void *r2)
{
  const df_ref ref1 = *(const df_ref *)r1;
  const df_ref ref2 = *(const df_ref *)r2;

  if (ref1 == ref2)
    return 0;

  if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
    return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);

  if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
    return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);

  if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
    return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);

  if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
    return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);

  /* Cannot look at the LOC field on artificial refs.  */
  if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
      && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
    return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);

  if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
    {
      /* If two refs are identical except that one of them is from
         a mw and one is not, we need to have the one with the mw
         first.  */
      if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
          DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
        return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
      else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
        return -1;
      else
        return 1;
    }

  return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
}
2367 static void
2368 df_swap_refs (VEC(df_ref,stack) **ref_vec, int i, int j)
2370 df_ref tmp = VEC_index (df_ref, *ref_vec, i);
2371 VEC_replace (df_ref, *ref_vec, i, VEC_index (df_ref, *ref_vec, j));
2372 VEC_replace (df_ref, *ref_vec, j, tmp);
/* Sort and compress (remove duplicates from) the set of refs in
   *REF_VEC, freeing the duplicate refs as they are dropped.  */

static void
df_sort_and_compress_refs (VEC(df_ref,stack) **ref_vec)
{
  unsigned int count;
  unsigned int i;
  unsigned int dist = 0;	/* Number of duplicates dropped so far.  */

  count = VEC_length (df_ref, *ref_vec);

  /* If there are 1 or 0 elements, there is nothing to do.  */
  if (count < 2)
    return;
  else if (count == 2)
    {
      /* Two elements: a single compare-and-swap beats qsort.  */
      df_ref r0 = VEC_index (df_ref, *ref_vec, 0);
      df_ref r1 = VEC_index (df_ref, *ref_vec, 1);
      if (df_ref_compare (&r0, &r1) > 0)
	df_swap_refs (ref_vec, 0, 1);
    }
  else
    {
      /* Scan for the first adjacent pair that is out of strict order.  */
      for (i = 0; i < count - 1; i++)
	{
	  df_ref r0 = VEC_index (df_ref, *ref_vec, i);
	  df_ref r1 = VEC_index (df_ref, *ref_vec, i + 1);
	  if (df_ref_compare (&r0, &r1) >= 0)
	    break;
	}
      /* If the array is already strictly ordered,
	 which is the most common case for large COUNT case
	 (which happens for CALL INSNs),
	 no need to sort and filter out duplicate.
	 Simply return the count.
	 Make sure DF_GET_ADD_REFS adds refs in the increasing order
	 of DF_REF_COMPARE.  */
      if (i == count - 1)
	return;
      VEC_qsort (df_ref, *ref_vec, df_ref_compare);
    }

  /* Compress out duplicates in place: DIST tracks how far later
     elements must shift down to fill the holes left by freed refs.  */
  for (i=0; i<count-dist; i++)
    {
      /* Find the next ref that is not equal to the current ref.  */
      while (i + dist + 1 < count
	     && df_ref_equal_p (VEC_index (df_ref, *ref_vec, i),
				VEC_index (df_ref, *ref_vec, i + dist + 1)))
	{
	  df_free_ref (VEC_index (df_ref, *ref_vec, i + dist + 1));
	  dist++;
	}
      /* Copy it down to the next position.  */
      if (dist && i + dist + 1 < count)
	VEC_replace (df_ref, *ref_vec, i + 1,
		     VEC_index (df_ref, *ref_vec, i + dist + 1));
    }

  count -= dist;
  VEC_truncate (df_ref, *ref_vec, count);
}
2438 /* Return true if the contents of two df_ref's are identical.
2439 It ignores DF_REF_MARKER. */
2441 static bool
2442 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2444 if (!mw2)
2445 return false;
2446 return (mw1 == mw2) ||
2447 (mw1->mw_reg == mw2->mw_reg
2448 && mw1->type == mw2->type
2449 && mw1->flags == mw2->flags
2450 && mw1->start_regno == mw2->start_regno
2451 && mw1->end_regno == mw2->end_regno);
2455 /* Compare MW1 and MW2 for sorting. */
2457 static int
2458 df_mw_compare (const void *m1, const void *m2)
2460 const struct df_mw_hardreg *const mw1 = *(const struct df_mw_hardreg *const*)m1;
2461 const struct df_mw_hardreg *const mw2 = *(const struct df_mw_hardreg *const*)m2;
2463 if (mw1 == mw2)
2464 return 0;
2466 if (mw1->type != mw2->type)
2467 return mw1->type - mw2->type;
2469 if (mw1->flags != mw2->flags)
2470 return mw1->flags - mw2->flags;
2472 if (mw1->start_regno != mw2->start_regno)
2473 return mw1->start_regno - mw2->start_regno;
2475 if (mw1->end_regno != mw2->end_regno)
2476 return mw1->end_regno - mw2->end_regno;
2478 if (mw1->mw_reg != mw2->mw_reg)
2479 return mw1->mw_order - mw2->mw_order;
2481 return 0;
/* Sort and compress (remove duplicates from) the set of multiword
   hardregs in *MW_VEC, returning freed records to the mw pool.
   (Original comment said "set of refs" — another copy-paste slip.)  */

static void
df_sort_and_compress_mws (VEC(df_mw_hardreg_ptr,stack) **mw_vec)
{
  unsigned int count;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;
  unsigned int dist = 0;	/* Number of duplicates dropped so far.  */

  count = VEC_length (df_mw_hardreg_ptr, *mw_vec);
  if (count < 2)
    return;
  else if (count == 2)
    {
      /* Two elements: a single compare-and-swap beats qsort.  */
      struct df_mw_hardreg *m0 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 0);
      struct df_mw_hardreg *m1 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 1);
      if (df_mw_compare (&m0, &m1) > 0)
	{
	  struct df_mw_hardreg *tmp = VEC_index (df_mw_hardreg_ptr,
						 *mw_vec, 0);
	  VEC_replace (df_mw_hardreg_ptr, *mw_vec, 0,
		       VEC_index (df_mw_hardreg_ptr, *mw_vec, 1));
	  VEC_replace (df_mw_hardreg_ptr, *mw_vec, 1, tmp);
	}
    }
  else
    VEC_qsort (df_mw_hardreg_ptr, *mw_vec, df_mw_compare);

  /* Compress out duplicates in place; see df_sort_and_compress_refs
     for the same DIST-based shifting scheme.  */
  for (i=0; i<count-dist; i++)
    {
      /* Find the next ref that is not equal to the current ref.  */
      while (i + dist + 1 < count
	     && df_mw_equal_p (VEC_index (df_mw_hardreg_ptr, *mw_vec, i),
			       VEC_index (df_mw_hardreg_ptr, *mw_vec,
					  i + dist + 1)))
	{
	  pool_free (problem_data->mw_reg_pool,
		     VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
	  dist++;
	}
      /* Copy it down to the next position.  */
      if (dist && i + dist + 1 < count)
	VEC_replace (df_mw_hardreg_ptr, *mw_vec, i + 1,
		     VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
    }

  count -= dist;
  VEC_truncate (df_mw_hardreg_ptr, *mw_vec, count);
}
/* Sort and remove duplicates from each of the four ref/mw vectors in
   the COLLECTION_REC, putting it into canonical form.  */

static void
df_canonize_collection_rec (struct df_collection_rec *collection_rec)
{
  df_sort_and_compress_refs (&collection_rec->def_vec);
  df_sort_and_compress_refs (&collection_rec->use_vec);
  df_sort_and_compress_refs (&collection_rec->eq_use_vec);
  df_sort_and_compress_mws (&collection_rec->mw_vec);
}
/* Add the new df_ref THIS_REF to the appropriate reg_info chain, and,
   when ADD_TO_TABLE, to the flat ref table in REF_INFO as well.  */

static void
df_install_ref (df_ref this_ref,
		struct df_reg_info *reg_info,
		struct df_ref_info *ref_info,
		bool add_to_table)
{
  unsigned int regno = DF_REF_REGNO (this_ref);
  /* Add the ref to the reg_{def,use,eq_use} chain.  */
  df_ref head = reg_info->reg_chain;

  reg_info->reg_chain = this_ref;
  reg_info->n_refs++;

  /* Maintain the per-hard-reg liveness counter for refs so flagged.  */
  if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
    {
      gcc_assert (regno < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[regno]++;
    }

  /* THIS_REF must not already be linked into any chain.  */
  gcc_checking_assert (DF_REF_NEXT_REG (this_ref) == NULL
		       && DF_REF_PREV_REG (this_ref) == NULL);

  /* Splice THIS_REF in at the head of the doubly-linked reg chain.  */
  DF_REF_NEXT_REG (this_ref) = head;

  /* We cannot actually link to the head of the chain.  */
  DF_REF_PREV_REG (this_ref) = NULL;

  if (head)
    DF_REF_PREV_REG (head) = this_ref;

  if (add_to_table)
    {
      gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
      df_check_and_grow_ref_info (ref_info, 1);
      DF_REF_ID (this_ref) = ref_info->table_size;
      /* Add the ref to the big array of defs.  */
      ref_info->refs[ref_info->table_size] = this_ref;
      ref_info->table_size++;
    }
  else
    /* -1 marks a ref with no slot in the flat table.  */
    DF_REF_ID (this_ref) = -1;

  ref_info->total_size++;
}
/* This function takes one of the groups of refs (defs, uses or
   eq_uses) and installs the entire group into the insn.  It also adds
   each of these refs into the appropriate chains.  Returns a freshly
   allocated NULL-terminated array of the refs, or df_null_ref_rec when
   OLD_VEC is empty.  */

static df_ref *
df_install_refs (basic_block bb,
		 VEC(df_ref,stack)* old_vec,
		 struct df_reg_info **reg_info,
		 struct df_ref_info *ref_info,
		 bool is_notes)
{
  unsigned int count;

  count = VEC_length (df_ref, old_vec);
  if (count)
    {
      df_ref *new_vec = XNEWVEC (df_ref, count + 1);
      bool add_to_table;
      df_ref this_ref;
      unsigned int ix;

      /* Installing unordered refs demotes any BY_REG/BY_INSN ordering
	 claim on the table to the corresponding UNORDERED state; notes
	 are only tabled in the *_WITH_NOTES orderings.  */
      switch (ref_info->ref_order)
	{
	case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	case DF_REF_ORDER_BY_REG_WITH_NOTES:
	case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	  ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
	  add_to_table = true;
	  break;
	case DF_REF_ORDER_UNORDERED:
	case DF_REF_ORDER_BY_REG:
	case DF_REF_ORDER_BY_INSN:
	  ref_info->ref_order = DF_REF_ORDER_UNORDERED;
	  add_to_table = !is_notes;
	  break;
	default:
	  add_to_table = false;
	  break;
	}

      /* Do not add if ref is not in the right blocks.  */
      if (add_to_table && df->analyze_subset)
	add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

      FOR_EACH_VEC_ELT (df_ref, old_vec, ix, this_ref)
	{
	  new_vec[ix] = this_ref;
	  df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
			  ref_info, add_to_table);
	}

      /* The returned array is NULL-terminated.  */
      new_vec[count] = NULL;
      return new_vec;
    }
  else
    return df_null_ref_rec;
}
2657 /* This function takes the mws installs the entire group into the
2658 insn. */
2660 static struct df_mw_hardreg **
2661 df_install_mws (VEC(df_mw_hardreg_ptr,stack) *old_vec)
2663 unsigned int count;
2665 count = VEC_length (df_mw_hardreg_ptr, old_vec);
2666 if (count)
2668 struct df_mw_hardreg **new_vec
2669 = XNEWVEC (struct df_mw_hardreg*, count + 1);
2670 memcpy (new_vec, VEC_address (df_mw_hardreg_ptr, old_vec),
2671 sizeof (struct df_mw_hardreg*) * count);
2672 new_vec[count] = NULL;
2673 return new_vec;
2675 else
2676 return df_null_mw_rec;
/* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
   chains and update other necessary information.  When INSN is
   non-null the refs are attached to its insn record; otherwise they
   become BB's artificial defs/uses.  */

static void
df_refs_add_to_chains (struct df_collection_rec *collection_rec,
		       basic_block bb, rtx insn)
{
  if (insn)
    {
      struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
      /* If there is a vector in the collection rec, add it to the
	 insn.  A null rec is a signal that the caller will handle the
	 chain specially.  */
      if (collection_rec->def_vec)
	{
	  df_scan_free_ref_vec (insn_rec->defs);
	  insn_rec->defs
	    = df_install_refs (bb, collection_rec->def_vec,
			       df->def_regs,
			       &df->def_info, false);
	}
      if (collection_rec->use_vec)
	{
	  df_scan_free_ref_vec (insn_rec->uses);
	  insn_rec->uses
	    = df_install_refs (bb, collection_rec->use_vec,
			       df->use_regs,
			       &df->use_info, false);
	}
      if (collection_rec->eq_use_vec)
	{
	  /* eq_uses come from notes, hence is_notes == true and the
	     shared use_info table.  */
	  df_scan_free_ref_vec (insn_rec->eq_uses);
	  insn_rec->eq_uses
	    = df_install_refs (bb, collection_rec->eq_use_vec,
			       df->eq_use_regs,
			       &df->use_info, true);
	}
      if (collection_rec->mw_vec)
	{
	  df_scan_free_mws_vec (insn_rec->mw_hardregs);
	  insn_rec->mw_hardregs
	    = df_install_mws (collection_rec->mw_vec);
	}
    }
  else
    {
      /* No insn: these are the block's artificial refs.  */
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);

      df_scan_free_ref_vec (bb_info->artificial_defs);
      bb_info->artificial_defs
	= df_install_refs (bb, collection_rec->def_vec,
			   df->def_regs,
			   &df->def_info, false);
      df_scan_free_ref_vec (bb_info->artificial_uses);
      bb_info->artificial_uses
	= df_install_refs (bb, collection_rec->use_vec,
			   df->use_regs,
			   &df->use_info, false);
    }
}
/* Allocate a ref of class CL from the matching pool and initialize its
   fields.  The new ref is pushed onto the appropriate vector of
   COLLECTION_REC if one is given, otherwise installed incrementally.  */

static df_ref
df_ref_create_structure (enum df_ref_class cl,
			 struct df_collection_rec *collection_rec,
			 rtx reg, rtx *loc,
			 basic_block bb, struct df_insn_info *info,
			 enum df_ref_type ref_type,
			 int ref_flags)
{
  df_ref this_ref = NULL;
  int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  /* Each ref class has its own allocation pool; only regular refs
     carry a LOC, only artificial refs carry a BB.  */
  switch (cl)
    {
    case DF_REF_BASE:
      this_ref = (df_ref) pool_alloc (problem_data->ref_base_pool);
      gcc_checking_assert (loc == NULL);
      break;

    case DF_REF_ARTIFICIAL:
      this_ref = (df_ref) pool_alloc (problem_data->ref_artificial_pool);
      this_ref->artificial_ref.bb = bb;
      gcc_checking_assert (loc == NULL);
      break;

    case DF_REF_REGULAR:
      this_ref = (df_ref) pool_alloc (problem_data->ref_regular_pool);
      this_ref->regular_ref.loc = loc;
      gcc_checking_assert (loc);
      break;
    }

  DF_REF_CLASS (this_ref) = cl;
  DF_REF_ID (this_ref) = -1;
  DF_REF_REG (this_ref) = reg;
  DF_REF_REGNO (this_ref) = regno;
  DF_REF_TYPE (this_ref) = ref_type;
  DF_REF_INSN_INFO (this_ref) = info;
  DF_REF_CHAIN (this_ref) = NULL;
  DF_REF_FLAGS (this_ref) = ref_flags;
  DF_REF_NEXT_REG (this_ref) = NULL;
  DF_REF_PREV_REG (this_ref) = NULL;
  DF_REF_ORDER (this_ref) = df->ref_order++;

  /* We need to clear this bit because fwprop, and in the future
     possibly other optimizations sometimes create new refs using ond
     refs as the model.  */
  DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);

  /* See if this ref needs to have DF_HARD_REG_LIVE bit set.  */
  if (regno < FIRST_PSEUDO_REGISTER
      && !DF_REF_IS_ARTIFICIAL (this_ref)
      && !DEBUG_INSN_P (DF_REF_INSN (this_ref)))
    {
      if (DF_REF_REG_DEF_P (this_ref))
	{
	  if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
	    DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
	}
      /* Eliminable frame/arg pointer uses do not keep the hard reg
	 live; every other hard-reg use does.  */
      else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
		 && (regno == FRAME_POINTER_REGNUM
		     || regno == ARG_POINTER_REGNUM)))
	DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
    }

  if (collection_rec)
    {
      /* Route the new ref to the def, eq_use (note use) or plain use
	 vector of the collection record.  */
      if (DF_REF_REG_DEF_P (this_ref))
	VEC_safe_push (df_ref, stack, collection_rec->def_vec, this_ref);
      else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
	VEC_safe_push (df_ref, stack, collection_rec->eq_use_vec, this_ref);
      else
	VEC_safe_push (df_ref, stack, collection_rec->use_vec, this_ref);
    }
  else
    df_install_ref_incremental (this_ref);

  return this_ref;
}
/* Create new references of type DF_REF_TYPE for each part of register REG
   at address LOC within INSN of BB.  A multiword hard register gets one
   ref per covered hard reg plus a df_mw_hardreg group record.  */

static void
df_ref_record (enum df_ref_class cl,
	       struct df_collection_rec *collection_rec,
	       rtx reg, rtx *loc,
	       basic_block bb, struct df_insn_info *insn_info,
	       enum df_ref_type ref_type,
	       int ref_flags)
{
  unsigned int regno;

  gcc_checking_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);

  regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      struct df_mw_hardreg *hardreg = NULL;
      struct df_scan_problem_data *problem_data
	= (struct df_scan_problem_data *) df_scan->problem_data;
      unsigned int i;
      unsigned int endregno;
      df_ref ref;

      /* For a SUBREG, narrow [regno, endregno) to just the hard regs
	 the subreg actually touches.  */
      if (GET_CODE (reg) == SUBREG)
	{
	  regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
					SUBREG_BYTE (reg), GET_MODE (reg));
	  endregno = regno + subreg_nregs (reg);
	}
      else
	endregno = END_HARD_REGNO (reg);

      /* If this is a multiword hardreg, we create some extra
	 datastructures that will enable us to easily build REG_DEAD
	 and REG_UNUSED notes.  */
      if (collection_rec
	  && (endregno != regno + 1) && insn_info)
	{
	  /* Sets to a subreg of a multiword register are partial.
	     Sets to a non-subreg of a multiword register are not.  */
	  if (GET_CODE (reg) == SUBREG)
	    ref_flags |= DF_REF_PARTIAL;
	  ref_flags |= DF_REF_MW_HARDREG;

	  hardreg = (struct df_mw_hardreg *) pool_alloc (problem_data->mw_reg_pool);
	  hardreg->type = ref_type;
	  hardreg->flags = ref_flags;
	  hardreg->mw_reg = reg;
	  hardreg->start_regno = regno;
	  hardreg->end_regno = endregno - 1;
	  hardreg->mw_order = df->ref_order++;
	  VEC_safe_push (df_mw_hardreg_ptr, stack, collection_rec->mw_vec,
			 hardreg);
	}

      /* One ref per hard register covered.  */
      for (i = regno; i < endregno; i++)
	{
	  ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
					 bb, insn_info, ref_type, ref_flags);

	  gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
	}
    }
  else
    {
      /* Pseudo register: a single ref suffices.  */
      df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
			       ref_type, ref_flags);
    }
}
2900 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
2901 covered by the outer mode is smaller than that covered by the inner mode,
2902 is a read-modify-write operation.
2903 This function returns true iff the SUBREG X is such a SUBREG. */
2905 bool
2906 df_read_modify_subreg_p (rtx x)
2908 unsigned int isize, osize;
2909 if (GET_CODE (x) != SUBREG)
2910 return false;
2911 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2912 osize = GET_MODE_SIZE (GET_MODE (x));
2913 return isize > osize
2914 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
/* Process all the registers defined in the rtx pointed by LOC.
   Autoincrement/decrement definitions will be picked up by df_uses_record.
   Any change here has to be matched in df_find_hard_reg_defs_1.  */

static void
df_def_record_1 (struct df_collection_rec *collection_rec,
		 rtx *loc, basic_block bb, struct df_insn_info *insn_info,
		 int flags)
{
  rtx dst = *loc;

  /* It is legal to have a set destination be a parallel.  */
  if (GET_CODE (dst) == PARALLEL)
    {
      int i;
      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
	{
	  rtx temp = XVECEXP (dst, 0, i);
	  gcc_assert (GET_CODE (temp) == EXPR_LIST);
	  df_def_record_1 (collection_rec, &XEXP (temp, 0),
			   bb, insn_info, flags);
	}
      return;
    }

  /* Strip STRICT_LOW_PART / ZERO_EXTRACT wrappers, accumulating the
     partial-write flags they imply.  */
  if (GET_CODE (dst) == STRICT_LOW_PART)
    {
      flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;

      loc = &XEXP (dst, 0);
      dst = *loc;
    }

  if (GET_CODE (dst) == ZERO_EXTRACT)
    {
      flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;

      loc = &XEXP (dst, 0);
      dst = *loc;
    }

  /* At this point if we do not have a reg or a subreg, just return.  */
  if (REG_P (dst))
    {
      df_ref_record (DF_REF_REGULAR, collection_rec,
		     dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);

      /* We want to keep sp alive everywhere - by making all
	 writes to sp also use of sp.  */
      if (REGNO (dst) == STACK_POINTER_REGNUM)
	df_ref_record (DF_REF_BASE, collection_rec,
		       dst, NULL, bb, insn_info, DF_REF_REG_USE, flags);
    }
  else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
    {
      if (df_read_modify_subreg_p (dst))
	flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;

      flags |= DF_REF_SUBREG;

      df_ref_record (DF_REF_REGULAR, collection_rec,
		     dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
    }
}
/* Process all the registers defined in the pattern rtx, X.  Any change
   here has to be matched in df_find_hard_reg_defs.  */

static void
df_defs_record (struct df_collection_rec *collection_rec,
		rtx x, basic_block bb, struct df_insn_info *insn_info,
		int flags)
{
  RTX_CODE code = GET_CODE (x);
  int i;

  switch (code)
    {
    case SET:
      df_def_record_1 (collection_rec, &SET_DEST (x), bb, insn_info, flags);
      break;

    case CLOBBER:
      flags |= DF_REF_MUST_CLOBBER;
      df_def_record_1 (collection_rec, &XEXP (x, 0), bb, insn_info, flags);
      break;

    case COND_EXEC:
      /* Defs inside a COND_EXEC only happen when the condition holds.  */
      df_defs_record (collection_rec, COND_EXEC_CODE (x),
		      bb, insn_info, DF_REF_CONDITIONAL);
      break;

    case PARALLEL:
      for (i = 0; i < XVECLEN (x, 0); i++)
	df_defs_record (collection_rec, XVECEXP (x, 0, i),
			bb, insn_info, flags);
      break;
    default:
      /* No DEFs to record in other cases */
      break;
    }
}
/* Set bits in *DEFS for hard registers found in the rtx DST, which is the
   destination of a set or clobber.  This has to match the logic in
   df_def_record_1 (original comment said "df_defs_record_1", which does
   not exist).  */

static void
df_find_hard_reg_defs_1 (rtx dst, HARD_REG_SET *defs)
{
  /* It is legal to have a set destination be a parallel.  */
  if (GET_CODE (dst) == PARALLEL)
    {
      int i;
      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
	{
	  rtx temp = XVECEXP (dst, 0, i);
	  gcc_assert (GET_CODE (temp) == EXPR_LIST);
	  df_find_hard_reg_defs_1 (XEXP (temp, 0), defs);
	}
      return;
    }

  /* Strip the wrappers handled by df_def_record_1; only the underlying
     register matters here.  */
  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    dst = XEXP (dst, 0);

  /* At this point if we do not have a reg or a subreg, just return.  */
  if (REG_P (dst) && HARD_REGISTER_P (dst))
    SET_HARD_REG_BIT (*defs, REGNO (dst));
  else if (GET_CODE (dst) == SUBREG
	   && REG_P (SUBREG_REG (dst)) && HARD_REGISTER_P (dst))
    SET_HARD_REG_BIT (*defs, REGNO (SUBREG_REG (dst)));
}
3056 /* Set bits in *DEFS for hard registers defined in the pattern X. This
3057 has to match the logic in df_defs_record. */
3059 static void
3060 df_find_hard_reg_defs (rtx x, HARD_REG_SET *defs)
3062 RTX_CODE code = GET_CODE (x);
3063 int i;
3065 switch (code)
3067 case SET:
3068 df_find_hard_reg_defs_1 (SET_DEST (x), defs);
3069 break;
3071 case CLOBBER:
3072 df_find_hard_reg_defs_1 (XEXP (x, 0), defs);
3073 break;
3075 case COND_EXEC:
3076 df_find_hard_reg_defs (COND_EXEC_CODE (x), defs);
3077 break;
3079 case PARALLEL:
3080 for (i = 0; i < XVECLEN (x, 0); i++)
3081 df_find_hard_reg_defs (XVECEXP (x, 0, i), defs);
3082 break;
3083 default:
3084 /* No DEFs to record in other cases */
3085 break;
/* Process all the registers used in the rtx at address LOC, creating
   use refs (and, for auto-modify addresses, def refs) of kind REF_TYPE
   with FLAGS for INSN_INFO in BB.  Recurses over the whole expression;
   the `retry' label implements hand-rolled tail recursion on operand 0.  */

static void
df_uses_record (struct df_collection_rec *collection_rec,
                rtx *loc, enum df_ref_type ref_type,
		basic_block bb, struct df_insn_info *insn_info,
		int flags)
{
  RTX_CODE code;
  rtx x;

 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    /* Leaf codes with no register uses.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (MEM_P (XEXP (x, 0)))
	df_uses_record (collection_rec,
			&XEXP (XEXP (x, 0), 0),
			DF_REF_REG_MEM_STORE,
		        bb, insn_info,
			flags);

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case MEM:
      /* Address registers are uses; DF_REF_IN_NOTE is the only flag
	 propagated into the address.  */
      df_uses_record (collection_rec,
		      &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
		      bb, insn_info, flags & DF_REF_IN_NOTE);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */
      flags |= DF_REF_PARTIAL;
      /* In case the SUBREG is not of a REG, do not optimize.  */
      if (!REG_P (SUBREG_REG (x)))
	{
	  loc = &SUBREG_REG (x);
	  df_uses_record (collection_rec, loc, ref_type, bb, insn_info, flags);
	  return;
	}
      /* ... Fall through ...  */

    case REG:
      df_ref_record (DF_REF_REGULAR, collection_rec,
		     x, loc, bb, insn_info,
		     ref_type, flags);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      {
	df_uses_record (collection_rec,
			&XEXP (x, 1), ref_type, bb, insn_info, flags);
	df_uses_record (collection_rec,
			&XEXP (x, 2), ref_type, bb, insn_info, flags);

	/* If the parameters to the zero or sign extract are
	   constants, strip them off and recurse, otherwise there is
	   no information that we can gain from this operation.  */
	if (code == ZERO_EXTRACT)
	  flags |= DF_REF_ZERO_EXTRACT;
	else
	  flags |= DF_REF_SIGN_EXTRACT;

	df_uses_record (collection_rec,
			&XEXP (x, 0), ref_type, bb, insn_info, flags);
	return;
      }
      break;

    case SET:
      {
	rtx dst = SET_DEST (x);
	gcc_assert (!(flags & DF_REF_IN_NOTE));
	df_uses_record (collection_rec,
			&SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags);

	/* The destination is not a use, except for the cases below
	   where writing it also reads part of it.  */
	switch (GET_CODE (dst))
	  {
	    case SUBREG:
	      if (df_read_modify_subreg_p (dst))
		{
		  df_uses_record (collection_rec, &SUBREG_REG (dst),
				  DF_REF_REG_USE, bb, insn_info,
				  flags | DF_REF_READ_WRITE | DF_REF_SUBREG);
		  break;
		}
	      /* Fall through.  */
	    case REG:
	    case PARALLEL:
	    case SCRATCH:
	    case PC:
	    case CC0:
		break;
	    case MEM:
	      df_uses_record (collection_rec, &XEXP (dst, 0),
			      DF_REF_REG_MEM_STORE, bb, insn_info, flags);
	      break;
	    case STRICT_LOW_PART:
	      {
		rtx *temp = &XEXP (dst, 0);
		/* A strict_low_part uses the whole REG and not just the
		 SUBREG.  */
		dst = XEXP (dst, 0);
		df_uses_record (collection_rec,
				(GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
				DF_REF_REG_USE, bb, insn_info,
				DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART);
	      }
	      break;
	    case ZERO_EXTRACT:
	      {
		df_uses_record (collection_rec, &XEXP (dst, 1),
				DF_REF_REG_USE, bb, insn_info, flags);
		df_uses_record (collection_rec, &XEXP (dst, 2),
				DF_REF_REG_USE, bb, insn_info, flags);
		/* A zero_extract of a MEM is a plain use of the address;
		   of a REG it is a read-modify-write of that reg.  */
		if (GET_CODE (XEXP (dst,0)) == MEM)
		  df_uses_record (collection_rec, &XEXP (dst, 0),
				  DF_REF_REG_USE, bb, insn_info,
				  flags);
		else
		  df_uses_record (collection_rec, &XEXP (dst, 0),
				  DF_REF_REG_USE, bb, insn_info,
				  DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT);
	      }
	      break;

	    default:
	      gcc_unreachable ();
	  }
	return;
      }

    case RETURN:
    case SIMPLE_RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
	/* Traditional and volatile asm instructions must be
	   considered to use and clobber all hard registers, all
	   pseudo-registers and all of memory.  So must TRAP_IF and
	   UNSPEC_VOLATILE operations.

	   Consider for instance a volatile asm that changes the fpu
	   rounding mode.  An insn should not be moved across this
	   even if it only uses pseudo-regs because it might give an
	   incorrectly rounded result.

	   However, flow.c's liveness computation did *not* do this,
	   giving the reasoning as " ?!? Unfortunately, marking all
	   hard registers as live causes massive problems for the
	   register allocator and marking all pseudos as live creates
	   mountains of uninitialized variable warnings."

	   In order to maintain the status quo with regard to liveness
	   and uses, we do what flow.c did and just mark any regs we
	   can find in ASM_OPERANDS as used.  In global asm insns are
	   scanned and regs_asm_clobbered is filled out.

	   For all ASM_OPERANDS, we must traverse the vector of input
	   operands.  We can not just fall through here since then we
	   would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
	   which do not indicate traditional asms unlike their normal
	   usage.  */
	if (code == ASM_OPERANDS)
	  {
	    int j;

	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
	      df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
			      DF_REF_REG_USE, bb, insn_info, flags);
	    return;
	  }
	break;
      }

    case VAR_LOCATION:
      df_uses_record (collection_rec,
		      &PAT_VAR_LOCATION_LOC (x),
		      DF_REF_REG_USE, bb, insn_info, flags);
      return;

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      gcc_assert (!DEBUG_INSN_P (insn_info->insn));
      /* Catch the def of the register being modified.  */
      df_ref_record (DF_REF_REGULAR, collection_rec, XEXP (x, 0), &XEXP (x, 0),
		     bb, insn_info,
		     DF_REF_REG_DEF,
		     flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);

      /* ... Fall through to handle uses ...  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		loc = &XEXP (x, 0);
		goto retry;
	      }
	    df_uses_record (collection_rec, &XEXP (x, i), ref_type,
			    bb, insn_info, flags);
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      df_uses_record (collection_rec,
			      &XVECEXP (x, i, j), ref_type,
			      bb, insn_info, flags);
	  }
      }
  }

  return;
}
/* For all DF_REF_CONDITIONAL defs, add a corresponding use: a
   conditional def may leave the old value intact, so the register is
   also implicitly read.  */

static void
df_get_conditional_uses (struct df_collection_rec *collection_rec)
{
  unsigned int ix;
  df_ref ref;

  FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, ix, ref)
    {
      if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
	{
	  df_ref use;

	  /* Clone the def as a use, dropping the CONDITIONAL flag.  */
	  use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
					 DF_REF_LOC (ref), DF_REF_BB (ref),
					 DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
					 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
	  DF_REF_REGNO (use) = DF_REF_REGNO (ref);
	}
    }
}
/* Get call's extra defs and uses (track caller-saved registers).  */

static void
df_get_call_refs (struct df_collection_rec *collection_rec,
                  basic_block bb,
                  struct df_insn_info *insn_info,
                  int flags)
{
  rtx note;
  bool is_sibling_call;
  unsigned int i;
  HARD_REG_SET defs_generated;

  /* Hard regs the call pattern itself already defines; no implicit
     clobber refs are created for those.  */
  CLEAR_HARD_REG_SET (defs_generated);
  df_find_hard_reg_defs (PATTERN (insn_info->insn), &defs_generated);
  is_sibling_call = SIBLING_CALL_P (insn_info->insn);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (i == STACK_POINTER_REGNUM)
	/* The stack ptr is used (honorarily) by a CALL insn.  */
	df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
		       NULL, bb, insn_info, DF_REF_REG_USE,
		       DF_REF_CALL_STACK_USAGE | flags);
      else if (global_regs[i])
	{
	  /* Calls to const functions cannot access any global registers and
	     calls to pure functions cannot set them.  All other calls may
	     reference any of the global registers, so they are recorded as
	     used.  */
	  if (!RTL_CONST_CALL_P (insn_info->insn))
	    {
	      df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
			     NULL, bb, insn_info, DF_REF_REG_USE, flags);
	      if (!RTL_PURE_CALL_P (insn_info->insn))
		df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
			       NULL, bb, insn_info, DF_REF_REG_DEF, flags);
	    }
	}
      else if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i)
	       /* no clobbers for regs that are the result of the call */
	       && !TEST_HARD_REG_BIT (defs_generated, i)
	       && (!is_sibling_call
		   || !bitmap_bit_p (df->exit_block_uses, i)
		   || refers_to_regno_p (i, i+1,
					 crtl->return_rtx, NULL)))
	df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
		       NULL, bb, insn_info, DF_REF_REG_DEF,
		       DF_REF_MAY_CLOBBER | flags);
    }

  /* Record the registers used to pass arguments, and explicitly
     noted as clobbered.  */
  for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
       note = XEXP (note, 1))
    {
      if (GET_CODE (XEXP (note, 0)) == USE)
        df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
			DF_REF_REG_USE, bb, insn_info, flags);
      else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
	{
	  if (REG_P (XEXP (XEXP (note, 0), 0)))
	    {
	      unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
	      /* Skip clobbers of regs the pattern already defines.  */
	      if (!TEST_HARD_REG_BIT (defs_generated, regno))
		df_defs_record (collection_rec, XEXP (note, 0), bb,
				insn_info, flags);
	    }
	  else
	    df_uses_record (collection_rec, &XEXP (note, 0),
		            DF_REF_REG_USE, bb, insn_info, flags);
	}
    }

  return;
}
/* Collect all refs in the INSN.  This function is free of any
   side-effect - it will create and return a lists of df_ref's in the
   COLLECTION_REC without putting those refs into existing ref chains
   and reg chains.  */

static void
df_insn_refs_collect (struct df_collection_rec *collection_rec,
		      basic_block bb, struct df_insn_info *insn_info)
{
  rtx note;
  bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);

  /* Clear out the collection record.  */
  VEC_truncate (df_ref, collection_rec->def_vec, 0);
  VEC_truncate (df_ref, collection_rec->use_vec, 0);
  VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
  VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);

  /* Process REG_EQUIV/REG_EQUAL notes.  */
  for (note = REG_NOTES (insn_info->insn); note;
       note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EQUIV:
        case REG_EQUAL:
	  /* Uses coming from notes are flagged DF_REF_IN_NOTE and land
	     in the eq_use vector.  */
          df_uses_record (collection_rec,
                          &XEXP (note, 0), DF_REF_REG_USE,
                          bb, insn_info, DF_REF_IN_NOTE);
          break;
        case REG_NON_LOCAL_GOTO:
          /* The frame ptr is used by a non-local goto.  */
          df_ref_record (DF_REF_BASE, collection_rec,
                         regno_reg_rtx[FRAME_POINTER_REGNUM],
                         NULL, bb, insn_info,
                         DF_REF_REG_USE, 0);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
          df_ref_record (DF_REF_BASE, collection_rec,
                         regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
                         NULL, bb, insn_info,
                         DF_REF_REG_USE, 0);
#endif
          break;
        default:
          break;
        }
    }

  /* For CALL_INSNs, first record DF_REF_BASE register defs, as well as
     uses from CALL_INSN_FUNCTION_USAGE.  */
  if (CALL_P (insn_info->insn))
    df_get_call_refs (collection_rec, bb, insn_info,
		      (is_cond_exec) ? DF_REF_CONDITIONAL : 0);

  /* Record other defs.  These should be mostly for DF_REF_REGULAR, so
     that a qsort on the defs is unnecessary in most cases.  */
  df_defs_record (collection_rec,
		  PATTERN (insn_info->insn), bb, insn_info, 0);

  /* Record the register uses.  */
  df_uses_record (collection_rec,
		  &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0);

  /* DF_REF_CONDITIONAL needs corresponding USES.  */
  if (is_cond_exec)
    df_get_conditional_uses (collection_rec);

  /* Sort and deduplicate before handing back to the caller.  */
  df_canonize_collection_rec (collection_rec);
}
3519 /* Recompute the luids for the insns in BB. */
3521 void
3522 df_recompute_luids (basic_block bb)
3524 rtx insn;
3525 int luid = 0;
3527 df_grow_insn_info ();
3529 /* Scan the block an insn at a time from beginning to end. */
3530 FOR_BB_INSNS (bb, insn)
3532 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3533 /* Inserting labels does not always trigger the incremental
3534 rescanning. */
3535 if (!insn_info)
3537 gcc_assert (!INSN_P (insn));
3538 insn_info = df_insn_create_insn_record (insn);
3541 DF_INSN_INFO_LUID (insn_info) = luid;
3542 if (INSN_P (insn))
3543 luid++;
3548 /* Collect all artificial refs at the block level for BB and add them
3549 to COLLECTION_REC. */
3551 static void
3552 df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
3554 VEC_truncate (df_ref, collection_rec->def_vec, 0);
3555 VEC_truncate (df_ref, collection_rec->use_vec, 0);
3556 VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
3557 VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);
   /* The entry and exit blocks carry only their precomputed def/use
      sets; nothing else below applies to them.  */
3559 if (bb->index == ENTRY_BLOCK)
3561 df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
3562 return;
3564 else if (bb->index == EXIT_BLOCK)
3566 df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
3567 return;
3570 #ifdef EH_RETURN_DATA_REGNO
3571 if (bb_has_eh_pred (bb))
3573 unsigned int i;
3574 /* Mark the registers that will contain data for the handler. */
3575 for (i = 0; ; ++i)
3577 unsigned regno = EH_RETURN_DATA_REGNO (i);
3578 if (regno == INVALID_REGNUM)
3579 break;
   /* DF_REF_AT_TOP marks these refs as being at the top of the block.  */
3580 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3581 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3584 #endif
3586 /* Add the hard_frame_pointer if this block is the target of a
3587 non-local goto. */
3588 if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
3589 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
3590 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3592 /* Add the artificial uses. */
3593 if (bb->index >= NUM_FIXED_BLOCKS)
3595 bitmap_iterator bi;
3596 unsigned int regno;
   /* EH-landing blocks use the larger eh_block_artificial_uses set.  */
3597 bitmap au = bb_has_eh_pred (bb)
3598 ? &df->eh_block_artificial_uses
3599 : &df->regular_block_artificial_uses;
3601 EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
3603 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3604 bb, NULL, DF_REF_REG_USE, 0);
3608 df_canonize_collection_rec (collection_rec);
3612 /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS. */
3614 void
3615 df_bb_refs_record (int bb_index, bool scan_insns)
3617 basic_block bb = BASIC_BLOCK (bb_index);
3618 rtx insn;
3619 int luid = 0;
3620 struct df_collection_rec collection_rec;
3622 if (!df)
3623 return;
3625 df_grow_bb_info (df_scan);
3626 collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
3627 collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
3628 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
3629 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
3631 if (scan_insns)
3632 /* Scan the block an insn at a time from beginning to end. */
3633 FOR_BB_INSNS (bb, insn)
3635 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
   /* The insn must not already have a record — this function scans a
      block from scratch.  */
3636 gcc_assert (!insn_info);
3638 insn_info = df_insn_create_insn_record (insn);
3639 if (INSN_P (insn))
3641 /* Record refs within INSN. */
3642 DF_INSN_INFO_LUID (insn_info) = luid++;
3643 df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
3644 df_refs_add_to_chains (&collection_rec, bb, insn);
   /* Non-insns get the luid of the next real insn.  */
3646 DF_INSN_INFO_LUID (insn_info) = luid;
3649 /* Other block level artificial refs */
3650 df_bb_refs_collect (&collection_rec, bb);
3651 df_refs_add_to_chains (&collection_rec, bb, NULL);
3653 VEC_free (df_ref, stack, collection_rec.def_vec);
3654 VEC_free (df_ref, stack, collection_rec.use_vec);
3655 VEC_free (df_ref, stack, collection_rec.eq_use_vec);
3656 VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
3658 /* Now that the block has been processed, set the block as dirty so
3659 LR and LIVE will get it processed. */
3660 df_set_bb_dirty (bb);
3664 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3665 block. */
3667 static void
3668 df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
3670 #ifdef EH_USES
3671 unsigned int i;
3672 #endif
3674 bitmap_clear (regular_block_artificial_uses);
   /* After reload only the hard frame pointer (when still needed) is
      forced live in every block.  */
3676 if (reload_completed)
3678 if (frame_pointer_needed)
3679 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3681 else
3682 /* Before reload, there are a few registers that must be forced
3683 live everywhere -- which might not already be the case for
3684 blocks within infinite loops. */
3686 unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
3688 /* Any reference to any pseudo before reload is a potential
3689 reference of the frame pointer. */
3690 bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
3692 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3693 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3694 #endif
3696 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3697 /* Pseudos with argument area equivalences may require
3698 reloading via the argument pointer. */
3699 if (fixed_regs[ARG_POINTER_REGNUM])
3700 bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
3701 #endif
3703 /* Any constant, or pseudo with constant equivalences, may
3704 require reloading from memory using the pic register. */
3705 if (picreg != INVALID_REGNUM
3706 && fixed_regs[picreg])
3707 bitmap_set_bit (regular_block_artificial_uses, picreg);
3709 /* The all-important stack pointer must always be live. */
3710 bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
3712 #ifdef EH_USES
3713 /* EH_USES registers are used:
3714 1) at all insns that might throw (calls or with -fnon-call-exceptions
3715 trapping insns)
3716 2) in all EH edges
3717 3) to support backtraces and/or debugging, anywhere between their
3718 initialization and where the saved registers are restored
3719 from them, including the cases where we don't reach the epilogue
3720 (noreturn call or infinite loop). */
3721 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3722 if (EH_USES (i))
3723 bitmap_set_bit (regular_block_artificial_uses, i);
3724 #endif
3728 /* Get the artificial use set for an eh block. */
3730 static void
3731 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3733 bitmap_clear (eh_block_artificial_uses);
3735 /* The following code (down through the arg_pointer setting) APPEARS
3736 to be necessary because there is nothing that actually
3737 describes what the exception handling code may actually need
3738 to keep alive. */
   /* Before reload this set stays empty; everything in the if below is
      reload-only.  */
3739 if (reload_completed)
3741 if (frame_pointer_needed)
3743 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3744 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3745 bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3746 #endif
3748 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3749 if (fixed_regs[ARG_POINTER_REGNUM])
3750 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3751 #endif
3757 /*----------------------------------------------------------------------------
3758 Specialized hard register scanning functions.
3759 ----------------------------------------------------------------------------*/
3762 /* Mark a register in SET. Hard registers in large modes get all
3763 of their component registers set as well. */
3765 static void
3766 df_mark_reg (rtx reg, void *vset)
3768 bitmap set = (bitmap) vset;
3769 int regno = REGNO (reg);
3771 gcc_assert (GET_MODE (reg) != BLKmode);
3773 if (regno < FIRST_PSEUDO_REGISTER)
3775 int n = hard_regno_nregs[regno][GET_MODE (reg)];
3776 bitmap_set_range (set, regno, n);
3778 else
3779 bitmap_set_bit (set, regno);
3783 /* Set the bit for regs that are considered being defined at the entry. */
3785 static void
3786 df_get_entry_block_def_set (bitmap entry_block_defs)
3788 rtx r;
3789 int i;
3791 bitmap_clear (entry_block_defs);
   /* Incoming argument registers are defined on entry; presumably
      INCOMING_REGNO maps an outgoing argument register to its incoming
      counterpart — TODO confirm against target macro docs.  */
3793 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3794 if (FUNCTION_ARG_REGNO_P (i))
3795 bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
3797 /* The always important stack pointer. */
3798 bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
3800 /* Once the prologue has been generated, all of these registers
3801 should just show up in the first regular block. */
3802 if (HAVE_prologue && epilogue_completed)
3804 /* Defs for the callee saved registers are inserted so that the
3805 pushes have some defining location. */
3806 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3807 if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
3808 bitmap_set_bit (entry_block_defs, i);
   /* Register holding the incoming aggregate-return pointer, if any.  */
3811 r = targetm.calls.struct_value_rtx (current_function_decl, true);
3812 if (r && REG_P (r))
3813 bitmap_set_bit (entry_block_defs, REGNO (r));
3815 /* If the function has an incoming STATIC_CHAIN, it has to show up
3816 in the entry def set. */
3817 r = targetm.calls.static_chain (current_function_decl, true);
3818 if (r && REG_P (r))
3819 bitmap_set_bit (entry_block_defs, REGNO (r));
3821 if ((!reload_completed) || frame_pointer_needed)
3823 /* Any reference to any pseudo before reload is a potential
3824 reference of the frame pointer. */
3825 bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
3826 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3827 /* If they are different, also mark the hard frame pointer as live. */
3828 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3829 bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
3830 #endif
3833 /* These registers are live everywhere. */
3834 if (!reload_completed)
3836 #ifdef PIC_OFFSET_TABLE_REGNUM
3837 unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
3838 #endif
3840 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3841 /* Pseudos with argument area equivalences may require
3842 reloading via the argument pointer. */
3843 if (fixed_regs[ARG_POINTER_REGNUM])
3844 bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
3845 #endif
3847 #ifdef PIC_OFFSET_TABLE_REGNUM
3848 /* Any constant, or pseudo with constant equivalences, may
3849 require reloading from memory using the pic register. */
3850 if (picreg != INVALID_REGNUM
3851 && fixed_regs[picreg])
3852 bitmap_set_bit (entry_block_defs, picreg);
3853 #endif
3856 #ifdef INCOMING_RETURN_ADDR_RTX
3857 if (REG_P (INCOMING_RETURN_ADDR_RTX))
3858 bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
3859 #endif
   /* Finally let the target add any extra entry-block defs it knows
      about.  */
3861 targetm.extra_live_on_entry (entry_block_defs);
3865 /* Return the (conservative) set of hard registers that are defined on
3866 entry to the function.
3867 It uses df->entry_block_defs to determine which register
3868 reference to include. */
3870 static void
3871 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3872 bitmap entry_block_defs)
3874 unsigned int i;
3875 bitmap_iterator bi;
3877 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3879 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3880 ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0);
3883 df_canonize_collection_rec (collection_rec);
3887 /* Record the (conservative) set of hard registers that are defined on
3888 entry to the function. */
3890 static void
3891 df_record_entry_block_defs (bitmap entry_block_defs)
3893 struct df_collection_rec collection_rec;
3894 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3895 collection_rec.def_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
3896 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3898 /* Process bb_refs chain */
3899 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
3900 VEC_free (df_ref, stack, collection_rec.def_vec);
3904 /* Update the defs in the entry block. */
3906 void
3907 df_update_entry_block_defs (void)
3909 bitmap_head refs;
3910 bool changed = false;
3912 bitmap_initialize (&refs, &df_bitmap_obstack);
3913 df_get_entry_block_def_set (&refs);
3914 if (df->entry_block_defs)
3916 if (!bitmap_equal_p (df->entry_block_defs, &refs))
   /* The cached set is stale: drop the old artificial defs so they can
      be rebuilt below.  */
3918 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
3919 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3920 df_ref_chain_delete (bb_info->artificial_defs);
3921 bb_info->artificial_defs = NULL;
3922 changed = true;
3925 else
   /* NOTE(review): this arm asserts unreachability — df->entry_block_defs
      is expected to always exist here; the allocation below the
      gcc_unreachable is dead code.  */
3927 struct df_scan_problem_data *problem_data
3928 = (struct df_scan_problem_data *) df_scan->problem_data;
3929 gcc_unreachable ();
3930 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3931 changed = true;
3934 if (changed)
3936 df_record_entry_block_defs (&refs);
3937 bitmap_copy (df->entry_block_defs, &refs);
3938 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
3940 bitmap_clear (&refs);
3944 /* Set the bit for regs that are considered being used at the exit. */
3946 static void
3947 df_get_exit_block_use_set (bitmap exit_block_uses)
3949 unsigned int i;
3950 unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
3952 bitmap_clear (exit_block_uses);
3954 /* Stack pointer is always live at the exit. */
3955 bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
3957 /* Mark the frame pointer if needed at the end of the function.
3958 If we end up eliminating it, it will be removed from the live
3959 list of each basic block by reload. */
3961 if ((!reload_completed) || frame_pointer_needed)
3963 bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
3964 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3965 /* If they are different, also mark the hard frame pointer as live. */
3966 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3967 bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
3968 #endif
3971 /* Many architectures have a GP register even without flag_pic.
3972 Assume the pic register is not in use, or will be handled by
3973 other means, if it is not fixed. */
3974 if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3975 && picreg != INVALID_REGNUM
3976 && fixed_regs[picreg])
3977 bitmap_set_bit (exit_block_uses, picreg);
3979 /* Mark all global registers, and all registers used by the
3980 epilogue as being live at the end of the function since they
3981 may be referenced by our caller. */
3982 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3983 if (global_regs[i] || EPILOGUE_USES (i))
3984 bitmap_set_bit (exit_block_uses, i);
3986 if (HAVE_epilogue && epilogue_completed)
3988 /* Mark all call-saved registers that we actually used. */
3989 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3990 if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
3991 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3992 bitmap_set_bit (exit_block_uses, i);
3995 #ifdef EH_RETURN_DATA_REGNO
3996 /* Mark the registers that will contain data for the handler. */
3997 if (reload_completed && crtl->calls_eh_return)
3998 for (i = 0; ; ++i)
4000 unsigned regno = EH_RETURN_DATA_REGNO (i);
4001 if (regno == INVALID_REGNUM)
4002 break;
4003 bitmap_set_bit (exit_block_uses, regno);
4005 #endif
   /* While no epilogue exists yet, the EH-return machinery itself keeps
      these registers live at the exit.  */
4007 #ifdef EH_RETURN_STACKADJ_RTX
4008 if ((!HAVE_epilogue || ! epilogue_completed)
4009 && crtl->calls_eh_return)
4011 rtx tmp = EH_RETURN_STACKADJ_RTX;
4012 if (tmp && REG_P (tmp))
4013 df_mark_reg (tmp, exit_block_uses);
4015 #endif
4017 #ifdef EH_RETURN_HANDLER_RTX
4018 if ((!HAVE_epilogue || ! epilogue_completed)
4019 && crtl->calls_eh_return)
4021 rtx tmp = EH_RETURN_HANDLER_RTX;
4022 if (tmp && REG_P (tmp))
4023 df_mark_reg (tmp, exit_block_uses);
4025 #endif
4027 /* Mark function return value. */
4028 diddle_return_value (df_mark_reg, (void*) exit_block_uses);
4032 /* Return the refs of hard registers that are used in the exit block.
4033 It uses df->exit_block_uses to determine register to include. */
4035 static void
4036 df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
4038 unsigned int i;
4039 bitmap_iterator bi;
4041 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
4042 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
4043 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
4045 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4046 /* It is deliberate that this is not put in the exit block uses but
4047 I do not know why. */
4048 if (reload_completed
4049 && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
4050 && bb_has_eh_pred (EXIT_BLOCK_PTR)
4051 && fixed_regs[ARG_POINTER_REGNUM])
4052 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
4053 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
4054 #endif
4056 df_canonize_collection_rec (collection_rec);
4060 /* Record the set of hard registers that are used in the exit block.
4061 It uses df->exit_block_uses to determine which bit to include. */
4063 static void
4064 df_record_exit_block_uses (bitmap exit_block_uses)
4066 struct df_collection_rec collection_rec;
4067 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
4068 collection_rec.use_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
4070 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
4072 /* Process bb_refs chain */
4073 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
4074 VEC_free (df_ref, stack, collection_rec.use_vec);
4078 /* Update the uses in the exit block. */
4080 void
4081 df_update_exit_block_uses (void)
4083 bitmap_head refs;
4084 bool changed = false;
4086 bitmap_initialize (&refs, &df_bitmap_obstack);
4087 df_get_exit_block_use_set (&refs);
4088 if (df->exit_block_uses)
4090 if (!bitmap_equal_p (df->exit_block_uses, &refs))
   /* The cached set is stale: drop the old artificial uses so they can
      be rebuilt below.  */
4092 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
4093 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
4094 df_ref_chain_delete (bb_info->artificial_uses);
4095 bb_info->artificial_uses = NULL;
4096 changed = true;
4099 else
   /* NOTE(review): this arm asserts unreachability — df->exit_block_uses
      is expected to always exist here; the allocation below the
      gcc_unreachable is dead code.  */
4101 struct df_scan_problem_data *problem_data
4102 = (struct df_scan_problem_data *) df_scan->problem_data;
4103 gcc_unreachable ();
4104 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
4105 changed = true;
4108 if (changed)
4110 df_record_exit_block_uses (&refs);
4111 bitmap_copy (df->exit_block_uses,& refs);
4112 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
4114 bitmap_clear (&refs);
4117 static bool initialized = false;
4120 /* Initialize some platform specific structures. */
4122 void
4123 df_hard_reg_init (void)
4125 #ifdef ELIMINABLE_REGS
4126 int i;
4127 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
4128 #endif
4129 if (initialized)
4130 return;
4132 /* Record which registers will be eliminated. We use this in
4133 mark_used_regs. */
4134 CLEAR_HARD_REG_SET (elim_reg_set);
4136 #ifdef ELIMINABLE_REGS
4137 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
4138 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
4139 #else
4140 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
4141 #endif
4143 initialized = true;
4147 /* Recompute the parts of scanning that are based on regs_ever_live
4148 because something changed in that array. */
4150 void
4151 df_update_entry_exit_and_calls (void)
4153 basic_block bb;
4155 df_update_entry_block_defs ();
4156 df_update_exit_block_uses ();
4158 /* The call insns need to be rescanned because there may be changes
4159 in the set of registers clobbered across the call. */
4160 FOR_EACH_BB (bb)
4162 rtx insn;
4163 FOR_BB_INSNS (bb, insn)
4165 if (INSN_P (insn) && CALL_P (insn))
4166 df_insn_rescan (insn);
4172 /* Return true if hard REG is actually used in the some instruction.
4173 There are a fair number of conditions that affect the setting of
4174 this array. See the comment in df.h for df->hard_regs_live_count
4175 for the conditions that this array is set. */
4177 bool
4178 df_hard_reg_used_p (unsigned int reg)
4180 return df->hard_regs_live_count[reg] != 0;
4184 /* A count of the number of times REG is actually used in the some
4185 instruction. There are a fair number of conditions that affect the
4186 setting of this array. See the comment in df.h for
4187 df->hard_regs_live_count for the conditions that this array is
4188 set. */
4191 unsigned int
4192 df_hard_reg_used_count (unsigned int reg)
4194 return df->hard_regs_live_count[reg];
4198 /* Get the value of regs_ever_live[REGNO]. */
4200 bool
4201 df_regs_ever_live_p (unsigned int regno)
4203 return regs_ever_live[regno];
4207 /* Set regs_ever_live[REGNO] to VALUE. If this cause regs_ever_live
4208 to change, schedule that change for the next update. */
4210 void
4211 df_set_regs_ever_live (unsigned int regno, bool value)
4213 if (regs_ever_live[regno] == value)
4214 return;
4216 regs_ever_live[regno] = value;
4217 if (df)
4218 df->redo_entry_and_exit = true;
4222 /* Compute "regs_ever_live" information from the underlying df
4223 information. Set the vector to all false if RESET. */
4225 void
4226 df_compute_regs_ever_live (bool reset)
4228 unsigned int i;
4229 bool changed = df->redo_entry_and_exit;
4231 if (reset)
4232 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4234 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4235 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
4237 regs_ever_live[i] = true;
4238 changed = true;
4240 if (changed)
4241 df_update_entry_exit_and_calls ();
4242 df->redo_entry_and_exit = false;
4246 /*----------------------------------------------------------------------------
4247 Dataflow ref information verification functions.
4249 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
4250 df_reg_chain_verify_unmarked (refs)
4251 df_refs_verify (VEC(df_ref,stack)*, ref*, bool)
4252 df_mws_verify (mw*, mw*, bool)
4253 df_insn_refs_verify (collection_rec, bb, insn, bool)
4254 df_bb_refs_verify (bb, refs, bool)
4255 df_bb_verify (bb)
4256 df_exit_block_bitmap_verify (bool)
4257 df_entry_block_bitmap_verify (bool)
4258 df_scan_verify ()
4259 ----------------------------------------------------------------------------*/
4262 /* Mark all refs in the reg chain. Verify that all of the registers
4263 are in the correct chain. */
   /* Returns the number of refs on the chain.  Each ref is left marked;
      df_refs_verify later clears the marks.  */
4265 static unsigned int
4266 df_reg_chain_mark (df_ref refs, unsigned int regno,
4267 bool is_def, bool is_eq_use)
4269 unsigned int count = 0;
4270 df_ref ref;
4271 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
   /* A ref may appear on only one chain, so it must not be marked yet.  */
4273 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4275 /* If there are no def-use or use-def chains, make sure that all
4276 of the chains are clear. */
4277 if (!df_chain)
4278 gcc_assert (!DF_REF_CHAIN (ref));
4280 /* Check to make sure the ref is in the correct chain. */
4281 gcc_assert (DF_REF_REGNO (ref) == regno);
4282 if (is_def)
4283 gcc_assert (DF_REF_REG_DEF_P (ref));
4284 else
4285 gcc_assert (!DF_REF_REG_DEF_P (ref));
4287 if (is_eq_use)
4288 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
4289 else
4290 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
   /* The doubly-linked reg chain must be internally consistent.  */
4292 if (DF_REF_NEXT_REG (ref))
4293 gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
4294 count++;
4295 DF_REF_REG_MARK (ref);
4297 return count;
4301 /* Verify that all of the registers in the chain are unmarked. */
4303 static void
4304 df_reg_chain_verify_unmarked (df_ref refs)
4306 df_ref ref;
4307 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4308 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4312 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
   /* In abort mode the return value is meaningless (always false);
      mismatches die via gcc_assert instead.  */
4314 static bool
4315 df_refs_verify (VEC(df_ref,stack) *new_rec, df_ref *old_rec,
4316 bool abort_if_fail)
4318 unsigned int ix;
4319 df_ref new_ref;
4321 FOR_EACH_VEC_ELT (df_ref, new_rec, ix, new_ref)
4323 if (*old_rec == NULL || !df_ref_equal_p (new_ref, *old_rec))
4325 if (abort_if_fail)
4326 gcc_assert (0);
4327 else
4328 return false;
4331 /* Abort if fail is called from the function level verifier. If
4332 that is the context, mark this ref as seen. */
4333 if (abort_if_fail)
4335 gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
4336 DF_REF_REG_UNMARK (*old_rec);
4339 old_rec++;
   /* Both sequences must end together.  */
4342 if (abort_if_fail)
4343 gcc_assert (*old_rec == NULL);
4344 else
4345 return *old_rec == NULL;
4346 return false;
4350 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4352 static bool
4353 df_mws_verify (VEC(df_mw_hardreg_ptr,stack) *new_rec,
4354 struct df_mw_hardreg **old_rec,
4355 bool abort_if_fail)
4357 unsigned int ix;
4358 struct df_mw_hardreg *new_reg;
4360 FOR_EACH_VEC_ELT (df_mw_hardreg_ptr, new_rec, ix, new_reg)
4362 if (*old_rec == NULL || !df_mw_equal_p (new_reg, *old_rec))
4364 if (abort_if_fail)
4365 gcc_assert (0);
4366 else
4367 return false;
4369 old_rec++;
4372 if (abort_if_fail)
4373 gcc_assert (*old_rec == NULL);
4374 else
4375 return *old_rec == NULL;
4376 return false;
4380 /* Return true if the existing insn refs information is complete and
4381 correct. Otherwise (i.e. if there's any missing or extra refs)
4382 return false.
4384 If ABORT_IF_FAIL, leave the refs that are verified (already in the
4385 ref chain) as DF_REF_MARKED(). If it's false, then it's a per-insn
4386 verification mode instead of the whole function, so unmark
4387 everything.
4389 If ABORT_IF_FAIL is set, this function never returns false. */
4391 static bool
4392 df_insn_refs_verify (struct df_collection_rec *collection_rec,
4393 basic_block bb,
4394 rtx insn,
4395 bool abort_if_fail)
4397 bool ret1, ret2, ret3, ret4;
4398 unsigned int uid = INSN_UID (insn);
4399 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
   /* Recompute the refs from scratch, then compare them to the cached
      per-insn chains.  */
4401 df_insn_refs_collect (collection_rec, bb, insn_info);
4403 if (!DF_INSN_UID_DEFS (uid))
4405 /* The insn_rec was created but it was never filled out. */
4406 if (abort_if_fail)
4407 gcc_assert (0);
4408 else
4409 return false;
4412 /* Unfortunately we cannot opt out early if one of these is not
4413 right because the marks will not get cleared. */
4414 ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
4415 abort_if_fail);
4416 ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
4417 abort_if_fail);
4418 ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
4419 abort_if_fail);
4420 ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
4421 abort_if_fail);
4422 return (ret1 && ret2 && ret3 && ret4);
4426 /* Return true if all refs in the basic block are correct and complete.
4427 Due to df_ref_chain_verify, it will cause all refs
4428 that are verified to have DF_REF_MARK bit set. */
4430 static bool
4431 df_bb_verify (basic_block bb)
4433 rtx insn;
4434 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
4435 struct df_collection_rec collection_rec;
4437 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
4438 collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
4439 collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
4440 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
4441 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
4443 gcc_assert (bb_info);
4445 /* Scan the block, one insn at a time, in reverse order. */
4446 FOR_BB_INSNS_REVERSE (bb, insn)
4448 if (!INSN_P (insn))
4449 continue;
   /* ABORT_IF_FAIL is true: any mismatch dies inside the verifier.  */
4450 df_insn_refs_verify (&collection_rec, bb, insn, true);
4453 /* Do the artificial defs and uses. */
4454 df_bb_refs_collect (&collection_rec, bb);
4455 df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
4456 df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
4457 df_free_collection_rec (&collection_rec);
4459 return true;
4463 /* Returns true if the entry block has correct and complete df_ref set.
4464 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4466 static bool
4467 df_entry_block_bitmap_verify (bool abort_if_fail)
4469 bitmap_head entry_block_defs;
4470 bool is_eq;
4472 bitmap_initialize (&entry_block_defs, &df_bitmap_obstack);
4473 df_get_entry_block_def_set (&entry_block_defs);
4475 is_eq = bitmap_equal_p (&entry_block_defs, df->entry_block_defs);
4477 if (!is_eq && abort_if_fail)
4479 fprintf (stderr, "entry_block_defs = ");
4480 df_print_regset (stderr, &entry_block_defs);
4481 fprintf (stderr, "df->entry_block_defs = ");
4482 df_print_regset (stderr, df->entry_block_defs);
4483 gcc_assert (0);
4486 bitmap_clear (&entry_block_defs);
4488 return is_eq;
4492 /* Returns true if the exit block has correct and complete df_ref set.
4493 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4495 static bool
4496 df_exit_block_bitmap_verify (bool abort_if_fail)
4498 bitmap_head exit_block_uses;
4499 bool is_eq;
4501 bitmap_initialize (&exit_block_uses, &df_bitmap_obstack);
4502 df_get_exit_block_use_set (&exit_block_uses);
4504 is_eq = bitmap_equal_p (&exit_block_uses, df->exit_block_uses);
4506 if (!is_eq && abort_if_fail)
4508 fprintf (stderr, "exit_block_uses = ");
4509 df_print_regset (stderr, &exit_block_uses);
4510 fprintf (stderr, "df->exit_block_uses = ");
4511 df_print_regset (stderr, df->exit_block_uses);
4512 gcc_assert (0);
4515 bitmap_clear (&exit_block_uses);
4517 return is_eq;
4521 /* Return true if df_ref information for all insns in all blocks are
4522 correct and complete. */
4524 void
4525 df_scan_verify (void)
4527 unsigned int i;
4528 basic_block bb;
4529 bitmap_head regular_block_artificial_uses;
4530 bitmap_head eh_block_artificial_uses;
4532 if (!df)
4533 return;
4535 /* Verification is a 4 step process. */
4537 /* (1) All of the refs are marked by going through the reg chains. */
4538 for (i = 0; i < DF_REG_SIZE (df); i++)
4540 gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
4541 == DF_REG_DEF_COUNT(i));
4542 gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
4543 == DF_REG_USE_COUNT(i));
4544 gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
4545 == DF_REG_EQ_USE_COUNT(i));
4548 /* (2) There are various bitmaps whose value may change over the
4549 course of the compilation. This step recomputes them to make
4550 sure that they have not slipped out of date. */
4551 bitmap_initialize (&regular_block_artificial_uses, &df_bitmap_obstack);
4552 bitmap_initialize (&eh_block_artificial_uses, &df_bitmap_obstack);
4554 df_get_regular_block_artificial_uses (&regular_block_artificial_uses);
4555 df_get_eh_block_artificial_uses (&eh_block_artificial_uses);
4557 bitmap_ior_into (&eh_block_artificial_uses,
4558 &regular_block_artificial_uses);
4560 /* Check artificial_uses bitmaps didn't change. */
4561 gcc_assert (bitmap_equal_p (&regular_block_artificial_uses,
4562 &df->regular_block_artificial_uses));
4563 gcc_assert (bitmap_equal_p (&eh_block_artificial_uses,
4564 &df->eh_block_artificial_uses));
4566 bitmap_clear (&regular_block_artificial_uses);
4567 bitmap_clear (&eh_block_artificial_uses);
4569 /* Verify entry block and exit block. These only verify the bitmaps,
4570 the refs are verified in df_bb_verify. */
4571 df_entry_block_bitmap_verify (true);
4572 df_exit_block_bitmap_verify (true);
4574 /* (3) All of the insns in all of the blocks are traversed and the
4575 marks are cleared both in the artificial refs attached to the
4576 blocks and the real refs inside the insns. It is a failure to
4577 clear a mark that has not been set as this means that the ref in
4578 the block or insn was not in the reg chain. */
4580 FOR_ALL_BB (bb)
4581 df_bb_verify (bb);
4583 /* (4) See if all reg chains are traversed a second time. This time
4584 a check is made that the marks are clear. A set mark would be a
4585 from a reg that is not in any insn or basic block. */
4587 for (i = 0; i < DF_REG_SIZE (df); i++)
4589 df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
4590 df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
4591 df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));