/* Scanning of rtl for dataflow analysis.
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
   2008 Free Software Foundation, Inc.
   Originally contributed by Michael P. Hayes
   (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
   and Kenneth Zadeck (zadeck@naturalbridge.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "regs.h"
#include "output.h"
#include "alloc-pool.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "sbitmap.h"
#include "bitmap.h"
#include "timevar.h"
#include "tree.h"
#include "target.h"
#include "target-def.h"
#include "df.h"
#include "tree-pass.h"

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif
/* The bitmap_obstack is used to hold some static variables that
   should not be reset after each function is compiled.  */

static bitmap_obstack persistent_obstack;

/* The set of hard registers in eliminables[i].from.  */

static HARD_REG_SET elim_reg_set;

/* This is a bitmap copy of regs_invalidated_by_call so that we can
   easily add it into bitmaps, etc.  */

bitmap df_invalidated_by_call = NULL;

/* Initialize ur_in and ur_out as if all hard registers were partially
   available.  */

struct df_collection_rec
{
  struct df_ref **def_vec;
  unsigned int next_def;
  struct df_ref **use_vec;
  unsigned int next_use;
  struct df_ref **eq_use_vec;
  unsigned int next_eq_use;
  struct df_mw_hardreg **mw_vec;
  unsigned int next_mw;
};
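
/* A df_collection_rec is a scratch record: the scanner fills the
   vectors with the refs it finds in one insn, and df_refs_add_to_chains
   (which canonizes them on the way) installs them into the insn and
   register chains.  The sketch below is illustrative only; the vector
   sizes and the local function name are assumptions, not part of this
   file's interfaces, though the shape mirrors what df_insn_rescan does.  */
#if 0
static void
df_collection_rec_sketch (basic_block bb, rtx insn)
{
  struct df_collection_rec collection_rec;

  /* Stack-allocate generously sized scratch vectors, as the real
     callers in this file do.  */
  collection_rec.def_vec = XALLOCAVEC (struct df_ref *, 1000);
  collection_rec.use_vec = XALLOCAVEC (struct df_ref *, 1000);
  collection_rec.eq_use_vec = XALLOCAVEC (struct df_ref *, 1000);
  collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);

  /* Collect the refs for INSN, then splice them into the permanent
     chains.  */
  df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
  df_refs_add_to_chains (&collection_rec, bb, insn);
}
#endif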
static struct df_ref *df_null_ref_rec[1];
static struct df_mw_hardreg *df_null_mw_rec[1];

static void df_ref_record (struct df_collection_rec *,
                           rtx, rtx *,
                           basic_block, struct df_insn_info *,
                           enum df_ref_type, enum df_ref_flags,
                           int, int, enum machine_mode);
static void df_def_record_1 (struct df_collection_rec *, rtx,
                             basic_block, struct df_insn_info *,
                             enum df_ref_flags);
static void df_defs_record (struct df_collection_rec *, rtx,
                            basic_block, struct df_insn_info *,
                            enum df_ref_flags);
static void df_uses_record (struct df_collection_rec *,
                            rtx *, enum df_ref_type,
                            basic_block, struct df_insn_info *,
                            enum df_ref_flags,
                            int, int, enum machine_mode);

static struct df_ref *df_ref_create_structure (struct df_collection_rec *,
                                               rtx, rtx *,
                                               basic_block,
                                               struct df_insn_info *,
                                               enum df_ref_type,
                                               enum df_ref_flags,
                                               int, int, enum machine_mode);

static void df_insn_refs_collect (struct df_collection_rec *,
                                  basic_block, struct df_insn_info *);
static void df_canonize_collection_rec (struct df_collection_rec *);

static void df_get_regular_block_artificial_uses (bitmap);
static void df_get_eh_block_artificial_uses (bitmap);

static void df_record_entry_block_defs (bitmap);
static void df_record_exit_block_uses (bitmap);
static void df_get_exit_block_use_set (bitmap);
static void df_get_entry_block_def_set (bitmap);
static void df_grow_ref_info (struct df_ref_info *, unsigned int);
static void df_ref_chain_delete_du_chain (struct df_ref **);
static void df_ref_chain_delete (struct df_ref **);

static void df_refs_add_to_chains (struct df_collection_rec *,
                                   basic_block, rtx);

static bool df_insn_refs_verify (struct df_collection_rec *, basic_block,
                                 rtx, bool);
static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
static void df_install_ref (struct df_ref *, struct df_reg_info *,
                            struct df_ref_info *, bool);

static int df_ref_compare (const void *, const void *);
static int df_mw_compare (const void *, const void *);
/* Indexed by hardware reg number; true if that register is ever
   used in the current function.

   In df-scan.c, this is set up to record the hard regs used
   explicitly.  Reload adds in the hard regs used for holding pseudo
   regs.  Final uses it to generate the code in the function prologue
   and epilogue to save and restore registers as needed.  */

static bool regs_ever_live[FIRST_PSEUDO_REGISTER];

/*----------------------------------------------------------------------------
   SCANNING DATAFLOW PROBLEM

   There are several ways in which scanning looks just like the other
   dataflow problems.  It shares all the mechanisms for local info
   as well as basic block info.  Where it differs is when and how often
   it gets run.  It also has no need for the iterative solver.
----------------------------------------------------------------------------*/
/* Problem data for the scanning dataflow function.  */
struct df_scan_problem_data
{
  alloc_pool ref_pool;
  alloc_pool ref_extract_pool;
  alloc_pool insn_pool;
  alloc_pool reg_pool;
  alloc_pool mw_reg_pool;
  bitmap_obstack reg_bitmaps;
  bitmap_obstack insn_bitmaps;
};

typedef struct df_scan_bb_info *df_scan_bb_info_t;
static void
df_scan_free_internal (void)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  free (df->def_info.refs);
  free (df->def_info.begin);
  free (df->def_info.count);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.refs);
  free (df->use_info.begin);
  free (df->use_info.count);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  free (df->def_regs);
  df->def_regs = NULL;
  free (df->use_regs);
  df->use_regs = NULL;
  free (df->eq_use_regs);
  df->eq_use_regs = NULL;
  df->regs_size = 0;
  DF_REG_SIZE (df) = 0;

  free (df->insns);
  df->insns = NULL;
  DF_INSN_SIZE () = 0;

  free (df_scan->block_info);
  df_scan->block_info = NULL;
  df_scan->block_info_size = 0;

  BITMAP_FREE (df->hardware_regs_used);
  BITMAP_FREE (df->regular_block_artificial_uses);
  BITMAP_FREE (df->eh_block_artificial_uses);
  BITMAP_FREE (df->entry_block_defs);
  BITMAP_FREE (df->exit_block_uses);
  BITMAP_FREE (df->insns_to_delete);
  BITMAP_FREE (df->insns_to_rescan);
  BITMAP_FREE (df->insns_to_notes_rescan);

  free_alloc_pool (df_scan->block_pool);
  free_alloc_pool (problem_data->ref_pool);
  free_alloc_pool (problem_data->ref_extract_pool);
  free_alloc_pool (problem_data->insn_pool);
  free_alloc_pool (problem_data->reg_pool);
  free_alloc_pool (problem_data->mw_reg_pool);
  bitmap_obstack_release (&problem_data->reg_bitmaps);
  bitmap_obstack_release (&problem_data->insn_bitmaps);
  free (df_scan->problem_data);
}
/* Set basic block info.  */

static void
df_scan_set_bb_info (unsigned int index,
                     struct df_scan_bb_info *bb_info)
{
  gcc_assert (df_scan);
  df_grow_bb_info (df_scan);
  df_scan->block_info[index] = (void *) bb_info;
}
/* Free basic block info.  */

static void
df_scan_free_bb_info (basic_block bb, void *vbb_info)
{
  struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
  unsigned int bb_index = bb->index;
  if (bb_info)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            /* Delete the refs recorded for INSN.  */
            df_insn_delete (bb, INSN_UID (insn));
        }

      if (bb_index < df_scan->block_info_size)
        bb_info = df_scan_get_bb_info (bb_index);

      /* Get rid of any artificial uses or defs.  */
      df_ref_chain_delete_du_chain (bb_info->artificial_defs);
      df_ref_chain_delete_du_chain (bb_info->artificial_uses);
      df_ref_chain_delete (bb_info->artificial_defs);
      df_ref_chain_delete (bb_info->artificial_uses);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
      pool_free (df_scan->block_pool, bb_info);
    }
}
/* Allocate the problem data for the scanning problem.  This should be
   called when the problem is created or when the entire function is to
   be rescanned.  */
void
df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
  struct df_scan_problem_data *problem_data;
  unsigned int insn_num = get_max_uid () + 1;
  unsigned int block_size = 400;
  basic_block bb;

  /* Given the number of pools, this is really faster than tearing
     everything apart.  */
  if (df_scan->problem_data)
    df_scan_free_internal ();

  df_scan->block_pool
    = create_alloc_pool ("df_scan_block pool",
                         sizeof (struct df_scan_bb_info),
                         block_size);

  problem_data = XNEW (struct df_scan_problem_data);
  df_scan->problem_data = problem_data;
  df_scan->computed = true;

  problem_data->ref_pool
    = create_alloc_pool ("df_scan_ref pool",
                         sizeof (struct df_ref), block_size);
  problem_data->ref_extract_pool
    = create_alloc_pool ("df_scan_ref extract pool",
                         sizeof (struct df_ref_extract), block_size);
  problem_data->insn_pool
    = create_alloc_pool ("df_scan_insn pool",
                         sizeof (struct df_insn_info), block_size);
  problem_data->reg_pool
    = create_alloc_pool ("df_scan_reg pool",
                         sizeof (struct df_reg_info), block_size);
  problem_data->mw_reg_pool
    = create_alloc_pool ("df_scan_mw_reg pool",
                         sizeof (struct df_mw_hardreg), block_size);

  bitmap_obstack_initialize (&problem_data->reg_bitmaps);
  bitmap_obstack_initialize (&problem_data->insn_bitmaps);

  insn_num += insn_num / 4;
  df_grow_reg_info ();

  df_grow_insn_info ();
  df_grow_bb_info (df_scan);

  FOR_ALL_BB (bb)
    {
      unsigned int bb_index = bb->index;
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
      if (!bb_info)
        {
          bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
          df_scan_set_bb_info (bb_index, bb_info);
        }
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  df->hardware_regs_used = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->regular_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->eh_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->insns_to_delete = BITMAP_ALLOC (&problem_data->insn_bitmaps);
  df->insns_to_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
  df->insns_to_notes_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
  df_scan->optional_p = false;
}
/* Free all of the data associated with the scan problem.  */

static void
df_scan_free (void)
{
  if (df_scan->problem_data)
    df_scan_free_internal ();

  if (df->blocks_to_analyze)
    {
      BITMAP_FREE (df->blocks_to_analyze);
      df->blocks_to_analyze = NULL;
    }

  free (df_scan);
}
/* Dump the preamble for DF_SCAN dump.  */
static void
df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
{
  int i;

  fprintf (file, ";;  invalidated by call \t");
  df_print_regset (file, df_invalidated_by_call);
  fprintf (file, ";;  hardware regs used \t");
  df_print_regset (file, df->hardware_regs_used);
  fprintf (file, ";;  regular block artificial uses \t");
  df_print_regset (file, df->regular_block_artificial_uses);
  fprintf (file, ";;  eh block artificial uses \t");
  df_print_regset (file, df->eh_block_artificial_uses);
  fprintf (file, ";;  entry block defs \t");
  df_print_regset (file, df->entry_block_defs);
  fprintf (file, ";;  exit block uses \t");
  df_print_regset (file, df->exit_block_uses);
  fprintf (file, ";;  regs ever live \t");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (df_regs_ever_live_p (i))
      fprintf (file, " %d[%s]", i, reg_names[i]);

  fprintf (file, "\n");
}
/* Dump the bb_info for a given basic block.  */
static void
df_scan_start_block (basic_block bb, FILE *file)
{
  struct df_scan_bb_info *bb_info
    = df_scan_get_bb_info (bb->index);

  if (bb_info)
    {
      fprintf (file, ";; bb %d artificial_defs: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_defs, true, file);
      fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_uses, true, file);
      fprintf (file, "\n");
    }
#if 0
  {
    rtx insn;
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        df_insn_debug (insn, false, file);
  }
#endif
}
static struct df_problem problem_SCAN =
{
  DF_SCAN,                    /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_scan_alloc,              /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_scan_free_bb_info,       /* Free basic block info.  */
  NULL,                       /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_scan_free,               /* Free all of the problem information.  */
  NULL,                       /* Remove this problem from the stack of dataflow problems.  */
  df_scan_start_dump,         /* Debugging.  */
  df_scan_start_block,        /* Debugging start block.  */
  NULL,                       /* Debugging end block.  */
  NULL,                       /* Incremental solution verify start.  */
  NULL,                       /* Incremental solution verify end.  */
  NULL,                       /* Dependent problem.  */
  TV_DF_SCAN,                 /* Timing variable.  */
  false                       /* Reset blocks on dropping out of blocks_to_analyze.  */
};
/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

void
df_scan_add_problem (void)
{
  df_add_problem (&problem_SCAN);
}
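
/* A minimal sketch of how a pass brings the scanning problem up:
   df_scan_add_problem is defined just above, df_analyze comes from
   df-core.c.  The function name is an assumption for illustration,
   and teardown (normally df_finish_pass) is left out.  */
#if 0
static void
df_scan_usage_sketch (void)
{
  df_scan_add_problem ();   /* Push the scanning problem onto df.  */
  df_analyze ();            /* Scan every insn and build the refs.  */

  /* ... examine or transform insns, rescanning them as they change ...  */
}
#endif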
/*----------------------------------------------------------------------------
   Storage Allocation Utilities
----------------------------------------------------------------------------*/


/* First, grow the reg_info information.  If the current size is less than
   the number of pseudos, grow to 25% more than the number of
   pseudos.

   Second, ensure that all of the slots up to max_reg_num have been
   filled with reg_info structures.  */

void
df_grow_reg_info (void)
{
  unsigned int max_reg = max_reg_num ();
  unsigned int new_size = max_reg;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;

  if (df->regs_size < new_size)
    {
      new_size += new_size / 4;
      df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
      df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
      df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
                                    new_size);
      df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
      df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
      df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
      df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
      df->regs_size = new_size;
    }

  for (i = df->regs_inited; i < max_reg; i++)
    {
      struct df_reg_info *reg_info;

      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->def_regs[i] = reg_info;
      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->use_regs[i] = reg_info;
      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->eq_use_regs[i] = reg_info;
      df->def_info.begin[i] = 0;
      df->def_info.count[i] = 0;
      df->use_info.begin[i] = 0;
      df->use_info.count[i] = 0;
    }

  df->regs_inited = max_reg;
}
/* Grow the ref information.  */

static void
df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
{
  if (ref_info->refs_size < new_size)
    {
      ref_info->refs = XRESIZEVEC (struct df_ref *, ref_info->refs, new_size);
      memset (ref_info->refs + ref_info->refs_size, 0,
              (new_size - ref_info->refs_size) * sizeof (struct df_ref *));
      ref_info->refs_size = new_size;
    }
}
/* Check and grow the ref information if necessary.  This routine
   guarantees that the refs array has at least total_size + BITMAP_ADDEND
   entries.  It updates ref_info->refs_size only and does not change
   ref_info->total_size.  */

static void
df_check_and_grow_ref_info (struct df_ref_info *ref_info,
                            unsigned bitmap_addend)
{
  if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
    {
      int new_size = ref_info->total_size + bitmap_addend;
      new_size += ref_info->total_size / 4;
      df_grow_ref_info (ref_info, new_size);
    }
}
/* Grow the insn information.  If the current size is less than the
   number of instructions, grow to 25% more than the number of
   instructions.  */

void
df_grow_insn_info (void)
{
  unsigned int new_size = get_max_uid () + 1;
  if (DF_INSN_SIZE () < new_size)
    {
      new_size += new_size / 4;
      df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
      memset (df->insns + df->insns_size, 0,
              (new_size - DF_INSN_SIZE ()) * sizeof (struct df_insn_info *));
      DF_INSN_SIZE () = new_size;
    }
}
/*----------------------------------------------------------------------------
   PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
----------------------------------------------------------------------------*/
/* Rescan all of the blocks in blocks_to_analyze, or all of the blocks
   in the function if blocks_to_analyze is NULL.  */

void
df_scan_blocks (void)
{
  basic_block bb;

  df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
  df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;

  df_get_regular_block_artificial_uses (df->regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (df->eh_block_artificial_uses);

  bitmap_ior_into (df->eh_block_artificial_uses,
                   df->regular_block_artificial_uses);

  /* ENTRY and EXIT blocks have special defs/uses.  */
  df_get_entry_block_def_set (df->entry_block_defs);
  df_record_entry_block_defs (df->entry_block_defs);
  df_get_exit_block_use_set (df->exit_block_uses);
  df_record_exit_block_uses (df->exit_block_uses);
  df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
  df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));

  /* Regular blocks.  */
  FOR_EACH_BB (bb)
    {
      unsigned int bb_index = bb->index;
      df_bb_refs_record (bb_index, true);
    }
}
/* Create a new ref of type DF_REF_TYPE for register REG at address
   LOC within INSN of BB.  This function is only used externally.

   If REF_FLAGS contains DF_REF_SIGN_EXTRACT or DF_REF_ZERO_EXTRACT,
   WIDTH, OFFSET and MODE describe the extract; they are meaningful
   only if the corresponding operands were constants, and should be
   -1 otherwise even though those flags are set.  */

struct df_ref *
df_ref_create (rtx reg, rtx *loc, rtx insn,
               basic_block bb,
               enum df_ref_type ref_type,
               enum df_ref_flags ref_flags,
               int width, int offset, enum machine_mode mode)
{
  struct df_ref *ref;
  struct df_reg_info **reg_info;
  struct df_ref_info *ref_info;
  struct df_ref **ref_rec;
  struct df_ref ***ref_rec_ptr;
  unsigned int count = 0;
  bool add_to_table;

  df_grow_reg_info ();

  /* You cannot hack artificial refs.  */
  gcc_assert (insn);
  ref = df_ref_create_structure (NULL, reg, loc, bb, DF_INSN_INFO_GET (insn),
                                 ref_type, ref_flags,
                                 width, offset, mode);

  if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
    {
      reg_info = df->def_regs;
      ref_info = &df->def_info;
      ref_rec_ptr = &DF_INSN_DEFS (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }
  else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
    {
      reg_info = df->eq_use_regs;
      ref_info = &df->use_info;
      ref_rec_ptr = &DF_INSN_EQ_USES (insn);
      switch (ref_info->ref_order)
        {
        case DF_REF_ORDER_UNORDERED_WITH_NOTES:
        case DF_REF_ORDER_BY_REG_WITH_NOTES:
        case DF_REF_ORDER_BY_INSN_WITH_NOTES:
          add_to_table = true;
          break;
        default:
          add_to_table = false;
          break;
        }
    }
  else
    {
      reg_info = df->use_regs;
      ref_info = &df->use_info;
      ref_rec_ptr = &DF_INSN_USES (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }

  /* Do not add if ref is not in the right blocks.  */
  if (add_to_table && df->analyze_subset)
    add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

  df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);

  if (add_to_table)
    switch (ref_info->ref_order)
      {
      case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      case DF_REF_ORDER_BY_REG_WITH_NOTES:
      case DF_REF_ORDER_BY_INSN_WITH_NOTES:
        ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
        break;
      default:
        ref_info->ref_order = DF_REF_ORDER_UNORDERED;
        break;
      }

  ref_rec = *ref_rec_ptr;
  while (*ref_rec)
    {
      count++;
      ref_rec++;
    }

  ref_rec = *ref_rec_ptr;
  if (count)
    {
      ref_rec = XRESIZEVEC (struct df_ref *, ref_rec, count + 2);
      *ref_rec_ptr = ref_rec;
      ref_rec[count] = ref;
      ref_rec[count + 1] = NULL;
      qsort (ref_rec, count + 1, sizeof (struct df_ref *), df_ref_compare);
    }
  else
    {
      struct df_ref **ref_rec = XNEWVEC (struct df_ref *, 2);
      ref_rec[0] = ref;
      ref_rec[1] = NULL;
      *ref_rec_ptr = ref_rec;
    }

#if 0
  if (dump_file)
    {
      fprintf (dump_file, "adding ref ");
      df_ref_debug (ref, dump_file);
    }
#endif
  /* By adding the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  df_set_bb_dirty (bb);

  return ref;
}
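
/* An illustrative sketch of calling df_ref_create from outside this
   file: recording an extra use ref for INSN.  The helper name is an
   assumption, the snippet presumes PATTERN (INSN) is a SET whose
   source is a REG, and no extract flags are involved, hence the
   -1/-1/VOIDmode trailing arguments.  */
#if 0
static void
df_ref_create_sketch (rtx insn)
{
  rtx *loc = &SET_SRC (PATTERN (insn));  /* Assume a single-set insn.  */
  df_ref_create (*loc, loc, insn, BLOCK_FOR_INSN (insn),
                 DF_REF_REG_USE, 0, -1, -1, VOIDmode);
}
#endif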
/*----------------------------------------------------------------------------
   UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
----------------------------------------------------------------------------*/

static void
df_free_ref (struct df_ref *ref)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
    pool_free (problem_data->ref_extract_pool, (struct df_ref_extract *) ref);
  else
    pool_free (problem_data->ref_pool, ref);
}
/* Unlink and delete REF from its reg_use, reg_eq_use or reg_def chain.
   Also delete the def-use or use-def chain if it exists.  */

static void
df_reg_chain_unlink (struct df_ref *ref)
{
  struct df_ref *next = DF_REF_NEXT_REG (ref);
  struct df_ref *prev = DF_REF_PREV_REG (ref);
  int id = DF_REF_ID (ref);
  struct df_reg_info *reg_info;
  struct df_ref **refs = NULL;

  if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
    {
      reg_info = DF_REG_DEF_GET (DF_REF_REGNO (ref));
      refs = df->def_info.refs;
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
        {
          reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
          switch (df->use_info.ref_order)
            {
            case DF_REF_ORDER_UNORDERED_WITH_NOTES:
            case DF_REF_ORDER_BY_REG_WITH_NOTES:
            case DF_REF_ORDER_BY_INSN_WITH_NOTES:
              refs = df->use_info.refs;
              break;
            default:
              break;
            }
        }
      else
        {
          reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
          refs = df->use_info.refs;
        }
    }

  if (refs)
    {
      if (df->analyze_subset)
        {
          if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BB (ref)->index))
            refs[id] = NULL;
        }
      else
        refs[id] = NULL;
    }

  /* Delete any def-use or use-def chains that start here.  It is
     possible that there is trash in this field.  This happens for
     insns that have been deleted when rescanning has been deferred
     and the chain problem has also been deleted.  The chain tear down
     code skips deleted insns.  */
  if (df_chain && DF_REF_CHAIN (ref))
    df_chain_unlink (ref);

  reg_info->n_refs--;
  if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
    {
      gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
    }

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    DF_REF_NEXT_REG (prev) = next;
  else
    {
      gcc_assert (reg_info->reg_chain == ref);
      reg_info->reg_chain = next;
    }
  if (next)
    DF_REF_PREV_REG (next) = prev;

  df_free_ref (ref);
}
/* Remove REF from VEC.  */

static void
df_ref_compress_rec (struct df_ref ***vec_ptr, struct df_ref *ref)
{
  struct df_ref **vec = *vec_ptr;

  if (vec[1])
    {
      while (*vec && *vec != ref)
        vec++;

      while (*vec)
        {
          *vec = *(vec + 1);
          vec++;
        }
    }
  else
    {
      free (vec);
      *vec_ptr = df_null_ref_rec;
    }
}
/* Unlink REF from all def-use/use-def chains, etc.  */

void
df_ref_remove (struct df_ref *ref)
{
#if 0
  if (dump_file)
    {
      fprintf (dump_file, "removing ref ");
      df_ref_debug (ref, dump_file);
    }
#endif

  if (DF_REF_REG_DEF_P (ref))
    {
      if (DF_REF_IS_ARTIFICIAL (ref))
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (DF_REF_BB (ref)->index);
          df_ref_compress_rec (&bb_info->artificial_defs, ref);
        }
      else
        {
          unsigned int uid = DF_REF_INSN_UID (ref);
          struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
          df_ref_compress_rec (&insn_rec->defs, ref);
        }
    }
  else
    {
      if (DF_REF_IS_ARTIFICIAL (ref))
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (DF_REF_BB (ref)->index);
          df_ref_compress_rec (&bb_info->artificial_uses, ref);
        }
      else
        {
          unsigned int uid = DF_REF_INSN_UID (ref);
          struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);

          if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
            df_ref_compress_rec (&insn_rec->eq_uses, ref);
          else
            df_ref_compress_rec (&insn_rec->uses, ref);
        }
    }

  /* By deleting the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  df_set_bb_dirty (DF_REF_BB (ref));
  df_reg_chain_unlink (ref);
}
/* Create the insn record for INSN.  If there was one there, zero it
   out.  */

struct df_insn_info *
df_insn_create_insn_record (rtx insn)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  struct df_insn_info *insn_rec;

  df_grow_insn_info ();
  insn_rec = DF_INSN_INFO_GET (insn);
  if (!insn_rec)
    {
      insn_rec = (struct df_insn_info *) pool_alloc (problem_data->insn_pool);
      DF_INSN_INFO_SET (insn, insn_rec);
    }
  memset (insn_rec, 0, sizeof (struct df_insn_info));
  insn_rec->insn = insn;
  return insn_rec;
}
/* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain.  */

static void
df_ref_chain_delete_du_chain (struct df_ref **ref_rec)
{
  while (*ref_rec)
    {
      struct df_ref *ref = *ref_rec;
      /* CHAIN is allocated by DF_CHAIN.  So make sure to
         pass df_scan instance for the problem.  */
      if (DF_REF_CHAIN (ref))
        df_chain_unlink (ref);
      ref_rec++;
    }
}


/* Delete all refs in the ref chain.  */

static void
df_ref_chain_delete (struct df_ref **ref_rec)
{
  struct df_ref **start = ref_rec;
  while (*ref_rec)
    {
      df_reg_chain_unlink (*ref_rec);
      ref_rec++;
    }

  /* If the list is empty, it has a special shared element that is not
     to be deleted.  */
  if (*start)
    free (start);
}
/* Delete the hardreg chain.  */

static void
df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
{
  struct df_scan_problem_data *problem_data;

  if (!hardregs)
    return;

  problem_data = (struct df_scan_problem_data *) df_scan->problem_data;

  while (*hardregs)
    {
      pool_free (problem_data->mw_reg_pool, *hardregs);
      hardregs++;
    }
}
/* Delete all of the refs information from INSN.  BB must be passed in
   except when called from df_process_deferred_rescans, so that the
   block can be marked dirty.  */

void
df_insn_delete (basic_block bb, unsigned int uid)
{
  struct df_insn_info *insn_info = NULL;
  if (!df)
    return;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  /* The block must be marked as dirty now, rather than later as in
     df_insn_rescan and df_notes_rescan because it may not be there at
     rescanning time and the mark would blow up.  */
  if (bb)
    df_set_bb_dirty (bb);

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (insn_info)
        {
          bitmap_clear_bit (df->insns_to_rescan, uid);
          bitmap_clear_bit (df->insns_to_notes_rescan, uid);
          bitmap_set_bit (df->insns_to_delete, uid);
        }
      if (dump_file)
        fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "deleting insn with uid = %d.\n", uid);

  bitmap_clear_bit (df->insns_to_delete, uid);
  bitmap_clear_bit (df->insns_to_rescan, uid);
  bitmap_clear_bit (df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      struct df_scan_problem_data *problem_data
        = (struct df_scan_problem_data *) df_scan->problem_data;

      /* In general, notes do not have the insn_info fields
         initialized.  However, combine deletes insns by changing them
         to notes.  How clever.  So we cannot just check if it is a
         valid insn before short circuiting this code, we need to see
         if we actually initialized it.  */
      if (insn_info->defs)
        {
          df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

          if (df_chain)
            {
              df_ref_chain_delete_du_chain (insn_info->defs);
              df_ref_chain_delete_du_chain (insn_info->uses);
              df_ref_chain_delete_du_chain (insn_info->eq_uses);
            }

          df_ref_chain_delete (insn_info->defs);
          df_ref_chain_delete (insn_info->uses);
          df_ref_chain_delete (insn_info->eq_uses);
        }
      pool_free (problem_data->insn_pool, insn_info);
      DF_INSN_UID_SET (uid, NULL);
    }
}
/* Free all of the refs and the mw_hardregs in COLLECTION_REC.  */

static void
df_free_collection_rec (struct df_collection_rec *collection_rec)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  struct df_ref **ref;
  struct df_mw_hardreg **mw;

  if (collection_rec->def_vec)
    for (ref = collection_rec->def_vec; *ref; ref++)
      df_free_ref (*ref);
  if (collection_rec->use_vec)
    for (ref = collection_rec->use_vec; *ref; ref++)
      df_free_ref (*ref);
  if (collection_rec->eq_use_vec)
    for (ref = collection_rec->eq_use_vec; *ref; ref++)
      df_free_ref (*ref);
  if (collection_rec->mw_vec)
    for (mw = collection_rec->mw_vec; *mw; mw++)
      pool_free (problem_data->mw_reg_pool, *mw);
}
/* Rescan INSN.  Return TRUE if the rescanning produced any changes.  */

bool
df_insn_rescan (rtx insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  basic_block bb = BLOCK_FOR_INSN (insn);
  struct df_collection_rec collection_rec;
  collection_rec.def_vec = XALLOCAVEC (struct df_ref *, 1000);
  collection_rec.use_vec = XALLOCAVEC (struct df_ref *, 1000);
  collection_rec.eq_use_vec = XALLOCAVEC (struct df_ref *, 1000);
  collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);

  if ((!df) || (!INSN_P (insn)))
    return false;

  if (!bb)
    {
      if (dump_file)
        fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
      return false;
    }

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return false;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
        {
          insn_info = df_insn_create_insn_record (insn);
          insn_info->defs = df_null_ref_rec;
          insn_info->uses = df_null_ref_rec;
          insn_info->eq_uses = df_null_ref_rec;
          insn_info->mw_hardregs = df_null_mw_rec;
        }
      if (dump_file)
        fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);

      bitmap_clear_bit (df->insns_to_delete, uid);
      bitmap_clear_bit (df->insns_to_notes_rescan, uid);
      bitmap_set_bit (df->insns_to_rescan, INSN_UID (insn));
      return false;
    }

  bitmap_clear_bit (df->insns_to_delete, uid);
  bitmap_clear_bit (df->insns_to_rescan, uid);
  bitmap_clear_bit (df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
      /* If there's no change, return false.  */
      if (the_same)
        {
          df_free_collection_rec (&collection_rec);
          if (dump_file)
            fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
          return false;
        }
      if (dump_file)
        fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);

      /* There is a change, so delete the existing info.  */
      df_insn_delete (NULL, uid);
      df_insn_create_insn_record (insn);
    }
  else
    {
      struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
      df_insn_refs_collect (&collection_rec, bb, insn_info);
      if (dump_file)
        fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
    }

  df_refs_add_to_chains (&collection_rec, bb, insn);
  df_set_bb_dirty (bb);
  return true;
}
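
/* A sketch of the typical caller pattern: a pass edits the body of an
   insn in place and then asks df to rescan just that insn.  The
   transformation shown (validate_change of the SET source to NEW_SRC)
   and the function name are assumed examples, not something this file
   mandates.  */
#if 0
static void
df_insn_rescan_sketch (rtx insn, rtx new_src)
{
  /* Assume INSN is a single-set insn whose source is being replaced.  */
  if (validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, false))
    df_insn_rescan (insn);   /* Rebuild the refs for just this insn.  */
}
#endif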
/* Rescan all of the insns in the function.  Note that the artificial
   uses and defs are not touched.  This function will destroy def-use
   or use-def chains.  */

void
df_insn_rescan_all (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  basic_block bb;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);

  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  bitmap_copy (tmp, df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_delete (NULL, uid);
    }

  BITMAP_FREE (tmp);
  bitmap_clear (df->insns_to_delete);
  bitmap_clear (df->insns_to_rescan);
  bitmap_clear (df->insns_to_notes_rescan);

  FOR_EACH_BB (bb)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
        {
          df_insn_rescan (insn);
        }
    }

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);
}
/* Process all of the deferred rescans or deletions.  */

void
df_process_deferred_rescans (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);

  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  if (dump_file)
    fprintf (dump_file, "starting the processing of deferred insns\n");

  bitmap_copy (tmp, df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_delete (NULL, uid);
    }

  bitmap_copy (tmp, df->insns_to_rescan);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_rescan (insn_info->insn);
    }

  bitmap_copy (tmp, df->insns_to_notes_rescan);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_notes_rescan (insn_info->insn);
    }

  if (dump_file)
    fprintf (dump_file, "ending the processing of deferred insns\n");

  BITMAP_FREE (tmp);
  bitmap_clear (df->insns_to_delete);
  bitmap_clear (df->insns_to_rescan);
  bitmap_clear (df->insns_to_notes_rescan);

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);

  /* If someone changed regs_ever_live during this pass, fix up the
     entry and exit blocks.  */
  if (df->redo_entry_and_exit)
    {
      df_update_entry_exit_and_calls ();
      df->redo_entry_and_exit = false;
    }
}
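
/* A sketch of the deferred-rescan protocol from a pass's point of
   view: set DF_DEFER_INSN_RESCAN, make a batch of changes (with the
   flag set, df_insn_rescan / df_insn_delete / df_notes_rescan only
   queue work in the insns_to_* bitmaps), then drain the queues in one
   shot.  The function name and the edit loop are placeholders.  */
#if 0
static void
df_deferred_rescan_sketch (void)
{
  df_set_flags (DF_DEFER_INSN_RESCAN);

  /* ... many calls to df_insn_rescan / df_insn_delete /
     df_notes_rescan, all of which just set bits ...  */

  df_process_deferred_rescans ();   /* Do the queued work once.  */
  df_clear_flags (DF_DEFER_INSN_RESCAN);
}
#endif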
/* Count the number of refs.  Include the defs if INCLUDE_DEFS.  Include
   the uses if INCLUDE_USES.  Include the eq_uses if
   INCLUDE_EQ_USES.  */

static unsigned int
df_count_refs (bool include_defs, bool include_uses,
               bool include_eq_uses)
{
  unsigned int regno;
  int size = 0;
  unsigned int m = df->regs_inited;

  for (regno = 0; regno < m; regno++)
    {
      if (include_defs)
        size += DF_REG_DEF_COUNT (regno);
      if (include_uses)
        size += DF_REG_USE_COUNT (regno);
      if (include_eq_uses)
        size += DF_REG_EQ_USE_COUNT (regno);
    }
  return size;
}
/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in reg order
   which is likely to be best if processing the whole function.  */

static void
df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
                                  bool include_defs,
                                  bool include_uses,
                                  bool include_eq_uses)
{
  unsigned int m = df->regs_inited;
  unsigned int regno;
  unsigned int offset = 0;
  unsigned int start;

  if (df->changeable_flags & DF_NO_HARD_REGS)
    {
      start = FIRST_PSEUDO_REGISTER;
      memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
      memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
    }
  else
    start = 0;

  ref_info->total_size
    = df_count_refs (include_defs, include_uses, include_eq_uses);

  df_check_and_grow_ref_info (ref_info, 1);

  for (regno = start; regno < m; regno++)
    {
      int count = 0;
      ref_info->begin[regno] = offset;
      if (include_defs)
        {
          struct df_ref *ref = DF_REG_DEF_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_assert (offset < ref_info->refs_size);
            }
        }
      if (include_uses)
        {
          struct df_ref *ref = DF_REG_USE_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_assert (offset < ref_info->refs_size);
            }
        }
      if (include_eq_uses)
        {
          struct df_ref *ref = DF_REG_EQ_USE_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_assert (offset < ref_info->refs_size);
            }
        }
      ref_info->count[regno] = count;
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->table_size = offset;
}
/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in insn order
   which is likely to be best if processing some segment of the
   function.  */

static void
df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
                                   bool include_defs,
                                   bool include_uses,
                                   bool include_eq_uses)
{
  bitmap_iterator bi;
  unsigned int bb_index;
  unsigned int m = df->regs_inited;
  unsigned int offset = 0;
  unsigned int r;
  unsigned int start
    = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;

  memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
  memset (ref_info->count, 0, sizeof (int) * df->regs_inited);

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);

  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      rtx insn;
      struct df_ref **ref_rec;

      if (include_defs)
        for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
          {
            unsigned int regno = DF_REF_REGNO (*ref_rec);
            ref_info->count[regno]++;
          }
      if (include_uses)
        for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
          {
            unsigned int regno = DF_REF_REGNO (*ref_rec);
            ref_info->count[regno]++;
          }

      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            {
              unsigned int uid = INSN_UID (insn);

              if (include_defs)
                for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
              if (include_uses)
                for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
              if (include_eq_uses)
                for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
            }
        }
    }

  for (r = start; r < m; r++)
    {
      ref_info->begin[r] = offset;
      offset += ref_info->count[r];
      ref_info->count[r] = 0;
    }

  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      rtx insn;
      struct df_ref **ref_rec;

      if (include_defs)
        for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
          {
            struct df_ref *ref = *ref_rec;
            unsigned int regno = DF_REF_REGNO (ref);
            if (regno >= start)
              {
                unsigned int id
                  = ref_info->begin[regno] + ref_info->count[regno]++;
                DF_REF_ID (ref) = id;
                ref_info->refs[id] = ref;
              }
          }
      if (include_uses)
        for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
          {
            struct df_ref *ref = *ref_rec;
            unsigned int regno = DF_REF_REGNO (ref);
            if (regno >= start)
              {
                unsigned int id
                  = ref_info->begin[regno] + ref_info->count[regno]++;
                DF_REF_ID (ref) = id;
                ref_info->refs[id] = ref;
              }
          }

      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            {
              unsigned int uid = INSN_UID (insn);

              if (include_defs)
                for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
                  {
                    struct df_ref *ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
              if (include_uses)
                for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
                  {
                    struct df_ref *ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
              if (include_eq_uses)
                for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
                  {
                    struct df_ref *ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
            }
        }
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */

  ref_info->table_size = offset;
}
/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  */

static void
df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
                           bool include_defs,
                           bool include_uses,
                           bool include_eq_uses)
{
  if (df->analyze_subset)
    df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
                                       include_uses, include_eq_uses);
  else
    df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
                                      include_uses, include_eq_uses);
}
/* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET.  */
static unsigned int
df_add_refs_to_table (unsigned int offset,
                      struct df_ref_info *ref_info,
                      struct df_ref **ref_vec)
{
  while (*ref_vec)
    {
      struct df_ref *ref = *ref_vec;
      if ((!(df->changeable_flags & DF_NO_HARD_REGS))
          || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
        {
          ref_info->refs[offset] = ref;
          DF_REF_ID (*ref_vec) = offset++;
        }
      ref_vec++;
    }
  return offset;
}
/* Count the number of refs in all of the insns of BB.  Include the
   defs if INCLUDE_DEFS.  Include the uses if INCLUDE_USES.  Include the
   eq_uses if INCLUDE_EQ_USES.  */

static unsigned int
df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
                               struct df_ref_info *ref_info,
                               bool include_defs, bool include_uses,
                               bool include_eq_uses)
{
  rtx insn;

  if (include_defs)
    offset = df_add_refs_to_table (offset, ref_info,
                                   df_get_artificial_defs (bb->index));
  if (include_uses)
    offset = df_add_refs_to_table (offset, ref_info,
                                   df_get_artificial_uses (bb->index));

  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
        unsigned int uid = INSN_UID (insn);
        if (include_defs)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_DEFS (uid));
        if (include_uses)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_USES (uid));
        if (include_eq_uses)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_EQ_USES (uid));
      }
  return offset;
}
/* Organize the refs by insn into the table in REF_INFO.  If
   blocks_to_analyze is defined, use that set, otherwise the entire
   program.  Include the defs if INCLUDE_DEFS.  Include the uses if
   INCLUDE_USES.  Include the eq_uses if INCLUDE_EQ_USES.  */

static void
df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
                            bool include_defs, bool include_uses,
                            bool include_eq_uses)
{
  basic_block bb;
  unsigned int offset = 0;

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);
  if (df->blocks_to_analyze)
    {
      bitmap_iterator bi;
      unsigned int index;

      EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
        {
          offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
                                                  include_defs, include_uses,
                                                  include_eq_uses);
        }

      ref_info->table_size = offset;
    }
  else
    {
      FOR_ALL_BB (bb)
        offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
                                                include_defs, include_uses,
                                                include_eq_uses);
      ref_info->table_size = offset;
    }
}
/* If the use refs in DF are not organized, reorganize them.  */

void
df_maybe_reorganize_use_refs (enum df_ref_order order)
{
  if (order == df->use_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_REG_WITH_NOTES:
      df_reorganize_refs_by_reg (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
      df_reorganize_refs_by_insn (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_NO_TABLE:
      free (df->use_info.refs);
      df->use_info.refs = NULL;
      df->use_info.refs_size = 0;
      break;

    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->use_info.ref_order = order;
}
/* If the def refs in DF are not organized, reorganize them.  */

void
df_maybe_reorganize_def_refs (enum df_ref_order order)
{
  if (order == df->def_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_NO_TABLE:
      free (df->def_info.refs);
      df->def_info.refs = NULL;
      df->def_info.refs_size = 0;
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
    case DF_REF_ORDER_BY_REG_WITH_NOTES:
    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->def_info.ref_order = order;
}
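
/* A sketch of what a consumer gains from the by-reg ordering: once the
   table is organized DF_REF_ORDER_BY_REG, the defs of register REGNO
   occupy a contiguous slice of the table, reachable through the
   DF_DEFS_BEGIN/DF_DEFS_COUNT/DF_DEFS_GET accessors; their
   availability is assumed from df.h, and the function name is an
   illustration only.  */
#if 0
static void
df_walk_defs_of_reg_sketch (unsigned int regno)
{
  unsigned int i;

  df_maybe_reorganize_def_refs (DF_REF_ORDER_BY_REG);
  for (i = DF_DEFS_BEGIN (regno);
       i < DF_DEFS_BEGIN (regno) + DF_DEFS_COUNT (regno);
       i++)
    {
      struct df_ref *def = DF_DEFS_GET (i);
      /* ... examine DEF, e.g. via DF_REF_INSN (def) ...  */
    }
}
#endif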
/* Change the BB of all refs in the ref chain from OLD_BB to NEW_BB.
   Assumes that all refs in the chain have the same BB.  */

static void
df_ref_chain_change_bb (struct df_ref **ref_rec,
                        basic_block old_bb,
                        basic_block new_bb)
{
  while (*ref_rec)
    {
      struct df_ref *ref = *ref_rec;

      gcc_assert (DF_REF_BB (ref) == old_bb);
      DF_REF_BB (ref) = new_bb;
      ref_rec++;
    }
}
/* Change all of the basic block references in INSN to use the insn's
   current basic block.  This function is called from routines that move
   instructions from one block to another.  */

void
df_insn_change_bb (rtx insn, basic_block new_bb)
{
  basic_block old_bb = BLOCK_FOR_INSN (insn);
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (old_bb == new_bb)
    return;

  set_block_for_insn (insn, new_bb);

  if (!df)
    return;

  if (dump_file)
    fprintf (dump_file, "changing bb of uid %d\n", uid);

  insn_info = DF_INSN_UID_SAFE_GET (uid);
  if (insn_info == NULL)
    {
      if (dump_file)
        fprintf (dump_file, "  unscanned insn\n");
      df_insn_rescan (insn);
      return;
    }

  if (!INSN_P (insn))
    return;

  df_ref_chain_change_bb (insn_info->defs, old_bb, new_bb);
  df_ref_chain_change_bb (insn_info->uses, old_bb, new_bb);
  df_ref_chain_change_bb (insn_info->eq_uses, old_bb, new_bb);

  df_set_bb_dirty (new_bb);
  if (old_bb)
    {
      if (dump_file)
        fprintf (dump_file, "  from %d to %d\n",
                 old_bb->index, new_bb->index);
      df_set_bb_dirty (old_bb);
    }
  else
    if (dump_file)
      fprintf (dump_file, "  to %d\n", new_bb->index);
}
/* Helper function for df_ref_change_reg_with_loc.  */

static void
df_ref_change_reg_with_loc_1 (struct df_reg_info *old, struct df_reg_info *new,
                              int new_regno, rtx loc)
{
  struct df_ref *the_ref = old->reg_chain;

  while (the_ref)
    {
      if (DF_REF_LOC (the_ref) && (*DF_REF_LOC (the_ref) == loc))
        {
          struct df_ref *next_ref = the_ref->next_reg;
          struct df_ref *prev_ref = the_ref->prev_reg;
          struct df_ref **ref_vec, **ref_vec_t;
          unsigned int count = 0;

          DF_REF_REGNO (the_ref) = new_regno;
          DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];

          /* Pull the_ref out of the old regno chain.  */
          if (prev_ref)
            prev_ref->next_reg = next_ref;
          else
            old->reg_chain = next_ref;
          if (next_ref)
            next_ref->prev_reg = prev_ref;
          old->n_refs--;

          /* Put the ref into the new regno chain.  */
          the_ref->prev_reg = NULL;
          the_ref->next_reg = new->reg_chain;
          if (new->reg_chain)
            new->reg_chain->prev_reg = the_ref;
          new->reg_chain = the_ref;
          new->n_refs++;
          df_set_bb_dirty (DF_REF_BB (the_ref));

          /* Need to resort the record that the ref was in because the
             regno is a sorting key.  First, find the right record.  */
          if (DF_REF_IS_ARTIFICIAL (the_ref))
            {
              unsigned int bb_index = DF_REF_BB (the_ref)->index;
              if (DF_REF_REG_DEF_P (the_ref))
                ref_vec = df_get_artificial_defs (bb_index);
              else
                ref_vec = df_get_artificial_uses (bb_index);
            }
          else
            {
              struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);
              if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
                ref_vec = insn_info->eq_uses;
              else
                ref_vec = insn_info->uses;
              if (dump_file)
                fprintf (dump_file, "changing reg in insn %d\n",
                         INSN_UID (DF_REF_INSN (the_ref)));
            }
          ref_vec_t = ref_vec;

          /* Find the length.  */
          while (*ref_vec_t)
            {
              count++;
              ref_vec_t++;
            }
          qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);

          the_ref = next_ref;
        }
      else
        the_ref = the_ref->next_reg;
    }
}
/* Change the regno of all refs that contained LOC from OLD_REGNO to
   NEW_REGNO.  Refs that do not match LOC are not changed.  This call
   is to support the SET_REGNO macro.  */

void
df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
{
  if ((!df) || (old_regno == -1) || (old_regno == new_regno))
    return;

  df_grow_reg_info ();

  df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
                                DF_REG_DEF_GET (new_regno), new_regno, loc);
  df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
                                DF_REG_USE_GET (new_regno), new_regno, loc);
  df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
                                DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
}
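
/* Callers do not normally invoke df_ref_change_reg_with_loc directly;
   they go through the SET_REGNO macro from rtl.h, which both rewrites
   the REG rtx and, via this function, keeps the df chains in sync.
   A sketch (the function name is an assumption; REG is a pseudo being
   renumbered):  */
#if 0
static void
df_set_regno_sketch (rtx reg, unsigned int new_regno)
{
  /* Equivalent to writing the new regno into REG, plus fixing up
     every df ref whose location is this exact rtx.  */
  SET_REGNO (reg, new_regno);
}
#endif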
/* Delete the mw_hardregs that point into the eq_notes.  */

static unsigned int
df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
{
  struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
  unsigned int deleted = 0;
  unsigned int count = 0;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  if (!*mw_vec)
    return 0;

  while (*mw_vec)
    {
      if ((*mw_vec)->flags & DF_REF_IN_NOTE)
        {
          struct df_mw_hardreg **temp_vec = mw_vec;

          pool_free (problem_data->mw_reg_pool, *mw_vec);
          temp_vec = mw_vec;
          /* Shove the remaining ones down one to fill the gap.  While
             this looks n**2, it is highly unusual to have any mw regs
             in eq_notes and the chances of more than one are almost
             nonexistent.  */
          while (*temp_vec)
            {
              *temp_vec = *(temp_vec + 1);
              temp_vec++;
            }
          deleted++;
        }
      else
        {
          mw_vec++;
          count++;
        }
    }

  if (count == 0)
    {
      free (insn_info->mw_hardregs);
      insn_info->mw_hardregs = df_null_mw_rec;
      return 0;
    }
  return deleted;
}
1988 /* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN. */
1990 void
1991 df_notes_rescan (rtx insn)
1993 struct df_insn_info *insn_info;
1994 unsigned int uid = INSN_UID (insn);
1996 if (!df)
1997 return;
1999 /* The client has disabled rescanning and plans to do it itself. */
2000 if (df->changeable_flags & DF_NO_INSN_RESCAN)
2001 return;
2003 /* Do nothing if the insn hasn't been emitted yet. */
2004 if (!BLOCK_FOR_INSN (insn))
2005 return;
2007 df_grow_bb_info (df_scan);
2008 df_grow_reg_info ();
2010 insn_info = DF_INSN_UID_SAFE_GET (INSN_UID(insn));
2012 /* The client has deferred rescanning. */
2013 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
2015 if (!insn_info)
2017 insn_info = df_insn_create_insn_record (insn);
2018 insn_info->defs = df_null_ref_rec;
2019 insn_info->uses = df_null_ref_rec;
2020 insn_info->eq_uses = df_null_ref_rec;
2021 insn_info->mw_hardregs = df_null_mw_rec;
2024 bitmap_clear_bit (df->insns_to_delete, uid);
2025 /* If the insn is set to be rescanned, it does not need to also
2026 be notes rescanned. */
2027 if (!bitmap_bit_p (df->insns_to_rescan, uid))
2028 bitmap_set_bit (df->insns_to_notes_rescan, INSN_UID (insn));
2029 return;
2032 bitmap_clear_bit (df->insns_to_delete, uid);
2033 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
2035 if (insn_info)
2037 basic_block bb = BLOCK_FOR_INSN (insn);
2038 rtx note;
2039 struct df_collection_rec collection_rec;
2040 unsigned int num_deleted;
2042 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
2043 collection_rec.eq_use_vec = XALLOCAVEC (struct df_ref *, 1000);
2044 collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 1000);
2046 num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
2047 df_ref_chain_delete (insn_info->eq_uses);
2048 insn_info->eq_uses = NULL;
2050 /* Process REG_EQUIV/REG_EQUAL notes */
2051 for (note = REG_NOTES (insn); note;
2052 note = XEXP (note, 1))
2054 switch (REG_NOTE_KIND (note))
2056 case REG_EQUIV:
2057 case REG_EQUAL:
2058 df_uses_record (&collection_rec,
2059 &XEXP (note, 0), DF_REF_REG_USE,
2060 bb, insn_info, DF_REF_IN_NOTE, -1, -1, 0);
2061 default:
2062 break;
2066 /* Find some place to put any new mw_hardregs. */
2067 df_canonize_collection_rec (&collection_rec);
2068 if (collection_rec.next_mw)
2070 unsigned int count = 0;
2071 struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
2072 while (*mw_rec)
2074 count++;
2075 mw_rec++;
2078 if (count)
2080 /* Append to the end of the existing record after
2081 expanding it if necessary. */
2082 if (collection_rec.next_mw > num_deleted)
2084 insn_info->mw_hardregs =
2085 xrealloc (insn_info->mw_hardregs,
2086 (count + 1 + collection_rec.next_mw)
2087 * sizeof (struct df_ref*));
2089 memcpy (&insn_info->mw_hardregs[count], collection_rec.mw_vec,
2090 (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
2091 qsort (insn_info->mw_hardregs, count + collection_rec.next_mw,
2092 sizeof (struct df_mw_hardreg *), df_mw_compare);
2094 else
2096 /* No vector there. */
2097 insn_info->mw_hardregs
2098 = XNEWVEC (struct df_mw_hardreg*,
2099 count + 1 + collection_rec.next_mw);
2100 memcpy (insn_info->mw_hardregs, collection_rec.mw_vec,
2101 (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
2104 /* Get rid of the mw_vec so that df_refs_add_to_chains will
2105 ignore it. */
2106 collection_rec.mw_vec = NULL;
2107 collection_rec.next_mw = 0;
2108 df_refs_add_to_chains (&collection_rec, bb, insn);
2110 else
2111 df_insn_rescan (insn);
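/* Illustrative usage sketch (assumed client code, not from this file):
   a pass that rewrites REG_EQUIV/REG_EQUAL notes in place would
   typically defer rescanning while it works and let df catch up once,
   assuming the df_set_flags / df_process_deferred_rescans interface
   declared in df.h:

       df_set_flags (DF_DEFER_INSN_RESCAN);
       ... modify XEXP (note, 0) of selected REG_EQUIV/REG_EQUAL notes,
	   calling df_notes_rescan (insn) on each -- the insns are only
	   queued in df->insns_to_notes_rescan ...
       df_clear_flags (DF_DEFER_INSN_RESCAN);
       df_process_deferred_rescans ();

   The last call drains the queued work.  */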
2116 /*----------------------------------------------------------------------------
2117 Hard core instruction scanning code. No external interfaces here,
2118 just a lot of routines that look inside insns.
2119 ----------------------------------------------------------------------------*/
2122 /* Return true if the contents of two df_ref's are identical.
2123 It ignores DF_REF_REG_MARKER and DF_REF_MW_HARDREG. */
2125 static bool
2126 df_ref_equal_p (struct df_ref *ref1, struct df_ref *ref2)
2128 if (!ref2)
2129 return false;
2131 /* The two flag tests here are only to make sure we do not look at
2132 the offset and width if they are not there. The flags are
2133 compared in the next set of tests. */
2134 if ((DF_REF_FLAGS_IS_SET (ref1, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
2135 && (DF_REF_FLAGS_IS_SET (ref2, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
2136 && ((DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
2137 || (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
2138 || (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2))))
2139 return false;
2141 return (ref1 == ref2) ||
2142 (DF_REF_REG (ref1) == DF_REF_REG (ref2)
2143 && DF_REF_REGNO (ref1) == DF_REF_REGNO (ref2)
2144 && DF_REF_LOC (ref1) == DF_REF_LOC (ref2)
2145 && DF_REF_INSN_INFO (ref1) == DF_REF_INSN_INFO (ref2)
2146 && DF_REF_TYPE (ref1) == DF_REF_TYPE (ref2)
2147 && ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2148 == (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2149 && DF_REF_BB (ref1) == DF_REF_BB (ref2));
2153 /* Compare REF1 and REF2 for sorting. This is only called from places
2154 where all of the refs are of the same type, in the same insn, and
2155 have the same bb. So these fields are not checked. */
2157 static int
2158 df_ref_compare (const void *r1, const void *r2)
2160 const struct df_ref *const ref1 = *(const struct df_ref *const*)r1;
2161 const struct df_ref *const ref2 = *(const struct df_ref *const*)r2;
2163 if (ref1 == ref2)
2164 return 0;
2166 if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
2167 return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
2169 if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
2170 return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
2172 if ((DF_REF_REG (ref1) != DF_REF_REG (ref2))
2173 || (DF_REF_LOC (ref1) != DF_REF_LOC (ref2)))
2174 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2176 if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
2178 /* If two refs are identical except that one of them is from
2179 a mw and one is not, we need to have the one with the mw
2180 first. */
2181 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
2182 DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
2183 return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
2184 else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
2185 return -1;
2186 else
2187 return 1;
2190 /* The flags are the same at this point so it is safe to only look
2191 at ref1. */
2192 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
2194 if (DF_REF_EXTRACT_OFFSET_CONST (ref1) != DF_REF_EXTRACT_OFFSET_CONST (ref2))
2195 return DF_REF_EXTRACT_OFFSET_CONST (ref1) - DF_REF_EXTRACT_OFFSET_CONST (ref2);
2196 if (DF_REF_EXTRACT_WIDTH_CONST (ref1) != DF_REF_EXTRACT_WIDTH_CONST (ref2))
2197 return DF_REF_EXTRACT_WIDTH_CONST (ref1) - DF_REF_EXTRACT_WIDTH_CONST (ref2);
2198 if (DF_REF_EXTRACT_MODE_CONST (ref1) != DF_REF_EXTRACT_MODE_CONST (ref2))
2199 return DF_REF_EXTRACT_MODE_CONST (ref1) - DF_REF_EXTRACT_MODE_CONST (ref2);
2201 return 0;
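/* Illustrative sketch (not part of df-scan.c): qsort hands
   df_ref_compare the addresses of two vector slots, so the comparator
   must dereference twice to reach the refs.  The same shape in
   miniature, with hypothetical names:  */

#include <stdlib.h>

struct item { int key; };

static int
item_compare (const void *p1, const void *p2)
{
  /* P1/P2 point at elements of a struct item * array.  */
  const struct item *const i1 = *(const struct item *const *) p1;
  const struct item *const i2 = *(const struct item *const *) p2;
  return i1->key - i2->key;
}

/* Usage: given struct item **vec with N entries,
   qsort (vec, n, sizeof (struct item *), item_compare);  */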
2204 static void
2205 df_swap_refs (struct df_ref **ref_vec, int i, int j)
2207 struct df_ref *tmp = ref_vec[i];
2208 ref_vec[i] = ref_vec[j];
2209 ref_vec[j] = tmp;
2212 /* Sort and compress a set of refs. */
2214 static unsigned int
2215 df_sort_and_compress_refs (struct df_ref **ref_vec, unsigned int count)
2217 unsigned int i;
2218 unsigned int dist = 0;
2220 ref_vec[count] = NULL;
2221 /* If there are 1 or 0 elements, there is nothing to do. */
2222 if (count < 2)
2223 return count;
2224 else if (count == 2)
2226 if (df_ref_compare (&ref_vec[0], &ref_vec[1]) > 0)
2227 df_swap_refs (ref_vec, 0, 1);
2229 else
2231 for (i = 0; i < count - 1; i++)
2232 if (df_ref_compare (&ref_vec[i], &ref_vec[i+1]) >= 0)
2233 break;
2234 /* If the array is already strictly ordered,
2235 which is the most common case for large COUNT
2236 (which happens for CALL INSNs),
2237 there is no need to sort and filter out duplicates.
2238 Simply return the count.
2239 Make sure DF_GET_ADD_REFS adds refs in the increasing order
2240 of DF_REF_COMPARE. */
2241 if (i == count - 1)
2242 return count;
2243 qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);
2246 for (i=0; i<count-dist; i++)
2248 /* Find the next ref that is not equal to the current ref. */
2249 while (df_ref_equal_p (ref_vec[i], ref_vec[i + dist + 1]))
2251 df_free_ref (ref_vec[i + dist + 1]);
2252 dist++;
2254 /* Copy it down to the next position. */
2255 if (dist)
2256 ref_vec[i+1] = ref_vec[i + dist + 1];
2259 count -= dist;
2260 ref_vec[count] = NULL;
2261 return count;
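/* Illustrative sketch (not part of df-scan.c): the compression loop
   above in isolation.  After sorting, duplicates are adjacent; DIST
   counts the slots freed so far and each survivor is pulled DIST slots
   to the left.  An int version with explicit bounds instead of the
   NULL sentinel (compress_sorted is a hypothetical name):  */

static unsigned int
compress_sorted (int *vec, unsigned int count)
{
  unsigned int i, dist = 0;
  for (i = 0; i + dist + 1 < count; i++)
    {
      /* Skip over entries equal to vec[i]; they are dropped.  */
      while (i + dist + 1 < count && vec[i] == vec[i + dist + 1])
	dist++;
      /* Pull the next distinct entry down next to vec[i].  */
      if (dist && i + dist + 1 < count)
	vec[i + 1] = vec[i + dist + 1];
    }
  return count - dist;
}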
2265 /* Return true if the contents of two df_mw_hardreg's are
2266 identical. */
2268 static bool
2269 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2271 if (!mw2)
2272 return false;
2273 return (mw1 == mw2) ||
2274 (mw1->mw_reg == mw2->mw_reg
2275 && mw1->type == mw2->type
2276 && mw1->flags == mw2->flags
2277 && mw1->start_regno == mw2->start_regno
2278 && mw1->end_regno == mw2->end_regno);
2282 /* Compare MW1 and MW2 for sorting. */
2284 static int
2285 df_mw_compare (const void *m1, const void *m2)
2287 const struct df_mw_hardreg *const mw1 = *(const struct df_mw_hardreg *const*)m1;
2288 const struct df_mw_hardreg *const mw2 = *(const struct df_mw_hardreg *const*)m2;
2290 if (mw1 == mw2)
2291 return 0;
2293 if (mw1->type != mw2->type)
2294 return mw1->type - mw2->type;
2296 if (mw1->flags != mw2->flags)
2297 return mw1->flags - mw2->flags;
2299 if (mw1->start_regno != mw2->start_regno)
2300 return mw1->start_regno - mw2->start_regno;
2302 if (mw1->end_regno != mw2->end_regno)
2303 return mw1->end_regno - mw2->end_regno;
2305 if (mw1->mw_reg != mw2->mw_reg)
2306 return mw1->mw_order - mw2->mw_order;
2308 return 0;
2312 /* Sort and compress a set of mw hardregs. */
2314 static unsigned int
2315 df_sort_and_compress_mws (struct df_mw_hardreg **mw_vec, unsigned int count)
2317 struct df_scan_problem_data *problem_data
2318 = (struct df_scan_problem_data *) df_scan->problem_data;
2319 unsigned int i;
2320 unsigned int dist = 0;
2321 mw_vec[count] = NULL;
2323 if (count < 2)
2324 return count;
2325 else if (count == 2)
2327 if (df_mw_compare (&mw_vec[0], &mw_vec[1]) > 0)
2329 struct df_mw_hardreg *tmp = mw_vec[0];
2330 mw_vec[0] = mw_vec[1];
2331 mw_vec[1] = tmp;
2334 else
2335 qsort (mw_vec, count, sizeof (struct df_mw_hardreg *), df_mw_compare);
2337 for (i=0; i<count-dist; i++)
2339 /* Find the next ref that is not equal to the current ref. */
2340 while (df_mw_equal_p (mw_vec[i], mw_vec[i + dist + 1]))
2342 pool_free (problem_data->mw_reg_pool, mw_vec[i + dist + 1]);
2343 dist++;
2345 /* Copy it down to the next position. */
2346 if (dist)
2347 mw_vec[i+1] = mw_vec[i + dist + 1];
2350 count -= dist;
2351 mw_vec[count] = NULL;
2352 return count;
2356 /* Sort and remove duplicates from the COLLECTION_REC. */
2358 static void
2359 df_canonize_collection_rec (struct df_collection_rec *collection_rec)
2361 if (collection_rec->def_vec)
2362 collection_rec->next_def
2363 = df_sort_and_compress_refs (collection_rec->def_vec,
2364 collection_rec->next_def);
2365 if (collection_rec->use_vec)
2366 collection_rec->next_use
2367 = df_sort_and_compress_refs (collection_rec->use_vec,
2368 collection_rec->next_use);
2369 if (collection_rec->eq_use_vec)
2370 collection_rec->next_eq_use
2371 = df_sort_and_compress_refs (collection_rec->eq_use_vec,
2372 collection_rec->next_eq_use);
2373 if (collection_rec->mw_vec)
2374 collection_rec->next_mw
2375 = df_sort_and_compress_mws (collection_rec->mw_vec,
2376 collection_rec->next_mw);
2380 /* Add the new df_ref to appropriate reg_info/ref_info chains. */
2382 static void
2383 df_install_ref (struct df_ref *this_ref,
2384 struct df_reg_info *reg_info,
2385 struct df_ref_info *ref_info,
2386 bool add_to_table)
2388 unsigned int regno = DF_REF_REGNO (this_ref);
2389 /* Add the ref to the reg_{def,use,eq_use} chain. */
2390 struct df_ref *head = reg_info->reg_chain;
2392 reg_info->reg_chain = this_ref;
2393 reg_info->n_refs++;
2395 if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
2397 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
2398 df->hard_regs_live_count[regno]++;
2401 gcc_assert (DF_REF_NEXT_REG (this_ref) == NULL);
2402 gcc_assert (DF_REF_PREV_REG (this_ref) == NULL);
2404 DF_REF_NEXT_REG (this_ref) = head;
2406 /* We cannot actually link to the head of the chain. */
2407 DF_REF_PREV_REG (this_ref) = NULL;
2409 if (head)
2410 DF_REF_PREV_REG (head) = this_ref;
2412 if (add_to_table)
2414 gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
2415 df_check_and_grow_ref_info (ref_info, 1);
2416 DF_REF_ID (this_ref) = ref_info->table_size;
2417 /* Add the ref to the big array of defs. */
2418 ref_info->refs[ref_info->table_size] = this_ref;
2419 ref_info->table_size++;
2421 else
2422 DF_REF_ID (this_ref) = -1;
2424 ref_info->total_size++;
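/* Illustrative sketch (not part of df-scan.c): the reg_chain insertion
   above is an ordinary doubly-linked push-front where the head pointer
   lives in the owning reg_info, which is why the first node's prev
   field is NULL instead of pointing back at the owner.  Hypothetical
   node type:  */

struct node { struct node *next, *prev; };

static void
push_front (struct node **head, struct node *n)
{
  n->prev = NULL;		/* nothing links back to the owner */
  n->next = *head;
  if (*head)
    (*head)->prev = n;
  *head = n;
}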
2428 /* This function takes one of the groups of refs (defs, uses or
2429 eq_uses) and installs the entire group into the insn. It also adds
2430 each of these refs into the appropriate chains. */
2432 static struct df_ref **
2433 df_install_refs (basic_block bb,
2434 struct df_ref **old_vec, unsigned int count,
2435 struct df_reg_info **reg_info,
2436 struct df_ref_info *ref_info,
2437 bool is_notes)
2439 if (count)
2441 unsigned int i;
2442 struct df_ref **new_vec = XNEWVEC (struct df_ref*, count + 1);
2443 bool add_to_table;
2445 switch (ref_info->ref_order)
2447 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
2448 case DF_REF_ORDER_BY_REG_WITH_NOTES:
2449 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
2450 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
2451 add_to_table = true;
2452 break;
2453 case DF_REF_ORDER_UNORDERED:
2454 case DF_REF_ORDER_BY_REG:
2455 case DF_REF_ORDER_BY_INSN:
2456 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
2457 add_to_table = !is_notes;
2458 break;
2459 default:
2460 add_to_table = false;
2461 break;
2464 /* Do not add if ref is not in the right blocks. */
2465 if (add_to_table && df->analyze_subset)
2466 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
2468 for (i = 0; i < count; i++)
2470 struct df_ref *this_ref = old_vec[i];
2471 new_vec[i] = this_ref;
2472 df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
2473 ref_info, add_to_table);
2476 new_vec[count] = NULL;
2477 return new_vec;
2479 else
2480 return df_null_ref_rec;
2484 /* This function takes the mws and installs the entire group into
2485 the insn. */
2487 static struct df_mw_hardreg **
2488 df_install_mws (struct df_mw_hardreg **old_vec, unsigned int count)
2490 if (count)
2492 struct df_mw_hardreg **new_vec
2493 = XNEWVEC (struct df_mw_hardreg*, count + 1);
2494 memcpy (new_vec, old_vec,
2495 sizeof (struct df_mw_hardreg*) * (count + 1));
2496 return new_vec;
2498 else
2499 return df_null_mw_rec;
2503 /* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
2504 chains and update other necessary information. */
2506 static void
2507 df_refs_add_to_chains (struct df_collection_rec *collection_rec,
2508 basic_block bb, rtx insn)
2510 if (insn)
2512 struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
2513 /* If there is a vector in the collection rec, add it to the
2514 insn. A null rec is a signal that the caller will handle the
2515 chain specially. */
2516 if (collection_rec->def_vec)
2518 if (insn_rec->defs && *insn_rec->defs)
2519 free (insn_rec->defs);
2520 insn_rec->defs
2521 = df_install_refs (bb, collection_rec->def_vec,
2522 collection_rec->next_def,
2523 df->def_regs,
2524 &df->def_info, false);
2526 if (collection_rec->use_vec)
2528 if (insn_rec->uses && *insn_rec->uses)
2529 free (insn_rec->uses);
2530 insn_rec->uses
2531 = df_install_refs (bb, collection_rec->use_vec,
2532 collection_rec->next_use,
2533 df->use_regs,
2534 &df->use_info, false);
2536 if (collection_rec->eq_use_vec)
2538 if (insn_rec->eq_uses && *insn_rec->eq_uses)
2539 free (insn_rec->eq_uses);
2540 insn_rec->eq_uses
2541 = df_install_refs (bb, collection_rec->eq_use_vec,
2542 collection_rec->next_eq_use,
2543 df->eq_use_regs,
2544 &df->use_info, true);
2546 if (collection_rec->mw_vec)
2548 if (insn_rec->mw_hardregs && *insn_rec->mw_hardregs)
2549 free (insn_rec->mw_hardregs);
2550 insn_rec->mw_hardregs
2551 = df_install_mws (collection_rec->mw_vec,
2552 collection_rec->next_mw);
2555 else
2557 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
2559 if (bb_info->artificial_defs && *bb_info->artificial_defs)
2560 free (bb_info->artificial_defs);
2561 bb_info->artificial_defs
2562 = df_install_refs (bb, collection_rec->def_vec,
2563 collection_rec->next_def,
2564 df->def_regs,
2565 &df->def_info, false);
2566 if (bb_info->artificial_uses && *bb_info->artificial_uses)
2567 free (bb_info->artificial_uses);
2568 bb_info->artificial_uses
2569 = df_install_refs (bb, collection_rec->use_vec,
2570 collection_rec->next_use,
2571 df->use_regs,
2572 &df->use_info, false);
2577 /* Allocate a ref and initialize its fields.
2579 If REF_FLAGS contains DF_REF_SIGN_EXTRACT or DF_REF_ZERO_EXTRACT,
2580 WIDTH, OFFSET and MODE describe the extracted part when the extract
2581 operands were constants. Otherwise WIDTH and OFFSET should be -1
2582 when those flags are set. */
2584 static struct df_ref *
2585 df_ref_create_structure (struct df_collection_rec *collection_rec,
2586 rtx reg, rtx *loc,
2587 basic_block bb, struct df_insn_info *info,
2588 enum df_ref_type ref_type,
2589 enum df_ref_flags ref_flags,
2590 int width, int offset, enum machine_mode mode)
2592 struct df_ref *this_ref;
2593 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2594 struct df_scan_problem_data *problem_data
2595 = (struct df_scan_problem_data *) df_scan->problem_data;
2597 if (ref_flags & (DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
2599 this_ref = (struct df_ref *) pool_alloc (problem_data->ref_extract_pool);
2600 DF_REF_EXTRACT_WIDTH (this_ref) = width;
2601 DF_REF_EXTRACT_OFFSET (this_ref) = offset;
2602 DF_REF_EXTRACT_MODE (this_ref) = mode;
2604 else
2605 this_ref = (struct df_ref *) pool_alloc (problem_data->ref_pool);
2606 DF_REF_ID (this_ref) = -1;
2607 DF_REF_REG (this_ref) = reg;
2608 DF_REF_REGNO (this_ref) = regno;
2609 DF_REF_LOC (this_ref) = loc;
2610 DF_REF_INSN_INFO (this_ref) = info;
2611 DF_REF_CHAIN (this_ref) = NULL;
2612 DF_REF_TYPE (this_ref) = ref_type;
2613 DF_REF_FLAGS (this_ref) = ref_flags;
2614 DF_REF_BB (this_ref) = bb;
2615 DF_REF_NEXT_REG (this_ref) = NULL;
2616 DF_REF_PREV_REG (this_ref) = NULL;
2617 DF_REF_ORDER (this_ref) = df->ref_order++;
2619 /* We need to clear this bit because fwprop, and in the future
2620 possibly other optimizations, sometimes create new refs using old
2621 refs as the model. */
2622 DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
2624 /* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
2625 if ((regno < FIRST_PSEUDO_REGISTER)
2626 && (!DF_REF_IS_ARTIFICIAL (this_ref)))
2628 if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
2630 if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
2631 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2633 else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
2634 && (regno == FRAME_POINTER_REGNUM
2635 || regno == ARG_POINTER_REGNUM)))
2636 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2639 if (collection_rec)
2641 if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
2642 collection_rec->def_vec[collection_rec->next_def++] = this_ref;
2643 else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
2644 collection_rec->eq_use_vec[collection_rec->next_eq_use++] = this_ref;
2645 else
2646 collection_rec->use_vec[collection_rec->next_use++] = this_ref;
2649 return this_ref;
2653 /* Create new references of type DF_REF_TYPE for each part of register REG
2654 at address LOC within INSN of BB.
2656 If REF_FLAGS contains DF_REF_SIGN_EXTRACT or DF_REF_ZERO_EXTRACT,
2657 WIDTH, OFFSET and MODE describe the extracted part when the
2658 extract operands were constants. Otherwise WIDTH and OFFSET
2659 should be -1 when those flags are set. */
2662 static void
2663 df_ref_record (struct df_collection_rec *collection_rec,
2664 rtx reg, rtx *loc,
2665 basic_block bb, struct df_insn_info *insn_info,
2666 enum df_ref_type ref_type,
2667 enum df_ref_flags ref_flags,
2668 int width, int offset, enum machine_mode mode)
2670 unsigned int regno;
2672 gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
2674 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2675 if (regno < FIRST_PSEUDO_REGISTER)
2677 struct df_mw_hardreg *hardreg = NULL;
2678 struct df_scan_problem_data *problem_data
2679 = (struct df_scan_problem_data *) df_scan->problem_data;
2680 unsigned int i;
2681 unsigned int endregno;
2682 struct df_ref *ref;
2684 if (GET_CODE (reg) == SUBREG)
2686 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
2687 SUBREG_BYTE (reg), GET_MODE (reg));
2688 endregno = regno + subreg_nregs (reg);
2690 else
2691 endregno = END_HARD_REGNO (reg);
2693 /* If this is a multiword hardreg, we create some extra
2694 data structures that will enable us to easily build REG_DEAD
2695 and REG_UNUSED notes. */
2696 if ((endregno != regno + 1) && insn_info)
2698 /* Sets to a subreg of a multiword register are partial.
2699 Sets to a non-subreg of a multiword register are not. */
2700 if (GET_CODE (reg) == SUBREG)
2701 ref_flags |= DF_REF_PARTIAL;
2702 ref_flags |= DF_REF_MW_HARDREG;
2704 hardreg = (struct df_mw_hardreg *) pool_alloc (problem_data->mw_reg_pool);
2705 hardreg->type = ref_type;
2706 hardreg->flags = ref_flags;
2707 hardreg->mw_reg = reg;
2708 hardreg->start_regno = regno;
2709 hardreg->end_regno = endregno - 1;
2710 hardreg->mw_order = df->ref_order++;
2711 collection_rec->mw_vec[collection_rec->next_mw++] = hardreg;
2714 for (i = regno; i < endregno; i++)
2716 ref = df_ref_create_structure (collection_rec, regno_reg_rtx[i], loc,
2717 bb, insn_info, ref_type, ref_flags,
2718 width, offset, mode);
2720 gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
2723 else
2725 struct df_ref *ref;
2726 ref = df_ref_create_structure (collection_rec, reg, loc, bb, insn_info,
2727 ref_type, ref_flags, width, offset, mode);
2732 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
2733 covered by the outer mode is smaller than that covered by the inner mode
2734 is a read-modify-write operation.
2735 This function returns true iff the SUBREG X is such a SUBREG. */
2737 bool
2738 df_read_modify_subreg_p (rtx x)
2740 unsigned int isize, osize;
2741 if (GET_CODE (x) != SUBREG)
2742 return false;
2743 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2744 osize = GET_MODE_SIZE (GET_MODE (x));
2745 return isize > osize
2746 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
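/* Illustrative worked example (assumed target parameters, not from
   this file): consider (subreg:SI (reg:DI d) 0).  Here isize = 8
   (DImode) and osize = 4 (SImode), so isize > osize holds.  If
   REGMODE_NATURAL_SIZE (DImode) is 4 -- a 32-bit target where the
   DImode value lives in a pair of natural-size registers -- then
   8 > 4 as well and the function returns true: storing into the
   SImode subreg rewrites only half of the pair, a read-modify-write.
   If the natural size were 8, the second test would fail and a set of
   the subreg would count as a full definition.  */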
2750 /* Process all the registers defined in the rtx, X.
2751 Autoincrement/decrement definitions will be picked up by
2752 df_uses_record. */
2754 static void
2755 df_def_record_1 (struct df_collection_rec *collection_rec,
2756 rtx x, basic_block bb, struct df_insn_info *insn_info,
2757 enum df_ref_flags flags)
2759 rtx *loc;
2760 rtx dst;
2761 int offset = -1;
2762 int width = -1;
2763 enum machine_mode mode = 0;
2765 /* We may recursively call ourselves on EXPR_LIST when dealing with a
2766 PARALLEL construct. */
2767 if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
2768 loc = &XEXP (x, 0);
2769 else
2770 loc = &SET_DEST (x);
2771 dst = *loc;
2773 /* It is legal to have a set destination be a parallel. */
2774 if (GET_CODE (dst) == PARALLEL)
2776 int i;
2778 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2780 rtx temp = XVECEXP (dst, 0, i);
2781 if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
2782 || GET_CODE (temp) == SET)
2783 df_def_record_1 (collection_rec,
2784 temp, bb, insn_info,
2785 GET_CODE (temp) == CLOBBER
2786 ? flags | DF_REF_MUST_CLOBBER : flags);
2788 return;
2791 if (GET_CODE (dst) == STRICT_LOW_PART)
2793 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;
2795 loc = &XEXP (dst, 0);
2796 dst = *loc;
2799 if (GET_CODE (dst) == ZERO_EXTRACT)
2801 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;
2803 if (GET_CODE (XEXP (dst, 1)) == CONST_INT
2804 && GET_CODE (XEXP (dst, 2)) == CONST_INT)
2806 width = INTVAL (XEXP (dst, 1));
2807 offset = INTVAL (XEXP (dst, 2));
2808 mode = GET_MODE (dst);
2811 loc = &XEXP (dst, 0);
2812 dst = *loc;
2815 /* At this point if we do not have a reg or a subreg, just return. */
2816 if (REG_P (dst))
2818 df_ref_record (collection_rec,
2819 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags,
2820 width, offset, mode);
2822 /* We want to keep sp alive everywhere - by making all
2823 writes to sp also uses of sp. */
2824 if (REGNO (dst) == STACK_POINTER_REGNUM)
2825 df_ref_record (collection_rec,
2826 dst, NULL, bb, insn_info, DF_REF_REG_USE, flags,
2827 width, offset, mode);
2829 else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
2831 if (df_read_modify_subreg_p (dst))
2832 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
2834 flags |= DF_REF_SUBREG;
2836 df_ref_record (collection_rec,
2837 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags,
2838 width, offset, mode);
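/* Illustrative example (hypothetical RTL, not from this file): given

     (set (zero_extract:SI (reg:SI 60) (const_int 8) (const_int 4))
	  (reg:SI 61))

   the code above records a def of reg 60 carrying DF_REF_READ_WRITE,
   DF_REF_PARTIAL and DF_REF_ZERO_EXTRACT, with width 8, offset 4 and
   mode SImode, since (ignoring BITS_BIG_ENDIAN) only an 8-bit field
   of reg 60 starting at bit 4 is written.  */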
2843 /* Process all the registers defined in the pattern rtx, X. */
2845 static void
2846 df_defs_record (struct df_collection_rec *collection_rec,
2847 rtx x, basic_block bb, struct df_insn_info *insn_info,
2848 enum df_ref_flags flags)
2850 RTX_CODE code = GET_CODE (x);
2852 if (code == SET || code == CLOBBER)
2854 /* Mark the single def within the pattern. */
2855 enum df_ref_flags clobber_flags = flags;
2856 clobber_flags |= (code == CLOBBER) ? DF_REF_MUST_CLOBBER : 0;
2857 df_def_record_1 (collection_rec, x, bb, insn_info, clobber_flags);
2859 else if (code == COND_EXEC)
2861 df_defs_record (collection_rec, COND_EXEC_CODE (x),
2862 bb, insn_info, DF_REF_CONDITIONAL);
2864 else if (code == PARALLEL)
2866 int i;
2868 /* Mark the multiple defs within the pattern. */
2869 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2870 df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn_info, flags);
2875 /* Process all the registers used in the rtx at address LOC.
2877 If REF_FLAGS contains DF_REF_SIGN_EXTRACT or DF_REF_ZERO_EXTRACT,
2878 WIDTH, OFFSET and MODE describe the extracted part when the
2879 extract operands were constants. Otherwise WIDTH and OFFSET
2880 should be -1 when those flags are set. */
2882 static void
2883 df_uses_record (struct df_collection_rec *collection_rec,
2884 rtx *loc, enum df_ref_type ref_type,
2885 basic_block bb, struct df_insn_info *insn_info,
2886 enum df_ref_flags flags,
2887 int width, int offset, enum machine_mode mode)
2889 RTX_CODE code;
2890 rtx x;
2892 retry:
2893 x = *loc;
2894 if (!x)
2895 return;
2896 code = GET_CODE (x);
2897 switch (code)
2899 case LABEL_REF:
2900 case SYMBOL_REF:
2901 case CONST_INT:
2902 case CONST:
2903 case CONST_DOUBLE:
2904 case CONST_FIXED:
2905 case CONST_VECTOR:
2906 case PC:
2907 case CC0:
2908 case ADDR_VEC:
2909 case ADDR_DIFF_VEC:
2910 return;
2912 case CLOBBER:
2913 /* If we are clobbering a MEM, mark any registers inside the address
2914 as being used. */
2915 if (MEM_P (XEXP (x, 0)))
2916 df_uses_record (collection_rec,
2917 &XEXP (XEXP (x, 0), 0),
2918 DF_REF_REG_MEM_STORE,
2919 bb, insn_info,
2920 flags, width, offset, mode);
2922 /* If we're clobbering a REG then we have a def so ignore. */
2923 return;
2925 case MEM:
2926 df_uses_record (collection_rec,
2927 &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
2928 bb, insn_info, flags & DF_REF_IN_NOTE,
2929 width, offset, mode);
2930 return;
2932 case SUBREG:
2933 /* While we're here, optimize this case. */
2934 flags |= DF_REF_PARTIAL;
2935 /* In case the SUBREG is not of a REG, do not optimize. */
2936 if (!REG_P (SUBREG_REG (x)))
2938 loc = &SUBREG_REG (x);
2939 df_uses_record (collection_rec, loc, ref_type, bb, insn_info, flags,
2940 width, offset, mode);
2941 return;
2943 /* ... Fall through ... */
2945 case REG:
2946 df_ref_record (collection_rec,
2947 x, loc, bb, insn_info,
2948 ref_type, flags,
2949 width, offset, mode);
2950 return;
2952 case SIGN_EXTRACT:
2953 case ZERO_EXTRACT:
2955 /* If the parameters to the zero or sign extract are
2956 constants, strip them off and recurse, otherwise there is
2957 no information that we can gain from this operation. */
2958 if (GET_CODE (XEXP (x, 1)) == CONST_INT
2959 && GET_CODE (XEXP (x, 2)) == CONST_INT)
2961 width = INTVAL (XEXP (x, 1));
2962 offset = INTVAL (XEXP (x, 2));
2963 mode = GET_MODE (x);
2965 if (code == ZERO_EXTRACT)
2966 flags |= DF_REF_ZERO_EXTRACT;
2967 else
2968 flags |= DF_REF_SIGN_EXTRACT;
2970 df_uses_record (collection_rec,
2971 &XEXP (x, 0), ref_type, bb, insn_info, flags,
2972 width, offset, mode);
2973 return;
2976 break;
2978 case SET:
2980 rtx dst = SET_DEST (x);
2981 gcc_assert (!(flags & DF_REF_IN_NOTE));
2982 df_uses_record (collection_rec,
2983 &SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags,
2984 width, offset, mode);
2986 switch (GET_CODE (dst))
2988 case SUBREG:
2989 if (df_read_modify_subreg_p (dst))
2991 df_uses_record (collection_rec, &SUBREG_REG (dst),
2992 DF_REF_REG_USE, bb, insn_info,
2993 flags | DF_REF_READ_WRITE | DF_REF_SUBREG,
2994 width, offset, mode);
2995 break;
2997 /* Fall through. */
2998 case REG:
2999 case PARALLEL:
3000 case SCRATCH:
3001 case PC:
3002 case CC0:
3003 break;
3004 case MEM:
3005 df_uses_record (collection_rec, &XEXP (dst, 0),
3006 DF_REF_REG_MEM_STORE, bb, insn_info, flags,
3007 width, offset, mode);
3008 break;
3009 case STRICT_LOW_PART:
3011 rtx *temp = &XEXP (dst, 0);
3012 /* A strict_low_part uses the whole REG and not just the
3013 SUBREG. */
3014 dst = XEXP (dst, 0);
3015 df_uses_record (collection_rec,
3016 (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
3017 DF_REF_REG_USE, bb, insn_info,
3018 DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART,
3019 width, offset, mode);
3021 break;
3022 case ZERO_EXTRACT:
3024 if (GET_CODE (XEXP (dst, 1)) == CONST_INT
3025 && GET_CODE (XEXP (dst, 2)) == CONST_INT)
3027 width = INTVAL (XEXP (dst, 1));
3028 offset = INTVAL (XEXP (dst, 2));
3029 mode = GET_MODE (dst);
3031 else
3033 df_uses_record (collection_rec, &XEXP (dst, 1),
3034 DF_REF_REG_USE, bb, insn_info, flags,
3035 width, offset, mode);
3036 df_uses_record (collection_rec, &XEXP (dst, 2),
3037 DF_REF_REG_USE, bb, insn_info, flags,
3038 width, offset, mode);
3041 df_uses_record (collection_rec, &XEXP (dst, 0),
3042 DF_REF_REG_USE, bb, insn_info,
3043 DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
3044 width, offset, mode);
3046 break;
3048 default:
3049 gcc_unreachable ();
3051 return;
3054 case RETURN:
3055 break;
3057 case ASM_OPERANDS:
3058 case UNSPEC_VOLATILE:
3059 case TRAP_IF:
3060 case ASM_INPUT:
3062 /* Traditional and volatile asm instructions must be
3063 considered to use and clobber all hard registers, all
3064 pseudo-registers and all of memory. So must TRAP_IF and
3065 UNSPEC_VOLATILE operations.
3067 Consider for instance a volatile asm that changes the fpu
3068 rounding mode. An insn should not be moved across this
3069 even if it only uses pseudo-regs because it might give an
3070 incorrectly rounded result.
3072 However, flow.c's liveness computation did *not* do this,
3073 giving the reasoning as " ?!? Unfortunately, marking all
3074 hard registers as live causes massive problems for the
3075 register allocator and marking all pseudos as live creates
3076 mountains of uninitialized variable warnings."
3078 In order to maintain the status quo with regard to liveness
3079 and uses, we do what flow.c did and just mark any regs we
3080 can find in ASM_OPERANDS as used. In global.c, asm insns are
3081 scanned and regs_asm_clobbered is filled out.
3083 For all ASM_OPERANDS, we must traverse the vector of input
3084 operands. We cannot just fall through here since then we
3085 would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
3086 which does not indicate traditional asms unlike its normal
3087 usage. */
3088 if (code == ASM_OPERANDS)
3090 int j;
3092 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
3093 df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
3094 DF_REF_REG_USE, bb, insn_info, flags,
3095 width, offset, mode);
3096 return;
3098 break;
3101 case PRE_DEC:
3102 case POST_DEC:
3103 case PRE_INC:
3104 case POST_INC:
3105 case PRE_MODIFY:
3106 case POST_MODIFY:
3107 /* Catch the def of the register being modified. */
3108 df_ref_record (collection_rec, XEXP (x, 0), &XEXP (x, 0),
3109 bb, insn_info,
3110 DF_REF_REG_DEF,
3111 flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY,
3112 width, offset, mode);
3114 /* ... Fall through to handle uses ... */
3116 default:
3117 break;
3120 /* Recursively scan the operands of this expression. */
3122 const char *fmt = GET_RTX_FORMAT (code);
3123 int i;
3125 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3127 if (fmt[i] == 'e')
3129 /* Tail recursive case: save a function call level. */
3130 if (i == 0)
3132 loc = &XEXP (x, 0);
3133 goto retry;
3135 df_uses_record (collection_rec, &XEXP (x, i), ref_type,
3136 bb, insn_info, flags,
3137 width, offset, mode);
3139 else if (fmt[i] == 'E')
3141 int j;
3142 for (j = 0; j < XVECLEN (x, i); j++)
3143 df_uses_record (collection_rec,
3144 &XVECEXP (x, i, j), ref_type,
3145 bb, insn_info, flags,
3146 width, offset, mode);
3151 return;
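/* Illustrative sketch (not part of df-scan.c): the retry label above
   is hand-written tail-call elimination -- the operand at index 0 is
   walked iteratively while the other operands recurse.  A generic tree
   walker with the same shape (visit and tnode are hypothetical):  */

struct tnode { struct tnode *kid0, *kid1; };

static void
visit (struct tnode *t)
{
 retry:
  if (!t)
    return;
  /* ... process T here ...  */
  visit (t->kid1);	/* genuine recursion for the other child */
  t = t->kid0;		/* tail position: iterate instead of call */
  goto retry;
}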
3155 /* For all DF_REF_CONDITIONAL defs, add a corresponding use. */
3157 static void
3158 df_get_conditional_uses (struct df_collection_rec *collection_rec)
3160 unsigned int i;
3161 for (i = 0; i < collection_rec->next_def; i++)
3163 struct df_ref *ref = collection_rec->def_vec[i];
3164 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3166 int width = -1;
3167 int offset = -1;
3168 enum machine_mode mode = 0;
3169 struct df_ref *use;
3171 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
3173 width = DF_REF_EXTRACT_WIDTH (ref);
3174 offset = DF_REF_EXTRACT_OFFSET (ref);
3175 mode = DF_REF_EXTRACT_MODE (ref);
3178 use = df_ref_create_structure (collection_rec, DF_REF_REG (ref),
3179 DF_REF_LOC (ref), DF_REF_BB (ref),
3180 DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
3181 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL,
3182 width, offset, mode);
3183 DF_REF_REGNO (use) = DF_REF_REGNO (ref);
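/* Illustrative example (hypothetical RTL, not from this file): for

     (cond_exec (ne (reg:CC cc) (const_int 0))
		(set (reg:SI 70) (reg:SI 71)))

   the def of reg 70 is marked DF_REF_CONDITIONAL, and the loop above
   manufactures a matching use of reg 70: when the predicate is false
   the insn leaves reg 70 holding its previous value, so that value is
   live into the insn.  */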
3189 /* Get call's extra defs and uses. */
3191 static void
3192 df_get_call_refs (struct df_collection_rec * collection_rec,
3193 basic_block bb,
3194 struct df_insn_info *insn_info,
3195 enum df_ref_flags flags)
3197 rtx note;
3198 bitmap_iterator bi;
3199 unsigned int ui;
3200 bool is_sibling_call;
3201 unsigned int i;
3202 bitmap defs_generated = BITMAP_ALLOC (&df_bitmap_obstack);
3204 /* Do not generate clobbers for registers that are the result of the
3205 call. Doing so would cause ordering problems in the chain
3206 building code, depending on which def is seen first. */
3207 for (i=0; i<collection_rec->next_def; i++)
3209 struct df_ref *def = collection_rec->def_vec[i];
3210 bitmap_set_bit (defs_generated, DF_REF_REGNO (def));
3213 /* Record the registers used to pass arguments, and the registers
3214 explicitly noted as clobbered. */
3215 for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
3216 note = XEXP (note, 1))
3218 if (GET_CODE (XEXP (note, 0)) == USE)
3219 df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
3220 DF_REF_REG_USE, bb, insn_info, flags, -1, -1, 0);
3221 else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3223 if (REG_P (XEXP (XEXP (note, 0), 0)))
3225 unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
3226 if (!bitmap_bit_p (defs_generated, regno))
3227 df_defs_record (collection_rec, XEXP (note, 0), bb,
3228 insn_info, flags);
3230 else
3231 df_uses_record (collection_rec, &XEXP (note, 0),
3232 DF_REF_REG_USE, bb, insn_info, flags, -1, -1, 0);
3236 /* The stack ptr is used (honorarily) by a CALL insn. */
3237 df_ref_record (collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
3238 NULL, bb, insn_info, DF_REF_REG_USE,
3239 DF_REF_CALL_STACK_USAGE | flags,
3240 -1, -1, 0);
3242 /* Calls may also reference any of the global registers,
3243 so they are recorded as used. */
3244 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3245 if (global_regs[i])
3247 df_ref_record (collection_rec, regno_reg_rtx[i],
3248 NULL, bb, insn_info, DF_REF_REG_USE, flags, -1, -1, 0);
3249 df_ref_record (collection_rec, regno_reg_rtx[i],
3250 NULL, bb, insn_info, DF_REF_REG_DEF, flags, -1, -1, 0);
3253 is_sibling_call = SIBLING_CALL_P (insn_info->insn);
3254 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
3256 if (!global_regs[ui]
3257 && (!bitmap_bit_p (defs_generated, ui))
3258 && (!is_sibling_call
3259 || !bitmap_bit_p (df->exit_block_uses, ui)
3260 || refers_to_regno_p (ui, ui+1,
3261 crtl->return_rtx, NULL)))
3262 df_ref_record (collection_rec, regno_reg_rtx[ui],
3263 NULL, bb, insn_info, DF_REF_REG_DEF,
3264 DF_REF_MAY_CLOBBER | flags,
3265 -1, -1, 0);
3268 BITMAP_FREE (defs_generated);
3269 return;
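/* Illustrative example (hypothetical RTL, not from this file): for a
   call whose result lands in a hard register, say

     (set (reg:SI 0) (call (mem:QI (symbol_ref "f")) (const_int 0)))

   reg 0 already sits in COLLECTION_REC as the def of the call result,
   so the defs_generated bitmap suppresses the DF_REF_MAY_CLOBBER def
   that the regs_invalidated_by_call loop would otherwise add for
   reg 0, keeping a single def of that register in the insn.  */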
3272 /* Collect all refs in the INSN. This function is free of side
3273 effects - it will create and return lists of df_ref's in the
3274 COLLECTION_REC without putting those refs into the existing ref
3275 chains and reg chains. */
3277 static void
3278 df_insn_refs_collect (struct df_collection_rec* collection_rec,
3279 basic_block bb, struct df_insn_info *insn_info)
3281 rtx note;
3282 bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);
3284 /* Clear out the collection record. */
3285 collection_rec->next_def = 0;
3286 collection_rec->next_use = 0;
3287 collection_rec->next_eq_use = 0;
3288 collection_rec->next_mw = 0;
3290 /* Record register defs. */
3291 df_defs_record (collection_rec, PATTERN (insn_info->insn), bb, insn_info, 0);
3293 /* Process REG_EQUIV/REG_EQUAL notes. */
3294 for (note = REG_NOTES (insn_info->insn); note;
3295 note = XEXP (note, 1))
3297 switch (REG_NOTE_KIND (note))
3299 case REG_EQUIV:
3300 case REG_EQUAL:
3301 df_uses_record (collection_rec,
3302 &XEXP (note, 0), DF_REF_REG_USE,
3303 bb, insn_info, DF_REF_IN_NOTE, -1, -1, 0);
3304 break;
3305 case REG_NON_LOCAL_GOTO:
3306 /* The frame ptr is used by a non-local goto. */
3307 df_ref_record (collection_rec,
3308 regno_reg_rtx[FRAME_POINTER_REGNUM],
3309 NULL, bb, insn_info,
3310 DF_REF_REG_USE, 0, -1, -1, 0);
3311 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3312 df_ref_record (collection_rec,
3313 regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
3314 NULL, bb, insn_info,
3315 DF_REF_REG_USE, 0, -1, -1, 0);
3316 #endif
3317 break;
3318 default:
3319 break;
3323 if (CALL_P (insn_info->insn))
3324 df_get_call_refs (collection_rec, bb, insn_info,
3325 (is_cond_exec) ? DF_REF_CONDITIONAL : 0);
3327 /* Record the register uses. */
3328 df_uses_record (collection_rec,
3329 &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0,
3330 -1, -1, 0);
3332 /* DF_REF_CONDITIONAL needs corresponding USES. */
3333 if (is_cond_exec)
3334 df_get_conditional_uses (collection_rec);
3336 df_canonize_collection_rec (collection_rec);
3339 /* Recompute the luids for the insns in BB. */
3341 void
3342 df_recompute_luids (basic_block bb)
3344 rtx insn;
3345 int luid = 0;
3347 df_grow_insn_info ();
3349 /* Scan the block an insn at a time from beginning to end. */
3350 FOR_BB_INSNS (bb, insn)
3352 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3353 /* Inserting labels does not always trigger the incremental
3354 rescanning. */
3355 if (!insn_info)
3357 gcc_assert (!INSN_P (insn));
3358 insn_info = df_insn_create_insn_record (insn);
3361 DF_INSN_INFO_LUID (insn_info) = luid;
3362 if (INSN_P (insn))
3363 luid++;
3368 /* Returns true if the function entry needs to
3369 define the static chain register. */
3371 static bool
3372 df_need_static_chain_reg (struct function *fun)
3374 tree fun_context = decl_function_context (fun->decl);
3375 return fun_context
3376 && DECL_NO_STATIC_CHAIN (fun_context) == false;
3380 /* Collect all artificial refs at the block level for BB and add them
3381 to COLLECTION_REC. */
3383 static void
3384 df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
3386 collection_rec->next_def = 0;
3387 collection_rec->next_use = 0;
3388 collection_rec->next_eq_use = 0;
3389 collection_rec->next_mw = 0;
3391 if (bb->index == ENTRY_BLOCK)
3393 df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
3394 return;
3396 else if (bb->index == EXIT_BLOCK)
3398 df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
3399 return;
3402 #ifdef EH_RETURN_DATA_REGNO
3403 if (bb_has_eh_pred (bb))
3405 unsigned int i;
3406 /* Mark the registers that will contain data for the handler. */
3407 for (i = 0; ; ++i)
3409 unsigned regno = EH_RETURN_DATA_REGNO (i);
3410 if (regno == INVALID_REGNUM)
3411 break;
3412 df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
3413 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1, 0);
3416 #endif
3419 #ifdef EH_USES
3420 if (bb_has_eh_pred (bb))
3422 unsigned int i;
3423 /* This code is putting in an artificial ref for the use at the
3424 TOP of the block that receives the exception. It is too
3425 cumbersome to actually put the ref on the edge. We could
3426 either model this at the top of the receiver block or the
3427 bottom of the sender block.
3429 The bottom of the sender block is problematic because not all
3430 out-edges of a block are eh-edges. However, it is true
3431 that all edges into a block are either eh-edges or none of
3432 them are eh-edges. Thus, we can model this at the top of the
3433 eh-receiver for all of the edges at once. */
3434 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3435 if (EH_USES (i))
3436 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3437 bb, NULL, DF_REF_REG_USE, DF_REF_AT_TOP, -1, -1, 0);
3439 #endif
3441 /* Add the hard_frame_pointer if this block is the target of a
3442 non-local goto. */
3443 if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
3444 df_ref_record (collection_rec, hard_frame_pointer_rtx, NULL,
3445 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1, 0);
3447 /* Add the artificial uses. */
3448 if (bb->index >= NUM_FIXED_BLOCKS)
3450 bitmap_iterator bi;
3451 unsigned int regno;
3452 bitmap au = bb_has_eh_pred (bb)
3453 ? df->eh_block_artificial_uses
3454 : df->regular_block_artificial_uses;
3456 EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
3458 df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
3459 bb, NULL, DF_REF_REG_USE, 0, -1, -1, 0);
3463 df_canonize_collection_rec (collection_rec);
3467 /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS. */
3469 void
3470 df_bb_refs_record (int bb_index, bool scan_insns)
3472 basic_block bb = BASIC_BLOCK (bb_index);
3473 rtx insn;
3474 int luid = 0;
3475 struct df_scan_bb_info *bb_info;
3476 struct df_collection_rec collection_rec;
3477 collection_rec.def_vec = XALLOCAVEC (struct df_ref *, 1000);
3478 collection_rec.use_vec = XALLOCAVEC (struct df_ref *, 1000);
3479 collection_rec.eq_use_vec = XALLOCAVEC (struct df_ref *, 1000);
3480 collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);
3482 if (!df)
3483 return;
3485 bb_info = df_scan_get_bb_info (bb_index);
3487 /* Need to make sure that there is a record in the basic block info. */
3488 if (!bb_info)
3490 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
3491 df_scan_set_bb_info (bb_index, bb_info);
3492 bb_info->artificial_defs = NULL;
3493 bb_info->artificial_uses = NULL;
3496 if (scan_insns)
3497 /* Scan the block an insn at a time from beginning to end. */
3498 FOR_BB_INSNS (bb, insn)
3500 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3501 gcc_assert (!insn_info);
3503 insn_info = df_insn_create_insn_record (insn);
3504 if (INSN_P (insn))
3506 /* Record refs within INSN. */
3507 DF_INSN_INFO_LUID (insn_info) = luid++;
3508 df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
3509 df_refs_add_to_chains (&collection_rec, bb, insn);
3511 DF_INSN_INFO_LUID (insn_info) = luid;
3514 /* Other block level artificial refs. */
3515 df_bb_refs_collect (&collection_rec, bb);
3516 df_refs_add_to_chains (&collection_rec, bb, NULL);
3518 /* Now that the block has been processed, set the block as dirty so
3519 LR and LIVE will get it processed. */
3520 df_set_bb_dirty (bb);
3524 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3525 block. */
3527 static void
3528 df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
3530 bitmap_clear (regular_block_artificial_uses);
3532 if (reload_completed)
3534 if (frame_pointer_needed)
3535 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3537 else
3538 /* Before reload, there are a few registers that must be forced
3539 live everywhere -- which might not already be the case for
3540 blocks within infinite loops. */
3542 /* Any reference to any pseudo before reload is a potential
3543 reference of the frame pointer. */
3544 bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
3546 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3547 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3548 #endif
3550 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3551 /* Pseudos with argument area equivalences may require
3552 reloading via the argument pointer. */
3553 if (fixed_regs[ARG_POINTER_REGNUM])
3554 bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
3555 #endif
3557 /* Any constant, or pseudo with constant equivalences, may
3558 require reloading from memory using the pic register. */
3559 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3560 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3561 bitmap_set_bit (regular_block_artificial_uses, PIC_OFFSET_TABLE_REGNUM);
3563 /* The all-important stack pointer must always be live. */
3564 bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
3568 /* Get the artificial use set for an eh block. */
3570 static void
3571 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3573 bitmap_clear (eh_block_artificial_uses);
3575 /* The following code (down through the arg_pointer setting)
3576 APPEARS to be necessary because there is nothing that actually
3577 describes what the exception handling code may actually need
3578 to keep alive. */
3579 if (reload_completed)
3581 if (frame_pointer_needed)
3583 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3584 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3585 bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3586 #endif
3588 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3589 if (fixed_regs[ARG_POINTER_REGNUM])
3590 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3591 #endif
3597 /*----------------------------------------------------------------------------
3598 Specialized hard register scanning functions.
3599 ----------------------------------------------------------------------------*/
3602 /* Mark a register in SET. Hard registers in large modes get all
3603 of their component registers set as well. */
3605 static void
3606 df_mark_reg (rtx reg, void *vset)
3608 bitmap set = (bitmap) vset;
3609 int regno = REGNO (reg);
3611 gcc_assert (GET_MODE (reg) != BLKmode);
3613 bitmap_set_bit (set, regno);
3614 if (regno < FIRST_PSEUDO_REGISTER)
3616 int n = hard_regno_nregs[regno][GET_MODE (reg)];
3617 while (--n > 0)
3618 bitmap_set_bit (set, regno + n);
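/* Illustrative example (assumed target parameters): on a 32-bit
   target where hard_regno_nregs[0][DImode] is 2, calling df_mark_reg
   on (reg:DI 0) sets bits 0 and 1 in the bitmap, covering both halves
   of the register pair.  This is why diddle_return_value and the
   EH_RETURN_* cases below can hand whole multi-word registers to
   df_mark_reg directly.  */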
3623 /* Set the bit for regs that are considered to be defined at the entry. */
3625 static void
3626 df_get_entry_block_def_set (bitmap entry_block_defs)
3628 rtx r;
3629 int i;
3631 bitmap_clear (entry_block_defs);
3633 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3635 if (FUNCTION_ARG_REGNO_P (i))
3636 #ifdef INCOMING_REGNO
3637 bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
3638 #else
3639 bitmap_set_bit (entry_block_defs, i);
3640 #endif
3643 /* Once the prologue has been generated, all of these registers
3644 should just show up in the first regular block. */
3645 if (HAVE_prologue && epilogue_completed)
3647 /* Defs for the callee saved registers are inserted so that the
3648 pushes have some defining location. */
3649 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3650 if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
3651 bitmap_set_bit (entry_block_defs, i);
3653 else
3655 /* The always important stack pointer. */
3656 bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
3658 /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
3659 only STATIC_CHAIN_REGNUM is defined. If they are different,
3660 we only care about the STATIC_CHAIN_INCOMING_REGNUM. */
3661 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3662 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
3663 #else
3664 #ifdef STATIC_CHAIN_REGNUM
3665 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
3666 #endif
3667 #endif
3670 r = targetm.calls.struct_value_rtx (current_function_decl, true);
3671 if (r && REG_P (r))
3672 bitmap_set_bit (entry_block_defs, REGNO (r));
3674 if ((!reload_completed) || frame_pointer_needed)
3676 /* Any reference to any pseudo before reload is a potential
3677 reference of the frame pointer. */
3678 bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
3679 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3680 /* If they are different, also mark the hard frame pointer as live. */
3681 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3682 bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
3683 #endif
3686 /* These registers are live everywhere. */
3687 if (!reload_completed)
3689 #ifdef EH_USES
3690 /* The ia-64, the only machine that uses this, does not define these
3691 until after reload. */
3692 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3693 if (EH_USES (i))
3695 bitmap_set_bit (entry_block_defs, i);
3697 #endif
3699 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3700 /* Pseudos with argument area equivalences may require
3701 reloading via the argument pointer. */
3702 if (fixed_regs[ARG_POINTER_REGNUM])
3703 bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
3704 #endif
3706 #ifdef PIC_OFFSET_TABLE_REGNUM
3707 /* Any constant, or pseudo with constant equivalences, may
3708 require reloading from memory using the pic register. */
3709 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3710 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3711 bitmap_set_bit (entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
3712 #endif
3715 #ifdef INCOMING_RETURN_ADDR_RTX
3716 if (REG_P (INCOMING_RETURN_ADDR_RTX))
3717 bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
3718 #endif
3720 targetm.live_on_entry (entry_block_defs);
3722 /* If the function has an incoming STATIC_CHAIN,
3723 it has to show up in the entry def set. */
3724 if (df_need_static_chain_reg (cfun))
3726 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3727 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
3728 #else
3729 #ifdef STATIC_CHAIN_REGNUM
3730 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
3731 #endif
3732 #endif
3737 /* Return the (conservative) set of hard registers that are defined on
3738 entry to the function.
3739 It uses df->entry_block_defs to determine which register
3740 references to include. */
3742 static void
3743 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3744 bitmap entry_block_defs)
3746 unsigned int i;
3747 bitmap_iterator bi;
3749 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3751 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3752 ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0, -1, -1, 0);
3755 df_canonize_collection_rec (collection_rec);
3759 /* Record the (conservative) set of hard registers that are defined on
3760 entry to the function. */
3762 static void
3763 df_record_entry_block_defs (bitmap entry_block_defs)
3765 struct df_collection_rec collection_rec;
3766 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3767 collection_rec.def_vec = XALLOCAVEC (struct df_ref *, FIRST_PSEUDO_REGISTER);
3769 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3771 /* Process bb_refs chain */
3772 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
3776 /* Update the defs in the entry block. */
3778 void
3779 df_update_entry_block_defs (void)
3781 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
3782 bool changed = false;
3784 df_get_entry_block_def_set (refs);
3785 if (df->entry_block_defs)
3787 if (!bitmap_equal_p (df->entry_block_defs, refs))
3789 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
3790 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3791 df_ref_chain_delete (bb_info->artificial_defs);
3792 bb_info->artificial_defs = NULL;
3793 changed = true;
3796 else
3798 struct df_scan_problem_data *problem_data
3799 = (struct df_scan_problem_data *) df_scan->problem_data;
3800 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3801 changed = true;
3804 if (changed)
3806 df_record_entry_block_defs (refs);
3807 bitmap_copy (df->entry_block_defs, refs);
3808 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
3810 BITMAP_FREE (refs);
3814 /* Set the bit for regs that are considered to be used at the exit. */
3816 static void
3817 df_get_exit_block_use_set (bitmap exit_block_uses)
3819 unsigned int i;
3821 bitmap_clear (exit_block_uses);
3823 /* Stack pointer is always live at the exit. */
3824 bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
3826 /* Mark the frame pointer if needed at the end of the function.
3827 If we end up eliminating it, it will be removed from the live
3828 list of each basic block by reload. */
3830 if ((!reload_completed) || frame_pointer_needed)
3832 bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
3833 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3834 /* If they are different, also mark the hard frame pointer as live. */
3835 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3836 bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
3837 #endif
3840 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3841 /* Many architectures have a GP register even without flag_pic.
3842 Assume the pic register is not in use, or will be handled by
3843 other means, if it is not fixed. */
3844 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3845 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3846 bitmap_set_bit (exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
3847 #endif
3849 /* Mark all global registers, and all registers used by the
3850 epilogue as being live at the end of the function since they
3851 may be referenced by our caller. */
3852 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3853 if (global_regs[i] || EPILOGUE_USES (i))
3854 bitmap_set_bit (exit_block_uses, i);
3856 if (HAVE_epilogue && epilogue_completed)
3858 /* Mark all call-saved registers that we actually used. */
3859 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3860 if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
3861 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3862 bitmap_set_bit (exit_block_uses, i);
3865 #ifdef EH_RETURN_DATA_REGNO
3866 /* Mark the registers that will contain data for the handler. */
3867 if (reload_completed && crtl->calls_eh_return)
3868 for (i = 0; ; ++i)
3870 unsigned regno = EH_RETURN_DATA_REGNO (i);
3871 if (regno == INVALID_REGNUM)
3872 break;
3873 bitmap_set_bit (exit_block_uses, regno);
3875 #endif
3877 #ifdef EH_RETURN_STACKADJ_RTX
3878 if ((!HAVE_epilogue || ! epilogue_completed)
3879 && crtl->calls_eh_return)
3881 rtx tmp = EH_RETURN_STACKADJ_RTX;
3882 if (tmp && REG_P (tmp))
3883 df_mark_reg (tmp, exit_block_uses);
3885 #endif
3887 #ifdef EH_RETURN_HANDLER_RTX
3888 if ((!HAVE_epilogue || ! epilogue_completed)
3889 && crtl->calls_eh_return)
3891 rtx tmp = EH_RETURN_HANDLER_RTX;
3892 if (tmp && REG_P (tmp))
3893 df_mark_reg (tmp, exit_block_uses);
3895 #endif
3897 /* Mark function return value. */
3898 diddle_return_value (df_mark_reg, (void*) exit_block_uses);
3902 /* Return the refs of hard registers that are used in the exit block.
3903 It uses df->exit_block_uses to determine which registers to include. */
3905 static void
3906 df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
3908 unsigned int i;
3909 bitmap_iterator bi;
3911 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
3912 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3913 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, 0);
3915 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3916 /* It is deliberate that this is not put in the exit block uses but
3917 I do not know why. */
3918 if (reload_completed
3919 && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
3920 && bb_has_eh_pred (EXIT_BLOCK_PTR)
3921 && fixed_regs[ARG_POINTER_REGNUM])
3922 df_ref_record (collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
3923 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, 0);
3924 #endif
3926 df_canonize_collection_rec (collection_rec);
3930 /* Record the set of hard registers that are used in the exit block.
3931 It uses df->exit_block_uses to determine which bits to include. */
3933 static void
3934 df_record_exit_block_uses (bitmap exit_block_uses)
3936 struct df_collection_rec collection_rec;
3937 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3938 collection_rec.use_vec = XALLOCAVEC (struct df_ref *, FIRST_PSEUDO_REGISTER);
3940 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
3942 /* Process bb_refs chain */
3943 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
3947 /* Update the uses in the exit block. */
3949 void
3950 df_update_exit_block_uses (void)
3952 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
3953 bool changed = false;
3955 df_get_exit_block_use_set (refs);
3956 if (df->exit_block_uses)
3958 if (!bitmap_equal_p (df->exit_block_uses, refs))
3960 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
3961 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
3962 df_ref_chain_delete (bb_info->artificial_uses);
3963 bb_info->artificial_uses = NULL;
3964 changed = true;
3967 else
3969 struct df_scan_problem_data *problem_data
3970 = (struct df_scan_problem_data *) df_scan->problem_data;
3971 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3972 changed = true;
3975 if (changed)
3977 df_record_exit_block_uses (refs);
3978 bitmap_copy (df->exit_block_uses, refs);
3979 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
3981 BITMAP_FREE (refs);
3984 static bool initialized = false;
3987 /* Initialize some platform-specific structures. */
3989 void
3990 df_hard_reg_init (void)
3992 int i;
3993 #ifdef ELIMINABLE_REGS
3994 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
3995 #endif
3996 if (initialized)
3997 return;
3999 bitmap_obstack_initialize (&persistent_obstack);
4001 /* Record which registers will be eliminated. We use this in
4002 mark_used_regs. */
4003 CLEAR_HARD_REG_SET (elim_reg_set);
4005 #ifdef ELIMINABLE_REGS
4006 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
4007 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
4008 #else
4009 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
4010 #endif
4012 df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);
4014 /* Inconveniently, this is only readily available in hard reg set
4015 form. */
4016 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
4017 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
4018 bitmap_set_bit (df_invalidated_by_call, i);
4020 initialized = true;
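/* For reference, ELIMINABLE_REGS is a target-supplied initializer for
   an array of {from, to} hard register pairs. A typical definition
   (purely illustrative here) has the shape:

     #define ELIMINABLE_REGS				\
      {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },	\
       { ARG_POINTER_REGNUM,   FRAME_POINTER_REGNUM },	\
       { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}

   Only the FROM member of each pair is consulted above. */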
4024 /* Recompute the parts of scanning that are based on regs_ever_live
4025 because something changed in that array. */
4027 void
4028 df_update_entry_exit_and_calls (void)
4030 basic_block bb;
4032 df_update_entry_block_defs ();
4033 df_update_exit_block_uses ();
4035 /* The call insns need to be rescanned because there may be changes
4036 in the set of registers clobbered across the call. */
4037 FOR_EACH_BB (bb)
4039 rtx insn;
4040 FOR_BB_INSNS (bb, insn)
4042 if (INSN_P (insn) && CALL_P (insn))
4043 df_insn_rescan (insn);
4049 /* Return true if hard REG is actually used in some instruction.
4050 There are a fair number of conditions that affect the setting of
4051 this array. See the comment in df.h for df->hard_regs_live_count
4052 for the conditions under which this array is set. */
4054 bool
4055 df_hard_reg_used_p (unsigned int reg)
4057 gcc_assert (df);
4058 return df->hard_regs_live_count[reg] != 0;
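/* For example, a pass that wants to know whether the hard frame
   pointer is referenced anywhere in the current function might ask
   (hypothetical caller):

     if (df_hard_reg_used_p (HARD_FRAME_POINTER_REGNUM))
       ...

   which is much cheaper than walking every insn. */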
4062 /* Return the number of times hard REG is actually used in some
4063 instruction. There are a fair number of conditions that affect
4064 the setting of this array. See the comment in df.h for
4065 df->hard_regs_live_count for the conditions under which this array
4066 is set. */
4069 unsigned int
4070 df_hard_reg_used_count (unsigned int reg)
4072 gcc_assert (df);
4073 return df->hard_regs_live_count[reg];
4077 /* Get the value of regs_ever_live[REGNO]. */
4079 bool
4080 df_regs_ever_live_p (unsigned int regno)
4082 return regs_ever_live[regno];
4086 /* Set regs_ever_live[REGNO] to VALUE. If this causes regs_ever_live
4087 to change, schedule that change for the next update. */
4089 void
4090 df_set_regs_ever_live (unsigned int regno, bool value)
4092 if (regs_ever_live[regno] == value)
4093 return;
4095 regs_ever_live[regno] = value;
4096 if (df)
4097 df->redo_entry_and_exit = true;
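/* For example, a backend that has just pressed a scratch hard
   register into service might record that with (hypothetical caller,
   SCRATCH_RTX being some hard register rtx):

     df_set_regs_ever_live (REGNO (scratch_rtx), true);

   The entry/exit information is then refreshed the next time
   df_compute_regs_ever_live below runs. */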
4101 /* Compute "regs_ever_live" information from the underlying df
4102 information. Set the vector to all false if RESET. */
4104 void
4105 df_compute_regs_ever_live (bool reset)
4107 unsigned int i;
4108 bool changed = df->redo_entry_and_exit;
4110 if (reset)
4111 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4113 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4114 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
4116 regs_ever_live[i] = true;
4117 changed = true;
4119 if (changed)
4120 df_update_entry_exit_and_calls ();
4121 df->redo_entry_and_exit = false;
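/* A hypothetical pass that may have changed hard register usage
   would typically finish with:

     df_compute_regs_ever_live (false);

   which folds any newly used hard registers into regs_ever_live and,
   via df_update_entry_exit_and_calls, into the entry and exit block
   refs. */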
4125 /*----------------------------------------------------------------------------
4126 Dataflow ref information verification functions.
4128 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
4129 df_reg_chain_verify_unmarked (refs)
4130 df_refs_verify (ref*, ref*, bool)
4131 df_mws_verify (mw*, mw*, bool)
4132 df_insn_refs_verify (collection_rec, bb, insn, bool)
4133 df_bb_refs_verify (bb, refs, bool)
4134 df_bb_verify (bb)
4135 df_exit_block_bitmap_verify (bool)
4136 df_entry_block_bitmap_verify (bool)
4137 df_scan_verify ()
4138 ----------------------------------------------------------------------------*/
4141 /* Mark all refs in the reg chain REFS. Verify that all of the refs
4142 are in the correct chain for register REGNO. */
4144 static unsigned int
4145 df_reg_chain_mark (struct df_ref *refs, unsigned int regno,
4146 bool is_def, bool is_eq_use)
4148 unsigned int count = 0;
4149 struct df_ref *ref;
4150 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4152 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4154 /* If there are no def-use or use-def chains, make sure that all
4155 of the chains are clear. */
4156 if (!df_chain)
4157 gcc_assert (!DF_REF_CHAIN (ref));
4159 /* Check to make sure the ref is in the correct chain. */
4160 gcc_assert (DF_REF_REGNO (ref) == regno);
4161 if (is_def)
4162 gcc_assert (DF_REF_TYPE(ref) == DF_REF_REG_DEF);
4163 else
4164 gcc_assert (DF_REF_TYPE(ref) != DF_REF_REG_DEF);
4166 if (is_eq_use)
4167 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
4168 else
4169 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
4171 if (ref->next_reg)
4172 gcc_assert (ref->next_reg->prev_reg == ref);
4173 count++;
4174 DF_REF_REG_MARK (ref);
4176 return count;
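/* For instance, for a register with one def, two ordinary uses and
   no uses in notes, the three df_reg_chain_mark calls in
   df_scan_verify below should return 1, 2 and 0 respectively,
   matching DF_REG_DEF_COUNT, DF_REG_USE_COUNT and
   DF_REG_EQ_USE_COUNT for that register. */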
4180 /* Verify that all of the registers in the chain are unmarked. */
4182 static void
4183 df_reg_chain_verify_unmarked (struct df_ref *refs)
4185 struct df_ref *ref;
4186 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4187 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4191 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4193 static bool
4194 df_refs_verify (struct df_ref **new_rec, struct df_ref **old_rec,
4195 bool abort_if_fail)
4197 while ((*new_rec) && (*old_rec))
4199 if (!df_ref_equal_p (*new_rec, *old_rec))
4201 if (abort_if_fail)
4202 gcc_assert (0);
4203 else
4204 return false;
4207 /* ABORT_IF_FAIL is set when called from the function-level
4208 verifier. In that context, mark this ref as seen. */
4209 if (abort_if_fail)
4211 gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
4212 DF_REF_REG_UNMARK (*old_rec);
4215 new_rec++;
4216 old_rec++;
4219 if (abort_if_fail)
4220 gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
4221 else
4222 return ((*new_rec == NULL) && (*old_rec == NULL));
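/* When ABORT_IF_FAIL is set the return value is ignored by the
   callers, so falling through to here is harmless. */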
4223 return false;
4227 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4229 static bool
4230 df_mws_verify (struct df_mw_hardreg **new_rec, struct df_mw_hardreg **old_rec,
4231 bool abort_if_fail)
4233 while ((*new_rec) && (*old_rec))
4235 if (!df_mw_equal_p (*new_rec, *old_rec))
4237 if (abort_if_fail)
4238 gcc_assert (0);
4239 else
4240 return false;
4242 new_rec++;
4243 old_rec++;
4246 if (abort_if_fail)
4247 gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
4248 else
4249 return ((*new_rec == NULL) && (*old_rec == NULL));
4250 return false;
4254 /* Return true if the existing insn refs information for INSN is
4255 complete and correct. Otherwise (i.e. if there are any missing or
4256 extra refs), return false; the newly computed refs are left in COLLECTION_REC.
4258 If ABORT_IF_FAIL, leave the refs that are verified (already in the
4259 ref chain) as DF_REF_MARKED (). If it is false, this is per-insn
4260 verification mode rather than whole-function mode, so unmark
4261 everything.
4263 If ABORT_IF_FAIL is set, this function never returns false. */
4265 static bool
4266 df_insn_refs_verify (struct df_collection_rec *collection_rec,
4267 basic_block bb,
4268 rtx insn,
4269 bool abort_if_fail)
4271 bool ret1, ret2, ret3, ret4;
4272 unsigned int uid = INSN_UID (insn);
4273 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
4275 df_insn_refs_collect (collection_rec, bb, insn_info);
4277 if (!DF_INSN_UID_DEFS (uid))
4279 /* The insn_rec was created but it was never filled out. */
4280 if (abort_if_fail)
4281 gcc_assert (0);
4282 else
4283 return false;
4286 /* Unfortunately we cannot bail out early if one of these checks
4287 fails, because then the marks would not get cleared. */
4288 ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
4289 abort_if_fail);
4290 ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
4291 abort_if_fail);
4292 ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
4293 abort_if_fail);
4294 ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
4295 abort_if_fail);
4296 return (ret1 && ret2 && ret3 && ret4);
4300 /* Return true if all refs in the basic block are correct and complete.
4301 As a side effect of df_refs_verify, every ref that is
4302 verified has its DF_REF_MARK bit cleared. */
4304 static bool
4305 df_bb_verify (basic_block bb)
4307 rtx insn;
4308 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
4309 struct df_collection_rec collection_rec;
4311 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
4312 collection_rec.def_vec = XALLOCAVEC (struct df_ref *, 1000);
4313 collection_rec.use_vec = XALLOCAVEC (struct df_ref *, 1000);
4314 collection_rec.eq_use_vec = XALLOCAVEC (struct df_ref *, 1000);
4315 collection_rec.mw_vec = XALLOCAVEC (struct df_mw_hardreg *, 100);
4317 gcc_assert (bb_info);
4319 /* Scan the block, an insn at a time, from end to beginning. */
4320 FOR_BB_INSNS_REVERSE (bb, insn)
4322 if (!INSN_P (insn))
4323 continue;
4324 df_insn_refs_verify (&collection_rec, bb, insn, true);
4325 df_free_collection_rec (&collection_rec);
4328 /* Do the artificial defs and uses. */
4329 df_bb_refs_collect (&collection_rec, bb);
4330 df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
4331 df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
4332 df_free_collection_rec (&collection_rec);
4334 return true;
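/* df_bb_verify is invoked for every block, including the entry and
   exit blocks, by the FOR_ALL_BB loop in df_scan_verify below. */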
4338 /* Return true if the entry block has a correct and complete df_ref set.
4339 If not, either abort (when ABORT_IF_FAIL is true) or return false. */
4341 static bool
4342 df_entry_block_bitmap_verify (bool abort_if_fail)
4344 bitmap entry_block_defs = BITMAP_ALLOC (&df_bitmap_obstack);
4345 bool is_eq;
4347 df_get_entry_block_def_set (entry_block_defs);
4349 is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);
4351 if (!is_eq && abort_if_fail)
4353 print_current_pass (stderr);
4354 fprintf (stderr, "entry_block_defs = ");
4355 df_print_regset (stderr, entry_block_defs);
4356 fprintf (stderr, "df->entry_block_defs = ");
4357 df_print_regset (stderr, df->entry_block_defs);
4358 gcc_assert (0);
4361 BITMAP_FREE (entry_block_defs);
4363 return is_eq;
4367 /* Return true if the exit block has a correct and complete df_ref set.
4368 If not, either abort (when ABORT_IF_FAIL is true) or return false. */
4370 static bool
4371 df_exit_block_bitmap_verify (bool abort_if_fail)
4373 bitmap exit_block_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4374 bool is_eq;
4376 df_get_exit_block_use_set (exit_block_uses);
4378 is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);
4380 if (!is_eq && abort_if_fail)
4382 print_current_pass (stderr);
4383 fprintf (stderr, "exit_block_uses = ");
4384 df_print_regset (stderr, exit_block_uses);
4385 fprintf (stderr, "df->exit_block_uses = ");
4386 df_print_regset (stderr, df->exit_block_uses);
4387 gcc_assert (0);
4390 BITMAP_FREE (exit_block_uses);
4392 return is_eq;
4396 /* Verify that the df_ref information for all insns in all blocks is
4397 correct and complete. Abort if any check fails. */
4399 void
4400 df_scan_verify (void)
4402 unsigned int i;
4403 basic_block bb;
4404 bitmap regular_block_artificial_uses;
4405 bitmap eh_block_artificial_uses;
4407 if (!df)
4408 return;
4410 /* Verification is a four-step process. */
4412 /* (1) All of the refs are marked by going through the reg chains. */
4413 for (i = 0; i < DF_REG_SIZE (df); i++)
4415 gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
4416 == DF_REG_DEF_COUNT(i));
4417 gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
4418 == DF_REG_USE_COUNT(i));
4419 gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
4420 == DF_REG_EQ_USE_COUNT(i));
4423 /* (2) There are various bitmaps whose value may change over the
4424 course of the compilation. This step recomputes them to make
4425 sure that they have not slipped out of date. */
4426 regular_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4427 eh_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4429 df_get_regular_block_artificial_uses (regular_block_artificial_uses);
4430 df_get_eh_block_artificial_uses (eh_block_artificial_uses);
4432 bitmap_ior_into (eh_block_artificial_uses,
4433 regular_block_artificial_uses);
4435 /* Check that the artificial_uses bitmaps did not change. */
4436 gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
4437 df->regular_block_artificial_uses));
4438 gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
4439 df->eh_block_artificial_uses));
4441 BITMAP_FREE (regular_block_artificial_uses);
4442 BITMAP_FREE (eh_block_artificial_uses);
4444 /* Verify the entry block and exit block. These only verify the
4445 bitmaps; the refs themselves are verified in df_bb_verify. */
4446 df_entry_block_bitmap_verify (true);
4447 df_exit_block_bitmap_verify (true);
4449 /* (3) All of the insns in all of the blocks are traversed and the
4450 marks are cleared both in the artificial refs attached to the
4451 blocks and the real refs inside the insns. It is a failure to
4452 clear a mark that has not been set, as this means that the ref in
4453 the block or insn was not in the reg chain. */
4455 FOR_ALL_BB (bb)
4456 df_bb_verify (bb);
4458 /* (4) All of the reg chains are traversed a second time, this time
4459 checking that the marks are clear. A mark that is still set must
4460 come from a ref that is not in any insn or basic block. */
4462 for (i = 0; i < DF_REG_SIZE (df); i++)
4464 df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
4465 df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
4466 df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));