1 /* Scanning of rtl for dataflow analysis.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
3 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
31 #include "insn-config.h"
35 #include "alloc-pool.h"
37 #include "hard-reg-set.h"
38 #include "basic-block.h"
44 #include "target-def.h"
46 #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
49 DEF_VEC_ALLOC_P_STACK(df_ref
);
51 #define VEC_df_ref_stack_alloc(alloc) VEC_stack_alloc (df_ref, alloc)
53 typedef struct df_mw_hardreg
*df_mw_hardreg_ptr
;
55 DEF_VEC_P(df_mw_hardreg_ptr
);
56 DEF_VEC_ALLOC_P_STACK(df_mw_hardreg_ptr
);
58 #define VEC_df_mw_hardreg_ptr_stack_alloc(alloc) \
59 VEC_stack_alloc (df_mw_hardreg_ptr, alloc)
62 #define HAVE_epilogue 0
65 #define HAVE_prologue 0
67 #ifndef HAVE_sibcall_epilogue
68 #define HAVE_sibcall_epilogue 0
72 #define EPILOGUE_USES(REGNO) 0
75 /* The following two macros free the vecs that hold either the refs or
76 the mw refs. They are a little tricky because a vec that holds 0
77 elements is special and is not to be freed. */
78 #define df_scan_free_ref_vec(V) \
84 #define df_scan_free_mws_vec(V) \
90 /* The set of hard registers in eliminables[i].from. */
92 static HARD_REG_SET elim_reg_set
;
94 /* Initialize ur_in and ur_out as if all hard registers were partially
97 struct df_collection_rec
99 VEC(df_ref
,stack
) *def_vec
;
100 VEC(df_ref
,stack
) *use_vec
;
101 VEC(df_ref
,stack
) *eq_use_vec
;
102 VEC(df_mw_hardreg_ptr
,stack
) *mw_vec
;
105 static df_ref df_null_ref_rec
[1];
106 static struct df_mw_hardreg
* df_null_mw_rec
[1];
108 static void df_ref_record (enum df_ref_class
, struct df_collection_rec
*,
110 basic_block
, struct df_insn_info
*,
111 enum df_ref_type
, int ref_flags
);
112 static void df_def_record_1 (struct df_collection_rec
*, rtx
*,
113 basic_block
, struct df_insn_info
*,
115 static void df_defs_record (struct df_collection_rec
*, rtx
,
116 basic_block
, struct df_insn_info
*,
118 static void df_uses_record (struct df_collection_rec
*,
119 rtx
*, enum df_ref_type
,
120 basic_block
, struct df_insn_info
*,
123 static void df_install_ref_incremental (df_ref
);
124 static df_ref
df_ref_create_structure (enum df_ref_class
,
125 struct df_collection_rec
*, rtx
, rtx
*,
126 basic_block
, struct df_insn_info
*,
127 enum df_ref_type
, int ref_flags
);
128 static void df_insn_refs_collect (struct df_collection_rec
*,
129 basic_block
, struct df_insn_info
*);
130 static void df_canonize_collection_rec (struct df_collection_rec
*);
132 static void df_get_regular_block_artificial_uses (bitmap
);
133 static void df_get_eh_block_artificial_uses (bitmap
);
135 static void df_record_entry_block_defs (bitmap
);
136 static void df_record_exit_block_uses (bitmap
);
137 static void df_get_exit_block_use_set (bitmap
);
138 static void df_get_entry_block_def_set (bitmap
);
139 static void df_grow_ref_info (struct df_ref_info
*, unsigned int);
140 static void df_ref_chain_delete_du_chain (df_ref
*);
141 static void df_ref_chain_delete (df_ref
*);
143 static void df_refs_add_to_chains (struct df_collection_rec
*,
146 static bool df_insn_refs_verify (struct df_collection_rec
*, basic_block
, rtx
, bool);
147 static void df_entry_block_defs_collect (struct df_collection_rec
*, bitmap
);
148 static void df_exit_block_uses_collect (struct df_collection_rec
*, bitmap
);
149 static void df_install_ref (df_ref
, struct df_reg_info
*,
150 struct df_ref_info
*, bool);
152 static int df_ref_compare (const void *, const void *);
153 static int df_mw_compare (const void *, const void *);
155 /* Indexed by hardware reg number, is true if that register is ever
156 used in the current function.
158 In df-scan.c, this is set up to record the hard regs used
159 explicitly. Reload adds in the hard regs used for holding pseudo
160 regs. Final uses it to generate the code in the function prologue
161 and epilogue to save and restore registers as needed. */
163 static bool regs_ever_live
[FIRST_PSEUDO_REGISTER
];
165 /*----------------------------------------------------------------------------
166 SCANNING DATAFLOW PROBLEM
168 There are several ways in which scanning looks just like the other
169 dataflow problems. It shares the all the mechanisms for local info
170 as well as basic block info. Where it differs is when and how often
171 it gets run. It also has no need for the iterative solver.
172 ----------------------------------------------------------------------------*/
174 /* Problem data for the scanning dataflow function. */
175 struct df_scan_problem_data
177 alloc_pool ref_base_pool
;
178 alloc_pool ref_artificial_pool
;
179 alloc_pool ref_regular_pool
;
180 alloc_pool insn_pool
;
182 alloc_pool mw_reg_pool
;
183 bitmap_obstack reg_bitmaps
;
184 bitmap_obstack insn_bitmaps
;
187 typedef struct df_scan_bb_info
*df_scan_bb_info_t
;
190 /* Internal function to shut down the scanning problem. */
/* Shut down the scanning problem: free the per-insn and per-bb ref
   vectors, the def/use ref tables, the bitmaps, and finally the
   allocation pools and the problem_data itself.
   NOTE(review): this extract is missing interior lines (locals,
   braces, loop framing) — it is not compilable as shown.  */
192 df_scan_free_internal (void)
194 struct df_scan_problem_data
*problem_data
195 = (struct df_scan_problem_data
*) df_scan
->problem_data
;
199 /* The vectors that hold the refs are not pool allocated because
200 they come in many sizes. This makes them impossible to delete
202 for (i
= 0; i
< DF_INSN_SIZE(); i
++)
204 struct df_insn_info
*insn_info
= DF_INSN_UID_GET(i
);
205 /* Skip the insns that have no insn_info or have been
209 df_scan_free_ref_vec (insn_info
->defs
);
210 df_scan_free_ref_vec (insn_info
->uses
);
211 df_scan_free_ref_vec (insn_info
->eq_uses
);
212 df_scan_free_mws_vec (insn_info
->mw_hardregs
);
/* Per-basic-block artificial def/use ref vectors.  */
218 unsigned int bb_index
= bb
->index
;
219 struct df_scan_bb_info
*bb_info
= df_scan_get_bb_info (bb_index
);
222 df_scan_free_ref_vec (bb_info
->artificial_defs
);
223 df_scan_free_ref_vec (bb_info
->artificial_uses
);
/* Release and zero the def and use ref tables.  */
227 free (df
->def_info
.refs
);
228 free (df
->def_info
.begin
);
229 free (df
->def_info
.count
);
230 memset (&df
->def_info
, 0, (sizeof (struct df_ref_info
)));
232 free (df
->use_info
.refs
);
233 free (df
->use_info
.begin
);
234 free (df
->use_info
.count
);
235 memset (&df
->use_info
, 0, (sizeof (struct df_ref_info
)));
241 free (df
->eq_use_regs
);
242 df
->eq_use_regs
= NULL
;
250 free (df_scan
->block_info
);
251 df_scan
->block_info
= NULL
;
252 df_scan
->block_info_size
= 0;
/* Clear the scanning bitmaps before their obstacks are released.  */
254 bitmap_clear (&df
->hardware_regs_used
);
255 bitmap_clear (&df
->regular_block_artificial_uses
);
256 bitmap_clear (&df
->eh_block_artificial_uses
);
257 BITMAP_FREE (df
->entry_block_defs
);
258 BITMAP_FREE (df
->exit_block_uses
);
259 bitmap_clear (&df
->insns_to_delete
);
260 bitmap_clear (&df
->insns_to_rescan
);
261 bitmap_clear (&df
->insns_to_notes_rescan
);
/* Free the allocation pools last — the refs/insns freed above live
   in them.  */
263 free_alloc_pool (problem_data
->ref_base_pool
);
264 free_alloc_pool (problem_data
->ref_artificial_pool
);
265 free_alloc_pool (problem_data
->ref_regular_pool
);
266 free_alloc_pool (problem_data
->insn_pool
);
267 free_alloc_pool (problem_data
->reg_pool
);
268 free_alloc_pool (problem_data
->mw_reg_pool
);
269 bitmap_obstack_release (&problem_data
->reg_bitmaps
);
270 bitmap_obstack_release (&problem_data
->insn_bitmaps
);
271 free (df_scan
->problem_data
);
275 /* Free basic block info. */
/* Free the scanning info attached to basic block BB (the problem's
   free_bb_info hook).  Deletes the refs of each insn in BB, then any
   artificial defs/uses recorded on the block.
   NOTE(review): some interior lines are missing from this extract.  */
278 df_scan_free_bb_info (basic_block bb
, void *vbb_info
)
280 struct df_scan_bb_info
*bb_info
= (struct df_scan_bb_info
*) vbb_info
;
281 unsigned int bb_index
= bb
->index
;
283 /* See if bb_info is initialized. */
284 if (bb_info
->artificial_defs
)
287 FOR_BB_INSNS (bb
, insn
)
290 /* Record defs within INSN. */
291 df_insn_delete (bb
, INSN_UID (insn
));
294 if (bb_index
< df_scan
->block_info_size
)
295 bb_info
= df_scan_get_bb_info (bb_index
);
297 /* Get rid of any artificial uses or defs. */
298 if (bb_info
->artificial_defs
)
300 df_ref_chain_delete_du_chain (bb_info
->artificial_defs
);
301 df_ref_chain_delete_du_chain (bb_info
->artificial_uses
);
302 df_ref_chain_delete (bb_info
->artificial_defs
);
303 df_ref_chain_delete (bb_info
->artificial_uses
);
304 bb_info
->artificial_defs
= NULL
;
305 bb_info
->artificial_uses
= NULL
;
311 /* Allocate the problem data for the scanning problem. This should be
312 called when the problem is created or when the entire function is to
315 df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED
)
317 struct df_scan_problem_data
*problem_data
;
318 unsigned int insn_num
= get_max_uid () + 1;
319 unsigned int block_size
= 512;
322 /* Given the number of pools, this is really faster than tearing
324 if (df_scan
->problem_data
)
325 df_scan_free_internal ();
327 problem_data
= XNEW (struct df_scan_problem_data
);
328 df_scan
->problem_data
= problem_data
;
329 df_scan
->computed
= true;
331 problem_data
->ref_base_pool
332 = create_alloc_pool ("df_scan ref base",
333 sizeof (struct df_base_ref
), block_size
);
334 problem_data
->ref_artificial_pool
335 = create_alloc_pool ("df_scan ref artificial",
336 sizeof (struct df_artificial_ref
), block_size
);
337 problem_data
->ref_regular_pool
338 = create_alloc_pool ("df_scan ref regular",
339 sizeof (struct df_regular_ref
), block_size
);
340 problem_data
->insn_pool
341 = create_alloc_pool ("df_scan insn",
342 sizeof (struct df_insn_info
), block_size
);
343 problem_data
->reg_pool
344 = create_alloc_pool ("df_scan reg",
345 sizeof (struct df_reg_info
), block_size
);
346 problem_data
->mw_reg_pool
347 = create_alloc_pool ("df_scan mw_reg",
348 sizeof (struct df_mw_hardreg
), block_size
/ 16);
350 bitmap_obstack_initialize (&problem_data
->reg_bitmaps
);
351 bitmap_obstack_initialize (&problem_data
->insn_bitmaps
);
353 insn_num
+= insn_num
/ 4;
356 df_grow_insn_info ();
357 df_grow_bb_info (df_scan
);
361 unsigned int bb_index
= bb
->index
;
362 struct df_scan_bb_info
*bb_info
= df_scan_get_bb_info (bb_index
);
363 bb_info
->artificial_defs
= NULL
;
364 bb_info
->artificial_uses
= NULL
;
367 bitmap_initialize (&df
->hardware_regs_used
, &problem_data
->reg_bitmaps
);
368 bitmap_initialize (&df
->regular_block_artificial_uses
, &problem_data
->reg_bitmaps
);
369 bitmap_initialize (&df
->eh_block_artificial_uses
, &problem_data
->reg_bitmaps
);
370 df
->entry_block_defs
= BITMAP_ALLOC (&problem_data
->reg_bitmaps
);
371 df
->exit_block_uses
= BITMAP_ALLOC (&problem_data
->reg_bitmaps
);
372 bitmap_initialize (&df
->insns_to_delete
, &problem_data
->insn_bitmaps
);
373 bitmap_initialize (&df
->insns_to_rescan
, &problem_data
->insn_bitmaps
);
374 bitmap_initialize (&df
->insns_to_notes_rescan
, &problem_data
->insn_bitmaps
);
375 df_scan
->optional_p
= false;
379 /* Free all of the data associated with the scan problem. */
/* NOTE(review): the function header lines were lost in extraction;
   per the comment above and the problem_SCAN table this body
   presumably belongs to df_scan_free.  It frees the internal problem
   data and the blocks_to_analyze bitmap.  */
384 if (df_scan
->problem_data
)
385 df_scan_free_internal ();
387 if (df
->blocks_to_analyze
)
389 BITMAP_FREE (df
->blocks_to_analyze
);
390 df
->blocks_to_analyze
= NULL
;
396 /* Dump the preamble for DF_SCAN dump. */
398 df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED
)
409 fprintf (file
, ";; invalidated by call \t");
410 df_print_regset (file
, regs_invalidated_by_call_regset
);
411 fprintf (file
, ";; hardware regs used \t");
412 df_print_regset (file
, &df
->hardware_regs_used
);
413 fprintf (file
, ";; regular block artificial uses \t");
414 df_print_regset (file
, &df
->regular_block_artificial_uses
);
415 fprintf (file
, ";; eh block artificial uses \t");
416 df_print_regset (file
, &df
->eh_block_artificial_uses
);
417 fprintf (file
, ";; entry block defs \t");
418 df_print_regset (file
, df
->entry_block_defs
);
419 fprintf (file
, ";; exit block uses \t");
420 df_print_regset (file
, df
->exit_block_uses
);
421 fprintf (file
, ";; regs ever live \t");
422 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
423 if (df_regs_ever_live_p (i
))
424 fprintf (file
, " %d[%s]", i
, reg_names
[i
]);
425 fprintf (file
, "\n;; ref usage \t");
427 for (i
= 0; i
< (int)df
->regs_inited
; i
++)
428 if (DF_REG_DEF_COUNT (i
) || DF_REG_USE_COUNT (i
) || DF_REG_EQ_USE_COUNT (i
))
430 const char * sep
= "";
432 fprintf (file
, "r%d={", i
);
433 if (DF_REG_DEF_COUNT (i
))
435 fprintf (file
, "%dd", DF_REG_DEF_COUNT (i
));
437 dcount
+= DF_REG_DEF_COUNT (i
);
439 if (DF_REG_USE_COUNT (i
))
441 fprintf (file
, "%s%du", sep
, DF_REG_USE_COUNT (i
));
443 ucount
+= DF_REG_USE_COUNT (i
);
445 if (DF_REG_EQ_USE_COUNT (i
))
447 fprintf (file
, "%s%de", sep
, DF_REG_EQ_USE_COUNT (i
));
448 ecount
+= DF_REG_EQ_USE_COUNT (i
);
450 fprintf (file
, "} ");
454 FOR_BB_INSNS (bb
, insn
)
463 fprintf (file
, "\n;; total ref usage %d{%dd,%du,%de}"
464 " in %d{%d regular + %d call} insns.\n",
465 dcount
+ ucount
+ ecount
, dcount
, ucount
, ecount
,
466 icount
+ ccount
, icount
, ccount
);
469 /* Dump the bb_info for a given basic block. */
/* Debugging hook: dump BB's artificial defs and uses, then the debug
   info of each insn in BB, to FILE.
   NOTE(review): some interior lines are missing from this extract.  */
471 df_scan_start_block (basic_block bb
, FILE *file
)
473 struct df_scan_bb_info
*bb_info
474 = df_scan_get_bb_info (bb
->index
);
478 fprintf (file
, ";; bb %d artificial_defs: ", bb
->index
);
479 df_refs_chain_dump (bb_info
->artificial_defs
, true, file
);
480 fprintf (file
, "\n;; bb %d artificial_uses: ", bb
->index
);
481 df_refs_chain_dump (bb_info
->artificial_uses
, true, file
);
482 fprintf (file
, "\n");
487 FOR_BB_INSNS (bb
, insn
)
489 df_insn_debug (insn
, false, file
);
/* Static description of the scanning dataflow problem.  Most hooks
   are NULL: scanning has no iterative solution, confluence, or
   transfer function — only allocation, freeing, and dumping.  */
494 static struct df_problem problem_SCAN
=
496 DF_SCAN
, /* Problem id. */
497 DF_NONE
, /* Direction. */
498 df_scan_alloc
, /* Allocate the problem specific data. */
499 NULL
, /* Reset global information. */
500 df_scan_free_bb_info
, /* Free basic block info. */
501 NULL
, /* Local compute function. */
502 NULL
, /* Init the solution specific data. */
503 NULL
, /* Iterative solver. */
504 NULL
, /* Confluence operator 0. */
505 NULL
, /* Confluence operator n. */
506 NULL
, /* Transfer function. */
507 NULL
, /* Finalize function. */
508 df_scan_free
, /* Free all of the problem information. */
509 NULL
, /* Remove this problem from the stack of dataflow problems. */
510 df_scan_start_dump
, /* Debugging. */
511 df_scan_start_block
, /* Debugging start block. */
512 NULL
, /* Debugging end block. */
513 NULL
, /* Incremental solution verify start. */
514 NULL
, /* Incremental solution verify end. */
515 NULL
, /* Dependent problem. */
516 sizeof (struct df_scan_bb_info
),/* Size of entry of block_info array. */
517 TV_DF_SCAN
, /* Timing variable. */
518 false /* Reset blocks on dropping out of blocks_to_analyze. */
522 /* Create a new DATAFLOW instance and add it to an existing instance
523 of DF. The returned structure is what is used to get at the
/* Register the DF_SCAN problem instance with the dataflow framework.  */
527 df_scan_add_problem (void)
529 df_add_problem (&problem_SCAN
);
533 /*----------------------------------------------------------------------------
534 Storage Allocation Utilities
535 ----------------------------------------------------------------------------*/
538 /* First, grow the reg_info information. If the current size is less than
539 the number of pseudos, grow to 25% more than the number of
542 Second, assure that all of the slots up to max_reg_num have been
543 filled with reg_info structures. */
546 df_grow_reg_info (void)
548 unsigned int max_reg
= max_reg_num ();
549 unsigned int new_size
= max_reg
;
550 struct df_scan_problem_data
*problem_data
551 = (struct df_scan_problem_data
*) df_scan
->problem_data
;
554 if (df
->regs_size
< new_size
)
556 new_size
+= new_size
/ 4;
557 df
->def_regs
= XRESIZEVEC (struct df_reg_info
*, df
->def_regs
, new_size
);
558 df
->use_regs
= XRESIZEVEC (struct df_reg_info
*, df
->use_regs
, new_size
);
559 df
->eq_use_regs
= XRESIZEVEC (struct df_reg_info
*, df
->eq_use_regs
,
561 df
->def_info
.begin
= XRESIZEVEC (unsigned, df
->def_info
.begin
, new_size
);
562 df
->def_info
.count
= XRESIZEVEC (unsigned, df
->def_info
.count
, new_size
);
563 df
->use_info
.begin
= XRESIZEVEC (unsigned, df
->use_info
.begin
, new_size
);
564 df
->use_info
.count
= XRESIZEVEC (unsigned, df
->use_info
.count
, new_size
);
565 df
->regs_size
= new_size
;
568 for (i
= df
->regs_inited
; i
< max_reg
; i
++)
570 struct df_reg_info
*reg_info
;
572 reg_info
= (struct df_reg_info
*) pool_alloc (problem_data
->reg_pool
);
573 memset (reg_info
, 0, sizeof (struct df_reg_info
));
574 df
->def_regs
[i
] = reg_info
;
575 reg_info
= (struct df_reg_info
*) pool_alloc (problem_data
->reg_pool
);
576 memset (reg_info
, 0, sizeof (struct df_reg_info
));
577 df
->use_regs
[i
] = reg_info
;
578 reg_info
= (struct df_reg_info
*) pool_alloc (problem_data
->reg_pool
);
579 memset (reg_info
, 0, sizeof (struct df_reg_info
));
580 df
->eq_use_regs
[i
] = reg_info
;
581 df
->def_info
.begin
[i
] = 0;
582 df
->def_info
.count
[i
] = 0;
583 df
->use_info
.begin
[i
] = 0;
584 df
->use_info
.count
[i
] = 0;
587 df
->regs_inited
= max_reg
;
591 /* Grow the ref information. */
/* Grow REF_INFO->refs to hold at least NEW_SIZE entries, zeroing the
   newly added slots and updating refs_size.  No-op if already big
   enough.  */
594 df_grow_ref_info (struct df_ref_info
*ref_info
, unsigned int new_size
)
596 if (ref_info
->refs_size
< new_size
)
598 ref_info
->refs
= XRESIZEVEC (df_ref
, ref_info
->refs
, new_size
);
599 memset (ref_info
->refs
+ ref_info
->refs_size
, 0,
600 (new_size
- ref_info
->refs_size
) *sizeof (df_ref
));
601 ref_info
->refs_size
= new_size
;
606 /* Check and grow the ref information if necessary. This routine
607 guarantees total_size + BITMAP_ADDEND amount of entries in refs
608 array. It updates ref_info->refs_size only and does not change
609 ref_info->total_size. */
612 df_check_and_grow_ref_info (struct df_ref_info
*ref_info
,
613 unsigned bitmap_addend
)
615 if (ref_info
->refs_size
< ref_info
->total_size
+ bitmap_addend
)
617 int new_size
= ref_info
->total_size
+ bitmap_addend
;
/* Add 25% headroom to limit the number of future regrows.  */
618 new_size
+= ref_info
->total_size
/ 4;
619 df_grow_ref_info (ref_info
, new_size
);
624 /* Grow the ref information. If the current size is less than the
625 number of instructions, grow to 25% more than the number of
/* Grow df->insns to cover every insn uid, with 25% headroom; the
   newly added slots are zeroed.  */
629 df_grow_insn_info (void)
631 unsigned int new_size
= get_max_uid () + 1;
632 if (DF_INSN_SIZE () < new_size
)
634 new_size
+= new_size
/ 4;
635 df
->insns
= XRESIZEVEC (struct df_insn_info
*, df
->insns
, new_size
);
636 memset (df
->insns
+ df
->insns_size
, 0,
637 (new_size
- DF_INSN_SIZE ()) *sizeof (struct df_insn_info
*));
638 DF_INSN_SIZE () = new_size
;
645 /*----------------------------------------------------------------------------
646 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
647 ----------------------------------------------------------------------------*/
649 /* Rescan all of the blocks_to_analyze, or all of the blocks in the
650 function if blocks_to_analyze is NULL. */
653 df_scan_blocks (void)
/* NOTE(review): interior lines (locals, the loop over blocks) are
   missing from this extract.  */
657 df
->def_info
.ref_order
= DF_REF_ORDER_NO_TABLE
;
658 df
->use_info
.ref_order
= DF_REF_ORDER_NO_TABLE
;
/* Compute the artificial-use sets; EH blocks also get the regular
   ones merged in below.  */
660 df_get_regular_block_artificial_uses (&df
->regular_block_artificial_uses
);
661 df_get_eh_block_artificial_uses (&df
->eh_block_artificial_uses
);
663 bitmap_ior_into (&df
->eh_block_artificial_uses
,
664 &df
->regular_block_artificial_uses
);
666 /* ENTRY and EXIT blocks have special defs/uses. */
667 df_get_entry_block_def_set (df
->entry_block_defs
);
668 df_record_entry_block_defs (df
->entry_block_defs
);
669 df_get_exit_block_use_set (df
->exit_block_uses
);
670 df_record_exit_block_uses (df
->exit_block_uses
);
671 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK
));
672 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK
));
677 unsigned int bb_index
= bb
->index
;
678 df_bb_refs_record (bb_index
, true);
682 /* Create new refs under address LOC within INSN. This function is
683 only used externally. REF_FLAGS must be either 0 or DF_REF_IN_NOTE,
684 depending on whether LOC is inside PATTERN (INSN) or a note. */
687 df_uses_create (rtx
*loc
, rtx insn
, int ref_flags
)
/* Only DF_REF_IN_NOTE (or nothing) is a legal flag here.  */
689 gcc_assert (!(ref_flags
& ~DF_REF_IN_NOTE
))
;
690 df_uses_record (NULL
, loc
, DF_REF_REG_USE
,
691 BLOCK_FOR_INSN (insn
),
692 DF_INSN_INFO_GET (insn
),
696 /* Create a new ref of type DF_REF_TYPE for register REG at address
697 LOC within INSN of BB. This function is only used externally. */
700 df_ref_create (rtx reg
, rtx
*loc
, rtx insn
,
702 enum df_ref_type ref_type
,
/* NOTE(review): the lines selecting CL and declaring BB/REF_FLAGS
   are missing from this extract.  */
705 enum df_ref_class cl
;
709 /* You cannot hack artificial refs. */
717 return df_ref_create_structure (cl
, NULL
, reg
, loc
, bb
,
718 DF_INSN_INFO_GET (insn
),
719 ref_type
, ref_flags
);
723 df_install_ref_incremental (df_ref ref
)
725 struct df_reg_info
**reg_info
;
726 struct df_ref_info
*ref_info
;
728 df_ref
**ref_rec_ptr
;
729 unsigned int count
= 0;
732 rtx insn
= DF_REF_INSN (ref
);
733 basic_block bb
= BLOCK_FOR_INSN (insn
);
735 if (DF_REF_REG_DEF_P (ref
))
737 reg_info
= df
->def_regs
;
738 ref_info
= &df
->def_info
;
739 ref_rec_ptr
= &DF_INSN_DEFS (insn
);
740 add_to_table
= ref_info
->ref_order
!= DF_REF_ORDER_NO_TABLE
;
742 else if (DF_REF_FLAGS (ref
) & DF_REF_IN_NOTE
)
744 reg_info
= df
->eq_use_regs
;
745 ref_info
= &df
->use_info
;
746 ref_rec_ptr
= &DF_INSN_EQ_USES (insn
);
747 switch (ref_info
->ref_order
)
749 case DF_REF_ORDER_UNORDERED_WITH_NOTES
:
750 case DF_REF_ORDER_BY_REG_WITH_NOTES
:
751 case DF_REF_ORDER_BY_INSN_WITH_NOTES
:
755 add_to_table
= false;
761 reg_info
= df
->use_regs
;
762 ref_info
= &df
->use_info
;
763 ref_rec_ptr
= &DF_INSN_USES (insn
);
764 add_to_table
= ref_info
->ref_order
!= DF_REF_ORDER_NO_TABLE
;
767 /* Do not add if ref is not in the right blocks. */
768 if (add_to_table
&& df
->analyze_subset
)
769 add_to_table
= bitmap_bit_p (df
->blocks_to_analyze
, bb
->index
);
771 df_install_ref (ref
, reg_info
[DF_REF_REGNO (ref
)], ref_info
, add_to_table
);
774 switch (ref_info
->ref_order
)
776 case DF_REF_ORDER_UNORDERED_WITH_NOTES
:
777 case DF_REF_ORDER_BY_REG_WITH_NOTES
:
778 case DF_REF_ORDER_BY_INSN_WITH_NOTES
:
779 ref_info
->ref_order
= DF_REF_ORDER_UNORDERED_WITH_NOTES
;
782 ref_info
->ref_order
= DF_REF_ORDER_UNORDERED
;
786 ref_rec
= *ref_rec_ptr
;
793 ref_rec
= *ref_rec_ptr
;
796 ref_rec
= XRESIZEVEC (df_ref
, ref_rec
, count
+2);
797 *ref_rec_ptr
= ref_rec
;
798 ref_rec
[count
] = ref
;
799 ref_rec
[count
+1] = NULL
;
800 qsort (ref_rec
, count
+ 1, sizeof (df_ref
), df_ref_compare
);
804 df_ref
*ref_rec
= XNEWVEC (df_ref
, 2);
807 *ref_rec_ptr
= ref_rec
;
813 fprintf (dump_file
, "adding ref ");
814 df_ref_debug (ref
, dump_file
);
817 /* By adding the ref directly, df_insn_rescan may not find any
818 differences even though the block will have changed. So we need
819 to mark the block dirty ourselves. */
820 if (!DEBUG_INSN_P (DF_REF_INSN (ref
)))
821 df_set_bb_dirty (bb
);
826 /*----------------------------------------------------------------------------
827 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
828 ----------------------------------------------------------------------------*/
/* Return REF to the allocation pool matching its class
   (base / artificial / regular).  */
831 df_free_ref (df_ref ref
)
833 struct df_scan_problem_data
*problem_data
834 = (struct df_scan_problem_data
*) df_scan
->problem_data
;
836 switch (DF_REF_CLASS (ref
))
839 pool_free (problem_data
->ref_base_pool
, ref
);
842 case DF_REF_ARTIFICIAL
:
843 pool_free (problem_data
->ref_artificial_pool
, ref
);
847 pool_free (problem_data
->ref_regular_pool
, ref
);
853 /* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
854 Also delete the def-use or use-def chain if it exists. */
857 df_reg_chain_unlink (df_ref ref
)
859 df_ref next
= DF_REF_NEXT_REG (ref
);
860 df_ref prev
= DF_REF_PREV_REG (ref
);
861 int id
= DF_REF_ID (ref
);
862 struct df_reg_info
*reg_info
;
865 if (DF_REF_REG_DEF_P (ref
))
867 int regno
= DF_REF_REGNO (ref
);
868 reg_info
= DF_REG_DEF_GET (regno
);
869 refs
= df
->def_info
.refs
;
873 if (DF_REF_FLAGS (ref
) & DF_REF_IN_NOTE
)
875 reg_info
= DF_REG_EQ_USE_GET (DF_REF_REGNO (ref
));
876 switch (df
->use_info
.ref_order
)
878 case DF_REF_ORDER_UNORDERED_WITH_NOTES
:
879 case DF_REF_ORDER_BY_REG_WITH_NOTES
:
880 case DF_REF_ORDER_BY_INSN_WITH_NOTES
:
881 refs
= df
->use_info
.refs
;
889 reg_info
= DF_REG_USE_GET (DF_REF_REGNO (ref
));
890 refs
= df
->use_info
.refs
;
896 if (df
->analyze_subset
)
898 if (bitmap_bit_p (df
->blocks_to_analyze
, DF_REF_BBNO (ref
)))
905 /* Delete any def-use or use-def chains that start here. It is
906 possible that there is trash in this field. This happens for
907 insns that have been deleted when rescanning has been deferred
908 and the chain problem has also been deleted. The chain tear down
909 code skips deleted insns. */
910 if (df_chain
&& DF_REF_CHAIN (ref
))
911 df_chain_unlink (ref
);
914 if (DF_REF_FLAGS_IS_SET (ref
, DF_HARD_REG_LIVE
))
916 gcc_assert (DF_REF_REGNO (ref
) < FIRST_PSEUDO_REGISTER
);
917 df
->hard_regs_live_count
[DF_REF_REGNO (ref
)]--;
920 /* Unlink from the reg chain. If there is no prev, this is the
921 first of the list. If not, just join the next and prev. */
923 DF_REF_NEXT_REG (prev
) = next
;
926 gcc_assert (reg_info
->reg_chain
== ref
);
927 reg_info
->reg_chain
= next
;
930 DF_REF_PREV_REG (next
) = prev
;
936 /* Remove REF from VEC. */
/* Remove REF from the NULL-terminated vector *VEC_PTR; an emptied
   vector is replaced by the shared df_null_ref_rec sentinel (which is
   never freed).  NOTE(review): the element-shifting loop body is
   missing from this extract.  */
939 df_ref_compress_rec (df_ref
**vec_ptr
, df_ref ref
)
941 df_ref
*vec
= *vec_ptr
;
945 while (*vec
&& *vec
!= ref
)
957 *vec_ptr
= df_null_ref_rec
;
962 /* Unlink REF from all def-use/use-def chains, etc. */
965 df_ref_remove (df_ref ref
)
970 fprintf (dump_file
, "removing ref ");
971 df_ref_debug (ref
, dump_file
);
/* Defs: remove from the bb's artificial defs or the insn's defs.  */
975 if (DF_REF_REG_DEF_P (ref
))
977 if (DF_REF_IS_ARTIFICIAL (ref
))
979 struct df_scan_bb_info
*bb_info
980 = df_scan_get_bb_info (DF_REF_BBNO (ref
));
981 df_ref_compress_rec (&bb_info
->artificial_defs
, ref
);
985 unsigned int uid
= DF_REF_INSN_UID (ref
);
986 struct df_insn_info
*insn_rec
= DF_INSN_UID_GET (uid
);
987 df_ref_compress_rec (&insn_rec
->defs
, ref
);
/* Uses: artificial uses, eq_uses (from notes), or plain uses.  */
992 if (DF_REF_IS_ARTIFICIAL (ref
))
994 struct df_scan_bb_info
*bb_info
995 = df_scan_get_bb_info (DF_REF_BBNO (ref
));
996 df_ref_compress_rec (&bb_info
->artificial_uses
, ref
);
1000 unsigned int uid
= DF_REF_INSN_UID (ref
);
1001 struct df_insn_info
*insn_rec
= DF_INSN_UID_GET (uid
);
1003 if (DF_REF_FLAGS (ref
) & DF_REF_IN_NOTE
)
1004 df_ref_compress_rec (&insn_rec
->eq_uses
, ref
);
1006 df_ref_compress_rec (&insn_rec
->uses
, ref
);
1010 /* By deleting the ref directly, df_insn_rescan may not find any
1011 differences even though the block will have changed. So we need
1012 to mark the block dirty ourselves. */
1013 if (!DEBUG_INSN_P (DF_REF_INSN (ref
)))
1014 df_set_bb_dirty (DF_REF_BB (ref
));
1015 df_reg_chain_unlink (ref
);
1019 /* Create the insn record for INSN. If there was one there, zero it
/* Create (or reuse) the df_insn_info record for INSN, zero it, and
   point it back at INSN.  NOTE(review): the branch guarding
   allocation when a record already exists is missing lines in this
   extract.  */
1022 struct df_insn_info
*
1023 df_insn_create_insn_record (rtx insn
)
1025 struct df_scan_problem_data
*problem_data
1026 = (struct df_scan_problem_data
*) df_scan
->problem_data
;
1027 struct df_insn_info
*insn_rec
;
1029 df_grow_insn_info ();
1030 insn_rec
= DF_INSN_INFO_GET (insn
);
1033 insn_rec
= (struct df_insn_info
*) pool_alloc (problem_data
->insn_pool
);
1034 DF_INSN_INFO_SET (insn
, insn_rec
);
1036 memset (insn_rec
, 0, sizeof (struct df_insn_info
));
1037 insn_rec
->insn
= insn
;
1042 /* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain. */
1045 df_ref_chain_delete_du_chain (df_ref
*ref_rec
)
/* NOTE(review): the loop framing over the NULL-terminated REF_REC
   vector is missing from this extract.  */
1049 df_ref ref
= *ref_rec
;
1050 /* CHAIN is allocated by DF_CHAIN. So make sure to
1051 pass df_scan instance for the problem. */
1052 if (DF_REF_CHAIN (ref
))
1053 df_chain_unlink (ref
);
1059 /* Delete all refs in the ref chain. */
1062 df_ref_chain_delete (df_ref
*ref_rec
)
1064 df_ref
*start
= ref_rec
;
/* NOTE(review): the loop framing and the free of START are missing
   from this extract.  */
1067 df_reg_chain_unlink (*ref_rec
);
1071 /* If the list is empty, it has a special shared element that is not
1078 /* Delete the hardreg chain. */
/* Free each df_mw_hardreg in the HARDREGS vector back to the
   mw_reg pool.  NOTE(review): the loop framing lines are missing
   from this extract.  */
1081 df_mw_hardreg_chain_delete (struct df_mw_hardreg
**hardregs
)
1083 struct df_scan_problem_data
*problem_data
;
1088 problem_data
= (struct df_scan_problem_data
*) df_scan
->problem_data
;
1092 pool_free (problem_data
->mw_reg_pool
, *hardregs
);
1098 /* Delete all of the refs information from INSN. BB must be passed in
1099 except when called from df_process_deferred_rescans to mark the block
1103 df_insn_delete (basic_block bb
, unsigned int uid
)
1105 struct df_insn_info
*insn_info
= NULL
;
1109 df_grow_bb_info (df_scan
);
1110 df_grow_reg_info ();
1112 /* The block must be marked as dirty now, rather than later as in
1113 df_insn_rescan and df_notes_rescan because it may not be there at
1114 rescanning time and the mark would blow up. */
1116 df_set_bb_dirty (bb
);
1118 insn_info
= DF_INSN_UID_SAFE_GET (uid
);
1120 /* The client has deferred rescanning. */
1121 if (df
->changeable_flags
& DF_DEFER_INSN_RESCAN
)
1125 bitmap_clear_bit (&df
->insns_to_rescan
, uid
);
1126 bitmap_clear_bit (&df
->insns_to_notes_rescan
, uid
);
1127 bitmap_set_bit (&df
->insns_to_delete
, uid
);
1130 fprintf (dump_file
, "deferring deletion of insn with uid = %d.\n", uid
);
1135 fprintf (dump_file
, "deleting insn with uid = %d.\n", uid
);
1137 bitmap_clear_bit (&df
->insns_to_delete
, uid
);
1138 bitmap_clear_bit (&df
->insns_to_rescan
, uid
);
1139 bitmap_clear_bit (&df
->insns_to_notes_rescan
, uid
);
1142 struct df_scan_problem_data
*problem_data
1143 = (struct df_scan_problem_data
*) df_scan
->problem_data
;
1145 /* In general, notes do not have the insn_info fields
1146 initialized. However, combine deletes insns by changing them
1147 to notes. How clever. So we cannot just check if it is a
1148 valid insn before short circuiting this code, we need to see
1149 if we actually initialized it. */
1150 if (insn_info
->defs
)
1152 df_mw_hardreg_chain_delete (insn_info
->mw_hardregs
);
1156 df_ref_chain_delete_du_chain (insn_info
->defs
);
1157 df_ref_chain_delete_du_chain (insn_info
->uses
);
1158 df_ref_chain_delete_du_chain (insn_info
->eq_uses
);
1161 df_ref_chain_delete (insn_info
->defs
);
1162 df_ref_chain_delete (insn_info
->uses
);
1163 df_ref_chain_delete (insn_info
->eq_uses
);
1165 pool_free (problem_data
->insn_pool
, insn_info
);
1166 DF_INSN_UID_SET (uid
, NULL
);
1171 /* Free all of the refs and the mw_hardregs in COLLECTION_REC. */
/* Free every ref and mw_hardreg held in COLLECTION_REC, then release
   the four stack vectors themselves.  NOTE(review): the bodies of
   the first three FOR_EACH_VEC_ELT loops are missing from this
   extract.  */
1174 df_free_collection_rec (struct df_collection_rec
*collection_rec
)
1177 struct df_scan_problem_data
*problem_data
1178 = (struct df_scan_problem_data
*) df_scan
->problem_data
;
1180 struct df_mw_hardreg
*mw
;
1182 FOR_EACH_VEC_ELT (df_ref
, collection_rec
->def_vec
, ix
, ref
)
1184 FOR_EACH_VEC_ELT (df_ref
, collection_rec
->use_vec
, ix
, ref
)
1186 FOR_EACH_VEC_ELT (df_ref
, collection_rec
->eq_use_vec
, ix
, ref
)
1188 FOR_EACH_VEC_ELT (df_mw_hardreg_ptr
, collection_rec
->mw_vec
, ix
, mw
)
1189 pool_free (problem_data
->mw_reg_pool
, mw
);
1191 VEC_free (df_ref
, stack
, collection_rec
->def_vec
);
1192 VEC_free (df_ref
, stack
, collection_rec
->use_vec
);
1193 VEC_free (df_ref
, stack
, collection_rec
->eq_use_vec
);
1194 VEC_free (df_mw_hardreg_ptr
, stack
, collection_rec
->mw_vec
);
1197 /* Rescan INSN. Return TRUE if the rescanning produced any changes. */
1200 df_insn_rescan (rtx insn
)
1202 unsigned int uid
= INSN_UID (insn
);
1203 struct df_insn_info
*insn_info
= NULL
;
1204 basic_block bb
= BLOCK_FOR_INSN (insn
);
1205 struct df_collection_rec collection_rec
;
1207 if ((!df
) || (!INSN_P (insn
)))
1213 fprintf (dump_file
, "no bb for insn with uid = %d.\n", uid
);
1217 /* The client has disabled rescanning and plans to do it itself. */
1218 if (df
->changeable_flags
& DF_NO_INSN_RESCAN
)
1221 df_grow_bb_info (df_scan
);
1222 df_grow_reg_info ();
1224 insn_info
= DF_INSN_UID_SAFE_GET (uid
);
1226 /* The client has deferred rescanning. */
1227 if (df
->changeable_flags
& DF_DEFER_INSN_RESCAN
)
1231 insn_info
= df_insn_create_insn_record (insn
);
1232 insn_info
->defs
= df_null_ref_rec
;
1233 insn_info
->uses
= df_null_ref_rec
;
1234 insn_info
->eq_uses
= df_null_ref_rec
;
1235 insn_info
->mw_hardregs
= df_null_mw_rec
;
1238 fprintf (dump_file
, "deferring rescan insn with uid = %d.\n", uid
);
1240 bitmap_clear_bit (&df
->insns_to_delete
, uid
);
1241 bitmap_clear_bit (&df
->insns_to_notes_rescan
, uid
);
1242 bitmap_set_bit (&df
->insns_to_rescan
, INSN_UID (insn
));
1246 collection_rec
.def_vec
= VEC_alloc (df_ref
, stack
, 128);
1247 collection_rec
.use_vec
= VEC_alloc (df_ref
, stack
, 32);
1248 collection_rec
.eq_use_vec
= VEC_alloc (df_ref
, stack
, 32);
1249 collection_rec
.mw_vec
= VEC_alloc (df_mw_hardreg_ptr
, stack
, 32);
1251 bitmap_clear_bit (&df
->insns_to_delete
, uid
);
1252 bitmap_clear_bit (&df
->insns_to_rescan
, uid
);
1253 bitmap_clear_bit (&df
->insns_to_notes_rescan
, uid
);
1257 bool the_same
= df_insn_refs_verify (&collection_rec
, bb
, insn
, false);
1258 /* If there's no change, return false. */
1261 df_free_collection_rec (&collection_rec
);
1263 fprintf (dump_file
, "verify found no changes in insn with uid = %d.\n", uid
);
1267 fprintf (dump_file
, "rescanning insn with uid = %d.\n", uid
);
1269 /* There's change - we need to delete the existing info.
1270 Since the insn isn't moved, we can salvage its LUID. */
1271 luid
= DF_INSN_LUID (insn
);
1272 df_insn_delete (NULL
, uid
);
1273 df_insn_create_insn_record (insn
);
1274 DF_INSN_LUID (insn
) = luid
;
1278 struct df_insn_info
*insn_info
= df_insn_create_insn_record (insn
);
1279 df_insn_refs_collect (&collection_rec
, bb
, insn_info
);
1281 fprintf (dump_file
, "scanning new insn with uid = %d.\n", uid
);
1284 df_refs_add_to_chains (&collection_rec
, bb
, insn
);
1285 if (!DEBUG_INSN_P (insn
))
1286 df_set_bb_dirty (bb
);
1288 VEC_free (df_ref
, stack
, collection_rec
.def_vec
);
1289 VEC_free (df_ref
, stack
, collection_rec
.use_vec
);
1290 VEC_free (df_ref
, stack
, collection_rec
.eq_use_vec
);
1291 VEC_free (df_mw_hardreg_ptr
, stack
, collection_rec
.mw_vec
);
1296 /* Same as df_insn_rescan, but don't mark the basic block as
1300 df_insn_rescan_debug_internal (rtx insn
)
1302 unsigned int uid
= INSN_UID (insn
);
1303 struct df_insn_info
*insn_info
;
1305 gcc_assert (DEBUG_INSN_P (insn
)
1306 && VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn
)));
1311 insn_info
= DF_INSN_UID_SAFE_GET (INSN_UID (insn
));
1316 fprintf (dump_file
, "deleting debug_insn with uid = %d.\n", uid
);
1318 bitmap_clear_bit (&df
->insns_to_delete
, uid
);
1319 bitmap_clear_bit (&df
->insns_to_rescan
, uid
);
1320 bitmap_clear_bit (&df
->insns_to_notes_rescan
, uid
);
1322 if (!insn_info
->defs
)
1325 if (insn_info
->defs
== df_null_ref_rec
1326 && insn_info
->uses
== df_null_ref_rec
1327 && insn_info
->eq_uses
== df_null_ref_rec
1328 && insn_info
->mw_hardregs
== df_null_mw_rec
)
1331 df_mw_hardreg_chain_delete (insn_info
->mw_hardregs
);
1335 df_ref_chain_delete_du_chain (insn_info
->defs
);
1336 df_ref_chain_delete_du_chain (insn_info
->uses
);
1337 df_ref_chain_delete_du_chain (insn_info
->eq_uses
);
1340 df_ref_chain_delete (insn_info
->defs
);
1341 df_ref_chain_delete (insn_info
->uses
);
1342 df_ref_chain_delete (insn_info
->eq_uses
);
1344 insn_info
->defs
= df_null_ref_rec
;
1345 insn_info
->uses
= df_null_ref_rec
;
1346 insn_info
->eq_uses
= df_null_ref_rec
;
1347 insn_info
->mw_hardregs
= df_null_mw_rec
;
1353 /* Rescan all of the insns in the function. Note that the artificial
1354 uses and defs are not touched. This function will destroy def-se
1355 or use-def chains. */
1358 df_insn_rescan_all (void)
1360 bool no_insn_rescan
= false;
1361 bool defer_insn_rescan
= false;
1367 bitmap_initialize (&tmp
, &df_bitmap_obstack
);
1369 if (df
->changeable_flags
& DF_NO_INSN_RESCAN
)
1371 df_clear_flags (DF_NO_INSN_RESCAN
);
1372 no_insn_rescan
= true;
1375 if (df
->changeable_flags
& DF_DEFER_INSN_RESCAN
)
1377 df_clear_flags (DF_DEFER_INSN_RESCAN
);
1378 defer_insn_rescan
= true;
1381 bitmap_copy (&tmp
, &df
->insns_to_delete
);
1382 EXECUTE_IF_SET_IN_BITMAP (&tmp
, 0, uid
, bi
)
1384 struct df_insn_info
*insn_info
= DF_INSN_UID_SAFE_GET (uid
);
1386 df_insn_delete (NULL
, uid
);
1389 bitmap_clear (&tmp
);
1390 bitmap_clear (&df
->insns_to_delete
);
1391 bitmap_clear (&df
->insns_to_rescan
);
1392 bitmap_clear (&df
->insns_to_notes_rescan
);
1397 FOR_BB_INSNS (bb
, insn
)
1399 df_insn_rescan (insn
);
1404 df_set_flags (DF_NO_INSN_RESCAN
);
1405 if (defer_insn_rescan
)
1406 df_set_flags (DF_DEFER_INSN_RESCAN
);
1410 /* Process all of the deferred rescans or deletions. */
1413 df_process_deferred_rescans (void)
1415 bool no_insn_rescan
= false;
1416 bool defer_insn_rescan
= false;
1421 bitmap_initialize (&tmp
, &df_bitmap_obstack
);
1423 if (df
->changeable_flags
& DF_NO_INSN_RESCAN
)
1425 df_clear_flags (DF_NO_INSN_RESCAN
);
1426 no_insn_rescan
= true;
1429 if (df
->changeable_flags
& DF_DEFER_INSN_RESCAN
)
1431 df_clear_flags (DF_DEFER_INSN_RESCAN
);
1432 defer_insn_rescan
= true;
1436 fprintf (dump_file
, "starting the processing of deferred insns\n");
1438 bitmap_copy (&tmp
, &df
->insns_to_delete
);
1439 EXECUTE_IF_SET_IN_BITMAP (&tmp
, 0, uid
, bi
)
1441 struct df_insn_info
*insn_info
= DF_INSN_UID_SAFE_GET (uid
);
1443 df_insn_delete (NULL
, uid
);
1446 bitmap_copy (&tmp
, &df
->insns_to_rescan
);
1447 EXECUTE_IF_SET_IN_BITMAP (&tmp
, 0, uid
, bi
)
1449 struct df_insn_info
*insn_info
= DF_INSN_UID_SAFE_GET (uid
);
1451 df_insn_rescan (insn_info
->insn
);
1454 bitmap_copy (&tmp
, &df
->insns_to_notes_rescan
);
1455 EXECUTE_IF_SET_IN_BITMAP (&tmp
, 0, uid
, bi
)
1457 struct df_insn_info
*insn_info
= DF_INSN_UID_SAFE_GET (uid
);
1459 df_notes_rescan (insn_info
->insn
);
1463 fprintf (dump_file
, "ending the processing of deferred insns\n");
1465 bitmap_clear (&tmp
);
1466 bitmap_clear (&df
->insns_to_delete
);
1467 bitmap_clear (&df
->insns_to_rescan
);
1468 bitmap_clear (&df
->insns_to_notes_rescan
);
1471 df_set_flags (DF_NO_INSN_RESCAN
);
1472 if (defer_insn_rescan
)
1473 df_set_flags (DF_DEFER_INSN_RESCAN
);
1475 /* If someone changed regs_ever_live during this pass, fix up the
1476 entry and exit blocks. */
1477 if (df
->redo_entry_and_exit
)
1479 df_update_entry_exit_and_calls ();
1480 df
->redo_entry_and_exit
= false;
1485 /* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1486 the uses if INCLUDE_USES. Include the eq_uses if
1490 df_count_refs (bool include_defs
, bool include_uses
,
1491 bool include_eq_uses
)
1495 unsigned int m
= df
->regs_inited
;
1497 for (regno
= 0; regno
< m
; regno
++)
1500 size
+= DF_REG_DEF_COUNT (regno
);
1502 size
+= DF_REG_USE_COUNT (regno
);
1503 if (include_eq_uses
)
1504 size
+= DF_REG_EQ_USE_COUNT (regno
);
1510 /* Take build ref table for either the uses or defs from the reg-use
1511 or reg-def chains. This version processes the refs in reg order
1512 which is likely to be best if processing the whole function. */
1515 df_reorganize_refs_by_reg_by_reg (struct df_ref_info
*ref_info
,
1518 bool include_eq_uses
)
1520 unsigned int m
= df
->regs_inited
;
1522 unsigned int offset
= 0;
1525 if (df
->changeable_flags
& DF_NO_HARD_REGS
)
1527 start
= FIRST_PSEUDO_REGISTER
;
1528 memset (ref_info
->begin
, 0, sizeof (int) * FIRST_PSEUDO_REGISTER
);
1529 memset (ref_info
->count
, 0, sizeof (int) * FIRST_PSEUDO_REGISTER
);
1534 ref_info
->total_size
1535 = df_count_refs (include_defs
, include_uses
, include_eq_uses
);
1537 df_check_and_grow_ref_info (ref_info
, 1);
1539 for (regno
= start
; regno
< m
; regno
++)
1542 ref_info
->begin
[regno
] = offset
;
1545 df_ref ref
= DF_REG_DEF_CHAIN (regno
);
1548 ref_info
->refs
[offset
] = ref
;
1549 DF_REF_ID (ref
) = offset
++;
1551 ref
= DF_REF_NEXT_REG (ref
);
1552 gcc_checking_assert (offset
< ref_info
->refs_size
);
1557 df_ref ref
= DF_REG_USE_CHAIN (regno
);
1560 ref_info
->refs
[offset
] = ref
;
1561 DF_REF_ID (ref
) = offset
++;
1563 ref
= DF_REF_NEXT_REG (ref
);
1564 gcc_checking_assert (offset
< ref_info
->refs_size
);
1567 if (include_eq_uses
)
1569 df_ref ref
= DF_REG_EQ_USE_CHAIN (regno
);
1572 ref_info
->refs
[offset
] = ref
;
1573 DF_REF_ID (ref
) = offset
++;
1575 ref
= DF_REF_NEXT_REG (ref
);
1576 gcc_checking_assert (offset
< ref_info
->refs_size
);
1579 ref_info
->count
[regno
] = count
;
1582 /* The bitmap size is not decremented when refs are deleted. So
1583 reset it now that we have squished out all of the empty
1585 ref_info
->table_size
= offset
;
1589 /* Take build ref table for either the uses or defs from the reg-use
1590 or reg-def chains. This version processes the refs in insn order
1591 which is likely to be best if processing some segment of the
1595 df_reorganize_refs_by_reg_by_insn (struct df_ref_info
*ref_info
,
1598 bool include_eq_uses
)
1601 unsigned int bb_index
;
1602 unsigned int m
= df
->regs_inited
;
1603 unsigned int offset
= 0;
1606 = (df
->changeable_flags
& DF_NO_HARD_REGS
) ? FIRST_PSEUDO_REGISTER
: 0;
1608 memset (ref_info
->begin
, 0, sizeof (int) * df
->regs_inited
);
1609 memset (ref_info
->count
, 0, sizeof (int) * df
->regs_inited
);
1611 ref_info
->total_size
= df_count_refs (include_defs
, include_uses
, include_eq_uses
);
1612 df_check_and_grow_ref_info (ref_info
, 1);
1614 EXECUTE_IF_SET_IN_BITMAP (df
->blocks_to_analyze
, 0, bb_index
, bi
)
1616 basic_block bb
= BASIC_BLOCK (bb_index
);
1621 for (ref_rec
= df_get_artificial_defs (bb_index
); *ref_rec
; ref_rec
++)
1623 unsigned int regno
= DF_REF_REGNO (*ref_rec
);
1624 ref_info
->count
[regno
]++;
1627 for (ref_rec
= df_get_artificial_uses (bb_index
); *ref_rec
; ref_rec
++)
1629 unsigned int regno
= DF_REF_REGNO (*ref_rec
);
1630 ref_info
->count
[regno
]++;
1633 FOR_BB_INSNS (bb
, insn
)
1637 unsigned int uid
= INSN_UID (insn
);
1640 for (ref_rec
= DF_INSN_UID_DEFS (uid
); *ref_rec
; ref_rec
++)
1642 unsigned int regno
= DF_REF_REGNO (*ref_rec
);
1643 ref_info
->count
[regno
]++;
1646 for (ref_rec
= DF_INSN_UID_USES (uid
); *ref_rec
; ref_rec
++)
1648 unsigned int regno
= DF_REF_REGNO (*ref_rec
);
1649 ref_info
->count
[regno
]++;
1651 if (include_eq_uses
)
1652 for (ref_rec
= DF_INSN_UID_EQ_USES (uid
); *ref_rec
; ref_rec
++)
1654 unsigned int regno
= DF_REF_REGNO (*ref_rec
);
1655 ref_info
->count
[regno
]++;
1661 for (r
= start
; r
< m
; r
++)
1663 ref_info
->begin
[r
] = offset
;
1664 offset
+= ref_info
->count
[r
];
1665 ref_info
->count
[r
] = 0;
1668 EXECUTE_IF_SET_IN_BITMAP (df
->blocks_to_analyze
, 0, bb_index
, bi
)
1670 basic_block bb
= BASIC_BLOCK (bb_index
);
1675 for (ref_rec
= df_get_artificial_defs (bb_index
); *ref_rec
; ref_rec
++)
1677 df_ref ref
= *ref_rec
;
1678 unsigned int regno
= DF_REF_REGNO (ref
);
1682 = ref_info
->begin
[regno
] + ref_info
->count
[regno
]++;
1683 DF_REF_ID (ref
) = id
;
1684 ref_info
->refs
[id
] = ref
;
1688 for (ref_rec
= df_get_artificial_uses (bb_index
); *ref_rec
; ref_rec
++)
1690 df_ref ref
= *ref_rec
;
1691 unsigned int regno
= DF_REF_REGNO (ref
);
1695 = ref_info
->begin
[regno
] + ref_info
->count
[regno
]++;
1696 DF_REF_ID (ref
) = id
;
1697 ref_info
->refs
[id
] = ref
;
1701 FOR_BB_INSNS (bb
, insn
)
1705 unsigned int uid
= INSN_UID (insn
);
1708 for (ref_rec
= DF_INSN_UID_DEFS (uid
); *ref_rec
; ref_rec
++)
1710 df_ref ref
= *ref_rec
;
1711 unsigned int regno
= DF_REF_REGNO (ref
);
1715 = ref_info
->begin
[regno
] + ref_info
->count
[regno
]++;
1716 DF_REF_ID (ref
) = id
;
1717 ref_info
->refs
[id
] = ref
;
1721 for (ref_rec
= DF_INSN_UID_USES (uid
); *ref_rec
; ref_rec
++)
1723 df_ref ref
= *ref_rec
;
1724 unsigned int regno
= DF_REF_REGNO (ref
);
1728 = ref_info
->begin
[regno
] + ref_info
->count
[regno
]++;
1729 DF_REF_ID (ref
) = id
;
1730 ref_info
->refs
[id
] = ref
;
1733 if (include_eq_uses
)
1734 for (ref_rec
= DF_INSN_UID_EQ_USES (uid
); *ref_rec
; ref_rec
++)
1736 df_ref ref
= *ref_rec
;
1737 unsigned int regno
= DF_REF_REGNO (ref
);
1741 = ref_info
->begin
[regno
] + ref_info
->count
[regno
]++;
1742 DF_REF_ID (ref
) = id
;
1743 ref_info
->refs
[id
] = ref
;
1750 /* The bitmap size is not decremented when refs are deleted. So
1751 reset it now that we have squished out all of the empty
1754 ref_info
->table_size
= offset
;
1757 /* Take build ref table for either the uses or defs from the reg-use
1758 or reg-def chains. */
1761 df_reorganize_refs_by_reg (struct df_ref_info
*ref_info
,
1764 bool include_eq_uses
)
1766 if (df
->analyze_subset
)
1767 df_reorganize_refs_by_reg_by_insn (ref_info
, include_defs
,
1768 include_uses
, include_eq_uses
);
1770 df_reorganize_refs_by_reg_by_reg (ref_info
, include_defs
,
1771 include_uses
, include_eq_uses
);
1775 /* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET. */
1777 df_add_refs_to_table (unsigned int offset
,
1778 struct df_ref_info
*ref_info
,
1783 df_ref ref
= *ref_vec
;
1784 if ((!(df
->changeable_flags
& DF_NO_HARD_REGS
))
1785 || (DF_REF_REGNO (ref
) >= FIRST_PSEUDO_REGISTER
))
1787 ref_info
->refs
[offset
] = ref
;
1788 DF_REF_ID (*ref_vec
) = offset
++;
1796 /* Count the number of refs in all of the insns of BB. Include the
1797 defs if INCLUDE_DEFS. Include the uses if INCLUDE_USES. Include the
1798 eq_uses if INCLUDE_EQ_USES. */
1801 df_reorganize_refs_by_insn_bb (basic_block bb
, unsigned int offset
,
1802 struct df_ref_info
*ref_info
,
1803 bool include_defs
, bool include_uses
,
1804 bool include_eq_uses
)
1809 offset
= df_add_refs_to_table (offset
, ref_info
,
1810 df_get_artificial_defs (bb
->index
));
1812 offset
= df_add_refs_to_table (offset
, ref_info
,
1813 df_get_artificial_uses (bb
->index
));
1815 FOR_BB_INSNS (bb
, insn
)
1818 unsigned int uid
= INSN_UID (insn
);
1820 offset
= df_add_refs_to_table (offset
, ref_info
,
1821 DF_INSN_UID_DEFS (uid
));
1823 offset
= df_add_refs_to_table (offset
, ref_info
,
1824 DF_INSN_UID_USES (uid
));
1825 if (include_eq_uses
)
1826 offset
= df_add_refs_to_table (offset
, ref_info
,
1827 DF_INSN_UID_EQ_USES (uid
));
1833 /* Organize the refs by insn into the table in REF_INFO. If
1834 blocks_to_analyze is defined, use that set, otherwise the entire
1835 program. Include the defs if INCLUDE_DEFS. Include the uses if
1836 INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES. */
1839 df_reorganize_refs_by_insn (struct df_ref_info
*ref_info
,
1840 bool include_defs
, bool include_uses
,
1841 bool include_eq_uses
)
1844 unsigned int offset
= 0;
1846 ref_info
->total_size
= df_count_refs (include_defs
, include_uses
, include_eq_uses
);
1847 df_check_and_grow_ref_info (ref_info
, 1);
1848 if (df
->blocks_to_analyze
)
1853 EXECUTE_IF_SET_IN_BITMAP (df
->blocks_to_analyze
, 0, index
, bi
)
1855 offset
= df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index
), offset
, ref_info
,
1856 include_defs
, include_uses
,
1860 ref_info
->table_size
= offset
;
1865 offset
= df_reorganize_refs_by_insn_bb (bb
, offset
, ref_info
,
1866 include_defs
, include_uses
,
1868 ref_info
->table_size
= offset
;
1873 /* If the use refs in DF are not organized, reorganize them. */
1876 df_maybe_reorganize_use_refs (enum df_ref_order order
)
1878 if (order
== df
->use_info
.ref_order
)
1883 case DF_REF_ORDER_BY_REG
:
1884 df_reorganize_refs_by_reg (&df
->use_info
, false, true, false);
1887 case DF_REF_ORDER_BY_REG_WITH_NOTES
:
1888 df_reorganize_refs_by_reg (&df
->use_info
, false, true, true);
1891 case DF_REF_ORDER_BY_INSN
:
1892 df_reorganize_refs_by_insn (&df
->use_info
, false, true, false);
1895 case DF_REF_ORDER_BY_INSN_WITH_NOTES
:
1896 df_reorganize_refs_by_insn (&df
->use_info
, false, true, true);
1899 case DF_REF_ORDER_NO_TABLE
:
1900 free (df
->use_info
.refs
);
1901 df
->use_info
.refs
= NULL
;
1902 df
->use_info
.refs_size
= 0;
1905 case DF_REF_ORDER_UNORDERED
:
1906 case DF_REF_ORDER_UNORDERED_WITH_NOTES
:
1911 df
->use_info
.ref_order
= order
;
1915 /* If the def refs in DF are not organized, reorganize them. */
1918 df_maybe_reorganize_def_refs (enum df_ref_order order
)
1920 if (order
== df
->def_info
.ref_order
)
1925 case DF_REF_ORDER_BY_REG
:
1926 df_reorganize_refs_by_reg (&df
->def_info
, true, false, false);
1929 case DF_REF_ORDER_BY_INSN
:
1930 df_reorganize_refs_by_insn (&df
->def_info
, true, false, false);
1933 case DF_REF_ORDER_NO_TABLE
:
1934 free (df
->def_info
.refs
);
1935 df
->def_info
.refs
= NULL
;
1936 df
->def_info
.refs_size
= 0;
1939 case DF_REF_ORDER_BY_INSN_WITH_NOTES
:
1940 case DF_REF_ORDER_BY_REG_WITH_NOTES
:
1941 case DF_REF_ORDER_UNORDERED
:
1942 case DF_REF_ORDER_UNORDERED_WITH_NOTES
:
1947 df
->def_info
.ref_order
= order
;
1951 /* Change all of the basic block references in INSN to use the insn's
1952 current basic block. This function is called from routines that move
1953 instructions from one block to another. */
1956 df_insn_change_bb (rtx insn
, basic_block new_bb
)
1958 basic_block old_bb
= BLOCK_FOR_INSN (insn
);
1959 struct df_insn_info
*insn_info
;
1960 unsigned int uid
= INSN_UID (insn
);
1962 if (old_bb
== new_bb
)
1965 set_block_for_insn (insn
, new_bb
);
1971 fprintf (dump_file
, "changing bb of uid %d\n", uid
);
1973 insn_info
= DF_INSN_UID_SAFE_GET (uid
);
1974 if (insn_info
== NULL
)
1977 fprintf (dump_file
, " unscanned insn\n");
1978 df_insn_rescan (insn
);
1985 df_set_bb_dirty (new_bb
);
1989 fprintf (dump_file
, " from %d to %d\n",
1990 old_bb
->index
, new_bb
->index
);
1991 df_set_bb_dirty (old_bb
);
1995 fprintf (dump_file
, " to %d\n", new_bb
->index
);
1999 /* Helper function for df_ref_change_reg_with_loc. */
2002 df_ref_change_reg_with_loc_1 (struct df_reg_info
*old_df
,
2003 struct df_reg_info
*new_df
,
2004 int new_regno
, rtx loc
)
2006 df_ref the_ref
= old_df
->reg_chain
;
2010 if ((!DF_REF_IS_ARTIFICIAL (the_ref
))
2011 && DF_REF_LOC (the_ref
)
2012 && (*DF_REF_LOC (the_ref
) == loc
))
2014 df_ref next_ref
= DF_REF_NEXT_REG (the_ref
);
2015 df_ref prev_ref
= DF_REF_PREV_REG (the_ref
);
2016 df_ref
*ref_vec
, *ref_vec_t
;
2017 struct df_insn_info
*insn_info
= DF_REF_INSN_INFO (the_ref
);
2018 unsigned int count
= 0;
2020 DF_REF_REGNO (the_ref
) = new_regno
;
2021 DF_REF_REG (the_ref
) = regno_reg_rtx
[new_regno
];
2023 /* Pull the_ref out of the old regno chain. */
2025 DF_REF_NEXT_REG (prev_ref
) = next_ref
;
2027 old_df
->reg_chain
= next_ref
;
2029 DF_REF_PREV_REG (next_ref
) = prev_ref
;
2032 /* Put the ref into the new regno chain. */
2033 DF_REF_PREV_REG (the_ref
) = NULL
;
2034 DF_REF_NEXT_REG (the_ref
) = new_df
->reg_chain
;
2035 if (new_df
->reg_chain
)
2036 DF_REF_PREV_REG (new_df
->reg_chain
) = the_ref
;
2037 new_df
->reg_chain
= the_ref
;
2039 if (DF_REF_BB (the_ref
))
2040 df_set_bb_dirty (DF_REF_BB (the_ref
));
2042 /* Need to sort the record again that the ref was in because
2043 the regno is a sorting key. First, find the right
2045 if (DF_REF_FLAGS (the_ref
) & DF_REF_IN_NOTE
)
2046 ref_vec
= insn_info
->eq_uses
;
2048 ref_vec
= insn_info
->uses
;
2050 fprintf (dump_file
, "changing reg in insn %d\n",
2051 DF_REF_INSN_UID (the_ref
));
2053 ref_vec_t
= ref_vec
;
2055 /* Find the length. */
2061 qsort (ref_vec
, count
, sizeof (df_ref
), df_ref_compare
);
2066 the_ref
= DF_REF_NEXT_REG (the_ref
);
2071 /* Change the regno of all refs that contained LOC from OLD_REGNO to
2072 NEW_REGNO. Refs that do not match LOC are not changed which means
2073 that artificial refs are not changed since they have no loc. This
2074 call is to support the SET_REGNO macro. */
2077 df_ref_change_reg_with_loc (int old_regno
, int new_regno
, rtx loc
)
2079 if ((!df
) || (old_regno
== -1) || (old_regno
== new_regno
))
2082 df_grow_reg_info ();
2084 df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno
),
2085 DF_REG_DEF_GET (new_regno
), new_regno
, loc
);
2086 df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno
),
2087 DF_REG_USE_GET (new_regno
), new_regno
, loc
);
2088 df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno
),
2089 DF_REG_EQ_USE_GET (new_regno
), new_regno
, loc
);
2093 /* Delete the mw_hardregs that point into the eq_notes. */
2096 df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info
*insn_info
)
2098 struct df_mw_hardreg
**mw_vec
= insn_info
->mw_hardregs
;
2099 unsigned int deleted
= 0;
2100 unsigned int count
= 0;
2101 struct df_scan_problem_data
*problem_data
2102 = (struct df_scan_problem_data
*) df_scan
->problem_data
;
2109 if ((*mw_vec
)->flags
& DF_REF_IN_NOTE
)
2111 struct df_mw_hardreg
**temp_vec
= mw_vec
;
2113 pool_free (problem_data
->mw_reg_pool
, *mw_vec
);
2115 /* Shove the remaining ones down one to fill the gap. While
2116 this looks n**2, it is highly unusual to have any mw regs
2117 in eq_notes and the chances of more than one are almost
2121 *temp_vec
= *(temp_vec
+ 1);
2135 df_scan_free_mws_vec (insn_info
->mw_hardregs
);
2136 insn_info
->mw_hardregs
= df_null_mw_rec
;
2143 /* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN. */
2146 df_notes_rescan (rtx insn
)
2148 struct df_insn_info
*insn_info
;
2149 unsigned int uid
= INSN_UID (insn
);
2154 /* The client has disabled rescanning and plans to do it itself. */
2155 if (df
->changeable_flags
& DF_NO_INSN_RESCAN
)
2158 /* Do nothing if the insn hasn't been emitted yet. */
2159 if (!BLOCK_FOR_INSN (insn
))
2162 df_grow_bb_info (df_scan
);
2163 df_grow_reg_info ();
2165 insn_info
= DF_INSN_UID_SAFE_GET (INSN_UID(insn
));
2167 /* The client has deferred rescanning. */
2168 if (df
->changeable_flags
& DF_DEFER_INSN_RESCAN
)
2172 insn_info
= df_insn_create_insn_record (insn
);
2173 insn_info
->defs
= df_null_ref_rec
;
2174 insn_info
->uses
= df_null_ref_rec
;
2175 insn_info
->eq_uses
= df_null_ref_rec
;
2176 insn_info
->mw_hardregs
= df_null_mw_rec
;
2179 bitmap_clear_bit (&df
->insns_to_delete
, uid
);
2180 /* If the insn is set to be rescanned, it does not need to also
2181 be notes rescanned. */
2182 if (!bitmap_bit_p (&df
->insns_to_rescan
, uid
))
2183 bitmap_set_bit (&df
->insns_to_notes_rescan
, INSN_UID (insn
));
2187 bitmap_clear_bit (&df
->insns_to_delete
, uid
);
2188 bitmap_clear_bit (&df
->insns_to_notes_rescan
, uid
);
2192 basic_block bb
= BLOCK_FOR_INSN (insn
);
2194 struct df_collection_rec collection_rec
;
2195 unsigned int num_deleted
;
2196 unsigned int mw_len
;
2198 memset (&collection_rec
, 0, sizeof (struct df_collection_rec
));
2199 collection_rec
.eq_use_vec
= VEC_alloc (df_ref
, stack
, 32);
2200 collection_rec
.mw_vec
= VEC_alloc (df_mw_hardreg_ptr
, stack
, 32);
2202 num_deleted
= df_mw_hardreg_chain_delete_eq_uses (insn_info
);
2203 df_ref_chain_delete (insn_info
->eq_uses
);
2204 insn_info
->eq_uses
= NULL
;
2206 /* Process REG_EQUIV/REG_EQUAL notes */
2207 for (note
= REG_NOTES (insn
); note
;
2208 note
= XEXP (note
, 1))
2210 switch (REG_NOTE_KIND (note
))
2214 df_uses_record (&collection_rec
,
2215 &XEXP (note
, 0), DF_REF_REG_USE
,
2216 bb
, insn_info
, DF_REF_IN_NOTE
);
2222 /* Find some place to put any new mw_hardregs. */
2223 df_canonize_collection_rec (&collection_rec
);
2224 mw_len
= VEC_length (df_mw_hardreg_ptr
, collection_rec
.mw_vec
);
2227 unsigned int count
= 0;
2228 struct df_mw_hardreg
**mw_rec
= insn_info
->mw_hardregs
;
2237 /* Append to the end of the existing record after
2238 expanding it if necessary. */
2239 if (mw_len
> num_deleted
)
2241 insn_info
->mw_hardregs
=
2242 XRESIZEVEC (struct df_mw_hardreg
*,
2243 insn_info
->mw_hardregs
,
2244 count
+ 1 + mw_len
);
2246 memcpy (&insn_info
->mw_hardregs
[count
],
2247 VEC_address (df_mw_hardreg_ptr
, collection_rec
.mw_vec
),
2248 mw_len
* sizeof (struct df_mw_hardreg
*));
2249 insn_info
->mw_hardregs
[count
+ mw_len
] = NULL
;
2250 qsort (insn_info
->mw_hardregs
, count
+ mw_len
,
2251 sizeof (struct df_mw_hardreg
*), df_mw_compare
);
2255 /* No vector there. */
2256 insn_info
->mw_hardregs
2257 = XNEWVEC (struct df_mw_hardreg
*, 1 + mw_len
);
2258 memcpy (insn_info
->mw_hardregs
,
2259 VEC_address (df_mw_hardreg_ptr
, collection_rec
.mw_vec
),
2260 mw_len
* sizeof (struct df_mw_hardreg
*));
2261 insn_info
->mw_hardregs
[mw_len
] = NULL
;
2264 /* Get rid of the mw_rec so that df_refs_add_to_chains will
2266 VEC_free (df_mw_hardreg_ptr
, stack
, collection_rec
.mw_vec
);
2267 df_refs_add_to_chains (&collection_rec
, bb
, insn
);
2268 VEC_free (df_ref
, stack
, collection_rec
.eq_use_vec
);
2271 df_insn_rescan (insn
);
2276 /*----------------------------------------------------------------------------
2277 Hard core instruction scanning code. No external interfaces here,
2278 just a lot of routines that look inside insns.
2279 ----------------------------------------------------------------------------*/
2282 /* Return true if the contents of two df_ref's are identical.
2283 It ignores DF_REF_MARKER. */
2286 df_ref_equal_p (df_ref ref1
, df_ref ref2
)
2294 if (DF_REF_CLASS (ref1
) != DF_REF_CLASS (ref2
)
2295 || DF_REF_REGNO (ref1
) != DF_REF_REGNO (ref2
)
2296 || DF_REF_REG (ref1
) != DF_REF_REG (ref2
)
2297 || DF_REF_TYPE (ref1
) != DF_REF_TYPE (ref2
)
2298 || ((DF_REF_FLAGS (ref1
) & ~(DF_REF_REG_MARKER
+ DF_REF_MW_HARDREG
))
2299 != (DF_REF_FLAGS (ref2
) & ~(DF_REF_REG_MARKER
+ DF_REF_MW_HARDREG
)))
2300 || DF_REF_BB (ref1
) != DF_REF_BB (ref2
)
2301 || DF_REF_INSN_INFO (ref1
) != DF_REF_INSN_INFO (ref2
))
2304 switch (DF_REF_CLASS (ref1
))
2306 case DF_REF_ARTIFICIAL
:
2310 case DF_REF_REGULAR
:
2311 return DF_REF_LOC (ref1
) == DF_REF_LOC (ref2
);
2320 /* Compare REF1 and REF2 for sorting. This is only called from places
2321 where all of the refs are of the same type, in the same insn, and
2322 have the same bb. So these fields are not checked. */
2325 df_ref_compare (const void *r1
, const void *r2
)
2327 const df_ref ref1
= *(const df_ref
*)r1
;
2328 const df_ref ref2
= *(const df_ref
*)r2
;
2333 if (DF_REF_CLASS (ref1
) != DF_REF_CLASS (ref2
))
2334 return (int)DF_REF_CLASS (ref1
) - (int)DF_REF_CLASS (ref2
);
2336 if (DF_REF_REGNO (ref1
) != DF_REF_REGNO (ref2
))
2337 return (int)DF_REF_REGNO (ref1
) - (int)DF_REF_REGNO (ref2
);
2339 if (DF_REF_TYPE (ref1
) != DF_REF_TYPE (ref2
))
2340 return (int)DF_REF_TYPE (ref1
) - (int)DF_REF_TYPE (ref2
);
2342 if (DF_REF_REG (ref1
) != DF_REF_REG (ref2
))
2343 return (int)DF_REF_ORDER (ref1
) - (int)DF_REF_ORDER (ref2
);
2345 /* Cannot look at the LOC field on artificial refs. */
2346 if (DF_REF_CLASS (ref1
) != DF_REF_ARTIFICIAL
2347 && DF_REF_LOC (ref1
) != DF_REF_LOC (ref2
))
2348 return (int)DF_REF_ORDER (ref1
) - (int)DF_REF_ORDER (ref2
);
2350 if (DF_REF_FLAGS (ref1
) != DF_REF_FLAGS (ref2
))
2352 /* If two refs are identical except that one of them has is from
2353 a mw and one is not, we need to have the one with the mw
2355 if (DF_REF_FLAGS_IS_SET (ref1
, DF_REF_MW_HARDREG
) ==
2356 DF_REF_FLAGS_IS_SET (ref2
, DF_REF_MW_HARDREG
))
2357 return DF_REF_FLAGS (ref1
) - DF_REF_FLAGS (ref2
);
2358 else if (DF_REF_FLAGS_IS_SET (ref1
, DF_REF_MW_HARDREG
))
2364 return (int)DF_REF_ORDER (ref1
) - (int)DF_REF_ORDER (ref2
);
2368 df_swap_refs (VEC(df_ref
,stack
) **ref_vec
, int i
, int j
)
2370 df_ref tmp
= VEC_index (df_ref
, *ref_vec
, i
);
2371 VEC_replace (df_ref
, *ref_vec
, i
, VEC_index (df_ref
, *ref_vec
, j
));
2372 VEC_replace (df_ref
, *ref_vec
, j
, tmp
);
2375 /* Sort and compress a set of refs. */
2378 df_sort_and_compress_refs (VEC(df_ref
,stack
) **ref_vec
)
2382 unsigned int dist
= 0;
2384 count
= VEC_length (df_ref
, *ref_vec
);
2386 /* If there are 1 or 0 elements, there is nothing to do. */
2389 else if (count
== 2)
2391 df_ref r0
= VEC_index (df_ref
, *ref_vec
, 0);
2392 df_ref r1
= VEC_index (df_ref
, *ref_vec
, 1);
2393 if (df_ref_compare (&r0
, &r1
) > 0)
2394 df_swap_refs (ref_vec
, 0, 1);
2398 for (i
= 0; i
< count
- 1; i
++)
2400 df_ref r0
= VEC_index (df_ref
, *ref_vec
, i
);
2401 df_ref r1
= VEC_index (df_ref
, *ref_vec
, i
+ 1);
2402 if (df_ref_compare (&r0
, &r1
) >= 0)
2405 /* If the array is already strictly ordered,
2406 which is the most common case for large COUNT case
2407 (which happens for CALL INSNs),
2408 no need to sort and filter out duplicate.
2409 Simply return the count.
2410 Make sure DF_GET_ADD_REFS adds refs in the increasing order
2411 of DF_REF_COMPARE. */
2414 VEC_qsort (df_ref
, *ref_vec
, df_ref_compare
);
2417 for (i
=0; i
<count
-dist
; i
++)
2419 /* Find the next ref that is not equal to the current ref. */
2420 while (i
+ dist
+ 1 < count
2421 && df_ref_equal_p (VEC_index (df_ref
, *ref_vec
, i
),
2422 VEC_index (df_ref
, *ref_vec
, i
+ dist
+ 1)))
2424 df_free_ref (VEC_index (df_ref
, *ref_vec
, i
+ dist
+ 1));
2427 /* Copy it down to the next position. */
2428 if (dist
&& i
+ dist
+ 1 < count
)
2429 VEC_replace (df_ref
, *ref_vec
, i
+ 1,
2430 VEC_index (df_ref
, *ref_vec
, i
+ dist
+ 1));
2434 VEC_truncate (df_ref
, *ref_vec
, count
);
2438 /* Return true if the contents of two df_ref's are identical.
2439 It ignores DF_REF_MARKER. */
2442 df_mw_equal_p (struct df_mw_hardreg
*mw1
, struct df_mw_hardreg
*mw2
)
2446 return (mw1
== mw2
) ||
2447 (mw1
->mw_reg
== mw2
->mw_reg
2448 && mw1
->type
== mw2
->type
2449 && mw1
->flags
== mw2
->flags
2450 && mw1
->start_regno
== mw2
->start_regno
2451 && mw1
->end_regno
== mw2
->end_regno
);
2455 /* Compare MW1 and MW2 for sorting. */
2458 df_mw_compare (const void *m1
, const void *m2
)
2460 const struct df_mw_hardreg
*const mw1
= *(const struct df_mw_hardreg
*const*)m1
;
2461 const struct df_mw_hardreg
*const mw2
= *(const struct df_mw_hardreg
*const*)m2
;
2466 if (mw1
->type
!= mw2
->type
)
2467 return mw1
->type
- mw2
->type
;
2469 if (mw1
->flags
!= mw2
->flags
)
2470 return mw1
->flags
- mw2
->flags
;
2472 if (mw1
->start_regno
!= mw2
->start_regno
)
2473 return mw1
->start_regno
- mw2
->start_regno
;
2475 if (mw1
->end_regno
!= mw2
->end_regno
)
2476 return mw1
->end_regno
- mw2
->end_regno
;
2478 if (mw1
->mw_reg
!= mw2
->mw_reg
)
2479 return mw1
->mw_order
- mw2
->mw_order
;
2485 /* Sort and compress a set of refs. */
2488 df_sort_and_compress_mws (VEC(df_mw_hardreg_ptr
,stack
) **mw_vec
)
2491 struct df_scan_problem_data
*problem_data
2492 = (struct df_scan_problem_data
*) df_scan
->problem_data
;
2494 unsigned int dist
= 0;
2496 count
= VEC_length (df_mw_hardreg_ptr
, *mw_vec
);
2499 else if (count
== 2)
2501 struct df_mw_hardreg
*m0
= VEC_index (df_mw_hardreg_ptr
, *mw_vec
, 0);
2502 struct df_mw_hardreg
*m1
= VEC_index (df_mw_hardreg_ptr
, *mw_vec
, 1);
2503 if (df_mw_compare (&m0
, &m1
) > 0)
2505 struct df_mw_hardreg
*tmp
= VEC_index (df_mw_hardreg_ptr
,
2507 VEC_replace (df_mw_hardreg_ptr
, *mw_vec
, 0,
2508 VEC_index (df_mw_hardreg_ptr
, *mw_vec
, 1));
2509 VEC_replace (df_mw_hardreg_ptr
, *mw_vec
, 1, tmp
);
2513 VEC_qsort (df_mw_hardreg_ptr
, *mw_vec
, df_mw_compare
);
2515 for (i
=0; i
<count
-dist
; i
++)
2517 /* Find the next ref that is not equal to the current ref. */
2518 while (i
+ dist
+ 1 < count
2519 && df_mw_equal_p (VEC_index (df_mw_hardreg_ptr
, *mw_vec
, i
),
2520 VEC_index (df_mw_hardreg_ptr
, *mw_vec
,
2523 pool_free (problem_data
->mw_reg_pool
,
2524 VEC_index (df_mw_hardreg_ptr
, *mw_vec
, i
+ dist
+ 1));
2527 /* Copy it down to the next position. */
2528 if (dist
&& i
+ dist
+ 1 < count
)
2529 VEC_replace (df_mw_hardreg_ptr
, *mw_vec
, i
+ 1,
2530 VEC_index (df_mw_hardreg_ptr
, *mw_vec
, i
+ dist
+ 1));
2534 VEC_truncate (df_mw_hardreg_ptr
, *mw_vec
, count
);
2538 /* Sort and remove duplicates from the COLLECTION_REC. */
2541 df_canonize_collection_rec (struct df_collection_rec
*collection_rec
)
2543 df_sort_and_compress_refs (&collection_rec
->def_vec
);
2544 df_sort_and_compress_refs (&collection_rec
->use_vec
);
2545 df_sort_and_compress_refs (&collection_rec
->eq_use_vec
);
2546 df_sort_and_compress_mws (&collection_rec
->mw_vec
);
2550 /* Add the new df_ref to appropriate reg_info/ref_info chains. */
2553 df_install_ref (df_ref this_ref
,
2554 struct df_reg_info
*reg_info
,
2555 struct df_ref_info
*ref_info
,
2558 unsigned int regno
= DF_REF_REGNO (this_ref
);
2559 /* Add the ref to the reg_{def,use,eq_use} chain. */
2560 df_ref head
= reg_info
->reg_chain
;
2562 reg_info
->reg_chain
= this_ref
;
2565 if (DF_REF_FLAGS_IS_SET (this_ref
, DF_HARD_REG_LIVE
))
2567 gcc_assert (regno
< FIRST_PSEUDO_REGISTER
);
2568 df
->hard_regs_live_count
[regno
]++;
2571 gcc_checking_assert (DF_REF_NEXT_REG (this_ref
) == NULL
2572 && DF_REF_PREV_REG (this_ref
) == NULL
);
2574 DF_REF_NEXT_REG (this_ref
) = head
;
2576 /* We cannot actually link to the head of the chain. */
2577 DF_REF_PREV_REG (this_ref
) = NULL
;
2580 DF_REF_PREV_REG (head
) = this_ref
;
2584 gcc_assert (ref_info
->ref_order
!= DF_REF_ORDER_NO_TABLE
);
2585 df_check_and_grow_ref_info (ref_info
, 1);
2586 DF_REF_ID (this_ref
) = ref_info
->table_size
;
2587 /* Add the ref to the big array of defs. */
2588 ref_info
->refs
[ref_info
->table_size
] = this_ref
;
2589 ref_info
->table_size
++;
2592 DF_REF_ID (this_ref
) = -1;
2594 ref_info
->total_size
++;
2598 /* This function takes one of the groups of refs (defs, uses or
2599 eq_uses) and installs the entire group into the insn. It also adds
2600 each of these refs into the appropriate chains. */
2603 df_install_refs (basic_block bb
,
2604 VEC(df_ref
,stack
)* old_vec
,
2605 struct df_reg_info
**reg_info
,
2606 struct df_ref_info
*ref_info
,
2611 count
= VEC_length (df_ref
, old_vec
);
2614 df_ref
*new_vec
= XNEWVEC (df_ref
, count
+ 1);
2619 switch (ref_info
->ref_order
)
2621 case DF_REF_ORDER_UNORDERED_WITH_NOTES
:
2622 case DF_REF_ORDER_BY_REG_WITH_NOTES
:
2623 case DF_REF_ORDER_BY_INSN_WITH_NOTES
:
2624 ref_info
->ref_order
= DF_REF_ORDER_UNORDERED_WITH_NOTES
;
2625 add_to_table
= true;
2627 case DF_REF_ORDER_UNORDERED
:
2628 case DF_REF_ORDER_BY_REG
:
2629 case DF_REF_ORDER_BY_INSN
:
2630 ref_info
->ref_order
= DF_REF_ORDER_UNORDERED
;
2631 add_to_table
= !is_notes
;
2634 add_to_table
= false;
2638 /* Do not add if ref is not in the right blocks. */
2639 if (add_to_table
&& df
->analyze_subset
)
2640 add_to_table
= bitmap_bit_p (df
->blocks_to_analyze
, bb
->index
);
2642 FOR_EACH_VEC_ELT (df_ref
, old_vec
, ix
, this_ref
)
2644 new_vec
[ix
] = this_ref
;
2645 df_install_ref (this_ref
, reg_info
[DF_REF_REGNO (this_ref
)],
2646 ref_info
, add_to_table
);
2649 new_vec
[count
] = NULL
;
2653 return df_null_ref_rec
;
2657 /* This function takes the mws installs the entire group into the
2660 static struct df_mw_hardreg
**
2661 df_install_mws (VEC(df_mw_hardreg_ptr
,stack
) *old_vec
)
2665 count
= VEC_length (df_mw_hardreg_ptr
, old_vec
);
2668 struct df_mw_hardreg
**new_vec
2669 = XNEWVEC (struct df_mw_hardreg
*, count
+ 1);
2670 memcpy (new_vec
, VEC_address (df_mw_hardreg_ptr
, old_vec
),
2671 sizeof (struct df_mw_hardreg
*) * count
);
2672 new_vec
[count
] = NULL
;
2676 return df_null_mw_rec
;
2680 /* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
2681 chains and update other necessary information. */
2684 df_refs_add_to_chains (struct df_collection_rec
*collection_rec
,
2685 basic_block bb
, rtx insn
)
2689 struct df_insn_info
*insn_rec
= DF_INSN_INFO_GET (insn
);
2690 /* If there is a vector in the collection rec, add it to the
2691 insn. A null rec is a signal that the caller will handle the
2693 if (collection_rec
->def_vec
)
2695 df_scan_free_ref_vec (insn_rec
->defs
);
2697 = df_install_refs (bb
, collection_rec
->def_vec
,
2699 &df
->def_info
, false);
2701 if (collection_rec
->use_vec
)
2703 df_scan_free_ref_vec (insn_rec
->uses
);
2705 = df_install_refs (bb
, collection_rec
->use_vec
,
2707 &df
->use_info
, false);
2709 if (collection_rec
->eq_use_vec
)
2711 df_scan_free_ref_vec (insn_rec
->eq_uses
);
2713 = df_install_refs (bb
, collection_rec
->eq_use_vec
,
2715 &df
->use_info
, true);
2717 if (collection_rec
->mw_vec
)
2719 df_scan_free_mws_vec (insn_rec
->mw_hardregs
);
2720 insn_rec
->mw_hardregs
2721 = df_install_mws (collection_rec
->mw_vec
);
2726 struct df_scan_bb_info
*bb_info
= df_scan_get_bb_info (bb
->index
);
2728 df_scan_free_ref_vec (bb_info
->artificial_defs
);
2729 bb_info
->artificial_defs
2730 = df_install_refs (bb
, collection_rec
->def_vec
,
2732 &df
->def_info
, false);
2733 df_scan_free_ref_vec (bb_info
->artificial_uses
);
2734 bb_info
->artificial_uses
2735 = df_install_refs (bb
, collection_rec
->use_vec
,
2737 &df
->use_info
, false);
2742 /* Allocate a ref and initialize its fields. */
2745 df_ref_create_structure (enum df_ref_class cl
,
2746 struct df_collection_rec
*collection_rec
,
2748 basic_block bb
, struct df_insn_info
*info
,
2749 enum df_ref_type ref_type
,
2752 df_ref this_ref
= NULL
;
2753 int regno
= REGNO (GET_CODE (reg
) == SUBREG
? SUBREG_REG (reg
) : reg
);
2754 struct df_scan_problem_data
*problem_data
2755 = (struct df_scan_problem_data
*) df_scan
->problem_data
;
2760 this_ref
= (df_ref
) pool_alloc (problem_data
->ref_base_pool
);
2761 gcc_checking_assert (loc
== NULL
);
2764 case DF_REF_ARTIFICIAL
:
2765 this_ref
= (df_ref
) pool_alloc (problem_data
->ref_artificial_pool
);
2766 this_ref
->artificial_ref
.bb
= bb
;
2767 gcc_checking_assert (loc
== NULL
);
2770 case DF_REF_REGULAR
:
2771 this_ref
= (df_ref
) pool_alloc (problem_data
->ref_regular_pool
);
2772 this_ref
->regular_ref
.loc
= loc
;
2773 gcc_checking_assert (loc
);
2777 DF_REF_CLASS (this_ref
) = cl
;
2778 DF_REF_ID (this_ref
) = -1;
2779 DF_REF_REG (this_ref
) = reg
;
2780 DF_REF_REGNO (this_ref
) = regno
;
2781 DF_REF_TYPE (this_ref
) = ref_type
;
2782 DF_REF_INSN_INFO (this_ref
) = info
;
2783 DF_REF_CHAIN (this_ref
) = NULL
;
2784 DF_REF_FLAGS (this_ref
) = ref_flags
;
2785 DF_REF_NEXT_REG (this_ref
) = NULL
;
2786 DF_REF_PREV_REG (this_ref
) = NULL
;
2787 DF_REF_ORDER (this_ref
) = df
->ref_order
++;
2789 /* We need to clear this bit because fwprop, and in the future
2790 possibly other optimizations sometimes create new refs using ond
2791 refs as the model. */
2792 DF_REF_FLAGS_CLEAR (this_ref
, DF_HARD_REG_LIVE
);
2794 /* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
2795 if (regno
< FIRST_PSEUDO_REGISTER
2796 && !DF_REF_IS_ARTIFICIAL (this_ref
)
2797 && !DEBUG_INSN_P (DF_REF_INSN (this_ref
)))
2799 if (DF_REF_REG_DEF_P (this_ref
))
2801 if (!DF_REF_FLAGS_IS_SET (this_ref
, DF_REF_MAY_CLOBBER
))
2802 DF_REF_FLAGS_SET (this_ref
, DF_HARD_REG_LIVE
);
2804 else if (!(TEST_HARD_REG_BIT (elim_reg_set
, regno
)
2805 && (regno
== FRAME_POINTER_REGNUM
2806 || regno
== ARG_POINTER_REGNUM
)))
2807 DF_REF_FLAGS_SET (this_ref
, DF_HARD_REG_LIVE
);
2812 if (DF_REF_REG_DEF_P (this_ref
))
2813 VEC_safe_push (df_ref
, stack
, collection_rec
->def_vec
, this_ref
);
2814 else if (DF_REF_FLAGS (this_ref
) & DF_REF_IN_NOTE
)
2815 VEC_safe_push (df_ref
, stack
, collection_rec
->eq_use_vec
, this_ref
);
2817 VEC_safe_push (df_ref
, stack
, collection_rec
->use_vec
, this_ref
);
2820 df_install_ref_incremental (this_ref
);
2826 /* Create new references of type DF_REF_TYPE for each part of register REG
2827 at address LOC within INSN of BB. */
2831 df_ref_record (enum df_ref_class cl
,
2832 struct df_collection_rec
*collection_rec
,
2834 basic_block bb
, struct df_insn_info
*insn_info
,
2835 enum df_ref_type ref_type
,
2840 gcc_checking_assert (REG_P (reg
) || GET_CODE (reg
) == SUBREG
);
2842 regno
= REGNO (GET_CODE (reg
) == SUBREG
? SUBREG_REG (reg
) : reg
);
2843 if (regno
< FIRST_PSEUDO_REGISTER
)
2845 struct df_mw_hardreg
*hardreg
= NULL
;
2846 struct df_scan_problem_data
*problem_data
2847 = (struct df_scan_problem_data
*) df_scan
->problem_data
;
2849 unsigned int endregno
;
2852 if (GET_CODE (reg
) == SUBREG
)
2854 regno
+= subreg_regno_offset (regno
, GET_MODE (SUBREG_REG (reg
)),
2855 SUBREG_BYTE (reg
), GET_MODE (reg
));
2856 endregno
= regno
+ subreg_nregs (reg
);
2859 endregno
= END_HARD_REGNO (reg
);
2861 /* If this is a multiword hardreg, we create some extra
2862 datastructures that will enable us to easily build REG_DEAD
2863 and REG_UNUSED notes. */
2865 && (endregno
!= regno
+ 1) && insn_info
)
2867 /* Sets to a subreg of a multiword register are partial.
2868 Sets to a non-subreg of a multiword register are not. */
2869 if (GET_CODE (reg
) == SUBREG
)
2870 ref_flags
|= DF_REF_PARTIAL
;
2871 ref_flags
|= DF_REF_MW_HARDREG
;
2873 hardreg
= (struct df_mw_hardreg
*) pool_alloc (problem_data
->mw_reg_pool
);
2874 hardreg
->type
= ref_type
;
2875 hardreg
->flags
= ref_flags
;
2876 hardreg
->mw_reg
= reg
;
2877 hardreg
->start_regno
= regno
;
2878 hardreg
->end_regno
= endregno
- 1;
2879 hardreg
->mw_order
= df
->ref_order
++;
2880 VEC_safe_push (df_mw_hardreg_ptr
, stack
, collection_rec
->mw_vec
,
2884 for (i
= regno
; i
< endregno
; i
++)
2886 ref
= df_ref_create_structure (cl
, collection_rec
, regno_reg_rtx
[i
], loc
,
2887 bb
, insn_info
, ref_type
, ref_flags
);
2889 gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref
)) == i
);
2894 df_ref_create_structure (cl
, collection_rec
, reg
, loc
, bb
, insn_info
,
2895 ref_type
, ref_flags
);
2900 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
2901 covered by the outer mode is smaller than that covered by the inner mode,
2902 is a read-modify-write operation.
2903 This function returns true iff the SUBREG X is such a SUBREG. */
2906 df_read_modify_subreg_p (rtx x
)
2908 unsigned int isize
, osize
;
2909 if (GET_CODE (x
) != SUBREG
)
2911 isize
= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
)));
2912 osize
= GET_MODE_SIZE (GET_MODE (x
));
2913 return isize
> osize
2914 && isize
> REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x
)));
2918 /* Process all the registers defined in the rtx pointed by LOC.
2919 Autoincrement/decrement definitions will be picked up by df_uses_record.
2920 Any change here has to be matched in df_find_hard_reg_defs_1. */
2923 df_def_record_1 (struct df_collection_rec
*collection_rec
,
2924 rtx
*loc
, basic_block bb
, struct df_insn_info
*insn_info
,
2929 /* It is legal to have a set destination be a parallel. */
2930 if (GET_CODE (dst
) == PARALLEL
)
2933 for (i
= XVECLEN (dst
, 0) - 1; i
>= 0; i
--)
2935 rtx temp
= XVECEXP (dst
, 0, i
);
2936 gcc_assert (GET_CODE (temp
) == EXPR_LIST
);
2937 df_def_record_1 (collection_rec
, &XEXP (temp
, 0),
2938 bb
, insn_info
, flags
);
2943 if (GET_CODE (dst
) == STRICT_LOW_PART
)
2945 flags
|= DF_REF_READ_WRITE
| DF_REF_PARTIAL
| DF_REF_STRICT_LOW_PART
;
2947 loc
= &XEXP (dst
, 0);
2951 if (GET_CODE (dst
) == ZERO_EXTRACT
)
2953 flags
|= DF_REF_READ_WRITE
| DF_REF_PARTIAL
| DF_REF_ZERO_EXTRACT
;
2955 loc
= &XEXP (dst
, 0);
2959 /* At this point if we do not have a reg or a subreg, just return. */
2962 df_ref_record (DF_REF_REGULAR
, collection_rec
,
2963 dst
, loc
, bb
, insn_info
, DF_REF_REG_DEF
, flags
);
2965 /* We want to keep sp alive everywhere - by making all
2966 writes to sp also use of sp. */
2967 if (REGNO (dst
) == STACK_POINTER_REGNUM
)
2968 df_ref_record (DF_REF_BASE
, collection_rec
,
2969 dst
, NULL
, bb
, insn_info
, DF_REF_REG_USE
, flags
);
2971 else if (GET_CODE (dst
) == SUBREG
&& REG_P (SUBREG_REG (dst
)))
2973 if (df_read_modify_subreg_p (dst
))
2974 flags
|= DF_REF_READ_WRITE
| DF_REF_PARTIAL
;
2976 flags
|= DF_REF_SUBREG
;
2978 df_ref_record (DF_REF_REGULAR
, collection_rec
,
2979 dst
, loc
, bb
, insn_info
, DF_REF_REG_DEF
, flags
);
2984 /* Process all the registers defined in the pattern rtx, X. Any change
2985 here has to be matched in df_find_hard_reg_defs. */
2988 df_defs_record (struct df_collection_rec
*collection_rec
,
2989 rtx x
, basic_block bb
, struct df_insn_info
*insn_info
,
2992 RTX_CODE code
= GET_CODE (x
);
2998 df_def_record_1 (collection_rec
, &SET_DEST (x
), bb
, insn_info
, flags
);
3002 flags
|= DF_REF_MUST_CLOBBER
;
3003 df_def_record_1 (collection_rec
, &XEXP (x
, 0), bb
, insn_info
, flags
);
3007 df_defs_record (collection_rec
, COND_EXEC_CODE (x
),
3008 bb
, insn_info
, DF_REF_CONDITIONAL
);
3012 for (i
= 0; i
< XVECLEN (x
, 0); i
++)
3013 df_defs_record (collection_rec
, XVECEXP (x
, 0, i
),
3014 bb
, insn_info
, flags
);
3017 /* No DEFs to record in other cases */
3022 /* Set bits in *DEFS for hard registers found in the rtx DST, which is the
3023 destination of a set or clobber. This has to match the logic in
3024 df_defs_record_1. */
3027 df_find_hard_reg_defs_1 (rtx dst
, HARD_REG_SET
*defs
)
3029 /* It is legal to have a set destination be a parallel. */
3030 if (GET_CODE (dst
) == PARALLEL
)
3033 for (i
= XVECLEN (dst
, 0) - 1; i
>= 0; i
--)
3035 rtx temp
= XVECEXP (dst
, 0, i
);
3036 gcc_assert (GET_CODE (temp
) == EXPR_LIST
);
3037 df_find_hard_reg_defs_1 (XEXP (temp
, 0), defs
);
3042 if (GET_CODE (dst
) == STRICT_LOW_PART
)
3043 dst
= XEXP (dst
, 0);
3045 if (GET_CODE (dst
) == ZERO_EXTRACT
)
3046 dst
= XEXP (dst
, 0);
3048 /* At this point if we do not have a reg or a subreg, just return. */
3049 if (REG_P (dst
) && HARD_REGISTER_P (dst
))
3050 SET_HARD_REG_BIT (*defs
, REGNO (dst
));
3051 else if (GET_CODE (dst
) == SUBREG
3052 && REG_P (SUBREG_REG (dst
)) && HARD_REGISTER_P (dst
))
3053 SET_HARD_REG_BIT (*defs
, REGNO (SUBREG_REG (dst
)));
3056 /* Set bits in *DEFS for hard registers defined in the pattern X. This
3057 has to match the logic in df_defs_record. */
3060 df_find_hard_reg_defs (rtx x
, HARD_REG_SET
*defs
)
3062 RTX_CODE code
= GET_CODE (x
);
3068 df_find_hard_reg_defs_1 (SET_DEST (x
), defs
);
3072 df_find_hard_reg_defs_1 (XEXP (x
, 0), defs
);
3076 df_find_hard_reg_defs (COND_EXEC_CODE (x
), defs
);
3080 for (i
= 0; i
< XVECLEN (x
, 0); i
++)
3081 df_find_hard_reg_defs (XVECEXP (x
, 0, i
), defs
);
3084 /* No DEFs to record in other cases */
3090 /* Process all the registers used in the rtx at address LOC. */
3093 df_uses_record (struct df_collection_rec
*collection_rec
,
3094 rtx
*loc
, enum df_ref_type ref_type
,
3095 basic_block bb
, struct df_insn_info
*insn_info
,
3105 code
= GET_CODE (x
);
3122 /* If we are clobbering a MEM, mark any registers inside the address
3124 if (MEM_P (XEXP (x
, 0)))
3125 df_uses_record (collection_rec
,
3126 &XEXP (XEXP (x
, 0), 0),
3127 DF_REF_REG_MEM_STORE
,
3131 /* If we're clobbering a REG then we have a def so ignore. */
3135 df_uses_record (collection_rec
,
3136 &XEXP (x
, 0), DF_REF_REG_MEM_LOAD
,
3137 bb
, insn_info
, flags
& DF_REF_IN_NOTE
);
3141 /* While we're here, optimize this case. */
3142 flags
|= DF_REF_PARTIAL
;
3143 /* In case the SUBREG is not of a REG, do not optimize. */
3144 if (!REG_P (SUBREG_REG (x
)))
3146 loc
= &SUBREG_REG (x
);
3147 df_uses_record (collection_rec
, loc
, ref_type
, bb
, insn_info
, flags
);
3150 /* ... Fall through ... */
3153 df_ref_record (DF_REF_REGULAR
, collection_rec
,
3154 x
, loc
, bb
, insn_info
,
3161 df_uses_record (collection_rec
,
3162 &XEXP (x
, 1), ref_type
, bb
, insn_info
, flags
);
3163 df_uses_record (collection_rec
,
3164 &XEXP (x
, 2), ref_type
, bb
, insn_info
, flags
);
3166 /* If the parameters to the zero or sign extract are
3167 constants, strip them off and recurse, otherwise there is
3168 no information that we can gain from this operation. */
3169 if (code
== ZERO_EXTRACT
)
3170 flags
|= DF_REF_ZERO_EXTRACT
;
3172 flags
|= DF_REF_SIGN_EXTRACT
;
3174 df_uses_record (collection_rec
,
3175 &XEXP (x
, 0), ref_type
, bb
, insn_info
, flags
);
3182 rtx dst
= SET_DEST (x
);
3183 gcc_assert (!(flags
& DF_REF_IN_NOTE
));
3184 df_uses_record (collection_rec
,
3185 &SET_SRC (x
), DF_REF_REG_USE
, bb
, insn_info
, flags
);
3187 switch (GET_CODE (dst
))
3190 if (df_read_modify_subreg_p (dst
))
3192 df_uses_record (collection_rec
, &SUBREG_REG (dst
),
3193 DF_REF_REG_USE
, bb
, insn_info
,
3194 flags
| DF_REF_READ_WRITE
| DF_REF_SUBREG
);
3205 df_uses_record (collection_rec
, &XEXP (dst
, 0),
3206 DF_REF_REG_MEM_STORE
, bb
, insn_info
, flags
);
3208 case STRICT_LOW_PART
:
3210 rtx
*temp
= &XEXP (dst
, 0);
3211 /* A strict_low_part uses the whole REG and not just the
3213 dst
= XEXP (dst
, 0);
3214 df_uses_record (collection_rec
,
3215 (GET_CODE (dst
) == SUBREG
) ? &SUBREG_REG (dst
) : temp
,
3216 DF_REF_REG_USE
, bb
, insn_info
,
3217 DF_REF_READ_WRITE
| DF_REF_STRICT_LOW_PART
);
3222 df_uses_record (collection_rec
, &XEXP (dst
, 1),
3223 DF_REF_REG_USE
, bb
, insn_info
, flags
);
3224 df_uses_record (collection_rec
, &XEXP (dst
, 2),
3225 DF_REF_REG_USE
, bb
, insn_info
, flags
);
3226 if (GET_CODE (XEXP (dst
,0)) == MEM
)
3227 df_uses_record (collection_rec
, &XEXP (dst
, 0),
3228 DF_REF_REG_USE
, bb
, insn_info
,
3231 df_uses_record (collection_rec
, &XEXP (dst
, 0),
3232 DF_REF_REG_USE
, bb
, insn_info
,
3233 DF_REF_READ_WRITE
| DF_REF_ZERO_EXTRACT
);
3248 case UNSPEC_VOLATILE
:
3252 /* Traditional and volatile asm instructions must be
3253 considered to use and clobber all hard registers, all
3254 pseudo-registers and all of memory. So must TRAP_IF and
3255 UNSPEC_VOLATILE operations.
3257 Consider for instance a volatile asm that changes the fpu
3258 rounding mode. An insn should not be moved across this
3259 even if it only uses pseudo-regs because it might give an
3260 incorrectly rounded result.
3262 However, flow.c's liveness computation did *not* do this,
3263 giving the reasoning as " ?!? Unfortunately, marking all
3264 hard registers as live causes massive problems for the
3265 register allocator and marking all pseudos as live creates
3266 mountains of uninitialized variable warnings."
3268 In order to maintain the status quo with regard to liveness
3269 and uses, we do what flow.c did and just mark any regs we
3270 can find in ASM_OPERANDS as used. In global asm insns are
3271 scanned and regs_asm_clobbered is filled out.
3273 For all ASM_OPERANDS, we must traverse the vector of input
3274 operands. We can not just fall through here since then we
3275 would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
3276 which do not indicate traditional asms unlike their normal
3278 if (code
== ASM_OPERANDS
)
3282 for (j
= 0; j
< ASM_OPERANDS_INPUT_LENGTH (x
); j
++)
3283 df_uses_record (collection_rec
, &ASM_OPERANDS_INPUT (x
, j
),
3284 DF_REF_REG_USE
, bb
, insn_info
, flags
);
3291 df_uses_record (collection_rec
,
3292 &PAT_VAR_LOCATION_LOC (x
),
3293 DF_REF_REG_USE
, bb
, insn_info
, flags
);
3302 gcc_assert (!DEBUG_INSN_P (insn_info
->insn
));
3303 /* Catch the def of the register being modified. */
3304 df_ref_record (DF_REF_REGULAR
, collection_rec
, XEXP (x
, 0), &XEXP (x
, 0),
3307 flags
| DF_REF_READ_WRITE
| DF_REF_PRE_POST_MODIFY
);
3309 /* ... Fall through to handle uses ... */
3315 /* Recursively scan the operands of this expression. */
3317 const char *fmt
= GET_RTX_FORMAT (code
);
3320 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3324 /* Tail recursive case: save a function call level. */
3330 df_uses_record (collection_rec
, &XEXP (x
, i
), ref_type
,
3331 bb
, insn_info
, flags
);
3333 else if (fmt
[i
] == 'E')
3336 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3337 df_uses_record (collection_rec
,
3338 &XVECEXP (x
, i
, j
), ref_type
,
3339 bb
, insn_info
, flags
);
3348 /* For all DF_REF_CONDITIONAL defs, add a corresponding uses. */
3351 df_get_conditional_uses (struct df_collection_rec
*collection_rec
)
3356 FOR_EACH_VEC_ELT (df_ref
, collection_rec
->def_vec
, ix
, ref
)
3358 if (DF_REF_FLAGS_IS_SET (ref
, DF_REF_CONDITIONAL
))
3362 use
= df_ref_create_structure (DF_REF_CLASS (ref
), collection_rec
, DF_REF_REG (ref
),
3363 DF_REF_LOC (ref
), DF_REF_BB (ref
),
3364 DF_REF_INSN_INFO (ref
), DF_REF_REG_USE
,
3365 DF_REF_FLAGS (ref
) & ~DF_REF_CONDITIONAL
);
3366 DF_REF_REGNO (use
) = DF_REF_REGNO (ref
);
3372 /* Get call's extra defs and uses (track caller-saved registers). */
3375 df_get_call_refs (struct df_collection_rec
*collection_rec
,
3377 struct df_insn_info
*insn_info
,
3381 bool is_sibling_call
;
3383 HARD_REG_SET defs_generated
;
3385 CLEAR_HARD_REG_SET (defs_generated
);
3386 df_find_hard_reg_defs (PATTERN (insn_info
->insn
), &defs_generated
);
3387 is_sibling_call
= SIBLING_CALL_P (insn_info
->insn
);
3389 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3391 if (i
== STACK_POINTER_REGNUM
)
3392 /* The stack ptr is used (honorarily) by a CALL insn. */
3393 df_ref_record (DF_REF_BASE
, collection_rec
, regno_reg_rtx
[i
],
3394 NULL
, bb
, insn_info
, DF_REF_REG_USE
,
3395 DF_REF_CALL_STACK_USAGE
| flags
);
3396 else if (global_regs
[i
])
3398 /* Calls to const functions cannot access any global registers and
3399 calls to pure functions cannot set them. All other calls may
3400 reference any of the global registers, so they are recorded as
3402 if (!RTL_CONST_CALL_P (insn_info
->insn
))
3404 df_ref_record (DF_REF_BASE
, collection_rec
, regno_reg_rtx
[i
],
3405 NULL
, bb
, insn_info
, DF_REF_REG_USE
, flags
);
3406 if (!RTL_PURE_CALL_P (insn_info
->insn
))
3407 df_ref_record (DF_REF_BASE
, collection_rec
, regno_reg_rtx
[i
],
3408 NULL
, bb
, insn_info
, DF_REF_REG_DEF
, flags
);
3411 else if (TEST_HARD_REG_BIT (regs_invalidated_by_call
, i
)
3412 /* no clobbers for regs that are the result of the call */
3413 && !TEST_HARD_REG_BIT (defs_generated
, i
)
3414 && (!is_sibling_call
3415 || !bitmap_bit_p (df
->exit_block_uses
, i
)
3416 || refers_to_regno_p (i
, i
+1,
3417 crtl
->return_rtx
, NULL
)))
3418 df_ref_record (DF_REF_BASE
, collection_rec
, regno_reg_rtx
[i
],
3419 NULL
, bb
, insn_info
, DF_REF_REG_DEF
,
3420 DF_REF_MAY_CLOBBER
| flags
);
3423 /* Record the registers used to pass arguments, and explicitly
3424 noted as clobbered. */
3425 for (note
= CALL_INSN_FUNCTION_USAGE (insn_info
->insn
); note
;
3426 note
= XEXP (note
, 1))
3428 if (GET_CODE (XEXP (note
, 0)) == USE
)
3429 df_uses_record (collection_rec
, &XEXP (XEXP (note
, 0), 0),
3430 DF_REF_REG_USE
, bb
, insn_info
, flags
);
3431 else if (GET_CODE (XEXP (note
, 0)) == CLOBBER
)
3433 if (REG_P (XEXP (XEXP (note
, 0), 0)))
3435 unsigned int regno
= REGNO (XEXP (XEXP (note
, 0), 0));
3436 if (!TEST_HARD_REG_BIT (defs_generated
, regno
))
3437 df_defs_record (collection_rec
, XEXP (note
, 0), bb
,
3441 df_uses_record (collection_rec
, &XEXP (note
, 0),
3442 DF_REF_REG_USE
, bb
, insn_info
, flags
);
3449 /* Collect all refs in the INSN. This function is free of any
3450 side-effect - it will create and return a lists of df_ref's in the
3451 COLLECTION_REC without putting those refs into existing ref chains
3455 df_insn_refs_collect (struct df_collection_rec
*collection_rec
,
3456 basic_block bb
, struct df_insn_info
*insn_info
)
3459 bool is_cond_exec
= (GET_CODE (PATTERN (insn_info
->insn
)) == COND_EXEC
);
3461 /* Clear out the collection record. */
3462 VEC_truncate (df_ref
, collection_rec
->def_vec
, 0);
3463 VEC_truncate (df_ref
, collection_rec
->use_vec
, 0);
3464 VEC_truncate (df_ref
, collection_rec
->eq_use_vec
, 0);
3465 VEC_truncate (df_mw_hardreg_ptr
, collection_rec
->mw_vec
, 0);
3467 /* Process REG_EQUIV/REG_EQUAL notes. */
3468 for (note
= REG_NOTES (insn_info
->insn
); note
;
3469 note
= XEXP (note
, 1))
3471 switch (REG_NOTE_KIND (note
))
3475 df_uses_record (collection_rec
,
3476 &XEXP (note
, 0), DF_REF_REG_USE
,
3477 bb
, insn_info
, DF_REF_IN_NOTE
);
3479 case REG_NON_LOCAL_GOTO
:
3480 /* The frame ptr is used by a non-local goto. */
3481 df_ref_record (DF_REF_BASE
, collection_rec
,
3482 regno_reg_rtx
[FRAME_POINTER_REGNUM
],
3483 NULL
, bb
, insn_info
,
3485 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3486 df_ref_record (DF_REF_BASE
, collection_rec
,
3487 regno_reg_rtx
[HARD_FRAME_POINTER_REGNUM
],
3488 NULL
, bb
, insn_info
,
3497 /* For CALL_INSNs, first record DF_REF_BASE register defs, as well as
3498 uses from CALL_INSN_FUNCTION_USAGE. */
3499 if (CALL_P (insn_info
->insn
))
3500 df_get_call_refs (collection_rec
, bb
, insn_info
,
3501 (is_cond_exec
) ? DF_REF_CONDITIONAL
: 0);
3503 /* Record other defs. These should be mostly for DF_REF_REGULAR, so
3504 that a qsort on the defs is unnecessary in most cases. */
3505 df_defs_record (collection_rec
,
3506 PATTERN (insn_info
->insn
), bb
, insn_info
, 0);
3508 /* Record the register uses. */
3509 df_uses_record (collection_rec
,
3510 &PATTERN (insn_info
->insn
), DF_REF_REG_USE
, bb
, insn_info
, 0);
3512 /* DF_REF_CONDITIONAL needs corresponding USES. */
3514 df_get_conditional_uses (collection_rec
);
3516 df_canonize_collection_rec (collection_rec
);
3519 /* Recompute the luids for the insns in BB. */
3522 df_recompute_luids (basic_block bb
)
3527 df_grow_insn_info ();
3529 /* Scan the block an insn at a time from beginning to end. */
3530 FOR_BB_INSNS (bb
, insn
)
3532 struct df_insn_info
*insn_info
= DF_INSN_INFO_GET (insn
);
3533 /* Inserting labels does not always trigger the incremental
3537 gcc_assert (!INSN_P (insn
));
3538 insn_info
= df_insn_create_insn_record (insn
);
3541 DF_INSN_INFO_LUID (insn_info
) = luid
;
3548 /* Collect all artificial refs at the block level for BB and add them
3549 to COLLECTION_REC. */
3552 df_bb_refs_collect (struct df_collection_rec
*collection_rec
, basic_block bb
)
3554 VEC_truncate (df_ref
, collection_rec
->def_vec
, 0);
3555 VEC_truncate (df_ref
, collection_rec
->use_vec
, 0);
3556 VEC_truncate (df_ref
, collection_rec
->eq_use_vec
, 0);
3557 VEC_truncate (df_mw_hardreg_ptr
, collection_rec
->mw_vec
, 0);
3559 if (bb
->index
== ENTRY_BLOCK
)
3561 df_entry_block_defs_collect (collection_rec
, df
->entry_block_defs
);
3564 else if (bb
->index
== EXIT_BLOCK
)
3566 df_exit_block_uses_collect (collection_rec
, df
->exit_block_uses
);
3570 #ifdef EH_RETURN_DATA_REGNO
3571 if (bb_has_eh_pred (bb
))
3574 /* Mark the registers that will contain data for the handler. */
3577 unsigned regno
= EH_RETURN_DATA_REGNO (i
);
3578 if (regno
== INVALID_REGNUM
)
3580 df_ref_record (DF_REF_ARTIFICIAL
, collection_rec
, regno_reg_rtx
[regno
], NULL
,
3581 bb
, NULL
, DF_REF_REG_DEF
, DF_REF_AT_TOP
);
3586 /* Add the hard_frame_pointer if this block is the target of a
3588 if (bb
->flags
& BB_NON_LOCAL_GOTO_TARGET
)
3589 df_ref_record (DF_REF_ARTIFICIAL
, collection_rec
, hard_frame_pointer_rtx
, NULL
,
3590 bb
, NULL
, DF_REF_REG_DEF
, DF_REF_AT_TOP
);
3592 /* Add the artificial uses. */
3593 if (bb
->index
>= NUM_FIXED_BLOCKS
)
3597 bitmap au
= bb_has_eh_pred (bb
)
3598 ? &df
->eh_block_artificial_uses
3599 : &df
->regular_block_artificial_uses
;
3601 EXECUTE_IF_SET_IN_BITMAP (au
, 0, regno
, bi
)
3603 df_ref_record (DF_REF_ARTIFICIAL
, collection_rec
, regno_reg_rtx
[regno
], NULL
,
3604 bb
, NULL
, DF_REF_REG_USE
, 0);
3608 df_canonize_collection_rec (collection_rec
);
3612 /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS. */
3615 df_bb_refs_record (int bb_index
, bool scan_insns
)
3617 basic_block bb
= BASIC_BLOCK (bb_index
);
3620 struct df_collection_rec collection_rec
;
3625 df_grow_bb_info (df_scan
);
3626 collection_rec
.def_vec
= VEC_alloc (df_ref
, stack
, 128);
3627 collection_rec
.use_vec
= VEC_alloc (df_ref
, stack
, 32);
3628 collection_rec
.eq_use_vec
= VEC_alloc (df_ref
, stack
, 32);
3629 collection_rec
.mw_vec
= VEC_alloc (df_mw_hardreg_ptr
, stack
, 32);
3632 /* Scan the block an insn at a time from beginning to end. */
3633 FOR_BB_INSNS (bb
, insn
)
3635 struct df_insn_info
*insn_info
= DF_INSN_INFO_GET (insn
);
3636 gcc_assert (!insn_info
);
3638 insn_info
= df_insn_create_insn_record (insn
);
3641 /* Record refs within INSN. */
3642 DF_INSN_INFO_LUID (insn_info
) = luid
++;
3643 df_insn_refs_collect (&collection_rec
, bb
, DF_INSN_INFO_GET (insn
));
3644 df_refs_add_to_chains (&collection_rec
, bb
, insn
);
3646 DF_INSN_INFO_LUID (insn_info
) = luid
;
3649 /* Other block level artificial refs */
3650 df_bb_refs_collect (&collection_rec
, bb
);
3651 df_refs_add_to_chains (&collection_rec
, bb
, NULL
);
3653 VEC_free (df_ref
, stack
, collection_rec
.def_vec
);
3654 VEC_free (df_ref
, stack
, collection_rec
.use_vec
);
3655 VEC_free (df_ref
, stack
, collection_rec
.eq_use_vec
);
3656 VEC_free (df_mw_hardreg_ptr
, stack
, collection_rec
.mw_vec
);
3658 /* Now that the block has been processed, set the block as dirty so
3659 LR and LIVE will get it processed. */
3660 df_set_bb_dirty (bb
);
3664 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3668 df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses
)
3674 bitmap_clear (regular_block_artificial_uses
);
3676 if (reload_completed
)
3678 if (frame_pointer_needed
)
3679 bitmap_set_bit (regular_block_artificial_uses
, HARD_FRAME_POINTER_REGNUM
);
3682 /* Before reload, there are a few registers that must be forced
3683 live everywhere -- which might not already be the case for
3684 blocks within infinite loops. */
3686 unsigned int picreg
= PIC_OFFSET_TABLE_REGNUM
;
3688 /* Any reference to any pseudo before reload is a potential
3689 reference of the frame pointer. */
3690 bitmap_set_bit (regular_block_artificial_uses
, FRAME_POINTER_REGNUM
);
3692 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3693 bitmap_set_bit (regular_block_artificial_uses
, HARD_FRAME_POINTER_REGNUM
);
3696 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3697 /* Pseudos with argument area equivalences may require
3698 reloading via the argument pointer. */
3699 if (fixed_regs
[ARG_POINTER_REGNUM
])
3700 bitmap_set_bit (regular_block_artificial_uses
, ARG_POINTER_REGNUM
);
3703 /* Any constant, or pseudo with constant equivalences, may
3704 require reloading from memory using the pic register. */
3705 if (picreg
!= INVALID_REGNUM
3706 && fixed_regs
[picreg
])
3707 bitmap_set_bit (regular_block_artificial_uses
, picreg
);
3709 /* The all-important stack pointer must always be live. */
3710 bitmap_set_bit (regular_block_artificial_uses
, STACK_POINTER_REGNUM
);
3713 /* EH_USES registers are used:
3714 1) at all insns that might throw (calls or with -fnon-call-exceptions
3717 3) to support backtraces and/or debugging, anywhere between their
3718 initialization and where they the saved registers are restored
3719 from them, including the cases where we don't reach the epilogue
3720 (noreturn call or infinite loop). */
3721 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3723 bitmap_set_bit (regular_block_artificial_uses
, i
);
3728 /* Get the artificial use set for an eh block. */
3731 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses
)
3733 bitmap_clear (eh_block_artificial_uses
);
3735 /* The following code (down through the arg_pointer setting APPEARS
3736 to be necessary because there is nothing that actually
3737 describes what the exception handling code may actually need
3739 if (reload_completed
)
3741 if (frame_pointer_needed
)
3743 bitmap_set_bit (eh_block_artificial_uses
, FRAME_POINTER_REGNUM
);
3744 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3745 bitmap_set_bit (eh_block_artificial_uses
, HARD_FRAME_POINTER_REGNUM
);
3748 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3749 if (fixed_regs
[ARG_POINTER_REGNUM
])
3750 bitmap_set_bit (eh_block_artificial_uses
, ARG_POINTER_REGNUM
);
3757 /*----------------------------------------------------------------------------
3758 Specialized hard register scanning functions.
3759 ----------------------------------------------------------------------------*/
3762 /* Mark a register in SET. Hard registers in large modes get all
3763 of their component registers set as well. */
3766 df_mark_reg (rtx reg
, void *vset
)
3768 bitmap set
= (bitmap
) vset
;
3769 int regno
= REGNO (reg
);
3771 gcc_assert (GET_MODE (reg
) != BLKmode
);
3773 if (regno
< FIRST_PSEUDO_REGISTER
)
3775 int n
= hard_regno_nregs
[regno
][GET_MODE (reg
)];
3776 bitmap_set_range (set
, regno
, n
);
3779 bitmap_set_bit (set
, regno
);
3783 /* Set the bit for regs that are considered being defined at the entry. */
3786 df_get_entry_block_def_set (bitmap entry_block_defs
)
3791 bitmap_clear (entry_block_defs
);
3793 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3794 if (FUNCTION_ARG_REGNO_P (i
))
3795 bitmap_set_bit (entry_block_defs
, INCOMING_REGNO (i
));
3797 /* The always important stack pointer. */
3798 bitmap_set_bit (entry_block_defs
, STACK_POINTER_REGNUM
);
3800 /* Once the prologue has been generated, all of these registers
3801 should just show up in the first regular block. */
3802 if (HAVE_prologue
&& epilogue_completed
)
3804 /* Defs for the callee saved registers are inserted so that the
3805 pushes have some defining location. */
3806 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3807 if ((call_used_regs
[i
] == 0) && (df_regs_ever_live_p (i
)))
3808 bitmap_set_bit (entry_block_defs
, i
);
3811 r
= targetm
.calls
.struct_value_rtx (current_function_decl
, true);
3813 bitmap_set_bit (entry_block_defs
, REGNO (r
));
3815 /* If the function has an incoming STATIC_CHAIN, it has to show up
3816 in the entry def set. */
3817 r
= targetm
.calls
.static_chain (current_function_decl
, true);
3819 bitmap_set_bit (entry_block_defs
, REGNO (r
));
3821 if ((!reload_completed
) || frame_pointer_needed
)
3823 /* Any reference to any pseudo before reload is a potential
3824 reference of the frame pointer. */
3825 bitmap_set_bit (entry_block_defs
, FRAME_POINTER_REGNUM
);
3826 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3827 /* If they are different, also mark the hard frame pointer as live. */
3828 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM
))
3829 bitmap_set_bit (entry_block_defs
, HARD_FRAME_POINTER_REGNUM
);
3833 /* These registers are live everywhere. */
3834 if (!reload_completed
)
3836 #ifdef PIC_OFFSET_TABLE_REGNUM
3837 unsigned int picreg
= PIC_OFFSET_TABLE_REGNUM
;
3840 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3841 /* Pseudos with argument area equivalences may require
3842 reloading via the argument pointer. */
3843 if (fixed_regs
[ARG_POINTER_REGNUM
])
3844 bitmap_set_bit (entry_block_defs
, ARG_POINTER_REGNUM
);
3847 #ifdef PIC_OFFSET_TABLE_REGNUM
3848 /* Any constant, or pseudo with constant equivalences, may
3849 require reloading from memory using the pic register. */
3850 if (picreg
!= INVALID_REGNUM
3851 && fixed_regs
[picreg
])
3852 bitmap_set_bit (entry_block_defs
, picreg
);
3856 #ifdef INCOMING_RETURN_ADDR_RTX
3857 if (REG_P (INCOMING_RETURN_ADDR_RTX
))
3858 bitmap_set_bit (entry_block_defs
, REGNO (INCOMING_RETURN_ADDR_RTX
));
3861 targetm
.extra_live_on_entry (entry_block_defs
);
3865 /* Return the (conservative) set of hard registers that are defined on
3866 entry to the function.
3867 It uses df->entry_block_defs to determine which register
3868 reference to include. */
3871 df_entry_block_defs_collect (struct df_collection_rec
*collection_rec
,
3872 bitmap entry_block_defs
)
3877 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs
, 0, i
, bi
)
3879 df_ref_record (DF_REF_ARTIFICIAL
, collection_rec
, regno_reg_rtx
[i
], NULL
,
3880 ENTRY_BLOCK_PTR
, NULL
, DF_REF_REG_DEF
, 0);
3883 df_canonize_collection_rec (collection_rec
);
3887 /* Record the (conservative) set of hard registers that are defined on
3888 entry to the function. */
3891 df_record_entry_block_defs (bitmap entry_block_defs
)
3893 struct df_collection_rec collection_rec
;
3894 memset (&collection_rec
, 0, sizeof (struct df_collection_rec
));
3895 collection_rec
.def_vec
= VEC_alloc (df_ref
, stack
, FIRST_PSEUDO_REGISTER
);
3896 df_entry_block_defs_collect (&collection_rec
, entry_block_defs
);
3898 /* Process bb_refs chain */
3899 df_refs_add_to_chains (&collection_rec
, BASIC_BLOCK (ENTRY_BLOCK
), NULL
);
3900 VEC_free (df_ref
, stack
, collection_rec
.def_vec
);
3904 /* Update the defs in the entry block. */
3907 df_update_entry_block_defs (void)
3910 bool changed
= false;
3912 bitmap_initialize (&refs
, &df_bitmap_obstack
);
3913 df_get_entry_block_def_set (&refs
);
3914 if (df
->entry_block_defs
)
3916 if (!bitmap_equal_p (df
->entry_block_defs
, &refs
))
3918 struct df_scan_bb_info
*bb_info
= df_scan_get_bb_info (ENTRY_BLOCK
);
3919 df_ref_chain_delete_du_chain (bb_info
->artificial_defs
);
3920 df_ref_chain_delete (bb_info
->artificial_defs
);
3921 bb_info
->artificial_defs
= NULL
;
3927 struct df_scan_problem_data
*problem_data
3928 = (struct df_scan_problem_data
*) df_scan
->problem_data
;
3930 df
->entry_block_defs
= BITMAP_ALLOC (&problem_data
->reg_bitmaps
);
3936 df_record_entry_block_defs (&refs
);
3937 bitmap_copy (df
->entry_block_defs
, &refs
);
3938 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK
));
3940 bitmap_clear (&refs
);
3944 /* Set the bit for regs that are considered being used at the exit. */
3947 df_get_exit_block_use_set (bitmap exit_block_uses
)
3950 unsigned int picreg
= PIC_OFFSET_TABLE_REGNUM
;
3952 bitmap_clear (exit_block_uses
);
3954 /* Stack pointer is always live at the exit. */
3955 bitmap_set_bit (exit_block_uses
, STACK_POINTER_REGNUM
);
3957 /* Mark the frame pointer if needed at the end of the function.
3958 If we end up eliminating it, it will be removed from the live
3959 list of each basic block by reload. */
3961 if ((!reload_completed
) || frame_pointer_needed
)
3963 bitmap_set_bit (exit_block_uses
, FRAME_POINTER_REGNUM
);
3964 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3965 /* If they are different, also mark the hard frame pointer as live. */
3966 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM
))
3967 bitmap_set_bit (exit_block_uses
, HARD_FRAME_POINTER_REGNUM
);
3971 /* Many architectures have a GP register even without flag_pic.
3972 Assume the pic register is not in use, or will be handled by
3973 other means, if it is not fixed. */
3974 if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3975 && picreg
!= INVALID_REGNUM
3976 && fixed_regs
[picreg
])
3977 bitmap_set_bit (exit_block_uses
, picreg
);
3979 /* Mark all global registers, and all registers used by the
3980 epilogue as being live at the end of the function since they
3981 may be referenced by our caller. */
3982 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3983 if (global_regs
[i
] || EPILOGUE_USES (i
))
3984 bitmap_set_bit (exit_block_uses
, i
);
3986 if (HAVE_epilogue
&& epilogue_completed
)
3988 /* Mark all call-saved registers that we actually used. */
3989 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3990 if (df_regs_ever_live_p (i
) && !LOCAL_REGNO (i
)
3991 && !TEST_HARD_REG_BIT (regs_invalidated_by_call
, i
))
3992 bitmap_set_bit (exit_block_uses
, i
);
3995 #ifdef EH_RETURN_DATA_REGNO
3996 /* Mark the registers that will contain data for the handler. */
3997 if (reload_completed
&& crtl
->calls_eh_return
)
4000 unsigned regno
= EH_RETURN_DATA_REGNO (i
);
4001 if (regno
== INVALID_REGNUM
)
4003 bitmap_set_bit (exit_block_uses
, regno
);
4007 #ifdef EH_RETURN_STACKADJ_RTX
4008 if ((!HAVE_epilogue
|| ! epilogue_completed
)
4009 && crtl
->calls_eh_return
)
4011 rtx tmp
= EH_RETURN_STACKADJ_RTX
;
4012 if (tmp
&& REG_P (tmp
))
4013 df_mark_reg (tmp
, exit_block_uses
);
4017 #ifdef EH_RETURN_HANDLER_RTX
4018 if ((!HAVE_epilogue
|| ! epilogue_completed
)
4019 && crtl
->calls_eh_return
)
4021 rtx tmp
= EH_RETURN_HANDLER_RTX
;
4022 if (tmp
&& REG_P (tmp
))
4023 df_mark_reg (tmp
, exit_block_uses
);
4027 /* Mark function return value. */
4028 diddle_return_value (df_mark_reg
, (void*) exit_block_uses
);
4032 /* Return the refs of hard registers that are used in the exit block.
4033 It uses df->exit_block_uses to determine register to include. */
4036 df_exit_block_uses_collect (struct df_collection_rec
*collection_rec
, bitmap exit_block_uses
)
4041 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses
, 0, i
, bi
)
4042 df_ref_record (DF_REF_ARTIFICIAL
, collection_rec
, regno_reg_rtx
[i
], NULL
,
4043 EXIT_BLOCK_PTR
, NULL
, DF_REF_REG_USE
, 0);
4045 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4046 /* It is deliberate that this is not put in the exit block uses but
4047 I do not know why. */
4048 if (reload_completed
4049 && !bitmap_bit_p (exit_block_uses
, ARG_POINTER_REGNUM
)
4050 && bb_has_eh_pred (EXIT_BLOCK_PTR
)
4051 && fixed_regs
[ARG_POINTER_REGNUM
])
4052 df_ref_record (DF_REF_ARTIFICIAL
, collection_rec
, regno_reg_rtx
[ARG_POINTER_REGNUM
], NULL
,
4053 EXIT_BLOCK_PTR
, NULL
, DF_REF_REG_USE
, 0);
4056 df_canonize_collection_rec (collection_rec
);
4060 /* Record the set of hard registers that are used in the exit block.
4061 It uses df->exit_block_uses to determine which bit to include. */
4064 df_record_exit_block_uses (bitmap exit_block_uses
)
4066 struct df_collection_rec collection_rec
;
4067 memset (&collection_rec
, 0, sizeof (struct df_collection_rec
));
4068 collection_rec
.use_vec
= VEC_alloc (df_ref
, stack
, FIRST_PSEUDO_REGISTER
);
4070 df_exit_block_uses_collect (&collection_rec
, exit_block_uses
);
4072 /* Process bb_refs chain */
4073 df_refs_add_to_chains (&collection_rec
, BASIC_BLOCK (EXIT_BLOCK
), NULL
);
4074 VEC_free (df_ref
, stack
, collection_rec
.use_vec
);
4078 /* Update the uses in the exit block. */
4081 df_update_exit_block_uses (void)
4084 bool changed
= false;
4086 bitmap_initialize (&refs
, &df_bitmap_obstack
);
4087 df_get_exit_block_use_set (&refs
);
4088 if (df
->exit_block_uses
)
4090 if (!bitmap_equal_p (df
->exit_block_uses
, &refs
))
4092 struct df_scan_bb_info
*bb_info
= df_scan_get_bb_info (EXIT_BLOCK
);
4093 df_ref_chain_delete_du_chain (bb_info
->artificial_uses
);
4094 df_ref_chain_delete (bb_info
->artificial_uses
);
4095 bb_info
->artificial_uses
= NULL
;
4101 struct df_scan_problem_data
*problem_data
4102 = (struct df_scan_problem_data
*) df_scan
->problem_data
;
4104 df
->exit_block_uses
= BITMAP_ALLOC (&problem_data
->reg_bitmaps
);
4110 df_record_exit_block_uses (&refs
);
4111 bitmap_copy (df
->exit_block_uses
,& refs
);
4112 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK
));
4114 bitmap_clear (&refs
);
4117 static bool initialized
= false;
4120 /* Initialize some platform specific structures. */
4123 df_hard_reg_init (void)
4125 #ifdef ELIMINABLE_REGS
4127 static const struct {const int from
, to
; } eliminables
[] = ELIMINABLE_REGS
;
4132 /* Record which registers will be eliminated. We use this in
4134 CLEAR_HARD_REG_SET (elim_reg_set
);
4136 #ifdef ELIMINABLE_REGS
4137 for (i
= 0; i
< (int) ARRAY_SIZE (eliminables
); i
++)
4138 SET_HARD_REG_BIT (elim_reg_set
, eliminables
[i
].from
);
4140 SET_HARD_REG_BIT (elim_reg_set
, FRAME_POINTER_REGNUM
);
4147 /* Recompute the parts of scanning that are based on regs_ever_live
4148 because something changed in that array. */
4151 df_update_entry_exit_and_calls (void)
4155 df_update_entry_block_defs ();
4156 df_update_exit_block_uses ();
4158 /* The call insns need to be rescanned because there may be changes
4159 in the set of registers clobbered across the call. */
4163 FOR_BB_INSNS (bb
, insn
)
4165 if (INSN_P (insn
) && CALL_P (insn
))
4166 df_insn_rescan (insn
);
4172 /* Return true if hard REG is actually used in the some instruction.
4173 There are a fair number of conditions that affect the setting of
4174 this array. See the comment in df.h for df->hard_regs_live_count
4175 for the conditions that this array is set. */
4178 df_hard_reg_used_p (unsigned int reg
)
4180 return df
->hard_regs_live_count
[reg
] != 0;
4184 /* A count of the number of times REG is actually used in the some
4185 instruction. There are a fair number of conditions that affect the
4186 setting of this array. See the comment in df.h for
4187 df->hard_regs_live_count for the conditions that this array is
4192 df_hard_reg_used_count (unsigned int reg
)
4194 return df
->hard_regs_live_count
[reg
];
4198 /* Get the value of regs_ever_live[REGNO]. */
4201 df_regs_ever_live_p (unsigned int regno
)
4203 return regs_ever_live
[regno
];
4207 /* Set regs_ever_live[REGNO] to VALUE. If this cause regs_ever_live
4208 to change, schedule that change for the next update. */
4211 df_set_regs_ever_live (unsigned int regno
, bool value
)
4213 if (regs_ever_live
[regno
] == value
)
4216 regs_ever_live
[regno
] = value
;
4218 df
->redo_entry_and_exit
= true;
4222 /* Compute "regs_ever_live" information from the underlying df
4223 information. Set the vector to all false if RESET. */
4226 df_compute_regs_ever_live (bool reset
)
4229 bool changed
= df
->redo_entry_and_exit
;
4232 memset (regs_ever_live
, 0, sizeof (regs_ever_live
));
4234 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4235 if ((!regs_ever_live
[i
]) && df_hard_reg_used_p (i
))
4237 regs_ever_live
[i
] = true;
4241 df_update_entry_exit_and_calls ();
4242 df
->redo_entry_and_exit
= false;
4246 /*----------------------------------------------------------------------------
4247 Dataflow ref information verification functions.
4249 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
4250 df_reg_chain_verify_unmarked (refs)
4251 df_refs_verify (VEC(stack,df_ref)*, ref*, bool)
4252 df_mws_verify (mw*, mw*, bool)
4253 df_insn_refs_verify (collection_rec, bb, insn, bool)
4254 df_bb_refs_verify (bb, refs, bool)
4256 df_exit_block_bitmap_verify (bool)
4257 df_entry_block_bitmap_verify (bool)
4259 ----------------------------------------------------------------------------*/
4262 /* Mark all refs in the reg chain. Verify that all of the registers
4263 are in the correct chain. */
4266 df_reg_chain_mark (df_ref refs
, unsigned int regno
,
4267 bool is_def
, bool is_eq_use
)
4269 unsigned int count
= 0;
4271 for (ref
= refs
; ref
; ref
= DF_REF_NEXT_REG (ref
))
4273 gcc_assert (!DF_REF_IS_REG_MARKED (ref
));
4275 /* If there are no def-use or use-def chains, make sure that all
4276 of the chains are clear. */
4278 gcc_assert (!DF_REF_CHAIN (ref
));
4280 /* Check to make sure the ref is in the correct chain. */
4281 gcc_assert (DF_REF_REGNO (ref
) == regno
);
4283 gcc_assert (DF_REF_REG_DEF_P (ref
));
4285 gcc_assert (!DF_REF_REG_DEF_P (ref
));
4288 gcc_assert ((DF_REF_FLAGS (ref
) & DF_REF_IN_NOTE
));
4290 gcc_assert ((DF_REF_FLAGS (ref
) & DF_REF_IN_NOTE
) == 0);
4292 if (DF_REF_NEXT_REG (ref
))
4293 gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref
)) == ref
);
4295 DF_REF_REG_MARK (ref
);
4301 /* Verify that all of the registers in the chain are unmarked. */
4304 df_reg_chain_verify_unmarked (df_ref refs
)
4307 for (ref
= refs
; ref
; ref
= DF_REF_NEXT_REG (ref
))
4308 gcc_assert (!DF_REF_IS_REG_MARKED (ref
));
4312 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4315 df_refs_verify (VEC(df_ref
,stack
) *new_rec
, df_ref
*old_rec
,
4321 FOR_EACH_VEC_ELT (df_ref
, new_rec
, ix
, new_ref
)
4323 if (*old_rec
== NULL
|| !df_ref_equal_p (new_ref
, *old_rec
))
4331 /* Abort if fail is called from the function level verifier. If
4332 that is the context, mark this reg as being seem. */
4335 gcc_assert (DF_REF_IS_REG_MARKED (*old_rec
));
4336 DF_REF_REG_UNMARK (*old_rec
);
4343 gcc_assert (*old_rec
== NULL
);
4345 return *old_rec
== NULL
;
4350 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4353 df_mws_verify (VEC(df_mw_hardreg_ptr
,stack
) *new_rec
,
4354 struct df_mw_hardreg
**old_rec
,
4358 struct df_mw_hardreg
*new_reg
;
4360 FOR_EACH_VEC_ELT (df_mw_hardreg_ptr
, new_rec
, ix
, new_reg
)
4362 if (*old_rec
== NULL
|| !df_mw_equal_p (new_reg
, *old_rec
))
4373 gcc_assert (*old_rec
== NULL
);
4375 return *old_rec
== NULL
;
4380 /* Return true if the existing insn refs information is complete and
4381 correct. Otherwise (i.e. if there's any missing or extra refs),
4382 return the correct df_ref chain in REFS_RETURN.
4384 If ABORT_IF_FAIL, leave the refs that are verified (already in the
4385 ref chain) as DF_REF_MARKED(). If it's false, then it's a per-insn
4386 verification mode instead of the whole function, so unmark
4389 If ABORT_IF_FAIL is set, this function never returns false. */
4392 df_insn_refs_verify (struct df_collection_rec
*collection_rec
,
4397 bool ret1
, ret2
, ret3
, ret4
;
4398 unsigned int uid
= INSN_UID (insn
);
4399 struct df_insn_info
*insn_info
= DF_INSN_INFO_GET (insn
);
4401 df_insn_refs_collect (collection_rec
, bb
, insn_info
);
4403 if (!DF_INSN_UID_DEFS (uid
))
4405 /* The insn_rec was created but it was never filled out. */
4412 /* Unfortunately we cannot opt out early if one of these is not
4413 right because the marks will not get cleared. */
4414 ret1
= df_refs_verify (collection_rec
->def_vec
, DF_INSN_UID_DEFS (uid
),
4416 ret2
= df_refs_verify (collection_rec
->use_vec
, DF_INSN_UID_USES (uid
),
4418 ret3
= df_refs_verify (collection_rec
->eq_use_vec
, DF_INSN_UID_EQ_USES (uid
),
4420 ret4
= df_mws_verify (collection_rec
->mw_vec
, DF_INSN_UID_MWS (uid
),
4422 return (ret1
&& ret2
&& ret3
&& ret4
);
4426 /* Return true if all refs in the basic block are correct and complete.
4427 Due to df_ref_chain_verify, it will cause all refs
4428 that are verified to have DF_REF_MARK bit set. */
4431 df_bb_verify (basic_block bb
)
4434 struct df_scan_bb_info
*bb_info
= df_scan_get_bb_info (bb
->index
);
4435 struct df_collection_rec collection_rec
;
4437 memset (&collection_rec
, 0, sizeof (struct df_collection_rec
));
4438 collection_rec
.def_vec
= VEC_alloc (df_ref
, stack
, 128);
4439 collection_rec
.use_vec
= VEC_alloc (df_ref
, stack
, 32);
4440 collection_rec
.eq_use_vec
= VEC_alloc (df_ref
, stack
, 32);
4441 collection_rec
.mw_vec
= VEC_alloc (df_mw_hardreg_ptr
, stack
, 32);
4443 gcc_assert (bb_info
);
4445 /* Scan the block, one insn at a time, from beginning to end. */
4446 FOR_BB_INSNS_REVERSE (bb
, insn
)
4450 df_insn_refs_verify (&collection_rec
, bb
, insn
, true);
4453 /* Do the artificial defs and uses. */
4454 df_bb_refs_collect (&collection_rec
, bb
);
4455 df_refs_verify (collection_rec
.def_vec
, df_get_artificial_defs (bb
->index
), true);
4456 df_refs_verify (collection_rec
.use_vec
, df_get_artificial_uses (bb
->index
), true);
4457 df_free_collection_rec (&collection_rec
);
4463 /* Returns true if the entry block has correct and complete df_ref set.
4464 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4467 df_entry_block_bitmap_verify (bool abort_if_fail
)
4469 bitmap_head entry_block_defs
;
4472 bitmap_initialize (&entry_block_defs
, &df_bitmap_obstack
);
4473 df_get_entry_block_def_set (&entry_block_defs
);
4475 is_eq
= bitmap_equal_p (&entry_block_defs
, df
->entry_block_defs
);
4477 if (!is_eq
&& abort_if_fail
)
4479 fprintf (stderr
, "entry_block_defs = ");
4480 df_print_regset (stderr
, &entry_block_defs
);
4481 fprintf (stderr
, "df->entry_block_defs = ");
4482 df_print_regset (stderr
, df
->entry_block_defs
);
4486 bitmap_clear (&entry_block_defs
);
4492 /* Returns true if the exit block has correct and complete df_ref set.
4493 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4496 df_exit_block_bitmap_verify (bool abort_if_fail
)
4498 bitmap_head exit_block_uses
;
4501 bitmap_initialize (&exit_block_uses
, &df_bitmap_obstack
);
4502 df_get_exit_block_use_set (&exit_block_uses
);
4504 is_eq
= bitmap_equal_p (&exit_block_uses
, df
->exit_block_uses
);
4506 if (!is_eq
&& abort_if_fail
)
4508 fprintf (stderr
, "exit_block_uses = ");
4509 df_print_regset (stderr
, &exit_block_uses
);
4510 fprintf (stderr
, "df->exit_block_uses = ");
4511 df_print_regset (stderr
, df
->exit_block_uses
);
4515 bitmap_clear (&exit_block_uses
);
4521 /* Return true if df_ref information for all insns in all blocks are
4522 correct and complete. */
4525 df_scan_verify (void)
4529 bitmap_head regular_block_artificial_uses
;
4530 bitmap_head eh_block_artificial_uses
;
4535 /* Verification is a 4 step process. */
4537 /* (1) All of the refs are marked by going through the reg chains. */
4538 for (i
= 0; i
< DF_REG_SIZE (df
); i
++)
4540 gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i
), i
, true, false)
4541 == DF_REG_DEF_COUNT(i
));
4542 gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i
), i
, false, false)
4543 == DF_REG_USE_COUNT(i
));
4544 gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i
), i
, false, true)
4545 == DF_REG_EQ_USE_COUNT(i
));
4548 /* (2) There are various bitmaps whose value may change over the
4549 course of the compilation. This step recomputes them to make
4550 sure that they have not slipped out of date. */
4551 bitmap_initialize (®ular_block_artificial_uses
, &df_bitmap_obstack
);
4552 bitmap_initialize (&eh_block_artificial_uses
, &df_bitmap_obstack
);
4554 df_get_regular_block_artificial_uses (®ular_block_artificial_uses
);
4555 df_get_eh_block_artificial_uses (&eh_block_artificial_uses
);
4557 bitmap_ior_into (&eh_block_artificial_uses
,
4558 ®ular_block_artificial_uses
);
4560 /* Check artificial_uses bitmaps didn't change. */
4561 gcc_assert (bitmap_equal_p (®ular_block_artificial_uses
,
4562 &df
->regular_block_artificial_uses
));
4563 gcc_assert (bitmap_equal_p (&eh_block_artificial_uses
,
4564 &df
->eh_block_artificial_uses
));
4566 bitmap_clear (®ular_block_artificial_uses
);
4567 bitmap_clear (&eh_block_artificial_uses
);
4569 /* Verify entry block and exit block. These only verify the bitmaps,
4570 the refs are verified in df_bb_verify. */
4571 df_entry_block_bitmap_verify (true);
4572 df_exit_block_bitmap_verify (true);
4574 /* (3) All of the insns in all of the blocks are traversed and the
4575 marks are cleared both in the artificial refs attached to the
4576 blocks and the real refs inside the insns. It is a failure to
4577 clear a mark that has not been set as this means that the ref in
4578 the block or insn was not in the reg chain. */
4583 /* (4) See if all reg chains are traversed a second time. This time
4584 a check is made that the marks are clear. A set mark would be a
4585 from a reg that is not in any insn or basic block. */
4587 for (i
= 0; i
< DF_REG_SIZE (df
); i
++)
4589 df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i
));
4590 df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i
));
4591 df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i
));