Fix DealII type problems.
[official-gcc/Ramakrishna.git] / gcc / df-scan.c
blob4428b8820e9c6bbad9d2fea3fb80564f0652076b
1 /* Scanning of rtl for dataflow analysis.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
3 2008, 2009 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "function.h"
34 #include "regs.h"
35 #include "output.h"
36 #include "alloc-pool.h"
37 #include "flags.h"
38 #include "hard-reg-set.h"
39 #include "basic-block.h"
40 #include "sbitmap.h"
41 #include "bitmap.h"
42 #include "timevar.h"
43 #include "tree.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "df.h"
47 #include "tree-pass.h"
/* Stack-allocated vector types used to collect refs while scanning.  */
DEF_VEC_P(df_ref);
DEF_VEC_ALLOC_P_STACK(df_ref);

#define VEC_df_ref_stack_alloc(alloc) VEC_stack_alloc (df_ref, alloc)

typedef struct df_mw_hardreg *df_mw_hardreg_ptr;

DEF_VEC_P(df_mw_hardreg_ptr);
DEF_VEC_ALLOC_P_STACK(df_mw_hardreg_ptr);

#define VEC_df_mw_hardreg_ptr_stack_alloc(alloc) \
  VEC_stack_alloc (df_mw_hardreg_ptr, alloc)

/* Fallbacks for targets whose *.md files do not provide these
   patterns or this macro.  */
#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif
/* The following two macros free the vecs that hold either the refs or
   the mw refs.  They are a little tricky because the vec that holds 0
   elements is special (a shared static sentinel) and is not to be
   freed.  */
#define df_scan_free_ref_vec(V) \
  do { \
    if (V && *V) \
      free (V); \
  } while (0)

#define df_scan_free_mws_vec(V) \
  do { \
    if (V && *V) \
      free (V); \
  } while (0)
91 /* The set of hard registers in eliminables[i].from. */
93 static HARD_REG_SET elim_reg_set;
95 /* Initialize ur_in and ur_out as if all hard registers were partially
96 available. */
98 struct df_collection_rec
100 VEC(df_ref,stack) *def_vec;
101 VEC(df_ref,stack) *use_vec;
102 VEC(df_ref,stack) *eq_use_vec;
103 VEC(df_mw_hardreg_ptr,stack) *mw_vec;
106 static df_ref df_null_ref_rec[1];
107 static struct df_mw_hardreg * df_null_mw_rec[1];
109 static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
110 rtx, rtx *,
111 basic_block, struct df_insn_info *,
112 enum df_ref_type, int ref_flags,
113 int, int, enum machine_mode);
114 static void df_def_record_1 (struct df_collection_rec *, rtx,
115 basic_block, struct df_insn_info *,
116 int ref_flags);
117 static void df_defs_record (struct df_collection_rec *, rtx,
118 basic_block, struct df_insn_info *,
119 int ref_flags);
120 static void df_uses_record (enum df_ref_class, struct df_collection_rec *,
121 rtx *, enum df_ref_type,
122 basic_block, struct df_insn_info *,
123 int ref_flags,
124 int, int, enum machine_mode);
126 static df_ref df_ref_create_structure (enum df_ref_class,
127 struct df_collection_rec *, rtx, rtx *,
128 basic_block, struct df_insn_info *,
129 enum df_ref_type, int ref_flags,
130 int, int, enum machine_mode);
132 static void df_insn_refs_collect (struct df_collection_rec*,
133 basic_block, struct df_insn_info *);
134 static void df_canonize_collection_rec (struct df_collection_rec *);
136 static void df_get_regular_block_artificial_uses (bitmap);
137 static void df_get_eh_block_artificial_uses (bitmap);
139 static void df_record_entry_block_defs (bitmap);
140 static void df_record_exit_block_uses (bitmap);
141 static void df_get_exit_block_use_set (bitmap);
142 static void df_get_entry_block_def_set (bitmap);
143 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
144 static void df_ref_chain_delete_du_chain (df_ref *);
145 static void df_ref_chain_delete (df_ref *);
147 static void df_refs_add_to_chains (struct df_collection_rec *,
148 basic_block, rtx);
150 static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
151 static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
152 static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
153 static void df_install_ref (df_ref, struct df_reg_info *,
154 struct df_ref_info *, bool);
156 static int df_ref_compare (const void *, const void *);
157 static int df_mw_compare (const void *, const void *);
159 /* Indexed by hardware reg number, is true if that register is ever
160 used in the current function.
162 In df-scan.c, this is set up to record the hard regs used
163 explicitly. Reload adds in the hard regs used for holding pseudo
164 regs. Final uses it to generate the code in the function prologue
165 and epilogue to save and restore registers as needed. */
167 static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
169 /*----------------------------------------------------------------------------
170 SCANNING DATAFLOW PROBLEM
172 There are several ways in which scanning looks just like the other
173 dataflow problems. It shares the all the mechanisms for local info
174 as well as basic block info. Where it differs is when and how often
175 it gets run. It also has no need for the iterative solver.
176 ----------------------------------------------------------------------------*/
178 /* Problem data for the scanning dataflow function. */
179 struct df_scan_problem_data
181 alloc_pool ref_base_pool;
182 alloc_pool ref_artificial_pool;
183 alloc_pool ref_regular_pool;
184 alloc_pool ref_extract_pool;
185 alloc_pool insn_pool;
186 alloc_pool reg_pool;
187 alloc_pool mw_reg_pool;
188 bitmap_obstack reg_bitmaps;
189 bitmap_obstack insn_bitmaps;
192 typedef struct df_scan_bb_info *df_scan_bb_info_t;
195 /* Internal function to shut down the scanning problem. */
196 static void
197 df_scan_free_internal (void)
199 struct df_scan_problem_data *problem_data
200 = (struct df_scan_problem_data *) df_scan->problem_data;
201 unsigned int i;
202 basic_block bb;
204 /* The vectors that hold the refs are not pool allocated because
205 they come in many sizes. This makes them impossible to delete
206 all at once. */
207 for (i = 0; i < DF_INSN_SIZE(); i++)
209 struct df_insn_info *insn_info = DF_INSN_UID_GET(i);
210 /* Skip the insns that have no insn_info or have been
211 deleted. */
212 if (insn_info)
214 df_scan_free_ref_vec (insn_info->defs);
215 df_scan_free_ref_vec (insn_info->uses);
216 df_scan_free_ref_vec (insn_info->eq_uses);
217 df_scan_free_mws_vec (insn_info->mw_hardregs);
221 FOR_ALL_BB (bb)
223 unsigned int bb_index = bb->index;
224 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
225 if (bb_info)
227 df_scan_free_ref_vec (bb_info->artificial_defs);
228 df_scan_free_ref_vec (bb_info->artificial_uses);
232 free (df->def_info.refs);
233 free (df->def_info.begin);
234 free (df->def_info.count);
235 memset (&df->def_info, 0, (sizeof (struct df_ref_info)));
237 free (df->use_info.refs);
238 free (df->use_info.begin);
239 free (df->use_info.count);
240 memset (&df->use_info, 0, (sizeof (struct df_ref_info)));
242 free (df->def_regs);
243 df->def_regs = NULL;
244 free (df->use_regs);
245 df->use_regs = NULL;
246 free (df->eq_use_regs);
247 df->eq_use_regs = NULL;
248 df->regs_size = 0;
249 DF_REG_SIZE(df) = 0;
251 free (df->insns);
252 df->insns = NULL;
253 DF_INSN_SIZE () = 0;
255 free (df_scan->block_info);
256 df_scan->block_info = NULL;
257 df_scan->block_info_size = 0;
259 BITMAP_FREE (df->hardware_regs_used);
260 BITMAP_FREE (df->regular_block_artificial_uses);
261 BITMAP_FREE (df->eh_block_artificial_uses);
262 BITMAP_FREE (df->entry_block_defs);
263 BITMAP_FREE (df->exit_block_uses);
264 BITMAP_FREE (df->insns_to_delete);
265 BITMAP_FREE (df->insns_to_rescan);
266 BITMAP_FREE (df->insns_to_notes_rescan);
268 free_alloc_pool (df_scan->block_pool);
269 free_alloc_pool (problem_data->ref_base_pool);
270 free_alloc_pool (problem_data->ref_artificial_pool);
271 free_alloc_pool (problem_data->ref_regular_pool);
272 free_alloc_pool (problem_data->ref_extract_pool);
273 free_alloc_pool (problem_data->insn_pool);
274 free_alloc_pool (problem_data->reg_pool);
275 free_alloc_pool (problem_data->mw_reg_pool);
276 bitmap_obstack_release (&problem_data->reg_bitmaps);
277 bitmap_obstack_release (&problem_data->insn_bitmaps);
278 free (df_scan->problem_data);
282 /* Set basic block info. */
284 static void
285 df_scan_set_bb_info (unsigned int index,
286 struct df_scan_bb_info *bb_info)
288 gcc_assert (df_scan);
289 df_grow_bb_info (df_scan);
290 df_scan->block_info[index] = (void *) bb_info;
294 /* Free basic block info. */
296 static void
297 df_scan_free_bb_info (basic_block bb, void *vbb_info)
299 struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
300 unsigned int bb_index = bb->index;
301 if (bb_info)
303 rtx insn;
304 FOR_BB_INSNS (bb, insn)
306 if (INSN_P (insn))
307 /* Record defs within INSN. */
308 df_insn_delete (bb, INSN_UID (insn));
311 if (bb_index < df_scan->block_info_size)
312 bb_info = df_scan_get_bb_info (bb_index);
314 /* Get rid of any artificial uses or defs. */
315 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
316 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
317 df_ref_chain_delete (bb_info->artificial_defs);
318 df_ref_chain_delete (bb_info->artificial_uses);
319 bb_info->artificial_defs = NULL;
320 bb_info->artificial_uses = NULL;
321 pool_free (df_scan->block_pool, bb_info);
326 /* Allocate the problem data for the scanning problem. This should be
327 called when the problem is created or when the entire function is to
328 be rescanned. */
329 void
330 df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
332 struct df_scan_problem_data *problem_data;
333 unsigned int insn_num = get_max_uid () + 1;
334 unsigned int block_size = 400;
335 basic_block bb;
337 /* Given the number of pools, this is really faster than tearing
338 everything apart. */
339 if (df_scan->problem_data)
340 df_scan_free_internal ();
342 df_scan->block_pool
343 = create_alloc_pool ("df_scan_block pool",
344 sizeof (struct df_scan_bb_info),
345 block_size);
347 problem_data = XNEW (struct df_scan_problem_data);
348 df_scan->problem_data = problem_data;
349 df_scan->computed = true;
351 problem_data->ref_base_pool
352 = create_alloc_pool ("df_scan ref base",
353 sizeof (struct df_base_ref), block_size);
354 problem_data->ref_artificial_pool
355 = create_alloc_pool ("df_scan ref artificial",
356 sizeof (struct df_artificial_ref), block_size);
357 problem_data->ref_regular_pool
358 = create_alloc_pool ("df_scan ref regular",
359 sizeof (struct df_regular_ref), block_size);
360 problem_data->ref_extract_pool
361 = create_alloc_pool ("df_scan ref extract",
362 sizeof (struct df_extract_ref), block_size);
363 problem_data->insn_pool
364 = create_alloc_pool ("df_scan insn",
365 sizeof (struct df_insn_info), block_size);
366 problem_data->reg_pool
367 = create_alloc_pool ("df_scan reg",
368 sizeof (struct df_reg_info), block_size);
369 problem_data->mw_reg_pool
370 = create_alloc_pool ("df_scan mw_reg",
371 sizeof (struct df_mw_hardreg), block_size);
373 bitmap_obstack_initialize (&problem_data->reg_bitmaps);
374 bitmap_obstack_initialize (&problem_data->insn_bitmaps);
376 insn_num += insn_num / 4;
377 df_grow_reg_info ();
379 df_grow_insn_info ();
380 df_grow_bb_info (df_scan);
382 FOR_ALL_BB (bb)
384 unsigned int bb_index = bb->index;
385 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
386 if (!bb_info)
388 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
389 df_scan_set_bb_info (bb_index, bb_info);
391 bb_info->artificial_defs = NULL;
392 bb_info->artificial_uses = NULL;
395 df->hardware_regs_used = BITMAP_ALLOC (&problem_data->reg_bitmaps);
396 df->regular_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
397 df->eh_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
398 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
399 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
400 df->insns_to_delete = BITMAP_ALLOC (&problem_data->insn_bitmaps);
401 df->insns_to_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
402 df->insns_to_notes_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
403 df_scan->optional_p = false;
407 /* Free all of the data associated with the scan problem. */
409 static void
410 df_scan_free (void)
412 if (df_scan->problem_data)
413 df_scan_free_internal ();
415 if (df->blocks_to_analyze)
417 BITMAP_FREE (df->blocks_to_analyze);
418 df->blocks_to_analyze = NULL;
421 free (df_scan);
424 /* Dump the preamble for DF_SCAN dump. */
425 static void
426 df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
428 int i;
429 int dcount = 0;
430 int ucount = 0;
431 int ecount = 0;
432 int icount = 0;
433 int ccount = 0;
434 basic_block bb;
435 rtx insn;
437 fprintf (file, ";; invalidated by call \t");
438 df_print_regset (file, regs_invalidated_by_call_regset);
439 fprintf (file, ";; hardware regs used \t");
440 df_print_regset (file, df->hardware_regs_used);
441 fprintf (file, ";; regular block artificial uses \t");
442 df_print_regset (file, df->regular_block_artificial_uses);
443 fprintf (file, ";; eh block artificial uses \t");
444 df_print_regset (file, df->eh_block_artificial_uses);
445 fprintf (file, ";; entry block defs \t");
446 df_print_regset (file, df->entry_block_defs);
447 fprintf (file, ";; exit block uses \t");
448 df_print_regset (file, df->exit_block_uses);
449 fprintf (file, ";; regs ever live \t");
450 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
451 if (df_regs_ever_live_p (i))
452 fprintf (file, " %d[%s]", i, reg_names[i]);
453 fprintf (file, "\n;; ref usage \t");
455 for (i = 0; i < (int)df->regs_inited; i++)
456 if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
458 const char * sep = "";
460 fprintf (file, "r%d={", i);
461 if (DF_REG_DEF_COUNT (i))
463 fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
464 sep = ",";
465 dcount += DF_REG_DEF_COUNT (i);
467 if (DF_REG_USE_COUNT (i))
469 fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
470 sep = ",";
471 ucount += DF_REG_USE_COUNT (i);
473 if (DF_REG_EQ_USE_COUNT (i))
475 fprintf (file, "%s%dd", sep, DF_REG_EQ_USE_COUNT (i));
476 ecount += DF_REG_EQ_USE_COUNT (i);
478 fprintf (file, "} ");
481 FOR_EACH_BB (bb)
482 FOR_BB_INSNS (bb, insn)
483 if (INSN_P (insn))
485 if (CALL_P (insn))
486 ccount++;
487 else
488 icount++;
491 fprintf (file, "\n;; total ref usage %d{%dd,%du,%de} in %d{%d regular + %d call} insns.\n",
492 dcount + ucount + ecount, dcount, ucount, ecount, icount + ccount, icount, ccount);
495 /* Dump the bb_info for a given basic block. */
496 static void
497 df_scan_start_block (basic_block bb, FILE *file)
499 struct df_scan_bb_info *bb_info
500 = df_scan_get_bb_info (bb->index);
502 if (bb_info)
504 fprintf (file, ";; bb %d artificial_defs: ", bb->index);
505 df_refs_chain_dump (bb_info->artificial_defs, true, file);
506 fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
507 df_refs_chain_dump (bb_info->artificial_uses, true, file);
508 fprintf (file, "\n");
510 #if 0
512 rtx insn;
513 FOR_BB_INSNS (bb, insn)
514 if (INSN_P (insn))
515 df_insn_debug (insn, false, file);
517 #endif
520 static struct df_problem problem_SCAN =
522 DF_SCAN, /* Problem id. */
523 DF_NONE, /* Direction. */
524 df_scan_alloc, /* Allocate the problem specific data. */
525 NULL, /* Reset global information. */
526 df_scan_free_bb_info, /* Free basic block info. */
527 NULL, /* Local compute function. */
528 NULL, /* Init the solution specific data. */
529 NULL, /* Iterative solver. */
530 NULL, /* Confluence operator 0. */
531 NULL, /* Confluence operator n. */
532 NULL, /* Transfer function. */
533 NULL, /* Finalize function. */
534 df_scan_free, /* Free all of the problem information. */
535 NULL, /* Remove this problem from the stack of dataflow problems. */
536 df_scan_start_dump, /* Debugging. */
537 df_scan_start_block, /* Debugging start block. */
538 NULL, /* Debugging end block. */
539 NULL, /* Incremental solution verify start. */
540 NULL, /* Incremental solution verify end. */
541 NULL, /* Dependent problem. */
542 TV_DF_SCAN, /* Timing variable. */
543 false /* Reset blocks on dropping out of blocks_to_analyze. */
547 /* Create a new DATAFLOW instance and add it to an existing instance
548 of DF. The returned structure is what is used to get at the
549 solution. */
551 void
552 df_scan_add_problem (void)
554 df_add_problem (&problem_SCAN);
558 /*----------------------------------------------------------------------------
559 Storage Allocation Utilities
560 ----------------------------------------------------------------------------*/
563 /* First, grow the reg_info information. If the current size is less than
564 the number of pseudos, grow to 25% more than the number of
565 pseudos.
567 Second, assure that all of the slots up to max_reg_num have been
568 filled with reg_info structures. */
570 void
571 df_grow_reg_info (void)
573 unsigned int max_reg = max_reg_num ();
574 unsigned int new_size = max_reg;
575 struct df_scan_problem_data *problem_data
576 = (struct df_scan_problem_data *) df_scan->problem_data;
577 unsigned int i;
579 if (df->regs_size < new_size)
581 new_size += new_size / 4;
582 df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
583 df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
584 df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
585 new_size);
586 df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
587 df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
588 df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
589 df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
590 df->regs_size = new_size;
593 for (i = df->regs_inited; i < max_reg; i++)
595 struct df_reg_info *reg_info;
597 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
598 memset (reg_info, 0, sizeof (struct df_reg_info));
599 df->def_regs[i] = reg_info;
600 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
601 memset (reg_info, 0, sizeof (struct df_reg_info));
602 df->use_regs[i] = reg_info;
603 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
604 memset (reg_info, 0, sizeof (struct df_reg_info));
605 df->eq_use_regs[i] = reg_info;
606 df->def_info.begin[i] = 0;
607 df->def_info.count[i] = 0;
608 df->use_info.begin[i] = 0;
609 df->use_info.count[i] = 0;
612 df->regs_inited = max_reg;
616 /* Grow the ref information. */
618 static void
619 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
621 if (ref_info->refs_size < new_size)
623 ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
624 memset (ref_info->refs + ref_info->refs_size, 0,
625 (new_size - ref_info->refs_size) *sizeof (df_ref));
626 ref_info->refs_size = new_size;
631 /* Check and grow the ref information if necessary. This routine
632 guarantees total_size + BITMAP_ADDEND amount of entries in refs
633 array. It updates ref_info->refs_size only and does not change
634 ref_info->total_size. */
636 static void
637 df_check_and_grow_ref_info (struct df_ref_info *ref_info,
638 unsigned bitmap_addend)
640 if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
642 int new_size = ref_info->total_size + bitmap_addend;
643 new_size += ref_info->total_size / 4;
644 df_grow_ref_info (ref_info, new_size);
649 /* Grow the ref information. If the current size is less than the
650 number of instructions, grow to 25% more than the number of
651 instructions. */
653 void
654 df_grow_insn_info (void)
656 unsigned int new_size = get_max_uid () + 1;
657 if (DF_INSN_SIZE () < new_size)
659 new_size += new_size / 4;
660 df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
661 memset (df->insns + df->insns_size, 0,
662 (new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
663 DF_INSN_SIZE () = new_size;
670 /*----------------------------------------------------------------------------
671 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
672 ----------------------------------------------------------------------------*/
674 /* Rescan all of the block_to_analyze or all of the blocks in the
675 function if df_set_blocks if blocks_to_analyze is NULL; */
677 void
678 df_scan_blocks (void)
680 basic_block bb;
682 df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
683 df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;
685 df_get_regular_block_artificial_uses (df->regular_block_artificial_uses);
686 df_get_eh_block_artificial_uses (df->eh_block_artificial_uses);
688 bitmap_ior_into (df->eh_block_artificial_uses,
689 df->regular_block_artificial_uses);
691 /* ENTRY and EXIT blocks have special defs/uses. */
692 df_get_entry_block_def_set (df->entry_block_defs);
693 df_record_entry_block_defs (df->entry_block_defs);
694 df_get_exit_block_use_set (df->exit_block_uses);
695 df_record_exit_block_uses (df->exit_block_uses);
696 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
697 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
699 /* Regular blocks */
700 FOR_EACH_BB (bb)
702 unsigned int bb_index = bb->index;
703 df_bb_refs_record (bb_index, true);
708 /* Create a new ref of type DF_REF_TYPE for register REG at address
709 LOC within INSN of BB. This function is only used externally.
711 If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
712 DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the
713 fields if they were constants. Otherwise they should be -1 if
714 those flags were set. */
716 df_ref
717 df_ref_create (rtx reg, rtx *loc, rtx insn,
718 basic_block bb,
719 enum df_ref_type ref_type,
720 int ref_flags,
721 int width, int offset, enum machine_mode mode)
723 df_ref ref;
724 struct df_reg_info **reg_info;
725 struct df_ref_info *ref_info;
726 df_ref *ref_rec;
727 df_ref **ref_rec_ptr;
728 unsigned int count = 0;
729 bool add_to_table;
730 enum df_ref_class cl;
732 df_grow_reg_info ();
734 /* You cannot hack artificial refs. */
735 gcc_assert (insn);
737 if (width != -1 || offset != -1)
738 cl = DF_REF_EXTRACT;
739 else if (loc)
740 cl = DF_REF_REGULAR;
741 else
742 cl = DF_REF_BASE;
743 ref = df_ref_create_structure (cl, NULL, reg, loc, bb, DF_INSN_INFO_GET (insn),
744 ref_type, ref_flags,
745 width, offset, mode);
747 if (DF_REF_REG_DEF_P (ref))
749 reg_info = df->def_regs;
750 ref_info = &df->def_info;
751 ref_rec_ptr = &DF_INSN_DEFS (insn);
752 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
754 else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
756 reg_info = df->eq_use_regs;
757 ref_info = &df->use_info;
758 ref_rec_ptr = &DF_INSN_EQ_USES (insn);
759 switch (ref_info->ref_order)
761 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
762 case DF_REF_ORDER_BY_REG_WITH_NOTES:
763 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
764 add_to_table = true;
765 break;
766 default:
767 add_to_table = false;
768 break;
771 else
773 reg_info = df->use_regs;
774 ref_info = &df->use_info;
775 ref_rec_ptr = &DF_INSN_USES (insn);
776 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
779 /* Do not add if ref is not in the right blocks. */
780 if (add_to_table && df->analyze_subset)
781 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
783 df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);
785 if (add_to_table)
786 switch (ref_info->ref_order)
788 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
789 case DF_REF_ORDER_BY_REG_WITH_NOTES:
790 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
791 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
792 break;
793 default:
794 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
795 break;
798 ref_rec = *ref_rec_ptr;
799 while (*ref_rec)
801 count++;
802 ref_rec++;
805 ref_rec = *ref_rec_ptr;
806 if (count)
808 ref_rec = XRESIZEVEC (df_ref, ref_rec, count+2);
809 *ref_rec_ptr = ref_rec;
810 ref_rec[count] = ref;
811 ref_rec[count+1] = NULL;
812 qsort (ref_rec, count + 1, sizeof (df_ref), df_ref_compare);
814 else
816 df_ref *ref_rec = XNEWVEC (df_ref, 2);
817 ref_rec[0] = ref;
818 ref_rec[1] = NULL;
819 *ref_rec_ptr = ref_rec;
822 #if 0
823 if (dump_file)
825 fprintf (dump_file, "adding ref ");
826 df_ref_debug (ref, dump_file);
828 #endif
829 /* By adding the ref directly, df_insn_rescan my not find any
830 differences even though the block will have changed. So we need
831 to mark the block dirty ourselves. */
832 if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
833 df_set_bb_dirty (bb);
835 return ref;
840 /*----------------------------------------------------------------------------
841 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
842 ----------------------------------------------------------------------------*/
844 static void
845 df_free_ref (df_ref ref)
847 struct df_scan_problem_data *problem_data
848 = (struct df_scan_problem_data *) df_scan->problem_data;
850 switch (DF_REF_CLASS (ref))
852 case DF_REF_BASE:
853 pool_free (problem_data->ref_base_pool, ref);
854 break;
856 case DF_REF_ARTIFICIAL:
857 pool_free (problem_data->ref_artificial_pool, ref);
858 break;
860 case DF_REF_REGULAR:
861 pool_free (problem_data->ref_regular_pool, ref);
862 break;
864 case DF_REF_EXTRACT:
865 pool_free (problem_data->ref_extract_pool, ref);
866 break;
871 /* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
872 Also delete the def-use or use-def chain if it exists. */
874 static void
875 df_reg_chain_unlink (df_ref ref)
877 df_ref next = DF_REF_NEXT_REG (ref);
878 df_ref prev = DF_REF_PREV_REG (ref);
879 int id = DF_REF_ID (ref);
880 struct df_reg_info *reg_info;
881 df_ref *refs = NULL;
883 if (DF_REF_REG_DEF_P (ref))
885 int regno = DF_REF_REGNO (ref);
886 reg_info = DF_REG_DEF_GET (regno);
887 refs = df->def_info.refs;
889 else
891 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
893 reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
894 switch (df->use_info.ref_order)
896 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
897 case DF_REF_ORDER_BY_REG_WITH_NOTES:
898 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
899 refs = df->use_info.refs;
900 break;
901 default:
902 break;
905 else
907 reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
908 refs = df->use_info.refs;
912 if (refs)
914 if (df->analyze_subset)
916 if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
917 refs[id] = NULL;
919 else
920 refs[id] = NULL;
923 /* Delete any def-use or use-def chains that start here. It is
924 possible that there is trash in this field. This happens for
925 insns that have been deleted when rescanning has been deferred
926 and the chain problem has also been deleted. The chain tear down
927 code skips deleted insns. */
928 if (df_chain && DF_REF_CHAIN (ref))
929 df_chain_unlink (ref);
931 reg_info->n_refs--;
932 if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
934 gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
935 df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
938 /* Unlink from the reg chain. If there is no prev, this is the
939 first of the list. If not, just join the next and prev. */
940 if (prev)
941 DF_REF_NEXT_REG (prev) = next;
942 else
944 gcc_assert (reg_info->reg_chain == ref);
945 reg_info->reg_chain = next;
947 if (next)
948 DF_REF_PREV_REG (next) = prev;
950 df_free_ref (ref);
954 /* Remove REF from VEC. */
956 static void
957 df_ref_compress_rec (df_ref **vec_ptr, df_ref ref)
959 df_ref *vec = *vec_ptr;
961 if (vec[1])
963 while (*vec && *vec != ref)
964 vec++;
966 while (*vec)
968 *vec = *(vec+1);
969 vec++;
972 else
974 free (vec);
975 *vec_ptr = df_null_ref_rec;
980 /* Unlink REF from all def-use/use-def chains, etc. */
982 void
983 df_ref_remove (df_ref ref)
985 #if 0
986 if (dump_file)
988 fprintf (dump_file, "removing ref ");
989 df_ref_debug (ref, dump_file);
991 #endif
993 if (DF_REF_REG_DEF_P (ref))
995 if (DF_REF_IS_ARTIFICIAL (ref))
997 struct df_scan_bb_info *bb_info
998 = df_scan_get_bb_info (DF_REF_BBNO (ref));
999 df_ref_compress_rec (&bb_info->artificial_defs, ref);
1001 else
1003 unsigned int uid = DF_REF_INSN_UID (ref);
1004 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
1005 df_ref_compress_rec (&insn_rec->defs, ref);
1008 else
1010 if (DF_REF_IS_ARTIFICIAL (ref))
1012 struct df_scan_bb_info *bb_info
1013 = df_scan_get_bb_info (DF_REF_BBNO (ref));
1014 df_ref_compress_rec (&bb_info->artificial_uses, ref);
1016 else
1018 unsigned int uid = DF_REF_INSN_UID (ref);
1019 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
1021 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
1022 df_ref_compress_rec (&insn_rec->eq_uses, ref);
1023 else
1024 df_ref_compress_rec (&insn_rec->uses, ref);
1028 /* By deleting the ref directly, df_insn_rescan my not find any
1029 differences even though the block will have changed. So we need
1030 to mark the block dirty ourselves. */
1031 if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
1032 df_set_bb_dirty (DF_REF_BB (ref));
1033 df_reg_chain_unlink (ref);
1037 /* Create the insn record for INSN. If there was one there, zero it
1038 out. */
1040 struct df_insn_info *
1041 df_insn_create_insn_record (rtx insn)
1043 struct df_scan_problem_data *problem_data
1044 = (struct df_scan_problem_data *) df_scan->problem_data;
1045 struct df_insn_info *insn_rec;
1047 df_grow_insn_info ();
1048 insn_rec = DF_INSN_INFO_GET (insn);
1049 if (!insn_rec)
1051 insn_rec = (struct df_insn_info *) pool_alloc (problem_data->insn_pool);
1052 DF_INSN_INFO_SET (insn, insn_rec);
1054 memset (insn_rec, 0, sizeof (struct df_insn_info));
1055 insn_rec->insn = insn;
1056 return insn_rec;
1060 /* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain. */
1062 static void
1063 df_ref_chain_delete_du_chain (df_ref *ref_rec)
1065 while (*ref_rec)
1067 df_ref ref = *ref_rec;
1068 /* CHAIN is allocated by DF_CHAIN. So make sure to
1069 pass df_scan instance for the problem. */
1070 if (DF_REF_CHAIN (ref))
1071 df_chain_unlink (ref);
1072 ref_rec++;
1077 /* Delete all refs in the ref chain. */
1079 static void
1080 df_ref_chain_delete (df_ref *ref_rec)
1082 df_ref *start = ref_rec;
1083 while (*ref_rec)
1085 df_reg_chain_unlink (*ref_rec);
1086 ref_rec++;
1089 /* If the list is empty, it has a special shared element that is not
1090 to be deleted. */
1091 if (*start)
1092 free (start);
1096 /* Delete the hardreg chain. */
1098 static void
1099 df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
1101 struct df_scan_problem_data *problem_data;
1103 if (!hardregs)
1104 return;
1106 problem_data = (struct df_scan_problem_data *) df_scan->problem_data;
1108 while (*hardregs)
1110 pool_free (problem_data->mw_reg_pool, *hardregs);
1111 hardregs++;
1116 /* Delete all of the refs information from INSN. BB must be passed in
1117 except when called from df_process_deferred_rescans to mark the block
1118 as dirty. */
1120 void
1121 df_insn_delete (basic_block bb, unsigned int uid)
1123 struct df_insn_info *insn_info = NULL;
1124 if (!df)
1125 return;
1127 df_grow_bb_info (df_scan);
1128 df_grow_reg_info ();
1130 /* The block must be marked as dirty now, rather than later as in
1131 df_insn_rescan and df_notes_rescan because it may not be there at
1132 rescanning time and the mark would blow up. */
1133 if (bb)
1134 df_set_bb_dirty (bb);
1136 insn_info = DF_INSN_UID_SAFE_GET (uid);
1138 /* The client has deferred rescanning. */
1139 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1141 if (insn_info)
1143 bitmap_clear_bit (df->insns_to_rescan, uid);
1144 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1145 bitmap_set_bit (df->insns_to_delete, uid);
1147 if (dump_file)
1148 fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
1149 return;
1152 if (dump_file)
1153 fprintf (dump_file, "deleting insn with uid = %d.\n", uid);
1155 bitmap_clear_bit (df->insns_to_delete, uid);
1156 bitmap_clear_bit (df->insns_to_rescan, uid);
1157 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1158 if (insn_info)
1160 struct df_scan_problem_data *problem_data
1161 = (struct df_scan_problem_data *) df_scan->problem_data;
1163 /* In general, notes do not have the insn_info fields
1164 initialized. However, combine deletes insns by changing them
1165 to notes. How clever. So we cannot just check if it is a
1166 valid insn before short circuiting this code, we need to see
1167 if we actually initialized it. */
1168 if (insn_info->defs)
1170 df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
1172 if (df_chain)
1174 df_ref_chain_delete_du_chain (insn_info->defs);
1175 df_ref_chain_delete_du_chain (insn_info->uses);
1176 df_ref_chain_delete_du_chain (insn_info->eq_uses);
1179 df_ref_chain_delete (insn_info->defs);
1180 df_ref_chain_delete (insn_info->uses);
1181 df_ref_chain_delete (insn_info->eq_uses);
1183 pool_free (problem_data->insn_pool, insn_info);
1184 DF_INSN_UID_SET (uid, NULL);
1189 /* Free all of the refs and the mw_hardregs in COLLECTION_REC. */
1191 static void
1192 df_free_collection_rec (struct df_collection_rec *collection_rec)
1194 unsigned int ix;
1195 struct df_scan_problem_data *problem_data
1196 = (struct df_scan_problem_data *) df_scan->problem_data;
1197 df_ref ref;
1198 struct df_mw_hardreg *mw;
1200 for (ix = 0; VEC_iterate (df_ref, collection_rec->def_vec, ix, ref); ++ix)
1201 df_free_ref (ref);
1202 for (ix = 0; VEC_iterate (df_ref, collection_rec->use_vec, ix, ref); ++ix)
1203 df_free_ref (ref);
1204 for (ix = 0; VEC_iterate (df_ref, collection_rec->eq_use_vec, ix, ref); ++ix)
1205 df_free_ref (ref);
1206 for (ix = 0;
1207 VEC_iterate (df_mw_hardreg_ptr, collection_rec->mw_vec, ix, mw);
1208 ++ix)
1209 pool_free (problem_data->mw_reg_pool, mw);
1211 VEC_free (df_ref, stack, collection_rec->def_vec);
1212 VEC_free (df_ref, stack, collection_rec->use_vec);
1213 VEC_free (df_ref, stack, collection_rec->eq_use_vec);
1214 VEC_free (df_mw_hardreg_ptr, stack, collection_rec->mw_vec);
1217 /* Rescan INSN. Return TRUE if the rescanning produced any changes. */
1219 bool
1220 df_insn_rescan (rtx insn)
1222 unsigned int uid = INSN_UID (insn);
1223 struct df_insn_info *insn_info = NULL;
1224 basic_block bb = BLOCK_FOR_INSN (insn);
1225 struct df_collection_rec collection_rec;
1227 if ((!df) || (!INSN_P (insn)))
1228 return false;
1230 if (!bb)
1232 if (dump_file)
1233 fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
1234 return false;
1237 /* The client has disabled rescanning and plans to do it itself. */
1238 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1239 return false;
1241 df_grow_bb_info (df_scan);
1242 df_grow_reg_info ();
1244 insn_info = DF_INSN_UID_SAFE_GET (uid);
1246 /* The client has deferred rescanning. */
1247 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1249 if (!insn_info)
1251 insn_info = df_insn_create_insn_record (insn);
1252 insn_info->defs = df_null_ref_rec;
1253 insn_info->uses = df_null_ref_rec;
1254 insn_info->eq_uses = df_null_ref_rec;
1255 insn_info->mw_hardregs = df_null_mw_rec;
1257 if (dump_file)
1258 fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);
1260 bitmap_clear_bit (df->insns_to_delete, uid);
1261 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1262 bitmap_set_bit (df->insns_to_rescan, INSN_UID (insn));
1263 return false;
1266 collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
1267 collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
1268 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
1269 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
1271 bitmap_clear_bit (df->insns_to_delete, uid);
1272 bitmap_clear_bit (df->insns_to_rescan, uid);
1273 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1274 if (insn_info)
1276 int luid;
1277 bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
1278 /* If there's no change, return false. */
1279 if (the_same)
1281 df_free_collection_rec (&collection_rec);
1282 if (dump_file)
1283 fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
1284 return false;
1286 if (dump_file)
1287 fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);
1289 /* There's change - we need to delete the existing info.
1290 Since the insn isn't moved, we can salvage its LUID. */
1291 luid = DF_INSN_LUID (insn);
1292 df_insn_delete (NULL, uid);
1293 df_insn_create_insn_record (insn);
1294 DF_INSN_LUID (insn) = luid;
1296 else
1298 struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
1299 df_insn_refs_collect (&collection_rec, bb, insn_info);
1300 if (dump_file)
1301 fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
1304 df_refs_add_to_chains (&collection_rec, bb, insn);
1305 df_set_bb_dirty (bb);
1307 VEC_free (df_ref, stack, collection_rec.def_vec);
1308 VEC_free (df_ref, stack, collection_rec.use_vec);
1309 VEC_free (df_ref, stack, collection_rec.eq_use_vec);
1310 VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
1312 return true;
1315 /* Same as df_insn_rescan, but don't mark the basic block as
1316 dirty. */
1318 bool
1319 df_insn_rescan_debug_internal (rtx insn)
1321 unsigned int uid = INSN_UID (insn);
1322 struct df_insn_info *insn_info;
1324 gcc_assert (DEBUG_INSN_P (insn));
1325 gcc_assert (VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));
1327 if (!df)
1328 return false;
1330 insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
1331 if (!insn_info)
1332 return false;
1334 if (dump_file)
1335 fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);
1337 bitmap_clear_bit (df->insns_to_delete, uid);
1338 bitmap_clear_bit (df->insns_to_rescan, uid);
1339 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1341 if (!insn_info->defs)
1342 return false;
1344 if (insn_info->defs == df_null_ref_rec
1345 && insn_info->uses == df_null_ref_rec
1346 && insn_info->eq_uses == df_null_ref_rec
1347 && insn_info->mw_hardregs == df_null_mw_rec)
1348 return false;
1350 df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
1352 if (df_chain)
1354 df_ref_chain_delete_du_chain (insn_info->defs);
1355 df_ref_chain_delete_du_chain (insn_info->uses);
1356 df_ref_chain_delete_du_chain (insn_info->eq_uses);
1359 df_ref_chain_delete (insn_info->defs);
1360 df_ref_chain_delete (insn_info->uses);
1361 df_ref_chain_delete (insn_info->eq_uses);
1363 insn_info->defs = df_null_ref_rec;
1364 insn_info->uses = df_null_ref_rec;
1365 insn_info->eq_uses = df_null_ref_rec;
1366 insn_info->mw_hardregs = df_null_mw_rec;
1368 return true;
1372 /* Rescan all of the insns in the function. Note that the artificial
1373 uses and defs are not touched. This function will destroy def-se
1374 or use-def chains. */
1376 void
1377 df_insn_rescan_all (void)
1379 bool no_insn_rescan = false;
1380 bool defer_insn_rescan = false;
1381 basic_block bb;
1382 bitmap_iterator bi;
1383 unsigned int uid;
1384 bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
1386 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1388 df_clear_flags (DF_NO_INSN_RESCAN);
1389 no_insn_rescan = true;
1392 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1394 df_clear_flags (DF_DEFER_INSN_RESCAN);
1395 defer_insn_rescan = true;
1398 bitmap_copy (tmp, df->insns_to_delete);
1399 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1401 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1402 if (insn_info)
1403 df_insn_delete (NULL, uid);
1406 BITMAP_FREE (tmp);
1407 bitmap_clear (df->insns_to_delete);
1408 bitmap_clear (df->insns_to_rescan);
1409 bitmap_clear (df->insns_to_notes_rescan);
1411 FOR_EACH_BB (bb)
1413 rtx insn;
1414 FOR_BB_INSNS (bb, insn)
1416 df_insn_rescan (insn);
1420 if (no_insn_rescan)
1421 df_set_flags (DF_NO_INSN_RESCAN);
1422 if (defer_insn_rescan)
1423 df_set_flags (DF_DEFER_INSN_RESCAN);
1427 /* Process all of the deferred rescans or deletions. */
1429 void
1430 df_process_deferred_rescans (void)
1432 bool no_insn_rescan = false;
1433 bool defer_insn_rescan = false;
1434 bitmap_iterator bi;
1435 unsigned int uid;
1436 bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
1438 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1440 df_clear_flags (DF_NO_INSN_RESCAN);
1441 no_insn_rescan = true;
1444 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1446 df_clear_flags (DF_DEFER_INSN_RESCAN);
1447 defer_insn_rescan = true;
1450 if (dump_file)
1451 fprintf (dump_file, "starting the processing of deferred insns\n");
1453 bitmap_copy (tmp, df->insns_to_delete);
1454 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1456 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1457 if (insn_info)
1458 df_insn_delete (NULL, uid);
1461 bitmap_copy (tmp, df->insns_to_rescan);
1462 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1464 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1465 if (insn_info)
1466 df_insn_rescan (insn_info->insn);
1469 bitmap_copy (tmp, df->insns_to_notes_rescan);
1470 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1472 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1473 if (insn_info)
1474 df_notes_rescan (insn_info->insn);
1477 if (dump_file)
1478 fprintf (dump_file, "ending the processing of deferred insns\n");
1480 BITMAP_FREE (tmp);
1481 bitmap_clear (df->insns_to_delete);
1482 bitmap_clear (df->insns_to_rescan);
1483 bitmap_clear (df->insns_to_notes_rescan);
1485 if (no_insn_rescan)
1486 df_set_flags (DF_NO_INSN_RESCAN);
1487 if (defer_insn_rescan)
1488 df_set_flags (DF_DEFER_INSN_RESCAN);
1490 /* If someone changed regs_ever_live during this pass, fix up the
1491 entry and exit blocks. */
1492 if (df->redo_entry_and_exit)
1494 df_update_entry_exit_and_calls ();
1495 df->redo_entry_and_exit = false;
1500 /* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1501 the uses if INCLUDE_USES. Include the eq_uses if
1502 INCLUDE_EQ_USES. */
1504 static unsigned int
1505 df_count_refs (bool include_defs, bool include_uses,
1506 bool include_eq_uses)
1508 unsigned int regno;
1509 int size = 0;
1510 unsigned int m = df->regs_inited;
1512 for (regno = 0; regno < m; regno++)
1514 if (include_defs)
1515 size += DF_REG_DEF_COUNT (regno);
1516 if (include_uses)
1517 size += DF_REG_USE_COUNT (regno);
1518 if (include_eq_uses)
1519 size += DF_REG_EQ_USE_COUNT (regno);
1521 return size;
1525 /* Take build ref table for either the uses or defs from the reg-use
1526 or reg-def chains. This version processes the refs in reg order
1527 which is likely to be best if processing the whole function. */
1529 static void
1530 df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
1531 bool include_defs,
1532 bool include_uses,
1533 bool include_eq_uses)
1535 unsigned int m = df->regs_inited;
1536 unsigned int regno;
1537 unsigned int offset = 0;
1538 unsigned int start;
1540 if (df->changeable_flags & DF_NO_HARD_REGS)
1542 start = FIRST_PSEUDO_REGISTER;
1543 memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1544 memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1546 else
1547 start = 0;
1549 ref_info->total_size
1550 = df_count_refs (include_defs, include_uses, include_eq_uses);
1552 df_check_and_grow_ref_info (ref_info, 1);
1554 for (regno = start; regno < m; regno++)
1556 int count = 0;
1557 ref_info->begin[regno] = offset;
1558 if (include_defs)
1560 df_ref ref = DF_REG_DEF_CHAIN (regno);
1561 while (ref)
1563 ref_info->refs[offset] = ref;
1564 DF_REF_ID (ref) = offset++;
1565 count++;
1566 ref = DF_REF_NEXT_REG (ref);
1567 gcc_assert (offset < ref_info->refs_size);
1570 if (include_uses)
1572 df_ref ref = DF_REG_USE_CHAIN (regno);
1573 while (ref)
1575 ref_info->refs[offset] = ref;
1576 DF_REF_ID (ref) = offset++;
1577 count++;
1578 ref = DF_REF_NEXT_REG (ref);
1579 gcc_assert (offset < ref_info->refs_size);
1582 if (include_eq_uses)
1584 df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
1585 while (ref)
1587 ref_info->refs[offset] = ref;
1588 DF_REF_ID (ref) = offset++;
1589 count++;
1590 ref = DF_REF_NEXT_REG (ref);
1591 gcc_assert (offset < ref_info->refs_size);
1594 ref_info->count[regno] = count;
1597 /* The bitmap size is not decremented when refs are deleted. So
1598 reset it now that we have squished out all of the empty
1599 slots. */
1600 ref_info->table_size = offset;
1604 /* Take build ref table for either the uses or defs from the reg-use
1605 or reg-def chains. This version processes the refs in insn order
1606 which is likely to be best if processing some segment of the
1607 function. */
1609 static void
1610 df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
1611 bool include_defs,
1612 bool include_uses,
1613 bool include_eq_uses)
1615 bitmap_iterator bi;
1616 unsigned int bb_index;
1617 unsigned int m = df->regs_inited;
1618 unsigned int offset = 0;
1619 unsigned int r;
1620 unsigned int start
1621 = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;
1623 memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
1624 memset (ref_info->count, 0, sizeof (int) * df->regs_inited);
1626 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1627 df_check_and_grow_ref_info (ref_info, 1);
1629 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1631 basic_block bb = BASIC_BLOCK (bb_index);
1632 rtx insn;
1633 df_ref *ref_rec;
1635 if (include_defs)
1636 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
1638 unsigned int regno = DF_REF_REGNO (*ref_rec);
1639 ref_info->count[regno]++;
1641 if (include_uses)
1642 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
1644 unsigned int regno = DF_REF_REGNO (*ref_rec);
1645 ref_info->count[regno]++;
1648 FOR_BB_INSNS (bb, insn)
1650 if (INSN_P (insn))
1652 unsigned int uid = INSN_UID (insn);
1654 if (include_defs)
1655 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1657 unsigned int regno = DF_REF_REGNO (*ref_rec);
1658 ref_info->count[regno]++;
1660 if (include_uses)
1661 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1663 unsigned int regno = DF_REF_REGNO (*ref_rec);
1664 ref_info->count[regno]++;
1666 if (include_eq_uses)
1667 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1669 unsigned int regno = DF_REF_REGNO (*ref_rec);
1670 ref_info->count[regno]++;
1676 for (r = start; r < m; r++)
1678 ref_info->begin[r] = offset;
1679 offset += ref_info->count[r];
1680 ref_info->count[r] = 0;
1683 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1685 basic_block bb = BASIC_BLOCK (bb_index);
1686 rtx insn;
1687 df_ref *ref_rec;
1689 if (include_defs)
1690 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
1692 df_ref ref = *ref_rec;
1693 unsigned int regno = DF_REF_REGNO (ref);
1694 if (regno >= start)
1696 unsigned int id
1697 = ref_info->begin[regno] + ref_info->count[regno]++;
1698 DF_REF_ID (ref) = id;
1699 ref_info->refs[id] = ref;
1702 if (include_uses)
1703 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
1705 df_ref ref = *ref_rec;
1706 unsigned int regno = DF_REF_REGNO (ref);
1707 if (regno >= start)
1709 unsigned int id
1710 = ref_info->begin[regno] + ref_info->count[regno]++;
1711 DF_REF_ID (ref) = id;
1712 ref_info->refs[id] = ref;
1716 FOR_BB_INSNS (bb, insn)
1718 if (INSN_P (insn))
1720 unsigned int uid = INSN_UID (insn);
1722 if (include_defs)
1723 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1725 df_ref ref = *ref_rec;
1726 unsigned int regno = DF_REF_REGNO (ref);
1727 if (regno >= start)
1729 unsigned int id
1730 = ref_info->begin[regno] + ref_info->count[regno]++;
1731 DF_REF_ID (ref) = id;
1732 ref_info->refs[id] = ref;
1735 if (include_uses)
1736 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1738 df_ref ref = *ref_rec;
1739 unsigned int regno = DF_REF_REGNO (ref);
1740 if (regno >= start)
1742 unsigned int id
1743 = ref_info->begin[regno] + ref_info->count[regno]++;
1744 DF_REF_ID (ref) = id;
1745 ref_info->refs[id] = ref;
1748 if (include_eq_uses)
1749 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1751 df_ref ref = *ref_rec;
1752 unsigned int regno = DF_REF_REGNO (ref);
1753 if (regno >= start)
1755 unsigned int id
1756 = ref_info->begin[regno] + ref_info->count[regno]++;
1757 DF_REF_ID (ref) = id;
1758 ref_info->refs[id] = ref;
1765 /* The bitmap size is not decremented when refs are deleted. So
1766 reset it now that we have squished out all of the empty
1767 slots. */
1769 ref_info->table_size = offset;
1772 /* Take build ref table for either the uses or defs from the reg-use
1773 or reg-def chains. */
1775 static void
1776 df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
1777 bool include_defs,
1778 bool include_uses,
1779 bool include_eq_uses)
1781 if (df->analyze_subset)
1782 df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
1783 include_uses, include_eq_uses);
1784 else
1785 df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
1786 include_uses, include_eq_uses);
1790 /* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET. */
1791 static unsigned int
1792 df_add_refs_to_table (unsigned int offset,
1793 struct df_ref_info *ref_info,
1794 df_ref *ref_vec)
1796 while (*ref_vec)
1798 df_ref ref = *ref_vec;
1799 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
1800 || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
1802 ref_info->refs[offset] = ref;
1803 DF_REF_ID (*ref_vec) = offset++;
1805 ref_vec++;
1807 return offset;
1811 /* Count the number of refs in all of the insns of BB. Include the
1812 defs if INCLUDE_DEFS. Include the uses if INCLUDE_USES. Include the
1813 eq_uses if INCLUDE_EQ_USES. */
1815 static unsigned int
1816 df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
1817 struct df_ref_info *ref_info,
1818 bool include_defs, bool include_uses,
1819 bool include_eq_uses)
1821 rtx insn;
1823 if (include_defs)
1824 offset = df_add_refs_to_table (offset, ref_info,
1825 df_get_artificial_defs (bb->index));
1826 if (include_uses)
1827 offset = df_add_refs_to_table (offset, ref_info,
1828 df_get_artificial_uses (bb->index));
1830 FOR_BB_INSNS (bb, insn)
1831 if (INSN_P (insn))
1833 unsigned int uid = INSN_UID (insn);
1834 if (include_defs)
1835 offset = df_add_refs_to_table (offset, ref_info,
1836 DF_INSN_UID_DEFS (uid));
1837 if (include_uses)
1838 offset = df_add_refs_to_table (offset, ref_info,
1839 DF_INSN_UID_USES (uid));
1840 if (include_eq_uses)
1841 offset = df_add_refs_to_table (offset, ref_info,
1842 DF_INSN_UID_EQ_USES (uid));
1844 return offset;
1848 /* Organize the refs by insn into the table in REF_INFO. If
1849 blocks_to_analyze is defined, use that set, otherwise the entire
1850 program. Include the defs if INCLUDE_DEFS. Include the uses if
1851 INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES. */
1853 static void
1854 df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
1855 bool include_defs, bool include_uses,
1856 bool include_eq_uses)
1858 basic_block bb;
1859 unsigned int offset = 0;
1861 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1862 df_check_and_grow_ref_info (ref_info, 1);
1863 if (df->blocks_to_analyze)
1865 bitmap_iterator bi;
1866 unsigned int index;
1868 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
1870 offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
1871 include_defs, include_uses,
1872 include_eq_uses);
1875 ref_info->table_size = offset;
1877 else
1879 FOR_ALL_BB (bb)
1880 offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
1881 include_defs, include_uses,
1882 include_eq_uses);
1883 ref_info->table_size = offset;
1888 /* If the use refs in DF are not organized, reorganize them. */
1890 void
1891 df_maybe_reorganize_use_refs (enum df_ref_order order)
1893 if (order == df->use_info.ref_order)
1894 return;
1896 switch (order)
1898 case DF_REF_ORDER_BY_REG:
1899 df_reorganize_refs_by_reg (&df->use_info, false, true, false);
1900 break;
1902 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1903 df_reorganize_refs_by_reg (&df->use_info, false, true, true);
1904 break;
1906 case DF_REF_ORDER_BY_INSN:
1907 df_reorganize_refs_by_insn (&df->use_info, false, true, false);
1908 break;
1910 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1911 df_reorganize_refs_by_insn (&df->use_info, false, true, true);
1912 break;
1914 case DF_REF_ORDER_NO_TABLE:
1915 free (df->use_info.refs);
1916 df->use_info.refs = NULL;
1917 df->use_info.refs_size = 0;
1918 break;
1920 case DF_REF_ORDER_UNORDERED:
1921 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1922 gcc_unreachable ();
1923 break;
1926 df->use_info.ref_order = order;
1930 /* If the def refs in DF are not organized, reorganize them. */
1932 void
1933 df_maybe_reorganize_def_refs (enum df_ref_order order)
1935 if (order == df->def_info.ref_order)
1936 return;
1938 switch (order)
1940 case DF_REF_ORDER_BY_REG:
1941 df_reorganize_refs_by_reg (&df->def_info, true, false, false);
1942 break;
1944 case DF_REF_ORDER_BY_INSN:
1945 df_reorganize_refs_by_insn (&df->def_info, true, false, false);
1946 break;
1948 case DF_REF_ORDER_NO_TABLE:
1949 free (df->def_info.refs);
1950 df->def_info.refs = NULL;
1951 df->def_info.refs_size = 0;
1952 break;
1954 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1955 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1956 case DF_REF_ORDER_UNORDERED:
1957 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1958 gcc_unreachable ();
1959 break;
1962 df->def_info.ref_order = order;
1966 /* Change all of the basic block references in INSN to use the insn's
1967 current basic block. This function is called from routines that move
1968 instructions from one block to another. */
1970 void
1971 df_insn_change_bb (rtx insn, basic_block new_bb)
1973 basic_block old_bb = BLOCK_FOR_INSN (insn);
1974 struct df_insn_info *insn_info;
1975 unsigned int uid = INSN_UID (insn);
1977 if (old_bb == new_bb)
1978 return;
1980 set_block_for_insn (insn, new_bb);
1982 if (!df)
1983 return;
1985 if (dump_file)
1986 fprintf (dump_file, "changing bb of uid %d\n", uid);
1988 insn_info = DF_INSN_UID_SAFE_GET (uid);
1989 if (insn_info == NULL)
1991 if (dump_file)
1992 fprintf (dump_file, " unscanned insn\n");
1993 df_insn_rescan (insn);
1994 return;
1997 if (!INSN_P (insn))
1998 return;
2000 df_set_bb_dirty (new_bb);
2001 if (old_bb)
2003 if (dump_file)
2004 fprintf (dump_file, " from %d to %d\n",
2005 old_bb->index, new_bb->index);
2006 df_set_bb_dirty (old_bb);
2008 else
2009 if (dump_file)
2010 fprintf (dump_file, " to %d\n", new_bb->index);
2014 /* Helper function for df_ref_change_reg_with_loc. */
2016 static void
2017 df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
2018 struct df_reg_info *new_df,
2019 int new_regno, rtx loc)
2021 df_ref the_ref = old_df->reg_chain;
2023 while (the_ref)
2025 if ((!DF_REF_IS_ARTIFICIAL (the_ref))
2026 && (DF_REF_LOC (the_ref))
2027 && (*DF_REF_LOC (the_ref) == loc))
2029 df_ref next_ref = DF_REF_NEXT_REG (the_ref);
2030 df_ref prev_ref = DF_REF_PREV_REG (the_ref);
2031 df_ref *ref_vec, *ref_vec_t;
2032 struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);
2033 unsigned int count = 0;
2035 DF_REF_REGNO (the_ref) = new_regno;
2036 DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];
2038 /* Pull the_ref out of the old regno chain. */
2039 if (prev_ref)
2040 DF_REF_NEXT_REG (prev_ref) = next_ref;
2041 else
2042 old_df->reg_chain = next_ref;
2043 if (next_ref)
2044 DF_REF_PREV_REG (next_ref) = prev_ref;
2045 old_df->n_refs--;
2047 /* Put the ref into the new regno chain. */
2048 DF_REF_PREV_REG (the_ref) = NULL;
2049 DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
2050 if (new_df->reg_chain)
2051 DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
2052 new_df->reg_chain = the_ref;
2053 new_df->n_refs++;
2054 if (DF_REF_BB (the_ref))
2055 df_set_bb_dirty (DF_REF_BB (the_ref));
2057 /* Need to sort the record again that the ref was in because
2058 the regno is a sorting key. First, find the right
2059 record. */
2060 if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
2061 ref_vec = insn_info->eq_uses;
2062 else
2063 ref_vec = insn_info->uses;
2064 if (dump_file)
2065 fprintf (dump_file, "changing reg in insn %d\n",
2066 DF_REF_INSN_UID (the_ref));
2068 ref_vec_t = ref_vec;
2070 /* Find the length. */
2071 while (*ref_vec_t)
2073 count++;
2074 ref_vec_t++;
2076 qsort (ref_vec, count, sizeof (df_ref ), df_ref_compare);
2078 the_ref = next_ref;
2080 else
2081 the_ref = DF_REF_NEXT_REG (the_ref);
2086 /* Change the regno of all refs that contained LOC from OLD_REGNO to
2087 NEW_REGNO. Refs that do not match LOC are not changed which means
2088 that artificial refs are not changed since they have no loc. This
2089 call is to support the SET_REGNO macro. */
2091 void
2092 df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
2094 if ((!df) || (old_regno == -1) || (old_regno == new_regno))
2095 return;
2097 df_grow_reg_info ();
2099 df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
2100 DF_REG_DEF_GET (new_regno), new_regno, loc);
2101 df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
2102 DF_REG_USE_GET (new_regno), new_regno, loc);
2103 df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
2104 DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
2108 /* Delete the mw_hardregs that point into the eq_notes. */
2110 static unsigned int
2111 df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
2113 struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
2114 unsigned int deleted = 0;
2115 unsigned int count = 0;
2116 struct df_scan_problem_data *problem_data
2117 = (struct df_scan_problem_data *) df_scan->problem_data;
2119 if (!*mw_vec)
2120 return 0;
2122 while (*mw_vec)
2124 if ((*mw_vec)->flags & DF_REF_IN_NOTE)
2126 struct df_mw_hardreg **temp_vec = mw_vec;
2128 pool_free (problem_data->mw_reg_pool, *mw_vec);
2129 temp_vec = mw_vec;
2130 /* Shove the remaining ones down one to fill the gap. While
2131 this looks n**2, it is highly unusual to have any mw regs
2132 in eq_notes and the chances of more than one are almost
2133 non existent. */
2134 while (*temp_vec)
2136 *temp_vec = *(temp_vec + 1);
2137 temp_vec++;
2139 deleted++;
2141 else
2143 mw_vec++;
2144 count++;
2148 if (count == 0)
2150 df_scan_free_mws_vec (insn_info->mw_hardregs);
2151 insn_info->mw_hardregs = df_null_mw_rec;
2152 return 0;
2154 return deleted;
2158 /* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN. */
2160 void
2161 df_notes_rescan (rtx insn)
2163 struct df_insn_info *insn_info;
2164 unsigned int uid = INSN_UID (insn);
2166 if (!df)
2167 return;
2169 /* The client has disabled rescanning and plans to do it itself. */
2170 if (df->changeable_flags & DF_NO_INSN_RESCAN)
2171 return;
2173 /* Do nothing if the insn hasn't been emitted yet. */
2174 if (!BLOCK_FOR_INSN (insn))
2175 return;
2177 df_grow_bb_info (df_scan);
2178 df_grow_reg_info ();
2180 insn_info = DF_INSN_UID_SAFE_GET (INSN_UID(insn));
2182 /* The client has deferred rescanning. */
2183 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
2185 if (!insn_info)
2187 insn_info = df_insn_create_insn_record (insn);
2188 insn_info->defs = df_null_ref_rec;
2189 insn_info->uses = df_null_ref_rec;
2190 insn_info->eq_uses = df_null_ref_rec;
2191 insn_info->mw_hardregs = df_null_mw_rec;
2194 bitmap_clear_bit (df->insns_to_delete, uid);
2195 /* If the insn is set to be rescanned, it does not need to also
2196 be notes rescanned. */
2197 if (!bitmap_bit_p (df->insns_to_rescan, uid))
2198 bitmap_set_bit (df->insns_to_notes_rescan, INSN_UID (insn));
2199 return;
2202 bitmap_clear_bit (df->insns_to_delete, uid);
2203 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
2205 if (insn_info)
2207 basic_block bb = BLOCK_FOR_INSN (insn);
2208 rtx note;
2209 struct df_collection_rec collection_rec;
2210 unsigned int num_deleted;
2211 unsigned int mw_len;
2213 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
2214 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
2215 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
2217 num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
2218 df_ref_chain_delete (insn_info->eq_uses);
2219 insn_info->eq_uses = NULL;
2221 /* Process REG_EQUIV/REG_EQUAL notes */
2222 for (note = REG_NOTES (insn); note;
2223 note = XEXP (note, 1))
2225 switch (REG_NOTE_KIND (note))
2227 case REG_EQUIV:
2228 case REG_EQUAL:
2229 df_uses_record (DF_REF_REGULAR, &collection_rec,
2230 &XEXP (note, 0), DF_REF_REG_USE,
2231 bb, insn_info, DF_REF_IN_NOTE, -1, -1, VOIDmode);
2232 default:
2233 break;
2237 /* Find some place to put any new mw_hardregs. */
2238 df_canonize_collection_rec (&collection_rec);
2239 mw_len = VEC_length (df_mw_hardreg_ptr, collection_rec.mw_vec);
2240 if (mw_len)
2242 unsigned int count = 0;
2243 struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
2244 while (*mw_rec)
2246 count++;
2247 mw_rec++;
2250 if (count)
2252 /* Append to the end of the existing record after
2253 expanding it if necessary. */
2254 if (mw_len > num_deleted)
2256 insn_info->mw_hardregs =
2257 XRESIZEVEC (struct df_mw_hardreg *,
2258 insn_info->mw_hardregs,
2259 count + 1 + mw_len);
2261 memcpy (&insn_info->mw_hardregs[count],
2262 VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
2263 mw_len * sizeof (struct df_mw_hardreg *));
2264 insn_info->mw_hardregs[count + mw_len] = NULL;
2265 qsort (insn_info->mw_hardregs, count + mw_len,
2266 sizeof (struct df_mw_hardreg *), df_mw_compare);
2268 else
2270 /* No vector there. */
2271 insn_info->mw_hardregs
2272 = XNEWVEC (struct df_mw_hardreg*, 1 + mw_len);
2273 memcpy (insn_info->mw_hardregs,
2274 VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
2275 mw_len * sizeof (struct df_mw_hardreg *));
2276 insn_info->mw_hardregs[mw_len] = NULL;
2279 /* Get rid of the mw_rec so that df_refs_add_to_chains will
2280 ignore it. */
2281 VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
2282 df_refs_add_to_chains (&collection_rec, bb, insn);
2283 VEC_free (df_ref, stack, collection_rec.eq_use_vec);
2285 else
2286 df_insn_rescan (insn);
/*----------------------------------------------------------------------------
   Hard core instruction scanning code.  No external interfaces here,
   just a lot of routines that look inside insns.
----------------------------------------------------------------------------*/
2297 /* Return true if the contents of two df_ref's are identical.
2298 It ignores DF_REF_MARKER. */
2300 static bool
2301 df_ref_equal_p (df_ref ref1, df_ref ref2)
2303 if (!ref2)
2304 return false;
2306 if (ref1 == ref2)
2307 return true;
2309 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
2310 || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
2311 || DF_REF_REG (ref1) != DF_REF_REG (ref2)
2312 || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
2313 || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2314 != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2315 || DF_REF_BB (ref1) != DF_REF_BB (ref2)
2316 || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
2317 return false;
2319 switch (DF_REF_CLASS (ref1))
2321 case DF_REF_ARTIFICIAL:
2322 case DF_REF_BASE:
2323 return true;
2325 case DF_REF_EXTRACT:
2326 if ((DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
2327 || (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
2328 || (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2)))
2329 return false;
2330 /* fallthru. */
2332 case DF_REF_REGULAR:
2333 return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);
2335 default:
2336 gcc_unreachable ();
2338 return false;
2342 /* Compare REF1 and REF2 for sorting. This is only called from places
2343 where all of the refs are of the same type, in the same insn, and
2344 have the same bb. So these fields are not checked. */
2346 static int
2347 df_ref_compare (const void *r1, const void *r2)
2349 const df_ref ref1 = *(const df_ref *)r1;
2350 const df_ref ref2 = *(const df_ref *)r2;
2352 if (ref1 == ref2)
2353 return 0;
2355 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
2356 return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);
2358 if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
2359 return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
2361 if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
2362 return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
2364 if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
2365 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2367 /* Cannot look at the LOC field on artificial refs. */
2368 if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
2369 && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
2370 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2372 if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
2374 /* If two refs are identical except that one of them has is from
2375 a mw and one is not, we need to have the one with the mw
2376 first. */
2377 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
2378 DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
2379 return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
2380 else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
2381 return -1;
2382 else
2383 return 1;
2386 /* The classes are the same at this point so it is safe to only look
2387 at ref1. */
2388 if (DF_REF_CLASS (ref1) == DF_REF_EXTRACT)
2390 if (DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
2391 return DF_REF_EXTRACT_OFFSET (ref1) - DF_REF_EXTRACT_OFFSET (ref2);
2392 if (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
2393 return DF_REF_EXTRACT_WIDTH (ref1) - DF_REF_EXTRACT_WIDTH (ref2);
2394 if (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2))
2395 return DF_REF_EXTRACT_MODE (ref1) - DF_REF_EXTRACT_MODE (ref2);
2397 return 0;
2400 static void
2401 df_swap_refs (VEC(df_ref,stack) **ref_vec, int i, int j)
2403 df_ref tmp = VEC_index (df_ref, *ref_vec, i);
2404 VEC_replace (df_ref, *ref_vec, i, VEC_index (df_ref, *ref_vec, j));
2405 VEC_replace (df_ref, *ref_vec, j, tmp);
2408 /* Sort and compress a set of refs. */
2410 static void
2411 df_sort_and_compress_refs (VEC(df_ref,stack) **ref_vec)
2413 unsigned int count;
2414 unsigned int i;
  /* Number of duplicates removed so far; surviving elements are
     shifted left by DIST during the compression pass below.  */
2415 unsigned int dist = 0;
2417 count = VEC_length (df_ref, *ref_vec);
2419 /* If there are 1 or 0 elements, there is nothing to do. */
2420 if (count < 2)
2421 return;
2422 else if (count == 2)
2424 df_ref r0 = VEC_index (df_ref, *ref_vec, 0);
2425 df_ref r1 = VEC_index (df_ref, *ref_vec, 1);
2426 if (df_ref_compare (&r0, &r1) > 0)
2427 df_swap_refs (ref_vec, 0, 1);
2429 else
  /* First check whether the vector is already strictly sorted, to
     avoid paying for a qsort call in the common case.  */
2431 for (i = 0; i < count - 1; i++)
2433 df_ref r0 = VEC_index (df_ref, *ref_vec, i);
2434 df_ref r1 = VEC_index (df_ref, *ref_vec, i + 1);
2435 if (df_ref_compare (&r0, &r1) >= 0)
2436 break;
2438 /* If the array is already strictly ordered,
2439 which is the most common case for large COUNT case
2440 (which happens for CALL INSNs),
2441 no need to sort and filter out duplicate.
2442 Simply return the count.
2443 Make sure DF_GET_ADD_REFS adds refs in the increasing order
2444 of DF_REF_COMPARE. */
2445 if (i == count - 1)
2446 return;
2447 qsort (VEC_address (df_ref, *ref_vec), count, sizeof (df_ref),
2448 df_ref_compare);
  /* Compression pass: free duplicate refs (df_ref_equal_p) and slide
     the surviving ones down over the holes.  */
2451 for (i=0; i<count-dist; i++)
2453 /* Find the next ref that is not equal to the current ref. */
2454 while (i + dist + 1 < count
2455 && df_ref_equal_p (VEC_index (df_ref, *ref_vec, i),
2456 VEC_index (df_ref, *ref_vec, i + dist + 1)))
2458 df_free_ref (VEC_index (df_ref, *ref_vec, i + dist + 1));
2459 dist++;
2461 /* Copy it down to the next position. */
2462 if (dist && i + dist + 1 < count)
2463 VEC_replace (df_ref, *ref_vec, i + 1,
2464 VEC_index (df_ref, *ref_vec, i + dist + 1))
2467 count -= dist;
2468 VEC_truncate (df_ref, *ref_vec, count);
2472 /* Return true if the contents of two df_ref's are identical.
2473 It ignores DF_REF_MARKER. */
2475 static bool
2476 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2478 if (!mw2)
2479 return false;
2480 return (mw1 == mw2) ||
2481 (mw1->mw_reg == mw2->mw_reg
2482 && mw1->type == mw2->type
2483 && mw1->flags == mw2->flags
2484 && mw1->start_regno == mw2->start_regno
2485 && mw1->end_regno == mw2->end_regno);
2489 /* Compare MW1 and MW2 for sorting. */
2491 static int
2492 df_mw_compare (const void *m1, const void *m2)
2494 const struct df_mw_hardreg *const mw1 = *(const struct df_mw_hardreg *const*)m1;
2495 const struct df_mw_hardreg *const mw2 = *(const struct df_mw_hardreg *const*)m2;
2497 if (mw1 == mw2)
2498 return 0;
2500 if (mw1->type != mw2->type)
2501 return mw1->type - mw2->type;
2503 if (mw1->flags != mw2->flags)
2504 return mw1->flags - mw2->flags;
2506 if (mw1->start_regno != mw2->start_regno)
2507 return mw1->start_regno - mw2->start_regno;
2509 if (mw1->end_regno != mw2->end_regno)
2510 return mw1->end_regno - mw2->end_regno;
2512 if (mw1->mw_reg != mw2->mw_reg)
2513 return mw1->mw_order - mw2->mw_order;
2515 return 0;
2519 /* Sort and compress a set of refs. */
2521 static void
2522 df_sort_and_compress_mws (VEC(df_mw_hardreg_ptr,stack) **mw_vec)
2524 unsigned int count;
2525 struct df_scan_problem_data *problem_data
2526 = (struct df_scan_problem_data *) df_scan->problem_data;
2527 unsigned int i;
  /* Number of duplicates removed so far; survivors are shifted left
     by DIST during the compression pass below.  */
2528 unsigned int dist = 0;
2530 count = VEC_length (df_mw_hardreg_ptr, *mw_vec);
2531 if (count < 2)
2532 return;
2533 else if (count == 2)
2535 struct df_mw_hardreg *m0 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 0);
2536 struct df_mw_hardreg *m1 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 1);
2537 if (df_mw_compare (&m0, &m1) > 0)
2539 struct df_mw_hardreg *tmp = VEC_index (df_mw_hardreg_ptr,
2540 *mw_vec, 0);
2541 VEC_replace (df_mw_hardreg_ptr, *mw_vec, 0,
2542 VEC_index (df_mw_hardreg_ptr, *mw_vec, 1));
2543 VEC_replace (df_mw_hardreg_ptr, *mw_vec, 1, tmp);
2546 else
2547 qsort (VEC_address (df_mw_hardreg_ptr, *mw_vec), count,
2548 sizeof (struct df_mw_hardreg *), df_mw_compare);
  /* Compression pass: duplicate records go back to the scan problem's
     mw_reg_pool, survivors slide down over the holes.  */
2550 for (i=0; i<count-dist; i++)
2552 /* Find the next ref that is not equal to the current ref. */
2553 while (i + dist + 1 < count
2554 && df_mw_equal_p (VEC_index (df_mw_hardreg_ptr, *mw_vec, i),
2555 VEC_index (df_mw_hardreg_ptr, *mw_vec,
2556 i + dist + 1)))
2558 pool_free (problem_data->mw_reg_pool,
2559 VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
2560 dist++;
2562 /* Copy it down to the next position. */
2563 if (dist && i + dist + 1 < count)
2564 VEC_replace (df_mw_hardreg_ptr, *mw_vec, i + 1,
2565 VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
2568 count -= dist;
2569 VEC_truncate (df_mw_hardreg_ptr, *mw_vec, count);
2573 /* Sort and remove duplicates from the COLLECTION_REC. */
2575 static void
2576 df_canonize_collection_rec (struct df_collection_rec *collection_rec)
2578 df_sort_and_compress_refs (&collection_rec->def_vec);
2579 df_sort_and_compress_refs (&collection_rec->use_vec);
2580 df_sort_and_compress_refs (&collection_rec->eq_use_vec);
2581 df_sort_and_compress_mws (&collection_rec->mw_vec);
2585 /* Add the new df_ref to appropriate reg_info/ref_info chains. */
2587 static void
2588 df_install_ref (df_ref this_ref,
2589 struct df_reg_info *reg_info,
2590 struct df_ref_info *ref_info,
2591 bool add_to_table)
2593 unsigned int regno = DF_REF_REGNO (this_ref);
2594 /* Add the ref to the reg_{def,use,eq_use} chain. */
2595 df_ref head = reg_info->reg_chain;
  /* THIS_REF becomes the new head of the per-register chain.  */
2597 reg_info->reg_chain = this_ref;
2598 reg_info->n_refs++;
  /* Keep the per-hard-register live count in sync; the flag is only
     ever set on hard registers.  */
2600 if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
2602 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
2603 df->hard_regs_live_count[regno]++;
2606 gcc_assert (DF_REF_NEXT_REG (this_ref) == NULL);
2607 gcc_assert (DF_REF_PREV_REG (this_ref) == NULL);
2609 DF_REF_NEXT_REG (this_ref) = head;
2611 /* We cannot actually link to the head of the chain. */
2612 DF_REF_PREV_REG (this_ref) = NULL;
2614 if (head)
2615 DF_REF_PREV_REG (head) = this_ref;
  /* Optionally give the ref an id and record it in the big table of
     refs; otherwise mark it as not in the table with id -1.  */
2617 if (add_to_table)
2619 gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
2620 df_check_and_grow_ref_info (ref_info, 1);
2621 DF_REF_ID (this_ref) = ref_info->table_size;
2622 /* Add the ref to the big array of defs. */
2623 ref_info->refs[ref_info->table_size] = this_ref;
2624 ref_info->table_size++;
2626 else
2627 DF_REF_ID (this_ref) = -1;
2629 ref_info->total_size++;
2633 /* This function takes one of the groups of refs (defs, uses or
2634 eq_uses) and installs the entire group into the insn. It also adds
2635 each of these refs into the appropriate chains. */
2637 static df_ref *
2638 df_install_refs (basic_block bb,
2639 VEC(df_ref,stack)* old_vec,
2640 struct df_reg_info **reg_info,
2641 struct df_ref_info *ref_info,
2642 bool is_notes)
2644 unsigned int count;
2646 count = VEC_length (df_ref, old_vec);
2647 if (count)
2649 df_ref *new_vec = XNEWVEC (df_ref, count + 1);
2650 bool add_to_table;
2651 df_ref this_ref;
2652 unsigned int ix;
  /* Installing refs invalidates any by-reg/by-insn ordering of the
     table, so downgrade ref_order to the matching UNORDERED state and
     decide whether these refs belong in the table at all.  */
2654 switch (ref_info->ref_order)
2656 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
2657 case DF_REF_ORDER_BY_REG_WITH_NOTES:
2658 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
2659 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
2660 add_to_table = true;
2661 break;
2662 case DF_REF_ORDER_UNORDERED:
2663 case DF_REF_ORDER_BY_REG:
2664 case DF_REF_ORDER_BY_INSN:
2665 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
2666 add_to_table = !is_notes;
2667 break;
2668 default:
2669 add_to_table = false;
2670 break;
2673 /* Do not add if ref is not in the right blocks. */
2674 if (add_to_table && df->analyze_subset)
2675 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
  /* Copy the refs into a NULL-terminated heap array and link each one
     into its register chain (and optionally the table).  */
2677 for (ix = 0; VEC_iterate (df_ref, old_vec, ix, this_ref); ++ix)
2679 new_vec[ix] = this_ref;
2680 df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
2681 ref_info, add_to_table);
2684 new_vec[count] = NULL;
2685 return new_vec;
2687 else
2688 return df_null_ref_rec;
2692 /* This function takes the mws installs the entire group into the
2693 insn. */
2695 static struct df_mw_hardreg **
2696 df_install_mws (VEC(df_mw_hardreg_ptr,stack) *old_vec)
2698 unsigned int count;
2700 count = VEC_length (df_mw_hardreg_ptr, old_vec);
2701 if (count)
2703 struct df_mw_hardreg **new_vec
2704 = XNEWVEC (struct df_mw_hardreg*, count + 1);
2705 memcpy (new_vec, VEC_address (df_mw_hardreg_ptr, old_vec),
2706 sizeof (struct df_mw_hardreg*) * count);
2707 new_vec[count] = NULL;
2708 return new_vec;
2710 else
2711 return df_null_mw_rec;
2715 /* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
2716 chains and update other necessary information. */
2718 static void
2719 df_refs_add_to_chains (struct df_collection_rec *collection_rec,
2720 basic_block bb, rtx insn)
2722 if (insn)
2724 struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
2725 /* If there is a vector in the collection rec, add it to the
2726 insn. A null rec is a signal that the caller will handle the
2727 chain specially. */
2728 if (collection_rec->def_vec)
2730 df_scan_free_ref_vec (insn_rec->defs);
2731 insn_rec->defs
2732 = df_install_refs (bb, collection_rec->def_vec,
2733 df->def_regs,
2734 &df->def_info, false);
2736 if (collection_rec->use_vec)
2738 df_scan_free_ref_vec (insn_rec->uses);
2739 insn_rec->uses
2740 = df_install_refs (bb, collection_rec->use_vec,
2741 df->use_regs,
2742 &df->use_info, false);
  /* eq_uses (uses in REG_EQUAL/REG_EQUIV notes) share the use table,
     hence the TRUE is_notes argument.  */
2744 if (collection_rec->eq_use_vec)
2746 df_scan_free_ref_vec (insn_rec->eq_uses);
2747 insn_rec->eq_uses
2748 = df_install_refs (bb, collection_rec->eq_use_vec,
2749 df->eq_use_regs,
2750 &df->use_info, true);
2752 if (collection_rec->mw_vec)
2754 df_scan_free_mws_vec (insn_rec->mw_hardregs);
2755 insn_rec->mw_hardregs
2756 = df_install_mws (collection_rec->mw_vec);
2759 else
  /* No insn: these are the basic block's artificial defs and uses.  */
2761 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
2763 df_scan_free_ref_vec (bb_info->artificial_defs);
2764 bb_info->artificial_defs
2765 = df_install_refs (bb, collection_rec->def_vec,
2766 df->def_regs,
2767 &df->def_info, false);
2768 df_scan_free_ref_vec (bb_info->artificial_uses);
2769 bb_info->artificial_uses
2770 = df_install_refs (bb, collection_rec->use_vec,
2771 df->use_regs,
2772 &df->use_info, false);
2777 /* Allocate a ref and initialize its fields.
2779 If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
2780 DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the fields
2781 if they were constants. Otherwise they should be -1 if those flags
2782 were set. */
2784 static df_ref
2785 df_ref_create_structure (enum df_ref_class cl,
2786 struct df_collection_rec *collection_rec,
2787 rtx reg, rtx *loc,
2788 basic_block bb, struct df_insn_info *info,
2789 enum df_ref_type ref_type,
2790 int ref_flags,
2791 int width, int offset, enum machine_mode mode)
2793 df_ref this_ref = NULL;
2794 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2795 struct df_scan_problem_data *problem_data
2796 = (struct df_scan_problem_data *) df_scan->problem_data;
  /* Each ref class has its own allocation pool; only the classes that
     carry a location take LOC, and only DF_REF_EXTRACT stores the
     width/offset/mode triple.  */
2798 switch (cl)
2800 case DF_REF_BASE:
2801 this_ref = (df_ref) pool_alloc (problem_data->ref_base_pool);
2802 gcc_assert (loc == NULL);
2803 break;
2805 case DF_REF_ARTIFICIAL:
2806 this_ref = (df_ref) pool_alloc (problem_data->ref_artificial_pool);
2807 this_ref->artificial_ref.bb = bb;
2808 gcc_assert (loc == NULL);
2809 break;
2811 case DF_REF_REGULAR:
2812 this_ref = (df_ref) pool_alloc (problem_data->ref_regular_pool);
2813 this_ref->regular_ref.loc = loc;
2814 gcc_assert (loc);
2815 break;
2817 case DF_REF_EXTRACT:
2818 this_ref = (df_ref) pool_alloc (problem_data->ref_extract_pool);
2819 DF_REF_EXTRACT_WIDTH (this_ref) = width;
2820 DF_REF_EXTRACT_OFFSET (this_ref) = offset;
2821 DF_REF_EXTRACT_MODE (this_ref) = mode;
2822 this_ref->regular_ref.loc = loc;
2823 gcc_assert (loc);
2824 break;
2827 DF_REF_CLASS (this_ref) = cl;
2828 DF_REF_ID (this_ref) = -1;
2829 DF_REF_REG (this_ref) = reg;
2830 DF_REF_REGNO (this_ref) = regno;
2831 DF_REF_TYPE (this_ref) = ref_type;
2832 DF_REF_INSN_INFO (this_ref) = info;
2833 DF_REF_CHAIN (this_ref) = NULL;
2834 DF_REF_FLAGS (this_ref) = ref_flags;
2835 DF_REF_NEXT_REG (this_ref) = NULL;
2836 DF_REF_PREV_REG (this_ref) = NULL;
2837 DF_REF_ORDER (this_ref) = df->ref_order++;
2839 /* We need to clear this bit because fwprop, and in the future
2840 possibly other optimizations sometimes create new refs using ond
2841 refs as the model. */
2842 DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
2844 /* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
2845 if ((regno < FIRST_PSEUDO_REGISTER)
2846 && (!DF_REF_IS_ARTIFICIAL (this_ref)))
2848 if (DF_REF_REG_DEF_P (this_ref))
2850 if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
2851 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE)
2853 else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
2854 && (regno == FRAME_POINTER_REGNUM
2855 || regno == ARG_POINTER_REGNUM)))
2856 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
  /* File the new ref into the matching vector of the collection rec:
     defs, note uses (eq_uses), or plain uses.  */
2859 if (collection_rec)
2861 if (DF_REF_REG_DEF_P (this_ref))
2862 VEC_safe_push (df_ref, stack, collection_rec->def_vec, this_ref);
2863 else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
2864 VEC_safe_push (df_ref, stack, collection_rec->eq_use_vec, this_ref);
2865 else
2866 VEC_safe_push (df_ref, stack, collection_rec->use_vec, this_ref);
2869 return this_ref;
2873 /* Create new references of type DF_REF_TYPE for each part of register REG
2874 at address LOC within INSN of BB.
2876 If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
2877 DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the
2878 fields if they were constants. Otherwise they should be -1 if
2879 those flags were set. */
2882 static void
2883 df_ref_record (enum df_ref_class cl,
2884 struct df_collection_rec *collection_rec,
2885 rtx reg, rtx *loc,
2886 basic_block bb, struct df_insn_info *insn_info,
2887 enum df_ref_type ref_type,
2888 int ref_flags,
2889 int width, int offset, enum machine_mode mode)
2891 unsigned int regno;
2893 gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
2895 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  /* Hard registers get one ref per component register; pseudos get a
     single ref.  */
2896 if (regno < FIRST_PSEUDO_REGISTER)
2898 struct df_mw_hardreg *hardreg = NULL;
2899 struct df_scan_problem_data *problem_data
2900 = (struct df_scan_problem_data *) df_scan->problem_data;
2901 unsigned int i;
2902 unsigned int endregno;
2903 df_ref ref;
  /* For a SUBREG, narrow [regno, endregno) to the hard registers the
     subreg actually touches.  */
2905 if (GET_CODE (reg) == SUBREG)
2907 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
2908 SUBREG_BYTE (reg), GET_MODE (reg));
2909 endregno = regno + subreg_nregs (reg);
2911 else
2912 endregno = END_HARD_REGNO (reg);
2914 /* If this is a multiword hardreg, we create some extra
2915 datastructures that will enable us to easily build REG_DEAD
2916 and REG_UNUSED notes. */
2917 if ((endregno != regno + 1) && insn_info)
2919 /* Sets to a subreg of a multiword register are partial.
2920 Sets to a non-subreg of a multiword register are not. */
2921 if (GET_CODE (reg) == SUBREG)
2922 ref_flags |= DF_REF_PARTIAL;
2923 ref_flags |= DF_REF_MW_HARDREG;
2925 hardreg = (struct df_mw_hardreg *) pool_alloc (problem_data->mw_reg_pool);
2926 hardreg->type = ref_type;
2927 hardreg->flags = ref_flags;
2928 hardreg->mw_reg = reg;
2929 hardreg->start_regno = regno;
2930 hardreg->end_regno = endregno - 1;
2931 hardreg->mw_order = df->ref_order++;
2932 VEC_safe_push (df_mw_hardreg_ptr, stack, collection_rec->mw_vec,
2933 hardreg);
  /* One ref per component hard register, each on its canonical
     regno_reg_rtx entry.  */
2936 for (i = regno; i < endregno; i++)
2938 ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
2939 bb, insn_info, ref_type, ref_flags,
2940 width, offset, mode);
2942 gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
2945 else
2947 df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
2948 ref_type, ref_flags, width, offset, mode);
2953 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
2954 covered by the outer mode is smaller than that covered by the inner mode,
2955 is a read-modify-write operation.
2956 This function returns true iff the SUBREG X is such a SUBREG. */
2958 bool
2959 df_read_modify_subreg_p (rtx x)
2961 unsigned int isize, osize;
2962 if (GET_CODE (x) != SUBREG)
2963 return false;
2964 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2965 osize = GET_MODE_SIZE (GET_MODE (x));
2966 return isize > osize
2967 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
2971 /* Process all the registers defined in the rtx, X.
2972 Autoincrement/decrement definitions will be picked up by
2973 df_uses_record. */
2975 static void
2976 df_def_record_1 (struct df_collection_rec *collection_rec,
2977 rtx x, basic_block bb, struct df_insn_info *insn_info,
2978 int flags)
2980 rtx *loc;
2981 rtx dst;
  /* Extract parameters stay -1/VOIDmode unless a constant
     ZERO_EXTRACT destination is peeled off below.  */
2982 int offset = -1;
2983 int width = -1;
2984 enum machine_mode mode = VOIDmode;
2985 enum df_ref_class cl = DF_REF_REGULAR;
2987 /* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
2988 construct. */
2989 if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
2990 loc = &XEXP (x, 0);
2991 else
2992 loc = &SET_DEST (x);
2993 dst = *loc;
2995 /* It is legal to have a set destination be a parallel. */
2996 if (GET_CODE (dst) == PARALLEL)
2998 int i;
3000 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
3002 rtx temp = XVECEXP (dst, 0, i);
3003 if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
3004 || GET_CODE (temp) == SET)
3005 df_def_record_1 (collection_rec,
3006 temp, bb, insn_info,
3007 GET_CODE (temp) == CLOBBER
3008 ? flags | DF_REF_MUST_CLOBBER : flags);
3010 return;
  /* Peel off a STRICT_LOW_PART wrapper; such a set reads and writes
     only part of the register.  */
3013 if (GET_CODE (dst) == STRICT_LOW_PART)
3015 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;
3017 loc = &XEXP (dst, 0);
3018 dst = *loc;
  /* Peel off a ZERO_EXTRACT wrapper; with constant position/size the
     def becomes a DF_REF_EXTRACT carrying width/offset/mode.  */
3021 if (GET_CODE (dst) == ZERO_EXTRACT)
3023 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;
3025 if (CONST_INT_P (XEXP (dst, 1))
3026 && CONST_INT_P (XEXP (dst, 2)))
3028 width = INTVAL (XEXP (dst, 1));
3029 offset = INTVAL (XEXP (dst, 2));
3030 mode = GET_MODE (dst);
3031 cl = DF_REF_EXTRACT;
3034 loc = &XEXP (dst, 0);
3035 dst = *loc;
3038 /* At this point if we do not have a reg or a subreg, just return. */
3039 if (REG_P (dst))
3041 df_ref_record (cl, collection_rec,
3042 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags,
3043 width, offset, mode);
3045 /* We want to keep sp alive everywhere - by making all
3046 writes to sp also use of sp. */
3047 if (REGNO (dst) == STACK_POINTER_REGNUM)
3048 df_ref_record (DF_REF_BASE, collection_rec,
3049 dst, NULL, bb, insn_info, DF_REF_REG_USE, flags,
3050 width, offset, mode);
3052 else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
3054 if (df_read_modify_subreg_p (dst))
3055 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
3057 flags |= DF_REF_SUBREG;
3059 df_ref_record (cl, collection_rec,
3060 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags,
3061 width, offset, mode);
3066 /* Process all the registers defined in the pattern rtx, X. */
3068 static void
3069 df_defs_record (struct df_collection_rec *collection_rec,
3070 rtx x, basic_block bb, struct df_insn_info *insn_info,
3071 int flags)
3073 RTX_CODE code = GET_CODE (x);
3075 if (code == SET || code == CLOBBER)
3077 /* Mark the single def within the pattern. */
3078 int clobber_flags = flags;
3079 clobber_flags |= (code == CLOBBER) ? DF_REF_MUST_CLOBBER : 0;
3080 df_def_record_1 (collection_rec, x, bb, insn_info, clobber_flags);
3082 else if (code == COND_EXEC)
3084 df_defs_record (collection_rec, COND_EXEC_CODE (x),
3085 bb, insn_info, DF_REF_CONDITIONAL);
3087 else if (code == PARALLEL)
3089 int i;
3091 /* Mark the multiple defs within the pattern. */
3092 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3093 df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn_info, flags);
3098 /* Process all the registers used in the rtx at address LOC.
3100 If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
3101 DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the
3102 fields if they were constants. Otherwise they should be -1 if
3103 those flags were set. */
3105 static void
3106 df_uses_record (enum df_ref_class cl, struct df_collection_rec *collection_rec,
3107 rtx *loc, enum df_ref_type ref_type,
3108 basic_block bb, struct df_insn_info *insn_info,
3109 int flags,
3110 int width, int offset, enum machine_mode mode)
3112 RTX_CODE code;
3113 rtx x;
  /* Tail-recursion entry point: the last operand of an expression
     loops back here instead of recursing.  */
3115 retry:
3116 x = *loc;
3117 if (!x)
3118 return;
3119 code = GET_CODE (x);
3120 switch (code)
  /* Constants and other leaf codes reference no registers.  */
3122 case LABEL_REF:
3123 case SYMBOL_REF:
3124 case CONST_INT:
3125 case CONST:
3126 case CONST_DOUBLE:
3127 case CONST_FIXED:
3128 case CONST_VECTOR:
3129 case PC:
3130 case CC0:
3131 case ADDR_VEC:
3132 case ADDR_DIFF_VEC:
3133 return;
3135 case CLOBBER:
3136 /* If we are clobbering a MEM, mark any registers inside the address
3137 as being used. */
3138 if (MEM_P (XEXP (x, 0)))
3139 df_uses_record (cl, collection_rec,
3140 &XEXP (XEXP (x, 0), 0),
3141 DF_REF_REG_MEM_STORE,
3142 bb, insn_info,
3143 flags, width, offset, mode);
3145 /* If we're clobbering a REG then we have a def so ignore. */
3146 return;
3148 case MEM:
  /* Address registers are uses; only the IN_NOTE bit of FLAGS is
     propagated into the address.  */
3149 df_uses_record (cl, collection_rec,
3150 &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
3151 bb, insn_info, flags & DF_REF_IN_NOTE,
3152 width, offset, mode);
3153 return;
3155 case SUBREG:
3156 /* While we're here, optimize this case. */
3157 flags |= DF_REF_PARTIAL;
3158 /* In case the SUBREG is not of a REG, do not optimize. */
3159 if (!REG_P (SUBREG_REG (x)))
3161 loc = &SUBREG_REG (x);
3162 df_uses_record (cl, collection_rec, loc, ref_type, bb, insn_info, flags,
3163 width, offset, mode);
3164 return;
3166 /* ... Fall through ... */
3168 case REG:
3169 df_ref_record (cl, collection_rec,
3170 x, loc, bb, insn_info,
3171 ref_type, flags,
3172 width, offset, mode);
3173 return;
3175 case SIGN_EXTRACT:
3176 case ZERO_EXTRACT:
3178 /* If the parameters to the zero or sign extract are
3179 constants, strip them off and recurse, otherwise there is
3180 no information that we can gain from this operation. */
3181 if (CONST_INT_P (XEXP (x, 1))
3182 && CONST_INT_P (XEXP (x, 2)))
3184 width = INTVAL (XEXP (x, 1));
3185 offset = INTVAL (XEXP (x, 2));
3186 mode = GET_MODE (x);
3188 if (code == ZERO_EXTRACT)
3189 flags |= DF_REF_ZERO_EXTRACT;
3190 else
3191 flags |= DF_REF_SIGN_EXTRACT;
3193 df_uses_record (DF_REF_EXTRACT, collection_rec,
3194 &XEXP (x, 0), ref_type, bb, insn_info, flags,
3195 width, offset, mode);
3196 return;
  /* Non-constant extract parameters: fall out to the generic operand
     walk at the bottom.  */
3199 break;
3201 case SET:
3203 rtx dst = SET_DEST (x);
3204 gcc_assert (!(flags & DF_REF_IN_NOTE));
3205 df_uses_record (cl, collection_rec,
3206 &SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags,
3207 width, offset, mode);
  /* The destination of a SET can also contain uses: addresses,
     read-modify-write subregs, extract operands, etc.  */
3209 switch (GET_CODE (dst))
3211 case SUBREG:
3212 if (df_read_modify_subreg_p (dst))
3214 df_uses_record (cl, collection_rec, &SUBREG_REG (dst),
3215 DF_REF_REG_USE, bb, insn_info,
3216 flags | DF_REF_READ_WRITE | DF_REF_SUBREG,
3217 width, offset, mode);
3218 break;
3220 /* Fall through. */
3221 case REG:
3222 case PARALLEL:
3223 case SCRATCH:
3224 case PC:
3225 case CC0:
3226 break;
3227 case MEM:
3228 df_uses_record (cl, collection_rec, &XEXP (dst, 0),
3229 DF_REF_REG_MEM_STORE, bb, insn_info, flags,
3230 width, offset, mode);
3231 break;
3232 case STRICT_LOW_PART:
3234 rtx *temp = &XEXP (dst, 0);
3235 /* A strict_low_part uses the whole REG and not just the
3236 SUBREG. */
3237 dst = XEXP (dst, 0);
3238 df_uses_record (cl, collection_rec,
3239 (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
3240 DF_REF_REG_USE, bb, insn_info,
3241 DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART,
3242 width, offset, mode);
3244 break;
3245 case ZERO_EXTRACT:
3247 if (CONST_INT_P (XEXP (dst, 1))
3248 && CONST_INT_P (XEXP (dst, 2)))
3250 width = INTVAL (XEXP (dst, 1));
3251 offset = INTVAL (XEXP (dst, 2));
3252 mode = GET_MODE (dst);
3253 if (GET_CODE (XEXP (dst,0)) == MEM)
3255 /* Handle the case of zero_extract(mem(...)) in the set dest.
3256 This special case is allowed only if the mem is a single byte and
3257 is useful to set a bitfield in memory. */
3258 df_uses_record (DF_REF_EXTRACT, collection_rec, &XEXP (XEXP (dst,0), 0),
3259 DF_REF_REG_MEM_STORE, bb, insn_info,
3260 DF_REF_ZERO_EXTRACT,
3261 width, offset, mode);
3263 else
3265 df_uses_record (DF_REF_EXTRACT, collection_rec, &XEXP (dst, 0),
3266 DF_REF_REG_USE, bb, insn_info,
3267 DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
3268 width, offset, mode);
3271 else
  /* Non-constant position/size: all three operands are plain uses.  */
3273 df_uses_record (cl, collection_rec, &XEXP (dst, 1),
3274 DF_REF_REG_USE, bb, insn_info, flags,
3275 width, offset, mode);
3276 df_uses_record (cl, collection_rec, &XEXP (dst, 2),
3277 DF_REF_REG_USE, bb, insn_info, flags,
3278 width, offset, mode);
3279 df_uses_record (cl, collection_rec, &XEXP (dst, 0),
3280 DF_REF_REG_USE, bb, insn_info,
3281 DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
3282 width, offset, mode);
3286 break;
3288 default:
3289 gcc_unreachable ();
3291 return;
3294 case RETURN:
3295 break;
3297 case ASM_OPERANDS:
3298 case UNSPEC_VOLATILE:
3299 case TRAP_IF:
3300 case ASM_INPUT:
3302 /* Traditional and volatile asm instructions must be
3303 considered to use and clobber all hard registers, all
3304 pseudo-registers and all of memory. So must TRAP_IF and
3305 UNSPEC_VOLATILE operations.
3307 Consider for instance a volatile asm that changes the fpu
3308 rounding mode. An insn should not be moved across this
3309 even if it only uses pseudo-regs because it might give an
3310 incorrectly rounded result.
3312 However, flow.c's liveness computation did *not* do this,
3313 giving the reasoning as " ?!? Unfortunately, marking all
3314 hard registers as live causes massive problems for the
3315 register allocator and marking all pseudos as live creates
3316 mountains of uninitialized variable warnings."
3318 In order to maintain the status quo with regard to liveness
3319 and uses, we do what flow.c did and just mark any regs we
3320 can find in ASM_OPERANDS as used. In global asm insns are
3321 scanned and regs_asm_clobbered is filled out.
3323 For all ASM_OPERANDS, we must traverse the vector of input
3324 operands. We can not just fall through here since then we
3325 would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
3326 which do not indicate traditional asms unlike their normal
3327 usage. */
3328 if (code == ASM_OPERANDS)
3330 int j;
3332 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
3333 df_uses_record (cl, collection_rec, &ASM_OPERANDS_INPUT (x, j),
3334 DF_REF_REG_USE, bb, insn_info, flags,
3335 width, offset, mode);
3336 return;
3338 break;
3341 case VAR_LOCATION:
3342 df_uses_record (cl, collection_rec,
3343 &PAT_VAR_LOCATION_LOC (x),
3344 DF_REF_REG_USE, bb, insn_info,
3345 flags, width, offset, mode);
3346 return;
3348 case PRE_DEC:
3349 case POST_DEC:
3350 case PRE_INC:
3351 case POST_INC:
3352 case PRE_MODIFY:
3353 case POST_MODIFY:
3354 gcc_assert (!DEBUG_INSN_P (insn_info->insn));
3355 /* Catch the def of the register being modified. */
3356 df_ref_record (cl, collection_rec, XEXP (x, 0), &XEXP (x, 0),
3357 bb, insn_info,
3358 DF_REF_REG_DEF,
3359 flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY,
3360 width, offset, mode);
3362 /* ... Fall through to handle uses ... */
3364 default:
3365 break;
3368 /* Recursively scan the operands of this expression. */
3370 const char *fmt = GET_RTX_FORMAT (code);
3371 int i;
3373 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3375 if (fmt[i] == 'e')
3377 /* Tail recursive case: save a function call level. */
3378 if (i == 0)
3380 loc = &XEXP (x, 0);
3381 goto retry;
3383 df_uses_record (cl, collection_rec, &XEXP (x, i), ref_type,
3384 bb, insn_info, flags,
3385 width, offset, mode);
3387 else if (fmt[i] == 'E')
3389 int j;
3390 for (j = 0; j < XVECLEN (x, i); j++)
3391 df_uses_record (cl, collection_rec,
3392 &XVECEXP (x, i, j), ref_type,
3393 bb, insn_info, flags,
3394 width, offset, mode);
3399 return;
3403 /* For all DF_REF_CONDITIONAL defs, add a corresponding uses. */
3405 static void
3406 df_get_conditional_uses (struct df_collection_rec *collection_rec)
3408 unsigned int ix;
3409 df_ref ref;
3411 for (ix = 0; VEC_iterate (df_ref, collection_rec->def_vec, ix, ref); ++ix)
3413 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3415 int width = -1;
3416 int offset = -1;
3417 enum machine_mode mode = VOIDmode;
3418 df_ref use;
3420 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
3422 width = DF_REF_EXTRACT_WIDTH (ref);
3423 offset = DF_REF_EXTRACT_OFFSET (ref);
3424 mode = DF_REF_EXTRACT_MODE (ref);
3427 use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
3428 DF_REF_LOC (ref), DF_REF_BB (ref),
3429 DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
3430 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL,
3431 width, offset, mode);
3432 DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3438 /* Get call's extra defs and uses. */
3440 static void
3441 df_get_call_refs (struct df_collection_rec * collection_rec,
3442 basic_block bb,
3443 struct df_insn_info *insn_info,
3444 int flags)
3446 rtx note;
3447 bitmap_iterator bi;
3448 unsigned int ui;
3449 bool is_sibling_call;
3450 unsigned int i;
3451 df_ref def;
3452 bitmap defs_generated = BITMAP_ALLOC (&df_bitmap_obstack);
3454 /* Do not generate clobbers for registers that are the result of the
3455 call. This causes ordering problems in the chain building code
3456 depending on which def is seen first. */
3457 for (i = 0; VEC_iterate (df_ref, collection_rec->def_vec, i, def); ++i)
3458 bitmap_set_bit (defs_generated, DF_REF_REGNO (def));
3460 /* Record the registers used to pass arguments, and explicitly
3461 noted as clobbered. */
3462 for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
3463 note = XEXP (note, 1))
3465 if (GET_CODE (XEXP (note, 0)) == USE)
3466 df_uses_record (DF_REF_REGULAR, collection_rec, &XEXP (XEXP (note, 0), 0),
3467 DF_REF_REG_USE, bb, insn_info, flags, -1, -1,
3468 VOIDmode);
3469 else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3471 if (REG_P (XEXP (XEXP (note, 0), 0)))
3473 unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
3474 if (!bitmap_bit_p (defs_generated, regno))
3475 df_defs_record (collection_rec, XEXP (note, 0), bb,
3476 insn_info, flags);
  /* A CLOBBER of a non-register (e.g. a MEM) only contributes
     uses of the registers inside it.  */
3478 else
3479 df_uses_record (DF_REF_REGULAR, collection_rec, &XEXP (note, 0),
3480 DF_REF_REG_USE, bb, insn_info, flags, -1, -1,
3481 VOIDmode);
3485 /* The stack ptr is used (honorarily) by a CALL insn. */
3486 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
3487 NULL, bb, insn_info, DF_REF_REG_USE,
3488 DF_REF_CALL_STACK_USAGE | flags,
3489 -1, -1, VOIDmode);
3491 /* Calls may also reference any of the global registers,
3492 so they are recorded as used. */
3493 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3494 if (global_regs[i])
3496 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3497 NULL, bb, insn_info, DF_REF_REG_USE, flags, -1, -1,
3498 VOIDmode);
3499 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3500 NULL, bb, insn_info, DF_REF_REG_DEF, flags, -1, -1,
3501 VOIDmode);
  /* Every call-clobbered register gets a may-clobber def, except
     globals, regs already defined by the call, and (for sibling
     calls) regs that are live into the exit block and not mentioned
     in the return value.  */
3504 is_sibling_call = SIBLING_CALL_P (insn_info->insn);
3505 EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, ui, bi)
3507 if (!global_regs[ui]
3508 && (!bitmap_bit_p (defs_generated, ui))
3509 && (!is_sibling_call
3510 || !bitmap_bit_p (df->exit_block_uses, ui)
3511 || refers_to_regno_p (ui, ui+1,
3512 crtl->return_rtx, NULL)))
3513 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[ui],
3514 NULL, bb, insn_info, DF_REF_REG_DEF,
3515 DF_REF_MAY_CLOBBER | flags,
3516 -1, -1, VOIDmode);
3519 BITMAP_FREE (defs_generated);
3520 return;
3523 /* Collect all refs in the INSN. This function is free of any
3524 side-effect - it will create and return a lists of df_ref's in the
3525 COLLECTION_REC without putting those refs into existing ref chains
3526 and reg chains. */
3528 static void
3529 df_insn_refs_collect (struct df_collection_rec* collection_rec,
3530 basic_block bb, struct df_insn_info *insn_info)
3532 rtx note;
3533 bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);
3535 /* Clear out the collection record. */
3536 VEC_truncate (df_ref, collection_rec->def_vec, 0);
3537 VEC_truncate (df_ref, collection_rec->use_vec, 0);
3538 VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
3539 VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);
3541 /* Record register defs. */
3542 df_defs_record (collection_rec, PATTERN (insn_info->insn), bb, insn_info, 0);
3544 /* Process REG_EQUIV/REG_EQUAL notes. */
3545 for (note = REG_NOTES (insn_info->insn); note;
3546 note = XEXP (note, 1))
3548 switch (REG_NOTE_KIND (note))
3550 case REG_EQUIV:
3551 case REG_EQUAL:
3552 df_uses_record (DF_REF_REGULAR, collection_rec,
3553 &XEXP (note, 0), DF_REF_REG_USE,
3554 bb, insn_info, DF_REF_IN_NOTE, -1, -1, VOIDmode);
3555 break;
3556 case REG_NON_LOCAL_GOTO:
3557 /* The frame ptr is used by a non-local goto. */
3558 df_ref_record (DF_REF_BASE, collection_rec,
3559 regno_reg_rtx[FRAME_POINTER_REGNUM],
3560 NULL, bb, insn_info,
3561 DF_REF_REG_USE, 0, -1, -1, VOIDmode);
3562 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3563 df_ref_record (DF_REF_BASE, collection_rec,
3564 regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
3565 NULL, bb, insn_info,
3566 DF_REF_REG_USE, 0, -1, -1, VOIDmode);
3567 #endif
3568 break;
3569 default:
3570 break;
3574 if (CALL_P (insn_info->insn))
3575 df_get_call_refs (collection_rec, bb, insn_info,
3576 (is_cond_exec) ? DF_REF_CONDITIONAL : 0);
3578 /* Record the register uses. */
3579 df_uses_record (DF_REF_REGULAR, collection_rec,
3580 &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0,
3581 -1, -1, VOIDmode);
3583 /* DF_REF_CONDITIONAL needs corresponding USES. */
3584 if (is_cond_exec)
3585 df_get_conditional_uses (collection_rec);
3587 df_canonize_collection_rec (collection_rec);
3590 /* Recompute the luids for the insns in BB. */
3592 void
3593 df_recompute_luids (basic_block bb)
3595 rtx insn;
3596 int luid = 0;
3598 df_grow_insn_info ();
3600 /* Scan the block an insn at a time from beginning to end. */
3601 FOR_BB_INSNS (bb, insn)
3603 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3604 /* Inserting labels does not always trigger the incremental
3605 rescanning. */
3606 if (!insn_info)
3608 gcc_assert (!INSN_P (insn));
3609 insn_info = df_insn_create_insn_record (insn);
3612 DF_INSN_INFO_LUID (insn_info) = luid;
3613 if (INSN_P (insn))
3614 luid++;
3619 /* Collect all artificial refs at the block level for BB and add them
3620 to COLLECTION_REC. */
3622 static void
3623 df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
3625 VEC_truncate (df_ref, collection_rec->def_vec, 0);
3626 VEC_truncate (df_ref, collection_rec->use_vec, 0);
3627 VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
3628 VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);
3630 if (bb->index == ENTRY_BLOCK)
3632 df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
3633 return;
3635 else if (bb->index == EXIT_BLOCK)
3637 df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
3638 return;
3641 #ifdef EH_RETURN_DATA_REGNO
3642 if (bb_has_eh_pred (bb))
3644 unsigned int i;
3645 /* Mark the registers that will contain data for the handler. */
3646 for (i = 0; ; ++i)
3648 unsigned regno = EH_RETURN_DATA_REGNO (i);
3649 if (regno == INVALID_REGNUM)
3650 break;
3651 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3652 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1,
3653 VOIDmode);
3656 #endif
3658 /* Add the hard_frame_pointer if this block is the target of a
3659 non-local goto. */
3660 if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
3661 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
3662 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1, VOIDmode);
3664 /* Add the artificial uses. */
3665 if (bb->index >= NUM_FIXED_BLOCKS)
3667 bitmap_iterator bi;
3668 unsigned int regno;
3669 bitmap au = bb_has_eh_pred (bb)
3670 ? df->eh_block_artificial_uses
3671 : df->regular_block_artificial_uses;
3673 EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
3675 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3676 bb, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);
3680 df_canonize_collection_rec (collection_rec);
3684 /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS. */
3686 void
3687 df_bb_refs_record (int bb_index, bool scan_insns)
3689 basic_block bb = BASIC_BLOCK (bb_index);
3690 rtx insn;
3691 int luid = 0;
3692 struct df_scan_bb_info *bb_info;
3693 struct df_collection_rec collection_rec;
3695 if (!df)
3696 return;
3698 bb_info = df_scan_get_bb_info (bb_index);
3700 /* Need to make sure that there is a record in the basic block info. */
3701 if (!bb_info)
3703 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
3704 df_scan_set_bb_info (bb_index, bb_info);
3705 bb_info->artificial_defs = NULL;
3706 bb_info->artificial_uses = NULL;
3709 collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
3710 collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
3711 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
3712 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
3714 if (scan_insns)
3715 /* Scan the block an insn at a time from beginning to end. */
3716 FOR_BB_INSNS (bb, insn)
3718 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3719 gcc_assert (!insn_info);
3721 insn_info = df_insn_create_insn_record (insn);
3722 if (INSN_P (insn))
3724 /* Record refs within INSN. */
3725 DF_INSN_INFO_LUID (insn_info) = luid++;
3726 df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
3727 df_refs_add_to_chains (&collection_rec, bb, insn);
3729 DF_INSN_INFO_LUID (insn_info) = luid;
3732 /* Other block level artificial refs */
3733 df_bb_refs_collect (&collection_rec, bb);
3734 df_refs_add_to_chains (&collection_rec, bb, NULL);
3736 VEC_free (df_ref, stack, collection_rec.def_vec);
3737 VEC_free (df_ref, stack, collection_rec.use_vec);
3738 VEC_free (df_ref, stack, collection_rec.eq_use_vec);
3739 VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
3741 /* Now that the block has been processed, set the block as dirty so
3742 LR and LIVE will get it processed. */
3743 df_set_bb_dirty (bb);
3747 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3748 block. */
3750 static void
3751 df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
3753 #ifdef EH_USES
3754 unsigned int i;
3755 #endif
3757 bitmap_clear (regular_block_artificial_uses);
3759 if (reload_completed)
3761 if (frame_pointer_needed)
3762 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3764 else
3765 /* Before reload, there are a few registers that must be forced
3766 live everywhere -- which might not already be the case for
3767 blocks within infinite loops. */
3769 /* Any reference to any pseudo before reload is a potential
3770 reference of the frame pointer. */
3771 bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
3773 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3774 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3775 #endif
3777 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3778 /* Pseudos with argument area equivalences may require
3779 reloading via the argument pointer. */
3780 if (fixed_regs[ARG_POINTER_REGNUM])
3781 bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
3782 #endif
3784 /* Any constant, or pseudo with constant equivalences, may
3785 require reloading from memory using the pic register. */
3786 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3787 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3788 bitmap_set_bit (regular_block_artificial_uses, PIC_OFFSET_TABLE_REGNUM);
3790 /* The all-important stack pointer must always be live. */
3791 bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
3793 #ifdef EH_USES
3794 /* EH_USES registers are used:
3795 1) at all insns that might throw (calls or with -fnon-call-exceptions
3796 trapping insns)
3797 2) in all EH edges
3798 3) to support backtraces and/or debugging, anywhere between their
3799 initialization and where they the saved registers are restored
3800 from them, including the cases where we don't reach the epilogue
3801 (noreturn call or infinite loop). */
3802 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3803 if (EH_USES (i))
3804 bitmap_set_bit (regular_block_artificial_uses, i);
3805 #endif
3809 /* Get the artificial use set for an eh block. */
3811 static void
3812 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3814 bitmap_clear (eh_block_artificial_uses);
3816 /* The following code (down thru the arg_pointer setting APPEARS
3817 to be necessary because there is nothing that actually
3818 describes what the exception handling code may actually need
3819 to keep alive. */
3820 if (reload_completed)
3822 if (frame_pointer_needed)
3824 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3825 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3826 bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3827 #endif
3829 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3830 if (fixed_regs[ARG_POINTER_REGNUM])
3831 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3832 #endif
3838 /*----------------------------------------------------------------------------
3839 Specialized hard register scanning functions.
3840 ----------------------------------------------------------------------------*/
3843 /* Mark a register in SET. Hard registers in large modes get all
3844 of their component registers set as well. */
3846 static void
3847 df_mark_reg (rtx reg, void *vset)
3849 bitmap set = (bitmap) vset;
3850 int regno = REGNO (reg);
3852 gcc_assert (GET_MODE (reg) != BLKmode);
3854 bitmap_set_bit (set, regno);
3855 if (regno < FIRST_PSEUDO_REGISTER)
3857 int n = hard_regno_nregs[regno][GET_MODE (reg)];
3858 while (--n > 0)
3859 bitmap_set_bit (set, regno + n);
3864 /* Set the bit for regs that are considered being defined at the entry. */
3866 static void
3867 df_get_entry_block_def_set (bitmap entry_block_defs)
3869 rtx r;
3870 int i;
3872 bitmap_clear (entry_block_defs);
3874 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3876 if (FUNCTION_ARG_REGNO_P (i))
3877 #ifdef INCOMING_REGNO
3878 bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
3879 #else
3880 bitmap_set_bit (entry_block_defs, i);
3881 #endif
3884 /* The always important stack pointer. */
3885 bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
3887 /* Once the prologue has been generated, all of these registers
3888 should just show up in the first regular block. */
3889 if (HAVE_prologue && epilogue_completed)
3891 /* Defs for the callee saved registers are inserted so that the
3892 pushes have some defining location. */
3893 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3894 if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
3895 bitmap_set_bit (entry_block_defs, i);
3898 r = targetm.calls.struct_value_rtx (current_function_decl, true);
3899 if (r && REG_P (r))
3900 bitmap_set_bit (entry_block_defs, REGNO (r));
3902 /* If the function has an incoming STATIC_CHAIN, it has to show up
3903 in the entry def set. */
3904 r = targetm.calls.static_chain (current_function_decl, true);
3905 if (r && REG_P (r))
3906 bitmap_set_bit (entry_block_defs, REGNO (r));
3908 if ((!reload_completed) || frame_pointer_needed)
3910 /* Any reference to any pseudo before reload is a potential
3911 reference of the frame pointer. */
3912 bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
3913 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3914 /* If they are different, also mark the hard frame pointer as live. */
3915 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3916 bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
3917 #endif
3920 /* These registers are live everywhere. */
3921 if (!reload_completed)
3923 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3924 /* Pseudos with argument area equivalences may require
3925 reloading via the argument pointer. */
3926 if (fixed_regs[ARG_POINTER_REGNUM])
3927 bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
3928 #endif
3930 #ifdef PIC_OFFSET_TABLE_REGNUM
3931 /* Any constant, or pseudo with constant equivalences, may
3932 require reloading from memory using the pic register. */
3933 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3934 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3935 bitmap_set_bit (entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
3936 #endif
3939 #ifdef INCOMING_RETURN_ADDR_RTX
3940 if (REG_P (INCOMING_RETURN_ADDR_RTX))
3941 bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
3942 #endif
3944 targetm.live_on_entry (entry_block_defs);
3948 /* Return the (conservative) set of hard registers that are defined on
3949 entry to the function.
3950 It uses df->entry_block_defs to determine which register
3951 reference to include. */
3953 static void
3954 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3955 bitmap entry_block_defs)
3957 unsigned int i;
3958 bitmap_iterator bi;
3960 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3962 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3963 ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0, -1, -1,
3964 VOIDmode);
3967 df_canonize_collection_rec (collection_rec);
3971 /* Record the (conservative) set of hard registers that are defined on
3972 entry to the function. */
3974 static void
3975 df_record_entry_block_defs (bitmap entry_block_defs)
3977 struct df_collection_rec collection_rec;
3978 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3979 collection_rec.def_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
3980 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3982 /* Process bb_refs chain */
3983 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
3984 VEC_free (df_ref, stack, collection_rec.def_vec);
3988 /* Update the defs in the entry block. */
3990 void
3991 df_update_entry_block_defs (void)
3993 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
3994 bool changed = false;
3996 df_get_entry_block_def_set (refs);
3997 if (df->entry_block_defs)
3999 if (!bitmap_equal_p (df->entry_block_defs, refs))
4001 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
4002 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
4003 df_ref_chain_delete (bb_info->artificial_defs);
4004 bb_info->artificial_defs = NULL;
4005 changed = true;
4008 else
4010 struct df_scan_problem_data *problem_data
4011 = (struct df_scan_problem_data *) df_scan->problem_data;
4012 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
4013 changed = true;
4016 if (changed)
4018 df_record_entry_block_defs (refs);
4019 bitmap_copy (df->entry_block_defs, refs);
4020 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
4022 BITMAP_FREE (refs);
4026 /* Set the bit for regs that are considered being used at the exit. */
4028 static void
4029 df_get_exit_block_use_set (bitmap exit_block_uses)
4031 unsigned int i;
4033 bitmap_clear (exit_block_uses);
4035 /* Stack pointer is always live at the exit. */
4036 bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
4038 /* Mark the frame pointer if needed at the end of the function.
4039 If we end up eliminating it, it will be removed from the live
4040 list of each basic block by reload. */
4042 if ((!reload_completed) || frame_pointer_needed)
4044 bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
4045 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4046 /* If they are different, also mark the hard frame pointer as live. */
4047 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
4048 bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
4049 #endif
4052 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
4053 /* Many architectures have a GP register even without flag_pic.
4054 Assume the pic register is not in use, or will be handled by
4055 other means, if it is not fixed. */
4056 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4057 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
4058 bitmap_set_bit (exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
4059 #endif
4061 /* Mark all global registers, and all registers used by the
4062 epilogue as being live at the end of the function since they
4063 may be referenced by our caller. */
4064 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4065 if (global_regs[i] || EPILOGUE_USES (i))
4066 bitmap_set_bit (exit_block_uses, i);
4068 if (HAVE_epilogue && epilogue_completed)
4070 /* Mark all call-saved registers that we actually used. */
4071 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4072 if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
4073 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
4074 bitmap_set_bit (exit_block_uses, i);
4077 #ifdef EH_RETURN_DATA_REGNO
4078 /* Mark the registers that will contain data for the handler. */
4079 if (reload_completed && crtl->calls_eh_return)
4080 for (i = 0; ; ++i)
4082 unsigned regno = EH_RETURN_DATA_REGNO (i);
4083 if (regno == INVALID_REGNUM)
4084 break;
4085 bitmap_set_bit (exit_block_uses, regno);
4087 #endif
4089 #ifdef EH_RETURN_STACKADJ_RTX
4090 if ((!HAVE_epilogue || ! epilogue_completed)
4091 && crtl->calls_eh_return)
4093 rtx tmp = EH_RETURN_STACKADJ_RTX;
4094 if (tmp && REG_P (tmp))
4095 df_mark_reg (tmp, exit_block_uses);
4097 #endif
4099 #ifdef EH_RETURN_HANDLER_RTX
4100 if ((!HAVE_epilogue || ! epilogue_completed)
4101 && crtl->calls_eh_return)
4103 rtx tmp = EH_RETURN_HANDLER_RTX;
4104 if (tmp && REG_P (tmp))
4105 df_mark_reg (tmp, exit_block_uses);
4107 #endif
4109 /* Mark function return value. */
4110 diddle_return_value (df_mark_reg, (void*) exit_block_uses);
4114 /* Return the refs of hard registers that are used in the exit block.
4115 It uses df->exit_block_uses to determine register to include. */
4117 static void
4118 df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
4120 unsigned int i;
4121 bitmap_iterator bi;
4123 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
4124 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
4125 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);
4127 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4128 /* It is deliberate that this is not put in the exit block uses but
4129 I do not know why. */
4130 if (reload_completed
4131 && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
4132 && bb_has_eh_pred (EXIT_BLOCK_PTR)
4133 && fixed_regs[ARG_POINTER_REGNUM])
4134 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
4135 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);
4136 #endif
4138 df_canonize_collection_rec (collection_rec);
4142 /* Record the set of hard registers that are used in the exit block.
4143 It uses df->exit_block_uses to determine which bit to include. */
4145 static void
4146 df_record_exit_block_uses (bitmap exit_block_uses)
4148 struct df_collection_rec collection_rec;
4149 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
4150 collection_rec.use_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
4152 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
4154 /* Process bb_refs chain */
4155 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
4156 VEC_free (df_ref, stack, collection_rec.use_vec);
4160 /* Update the uses in the exit block. */
4162 void
4163 df_update_exit_block_uses (void)
4165 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
4166 bool changed = false;
4168 df_get_exit_block_use_set (refs);
4169 if (df->exit_block_uses)
4171 if (!bitmap_equal_p (df->exit_block_uses, refs))
4173 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
4174 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
4175 df_ref_chain_delete (bb_info->artificial_uses);
4176 bb_info->artificial_uses = NULL;
4177 changed = true;
4180 else
4182 struct df_scan_problem_data *problem_data
4183 = (struct df_scan_problem_data *) df_scan->problem_data;
4184 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
4185 changed = true;
4188 if (changed)
4190 df_record_exit_block_uses (refs);
4191 bitmap_copy (df->exit_block_uses, refs);
4192 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
4194 BITMAP_FREE (refs);
4197 static bool initialized = false;
4200 /* Initialize some platform specific structures. */
4202 void
4203 df_hard_reg_init (void)
4205 #ifdef ELIMINABLE_REGS
4206 int i;
4207 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
4208 #endif
4209 if (initialized)
4210 return;
4212 /* Record which registers will be eliminated. We use this in
4213 mark_used_regs. */
4214 CLEAR_HARD_REG_SET (elim_reg_set);
4216 #ifdef ELIMINABLE_REGS
4217 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
4218 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
4219 #else
4220 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
4221 #endif
4223 initialized = true;
4227 /* Recompute the parts of scanning that are based on regs_ever_live
4228 because something changed in that array. */
4230 void
4231 df_update_entry_exit_and_calls (void)
4233 basic_block bb;
4235 df_update_entry_block_defs ();
4236 df_update_exit_block_uses ();
4238 /* The call insns need to be rescanned because there may be changes
4239 in the set of registers clobbered across the call. */
4240 FOR_EACH_BB (bb)
4242 rtx insn;
4243 FOR_BB_INSNS (bb, insn)
4245 if (INSN_P (insn) && CALL_P (insn))
4246 df_insn_rescan (insn);
4252 /* Return true if hard REG is actually used in the some instruction.
4253 There are a fair number of conditions that affect the setting of
4254 this array. See the comment in df.h for df->hard_regs_live_count
4255 for the conditions that this array is set. */
4257 bool
4258 df_hard_reg_used_p (unsigned int reg)
4260 gcc_assert (df);
4261 return df->hard_regs_live_count[reg] != 0;
4265 /* A count of the number of times REG is actually used in the some
4266 instruction. There are a fair number of conditions that affect the
4267 setting of this array. See the comment in df.h for
4268 df->hard_regs_live_count for the conditions that this array is
4269 set. */
4272 unsigned int
4273 df_hard_reg_used_count (unsigned int reg)
4275 gcc_assert (df);
4276 return df->hard_regs_live_count[reg];
4280 /* Get the value of regs_ever_live[REGNO]. */
4282 bool
4283 df_regs_ever_live_p (unsigned int regno)
4285 return regs_ever_live[regno];
4289 /* Set regs_ever_live[REGNO] to VALUE. If this cause regs_ever_live
4290 to change, schedule that change for the next update. */
4292 void
4293 df_set_regs_ever_live (unsigned int regno, bool value)
4295 if (regs_ever_live[regno] == value)
4296 return;
4298 regs_ever_live[regno] = value;
4299 if (df)
4300 df->redo_entry_and_exit = true;
4304 /* Compute "regs_ever_live" information from the underlying df
4305 information. Set the vector to all false if RESET. */
4307 void
4308 df_compute_regs_ever_live (bool reset)
4310 unsigned int i;
4311 bool changed = df->redo_entry_and_exit;
4313 if (reset)
4314 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4316 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4317 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
4319 regs_ever_live[i] = true;
4320 changed = true;
4322 if (changed)
4323 df_update_entry_exit_and_calls ();
4324 df->redo_entry_and_exit = false;
4328 /*----------------------------------------------------------------------------
4329 Dataflow ref information verification functions.
4331 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
4332 df_reg_chain_verify_unmarked (refs)
4333 df_refs_verify (VEC(stack,df_ref)*, ref*, bool)
4334 df_mws_verify (mw*, mw*, bool)
4335 df_insn_refs_verify (collection_rec, bb, insn, bool)
4336 df_bb_refs_verify (bb, refs, bool)
4337 df_bb_verify (bb)
4338 df_exit_block_bitmap_verify (bool)
4339 df_entry_block_bitmap_verify (bool)
4340 df_scan_verify ()
4341 ----------------------------------------------------------------------------*/
4344 /* Mark all refs in the reg chain. Verify that all of the registers
4345 are in the correct chain. */
4347 static unsigned int
4348 df_reg_chain_mark (df_ref refs, unsigned int regno,
4349 bool is_def, bool is_eq_use)
4351 unsigned int count = 0;
4352 df_ref ref;
4353 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4355 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4357 /* If there are no def-use or use-def chains, make sure that all
4358 of the chains are clear. */
4359 if (!df_chain)
4360 gcc_assert (!DF_REF_CHAIN (ref));
4362 /* Check to make sure the ref is in the correct chain. */
4363 gcc_assert (DF_REF_REGNO (ref) == regno);
4364 if (is_def)
4365 gcc_assert (DF_REF_REG_DEF_P (ref));
4366 else
4367 gcc_assert (!DF_REF_REG_DEF_P (ref));
4369 if (is_eq_use)
4370 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
4371 else
4372 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
4374 if (DF_REF_NEXT_REG (ref))
4375 gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
4376 count++;
4377 DF_REF_REG_MARK (ref);
4379 return count;
4383 /* Verify that all of the registers in the chain are unmarked. */
4385 static void
4386 df_reg_chain_verify_unmarked (df_ref refs)
4388 df_ref ref;
4389 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4390 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4394 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4396 static bool
4397 df_refs_verify (VEC(df_ref,stack) *new_rec, df_ref *old_rec,
4398 bool abort_if_fail)
4400 unsigned int ix;
4401 df_ref new_ref;
4403 for (ix = 0; VEC_iterate (df_ref, new_rec, ix, new_ref); ++ix)
4405 if (*old_rec == NULL || !df_ref_equal_p (new_ref, *old_rec))
4407 if (abort_if_fail)
4408 gcc_assert (0);
4409 else
4410 return false;
4413 /* Abort if fail is called from the function level verifier. If
4414 that is the context, mark this reg as being seem. */
4415 if (abort_if_fail)
4417 gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
4418 DF_REF_REG_UNMARK (*old_rec);
4421 old_rec++;
4424 if (abort_if_fail)
4425 gcc_assert (*old_rec == NULL);
4426 else
4427 return *old_rec == NULL;
4428 return false;
4432 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4434 static bool
4435 df_mws_verify (VEC(df_mw_hardreg_ptr,stack) *new_rec,
4436 struct df_mw_hardreg **old_rec,
4437 bool abort_if_fail)
4439 unsigned int ix;
4440 struct df_mw_hardreg *new_reg;
4442 for (ix = 0; VEC_iterate (df_mw_hardreg_ptr, new_rec, ix, new_reg); ++ix)
4444 if (*old_rec == NULL || !df_mw_equal_p (new_reg, *old_rec))
4446 if (abort_if_fail)
4447 gcc_assert (0);
4448 else
4449 return false;
4451 old_rec++;
4454 if (abort_if_fail)
4455 gcc_assert (*old_rec == NULL);
4456 else
4457 return *old_rec == NULL;
4458 return false;
4462 /* Return true if the existing insn refs information is complete and
4463 correct. Otherwise (i.e. if there's any missing or extra refs),
4464 return the correct df_ref chain in REFS_RETURN.
4466 If ABORT_IF_FAIL, leave the refs that are verified (already in the
4467 ref chain) as DF_REF_MARKED(). If it's false, then it's a per-insn
4468 verification mode instead of the whole function, so unmark
4469 everything.
4471 If ABORT_IF_FAIL is set, this function never returns false. */
4473 static bool
4474 df_insn_refs_verify (struct df_collection_rec *collection_rec,
4475 basic_block bb,
4476 rtx insn,
4477 bool abort_if_fail)
4479 bool ret1, ret2, ret3, ret4;
4480 unsigned int uid = INSN_UID (insn);
4481 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
4483 df_insn_refs_collect (collection_rec, bb, insn_info);
4485 if (!DF_INSN_UID_DEFS (uid))
4487 /* The insn_rec was created but it was never filled out. */
4488 if (abort_if_fail)
4489 gcc_assert (0);
4490 else
4491 return false;
4494 /* Unfortunately we cannot opt out early if one of these is not
4495 right because the marks will not get cleared. */
4496 ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
4497 abort_if_fail);
4498 ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
4499 abort_if_fail);
4500 ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
4501 abort_if_fail);
4502 ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
4503 abort_if_fail);
4504 return (ret1 && ret2 && ret3 && ret4);
4508 /* Return true if all refs in the basic block are correct and complete.
4509 Due to df_ref_chain_verify, it will cause all refs
4510 that are verified to have DF_REF_MARK bit set. */
4512 static bool
4513 df_bb_verify (basic_block bb)
4515 rtx insn;
4516 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
4517 struct df_collection_rec collection_rec;
4519 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
4520 collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
4521 collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
4522 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
4523 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
4525 gcc_assert (bb_info);
4527 /* Scan the block, one insn at a time, from beginning to end. */
4528 FOR_BB_INSNS_REVERSE (bb, insn)
4530 if (!INSN_P (insn))
4531 continue;
4532 df_insn_refs_verify (&collection_rec, bb, insn, true);
4533 df_free_collection_rec (&collection_rec);
4536 /* Do the artificial defs and uses. */
4537 df_bb_refs_collect (&collection_rec, bb);
4538 df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
4539 df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
4540 df_free_collection_rec (&collection_rec);
4542 return true;
4546 /* Returns true if the entry block has correct and complete df_ref set.
4547 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4549 static bool
4550 df_entry_block_bitmap_verify (bool abort_if_fail)
4552 bitmap entry_block_defs = BITMAP_ALLOC (&df_bitmap_obstack);
4553 bool is_eq;
4555 df_get_entry_block_def_set (entry_block_defs);
4557 is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);
4559 if (!is_eq && abort_if_fail)
4561 print_current_pass (stderr);
4562 fprintf (stderr, "entry_block_defs = ");
4563 df_print_regset (stderr, entry_block_defs);
4564 fprintf (stderr, "df->entry_block_defs = ");
4565 df_print_regset (stderr, df->entry_block_defs);
4566 gcc_assert (0);
4569 BITMAP_FREE (entry_block_defs);
4571 return is_eq;
4575 /* Returns true if the exit block has correct and complete df_ref set.
4576 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4578 static bool
4579 df_exit_block_bitmap_verify (bool abort_if_fail)
4581 bitmap exit_block_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4582 bool is_eq;
4584 df_get_exit_block_use_set (exit_block_uses);
4586 is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);
4588 if (!is_eq && abort_if_fail)
4590 print_current_pass (stderr);
4591 fprintf (stderr, "exit_block_uses = ");
4592 df_print_regset (stderr, exit_block_uses);
4593 fprintf (stderr, "df->exit_block_uses = ");
4594 df_print_regset (stderr, df->exit_block_uses);
4595 gcc_assert (0);
4598 BITMAP_FREE (exit_block_uses);
4600 return is_eq;
4604 /* Return true if df_ref information for all insns in all blocks are
4605 correct and complete. */
4607 void
4608 df_scan_verify (void)
4610 unsigned int i;
4611 basic_block bb;
4612 bitmap regular_block_artificial_uses;
4613 bitmap eh_block_artificial_uses;
4615 if (!df)
4616 return;
4618 /* Verification is a 4 step process. */
4620 /* (1) All of the refs are marked by going thru the reg chains. */
4621 for (i = 0; i < DF_REG_SIZE (df); i++)
4623 gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
4624 == DF_REG_DEF_COUNT(i));
4625 gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
4626 == DF_REG_USE_COUNT(i));
4627 gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
4628 == DF_REG_EQ_USE_COUNT(i));
4631 /* (2) There are various bitmaps whose value may change over the
4632 course of the compilation. This step recomputes them to make
4633 sure that they have not slipped out of date. */
4634 regular_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4635 eh_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4637 df_get_regular_block_artificial_uses (regular_block_artificial_uses);
4638 df_get_eh_block_artificial_uses (eh_block_artificial_uses);
4640 bitmap_ior_into (eh_block_artificial_uses,
4641 regular_block_artificial_uses);
4643 /* Check artificial_uses bitmaps didn't change. */
4644 gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
4645 df->regular_block_artificial_uses));
4646 gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
4647 df->eh_block_artificial_uses));
4649 BITMAP_FREE (regular_block_artificial_uses);
4650 BITMAP_FREE (eh_block_artificial_uses);
4652 /* Verify entry block and exit block. These only verify the bitmaps,
4653 the refs are verified in df_bb_verify. */
4654 df_entry_block_bitmap_verify (true);
4655 df_exit_block_bitmap_verify (true);
4657 /* (3) All of the insns in all of the blocks are traversed and the
4658 marks are cleared both in the artificial refs attached to the
4659 blocks and the real refs inside the insns. It is a failure to
4660 clear a mark that has not been set as this means that the ref in
4661 the block or insn was not in the reg chain. */
4663 FOR_ALL_BB (bb)
4664 df_bb_verify (bb);
4666 /* (4) See if all reg chains are traversed a second time. This time
4667 a check is made that the marks are clear. A set mark would be a
4668 from a reg that is not in any insn or basic block. */
4670 for (i = 0; i < DF_REG_SIZE (df); i++)
4672 df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
4673 df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
4674 df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));