1 /* Dataflow support routines.
2 Copyright (C) 1999, 2000, 2001 Free Software Foundation, Inc.
3 Contributed by Michael P. Hayes (m.hayes@elec.canterbury.ac.nz,
4 mhayes@redhat.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA.
24 OVERVIEW:
26 This file provides some dataflow routines for computing reaching defs,
27 upward exposed uses, live variables, def-use chains, and use-def
28 chains. The global dataflow is performed using simple iterative
29 methods with a worklist and could be sped up by ordering the blocks
30 with a depth first search order.
32 A `struct ref' data structure (ref) is allocated for every register
33 reference (def or use) and this records the insn and bb the ref is
34 found within. The refs are linked together in chains of uses and defs
35 for each insn and for each register. Each ref also has a chain field
36 that links all the use refs for a def or all the def refs for a use.
37 This is used to create use-def or def-use chains.
40 USAGE:
42 Here's an example of using the dataflow routines.
44 struct df *df;
46 df = df_init ();
48 df_analyse (df, 0, DF_ALL);
50 df_dump (df, DF_ALL, stderr);
52 df_finish (df);
55 df_init simply creates a poor man's object (df) that needs to be
56 passed to all the dataflow routines. df_finish destroys this
57 object and frees up any allocated memory.
59 df_analyse performs the following:
61 1. Records defs and uses by scanning the insns in each basic block
62 or by scanning the insns queued by df_insn_modify.
63 2. Links defs and uses into insn-def and insn-use chains.
64 3. Links defs and uses into reg-def and reg-use chains.
65 4. Assigns LUIDs to each insn (for modified blocks).
66 5. Calculates local reaching definitions.
67 6. Calculates global reaching definitions.
68 7. Creates use-def chains.
69 8. Calculates local reaching uses (upwards exposed uses).
70 9. Calculates global reaching uses.
71 10. Creates def-use chains.
72 11. Calculates local live registers.
73 12. Calculates global live registers.
74 13. Calculates register lifetimes and determines local registers.
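For example (a sketch, not part of the original notes): a pass that
only needs use-def chains can request just those, and df_analyse will
enable the prerequisite reaching-def computations (steps 5 and 6)
itself:

   df_analyse (df, 0, DF_UD_CHAIN);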
77 PHILOSOPHY:
79 Note that the dataflow information is not updated for every newly
80 deleted or created insn. If the dataflow information requires
81 updating then all the changed, new, or deleted insns need to be
82 marked with df_insn_modify (or df_insns_modify) either directly or
83 indirectly (say through calling df_insn_delete). df_insn_modify
84 marks all the modified insns to get processed the next time df_analyse
85 is called.
87 Beware that tinkering with insns may invalidate the dataflow information.
88 The philosophy behind these routines is that once the dataflow
89 information has been gathered, the user should store what they require
90 before they tinker with any insn. Once a reg is replaced, for example,
91 the reg-def/reg-use chains will point to the wrong place. Once a
92 whole lot of changes have been made, df_analyse can be called again
93 to update the dataflow information. Currently, this is not very smart
94 with regard to propagating changes to the dataflow so it should not
95 be called very often.
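A minimal sketch of the intended update cycle (assuming df_insn_modify
takes the df object, the insn's basic block, and the insn):

   df_analyse (df, 0, DF_ALL);
   ... record the refs of interest ...
   df_insn_modify (df, bb, insn);
   ... tinker with the insn's pattern ...
   df_analyse (df, 0, DF_ALL);  /* Reprocesses the queued insn. */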
98 DATA STRUCTURES:
100 The basic object is a REF (reference) and this may either be a DEF
101 (definition) or a USE of a register.
103 These are linked into a variety of lists; namely reg-def, reg-use,
104 insn-def, insn-use, def-use, and use-def lists. For example,
105 the reg-def lists contain all the refs that define a given register
106 while the insn-use lists contain all the refs used by an insn.
108 Note that the reg-def and reg-use chains are generally short (except for the
109 hard registers) and thus it is much faster to search these chains
110 rather than searching the def or use bitmaps.
112 If the insns are in SSA form then the reg-def and use-def lists
113 should only contain the single defining ref.
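As an illustration, here is a sketch of walking every def of register
REGNO and every use reached by each def, via the reg-def and def-use
chains (process_use is a hypothetical consumer):

   struct df_link *link;

   for (link = df->regs[regno].defs; link; link = link->next)
     {
       struct ref *def = link->ref;
       struct df_link *use_link;

       for (use_link = DF_REF_CHAIN (def); use_link;
            use_link = use_link->next)
         process_use (use_link->ref);
     }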
115 TODO:
117 1) Incremental dataflow analysis.
119 Note that if a loop invariant insn is hoisted (or sunk), we do not
120 need to change the def-use or use-def chains. All we have to do is to
121 change the bb field for all the associated defs and uses and to
122 renumber the LUIDs for the original and new basic blocks of the insn.
124 When shadowing loop mems we create new uses and defs for new pseudos
125 so we do not affect the existing dataflow information.
127 My current strategy is to queue up all modified, created, or deleted
128 insns so when df_analyse is called we can easily determine all the new
129 or deleted refs. Currently the global dataflow information is
130 recomputed from scratch but this could be propagated more efficiently.
132 2) Improved global data flow computation using depth first search.
134 3) Reduced memory requirements.
136 We could operate a pool of ref structures. When a ref is deleted it
137 gets returned to the pool (say by linking on to a chain of free refs).
138 This will require a pair of bitmaps for defs and uses so that we can
139 tell which ones have been changed. Alternatively, we could
140 periodically squeeze the def and use tables and associated bitmaps and
141 renumber the def and use ids.
143 4) Ordering of reg-def and reg-use lists.
145 Should the first entry in the def list be the first def (within a BB)?
146 Similarly, should the first entry in the use list be the last use
147 (within a BB)?
149 5) Working with a sub-CFG.
151 Often the whole CFG does not need to be analysed; for example,
152 when optimising a loop, only certain registers are of interest.
153 Perhaps there should be a bitmap argument to df_analyse to specify
154 which registers should be analysed? A sub-CFG sketch follows. */
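/* For instance, a sketch of restricting analysis to a sub-CFG by passing
   a bitmap of block indices to df_analyse (zero means all blocks); the
   loop membership predicate bb_in_loop_p is hypothetical:

      bitmap blocks = BITMAP_XMALLOC ();

      for (i = 0; i < n_basic_blocks; i++)
        if (bb_in_loop_p (i))
          bitmap_set_bit (blocks, i);

      df_analyse (df, blocks, DF_RD | DF_RD_CHAIN);
      ...
      BITMAP_XFREE (blocks);  */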
156 #define HANDLE_SUBREG
158 #include "config.h"
159 #include "system.h"
160 #include "rtl.h"
161 #include "tm_p.h"
162 #include "insn-config.h"
163 #include "recog.h"
164 #include "function.h"
165 #include "regs.h"
166 #include "obstack.h"
167 #include "hard-reg-set.h"
168 #include "basic-block.h"
169 #include "sbitmap.h"
170 #include "bitmap.h"
171 #include "df.h"
172 #include "fibheap.h"
174 #define FOR_ALL_BBS(BB, CODE) \
175 do { \
176 int node_; \
177 for (node_ = 0; node_ < n_basic_blocks; node_++) \
178 {(BB) = BASIC_BLOCK (node_); CODE;};} while (0)
180 #define FOR_EACH_BB_IN_BITMAP(BITMAP, MIN, BB, CODE) \
181 do { \
182 unsigned int node_; \
183 EXECUTE_IF_SET_IN_BITMAP (BITMAP, MIN, node_, \
184 {(BB) = BASIC_BLOCK (node_); CODE;});} while (0)
186 #define FOR_EACH_BB_IN_BITMAP_REV(BITMAP, MIN, BB, CODE) \
187 do { \
188 unsigned int node_; \
189 EXECUTE_IF_SET_IN_BITMAP_REV (BITMAP, node_, \
190 {(BB) = BASIC_BLOCK (node_); CODE;});} while (0)
192 #define FOR_EACH_BB_IN_SBITMAP(BITMAP, MIN, BB, CODE) \
193 do { \
194 unsigned int node_; \
195 EXECUTE_IF_SET_IN_SBITMAP (BITMAP, MIN, node_, \
196 {(BB) = BASIC_BLOCK (node_); CODE;});} while (0)
198 #define obstack_chunk_alloc xmalloc
199 #define obstack_chunk_free free
201 static struct obstack df_ref_obstack;
202 static struct df *ddf;
204 static void df_reg_table_realloc PARAMS((struct df *, int));
205 #if 0
206 static void df_def_table_realloc PARAMS((struct df *, int));
207 #endif
208 static void df_insn_table_realloc PARAMS((struct df *, int));
209 static void df_bitmaps_alloc PARAMS((struct df *, int));
210 static void df_bitmaps_free PARAMS((struct df *, int));
211 static void df_free PARAMS((struct df *));
212 static void df_alloc PARAMS((struct df *, int));
214 static rtx df_reg_clobber_gen PARAMS((unsigned int));
215 static rtx df_reg_use_gen PARAMS((unsigned int));
217 static inline struct df_link *df_link_create PARAMS((struct ref *,
218 struct df_link *));
219 static struct df_link *df_ref_unlink PARAMS((struct df_link **, struct ref *));
220 static void df_def_unlink PARAMS((struct df *, struct ref *));
221 static void df_use_unlink PARAMS((struct df *, struct ref *));
222 static void df_insn_refs_unlink PARAMS ((struct df *, basic_block, rtx));
223 #if 0
224 static void df_bb_refs_unlink PARAMS ((struct df *, basic_block));
225 static void df_refs_unlink PARAMS ((struct df *, bitmap));
226 #endif
228 static struct ref *df_ref_create PARAMS((struct df *,
229 rtx, rtx *, basic_block, rtx,
230 enum df_ref_type, enum df_ref_flags));
231 static void df_ref_record_1 PARAMS((struct df *, rtx, rtx *,
232 basic_block, rtx, enum df_ref_type,
233 enum df_ref_flags));
234 static void df_ref_record PARAMS((struct df *, rtx, rtx *,
235 basic_block bb, rtx, enum df_ref_type,
236 enum df_ref_flags));
237 static void df_def_record_1 PARAMS((struct df *, rtx, basic_block, rtx));
238 static void df_defs_record PARAMS((struct df *, rtx, basic_block, rtx));
239 static void df_uses_record PARAMS((struct df *, rtx *,
240 enum df_ref_type, basic_block, rtx,
241 enum df_ref_flags));
242 static void df_insn_refs_record PARAMS((struct df *, basic_block, rtx));
243 static void df_bb_refs_record PARAMS((struct df *, basic_block));
244 static void df_refs_record PARAMS((struct df *, bitmap));
246 static void df_bb_reg_def_chain_create PARAMS((struct df *, basic_block));
247 static void df_reg_def_chain_create PARAMS((struct df *, bitmap));
248 static void df_bb_reg_use_chain_create PARAMS((struct df *, basic_block));
249 static void df_reg_use_chain_create PARAMS((struct df *, bitmap));
250 static void df_bb_du_chain_create PARAMS((struct df *, basic_block, bitmap));
251 static void df_du_chain_create PARAMS((struct df *, bitmap));
252 static void df_bb_ud_chain_create PARAMS((struct df *, basic_block));
253 static void df_ud_chain_create PARAMS((struct df *, bitmap));
254 static void df_bb_rd_local_compute PARAMS((struct df *, basic_block));
255 static void df_rd_local_compute PARAMS((struct df *, bitmap));
256 static void df_bb_ru_local_compute PARAMS((struct df *, basic_block));
257 static void df_ru_local_compute PARAMS((struct df *, bitmap));
258 static void df_bb_lr_local_compute PARAMS((struct df *, basic_block));
259 static void df_lr_local_compute PARAMS((struct df *, bitmap));
260 static void df_bb_reg_info_compute PARAMS((struct df *, basic_block, bitmap));
261 static void df_reg_info_compute PARAMS((struct df *, bitmap));
263 static int df_bb_luids_set PARAMS((struct df *df, basic_block));
264 static int df_luids_set PARAMS((struct df *df, bitmap));
266 static int df_modified_p PARAMS ((struct df *, bitmap));
267 static int df_refs_queue PARAMS ((struct df *));
268 static int df_refs_process PARAMS ((struct df *));
269 static int df_bb_refs_update PARAMS ((struct df *, basic_block));
270 static int df_refs_update PARAMS ((struct df *));
271 static void df_analyse_1 PARAMS((struct df *, bitmap, int, int));
273 static void df_insns_modify PARAMS((struct df *, basic_block,
274 rtx, rtx));
275 static int df_rtx_mem_replace PARAMS ((rtx *, void *));
276 static int df_rtx_reg_replace PARAMS ((rtx *, void *));
277 void df_refs_reg_replace PARAMS ((struct df *, bitmap,
278 struct df_link *, rtx, rtx));
280 static int df_def_dominates_all_uses_p PARAMS((struct df *, struct ref *def));
281 static int df_def_dominates_uses_p PARAMS((struct df *,
282 struct ref *def, bitmap));
283 static struct ref *df_bb_regno_last_use_find PARAMS((struct df *, basic_block,
284 unsigned int));
285 static struct ref *df_bb_regno_first_def_find PARAMS((struct df *, basic_block,
286 unsigned int));
287 static struct ref *df_bb_insn_regno_last_use_find PARAMS((struct df *,
288 basic_block,
289 rtx, unsigned int));
290 static struct ref *df_bb_insn_regno_first_def_find PARAMS((struct df *,
291 basic_block,
292 rtx, unsigned int));
294 static void df_chain_dump PARAMS((struct df_link *, FILE *file));
295 static void df_chain_dump_regno PARAMS((struct df_link *, FILE *file));
296 static void df_regno_debug PARAMS ((struct df *, unsigned int, FILE *));
297 static void df_ref_debug PARAMS ((struct df *, struct ref *, FILE *));
298 static void df_rd_transfer_function PARAMS ((int, int *, bitmap, bitmap,
299 bitmap, bitmap, void *));
300 static void df_ru_transfer_function PARAMS ((int, int *, bitmap, bitmap,
301 bitmap, bitmap, void *));
302 static void df_lr_transfer_function PARAMS ((int, int *, bitmap, bitmap,
303 bitmap, bitmap, void *));
304 static void hybrid_search_bitmap PARAMS ((basic_block, bitmap *, bitmap *,
305 bitmap *, bitmap *, enum df_flow_dir,
306 enum df_confluence_op,
307 transfer_function_bitmap,
308 sbitmap, sbitmap, void *));
309 static void hybrid_search_sbitmap PARAMS ((basic_block, sbitmap *, sbitmap *,
310 sbitmap *, sbitmap *, enum df_flow_dir,
311 enum df_confluence_op,
312 transfer_function_sbitmap,
313 sbitmap, sbitmap, void *));
314 static inline bool read_modify_subreg_p PARAMS ((rtx));
317 /* Local memory allocation/deallocation routines. */
320 /* Increase the insn info table by SIZE more elements. */
321 static void
322 df_insn_table_realloc (df, size)
323 struct df *df;
324 int size;
326 /* Make table 25 percent larger by default. */
327 if (! size)
328 size = df->insn_size / 4;
330 size += df->insn_size;
332 df->insns = (struct insn_info *)
333 xrealloc (df->insns, size * sizeof (struct insn_info));
335 memset (df->insns + df->insn_size, 0,
336 (size - df->insn_size) * sizeof (struct insn_info));
338 df->insn_size = size;
340 if (! df->insns_modified)
342 df->insns_modified = BITMAP_XMALLOC ();
343 bitmap_zero (df->insns_modified);
348 /* Increase the reg info table by SIZE more elements. */
349 static void
350 df_reg_table_realloc (df, size)
351 struct df *df;
352 int size;
354 /* Make table 25 percent larger by default. */
355 if (! size)
356 size = df->reg_size / 4;
358 size += df->reg_size;
360 df->regs = (struct reg_info *)
361 xrealloc (df->regs, size * sizeof (struct reg_info));
363 /* Zero the new entries. */
364 memset (df->regs + df->reg_size, 0,
365 (size - df->reg_size) * sizeof (struct reg_info));
367 df->reg_size = size;
371 #if 0
372 /* Not currently used. */
373 static void
374 df_def_table_realloc (df, size)
375 struct df *df;
376 int size;
378 int i;
379 struct ref *refs;
381 /* Make table 25 percent larger by default. */
382 if (! size)
383 size = df->def_size / 4;
385 df->def_size += size;
386 df->defs = xrealloc (df->defs,
387 df->def_size * sizeof (*df->defs));
389 /* Allocate a new block of memory and link into list of blocks
390 that will need to be freed later. */
392 refs = xmalloc (size * sizeof (*refs));
394 /* Link all the new refs together, overloading the chain field. */
395 for (i = 0; i < size - 1; i++)
396 refs[i].chain = (struct df_link *)(refs + i + 1);
397 refs[size - 1].chain = 0;
399 #endif
403 /* Allocate bitmaps for each basic block. */
404 static void
405 df_bitmaps_alloc (df, flags)
406 struct df *df;
407 int flags;
409 unsigned int i;
410 int dflags = 0;
412 /* Free the bitmaps if they need resizing. */
413 if ((flags & DF_LR) && df->n_regs < (unsigned int)max_reg_num ())
414 dflags |= DF_LR | DF_RU;
415 if ((flags & DF_RU) && df->n_uses < df->use_id)
416 dflags |= DF_RU;
417 if ((flags & DF_RD) && df->n_defs < df->def_id)
418 dflags |= DF_RD;
420 if (dflags)
421 df_bitmaps_free (df, dflags);
423 df->n_defs = df->def_id;
424 df->n_uses = df->use_id;
426 for (i = 0; i < df->n_bbs; i++)
428 basic_block bb = BASIC_BLOCK (i);
429 struct bb_info *bb_info = DF_BB_INFO (df, bb);
431 if (flags & DF_RD && ! bb_info->rd_in)
433 /* Allocate bitmaps for reaching definitions. */
434 bb_info->rd_kill = BITMAP_XMALLOC ();
435 bitmap_zero (bb_info->rd_kill);
436 bb_info->rd_gen = BITMAP_XMALLOC ();
437 bitmap_zero (bb_info->rd_gen);
438 bb_info->rd_in = BITMAP_XMALLOC ();
439 bb_info->rd_out = BITMAP_XMALLOC ();
440 bb_info->rd_valid = 0;
443 if (flags & DF_RU && ! bb_info->ru_in)
445 /* Allocate bitmaps for upward exposed uses. */
446 bb_info->ru_kill = BITMAP_XMALLOC ();
447 bitmap_zero (bb_info->ru_kill);
448 /* Note the lack of symmetry. */
449 bb_info->ru_gen = BITMAP_XMALLOC ();
450 bitmap_zero (bb_info->ru_gen);
451 bb_info->ru_in = BITMAP_XMALLOC ();
452 bb_info->ru_out = BITMAP_XMALLOC ();
453 bb_info->ru_valid = 0;
456 if (flags & DF_LR && ! bb_info->lr_in)
458 /* Allocate bitmaps for live variables. */
459 bb_info->lr_def = BITMAP_XMALLOC ();
460 bitmap_zero (bb_info->lr_def);
461 bb_info->lr_use = BITMAP_XMALLOC ();
462 bitmap_zero (bb_info->lr_use);
463 bb_info->lr_in = BITMAP_XMALLOC ();
464 bb_info->lr_out = BITMAP_XMALLOC ();
465 bb_info->lr_valid = 0;
471 /* Free bitmaps for each basic block. */
472 static void
473 df_bitmaps_free (df, flags)
474 struct df *df ATTRIBUTE_UNUSED;
475 int flags;
477 unsigned int i;
479 for (i = 0; i < df->n_bbs; i++)
481 basic_block bb = BASIC_BLOCK (i);
482 struct bb_info *bb_info = DF_BB_INFO (df, bb);
484 if (!bb_info)
485 continue;
487 if ((flags & DF_RD) && bb_info->rd_in)
489 /* Free bitmaps for reaching definitions. */
490 BITMAP_XFREE (bb_info->rd_kill);
491 bb_info->rd_kill = NULL;
492 BITMAP_XFREE (bb_info->rd_gen);
493 bb_info->rd_gen = NULL;
494 BITMAP_XFREE (bb_info->rd_in);
495 bb_info->rd_in = NULL;
496 BITMAP_XFREE (bb_info->rd_out);
497 bb_info->rd_out = NULL;
500 if ((flags & DF_RU) && bb_info->ru_in)
502 /* Free bitmaps for upward exposed uses. */
503 BITMAP_XFREE (bb_info->ru_kill);
504 bb_info->ru_kill = NULL;
505 BITMAP_XFREE (bb_info->ru_gen);
506 bb_info->ru_gen = NULL;
507 BITMAP_XFREE (bb_info->ru_in);
508 bb_info->ru_in = NULL;
509 BITMAP_XFREE (bb_info->ru_out);
510 bb_info->ru_out = NULL;
513 if ((flags & DF_LR) && bb_info->lr_in)
515 /* Free bitmaps for live variables. */
516 BITMAP_XFREE (bb_info->lr_def);
517 bb_info->lr_def = NULL;
518 BITMAP_XFREE (bb_info->lr_use);
519 bb_info->lr_use = NULL;
520 BITMAP_XFREE (bb_info->lr_in);
521 bb_info->lr_in = NULL;
522 BITMAP_XFREE (bb_info->lr_out);
523 bb_info->lr_out = NULL;
526 df->flags &= ~(flags & (DF_RD | DF_RU | DF_LR));
530 /* Allocate and initialise dataflow memory. */
531 static void
532 df_alloc (df, n_regs)
533 struct df *df;
534 int n_regs;
536 int n_insns;
537 int i;
539 gcc_obstack_init (&df_ref_obstack);
541 /* Perhaps we should use LUIDs to save memory for the insn_refs
542 table. This is only a small saving; a few pointers. */
543 n_insns = get_max_uid () + 1;
545 df->def_id = 0;
546 df->n_defs = 0;
547 /* Approximate number of defs by number of insns. */
548 df->def_size = n_insns;
549 df->defs = xmalloc (df->def_size * sizeof (*df->defs));
551 df->use_id = 0;
552 df->n_uses = 0;
553 /* Approximate number of uses by twice number of insns. */
554 df->use_size = n_insns * 2;
555 df->uses = xmalloc (df->use_size * sizeof (*df->uses));
557 df->n_regs = n_regs;
558 df->n_bbs = n_basic_blocks;
560 /* Allocate temporary working array used during local dataflow analysis. */
561 df->reg_def_last = xmalloc (df->n_regs * sizeof (struct ref *));
563 df_insn_table_realloc (df, n_insns);
565 df_reg_table_realloc (df, df->n_regs);
567 df->bbs_modified = BITMAP_XMALLOC ();
568 bitmap_zero (df->bbs_modified);
570 df->flags = 0;
572 df->bbs = xcalloc (df->n_bbs, sizeof (struct bb_info));
574 df->all_blocks = BITMAP_XMALLOC ();
575 for (i = 0; i < n_basic_blocks; i++)
576 bitmap_set_bit (df->all_blocks, i);
580 /* Free all the dataflow info. */
581 static void
582 df_free (df)
583 struct df *df;
585 df_bitmaps_free (df, DF_ALL);
587 if (df->bbs)
588 free (df->bbs);
589 df->bbs = 0;
591 if (df->insns)
592 free (df->insns);
593 df->insns = 0;
594 df->insn_size = 0;
596 if (df->defs)
597 free (df->defs);
598 df->defs = 0;
599 df->def_size = 0;
600 df->def_id = 0;
602 if (df->uses)
603 free (df->uses);
604 df->uses = 0;
605 df->use_size = 0;
606 df->use_id = 0;
608 if (df->regs)
609 free (df->regs);
610 df->regs = 0;
611 df->reg_size = 0;
613 if (df->bbs_modified)
614 BITMAP_XFREE (df->bbs_modified);
615 df->bbs_modified = 0;
617 if (df->insns_modified)
618 BITMAP_XFREE (df->insns_modified);
619 df->insns_modified = 0;
621 BITMAP_XFREE (df->all_blocks);
622 df->all_blocks = 0;
624 obstack_free (&df_ref_obstack, NULL);
627 /* Local miscellaneous routines. */
629 /* Return a USE for register REGNO. */
630 static rtx df_reg_use_gen (regno)
631 unsigned int regno;
633 rtx reg;
634 rtx use;
636 reg = regno >= FIRST_PSEUDO_REGISTER
637 ? regno_reg_rtx[regno] : gen_rtx_REG (reg_raw_mode[regno], regno);
639 use = gen_rtx_USE (GET_MODE (reg), reg);
640 return use;
644 /* Return a CLOBBER for register REGNO. */
645 static rtx df_reg_clobber_gen (regno)
646 unsigned int regno;
648 rtx reg;
649 rtx use;
651 reg = regno >= FIRST_PSEUDO_REGISTER
652 ? regno_reg_rtx[regno] : gen_rtx_REG (reg_raw_mode[regno], regno);
654 use = gen_rtx_CLOBBER (GET_MODE (reg), reg);
655 return use;
658 /* Local chain manipulation routines. */
660 /* Create a link in a def-use or use-def chain. */
661 static inline struct df_link *
662 df_link_create (ref, next)
663 struct ref *ref;
664 struct df_link *next;
666 struct df_link *link;
668 link = (struct df_link *) obstack_alloc (&df_ref_obstack,
669 sizeof (*link));
670 link->next = next;
671 link->ref = ref;
672 return link;
676 /* Add REF to chain head pointed to by PHEAD. */
677 static struct df_link *
678 df_ref_unlink (phead, ref)
679 struct df_link **phead;
680 struct ref *ref;
682 struct df_link *link = *phead;
684 if (link)
686 if (! link->next)
688 /* Only a single ref. It must be the one we want.
689 If not, the def-use and use-def chains are likely to
690 be inconsistent. */
691 if (link->ref != ref)
692 abort ();
693 /* Now have an empty chain. */
694 *phead = NULL;
696 else
698 /* Multiple refs. One of them must be us. */
699 if (link->ref == ref)
700 *phead = link->next;
701 else
703 /* Follow chain. */
704 for (; link->next; link = link->next)
706 if (link->next->ref == ref)
708 /* Unlink from list. */
709 link->next = link->next->next;
710 return link->next;
716 return link;
720 /* Unlink REF from all def-use/use-def chains, etc. */
721 int
722 df_ref_remove (df, ref)
723 struct df *df;
724 struct ref *ref;
726 if (DF_REF_REG_DEF_P (ref))
728 df_def_unlink (df, ref);
729 df_ref_unlink (&df->insns[DF_REF_INSN_UID (ref)].defs, ref);
731 else
733 df_use_unlink (df, ref);
734 df_ref_unlink (&df->insns[DF_REF_INSN_UID (ref)].uses, ref);
736 return 1;
740 /* Unlink DEF from use-def and reg-def chains. */
741 static void
742 df_def_unlink (df, def)
743 struct df *df ATTRIBUTE_UNUSED;
744 struct ref *def;
746 struct df_link *du_link;
747 unsigned int dregno = DF_REF_REGNO (def);
749 /* Follow def-use chain to find all the uses of this def. */
750 for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
752 struct ref *use = du_link->ref;
754 /* Unlink this def from the use-def chain. */
755 df_ref_unlink (&DF_REF_CHAIN (use), def);
757 DF_REF_CHAIN (def) = 0;
759 /* Unlink def from reg-def chain. */
760 df_ref_unlink (&df->regs[dregno].defs, def);
762 df->defs[DF_REF_ID (def)] = 0;
766 /* Unlink use from def-use and reg-use chains. */
767 static void
768 df_use_unlink (df, use)
769 struct df *df ATTRIBUTE_UNUSED;
770 struct ref *use;
772 struct df_link *ud_link;
773 unsigned int uregno = DF_REF_REGNO (use);
775 /* Follow use-def chain to find all the defs of this use. */
776 for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
778 struct ref *def = ud_link->ref;
780 /* Unlink this use from the def-use chain. */
781 df_ref_unlink (&DF_REF_CHAIN (def), use);
783 DF_REF_CHAIN (use) = 0;
785 /* Unlink use from reg-use chain. */
786 df_ref_unlink (&df->regs[uregno].uses, use);
788 df->uses[DF_REF_ID (use)] = 0;
791 /* Local routines for recording refs. */
794 /* Create a new ref of type DF_REF_TYPE for register REG at address
795 LOC within INSN of BB. */
796 static struct ref *
797 df_ref_create (df, reg, loc, bb, insn, ref_type, ref_flags)
798 struct df *df;
799 rtx reg;
800 rtx *loc;
801 basic_block bb;
802 rtx insn;
803 enum df_ref_type ref_type;
804 enum df_ref_flags ref_flags;
806 struct ref *this_ref;
807 unsigned int uid;
809 this_ref = (struct ref *) obstack_alloc (&df_ref_obstack,
810 sizeof (*this_ref));
811 DF_REF_REG (this_ref) = reg;
812 DF_REF_LOC (this_ref) = loc;
813 DF_REF_BB (this_ref) = bb;
814 DF_REF_INSN (this_ref) = insn;
815 DF_REF_CHAIN (this_ref) = 0;
816 DF_REF_TYPE (this_ref) = ref_type;
817 DF_REF_FLAGS (this_ref) = ref_flags;
818 uid = INSN_UID (insn);
820 if (ref_type == DF_REF_REG_DEF)
822 if (df->def_id >= df->def_size)
824 /* Make table 25 percent larger. */
825 df->def_size += (df->def_size / 4);
826 df->defs = xrealloc (df->defs,
827 df->def_size * sizeof (*df->defs));
829 DF_REF_ID (this_ref) = df->def_id;
830 df->defs[df->def_id++] = this_ref;
832 else
834 if (df->use_id >= df->use_size)
836 /* Make table 25 percent larger. */
837 df->use_size += (df->use_size / 4);
838 df->uses = xrealloc (df->uses,
839 df->use_size * sizeof (*df->uses));
841 DF_REF_ID (this_ref) = df->use_id;
842 df->uses[df->use_id++] = this_ref;
844 return this_ref;
848 /* Create a new reference of type DF_REF_TYPE for a single register REG,
849 used inside the LOC rtx of INSN. */
850 static void
851 df_ref_record_1 (df, reg, loc, bb, insn, ref_type, ref_flags)
852 struct df *df;
853 rtx reg;
854 rtx *loc;
855 basic_block bb;
856 rtx insn;
857 enum df_ref_type ref_type;
858 enum df_ref_flags ref_flags;
860 df_ref_create (df, reg, loc, bb, insn, ref_type, ref_flags);
864 /* Create new references of type DF_REF_TYPE for each part of register REG
865 at address LOC within INSN of BB. */
866 static void
867 df_ref_record (df, reg, loc, bb, insn, ref_type, ref_flags)
868 struct df *df;
869 rtx reg;
870 rtx *loc;
871 basic_block bb;
872 rtx insn;
873 enum df_ref_type ref_type;
874 enum df_ref_flags ref_flags;
876 unsigned int regno;
878 if (GET_CODE (reg) != REG && GET_CODE (reg) != SUBREG)
879 abort ();
881 /* For the reg allocator we are interested in some SUBREG rtx's, but not
882 all. Notably only those representing a word extraction from a multi-word
883 reg. As written in the documentation, those should have the form
884 (subreg:SI (reg:M A) N), with size(SImode) < size(Mmode).
885 XXX Is that true? We could also use the global word_mode variable. */
886 if (GET_CODE (reg) == SUBREG
887 && (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (word_mode)
888 || GET_MODE_SIZE (GET_MODE (reg))
889 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg)))))
891 loc = &SUBREG_REG (reg);
892 reg = *loc;
895 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
896 if (regno < FIRST_PSEUDO_REGISTER)
898 int i;
899 int endregno;
901 if (! (df->flags & DF_HARD_REGS))
902 return;
904 /* GET_MODE (reg) is correct here. We don't want to go into a SUBREG
905 for the mode, because we only want to add references to regs, which
906 are really referenced. E.g. a (subreg:SI (reg:DI 0) 0) does _not_
907 reference the whole reg 0 in DI mode (which would also include
908 reg 1, at least, if 0 and 1 are SImode registers). */
909 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
911 for (i = regno; i < endregno; i++)
912 df_ref_record_1 (df, gen_rtx_REG (reg_raw_mode[i], i),
913 loc, bb, insn, ref_type, ref_flags);
915 else
917 df_ref_record_1 (df, reg, loc, bb, insn, ref_type, ref_flags);
921 /* Writes to a SUBREG with an inner mode wider than a word and an outer
922 mode no wider than a word are read-modify-write. */
924 static inline bool
925 read_modify_subreg_p (x)
926 rtx x;
928 if (GET_CODE (x) != SUBREG)
929 return false;
930 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) <= UNITS_PER_WORD)
931 return false;
932 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
933 return false;
934 return true;
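/* For example (illustrative, assuming a 32-bit target where
   UNITS_PER_WORD is 4): (subreg:SI (reg:DI 0) 0) is a read-modify-write
   destination, since storing one word leaves the other word of reg 0
   intact, whereas (subreg:QI (reg:SI 1) 0) is not, because the inner
   mode already fits within a single word. */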
937 /* Process all the registers defined in the rtx, X. */
938 static void
939 df_def_record_1 (df, x, bb, insn)
940 struct df *df;
941 rtx x;
942 basic_block bb;
943 rtx insn;
945 rtx *loc = &SET_DEST (x);
946 rtx dst = *loc;
947 enum df_ref_flags flags = 0;
949 /* Some targets place small structures in registers for
950 return values of functions. */
951 if (GET_CODE (dst) == PARALLEL && GET_MODE (dst) == BLKmode)
953 int i;
955 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
956 df_def_record_1 (df, XVECEXP (dst, 0, i), bb, insn);
957 return;
960 /* Maybe we should flag the use of STRICT_LOW_PART somehow. It might be
961 handy for the reg allocator. */
962 while (GET_CODE (dst) == STRICT_LOW_PART
963 || GET_CODE (dst) == ZERO_EXTRACT
964 || GET_CODE (dst) == SIGN_EXTRACT
965 || read_modify_subreg_p (dst))
967 /* A STRICT_LOW_PART always contains a SUBREG, but we don't want it to
968 appear outside, as the whole register is always considered. */
969 if (GET_CODE (dst) == STRICT_LOW_PART)
971 loc = &XEXP (dst, 0);
972 dst = *loc;
974 loc = &XEXP (dst, 0);
975 dst = *loc;
976 flags |= DF_REF_READ_WRITE;
979 if (GET_CODE (dst) == REG
980 || (GET_CODE (dst) == SUBREG && GET_CODE (SUBREG_REG (dst)) == REG))
981 df_ref_record (df, dst, loc, bb, insn, DF_REF_REG_DEF, flags);
985 /* Process all the registers defined in the pattern rtx, X. */
986 static void
987 df_defs_record (df, x, bb, insn)
988 struct df *df;
989 rtx x;
990 basic_block bb;
991 rtx insn;
993 RTX_CODE code = GET_CODE (x);
995 if (code == SET || code == CLOBBER)
997 /* Mark the single def within the pattern. */
998 df_def_record_1 (df, x, bb, insn);
1000 else if (code == PARALLEL)
1002 int i;
1004 /* Mark the multiple defs within the pattern. */
1005 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1007 code = GET_CODE (XVECEXP (x, 0, i));
1008 if (code == SET || code == CLOBBER)
1009 df_def_record_1 (df, XVECEXP (x, 0, i), bb, insn);
1015 /* Process all the registers used in the rtx at address LOC. */
1016 static void
1017 df_uses_record (df, loc, ref_type, bb, insn, flags)
1018 struct df *df;
1019 rtx *loc;
1020 enum df_ref_type ref_type;
1021 basic_block bb;
1022 rtx insn;
1023 enum df_ref_flags flags;
1025 RTX_CODE code;
1026 rtx x;
1027 retry:
1028 x = *loc;
1029 if (!x)
1030 return;
1031 code = GET_CODE (x);
1032 switch (code)
1034 case LABEL_REF:
1035 case SYMBOL_REF:
1036 case CONST_INT:
1037 case CONST:
1038 case CONST_DOUBLE:
1039 case PC:
1040 case ADDR_VEC:
1041 case ADDR_DIFF_VEC:
1042 return;
1044 case CLOBBER:
1045 /* If we are clobbering a MEM, mark any registers inside the address
1046 as being used. */
1047 if (GET_CODE (XEXP (x, 0)) == MEM)
1048 df_uses_record (df, &XEXP (XEXP (x, 0), 0),
1049 DF_REF_REG_MEM_STORE, bb, insn, flags);
1051 /* If we're clobbering a REG then we have a def so ignore. */
1052 return;
1054 case MEM:
1055 df_uses_record (df, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, bb, insn, flags);
1056 return;
1058 case SUBREG:
1059 /* While we're here, optimize this case. */
1061 /* In case the SUBREG is not of a register, don't optimize. */
1062 if (GET_CODE (SUBREG_REG (x)) != REG)
1064 loc = &SUBREG_REG (x);
1065 df_uses_record (df, loc, ref_type, bb, insn, flags);
1066 return;
1069 /* ... Fall through ... */
1071 case REG:
1072 /* A register (or subreg) seen in a context other than being set. */
1073 df_ref_record (df, x, loc, bb, insn, ref_type, flags);
1074 return;
1076 case SET:
1078 rtx dst = SET_DEST (x);
1080 df_uses_record (df, &SET_SRC (x), DF_REF_REG_USE, bb, insn, 0);
1082 switch (GET_CODE (dst))
1084 case SUBREG:
1085 if (read_modify_subreg_p (dst))
1087 df_uses_record (df, &SUBREG_REG (dst), DF_REF_REG_USE, bb,
1088 insn, DF_REF_READ_WRITE);
1089 break;
1091 /* ... FALLTHRU ... */
1092 case REG:
1093 case PC:
1094 break;
1095 case MEM:
1096 df_uses_record (df, &XEXP (dst, 0),
1097 DF_REF_REG_MEM_STORE,
1098 bb, insn, 0);
1099 break;
1100 case STRICT_LOW_PART:
1101 /* A strict_low_part uses the whole reg, not only the subreg. */
1102 dst = XEXP (dst, 0);
1103 if (GET_CODE (dst) != SUBREG)
1104 abort ();
1105 df_uses_record (df, &SUBREG_REG (dst), DF_REF_REG_USE, bb,
1106 insn, DF_REF_READ_WRITE);
1107 break;
1108 case ZERO_EXTRACT:
1109 case SIGN_EXTRACT:
1110 df_uses_record (df, &XEXP (dst, 0), DF_REF_REG_USE, bb, insn,
1111 DF_REF_READ_WRITE);
1112 df_uses_record (df, &XEXP (dst, 1), DF_REF_REG_USE, bb, insn, 0);
1113 df_uses_record (df, &XEXP (dst, 2), DF_REF_REG_USE, bb, insn, 0);
1114 dst = XEXP (dst, 0);
1115 break;
1116 default:
1117 abort ();
1119 return;
1122 case RETURN:
1123 break;
1125 case ASM_OPERANDS:
1126 case UNSPEC_VOLATILE:
1127 case TRAP_IF:
1128 case ASM_INPUT:
1130 /* Traditional and volatile asm instructions must be considered to use
1131 and clobber all hard registers, all pseudo-registers and all of
1132 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
1134 Consider for instance a volatile asm that changes the fpu rounding
1135 mode. An insn should not be moved across this even if it only uses
1136 pseudo-regs because it might give an incorrectly rounded result.
1138 For now, just mark any regs we can find in ASM_OPERANDS as
1139 used. */
1141 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
1142 We cannot just fall through here since then we would be confused
1143 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
1144 traditional asms unlike their normal usage. */
1145 if (code == ASM_OPERANDS)
1147 int j;
1149 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
1150 df_uses_record (df, &ASM_OPERANDS_INPUT (x, j),
1151 DF_REF_REG_USE, bb, insn, 0);
1152 return;
1154 break;
1157 case PRE_DEC:
1158 case POST_DEC:
1159 case PRE_INC:
1160 case POST_INC:
1161 case PRE_MODIFY:
1162 case POST_MODIFY:
1163 /* Catch the def of the register being modified. */
1164 df_ref_record (df, XEXP (x, 0), &XEXP (x, 0), bb, insn, DF_REF_REG_DEF, DF_REF_READ_WRITE);
1166 /* ... Fall through to handle uses ... */
1168 default:
1169 break;
1172 /* Recursively scan the operands of this expression. */
1174 const char *fmt = GET_RTX_FORMAT (code);
1175 int i;
1177 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1179 if (fmt[i] == 'e')
1181 /* Tail recursive case: save a function call level. */
1182 if (i == 0)
1184 loc = &XEXP (x, 0);
1185 goto retry;
1187 df_uses_record (df, &XEXP (x, i), ref_type, bb, insn, flags);
1189 else if (fmt[i] == 'E')
1191 int j;
1192 for (j = 0; j < XVECLEN (x, i); j++)
1193 df_uses_record (df, &XVECEXP (x, i, j), ref_type,
1194 bb, insn, flags);
1201 /* Record all the refs within INSN of basic block BB. */
1202 static void
1203 df_insn_refs_record (df, bb, insn)
1204 struct df *df;
1205 basic_block bb;
1206 rtx insn;
1208 int i;
1210 if (INSN_P (insn))
1212 rtx note;
1214 /* Record register defs */
1215 df_defs_record (df, PATTERN (insn), bb, insn);
1217 if (df->flags & DF_EQUIV_NOTES)
1218 for (note = REG_NOTES (insn); note;
1219 note = XEXP (note, 1))
1221 switch (REG_NOTE_KIND (note))
1223 case REG_EQUIV:
1224 case REG_EQUAL:
1225 df_uses_record (df, &XEXP (note, 0), DF_REF_REG_USE,
1226 bb, insn, 0);
1227 default:
1228 break;
1232 if (GET_CODE (insn) == CALL_INSN)
1234 rtx note;
1235 rtx x;
1237 /* Record the registers used to pass arguments. */
1238 for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
1239 note = XEXP (note, 1))
1241 if (GET_CODE (XEXP (note, 0)) == USE)
1242 df_uses_record (df, &SET_DEST (XEXP (note, 0)), DF_REF_REG_USE,
1243 bb, insn, 0);
1246 /* The stack ptr is used (honorarily) by a CALL insn. */
1247 x = df_reg_use_gen (STACK_POINTER_REGNUM);
1248 df_uses_record (df, &SET_DEST (x), DF_REF_REG_USE, bb, insn, 0);
1250 if (df->flags & DF_HARD_REGS)
1252 /* Calls may also reference any of the global registers,
1253 so they are recorded as used. */
1254 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1255 if (global_regs[i])
1257 x = df_reg_use_gen (i);
1258 df_uses_record (df, &SET_DEST (x),
1259 DF_REF_REG_USE, bb, insn, 0);
1264 /* Record the register uses. */
1265 df_uses_record (df, &PATTERN (insn),
1266 DF_REF_REG_USE, bb, insn, 0);
1269 if (GET_CODE (insn) == CALL_INSN)
1271 rtx note;
1273 if (df->flags & DF_HARD_REGS)
1275 /* Kill all registers invalidated by a call. */
1276 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1277 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1279 rtx reg_clob = df_reg_clobber_gen (i);
1280 df_defs_record (df, reg_clob, bb, insn);
1284 /* There may be extra registers to be clobbered. */
1285 for (note = CALL_INSN_FUNCTION_USAGE (insn);
1286 note;
1287 note = XEXP (note, 1))
1288 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
1289 df_defs_record (df, XEXP (note, 0), bb, insn);
1295 /* Record all the refs within the basic block BB. */
1296 static void
1297 df_bb_refs_record (df, bb)
1298 struct df *df;
1299 basic_block bb;
1301 rtx insn;
1303 /* Scan the block an insn at a time from beginning to end. */
1304 for (insn = bb->head; ; insn = NEXT_INSN (insn))
1306 if (INSN_P (insn))
1308 /* Record refs within INSN. */
1309 df_insn_refs_record (df, bb, insn);
1311 if (insn == bb->end)
1312 break;
1317 /* Record all the refs in the basic blocks specified by BLOCKS. */
1318 static void
1319 df_refs_record (df, blocks)
1320 struct df *df;
1321 bitmap blocks;
1323 basic_block bb;
1325 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1327 df_bb_refs_record (df, bb);
1331 /* Dataflow analysis routines. */
1334 /* Create reg-def chains for basic block BB. These are a list of
1335 definitions for each register. */
1336 static void
1337 df_bb_reg_def_chain_create (df, bb)
1338 struct df *df;
1339 basic_block bb;
1341 rtx insn;
1343 /* Perhaps the defs should be sorted using a depth first search
1344 of the CFG (or possibly a breadth first search). We currently
1345 scan the basic blocks in reverse order so that the first defs
1346 appear at the start of the chain. */
1348 for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1349 insn = PREV_INSN (insn))
1351 struct df_link *link;
1352 unsigned int uid = INSN_UID (insn);
1354 if (! INSN_P (insn))
1355 continue;
1357 for (link = df->insns[uid].defs; link; link = link->next)
1359 struct ref *def = link->ref;
1360 unsigned int dregno = DF_REF_REGNO (def);
1362 df->regs[dregno].defs
1363 = df_link_create (def, df->regs[dregno].defs);
1369 /* Create reg-def chains for each basic block within BLOCKS. These
1370 are a list of definitions for each register. */
1371 static void
1372 df_reg_def_chain_create (df, blocks)
1373 struct df *df;
1374 bitmap blocks;
1376 basic_block bb;
1378 FOR_EACH_BB_IN_BITMAP/*_REV*/ (blocks, 0, bb,
1380 df_bb_reg_def_chain_create (df, bb);
1385 /* Create reg-use chains for basic block BB. These are a list of uses
1386 for each register. */
1387 static void
1388 df_bb_reg_use_chain_create (df, bb)
1389 struct df *df;
1390 basic_block bb;
1392 rtx insn;
1394 /* Scan in forward order so that the last uses appear at the
1395 start of the chain. */
1397 for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
1398 insn = NEXT_INSN (insn))
1400 struct df_link *link;
1401 unsigned int uid = INSN_UID (insn);
1403 if (! INSN_P (insn))
1404 continue;
1406 for (link = df->insns[uid].uses; link; link = link->next)
1408 struct ref *use = link->ref;
1409 unsigned int uregno = DF_REF_REGNO (use);
1411 df->regs[uregno].uses
1412 = df_link_create (use, df->regs[uregno].uses);
1418 /* Create reg-use chains for each basic block within BLOCKS. These
1419 are a list of uses for each register. */
1420 static void
1421 df_reg_use_chain_create (df, blocks)
1422 struct df *df;
1423 bitmap blocks;
1425 basic_block bb;
1427 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1429 df_bb_reg_use_chain_create (df, bb);
1434 /* Create def-use chains from reaching use bitmaps for basic block BB. */
1435 static void
1436 df_bb_du_chain_create (df, bb, ru)
1437 struct df *df;
1438 basic_block bb;
1439 bitmap ru;
1441 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1442 rtx insn;
1444 bitmap_copy (ru, bb_info->ru_out);
1446 /* For each def in BB create a linked list (chain) of uses
1447 reached from the def. */
1448 for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1449 insn = PREV_INSN (insn))
1451 struct df_link *def_link;
1452 struct df_link *use_link;
1453 unsigned int uid = INSN_UID (insn);
1455 if (! INSN_P (insn))
1456 continue;
1458 /* For each def in insn... */
1459 for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1461 struct ref *def = def_link->ref;
1462 unsigned int dregno = DF_REF_REGNO (def);
1464 DF_REF_CHAIN (def) = 0;
1466 /* While the reg-use chains are not essential, it
1467 is _much_ faster to search these short lists rather
1468 than all the reaching uses, especially for large functions. */
1469 for (use_link = df->regs[dregno].uses; use_link;
1470 use_link = use_link->next)
1472 struct ref *use = use_link->ref;
1474 if (bitmap_bit_p (ru, DF_REF_ID (use)))
1476 DF_REF_CHAIN (def)
1477 = df_link_create (use, DF_REF_CHAIN (def));
1479 bitmap_clear_bit (ru, DF_REF_ID (use));
1484 /* For each use in insn... */
1485 for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1487 struct ref *use = use_link->ref;
1488 bitmap_set_bit (ru, DF_REF_ID (use));
1494 /* Create def-use chains from reaching use bitmaps for basic blocks
1495 in BLOCKS. */
1496 static void
1497 df_du_chain_create (df, blocks)
1498 struct df *df;
1499 bitmap blocks;
1501 bitmap ru;
1502 basic_block bb;
1504 ru = BITMAP_XMALLOC ();
1506 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1508 df_bb_du_chain_create (df, bb, ru);
1511 BITMAP_XFREE (ru);
1515 /* Create use-def chains from reaching def bitmaps for basic block BB. */
1516 static void
1517 df_bb_ud_chain_create (df, bb)
1518 struct df *df;
1519 basic_block bb;
1521 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1522 struct ref **reg_def_last = df->reg_def_last;
1523 rtx insn;
1525 memset (reg_def_last, 0, df->n_regs * sizeof (struct ref *));
1527 /* For each use in BB create a linked list (chain) of defs
1528 that reach the use. */
1529 for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
1530 insn = NEXT_INSN (insn))
1532 unsigned int uid = INSN_UID (insn);
1533 struct df_link *use_link;
1534 struct df_link *def_link;
1536 if (! INSN_P (insn))
1537 continue;
1539 /* For each use in insn... */
1540 for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1542 struct ref *use = use_link->ref;
1543 unsigned int regno = DF_REF_REGNO (use);
1545 DF_REF_CHAIN (use) = 0;
1547 /* Has regno been defined in this BB yet? If so, use
1548 the last def as the single entry for the use-def
1549 chain for this use. Otherwise, we need to add all
1550 the defs using this regno that reach the start of
1551 this BB. */
1552 if (reg_def_last[regno])
1554 DF_REF_CHAIN (use)
1555 = df_link_create (reg_def_last[regno], 0);
1557 else
1559 /* While the reg-def chains are not essential, it is
1560 _much_ faster to search these short lists rather than
1561 all the reaching defs, especially for large
1562 functions. */
1563 for (def_link = df->regs[regno].defs; def_link;
1564 def_link = def_link->next)
1566 struct ref *def = def_link->ref;
1568 if (bitmap_bit_p (bb_info->rd_in, DF_REF_ID (def)))
1570 DF_REF_CHAIN (use)
1571 = df_link_create (def, DF_REF_CHAIN (use));
1578 /* For each def in insn...record the last def of each reg. */
1579 for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1581 struct ref *def = def_link->ref;
1582 int dregno = DF_REF_REGNO (def);
1584 reg_def_last[dregno] = def;
1590 /* Create use-def chains from reaching def bitmaps for basic blocks
1591 within BLOCKS. */
1592 static void
1593 df_ud_chain_create (df, blocks)
1594 struct df *df;
1595 bitmap blocks;
1597 basic_block bb;
1599 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1601 df_bb_ud_chain_create (df, bb);
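/* The transfer functions below all use bitmap_union_of_diff (DST, A, B, C),
   which computes DST = A | (B & ~C) and reports whether DST changed. In
   dataflow terms, with directions as passed to iterative_dataflow_bitmap
   in df_analyse_1:

      reaching defs (forward):   out = gen | (in & ~kill)
      reaching uses (backward):  in  = gen | (out & ~kill)
      live registers (backward): in  = use | (out & ~def)  */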
1607 static void
1608 df_rd_transfer_function (bb, changed, in, out, gen, kill, data)
1609 int bb ATTRIBUTE_UNUSED;
1610 int *changed;
1611 bitmap in, out, gen, kill;
1612 void *data ATTRIBUTE_UNUSED;
1614 *changed = bitmap_union_of_diff (out, gen, in, kill);
1616 static void
1617 df_ru_transfer_function (bb, changed, in, out, gen, kill, data)
1618 int bb ATTRIBUTE_UNUSED;
1619 int *changed;
1620 bitmap in, out, gen, kill;
1621 void *data ATTRIBUTE_UNUSED;
1623 *changed = bitmap_union_of_diff (in, gen, out, kill);
1626 static void
1627 df_lr_transfer_function (bb, changed, in, out, use, def, data)
1628 int bb ATTRIBUTE_UNUSED;
1629 int *changed;
1630 bitmap in, out, use, def;
1631 void *data ATTRIBUTE_UNUSED;
1633 *changed = bitmap_union_of_diff (in, use, out, def);
1637 /* Compute local reaching def info for basic block BB. */
1638 static void
1639 df_bb_rd_local_compute (df, bb)
1640 struct df *df;
1641 basic_block bb;
1643 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1644 rtx insn;
1646 for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
1647 insn = NEXT_INSN (insn))
1649 unsigned int uid = INSN_UID (insn);
1650 struct df_link *def_link;
1652 if (! INSN_P (insn))
1653 continue;
1655 for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1657 struct ref *def = def_link->ref;
1658 unsigned int regno = DF_REF_REGNO (def);
1659 struct df_link *def2_link;
1661 for (def2_link = df->regs[regno].defs; def2_link;
1662 def2_link = def2_link->next)
1664 struct ref *def2 = def2_link->ref;
1666 /* Add all defs of this reg to the set of kills. This
1667 is greedy since many of these defs will not actually
1668 be killed by this BB but it keeps things a lot
1669 simpler. */
1670 bitmap_set_bit (bb_info->rd_kill, DF_REF_ID (def2));
1672 /* Zap from the set of gens for this BB. */
1673 bitmap_clear_bit (bb_info->rd_gen, DF_REF_ID (def2));
1676 bitmap_set_bit (bb_info->rd_gen, DF_REF_ID (def));
1680 bb_info->rd_valid = 1;
1684 /* Compute local reaching def info for each basic block within BLOCKS. */
1685 static void
1686 df_rd_local_compute (df, blocks)
1687 struct df *df;
1688 bitmap blocks;
1690 basic_block bb;
1692 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1694 df_bb_rd_local_compute (df, bb);
1699 /* Compute local reaching use (upward exposed use) info for basic
1700 block BB. */
1701 static void
1702 df_bb_ru_local_compute (df, bb)
1703 struct df *df;
1704 basic_block bb;
1706 /* This is much more tricky than computing reaching defs. With
1707 reaching defs, defs get killed by other defs. With upwards
1708 exposed uses, these get killed by defs with the same regno. */
1710 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1711 rtx insn;
1714 for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1715 insn = PREV_INSN (insn))
1717 unsigned int uid = INSN_UID (insn);
1718 struct df_link *def_link;
1719 struct df_link *use_link;
1721 if (! INSN_P (insn))
1722 continue;
1724 for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1726 struct ref *def = def_link->ref;
1727 unsigned int dregno = DF_REF_REGNO (def);
1729 for (use_link = df->regs[dregno].uses; use_link;
1730 use_link = use_link->next)
1732 struct ref *use = use_link->ref;
1734 /* Add all uses of this reg to the set of kills. This
1735 is greedy since many of these uses will not actually
1736 be killed by this BB but it keeps things a lot
1737 simpler. */
1738 bitmap_set_bit (bb_info->ru_kill, DF_REF_ID (use));
1740 /* Zap from the set of gens for this BB. */
1741 bitmap_clear_bit (bb_info->ru_gen, DF_REF_ID (use));
1745 for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1747 struct ref *use = use_link->ref;
1748 /* Add use to set of gens in this BB. */
1749 bitmap_set_bit (bb_info->ru_gen, DF_REF_ID (use));
1752 bb_info->ru_valid = 1;
1756 /* Compute local reaching use (upward exposed use) info for each basic
1757 block within BLOCKS. */
1758 static void
1759 df_ru_local_compute (df, blocks)
1760 struct df *df;
1761 bitmap blocks;
1763 basic_block bb;
1765 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1767 df_bb_ru_local_compute (df, bb);
1772 /* Compute local live variable info for basic block BB. */
1773 static void
1774 df_bb_lr_local_compute (df, bb)
1775 struct df *df;
1776 basic_block bb;
1778 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1779 rtx insn;
1781 for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1782 insn = PREV_INSN (insn))
1784 unsigned int uid = INSN_UID (insn);
1785 struct df_link *link;
1787 if (! INSN_P (insn))
1788 continue;
1790 for (link = df->insns[uid].defs; link; link = link->next)
1792 struct ref *def = link->ref;
1793 unsigned int dregno = DF_REF_REGNO (def);
1795 /* Add def to set of defs in this BB. */
1796 bitmap_set_bit (bb_info->lr_def, dregno);
1798 bitmap_clear_bit (bb_info->lr_use, dregno);
1801 for (link = df->insns[uid].uses; link; link = link->next)
1803 struct ref *use = link->ref;
1804 /* Add use to set of uses in this BB. */
1805 bitmap_set_bit (bb_info->lr_use, DF_REF_REGNO (use));
1808 bb_info->lr_valid = 1;
1812 /* Compute local live variable info for each basic block within BLOCKS. */
1813 static void
1814 df_lr_local_compute (df, blocks)
1815 struct df *df;
1816 bitmap blocks;
1818 basic_block bb;
1820 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1822 df_bb_lr_local_compute (df, bb);
1827 /* Compute register info: lifetime, bb, and number of defs and uses
1828 for basic block BB. */
1829 static void
1830 df_bb_reg_info_compute (df, bb, live)
1831 struct df *df;
1832 basic_block bb;
1833 bitmap live;
1835 struct reg_info *reg_info = df->regs;
1836 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1837 rtx insn;
1839 bitmap_copy (live, bb_info->lr_out);
1841 for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1842 insn = PREV_INSN (insn))
1844 unsigned int uid = INSN_UID (insn);
1845 unsigned int regno;
1846 struct df_link *link;
1848 if (! INSN_P (insn))
1849 continue;
1851 for (link = df->insns[uid].defs; link; link = link->next)
1853 struct ref *def = link->ref;
1854 unsigned int dregno = DF_REF_REGNO (def);
1856 /* Kill this register. */
1857 bitmap_clear_bit (live, dregno);
1858 reg_info[dregno].n_defs++;
1861 for (link = df->insns[uid].uses; link; link = link->next)
1863 struct ref *use = link->ref;
1864 unsigned int uregno = DF_REF_REGNO (use);
1866 /* This register is now live. */
1867 bitmap_set_bit (live, uregno);
1868 reg_info[uregno].n_uses++;
1871 /* Increment lifetimes of all live registers. */
1872 EXECUTE_IF_SET_IN_BITMAP (live, 0, regno,
1874 reg_info[regno].lifetime++;
1880 /* Compute register info: lifetime, bb, and number of defs and uses. */
1881 static void
1882 df_reg_info_compute (df, blocks)
1883 struct df *df;
1884 bitmap blocks;
1886 basic_block bb;
1887 bitmap live;
1889 live = BITMAP_XMALLOC ();
1891 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1893 df_bb_reg_info_compute (df, bb, live);
1896 BITMAP_XFREE (live);
1900 /* Assign LUIDs for BB. */
1901 static int
1902 df_bb_luids_set (df, bb)
1903 struct df *df;
1904 basic_block bb;
1906 rtx insn;
1907 int luid = 0;
1909 /* The LUIDs are monotonically increasing for each basic block. */
1911 for (insn = bb->head; ; insn = NEXT_INSN (insn))
1913 if (INSN_P (insn))
1914 DF_INSN_LUID (df, insn) = luid++;
1915 DF_INSN_LUID (df, insn) = luid;
1917 if (insn == bb->end)
1918 break;
1920 return luid;
1924 /* Assign LUIDs for each basic block within BLOCKS. */
1925 static int
1926 df_luids_set (df, blocks)
1927 struct df *df;
1928 bitmap blocks;
1930 basic_block bb;
1931 int total = 0;
1933 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1935 total += df_bb_luids_set (df, bb);
1937 return total;
1940 /* Perform dataflow analysis using existing DF structure for blocks
1941 within BLOCKS. If BLOCKS is zero, use all basic blocks in the CFG. */
1942 static void
1943 df_analyse_1 (df, blocks, flags, update)
1944 struct df *df;
1945 bitmap blocks;
1946 int flags;
1947 int update;
1949 int aflags;
1950 int dflags;
1951 int i;
1952 dflags = 0;
1953 aflags = flags;
1954 if (flags & DF_UD_CHAIN)
1955 aflags |= DF_RD | DF_RD_CHAIN;
1957 if (flags & DF_DU_CHAIN)
1958 aflags |= DF_RU;
1960 if (flags & DF_RU)
1961 aflags |= DF_RU_CHAIN;
1963 if (flags & DF_REG_INFO)
1964 aflags |= DF_LR;
1966 if (! blocks)
1967 blocks = df->all_blocks;
1969 df->flags = flags;
1970 if (update)
1972 df_refs_update (df);
1973 /* More fine grained incremental dataflow analysis would be
1974 nice. For now recompute the whole shebang for the
1975 modified blocks. */
1976 #if 0
1977 df_refs_unlink (df, blocks);
1978 #endif
1979 /* All the def-use, use-def chains can be potentially
1980 modified by changes in one block. The size of the
1981 bitmaps can also change. */
1983 else
1985 /* Scan the function for all register defs and uses. */
1986 df_refs_queue (df);
1987 df_refs_record (df, blocks);
1989 /* Link all the new defs and uses to the insns. */
1990 df_refs_process (df);
1993 /* Allocate the bitmaps now that the total number of defs and uses is
1994 known. If the number of defs or uses has changed, then
1995 these bitmaps need to be reallocated. */
1996 df_bitmaps_alloc (df, aflags);
1998 /* Set the LUIDs for each specified basic block. */
1999 df_luids_set (df, blocks);
2001 /* Recreate reg-def and reg-use chains from scratch so that first
2002 def is at the head of the reg-def chain and the last use is at
2003 the head of the reg-use chain. This is only important for
2004 regs local to a basic block as it speeds up searching. */
  if (aflags & DF_RD_CHAIN)
    {
      df_reg_def_chain_create (df, blocks);
    }

  if (aflags & DF_RU_CHAIN)
    {
      df_reg_use_chain_create (df, blocks);
    }

  df->dfs_order = xmalloc (sizeof (int) * n_basic_blocks);
  df->rc_order = xmalloc (sizeof (int) * n_basic_blocks);
  df->rts_order = xmalloc (sizeof (int) * n_basic_blocks);
  df->inverse_dfs_map = xmalloc (sizeof (int) * n_basic_blocks);
  df->inverse_rc_map = xmalloc (sizeof (int) * n_basic_blocks);
  df->inverse_rts_map = xmalloc (sizeof (int) * n_basic_blocks);

  flow_depth_first_order_compute (df->dfs_order, df->rc_order);
  flow_reverse_top_sort_order_compute (df->rts_order);
  for (i = 0; i < n_basic_blocks; i++)
    {
      df->inverse_dfs_map[df->dfs_order[i]] = i;
      df->inverse_rc_map[df->rc_order[i]] = i;
      df->inverse_rts_map[df->rts_order[i]] = i;
    }

  if (aflags & DF_RD)
    {
      /* Compute the sets of gens and kills for the defs of each bb.  */
      df_rd_local_compute (df, df->flags & DF_RD ? blocks : df->all_blocks);
      {
        int i;
        bitmap *in = xmalloc (sizeof (bitmap) * n_basic_blocks);
        bitmap *out = xmalloc (sizeof (bitmap) * n_basic_blocks);
        bitmap *gen = xmalloc (sizeof (bitmap) * n_basic_blocks);
        bitmap *kill = xmalloc (sizeof (bitmap) * n_basic_blocks);
        for (i = 0; i < n_basic_blocks; i++)
          {
            in[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->rd_in;
            out[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->rd_out;
            gen[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->rd_gen;
            kill[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->rd_kill;
          }
        iterative_dataflow_bitmap (in, out, gen, kill, df->all_blocks,
                                   FORWARD, UNION, df_rd_transfer_function,
                                   df->inverse_rc_map, NULL);
        free (in);
        free (out);
        free (gen);
        free (kill);
      }
    }

  if (aflags & DF_UD_CHAIN)
    {
      /* Create use-def chains.  */
      df_ud_chain_create (df, df->all_blocks);

      if (! (flags & DF_RD))
        dflags |= DF_RD;
    }

  if (aflags & DF_RU)
    {
      /* Compute the sets of gens and kills for the upwards exposed
         uses in each bb.  */
      df_ru_local_compute (df, df->flags & DF_RU ? blocks : df->all_blocks);
      {
        int i;
        bitmap *in = xmalloc (sizeof (bitmap) * n_basic_blocks);
        bitmap *out = xmalloc (sizeof (bitmap) * n_basic_blocks);
        bitmap *gen = xmalloc (sizeof (bitmap) * n_basic_blocks);
        bitmap *kill = xmalloc (sizeof (bitmap) * n_basic_blocks);
        for (i = 0; i < n_basic_blocks; i++)
          {
            in[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->ru_in;
            out[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->ru_out;
            gen[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->ru_gen;
            kill[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->ru_kill;
          }
        iterative_dataflow_bitmap (in, out, gen, kill, df->all_blocks,
                                   BACKWARD, UNION, df_ru_transfer_function,
                                   df->inverse_rts_map, NULL);
        free (in);
        free (out);
        free (gen);
        free (kill);
      }
    }

  if (aflags & DF_DU_CHAIN)
    {
      /* Create def-use chains.  */
      df_du_chain_create (df, df->all_blocks);

      if (! (flags & DF_RU))
        dflags |= DF_RU;
    }

  /* Free up bitmaps that are no longer required.  */
  if (dflags)
    df_bitmaps_free (df, dflags);

  if (aflags & DF_LR)
    {
      /* Compute the sets of defs and uses of live variables.  */
      df_lr_local_compute (df, df->flags & DF_LR ? blocks : df->all_blocks);
      {
        int i;
        bitmap *in = xmalloc (sizeof (bitmap) * n_basic_blocks);
        bitmap *out = xmalloc (sizeof (bitmap) * n_basic_blocks);
        bitmap *use = xmalloc (sizeof (bitmap) * n_basic_blocks);
        bitmap *def = xmalloc (sizeof (bitmap) * n_basic_blocks);
        for (i = 0; i < n_basic_blocks; i++)
          {
            in[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->lr_in;
            out[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->lr_out;
            use[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->lr_use;
            def[i] = DF_BB_INFO (df, BASIC_BLOCK (i))->lr_def;
          }
        iterative_dataflow_bitmap (in, out, use, def, df->all_blocks,
                                   BACKWARD, UNION, df_lr_transfer_function,
                                   df->inverse_rts_map, NULL);
        free (in);
        free (out);
        free (use);
        free (def);
      }
    }

  if (aflags & DF_REG_INFO)
    {
      df_reg_info_compute (df, df->all_blocks);
    }

  free (df->dfs_order);
  free (df->rc_order);
  free (df->rts_order);
  free (df->inverse_rc_map);
  free (df->inverse_dfs_map);
  free (df->inverse_rts_map);
}

/* Initialise dataflow analysis.  */
struct df *
df_init ()
{
  struct df *df;

  df = xcalloc (1, sizeof (struct df));

  /* Squirrel away a global for debugging.  */
  ddf = df;

  return df;
}


/* Start queuing refs.  */
static int
df_refs_queue (df)
     struct df *df;
{
  df->def_id_save = df->def_id;
  df->use_id_save = df->use_id;
  /* ???? Perhaps we should save current obstack state so that we can
     unwind it.  */
  return 0;
}


/* Process queued refs.  */
static int
df_refs_process (df)
     struct df *df;
{
  unsigned int i;

  /* Build new insn-def chains.  */
  for (i = df->def_id_save; i != df->def_id; i++)
    {
      struct ref *def = df->defs[i];
      unsigned int uid = DF_REF_INSN_UID (def);

      /* Add def to head of def list for INSN.  */
      df->insns[uid].defs
        = df_link_create (def, df->insns[uid].defs);
    }

  /* Build new insn-use chains.  */
  for (i = df->use_id_save; i != df->use_id; i++)
    {
      struct ref *use = df->uses[i];
      unsigned int uid = DF_REF_INSN_UID (use);

      /* Add use to head of use list for INSN.  */
      df->insns[uid].uses
        = df_link_create (use, df->insns[uid].uses);
    }
  return 0;
}

/* Update refs for basic block BB.  */
static int
df_bb_refs_update (df, bb)
     struct df *df;
     basic_block bb;
{
  rtx insn;
  int count = 0;

  /* While we have to scan the chain of insns for this BB, we don't
     need to allocate and queue a long chain of BB/INSN pairs.  Using
     a bitmap for insns_modified saves memory and avoids queuing
     duplicates.  */

  for (insn = bb->head; ; insn = NEXT_INSN (insn))
    {
      unsigned int uid;

      uid = INSN_UID (insn);

      if (bitmap_bit_p (df->insns_modified, uid))
        {
          /* Delete any allocated refs of this insn.  MPH, FIXME.  */
          df_insn_refs_unlink (df, bb, insn);

          /* Scan the insn for refs.  */
          df_insn_refs_record (df, bb, insn);

          bitmap_clear_bit (df->insns_modified, uid);
          count++;
        }
      if (insn == bb->end)
        break;
    }
  return count;
}


/* Process all the modified/deleted insns that were queued.  */
static int
df_refs_update (df)
     struct df *df;
{
  basic_block bb;
  int count = 0;

  if ((unsigned int) max_reg_num () >= df->reg_size)
    df_reg_table_realloc (df, 0);

  df_refs_queue (df);

  FOR_EACH_BB_IN_BITMAP (df->bbs_modified, 0, bb,
    {
      count += df_bb_refs_update (df, bb);
    });

  df_refs_process (df);
  return count;
}


/* Return non-zero if any of the requested blocks in the bitmap
   BLOCKS have been modified.  */
static int
df_modified_p (df, blocks)
     struct df *df;
     bitmap blocks;
{
  unsigned int j;
  int update = 0;

  for (j = 0; j < df->n_bbs; j++)
    if (bitmap_bit_p (df->bbs_modified, j)
        && (! blocks || (blocks == (bitmap) -1) || bitmap_bit_p (blocks, j)))
      {
        update = 1;
        break;
      }

  return update;
}

/* Analyse dataflow info for the basic blocks specified by the bitmap
   BLOCKS, or for the whole CFG if BLOCKS is zero, or just for the
   modified blocks if BLOCKS is -1.  Return non-zero if any of the
   requested blocks needed updating.  */
int
df_analyse (df, blocks, flags)
     struct df *df;
     bitmap blocks;
     int flags;
{
  int update;

  /* We could deal with additional basic blocks being created by
     rescanning everything again.  */
  if (df->n_bbs && df->n_bbs != (unsigned int) n_basic_blocks)
    abort ();

  update = df_modified_p (df, blocks);
  if (update || (flags != df->flags))
    {
      if (! blocks)
        {
          if (df->n_bbs)
            {
              /* Recompute everything from scratch.  */
              df_free (df);
            }
          /* Allocate and initialise data structures.  */
          df_alloc (df, max_reg_num ());
          df_analyse_1 (df, 0, flags, 0);
          update = 1;
        }
      else
        {
          if (blocks == (bitmap) -1)
            blocks = df->bbs_modified;

          if (! df->n_bbs)
            abort ();

          df_analyse_1 (df, blocks, flags, 1);
          bitmap_zero (df->bbs_modified);
        }
    }
  return update;
}
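
/* For example, a pass that edits insns in place can lean on the
   incremental path above: mark each changed insn with df_insn_modify
   and then pass BLOCKS as -1 so that only the recorded blocks are
   re-analysed.  A minimal sketch (the surrounding pass is
   hypothetical):

      df_insn_modify (df, bb, insn);
      ...
      df_analyse (df, (bitmap) -1, DF_UD_CHAIN | DF_DU_CHAIN);

   Only the blocks noted in df->bbs_modified get rescanned.  */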

/* Free all the dataflow info and the DF structure.  */
void
df_finish (df)
     struct df *df;
{
  df_free (df);
  free (df);
}

/* Unlink INSN from its reference information.  */
static void
df_insn_refs_unlink (df, bb, insn)
     struct df *df;
     basic_block bb ATTRIBUTE_UNUSED;
     rtx insn;
{
  struct df_link *link;
  unsigned int uid;

  uid = INSN_UID (insn);

  /* Unlink all refs defined by this insn.  */
  for (link = df->insns[uid].defs; link; link = link->next)
    df_def_unlink (df, link->ref);

  /* Unlink all refs used by this insn.  */
  for (link = df->insns[uid].uses; link; link = link->next)
    df_use_unlink (df, link->ref);

  df->insns[uid].defs = 0;
  df->insns[uid].uses = 0;
}


#if 0
/* Unlink all the insns within BB from their reference information.  */
static void
df_bb_refs_unlink (df, bb)
     struct df *df;
     basic_block bb;
{
  rtx insn;

  /* Scan the block an insn at a time from beginning to end.  */
  for (insn = bb->head; ; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        {
          /* Unlink refs for INSN.  */
          df_insn_refs_unlink (df, bb, insn);
        }
      if (insn == bb->end)
        break;
    }
}


/* Unlink all the refs in the basic blocks specified by BLOCKS.
   Not currently used.  */
static void
df_refs_unlink (df, blocks)
     struct df *df;
     bitmap blocks;
{
  basic_block bb;

  if (blocks)
    {
      FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
        {
          df_bb_refs_unlink (df, bb);
        });
    }
  else
    {
      FOR_ALL_BBS (bb,
        {
          df_bb_refs_unlink (df, bb);
        });
    }
}
#endif

/* Functions to modify insns.  */


/* Delete INSN and all its reference information.  */
rtx
df_insn_delete (df, bb, insn)
     struct df *df;
     basic_block bb;
     rtx insn;
{
  /* If the insn is a jump, we should perhaps call delete_insn to
     handle the JUMP_LABEL?  */

  /* We should not be deleting the NOTE_INSN_BASIC_BLOCK or label.  */
  if (insn == bb->head)
    abort ();

  /* Delete the insn.  */
  delete_insn (insn);

  df_insn_modify (df, bb, insn);

  return NEXT_INSN (insn);
}


/* Mark that INSN within BB may have changed (created/modified/deleted).
   This may be called multiple times for the same insn.  There is no
   harm calling this function if the insn wasn't changed; it will just
   slow down the rescanning of refs.  */
void
df_insn_modify (df, bb, insn)
     struct df *df;
     basic_block bb;
     rtx insn;
{
  unsigned int uid;

  uid = INSN_UID (insn);

  if (uid >= df->insn_size)
    df_insn_table_realloc (df, 0);

  bitmap_set_bit (df->bbs_modified, bb->index);
  bitmap_set_bit (df->insns_modified, uid);

  /* For incremental updating on the fly, perhaps we could make a copy
     of all the refs of the original insn and turn them into
     anti-refs.  When df_refs_update finds these anti-refs, it
     annihilates the original refs.  If validate_change fails then
     these anti-refs will just get ignored.  */
}
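
/* A typical caller first changes the insn and then records the fact,
   for instance (an illustrative sketch, not a recipe):

      if (validate_change (insn, &SET_SRC (set), new_src, 0))
        df_insn_modify (df, bb, insn);

   so that the next call to df_analyse rescans the refs of INSN.  */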

typedef struct replace_args
{
  rtx match;
  rtx replacement;
  rtx insn;
  int modified;
} replace_args;

/* Replace mem pointed to by PX with its associated pseudo register.
   DATA is actually a pointer to a structure describing the
   instruction currently being scanned and the MEM we are currently
   replacing.  */
static int
df_rtx_mem_replace (px, data)
     rtx *px;
     void *data;
{
  replace_args *args = (replace_args *) data;
  rtx mem = *px;

  if (mem == NULL_RTX)
    return 0;

  switch (GET_CODE (mem))
    {
    case MEM:
      break;

    case CONST_DOUBLE:
      /* We're not interested in the MEM associated with a
         CONST_DOUBLE, so there's no need to traverse into one.  */
      return -1;

    default:
      /* This is not a MEM.  */
      return 0;
    }

  if (!rtx_equal_p (args->match, mem))
    /* This is not the MEM we are currently replacing.  */
    return 0;

  /* Actually replace the MEM.  */
  validate_change (args->insn, px, args->replacement, 1);
  args->modified++;

  return 0;
}


/* Replace all occurrences of MEM within INSN with REG, queuing INSN
   for rescanning if anything was changed.  Return the number of
   replacements made.  */
int
df_insn_mem_replace (df, bb, insn, mem, reg)
     struct df *df;
     basic_block bb;
     rtx insn;
     rtx mem;
     rtx reg;
{
  replace_args args;

  args.insn = insn;
  args.match = mem;
  args.replacement = reg;
  args.modified = 0;

  /* Search and replace all matching mems within insn.  */
  for_each_rtx (&insn, df_rtx_mem_replace, &args);

  if (args.modified)
    df_insn_modify (df, bb, insn);

  /* ???? FIXME.  We may have a new def or one or more new uses of REG
     in INSN.  REG should be a new pseudo so it won't affect the
     dataflow information that we currently have.  We should add
     the new uses and defs to INSN and then recreate the chains
     when df_analyse is called.  */
  return args.modified;
}
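
/* As an illustration, a pass that has loaded the value of MEM into a
   fresh pseudo might rewrite the remaining occurrences with (a
   sketch; the surrounding pass is hypothetical):

      rtx reg = gen_reg_rtx (GET_MODE (mem));
      ...
      df_insn_mem_replace (df, bb, insn, mem, reg);

   and rely on a later df_analyse call to pick up the new refs.  */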

/* Replace one register with another.  Called through for_each_rtx; PX
   points to the rtx being scanned.  DATA is actually a pointer to a
   structure of arguments.  */
static int
df_rtx_reg_replace (px, data)
     rtx *px;
     void *data;
{
  rtx x = *px;
  replace_args *args = (replace_args *) data;

  if (x == NULL_RTX)
    return 0;

  if (x == args->match)
    {
      validate_change (args->insn, px, args->replacement, 1);
      args->modified++;
    }

  return 0;
}


/* Replace the reg within every ref on CHAIN that is within the set
   BLOCKS of basic blocks with NEWREG.  Also update the regs within
   REG_NOTES.  */
void
df_refs_reg_replace (df, blocks, chain, oldreg, newreg)
     struct df *df;
     bitmap blocks;
     struct df_link *chain;
     rtx oldreg;
     rtx newreg;
{
  struct df_link *link;
  replace_args args;

  if (! blocks)
    blocks = df->all_blocks;

  args.match = oldreg;
  args.replacement = newreg;
  args.modified = 0;

  for (link = chain; link; link = link->next)
    {
      struct ref *ref = link->ref;
      rtx insn = DF_REF_INSN (ref);

      if (! INSN_P (insn))
        continue;

      if (bitmap_bit_p (blocks, DF_REF_BBNO (ref)))
        {
          df_ref_reg_replace (df, ref, oldreg, newreg);

          /* Replace occurrences of the reg within the REG_NOTES.  */
          if ((! link->next || DF_REF_INSN (ref)
              != DF_REF_INSN (link->next->ref))
              && REG_NOTES (insn))
            {
              args.insn = insn;
              for_each_rtx (&REG_NOTES (insn), df_rtx_reg_replace, &args);
            }
        }
      else
        {
          /* Temporary check to ensure that we have a grip on which
             regs should be replaced.  */
          abort ();
        }
    }
}

/* Replace all occurrences of register OLDREG with register NEWREG in
   blocks defined by bitmap BLOCKS.  This also replaces occurrences of
   OLDREG in the REG_NOTES but only for insns containing OLDREG.  This
   routine expects the reg-use and reg-def chains to be valid.  */
int
df_reg_replace (df, blocks, oldreg, newreg)
     struct df *df;
     bitmap blocks;
     rtx oldreg;
     rtx newreg;
{
  unsigned int oldregno = REGNO (oldreg);

  df_refs_reg_replace (df, blocks, df->regs[oldregno].defs, oldreg, newreg);
  df_refs_reg_replace (df, blocks, df->regs[oldregno].uses, oldreg, newreg);
  return 1;
}
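
/* For example, renaming a pseudo throughout the blocks of a loop body
   could look like (a sketch; LOOP_BLOCKS is a hypothetical bitmap of
   the loop's blocks):

      rtx newreg = gen_reg_rtx (GET_MODE (oldreg));
      df_reg_replace (df, loop_blocks, oldreg, newreg);

   This requires up to date reg-def and reg-use chains, i.e. a prior
   df_analyse with DF_RD_CHAIN and DF_RU_CHAIN.  */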

/* Try replacing the reg within REF with NEWREG.  Do not modify
   def-use/use-def chains.  */
int
df_ref_reg_replace (df, ref, oldreg, newreg)
     struct df *df;
     struct ref *ref;
     rtx oldreg;
     rtx newreg;
{
  /* Check whether the insn has been deleted by being converted into a
     NOTE.  If so, ignore this insn.  */
  if (! INSN_P (DF_REF_INSN (ref)))
    return 0;

  if (oldreg && oldreg != DF_REF_REG (ref))
    abort ();

  if (! validate_change (DF_REF_INSN (ref), DF_REF_LOC (ref), newreg, 1))
    return 0;

  df_insn_modify (df, DF_REF_BB (ref), DF_REF_INSN (ref));
  return 1;
}

/* Swap a def and a use of REGNO between two insns within BB: remove
   the last use of REGNO from USE_INSN and migrate the first def of
   REGNO from DEF_INSN to USE_INSN.  Return the def, or zero if either
   ref cannot be found.  */
struct ref *
df_bb_def_use_swap (df, bb, def_insn, use_insn, regno)
     struct df *df;
     basic_block bb;
     rtx def_insn;
     rtx use_insn;
     unsigned int regno;
{
  struct ref *def;
  struct ref *use;
  int def_uid;
  int use_uid;
  struct df_link *link;

  def = df_bb_insn_regno_first_def_find (df, bb, def_insn, regno);
  if (! def)
    return 0;

  use = df_bb_insn_regno_last_use_find (df, bb, use_insn, regno);
  if (! use)
    return 0;

  /* The USE no longer exists.  */
  use_uid = INSN_UID (use_insn);
  df_use_unlink (df, use);
  df_ref_unlink (&df->insns[use_uid].uses, use);

  /* The DEF requires shifting so remove it from DEF_INSN
     and add it to USE_INSN by reusing LINK.  */
  def_uid = INSN_UID (def_insn);
  link = df_ref_unlink (&df->insns[def_uid].defs, def);
  link->ref = def;
  link->next = df->insns[use_uid].defs;
  df->insns[use_uid].defs = link;

#if 0
  link = df_ref_unlink (&df->regs[regno].defs, def);
  link->ref = def;
  link->next = df->regs[regno].defs;
  df->regs[regno].defs = link;
#endif

  DF_REF_INSN (def) = use_insn;
  return def;
}

/* Mark all the insns from FIRST_INSN up to and including LAST_INSN
   as modified.  All new insns must be processed by this routine.  */
static void
df_insns_modify (df, bb, first_insn, last_insn)
     struct df *df;
     basic_block bb;
     rtx first_insn;
     rtx last_insn;
{
  rtx insn;

  for (insn = first_insn; ; insn = NEXT_INSN (insn))
    {
      unsigned int uid;

      /* A non-const call should not have slipped through the net.  If
         it does, we need to create a new basic block.  Ouch.  The
         same applies for a label.  */
      if ((GET_CODE (insn) == CALL_INSN
           && ! CONST_OR_PURE_CALL_P (insn))
          || GET_CODE (insn) == CODE_LABEL)
        abort ();

      uid = INSN_UID (insn);

      if (uid >= df->insn_size)
        df_insn_table_realloc (df, 0);

      df_insn_modify (df, bb, insn);

      if (insn == last_insn)
        break;
    }
}

/* Emit PATTERN before INSN within BB.  */
rtx
df_pattern_emit_before (df, pattern, bb, insn)
     struct df *df;
     rtx pattern;
     basic_block bb;
     rtx insn;
{
  rtx ret_insn;
  rtx prev_insn = PREV_INSN (insn);

  /* We should not be inserting before the start of the block.  */
  if (insn == bb->head)
    abort ();
  ret_insn = emit_insn_before (pattern, insn);
  if (ret_insn == insn)
    return ret_insn;

  df_insns_modify (df, bb, NEXT_INSN (prev_insn), ret_insn);
  return ret_insn;
}


/* Emit PATTERN after INSN within BB.  */
rtx
df_pattern_emit_after (df, pattern, bb, insn)
     struct df *df;
     rtx pattern;
     basic_block bb;
     rtx insn;
{
  rtx ret_insn;

  ret_insn = emit_insn_after (pattern, insn);
  if (ret_insn == insn)
    return ret_insn;

  df_insns_modify (df, bb, NEXT_INSN (insn), ret_insn);
  return ret_insn;
}


/* Emit jump PATTERN after INSN within BB.  */
rtx
df_jump_pattern_emit_after (df, pattern, bb, insn)
     struct df *df;
     rtx pattern;
     basic_block bb;
     rtx insn;
{
  rtx ret_insn;

  ret_insn = emit_jump_insn_after (pattern, insn);
  if (ret_insn == insn)
    return ret_insn;

  df_insns_modify (df, bb, NEXT_INSN (insn), ret_insn);
  return ret_insn;
}

/* Move INSN within BB before BEFORE_INSN within BEFORE_BB.

   This function should only be used to move loop invariant insns
   out of a loop where it has been proven that the def-use info
   will still be valid.  */
rtx
df_insn_move_before (df, bb, insn, before_bb, before_insn)
     struct df *df;
     basic_block bb;
     rtx insn;
     basic_block before_bb;
     rtx before_insn;
{
  struct df_link *link;
  unsigned int uid;

  if (! bb)
    return df_pattern_emit_before (df, insn, before_bb, before_insn);

  uid = INSN_UID (insn);

  /* Change the bb of all the refs defined and used by this insn.  */
  for (link = df->insns[uid].defs; link; link = link->next)
    DF_REF_BB (link->ref) = before_bb;
  for (link = df->insns[uid].uses; link; link = link->next)
    DF_REF_BB (link->ref) = before_bb;

  /* The lifetimes of the registers used in this insn will be reduced
     while the lifetimes of the registers defined in this insn
     are likely to be increased.  */

  /* ???? Perhaps all the insns moved should be stored on a list
     which df_analyse removes when it recalculates data flow.  */

  return emit_insn_before (insn, before_insn);
}
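
/* For instance, hoisting an invariant insn into a loop preheader
   might be written as (a sketch; the preheader names are
   hypothetical):

      df_insn_move_before (df, bb, insn, preheader_bb,
                           preheader_end_insn);

   on the proviso, per the comment above, that the def-use info has
   been proven to remain valid.  */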

/* Functions to query dataflow information.  */


/* Return non-zero if INSN defines register REGNO.  */
int
df_insn_regno_def_p (df, bb, insn, regno)
     struct df *df;
     basic_block bb ATTRIBUTE_UNUSED;
     rtx insn;
     unsigned int regno;
{
  unsigned int uid;
  struct df_link *link;

  uid = INSN_UID (insn);

  for (link = df->insns[uid].defs; link; link = link->next)
    {
      struct ref *def = link->ref;

      if (DF_REF_REGNO (def) == regno)
        return 1;
    }

  return 0;
}


/* Return non-zero if DEF is the only def reaching each of its uses,
   i.e. every use on DEF's def-use chain has DEF as its sole def.  */
static int
df_def_dominates_all_uses_p (df, def)
     struct df *df ATTRIBUTE_UNUSED;
     struct ref *def;
{
  struct df_link *du_link;

  /* Follow def-use chain to find all the uses of this def.  */
  for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
    {
      struct ref *use = du_link->ref;
      struct df_link *ud_link;

      /* Follow use-def chain to check all the defs for this use.  */
      for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
        if (ud_link->ref != def)
          return 0;
    }
  return 1;
}


/* Return non-zero if all the defs of INSN within BB dominate all
   their corresponding uses.  */
int
df_insn_dominates_all_uses_p (df, bb, insn)
     struct df *df;
     basic_block bb ATTRIBUTE_UNUSED;
     rtx insn;
{
  unsigned int uid;
  struct df_link *link;

  uid = INSN_UID (insn);

  for (link = df->insns[uid].defs; link; link = link->next)
    {
      struct ref *def = link->ref;

      if (! df_def_dominates_all_uses_p (df, def))
        return 0;
    }

  return 1;
}

/* Return non-zero if DEF dominates all its uses within the bitmap
   BLOCKS.  */
static int
df_def_dominates_uses_p (df, def, blocks)
     struct df *df ATTRIBUTE_UNUSED;
     struct ref *def;
     bitmap blocks;
{
  struct df_link *du_link;

  /* Follow def-use chain to find all the uses of this def.  */
  for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
    {
      struct ref *use = du_link->ref;
      struct df_link *ud_link;

      /* Only worry about the uses within BLOCKS.  For example,
         consider a register defined within a loop that is live at the
         loop exits.  */
      if (bitmap_bit_p (blocks, DF_REF_BBNO (use)))
        {
          /* Follow use-def chain to check all the defs for this use.  */
          for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
            if (ud_link->ref != def)
              return 0;
        }
    }
  return 1;
}


/* Return non-zero if all the defs of INSN within BB dominate all
   the corresponding uses.  */
int
df_insn_dominates_uses_p (df, bb, insn, blocks)
     struct df *df;
     basic_block bb ATTRIBUTE_UNUSED;
     rtx insn;
     bitmap blocks;
{
  unsigned int uid;
  struct df_link *link;

  uid = INSN_UID (insn);

  for (link = df->insns[uid].defs; link; link = link->next)
    {
      struct ref *def = link->ref;

      /* Only consider the defs within BLOCKS.  */
      if (bitmap_bit_p (blocks, DF_REF_BBNO (def))
          && ! df_def_dominates_uses_p (df, def, blocks))
        return 0;
    }
  return 1;
}

/* Return the basic block that REG is referenced in, or NULL if it is
   referenced in multiple basic blocks.  */
basic_block
df_regno_bb (df, regno)
     struct df *df;
     unsigned int regno;
{
  struct df_link *defs = df->regs[regno].defs;
  struct df_link *uses = df->regs[regno].uses;
  struct ref *def = defs ? defs->ref : 0;
  struct ref *use = uses ? uses->ref : 0;
  basic_block bb_def = def ? DF_REF_BB (def) : 0;
  basic_block bb_use = use ? DF_REF_BB (use) : 0;

  /* Compare blocks of first def and last use.  ???? FIXME.  What if
     the reg-def and reg-use lists are not correctly ordered?  */
  return bb_def == bb_use ? bb_def : 0;
}

/* Return non-zero if REG is used in multiple basic blocks.  */
int
df_reg_global_p (df, reg)
     struct df *df;
     rtx reg;
{
  /* df_regno_bb returns null when REG is referenced in more than one
     basic block.  */
  return df_regno_bb (df, REGNO (reg)) == 0;
}

/* Return total lifetime (in insns) of REG.  */
int
df_reg_lifetime (df, reg)
     struct df *df;
     rtx reg;
{
  return df->regs[REGNO (reg)].lifetime;
}

/* Return non-zero if REG is live at the start of BB.  */
int
df_bb_reg_live_start_p (df, bb, reg)
     struct df *df;
     basic_block bb;
     rtx reg;
{
  struct bb_info *bb_info = DF_BB_INFO (df, bb);

#ifdef ENABLE_CHECKING
  if (! bb_info->lr_in)
    abort ();
#endif

  return bitmap_bit_p (bb_info->lr_in, REGNO (reg));
}

/* Return non-zero if REG is live at the end of BB.  */
int
df_bb_reg_live_end_p (df, bb, reg)
     struct df *df;
     basic_block bb;
     rtx reg;
{
  struct bb_info *bb_info = DF_BB_INFO (df, bb);

#ifdef ENABLE_CHECKING
  if (! bb_info->lr_out)
    abort ();
#endif

  return bitmap_bit_p (bb_info->lr_out, REGNO (reg));
}

/* Return -1 if the life of REG1 ends before the life of REG2 begins,
   1 if the life of REG1 begins after the life of REG2 ends, or 0 if
   the lives overlap.  */
int
df_bb_regs_lives_compare (df, bb, reg1, reg2)
     struct df *df;
     basic_block bb;
     rtx reg1;
     rtx reg2;
{
  unsigned int regno1 = REGNO (reg1);
  unsigned int regno2 = REGNO (reg2);
  struct ref *def1;
  struct ref *use1;
  struct ref *def2;
  struct ref *use2;

  /* The regs must be local to BB.  */
  if (df_regno_bb (df, regno1) != bb
      || df_regno_bb (df, regno2) != bb)
    abort ();

  def2 = df_bb_regno_first_def_find (df, bb, regno2);
  use1 = df_bb_regno_last_use_find (df, bb, regno1);

  if (DF_INSN_LUID (df, DF_REF_INSN (def2))
      > DF_INSN_LUID (df, DF_REF_INSN (use1)))
    return -1;

  def1 = df_bb_regno_first_def_find (df, bb, regno1);
  use2 = df_bb_regno_last_use_find (df, bb, regno2);

  if (DF_INSN_LUID (df, DF_REF_INSN (def1))
      > DF_INSN_LUID (df, DF_REF_INSN (use2)))
    return 1;

  return 0;
}
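
/* As a concrete illustration: if the last use of REG1 has LUID 4 and
   the first def of REG2 has LUID 7, then REG2's life starts after
   REG1's has ended and the result is -1; with the roles swapped the
   result is 1; in all other cases the lives overlap and the result
   is 0.  */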

/* Return last use of REGNO within BB.  */
static struct ref *
df_bb_regno_last_use_find (df, bb, regno)
     struct df *df;
     basic_block bb;
     unsigned int regno;
{
  struct df_link *link;

  /* This assumes that the reg-use list is ordered such that for any
     BB, the last use is found first.  However, since the BBs are not
     ordered, the first use in the chain is not necessarily the last
     use in the function.  */
  for (link = df->regs[regno].uses; link; link = link->next)
    {
      struct ref *use = link->ref;

      if (DF_REF_BB (use) == bb)
        return use;
    }
  return 0;
}


/* Return first def of REGNO within BB.  */
static struct ref *
df_bb_regno_first_def_find (df, bb, regno)
     struct df *df;
     basic_block bb;
     unsigned int regno;
{
  struct df_link *link;

  /* This assumes that the reg-def list is ordered such that for any
     BB, the first def is found first.  However, since the BBs are not
     ordered, the first def in the chain is not necessarily the first
     def in the function.  */
  for (link = df->regs[regno].defs; link; link = link->next)
    {
      struct ref *def = link->ref;

      if (DF_REF_BB (def) == bb)
        return def;
    }
  return 0;
}


/* Return last use of REGNO inside INSN within BB.  */
static struct ref *
df_bb_insn_regno_last_use_find (df, bb, insn, regno)
     struct df *df;
     basic_block bb ATTRIBUTE_UNUSED;
     rtx insn;
     unsigned int regno;
{
  unsigned int uid;
  struct df_link *link;

  uid = INSN_UID (insn);

  for (link = df->insns[uid].uses; link; link = link->next)
    {
      struct ref *use = link->ref;

      if (DF_REF_REGNO (use) == regno)
        return use;
    }

  return 0;
}


/* Return first def of REGNO inside INSN within BB.  */
static struct ref *
df_bb_insn_regno_first_def_find (df, bb, insn, regno)
     struct df *df;
     basic_block bb ATTRIBUTE_UNUSED;
     rtx insn;
     unsigned int regno;
{
  unsigned int uid;
  struct df_link *link;

  uid = INSN_UID (insn);

  for (link = df->insns[uid].defs; link; link = link->next)
    {
      struct ref *def = link->ref;

      if (DF_REF_REGNO (def) == regno)
        return def;
    }

  return 0;
}

/* Return insn using REG if the BB contains only a single
   use and def of REG.  */
rtx
df_bb_single_def_use_insn_find (df, bb, insn, reg)
     struct df *df;
     basic_block bb;
     rtx insn;
     rtx reg;
{
  struct ref *def;
  struct ref *use;
  struct df_link *du_link;

  def = df_bb_insn_regno_first_def_find (df, bb, insn, REGNO (reg));

  if (! def)
    abort ();

  du_link = DF_REF_CHAIN (def);

  if (! du_link)
    return NULL_RTX;

  use = du_link->ref;

  /* Check if def is dead.  */
  if (! use)
    return NULL_RTX;

  /* Check for multiple uses.  */
  if (du_link->next)
    return NULL_RTX;

  return DF_REF_INSN (use);
}

/* Functions for debugging/dumping dataflow information.  */


/* Dump a def-use or use-def chain for REF to FILE.  */
static void
df_chain_dump (link, file)
     struct df_link *link;
     FILE *file;
{
  fprintf (file, "{ ");
  for (; link; link = link->next)
    {
      fprintf (file, "%c%d ",
               DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
               DF_REF_ID (link->ref));
    }
  fprintf (file, "}");
}


/* Dump a chain like df_chain_dump, also printing the regno of each
   ref.  */
static void
df_chain_dump_regno (link, file)
     struct df_link *link;
     FILE *file;
{
  fprintf (file, "{ ");
  for (; link; link = link->next)
    {
      fprintf (file, "%c%d(%d) ",
               DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
               DF_REF_ID (link->ref),
               DF_REF_REGNO (link->ref));
    }
  fprintf (file, "}");
}

/* Dump dataflow info.  */
void
df_dump (df, flags, file)
     struct df *df;
     int flags;
     FILE *file;
{
  unsigned int i;
  unsigned int j;

  if (! df || ! file)
    return;

  fprintf (file, "\nDataflow summary:\n");
  fprintf (file, "n_regs = %d, n_defs = %d, n_uses = %d, n_bbs = %d\n",
           df->n_regs, df->n_defs, df->n_uses, df->n_bbs);

  if (flags & DF_RD)
    {
      fprintf (file, "Reaching defs:\n");
      for (i = 0; i < df->n_bbs; i++)
        {
          basic_block bb = BASIC_BLOCK (i);
          struct bb_info *bb_info = DF_BB_INFO (df, bb);

          if (! bb_info->rd_in)
            continue;

          fprintf (file, "bb %d in  \t", i);
          dump_bitmap (file, bb_info->rd_in);
          fprintf (file, "bb %d gen \t", i);
          dump_bitmap (file, bb_info->rd_gen);
          fprintf (file, "bb %d kill\t", i);
          dump_bitmap (file, bb_info->rd_kill);
          fprintf (file, "bb %d out \t", i);
          dump_bitmap (file, bb_info->rd_out);
        }
    }

  if (flags & DF_UD_CHAIN)
    {
      fprintf (file, "Use-def chains:\n");
      for (j = 0; j < df->n_defs; j++)
        {
          if (df->defs[j])
            {
              fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
                       j, DF_REF_BBNO (df->defs[j]),
                       DF_INSN_LUID (df, DF_REF_INSN (df->defs[j])),
                       DF_REF_INSN_UID (df->defs[j]),
                       DF_REF_REGNO (df->defs[j]));
              if (df->defs[j]->flags & DF_REF_READ_WRITE)
                fprintf (file, "read/write ");
              df_chain_dump (DF_REF_CHAIN (df->defs[j]), file);
              fprintf (file, "\n");
            }
        }
    }

  if (flags & DF_RU)
    {
      fprintf (file, "Reaching uses:\n");
      for (i = 0; i < df->n_bbs; i++)
        {
          basic_block bb = BASIC_BLOCK (i);
          struct bb_info *bb_info = DF_BB_INFO (df, bb);

          if (! bb_info->ru_in)
            continue;

          fprintf (file, "bb %d in  \t", i);
          dump_bitmap (file, bb_info->ru_in);
          fprintf (file, "bb %d gen \t", i);
          dump_bitmap (file, bb_info->ru_gen);
          fprintf (file, "bb %d kill\t", i);
          dump_bitmap (file, bb_info->ru_kill);
          fprintf (file, "bb %d out \t", i);
          dump_bitmap (file, bb_info->ru_out);
        }
    }

  if (flags & DF_DU_CHAIN)
    {
      fprintf (file, "Def-use chains:\n");
      for (j = 0; j < df->n_uses; j++)
        {
          if (df->uses[j])
            {
              fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
                       j, DF_REF_BBNO (df->uses[j]),
                       DF_INSN_LUID (df, DF_REF_INSN (df->uses[j])),
                       DF_REF_INSN_UID (df->uses[j]),
                       DF_REF_REGNO (df->uses[j]));
              if (df->uses[j]->flags & DF_REF_READ_WRITE)
                fprintf (file, "read/write ");
              df_chain_dump (DF_REF_CHAIN (df->uses[j]), file);
              fprintf (file, "\n");
            }
        }
    }

  if (flags & DF_LR)
    {
      fprintf (file, "Live regs:\n");
      for (i = 0; i < df->n_bbs; i++)
        {
          basic_block bb = BASIC_BLOCK (i);
          struct bb_info *bb_info = DF_BB_INFO (df, bb);

          if (! bb_info->lr_in)
            continue;

          fprintf (file, "bb %d in  \t", i);
          dump_bitmap (file, bb_info->lr_in);
          fprintf (file, "bb %d use \t", i);
          dump_bitmap (file, bb_info->lr_use);
          fprintf (file, "bb %d def \t", i);
          dump_bitmap (file, bb_info->lr_def);
          fprintf (file, "bb %d out \t", i);
          dump_bitmap (file, bb_info->lr_out);
        }
    }

  if (flags & (DF_REG_INFO | DF_RD_CHAIN | DF_RU_CHAIN))
    {
      struct reg_info *reg_info = df->regs;

      fprintf (file, "Register info:\n");
      for (j = 0; j < df->n_regs; j++)
        {
          if (((flags & DF_REG_INFO)
               && (reg_info[j].n_uses || reg_info[j].n_defs))
              || ((flags & DF_RD_CHAIN) && reg_info[j].defs)
              || ((flags & DF_RU_CHAIN) && reg_info[j].uses))
            {
              fprintf (file, "reg %d", j);
              if ((flags & DF_RD_CHAIN) && (flags & DF_RU_CHAIN))
                {
                  basic_block bb = df_regno_bb (df, j);

                  if (bb)
                    fprintf (file, " bb %d", bb->index);
                  else
                    fprintf (file, " bb ?");
                }
              if (flags & DF_REG_INFO)
                {
                  fprintf (file, " life %d", reg_info[j].lifetime);
                }

              if ((flags & DF_REG_INFO) || (flags & DF_RD_CHAIN))
                {
                  fprintf (file, " defs ");
                  if (flags & DF_REG_INFO)
                    fprintf (file, "%d ", reg_info[j].n_defs);
                  if (flags & DF_RD_CHAIN)
                    df_chain_dump (reg_info[j].defs, file);
                }

              if ((flags & DF_REG_INFO) || (flags & DF_RU_CHAIN))
                {
                  fprintf (file, " uses ");
                  if (flags & DF_REG_INFO)
                    fprintf (file, "%d ", reg_info[j].n_uses);
                  if (flags & DF_RU_CHAIN)
                    df_chain_dump (reg_info[j].uses, file);
                }

              fprintf (file, "\n");
            }
        }
    }
  fprintf (file, "\n");
}

/* Dump the per-insn dataflow debugging info for INSN to FILE.  */
void
df_insn_debug (df, insn, file)
     struct df *df;
     rtx insn;
     FILE *file;
{
  unsigned int uid;
  int bbi;

  uid = INSN_UID (insn);
  if (uid >= df->insn_size)
    return;

  if (df->insns[uid].defs)
    bbi = DF_REF_BBNO (df->insns[uid].defs->ref);
  else if (df->insns[uid].uses)
    bbi = DF_REF_BBNO (df->insns[uid].uses->ref);
  else
    bbi = -1;

  fprintf (file, "insn %d bb %d luid %d defs ",
           uid, bbi, DF_INSN_LUID (df, insn));
  df_chain_dump (df->insns[uid].defs, file);
  fprintf (file, " uses ");
  df_chain_dump (df->insns[uid].uses, file);
  fprintf (file, "\n");
}


/* As df_insn_debug, but also print the regno of each ref.  */
void
df_insn_debug_regno (df, insn, file)
     struct df *df;
     rtx insn;
     FILE *file;
{
  unsigned int uid;
  int bbi;

  uid = INSN_UID (insn);
  if (uid >= df->insn_size)
    return;

  if (df->insns[uid].defs)
    bbi = DF_REF_BBNO (df->insns[uid].defs->ref);
  else if (df->insns[uid].uses)
    bbi = DF_REF_BBNO (df->insns[uid].uses->ref);
  else
    bbi = -1;

  fprintf (file, "insn %d bb %d luid %d defs ",
           uid, bbi, DF_INSN_LUID (df, insn));
  df_chain_dump_regno (df->insns[uid].defs, file);
  fprintf (file, " uses ");
  df_chain_dump_regno (df->insns[uid].uses, file);
  fprintf (file, "\n");
}

/* Dump the dataflow debugging info for register REGNO to FILE.  */
static void
df_regno_debug (df, regno, file)
     struct df *df;
     unsigned int regno;
     FILE *file;
{
  if (regno >= df->reg_size)
    return;

  fprintf (file, "reg %d life %d defs ",
           regno, df->regs[regno].lifetime);
  df_chain_dump (df->regs[regno].defs, file);
  fprintf (file, " uses ");
  df_chain_dump (df->regs[regno].uses, file);
  fprintf (file, "\n");
}


/* Dump the dataflow debugging info for ref REF to FILE.  */
static void
df_ref_debug (df, ref, file)
     struct df *df;
     struct ref *ref;
     FILE *file;
{
  fprintf (file, "%c%d ",
           DF_REF_REG_DEF_P (ref) ? 'd' : 'u',
           DF_REF_ID (ref));
  fprintf (file, "reg %d bb %d luid %d insn %d chain ",
           DF_REF_REGNO (ref),
           DF_REF_BBNO (ref),
           DF_INSN_LUID (df, DF_REF_INSN (ref)),
           INSN_UID (DF_REF_INSN (ref)));
  df_chain_dump (DF_REF_CHAIN (ref), file);
  fprintf (file, "\n");
}


/* Debugging hooks intended to be called from the debugger; they dump
   to stderr using the global df saved by df_init.  */

void
debug_df_insn (insn)
     rtx insn;
{
  df_insn_debug (ddf, insn, stderr);
  debug_rtx (insn);
}


void
debug_df_reg (reg)
     rtx reg;
{
  df_regno_debug (ddf, REGNO (reg), stderr);
}


void
debug_df_regno (regno)
     unsigned int regno;
{
  df_regno_debug (ddf, regno, stderr);
}


void
debug_df_ref (ref)
     struct ref *ref;
{
  df_ref_debug (ddf, ref, stderr);
}


void
debug_df_defno (defno)
     unsigned int defno;
{
  df_ref_debug (ddf, ddf->defs[defno], stderr);
}


void
debug_df_useno (useno)
     unsigned int useno;
{
  df_ref_debug (ddf, ddf->uses[useno], stderr);
}


void
debug_df_chain (link)
     struct df_link *link;
{
  df_chain_dump (link, stderr);
  fputc ('\n', stderr);
}

/* Hybrid search algorithm from "Implementation Techniques for
   Efficient Data-Flow Analysis of Large Programs".  */
static void
hybrid_search_bitmap (block, in, out, gen, kill, dir,
                      conf_op, transfun, visited, pending,
                      data)
     basic_block block;
     bitmap *in, *out, *gen, *kill;
     enum df_flow_dir dir;
     enum df_confluence_op conf_op;
     transfer_function_bitmap transfun;
     sbitmap visited;
     sbitmap pending;
     void *data;
{
  int changed;
  int i = block->index;
  edge e;
  basic_block bb = block;

  SET_BIT (visited, block->index);
  if (TEST_BIT (pending, block->index))
    {
      if (dir == FORWARD)
        {
          /* Calculate <conf_op> of predecessor_outs.  */
          bitmap_zero (in[i]);
          for (e = bb->pred; e != 0; e = e->pred_next)
            {
              if (e->src == ENTRY_BLOCK_PTR)
                continue;
              switch (conf_op)
                {
                case UNION:
                  bitmap_a_or_b (in[i], in[i], out[e->src->index]);
                  break;
                case INTERSECTION:
                  bitmap_a_and_b (in[i], in[i], out[e->src->index]);
                  break;
                }
            }
        }
      else
        {
          /* Calculate <conf_op> of successor ins.  */
          bitmap_zero (out[i]);
          for (e = bb->succ; e != 0; e = e->succ_next)
            {
              if (e->dest == EXIT_BLOCK_PTR)
                continue;
              switch (conf_op)
                {
                case UNION:
                  bitmap_a_or_b (out[i], out[i], in[e->dest->index]);
                  break;
                case INTERSECTION:
                  bitmap_a_and_b (out[i], out[i], in[e->dest->index]);
                  break;
                }
            }
        }
      /* Common part.  */
      (*transfun) (i, &changed, in[i], out[i], gen[i], kill[i], data);
      RESET_BIT (pending, i);
      if (changed)
        {
          if (dir == FORWARD)
            {
              for (e = bb->succ; e != 0; e = e->succ_next)
                {
                  if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
                    continue;
                  SET_BIT (pending, e->dest->index);
                }
            }
          else
            {
              for (e = bb->pred; e != 0; e = e->pred_next)
                {
                  if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
                    continue;
                  SET_BIT (pending, e->src->index);
                }
            }
        }
    }
  if (dir == FORWARD)
    {
      for (e = bb->succ; e != 0; e = e->succ_next)
        {
          if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
            continue;
          if (!TEST_BIT (visited, e->dest->index))
            hybrid_search_bitmap (e->dest, in, out, gen, kill, dir,
                                  conf_op, transfun, visited, pending,
                                  data);
        }
    }
  else
    {
      for (e = bb->pred; e != 0; e = e->pred_next)
        {
          if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
            continue;
          if (!TEST_BIT (visited, e->src->index))
            hybrid_search_bitmap (e->src, in, out, gen, kill, dir,
                                  conf_op, transfun, visited, pending,
                                  data);
        }
    }
}

/* Hybrid search for sbitmaps, rather than bitmaps.  */
static void
hybrid_search_sbitmap (block, in, out, gen, kill, dir,
                       conf_op, transfun, visited, pending,
                       data)
     basic_block block;
     sbitmap *in, *out, *gen, *kill;
     enum df_flow_dir dir;
     enum df_confluence_op conf_op;
     transfer_function_sbitmap transfun;
     sbitmap visited;
     sbitmap pending;
     void *data;
{
  int changed;
  int i = block->index;
  edge e;
  basic_block bb = block;

  SET_BIT (visited, block->index);
  if (TEST_BIT (pending, block->index))
    {
      if (dir == FORWARD)
        {
          /* Calculate <conf_op> of predecessor_outs.  */
          sbitmap_zero (in[i]);
          for (e = bb->pred; e != 0; e = e->pred_next)
            {
              if (e->src == ENTRY_BLOCK_PTR)
                continue;
              switch (conf_op)
                {
                case UNION:
                  sbitmap_a_or_b (in[i], in[i], out[e->src->index]);
                  break;
                case INTERSECTION:
                  sbitmap_a_and_b (in[i], in[i], out[e->src->index]);
                  break;
                }
            }
        }
      else
        {
          /* Calculate <conf_op> of successor ins.  */
          sbitmap_zero (out[i]);
          for (e = bb->succ; e != 0; e = e->succ_next)
            {
              if (e->dest == EXIT_BLOCK_PTR)
                continue;
              switch (conf_op)
                {
                case UNION:
                  sbitmap_a_or_b (out[i], out[i], in[e->dest->index]);
                  break;
                case INTERSECTION:
                  sbitmap_a_and_b (out[i], out[i], in[e->dest->index]);
                  break;
                }
            }
        }
      /* Common part.  */
      (*transfun) (i, &changed, in[i], out[i], gen[i], kill[i], data);
      RESET_BIT (pending, i);
      if (changed)
        {
          if (dir == FORWARD)
            {
              for (e = bb->succ; e != 0; e = e->succ_next)
                {
                  if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
                    continue;
                  SET_BIT (pending, e->dest->index);
                }
            }
          else
            {
              for (e = bb->pred; e != 0; e = e->pred_next)
                {
                  if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
                    continue;
                  SET_BIT (pending, e->src->index);
                }
            }
        }
    }
  if (dir == FORWARD)
    {
      for (e = bb->succ; e != 0; e = e->succ_next)
        {
          if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
            continue;
          if (!TEST_BIT (visited, e->dest->index))
            hybrid_search_sbitmap (e->dest, in, out, gen, kill, dir,
                                   conf_op, transfun, visited, pending,
                                   data);
        }
    }
  else
    {
      for (e = bb->pred; e != 0; e = e->pred_next)
        {
          if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
            continue;
          if (!TEST_BIT (visited, e->src->index))
            hybrid_search_sbitmap (e->src, in, out, gen, kill, dir,
                                   conf_op, transfun, visited, pending,
                                   data);
        }
    }
}

/* gen = GEN set.
   kill = KILL set.
   in, out = Filled in by function.
   blocks = Blocks to analyze.
   dir = Dataflow direction.
   conf_op = Confluence operation.
   transfun = Transfer function.
   order = Order to iterate in.  (Should map block numbers -> order.)
   data = Whatever you want.  It's passed to the transfer function.

   This function will perform iterative bitvector dataflow, producing
   the in and out sets.  Even if you only want to perform it for a
   small number of blocks, the vectors for in and out must be large
   enough for *all* blocks, because changing one block might affect
   others.  However, it'll only put what you say to analyze on the
   initial worklist.

   For forward problems, you probably want to pass in a mapping of
   block number to rc_order (like df->inverse_rc_map).  */
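
/* As an illustration, a forward, union-confluence problem over all
   blocks could be solved with (a sketch; my_transfer is a
   hypothetical transfer_function_sbitmap):

      iterative_dataflow_sbitmap (in, out, gen, kill, all_blocks,
                                  FORWARD, UNION, my_transfer,
                                  df->inverse_rc_map, NULL);

   mirroring the iterative_dataflow_bitmap calls made from
   df_analyse_1 above.  */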

void
iterative_dataflow_sbitmap (in, out, gen, kill, blocks,
                            dir, conf_op, transfun, order, data)
     sbitmap *in, *out, *gen, *kill;
     bitmap blocks;
     enum df_flow_dir dir;
     enum df_confluence_op conf_op;
     transfer_function_sbitmap transfun;
     int *order;
     void *data;
{
  int i;
  fibheap_t worklist;
  basic_block bb;
  sbitmap visited, pending;

  pending = sbitmap_alloc (n_basic_blocks);
  visited = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (pending);
  sbitmap_zero (visited);
  worklist = fibheap_new ();

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
  {
    fibheap_insert (worklist, order[i], (void *) (size_t) i);
    SET_BIT (pending, i);
    if (dir == FORWARD)
      sbitmap_copy (out[i], gen[i]);
    else
      sbitmap_copy (in[i], gen[i]);
  });

  while (sbitmap_first_set_bit (pending) != -1)
    {
      while (!fibheap_empty (worklist))
        {
          i = (size_t) fibheap_extract_min (worklist);
          bb = BASIC_BLOCK (i);
          if (!TEST_BIT (visited, bb->index))
            hybrid_search_sbitmap (bb, in, out, gen, kill, dir,
                                   conf_op, transfun, visited, pending, data);
        }

      if (sbitmap_first_set_bit (pending) != -1)
        {
          EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
          {
            fibheap_insert (worklist, order[i], (void *) (size_t) i);
          });
          sbitmap_zero (visited);
        }
      else
        {
          break;
        }
    }

  sbitmap_free (pending);
  sbitmap_free (visited);
  fibheap_delete (worklist);
}

/* Exactly the same as iterative_dataflow_sbitmap, except it works on
   bitmaps instead.  */
void
iterative_dataflow_bitmap (in, out, gen, kill, blocks,
                           dir, conf_op, transfun, order, data)
     bitmap *in, *out, *gen, *kill;
     bitmap blocks;
     enum df_flow_dir dir;
     enum df_confluence_op conf_op;
     transfer_function_bitmap transfun;
     int *order;
     void *data;
{
  int i;
  fibheap_t worklist;
  basic_block bb;
  sbitmap visited, pending;

  pending = sbitmap_alloc (n_basic_blocks);
  visited = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (pending);
  sbitmap_zero (visited);
  worklist = fibheap_new ();

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
  {
    fibheap_insert (worklist, order[i], (void *) (size_t) i);
    SET_BIT (pending, i);
    if (dir == FORWARD)
      bitmap_copy (out[i], gen[i]);
    else
      bitmap_copy (in[i], gen[i]);
  });

  while (sbitmap_first_set_bit (pending) != -1)
    {
      while (!fibheap_empty (worklist))
        {
          i = (size_t) fibheap_extract_min (worklist);
          bb = BASIC_BLOCK (i);
          if (!TEST_BIT (visited, bb->index))
            hybrid_search_bitmap (bb, in, out, gen, kill, dir,
                                  conf_op, transfun, visited, pending, data);
        }

      if (sbitmap_first_set_bit (pending) != -1)
        {
          EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
          {
            fibheap_insert (worklist, order[i], (void *) (size_t) i);
          });
          sbitmap_zero (visited);
        }
      else
        {
          break;
        }
    }

  sbitmap_free (pending);
  sbitmap_free (visited);
  fibheap_delete (worklist);
}