1 /* Dataflow support routines.
2 Copyright (C) 1999, 2000, 2001 Free Software Foundation, Inc.
3 Contributed by Michael P. Hayes (m.hayes@elec.canterbury.ac.nz,
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA.
26 This file provides some dataflow routines for computing reaching defs,
27 upward exposed uses, live variables, def-use chains, and use-def
28 chains. The global dataflow is performed using simple iterative
29 methods with a worklist and could be sped up by ordering the blocks
30 with a depth first search order.
32 A `struct ref' data structure (ref) is allocated for every register
33 reference (def or use) and this records the insn and bb the ref is
34 found within. The refs are linked together in chains of uses and defs
35 for each insn and for each register. Each ref also has a chain field
36 that links all the use refs for a def or all the def refs for a use.
37 This is used to create use-def or def-use chains.
42 Here's an example of using the dataflow routines.
48 df_analyse (df, 0, DF_ALL);
50 df_dump (df, DF_ALL, stderr);
55 df_init simply creates a poor man's object (df) that needs to be
56 passed to all the dataflow routines. df_finish destroys this
57 object and frees up any allocated memory.
59 df_analyse performs the following:
61 1. Records defs and uses by scanning the insns in each basic block
62 or by scanning the insns queued by df_insn_modify.
63 2. Links defs and uses into insn-def and insn-use chains.
64 3. Links defs and uses into reg-def and reg-use chains.
65 4. Assigns LUIDs to each insn (for modified blocks).
66 5. Calculates local reaching definitions.
67 6. Calculates global reaching definitions.
68 7. Creates use-def chains.
69 8. Calculates local reaching uses (upwards exposed uses).
70 9. Calculates global reaching uses.
71 10. Creates def-use chains.
72 11. Calculates local live registers.
73 12. Calculates global live registers.
74 13. Calculates register lifetimes and determines local registers.
79 Note that the dataflow information is not updated for every newly
80 deleted or created insn. If the dataflow information requires
81 updating then all the changed, new, or deleted insns needs to be
82 marked with df_insn_modify (or df_insns_modify) either directly or
83 indirectly (say through calling df_insn_delete). df_insn_modify
84 marks all the modified insns to get processed the next time df_analyse
87 Beware that tinkering with insns may invalidate the dataflow information.
88 The philosophy behind these routines is that once the dataflow
89 information has been gathered, the user should store what they require
90 before they tinker with any insn. Once a reg is replaced, for example,
91 then the reg-def/reg-use chains will point to the wrong place. Once a
92 whole lot of changes have been made, df_analyse can be called again
93 to update the dataflow information. Currently, this is not very smart
94 with regard to propagating changes to the dataflow so it should not
100 The basic object is a REF (reference) and this may either be a DEF
101 (definition) or a USE of a register.
103 These are linked into a variety of lists; namely reg-def, reg-use,
104 insn-def, insn-use, def-use, and use-def lists. For example,
105 the reg-def lists contain all the refs that define a given register
106 while the insn-use lists contain all the refs used by an insn.
108 Note that the reg-def and reg-use chains are generally short (except for the
109 hard registers) and thus it is much faster to search these chains
110 rather than searching the def or use bitmaps.
112 If the insns are in SSA form then the reg-def and use-def lists
113 should only contain the single defining ref.
117 1) Incremental dataflow analysis.
119 Note that if a loop invariant insn is hoisted (or sunk), we do not
120 need to change the def-use or use-def chains. All we have to do is to
121 change the bb field for all the associated defs and uses and to
122 renumber the LUIDs for the original and new basic blocks of the insn.
124 When shadowing loop mems we create new uses and defs for new pseudos
125 so we do not affect the existing dataflow information.
127 My current strategy is to queue up all modified, created, or deleted
128 insns so when df_analyse is called we can easily determine all the new
129 or deleted refs. Currently the global dataflow information is
130 recomputed from scratch but this could be propagated more efficiently.
132 2) Improved global data flow computation using depth first search.
134 3) Reduced memory requirements.
136 We could operate a pool of ref structures. When a ref is deleted it
137 gets returned to the pool (say by linking on to a chain of free refs).
138 This will require a pair of bitmaps for defs and uses so that we can
139 tell which ones have been changed. Alternatively, we could
140 periodically squeeze the def and use tables and associated bitmaps and
141 renumber the def and use ids.
143 4) Ordering of reg-def and reg-use lists.
145 Should the first entry in the def list be the first def (within a BB)?
146 Similarly, should the first entry in the use list be the last use
149 5) Working with a sub-CFG.
151 Often the whole CFG does not need to be analysed, for example,
152 when optimising a loop, only certain registers are of interest.
153 Perhaps there should be a bitmap argument to df_analyse to specify
154 which registers should be analysed? */
156 #define HANDLE_SUBREG
161 #include "insn-config.h"
163 #include "function.h"
166 #include "hard-reg-set.h"
167 #include "basic-block.h"
/* Iterate over every basic block, binding BB to each block in turn
   and evaluating CODE for it.  */
#define FOR_ALL_BBS(BB, CODE)					\
do {								\
  int node_;							\
  for (node_ = 0; node_ < n_basic_blocks; node_++)		\
    {(BB) = BASIC_BLOCK (node_); CODE;};} while (0)

/* Iterate over the basic blocks whose indices are set in BITMAP,
   starting from bit MIN, binding BB and evaluating CODE.  */
#define FOR_EACH_BB_IN_BITMAP(BITMAP, MIN, BB, CODE)		\
do {								\
  unsigned int node_;						\
  EXECUTE_IF_SET_IN_BITMAP (BITMAP, MIN, node_,			\
    {(BB) = BASIC_BLOCK (node_); CODE;});} while (0)

/* As FOR_EACH_BB_IN_BITMAP, but visit the set bits in reverse order.  */
#define FOR_EACH_BB_IN_BITMAP_REV(BITMAP, MIN, BB, CODE)	\
do {								\
  unsigned int node_;						\
  EXECUTE_IF_SET_IN_BITMAP_REV (BITMAP, node_,			\
    {(BB) = BASIC_BLOCK (node_); CODE;});} while (0)

/* As FOR_EACH_BB_IN_BITMAP, but for a simple bitmap (sbitmap).  */
#define FOR_EACH_BB_IN_SBITMAP(BITMAP, MIN, BB, CODE)		\
do {								\
  unsigned int node_;						\
  EXECUTE_IF_SET_IN_SBITMAP (BITMAP, MIN, node_,		\
    {(BB) = BASIC_BLOCK (node_); CODE;});} while (0)

/* Hooks required by the obstack machinery.  */
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
199 static struct obstack df_ref_obstack
;
200 static struct df
*ddf
;
202 static void df_reg_table_realloc
PARAMS((struct df
*, int));
204 static void df_def_table_realloc
PARAMS((struct df
*, int));
206 static void df_insn_table_realloc
PARAMS((struct df
*, int));
207 static void df_bitmaps_alloc
PARAMS((struct df
*, int));
208 static void df_bitmaps_free
PARAMS((struct df
*, int));
209 static void df_free
PARAMS((struct df
*));
210 static void df_alloc
PARAMS((struct df
*, int));
212 static rtx df_reg_clobber_gen
PARAMS((unsigned int));
213 static rtx df_reg_use_gen
PARAMS((unsigned int));
215 static inline struct df_link
*df_link_create
PARAMS((struct ref
*,
217 static struct df_link
*df_ref_unlink
PARAMS((struct df_link
**, struct ref
*));
218 static void df_def_unlink
PARAMS((struct df
*, struct ref
*));
219 static void df_use_unlink
PARAMS((struct df
*, struct ref
*));
220 static void df_insn_refs_unlink
PARAMS ((struct df
*, basic_block
, rtx
));
221 static void df_bb_refs_unlink
PARAMS ((struct df
*, basic_block
));
223 static void df_refs_unlink
PARAMS ((struct df
*, bitmap
));
226 static struct ref
*df_ref_create
PARAMS((struct df
*,
227 rtx
, rtx
*, basic_block
, rtx
,
229 static void df_ref_record_1
PARAMS((struct df
*, rtx
, rtx
*,
230 basic_block
, rtx
, enum df_ref_type
));
231 static void df_ref_record
PARAMS((struct df
*, rtx
, rtx
*,
232 basic_block bb
, rtx
, enum df_ref_type
));
233 static void df_def_record_1
PARAMS((struct df
*, rtx
, basic_block
, rtx
));
234 static void df_defs_record
PARAMS((struct df
*, rtx
, basic_block
, rtx
));
235 static void df_uses_record
PARAMS((struct df
*, rtx
*,
236 enum df_ref_type
, basic_block
, rtx
));
237 static void df_insn_refs_record
PARAMS((struct df
*, basic_block
, rtx
));
238 static void df_bb_refs_record
PARAMS((struct df
*, basic_block
));
239 static void df_refs_record
PARAMS((struct df
*, bitmap
));
241 static int df_visit_next
PARAMS ((struct df
*, sbitmap
));
242 static void df_bb_reg_def_chain_create
PARAMS((struct df
*, basic_block
));
243 static void df_reg_def_chain_create
PARAMS((struct df
*, bitmap
));
244 static void df_bb_reg_use_chain_create
PARAMS((struct df
*, basic_block
));
245 static void df_reg_use_chain_create
PARAMS((struct df
*, bitmap
));
246 static void df_bb_du_chain_create
PARAMS((struct df
*, basic_block
, bitmap
));
247 static void df_du_chain_create
PARAMS((struct df
*, bitmap
));
248 static void df_bb_ud_chain_create
PARAMS((struct df
*, basic_block
));
249 static void df_ud_chain_create
PARAMS((struct df
*, bitmap
));
250 static void df_rd_global_compute
PARAMS((struct df
*, bitmap
));
251 static void df_ru_global_compute
PARAMS((struct df
*, bitmap
));
252 static void df_lr_global_compute
PARAMS((struct df
*, bitmap
));
253 static void df_bb_rd_local_compute
PARAMS((struct df
*, basic_block
));
254 static void df_rd_local_compute
PARAMS((struct df
*, bitmap
));
255 static void df_bb_ru_local_compute
PARAMS((struct df
*, basic_block
));
256 static void df_ru_local_compute
PARAMS((struct df
*, bitmap
));
257 static void df_bb_lr_local_compute
PARAMS((struct df
*, basic_block
));
258 static void df_lr_local_compute
PARAMS((struct df
*, bitmap
));
259 static void df_bb_reg_info_compute
PARAMS((struct df
*, basic_block
, bitmap
));
260 static void df_reg_info_compute
PARAMS((struct df
*, bitmap
));
262 static int df_bb_luids_set
PARAMS((struct df
*df
, basic_block
));
263 static int df_luids_set
PARAMS((struct df
*df
, bitmap
));
265 static int df_modified_p
PARAMS ((struct df
*, bitmap
));
266 static int df_refs_queue
PARAMS ((struct df
*));
267 static int df_refs_process
PARAMS ((struct df
*));
268 static int df_bb_refs_update
PARAMS ((struct df
*, basic_block
));
269 static int df_refs_update
PARAMS ((struct df
*));
270 static void df_analyse_1
PARAMS((struct df
*, bitmap
, int, int));
272 static void df_insns_modify
PARAMS((struct df
*, basic_block
,
274 static int df_rtx_mem_replace
PARAMS ((rtx
*, void *));
275 static int df_rtx_reg_replace
PARAMS ((rtx
*, void *));
276 void df_refs_reg_replace
PARAMS ((struct df
*, bitmap
,
277 struct df_link
*, rtx
, rtx
));
279 static int df_def_dominates_all_uses_p
PARAMS((struct df
*, struct ref
*def
));
280 static int df_def_dominates_uses_p
PARAMS((struct df
*,
281 struct ref
*def
, bitmap
));
282 static struct ref
*df_bb_regno_last_use_find
PARAMS((struct df
*, basic_block
,
284 static struct ref
*df_bb_regno_first_def_find
PARAMS((struct df
*, basic_block
,
286 static struct ref
*df_bb_insn_regno_last_use_find
PARAMS((struct df
*,
289 static struct ref
*df_bb_insn_regno_first_def_find
PARAMS((struct df
*,
293 static void df_chain_dump
PARAMS((struct df_link
*, FILE *file
));
294 static void df_chain_dump_regno
PARAMS((struct df_link
*, FILE *file
));
295 static void df_regno_debug
PARAMS ((struct df
*, unsigned int, FILE *));
296 static void df_ref_debug
PARAMS ((struct df
*, struct ref
*, FILE *));
299 /* Local memory allocation/deallocation routines. */
302 /* Increase the insn info table by SIZE more elements. */
304 df_insn_table_realloc (df
, size
)
308 /* Make table 25 percent larger by default. */
310 size
= df
->insn_size
/ 4;
312 size
+= df
->insn_size
;
314 df
->insns
= (struct insn_info
*)
315 xrealloc (df
->insns
, size
* sizeof (struct insn_info
));
317 memset (df
->insns
+ df
->insn_size
, 0,
318 (size
- df
->insn_size
) * sizeof (struct insn_info
));
320 df
->insn_size
= size
;
322 if (! df
->insns_modified
)
324 df
->insns_modified
= BITMAP_XMALLOC ();
325 bitmap_zero (df
->insns_modified
);
330 /* Increase the reg info table by SIZE more elements. */
332 df_reg_table_realloc (df
, size
)
336 /* Make table 25 percent larger by default. */
338 size
= df
->reg_size
/ 4;
340 size
+= df
->reg_size
;
342 df
->regs
= (struct reg_info
*)
343 xrealloc (df
->regs
, size
* sizeof (struct reg_info
));
345 /* Zero the new entries. */
346 memset (df
->regs
+ df
->reg_size
, 0,
347 (size
- df
->reg_size
) * sizeof (struct reg_info
));
354 /* Not currently used. */
356 df_def_table_realloc (df
, size
)
363 /* Make table 25 percent larger by default. */
365 size
= df
->def_size
/ 4;
367 df
->def_size
+= size
;
368 df
->defs
= xrealloc (df
->defs
,
369 df
->def_size
* sizeof (*df
->defs
));
371 /* Allocate a new block of memory and link into list of blocks
372 that will need to be freed later. */
374 refs
= xmalloc (size
* sizeof (*refs
));
376 /* Link all the new refs together, overloading the chain field. */
377 for (i
= 0; i
< size
- 1; i
++)
378 refs
[i
].chain
= (struct df_link
*)(refs
+ i
+ 1);
379 refs
[size
- 1].chain
= 0;
385 /* Allocate bitmaps for each basic block. */
387 df_bitmaps_alloc (df
, flags
)
394 /* Free the bitmaps if they need resizing. */
395 if ((flags
& DF_LR
) && df
->n_regs
< (unsigned int)max_reg_num ())
396 dflags
|= DF_LR
| DF_RU
;
397 if ((flags
& DF_RU
) && df
->n_uses
< df
->use_id
)
399 if ((flags
& DF_RD
) && df
->n_defs
< df
->def_id
)
403 df_bitmaps_free (df
, dflags
);
405 df
->n_defs
= df
->def_id
;
406 df
->n_uses
= df
->use_id
;
408 for (i
= 0; i
< df
->n_bbs
; i
++)
410 basic_block bb
= BASIC_BLOCK (i
);
411 struct bb_info
*bb_info
= DF_BB_INFO (df
, bb
);
413 if (flags
& DF_RD
&& ! bb_info
->rd_in
)
415 /* Allocate bitmaps for reaching definitions. */
416 bb_info
->rd_kill
= BITMAP_XMALLOC ();
417 bitmap_zero (bb_info
->rd_kill
);
418 bb_info
->rd_gen
= BITMAP_XMALLOC ();
419 bitmap_zero (bb_info
->rd_gen
);
420 bb_info
->rd_in
= BITMAP_XMALLOC ();
421 bb_info
->rd_out
= BITMAP_XMALLOC ();
422 bb_info
->rd_valid
= 0;
425 if (flags
& DF_RU
&& ! bb_info
->ru_in
)
427 /* Allocate bitmaps for upward exposed uses. */
428 bb_info
->ru_kill
= BITMAP_XMALLOC ();
429 bitmap_zero (bb_info
->ru_kill
);
430 /* Note the lack of symmetry. */
431 bb_info
->ru_gen
= BITMAP_XMALLOC ();
432 bitmap_zero (bb_info
->ru_gen
);
433 bb_info
->ru_in
= BITMAP_XMALLOC ();
434 bb_info
->ru_out
= BITMAP_XMALLOC ();
435 bb_info
->ru_valid
= 0;
438 if (flags
& DF_LR
&& ! bb_info
->lr_in
)
440 /* Allocate bitmaps for live variables. */
441 bb_info
->lr_def
= BITMAP_XMALLOC ();
442 bitmap_zero (bb_info
->lr_def
);
443 bb_info
->lr_use
= BITMAP_XMALLOC ();
444 bitmap_zero (bb_info
->lr_use
);
445 bb_info
->lr_in
= BITMAP_XMALLOC ();
446 bb_info
->lr_out
= BITMAP_XMALLOC ();
447 bb_info
->lr_valid
= 0;
453 /* Free bitmaps for each basic block. */
455 df_bitmaps_free (df
, flags
)
456 struct df
*df ATTRIBUTE_UNUSED
;
461 for (i
= 0; i
< df
->n_bbs
; i
++)
463 basic_block bb
= BASIC_BLOCK (i
);
464 struct bb_info
*bb_info
= DF_BB_INFO (df
, bb
);
469 if ((flags
& DF_RD
) && bb_info
->rd_in
)
471 /* Free bitmaps for reaching definitions. */
472 BITMAP_XFREE (bb_info
->rd_kill
);
473 bb_info
->rd_kill
= NULL
;
474 BITMAP_XFREE (bb_info
->rd_gen
);
475 bb_info
->rd_gen
= NULL
;
476 BITMAP_XFREE (bb_info
->rd_in
);
477 bb_info
->rd_in
= NULL
;
478 BITMAP_XFREE (bb_info
->rd_out
);
479 bb_info
->rd_out
= NULL
;
482 if ((flags
& DF_RU
) && bb_info
->ru_in
)
484 /* Free bitmaps for upward exposed uses. */
485 BITMAP_XFREE (bb_info
->ru_kill
);
486 bb_info
->ru_kill
= NULL
;
487 BITMAP_XFREE (bb_info
->ru_gen
);
488 bb_info
->ru_gen
= NULL
;
489 BITMAP_XFREE (bb_info
->ru_in
);
490 bb_info
->ru_in
= NULL
;
491 BITMAP_XFREE (bb_info
->ru_out
);
492 bb_info
->ru_out
= NULL
;
495 if ((flags
& DF_LR
) && bb_info
->lr_in
)
497 /* Free bitmaps for live variables. */
498 BITMAP_XFREE (bb_info
->lr_def
);
499 bb_info
->lr_def
= NULL
;
500 BITMAP_XFREE (bb_info
->lr_use
);
501 bb_info
->lr_use
= NULL
;
502 BITMAP_XFREE (bb_info
->lr_in
);
503 bb_info
->lr_in
= NULL
;
504 BITMAP_XFREE (bb_info
->lr_out
);
505 bb_info
->lr_out
= NULL
;
508 df
->flags
&= ~(flags
& (DF_RD
| DF_RU
| DF_LR
));
512 /* Allocate and initialise dataflow memory. */
514 df_alloc (df
, n_regs
)
521 gcc_obstack_init (&df_ref_obstack
);
523 /* Perhaps we should use LUIDs to save memory for the insn_refs
524 table. This is only a small saving; a few pointers. */
525 n_insns
= get_max_uid () + 1;
529 /* Approximate number of defs by number of insns. */
530 df
->def_size
= n_insns
;
531 df
->defs
= xmalloc (df
->def_size
* sizeof (*df
->defs
));
535 /* Approximate number of uses by twice number of insns. */
536 df
->use_size
= n_insns
* 2;
537 df
->uses
= xmalloc (df
->use_size
* sizeof (*df
->uses
));
540 df
->n_bbs
= n_basic_blocks
;
542 /* Allocate temporary working array used during local dataflow analysis. */
543 df
->reg_def_last
= xmalloc (df
->n_regs
* sizeof (struct ref
*));
545 df_insn_table_realloc (df
, n_insns
);
547 df_reg_table_realloc (df
, df
->n_regs
);
549 df
->bbs_modified
= BITMAP_XMALLOC ();
550 bitmap_zero (df
->bbs_modified
);
554 df
->bbs
= xcalloc (df
->n_bbs
, sizeof (struct bb_info
));
556 df
->all_blocks
= BITMAP_XMALLOC ();
557 for (i
= 0; i
< n_basic_blocks
; i
++)
558 bitmap_set_bit (df
->all_blocks
, i
);
562 /* Free all the dataflow info. */
567 df_bitmaps_free (df
, DF_ALL
);
595 if (df
->bbs_modified
)
596 BITMAP_XFREE (df
->bbs_modified
);
597 df
->bbs_modified
= 0;
599 if (df
->insns_modified
)
600 BITMAP_XFREE (df
->insns_modified
);
601 df
->insns_modified
= 0;
603 BITMAP_XFREE (df
->all_blocks
);
606 obstack_free (&df_ref_obstack
, NULL
);
609 /* Local miscellaneous routines. */
611 /* Return a USE for register REGNO. */
612 static rtx
df_reg_use_gen (regno
)
618 reg
= regno
>= FIRST_PSEUDO_REGISTER
619 ? regno_reg_rtx
[regno
] : gen_rtx_REG (reg_raw_mode
[regno
], regno
);
621 use
= gen_rtx_USE (GET_MODE (reg
), reg
);
626 /* Return a CLOBBER for register REGNO. */
627 static rtx
df_reg_clobber_gen (regno
)
633 reg
= regno
>= FIRST_PSEUDO_REGISTER
634 ? regno_reg_rtx
[regno
] : gen_rtx_REG (reg_raw_mode
[regno
], regno
);
636 use
= gen_rtx_CLOBBER (GET_MODE (reg
), reg
);
640 /* Local chain manipulation routines. */
642 /* Create a link in a def-use or use-def chain. */
643 static inline struct df_link
*
644 df_link_create (ref
, next
)
646 struct df_link
*next
;
648 struct df_link
*link
;
650 link
= (struct df_link
*) obstack_alloc (&df_ref_obstack
,
658 /* Add REF to chain head pointed to by PHEAD. */
659 static struct df_link
*
660 df_ref_unlink (phead
, ref
)
661 struct df_link
**phead
;
664 struct df_link
*link
= *phead
;
670 /* Only a single ref. It must be the one we want.
671 If not, the def-use and use-def chains are likely to
673 if (link
->ref
!= ref
)
675 /* Now have an empty chain. */
680 /* Multiple refs. One of them must be us. */
681 if (link
->ref
== ref
)
686 for (; link
->next
; link
= link
->next
)
688 if (link
->next
->ref
== ref
)
690 /* Unlink from list. */
691 link
->next
= link
->next
->next
;
702 /* Unlink REF from all def-use/use-def chains, etc. */
704 df_ref_remove (df
, ref
)
708 if (DF_REF_REG_DEF_P (ref
))
710 df_def_unlink (df
, ref
);
711 df_ref_unlink (&df
->insns
[DF_REF_INSN_UID (ref
)].defs
, ref
);
715 df_use_unlink (df
, ref
);
716 df_ref_unlink (&df
->insns
[DF_REF_INSN_UID (ref
)].uses
, ref
);
722 /* Unlink DEF from use-def and reg-def chains. */
724 df_def_unlink (df
, def
)
725 struct df
*df ATTRIBUTE_UNUSED
;
728 struct df_link
*du_link
;
729 unsigned int dregno
= DF_REF_REGNO (def
);
731 /* Follow def-use chain to find all the uses of this def. */
732 for (du_link
= DF_REF_CHAIN (def
); du_link
; du_link
= du_link
->next
)
734 struct ref
*use
= du_link
->ref
;
736 /* Unlink this def from the use-def chain. */
737 df_ref_unlink (&DF_REF_CHAIN (use
), def
);
739 DF_REF_CHAIN (def
) = 0;
741 /* Unlink def from reg-def chain. */
742 df_ref_unlink (&df
->regs
[dregno
].defs
, def
);
744 df
->defs
[DF_REF_ID (def
)] = 0;
748 /* Unlink use from def-use and reg-use chains. */
750 df_use_unlink (df
, use
)
751 struct df
*df ATTRIBUTE_UNUSED
;
754 struct df_link
*ud_link
;
755 unsigned int uregno
= DF_REF_REGNO (use
);
757 /* Follow use-def chain to find all the defs of this use. */
758 for (ud_link
= DF_REF_CHAIN (use
); ud_link
; ud_link
= ud_link
->next
)
760 struct ref
*def
= ud_link
->ref
;
762 /* Unlink this use from the def-use chain. */
763 df_ref_unlink (&DF_REF_CHAIN (def
), use
);
765 DF_REF_CHAIN (use
) = 0;
767 /* Unlink use from reg-use chain. */
768 df_ref_unlink (&df
->regs
[uregno
].uses
, use
);
770 df
->uses
[DF_REF_ID (use
)] = 0;
773 /* Local routines for recording refs. */
776 /* Create a new ref of type DF_REF_TYPE for register REG at address
777 LOC within INSN of BB. */
779 df_ref_create (df
, reg
, loc
, bb
, insn
, ref_type
)
785 enum df_ref_type ref_type
;
787 struct ref
*this_ref
;
790 this_ref
= (struct ref
*) obstack_alloc (&df_ref_obstack
,
792 DF_REF_REG (this_ref
) = reg
;
793 DF_REF_LOC (this_ref
) = loc
;
794 DF_REF_BB (this_ref
) = bb
;
795 DF_REF_INSN (this_ref
) = insn
;
796 DF_REF_CHAIN (this_ref
) = 0;
797 DF_REF_TYPE (this_ref
) = ref_type
;
798 uid
= INSN_UID (insn
);
800 if (ref_type
== DF_REF_REG_DEF
)
802 if (df
->def_id
>= df
->def_size
)
804 /* Make table 25 percent larger. */
805 df
->def_size
+= (df
->def_size
/ 4);
806 df
->defs
= xrealloc (df
->defs
,
807 df
->def_size
* sizeof (*df
->defs
));
809 DF_REF_ID (this_ref
) = df
->def_id
;
810 df
->defs
[df
->def_id
++] = this_ref
;
814 if (df
->use_id
>= df
->use_size
)
816 /* Make table 25 percent larger. */
817 df
->use_size
+= (df
->use_size
/ 4);
818 df
->uses
= xrealloc (df
->uses
,
819 df
->use_size
* sizeof (*df
->uses
));
821 DF_REF_ID (this_ref
) = df
->use_id
;
822 df
->uses
[df
->use_id
++] = this_ref
;
828 /* Create a new reference of type DF_REF_TYPE for a single register REG,
829 used inside the LOC rtx of INSN. */
831 df_ref_record_1 (df
, reg
, loc
, bb
, insn
, ref_type
)
837 enum df_ref_type ref_type
;
839 df_ref_create (df
, reg
, loc
, bb
, insn
, ref_type
);
843 /* Create new references of type DF_REF_TYPE for each part of register REG
844 at address LOC within INSN of BB. */
846 df_ref_record (df
, reg
, loc
, bb
, insn
, ref_type
)
852 enum df_ref_type ref_type
;
856 if (GET_CODE (reg
) != REG
&& GET_CODE (reg
) != SUBREG
)
859 /* For the reg allocator we are interested in some SUBREG rtx's, but not
860 all. Notably only those representing a word extraction from a multi-word
861 reg. As written in the docu those should have the form
862 (subreg:SI (reg:M A) N), with size(SImode) > size(Mmode).
863 XXX Is that true? We could also use the global word_mode variable. */
864 if (GET_CODE (reg
) == SUBREG
865 && (GET_MODE_SIZE (GET_MODE (reg
)) < GET_MODE_SIZE (word_mode
)
866 || GET_MODE_SIZE (GET_MODE (reg
))
867 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg
)))))
869 loc
= &SUBREG_REG (reg
);
873 regno
= REGNO (GET_CODE (reg
) == SUBREG
? SUBREG_REG (reg
) : reg
);
874 if (regno
< FIRST_PSEUDO_REGISTER
)
879 if (! (df
->flags
& DF_HARD_REGS
))
882 /* GET_MODE (reg) is correct here. We don't want to go into a SUBREG
883 for the mode, because we only want to add references to regs, which
884 are really referenced. E.g. a (subreg:SI (reg:DI 0) 0) does _not_
885 reference the whole reg 0 in DI mode (which would also include
886 reg 1, at least, if 0 and 1 are SImode registers). */
887 endregno
= regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
889 for (i
= regno
; i
< endregno
; i
++)
890 df_ref_record_1 (df
, gen_rtx_REG (reg_raw_mode
[i
], i
),
891 loc
, bb
, insn
, ref_type
);
895 df_ref_record_1 (df
, reg
, loc
, bb
, insn
, ref_type
);
900 /* Process all the registers defined in the rtx, X. */
902 df_def_record_1 (df
, x
, bb
, insn
)
908 rtx
*loc
= &SET_DEST (x
);
911 /* Some targets place small structures in registers for
912 return values of functions. */
913 if (GET_CODE (dst
) == PARALLEL
&& GET_MODE (dst
) == BLKmode
)
917 for (i
= XVECLEN (dst
, 0) - 1; i
>= 0; i
--)
918 df_def_record_1 (df
, XVECEXP (dst
, 0, i
), bb
, insn
);
922 /* May be, we should flag the use of strict_low_part somehow. Might be
923 handy for the reg allocator. */
925 while (GET_CODE (dst
) == STRICT_LOW_PART
926 || GET_CODE (dst
) == ZERO_EXTRACT
927 || GET_CODE (dst
) == SIGN_EXTRACT
)
929 loc
= &XEXP (dst
, 0);
932 /* For the reg allocator we are interested in exact register references.
933 This means, we want to know, if only a part of a register is
936 if (GET_CODE (dst) == SUBREG)
938 loc = &XEXP (dst, 0);
943 while (GET_CODE (dst
) == SUBREG
944 || GET_CODE (dst
) == ZERO_EXTRACT
945 || GET_CODE (dst
) == SIGN_EXTRACT
946 || GET_CODE (dst
) == STRICT_LOW_PART
)
948 loc
= &XEXP (dst
, 0);
953 if (GET_CODE (dst
) == REG
954 || (GET_CODE (dst
) == SUBREG
&& GET_CODE (SUBREG_REG (dst
)) == REG
))
955 df_ref_record (df
, dst
, loc
, bb
, insn
, DF_REF_REG_DEF
);
959 /* Process all the registers defined in the pattern rtx, X. */
961 df_defs_record (df
, x
, bb
, insn
)
967 RTX_CODE code
= GET_CODE (x
);
969 if (code
== SET
|| code
== CLOBBER
)
971 /* Mark the single def within the pattern. */
972 df_def_record_1 (df
, x
, bb
, insn
);
974 else if (code
== PARALLEL
)
978 /* Mark the multiple defs within the pattern. */
979 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
981 code
= GET_CODE (XVECEXP (x
, 0, i
));
982 if (code
== SET
|| code
== CLOBBER
)
983 df_def_record_1 (df
, XVECEXP (x
, 0, i
), bb
, insn
);
989 /* Process all the registers used in the rtx at address LOC. */
991 df_uses_record (df
, loc
, ref_type
, bb
, insn
)
994 enum df_ref_type ref_type
;
1003 code
= GET_CODE (x
);
1017 /* If we are clobbering a MEM, mark any registers inside the address
1019 if (GET_CODE (XEXP (x
, 0)) == MEM
)
1020 df_uses_record (df
, &XEXP (XEXP (x
, 0), 0),
1021 DF_REF_REG_MEM_STORE
, bb
, insn
);
1023 /* If we're clobbering a REG then we have a def so ignore. */
1027 df_uses_record (df
, &XEXP (x
, 0), DF_REF_REG_MEM_LOAD
, bb
, insn
);
1031 /* While we're here, optimize this case. */
1032 #if defined(HANDLE_SUBREG)
1034 /* In case the SUBREG is not of a register, don't optimize. */
1035 if (GET_CODE (SUBREG_REG (x
)) != REG
)
1037 loc
= &SUBREG_REG (x
);
1038 df_uses_record (df
, loc
, ref_type
, bb
, insn
);
1042 loc
= &SUBREG_REG (x
);
1044 if (GET_CODE (x
) != REG
)
1046 df_uses_record (df
, loc
, ref_type
, bb
, insn
);
1051 /* ... Fall through ... */
1054 /* See a register (or subreg) other than being set. */
1055 df_ref_record (df
, x
, loc
, bb
, insn
, ref_type
);
1060 rtx dst
= SET_DEST (x
);
1063 /* If storing into MEM, don't show it as being used. But do
1064 show the address as being used. */
1065 if (GET_CODE (dst
) == MEM
)
1067 df_uses_record (df
, &XEXP (dst
, 0),
1068 DF_REF_REG_MEM_STORE
,
1070 df_uses_record (df
, &SET_SRC (x
), DF_REF_REG_USE
, bb
, insn
);
1074 #if 1 && defined(HANDLE_SUBREG)
1075 /* Look for sets that perform a read-modify-write. */
1076 while (GET_CODE (dst
) == STRICT_LOW_PART
1077 || GET_CODE (dst
) == ZERO_EXTRACT
1078 || GET_CODE (dst
) == SIGN_EXTRACT
)
1080 if (GET_CODE (dst
) == STRICT_LOW_PART
)
1082 dst
= XEXP (dst
, 0);
1083 if (GET_CODE (dst
) != SUBREG
)
1085 /* A strict_low_part uses the whole reg not only the subreg. */
1086 df_uses_record (df
, &SUBREG_REG (dst
), DF_REF_REG_USE
, bb
, insn
);
1090 df_uses_record (df
, &XEXP (dst
, 0), DF_REF_REG_USE
, bb
, insn
);
1091 dst
= XEXP (dst
, 0);
1094 if (GET_CODE (dst
) == SUBREG
)
1096 /* Paradoxical or too small subreg's are read-mod-write. */
1097 if (GET_MODE_SIZE (GET_MODE (dst
)) < GET_MODE_SIZE (word_mode
)
1098 || GET_MODE_SIZE (GET_MODE (dst
))
1099 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dst
))))
1102 /* In the original code also some SUBREG rtx's were considered
1103 read-modify-write (those with
1104 REG_SIZE(SUBREG_REG(dst)) > REG_SIZE(dst) )
1105 e.g. a (subreg:QI (reg:SI A) 0). I can't see this. The only
1106 reason for a read cycle for reg A would be to somehow preserve
1107 the bits outside of the subreg:QI. But for this a strict_low_part
1108 was necessary anyway, and this we handled already. */
1110 while (GET_CODE (dst
) == STRICT_LOW_PART
1111 || GET_CODE (dst
) == ZERO_EXTRACT
1112 || GET_CODE (dst
) == SIGN_EXTRACT
1113 || GET_CODE (dst
) == SUBREG
)
1115 /* A SUBREG of a smaller size does not use the old value. */
1116 if (GET_CODE (dst
) != SUBREG
1117 || (REG_SIZE (SUBREG_REG (dst
)) > REG_SIZE (dst
)))
1119 dst
= XEXP (dst
, 0);
1123 if ((GET_CODE (dst
) == PARALLEL
&& GET_MODE (dst
) == BLKmode
)
1124 || GET_CODE (dst
) == REG
|| GET_CODE (dst
) == SUBREG
)
1126 #if 1 || !defined(HANDLE_SUBREG)
1128 df_uses_record (df
, &SET_DEST (x
), DF_REF_REG_USE
, bb
, insn
);
1130 df_uses_record (df
, &SET_SRC (x
), DF_REF_REG_USE
, bb
, insn
);
1140 case UNSPEC_VOLATILE
:
1144 /* Traditional and volatile asm instructions must be considered to use
1145 and clobber all hard registers, all pseudo-registers and all of
1146 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
1148 Consider for instance a volatile asm that changes the fpu rounding
1149 mode. An insn should not be moved across this even if it only uses
1150 pseudo-regs because it might give an incorrectly rounded result.
1152 For now, just mark any regs we can find in ASM_OPERANDS as
1155 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
1156 We can not just fall through here since then we would be confused
1157 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
1158 traditional asms unlike their normal usage. */
1159 if (code
== ASM_OPERANDS
)
1163 for (j
= 0; j
< ASM_OPERANDS_INPUT_LENGTH (x
); j
++)
1164 df_uses_record (df
, &ASM_OPERANDS_INPUT (x
, j
),
1165 DF_REF_REG_USE
, bb
, insn
);
1176 /* Catch the def of the register being modified. */
1177 df_ref_record (df
, XEXP (x
, 0), &XEXP (x
, 0), bb
, insn
, DF_REF_REG_DEF
);
1179 /* ... Fall through to handle uses ... */
1185 /* Recursively scan the operands of this expression. */
1187 register const char *fmt
= GET_RTX_FORMAT (code
);
1190 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1194 /* Tail recursive case: save a function call level. */
1200 df_uses_record (df
, &XEXP (x
, i
), ref_type
, bb
, insn
);
1202 else if (fmt
[i
] == 'E')
1205 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1206 df_uses_record (df
, &XVECEXP (x
, i
, j
), ref_type
,
1214 /* Record all the df within INSN of basic block BB. */
1216 df_insn_refs_record (df
, bb
, insn
)
1225 /* Record register defs */
1226 df_defs_record (df
, PATTERN (insn
), bb
, insn
);
1228 if (GET_CODE (insn
) == CALL_INSN
)
1233 /* Record the registers used to pass arguments. */
1234 for (note
= CALL_INSN_FUNCTION_USAGE (insn
); note
;
1235 note
= XEXP (note
, 1))
1237 if (GET_CODE (XEXP (note
, 0)) == USE
)
1238 df_uses_record (df
, &SET_DEST (XEXP (note
, 0)), DF_REF_REG_USE
,
1242 /* The stack ptr is used (honorarily) by a CALL insn. */
1243 x
= df_reg_use_gen (STACK_POINTER_REGNUM
);
1244 df_uses_record (df
, &SET_DEST (x
), DF_REF_REG_USE
, bb
, insn
);
1246 if (df
->flags
& DF_HARD_REGS
)
1248 /* Calls may also reference any of the global registers,
1249 so they are recorded as used. */
1250 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1253 x
= df_reg_use_gen (i
);
1254 df_uses_record (df
, &SET_DEST (x
),
1255 DF_REF_REG_USE
, bb
, insn
);
1260 /* Record the register uses. */
1261 df_uses_record (df
, &PATTERN (insn
),
1262 DF_REF_REG_USE
, bb
, insn
);
1265 if (GET_CODE (insn
) == CALL_INSN
)
1269 if (df
->flags
& DF_HARD_REGS
)
1271 /* Each call clobbers all call-clobbered regs that are not
1272 global or fixed and have not been explicitly defined
1273 in the call pattern. */
1274 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1275 if (call_used_regs
[i
]
1278 && ! df_insn_regno_def_p (df
, bb
, insn
, i
))
1280 rtx reg_clob
= df_reg_clobber_gen (i
);
1281 df_defs_record (df
, reg_clob
, bb
, insn
);
1285 /* There may be extra registers to be clobbered. */
1286 for (note
= CALL_INSN_FUNCTION_USAGE (insn
);
1288 note
= XEXP (note
, 1))
1289 if (GET_CODE (XEXP (note
, 0)) == CLOBBER
)
1290 df_defs_record (df
, XEXP (note
, 0), bb
, insn
);
1296 /* Record all the refs within the basic block BB. */
1298 df_bb_refs_record (df
, bb
)
1304 /* Scan the block an insn at a time from beginning to end. */
1305 for (insn
= bb
->head
; ; insn
= NEXT_INSN (insn
))
1309 /* Record defs within INSN. */
1310 df_insn_refs_record (df
, bb
, insn
);
1312 if (insn
== bb
->end
)
1318 /* Record all the refs in the basic blocks specified by BLOCKS. */
1320 df_refs_record (df
, blocks
)
1326 FOR_EACH_BB_IN_BITMAP (blocks
, 0, bb
,
1328 df_bb_refs_record (df
, bb
);
1332 /* Dataflow analysis routines. */
1335 /* Create reg-def chains for basic block BB. These are a list of
1336 definitions for each register. */
1338 df_bb_reg_def_chain_create (df
, bb
)
1344 /* Perhaps the defs should be sorted using a depth first search
1345 of the CFG (or possibly a breadth first search). We currently
1346 scan the basic blocks in reverse order so that the first defs
1347 apprear at the start of the chain. */
1349 for (insn
= bb
->end
; insn
&& insn
!= PREV_INSN (bb
->head
);
1350 insn
= PREV_INSN (insn
))
1352 struct df_link
*link
;
1353 unsigned int uid
= INSN_UID (insn
);
1355 if (! INSN_P (insn
))
1358 for (link
= df
->insns
[uid
].defs
; link
; link
= link
->next
)
1360 struct ref
*def
= link
->ref
;
1361 unsigned int dregno
= DF_REF_REGNO (def
);
1363 df
->regs
[dregno
].defs
1364 = df_link_create (def
, df
->regs
[dregno
].defs
);
1370 /* Create reg-def chains for each basic block within BLOCKS. These
1371 are a list of definitions for each register. */
1373 df_reg_def_chain_create (df
, blocks
)
1379 FOR_EACH_BB_IN_BITMAP
/*_REV*/ (blocks
, 0, bb
,
1381 df_bb_reg_def_chain_create (df
, bb
);
1386 /* Create reg-use chains for basic block BB. These are a list of uses
1387 for each register. */
1389 df_bb_reg_use_chain_create (df
, bb
)
1395 /* Scan in forward order so that the last uses appear at the
1396 start of the chain. */
1398 for (insn
= bb
->head
; insn
&& insn
!= NEXT_INSN (bb
->end
);
1399 insn
= NEXT_INSN (insn
))
1401 struct df_link
*link
;
1402 unsigned int uid
= INSN_UID (insn
);
1404 if (! INSN_P (insn
))
1407 for (link
= df
->insns
[uid
].uses
; link
; link
= link
->next
)
1409 struct ref
*use
= link
->ref
;
1410 unsigned int uregno
= DF_REF_REGNO (use
);
1412 df
->regs
[uregno
].uses
1413 = df_link_create (use
, df
->regs
[uregno
].uses
);
1419 /* Create reg-use chains for each basic block within BLOCKS. These
1420 are a list of uses for each register. */
1422 df_reg_use_chain_create (df
, blocks
)
1428 FOR_EACH_BB_IN_BITMAP (blocks
, 0, bb
,
1430 df_bb_reg_use_chain_create (df
, bb
);
1435 /* Create def-use chains from reaching use bitmaps for basic block BB. */
1437 df_bb_du_chain_create (df
, bb
, ru
)
1442 struct bb_info
*bb_info
= DF_BB_INFO (df
, bb
);
1445 bitmap_copy (ru
, bb_info
->ru_out
);
1447 /* For each def in BB create a linked list (chain) of uses
1448 reached from the def. */
1449 for (insn
= bb
->end
; insn
&& insn
!= PREV_INSN (bb
->head
);
1450 insn
= PREV_INSN (insn
))
1452 struct df_link
*def_link
;
1453 struct df_link
*use_link
;
1454 unsigned int uid
= INSN_UID (insn
);
1456 if (! INSN_P (insn
))
1459 /* For each def in insn... */
1460 for (def_link
= df
->insns
[uid
].defs
; def_link
; def_link
= def_link
->next
)
1462 struct ref
*def
= def_link
->ref
;
1463 unsigned int dregno
= DF_REF_REGNO (def
);
1465 DF_REF_CHAIN (def
) = 0;
1467 /* While the reg-use chains are not essential, it
1468 is _much_ faster to search these short lists rather
1469 than all the reaching uses, especially for large functions. */
1470 for (use_link
= df
->regs
[dregno
].uses
; use_link
;
1471 use_link
= use_link
->next
)
1473 struct ref
*use
= use_link
->ref
;
1475 if (bitmap_bit_p (ru
, DF_REF_ID (use
)))
1478 = df_link_create (use
, DF_REF_CHAIN (def
));
1480 bitmap_clear_bit (ru
, DF_REF_ID (use
));
1485 /* For each use in insn... */
1486 for (use_link
= df
->insns
[uid
].uses
; use_link
; use_link
= use_link
->next
)
1488 struct ref
*use
= use_link
->ref
;
1489 bitmap_set_bit (ru
, DF_REF_ID (use
));
1495 /* Create def-use chains from reaching use bitmaps for basic blocks
1498 df_du_chain_create (df
, blocks
)
1505 ru
= BITMAP_XMALLOC ();
1507 FOR_EACH_BB_IN_BITMAP (blocks
, 0, bb
,
1509 df_bb_du_chain_create (df
, bb
, ru
);
1516 /* Create use-def chains from reaching def bitmaps for basic block BB. */
1518 df_bb_ud_chain_create (df
, bb
)
1522 struct bb_info
*bb_info
= DF_BB_INFO (df
, bb
);
1523 struct ref
**reg_def_last
= df
->reg_def_last
;
1526 memset (reg_def_last
, 0, df
->n_regs
* sizeof (struct ref
*));
1528 /* For each use in BB create a linked list (chain) of defs
1529 that reach the use. */
1530 for (insn
= bb
->head
; insn
&& insn
!= NEXT_INSN (bb
->end
);
1531 insn
= NEXT_INSN (insn
))
1533 unsigned int uid
= INSN_UID (insn
);
1534 struct df_link
*use_link
;
1535 struct df_link
*def_link
;
1537 if (! INSN_P (insn
))
1540 /* For each use in insn... */
1541 for (use_link
= df
->insns
[uid
].uses
; use_link
; use_link
= use_link
->next
)
1543 struct ref
*use
= use_link
->ref
;
1544 unsigned int regno
= DF_REF_REGNO (use
);
1546 DF_REF_CHAIN (use
) = 0;
1548 /* Has regno been defined in this BB yet? If so, use
1549 the last def as the single entry for the use-def
1550 chain for this use. Otherwise, we need to add all
1551 the defs using this regno that reach the start of
1553 if (reg_def_last
[regno
])
1556 = df_link_create (reg_def_last
[regno
], 0);
1560 /* While the reg-def chains are not essential, it is
1561 _much_ faster to search these short lists rather than
1562 all the reaching defs, especially for large
1564 for (def_link
= df
->regs
[regno
].defs
; def_link
;
1565 def_link
= def_link
->next
)
1567 struct ref
*def
= def_link
->ref
;
1569 if (bitmap_bit_p (bb_info
->rd_in
, DF_REF_ID (def
)))
1572 = df_link_create (def
, DF_REF_CHAIN (use
));
1579 /* For each def in insn...record the last def of each reg. */
1580 for (def_link
= df
->insns
[uid
].defs
; def_link
; def_link
= def_link
->next
)
1582 struct ref
*def
= def_link
->ref
;
1583 int dregno
= DF_REF_REGNO (def
);
1585 reg_def_last
[dregno
] = def
;
1591 /* Create use-def chains from reaching def bitmaps for basic blocks
1594 df_ud_chain_create (df
, blocks
)
1600 FOR_EACH_BB_IN_BITMAP (blocks
, 0, bb
,
1602 df_bb_ud_chain_create (df
, bb
);
1607 /* Use depth first order, and the worklist, to figure out what block
1611 df_visit_next (df
, blocks
)
1612 struct df
*df ATTRIBUTE_UNUSED
;
1616 for (i
= 0; i
< n_basic_blocks
; i
++)
1617 if (TEST_BIT (blocks
, df
->rc_order
[i
]))
1618 return df
->rc_order
[i
];
1619 return sbitmap_first_set_bit (blocks
);
1622 /* Calculate reaching defs for each basic block in BLOCKS, i.e., the
1623 defs that are live at the start of a basic block. */
1625 df_rd_global_compute (df
, blocks
)
1626 struct df
*df ATTRIBUTE_UNUSED
;
1633 worklist
= sbitmap_alloc (n_basic_blocks
);
1634 sbitmap_zero (worklist
);
1636 /* Copy the blocklist to the worklist */
1637 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
,
1639 SET_BIT (worklist
, i
);
1642 /* We assume that only the basic blocks in WORKLIST have been
1644 FOR_EACH_BB_IN_SBITMAP (worklist
, 0, bb
,
1646 struct bb_info
*bb_info
= DF_BB_INFO (df
, bb
);
1648 bitmap_copy (bb_info
->rd_out
, bb_info
->rd_gen
);
1651 while ((i
= df_visit_next (df
, worklist
)) >= 0)
1653 struct bb_info
*bb_info
;
1657 /* Remove this block from the worklist. */
1658 RESET_BIT (worklist
, i
);
1661 bb
= BASIC_BLOCK (i
);
1662 bb_info
= DF_BB_INFO (df
, bb
);
1664 /* Calculate union of predecessor outs. */
1665 bitmap_zero (bb_info
->rd_in
);
1666 for (e
= bb
->pred
; e
!= 0; e
= e
->pred_next
)
1668 struct bb_info
*pred_refs
= DF_BB_INFO (df
, e
->src
);
1670 if (e
->src
== ENTRY_BLOCK_PTR
)
1673 bitmap_a_or_b (bb_info
->rd_in
, bb_info
->rd_in
,
1677 /* RD_OUT is the set of defs that are live at the end of the
1678 BB. These are the defs that are either generated by defs
1679 (RD_GEN) within the BB or are live at the start (RD_IN)
1680 and are not killed by other defs (RD_KILL). */
1681 changed
= bitmap_union_of_diff (bb_info
->rd_out
, bb_info
->rd_gen
,
1682 bb_info
->rd_in
, bb_info
->rd_kill
);
1686 /* Add each of this block's successors to the worklist. */
1687 for (e
= bb
->succ
; e
!= 0; e
= e
->succ_next
)
1689 if (e
->dest
== EXIT_BLOCK_PTR
)
1692 SET_BIT (worklist
, i
);
1696 sbitmap_free (worklist
);
1700 /* Calculate reaching uses for each basic block within BLOCKS, i.e.,
1701 the uses that are live at the start of a basic block. */
1703 df_ru_global_compute (df
, blocks
)
1704 struct df
*df ATTRIBUTE_UNUSED
;
1711 worklist
= sbitmap_alloc (n_basic_blocks
);
1712 sbitmap_zero (worklist
);
1714 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
,
1716 SET_BIT (worklist
, i
);
1719 /* We assume that only the basic blocks in WORKLIST have been
1721 FOR_EACH_BB_IN_SBITMAP (worklist
, 0, bb
,
1723 struct bb_info
*bb_info
= DF_BB_INFO (df
, bb
);
1725 bitmap_copy (bb_info
->ru_in
, bb_info
->ru_gen
);
1729 while ((i
= df_visit_next (df
, worklist
)) >= 0)
1731 struct bb_info
*bb_info
;
1735 /* Remove this block from the worklist. */
1736 RESET_BIT (worklist
, i
);
1738 bb
= BASIC_BLOCK (i
);
1739 bb_info
= DF_BB_INFO (df
, bb
);
1741 /* Calculate union of successor ins. */
1742 bitmap_zero (bb_info
->ru_out
);
1743 for (e
= bb
->succ
; e
!= 0; e
= e
->succ_next
)
1745 struct bb_info
*succ_refs
= DF_BB_INFO (df
, e
->dest
);
1747 if (e
->dest
== EXIT_BLOCK_PTR
)
1750 bitmap_a_or_b (bb_info
->ru_out
, bb_info
->ru_out
,
1754 /* RU_IN is the set of uses that are live at the start of the
1755 BB. These are the uses that are either generated within the
1756 BB (RU_GEN) or are live at the end (RU_OUT) and are not uses
1757 killed by defs within the BB (RU_KILL). */
1758 changed
= bitmap_union_of_diff (bb_info
->ru_in
, bb_info
->ru_gen
,
1759 bb_info
->ru_out
, bb_info
->ru_kill
);
1763 /* Add each of this block's predecessors to the worklist. */
1764 for (e
= bb
->pred
; e
!= 0; e
= e
->pred_next
)
1766 if (e
->src
== ENTRY_BLOCK_PTR
)
1769 SET_BIT (worklist
, i
);
1774 sbitmap_free (worklist
);
1778 /* Calculate live registers for each basic block within BLOCKS. */
1780 df_lr_global_compute (df
, blocks
)
1781 struct df
*df ATTRIBUTE_UNUSED
;
1788 worklist
= BITMAP_XMALLOC ();
1789 bitmap_copy (worklist
, blocks
);
1791 /* We assume that only the basic blocks in WORKLIST have been
1793 FOR_EACH_BB_IN_BITMAP (worklist
, 0, bb
,
1795 struct bb_info
*bb_info
= DF_BB_INFO (df
, bb
);
1797 bitmap_copy (bb_info
->lr_in
, bb_info
->lr_use
);
1800 while ((i
= bitmap_last_set_bit (worklist
)) >= 0)
1802 struct bb_info
*bb_info
= DF_BB_INFO (df
, bb
);
1806 /* Remove this block from the worklist. */
1807 bitmap_clear_bit (worklist
, i
);
1809 bb
= BASIC_BLOCK (i
);
1810 bb_info
= DF_BB_INFO (df
, bb
);
1812 /* Calculate union of successor ins. */
1813 bitmap_zero (bb_info
->lr_out
);
1814 for (e
= bb
->succ
; e
!= 0; e
= e
->succ_next
)
1816 struct bb_info
*succ_refs
= DF_BB_INFO (df
, e
->dest
);
1818 if (e
->dest
== EXIT_BLOCK_PTR
)
1821 bitmap_a_or_b (bb_info
->lr_out
, bb_info
->lr_out
,
1825 /* LR_IN is the set of uses that are live at the start of the
1826 BB. These are the uses that are either generated by uses
1827 (LR_USE) within the BB or are live at the end (LR_OUT)
1828 and are not killed by other uses (LR_DEF). */
1829 changed
= bitmap_union_of_diff (bb_info
->lr_in
, bb_info
->lr_use
,
1830 bb_info
->lr_out
, bb_info
->lr_def
);
1834 /* Add each of this block's predecessors to the worklist. */
1835 for (e
= bb
->pred
; e
!= 0; e
= e
->pred_next
)
1837 if (e
->src
== ENTRY_BLOCK_PTR
)
1840 bitmap_set_bit (worklist
, e
->src
->index
);
1844 BITMAP_XFREE (worklist
);
1848 /* Compute local reaching def info for basic block BB. */
1850 df_bb_rd_local_compute (df
, bb
)
1854 struct bb_info
*bb_info
= DF_BB_INFO (df
, bb
);
1857 for (insn
= bb
->head
; insn
&& insn
!= NEXT_INSN (bb
->end
);
1858 insn
= NEXT_INSN (insn
))
1860 unsigned int uid
= INSN_UID (insn
);
1861 struct df_link
*def_link
;
1863 if (! INSN_P (insn
))
1866 for (def_link
= df
->insns
[uid
].defs
; def_link
; def_link
= def_link
->next
)
1868 struct ref
*def
= def_link
->ref
;
1869 unsigned int regno
= DF_REF_REGNO (def
);
1870 struct df_link
*def2_link
;
1872 for (def2_link
= df
->regs
[regno
].defs
; def2_link
;
1873 def2_link
= def2_link
->next
)
1875 struct ref
*def2
= def2_link
->ref
;
1877 /* Add all defs of this reg to the set of kills. This
1878 is greedy since many of these defs will not actually
1879 be killed by this BB but it keeps things a lot
1881 bitmap_set_bit (bb_info
->rd_kill
, DF_REF_ID (def2
));
1883 /* Zap from the set of gens for this BB. */
1884 bitmap_clear_bit (bb_info
->rd_gen
, DF_REF_ID (def2
));
1887 bitmap_set_bit (bb_info
->rd_gen
, DF_REF_ID (def
));
1891 bb_info
->rd_valid
= 1;
1895 /* Compute local reaching def info for each basic block within BLOCKS. */
1897 df_rd_local_compute (df
, blocks
)
1903 FOR_EACH_BB_IN_BITMAP (blocks
, 0, bb
,
1905 df_bb_rd_local_compute (df
, bb
);
1910 /* Compute local reaching use (upward exposed use) info for basic
1913 df_bb_ru_local_compute (df
, bb
)
1917 /* This is much more tricky than computing reaching defs. With
1918 reaching defs, defs get killed by other defs. With upwards
1919 exposed uses, these get killed by defs with the same regno. */
1921 struct bb_info
*bb_info
= DF_BB_INFO (df
, bb
);
1924 for (insn
= bb
->end
; insn
&& insn
!= PREV_INSN (bb
->head
);
1925 insn
= PREV_INSN (insn
))
1927 unsigned int uid
= INSN_UID (insn
);
1928 struct df_link
*def_link
;
1929 struct df_link
*use_link
;
1931 if (! INSN_P (insn
))
1934 for (def_link
= df
->insns
[uid
].defs
; def_link
; def_link
= def_link
->next
)
1936 struct ref
*def
= def_link
->ref
;
1937 unsigned int dregno
= DF_REF_REGNO (def
);
1939 for (use_link
= df
->regs
[dregno
].uses
; use_link
;
1940 use_link
= use_link
->next
)
1942 struct ref
*use
= use_link
->ref
;
1944 /* Add all uses of this reg to the set of kills. This
1945 is greedy since many of these uses will not actually
1946 be killed by this BB but it keeps things a lot
1948 bitmap_set_bit (bb_info
->ru_kill
, DF_REF_ID (use
));
1950 /* Zap from the set of gens for this BB. */
1951 bitmap_clear_bit (bb_info
->ru_gen
, DF_REF_ID (use
));
1955 for (use_link
= df
->insns
[uid
].uses
; use_link
; use_link
= use_link
->next
)
1957 struct ref
*use
= use_link
->ref
;
1958 /* Add use to set of gens in this BB. */
1959 bitmap_set_bit (bb_info
->ru_gen
, DF_REF_ID (use
));
1962 bb_info
->ru_valid
= 1;
1966 /* Compute local reaching use (upward exposed use) info for each basic
1967 block within BLOCKS. */
1969 df_ru_local_compute (df
, blocks
)
1975 FOR_EACH_BB_IN_BITMAP (blocks
, 0, bb
,
1977 df_bb_ru_local_compute (df
, bb
);
1982 /* Compute local live variable info for basic block BB. */
1984 df_bb_lr_local_compute (df
, bb
)
1988 struct bb_info
*bb_info
= DF_BB_INFO (df
, bb
);
1991 for (insn
= bb
->end
; insn
&& insn
!= PREV_INSN (bb
->head
);
1992 insn
= PREV_INSN (insn
))
1994 unsigned int uid
= INSN_UID (insn
);
1995 struct df_link
*link
;
1997 if (! INSN_P (insn
))
2000 for (link
= df
->insns
[uid
].defs
; link
; link
= link
->next
)
2002 struct ref
*def
= link
->ref
;
2003 unsigned int dregno
= DF_REF_REGNO (def
);
2005 /* Add def to set of defs in this BB. */
2006 bitmap_set_bit (bb_info
->lr_def
, dregno
);
2008 bitmap_clear_bit (bb_info
->lr_use
, dregno
);
2011 for (link
= df
->insns
[uid
].uses
; link
; link
= link
->next
)
2013 struct ref
*use
= link
->ref
;
2014 /* Add use to set of uses in this BB. */
2015 bitmap_set_bit (bb_info
->lr_use
, DF_REF_REGNO (use
));
2018 bb_info
->lr_valid
= 1;
2022 /* Compute local live variable info for each basic block within BLOCKS. */
2024 df_lr_local_compute (df
, blocks
)
2030 FOR_EACH_BB_IN_BITMAP (blocks
, 0, bb
,
2032 df_bb_lr_local_compute (df
, bb
);
2037 /* Compute register info: lifetime, bb, and number of defs and uses
2038 for basic block BB. */
2040 df_bb_reg_info_compute (df
, bb
, live
)
2045 struct reg_info
*reg_info
= df
->regs
;
2046 struct bb_info
*bb_info
= DF_BB_INFO (df
, bb
);
2049 bitmap_copy (live
, bb_info
->lr_out
);
2051 for (insn
= bb
->end
; insn
&& insn
!= PREV_INSN (bb
->head
);
2052 insn
= PREV_INSN (insn
))
2054 unsigned int uid
= INSN_UID (insn
);
2056 struct df_link
*link
;
2058 if (! INSN_P (insn
))
2061 for (link
= df
->insns
[uid
].defs
; link
; link
= link
->next
)
2063 struct ref
*def
= link
->ref
;
2064 unsigned int dregno
= DF_REF_REGNO (def
);
2066 /* Kill this register. */
2067 bitmap_clear_bit (live
, dregno
);
2068 reg_info
[dregno
].n_defs
++;
2071 for (link
= df
->insns
[uid
].uses
; link
; link
= link
->next
)
2073 struct ref
*use
= link
->ref
;
2074 unsigned int uregno
= DF_REF_REGNO (use
);
2076 /* This register is now live. */
2077 bitmap_set_bit (live
, uregno
);
2078 reg_info
[uregno
].n_uses
++;
2081 /* Increment lifetimes of all live registers. */
2082 EXECUTE_IF_SET_IN_BITMAP (live
, 0, regno
,
2084 reg_info
[regno
].lifetime
++;
2090 /* Compute register info: lifetime, bb, and number of defs and uses. */
2092 df_reg_info_compute (df
, blocks
)
2099 live
= BITMAP_XMALLOC ();
2101 FOR_EACH_BB_IN_BITMAP (blocks
, 0, bb
,
2103 df_bb_reg_info_compute (df
, bb
, live
);
2106 BITMAP_XFREE (live
);
2110 /* Assign LUIDs for BB. */
2112 df_bb_luids_set (df
, bb
)
2119 /* The LUIDs are monotonically increasing for each basic block. */
2121 for (insn
= bb
->head
; ; insn
= NEXT_INSN (insn
))
2124 DF_INSN_LUID (df
, insn
) = luid
++;
2125 DF_INSN_LUID (df
, insn
) = luid
;
2127 if (insn
== bb
->end
)
2134 /* Assign LUIDs for each basic block within BLOCKS. */
2136 df_luids_set (df
, blocks
)
2143 FOR_EACH_BB_IN_BITMAP (blocks
, 0, bb
,
2145 total
+= df_bb_luids_set (df
, bb
);
2151 /* Perform dataflow analysis using existing DF structure for blocks
2152 within BLOCKS. If BLOCKS is zero, use all basic blocks in the CFG. */
2154 df_analyse_1 (df
, blocks
, flags
, update
)
2165 if (flags
& DF_UD_CHAIN
)
2166 aflags
|= DF_RD
| DF_RD_CHAIN
;
2168 if (flags
& DF_DU_CHAIN
)
2172 aflags
|= DF_RU_CHAIN
;
2174 if (flags
& DF_REG_INFO
)
2178 blocks
= df
->all_blocks
;
2183 df_refs_update (df
);
2184 /* More fine grained incremental dataflow analysis would be
2185 nice. For now recompute the whole shebang for the
2188 df_refs_unlink (df
, blocks
);
2190 /* All the def-use, use-def chains can be potentially
2191 modified by changes in one block. The size of the
2192 bitmaps can also change. */
2196 /* Scan the function for all register defs and uses. */
2198 df_refs_record (df
, blocks
);
2200 /* Link all the new defs and uses to the insns. */
2201 df_refs_process (df
);
2204 /* Allocate the bitmaps now the total number of defs and uses are
2205 known. If the number of defs or uses have changed, then
2206 these bitmaps need to be reallocated. */
2207 df_bitmaps_alloc (df
, aflags
);
2209 /* Set the LUIDs for each specified basic block. */
2210 df_luids_set (df
, blocks
);
2212 /* Recreate reg-def and reg-use chains from scratch so that first
2213 def is at the head of the reg-def chain and the last use is at
2214 the head of the reg-use chain. This is only important for
2215 regs local to a basic block as it speeds up searching. */
2216 if (aflags
& DF_RD_CHAIN
)
2218 df_reg_def_chain_create (df
, blocks
);
2221 if (aflags
& DF_RU_CHAIN
)
2223 df_reg_use_chain_create (df
, blocks
);
2226 df
->dfs_order
= xmalloc (sizeof(int) * n_basic_blocks
);
2227 df
->rc_order
= xmalloc (sizeof(int) * n_basic_blocks
);
2229 flow_depth_first_order_compute (df
->dfs_order
, df
->rc_order
);
2233 /* Compute the sets of gens and kills for the defs of each bb. */
2234 df_rd_local_compute (df
, df
->flags
& DF_RD
? blocks
: df
->all_blocks
);
2236 /* Compute the global reaching definitions. */
2237 df_rd_global_compute (df
, df
->all_blocks
);
2240 if (aflags
& DF_UD_CHAIN
)
2242 /* Create use-def chains. */
2243 df_ud_chain_create (df
, df
->all_blocks
);
2245 if (! (flags
& DF_RD
))
2251 /* Compute the sets of gens and kills for the upwards exposed
2253 df_ru_local_compute (df
, df
->flags
& DF_RU
? blocks
: df
->all_blocks
);
2255 /* Compute the global reaching uses. */
2256 df_ru_global_compute (df
, df
->all_blocks
);
2259 if (aflags
& DF_DU_CHAIN
)
2261 /* Create def-use chains. */
2262 df_du_chain_create (df
, df
->all_blocks
);
2264 if (! (flags
& DF_RU
))
2268 /* Free up bitmaps that are no longer required. */
2270 df_bitmaps_free (df
, dflags
);
2274 /* Compute the sets of defs and uses of live variables. */
2275 df_lr_local_compute (df
, df
->flags
& DF_LR
? blocks
: df
->all_blocks
);
2277 /* Compute the global live variables. */
2278 df_lr_global_compute (df
, df
->all_blocks
);
2281 if (aflags
& DF_REG_INFO
)
2283 df_reg_info_compute (df
, df
->all_blocks
);
2285 free (df
->dfs_order
);
2286 free (df
->rc_order
);
2290 /* Initialise dataflow analysis. */
2296 df
= xcalloc (1, sizeof (struct df
));
2298 /* Squirrel away a global for debugging. */
2305 /* Start queuing refs. */
2310 df
->def_id_save
= df
->def_id
;
2311 df
->use_id_save
= df
->use_id
;
2312 /* ???? Perhaps we should save current obstack state so that we can
2318 /* Process queued refs. */
2320 df_refs_process (df
)
2325 /* Build new insn-def chains. */
2326 for (i
= df
->def_id_save
; i
!= df
->def_id
; i
++)
2328 struct ref
*def
= df
->defs
[i
];
2329 unsigned int uid
= DF_REF_INSN_UID (def
);
2331 /* Add def to head of def list for INSN. */
2333 = df_link_create (def
, df
->insns
[uid
].defs
);
2336 /* Build new insn-use chains. */
2337 for (i
= df
->use_id_save
; i
!= df
->use_id
; i
++)
2339 struct ref
*use
= df
->uses
[i
];
2340 unsigned int uid
= DF_REF_INSN_UID (use
);
2342 /* Add use to head of use list for INSN. */
2344 = df_link_create (use
, df
->insns
[uid
].uses
);
2350 /* Update refs for basic block BB. */
2352 df_bb_refs_update (df
, bb
)
2359 /* While we have to scan the chain of insns for this BB, we don't
2360 need to allocate and queue a long chain of BB/INSN pairs. Using
2361 a bitmap for insns_modified saves memory and avoids queuing
2364 for (insn
= bb
->head
; ; insn
= NEXT_INSN (insn
))
2368 uid
= INSN_UID (insn
);
2370 if (bitmap_bit_p (df
->insns_modified
, uid
))
2372 /* Delete any allocated refs of this insn. MPH, FIXME. */
2373 df_insn_refs_unlink (df
, bb
, insn
);
2375 /* Scan the insn for refs. */
2376 df_insn_refs_record (df
, bb
, insn
);
2379 bitmap_clear_bit (df
->insns_modified
, uid
);
2382 if (insn
== bb
->end
)
2389 /* Process all the modified/deleted insns that were queued. */
2397 if ((unsigned int)max_reg_num () >= df
->reg_size
)
2398 df_reg_table_realloc (df
, 0);
2402 FOR_EACH_BB_IN_BITMAP (df
->bbs_modified
, 0, bb
,
2404 count
+= df_bb_refs_update (df
, bb
);
2407 df_refs_process (df
);
2412 /* Return non-zero if any of the requested blocks in the bitmap
2413 BLOCKS have been modified. */
2415 df_modified_p (df
, blocks
)
2422 for (j
= 0; j
< df
->n_bbs
; j
++)
2423 if (bitmap_bit_p (df
->bbs_modified
, j
)
2424 && (! blocks
|| (blocks
== (bitmap
) -1) || bitmap_bit_p (blocks
, j
)))
2434 /* Analyse dataflow info for the basic blocks specified by the bitmap
2435 BLOCKS, or for the whole CFG if BLOCKS is zero, or just for the
2436 modified blocks if BLOCKS is -1. */
2438 df_analyse (df
, blocks
, flags
)
2445 /* We could deal with additional basic blocks being created by
2446 rescanning everything again. */
2447 if (df
->n_bbs
&& df
->n_bbs
!= (unsigned int)n_basic_blocks
)
2450 update
= df_modified_p (df
, blocks
);
2451 if (update
|| (flags
!= df
->flags
))
2457 /* Recompute everything from scratch. */
2460 /* Allocate and initialise data structures. */
2461 df_alloc (df
, max_reg_num ());
2462 df_analyse_1 (df
, 0, flags
, 0);
2467 if (blocks
== (bitmap
) -1)
2468 blocks
= df
->bbs_modified
;
2473 df_analyse_1 (df
, blocks
, flags
, 1);
2474 bitmap_zero (df
->bbs_modified
);
2481 /* Free all the dataflow info and the DF structure. */
2491 /* Unlink INSN from its reference information. */
2493 df_insn_refs_unlink (df
, bb
, insn
)
2495 basic_block bb ATTRIBUTE_UNUSED
;
2498 struct df_link
*link
;
2501 uid
= INSN_UID (insn
);
2503 /* Unlink all refs defined by this insn. */
2504 for (link
= df
->insns
[uid
].defs
; link
; link
= link
->next
)
2505 df_def_unlink (df
, link
->ref
);
2507 /* Unlink all refs used by this insn. */
2508 for (link
= df
->insns
[uid
].uses
; link
; link
= link
->next
)
2509 df_use_unlink (df
, link
->ref
);
2511 df
->insns
[uid
].defs
= 0;
2512 df
->insns
[uid
].uses
= 0;
2516 /* Unlink all the insns within BB from their reference information. */
2518 df_bb_refs_unlink (df
, bb
)
2524 /* Scan the block an insn at a time from beginning to end. */
2525 for (insn
= bb
->head
; ; insn
= NEXT_INSN (insn
))
2529 /* Unlink refs for INSN. */
2530 df_insn_refs_unlink (df
, bb
, insn
);
2532 if (insn
== bb
->end
)
2539 /* Unlink all the refs in the basic blocks specified by BLOCKS.
2540 Not currently used. */
2542 df_refs_unlink (df
, blocks
)
2550 FOR_EACH_BB_IN_BITMAP (blocks
, 0, bb
,
2552 df_bb_refs_unlink (df
, bb
);
2559 df_bb_refs_unlink (df
, bb
);
2565 /* Functions to modify insns. */
2568 /* Delete INSN and all its reference information. */
2570 df_insn_delete (df
, bb
, insn
)
2572 basic_block bb ATTRIBUTE_UNUSED
;
2575 /* If the insn is a jump, we should perhaps call delete_insn to
2576 handle the JUMP_LABEL? */
2578 /* We should not be deleting the NOTE_INSN_BASIC_BLOCK or label. */
2579 if (insn
== bb
->head
)
2581 if (insn
== bb
->end
)
2582 bb
->end
= PREV_INSN (insn
);
2584 /* Delete the insn. */
2585 PUT_CODE (insn
, NOTE
);
2586 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2587 NOTE_SOURCE_FILE (insn
) = 0;
2589 df_insn_modify (df
, bb
, insn
);
2591 return NEXT_INSN (insn
);
2595 /* Mark that INSN within BB may have changed (created/modified/deleted).
2596 This may be called multiple times for the same insn. There is no
2597 harm calling this function if the insn wasn't changed; it will just
2598 slow down the rescanning of refs. */
2600 df_insn_modify (df
, bb
, insn
)
2607 uid
= INSN_UID (insn
);
2609 bitmap_set_bit (df
->bbs_modified
, bb
->index
);
2610 bitmap_set_bit (df
->insns_modified
, uid
);
2613 /* For incremental updating on the fly, perhaps we could make a copy
2614 of all the refs of the original insn and turn them into
2615 anti-refs. When df_refs_update finds these anti-refs, it annihilates
2616 the original refs. If validate_change fails then these anti-refs
2617 will just get ignored. */
2623 typedef struct replace_args
2632 /* Replace mem pointed to by PX with its associated pseudo register.
2633 DATA is actually a pointer to a structure describing the
2634 instruction currently being scanned and the MEM we are currently
2637 df_rtx_mem_replace (px
, data
)
2641 replace_args
*args
= (replace_args
*) data
;
2644 if (mem
== NULL_RTX
)
2647 switch (GET_CODE (mem
))
2653 /* We're not interested in the MEM associated with a
2654 CONST_DOUBLE, so there's no need to traverse into one. */
2658 /* This is not a MEM. */
2662 if (!rtx_equal_p (args
->match
, mem
))
2663 /* This is not the MEM we are currently replacing. */
2666 /* Actually replace the MEM. */
2667 validate_change (args
->insn
, px
, args
->replacement
, 1);
2675 df_insn_mem_replace (df
, bb
, insn
, mem
, reg
)
2686 args
.replacement
= reg
;
2689 /* Seach and replace all matching mems within insn. */
2690 for_each_rtx (&insn
, df_rtx_mem_replace
, &args
);
2693 df_insn_modify (df
, bb
, insn
);
2695 /* ???? FIXME. We may have a new def or one or more new uses of REG
2696 in INSN. REG should be a new pseudo so it won't affect the
2697 dataflow information that we currently have. We should add
2698 the new uses and defs to INSN and then recreate the chains
2699 when df_analyse is called. */
2700 return args
.modified
;
2704 /* Replace one register with another. Called through for_each_rtx; PX
2705 points to the rtx being scanned. DATA is actually a pointer to a
2706 structure of arguments. */
2708 df_rtx_reg_replace (px
, data
)
2713 replace_args
*args
= (replace_args
*) data
;
2718 if (x
== args
->match
)
2720 validate_change (args
->insn
, px
, args
->replacement
, 1);
2728 /* Replace the reg within every ref on CHAIN that is within the set
2729 BLOCKS of basic blocks with NEWREG. Also update the regs within
2732 df_refs_reg_replace (df
, blocks
, chain
, oldreg
, newreg
)
2735 struct df_link
*chain
;
2739 struct df_link
*link
;
2743 blocks
= df
->all_blocks
;
2745 args
.match
= oldreg
;
2746 args
.replacement
= newreg
;
2749 for (link
= chain
; link
; link
= link
->next
)
2751 struct ref
*ref
= link
->ref
;
2752 rtx insn
= DF_REF_INSN (ref
);
2754 if (! INSN_P (insn
))
2757 if (bitmap_bit_p (blocks
, DF_REF_BBNO (ref
)))
2759 df_ref_reg_replace (df
, ref
, oldreg
, newreg
);
2761 /* Replace occurrences of the reg within the REG_NOTES. */
2762 if ((! link
->next
|| DF_REF_INSN (ref
)
2763 != DF_REF_INSN (link
->next
->ref
))
2764 && REG_NOTES (insn
))
2767 for_each_rtx (®_NOTES (insn
), df_rtx_reg_replace
, &args
);
2772 /* Temporary check to ensure that we have a grip on which
2773 regs should be replaced. */
2780 /* Replace all occurrences of register OLDREG with register NEWREG in
2781 blocks defined by bitmap BLOCKS. This also replaces occurrences of
2782 OLDREG in the REG_NOTES but only for insns containing OLDREG. This
2783 routine expects the reg-use and reg-def chains to be valid. */
2785 df_reg_replace (df
, blocks
, oldreg
, newreg
)
2791 unsigned int oldregno
= REGNO (oldreg
);
2793 df_refs_reg_replace (df
, blocks
, df
->regs
[oldregno
].defs
, oldreg
, newreg
);
2794 df_refs_reg_replace (df
, blocks
, df
->regs
[oldregno
].uses
, oldreg
, newreg
);
2799 /* Try replacing the reg within REF with NEWREG. Do not modify
2800 def-use/use-def chains. */
2802 df_ref_reg_replace (df
, ref
, oldreg
, newreg
)
2808 /* Check that insn was deleted by being converted into a NOTE. If
2809 so ignore this insn. */
2810 if (! INSN_P (DF_REF_INSN (ref
)))
2813 if (oldreg
&& oldreg
!= DF_REF_REG (ref
))
2816 if (! validate_change (DF_REF_INSN (ref
), DF_REF_LOC (ref
), newreg
, 1))
2819 df_insn_modify (df
, DF_REF_BB (ref
), DF_REF_INSN (ref
));
2825 df_bb_def_use_swap (df
, bb
, def_insn
, use_insn
, regno
)
2836 struct df_link
*link
;
2838 def
= df_bb_insn_regno_first_def_find (df
, bb
, def_insn
, regno
);
2842 use
= df_bb_insn_regno_last_use_find (df
, bb
, use_insn
, regno
);
2846 /* The USE no longer exists. */
2847 use_uid
= INSN_UID (use_insn
);
2848 df_use_unlink (df
, use
);
2849 df_ref_unlink (&df
->insns
[use_uid
].uses
, use
);
2851 /* The DEF requires shifting so remove it from DEF_INSN
2852 and add it to USE_INSN by reusing LINK. */
2853 def_uid
= INSN_UID (def_insn
);
2854 link
= df_ref_unlink (&df
->insns
[def_uid
].defs
, def
);
2856 link
->next
= df
->insns
[use_uid
].defs
;
2857 df
->insns
[use_uid
].defs
= link
;
2860 link
= df_ref_unlink (&df
->regs
[regno
].defs
, def
);
2862 link
->next
= df
->regs
[regno
].defs
;
2863 df
->insns
[regno
].defs
= link
;
2866 DF_REF_INSN (def
) = use_insn
;
/* Record df between FIRST_INSN and LAST_INSN inclusive.  All new
   insns must be processed by this routine.  */
df_insns_modify (df, bb, first_insn, last_insn)

  /* Walk the chain; the loop exits when LAST_INSN has been handled.  */
  for (insn = first_insn; ; insn = NEXT_INSN (insn))

      /* A non-const call should not have slipped through the net.  If
	 it does, we need to create a new basic block.  Ouch.  The
	 same applies for a label.  */
      if ((GET_CODE (insn) == CALL_INSN
	   && ! CONST_CALL_P (insn))
	  || GET_CODE (insn) == CODE_LABEL)

      uid = INSN_UID (insn);

      /* A freshly emitted insn may have a uid beyond the current
	 per-insn table; grow it before recording.  */
      if (uid >= df->insn_size)
	df_insn_table_realloc (df, 0);

      /* Queue the insn for rescanning by df_analyse.  */
      df_insn_modify (df, bb, insn);

      if (insn == last_insn)
/* Emit PATTERN before INSN within BB.  */
df_pattern_emit_before (df, pattern, bb, insn)
struct df *df ATTRIBUTE_UNUSED;

  /* Remember the old predecessor so the newly emitted range can be
     located afterwards.  */
  rtx prev_insn = PREV_INSN (insn);

  /* We should not be inserting before the start of the block.  */
  if (insn == bb->head)

  ret_insn = emit_insn_before (pattern, insn);
  /* Nothing was emitted (emit returned the anchor insn itself).  */
  if (ret_insn == insn)

  /* Record every insn emitted between the old predecessor and the
     last new insn.  */
  df_insns_modify (df, bb, NEXT_INSN (prev_insn), ret_insn);
/* Emit PATTERN after INSN within BB.  */
df_pattern_emit_after (df, pattern, bb, insn)

  ret_insn = emit_insn_after (pattern, insn);
  /* Nothing was emitted.  */
  if (ret_insn == insn)

  /* Emitting after the block's last insn -- presumably bb->end is
     updated here, though the statement is not visible in this
     extraction.  */
  if (bb->end == insn)

  /* Record the newly emitted insns for rescanning.  */
  df_insns_modify (df, bb, NEXT_INSN (insn), ret_insn);
/* Emit jump PATTERN after INSN within BB.  */
df_jump_pattern_emit_after (df, pattern, bb, insn)

  ret_insn = emit_jump_insn_after (pattern, insn);
  /* Nothing was emitted.  */
  if (ret_insn == insn)

  /* Emitting after the block's last insn -- presumably bb->end is
     updated here, though the statement is not visible in this
     extraction.  */
  if (bb->end == insn)

  /* Record the newly emitted insns for rescanning.  */
  df_insns_modify (df, bb, NEXT_INSN (insn), ret_insn);
/* Move INSN within BB before BEFORE_INSN within BEFORE_BB.

   This function should only be used to move loop invariant insns
   out of a loop where it has been proven that the def-use info
   will still be valid.  */
df_insn_move_before (df, bb, insn, before_bb, before_insn)
basic_block before_bb;

  struct df_link *link;

  /* Fallback path: emit INSN's pattern before BEFORE_INSN instead of
     moving it -- the guarding condition is not visible in this
     extraction.  */
  return df_pattern_emit_before (df, insn, before_bb, before_insn);

  uid = INSN_UID (insn);

  /* Change bb for all df defined and used by this insn.  */
  for (link = df->insns[uid].defs; link; link = link->next)
    DF_REF_BB (link->ref) = before_bb;
  for (link = df->insns[uid].uses; link; link = link->next)
    DF_REF_BB (link->ref) = before_bb;

  /* The lifetimes of the registers used in this insn will be reduced
     while the lifetimes of the registers defined in this insn
     are likely to be increased.  */

  /* ???? Perhaps all the insns moved should be stored on a list
     which df_analyse removes when it recalculates data flow.  */

  return emit_block_insn_before (insn, before_insn, before_bb);
3011 /* Functions to query dataflow information. */
/* Return non-zero if INSN defines REGNO -- presumably; the return
   statements themselves are not visible in this extraction.  */
df_insn_regno_def_p (df, bb, insn, regno)
basic_block bb ATTRIBUTE_UNUSED;

  struct df_link *link;

  uid = INSN_UID (insn);

  /* Scan the defs recorded for this insn for one of REGNO.  */
  for (link = df->insns[uid].defs; link; link = link->next)

      struct ref *def = link->ref;

      if (DF_REF_REGNO (def) == regno)
/* Return non-zero if DEF is the sole reaching def for every one of
   its uses -- i.e. every use-def chain rooted at a use of DEF
   contains only DEF itself.  (Return statements are not visible in
   this extraction.)  */
df_def_dominates_all_uses_p (df, def)
struct df *df ATTRIBUTE_UNUSED;

  struct df_link *du_link;

  /* Follow def-use chain to find all the uses of this def.  */
  for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)

      struct ref *use = du_link->ref;
      struct df_link *ud_link;

      /* Follow use-def chain to check all the defs for this use.  */
      for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
	if (ud_link->ref != def)
/* Return non-zero if every def made by INSN dominates all of its
   uses (delegates per-def work to df_def_dominates_all_uses_p).  */
df_insn_dominates_all_uses_p (df, bb, insn)
basic_block bb ATTRIBUTE_UNUSED;

  struct df_link *link;

  uid = INSN_UID (insn);

  /* Check each def recorded for this insn.  */
  for (link = df->insns[uid].defs; link; link = link->next)

      struct ref *def = link->ref;

      if (! df_def_dominates_all_uses_p (df, def))
/* Return non-zero if all DF dominates all the uses within the bitmap
   (the remainder of this comment is missing from the extraction;
   presumably: "BLOCKS").  */
df_def_dominates_uses_p (df, def, blocks)
struct df *df ATTRIBUTE_UNUSED;

  struct df_link *du_link;

  /* Follow def-use chain to find all the uses of this def.  */
  for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)

      struct ref *use = du_link->ref;
      struct df_link *ud_link;

      /* Only worry about the uses within BLOCKS.  For example,
	 consider a register defined within a loop that is live at the
	 (comment truncated in this extraction; presumably: "loop
	 exits").  */
      if (bitmap_bit_p (blocks, DF_REF_BBNO (use)))

	  /* Follow use-def chain to check all the defs for this use.  */
	  for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
	    if (ud_link->ref != def)
/* Return non-zero if all the defs of INSN within BB dominates
   all the corresponding uses.  */
df_insn_dominates_uses_p (df, bb, insn, blocks)
basic_block bb ATTRIBUTE_UNUSED;

  struct df_link *link;

  uid = INSN_UID (insn);

  /* Check each def recorded for this insn.  */
  for (link = df->insns[uid].defs; link; link = link->next)

      struct ref *def = link->ref;

      /* Only consider the defs within BLOCKS.  */
      if (bitmap_bit_p (blocks, DF_REF_BBNO (def))
	  && ! df_def_dominates_uses_p (df, def, blocks))
/* Return the basic block that REG referenced in or NULL if referenced
   in multiple basic blocks.  */
df_regno_bb (df, regno)

  /* Head of the reg-def chain is the first def; head of the reg-use
     chain is (per the chain ordering assumed below) the last use.  */
  struct df_link *defs = df->regs[regno].defs;
  struct df_link *uses = df->regs[regno].uses;
  struct ref *def = defs ? defs->ref : 0;
  struct ref *use = uses ? uses->ref : 0;
  basic_block bb_def = def ? DF_REF_BB (def) : 0;
  basic_block bb_use = use ? DF_REF_BB (use) : 0;

  /* Compare blocks of first def and last use.  ???? FIXME.  What if
     the reg-def and reg-use lists are not correctly ordered.  */
  return bb_def == bb_use ? bb_def : 0;
/* Return non-zero if REG used in multiple basic blocks.
   NOTE(review): df_regno_bb returns 0 when REG is referenced in
   multiple blocks, so `!= 0' is actually non-zero when REG is local
   to a single block -- the comment and the test appear inverted;
   confirm against callers before relying on either.  */
df_reg_global_p (df, reg)

  return df_regno_bb (df, REGNO (reg)) != 0;
/* Return total lifetime (in insns) of REG, as accumulated in the
   per-register info by the dataflow analysis.  */
df_reg_lifetime (df, reg)

  return df->regs[REGNO (reg)].lifetime;
/* Return non-zero if REG live at start of BB.  */
df_bb_reg_live_start_p (df, bb, reg)
struct df *df ATTRIBUTE_UNUSED;

  struct bb_info *bb_info = DF_BB_INFO (df, bb);

#ifdef ENABLE_CHECKING
  /* Life (LR) information must have been computed for BB.
     (The abort and #endif are not visible in this extraction.)  */
  if (! bb_info->lr_in)

  /* Live-at-start is bit REGNO of the block's LR in-set.  */
  return bitmap_bit_p (bb_info->lr_in, REGNO (reg));
/* Return non-zero if REG live at end of BB.  */
df_bb_reg_live_end_p (df, bb, reg)
struct df *df ATTRIBUTE_UNUSED;

  struct bb_info *bb_info = DF_BB_INFO (df, bb);

#ifdef ENABLE_CHECKING
  /* NOTE(review): the check tests lr_in while the result below reads
     lr_out -- presumably the two sets are allocated together so
     either works as a "LR info computed" check; confirm.  (The abort
     and #endif are not visible in this extraction.)  */
  if (! bb_info->lr_in)

  /* Live-at-end is bit REGNO of the block's LR out-set.  */
  return bitmap_bit_p (bb_info->lr_out, REGNO (reg));
/* Return -1 if life of REG1 before life of REG2, 1 if life of REG1
   after life of REG2, or 0, if the lives overlap.  */
df_bb_regs_lives_compare (df, bb, reg1, reg2)

  unsigned int regno1 = REGNO (reg1);
  unsigned int regno2 = REGNO (reg2);

  /* The regs must be local to BB.  */
  if (df_regno_bb (df, regno1) != bb
      || df_regno_bb (df, regno2) != bb)

  /* REG1's life ends (last use) before REG2's begins (first def)?
     LUIDs give the order of insns within the block.  */
  def2 = df_bb_regno_first_def_find (df, bb, regno2);
  use1 = df_bb_regno_last_use_find (df, bb, regno1);

  if (DF_INSN_LUID (df, DF_REF_INSN (def2))
      > DF_INSN_LUID (df, DF_REF_INSN (use1)))

  /* Symmetric check: REG2's life ends before REG1's begins?  */
  def1 = df_bb_regno_first_def_find (df, bb, regno1);
  use2 = df_bb_regno_last_use_find (df, bb, regno2);

  if (DF_INSN_LUID (df, DF_REF_INSN (def1))
      > DF_INSN_LUID (df, DF_REF_INSN (use2)))
/* Return last use of REGNO within BB.  */
df_bb_regno_last_use_find (df, bb, regno)
basic_block bb ATTRIBUTE_UNUSED;

  struct df_link *link;

  /* This assumes that the reg-use list is ordered such that for any
     BB, the last use is found first.  However, since the BBs are not
     ordered, the first use in the chain is not necessarily the last
     use in the function.  */
  for (link = df->regs[regno].uses; link; link = link->next)

      struct ref *use = link->ref;

      /* First chain entry that lies in BB is, by the ordering above,
	 the last use within BB.  */
      if (DF_REF_BB (use) == bb)
/* Return first def of REGNO within BB.  */
df_bb_regno_first_def_find (df, bb, regno)
basic_block bb ATTRIBUTE_UNUSED;

  struct df_link *link;

  /* This assumes that the reg-def list is ordered such that for any
     BB, the first def is found first.  However, since the BBs are not
     ordered, the first def in the chain is not necessarily the first
     def in the function.  */
  for (link = df->regs[regno].defs; link; link = link->next)

      struct ref *def = link->ref;

      /* First chain entry in BB is, by the ordering above, the first
	 def within BB.  */
      if (DF_REF_BB (def) == bb)
/* Return last use of REGNO inside INSN within BB.  (The original
   comment said "first use", contradicting the function's name; the
   scan returns the matching entry from the insn's use chain.)  */
df_bb_insn_regno_last_use_find (df, bb, insn, regno)
basic_block bb ATTRIBUTE_UNUSED;

  struct df_link *link;

  uid = INSN_UID (insn);

  /* Scan the uses recorded for this insn for one of REGNO.  */
  for (link = df->insns[uid].uses; link; link = link->next)

      struct ref *use = link->ref;

      if (DF_REF_REGNO (use) == regno)
/* Return first def of REGNO inside INSN within BB.  */
df_bb_insn_regno_first_def_find (df, bb, insn, regno)
basic_block bb ATTRIBUTE_UNUSED;

  struct df_link *link;

  uid = INSN_UID (insn);

  /* Scan the defs recorded for this insn for one of REGNO.  */
  for (link = df->insns[uid].defs; link; link = link->next)

      struct ref *def = link->ref;

      if (DF_REF_REGNO (def) == regno)
/* Return insn using REG if the BB contains only a single
   use and def of REG.  */
df_bb_single_def_use_insn_find (df, bb, insn, reg)

  struct df_link *du_link;

  /* Find REG's def inside INSN; its def-use chain then leads to the
     uses.  */
  def = df_bb_insn_regno_first_def_find (df, bb, insn, REGNO (reg));

  du_link = DF_REF_CHAIN (def);

  /* Check if def is dead.  */

  /* Check for multiple uses.  */

  /* Exactly one live use: return the insn containing it.  (The
     statements guarded by the two checks above are not visible in
     this extraction.)  */
  return DF_REF_INSN (use);
3391 /* Functions for debugging/dumping dataflow information. */
/* Dump a def-use or use-def chain for REF to FILE.  Each entry is
   printed as `d<id>' (a def) or `u<id>' (a use), brace-enclosed.  */
df_chain_dump (link, file)
struct df_link *link;

  fprintf (file, "{ ");
  for (; link; link = link->next)

      fprintf (file, "%c%d ",
	       DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
	       DF_REF_ID (link->ref));

  fprintf (file, "}");
/* Like df_chain_dump but also prints each ref's register number in
   parentheses: `d<id>(<regno>)' or `u<id>(<regno>)'.  */
df_chain_dump_regno (link, file)
struct df_link *link;

  fprintf (file, "{ ");
  for (; link; link = link->next)

      fprintf (file, "%c%d(%d) ",
	       DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
	       DF_REF_ID (link->ref),
	       DF_REF_REGNO (link->ref));

  fprintf (file, "}");
/* Dump dataflow info.  Sections are emitted conditionally on FLAGS
   (DF_UD_CHAIN, DF_DU_CHAIN, DF_REG_INFO, DF_RD_CHAIN, DF_RU_CHAIN);
   the guards for several sections are not visible in this
   extraction.  */
df_dump (df, flags, file)

  /* Overall counts.  */
  fprintf (file, "\nDataflow summary:\n");
  fprintf (file, "n_regs = %d, n_defs = %d, n_uses = %d, n_bbs = %d\n",
	   df->n_regs, df->n_defs, df->n_uses, df->n_bbs);

  /* Per-block reaching-definition bitmaps (in/gen/kill/out).  */
  fprintf (file, "Reaching defs:\n");
  for (i = 0; i < df->n_bbs; i++)

      basic_block bb = BASIC_BLOCK (i);
      struct bb_info *bb_info = DF_BB_INFO (df, bb);

      /* Skip blocks whose RD sets were never allocated.  */
      if (! bb_info->rd_in)

      fprintf (file, "bb %d in \t", i);
      dump_bitmap (file, bb_info->rd_in);
      fprintf (file, "bb %d gen \t", i);
      dump_bitmap (file, bb_info->rd_gen);
      fprintf (file, "bb %d kill\t", i);
      dump_bitmap (file, bb_info->rd_kill);
      fprintf (file, "bb %d out \t", i);
      dump_bitmap (file, bb_info->rd_out);

  if (flags & DF_UD_CHAIN)

      /* One line per def: location info plus its use-def chain.  */
      fprintf (file, "Use-def chains:\n");
      for (j = 0; j < df->n_defs; j++)

	  fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
		   j, DF_REF_BBNO (df->defs[j]),
		   DF_INSN_LUID (df, DF_REF_INSN (df->defs[j])),
		   DF_REF_INSN_UID (df->defs[j]),
		   DF_REF_REGNO (df->defs[j]));
	  df_chain_dump (DF_REF_CHAIN (df->defs[j]), file);
	  fprintf (file, "\n");

  /* Per-block reaching-use bitmaps (in/gen/kill/out).  */
  fprintf (file, "Reaching uses:\n");
  for (i = 0; i < df->n_bbs; i++)

      basic_block bb = BASIC_BLOCK (i);
      struct bb_info *bb_info = DF_BB_INFO (df, bb);

      /* Skip blocks whose RU sets were never allocated.  */
      if (! bb_info->ru_in)

      fprintf (file, "bb %d in \t", i);
      dump_bitmap (file, bb_info->ru_in);
      fprintf (file, "bb %d gen \t", i);
      dump_bitmap (file, bb_info->ru_gen);
      fprintf (file, "bb %d kill\t", i);
      dump_bitmap (file, bb_info->ru_kill);
      fprintf (file, "bb %d out \t", i);
      dump_bitmap (file, bb_info->ru_out);

  if (flags & DF_DU_CHAIN)

      /* One line per use: location info plus its def-use chain.  */
      fprintf (file, "Def-use chains:\n");
      for (j = 0; j < df->n_uses; j++)

	  fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
		   j, DF_REF_BBNO (df->uses[j]),
		   DF_INSN_LUID (df, DF_REF_INSN (df->uses[j])),
		   DF_REF_INSN_UID (df->uses[j]),
		   DF_REF_REGNO (df->uses[j]));
	  df_chain_dump (DF_REF_CHAIN (df->uses[j]), file);
	  fprintf (file, "\n");

  /* Per-block live-register bitmaps (in/use/def/out).  */
  fprintf (file, "Live regs:\n");
  for (i = 0; i < df->n_bbs; i++)

      basic_block bb = BASIC_BLOCK (i);
      struct bb_info *bb_info = DF_BB_INFO (df, bb);

      /* Skip blocks whose LR sets were never allocated.  */
      if (! bb_info->lr_in)

      fprintf (file, "bb %d in \t", i);
      dump_bitmap (file, bb_info->lr_in);
      fprintf (file, "bb %d use \t", i);
      dump_bitmap (file, bb_info->lr_use);
      fprintf (file, "bb %d def \t", i);
      dump_bitmap (file, bb_info->lr_def);
      fprintf (file, "bb %d out \t", i);
      dump_bitmap (file, bb_info->lr_out);

  if (flags & (DF_REG_INFO | DF_RD_CHAIN | DF_RU_CHAIN))

      struct reg_info *reg_info = df->regs;

      fprintf (file, "Register info:\n");
      for (j = 0; j < df->n_regs; j++)

	  /* Only print registers with something to show under the
	     requested FLAGS.  */
	  if (((flags & DF_REG_INFO)
	       && (reg_info[j].n_uses || reg_info[j].n_defs))
	      || ((flags & DF_RD_CHAIN) && reg_info[j].defs)
	      || ((flags & DF_RU_CHAIN) && reg_info[j].uses))

	      fprintf (file, "reg %d", j);

	      if ((flags & DF_RD_CHAIN) && (flags & DF_RU_CHAIN))

		  /* NULL bb means the reg spans multiple blocks; the
		     branch structure is not visible in this
		     extraction.  */
		  basic_block bb = df_regno_bb (df, j);

		  fprintf (file, " bb %d", bb->index);

		  fprintf (file, " bb ?");

	      if (flags & DF_REG_INFO)

		  fprintf (file, " life %d", reg_info[j].lifetime);

	      if ((flags & DF_REG_INFO) || (flags & DF_RD_CHAIN))

		  fprintf (file, " defs ");
		  if (flags & DF_REG_INFO)
		    fprintf (file, "%d ", reg_info[j].n_defs);
		  if (flags & DF_RD_CHAIN)
		    df_chain_dump (reg_info[j].defs, file);

	      if ((flags & DF_REG_INFO) || (flags & DF_RU_CHAIN))

		  fprintf (file, " uses ");
		  if (flags & DF_REG_INFO)
		    fprintf (file, "%d ", reg_info[j].n_uses);
		  if (flags & DF_RU_CHAIN)
		    df_chain_dump (reg_info[j].uses, file);

	      fprintf (file, "\n");

  fprintf (file, "\n");
/* Dump the def and use chains recorded for INSN to FILE.  */
df_insn_debug (df, insn, file)

  uid = INSN_UID (insn);
  /* INSN was never scanned into the per-insn table; nothing to dump.  */
  if (uid >= df->insn_size)

  /* Derive the block number from whichever ref chain is non-empty.  */
  if (df->insns[uid].defs)
    bbi = DF_REF_BBNO (df->insns[uid].defs->ref);
  else if (df->insns[uid].uses)
    bbi = DF_REF_BBNO (df->insns[uid].uses->ref);

  fprintf (file, "insn %d bb %d luid %d defs ",
	   uid, bbi, DF_INSN_LUID (df, insn));
  df_chain_dump (df->insns[uid].defs, file);
  fprintf (file, " uses ");
  df_chain_dump (df->insns[uid].uses, file);
  fprintf (file, "\n");
/* Like df_insn_debug but prints register numbers alongside each ref
   (via df_chain_dump_regno).  */
df_insn_debug_regno (df, insn, file)

  uid = INSN_UID (insn);
  /* INSN was never scanned into the per-insn table; nothing to dump.  */
  if (uid >= df->insn_size)

  /* Derive the block number from whichever ref chain is non-empty.  */
  if (df->insns[uid].defs)
    bbi = DF_REF_BBNO (df->insns[uid].defs->ref);
  else if (df->insns[uid].uses)
    bbi = DF_REF_BBNO (df->insns[uid].uses->ref);

  fprintf (file, "insn %d bb %d luid %d defs ",
	   uid, bbi, DF_INSN_LUID (df, insn));
  df_chain_dump_regno (df->insns[uid].defs, file);
  fprintf (file, " uses ");
  df_chain_dump_regno (df->insns[uid].uses, file);
  fprintf (file, "\n");
/* Dump the lifetime and the def/use chains recorded for REGNO to
   FILE.  */
df_regno_debug (df, regno, file)

  /* REGNO was never recorded in the per-register table.  */
  if (regno >= df->reg_size)

  fprintf (file, "reg %d life %d defs ",
	   regno, df->regs[regno].lifetime);
  df_chain_dump (df->regs[regno].defs, file);
  fprintf (file, " uses ");
  df_chain_dump (df->regs[regno].uses, file);
  fprintf (file, "\n");
/* Dump one REF (def or use) and its chain to FILE.
   NOTE(review): several fprintf arguments (the ref id, regno, block
   and such) are missing from this extraction -- the calls below are
   visibly incomplete.  */
df_ref_debug (df, ref, file)

  fprintf (file, "%c%d ",
	   DF_REF_REG_DEF_P (ref) ? 'd' : 'u',
  fprintf (file, "reg %d bb %d luid %d insn %d chain ",
	   DF_INSN_LUID (df, DF_REF_INSN (ref)),
	   INSN_UID (DF_REF_INSN (ref)));
  df_chain_dump (DF_REF_CHAIN (ref), file);
  fprintf (file, "\n");
/* Debugger-callable wrappers: dump via the global `ddf' to stderr.
   Several function headers in this group are missing from the
   extraction; only the call statements survive.  */

/* Dump the df info for INSN.  */
debug_df_insn (insn)

  df_insn_debug (ddf, insn, stderr);

  /* Body of debug_df_reg (header not visible here).  */
  df_regno_debug (ddf, REGNO (reg), stderr);

/* Dump the df info for register number REGNO.  */
debug_df_regno (regno)

  df_regno_debug (ddf, regno, stderr);

  /* Body of debug_df_ref (header not visible here).  */
  df_ref_debug (ddf, ref, stderr);

/* Dump the def with index DEFNO.  */
debug_df_defno (defno)

  df_ref_debug (ddf, ddf->defs[defno], stderr);

/* Dump the use with index DEFNO (parameter arguably should be named
   `useno' -- it indexes the use table).  */
debug_df_useno (defno)

  df_ref_debug (ddf, ddf->uses[defno], stderr);

/* Dump a def-use/use-def chain, newline-terminated.  */
debug_df_chain (link)
struct df_link *link;

  df_chain_dump (link, stderr);
  fputc ('\n', stderr);