/* Standard problems for dataflow support routines.
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.
   Originally contributed by Michael P. Hayes
   (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
   and Kenneth Zadeck (zadeck@naturalbridge.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "regs.h"
#include "output.h"
#include "alloc-pool.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "sbitmap.h"
#include "bitmap.h"
#include "timevar.h"
#include "df.h"

#define REG_DEAD_DEBUGGING

#define DF_SPARSE_THRESHOLD 32

static bitmap seen_in_block = NULL;
static bitmap seen_in_insn = NULL;
static void df_ri_dump (struct dataflow *, FILE *);
/*----------------------------------------------------------------------------
   Public access functions for the dataflow problems.
----------------------------------------------------------------------------*/

/* Create a du or ud chain from SRC to DST and link it into SRC.  */

struct df_link *
df_chain_create (struct dataflow *dflow, struct df_ref *src, struct df_ref *dst)
{
  struct df_link *head = DF_REF_CHAIN (src);
  struct df_link *link = pool_alloc (dflow->block_pool);

  DF_REF_CHAIN (src) = link;
  link->next = head;
  link->ref = dst;
  return link;
}
/* Delete a du or ud chain for REF.  If LINK is NULL, delete all
   chains for ref and check to see if the reverse chains can also be
   deleted.  If LINK is not NULL it must be a link off of ref.  In
   this case, the other end is not deleted.  */

void
df_chain_unlink (struct dataflow *dflow, struct df_ref *ref, struct df_link *link)
{
  struct df_link *chain = DF_REF_CHAIN (ref);
  if (link)
    {
      /* Link was the first element in the chain.  */
      if (chain == link)
        DF_REF_CHAIN (ref) = link->next;
      else
        {
          /* Link is an internal element in the chain.  */
          struct df_link *prev = chain;
          while (chain)
            {
              if (chain == link)
                {
                  prev->next = chain->next;
                  break;
                }
              else
                {
                  prev = chain;
                  chain = chain->next;
                }
            }
        }
      pool_free (dflow->block_pool, link);
    }
  else
    {
      /* If chain is NULL here, it was because of a recursive call
         when the other flavor of chains was not built.  Just run thru
         the entire chain calling the other side and then deleting the
         link.  */
      while (chain)
        {
          struct df_link *next = chain->next;
          /* Delete the other side if it exists.  */
          df_chain_unlink (dflow, chain->ref, chain);
          chain = next;
        }
    }
}
/* Copy the du or ud chain starting at FROM_REF and attach it to
   TO_REF.  */

void
df_chain_copy (struct dataflow *dflow,
               struct df_ref *to_ref,
               struct df_link *from_ref)
{
  while (from_ref)
    {
      df_chain_create (dflow, to_ref, from_ref->ref);
      from_ref = from_ref->next;
    }
}
/* Get the live in set for BB no matter what problem happens to be
   defined.  */

bitmap
df_get_live_in (struct df *df, basic_block bb)
{
  gcc_assert (df->problems_by_index[DF_LR]);

  if (df->problems_by_index[DF_UREC])
    return DF_RA_LIVE_IN (df, bb);
  else if (df->problems_by_index[DF_UR])
    return DF_LIVE_IN (df, bb);
  else
    return DF_UPWARD_LIVE_IN (df, bb);
}

/* Get the live out set for BB no matter what problem happens to be
   defined.  */

bitmap
df_get_live_out (struct df *df, basic_block bb)
{
  gcc_assert (df->problems_by_index[DF_LR]);

  if (df->problems_by_index[DF_UREC])
    return DF_RA_LIVE_OUT (df, bb);
  else if (df->problems_by_index[DF_UR])
    return DF_LIVE_OUT (df, bb);
  else
    return DF_UPWARD_LIVE_OUT (df, bb);
}
/*----------------------------------------------------------------------------
   Utility functions.
----------------------------------------------------------------------------*/

/* Generic versions to get the void* version of the block info.  Only
   used inside the problem instance vectors.  */

/* Grow the bb_info array.  */

void
df_grow_bb_info (struct dataflow *dflow)
{
  unsigned int new_size = last_basic_block + 1;
  if (dflow->block_info_size < new_size)
    {
      new_size += new_size / 4;
      dflow->block_info = xrealloc (dflow->block_info,
                                    new_size * sizeof (void *));
      memset (dflow->block_info + dflow->block_info_size, 0,
              (new_size - dflow->block_info_size) * sizeof (void *));
      dflow->block_info_size = new_size;
    }
}
/* Dump a def-use or use-def chain for REF to FILE.  */

void
df_chain_dump (struct df_link *link, FILE *file)
{
  fprintf (file, "{ ");
  for (; link; link = link->next)
    fprintf (file, "%c%d(bb %d insn %d) ",
             DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
             DF_REF_ID (link->ref),
             DF_REF_BBNO (link->ref),
             DF_REF_INSN (link->ref) ? DF_REF_INSN_UID (link->ref) : -1);
  fprintf (file, "}");
}
/* Print some basic block info as part of df_dump.  */

void
df_print_bb_index (basic_block bb, FILE *file)
{
  edge e;
  edge_iterator ei;

  fprintf (file, "( ");
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      basic_block pred = e->src;
      fprintf (file, "%d ", pred->index);
    }
  fprintf (file, ")->[%d]->( ", bb->index);
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      basic_block succ = e->dest;
      fprintf (file, "%d ", succ->index);
    }
  fprintf (file, ")\n");
}
/* Return a bitmap for REGNO from the cache MAPS.  The bitmap is to
   contain COUNT bits starting at START.  These bitmaps are not to be
   changed since there is a cache of them.  */

static bitmap
df_ref_bitmap (bitmap *maps, unsigned int regno, int start, int count)
{
  bitmap ids = maps[regno];
  if (!ids)
    {
      unsigned int i;
      unsigned int end = start + count;
      ids = BITMAP_ALLOC (NULL);
      maps[regno] = ids;
      for (i = start; i < end; i++)
        bitmap_set_bit (ids, i);
    }
  return ids;
}
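
/* A minimal usage sketch of the cache above (illustration only; the
   helper name and its arguments are hypothetical and nothing calls
   it): the first request for REGNO builds the bitmap of ref ids
   [BEGIN, BEGIN + N_REFS), later requests hand back the same shared
   object, so callers may OR it into their own sets but must never
   modify it.  */

static void ATTRIBUTE_UNUSED
df_example_use_ref_bitmap (bitmap *cache, bitmap kill,
                           unsigned int regno, int begin, int n_refs)
{
  /* Builds and caches the bitmap on the first call for REGNO.  */
  bitmap sites = df_ref_bitmap (cache, regno, begin, n_refs);

  /* Fold the register's ref ids into a kill set; the cached bitmap
     itself is left untouched.  */
  bitmap_ior_into (kill, sites);
}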
/* Make sure that the seen_in_insn and seen_in_block sbitmaps are set
   up correctly.  */

static void
df_set_seen (void)
{
  seen_in_block = BITMAP_ALLOC (NULL);
  seen_in_insn = BITMAP_ALLOC (NULL);
}

static void
df_unset_seen (void)
{
  BITMAP_FREE (seen_in_block);
  BITMAP_FREE (seen_in_insn);
}
/*----------------------------------------------------------------------------
   REACHING USES

   Find the locations in the function where each use site for a pseudo
   can reach backwards.  In and out bitvectors are built for each basic
   block.  The id field in the ref is used to index into these sets.
   See df.h for details.

----------------------------------------------------------------------------*/

/* This problem plays a large number of games for the sake of
   efficiency.

   1) The order of the bits in the bitvectors.  After the scanning
   phase, all of the uses are sorted.  All of the uses for reg 0
   are first, followed by all uses for reg 1 and so on.

   2) There are two kill sets, one if the number of uses is less or
   equal to DF_SPARSE_THRESHOLD and another if it is greater.

   <= : There is a bitmap for each register, use_sites[N], that is
   built on demand.  This bitvector contains a 1 for each use of reg
   N.

   > : One level of indirection is used to keep from generating long
   strings of 1 bits in the kill sets.  Bitvectors that are indexed
   by the regnum are used to represent that there is a killing def
   for the register.  The confluence and transfer functions use
   these along with the bitmap_clear_range call to remove ranges of
   bits without actually generating a knockout vector.

   The kill and sparse_kill sets, and the dense_invalidated_by_call
   and sparse_invalidated_by_call sets, both play this game.  */
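
/* A small sketch of the two kill-set representations described above
   (illustration only: the helper is hypothetical and never called,
   and the numbers would come from DF_REG_USE_GET in real code).  */

static void ATTRIBUTE_UNUSED
df_example_kill_representations (bitmap kill, bitmap sparse_kill, bitmap vec,
                                 unsigned int regno, unsigned int begin,
                                 unsigned int n_uses)
{
  if (n_uses > DF_SPARSE_THRESHOLD)
    {
      /* Sparse form: record only the register number...  */
      bitmap_set_bit (sparse_kill, regno);
      /* ...and strip its whole id range out of a solution vector on
         the fly, without materializing a long run of 1 bits.  */
      bitmap_clear_range (vec, begin, n_uses);
    }
  else
    {
      /* Dense form: the use ids [BEGIN, BEGIN + N_USES) go directly
         into the kill set.  */
      unsigned int i;
      for (i = begin; i < begin + n_uses; i++)
        bitmap_set_bit (kill, i);
    }
}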
/* Private data used to compute the solution for this problem.  These
   data structures are not accessible outside of this module.  */
struct df_ru_problem_data
{
  bitmap *use_sites;            /* Bitmap of uses for each pseudo.  */
  unsigned int use_sites_size;  /* Size of use_sites.  */
  /* The set of defs to regs invalidated by call.  */
  bitmap sparse_invalidated_by_call;
  /* The set of defs to regs invalidated by call for ru.  */
  bitmap dense_invalidated_by_call;
};

/* Get basic block info.  */

struct df_ru_bb_info *
df_ru_get_bb_info (struct dataflow *dflow, unsigned int index)
{
  return (struct df_ru_bb_info *) dflow->block_info[index];
}

/* Set basic block info.  */

static void
df_ru_set_bb_info (struct dataflow *dflow, unsigned int index,
                   struct df_ru_bb_info *bb_info)
{
  dflow->block_info[index] = bb_info;
}
/* Free basic block info.  */

static void
df_ru_free_bb_info (struct dataflow *dflow,
                    basic_block bb ATTRIBUTE_UNUSED,
                    void *vbb_info)
{
  struct df_ru_bb_info *bb_info = (struct df_ru_bb_info *) vbb_info;
  if (bb_info)
    {
      BITMAP_FREE (bb_info->kill);
      BITMAP_FREE (bb_info->sparse_kill);
      BITMAP_FREE (bb_info->gen);
      BITMAP_FREE (bb_info->in);
      BITMAP_FREE (bb_info->out);
      pool_free (dflow->block_pool, bb_info);
    }
}
/* Allocate or reset bitmaps for DFLOW blocks.  The solution bits are
   not touched unless the block is new.  */

static void
df_ru_alloc (struct dataflow *dflow,
             bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
             bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;
  unsigned int reg_size = max_reg_num ();

  if (!dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_ru_block pool",
                                           sizeof (struct df_ru_bb_info), 50);

  if (dflow->problem_data)
    {
      unsigned int i;
      struct df_ru_problem_data *problem_data
        = (struct df_ru_problem_data *) dflow->problem_data;

      for (i = 0; i < problem_data->use_sites_size; i++)
        {
          bitmap bm = problem_data->use_sites[i];
          if (bm)
            {
              BITMAP_FREE (bm);
              problem_data->use_sites[i] = NULL;
            }
        }

      if (problem_data->use_sites_size < reg_size)
        {
          problem_data->use_sites
            = xrealloc (problem_data->use_sites, reg_size * sizeof (bitmap));
          memset (problem_data->use_sites + problem_data->use_sites_size, 0,
                  (reg_size - problem_data->use_sites_size) * sizeof (bitmap));
          problem_data->use_sites_size = reg_size;
        }

      bitmap_clear (problem_data->sparse_invalidated_by_call);
      bitmap_clear (problem_data->dense_invalidated_by_call);
    }
  else
    {
      struct df_ru_problem_data *problem_data = XNEW (struct df_ru_problem_data);
      dflow->problem_data = problem_data;

      problem_data->use_sites = XCNEWVEC (bitmap, reg_size);
      problem_data->use_sites_size = reg_size;
      problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
      problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
    }

  df_grow_bb_info (dflow);

  /* Because of the clustering of all def sites for the same pseudo,
     we have to process all of the blocks before doing the
     analysis.  */

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
      if (bb_info)
        {
          bitmap_clear (bb_info->kill);
          bitmap_clear (bb_info->sparse_kill);
          bitmap_clear (bb_info->gen);
        }
      else
        {
          bb_info = (struct df_ru_bb_info *) pool_alloc (dflow->block_pool);
          df_ru_set_bb_info (dflow, bb_index, bb_info);
          bb_info->kill = BITMAP_ALLOC (NULL);
          bb_info->sparse_kill = BITMAP_ALLOC (NULL);
          bb_info->gen = BITMAP_ALLOC (NULL);
          bb_info->in = BITMAP_ALLOC (NULL);
          bb_info->out = BITMAP_ALLOC (NULL);
        }
    }
}
/* Process a list of DEFs for df_ru_bb_local_compute.  */

static void
df_ru_bb_local_compute_process_def (struct dataflow *dflow,
                                    struct df_ru_bb_info *bb_info,
                                    struct df_ref *def,
                                    enum df_ref_flags top_flag)
{
  struct df *df = dflow->df;
  while (def)
    {
      if ((top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
          /* If the def is to only part of the reg, it is as if it did
             not happen, since some of the bits may get thru.  */
          && (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL)))
        {
          unsigned int regno = DF_REF_REGNO (def);
          unsigned int begin = DF_REG_USE_GET (df, regno)->begin;
          unsigned int n_uses = DF_REG_USE_GET (df, regno)->n_refs;
          if (!bitmap_bit_p (seen_in_block, regno))
            {
              /* The first def for regno in the insn causes the kill
                 info to be generated.  Do not modify the gen set
                 because the only values in it are the uses from here
                 to the top of the block and this def does not affect
                 them.  */
              if (!bitmap_bit_p (seen_in_insn, regno))
                {
                  if (n_uses > DF_SPARSE_THRESHOLD)
                    bitmap_set_bit (bb_info->sparse_kill, regno);
                  else
                    {
                      struct df_ru_problem_data *problem_data
                        = (struct df_ru_problem_data *) dflow->problem_data;
                      bitmap uses
                        = df_ref_bitmap (problem_data->use_sites, regno,
                                         begin, n_uses);
                      bitmap_ior_into (bb_info->kill, uses);
                    }
                }
              bitmap_set_bit (seen_in_insn, regno);
            }
        }
      def = def->next_ref;
    }
}
/* Process a list of USEs for df_ru_bb_local_compute.  */

static void
df_ru_bb_local_compute_process_use (struct df_ru_bb_info *bb_info,
                                    struct df_ref *use,
                                    enum df_ref_flags top_flag)
{
  while (use)
    {
      if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
        {
          /* Add use to set of gens in this BB unless we have seen a
             def in a previous instruction.  */
          unsigned int regno = DF_REF_REGNO (use);
          if (!bitmap_bit_p (seen_in_block, regno))
            bitmap_set_bit (bb_info->gen, DF_REF_ID (use));
        }
      use = use->next_ref;
    }
}
/* Compute local reaching use (upward exposed use) info for basic
   block BB.  USE_INFO->REGS[R] caches the set of uses for register R.  */

static void
df_ru_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
{
  struct df *df = dflow->df;
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
  rtx insn;

  /* Set when a def for regno is seen.  */
  bitmap_clear (seen_in_block);
  bitmap_clear (seen_in_insn);

  /* Variables defined in the prolog that are used by the exception
     handler.  */
  df_ru_bb_local_compute_process_use (bb_info,
                                      df_get_artificial_uses (df, bb_index),
                                      DF_REF_AT_TOP);
  df_ru_bb_local_compute_process_def (dflow, bb_info,
                                      df_get_artificial_defs (df, bb_index),
                                      DF_REF_AT_TOP);

  FOR_BB_INSNS (bb, insn)
    {
      unsigned int uid = INSN_UID (insn);
      if (!INSN_P (insn))
        continue;

      df_ru_bb_local_compute_process_use (bb_info,
                                          DF_INSN_UID_USES (df, uid), 0);

      df_ru_bb_local_compute_process_def (dflow, bb_info,
                                          DF_INSN_UID_DEFS (df, uid), 0);

      bitmap_ior_into (seen_in_block, seen_in_insn);
      bitmap_clear (seen_in_insn);
    }

  /* Process the hardware registers that are always live.  */
  df_ru_bb_local_compute_process_use (bb_info,
                                      df_get_artificial_uses (df, bb_index), 0);

  df_ru_bb_local_compute_process_def (dflow, bb_info,
                                      df_get_artificial_defs (df, bb_index), 0);
}
/* Compute local reaching use (upward exposed use) info for each basic
   block within BLOCKS.  */

static void
df_ru_local_compute (struct dataflow *dflow,
                     bitmap all_blocks,
                     bitmap rescan_blocks ATTRIBUTE_UNUSED)
{
  struct df *df = dflow->df;
  unsigned int bb_index;
  unsigned int regno;
  bitmap_iterator bi;
  struct df_ru_problem_data *problem_data
    = (struct df_ru_problem_data *) dflow->problem_data;
  bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
  bitmap dense_invalidated = problem_data->dense_invalidated_by_call;

  df_set_seen ();

  if (!df->use_info.refs_organized)
    df_reorganize_refs (&df->use_info);

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      df_ru_bb_local_compute (dflow, bb_index);
    }

  /* Set up the knockout bit vectors to be applied across EH_EDGES.  */
  EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
    {
      struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
      if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
        bitmap_set_bit (sparse_invalidated, regno);
      else
        {
          bitmap defs = df_ref_bitmap (problem_data->use_sites, regno,
                                       reg_info->begin, reg_info->n_refs);
          bitmap_ior_into (dense_invalidated, defs);
        }
    }

  df_unset_seen ();
}
/* Initialize the solution bit vectors for problem.  */

static void
df_ru_init_solution (struct dataflow *dflow, bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
      bitmap_copy (bb_info->in, bb_info->gen);
      bitmap_clear (bb_info->out);
    }
}
/* Out of target gets the OR of in of source.  */

static void
df_ru_confluence_n (struct dataflow *dflow, edge e)
{
  bitmap op1 = df_ru_get_bb_info (dflow, e->src->index)->out;
  bitmap op2 = df_ru_get_bb_info (dflow, e->dest->index)->in;

  if (e->flags & EDGE_EH)
    {
      struct df_ru_problem_data *problem_data
        = (struct df_ru_problem_data *) dflow->problem_data;
      bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
      bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
      struct df *df = dflow->df;
      bitmap_iterator bi;
      unsigned int regno;
      bitmap tmp = BITMAP_ALLOC (NULL);

      bitmap_copy (tmp, op2);
      bitmap_and_compl_into (tmp, dense_invalidated);

      EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
        {
          bitmap_clear_range (tmp,
                              DF_REG_USE_GET (df, regno)->begin,
                              DF_REG_USE_GET (df, regno)->n_refs);
        }
      bitmap_ior_into (op1, tmp);
      BITMAP_FREE (tmp);
    }
  else
    bitmap_ior_into (op1, op2);
}
/* Transfer function.  */

static bool
df_ru_transfer_function (struct dataflow *dflow, int bb_index)
{
  struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
  unsigned int regno;
  bitmap_iterator bi;
  bitmap in = bb_info->in;
  bitmap out = bb_info->out;
  bitmap gen = bb_info->gen;
  bitmap kill = bb_info->kill;
  bitmap sparse_kill = bb_info->sparse_kill;

  if (bitmap_empty_p (sparse_kill))
    return bitmap_ior_and_compl (in, gen, out, kill);
  else
    {
      struct df *df = dflow->df;
      bool changed = false;
      bitmap tmp = BITMAP_ALLOC (NULL);
      bitmap_copy (tmp, out);
      EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
        {
          bitmap_clear_range (tmp,
                              DF_REG_USE_GET (df, regno)->begin,
                              DF_REG_USE_GET (df, regno)->n_refs);
        }
      bitmap_and_compl_into (tmp, kill);
      bitmap_ior_into (tmp, gen);
      changed = !bitmap_equal_p (tmp, in);
      if (changed)
        {
          BITMAP_FREE (in);
          bb_info->in = tmp;
        }
      else
        BITMAP_FREE (tmp);
      return changed;
    }
}
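
/* Both branches above implement the same backward equation,
   IN = GEN | (OUT & ~KILL); the sparse branch only folds the ranges
   recorded in SPARSE_KILL into the subtraction by hand.  A dense-only
   sketch of the equation (hypothetical helper, never called):  */

static bool ATTRIBUTE_UNUSED
df_example_ru_equation (bitmap in, bitmap gen, bitmap out, bitmap kill)
{
  /* Returns true if IN changed, which is what keeps the iterative
     solver running.  */
  return bitmap_ior_and_compl (in, gen, out, kill);
}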
/* Free all storage associated with the problem.  */

static void
df_ru_free (struct dataflow *dflow)
{
  unsigned int i;
  struct df_ru_problem_data *problem_data
    = (struct df_ru_problem_data *) dflow->problem_data;

  if (problem_data)
    {
      for (i = 0; i < dflow->block_info_size; i++)
        {
          struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, i);
          if (bb_info)
            {
              BITMAP_FREE (bb_info->kill);
              BITMAP_FREE (bb_info->sparse_kill);
              BITMAP_FREE (bb_info->gen);
              BITMAP_FREE (bb_info->in);
              BITMAP_FREE (bb_info->out);
            }
        }

      free_alloc_pool (dflow->block_pool);

      for (i = 0; i < problem_data->use_sites_size; i++)
        {
          bitmap bm = problem_data->use_sites[i];
          if (bm)
            BITMAP_FREE (bm);
        }

      free (problem_data->use_sites);
      BITMAP_FREE (problem_data->sparse_invalidated_by_call);
      BITMAP_FREE (problem_data->dense_invalidated_by_call);

      dflow->block_info_size = 0;
      free (dflow->block_info);
      free (dflow->problem_data);
    }
  free (dflow);
}
/* Debugging info.  */

static void
df_ru_dump (struct dataflow *dflow, FILE *file)
{
  basic_block bb;
  struct df *df = dflow->df;
  struct df_ru_problem_data *problem_data
    = (struct df_ru_problem_data *) dflow->problem_data;
  unsigned int m = max_reg_num ();
  unsigned int regno;

  if (!dflow->block_info)
    return;

  fprintf (file, "Reaching uses:\n");

  fprintf (file, "  sparse invalidated \t");
  dump_bitmap (file, problem_data->sparse_invalidated_by_call);
  fprintf (file, "  dense invalidated \t");
  dump_bitmap (file, problem_data->dense_invalidated_by_call);

  for (regno = 0; regno < m; regno++)
    if (DF_REG_USE_GET (df, regno)->n_refs)
      fprintf (file, "%d[%d,%d] ", regno,
               DF_REG_USE_GET (df, regno)->begin,
               DF_REG_USE_GET (df, regno)->n_refs);
  fprintf (file, "\n");

  FOR_ALL_BB (bb)
    {
      struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb->index);
      df_print_bb_index (bb, file);

      if (!bb_info->in)
        continue;

      fprintf (file, "  in  \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
      dump_bitmap (file, bb_info->in);
      fprintf (file, "  gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
      dump_bitmap (file, bb_info->gen);
      fprintf (file, "  kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
      dump_bitmap (file, bb_info->kill);
      fprintf (file, "  out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
      dump_bitmap (file, bb_info->out);
    }
}
/* All of the information associated with every instance of the problem.  */

static struct df_problem problem_RU =
{
  DF_RU,                      /* Problem id.  */
  DF_BACKWARD,                /* Direction.  */
  df_ru_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_ru_free_bb_info,         /* Free basic block info.  */
  df_ru_local_compute,        /* Local compute function.  */
  df_ru_init_solution,        /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  df_ru_confluence_n,         /* Confluence operator n.  */
  df_ru_transfer_function,    /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_ru_free,                 /* Free all of the problem information.  */
  df_ru_dump,                 /* Debugging.  */
  NULL,                       /* Dependent problem.  */
  0                           /* Changeable flags.  */
};

/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

struct dataflow *
df_ru_add_problem (struct df *df, int flags)
{
  return df_add_problem (df, &problem_RU, flags);
}
/*----------------------------------------------------------------------------
   REACHING DEFINITIONS

   Find the locations in the function where each definition site for a
   pseudo reaches.  In and out bitvectors are built for each basic
   block.  The id field in the ref is used to index into these sets.
   See df.h for details.
----------------------------------------------------------------------------*/

/* See the comment at the top of the Reaching Uses problem for how the
   uses are represented in the kill sets.  The same games are played
   here for the defs.  */

/* Private data used to compute the solution for this problem.  These
   data structures are not accessible outside of this module.  */
struct df_rd_problem_data
{
  /* If the number of defs for regnum N is less than
     DF_SPARSE_THRESHOLD, def_sites[N] contains a mask of all of the
     defs of reg N indexed by the id in the ref structure.  If there
     are more than DF_SPARSE_THRESHOLD defs for regnum N, a different
     mechanism is used to mask the defs.  */
  bitmap *def_sites;            /* Bitmap of defs for each pseudo.  */
  unsigned int def_sites_size;  /* Size of def_sites.  */
  /* The set of defs to regs invalidated by call.  */
  bitmap sparse_invalidated_by_call;
  /* The set of defs to regs invalidated by call for rd.  */
  bitmap dense_invalidated_by_call;
};
/* Get basic block info.  */

struct df_rd_bb_info *
df_rd_get_bb_info (struct dataflow *dflow, unsigned int index)
{
  return (struct df_rd_bb_info *) dflow->block_info[index];
}

/* Set basic block info.  */

static void
df_rd_set_bb_info (struct dataflow *dflow, unsigned int index,
                   struct df_rd_bb_info *bb_info)
{
  dflow->block_info[index] = bb_info;
}

/* Free basic block info.  */

static void
df_rd_free_bb_info (struct dataflow *dflow,
                    basic_block bb ATTRIBUTE_UNUSED,
                    void *vbb_info)
{
  struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
  if (bb_info)
    {
      BITMAP_FREE (bb_info->kill);
      BITMAP_FREE (bb_info->sparse_kill);
      BITMAP_FREE (bb_info->gen);
      BITMAP_FREE (bb_info->in);
      BITMAP_FREE (bb_info->out);
      pool_free (dflow->block_pool, bb_info);
    }
}
/* Allocate or reset bitmaps for DFLOW blocks.  The solution bits are
   not touched unless the block is new.  */

static void
df_rd_alloc (struct dataflow *dflow,
             bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
             bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;
  unsigned int reg_size = max_reg_num ();

  if (!dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_rd_block pool",
                                           sizeof (struct df_rd_bb_info), 50);

  if (dflow->problem_data)
    {
      unsigned int i;
      struct df_rd_problem_data *problem_data
        = (struct df_rd_problem_data *) dflow->problem_data;

      for (i = 0; i < problem_data->def_sites_size; i++)
        {
          bitmap bm = problem_data->def_sites[i];
          if (bm)
            {
              BITMAP_FREE (bm);
              problem_data->def_sites[i] = NULL;
            }
        }

      if (problem_data->def_sites_size < reg_size)
        {
          problem_data->def_sites
            = xrealloc (problem_data->def_sites, reg_size * sizeof (bitmap));
          memset (problem_data->def_sites + problem_data->def_sites_size, 0,
                  (reg_size - problem_data->def_sites_size) * sizeof (bitmap));
          problem_data->def_sites_size = reg_size;
        }

      bitmap_clear (problem_data->sparse_invalidated_by_call);
      bitmap_clear (problem_data->dense_invalidated_by_call);
    }
  else
    {
      struct df_rd_problem_data *problem_data = XNEW (struct df_rd_problem_data);
      dflow->problem_data = problem_data;

      problem_data->def_sites = XCNEWVEC (bitmap, reg_size);
      problem_data->def_sites_size = reg_size;
      problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
      problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
    }

  df_grow_bb_info (dflow);

  /* Because of the clustering of all use sites for the same pseudo,
     we have to process all of the blocks before doing the
     analysis.  */

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
      if (bb_info)
        {
          bitmap_clear (bb_info->kill);
          bitmap_clear (bb_info->sparse_kill);
          bitmap_clear (bb_info->gen);
        }
      else
        {
          bb_info = (struct df_rd_bb_info *) pool_alloc (dflow->block_pool);
          df_rd_set_bb_info (dflow, bb_index, bb_info);
          bb_info->kill = BITMAP_ALLOC (NULL);
          bb_info->sparse_kill = BITMAP_ALLOC (NULL);
          bb_info->gen = BITMAP_ALLOC (NULL);
          bb_info->in = BITMAP_ALLOC (NULL);
          bb_info->out = BITMAP_ALLOC (NULL);
        }
    }
}
/* Process a list of DEFs for df_rd_bb_local_compute.  */

static void
df_rd_bb_local_compute_process_def (struct dataflow *dflow,
                                    struct df_rd_bb_info *bb_info,
                                    struct df_ref *def,
                                    enum df_ref_flags top_flag)
{
  struct df *df = dflow->df;
  while (def)
    {
      if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
        {
          unsigned int regno = DF_REF_REGNO (def);
          unsigned int begin = DF_REG_DEF_GET (df, regno)->begin;
          unsigned int n_defs = DF_REG_DEF_GET (df, regno)->n_refs;

          /* Only the last def(s) for a regno in the block has any
             effect.  */
          if (!bitmap_bit_p (seen_in_block, regno))
            {
              /* The first def for regno in the insn gets to knock out
                 the defs from other instructions.  */
              if ((!bitmap_bit_p (seen_in_insn, regno))
                  /* If the def is to only part of the reg, it does
                     not kill the other defs that reach here.  */
                  && (!((DF_REF_FLAGS (def) & DF_REF_PARTIAL)
                        || (DF_REF_FLAGS (def) & DF_REF_MAY_CLOBBER))))
                {
                  if (n_defs > DF_SPARSE_THRESHOLD)
                    {
                      bitmap_set_bit (bb_info->sparse_kill, regno);
                      bitmap_clear_range (bb_info->gen, begin, n_defs);
                    }
                  else
                    {
                      struct df_rd_problem_data *problem_data
                        = (struct df_rd_problem_data *) dflow->problem_data;
                      bitmap defs = df_ref_bitmap (problem_data->def_sites,
                                                   regno, begin, n_defs);
                      bitmap_ior_into (bb_info->kill, defs);
                      bitmap_and_compl_into (bb_info->gen, defs);
                    }
                }

              bitmap_set_bit (seen_in_insn, regno);
              /* All defs for regno in the instruction may be put into
                 the gen set.  */
              if (!(DF_REF_FLAGS (def)
                    & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
                bitmap_set_bit (bb_info->gen, DF_REF_ID (def));
            }
        }
      def = def->next_ref;
    }
}
/* Compute local reaching def info for basic block BB.  */

static void
df_rd_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
{
  struct df *df = dflow->df;
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
  rtx insn;

  bitmap_clear (seen_in_block);
  bitmap_clear (seen_in_insn);

  df_rd_bb_local_compute_process_def (dflow, bb_info,
                                      df_get_artificial_defs (df, bb_index), 0);

  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      unsigned int uid = INSN_UID (insn);

      if (!INSN_P (insn))
        continue;

      df_rd_bb_local_compute_process_def (dflow, bb_info,
                                          DF_INSN_UID_DEFS (df, uid), 0);

      /* This complex dance with the two bitmaps is required because
         instructions can assign twice to the same pseudo.  This
         generally happens with calls that will have one def for the
         result and another def for the clobber.  If only one vector
         is used and the clobber goes first, the result will be
         lost.  */
      bitmap_ior_into (seen_in_block, seen_in_insn);
      bitmap_clear (seen_in_insn);
    }

  /* Process the artificial defs at the top of the block last since we
     are going backwards through the block and these are logically at
     the start.  */
  df_rd_bb_local_compute_process_def (dflow, bb_info,
                                      df_get_artificial_defs (df, bb_index),
                                      DF_REF_AT_TOP);
}
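
/* A sketch of the "complex dance" noted above (illustration only; the
   helper is hypothetical and the regnos/ids arbitrary).  A call insn
   can define two registers at once, e.g. one def for the result and
   one for a clobber.  Recording the insn's defs in seen_in_insn first
   means neither def of the insn hides the other, while both still
   shadow defs in the earlier instructions processed afterwards.  */

static void ATTRIBUTE_UNUSED
df_example_seen_dance (bitmap gen, unsigned int result_id,
                       unsigned int result_regno, unsigned int clobber_regno)
{
  /* Record both defs of the instruction in seen_in_insn first.  */
  bitmap_set_bit (seen_in_insn, result_regno);
  bitmap_set_bit (seen_in_insn, clobber_regno);

  /* The real def (not a clobber) still reaches the gen set, which is
     indexed by ref id in this problem.  */
  bitmap_set_bit (gen, result_id);

  /* Only once the whole insn is done do its regnos become "seen in
     this block" for the instructions processed after it, i.e. the
     ones above it, since the walk runs in reverse.  */
  bitmap_ior_into (seen_in_block, seen_in_insn);
  bitmap_clear (seen_in_insn);
}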
/* Compute local reaching def info for each basic block within BLOCKS.  */

static void
df_rd_local_compute (struct dataflow *dflow,
                     bitmap all_blocks,
                     bitmap rescan_blocks ATTRIBUTE_UNUSED)
{
  struct df *df = dflow->df;
  unsigned int bb_index;
  unsigned int regno;
  bitmap_iterator bi;
  struct df_rd_problem_data *problem_data
    = (struct df_rd_problem_data *) dflow->problem_data;
  bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
  bitmap dense_invalidated = problem_data->dense_invalidated_by_call;

  df_set_seen ();

  if (!df->def_info.refs_organized)
    df_reorganize_refs (&df->def_info);

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      df_rd_bb_local_compute (dflow, bb_index);
    }

  /* Set up the knockout bit vectors to be applied across EH_EDGES.  */
  EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
    {
      struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
      if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
        bitmap_set_bit (sparse_invalidated, regno);
      else
        {
          bitmap defs = df_ref_bitmap (problem_data->def_sites, regno,
                                       reg_info->begin, reg_info->n_refs);
          bitmap_ior_into (dense_invalidated, defs);
        }
    }

  df_unset_seen ();
}
/* Initialize the solution bit vectors for problem.  */

static void
df_rd_init_solution (struct dataflow *dflow, bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);

      bitmap_copy (bb_info->out, bb_info->gen);
      bitmap_clear (bb_info->in);
    }
}
/* In of target gets the OR of out of source.  */

static void
df_rd_confluence_n (struct dataflow *dflow, edge e)
{
  bitmap op1 = df_rd_get_bb_info (dflow, e->dest->index)->in;
  bitmap op2 = df_rd_get_bb_info (dflow, e->src->index)->out;

  if (e->flags & EDGE_EH)
    {
      struct df_rd_problem_data *problem_data
        = (struct df_rd_problem_data *) dflow->problem_data;
      bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
      bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
      struct df *df = dflow->df;
      bitmap_iterator bi;
      unsigned int regno;
      bitmap tmp = BITMAP_ALLOC (NULL);

      bitmap_copy (tmp, op2);
      bitmap_and_compl_into (tmp, dense_invalidated);

      EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
        {
          bitmap_clear_range (tmp,
                              DF_REG_DEF_GET (df, regno)->begin,
                              DF_REG_DEF_GET (df, regno)->n_refs);
        }
      bitmap_ior_into (op1, tmp);
      BITMAP_FREE (tmp);
    }
  else
    bitmap_ior_into (op1, op2);
}
/* Transfer function.  */

static bool
df_rd_transfer_function (struct dataflow *dflow, int bb_index)
{
  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
  unsigned int regno;
  bitmap_iterator bi;
  bitmap in = bb_info->in;
  bitmap out = bb_info->out;
  bitmap gen = bb_info->gen;
  bitmap kill = bb_info->kill;
  bitmap sparse_kill = bb_info->sparse_kill;

  if (bitmap_empty_p (sparse_kill))
    return bitmap_ior_and_compl (out, gen, in, kill);
  else
    {
      struct df *df = dflow->df;
      bool changed = false;
      bitmap tmp = BITMAP_ALLOC (NULL);
      bitmap_copy (tmp, in);
      EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
        {
          bitmap_clear_range (tmp,
                              DF_REG_DEF_GET (df, regno)->begin,
                              DF_REG_DEF_GET (df, regno)->n_refs);
        }
      bitmap_and_compl_into (tmp, kill);
      bitmap_ior_into (tmp, gen);
      changed = !bitmap_equal_p (tmp, out);
      if (changed)
        {
          BITMAP_FREE (out);
          bb_info->out = tmp;
        }
      else
        BITMAP_FREE (tmp);
      return changed;
    }
}
/* Free all storage associated with the problem.  */

static void
df_rd_free (struct dataflow *dflow)
{
  unsigned int i;
  struct df_rd_problem_data *problem_data
    = (struct df_rd_problem_data *) dflow->problem_data;

  if (problem_data)
    {
      for (i = 0; i < dflow->block_info_size; i++)
        {
          struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, i);
          if (bb_info)
            {
              BITMAP_FREE (bb_info->kill);
              BITMAP_FREE (bb_info->sparse_kill);
              BITMAP_FREE (bb_info->gen);
              BITMAP_FREE (bb_info->in);
              BITMAP_FREE (bb_info->out);
            }
        }

      free_alloc_pool (dflow->block_pool);

      for (i = 0; i < problem_data->def_sites_size; i++)
        {
          bitmap bm = problem_data->def_sites[i];
          if (bm)
            BITMAP_FREE (bm);
        }

      free (problem_data->def_sites);
      BITMAP_FREE (problem_data->sparse_invalidated_by_call);
      BITMAP_FREE (problem_data->dense_invalidated_by_call);

      dflow->block_info_size = 0;
      free (dflow->block_info);
      free (dflow->problem_data);
    }
  free (dflow);
}
/* Debugging info.  */

static void
df_rd_dump (struct dataflow *dflow, FILE *file)
{
  basic_block bb;
  struct df *df = dflow->df;
  struct df_rd_problem_data *problem_data
    = (struct df_rd_problem_data *) dflow->problem_data;
  unsigned int m = max_reg_num ();
  unsigned int regno;

  if (!dflow->block_info)
    return;

  fprintf (file, "Reaching defs:\n\n");

  fprintf (file, "  sparse invalidated \t");
  dump_bitmap (file, problem_data->sparse_invalidated_by_call);
  fprintf (file, "  dense invalidated \t");
  dump_bitmap (file, problem_data->dense_invalidated_by_call);

  for (regno = 0; regno < m; regno++)
    if (DF_REG_DEF_GET (df, regno)->n_refs)
      fprintf (file, "%d[%d,%d] ", regno,
               DF_REG_DEF_GET (df, regno)->begin,
               DF_REG_DEF_GET (df, regno)->n_refs);
  fprintf (file, "\n");

  FOR_ALL_BB (bb)
    {
      struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb->index);
      df_print_bb_index (bb, file);

      if (!bb_info->in)
        continue;

      fprintf (file, "  in  \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
      dump_bitmap (file, bb_info->in);
      fprintf (file, "  gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
      dump_bitmap (file, bb_info->gen);
      fprintf (file, "  kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
      dump_bitmap (file, bb_info->kill);
      fprintf (file, "  out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
      dump_bitmap (file, bb_info->out);
    }
}
/* All of the information associated with every instance of the problem.  */

static struct df_problem problem_RD =
{
  DF_RD,                      /* Problem id.  */
  DF_FORWARD,                 /* Direction.  */
  df_rd_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_rd_free_bb_info,         /* Free basic block info.  */
  df_rd_local_compute,        /* Local compute function.  */
  df_rd_init_solution,        /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  df_rd_confluence_n,         /* Confluence operator n.  */
  df_rd_transfer_function,    /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_rd_free,                 /* Free all of the problem information.  */
  df_rd_dump,                 /* Debugging.  */
  NULL,                       /* Dependent problem.  */
  0                           /* Changeable flags.  */
};

/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

struct dataflow *
df_rd_add_problem (struct df *df, int flags)
{
  return df_add_problem (df, &problem_RD, flags);
}
/*----------------------------------------------------------------------------
   LIVE REGISTERS

   Find the locations in the function where any use of a pseudo can
   reach in the backwards direction.  In and out bitvectors are built
   for each basic block.  The regnum is used to index into these sets.
   See df.h for details.
----------------------------------------------------------------------------*/
/* Get basic block info.  */

struct df_lr_bb_info *
df_lr_get_bb_info (struct dataflow *dflow, unsigned int index)
{
  return (struct df_lr_bb_info *) dflow->block_info[index];
}

/* Set basic block info.  */

static void
df_lr_set_bb_info (struct dataflow *dflow, unsigned int index,
                   struct df_lr_bb_info *bb_info)
{
  dflow->block_info[index] = bb_info;
}

/* Free basic block info.  */

static void
df_lr_free_bb_info (struct dataflow *dflow,
                    basic_block bb ATTRIBUTE_UNUSED,
                    void *vbb_info)
{
  struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
  if (bb_info)
    {
      BITMAP_FREE (bb_info->use);
      BITMAP_FREE (bb_info->def);
      BITMAP_FREE (bb_info->in);
      BITMAP_FREE (bb_info->out);
      pool_free (dflow->block_pool, bb_info);
    }
}
/* Allocate or reset bitmaps for DFLOW blocks.  The solution bits are
   not touched unless the block is new.  */

static void
df_lr_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
             bitmap all_blocks ATTRIBUTE_UNUSED)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  if (!dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_lr_block pool",
                                           sizeof (struct df_lr_bb_info), 50);

  df_grow_bb_info (dflow);

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
    {
      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
      if (bb_info)
        {
          bitmap_clear (bb_info->def);
          bitmap_clear (bb_info->use);
        }
      else
        {
          bb_info = (struct df_lr_bb_info *) pool_alloc (dflow->block_pool);
          df_lr_set_bb_info (dflow, bb_index, bb_info);
          bb_info->use = BITMAP_ALLOC (NULL);
          bb_info->def = BITMAP_ALLOC (NULL);
          bb_info->in = BITMAP_ALLOC (NULL);
          bb_info->out = BITMAP_ALLOC (NULL);
        }
    }
}
/* Compute local live register info for basic block BB.  */

static void
df_lr_bb_local_compute (struct dataflow *dflow,
                        struct df *df, unsigned int bb_index)
{
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
  rtx insn;
  struct df_ref *def;
  struct df_ref *use;

  /* Process the registers set in an exception handler.  */
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if (((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
        && (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL)))
      {
        unsigned int dregno = DF_REF_REGNO (def);
        bitmap_set_bit (bb_info->def, dregno);
        bitmap_clear_bit (bb_info->use, dregno);
      }

  /* Process the hardware registers that are always live.  */
  for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
    /* Add use to set of uses in this BB.  */
    if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
      bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));

  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      unsigned int uid = INSN_UID (insn);

      if (!INSN_P (insn))
        continue;

      if (CALL_P (insn))
        {
          for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
            {
              unsigned int dregno = DF_REF_REGNO (def);

              if (dregno >= FIRST_PSEUDO_REGISTER
                  || !(SIBLING_CALL_P (insn)
                       && bitmap_bit_p (df->exit_block_uses, dregno)
                       && !refers_to_regno_p (dregno, dregno + 1,
                                              current_function_return_rtx,
                                              (rtx *) 0)))
                {
                  /* If the def is to only part of the reg, it does
                     not kill the other defs that reach here.  */
                  if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
                    {
                      bitmap_set_bit (bb_info->def, dregno);
                      bitmap_clear_bit (bb_info->use, dregno);
                    }
                }
            }
        }
      else
        {
          for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
            {
              unsigned int dregno = DF_REF_REGNO (def);

              if (DF_INSN_CONTAINS_ASM (df, insn)
                  && dregno < FIRST_PSEUDO_REGISTER)
                {
                  unsigned int i;
                  unsigned int end = dregno
                    + hard_regno_nregs[dregno][GET_MODE (DF_REF_REG (def))] - 1;
                  for (i = dregno; i <= end; ++i)
                    regs_asm_clobbered[i] = 1;
                }
              /* If the def is to only part of the reg, it does
                 not kill the other defs that reach here.  */
              if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
                {
                  bitmap_set_bit (bb_info->def, dregno);
                  bitmap_clear_bit (bb_info->use, dregno);
                }
            }
        }

      for (use = DF_INSN_UID_USES (df, uid); use; use = use->next_ref)
        /* Add use to set of uses in this BB.  */
        bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
    }

  /* Process the registers set in an exception handler.  */
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP)
        && (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL)))
      {
        unsigned int dregno = DF_REF_REGNO (def);
        bitmap_set_bit (bb_info->def, dregno);
        bitmap_clear_bit (bb_info->use, dregno);
      }

  /* Process the uses that are live into an exception handler.  */
  for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
    /* Add use to set of uses in this BB.  */
    if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
      bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
}
/* Compute local live register info for each basic block within BLOCKS.  */

static void
df_lr_local_compute (struct dataflow *dflow,
                     bitmap all_blocks,
                     bitmap rescan_blocks)
{
  struct df *df = dflow->df;
  unsigned int bb_index;
  bitmap_iterator bi;

  /* Assume that the stack pointer is unchanging if alloca hasn't
     been used.  */
  if (bitmap_equal_p (all_blocks, rescan_blocks))
    memset (regs_asm_clobbered, 0, sizeof (regs_asm_clobbered));

  bitmap_clear (df->hardware_regs_used);

  /* The all-important stack pointer must always be live.  */
  bitmap_set_bit (df->hardware_regs_used, STACK_POINTER_REGNUM);

  /* Before reload, there are a few registers that must be forced
     live everywhere -- which might not already be the case for
     blocks within infinite loops.  */
  if (!reload_completed)
    {
      /* Any reference to any pseudo before reload is a potential
         reference of the frame pointer.  */
      bitmap_set_bit (df->hardware_regs_used, FRAME_POINTER_REGNUM);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
         reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
        bitmap_set_bit (df->hardware_regs_used, ARG_POINTER_REGNUM);
#endif

      /* Any constant, or pseudo with constant equivalences, may
         require reloading from memory using the pic register.  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        bitmap_set_bit (df->hardware_regs_used, PIC_OFFSET_TABLE_REGNUM);
    }

  if (bitmap_bit_p (rescan_blocks, EXIT_BLOCK))
    {
      /* The exit block is special for this problem and its bits are
         computed from thin air.  */
      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, EXIT_BLOCK);
      bitmap_copy (bb_info->use, df->exit_block_uses);
    }

  EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
    {
      if (bb_index == EXIT_BLOCK)
        continue;
      df_lr_bb_local_compute (dflow, df, bb_index);
    }
}
/* Initialize the solution vectors.  */

static void
df_lr_init (struct dataflow *dflow, bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
      bitmap_copy (bb_info->in, bb_info->use);
      bitmap_clear (bb_info->out);
    }
}
/* Confluence function that processes infinite loops.  This might be a
   noreturn function that throws.  And even if it isn't, getting the
   unwind info right helps debugging.  */

static void
df_lr_confluence_0 (struct dataflow *dflow, basic_block bb)
{
  struct df *df = dflow->df;
  bitmap op1 = df_lr_get_bb_info (dflow, bb->index)->out;
  if (bb != EXIT_BLOCK_PTR)
    bitmap_copy (op1, df->hardware_regs_used);
}

/* Confluence function that ignores fake edges.  */

static void
df_lr_confluence_n (struct dataflow *dflow, edge e)
{
  bitmap op1 = df_lr_get_bb_info (dflow, e->src->index)->out;
  bitmap op2 = df_lr_get_bb_info (dflow, e->dest->index)->in;

  /* Call-clobbered registers die across exception and call edges.  */
  /* ??? Abnormal call edges ignored for the moment, as this gets
     confused by sibling call edges, which crashes reg-stack.  */
  if (e->flags & EDGE_EH)
    bitmap_ior_and_compl_into (op1, op2, df_invalidated_by_call);
  else
    bitmap_ior_into (op1, op2);

  bitmap_ior_into (op1, dflow->df->hardware_regs_used);
}
/* Transfer function.  */

static bool
df_lr_transfer_function (struct dataflow *dflow, int bb_index)
{
  struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
  bitmap in = bb_info->in;
  bitmap out = bb_info->out;
  bitmap use = bb_info->use;
  bitmap def = bb_info->def;

  return bitmap_ior_and_compl (in, use, out, def);
}
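
/* A minimal sketch of one relaxation step for a single block,
   assuming the bb_info layout above (hypothetical helper, never
   called; it ignores the EH and fake-edge special cases the real
   confluence functions handle).  */

static void ATTRIBUTE_UNUSED
df_example_lr_step (struct dataflow *dflow, basic_block bb)
{
  edge e;
  edge_iterator ei;
  struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb->index);

  /* Confluence: OUT is the union of the IN sets of the successors.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    bitmap_ior_into (bb_info->out,
                     df_lr_get_bb_info (dflow, e->dest->index)->in);

  /* Transfer: IN = USE | (OUT & ~DEF).  */
  bitmap_ior_and_compl (bb_info->in, bb_info->use,
                        bb_info->out, bb_info->def);
}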
/* Free all storage associated with the problem.  */

static void
df_lr_free (struct dataflow *dflow)
{
  if (dflow->block_info)
    {
      unsigned int i;
      for (i = 0; i < dflow->block_info_size; i++)
        {
          struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, i);
          if (bb_info)
            {
              BITMAP_FREE (bb_info->use);
              BITMAP_FREE (bb_info->def);
              BITMAP_FREE (bb_info->in);
              BITMAP_FREE (bb_info->out);
            }
        }
      free_alloc_pool (dflow->block_pool);

      dflow->block_info_size = 0;
      free (dflow->block_info);
    }

  free (dflow->problem_data);
  free (dflow);
}
/* Debugging info.  */

static void
df_lr_dump (struct dataflow *dflow, FILE *file)
{
  basic_block bb;

  if (!dflow->block_info)
    return;

  fprintf (file, "Live Registers:\n");
  FOR_ALL_BB (bb)
    {
      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb->index);
      df_print_bb_index (bb, file);

      if (!bb_info->in)
        continue;

      fprintf (file, "  in  \t");
      dump_bitmap (file, bb_info->in);
      fprintf (file, "  use \t");
      dump_bitmap (file, bb_info->use);
      fprintf (file, "  def \t");
      dump_bitmap (file, bb_info->def);
      fprintf (file, "  out \t");
      dump_bitmap (file, bb_info->out);
    }
}
/* All of the information associated with every instance of the problem.  */

static struct df_problem problem_LR =
{
  DF_LR,                      /* Problem id.  */
  DF_BACKWARD,                /* Direction.  */
  df_lr_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_lr_free_bb_info,         /* Free basic block info.  */
  df_lr_local_compute,        /* Local compute function.  */
  df_lr_init,                 /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  df_lr_confluence_0,         /* Confluence operator 0.  */
  df_lr_confluence_n,         /* Confluence operator n.  */
  df_lr_transfer_function,    /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_lr_free,                 /* Free all of the problem information.  */
  df_lr_dump,                 /* Debugging.  */
  NULL,                       /* Dependent problem.  */
  0                           /* Changeable flags.  */
};

/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

struct dataflow *
df_lr_add_problem (struct df *df, int flags)
{
  return df_add_problem (df, &problem_LR, flags);
}
/*----------------------------------------------------------------------------
   UNINITIALIZED REGISTERS

   Find the set of uses for registers that are reachable from the entry
   block without passing thru a definition.  In and out bitvectors are built
   for each basic block.  The regnum is used to index into these sets.
   See df.h for details.
----------------------------------------------------------------------------*/
/* Get basic block info.  */

struct df_ur_bb_info *
df_ur_get_bb_info (struct dataflow *dflow, unsigned int index)
{
  return (struct df_ur_bb_info *) dflow->block_info[index];
}

/* Set basic block info.  */

static void
df_ur_set_bb_info (struct dataflow *dflow, unsigned int index,
                   struct df_ur_bb_info *bb_info)
{
  dflow->block_info[index] = bb_info;
}

/* Free basic block info.  */

static void
df_ur_free_bb_info (struct dataflow *dflow,
                    basic_block bb ATTRIBUTE_UNUSED,
                    void *vbb_info)
{
  struct df_ur_bb_info *bb_info = (struct df_ur_bb_info *) vbb_info;
  if (bb_info)
    {
      BITMAP_FREE (bb_info->gen);
      BITMAP_FREE (bb_info->kill);
      BITMAP_FREE (bb_info->in);
      BITMAP_FREE (bb_info->out);
      pool_free (dflow->block_pool, bb_info);
    }
}
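
/* A small sketch of how the solution reads, under the set names used
   in this problem (hypothetical helper, never called): a register is
   suspect at the top of a block when it is live there (the LR
   problem) and yet no definition of it reaches that point (this
   problem).  */

static bool ATTRIBUTE_UNUSED
df_example_maybe_uninitialized_p (struct dataflow *ur_dflow,
                                  struct dataflow *lr_dflow,
                                  unsigned int bb_index, unsigned int regno)
{
  struct df_ur_bb_info *ur_info = df_ur_get_bb_info (ur_dflow, bb_index);
  struct df_lr_bb_info *lr_info = df_lr_get_bb_info (lr_dflow, bb_index);

  /* Live on entry, but not reached by any def on some path.  */
  return bitmap_bit_p (lr_info->in, regno)
         && !bitmap_bit_p (ur_info->in, regno);
}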
/* Allocate or reset bitmaps for DFLOW blocks.  The solution bits are
   not touched unless the block is new.  */

static void
df_ur_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
             bitmap all_blocks ATTRIBUTE_UNUSED)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  if (!dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_ur_block pool",
                                           sizeof (struct df_ur_bb_info), 100);

  df_grow_bb_info (dflow);

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
    {
      struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
      if (bb_info)
        {
          bitmap_clear (bb_info->kill);
          bitmap_clear (bb_info->gen);
        }
      else
        {
          bb_info = (struct df_ur_bb_info *) pool_alloc (dflow->block_pool);
          df_ur_set_bb_info (dflow, bb_index, bb_info);
          bb_info->kill = BITMAP_ALLOC (NULL);
          bb_info->gen = BITMAP_ALLOC (NULL);
          bb_info->in = BITMAP_ALLOC (NULL);
          bb_info->out = BITMAP_ALLOC (NULL);
        }
    }
}
/* Compute local uninitialized register info for basic block BB.  */

static void
df_ur_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
{
  struct df *df = dflow->df;
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
  rtx insn;
  struct df_ref *def;

  bitmap_clear (seen_in_block);
  bitmap_clear (seen_in_insn);

  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
      {
        unsigned int regno = DF_REF_REGNO (def);
        if (!bitmap_bit_p (seen_in_block, regno))
          {
            bitmap_set_bit (seen_in_block, regno);
            bitmap_set_bit (bb_info->gen, regno);
          }
      }

  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      unsigned int uid = INSN_UID (insn);
      if (!INSN_P (insn))
        continue;

      for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
        {
          unsigned int regno = DF_REF_REGNO (def);
          /* Only the last def counts.  */
          if (!bitmap_bit_p (seen_in_block, regno))
            {
              bitmap_set_bit (seen_in_insn, regno);

              if (DF_REF_FLAGS (def)
                  & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
                {
                  /* Only must clobbers for the entire reg destroy the
                     value.  */
                  if ((DF_REF_FLAGS (def) & DF_REF_MUST_CLOBBER)
                      && (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL)))
                    bitmap_set_bit (bb_info->kill, regno);
                }
              else
                bitmap_set_bit (bb_info->gen, regno);
            }
        }
      bitmap_ior_into (seen_in_block, seen_in_insn);
      bitmap_clear (seen_in_insn);
    }

  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
      {
        unsigned int regno = DF_REF_REGNO (def);
        if (!bitmap_bit_p (seen_in_block, regno))
          {
            bitmap_set_bit (seen_in_block, regno);
            bitmap_set_bit (bb_info->gen, regno);
          }
      }
}
/* Compute local uninitialized register info.  */

static void
df_ur_local_compute (struct dataflow *dflow,
                     bitmap all_blocks ATTRIBUTE_UNUSED,
                     bitmap rescan_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  df_set_seen ();

  EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
    {
      df_ur_bb_local_compute (dflow, bb_index);
    }

  df_unset_seen ();
}

/* Initialize the solution vectors.  */

static void
df_ur_init (struct dataflow *dflow, bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);

      bitmap_copy (bb_info->out, bb_info->gen);
      bitmap_clear (bb_info->in);
    }
}
/* Or in the stack regs, hard regs and early clobber regs into the
   ur_in sets of all of the blocks.  */

static void
df_ur_local_finalize (struct dataflow *dflow, bitmap all_blocks)
{
  struct df *df = dflow->df;
  struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
  bitmap tmp = BITMAP_ALLOC (NULL);
  bitmap_iterator bi;
  unsigned int bb_index;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
      struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);

      /* No register may reach a location where it is not used.  Thus
         we trim the rr result to the places where it is used.  */
      bitmap_and_into (bb_info->in, bb_lr_info->in);
      bitmap_and_into (bb_info->out, bb_lr_info->out);

      /* Hard registers may still stick in the ur_out set, but not
         be in the ur_in set, if their only mention was in a call
         in this block.  This is because a call kills in the lr
         problem but does not kill in the ur problem.  To clean
         this up, we execute the transfer function on the lr_in
         set and then use that to knock bits out of ur_out.  */
      bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
                            bb_info->kill);
      bitmap_and_into (bb_info->out, tmp);
    }

  BITMAP_FREE (tmp);
}
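
/* The trimming above in isolation, as a sketch (hypothetical helper,
   never called): run the LR-style transfer over the block starting
   from what LR says is live on entry, and let only that survive in
   ur_out.  A call-clobbered hard reg that flows into the block stays
   in ur_out because a call does not kill it in this problem, while LR
   says it is dead after the call, so the intersection removes it.  */

static void ATTRIBUTE_UNUSED
df_example_ur_trim (struct df_ur_bb_info *bb_info,
                    struct df_lr_bb_info *bb_lr_info)
{
  bitmap tmp = BITMAP_ALLOC (NULL);

  /* What is live on entry and not killed here, plus what the block
     itself really sets.  */
  bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in, bb_info->kill);

  /* Nothing outside that set may stay in ur_out.  */
  bitmap_and_into (bb_info->out, tmp);

  BITMAP_FREE (tmp);
}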
/* Confluence function that ignores fake edges.  */

static void
df_ur_confluence_n (struct dataflow *dflow, edge e)
{
  bitmap op1 = df_ur_get_bb_info (dflow, e->dest->index)->in;
  bitmap op2 = df_ur_get_bb_info (dflow, e->src->index)->out;

  if (e->flags & EDGE_FAKE)
    return;

  bitmap_ior_into (op1, op2);
}

/* Transfer function.  */

static bool
df_ur_transfer_function (struct dataflow *dflow, int bb_index)
{
  struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
  bitmap in = bb_info->in;
  bitmap out = bb_info->out;
  bitmap gen = bb_info->gen;
  bitmap kill = bb_info->kill;

  return bitmap_ior_and_compl (out, gen, in, kill);
}
/* Free all storage associated with the problem.  */

static void
df_ur_free (struct dataflow *dflow)
{
  if (dflow->block_info)
    {
      unsigned int i;

      for (i = 0; i < dflow->block_info_size; i++)
        {
          struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, i);
          if (bb_info)
            {
              BITMAP_FREE (bb_info->gen);
              BITMAP_FREE (bb_info->kill);
              BITMAP_FREE (bb_info->in);
              BITMAP_FREE (bb_info->out);
            }
        }

      free_alloc_pool (dflow->block_pool);
      dflow->block_info_size = 0;
      free (dflow->block_info);
    }
  free (dflow);
}
/* Debugging info.  */

static void
df_ur_dump (struct dataflow *dflow, FILE *file)
{
  basic_block bb;

  if (!dflow->block_info)
    return;

  fprintf (file, "Undefined regs:\n");

  FOR_ALL_BB (bb)
    {
      struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb->index);
      df_print_bb_index (bb, file);

      if (!bb_info->in)
        continue;

      fprintf (file, "  in  \t");
      dump_bitmap (file, bb_info->in);
      fprintf (file, "  gen \t");
      dump_bitmap (file, bb_info->gen);
      fprintf (file, "  kill\t");
      dump_bitmap (file, bb_info->kill);
      fprintf (file, "  out \t");
      dump_bitmap (file, bb_info->out);
    }
}
/* All of the information associated with every instance of the problem.  */

static struct df_problem problem_UR =
{
  DF_UR,                      /* Problem id.  */
  DF_FORWARD,                 /* Direction.  */
  df_ur_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_ur_free_bb_info,         /* Free basic block info.  */
  df_ur_local_compute,        /* Local compute function.  */
  df_ur_init,                 /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  df_ur_confluence_n,         /* Confluence operator n.  */
  df_ur_transfer_function,    /* Transfer function.  */
  df_ur_local_finalize,       /* Finalize function.  */
  df_ur_free,                 /* Free all of the problem information.  */
  df_ur_dump,                 /* Debugging.  */
  df_lr_add_problem,          /* Dependent problem.  */
  0                           /* Changeable flags.  */
};

/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

struct dataflow *
df_ur_add_problem (struct df *df, int flags)
{
  return df_add_problem (df, &problem_UR, flags);
}
/*----------------------------------------------------------------------------
   UNINITIALIZED REGISTERS WITH EARLYCLOBBER

   Find the set of uses for registers that are reachable from the entry
   block without passing thru a definition.  In and out bitvectors are built
   for each basic block.  The regnum is used to index into these sets.
   See df.h for details.

   This is a variant of the UR problem above that has a lot of special
   features just for the register allocation phase.  This problem
   should go away if someone would fix the interference graph.

----------------------------------------------------------------------------*/

/* Private data used to compute the solution for this problem.  These
   data structures are not accessible outside of this module.  */
struct df_urec_problem_data
{
  bool earlyclobbers_found;     /* True if any instruction contains an
                                   earlyclobber.  */
#ifdef STACK_REGS
  bitmap stack_regs;            /* Registers that may be allocated to a STACK_REGS.  */
#endif
};
/* Get basic block info.  */

struct df_urec_bb_info *
df_urec_get_bb_info (struct dataflow *dflow, unsigned int index)
{
  return (struct df_urec_bb_info *) dflow->block_info[index];
}

/* Set basic block info.  */

static void
df_urec_set_bb_info (struct dataflow *dflow, unsigned int index,
                     struct df_urec_bb_info *bb_info)
{
  dflow->block_info[index] = bb_info;
}

/* Free basic block info.  */

static void
df_urec_free_bb_info (struct dataflow *dflow,
                      basic_block bb ATTRIBUTE_UNUSED,
                      void *vbb_info)
{
  struct df_urec_bb_info *bb_info = (struct df_urec_bb_info *) vbb_info;
  if (bb_info)
    {
      BITMAP_FREE (bb_info->gen);
      BITMAP_FREE (bb_info->kill);
      BITMAP_FREE (bb_info->in);
      BITMAP_FREE (bb_info->out);
      BITMAP_FREE (bb_info->earlyclobber);
      pool_free (dflow->block_pool, bb_info);
    }
}
/* Allocate or reset bitmaps for DFLOW blocks.  The solution bits are
   not touched unless the block is new.  */

static void
df_urec_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
               bitmap all_blocks ATTRIBUTE_UNUSED)
{
  unsigned int bb_index;
  bitmap_iterator bi;
  struct df_urec_problem_data *problem_data
    = (struct df_urec_problem_data *) dflow->problem_data;

  if (!dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_urec_block pool",
                                           sizeof (struct df_urec_bb_info), 50);

  if (!dflow->problem_data)
    {
      problem_data = XNEW (struct df_urec_problem_data);
      dflow->problem_data = problem_data;
    }
  problem_data->earlyclobbers_found = false;

  df_grow_bb_info (dflow);

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
    {
      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
      if (bb_info)
        {
          bitmap_clear (bb_info->kill);
          bitmap_clear (bb_info->gen);
          bitmap_clear (bb_info->earlyclobber);
        }
      else
        {
          bb_info = (struct df_urec_bb_info *) pool_alloc (dflow->block_pool);
          df_urec_set_bb_info (dflow, bb_index, bb_info);
          bb_info->kill = BITMAP_ALLOC (NULL);
          bb_info->gen = BITMAP_ALLOC (NULL);
          bb_info->in = BITMAP_ALLOC (NULL);
          bb_info->out = BITMAP_ALLOC (NULL);
          bb_info->earlyclobber = BITMAP_ALLOC (NULL);
        }
    }
}

/* The function modifies local info for register REG being changed in
   SETTER.  DATA is used to pass the current basic block info.  */

static void
df_urec_mark_reg_change (rtx reg, rtx setter, void *data)
{
  int regno;
  int endregno;
  int i;
  struct df_urec_bb_info *bb_info = (struct df_urec_bb_info *) data;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  endregno = regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      endregno += hard_regno_nregs[regno][GET_MODE (reg)];

      for (i = regno; i < endregno; i++)
        {
          bitmap_set_bit (bb_info->kill, i);

          if (GET_CODE (setter) != CLOBBER)
            bitmap_set_bit (bb_info->gen, i);
          else
            bitmap_clear_bit (bb_info->gen, i);
        }
    }
  else
    {
      bitmap_set_bit (bb_info->kill, regno);

      if (GET_CODE (setter) != CLOBBER)
        bitmap_set_bit (bb_info->gen, regno);
      else
        bitmap_clear_bit (bb_info->gen, regno);
    }
}

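/* Worked example (illustrative only, not from the original sources): on a
   target where hard register 0 is 4 bytes wide, a DImode store such as

     (set (reg:DI 0) ...)

   spans hard registers 0 and 1, so the loop above sets bits 0 and 1 in
   both KILL and GEN.  A (clobber (reg:DI 0)) instead sets the KILL bits
   and clears the GEN bits, because a clobber does not provide a usable
   definition.  Pseudo registers always take the single-bit path.  */
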
/* Classes of registers which could be early clobbered in the current
   insn.  */

static VEC(int,heap) *earlyclobber_regclass;

/* This function finds and stores register classes that could be early
   clobbered in INSN.  If any earlyclobber classes are found, the function
   returns TRUE, in all other cases it returns FALSE.  */

static bool
df_urec_check_earlyclobber (rtx insn)
{
  int opno;
  bool found = false;

  extract_insn (insn);

  VEC_truncate (int, earlyclobber_regclass, 0);
  for (opno = 0; opno < recog_data.n_operands; opno++)
    {
      char c;
      bool amp_p;
      int i;
      enum reg_class class;
      const char *p = recog_data.constraints[opno];

      class = NO_REGS;
      amp_p = false;
      for (;;)
        {
          c = *p;
          switch (c)
            {
            case '=':  case '+':  case '?':
            case '#':  case '!':
            case '*':  case '%':
            case 'm':  case '<':  case '>':  case 'V':  case 'o':
            case 'E':  case 'F':  case 'G':  case 'H':
            case 's':  case 'i':  case 'n':
            case 'I':  case 'J':  case 'K':  case 'L':
            case 'M':  case 'N':  case 'O':  case 'P':
            case 'X':
            case '0': case '1':  case '2':  case '3':  case '4':
            case '5': case '6':  case '7':  case '8':  case '9':
              /* These don't say anything we care about.  */
              break;

            case '&':
              amp_p = true;
              break;

            case '\0':
            case ',':
              if (amp_p && class != NO_REGS)
                {
                  int rc;

                  found = true;
                  for (i = 0;
                       VEC_iterate (int, earlyclobber_regclass, i, rc);
                       i++)
                    {
                      if (rc == (int) class)
                        goto found_rc;
                    }

                  /* We use VEC_quick_push here because
                     earlyclobber_regclass holds no more than
                     N_REG_CLASSES elements.  */
                  VEC_quick_push (int, earlyclobber_regclass, (int) class);
                found_rc:
                  ;
                }

              amp_p = false;
              class = NO_REGS;
              break;

            case 'r':
              class = GENERAL_REGS;
              break;

            default:
              class = REG_CLASS_FROM_CONSTRAINT (c, p);
              break;
            }
          if (c == '\0')
            break;
          p += CONSTRAINT_LEN (c, p);
        }
    }

  return found;
}

/* The function checks that pseudo-register *X has a class intersecting
   with the class of a pseudo-register that could be early clobbered in
   the same insn.

   This function is a no-op if earlyclobber_regclass is empty.

   Reload can assign the same hard register to an uninitialized
   pseudo-register and an early clobbered pseudo-register in an insn if
   the pseudo-register is used for the first time in the given BB and is
   not live at the BB start.  To prevent this we don't change life
   information for such pseudo-registers.  */

static int
df_urec_mark_reg_use_for_earlyclobber (rtx *x, void *data)
{
  enum reg_class pref_class, alt_class;
  int i;
  int rc;
  struct df_urec_bb_info *bb_info = (struct df_urec_bb_info *) data;

  if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (*x);

      if (bitmap_bit_p (bb_info->kill, regno)
          || bitmap_bit_p (bb_info->gen, regno))
        return 0;
      pref_class = reg_preferred_class (regno);
      alt_class = reg_alternate_class (regno);
      for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
        {
          if (reg_classes_intersect_p (rc, pref_class)
              || (rc != NO_REGS
                  && reg_classes_intersect_p (rc, alt_class)))
            {
              bitmap_set_bit (bb_info->earlyclobber, regno);
              break;
            }
        }
    }
  return 0;
}

/* The function processes all pseudo-registers in *X with the aid of
   the previous function.  */

static void
df_urec_mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
{
  for_each_rtx (x, df_urec_mark_reg_use_for_earlyclobber, data);
}

/* Compute local uninitialized register info for basic block BB.  */

static void
df_urec_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
{
  struct df *df = dflow->df;
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
  rtx insn;
  struct df_ref *def;

  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
      {
        unsigned int regno = DF_REF_REGNO (def);
        bitmap_set_bit (bb_info->gen, regno);
      }

  FOR_BB_INSNS (bb, insn)
    {
      if (INSN_P (insn))
        {
          note_stores (PATTERN (insn), df_urec_mark_reg_change, bb_info);
          if (df_urec_check_earlyclobber (insn))
            {
              struct df_urec_problem_data *problem_data
                = (struct df_urec_problem_data *) dflow->problem_data;
              problem_data->earlyclobbers_found = true;
              note_uses (&PATTERN (insn),
                         df_urec_mark_reg_use_for_earlyclobber_1, bb_info);
            }
        }
    }

  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
      {
        unsigned int regno = DF_REF_REGNO (def);
        bitmap_set_bit (bb_info->gen, regno);
      }
}

/* Compute local uninitialized register info.  */

static void
df_urec_local_compute (struct dataflow *dflow,
                       bitmap all_blocks ATTRIBUTE_UNUSED,
                       bitmap rescan_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;
#ifdef STACK_REGS
  int i;
  HARD_REG_SET zero, stack_hard_regs, used;
  struct df_urec_problem_data *problem_data
    = (struct df_urec_problem_data *) dflow->problem_data;

  /* Any register that MAY be allocated to a register stack (like the
     387) is treated poorly.  Each such register is marked as being
     live everywhere.  This keeps the register allocator and the
     subsequent passes from doing anything useful with these values.

     FIXME: This seems like an incredibly poor idea.  */

  CLEAR_HARD_REG_SET (zero);
  CLEAR_HARD_REG_SET (stack_hard_regs);
  for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
    SET_HARD_REG_BIT (stack_hard_regs, i);
  problem_data->stack_regs = BITMAP_ALLOC (NULL);
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
      IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
      AND_HARD_REG_SET (used, stack_hard_regs);
      GO_IF_HARD_REG_EQUAL (used, zero, skip);
      bitmap_set_bit (problem_data->stack_regs, i);
    skip:
      ;
    }
#endif

  /* We know that earlyclobber_regclass holds no more than
     N_REG_CLASSES elements.  See df_urec_check_earlyclobber.  */
  earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);

  EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
    {
      df_urec_bb_local_compute (dflow, bb_index);
    }

  VEC_free (int, heap, earlyclobber_regclass);
}

/* Initialize the solution vectors.  */

static void
df_urec_init (struct dataflow *dflow, bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);

      bitmap_copy (bb_info->out, bb_info->gen);
      bitmap_clear (bb_info->in);
    }
}

/* Or in the stack regs, hard regs and early clobber regs into the
   ur_in sets of all of the blocks.  */

static void
df_urec_local_finalize (struct dataflow *dflow, bitmap all_blocks)
{
  struct df *df = dflow->df;
  struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
  bitmap tmp = BITMAP_ALLOC (NULL);
  bitmap_iterator bi;
  unsigned int bb_index;
  struct df_urec_problem_data *problem_data
    = (struct df_urec_problem_data *) dflow->problem_data;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
      struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);

      if (bb_index != ENTRY_BLOCK && bb_index != EXIT_BLOCK)
        {
          if (problem_data->earlyclobbers_found)
            bitmap_ior_into (bb_info->in, bb_info->earlyclobber);

#ifdef STACK_REGS
          /* We can not use the same stack register for an uninitialized
             pseudo-register and another living pseudo-register
             because if the uninitialized pseudo-register dies,
             the subsequent pass reg-stack will be confused (it will
             believe that the other register dies).  */
          bitmap_ior_into (bb_info->in, problem_data->stack_regs);
          bitmap_ior_into (bb_info->out, problem_data->stack_regs);
#endif
        }

      /* No register may reach a location where it is not used.  Thus
         we trim the rr result to the places where it is used.  */
      bitmap_and_into (bb_info->in, bb_lr_info->in);
      bitmap_and_into (bb_info->out, bb_lr_info->out);

      /* Hard registers may still stick in the ur_out set, but not
         be in the ur_in set, if their only mention was in a call
         in this block.  This is because a call kills in the lr
         problem but does not kill in the rr problem.  To clean
         this up, we execute the transfer function on the lr_in
         set and then use that to knock bits out of ur_out.  */
      bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
                            bb_info->kill);
      bitmap_and_into (bb_info->out, tmp);
    }

#ifdef STACK_REGS
  BITMAP_FREE (problem_data->stack_regs);
#endif
  BITMAP_FREE (tmp);
}

/* Confluence function that ignores fake edges.  */

static void
df_urec_confluence_n (struct dataflow *dflow, edge e)
{
  bitmap op1 = df_urec_get_bb_info (dflow, e->dest->index)->in;
  bitmap op2 = df_urec_get_bb_info (dflow, e->src->index)->out;

  if (e->flags & EDGE_FAKE)
    return;

  bitmap_ior_into (op1, op2);
}

/* Transfer function.  */

static bool
df_urec_transfer_function (struct dataflow *dflow, int bb_index)
{
  struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
  bitmap in = bb_info->in;
  bitmap out = bb_info->out;
  bitmap gen = bb_info->gen;
  bitmap kill = bb_info->kill;

  return bitmap_ior_and_compl (out, gen, in, kill);
}

/* Free all storage associated with the problem.  */

static void
df_urec_free (struct dataflow *dflow)
{
  if (dflow->block_info)
    {
      unsigned int i;

      for (i = 0; i < dflow->block_info_size; i++)
        {
          struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, i);
          if (bb_info)
            {
              BITMAP_FREE (bb_info->gen);
              BITMAP_FREE (bb_info->kill);
              BITMAP_FREE (bb_info->in);
              BITMAP_FREE (bb_info->out);
              BITMAP_FREE (bb_info->earlyclobber);
            }
        }

      free_alloc_pool (dflow->block_pool);

      dflow->block_info_size = 0;
      free (dflow->block_info);
      free (dflow->problem_data);
    }
  free (dflow);
}

/* Debugging info.  */

static void
df_urec_dump (struct dataflow *dflow, FILE *file)
{
  basic_block bb;

  if (!dflow->block_info)
    return;

  fprintf (file, "Undefined regs:\n");

  FOR_ALL_BB (bb)
    {
      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb->index);
      df_print_bb_index (bb, file);

      fprintf (file, "  in  \t");
      dump_bitmap (file, bb_info->in);
      fprintf (file, "  gen \t");
      dump_bitmap (file, bb_info->gen);
      fprintf (file, "  kill\t");
      dump_bitmap (file, bb_info->kill);
      fprintf (file, "  ec\t");
      dump_bitmap (file, bb_info->earlyclobber);
      fprintf (file, "  out \t");
      dump_bitmap (file, bb_info->out);
    }
}

/* All of the information associated with every instance of the problem.  */

static struct df_problem problem_UREC =
{
  DF_UREC,                    /* Problem id.  */
  DF_FORWARD,                 /* Direction.  */
  df_urec_alloc,              /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_urec_free_bb_info,       /* Free basic block info.  */
  df_urec_local_compute,      /* Local compute function.  */
  df_urec_init,               /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  df_urec_confluence_n,       /* Confluence operator n.  */
  df_urec_transfer_function,  /* Transfer function.  */
  df_urec_local_finalize,     /* Finalize function.  */
  df_urec_free,               /* Free all of the problem information.  */
  df_urec_dump,               /* Debugging.  */
  df_lr_add_problem,          /* Dependent problem.  */
  0                           /* Changeable flags.  */
};

/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

struct dataflow *
df_urec_add_problem (struct df *df, int flags)
{
  return df_add_problem (df, &problem_UREC, flags);
}

/*----------------------------------------------------------------------------
   CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS

   Link either the defs to the uses and / or the uses to the defs.

   These problems are set up like the other dataflow problems so that
   they nicely fit into the framework.  They are much simpler and only
   involve a single traversal of instructions and an examination of
   the reaching defs information (the dependent problem).
----------------------------------------------------------------------------*/

/* Create def-use or use-def chains.  */

static void
df_chain_alloc (struct dataflow *dflow,
                bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
                bitmap all_blocks ATTRIBUTE_UNUSED)
{
  struct df *df = dflow->df;
  unsigned int i;

  /* Wholesale destruction of the old chains.  */
  if (dflow->block_pool)
    free_alloc_pool (dflow->block_pool);

  dflow->block_pool = create_alloc_pool ("df_chain_chain_block pool",
                                         sizeof (struct df_link), 100);

  if (dflow->flags & DF_DU_CHAIN)
    {
      if (!df->def_info.refs_organized)
        df_reorganize_refs (&df->def_info);

      /* Clear out the pointers from the refs.  */
      for (i = 0; i < DF_DEFS_SIZE (df); i++)
        {
          struct df_ref *ref = df->def_info.refs[i];
          DF_REF_CHAIN (ref) = NULL;
        }
    }

  if (dflow->flags & DF_UD_CHAIN)
    {
      if (!df->use_info.refs_organized)
        df_reorganize_refs (&df->use_info);
      for (i = 0; i < DF_USES_SIZE (df); i++)
        {
          struct df_ref *ref = df->use_info.refs[i];
          DF_REF_CHAIN (ref) = NULL;
        }
    }
}

/* Reset all def_use and use_def chains in INSN.  */

static void
df_chain_insn_reset (struct dataflow *dflow, rtx insn)
{
  struct df *df = dflow->df;
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  struct df_ref *ref;

  if (uid < df->insns_size)
    insn_info = DF_INSN_UID_GET (df, uid);

  if (insn_info)
    {
      if (dflow->flags & DF_DU_CHAIN)
        {
          ref = insn_info->defs;
          while (ref)
            {
              DF_REF_CHAIN (ref) = NULL;
              ref = ref->next_ref;
            }
        }

      if (dflow->flags & DF_UD_CHAIN)
        {
          ref = insn_info->uses;
          while (ref)
            {
              DF_REF_CHAIN (ref) = NULL;
              ref = ref->next_ref;
            }
        }
    }
}

/* Reset all def_use and use_def chains in basic block.  */

static void
df_chain_bb_reset (struct dataflow *dflow, unsigned int bb_index)
{
  struct df *df = dflow->df;
  rtx insn;
  basic_block bb = BASIC_BLOCK (bb_index);

  /* Someone deleted the basic block out from under us.  */
  if (!bb)
    return;

  FOR_BB_INSNS (bb, insn)
    {
      if (INSN_P (insn))
        {
          /* Reset the chains of the refs within INSN.  */
          df_chain_insn_reset (dflow, insn);
        }
    }

  /* Get rid of any chains in artificial uses or defs.  */
  if (dflow->flags & DF_DU_CHAIN)
    {
      struct df_ref *def;
      def = df_get_artificial_defs (df, bb_index);
      while (def)
        {
          DF_REF_CHAIN (def) = NULL;
          def = def->next_ref;
        }
    }

  if (dflow->flags & DF_UD_CHAIN)
    {
      struct df_ref *use;
      use = df_get_artificial_uses (df, bb_index);
      while (use)
        {
          DF_REF_CHAIN (use) = NULL;
          use = use->next_ref;
        }
    }
}

/* Reset all of the chains when the set of basic blocks changes.  */

static void
df_chain_reset (struct dataflow *dflow, bitmap blocks_to_clear)
{
  bitmap_iterator bi;
  unsigned int bb_index;

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_clear, 0, bb_index, bi)
    {
      df_chain_bb_reset (dflow, bb_index);
    }

  free_alloc_pool (dflow->block_pool);
  dflow->block_pool = NULL;
}

/* Create the chains for a list of USEs.  */

static void
df_chain_create_bb_process_use (struct dataflow *dflow,
                                bitmap local_rd,
                                struct df_ref *use,
                                enum df_ref_flags top_flag)
{
  struct df *df = dflow->df;
  bitmap_iterator bi;
  unsigned int def_index;

  while (use)
    {
      /* Do not want to go through this for an uninitialized var.  */
      unsigned int uregno = DF_REF_REGNO (use);
      int count = DF_REG_DEF_GET (df, uregno)->n_refs;
      if (count)
        {
          if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
            {
              unsigned int first_index = DF_REG_DEF_GET (df, uregno)->begin;
              unsigned int last_index = first_index + count - 1;

              EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
                {
                  struct df_ref *def;
                  if (def_index > last_index)
                    break;

                  def = DF_DEFS_GET (df, def_index);
                  if (dflow->flags & DF_DU_CHAIN)
                    df_chain_create (dflow, def, use);
                  if (dflow->flags & DF_UD_CHAIN)
                    df_chain_create (dflow, use, def);
                }
            }
        }
      use = use->next_ref;
    }
}

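/* Illustrative note (not in the original sources): LOCAL_RD is indexed by
   def id, and after df_reorganize_refs all defs of a given regno occupy a
   contiguous id range [begin, begin + n_refs - 1].  So for a use of, say,
   regno 42 with begin = 100 and n_refs = 3, the walk above only inspects
   bits 100..102 of LOCAL_RD and links the use to whichever of those three
   defs still reach this program point.  The concrete numbers are made up
   for the example.  */
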
/* Reset the storage pool that the def-use or use-def chains have been
   allocated in.  We do not need to readjust the pointers in the refs;
   these have already been cleaned out.  */

/* Create chains from reaching defs bitmaps for basic block BB.  */

static void
df_chain_create_bb (struct dataflow *dflow,
                    struct dataflow *rd_dflow,
                    unsigned int bb_index)
{
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (rd_dflow, bb_index);
  rtx insn;
  bitmap cpy = BITMAP_ALLOC (NULL);
  struct df *df = dflow->df;
  struct df_ref *def;

  bitmap_copy (cpy, bb_info->in);

  /* Since we are going forwards, process the artificial uses first
     then the artificial defs second.  */

  /* Create the chains for the artificial uses from the EH_USES at the
     beginning of the block.  */
  df_chain_create_bb_process_use (dflow, cpy,
                                  df_get_artificial_uses (df, bb->index),
                                  DF_REF_AT_TOP);

  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
      {
        unsigned int dregno = DF_REF_REGNO (def);
        if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
          bitmap_clear_range (cpy,
                              DF_REG_DEF_GET (df, dregno)->begin,
                              DF_REG_DEF_GET (df, dregno)->n_refs);
        bitmap_set_bit (cpy, DF_REF_ID (def));
      }

  /* Process the regular instructions next.  */
  FOR_BB_INSNS (bb, insn)
    {
      unsigned int uid = INSN_UID (insn);

      if (!INSN_P (insn))
        continue;

      /* Now scan the uses and link them up with the defs that remain
         in the cpy vector.  */
      df_chain_create_bb_process_use (dflow, cpy,
                                      DF_INSN_UID_USES (df, uid), 0);

      /* Since we are going forwards, process the defs second.  This
         pass only changes the bits in cpy.  */
      for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
        {
          unsigned int dregno = DF_REF_REGNO (def);
          if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
            bitmap_clear_range (cpy,
                                DF_REG_DEF_GET (df, dregno)->begin,
                                DF_REG_DEF_GET (df, dregno)->n_refs);
          if (!(DF_REF_FLAGS (def)
                & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
            bitmap_set_bit (cpy, DF_REF_ID (def));
        }
    }

  /* Create the chains for the artificial uses of the hard registers
     at the end of the block.  */
  df_chain_create_bb_process_use (dflow, cpy,
                                  df_get_artificial_uses (df, bb->index), 0);

  BITMAP_FREE (cpy);
}

/* Create def-use chains from the reaching defs bitmaps for the basic
   blocks in ALL_BLOCKS.  */

static void
df_chain_finalize (struct dataflow *dflow, bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;
  struct df *df = dflow->df;
  struct dataflow *rd_dflow = df->problems_by_index[DF_RD];

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      df_chain_create_bb (dflow, rd_dflow, bb_index);
    }
}

/* Free all storage associated with the problem.  */

static void
df_chain_free (struct dataflow *dflow)
{
  free_alloc_pool (dflow->block_pool);
  free (dflow);
}

/* Debugging info.  */

static void
df_chains_dump (struct dataflow *dflow, FILE *file)
{
  struct df *df = dflow->df;
  unsigned int j;

  if (dflow->flags & DF_DU_CHAIN)
    {
      fprintf (file, "Def-use chains:\n");
      for (j = 0; j < df->def_info.bitmap_size; j++)
        {
          struct df_ref *def = DF_DEFS_GET (df, j);
          if (def)
            {
              fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
                       j, DF_REF_BBNO (def),
                       DF_REF_INSN (def)
                       ? DF_INSN_LUID (df, DF_REF_INSN (def))
                       : -1,
                       DF_REF_INSN (def) ? DF_REF_INSN_UID (def) : -1,
                       DF_REF_REGNO (def));
              if (def->flags & DF_REF_READ_WRITE)
                fprintf (file, "read/write ");
              df_chain_dump (DF_REF_CHAIN (def), file);
              fprintf (file, "\n");
            }
        }
    }

  if (dflow->flags & DF_UD_CHAIN)
    {
      fprintf (file, "Use-def chains:\n");
      for (j = 0; j < df->use_info.bitmap_size; j++)
        {
          struct df_ref *use = DF_USES_GET (df, j);
          if (use)
            {
              fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
                       j, DF_REF_BBNO (use),
                       DF_REF_INSN (use)
                       ? DF_INSN_LUID (df, DF_REF_INSN (use))
                       : -1,
                       DF_REF_INSN (DF_USES_GET (df, j))
                       ? DF_REF_INSN_UID (DF_USES_GET (df, j))
                       : -1,
                       DF_REF_REGNO (use));
              if (use->flags & DF_REF_READ_WRITE)
                fprintf (file, "read/write ");
              if (use->flags & DF_REF_STRIPPED)
                fprintf (file, "stripped ");
              if (use->flags & DF_REF_IN_NOTE)
                fprintf (file, "note ");
              df_chain_dump (DF_REF_CHAIN (use), file);
              fprintf (file, "\n");
            }
        }
    }
}

static struct df_problem problem_CHAIN =
{
  DF_CHAIN,                   /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_chain_alloc,             /* Allocate the problem specific data.  */
  df_chain_reset,             /* Reset global information.  */
  NULL,                       /* Free basic block info.  */
  NULL,                       /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  df_chain_finalize,          /* Finalize function.  */
  df_chain_free,              /* Free all of the problem information.  */
  df_chains_dump,             /* Debugging.  */
  df_rd_add_problem,          /* Dependent problem.  */
  0                           /* Changeable flags.  */
};

/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

struct dataflow *
df_chain_add_problem (struct df *df, int flags)
{
  return df_add_problem (df, &problem_CHAIN, flags);
}

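/* Illustrative sketch (not part of the original sources): once the chain
   problem has been added with the DF_DU_CHAIN flag and the analysis has
   been run, every def carries a linked list of the uses it reaches, which
   a client could walk roughly like this:

     struct df_link *link;
     for (link = DF_REF_CHAIN (some_def); link; link = link->next)
       note_reached_use (link->ref);

   Only DF_REF_CHAIN and struct df_link come from this file; some_def and
   note_reached_use are hypothetical names, and the driver calls needed to
   trigger the analysis are assumed.  */
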
/*----------------------------------------------------------------------------
   REGISTER INFORMATION

   This pass properly computes REG_DEAD and REG_UNUSED notes.

   If the DF_RI_LIFE flag is set the following vectors containing
   information about register usage are properly set: REG_N_REFS,
   REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH, REG_N_CALLS_CROSSED,
   REG_N_THROWING_CALLS_CROSSED and REG_BASIC_BLOCK.

----------------------------------------------------------------------------*/

#ifdef REG_DEAD_DEBUGGING
static void
print_note (char *prefix, rtx insn, rtx note)
{
  fprintf (stderr, "%s %d ", prefix, INSN_UID (insn));
  print_rtl (stderr, note);
  fprintf (stderr, "\n");
}
#endif

/* Allocate the lifetime information.  */

static void
df_ri_alloc (struct dataflow *dflow,
             bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
             bitmap all_blocks ATTRIBUTE_UNUSED)
{
  struct df *df = dflow->df;

  if (dflow->flags & DF_RI_LIFE)
    {
      int i;

      max_regno = max_reg_num ();
      allocate_reg_info (max_regno, FALSE, FALSE);

      /* Reset all the data we'll collect.  */
      for (i = 0; i < max_regno; i++)
        {
          REG_N_SETS (i) = DF_REG_DEF_COUNT (df, i);
          REG_N_REFS (i) = DF_REG_USE_COUNT (df, i) + REG_N_SETS (i);
          REG_N_DEATHS (i) = 0;
          REG_N_CALLS_CROSSED (i) = 0;
          REG_N_THROWING_CALLS_CROSSED (i) = 0;
          REG_LIVE_LENGTH (i) = 0;
          REG_FREQ (i) = 0;
          REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
        }
    }
}

/* After reg-stack, the x86 floating point stack regs are difficult to
   analyze because of all of the pushes, pops and rotations.  Thus, we
   just leave the notes alone.  */

static bool
df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED)
{
#ifdef STACK_REGS
  return (regstack_completed
          && IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG));
#else
  return false;
#endif
}

/* Remove all of the REG_DEAD or REG_UNUSED notes from INSN.  */

static void
df_kill_notes (rtx insn, int flags)
{
  rtx *pprev = &REG_NOTES (insn);
  rtx link = *pprev;

  while (link)
    {
      switch (REG_NOTE_KIND (link))
        {
        case REG_DEAD:
          if (flags & DF_RI_LIFE)
            if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
              REG_N_DEATHS (REGNO (XEXP (link, 0)))++;

          /* Fallthru.  */
        case REG_UNUSED:
          if (!df_ignore_stack_reg (REGNO (XEXP (link, 0))))
            {
              rtx next = XEXP (link, 1);
#ifdef REG_DEAD_DEBUGGING
              print_note ("deleting: ", insn, link);
#endif
              free_EXPR_LIST_node (link);
              *pprev = link = next;
            }
          else
            {
              pprev = &XEXP (link, 1);
              link = *pprev;
            }
          break;

        default:
          pprev = &XEXP (link, 1);
          link = *pprev;
          break;
        }
    }
}

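/* Background note (illustrative, not from the original sources): a
   REG_DEAD note on an insn records the last use of a register, while a
   REG_UNUSED note records a def whose value is never used afterwards.
   For example, in

     (insn 12 ... (set (reg:SI 60) (plus:SI (reg:SI 61) (reg:SI 62)))
        (expr_list:REG_DEAD (reg:SI 61) (nil)))

   register 61 dies at insn 12.  df_kill_notes strips the stale notes so
   that df_ri_bb_compute below can recreate them from the current
   dataflow solution.  */
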
/* Set the REG_UNUSED notes for the multiword hardreg defs in INSN
   based on the bits in LIVE.  Do not generate notes for registers in
   artificial uses.  DO_NOT_GEN is updated so that REG_DEAD notes are
   not generated if the reg is both read and written by the
   instruction.  */

static void
df_set_unused_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
                            bitmap live, bitmap do_not_gen,
                            bitmap artificial_uses, int flags)
{
  bool all_dead = true;
  struct df_link *regs = mws->regs;
  unsigned int regno = DF_REF_REGNO (regs->ref);

#ifdef REG_DEAD_DEBUGGING
  fprintf (stderr, "mw unused looking at %d\n", DF_REF_REGNO (regs->ref));
  df_ref_debug (regs->ref, stderr);
#endif

  while (regs)
    {
      unsigned int regno = DF_REF_REGNO (regs->ref);
      if ((bitmap_bit_p (live, regno))
          || bitmap_bit_p (artificial_uses, regno))
        {
          all_dead = false;
          break;
        }
      regs = regs->next;
    }

  if (all_dead)
    {
      struct df_link *regs = mws->regs;
      rtx note = alloc_EXPR_LIST (REG_UNUSED, *DF_REF_LOC (regs->ref),
                                  REG_NOTES (insn));
      REG_NOTES (insn) = note;
#ifdef REG_DEAD_DEBUGGING
      print_note ("adding 1: ", insn, note);
#endif
      bitmap_set_bit (do_not_gen, regno);
      /* Only do this if the value is totally dead.  */
      if (flags & DF_RI_LIFE)
        {
          REG_N_DEATHS (regno)++;
          REG_LIVE_LENGTH (regno)++;
        }
    }
  else
    {
      struct df_link *regs = mws->regs;
      while (regs)
        {
          struct df_ref *ref = regs->ref;

          regno = DF_REF_REGNO (ref);
          if ((!bitmap_bit_p (live, regno))
              && (!bitmap_bit_p (artificial_uses, regno)))
            {
              rtx note = alloc_EXPR_LIST (REG_UNUSED, regno_reg_rtx[regno],
                                          REG_NOTES (insn));
              REG_NOTES (insn) = note;
#ifdef REG_DEAD_DEBUGGING
              print_note ("adding 2: ", insn, note);
#endif
              bitmap_set_bit (do_not_gen, regno);
            }
          regs = regs->next;
        }
    }
}

/* Set the REG_DEAD notes for the multiword hardreg use in INSN based
   on the bits in LIVE.  DO_NOT_GEN is used to keep REG_DEAD notes
   from being set if the instruction both reads and writes the
   register.  */

static void
df_set_dead_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
                          bitmap live, bitmap do_not_gen,
                          bitmap artificial_uses, int flags)
{
  bool all_dead = true;
  struct df_link *regs = mws->regs;
  unsigned int regno = DF_REF_REGNO (regs->ref);

#ifdef REG_DEAD_DEBUGGING
  fprintf (stderr, "mw looking at %d\n", DF_REF_REGNO (regs->ref));
  df_ref_debug (regs->ref, stderr);
#endif

  while (regs)
    {
      unsigned int regno = DF_REF_REGNO (regs->ref);
      if ((bitmap_bit_p (live, regno))
          || bitmap_bit_p (artificial_uses, regno))
        {
          all_dead = false;
          break;
        }
      regs = regs->next;
    }

  if (all_dead)
    {
      if (!bitmap_bit_p (do_not_gen, regno))
        {
          /* Add a dead note for the entire multi word register.  */
          struct df_link *regs = mws->regs;
          rtx note = alloc_EXPR_LIST (REG_DEAD, *DF_REF_LOC (regs->ref),
                                      REG_NOTES (insn));
          REG_NOTES (insn) = note;
#ifdef REG_DEAD_DEBUGGING
          print_note ("adding 1: ", insn, note);
#endif

          if (flags & DF_RI_LIFE)
            {
              struct df_link *regs = mws->regs;
              while (regs)
                {
                  struct df_ref *ref = regs->ref;
                  regno = DF_REF_REGNO (ref);
                  REG_N_DEATHS (regno)++;
                  regs = regs->next;
                }
            }
        }
    }
  else
    {
      struct df_link *regs = mws->regs;
      while (regs)
        {
          struct df_ref *ref = regs->ref;

          regno = DF_REF_REGNO (ref);
          if ((!bitmap_bit_p (live, regno))
              && (!bitmap_bit_p (artificial_uses, regno))
              && (!bitmap_bit_p (do_not_gen, regno)))
            {
              rtx note = alloc_EXPR_LIST (REG_DEAD, regno_reg_rtx[regno],
                                          REG_NOTES (insn));
              REG_NOTES (insn) = note;
              if (flags & DF_RI_LIFE)
                REG_N_DEATHS (regno)++;
#ifdef REG_DEAD_DEBUGGING
              print_note ("adding 2: ", insn, note);
#endif
            }
          regs = regs->next;
        }
    }
}

/* Create a REG_UNUSED note if necessary for DEF in INSN, updating LIVE
   and DO_NOT_GEN.  Do not generate notes for registers in artificial
   uses.  */

static void
df_create_unused_note (basic_block bb, rtx insn, struct df_ref *def,
                       bitmap live, bitmap do_not_gen, bitmap artificial_uses,
                       bitmap local_live, bitmap local_processed,
                       int flags, int luid)
{
  unsigned int dregno = DF_REF_REGNO (def);

#ifdef REG_DEAD_DEBUGGING
  fprintf (stderr, "  regular looking at def ");
  df_ref_debug (def, stderr);
#endif

  if (bitmap_bit_p (live, dregno))
    {
      if (flags & DF_RI_LIFE)
        {
          /* If we have seen this regno, then it has already been
             processed correctly with the per insn increment.  If we
             have not seen it we need to add the length from here to
             the end of the block to the live length.  */
          if (bitmap_bit_p (local_processed, dregno))
            {
              if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
                bitmap_clear_bit (local_live, dregno);
            }
          else
            {
              bitmap_set_bit (local_processed, dregno);
              REG_LIVE_LENGTH (dregno) += luid;
            }
        }
    }
  else if ((!(DF_REF_FLAGS (def) & DF_REF_MW_HARDREG))
           && (!bitmap_bit_p (artificial_uses, dregno))
           && (!df_ignore_stack_reg (dregno)))
    {
      rtx reg = GET_CODE (*DF_REF_LOC (def)) == SUBREG
        ? SUBREG_REG (*DF_REF_LOC (def)) : *DF_REF_LOC (def);
      rtx note = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
      REG_NOTES (insn) = note;
#ifdef REG_DEAD_DEBUGGING
      print_note ("adding 3: ", insn, note);
#endif
      if (flags & DF_RI_LIFE)
        {
          REG_N_DEATHS (dregno)++;
          REG_LIVE_LENGTH (dregno)++;
        }
    }

  if ((flags & DF_RI_LIFE) && (dregno >= FIRST_PSEUDO_REGISTER))
    {
      REG_FREQ (dregno) += REG_FREQ_FROM_BB (bb);
      if (REG_BASIC_BLOCK (dregno) == REG_BLOCK_UNKNOWN)
        REG_BASIC_BLOCK (dregno) = bb->index;
      else if (REG_BASIC_BLOCK (dregno) != bb->index)
        REG_BASIC_BLOCK (dregno) = REG_BLOCK_GLOBAL;
    }

  if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER + DF_REF_MAY_CLOBBER)))
    bitmap_set_bit (do_not_gen, dregno);

  /* Kill this register if it is not a subreg store.  */
  if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
    bitmap_clear_bit (live, dregno);
}

/* Recompute the REG_DEAD and REG_UNUSED notes and compute register
   info: lifetime, bb, and number of defs and uses for basic block
   BB.  The three bitvectors are scratch regs used here.  */

static void
df_ri_bb_compute (struct dataflow *dflow, unsigned int bb_index,
                  bitmap live, bitmap do_not_gen, bitmap artificial_uses,
                  bitmap local_live, bitmap local_processed,
                  bitmap setjumps_crossed)
{
  struct df *df = dflow->df;
  basic_block bb = BASIC_BLOCK (bb_index);
  rtx insn;
  struct df_ref *def;
  struct df_ref *use;
  int luid = 0;

  bitmap_copy (live, df_get_live_out (df, bb));
  bitmap_clear (artificial_uses);

  if (dflow->flags & DF_RI_LIFE)
    {
      /* Process the regs live at the end of the block.  Mark them as
         not local to any one basic block.  */
      bitmap_iterator bi;
      unsigned int regno;
      EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
        REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
    }

  /* Process the artificial defs and uses at the bottom of the block
     to begin processing.  */
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
      bitmap_clear_bit (live, DF_REF_REGNO (def));

  for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
    if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
      {
        unsigned int regno = DF_REF_REGNO (use);
        bitmap_set_bit (live, regno);

        /* Notes are not generated for any of the artificial registers
           at the bottom of the block.  */
        bitmap_set_bit (artificial_uses, regno);
      }

  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      unsigned int uid = INSN_UID (insn);
      unsigned int regno;
      bitmap_iterator bi;
      struct df_mw_hardreg *mws;

      if (!INSN_P (insn))
        continue;

      if (dflow->flags & DF_RI_LIFE)
        {
          /* Increment the live_length for all of the registers that
             are referenced in this block and live at this
             particular point.  */
          EXECUTE_IF_SET_IN_BITMAP (local_live, 0, regno, bi)
            {
              REG_LIVE_LENGTH (regno)++;
            }
        }
      luid++;

      bitmap_clear (do_not_gen);
      df_kill_notes (insn, dflow->flags);

      /* Process the defs.  */
      if (CALL_P (insn))
        {
          if (dflow->flags & DF_RI_LIFE)
            {
              bool can_throw = can_throw_internal (insn);
              bool set_jump = (find_reg_note (insn, REG_SETJMP, NULL) != NULL);
              EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
                {
                  REG_N_CALLS_CROSSED (regno)++;
                  if (can_throw)
                    REG_N_THROWING_CALLS_CROSSED (regno)++;

                  /* We have a problem with any pseudoreg that lives
                     across the setjmp.  ANSI says that if a user
                     variable does not change in value between the
                     setjmp and the longjmp, then the longjmp
                     preserves it.  This includes longjmp from a place
                     where the pseudo appears dead.  (In principle,
                     the value still exists if it is in scope.)  If
                     the pseudo goes in a hard reg, some other value
                     may occupy that hard reg where this pseudo is
                     dead, thus clobbering the pseudo.  Conclusion:
                     such a pseudo must not go in a hard reg.  */
                  if (set_jump && regno >= FIRST_PSEUDO_REGISTER)
                    bitmap_set_bit (setjumps_crossed, regno);
                }
            }

          /* We only care about real sets for calls.  Clobbers only
             may clobber and cannot be depended on.  */
          for (mws = DF_INSN_UID_MWS (df, uid); mws; mws = mws->next)
            {
              if ((mws->type == DF_REF_REG_DEF)
                  && !df_ignore_stack_reg (REGNO (mws->mw_reg)))
                df_set_unused_notes_for_mw (insn, mws, live, do_not_gen,
                                            artificial_uses, dflow->flags);
            }

          /* All of the defs except the return value are some sort of
             clobber.  This code is for the return.  */
          for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
            if (!(DF_REF_FLAGS (def)
                  & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
              df_create_unused_note (bb, insn, def, live, do_not_gen,
                                     artificial_uses, local_live,
                                     local_processed, dflow->flags, luid);
        }
      else
        {
          /* Regular insn: process every def.  */
          for (mws = DF_INSN_UID_MWS (df, uid); mws; mws = mws->next)
            {
              if (mws->type == DF_REF_REG_DEF)
                df_set_unused_notes_for_mw (insn, mws, live, do_not_gen,
                                            artificial_uses, dflow->flags);
            }

          for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
            df_create_unused_note (bb, insn, def, live, do_not_gen,
                                   artificial_uses, local_live,
                                   local_processed, dflow->flags, luid);
        }

      /* Process the uses.  */
      for (mws = DF_INSN_UID_MWS (df, uid); mws; mws = mws->next)
        {
          if ((mws->type != DF_REF_REG_DEF)
              && !df_ignore_stack_reg (REGNO (mws->mw_reg)))
            df_set_dead_notes_for_mw (insn, mws, live, do_not_gen,
                                      artificial_uses, dflow->flags);
        }

      for (use = DF_INSN_UID_USES (df, uid); use; use = use->next_ref)
        {
          unsigned int uregno = DF_REF_REGNO (use);

          if ((dflow->flags & DF_RI_LIFE)
              && (uregno >= FIRST_PSEUDO_REGISTER))
            {
              REG_FREQ (uregno) += REG_FREQ_FROM_BB (bb);
              if (REG_BASIC_BLOCK (uregno) == REG_BLOCK_UNKNOWN)
                REG_BASIC_BLOCK (uregno) = bb->index;
              else if (REG_BASIC_BLOCK (uregno) != bb->index)
                REG_BASIC_BLOCK (uregno) = REG_BLOCK_GLOBAL;
            }

#ifdef REG_DEAD_DEBUGGING
          fprintf (stderr, "  regular looking at use ");
          df_ref_debug (use, stderr);
#endif
          if (!bitmap_bit_p (live, uregno))
            {
              if ((!(DF_REF_FLAGS (use) & DF_REF_MW_HARDREG))
                  && (!bitmap_bit_p (do_not_gen, uregno))
                  && (!bitmap_bit_p (artificial_uses, uregno))
                  && (!(DF_REF_FLAGS (use) & DF_REF_READ_WRITE))
                  && (!df_ignore_stack_reg (uregno)))
                {
                  rtx reg = GET_CODE (*DF_REF_LOC (use)) == SUBREG
                    ? SUBREG_REG (*DF_REF_LOC (use)) : *DF_REF_LOC (use);
                  rtx note = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
                  REG_NOTES (insn) = note;
                  if (dflow->flags & DF_RI_LIFE)
                    REG_N_DEATHS (uregno)++;
#ifdef REG_DEAD_DEBUGGING
                  print_note ("adding 4: ", insn, note);
#endif
                }
              /* This register is now live.  */
              bitmap_set_bit (live, uregno);

              if (dflow->flags & DF_RI_LIFE)
                {
                  /* If we have seen this regno, then it has already
                     been processed correctly with the per insn
                     increment.  If we have not seen it we set the bit
                     so that it begins to get processed locally.  Note
                     that we don't even get here if the variable was
                     live at the end of the block since just a ref
                     inside the block does not affect the
                     calculations.  */
                  REG_LIVE_LENGTH (uregno)++;
                  bitmap_set_bit (local_live, uregno);
                  bitmap_set_bit (local_processed, uregno);
                }
            }
        }
    }

  if (dflow->flags & DF_RI_LIFE)
    {
      /* Add the length of the block to all of the registers that were
         not referenced, but still live in this block.  */
      bitmap_iterator bi;
      unsigned int regno;
      bitmap_and_compl_into (live, local_processed);
      EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
        {
          REG_LIVE_LENGTH (regno) += luid;
        }
      bitmap_clear (local_processed);
      bitmap_clear (local_live);
    }
}

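/* Worked example (illustrative only): REG_LIVE_LENGTH approximates, per
   register, the number of insns over which the register is live.  The
   walk above goes backwards through the block with luid counting the
   insns already processed.  A register in local_live (one whose last use
   was seen in this block) gets +1 for every further insn until its def is
   reached; a register whose def is reached without any later reference in
   the block gets "+= luid" in df_create_unused_note, i.e. the distance
   from the def to the block end; and registers that stay live across the
   whole block without being referenced get the whole block length added
   in the loop just above.  */
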
/* Compute register info: lifetime, bb, and number of defs and uses.  */

static void
df_ri_compute (struct dataflow *dflow, bitmap all_blocks ATTRIBUTE_UNUSED,
               bitmap blocks_to_scan)
{
  unsigned int bb_index;
  bitmap_iterator bi;
  bitmap live = BITMAP_ALLOC (NULL);
  bitmap do_not_gen = BITMAP_ALLOC (NULL);
  bitmap artificial_uses = BITMAP_ALLOC (NULL);
  bitmap local_live = NULL;
  bitmap local_processed = NULL;
  bitmap setjumps_crossed = NULL;

  if (dflow->flags & DF_RI_LIFE)
    {
      local_live = BITMAP_ALLOC (NULL);
      local_processed = BITMAP_ALLOC (NULL);
      setjumps_crossed = BITMAP_ALLOC (NULL);
    }

#ifdef REG_DEAD_DEBUGGING
  df_lr_dump (dflow->df->problems_by_index[DF_LR], stderr);
  print_rtl_with_bb (stderr, get_insns ());
#endif

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_scan, 0, bb_index, bi)
    {
      df_ri_bb_compute (dflow, bb_index, live, do_not_gen, artificial_uses,
                        local_live, local_processed, setjumps_crossed);
    }

  BITMAP_FREE (live);
  BITMAP_FREE (do_not_gen);
  BITMAP_FREE (artificial_uses);
  if (dflow->flags & DF_RI_LIFE)
    {
      bitmap_iterator bi;
      unsigned int regno;

      /* See the setjump comment in df_ri_bb_compute.  */
      EXECUTE_IF_SET_IN_BITMAP (setjumps_crossed, 0, regno, bi)
        {
          REG_BASIC_BLOCK (regno) = REG_BLOCK_UNKNOWN;
          REG_LIVE_LENGTH (regno) = -1;
        }

      BITMAP_FREE (local_live);
      BITMAP_FREE (local_processed);
      BITMAP_FREE (setjumps_crossed);
    }
}

/* Free all storage associated with the problem.  */

static void
df_ri_free (struct dataflow *dflow)
{
  free (dflow->problem_data);
  free (dflow);
}

/* Debugging info.  */

static void
df_ri_dump (struct dataflow *dflow, FILE *file)
{
  print_rtl_with_bb (file, get_insns ());

  if (dflow->flags & DF_RI_LIFE)
    {
      fprintf (file, "Register info:\n");
      dump_flow_info (file, -1);
    }
}

/* All of the information associated with every instance of the problem.  */

static struct df_problem problem_RI =
{
  DF_RI,                      /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_ri_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  NULL,                       /* Free basic block info.  */
  df_ri_compute,              /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_ri_free,                 /* Free all of the problem information.  */
  df_ri_dump,                 /* Debugging.  */

  /* Technically this is only dependent on the live registers problem,
     but it will also make use of the information produced when one of
     the uninitialized register problems (UR, UREC) is run.  */
  df_lr_add_problem,          /* Dependent problem.  */
  0                           /* Changeable flags.  */
};

/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

struct dataflow *
df_ri_add_problem (struct df *df, int flags)
{
  return df_add_problem (df, &problem_RI, flags);
}
