/* Standard problems for dataflow support routines.
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.
   Originally contributed by Michael P. Hayes
             (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
             and Kenneth Zadeck (zadeck@naturalbridge.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "coretypes.h"
#include "insn-config.h"
#include "alloc-pool.h"
#include "hard-reg-set.h"
#include "basic-block.h"

#define REG_DEAD_DEBUGGING
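/* DF_SPARSE_THRESHOLD below bounds the number of references (defs or
   uses) of a single register for which the dense, per-reference
   bitmaps (use_sites / def_sites) are used to build kill sets; above
   the threshold only one bit per register is recorded in the sparse
   kill sets and whole ranges of reference ids are removed with
   bitmap_clear_range instead.  */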
#define DF_SPARSE_THRESHOLD 32
static bitmap seen_in_block = NULL;
static bitmap seen_in_insn = NULL;

static void df_ri_dump (struct dataflow *, FILE *);
/*----------------------------------------------------------------------------
   Public access functions for the dataflow problems.
----------------------------------------------------------------------------*/
/* Create a du or ud chain from SRC to DST and link it into SRC.  */

df_chain_create (struct dataflow *dflow, struct df_ref *src, struct df_ref *dst)

  struct df_link *head = DF_REF_CHAIN (src);
  struct df_link *link = pool_alloc (dflow->block_pool);

  DF_REF_CHAIN (src) = link;
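/* For illustration (the insns and register numbers are invented): in

       insn 10:  r3 = r1 + r2          def of r3
       insn 12:  r4 = r3 * 2           use of r3

   a du chain built from the def of r3 links that def to each of its
   uses (here the one in insn 12); a ud chain goes the other way,
   linking a use back to the defs that can reach it.  df_chain_create
   above adds one such link at the head of SRC's chain.  */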
/* Delete a du or ud chain for REF.  If LINK is NULL, delete all
   chains for ref and check to see if the reverse chains can also be
   deleted.  If LINK is not NULL it must be a link off of ref.  In
   this case, the other end is not deleted.  */

df_chain_unlink (struct dataflow *dflow, struct df_ref *ref, struct df_link *link)

  struct df_link *chain = DF_REF_CHAIN (ref);

      /* Link was the first element in the chain.  */
      DF_REF_CHAIN (ref) = link->next;

          /* Link is an internal element in the chain.  */
          struct df_link *prev = chain;

              prev->next = chain->next;

      pool_free (dflow->block_pool, link);

  /* If chain is NULL here, it was because of a recursive call
     when the other flavor of chains was not built.  Just run thru
     the entire chain calling the other side and then deleting the
     link.  */

      struct df_link *next = chain->next;
      /* Delete the other side if it exists.  */
      df_chain_unlink (dflow, chain->ref, chain);
/* Copy the du or ud chain starting at FROM_REF and attach it to
   TO_REF.  */

df_chain_copy (struct dataflow *dflow,
	       struct df_ref *to_ref,
	       struct df_link *from_ref)

      df_chain_create (dflow, to_ref, from_ref->ref);
      from_ref = from_ref->next;
/* Get the live in set for BB no matter what problem happens to be
   defined.  */

df_get_live_in (struct df *df, basic_block bb)

  gcc_assert (df->problems_by_index[DF_LR]);

  if (df->problems_by_index[DF_UREC])
    return DF_RA_LIVE_IN (df, bb);
  else if (df->problems_by_index[DF_UR])
    return DF_LIVE_IN (df, bb);

  return DF_UPWARD_LIVE_IN (df, bb);
/* Get the live out set for BB no matter what problem happens to be
   defined.  */

df_get_live_out (struct df *df, basic_block bb)

  gcc_assert (df->problems_by_index[DF_LR]);

  if (df->problems_by_index[DF_UREC])
    return DF_RA_LIVE_OUT (df, bb);
  else if (df->problems_by_index[DF_UR])
    return DF_LIVE_OUT (df, bb);

  return DF_UPWARD_LIVE_OUT (df, bb);
/*----------------------------------------------------------------------------
----------------------------------------------------------------------------*/

/* Generic versions to get the void* version of the block info.  Only
   used inside the problem instance vectors.  */

/* Grow the bb_info array.  */
df_grow_bb_info (struct dataflow *dflow)

  unsigned int new_size = last_basic_block + 1;
  if (dflow->block_info_size < new_size)

      new_size += new_size / 4;
      dflow->block_info = xrealloc (dflow->block_info,
				    new_size * sizeof (void *));
      memset (dflow->block_info + dflow->block_info_size, 0,
	      (new_size - dflow->block_info_size) * sizeof (void *));
      dflow->block_info_size = new_size;
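/* Note that when the vector is too small it is grown by an extra 25%
   (new_size += new_size / 4) beyond what is immediately needed, so
   that repeated small increases in last_basic_block do not force a
   reallocation every time.  */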
/* Dump a def-use or use-def chain for REF to FILE.  */

df_chain_dump (struct df_link *link, FILE *file)

  fprintf (file, "{ ");
  for (; link; link = link->next)

      fprintf (file, "%c%d(bb %d insn %d) ",
	       DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
	       DF_REF_ID (link->ref),
	       DF_REF_BBNO (link->ref),
	       DF_REF_INSN (link->ref) ? DF_REF_INSN_UID (link->ref) : -1);
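/* With the format string above, a dumped chain looks roughly like

      { d14(bb 3 insn 27) u9(bb 5 insn 31) }

   where 'd' or 'u' marks a def or use, followed by the ref id, the
   basic block index and the insn uid (-1 for artificial refs that
   have no insn).  The particular numbers are only an example.  */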
/* Print some basic block info as part of df_dump.  */

df_print_bb_index (basic_block bb, FILE *file)

  fprintf (file, "( ");
  FOR_EACH_EDGE (e, ei, bb->preds)

      basic_block pred = e->src;
      fprintf (file, "%d ", pred->index);

  fprintf (file, ")->[%d]->( ", bb->index);
  FOR_EACH_EDGE (e, ei, bb->succs)

      basic_block succ = e->dest;
      fprintf (file, "%d ", succ->index);

  fprintf (file, ")\n");
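/* For a block 5 whose predecessors are blocks 2 and 3 and whose only
   successor is block 7, the code above prints "( 2 3 )->[5]->( 7 )".  */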
/* Return a bitmap for REGNO from the cache MAPS.  The bitmap is to
   contain COUNT bits starting at START.  These bitmaps are not to be
   changed since there is a cache of them.  */

df_ref_bitmap (bitmap *maps, unsigned int regno, int start, int count)

  bitmap ids = maps[regno];

      unsigned int end = start + count;
      ids = BITMAP_ALLOC (NULL);

      for (i = start; i < end; i++)
	bitmap_set_bit (ids, i);
/* Make sure that the seen_in_insn and seen_in_block sbitmaps are set
   up.  */

  seen_in_block = BITMAP_ALLOC (NULL);
  seen_in_insn = BITMAP_ALLOC (NULL);

  BITMAP_FREE (seen_in_block);
  BITMAP_FREE (seen_in_insn);
/*----------------------------------------------------------------------------
   REACHING USES

   Find the locations in the function where each use site for a pseudo
   can reach backwards.  In and out bitvectors are built for each basic
   block.  The id field in the ref is used to index into these sets.
   See df.h for details.
----------------------------------------------------------------------------*/
/* This problem plays a large number of games for the sake of
   efficiency.

   1) The order of the bits in the bitvectors.  After the scanning
   phase, all of the uses are sorted.  All of the uses for the reg 0
   are first, followed by all uses for reg 1 and so on.

   2) There are two kill sets, one if the number of uses is less or
   equal to DF_SPARSE_THRESHOLD and another if it is greater.

   <= : There is a bitmap for each register, use_sites[N], that is
   built on demand.  This bitvector contains a 1 for each use of reg
   N.

   > : One level of indirection is used to keep from generating long
   strings of 1 bits in the kill sets.  Bitvectors that are indexed
   by the regnum are used to represent that there is a killing def
   for the register.  The confluence and transfer functions use
   these along with the bitmap_clear_range call to remove ranges of
   bits without actually generating a knockout vector.

   The kill and sparse_kill and the dense_invalidated_by_call and
   sparse_invalidated_by_call both play this game.  */
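/* A concrete (invented) example of the two representations: suppose
   reg 42 has its uses numbered 100..131 (begin = 100, n_refs = 32).
   Since 32 is not above DF_SPARSE_THRESHOLD, a killing def of reg 42
   simply ors use_sites[42] (bits 100..131) into the block's kill set.
   If reg 42 instead had 500 uses, the def would only set bit 42 in
   sparse_kill, and the confluence and transfer functions would later
   call bitmap_clear_range (tmp, begin, n_refs) to remove those uses
   without ever materializing the long run of 1 bits.  */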
/* Private data used to compute the solution for this problem.  These
   data structures are not accessible outside of this module.  */
struct df_ru_problem_data

  bitmap *use_sites;            /* Bitmap of uses for each pseudo.  */
  unsigned int use_sites_size;  /* Size of use_sites.  */
  /* The set of uses of regs invalidated by call.  */
  bitmap sparse_invalidated_by_call;
  /* The set of uses of regs invalidated by call for ru.  */
  bitmap dense_invalidated_by_call;
/* Get basic block info.  */

struct df_ru_bb_info *
df_ru_get_bb_info (struct dataflow *dflow, unsigned int index)

  return (struct df_ru_bb_info *) dflow->block_info[index];
/* Set basic block info.  */

df_ru_set_bb_info (struct dataflow *dflow, unsigned int index,
		   struct df_ru_bb_info *bb_info)

  dflow->block_info[index] = bb_info;
/* Free basic block info.  */

df_ru_free_bb_info (struct dataflow *dflow,
		    basic_block bb ATTRIBUTE_UNUSED,
		    void *vbb_info)

  struct df_ru_bb_info *bb_info = (struct df_ru_bb_info *) vbb_info;

      BITMAP_FREE (bb_info->kill);
      BITMAP_FREE (bb_info->sparse_kill);
      BITMAP_FREE (bb_info->gen);
      BITMAP_FREE (bb_info->in);
      BITMAP_FREE (bb_info->out);
      pool_free (dflow->block_pool, bb_info);
/* Allocate or reset bitmaps for DFLOW blocks.  The solution bits are
   not touched unless the block is new.  */

df_ru_alloc (struct dataflow *dflow,
	     bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
	     bitmap all_blocks)

  unsigned int bb_index;

  unsigned int reg_size = max_reg_num ();

  if (!dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_ru_block pool",
					   sizeof (struct df_ru_bb_info), 50);

  if (dflow->problem_data)

      struct df_ru_problem_data *problem_data
	= (struct df_ru_problem_data *) dflow->problem_data;

      for (i = 0; i < problem_data->use_sites_size; i++)

	  bitmap bm = problem_data->use_sites[i];

	      problem_data->use_sites[i] = NULL;

      if (problem_data->use_sites_size < reg_size)

	  problem_data->use_sites
	    = xrealloc (problem_data->use_sites, reg_size * sizeof (bitmap));
	  memset (problem_data->use_sites + problem_data->use_sites_size, 0,
		  (reg_size - problem_data->use_sites_size) * sizeof (bitmap));
	  problem_data->use_sites_size = reg_size;

      bitmap_clear (problem_data->sparse_invalidated_by_call);
      bitmap_clear (problem_data->dense_invalidated_by_call);

      struct df_ru_problem_data *problem_data = XNEW (struct df_ru_problem_data);
      dflow->problem_data = problem_data;

      problem_data->use_sites = XCNEWVEC (bitmap, reg_size);
      problem_data->use_sites_size = reg_size;
      problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
      problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);

  df_grow_bb_info (dflow);

  /* Because of the clustering of all def sites for the same pseudo,
     we have to process all of the blocks before doing the
     analysis.  */

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)

      struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);

	  bitmap_clear (bb_info->kill);
	  bitmap_clear (bb_info->sparse_kill);
	  bitmap_clear (bb_info->gen);

	  bb_info = (struct df_ru_bb_info *) pool_alloc (dflow->block_pool);
	  df_ru_set_bb_info (dflow, bb_index, bb_info);
	  bb_info->kill = BITMAP_ALLOC (NULL);
	  bb_info->sparse_kill = BITMAP_ALLOC (NULL);
	  bb_info->gen = BITMAP_ALLOC (NULL);
	  bb_info->in = BITMAP_ALLOC (NULL);
	  bb_info->out = BITMAP_ALLOC (NULL);
/* Process a list of DEFs for df_ru_bb_local_compute.  */

df_ru_bb_local_compute_process_def (struct dataflow *dflow,
				    struct df_ru_bb_info *bb_info,
				    struct df_ref *def,
				    enum df_ref_flags top_flag)

  struct df *df = dflow->df;

      if ((top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
	  /* If the def is to only part of the reg, it is as if it did
	     not happen, since some of the bits may get thru.  */
	  && (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL)))

	  unsigned int regno = DF_REF_REGNO (def);
	  unsigned int begin = DF_REG_USE_GET (df, regno)->begin;
	  unsigned int n_uses = DF_REG_USE_GET (df, regno)->n_refs;
	  if (!bitmap_bit_p (seen_in_block, regno))

	      /* The first def for regno in the insn, causes the kill
		 info to be generated.  Do not modify the gen set
		 because the only values in it are the uses from here
		 to the top of the block and this def does not affect
		 them.  */
	      if (!bitmap_bit_p (seen_in_insn, regno))

		  if (n_uses > DF_SPARSE_THRESHOLD)
		    bitmap_set_bit (bb_info->sparse_kill, regno);

		      struct df_ru_problem_data *problem_data
			= (struct df_ru_problem_data *) dflow->problem_data;
		      bitmap uses
			= df_ref_bitmap (problem_data->use_sites, regno,
					 begin, n_uses);
		      bitmap_ior_into (bb_info->kill, uses);

	      bitmap_set_bit (seen_in_insn, regno);
/* Process a list of USEs for df_ru_bb_local_compute.  */

df_ru_bb_local_compute_process_use (struct df_ru_bb_info *bb_info,
				    struct df_ref *use,
				    enum df_ref_flags top_flag)

      if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))

	  /* Add use to set of gens in this BB unless we have seen a
	     def in a previous instruction.  */
	  unsigned int regno = DF_REF_REGNO (use);
	  if (!bitmap_bit_p (seen_in_block, regno))
	    bitmap_set_bit (bb_info->gen, DF_REF_ID (use));
/* Compute local reaching use (upward exposed use) info for basic
   block BB.  USE_INFO->REGS[R] caches the set of uses for register R.  */

df_ru_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)

  struct df *df = dflow->df;
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);

  /* Set when a def for regno is seen.  */
  bitmap_clear (seen_in_block);
  bitmap_clear (seen_in_insn);

  /* Variables defined in the prolog that are used by the exception
     handler.  */
  df_ru_bb_local_compute_process_use (bb_info,
				      df_get_artificial_uses (df, bb_index),
				      DF_REF_AT_TOP);

  df_ru_bb_local_compute_process_def (dflow, bb_info,
				      df_get_artificial_defs (df, bb_index),
				      DF_REF_AT_TOP);

  FOR_BB_INSNS (bb, insn)

      unsigned int uid = INSN_UID (insn);

      df_ru_bb_local_compute_process_use (bb_info,
					  DF_INSN_UID_USES (df, uid), 0);

      df_ru_bb_local_compute_process_def (dflow, bb_info,
					  DF_INSN_UID_DEFS (df, uid), 0);

      bitmap_ior_into (seen_in_block, seen_in_insn);
      bitmap_clear (seen_in_insn);

  /* Process the hardware registers that are always live.  */
  df_ru_bb_local_compute_process_use (bb_info,
				      df_get_artificial_uses (df, bb_index), 0);

  df_ru_bb_local_compute_process_def (dflow, bb_info,
				      df_get_artificial_defs (df, bb_index), 0);
/* Compute local reaching use (upward exposed use) info for each basic
   block within BLOCKS.  */

df_ru_local_compute (struct dataflow *dflow,
		     bitmap all_blocks,
		     bitmap rescan_blocks ATTRIBUTE_UNUSED)

  struct df *df = dflow->df;
  unsigned int bb_index;

  struct df_ru_problem_data *problem_data
    = (struct df_ru_problem_data *) dflow->problem_data;
  bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
  bitmap dense_invalidated = problem_data->dense_invalidated_by_call;

  df_reorganize_refs (&df->use_info);

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)

      df_ru_bb_local_compute (dflow, bb_index);

  /* Set up the knockout bit vectors to be applied across EH_EDGES.  */
  EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)

      struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
      if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
	bitmap_set_bit (sparse_invalidated, regno);

	  bitmap defs = df_ref_bitmap (problem_data->use_sites, regno,
				       reg_info->begin, reg_info->n_refs);
	  bitmap_ior_into (dense_invalidated, defs);
/* Initialize the solution bit vectors for problem.  */

df_ru_init_solution (struct dataflow *dflow, bitmap all_blocks)

  unsigned int bb_index;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)

      struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
      bitmap_copy (bb_info->in, bb_info->gen);
      bitmap_clear (bb_info->out);
/* Out of target gets or of in of source.  */

df_ru_confluence_n (struct dataflow *dflow, edge e)

  bitmap op1 = df_ru_get_bb_info (dflow, e->src->index)->out;
  bitmap op2 = df_ru_get_bb_info (dflow, e->dest->index)->in;

  if (e->flags & EDGE_EH)

      struct df_ru_problem_data *problem_data
	= (struct df_ru_problem_data *) dflow->problem_data;
      bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
      bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
      struct df *df = dflow->df;

      bitmap tmp = BITMAP_ALLOC (NULL);

      bitmap_copy (tmp, op2);
      bitmap_and_compl_into (tmp, dense_invalidated);

      EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)

	  bitmap_clear_range (tmp,
			      DF_REG_USE_GET (df, regno)->begin,
			      DF_REG_USE_GET (df, regno)->n_refs);

      bitmap_ior_into (op1, tmp);

    bitmap_ior_into (op1, op2);
/* Transfer function.  */

df_ru_transfer_function (struct dataflow *dflow, int bb_index)

  struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);

  bitmap in = bb_info->in;
  bitmap out = bb_info->out;
  bitmap gen = bb_info->gen;
  bitmap kill = bb_info->kill;
  bitmap sparse_kill = bb_info->sparse_kill;

  if (bitmap_empty_p (sparse_kill))
    return bitmap_ior_and_compl (in, gen, out, kill);

      struct df *df = dflow->df;
      bool changed = false;
      bitmap tmp = BITMAP_ALLOC (NULL);
      bitmap_copy (tmp, out);
      EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)

	  bitmap_clear_range (tmp,
			      DF_REG_USE_GET (df, regno)->begin,
			      DF_REG_USE_GET (df, regno)->n_refs);

      bitmap_and_compl_into (tmp, kill);
      bitmap_ior_into (tmp, gen);
      changed = !bitmap_equal_p (tmp, in);
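/* In dataflow terms, the transfer function above computes

      IN(bb) = GEN(bb) | (OUT(bb) & ~KILL(bb))

   where, for each register recorded in sparse_kill, the range of use
   ids [begin, begin + n_refs) is first cleared from the copy of OUT
   instead of being enumerated explicitly in KILL.  */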
/* Free all storage associated with the problem.  */

df_ru_free (struct dataflow *dflow)

  struct df_ru_problem_data *problem_data
    = (struct df_ru_problem_data *) dflow->problem_data;

      for (i = 0; i < dflow->block_info_size; i++)

	  struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, i);

	      BITMAP_FREE (bb_info->kill);
	      BITMAP_FREE (bb_info->sparse_kill);
	      BITMAP_FREE (bb_info->gen);
	      BITMAP_FREE (bb_info->in);
	      BITMAP_FREE (bb_info->out);

      free_alloc_pool (dflow->block_pool);

      for (i = 0; i < problem_data->use_sites_size; i++)

	  bitmap bm = problem_data->use_sites[i];

      free (problem_data->use_sites);
      BITMAP_FREE (problem_data->sparse_invalidated_by_call);
      BITMAP_FREE (problem_data->dense_invalidated_by_call);

      dflow->block_info_size = 0;
      free (dflow->block_info);
      free (dflow->problem_data);
/* Debugging info.  */

df_ru_dump (struct dataflow *dflow, FILE *file)

  struct df *df = dflow->df;
  struct df_ru_problem_data *problem_data
    = (struct df_ru_problem_data *) dflow->problem_data;
  unsigned int m = max_reg_num ();

  if (!dflow->block_info)
    return;

  fprintf (file, "Reaching uses:\n");

  fprintf (file, "  sparse invalidated \t");
  dump_bitmap (file, problem_data->sparse_invalidated_by_call);
  fprintf (file, "  dense invalidated \t");
  dump_bitmap (file, problem_data->dense_invalidated_by_call);

  for (regno = 0; regno < m; regno++)
    if (DF_REG_USE_GET (df, regno)->n_refs)
      fprintf (file, "%d[%d,%d] ", regno,
	       DF_REG_USE_GET (df, regno)->begin,
	       DF_REG_USE_GET (df, regno)->n_refs);
  fprintf (file, "\n");

      struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb->index);
      df_print_bb_index (bb, file);

      fprintf (file, "  in  \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
      dump_bitmap (file, bb_info->in);
      fprintf (file, "  gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
      dump_bitmap (file, bb_info->gen);
      fprintf (file, "  kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
      dump_bitmap (file, bb_info->kill);
      fprintf (file, "  out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
      dump_bitmap (file, bb_info->out);
/* All of the information associated with every instance of the problem.  */

static struct df_problem problem_RU =

  DF_RU,                      /* Problem id.  */
  DF_BACKWARD,                /* Direction.  */
  df_ru_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_ru_free_bb_info,         /* Free basic block info.  */
  df_ru_local_compute,        /* Local compute function.  */
  df_ru_init_solution,        /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  df_ru_confluence_n,         /* Confluence operator n.  */
  df_ru_transfer_function,    /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_ru_free,                 /* Free all of the problem information.  */
  df_ru_dump,                 /* Debugging.  */
  NULL,                       /* Dependent problem.  */
  0                           /* Changeable flags.  */
/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

df_ru_add_problem (struct df *df, int flags)

  return df_add_problem (df, &problem_RU, flags);
/*----------------------------------------------------------------------------
   REACHING DEFS

   Find the locations in the function where each definition site for a
   pseudo reaches.  In and out bitvectors are built for each basic
   block.  The id field in the ref is used to index into these sets.
   See df.h for details.
----------------------------------------------------------------------------*/

/* See the comment at the top of the Reaching Uses problem for how the
   uses are represented in the kill sets.  The same games are played
   here for the defs.  */
/* Private data used to compute the solution for this problem.  These
   data structures are not accessible outside of this module.  */
struct df_rd_problem_data

  /* If the number of defs for regnum N is less than
     DF_SPARSE_THRESHOLD, def_sites[N] contains a mask of all of
     the defs of reg N indexed by the id in the ref structure.  If
     there are more than DF_SPARSE_THRESHOLD defs for regnum N a
     different mechanism is used to mask the def.  */
  bitmap *def_sites;            /* Bitmap of defs for each pseudo.  */
  unsigned int def_sites_size;  /* Size of def_sites.  */
  /* The set of defs to regs invalidated by call.  */
  bitmap sparse_invalidated_by_call;
  /* The set of defs to regs invalidated by call for rd.  */
  bitmap dense_invalidated_by_call;
/* Get basic block info.  */

struct df_rd_bb_info *
df_rd_get_bb_info (struct dataflow *dflow, unsigned int index)

  return (struct df_rd_bb_info *) dflow->block_info[index];
/* Set basic block info.  */

df_rd_set_bb_info (struct dataflow *dflow, unsigned int index,
		   struct df_rd_bb_info *bb_info)

  dflow->block_info[index] = bb_info;
/* Free basic block info.  */

df_rd_free_bb_info (struct dataflow *dflow,
		    basic_block bb ATTRIBUTE_UNUSED,
		    void *vbb_info)

  struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;

      BITMAP_FREE (bb_info->kill);
      BITMAP_FREE (bb_info->sparse_kill);
      BITMAP_FREE (bb_info->gen);
      BITMAP_FREE (bb_info->in);
      BITMAP_FREE (bb_info->out);
      pool_free (dflow->block_pool, bb_info);
/* Allocate or reset bitmaps for DFLOW blocks.  The solution bits are
   not touched unless the block is new.  */

df_rd_alloc (struct dataflow *dflow,
	     bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
	     bitmap all_blocks)

  unsigned int bb_index;

  unsigned int reg_size = max_reg_num ();

  if (!dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_rd_block pool",
					   sizeof (struct df_rd_bb_info), 50);

  if (dflow->problem_data)

      struct df_rd_problem_data *problem_data
	= (struct df_rd_problem_data *) dflow->problem_data;

      for (i = 0; i < problem_data->def_sites_size; i++)

	  bitmap bm = problem_data->def_sites[i];

	      problem_data->def_sites[i] = NULL;

      if (problem_data->def_sites_size < reg_size)

	  problem_data->def_sites
	    = xrealloc (problem_data->def_sites, reg_size * sizeof (bitmap));
	  memset (problem_data->def_sites + problem_data->def_sites_size, 0,
		  (reg_size - problem_data->def_sites_size) * sizeof (bitmap));
	  problem_data->def_sites_size = reg_size;

      bitmap_clear (problem_data->sparse_invalidated_by_call);
      bitmap_clear (problem_data->dense_invalidated_by_call);

      struct df_rd_problem_data *problem_data = XNEW (struct df_rd_problem_data);
      dflow->problem_data = problem_data;

      problem_data->def_sites = XCNEWVEC (bitmap, reg_size);
      problem_data->def_sites_size = reg_size;
      problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
      problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);

  df_grow_bb_info (dflow);

  /* Because of the clustering of all use sites for the same pseudo,
     we have to process all of the blocks before doing the
     analysis.  */

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)

      struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);

	  bitmap_clear (bb_info->kill);
	  bitmap_clear (bb_info->sparse_kill);
	  bitmap_clear (bb_info->gen);

	  bb_info = (struct df_rd_bb_info *) pool_alloc (dflow->block_pool);
	  df_rd_set_bb_info (dflow, bb_index, bb_info);
	  bb_info->kill = BITMAP_ALLOC (NULL);
	  bb_info->sparse_kill = BITMAP_ALLOC (NULL);
	  bb_info->gen = BITMAP_ALLOC (NULL);
	  bb_info->in = BITMAP_ALLOC (NULL);
	  bb_info->out = BITMAP_ALLOC (NULL);
/* Process a list of DEFs for df_rd_bb_local_compute.  */

df_rd_bb_local_compute_process_def (struct dataflow *dflow,
				    struct df_rd_bb_info *bb_info,
				    struct df_ref *def,
				    enum df_ref_flags top_flag)

  struct df *df = dflow->df;

      if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))

	  unsigned int regno = DF_REF_REGNO (def);
	  unsigned int begin = DF_REG_DEF_GET (df, regno)->begin;
	  unsigned int n_defs = DF_REG_DEF_GET (df, regno)->n_refs;

	  /* Only the last def(s) for a regno in the block has any
	     effect.  */
	  if (!bitmap_bit_p (seen_in_block, regno))

	      /* The first def for regno in insn gets to knock out the
		 defs from other instructions.  */
	      if ((!bitmap_bit_p (seen_in_insn, regno))
		  /* If the def is to only part of the reg, it does
		     not kill the other defs that reach here.  */
		  && (!((DF_REF_FLAGS (def) & DF_REF_PARTIAL)
			|| (DF_REF_FLAGS (def) & DF_REF_MAY_CLOBBER))))

		  if (n_defs > DF_SPARSE_THRESHOLD)

		      bitmap_set_bit (bb_info->sparse_kill, regno);
		      bitmap_clear_range (bb_info->gen, begin, n_defs);

		      struct df_rd_problem_data *problem_data
			= (struct df_rd_problem_data *) dflow->problem_data;
		      bitmap defs = df_ref_bitmap (problem_data->def_sites,
						   regno, begin, n_defs);
		      bitmap_ior_into (bb_info->kill, defs);
		      bitmap_and_compl_into (bb_info->gen, defs);

	      bitmap_set_bit (seen_in_insn, regno);
	      /* All defs for regno in the instruction may be put into
		 the gen set.  */
	      if (!(DF_REF_FLAGS (def)
		    & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
		bitmap_set_bit (bb_info->gen, DF_REF_ID (def));

      def = def->next_ref;
/* Compute local reaching def info for basic block BB.  */

df_rd_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)

  struct df *df = dflow->df;
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);

  bitmap_clear (seen_in_block);
  bitmap_clear (seen_in_insn);

  df_rd_bb_local_compute_process_def (dflow, bb_info,
				      df_get_artificial_defs (df, bb_index), 0);

  FOR_BB_INSNS_REVERSE (bb, insn)

      unsigned int uid = INSN_UID (insn);

      df_rd_bb_local_compute_process_def (dflow, bb_info,
					  DF_INSN_UID_DEFS (df, uid), 0);

      /* This complex dance with the two bitmaps is required because
	 instructions can assign twice to the same pseudo.  This
	 generally happens with calls that will have one def for the
	 result and another def for the clobber.  If only one vector
	 is used and the clobber goes first, the result will be
	 lost.  */
      bitmap_ior_into (seen_in_block, seen_in_insn);
      bitmap_clear (seen_in_insn);

  /* Process the artificial defs at the top of the block last since we
     are going backwards through the block and these are logically at
     the top.  */
  df_rd_bb_local_compute_process_def (dflow, bb_info,
				      df_get_artificial_defs (df, bb_index),
				      DF_REF_AT_TOP);
/* Compute local reaching def info for each basic block within BLOCKS.  */

df_rd_local_compute (struct dataflow *dflow,
		     bitmap all_blocks,
		     bitmap rescan_blocks ATTRIBUTE_UNUSED)

  struct df *df = dflow->df;
  unsigned int bb_index;

  struct df_rd_problem_data *problem_data
    = (struct df_rd_problem_data *) dflow->problem_data;
  bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
  bitmap dense_invalidated = problem_data->dense_invalidated_by_call;

  df_reorganize_refs (&df->def_info);

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)

      df_rd_bb_local_compute (dflow, bb_index);

  /* Set up the knockout bit vectors to be applied across EH_EDGES.  */
  EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)

      struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
      if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
	bitmap_set_bit (sparse_invalidated, regno);

	  bitmap defs = df_ref_bitmap (problem_data->def_sites, regno,
				       reg_info->begin, reg_info->n_refs);
	  bitmap_ior_into (dense_invalidated, defs);
/* Initialize the solution bit vectors for problem.  */

df_rd_init_solution (struct dataflow *dflow, bitmap all_blocks)

  unsigned int bb_index;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)

      struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);

      bitmap_copy (bb_info->out, bb_info->gen);
      bitmap_clear (bb_info->in);
/* In of target gets or of out of source.  */

df_rd_confluence_n (struct dataflow *dflow, edge e)

  bitmap op1 = df_rd_get_bb_info (dflow, e->dest->index)->in;
  bitmap op2 = df_rd_get_bb_info (dflow, e->src->index)->out;

  if (e->flags & EDGE_EH)

      struct df_rd_problem_data *problem_data
	= (struct df_rd_problem_data *) dflow->problem_data;
      bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
      bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
      struct df *df = dflow->df;

      bitmap tmp = BITMAP_ALLOC (NULL);

      bitmap_copy (tmp, op2);
      bitmap_and_compl_into (tmp, dense_invalidated);

      EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)

	  bitmap_clear_range (tmp,
			      DF_REG_DEF_GET (df, regno)->begin,
			      DF_REG_DEF_GET (df, regno)->n_refs);

      bitmap_ior_into (op1, tmp);

    bitmap_ior_into (op1, op2);
/* Transfer function.  */

df_rd_transfer_function (struct dataflow *dflow, int bb_index)

  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);

  bitmap in = bb_info->in;
  bitmap out = bb_info->out;
  bitmap gen = bb_info->gen;
  bitmap kill = bb_info->kill;
  bitmap sparse_kill = bb_info->sparse_kill;

  if (bitmap_empty_p (sparse_kill))
    return bitmap_ior_and_compl (out, gen, in, kill);

      struct df *df = dflow->df;
      bool changed = false;
      bitmap tmp = BITMAP_ALLOC (NULL);
      bitmap_copy (tmp, in);
      EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)

	  bitmap_clear_range (tmp,
			      DF_REG_DEF_GET (df, regno)->begin,
			      DF_REG_DEF_GET (df, regno)->n_refs);

      bitmap_and_compl_into (tmp, kill);
      bitmap_ior_into (tmp, gen);
      changed = !bitmap_equal_p (tmp, out);
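/* This is the forward analogue of the reaching uses transfer function:

      OUT(bb) = GEN(bb) | (IN(bb) & ~KILL(bb))

   with the ranges of def ids for registers in sparse_kill cleared from
   the copy of IN rather than being listed explicitly in KILL.  */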
/* Free all storage associated with the problem.  */

df_rd_free (struct dataflow *dflow)

  struct df_rd_problem_data *problem_data
    = (struct df_rd_problem_data *) dflow->problem_data;

      for (i = 0; i < dflow->block_info_size; i++)

	  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, i);

	      BITMAP_FREE (bb_info->kill);
	      BITMAP_FREE (bb_info->sparse_kill);
	      BITMAP_FREE (bb_info->gen);
	      BITMAP_FREE (bb_info->in);
	      BITMAP_FREE (bb_info->out);

      free_alloc_pool (dflow->block_pool);

      for (i = 0; i < problem_data->def_sites_size; i++)

	  bitmap bm = problem_data->def_sites[i];

      free (problem_data->def_sites);
      BITMAP_FREE (problem_data->sparse_invalidated_by_call);
      BITMAP_FREE (problem_data->dense_invalidated_by_call);

      dflow->block_info_size = 0;
      free (dflow->block_info);
      free (dflow->problem_data);
/* Debugging info.  */

df_rd_dump (struct dataflow *dflow, FILE *file)

  struct df *df = dflow->df;
  struct df_rd_problem_data *problem_data
    = (struct df_rd_problem_data *) dflow->problem_data;
  unsigned int m = max_reg_num ();

  if (!dflow->block_info)
    return;

  fprintf (file, "Reaching defs:\n\n");

  fprintf (file, "  sparse invalidated \t");
  dump_bitmap (file, problem_data->sparse_invalidated_by_call);
  fprintf (file, "  dense invalidated \t");
  dump_bitmap (file, problem_data->dense_invalidated_by_call);

  for (regno = 0; regno < m; regno++)
    if (DF_REG_DEF_GET (df, regno)->n_refs)
      fprintf (file, "%d[%d,%d] ", regno,
	       DF_REG_DEF_GET (df, regno)->begin,
	       DF_REG_DEF_GET (df, regno)->n_refs);
  fprintf (file, "\n");

      struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb->index);
      df_print_bb_index (bb, file);

      fprintf (file, "  in  \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
      dump_bitmap (file, bb_info->in);
      fprintf (file, "  gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
      dump_bitmap (file, bb_info->gen);
      fprintf (file, "  kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
      dump_bitmap (file, bb_info->kill);
      fprintf (file, "  out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
      dump_bitmap (file, bb_info->out);
/* All of the information associated with every instance of the problem.  */

static struct df_problem problem_RD =

  DF_RD,                      /* Problem id.  */
  DF_FORWARD,                 /* Direction.  */
  df_rd_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_rd_free_bb_info,         /* Free basic block info.  */
  df_rd_local_compute,        /* Local compute function.  */
  df_rd_init_solution,        /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  df_rd_confluence_n,         /* Confluence operator n.  */
  df_rd_transfer_function,    /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_rd_free,                 /* Free all of the problem information.  */
  df_rd_dump,                 /* Debugging.  */
  NULL,                       /* Dependent problem.  */
  0                           /* Changeable flags.  */
/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

df_rd_add_problem (struct df *df, int flags)

  return df_add_problem (df, &problem_RD, flags);
/*----------------------------------------------------------------------------
   LIVE REGISTERS

   Find the locations in the function where any use of a pseudo can
   reach in the backwards direction.  In and out bitvectors are built
   for each basic block.  The regnum is used to index into these sets.
   See df.h for details.
----------------------------------------------------------------------------*/
/* Get basic block info.  */

struct df_lr_bb_info *
df_lr_get_bb_info (struct dataflow *dflow, unsigned int index)

  return (struct df_lr_bb_info *) dflow->block_info[index];
1382 df_lr_set_bb_info (struct dataflow
*dflow
, unsigned int index
,
1383 struct df_lr_bb_info
*bb_info
)
1385 dflow
->block_info
[index
] = bb_info
;
/* Free basic block info.  */

df_lr_free_bb_info (struct dataflow *dflow,
		    basic_block bb ATTRIBUTE_UNUSED,
		    void *vbb_info)

  struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;

      BITMAP_FREE (bb_info->use);
      BITMAP_FREE (bb_info->def);
      BITMAP_FREE (bb_info->in);
      BITMAP_FREE (bb_info->out);
      pool_free (dflow->block_pool, bb_info);
/* Allocate or reset bitmaps for DFLOW blocks.  The solution bits are
   not touched unless the block is new.  */

df_lr_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
	     bitmap all_blocks ATTRIBUTE_UNUSED)

  unsigned int bb_index;

  if (!dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_lr_block pool",
					   sizeof (struct df_lr_bb_info), 50);

  df_grow_bb_info (dflow);

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)

      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);

	  bitmap_clear (bb_info->def);
	  bitmap_clear (bb_info->use);

	  bb_info = (struct df_lr_bb_info *) pool_alloc (dflow->block_pool);
	  df_lr_set_bb_info (dflow, bb_index, bb_info);
	  bb_info->use = BITMAP_ALLOC (NULL);
	  bb_info->def = BITMAP_ALLOC (NULL);
	  bb_info->in = BITMAP_ALLOC (NULL);
	  bb_info->out = BITMAP_ALLOC (NULL);
/* Compute local live register info for basic block BB.  */

df_lr_bb_local_compute (struct dataflow *dflow,
			struct df *df, unsigned int bb_index)

  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);

  /* Process the registers set in an exception handler.  */
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if (((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
	&& (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL)))

	unsigned int dregno = DF_REF_REGNO (def);
	bitmap_set_bit (bb_info->def, dregno);
	bitmap_clear_bit (bb_info->use, dregno);

  /* Process the hardware registers that are always live.  */
  for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
    /* Add use to set of uses in this BB.  */
    if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
      bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));

  FOR_BB_INSNS_REVERSE (bb, insn)

      unsigned int uid = INSN_UID (insn);

	  for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)

	      unsigned int dregno = DF_REF_REGNO (def);

	      if (dregno >= FIRST_PSEUDO_REGISTER
		  || !(SIBLING_CALL_P (insn)
		       && bitmap_bit_p (df->exit_block_uses, dregno)
		       && !refers_to_regno_p (dregno, dregno + 1,
					      current_function_return_rtx,

		  /* If the def is to only part of the reg, it does
		     not kill the other defs that reach here.  */
		  if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))

		      bitmap_set_bit (bb_info->def, dregno);
		      bitmap_clear_bit (bb_info->use, dregno);

	  for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)

	      unsigned int dregno = DF_REF_REGNO (def);

	      if (DF_INSN_CONTAINS_ASM (df, insn)
		  && dregno < FIRST_PSEUDO_REGISTER)

		  unsigned int end = dregno
		    + hard_regno_nregs[dregno][GET_MODE (DF_REF_REG (def))] - 1;
		  for (i = dregno; i <= end; ++i)
		    regs_asm_clobbered[i] = 1;

	      /* If the def is to only part of the reg, it does
		 not kill the other defs that reach here.  */
	      if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))

		  bitmap_set_bit (bb_info->def, dregno);
		  bitmap_clear_bit (bb_info->use, dregno);

      for (use = DF_INSN_UID_USES (df, uid); use; use = use->next_ref)
	/* Add use to set of uses in this BB.  */
	bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));

  /* Process the registers set in an exception handler.  */
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP)
	&& (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL)))

	unsigned int dregno = DF_REF_REGNO (def);
	bitmap_set_bit (bb_info->def, dregno);
	bitmap_clear_bit (bb_info->use, dregno);

  /* Process the uses that are live into an exception handler.  */
  for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
    /* Add use to set of uses in this BB.  */
    if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
      bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
/* Compute local live register info for each basic block within BLOCKS.  */

df_lr_local_compute (struct dataflow *dflow,
		     bitmap all_blocks,
		     bitmap rescan_blocks)

  struct df *df = dflow->df;
  unsigned int bb_index;

  /* Assume that the stack pointer is unchanging if alloca hasn't
     been used.  */
  if (bitmap_equal_p (all_blocks, rescan_blocks))
    memset (regs_asm_clobbered, 0, sizeof (regs_asm_clobbered));

  bitmap_clear (df->hardware_regs_used);

  /* The all-important stack pointer must always be live.  */
  bitmap_set_bit (df->hardware_regs_used, STACK_POINTER_REGNUM);

  /* Before reload, there are a few registers that must be forced
     live everywhere -- which might not already be the case for
     blocks within infinite loops.  */
  if (!reload_completed)

      /* Any reference to any pseudo before reload is a potential
	 reference of the frame pointer.  */
      bitmap_set_bit (df->hardware_regs_used, FRAME_POINTER_REGNUM);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
	 reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
	bitmap_set_bit (df->hardware_regs_used, ARG_POINTER_REGNUM);
#endif

      /* Any constant, or pseudo with constant equivalences, may
	 require reloading from memory using the pic register.  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	bitmap_set_bit (df->hardware_regs_used, PIC_OFFSET_TABLE_REGNUM);

  if (bitmap_bit_p (rescan_blocks, EXIT_BLOCK))

      /* The exit block is special for this problem and its bits are
	 computed from thin air.  */
      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, EXIT_BLOCK);
      bitmap_copy (bb_info->use, df->exit_block_uses);

  EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)

      if (bb_index == EXIT_BLOCK)
	continue;

      df_lr_bb_local_compute (dflow, df, bb_index);
/* Initialize the solution vectors.  */

df_lr_init (struct dataflow *dflow, bitmap all_blocks)

  unsigned int bb_index;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)

      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
      bitmap_copy (bb_info->in, bb_info->use);
      bitmap_clear (bb_info->out);
/* Confluence function that processes infinite loops.  This might be a
   noreturn function that throws.  And even if it isn't, getting the
   unwind info right helps debugging.  */

df_lr_confluence_0 (struct dataflow *dflow, basic_block bb)

  struct df *df = dflow->df;

  bitmap op1 = df_lr_get_bb_info (dflow, bb->index)->out;
  if (bb != EXIT_BLOCK_PTR)
    bitmap_copy (op1, df->hardware_regs_used);
/* Confluence function that ignores fake edges.  */

df_lr_confluence_n (struct dataflow *dflow, edge e)

  bitmap op1 = df_lr_get_bb_info (dflow, e->src->index)->out;
  bitmap op2 = df_lr_get_bb_info (dflow, e->dest->index)->in;

  /* Call-clobbered registers die across exception and call edges.  */
  /* ??? Abnormal call edges ignored for the moment, as this gets
     confused by sibling call edges, which crashes reg-stack.  */
  if (e->flags & EDGE_EH)
    bitmap_ior_and_compl_into (op1, op2, df_invalidated_by_call);
  else
    bitmap_ior_into (op1, op2);

  bitmap_ior_into (op1, dflow->df->hardware_regs_used);
/* Transfer function.  */

df_lr_transfer_function (struct dataflow *dflow, int bb_index)

  struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
  bitmap in = bb_info->in;
  bitmap out = bb_info->out;
  bitmap use = bb_info->use;
  bitmap def = bb_info->def;

  return bitmap_ior_and_compl (in, use, out, def);
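/* The call above is the classic backward liveness equation

      IN(bb) = USE(bb) | (OUT(bb) & ~DEF(bb))

   computed over register numbers rather than over ref ids.  */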
/* Free all storage associated with the problem.  */

df_lr_free (struct dataflow *dflow)

  if (dflow->block_info)

      for (i = 0; i < dflow->block_info_size; i++)

	  struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, i);

	      BITMAP_FREE (bb_info->use);
	      BITMAP_FREE (bb_info->def);
	      BITMAP_FREE (bb_info->in);
	      BITMAP_FREE (bb_info->out);

      free_alloc_pool (dflow->block_pool);

      dflow->block_info_size = 0;
      free (dflow->block_info);

  free (dflow->problem_data);
/* Debugging info.  */

df_lr_dump (struct dataflow *dflow, FILE *file)

  if (!dflow->block_info)
    return;

  fprintf (file, "Live Registers:\n");

      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb->index);
      df_print_bb_index (bb, file);

      fprintf (file, "  in  \t");
      dump_bitmap (file, bb_info->in);
      fprintf (file, "  use \t");
      dump_bitmap (file, bb_info->use);
      fprintf (file, "  def \t");
      dump_bitmap (file, bb_info->def);
      fprintf (file, "  out \t");
      dump_bitmap (file, bb_info->out);
/* All of the information associated with every instance of the problem.  */

static struct df_problem problem_LR =

  DF_LR,                      /* Problem id.  */
  DF_BACKWARD,                /* Direction.  */
  df_lr_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_lr_free_bb_info,         /* Free basic block info.  */
  df_lr_local_compute,        /* Local compute function.  */
  df_lr_init,                 /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  df_lr_confluence_0,         /* Confluence operator 0.  */
  df_lr_confluence_n,         /* Confluence operator n.  */
  df_lr_transfer_function,    /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_lr_free,                 /* Free all of the problem information.  */
  df_lr_dump,                 /* Debugging.  */
  NULL,                       /* Dependent problem.  */
/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

df_lr_add_problem (struct df *df, int flags)

  return df_add_problem (df, &problem_LR, flags);
/*----------------------------------------------------------------------------
   UNINITIALIZED REGISTERS

   Find the set of uses for registers that are reachable from the entry
   block without passing thru a definition.  In and out bitvectors are built
   for each basic block.  The regnum is used to index into these sets.
   See df.h for details.
----------------------------------------------------------------------------*/
/* Get basic block info.  */

struct df_ur_bb_info *
df_ur_get_bb_info (struct dataflow *dflow, unsigned int index)

  return (struct df_ur_bb_info *) dflow->block_info[index];
/* Set basic block info.  */

df_ur_set_bb_info (struct dataflow *dflow, unsigned int index,
		   struct df_ur_bb_info *bb_info)

  dflow->block_info[index] = bb_info;
/* Free basic block info.  */

df_ur_free_bb_info (struct dataflow *dflow,
		    basic_block bb ATTRIBUTE_UNUSED,
		    void *vbb_info)

  struct df_ur_bb_info *bb_info = (struct df_ur_bb_info *) vbb_info;

      BITMAP_FREE (bb_info->gen);
      BITMAP_FREE (bb_info->kill);
      BITMAP_FREE (bb_info->in);
      BITMAP_FREE (bb_info->out);
      pool_free (dflow->block_pool, bb_info);
/* Allocate or reset bitmaps for DFLOW blocks.  The solution bits are
   not touched unless the block is new.  */

df_ur_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
	     bitmap all_blocks ATTRIBUTE_UNUSED)

  unsigned int bb_index;

  if (!dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_ur_block pool",
					   sizeof (struct df_ur_bb_info), 100);

  df_grow_bb_info (dflow);

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)

      struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);

	  bitmap_clear (bb_info->kill);
	  bitmap_clear (bb_info->gen);

	  bb_info = (struct df_ur_bb_info *) pool_alloc (dflow->block_pool);
	  df_ur_set_bb_info (dflow, bb_index, bb_info);
	  bb_info->kill = BITMAP_ALLOC (NULL);
	  bb_info->gen = BITMAP_ALLOC (NULL);
	  bb_info->in = BITMAP_ALLOC (NULL);
	  bb_info->out = BITMAP_ALLOC (NULL);
/* Compute local uninitialized register info for basic block BB.  */

df_ur_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)

  struct df *df = dflow->df;
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);

  bitmap_clear (seen_in_block);
  bitmap_clear (seen_in_insn);

  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)

	unsigned int regno = DF_REF_REGNO (def);
	if (!bitmap_bit_p (seen_in_block, regno))

	    bitmap_set_bit (seen_in_block, regno);
	    bitmap_set_bit (bb_info->gen, regno);

  FOR_BB_INSNS_REVERSE (bb, insn)

      unsigned int uid = INSN_UID (insn);

      for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)

	  unsigned int regno = DF_REF_REGNO (def);
	  /* Only the last def counts.  */
	  if (!bitmap_bit_p (seen_in_block, regno))

	      bitmap_set_bit (seen_in_insn, regno);

	      if (DF_REF_FLAGS (def)
		  & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))

		  /* Only must clobbers for the entire reg destroy the
		     value.  */
		  if ((DF_REF_FLAGS (def) & DF_REF_MUST_CLOBBER)
		      && (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL)))
		    bitmap_set_bit (bb_info->kill, regno);

		bitmap_set_bit (bb_info->gen, regno);

      bitmap_ior_into (seen_in_block, seen_in_insn);
      bitmap_clear (seen_in_insn);

  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)

	unsigned int regno = DF_REF_REGNO (def);
	if (!bitmap_bit_p (seen_in_block, regno))

	    bitmap_set_bit (seen_in_block, regno);
	    bitmap_set_bit (bb_info->gen, regno);
/* Compute local uninitialized register info.  */

df_ur_local_compute (struct dataflow *dflow,
		     bitmap all_blocks ATTRIBUTE_UNUSED,
		     bitmap rescan_blocks)

  unsigned int bb_index;

  EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)

      df_ur_bb_local_compute (dflow, bb_index);
/* Initialize the solution vectors.  */

df_ur_init (struct dataflow *dflow, bitmap all_blocks)

  unsigned int bb_index;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)

      struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);

      bitmap_copy (bb_info->out, bb_info->gen);
      bitmap_clear (bb_info->in);
/* Or in the stack regs, hard regs and early clobber regs into the
   ur_in sets of all of the blocks.  */

df_ur_local_finalize (struct dataflow *dflow, bitmap all_blocks)

  struct df *df = dflow->df;
  struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
  bitmap tmp = BITMAP_ALLOC (NULL);

  unsigned int bb_index;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)

      struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
      struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);

      /* No register may reach a location where it is not used.  Thus
	 we trim the rr result to the places where it is used.  */
      bitmap_and_into (bb_info->in, bb_lr_info->in);
      bitmap_and_into (bb_info->out, bb_lr_info->out);

      /* Hard registers may still stick in the ur_out set, but not
	 be in the ur_in set, if their only mention was in a call
	 in this block.  This is because a call kills in the lr
	 problem but does not kill in the ur problem.  To clean
	 this up, we execute the transfer function on the lr_in
	 set and then use that to knock bits out of ur_out.  */
      bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
			    bb_info->kill);
      bitmap_and_into (bb_info->out, tmp);
/* Confluence function that ignores fake edges.  */

df_ur_confluence_n (struct dataflow *dflow, edge e)

  bitmap op1 = df_ur_get_bb_info (dflow, e->dest->index)->in;
  bitmap op2 = df_ur_get_bb_info (dflow, e->src->index)->out;

  if (e->flags & EDGE_FAKE)
    return;

  bitmap_ior_into (op1, op2);
/* Transfer function.  */

df_ur_transfer_function (struct dataflow *dflow, int bb_index)

  struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
  bitmap in = bb_info->in;
  bitmap out = bb_info->out;
  bitmap gen = bb_info->gen;
  bitmap kill = bb_info->kill;

  return bitmap_ior_and_compl (out, gen, in, kill);
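/* As with the other forward problems, the call above computes

      OUT(bb) = GEN(bb) | (IN(bb) & ~KILL(bb))

   over register numbers, where GEN roughly holds the registers given a
   value in the block and KILL the registers wholly must-clobbered.  */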
/* Free all storage associated with the problem.  */

df_ur_free (struct dataflow *dflow)

  if (dflow->block_info)

      for (i = 0; i < dflow->block_info_size; i++)

	  struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, i);

	      BITMAP_FREE (bb_info->gen);
	      BITMAP_FREE (bb_info->kill);
	      BITMAP_FREE (bb_info->in);
	      BITMAP_FREE (bb_info->out);

      free_alloc_pool (dflow->block_pool);
      dflow->block_info_size = 0;
      free (dflow->block_info);
/* Debugging info.  */

df_ur_dump (struct dataflow *dflow, FILE *file)

  if (!dflow->block_info)
    return;

  fprintf (file, "Undefined regs:\n");

      struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb->index);
      df_print_bb_index (bb, file);

      fprintf (file, "  in  \t");
      dump_bitmap (file, bb_info->in);
      fprintf (file, "  gen \t");
      dump_bitmap (file, bb_info->gen);
      fprintf (file, "  kill\t");
      dump_bitmap (file, bb_info->kill);
      fprintf (file, "  out \t");
      dump_bitmap (file, bb_info->out);
/* All of the information associated with every instance of the problem.  */

static struct df_problem problem_UR =

  DF_UR,                      /* Problem id.  */
  DF_FORWARD,                 /* Direction.  */
  df_ur_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_ur_free_bb_info,         /* Free basic block info.  */
  df_ur_local_compute,        /* Local compute function.  */
  df_ur_init,                 /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  df_ur_confluence_n,         /* Confluence operator n.  */
  df_ur_transfer_function,    /* Transfer function.  */
  df_ur_local_finalize,       /* Finalize function.  */
  df_ur_free,                 /* Free all of the problem information.  */
  df_ur_dump,                 /* Debugging.  */
  df_lr_add_problem,          /* Dependent problem.  */
  0                           /* Changeable flags.  */
/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

df_ur_add_problem (struct df *df, int flags)

  return df_add_problem (df, &problem_UR, flags);
/*----------------------------------------------------------------------------
   UNINITIALIZED REGISTERS WITH EARLYCLOBBER

   Find the set of uses for registers that are reachable from the entry
   block without passing thru a definition.  In and out bitvectors are built
   for each basic block.  The regnum is used to index into these sets.
   See df.h for details.

   This is a variant of the UR problem above that has a lot of special
   features just for the register allocation phase.  This problem
   should go away if someone would fix the interference graph.
----------------------------------------------------------------------------*/
/* Private data used to compute the solution for this problem.  These
   data structures are not accessible outside of this module.  */
struct df_urec_problem_data

  bool earlyclobbers_found;     /* True if any instruction contains an
				   earlyclobber.  */
  bitmap stack_regs;		/* Registers that may be allocated to a STACK_REGS.  */
/* Get basic block info.  */

struct df_urec_bb_info *
df_urec_get_bb_info (struct dataflow *dflow, unsigned int index)

  return (struct df_urec_bb_info *) dflow->block_info[index];
/* Set basic block info.  */

df_urec_set_bb_info (struct dataflow *dflow, unsigned int index,
		     struct df_urec_bb_info *bb_info)

  dflow->block_info[index] = bb_info;
/* Free basic block info.  */

df_urec_free_bb_info (struct dataflow *dflow,
		      basic_block bb ATTRIBUTE_UNUSED,
		      void *vbb_info)

  struct df_urec_bb_info *bb_info = (struct df_urec_bb_info *) vbb_info;

      BITMAP_FREE (bb_info->gen);
      BITMAP_FREE (bb_info->kill);
      BITMAP_FREE (bb_info->in);
      BITMAP_FREE (bb_info->out);
      BITMAP_FREE (bb_info->earlyclobber);
      pool_free (dflow->block_pool, bb_info);
/* Allocate or reset bitmaps for DFLOW blocks.  The solution bits are
   not touched unless the block is new.  */

static void
df_urec_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
	       bitmap all_blocks ATTRIBUTE_UNUSED)
{
  unsigned int bb_index;
  bitmap_iterator bi;
  struct df_urec_problem_data *problem_data
    = (struct df_urec_problem_data *) dflow->problem_data;

  if (!dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_urec_block pool",
					   sizeof (struct df_urec_bb_info), 50);

  if (!dflow->problem_data)
    {
      problem_data = XNEW (struct df_urec_problem_data);
      dflow->problem_data = problem_data;
    }
  problem_data->earlyclobbers_found = false;

  df_grow_bb_info (dflow);

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
    {
      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
      if (bb_info)
	{
	  bitmap_clear (bb_info->kill);
	  bitmap_clear (bb_info->gen);
	  bitmap_clear (bb_info->earlyclobber);
	}
      else
	{
	  bb_info = (struct df_urec_bb_info *) pool_alloc (dflow->block_pool);
	  df_urec_set_bb_info (dflow, bb_index, bb_info);
	  bb_info->kill = BITMAP_ALLOC (NULL);
	  bb_info->gen = BITMAP_ALLOC (NULL);
	  bb_info->in = BITMAP_ALLOC (NULL);
	  bb_info->out = BITMAP_ALLOC (NULL);
	  bb_info->earlyclobber = BITMAP_ALLOC (NULL);
	}
    }
}
/* The function modifies local info for register REG being changed in
   SETTER.  DATA is used to pass the current basic block info.  */

static void
df_urec_mark_reg_change (rtx reg, rtx setter, void *data)
{
  int regno;
  int endregno;
  int i;
  struct df_urec_bb_info *bb_info = (struct df_urec_bb_info *) data;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      endregno = END_HARD_REGNO (reg);
      for (i = regno; i < endregno; i++)
	{
	  bitmap_set_bit (bb_info->kill, i);

	  if (GET_CODE (setter) != CLOBBER)
	    bitmap_set_bit (bb_info->gen, i);
	  else
	    bitmap_clear_bit (bb_info->gen, i);
	}
    }
  else
    {
      bitmap_set_bit (bb_info->kill, regno);

      if (GET_CODE (setter) != CLOBBER)
	bitmap_set_bit (bb_info->gen, regno);
      else
	bitmap_clear_bit (bb_info->gen, regno);
    }
}
/* Classes of registers which could be early clobbered in the current
   insn.  */

static VEC(int,heap) *earlyclobber_regclass;

/* This function finds and stores register classes that could be early
   clobbered in INSN.  If any earlyclobber classes are found, the function
   returns TRUE, in all other cases it returns FALSE.  */

static bool
df_urec_check_earlyclobber (rtx insn)
{
  int opno;
  bool found = false;

  extract_insn (insn);

  VEC_truncate (int, earlyclobber_regclass, 0);
  for (opno = 0; opno < recog_data.n_operands; opno++)
    {
      char c;
      bool amp_p;
      int i;
      enum reg_class class;
      const char *p = recog_data.constraints[opno];

      class = NO_REGS;
      amp_p = false;
      for (;;)
	{
	  c = *p;
	  switch (c)
	    {
	    case '=':  case '+':  case '?':
	    case '#':  case '!':
	    case '*':  case '%':
	    case 'm':  case '<':  case '>':  case 'V':  case 'o':
	    case 'E':  case 'F':  case 'G':  case 'H':
	    case 's':  case 'i':  case 'n':
	    case 'I':  case 'J':  case 'K':  case 'L':
	    case 'M':  case 'N':  case 'O':  case 'P':
	    case 'X':
	    case '0': case '1':  case '2':  case '3':  case '4':
	    case '5': case '6':  case '7':  case '8':  case '9':
	      /* These don't say anything we care about.  */
	      break;

	    case '&':
	      amp_p = true;
	      break;

	    case '\0':
	    case ',':
	      if (amp_p && class != NO_REGS)
		{
		  int rc;

		  found = true;
		  for (i = 0;
		       VEC_iterate (int, earlyclobber_regclass, i, rc);
		       i++)
		    {
		      if (rc == (int) class)
			goto found_rc;
		    }

		  /* We use VEC_quick_push here because
		     earlyclobber_regclass holds no more than
		     N_REG_CLASSES elements.  */
		  VEC_quick_push (int, earlyclobber_regclass, (int) class);
		found_rc:
		  ;
		}

	      amp_p = false;
	      class = NO_REGS;
	      break;

	    case 'r':
	      class = GENERAL_REGS;
	      break;

	    default:
	      class = REG_CLASS_FROM_CONSTRAINT (c, p);
	      break;
	    }
	  if (c == '\0')
	    break;
	  p += CONSTRAINT_LEN (c, p);
	}
    }

  return found;
}
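
/* Worked example for the constraint scan above (illustrative only):
   for an operand constraint string "=&r", the '&' sets amp_p and the
   'r' selects GENERAL_REGS, so GENERAL_REGS is pushed onto
   earlyclobber_regclass when the alternative ends; a plain "=r"
   records nothing because amp_p never becomes true.  */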
/* The function checks that pseudo-register *X has a class
   intersecting with the class of a pseudo-register that could be
   early clobbered in the same insn.

   This function is a no-op if earlyclobber_regclass is empty.

   Reload can assign the same hard register to an uninitialized
   pseudo-register and an early clobbered pseudo-register in an insn if
   the pseudo-register is used for the first time in the given BB and
   is not live at the BB start.  To prevent this we don't change life
   information for such pseudo-registers.  */

static int
df_urec_mark_reg_use_for_earlyclobber (rtx *x, void *data)
{
  enum reg_class pref_class, alt_class;
  int i, regno;
  struct df_urec_bb_info *bb_info = (struct df_urec_bb_info *) data;

  if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
    {
      int rc;

      regno = REGNO (*x);
      if (bitmap_bit_p (bb_info->kill, regno)
	  || bitmap_bit_p (bb_info->gen, regno))
	return 0;
      pref_class = reg_preferred_class (regno);
      alt_class = reg_alternate_class (regno);
      for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
	{
	  if (reg_classes_intersect_p (rc, pref_class)
	      || (rc != NO_REGS
		  && reg_classes_intersect_p (rc, alt_class)))
	    {
	      bitmap_set_bit (bb_info->earlyclobber, regno);
	      break;
	    }
	}
    }
  return 0;
}
/* The function processes all pseudo-registers in *X with the aid of
   the previous function.  */

static void
df_urec_mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
{
  for_each_rtx (x, df_urec_mark_reg_use_for_earlyclobber, data);
}
/* Compute local uninitialized register info for basic block BB.  */

static void
df_urec_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
{
  struct df *df = dflow->df;
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
  rtx insn;
  struct df_ref *def;

  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
      {
	unsigned int regno = DF_REF_REGNO (def);
	bitmap_set_bit (bb_info->gen, regno);
      }

  FOR_BB_INSNS (bb, insn)
    {
      if (INSN_P (insn))
	{
	  note_stores (PATTERN (insn), df_urec_mark_reg_change, bb_info);
	  if (df_urec_check_earlyclobber (insn))
	    {
	      struct df_urec_problem_data *problem_data
		= (struct df_urec_problem_data *) dflow->problem_data;
	      problem_data->earlyclobbers_found = true;
	      note_uses (&PATTERN (insn),
			 df_urec_mark_reg_use_for_earlyclobber_1, bb_info);
	    }
	}
    }

  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
      {
	unsigned int regno = DF_REF_REGNO (def);
	bitmap_set_bit (bb_info->gen, regno);
      }
}
/* Compute local uninitialized register info.  */

static void
df_urec_local_compute (struct dataflow *dflow,
		       bitmap all_blocks ATTRIBUTE_UNUSED,
		       bitmap rescan_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;
#ifdef STACK_REGS
  int i;
  HARD_REG_SET stack_hard_regs, used;
  struct df_urec_problem_data *problem_data
    = (struct df_urec_problem_data *) dflow->problem_data;

  /* Any register that MAY be allocated to a register stack (like the
     387) is treated poorly.  Each such register is marked as being
     live everywhere.  This keeps the register allocator and the
     subsequent passes from doing anything useful with these values.

     FIXME: This seems like an incredibly poor idea.  */

  CLEAR_HARD_REG_SET (stack_hard_regs);
  for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
    SET_HARD_REG_BIT (stack_hard_regs, i);
  problem_data->stack_regs = BITMAP_ALLOC (NULL);
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
      IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
      AND_HARD_REG_SET (used, stack_hard_regs);
      if (!hard_reg_set_empty_p (used))
	bitmap_set_bit (problem_data->stack_regs, i);
    }
#endif

  /* We know that earlyclobber_regclass holds no more than
     N_REG_CLASSES elements.  See df_urec_check_earlyclobber.  */
  earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);

  EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
    {
      df_urec_bb_local_compute (dflow, bb_index);
    }

  VEC_free (int, heap, earlyclobber_regclass);
}
/* Initialize the solution vectors.  */

static void
df_urec_init (struct dataflow *dflow, bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);

      bitmap_copy (bb_info->out, bb_info->gen);
      bitmap_clear (bb_info->in);
    }
}
/* Or in the stack regs, hard regs and early clobber regs into the
   ur_in sets of all of the blocks.  */

static void
df_urec_local_finalize (struct dataflow *dflow, bitmap all_blocks)
{
  struct df *df = dflow->df;
  struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
  bitmap tmp = BITMAP_ALLOC (NULL);
  bitmap_iterator bi;
  unsigned int bb_index;
  struct df_urec_problem_data *problem_data
    = (struct df_urec_problem_data *) dflow->problem_data;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
      struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);

      if (bb_index != ENTRY_BLOCK && bb_index != EXIT_BLOCK)
	{
	  if (problem_data->earlyclobbers_found)
	    bitmap_ior_into (bb_info->in, bb_info->earlyclobber);

#ifdef STACK_REGS
	  /* We can not use the same stack register for an uninitialized
	     pseudo-register and another living pseudo-register
	     because if the uninitialized pseudo-register dies,
	     the subsequent reg-stack pass will be confused (it will
	     believe that the other register dies).  */
	  bitmap_ior_into (bb_info->in, problem_data->stack_regs);
	  bitmap_ior_into (bb_info->out, problem_data->stack_regs);
#endif
	}

      /* No register may reach a location where it is not used.  Thus
	 we trim the rr result to the places where it is used.  */
      bitmap_and_into (bb_info->in, bb_lr_info->in);
      bitmap_and_into (bb_info->out, bb_lr_info->out);

      /* Hard registers may still stick in the ur_out set, but not
	 be in the ur_in set, if their only mention was in a call
	 in this block.  This is because a call kills in the lr
	 problem but does not kill in the rr problem.  To clean
	 this up, we execute the transfer function on the lr_in
	 set and then use that to knock bits out of ur_out.  */
      bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
			    bb_info->kill);
      bitmap_and_into (bb_info->out, tmp);
    }

#ifdef STACK_REGS
  BITMAP_FREE (problem_data->stack_regs);
#endif
  BITMAP_FREE (tmp);
}
/* Confluence function that ignores fake edges.  */

static void
df_urec_confluence_n (struct dataflow *dflow, edge e)
{
  bitmap op1 = df_urec_get_bb_info (dflow, e->dest->index)->in;
  bitmap op2 = df_urec_get_bb_info (dflow, e->src->index)->out;

  if (e->flags & EDGE_FAKE)
    return;

  bitmap_ior_into (op1, op2);
}
/* Transfer function.  */

static bool
df_urec_transfer_function (struct dataflow *dflow, int bb_index)
{
  struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
  bitmap in = bb_info->in;
  bitmap out = bb_info->out;
  bitmap gen = bb_info->gen;
  bitmap kill = bb_info->kill;

  return bitmap_ior_and_compl (out, gen, in, kill);
}
/* Free all storage associated with the problem.  */

static void
df_urec_free (struct dataflow *dflow)
{
  if (dflow->block_info)
    {
      unsigned int i;

      for (i = 0; i < dflow->block_info_size; i++)
	{
	  struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, i);
	  if (bb_info)
	    {
	      BITMAP_FREE (bb_info->gen);
	      BITMAP_FREE (bb_info->kill);
	      BITMAP_FREE (bb_info->in);
	      BITMAP_FREE (bb_info->out);
	      BITMAP_FREE (bb_info->earlyclobber);
	    }
	}

      free_alloc_pool (dflow->block_pool);
      dflow->block_info_size = 0;
      free (dflow->block_info);
      free (dflow->problem_data);
    }
  free (dflow);
}
/* Debugging info.  */

static void
df_urec_dump (struct dataflow *dflow, FILE *file)
{
  basic_block bb;

  if (!dflow->block_info)
    return;

  fprintf (file, "Undefined regs:\n");

  FOR_ALL_BB (bb)
    {
      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb->index);
      df_print_bb_index (bb, file);

      if (!bb_info->in)
	continue;

      fprintf (file, "  in  \t");
      dump_bitmap (file, bb_info->in);
      fprintf (file, "  gen \t");
      dump_bitmap (file, bb_info->gen);
      fprintf (file, "  kill\t");
      dump_bitmap (file, bb_info->kill);
      fprintf (file, "  ec\t");
      dump_bitmap (file, bb_info->earlyclobber);
      fprintf (file, "  out \t");
      dump_bitmap (file, bb_info->out);
    }
}
/* All of the information associated with every instance of the problem.  */

static struct df_problem problem_UREC =
{
  DF_UREC,                    /* Problem id.  */
  DF_FORWARD,                 /* Direction.  */
  df_urec_alloc,              /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_urec_free_bb_info,       /* Free basic block info.  */
  df_urec_local_compute,      /* Local compute function.  */
  df_urec_init,               /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  df_urec_confluence_n,       /* Confluence operator n.  */
  df_urec_transfer_function,  /* Transfer function.  */
  df_urec_local_finalize,     /* Finalize function.  */
  df_urec_free,               /* Free all of the problem information.  */
  df_urec_dump,               /* Debugging.  */
  df_lr_add_problem,          /* Dependent problem.  */
  0                           /* Changeable flags.  */
};
/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

struct dataflow *
df_urec_add_problem (struct df *df, int flags)
{
  return df_add_problem (df, &problem_UREC, flags);
}
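
/* Illustrative sketch, not code from this file: a register-allocation
   pass that wants this solution would typically register the problem
   with the driver before analyzing, along the lines of

     struct df *df = df_init (DF_HARD_REGS);
     df_urec_add_problem (df, 0);
     df_analyze (df);
     ...
     bitmap live_in = DF_RA_LIVE_IN (df, bb);
     ...
     df_finish (df);

   assuming the df_init / df_analyze / df_finish entry points declared
   in df.h; see df-core.c for the actual driver interface.  */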
/*----------------------------------------------------------------------------
   CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS

   Link either the defs to the uses and / or the uses to the defs.

   These problems are set up like the other dataflow problems so that
   they nicely fit into the framework.  They are much simpler and only
   involve a single traversal of instructions and an examination of
   the reaching defs information (the dependent problem).
----------------------------------------------------------------------------*/
/* Create def-use or use-def chains.  */

static void
df_chain_alloc (struct dataflow *dflow,
		bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
		bitmap all_blocks ATTRIBUTE_UNUSED)
{
  struct df *df = dflow->df;
  unsigned int i;

  /* Wholesale destruction of the old chains.  */
  if (dflow->block_pool)
    free_alloc_pool (dflow->block_pool);

  dflow->block_pool = create_alloc_pool ("df_chain_chain_block pool",
					 sizeof (struct df_link), 100);

  if (dflow->flags & DF_DU_CHAIN)
    {
      df_reorganize_refs (&df->def_info);

      /* Clear out the pointers from the refs.  */
      for (i = 0; i < DF_DEFS_SIZE (df); i++)
	{
	  struct df_ref *ref = df->def_info.refs[i];
	  DF_REF_CHAIN (ref) = NULL;
	}
    }

  if (dflow->flags & DF_UD_CHAIN)
    {
      df_reorganize_refs (&df->use_info);
      for (i = 0; i < DF_USES_SIZE (df); i++)
	{
	  struct df_ref *ref = df->use_info.refs[i];
	  DF_REF_CHAIN (ref) = NULL;
	}
    }
}
/* Reset all def_use and use_def chains in INSN.  */

static void
df_chain_insn_reset (struct dataflow *dflow, rtx insn)
{
  struct df *df = dflow->df;
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  struct df_ref *ref;

  if (uid < df->insns_size)
    insn_info = DF_INSN_UID_GET (df, uid);

  if (insn_info)
    {
      if (dflow->flags & DF_DU_CHAIN)
	{
	  ref = insn_info->defs;
	  while (ref)
	    {
	      DF_REF_CHAIN (ref) = NULL;
	      ref = ref->next_ref;
	    }
	}

      if (dflow->flags & DF_UD_CHAIN)
	{
	  ref = insn_info->uses;
	  while (ref)
	    {
	      DF_REF_CHAIN (ref) = NULL;
	      ref = ref->next_ref;
	    }
	}
    }
}
/* Reset all def_use and use_def chains in basic block.  */

static void
df_chain_bb_reset (struct dataflow *dflow, unsigned int bb_index)
{
  struct df *df = dflow->df;
  rtx insn;
  basic_block bb = BASIC_BLOCK (bb_index);

  /* Someone deleted the basic block out from under us.  */
  if (!bb)
    return;

  FOR_BB_INSNS (bb, insn)
    {
      if (INSN_P (insn))
	{
	  /* Record defs within INSN.  */
	  df_chain_insn_reset (dflow, insn);
	}
    }

  /* Get rid of any chains in artificial uses or defs.  */
  if (dflow->flags & DF_DU_CHAIN)
    {
      struct df_ref *def;
      def = df_get_artificial_defs (df, bb_index);
      while (def)
	{
	  DF_REF_CHAIN (def) = NULL;
	  def = def->next_ref;
	}
    }

  if (dflow->flags & DF_UD_CHAIN)
    {
      struct df_ref *use;
      use = df_get_artificial_uses (df, bb_index);
      while (use)
	{
	  DF_REF_CHAIN (use) = NULL;
	  use = use->next_ref;
	}
    }
}
/* Reset all of the chains when the set of basic blocks changes.  */

static void
df_chain_reset (struct dataflow *dflow, bitmap blocks_to_clear)
{
  bitmap_iterator bi;
  unsigned int bb_index;

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_clear, 0, bb_index, bi)
    {
      df_chain_bb_reset (dflow, bb_index);
    }

  free_alloc_pool (dflow->block_pool);
  dflow->block_pool = NULL;
}
/* Create the chains for a list of USEs.  */

static void
df_chain_create_bb_process_use (struct dataflow *dflow,
				bitmap local_rd,
				struct df_ref *use,
				enum df_ref_flags top_flag)
{
  struct df *df = dflow->df;
  bitmap_iterator bi;
  unsigned int def_index;

  while (use)
    {
      /* Do not want to go through this for an uninitialized var.  */
      unsigned int uregno = DF_REF_REGNO (use);
      int count = DF_REG_DEF_GET (df, uregno)->n_refs;
      if (count)
	{
	  if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
	    {
	      unsigned int first_index = DF_REG_DEF_GET (df, uregno)->begin;
	      unsigned int last_index = first_index + count - 1;

	      EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
		{
		  struct df_ref *def;
		  if (def_index > last_index)
		    break;

		  def = DF_DEFS_GET (df, def_index);
		  if (dflow->flags & DF_DU_CHAIN)
		    df_chain_create (dflow, def, use);
		  if (dflow->flags & DF_UD_CHAIN)
		    df_chain_create (dflow, use, def);
		}
	    }
	}
      use = use->next_ref;
    }
}
/* Reset the storage pool that the def-use or use-def chains have been
   allocated in.  We do not need to readjust the pointers in the refs;
   these have already been cleaned out.  */

/* Create chains from reaching defs bitmaps for basic block BB.  */
static void
df_chain_create_bb (struct dataflow *dflow,
		    struct dataflow *rd_dflow,
		    unsigned int bb_index)
{
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (rd_dflow, bb_index);
  rtx insn;
  bitmap cpy = BITMAP_ALLOC (NULL);
  struct df *df = dflow->df;
  struct df_ref *def;

  bitmap_copy (cpy, bb_info->in);

  /* Since we are going forwards, process the artificial uses first
     then the artificial defs second.  */

  /* Create the chains for the artificial uses from the EH_USES at the
     beginning of the block.  */
  df_chain_create_bb_process_use (dflow, cpy,
				  df_get_artificial_uses (df, bb->index),
				  DF_REF_AT_TOP);

  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
      {
	unsigned int dregno = DF_REF_REGNO (def);
	if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
	  bitmap_clear_range (cpy,
			      DF_REG_DEF_GET (df, dregno)->begin,
			      DF_REG_DEF_GET (df, dregno)->n_refs);
	bitmap_set_bit (cpy, DF_REF_ID (def));
      }

  /* Process the regular instructions next.  */
  FOR_BB_INSNS (bb, insn)
    {
      unsigned int uid = INSN_UID (insn);

      if (!INSN_P (insn))
	continue;

      /* Now scan the uses and link them up with the defs that remain
	 in the cpy vector.  */
      df_chain_create_bb_process_use (dflow, cpy,
				      DF_INSN_UID_USES (df, uid), 0);

      /* Since we are going forwards, process the defs second.  This
	 pass only changes the bits in cpy.  */
      for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
	{
	  unsigned int dregno = DF_REF_REGNO (def);
	  if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
	    bitmap_clear_range (cpy,
				DF_REG_DEF_GET (df, dregno)->begin,
				DF_REG_DEF_GET (df, dregno)->n_refs);
	  if (!(DF_REF_FLAGS (def)
		& (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
	    bitmap_set_bit (cpy, DF_REF_ID (def));
	}
    }

  /* Create the chains for the artificial uses of the hard registers
     at the end of the block.  */
  df_chain_create_bb_process_use (dflow, cpy,
				  df_get_artificial_uses (df, bb->index), 0);

  BITMAP_FREE (cpy);
}
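
/* Worked example of the walk above (illustrative only): suppose BB is

     insn 10:  r5 = ...        def d1 of r5
     insn 11:  ... = r5        use u1 of r5
     insn 12:  r5 = ...        def d2 of r5

   and some def d0 of r5 reaches the block in bb_info->in.  Processing
   insn 10 clears r5's slice of CPY (killing d0) and sets the bit for
   d1; at insn 11 the use u1 is therefore chained only to d1 (d1 -> u1
   for DF_DU_CHAIN, u1 -> d1 for DF_UD_CHAIN); insn 12 then replaces d1
   with d2 in CPY, so any later uses would link to d2 instead.  */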
/* Create def-use chains from reaching use bitmaps for basic blocks
   in BLOCKS.  */

static void
df_chain_finalize (struct dataflow *dflow, bitmap all_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;
  struct df *df = dflow->df;
  struct dataflow *rd_dflow = df->problems_by_index[DF_RD];

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      df_chain_create_bb (dflow, rd_dflow, bb_index);
    }
}
/* Free all storage associated with the problem.  */

static void
df_chain_free (struct dataflow *dflow)
{
  free_alloc_pool (dflow->block_pool);
  free (dflow);
}
/* Debugging info.  */

static void
df_chains_dump (struct dataflow *dflow, FILE *file)
{
  struct df *df = dflow->df;
  unsigned int j;

  if (dflow->flags & DF_DU_CHAIN)
    {
      fprintf (file, "Def-use chains:\n");
      for (j = 0; j < df->def_info.bitmap_size; j++)
	{
	  struct df_ref *def = DF_DEFS_GET (df, j);
	  if (def)
	    {
	      fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
		       j, DF_REF_BBNO (def),
		       DF_REF_INSN (def) ?
		       DF_INSN_LUID (df, DF_REF_INSN (def)) :
		       -1,
		       DF_REF_INSN (def) ? DF_REF_INSN_UID (def) : -1,
		       DF_REF_REGNO (def));
	      if (def->flags & DF_REF_READ_WRITE)
		fprintf (file, "read/write ");
	      df_chain_dump (DF_REF_CHAIN (def), file);
	      fprintf (file, "\n");
	    }
	}
    }

  if (dflow->flags & DF_UD_CHAIN)
    {
      fprintf (file, "Use-def chains:\n");
      for (j = 0; j < df->use_info.bitmap_size; j++)
	{
	  struct df_ref *use = DF_USES_GET (df, j);
	  if (use)
	    {
	      fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
		       j, DF_REF_BBNO (use),
		       DF_REF_INSN (use) ?
		       DF_INSN_LUID (df, DF_REF_INSN (use))
		       : -1,
		       DF_REF_INSN (DF_USES_GET (df, j)) ?
		       DF_REF_INSN_UID (DF_USES_GET (df, j))
		       : -1,
		       DF_REF_REGNO (use));
	      if (use->flags & DF_REF_READ_WRITE)
		fprintf (file, "read/write ");
	      if (use->flags & DF_REF_STRIPPED)
		fprintf (file, "stripped ");
	      if (use->flags & DF_REF_IN_NOTE)
		fprintf (file, "note ");
	      df_chain_dump (DF_REF_CHAIN (use), file);
	      fprintf (file, "\n");
	    }
	}
    }
}
/* All of the information associated with every instance of the problem.  */

static struct df_problem problem_CHAIN =
{
  DF_CHAIN,                   /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_chain_alloc,             /* Allocate the problem specific data.  */
  df_chain_reset,             /* Reset global information.  */
  NULL,                       /* Free basic block info.  */
  NULL,                       /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  df_chain_finalize,          /* Finalize function.  */
  df_chain_free,              /* Free all of the problem information.  */
  df_chains_dump,             /* Debugging.  */
  df_rd_add_problem,          /* Dependent problem.  */
  0                           /* Changeable flags.  */
};
/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

struct dataflow *
df_chain_add_problem (struct df *df, int flags)
{
  return df_add_problem (df, &problem_CHAIN, flags);
}
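
/* Illustrative sketch, not code from this file: once DF_DU_CHAIN has
   been built, a client can walk from a def to every use it reaches
   through the df_link list hanging off the ref:

     struct df_link *link;
     for (link = DF_REF_CHAIN (def); link; link = link->next)
       {
         struct df_ref *use = link->ref;
         ... examine DF_REF_INSN (use), DF_REF_REGNO (use), ...
       }

   With DF_UD_CHAIN the same loop applied to a use yields its reaching
   defs.  */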
/*----------------------------------------------------------------------------
   REGISTER INFORMATION

   This pass properly computes REG_DEAD and REG_UNUSED notes.

   If the DF_RI_LIFE flag is set the following vectors containing
   information about register usage are properly set: REG_N_REFS,
   REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH, REG_N_CALLS_CROSSED,
   REG_N_THROWING_CALLS_CROSSED and REG_BASIC_BLOCK.
----------------------------------------------------------------------------*/
#ifdef REG_DEAD_DEBUGGING
static void
print_note (char *prefix, rtx insn, rtx note)
{
  fprintf (stderr, "%s %d ", prefix, INSN_UID (insn));
  print_rtl (stderr, note);
  fprintf (stderr, "\n");
}
#endif
/* Allocate the lifetime information.  */

static void
df_ri_alloc (struct dataflow *dflow,
	     bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
	     bitmap all_blocks ATTRIBUTE_UNUSED)
{
  int i;
  struct df *df = dflow->df;

  if (dflow->flags & DF_RI_LIFE)
    {
      max_regno = max_reg_num ();
      allocate_reg_info (max_regno, FALSE, FALSE);

      /* Reset all the data we'll collect.  */
      for (i = 0; i < max_regno; i++)
	{
	  REG_N_SETS (i) = DF_REG_DEF_COUNT (df, i);
	  REG_N_REFS (i) = DF_REG_USE_COUNT (df, i) + REG_N_SETS (i);
	  REG_N_DEATHS (i) = 0;
	  REG_N_CALLS_CROSSED (i) = 0;
	  REG_N_THROWING_CALLS_CROSSED (i) = 0;
	  REG_LIVE_LENGTH (i) = 0;
	  REG_FREQ (i) = 0;
	  REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
	}
    }
}
/* After reg-stack, the x86 floating point stack regs are difficult to
   analyze because of all of the pushes, pops and rotations.  Thus, we
   just leave the notes alone.  */

static bool
df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED)
{
#ifdef STACK_REGS
  return (regstack_completed
	  && IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG));
#else
  return false;
#endif
}
/* Remove all of the REG_DEAD or REG_UNUSED notes from INSN.  */

static void
df_kill_notes (rtx insn, int flags)
{
  rtx *pprev = &REG_NOTES (insn);
  rtx link = *pprev;

  while (link)
    {
      switch (REG_NOTE_KIND (link))
	{
	case REG_DEAD:
	  if (flags & DF_RI_LIFE)
	    if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
	      REG_N_DEATHS (REGNO (XEXP (link, 0)))++;

	  /* Fallthru.  */
	case REG_UNUSED:
	  if (!df_ignore_stack_reg (REGNO (XEXP (link, 0))))
	    {
	      rtx next = XEXP (link, 1);
#ifdef REG_DEAD_DEBUGGING
	      print_note ("deleting: ", insn, link);
#endif
	      free_EXPR_LIST_node (link);
	      *pprev = link = next;
	    }
	  else
	    {
	      pprev = &XEXP (link, 1);
	      link = *pprev;
	    }
	  break;

	default:
	  pprev = &XEXP (link, 1);
	  link = *pprev;
	  break;
	}
    }
}
/* Set the REG_UNUSED notes for the multiword hardreg defs in INSN
   based on the bits in LIVE.  Do not generate notes for registers in
   artificial uses.  DO_NOT_GEN is updated so that REG_DEAD notes are
   not generated if the reg is both read and written by the
   instruction.  */

static void
df_set_unused_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
			    bitmap live, bitmap do_not_gen,
			    bitmap artificial_uses, int flags)
{
  bool all_dead = true;
  struct df_link *regs = mws->regs;
  unsigned int regno = DF_REF_REGNO (regs->ref);

#ifdef REG_DEAD_DEBUGGING
  fprintf (stderr, "mw unused looking at %d\n", DF_REF_REGNO (regs->ref));
  df_ref_debug (regs->ref, stderr);
#endif
  while (regs)
    {
      unsigned int regno = DF_REF_REGNO (regs->ref);
      if ((bitmap_bit_p (live, regno))
	  || bitmap_bit_p (artificial_uses, regno))
	{
	  all_dead = false;
	  break;
	}
      regs = regs->next;
    }

  if (all_dead)
    {
      struct df_link *regs = mws->regs;
      rtx note = alloc_EXPR_LIST (REG_UNUSED, *DF_REF_LOC (regs->ref),
				  REG_NOTES (insn));
      REG_NOTES (insn) = note;
#ifdef REG_DEAD_DEBUGGING
      print_note ("adding 1: ", insn, note);
#endif
      bitmap_set_bit (do_not_gen, regno);
      /* Only do this if the value is totally dead.  */
      if (flags & DF_RI_LIFE)
	{
	  REG_N_DEATHS (regno)++;
	  REG_LIVE_LENGTH (regno)++;
	}
    }
  else
    {
      struct df_link *regs = mws->regs;
      while (regs)
	{
	  struct df_ref *ref = regs->ref;

	  regno = DF_REF_REGNO (ref);
	  if ((!bitmap_bit_p (live, regno))
	      && (!bitmap_bit_p (artificial_uses, regno)))
	    {
	      rtx note = alloc_EXPR_LIST (REG_UNUSED, regno_reg_rtx[regno],
					  REG_NOTES (insn));
	      REG_NOTES (insn) = note;
#ifdef REG_DEAD_DEBUGGING
	      print_note ("adding 2: ", insn, note);
#endif
	    }
	  bitmap_set_bit (do_not_gen, regno);
	  regs = regs->next;
	}
    }
}
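
/* Worked example (register names made up): if MWS describes a DImode
   value occupying the hard register pair (r0,r1) and neither r0 nor r1
   is live or artificially used after the def, a single REG_UNUSED note
   naming the whole multi-register location is added and the register
   is recorded in DO_NOT_GEN; if, say, only r1 is still live, separate
   REG_UNUSED notes are generated just for the dead parts.  */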
/* Set the REG_DEAD notes for the multiword hardreg use in INSN based
   on the bits in LIVE.  DO_NOT_GEN is used to keep REG_DEAD notes
   from being set if the instruction both reads and writes the
   register.  */

static void
df_set_dead_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
			  bitmap live, bitmap do_not_gen,
			  bitmap artificial_uses, int flags)
{
  bool all_dead = true;
  struct df_link *regs = mws->regs;
  unsigned int regno = DF_REF_REGNO (regs->ref);

#ifdef REG_DEAD_DEBUGGING
  fprintf (stderr, "mw looking at %d\n", DF_REF_REGNO (regs->ref));
  df_ref_debug (regs->ref, stderr);
#endif
  while (regs)
    {
      unsigned int regno = DF_REF_REGNO (regs->ref);
      if ((bitmap_bit_p (live, regno))
	  || bitmap_bit_p (artificial_uses, regno))
	{
	  all_dead = false;
	  break;
	}
      regs = regs->next;
    }

  if (all_dead)
    {
      if (!bitmap_bit_p (do_not_gen, regno))
	{
	  /* Add a dead note for the entire multi word register.  */
	  struct df_link *regs = mws->regs;
	  rtx note = alloc_EXPR_LIST (REG_DEAD, *DF_REF_LOC (regs->ref),
				      REG_NOTES (insn));
	  REG_NOTES (insn) = note;
#ifdef REG_DEAD_DEBUGGING
	  print_note ("adding 1: ", insn, note);
#endif

	  if (flags & DF_RI_LIFE)
	    {
	      struct df_link *regs = mws->regs;
	      while (regs)
		{
		  struct df_ref *ref = regs->ref;
		  regno = DF_REF_REGNO (ref);
		  REG_N_DEATHS (regno)++;
		  regs = regs->next;
		}
	    }
	}
    }
  else
    {
      struct df_link *regs = mws->regs;
      while (regs)
	{
	  struct df_ref *ref = regs->ref;

	  regno = DF_REF_REGNO (ref);
	  if ((!bitmap_bit_p (live, regno))
	      && (!bitmap_bit_p (artificial_uses, regno))
	      && (!bitmap_bit_p (do_not_gen, regno)))
	    {
	      rtx note = alloc_EXPR_LIST (REG_DEAD, regno_reg_rtx[regno],
					  REG_NOTES (insn));
	      REG_NOTES (insn) = note;
	      if (flags & DF_RI_LIFE)
		REG_N_DEATHS (regno)++;
#ifdef REG_DEAD_DEBUGGING
	      print_note ("adding 2: ", insn, note);
#endif
	    }
	  regs = regs->next;
	}
    }
}
/* Create a REG_UNUSED note if necessary for DEF in INSN updating LIVE
   and DO_NOT_GEN.  Do not generate notes for registers in artificial
   uses.  */

static void
df_create_unused_note (basic_block bb, rtx insn, struct df_ref *def,
		       bitmap live, bitmap do_not_gen, bitmap artificial_uses,
		       bitmap local_live, bitmap local_processed,
		       int flags, int luid)
{
  unsigned int dregno = DF_REF_REGNO (def);

#ifdef REG_DEAD_DEBUGGING
  fprintf (stderr, "  regular looking at def ");
  df_ref_debug (def, stderr);
#endif

  if (bitmap_bit_p (live, dregno))
    {
      if (flags & DF_RI_LIFE)
	{
	  /* If we have seen this regno, then it has already been
	     processed correctly with the per insn increment.  If we
	     have not seen it we need to add the length from here to
	     the end of the block to the live length.  */
	  if (bitmap_bit_p (local_processed, dregno))
	    {
	      if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
		bitmap_clear_bit (local_live, dregno);
	    }
	  else
	    {
	      bitmap_set_bit (local_processed, dregno);
	      REG_LIVE_LENGTH (dregno) += luid;
	    }
	}
    }
  else if ((!(DF_REF_FLAGS (def) & DF_REF_MW_HARDREG))
	   && (!bitmap_bit_p (artificial_uses, dregno))
	   && (!df_ignore_stack_reg (dregno)))
    {
      rtx reg = GET_CODE (*DF_REF_LOC (def)) == SUBREG ?
	SUBREG_REG (*DF_REF_LOC (def)) : *DF_REF_LOC (def);
      rtx note = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
      REG_NOTES (insn) = note;
#ifdef REG_DEAD_DEBUGGING
      print_note ("adding 3: ", insn, note);
#endif
      if (flags & DF_RI_LIFE)
	{
	  REG_N_DEATHS (dregno)++;
	  REG_LIVE_LENGTH (dregno)++;
	}
    }

  if ((flags & DF_RI_LIFE) && (dregno >= FIRST_PSEUDO_REGISTER))
    {
      REG_FREQ (dregno) += REG_FREQ_FROM_BB (bb);
      if (REG_BASIC_BLOCK (dregno) == REG_BLOCK_UNKNOWN)
	REG_BASIC_BLOCK (dregno) = bb->index;
      else if (REG_BASIC_BLOCK (dregno) != bb->index)
	REG_BASIC_BLOCK (dregno) = REG_BLOCK_GLOBAL;
    }

  if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
    bitmap_set_bit (do_not_gen, dregno);

  /* Kill this register if it is not a subreg store.  */
  if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
    bitmap_clear_bit (live, dregno);
}
/* Recompute the REG_DEAD and REG_UNUSED notes and compute register
   info: lifetime, bb, and number of defs and uses for basic block
   BB.  The three bitvectors are scratch regs used here.  */

static void
df_ri_bb_compute (struct dataflow *dflow, unsigned int bb_index,
		  bitmap live, bitmap do_not_gen, bitmap artificial_uses,
		  bitmap local_live, bitmap local_processed, bitmap setjumps_crossed)
{
  struct df *df = dflow->df;
  basic_block bb = BASIC_BLOCK (bb_index);
  rtx insn;
  struct df_ref *def;
  struct df_ref *use;
  int luid = 0;

  bitmap_copy (live, df_get_live_out (df, bb));
  bitmap_clear (artificial_uses);

  if (dflow->flags & DF_RI_LIFE)
    {
      /* Process the regs live at the end of the block.  Mark them as
	 not local to any one basic block.  */
      bitmap_iterator bi;
      unsigned int regno;
      EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
	REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
    }

  /* Process the artificial defs and uses at the bottom of the block
     to begin processing.  */
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
      bitmap_clear_bit (live, DF_REF_REGNO (def));

  for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
    if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
      {
	unsigned int regno = DF_REF_REGNO (use);
	bitmap_set_bit (live, regno);

	/* Notes are not generated for any of the artificial registers
	   at the bottom of the block.  */
	bitmap_set_bit (artificial_uses, regno);
      }

  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      unsigned int uid = INSN_UID (insn);
      unsigned int regno;
      bitmap_iterator bi;
      struct df_mw_hardreg *mws;

      if (!INSN_P (insn))
	continue;

      if (dflow->flags & DF_RI_LIFE)
	{
	  /* Increment the live_length for all of the registers that
	     are referenced in this block and live at this
	     particular point.  */
	  EXECUTE_IF_SET_IN_BITMAP (local_live, 0, regno, bi)
	    {
	      REG_LIVE_LENGTH (regno)++;
	    }
	  luid++;
	}

      bitmap_clear (do_not_gen);
      df_kill_notes (insn, dflow->flags);

      /* Process the defs.  */
      if (CALL_P (insn))
	{
	  if (dflow->flags & DF_RI_LIFE)
	    {
	      bool can_throw = can_throw_internal (insn);
	      bool set_jump = (find_reg_note (insn, REG_SETJMP, NULL) != NULL);
	      EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
		{
		  REG_N_CALLS_CROSSED (regno)++;
		  if (can_throw)
		    REG_N_THROWING_CALLS_CROSSED (regno)++;

		  /* We have a problem with any pseudoreg that lives
		     across the setjmp.  ANSI says that if a user
		     variable does not change in value between the
		     setjmp and the longjmp, then the longjmp
		     preserves it.  This includes longjmp from a place
		     where the pseudo appears dead.  (In principle,
		     the value still exists if it is in scope.)  If
		     the pseudo goes in a hard reg, some other value
		     may occupy that hard reg where this pseudo is
		     dead, thus clobbering the pseudo.  Conclusion:
		     such a pseudo must not go in a hard reg.  */
		  if (set_jump && regno >= FIRST_PSEUDO_REGISTER)
		    bitmap_set_bit (setjumps_crossed, regno);
		}
	    }

	  /* We only care about real sets for calls.  Clobbers only
	     may clobber and cannot be depended on.  */
	  for (mws = DF_INSN_UID_MWS (df, uid); mws; mws = mws->next)
	    {
	      if ((mws->type == DF_REF_REG_DEF)
		  && !df_ignore_stack_reg (REGNO (mws->mw_reg)))
		df_set_unused_notes_for_mw (insn, mws, live, do_not_gen,
					    artificial_uses, dflow->flags);
	    }

	  /* All of the defs except the return value are some sort of
	     clobber.  This code is for the return.  */
	  for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
	    if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
	      df_create_unused_note (bb, insn, def, live, do_not_gen,
				     artificial_uses, local_live,
				     local_processed, dflow->flags, luid);
	}
      else
	{
	  /* Regular insn.  */
	  for (mws = DF_INSN_UID_MWS (df, uid); mws; mws = mws->next)
	    {
	      if (mws->type == DF_REF_REG_DEF)
		df_set_unused_notes_for_mw (insn, mws, live, do_not_gen,
					    artificial_uses, dflow->flags);
	    }

	  for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
	    df_create_unused_note (bb, insn, def, live, do_not_gen,
				   artificial_uses, local_live,
				   local_processed, dflow->flags, luid);
	}

      /* Process the uses.  */
      for (mws = DF_INSN_UID_MWS (df, uid); mws; mws = mws->next)
	{
	  if ((mws->type != DF_REF_REG_DEF)
	      && !df_ignore_stack_reg (REGNO (mws->mw_reg)))
	    df_set_dead_notes_for_mw (insn, mws, live, do_not_gen,
				      artificial_uses, dflow->flags);
	}

      for (use = DF_INSN_UID_USES (df, uid); use; use = use->next_ref)
	{
	  unsigned int uregno = DF_REF_REGNO (use);

	  if ((dflow->flags & DF_RI_LIFE) && (uregno >= FIRST_PSEUDO_REGISTER))
	    {
	      REG_FREQ (uregno) += REG_FREQ_FROM_BB (bb);
	      if (REG_BASIC_BLOCK (uregno) == REG_BLOCK_UNKNOWN)
		REG_BASIC_BLOCK (uregno) = bb->index;
	      else if (REG_BASIC_BLOCK (uregno) != bb->index)
		REG_BASIC_BLOCK (uregno) = REG_BLOCK_GLOBAL;
	    }

#ifdef REG_DEAD_DEBUGGING
	  fprintf (stderr, "  regular looking at use ");
	  df_ref_debug (use, stderr);
#endif
	  if (!bitmap_bit_p (live, uregno))
	    {
	      if ((!(DF_REF_FLAGS (use) & DF_REF_MW_HARDREG))
		  && (!bitmap_bit_p (do_not_gen, uregno))
		  && (!bitmap_bit_p (artificial_uses, uregno))
		  && (!(DF_REF_FLAGS (use) & DF_REF_READ_WRITE))
		  && (!df_ignore_stack_reg (uregno)))
		{
		  rtx reg = GET_CODE (*DF_REF_LOC (use)) == SUBREG ?
		    SUBREG_REG (*DF_REF_LOC (use)) : *DF_REF_LOC (use);
		  rtx note = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
		  REG_NOTES (insn) = note;
		  if (dflow->flags & DF_RI_LIFE)
		    REG_N_DEATHS (uregno)++;
#ifdef REG_DEAD_DEBUGGING
		  print_note ("adding 4: ", insn, note);
#endif
		}
	      /* This register is now live.  */
	      bitmap_set_bit (live, uregno);

	      if (dflow->flags & DF_RI_LIFE)
		{
		  /* If we have seen this regno, then it has already
		     been processed correctly with the per insn
		     increment.  If we have not seen it we set the bit
		     so that it begins to get processed locally.  Note
		     that we don't even get here if the variable was
		     live at the end of the block since just a ref
		     inside the block does not affect the
		     calculations.  */
		  REG_LIVE_LENGTH (uregno)++;
		  bitmap_set_bit (local_live, uregno);
		  bitmap_set_bit (local_processed, uregno);
		}
	    }
	}
    }

  if (dflow->flags & DF_RI_LIFE)
    {
      /* Add the length of the block to all of the registers that were
	 not referenced, but still live in this block.  */
      bitmap_iterator bi;
      unsigned int regno;
      bitmap_and_compl_into (live, local_processed);
      EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
	{
	  REG_LIVE_LENGTH (regno) += luid;
	}
      bitmap_clear (local_processed);
      bitmap_clear (local_live);
    }
}
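
/* Short worked example of the reverse scan above (register numbers
   made up): for an insn "r5 = r3 + r4" where r5 is not live afterwards
   and r3 and r4 have no later uses, the def of r5 receives a
   REG_UNUSED note via df_create_unused_note, and the uses of r3 and r4
   each receive a REG_DEAD note because they are not yet in LIVE when
   the uses are scanned; both are then added to LIVE for the insns
   earlier in the block.  */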
/* Compute register info: lifetime, bb, and number of defs and uses.  */

static void
df_ri_compute (struct dataflow *dflow, bitmap all_blocks ATTRIBUTE_UNUSED,
	       bitmap blocks_to_scan)
{
  unsigned int bb_index;
  bitmap_iterator bi;
  bitmap live = BITMAP_ALLOC (NULL);
  bitmap do_not_gen = BITMAP_ALLOC (NULL);
  bitmap artificial_uses = BITMAP_ALLOC (NULL);
  bitmap local_live = NULL;
  bitmap local_processed = NULL;
  bitmap setjumps_crossed = NULL;

  if (dflow->flags & DF_RI_LIFE)
    {
      local_live = BITMAP_ALLOC (NULL);
      local_processed = BITMAP_ALLOC (NULL);
      setjumps_crossed = BITMAP_ALLOC (NULL);
    }

#ifdef REG_DEAD_DEBUGGING
  df_lr_dump (dflow->df->problems_by_index[DF_LR], stderr);
  print_rtl_with_bb (stderr, get_insns ());
#endif

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_scan, 0, bb_index, bi)
    {
      df_ri_bb_compute (dflow, bb_index, live, do_not_gen, artificial_uses,
			local_live, local_processed, setjumps_crossed);
    }

  BITMAP_FREE (live);
  BITMAP_FREE (do_not_gen);
  BITMAP_FREE (artificial_uses);
  if (dflow->flags & DF_RI_LIFE)
    {
      bitmap_iterator bi;
      unsigned int regno;
      /* See the setjump comment in df_ri_bb_compute.  */
      EXECUTE_IF_SET_IN_BITMAP (setjumps_crossed, 0, regno, bi)
	{
	  REG_BASIC_BLOCK (regno) = REG_BLOCK_UNKNOWN;
	  REG_LIVE_LENGTH (regno) = -1;
	}

      BITMAP_FREE (local_live);
      BITMAP_FREE (local_processed);
      BITMAP_FREE (setjumps_crossed);
    }
}
/* Free all storage associated with the problem.  */

static void
df_ri_free (struct dataflow *dflow)
{
  free (dflow->problem_data);
  free (dflow);
}
/* Debugging info.  */

static void
df_ri_dump (struct dataflow *dflow, FILE *file)
{
  print_rtl_with_bb (file, get_insns ());

  if (dflow->flags & DF_RI_LIFE)
    {
      fprintf (file, "Register info:\n");
      dump_flow_info (file, -1);
    }
}
/* All of the information associated with every instance of the problem.  */

static struct df_problem problem_RI =
{
  DF_RI,                      /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_ri_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  NULL,                       /* Free basic block info.  */
  df_ri_compute,              /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_ri_free,                 /* Free all of the problem information.  */
  df_ri_dump,                 /* Debugging.  */

  /* Technically this is only dependent on the live registers problem,
     but it will also produce information if one of the uninitialized
     register problems (UR, UREC) is built and run.  */
  df_lr_add_problem,          /* Dependent problem.  */
  0                           /* Changeable flags.  */
};
/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

struct dataflow *
df_ri_add_problem (struct df *df, int flags)
{
  return df_add_problem (df, &problem_RI, flags);
}
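
/* Illustrative sketch, not code from this file: as with the other
   *_add_problem entry points, a pass that wants fresh REG_DEAD and
   REG_UNUSED notes plus the register usage vectors would register this
   problem before running the analyzer, e.g.

     df_ri_add_problem (df, DF_RI_LIFE);
     df_analyze (df);

   With DF_RI_LIFE set, REG_N_SETS, REG_N_REFS, REG_LIVE_LENGTH and the
   related vectors listed in the section comment are refreshed as a
   side effect of the analysis.  */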