1 /* Standard problems for dataflow support routines.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
3 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
9 This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
28 #include "coretypes.h"
32 #include "insn-config.h"
37 #include "alloc-pool.h"
39 #include "hard-reg-set.h"
40 #include "basic-block.h"
49 #define REG_DEAD_DEBUGGING
52 #define DF_SPARSE_THRESHOLD 32
54 static bitmap seen_in_block
= NULL
;
55 static bitmap seen_in_insn
= NULL
;
56 static void df_ri_dump (struct dataflow
*, FILE *);
59 /*----------------------------------------------------------------------------
60 Public functions access functions for the dataflow problems.
61 ----------------------------------------------------------------------------*/
63 /* Create a du or ud chain from SRC to DST and link it into SRC. */
66 df_chain_create (struct dataflow
*dflow
, struct df_ref
*src
, struct df_ref
*dst
)
68 struct df_link
*head
= DF_REF_CHAIN (src
);
69 struct df_link
*link
= pool_alloc (dflow
->block_pool
);;
71 DF_REF_CHAIN (src
) = link
;
78 /* Delete a du or ud chain for REF. If LINK is NULL, delete all
79 chains for ref and check to see if the reverse chains can also be
80 deleted. If LINK is not NULL it must be a link off of ref. In
81 this case, the other end is not deleted. */
84 df_chain_unlink (struct dataflow
*dflow
, struct df_ref
*ref
, struct df_link
*link
)
86 struct df_link
*chain
= DF_REF_CHAIN (ref
);
89 /* Link was the first element in the chain. */
91 DF_REF_CHAIN (ref
) = link
->next
;
94 /* Link is an internal element in the chain. */
95 struct df_link
*prev
= chain
;
100 prev
->next
= chain
->next
;
107 pool_free (dflow
->block_pool
, link
);
111 /* If chain is NULL here, it was because of a recursive call
112 when the other flavor of chains was not built. Just run thru
113 the entire chain calling the other side and then deleting the
117 struct df_link
*next
= chain
->next
;
118 /* Delete the other side if it exists. */
119 df_chain_unlink (dflow
, chain
->ref
, chain
);
126 /* Copy the du or ud chain starting at FROM_REF and attach it to
130 df_chain_copy (struct dataflow
*dflow
,
131 struct df_ref
*to_ref
,
132 struct df_link
*from_ref
)
136 df_chain_create (dflow
, to_ref
, from_ref
->ref
);
137 from_ref
= from_ref
->next
;
142 /* Get the live in set for BB no matter what problem happens to be
146 df_get_live_in (struct df
*df
, basic_block bb
)
148 gcc_assert (df
->problems_by_index
[DF_LR
]);
150 if (df
->problems_by_index
[DF_UREC
])
151 return DF_RA_LIVE_IN (df
, bb
);
152 else if (df
->problems_by_index
[DF_UR
])
153 return DF_LIVE_IN (df
, bb
);
155 return DF_UPWARD_LIVE_IN (df
, bb
);
159 /* Get the live out set for BB no matter what problem happens to be
163 df_get_live_out (struct df
*df
, basic_block bb
)
165 gcc_assert (df
->problems_by_index
[DF_LR
]);
167 if (df
->problems_by_index
[DF_UREC
])
168 return DF_RA_LIVE_OUT (df
, bb
);
169 else if (df
->problems_by_index
[DF_UR
])
170 return DF_LIVE_OUT (df
, bb
);
172 return DF_UPWARD_LIVE_OUT (df
, bb
);
176 /*----------------------------------------------------------------------------
178 ----------------------------------------------------------------------------*/
180 /* Generic versions to get the void* version of the block info. Only
181 used inside the problem instance vectors. */
183 /* Grow the bb_info array. */
186 df_grow_bb_info (struct dataflow
*dflow
)
188 unsigned int new_size
= last_basic_block
+ 1;
189 if (dflow
->block_info_size
< new_size
)
191 new_size
+= new_size
/ 4;
192 dflow
->block_info
= xrealloc (dflow
->block_info
,
193 new_size
*sizeof (void*));
194 memset (dflow
->block_info
+ dflow
->block_info_size
, 0,
195 (new_size
- dflow
->block_info_size
) *sizeof (void *));
196 dflow
->block_info_size
= new_size
;
200 /* Dump a def-use or use-def chain for REF to FILE. */
203 df_chain_dump (struct df_link
*link
, FILE *file
)
205 fprintf (file
, "{ ");
206 for (; link
; link
= link
->next
)
208 fprintf (file
, "%c%d(bb %d insn %d) ",
209 DF_REF_REG_DEF_P (link
->ref
) ? 'd' : 'u',
210 DF_REF_ID (link
->ref
),
211 DF_REF_BBNO (link
->ref
),
212 DF_REF_INSN (link
->ref
) ? DF_REF_INSN_UID (link
->ref
) : -1);
218 /* Print some basic block info as part of df_dump. */
221 df_print_bb_index (basic_block bb
, FILE *file
)
226 fprintf (file
, "( ");
227 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
229 basic_block pred
= e
->src
;
230 fprintf (file
, "%d ", pred
->index
);
232 fprintf (file
, ")->[%d]->( ", bb
->index
);
233 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
235 basic_block succ
= e
->dest
;
236 fprintf (file
, "%d ", succ
->index
);
238 fprintf (file
, ")\n");
242 /* Return a bitmap for REGNO from the cache MAPS. The bitmap is to
243 contain COUNT bits starting at START. These bitmaps are not to be
244 changed since there is a cache of them. */
247 df_ref_bitmap (bitmap
*maps
, unsigned int regno
, int start
, int count
)
249 bitmap ids
= maps
[regno
];
253 unsigned int end
= start
+ count
;;
254 ids
= BITMAP_ALLOC (NULL
);
256 for (i
= start
; i
< end
; i
++)
257 bitmap_set_bit (ids
, i
);
263 /* Make sure that the seen_in_insn and seen_in_block sbitmaps are set
269 seen_in_block
= BITMAP_ALLOC (NULL
);
270 seen_in_insn
= BITMAP_ALLOC (NULL
);
277 BITMAP_FREE (seen_in_block
);
278 BITMAP_FREE (seen_in_insn
);
283 /*----------------------------------------------------------------------------
286 Find the locations in the function where each use site for a pseudo
287 can reach backwards. In and out bitvectors are built for each basic
288 block. The id field in the ref is used to index into these sets.
289 See df.h for details.
291 ----------------------------------------------------------------------------*/
293 /* This problem plays a large number of games for the sake of
296 1) The order of the bits in the bitvectors. After the scanning
297 phase, all of the uses are sorted. All of the uses for the reg 0
298 are first, followed by all uses for reg 1 and so on.
300 2) There are two kill sets, one if the number of uses is less or
301 equal to DF_SPARSE_THRESHOLD and another if it is greater.
303 <= : There is a bitmap for each register, uses_sites[N], that is
304 built on demand. This bitvector contains a 1 for each use or reg
307 > : One level of indirection is used to keep from generating long
308 strings of 1 bits in the kill sets. Bitvectors that are indexed
309 by the regnum are used to represent that there is a killing def
310 for the register. The confluence and transfer functions use
311 these along with the bitmap_clear_range call to remove ranges of
312 bits without actually generating a knockout vector.
314 The kill and sparse_kill and the dense_invalidated_by_call and
315 sparse_invalidated_by call both play this game. */
317 /* Private data used to compute the solution for this problem. These
318 data structures are not accessible outside of this module. */
319 struct df_ru_problem_data
322 bitmap
*use_sites
; /* Bitmap of uses for each pseudo. */
323 unsigned int use_sites_size
; /* Size of use_sites. */
324 /* The set of defs to regs invalidated by call. */
325 bitmap sparse_invalidated_by_call
;
326 /* The set of defs to regs invalidated by call for ru. */
327 bitmap dense_invalidated_by_call
;
330 /* Get basic block info. */
332 struct df_ru_bb_info
*
333 df_ru_get_bb_info (struct dataflow
*dflow
, unsigned int index
)
335 return (struct df_ru_bb_info
*) dflow
->block_info
[index
];
339 /* Set basic block info. */
342 df_ru_set_bb_info (struct dataflow
*dflow
, unsigned int index
,
343 struct df_ru_bb_info
*bb_info
)
345 dflow
->block_info
[index
] = bb_info
;
349 /* Free basic block info. */
352 df_ru_free_bb_info (struct dataflow
*dflow
,
353 basic_block bb ATTRIBUTE_UNUSED
,
356 struct df_ru_bb_info
*bb_info
= (struct df_ru_bb_info
*) vbb_info
;
359 BITMAP_FREE (bb_info
->kill
);
360 BITMAP_FREE (bb_info
->sparse_kill
);
361 BITMAP_FREE (bb_info
->gen
);
362 BITMAP_FREE (bb_info
->in
);
363 BITMAP_FREE (bb_info
->out
);
364 pool_free (dflow
->block_pool
, bb_info
);
369 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
370 not touched unless the block is new. */
373 df_ru_alloc (struct dataflow
*dflow
,
374 bitmap blocks_to_rescan ATTRIBUTE_UNUSED
,
377 unsigned int bb_index
;
379 unsigned int reg_size
= max_reg_num ();
381 if (!dflow
->block_pool
)
382 dflow
->block_pool
= create_alloc_pool ("df_ru_block pool",
383 sizeof (struct df_ru_bb_info
), 50);
385 if (dflow
->problem_data
)
388 struct df_ru_problem_data
*problem_data
389 = (struct df_ru_problem_data
*) dflow
->problem_data
;
391 for (i
= 0; i
< problem_data
->use_sites_size
; i
++)
393 bitmap bm
= problem_data
->use_sites
[i
];
397 problem_data
->use_sites
[i
] = NULL
;
401 if (problem_data
->use_sites_size
< reg_size
)
403 problem_data
->use_sites
404 = xrealloc (problem_data
->use_sites
, reg_size
* sizeof (bitmap
));
405 memset (problem_data
->use_sites
+ problem_data
->use_sites_size
, 0,
406 (reg_size
- problem_data
->use_sites_size
) * sizeof (bitmap
));
407 problem_data
->use_sites_size
= reg_size
;
410 bitmap_clear (problem_data
->sparse_invalidated_by_call
);
411 bitmap_clear (problem_data
->dense_invalidated_by_call
);
415 struct df_ru_problem_data
*problem_data
= XNEW (struct df_ru_problem_data
);
416 dflow
->problem_data
= problem_data
;
418 problem_data
->use_sites
= XCNEWVEC (bitmap
, reg_size
);
419 problem_data
->use_sites_size
= reg_size
;
420 problem_data
->sparse_invalidated_by_call
= BITMAP_ALLOC (NULL
);
421 problem_data
->dense_invalidated_by_call
= BITMAP_ALLOC (NULL
);
424 df_grow_bb_info (dflow
);
426 /* Because of the clustering of all def sites for the same pseudo,
427 we have to process all of the blocks before doing the
430 EXECUTE_IF_SET_IN_BITMAP (all_blocks
, 0, bb_index
, bi
)
432 struct df_ru_bb_info
*bb_info
= df_ru_get_bb_info (dflow
, bb_index
);
435 bitmap_clear (bb_info
->kill
);
436 bitmap_clear (bb_info
->sparse_kill
);
437 bitmap_clear (bb_info
->gen
);
441 bb_info
= (struct df_ru_bb_info
*) pool_alloc (dflow
->block_pool
);
442 df_ru_set_bb_info (dflow
, bb_index
, bb_info
);
443 bb_info
->kill
= BITMAP_ALLOC (NULL
);
444 bb_info
->sparse_kill
= BITMAP_ALLOC (NULL
);
445 bb_info
->gen
= BITMAP_ALLOC (NULL
);
446 bb_info
->in
= BITMAP_ALLOC (NULL
);
447 bb_info
->out
= BITMAP_ALLOC (NULL
);
453 /* Process a list of DEFs for df_ru_bb_local_compute. */
456 df_ru_bb_local_compute_process_def (struct dataflow
*dflow
,
457 struct df_ru_bb_info
*bb_info
,
459 enum df_ref_flags top_flag
)
461 struct df
*df
= dflow
->df
;
464 if ((top_flag
== (DF_REF_FLAGS (def
) & DF_REF_AT_TOP
))
465 /* If the def is to only part of the reg, it is as if it did
466 not happen, since some of the bits may get thru. */
467 && (!(DF_REF_FLAGS (def
) & DF_REF_PARTIAL
)))
469 unsigned int regno
= DF_REF_REGNO (def
);
470 unsigned int begin
= DF_REG_USE_GET (df
, regno
)->begin
;
471 unsigned int n_uses
= DF_REG_USE_GET (df
, regno
)->n_refs
;
472 if (!bitmap_bit_p (seen_in_block
, regno
))
474 /* The first def for regno in the insn, causes the kill
475 info to be generated. Do not modify the gen set
476 because the only values in it are the uses from here
477 to the top of the block and this def does not effect
479 if (!bitmap_bit_p (seen_in_insn
, regno
))
481 if (n_uses
> DF_SPARSE_THRESHOLD
)
482 bitmap_set_bit (bb_info
->sparse_kill
, regno
);
485 struct df_ru_problem_data
* problem_data
486 = (struct df_ru_problem_data
*)dflow
->problem_data
;
488 = df_ref_bitmap (problem_data
->use_sites
, regno
,
490 bitmap_ior_into (bb_info
->kill
, uses
);
493 bitmap_set_bit (seen_in_insn
, regno
);
501 /* Process a list of USEs for df_ru_bb_local_compute. */
504 df_ru_bb_local_compute_process_use (struct df_ru_bb_info
*bb_info
,
506 enum df_ref_flags top_flag
)
510 if (top_flag
== (DF_REF_FLAGS (use
) & DF_REF_AT_TOP
))
512 /* Add use to set of gens in this BB unless we have seen a
513 def in a previous instruction. */
514 unsigned int regno
= DF_REF_REGNO (use
);
515 if (!bitmap_bit_p (seen_in_block
, regno
))
516 bitmap_set_bit (bb_info
->gen
, DF_REF_ID (use
));
522 /* Compute local reaching use (upward exposed use) info for basic
523 block BB. USE_INFO->REGS[R] caches the set of uses for register R. */
525 df_ru_bb_local_compute (struct dataflow
*dflow
, unsigned int bb_index
)
527 struct df
*df
= dflow
->df
;
528 basic_block bb
= BASIC_BLOCK (bb_index
);
529 struct df_ru_bb_info
*bb_info
= df_ru_get_bb_info (dflow
, bb_index
);
532 /* Set when a def for regno is seen. */
533 bitmap_clear (seen_in_block
);
534 bitmap_clear (seen_in_insn
);
537 /* Variables defined in the prolog that are used by the exception
539 df_ru_bb_local_compute_process_use (bb_info
,
540 df_get_artificial_uses (df
, bb_index
),
543 df_ru_bb_local_compute_process_def (dflow
, bb_info
,
544 df_get_artificial_defs (df
, bb_index
),
547 FOR_BB_INSNS (bb
, insn
)
549 unsigned int uid
= INSN_UID (insn
);
553 df_ru_bb_local_compute_process_use (bb_info
,
554 DF_INSN_UID_USES (df
, uid
), 0);
556 df_ru_bb_local_compute_process_def (dflow
, bb_info
,
557 DF_INSN_UID_DEFS (df
, uid
), 0);
559 bitmap_ior_into (seen_in_block
, seen_in_insn
);
560 bitmap_clear (seen_in_insn
);
563 /* Process the hardware registers that are always live. */
564 df_ru_bb_local_compute_process_use (bb_info
,
565 df_get_artificial_uses (df
, bb_index
), 0);
567 df_ru_bb_local_compute_process_def (dflow
, bb_info
,
568 df_get_artificial_defs (df
, bb_index
), 0);
572 /* Compute local reaching use (upward exposed use) info for each basic
573 block within BLOCKS. */
575 df_ru_local_compute (struct dataflow
*dflow
,
577 bitmap rescan_blocks ATTRIBUTE_UNUSED
)
579 struct df
*df
= dflow
->df
;
580 unsigned int bb_index
;
583 struct df_ru_problem_data
*problem_data
584 = (struct df_ru_problem_data
*) dflow
->problem_data
;
585 bitmap sparse_invalidated
= problem_data
->sparse_invalidated_by_call
;
586 bitmap dense_invalidated
= problem_data
->dense_invalidated_by_call
;
589 df_reorganize_refs (&df
->use_info
);
591 EXECUTE_IF_SET_IN_BITMAP (all_blocks
, 0, bb_index
, bi
)
593 df_ru_bb_local_compute (dflow
, bb_index
);
596 /* Set up the knockout bit vectors to be applied across EH_EDGES. */
597 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call
, 0, regno
, bi
)
599 struct df_reg_info
*reg_info
= DF_REG_USE_GET (df
, regno
);
600 if (reg_info
->n_refs
> DF_SPARSE_THRESHOLD
)
601 bitmap_set_bit (sparse_invalidated
, regno
);
604 bitmap defs
= df_ref_bitmap (problem_data
->use_sites
, regno
,
605 reg_info
->begin
, reg_info
->n_refs
);
606 bitmap_ior_into (dense_invalidated
, defs
);
614 /* Initialize the solution bit vectors for problem. */
617 df_ru_init_solution (struct dataflow
*dflow
, bitmap all_blocks
)
619 unsigned int bb_index
;
622 EXECUTE_IF_SET_IN_BITMAP (all_blocks
, 0, bb_index
, bi
)
624 struct df_ru_bb_info
*bb_info
= df_ru_get_bb_info (dflow
, bb_index
);
625 bitmap_copy (bb_info
->in
, bb_info
->gen
);
626 bitmap_clear (bb_info
->out
);
631 /* Out of target gets or of in of source. */
634 df_ru_confluence_n (struct dataflow
*dflow
, edge e
)
636 bitmap op1
= df_ru_get_bb_info (dflow
, e
->src
->index
)->out
;
637 bitmap op2
= df_ru_get_bb_info (dflow
, e
->dest
->index
)->in
;
639 if (e
->flags
& EDGE_EH
)
641 struct df_ru_problem_data
*problem_data
642 = (struct df_ru_problem_data
*) dflow
->problem_data
;
643 bitmap sparse_invalidated
= problem_data
->sparse_invalidated_by_call
;
644 bitmap dense_invalidated
= problem_data
->dense_invalidated_by_call
;
645 struct df
*df
= dflow
->df
;
648 bitmap tmp
= BITMAP_ALLOC (NULL
);
650 bitmap_copy (tmp
, op2
);
651 bitmap_and_compl_into (tmp
, dense_invalidated
);
653 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated
, 0, regno
, bi
)
655 bitmap_clear_range (tmp
,
656 DF_REG_USE_GET (df
, regno
)->begin
,
657 DF_REG_USE_GET (df
, regno
)->n_refs
);
659 bitmap_ior_into (op1
, tmp
);
663 bitmap_ior_into (op1
, op2
);
667 /* Transfer function. */
670 df_ru_transfer_function (struct dataflow
*dflow
, int bb_index
)
672 struct df_ru_bb_info
*bb_info
= df_ru_get_bb_info (dflow
, bb_index
);
675 bitmap in
= bb_info
->in
;
676 bitmap out
= bb_info
->out
;
677 bitmap gen
= bb_info
->gen
;
678 bitmap kill
= bb_info
->kill
;
679 bitmap sparse_kill
= bb_info
->sparse_kill
;
681 if (bitmap_empty_p (sparse_kill
))
682 return bitmap_ior_and_compl (in
, gen
, out
, kill
);
685 struct df
*df
= dflow
->df
;
686 bool changed
= false;
687 bitmap tmp
= BITMAP_ALLOC (NULL
);
688 bitmap_copy (tmp
, out
);
689 EXECUTE_IF_SET_IN_BITMAP (sparse_kill
, 0, regno
, bi
)
691 bitmap_clear_range (tmp
,
692 DF_REG_USE_GET (df
, regno
)->begin
,
693 DF_REG_USE_GET (df
, regno
)->n_refs
);
695 bitmap_and_compl_into (tmp
, kill
);
696 bitmap_ior_into (tmp
, gen
);
697 changed
= !bitmap_equal_p (tmp
, in
);
710 /* Free all storage associated with the problem. */
713 df_ru_free (struct dataflow
*dflow
)
716 struct df_ru_problem_data
*problem_data
717 = (struct df_ru_problem_data
*) dflow
->problem_data
;
721 for (i
= 0; i
< dflow
->block_info_size
; i
++)
723 struct df_ru_bb_info
*bb_info
= df_ru_get_bb_info (dflow
, i
);
726 BITMAP_FREE (bb_info
->kill
);
727 BITMAP_FREE (bb_info
->sparse_kill
);
728 BITMAP_FREE (bb_info
->gen
);
729 BITMAP_FREE (bb_info
->in
);
730 BITMAP_FREE (bb_info
->out
);
734 free_alloc_pool (dflow
->block_pool
);
736 for (i
= 0; i
< problem_data
->use_sites_size
; i
++)
738 bitmap bm
= problem_data
->use_sites
[i
];
743 free (problem_data
->use_sites
);
744 BITMAP_FREE (problem_data
->sparse_invalidated_by_call
);
745 BITMAP_FREE (problem_data
->dense_invalidated_by_call
);
747 dflow
->block_info_size
= 0;
748 free (dflow
->block_info
);
749 free (dflow
->problem_data
);
755 /* Debugging info. */
758 df_ru_dump (struct dataflow
*dflow
, FILE *file
)
761 struct df
*df
= dflow
->df
;
762 struct df_ru_problem_data
*problem_data
763 = (struct df_ru_problem_data
*) dflow
->problem_data
;
764 unsigned int m
= max_reg_num ();
767 if (!dflow
->block_info
)
770 fprintf (file
, "Reaching uses:\n");
772 fprintf (file
, " sparse invalidated \t");
773 dump_bitmap (file
, problem_data
->sparse_invalidated_by_call
);
774 fprintf (file
, " dense invalidated \t");
775 dump_bitmap (file
, problem_data
->dense_invalidated_by_call
);
777 for (regno
= 0; regno
< m
; regno
++)
778 if (DF_REG_USE_GET (df
, regno
)->n_refs
)
779 fprintf (file
, "%d[%d,%d] ", regno
,
780 DF_REG_USE_GET (df
, regno
)->begin
,
781 DF_REG_USE_GET (df
, regno
)->n_refs
);
782 fprintf (file
, "\n");
786 struct df_ru_bb_info
*bb_info
= df_ru_get_bb_info (dflow
, bb
->index
);
787 df_print_bb_index (bb
, file
);
792 fprintf (file
, " in \t(%d)\n", (int) bitmap_count_bits (bb_info
->in
));
793 dump_bitmap (file
, bb_info
->in
);
794 fprintf (file
, " gen \t(%d)\n", (int) bitmap_count_bits (bb_info
->gen
));
795 dump_bitmap (file
, bb_info
->gen
);
796 fprintf (file
, " kill\t(%d)\n", (int) bitmap_count_bits (bb_info
->kill
));
797 dump_bitmap (file
, bb_info
->kill
);
798 fprintf (file
, " out \t(%d)\n", (int) bitmap_count_bits (bb_info
->out
));
799 dump_bitmap (file
, bb_info
->out
);
803 /* All of the information associated with every instance of the problem. */
805 static struct df_problem problem_RU
=
807 DF_RU
, /* Problem id. */
808 DF_BACKWARD
, /* Direction. */
809 df_ru_alloc
, /* Allocate the problem specific data. */
810 NULL
, /* Reset global information. */
811 df_ru_free_bb_info
, /* Free basic block info. */
812 df_ru_local_compute
, /* Local compute function. */
813 df_ru_init_solution
, /* Init the solution specific data. */
814 df_iterative_dataflow
, /* Iterative solver. */
815 NULL
, /* Confluence operator 0. */
816 df_ru_confluence_n
, /* Confluence operator n. */
817 df_ru_transfer_function
, /* Transfer function. */
818 NULL
, /* Finalize function. */
819 df_ru_free
, /* Free all of the problem information. */
820 df_ru_dump
, /* Debugging. */
821 NULL
, /* Dependent problem. */
822 0 /* Changeable flags. */
827 /* Create a new DATAFLOW instance and add it to an existing instance
828 of DF. The returned structure is what is used to get at the
832 df_ru_add_problem (struct df
*df
, int flags
)
834 return df_add_problem (df
, &problem_RU
, flags
);
838 /*----------------------------------------------------------------------------
841 Find the locations in the function where each definition site for a
842 pseudo reaches. In and out bitvectors are built for each basic
843 block. The id field in the ref is used to index into these sets.
844 See df.h for details.
845 ----------------------------------------------------------------------------*/
847 /* See the comment at the top of the Reaching Uses problem for how the
848 uses are represented in the kill sets. The same games are played
849 here for the defs. */
851 /* Private data used to compute the solution for this problem. These
852 data structures are not accessible outside of this module. */
853 struct df_rd_problem_data
855 /* If the number of defs for regnum N is less than
856 DF_SPARSE_THRESHOLD, uses_sites[N] contains a mask of the all of
857 the defs of reg N indexed by the id in the ref structure. If
858 there are more than DF_SPARSE_THRESHOLD defs for regnum N a
859 different mechanism is used to mask the def. */
860 bitmap
*def_sites
; /* Bitmap of defs for each pseudo. */
861 unsigned int def_sites_size
; /* Size of def_sites. */
862 /* The set of defs to regs invalidated by call. */
863 bitmap sparse_invalidated_by_call
;
864 /* The set of defs to regs invalidate by call for rd. */
865 bitmap dense_invalidated_by_call
;
868 /* Get basic block info. */
870 struct df_rd_bb_info
*
871 df_rd_get_bb_info (struct dataflow
*dflow
, unsigned int index
)
873 return (struct df_rd_bb_info
*) dflow
->block_info
[index
];
877 /* Set basic block info. */
880 df_rd_set_bb_info (struct dataflow
*dflow
, unsigned int index
,
881 struct df_rd_bb_info
*bb_info
)
883 dflow
->block_info
[index
] = bb_info
;
887 /* Free basic block info. */
890 df_rd_free_bb_info (struct dataflow
*dflow
,
891 basic_block bb ATTRIBUTE_UNUSED
,
894 struct df_rd_bb_info
*bb_info
= (struct df_rd_bb_info
*) vbb_info
;
897 BITMAP_FREE (bb_info
->kill
);
898 BITMAP_FREE (bb_info
->sparse_kill
);
899 BITMAP_FREE (bb_info
->gen
);
900 BITMAP_FREE (bb_info
->in
);
901 BITMAP_FREE (bb_info
->out
);
902 pool_free (dflow
->block_pool
, bb_info
);
907 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
908 not touched unless the block is new. */
911 df_rd_alloc (struct dataflow
*dflow
,
912 bitmap blocks_to_rescan ATTRIBUTE_UNUSED
,
915 unsigned int bb_index
;
917 unsigned int reg_size
= max_reg_num ();
919 if (!dflow
->block_pool
)
920 dflow
->block_pool
= create_alloc_pool ("df_rd_block pool",
921 sizeof (struct df_rd_bb_info
), 50);
923 if (dflow
->problem_data
)
926 struct df_rd_problem_data
*problem_data
927 = (struct df_rd_problem_data
*) dflow
->problem_data
;
929 for (i
= 0; i
< problem_data
->def_sites_size
; i
++)
931 bitmap bm
= problem_data
->def_sites
[i
];
935 problem_data
->def_sites
[i
] = NULL
;
939 if (problem_data
->def_sites_size
< reg_size
)
941 problem_data
->def_sites
942 = xrealloc (problem_data
->def_sites
, reg_size
*sizeof (bitmap
));
943 memset (problem_data
->def_sites
+ problem_data
->def_sites_size
, 0,
944 (reg_size
- problem_data
->def_sites_size
) *sizeof (bitmap
));
945 problem_data
->def_sites_size
= reg_size
;
948 bitmap_clear (problem_data
->sparse_invalidated_by_call
);
949 bitmap_clear (problem_data
->dense_invalidated_by_call
);
953 struct df_rd_problem_data
*problem_data
= XNEW (struct df_rd_problem_data
);
954 dflow
->problem_data
= problem_data
;
956 problem_data
->def_sites
= XCNEWVEC (bitmap
, reg_size
);
957 problem_data
->def_sites_size
= reg_size
;
958 problem_data
->sparse_invalidated_by_call
= BITMAP_ALLOC (NULL
);
959 problem_data
->dense_invalidated_by_call
= BITMAP_ALLOC (NULL
);
962 df_grow_bb_info (dflow
);
964 /* Because of the clustering of all use sites for the same pseudo,
965 we have to process all of the blocks before doing the
968 EXECUTE_IF_SET_IN_BITMAP (all_blocks
, 0, bb_index
, bi
)
970 struct df_rd_bb_info
*bb_info
= df_rd_get_bb_info (dflow
, bb_index
);
973 bitmap_clear (bb_info
->kill
);
974 bitmap_clear (bb_info
->sparse_kill
);
975 bitmap_clear (bb_info
->gen
);
979 bb_info
= (struct df_rd_bb_info
*) pool_alloc (dflow
->block_pool
);
980 df_rd_set_bb_info (dflow
, bb_index
, bb_info
);
981 bb_info
->kill
= BITMAP_ALLOC (NULL
);
982 bb_info
->sparse_kill
= BITMAP_ALLOC (NULL
);
983 bb_info
->gen
= BITMAP_ALLOC (NULL
);
984 bb_info
->in
= BITMAP_ALLOC (NULL
);
985 bb_info
->out
= BITMAP_ALLOC (NULL
);
991 /* Process a list of DEFs for df_rd_bb_local_compute. */
994 df_rd_bb_local_compute_process_def (struct dataflow
*dflow
,
995 struct df_rd_bb_info
*bb_info
,
997 enum df_ref_flags top_flag
)
999 struct df
*df
= dflow
->df
;
1002 if (top_flag
== (DF_REF_FLAGS (def
) & DF_REF_AT_TOP
))
1004 unsigned int regno
= DF_REF_REGNO (def
);
1005 unsigned int begin
= DF_REG_DEF_GET (df
, regno
)->begin
;
1006 unsigned int n_defs
= DF_REG_DEF_GET (df
, regno
)->n_refs
;
1008 /* Only the last def(s) for a regno in the block has any
1010 if (!bitmap_bit_p (seen_in_block
, regno
))
1012 /* The first def for regno in insn gets to knock out the
1013 defs from other instructions. */
1014 if ((!bitmap_bit_p (seen_in_insn
, regno
))
1015 /* If the def is to only part of the reg, it does
1016 not kill the other defs that reach here. */
1017 && (!((DF_REF_FLAGS (def
) & DF_REF_PARTIAL
)
1018 || (DF_REF_FLAGS (def
) & DF_REF_MAY_CLOBBER
))))
1020 if (n_defs
> DF_SPARSE_THRESHOLD
)
1022 bitmap_set_bit (bb_info
->sparse_kill
, regno
);
1023 bitmap_clear_range(bb_info
->gen
, begin
, n_defs
);
1027 struct df_rd_problem_data
* problem_data
1028 = (struct df_rd_problem_data
*)dflow
->problem_data
;
1029 bitmap defs
= df_ref_bitmap (problem_data
->def_sites
,
1030 regno
, begin
, n_defs
);
1031 bitmap_ior_into (bb_info
->kill
, defs
);
1032 bitmap_and_compl_into (bb_info
->gen
, defs
);
1036 bitmap_set_bit (seen_in_insn
, regno
);
1037 /* All defs for regno in the instruction may be put into
1039 if (!(DF_REF_FLAGS (def
)
1040 & (DF_REF_MUST_CLOBBER
| DF_REF_MAY_CLOBBER
)))
1041 bitmap_set_bit (bb_info
->gen
, DF_REF_ID (def
));
1044 def
= def
->next_ref
;
1048 /* Compute local reaching def info for basic block BB. */
1051 df_rd_bb_local_compute (struct dataflow
*dflow
, unsigned int bb_index
)
1053 struct df
*df
= dflow
->df
;
1054 basic_block bb
= BASIC_BLOCK (bb_index
);
1055 struct df_rd_bb_info
*bb_info
= df_rd_get_bb_info (dflow
, bb_index
);
1058 bitmap_clear (seen_in_block
);
1059 bitmap_clear (seen_in_insn
);
1061 df_rd_bb_local_compute_process_def (dflow
, bb_info
,
1062 df_get_artificial_defs (df
, bb_index
), 0);
1064 FOR_BB_INSNS_REVERSE (bb
, insn
)
1066 unsigned int uid
= INSN_UID (insn
);
1071 df_rd_bb_local_compute_process_def (dflow
, bb_info
,
1072 DF_INSN_UID_DEFS (df
, uid
), 0);
1074 /* This complex dance with the two bitmaps is required because
1075 instructions can assign twice to the same pseudo. This
1076 generally happens with calls that will have one def for the
1077 result and another def for the clobber. If only one vector
1078 is used and the clobber goes first, the result will be
1080 bitmap_ior_into (seen_in_block
, seen_in_insn
);
1081 bitmap_clear (seen_in_insn
);
1084 /* Process the artificial defs at the top of the block last since we
1085 are going backwards through the block and these are logically at
1087 df_rd_bb_local_compute_process_def (dflow
, bb_info
,
1088 df_get_artificial_defs (df
, bb_index
),
1093 /* Compute local reaching def info for each basic block within BLOCKS. */
1096 df_rd_local_compute (struct dataflow
*dflow
,
1098 bitmap rescan_blocks ATTRIBUTE_UNUSED
)
1100 struct df
*df
= dflow
->df
;
1101 unsigned int bb_index
;
1104 struct df_rd_problem_data
*problem_data
1105 = (struct df_rd_problem_data
*) dflow
->problem_data
;
1106 bitmap sparse_invalidated
= problem_data
->sparse_invalidated_by_call
;
1107 bitmap dense_invalidated
= problem_data
->dense_invalidated_by_call
;
1110 df_reorganize_refs (&df
->def_info
);
1112 EXECUTE_IF_SET_IN_BITMAP (all_blocks
, 0, bb_index
, bi
)
1114 df_rd_bb_local_compute (dflow
, bb_index
);
1117 /* Set up the knockout bit vectors to be applied across EH_EDGES. */
1118 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call
, 0, regno
, bi
)
1120 struct df_reg_info
*reg_info
= DF_REG_DEF_GET (df
, regno
);
1121 if (reg_info
->n_refs
> DF_SPARSE_THRESHOLD
)
1123 bitmap_set_bit (sparse_invalidated
, regno
);
1127 bitmap defs
= df_ref_bitmap (problem_data
->def_sites
, regno
,
1128 reg_info
->begin
, reg_info
->n_refs
);
1129 bitmap_ior_into (dense_invalidated
, defs
);
1136 /* Initialize the solution bit vectors for problem. */
1139 df_rd_init_solution (struct dataflow
*dflow
, bitmap all_blocks
)
1141 unsigned int bb_index
;
1144 EXECUTE_IF_SET_IN_BITMAP (all_blocks
, 0, bb_index
, bi
)
1146 struct df_rd_bb_info
*bb_info
= df_rd_get_bb_info (dflow
, bb_index
);
1148 bitmap_copy (bb_info
->out
, bb_info
->gen
);
1149 bitmap_clear (bb_info
->in
);
1153 /* In of target gets or of out of source. */
1156 df_rd_confluence_n (struct dataflow
*dflow
, edge e
)
1158 bitmap op1
= df_rd_get_bb_info (dflow
, e
->dest
->index
)->in
;
1159 bitmap op2
= df_rd_get_bb_info (dflow
, e
->src
->index
)->out
;
1161 if (e
->flags
& EDGE_EH
)
1163 struct df_rd_problem_data
*problem_data
1164 = (struct df_rd_problem_data
*) dflow
->problem_data
;
1165 bitmap sparse_invalidated
= problem_data
->sparse_invalidated_by_call
;
1166 bitmap dense_invalidated
= problem_data
->dense_invalidated_by_call
;
1167 struct df
*df
= dflow
->df
;
1170 bitmap tmp
= BITMAP_ALLOC (NULL
);
1172 bitmap_copy (tmp
, op2
);
1173 bitmap_and_compl_into (tmp
, dense_invalidated
);
1175 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated
, 0, regno
, bi
)
1177 bitmap_clear_range (tmp
,
1178 DF_REG_DEF_GET (df
, regno
)->begin
,
1179 DF_REG_DEF_GET (df
, regno
)->n_refs
);
1181 bitmap_ior_into (op1
, tmp
);
1185 bitmap_ior_into (op1
, op2
);
1189 /* Transfer function. */
1192 df_rd_transfer_function (struct dataflow
*dflow
, int bb_index
)
1194 struct df_rd_bb_info
*bb_info
= df_rd_get_bb_info (dflow
, bb_index
);
1197 bitmap in
= bb_info
->in
;
1198 bitmap out
= bb_info
->out
;
1199 bitmap gen
= bb_info
->gen
;
1200 bitmap kill
= bb_info
->kill
;
1201 bitmap sparse_kill
= bb_info
->sparse_kill
;
1203 if (bitmap_empty_p (sparse_kill
))
1204 return bitmap_ior_and_compl (out
, gen
, in
, kill
);
1207 struct df
*df
= dflow
->df
;
1208 bool changed
= false;
1209 bitmap tmp
= BITMAP_ALLOC (NULL
);
1210 bitmap_copy (tmp
, in
);
1211 EXECUTE_IF_SET_IN_BITMAP (sparse_kill
, 0, regno
, bi
)
1213 bitmap_clear_range (tmp
,
1214 DF_REG_DEF_GET (df
, regno
)->begin
,
1215 DF_REG_DEF_GET (df
, regno
)->n_refs
);
1217 bitmap_and_compl_into (tmp
, kill
);
1218 bitmap_ior_into (tmp
, gen
);
1219 changed
= !bitmap_equal_p (tmp
, out
);
1232 /* Free all storage associated with the problem. */
1235 df_rd_free (struct dataflow
*dflow
)
1238 struct df_rd_problem_data
*problem_data
1239 = (struct df_rd_problem_data
*) dflow
->problem_data
;
1243 for (i
= 0; i
< dflow
->block_info_size
; i
++)
1245 struct df_rd_bb_info
*bb_info
= df_rd_get_bb_info (dflow
, i
);
1248 BITMAP_FREE (bb_info
->kill
);
1249 BITMAP_FREE (bb_info
->sparse_kill
);
1250 BITMAP_FREE (bb_info
->gen
);
1251 BITMAP_FREE (bb_info
->in
);
1252 BITMAP_FREE (bb_info
->out
);
1256 free_alloc_pool (dflow
->block_pool
);
1258 for (i
= 0; i
< problem_data
->def_sites_size
; i
++)
1260 bitmap bm
= problem_data
->def_sites
[i
];
1265 free (problem_data
->def_sites
);
1266 BITMAP_FREE (problem_data
->sparse_invalidated_by_call
);
1267 BITMAP_FREE (problem_data
->dense_invalidated_by_call
);
1269 dflow
->block_info_size
= 0;
1270 free (dflow
->block_info
);
1271 free (dflow
->problem_data
);
1277 /* Debugging info. */
1280 df_rd_dump (struct dataflow
*dflow
, FILE *file
)
1282 struct df
*df
= dflow
->df
;
1284 struct df_rd_problem_data
*problem_data
1285 = (struct df_rd_problem_data
*) dflow
->problem_data
;
1286 unsigned int m
= max_reg_num ();
1289 if (!dflow
->block_info
)
1292 fprintf (file
, "Reaching defs:\n\n");
1294 fprintf (file
, " sparse invalidated \t");
1295 dump_bitmap (file
, problem_data
->sparse_invalidated_by_call
);
1296 fprintf (file
, " dense invalidated \t");
1297 dump_bitmap (file
, problem_data
->dense_invalidated_by_call
);
1299 for (regno
= 0; regno
< m
; regno
++)
1300 if (DF_REG_DEF_GET (df
, regno
)->n_refs
)
1301 fprintf (file
, "%d[%d,%d] ", regno
,
1302 DF_REG_DEF_GET (df
, regno
)->begin
,
1303 DF_REG_DEF_GET (df
, regno
)->n_refs
);
1304 fprintf (file
, "\n");
1308 struct df_rd_bb_info
*bb_info
= df_rd_get_bb_info (dflow
, bb
->index
);
1309 df_print_bb_index (bb
, file
);
1314 fprintf (file
, " in \t(%d)\n", (int) bitmap_count_bits (bb_info
->in
));
1315 dump_bitmap (file
, bb_info
->in
);
1316 fprintf (file
, " gen \t(%d)\n", (int) bitmap_count_bits (bb_info
->gen
));
1317 dump_bitmap (file
, bb_info
->gen
);
1318 fprintf (file
, " kill\t(%d)\n", (int) bitmap_count_bits (bb_info
->kill
));
1319 dump_bitmap (file
, bb_info
->kill
);
1320 fprintf (file
, " out \t(%d)\n", (int) bitmap_count_bits (bb_info
->out
));
1321 dump_bitmap (file
, bb_info
->out
);
1325 /* All of the information associated with every instance of the problem. */
1327 static struct df_problem problem_RD
=
1329 DF_RD
, /* Problem id. */
1330 DF_FORWARD
, /* Direction. */
1331 df_rd_alloc
, /* Allocate the problem specific data. */
1332 NULL
, /* Reset global information. */
1333 df_rd_free_bb_info
, /* Free basic block info. */
1334 df_rd_local_compute
, /* Local compute function. */
1335 df_rd_init_solution
, /* Init the solution specific data. */
1336 df_iterative_dataflow
, /* Iterative solver. */
1337 NULL
, /* Confluence operator 0. */
1338 df_rd_confluence_n
, /* Confluence operator n. */
1339 df_rd_transfer_function
, /* Transfer function. */
1340 NULL
, /* Finalize function. */
1341 df_rd_free
, /* Free all of the problem information. */
1342 df_rd_dump
, /* Debugging. */
1343 NULL
, /* Dependent problem. */
1344 0 /* Changeable flags. */
1349 /* Create a new DATAFLOW instance and add it to an existing instance
1350 of DF. The returned structure is what is used to get at the
1354 df_rd_add_problem (struct df
*df
, int flags
)
1356 return df_add_problem (df
, &problem_RD
, flags
);
/*----------------------------------------------------------------------------
   LIVE REGISTERS

   Find the locations in the function where any use of a pseudo can
   reach in the backwards direction.  In and out bitvectors are built
   for each basic block.  The regnum is used to index into these sets.
   See df.h for details.
   ----------------------------------------------------------------------------*/
1370 /* Get basic block info. */
1372 struct df_lr_bb_info
*
1373 df_lr_get_bb_info (struct dataflow
*dflow
, unsigned int index
)
1375 return (struct df_lr_bb_info
*) dflow
->block_info
[index
];
1379 /* Set basic block info. */
1382 df_lr_set_bb_info (struct dataflow
*dflow
, unsigned int index
,
1383 struct df_lr_bb_info
*bb_info
)
1385 dflow
->block_info
[index
] = bb_info
;
1389 /* Free basic block info. */
1392 df_lr_free_bb_info (struct dataflow
*dflow
,
1393 basic_block bb ATTRIBUTE_UNUSED
,
1396 struct df_lr_bb_info
*bb_info
= (struct df_lr_bb_info
*) vbb_info
;
1399 BITMAP_FREE (bb_info
->use
);
1400 BITMAP_FREE (bb_info
->def
);
1401 BITMAP_FREE (bb_info
->in
);
1402 BITMAP_FREE (bb_info
->out
);
1403 pool_free (dflow
->block_pool
, bb_info
);
1408 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
1409 not touched unless the block is new. */
1412 df_lr_alloc (struct dataflow
*dflow
, bitmap blocks_to_rescan
,
1413 bitmap all_blocks ATTRIBUTE_UNUSED
)
1415 unsigned int bb_index
;
1418 if (!dflow
->block_pool
)
1419 dflow
->block_pool
= create_alloc_pool ("df_lr_block pool",
1420 sizeof (struct df_lr_bb_info
), 50);
1422 df_grow_bb_info (dflow
);
1424 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan
, 0, bb_index
, bi
)
1426 struct df_lr_bb_info
*bb_info
= df_lr_get_bb_info (dflow
, bb_index
);
1429 bitmap_clear (bb_info
->def
);
1430 bitmap_clear (bb_info
->use
);
1434 bb_info
= (struct df_lr_bb_info
*) pool_alloc (dflow
->block_pool
);
1435 df_lr_set_bb_info (dflow
, bb_index
, bb_info
);
1436 bb_info
->use
= BITMAP_ALLOC (NULL
);
1437 bb_info
->def
= BITMAP_ALLOC (NULL
);
1438 bb_info
->in
= BITMAP_ALLOC (NULL
);
1439 bb_info
->out
= BITMAP_ALLOC (NULL
);
1445 /* Compute local live register info for basic block BB. */
1448 df_lr_bb_local_compute (struct dataflow
*dflow
,
1449 struct df
*df
, unsigned int bb_index
)
1451 basic_block bb
= BASIC_BLOCK (bb_index
);
1452 struct df_lr_bb_info
*bb_info
= df_lr_get_bb_info (dflow
, bb_index
);
1457 /* Process the registers set in an exception handler. */
1458 for (def
= df_get_artificial_defs (df
, bb_index
); def
; def
= def
->next_ref
)
1459 if (((DF_REF_FLAGS (def
) & DF_REF_AT_TOP
) == 0)
1460 && (!(DF_REF_FLAGS (def
) & DF_REF_PARTIAL
)))
1462 unsigned int dregno
= DF_REF_REGNO (def
);
1463 bitmap_set_bit (bb_info
->def
, dregno
);
1464 bitmap_clear_bit (bb_info
->use
, dregno
);
1467 /* Process the hardware registers that are always live. */
1468 for (use
= df_get_artificial_uses (df
, bb_index
); use
; use
= use
->next_ref
)
1469 /* Add use to set of uses in this BB. */
1470 if ((DF_REF_FLAGS (use
) & DF_REF_AT_TOP
) == 0)
1471 bitmap_set_bit (bb_info
->use
, DF_REF_REGNO (use
));
1473 FOR_BB_INSNS_REVERSE (bb
, insn
)
1475 unsigned int uid
= INSN_UID (insn
);
1482 for (def
= DF_INSN_UID_DEFS (df
, uid
); def
; def
= def
->next_ref
)
1484 unsigned int dregno
= DF_REF_REGNO (def
);
1486 if (dregno
>= FIRST_PSEUDO_REGISTER
1487 || !(SIBLING_CALL_P (insn
)
1488 && bitmap_bit_p (df
->exit_block_uses
, dregno
)
1489 && !refers_to_regno_p (dregno
, dregno
+1,
1490 current_function_return_rtx
,
1493 /* If the def is to only part of the reg, it does
1494 not kill the other defs that reach here. */
1495 if (!(DF_REF_FLAGS (def
) & DF_REF_PARTIAL
))
1497 bitmap_set_bit (bb_info
->def
, dregno
);
1498 bitmap_clear_bit (bb_info
->use
, dregno
);
1505 for (def
= DF_INSN_UID_DEFS (df
, uid
); def
; def
= def
->next_ref
)
1507 unsigned int dregno
= DF_REF_REGNO (def
);
1509 if (DF_INSN_CONTAINS_ASM (df
, insn
)
1510 && dregno
< FIRST_PSEUDO_REGISTER
)
1513 unsigned int end
= dregno
1514 + hard_regno_nregs
[dregno
][GET_MODE (DF_REF_REG (def
))] - 1;
1515 for (i
= dregno
; i
<= end
; ++i
)
1516 regs_asm_clobbered
[i
] = 1;
1518 /* If the def is to only part of the reg, it does
1519 not kill the other defs that reach here. */
1520 if (!(DF_REF_FLAGS (def
) & DF_REF_PARTIAL
))
1522 bitmap_set_bit (bb_info
->def
, dregno
);
1523 bitmap_clear_bit (bb_info
->use
, dregno
);
1528 for (use
= DF_INSN_UID_USES (df
, uid
); use
; use
= use
->next_ref
)
1529 /* Add use to set of uses in this BB. */
1530 bitmap_set_bit (bb_info
->use
, DF_REF_REGNO (use
));
1533 /* Process the registers set in an exception handler. */
1534 for (def
= df_get_artificial_defs (df
, bb_index
); def
; def
= def
->next_ref
)
1535 if ((DF_REF_FLAGS (def
) & DF_REF_AT_TOP
)
1536 && (!(DF_REF_FLAGS (def
) & DF_REF_PARTIAL
)))
1538 unsigned int dregno
= DF_REF_REGNO (def
);
1539 bitmap_set_bit (bb_info
->def
, dregno
);
1540 bitmap_clear_bit (bb_info
->use
, dregno
);
1544 /* Process the uses that are live into an exception handler. */
1545 for (use
= df_get_artificial_uses (df
, bb_index
); use
; use
= use
->next_ref
)
1546 /* Add use to set of uses in this BB. */
1547 if (DF_REF_FLAGS (use
) & DF_REF_AT_TOP
)
1548 bitmap_set_bit (bb_info
->use
, DF_REF_REGNO (use
));
1553 /* Compute local live register info for each basic block within BLOCKS. */
1556 df_lr_local_compute (struct dataflow
*dflow
,
1558 bitmap rescan_blocks
)
1560 struct df
*df
= dflow
->df
;
1561 unsigned int bb_index
;
1564 /* Assume that the stack pointer is unchanging if alloca hasn't
1566 if (bitmap_equal_p (all_blocks
, rescan_blocks
))
1567 memset (regs_asm_clobbered
, 0, sizeof (regs_asm_clobbered
));
1569 bitmap_clear (df
->hardware_regs_used
);
1571 /* The all-important stack pointer must always be live. */
1572 bitmap_set_bit (df
->hardware_regs_used
, STACK_POINTER_REGNUM
);
1574 /* Before reload, there are a few registers that must be forced
1575 live everywhere -- which might not already be the case for
1576 blocks within infinite loops. */
1577 if (!reload_completed
)
1579 /* Any reference to any pseudo before reload is a potential
1580 reference of the frame pointer. */
1581 bitmap_set_bit (df
->hardware_regs_used
, FRAME_POINTER_REGNUM
);
1583 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1584 /* Pseudos with argument area equivalences may require
1585 reloading via the argument pointer. */
1586 if (fixed_regs
[ARG_POINTER_REGNUM
])
1587 bitmap_set_bit (df
->hardware_regs_used
, ARG_POINTER_REGNUM
);
1590 /* Any constant, or pseudo with constant equivalences, may
1591 require reloading from memory using the pic register. */
1592 if ((unsigned) PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
1593 && fixed_regs
[PIC_OFFSET_TABLE_REGNUM
])
1594 bitmap_set_bit (df
->hardware_regs_used
, PIC_OFFSET_TABLE_REGNUM
);
1597 if (bitmap_bit_p (rescan_blocks
, EXIT_BLOCK
))
1599 /* The exit block is special for this problem and its bits are
1600 computed from thin air. */
1601 struct df_lr_bb_info
*bb_info
= df_lr_get_bb_info (dflow
, EXIT_BLOCK
);
1602 bitmap_copy (bb_info
->use
, df
->exit_block_uses
);
1605 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks
, 0, bb_index
, bi
)
1607 if (bb_index
== EXIT_BLOCK
)
1609 df_lr_bb_local_compute (dflow
, df
, bb_index
);
1614 /* Initialize the solution vectors. */
1617 df_lr_init (struct dataflow
*dflow
, bitmap all_blocks
)
1619 unsigned int bb_index
;
1622 EXECUTE_IF_SET_IN_BITMAP (all_blocks
, 0, bb_index
, bi
)
1624 struct df_lr_bb_info
*bb_info
= df_lr_get_bb_info (dflow
, bb_index
);
1625 bitmap_copy (bb_info
->in
, bb_info
->use
);
1626 bitmap_clear (bb_info
->out
);
1631 /* Confluence function that processes infinite loops. This might be a
1632 noreturn function that throws. And even if it isn't, getting the
1633 unwind info right helps debugging. */
1635 df_lr_confluence_0 (struct dataflow
*dflow
, basic_block bb
)
1637 struct df
*df
= dflow
->df
;
1639 bitmap op1
= df_lr_get_bb_info (dflow
, bb
->index
)->out
;
1640 if (bb
!= EXIT_BLOCK_PTR
)
1641 bitmap_copy (op1
, df
->hardware_regs_used
);
1645 /* Confluence function that ignores fake edges. */
1648 df_lr_confluence_n (struct dataflow
*dflow
, edge e
)
1650 bitmap op1
= df_lr_get_bb_info (dflow
, e
->src
->index
)->out
;
1651 bitmap op2
= df_lr_get_bb_info (dflow
, e
->dest
->index
)->in
;
1653 /* Call-clobbered registers die across exception and call edges. */
1654 /* ??? Abnormal call edges ignored for the moment, as this gets
1655 confused by sibling call edges, which crashes reg-stack. */
1656 if (e
->flags
& EDGE_EH
)
1657 bitmap_ior_and_compl_into (op1
, op2
, df_invalidated_by_call
);
1659 bitmap_ior_into (op1
, op2
);
1661 bitmap_ior_into (op1
, dflow
->df
->hardware_regs_used
);
1665 /* Transfer function. */
1668 df_lr_transfer_function (struct dataflow
*dflow
, int bb_index
)
1670 struct df_lr_bb_info
*bb_info
= df_lr_get_bb_info (dflow
, bb_index
);
1671 bitmap in
= bb_info
->in
;
1672 bitmap out
= bb_info
->out
;
1673 bitmap use
= bb_info
->use
;
1674 bitmap def
= bb_info
->def
;
1676 return bitmap_ior_and_compl (in
, use
, out
, def
);
1680 /* Free all storage associated with the problem. */
1683 df_lr_free (struct dataflow
*dflow
)
1685 if (dflow
->block_info
)
1688 for (i
= 0; i
< dflow
->block_info_size
; i
++)
1690 struct df_lr_bb_info
*bb_info
= df_lr_get_bb_info (dflow
, i
);
1693 BITMAP_FREE (bb_info
->use
);
1694 BITMAP_FREE (bb_info
->def
);
1695 BITMAP_FREE (bb_info
->in
);
1696 BITMAP_FREE (bb_info
->out
);
1699 free_alloc_pool (dflow
->block_pool
);
1701 dflow
->block_info_size
= 0;
1702 free (dflow
->block_info
);
1705 free (dflow
->problem_data
);
1710 /* Debugging info. */
1713 df_lr_dump (struct dataflow
*dflow
, FILE *file
)
1717 if (!dflow
->block_info
)
1720 fprintf (file
, "Live Registers:\n");
1723 struct df_lr_bb_info
*bb_info
= df_lr_get_bb_info (dflow
, bb
->index
);
1724 df_print_bb_index (bb
, file
);
1729 fprintf (file
, " in \t");
1730 dump_bitmap (file
, bb_info
->in
);
1731 fprintf (file
, " use \t");
1732 dump_bitmap (file
, bb_info
->use
);
1733 fprintf (file
, " def \t");
1734 dump_bitmap (file
, bb_info
->def
);
1735 fprintf (file
, " out \t");
1736 dump_bitmap (file
, bb_info
->out
);
1740 /* All of the information associated with every instance of the problem. */
1742 static struct df_problem problem_LR
=
1744 DF_LR
, /* Problem id. */
1745 DF_BACKWARD
, /* Direction. */
1746 df_lr_alloc
, /* Allocate the problem specific data. */
1747 NULL
, /* Reset global information. */
1748 df_lr_free_bb_info
, /* Free basic block info. */
1749 df_lr_local_compute
, /* Local compute function. */
1750 df_lr_init
, /* Init the solution specific data. */
1751 df_iterative_dataflow
, /* Iterative solver. */
1752 df_lr_confluence_0
, /* Confluence operator 0. */
1753 df_lr_confluence_n
, /* Confluence operator n. */
1754 df_lr_transfer_function
, /* Transfer function. */
1755 NULL
, /* Finalize function. */
1756 df_lr_free
, /* Free all of the problem information. */
1757 df_lr_dump
, /* Debugging. */
1758 NULL
, /* Dependent problem. */
1763 /* Create a new DATAFLOW instance and add it to an existing instance
1764 of DF. The returned structure is what is used to get at the
1768 df_lr_add_problem (struct df
*df
, int flags
)
1770 return df_add_problem (df
, &problem_LR
, flags
);
1775 /*----------------------------------------------------------------------------
1776 UNINITIALIZED REGISTERS
1778 Find the set of uses for registers that are reachable from the entry
1779 block without passing thru a definition. In and out bitvectors are built
1780 for each basic block. The regnum is used to index into these sets.
1781 See df.h for details.
1782 ----------------------------------------------------------------------------*/
1784 /* Get basic block info. */
1786 struct df_ur_bb_info
*
1787 df_ur_get_bb_info (struct dataflow
*dflow
, unsigned int index
)
1789 return (struct df_ur_bb_info
*) dflow
->block_info
[index
];
1793 /* Set basic block info. */
1796 df_ur_set_bb_info (struct dataflow
*dflow
, unsigned int index
,
1797 struct df_ur_bb_info
*bb_info
)
1799 dflow
->block_info
[index
] = bb_info
;
1803 /* Free basic block info. */
1806 df_ur_free_bb_info (struct dataflow
*dflow
,
1807 basic_block bb ATTRIBUTE_UNUSED
,
1810 struct df_ur_bb_info
*bb_info
= (struct df_ur_bb_info
*) vbb_info
;
1813 BITMAP_FREE (bb_info
->gen
);
1814 BITMAP_FREE (bb_info
->kill
);
1815 BITMAP_FREE (bb_info
->in
);
1816 BITMAP_FREE (bb_info
->out
);
1817 pool_free (dflow
->block_pool
, bb_info
);
1822 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
1823 not touched unless the block is new. */
1826 df_ur_alloc (struct dataflow
*dflow
, bitmap blocks_to_rescan
,
1827 bitmap all_blocks ATTRIBUTE_UNUSED
)
1829 unsigned int bb_index
;
1832 if (!dflow
->block_pool
)
1833 dflow
->block_pool
= create_alloc_pool ("df_ur_block pool",
1834 sizeof (struct df_ur_bb_info
), 100);
1836 df_grow_bb_info (dflow
);
1838 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan
, 0, bb_index
, bi
)
1840 struct df_ur_bb_info
*bb_info
= df_ur_get_bb_info (dflow
, bb_index
);
1843 bitmap_clear (bb_info
->kill
);
1844 bitmap_clear (bb_info
->gen
);
1848 bb_info
= (struct df_ur_bb_info
*) pool_alloc (dflow
->block_pool
);
1849 df_ur_set_bb_info (dflow
, bb_index
, bb_info
);
1850 bb_info
->kill
= BITMAP_ALLOC (NULL
);
1851 bb_info
->gen
= BITMAP_ALLOC (NULL
);
1852 bb_info
->in
= BITMAP_ALLOC (NULL
);
1853 bb_info
->out
= BITMAP_ALLOC (NULL
);
1859 /* Compute local uninitialized register info for basic block BB. */
1862 df_ur_bb_local_compute (struct dataflow
*dflow
, unsigned int bb_index
)
1864 struct df
*df
= dflow
->df
;
1865 basic_block bb
= BASIC_BLOCK (bb_index
);
1866 struct df_ur_bb_info
*bb_info
= df_ur_get_bb_info (dflow
, bb_index
);
1870 bitmap_clear (seen_in_block
);
1871 bitmap_clear (seen_in_insn
);
1873 for (def
= df_get_artificial_defs (df
, bb_index
); def
; def
= def
->next_ref
)
1874 if ((DF_REF_FLAGS (def
) & DF_REF_AT_TOP
) == 0)
1876 unsigned int regno
= DF_REF_REGNO (def
);
1877 if (!bitmap_bit_p (seen_in_block
, regno
))
1879 bitmap_set_bit (seen_in_block
, regno
);
1880 bitmap_set_bit (bb_info
->gen
, regno
);
1884 FOR_BB_INSNS_REVERSE (bb
, insn
)
1886 unsigned int uid
= INSN_UID (insn
);
1890 for (def
= DF_INSN_UID_DEFS (df
, uid
); def
; def
= def
->next_ref
)
1892 unsigned int regno
= DF_REF_REGNO (def
);
1893 /* Only the last def counts. */
1894 if (!bitmap_bit_p (seen_in_block
, regno
))
1896 bitmap_set_bit (seen_in_insn
, regno
);
1898 if (DF_REF_FLAGS (def
)
1899 & (DF_REF_MUST_CLOBBER
| DF_REF_MAY_CLOBBER
))
1901 /* Only must clobbers for the entire reg destroy the
1903 if ((DF_REF_FLAGS (def
) & DF_REF_MUST_CLOBBER
)
1904 && (!DF_REF_FLAGS (def
) & DF_REF_PARTIAL
))
1905 bitmap_set_bit (bb_info
->kill
, regno
);
1908 bitmap_set_bit (bb_info
->gen
, regno
);
1911 bitmap_ior_into (seen_in_block
, seen_in_insn
);
1912 bitmap_clear (seen_in_insn
);
1915 for (def
= df_get_artificial_defs (df
, bb_index
); def
; def
= def
->next_ref
)
1916 if (DF_REF_FLAGS (def
) & DF_REF_AT_TOP
)
1918 unsigned int regno
= DF_REF_REGNO (def
);
1919 if (!bitmap_bit_p (seen_in_block
, regno
))
1921 bitmap_set_bit (seen_in_block
, regno
);
1922 bitmap_set_bit (bb_info
->gen
, regno
);
1928 /* Compute local uninitialized register info. */
1931 df_ur_local_compute (struct dataflow
*dflow
,
1932 bitmap all_blocks ATTRIBUTE_UNUSED
,
1933 bitmap rescan_blocks
)
1935 unsigned int bb_index
;
1940 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks
, 0, bb_index
, bi
)
1942 df_ur_bb_local_compute (dflow
, bb_index
);
1949 /* Initialize the solution vectors. */
1952 df_ur_init (struct dataflow
*dflow
, bitmap all_blocks
)
1954 unsigned int bb_index
;
1957 EXECUTE_IF_SET_IN_BITMAP (all_blocks
, 0, bb_index
, bi
)
1959 struct df_ur_bb_info
*bb_info
= df_ur_get_bb_info (dflow
, bb_index
);
1961 bitmap_copy (bb_info
->out
, bb_info
->gen
);
1962 bitmap_clear (bb_info
->in
);
1967 /* Or in the stack regs, hard regs and early clobber regs into the
1968 ur_in sets of all of the blocks. */
1971 df_ur_local_finalize (struct dataflow
*dflow
, bitmap all_blocks
)
1973 struct df
*df
= dflow
->df
;
1974 struct dataflow
*lr_dflow
= df
->problems_by_index
[DF_LR
];
1975 bitmap tmp
= BITMAP_ALLOC (NULL
);
1977 unsigned int bb_index
;
1979 EXECUTE_IF_SET_IN_BITMAP (all_blocks
, 0, bb_index
, bi
)
1981 struct df_ur_bb_info
*bb_info
= df_ur_get_bb_info (dflow
, bb_index
);
1982 struct df_lr_bb_info
*bb_lr_info
= df_lr_get_bb_info (lr_dflow
, bb_index
);
1984 /* No register may reach a location where it is not used. Thus
1985 we trim the rr result to the places where it is used. */
1986 bitmap_and_into (bb_info
->in
, bb_lr_info
->in
);
1987 bitmap_and_into (bb_info
->out
, bb_lr_info
->out
);
1990 /* Hard registers may still stick in the ur_out set, but not
1991 be in the ur_in set, if their only mention was in a call
1992 in this block. This is because a call kills in the lr
1993 problem but does not kill in the ur problem. To clean
1994 this up, we execute the transfer function on the lr_in
1995 set and then use that to knock bits out of ur_out. */
1996 bitmap_ior_and_compl (tmp
, bb_info
->gen
, bb_lr_info
->in
,
1998 bitmap_and_into (bb_info
->out
, tmp
);
2006 /* Confluence function that ignores fake edges. */
2009 df_ur_confluence_n (struct dataflow
*dflow
, edge e
)
2011 bitmap op1
= df_ur_get_bb_info (dflow
, e
->dest
->index
)->in
;
2012 bitmap op2
= df_ur_get_bb_info (dflow
, e
->src
->index
)->out
;
2014 if (e
->flags
& EDGE_FAKE
)
2017 bitmap_ior_into (op1
, op2
);
2021 /* Transfer function. */
2024 df_ur_transfer_function (struct dataflow
*dflow
, int bb_index
)
2026 struct df_ur_bb_info
*bb_info
= df_ur_get_bb_info (dflow
, bb_index
);
2027 bitmap in
= bb_info
->in
;
2028 bitmap out
= bb_info
->out
;
2029 bitmap gen
= bb_info
->gen
;
2030 bitmap kill
= bb_info
->kill
;
2032 return bitmap_ior_and_compl (out
, gen
, in
, kill
);
2036 /* Free all storage associated with the problem. */
2039 df_ur_free (struct dataflow
*dflow
)
2041 if (dflow
->block_info
)
2045 for (i
= 0; i
< dflow
->block_info_size
; i
++)
2047 struct df_ur_bb_info
*bb_info
= df_ur_get_bb_info (dflow
, i
);
2050 BITMAP_FREE (bb_info
->gen
);
2051 BITMAP_FREE (bb_info
->kill
);
2052 BITMAP_FREE (bb_info
->in
);
2053 BITMAP_FREE (bb_info
->out
);
2057 free_alloc_pool (dflow
->block_pool
);
2058 dflow
->block_info_size
= 0;
2059 free (dflow
->block_info
);
2065 /* Debugging info. */
2068 df_ur_dump (struct dataflow
*dflow
, FILE *file
)
2072 if (!dflow
->block_info
)
2075 fprintf (file
, "Undefined regs:\n");
2079 struct df_ur_bb_info
*bb_info
= df_ur_get_bb_info (dflow
, bb
->index
);
2080 df_print_bb_index (bb
, file
);
2085 fprintf (file
, " in \t");
2086 dump_bitmap (file
, bb_info
->in
);
2087 fprintf (file
, " gen \t");
2088 dump_bitmap (file
, bb_info
->gen
);
2089 fprintf (file
, " kill\t");
2090 dump_bitmap (file
, bb_info
->kill
);
2091 fprintf (file
, " out \t");
2092 dump_bitmap (file
, bb_info
->out
);
2096 /* All of the information associated with every instance of the problem. */
2098 static struct df_problem problem_UR
=
2100 DF_UR
, /* Problem id. */
2101 DF_FORWARD
, /* Direction. */
2102 df_ur_alloc
, /* Allocate the problem specific data. */
2103 NULL
, /* Reset global information. */
2104 df_ur_free_bb_info
, /* Free basic block info. */
2105 df_ur_local_compute
, /* Local compute function. */
2106 df_ur_init
, /* Init the solution specific data. */
2107 df_iterative_dataflow
, /* Iterative solver. */
2108 NULL
, /* Confluence operator 0. */
2109 df_ur_confluence_n
, /* Confluence operator n. */
2110 df_ur_transfer_function
, /* Transfer function. */
2111 df_ur_local_finalize
, /* Finalize function. */
2112 df_ur_free
, /* Free all of the problem information. */
2113 df_ur_dump
, /* Debugging. */
2114 df_lr_add_problem
, /* Dependent problem. */
2115 0 /* Changeable flags. */
2119 /* Create a new DATAFLOW instance and add it to an existing instance
2120 of DF. The returned structure is what is used to get at the
2124 df_ur_add_problem (struct df
*df
, int flags
)
2126 return df_add_problem (df
, &problem_UR
, flags
);
2131 /*----------------------------------------------------------------------------
2132 UNINITIALIZED REGISTERS WITH EARLYCLOBBER
2134 Find the set of uses for registers that are reachable from the entry
2135 block without passing thru a definition. In and out bitvectors are built
2136 for each basic block. The regnum is used to index into these sets.
2137 See df.h for details.
2139 This is a variant of the UR problem above that has a lot of special
2140 features just for the register allocation phase. This problem
2141 should go a away if someone would fix the interference graph.
2143 ----------------------------------------------------------------------------*/
2145 /* Private data used to compute the solution for this problem. These
2146 data structures are not accessible outside of this module. */
2147 struct df_urec_problem_data
2149 bool earlyclobbers_found
; /* True if any instruction contains an
2152 bitmap stack_regs
; /* Registers that may be allocated to a STACK_REGS. */
2157 /* Get basic block info. */
2159 struct df_urec_bb_info
*
2160 df_urec_get_bb_info (struct dataflow
*dflow
, unsigned int index
)
2162 return (struct df_urec_bb_info
*) dflow
->block_info
[index
];
2166 /* Set basic block info. */
2169 df_urec_set_bb_info (struct dataflow
*dflow
, unsigned int index
,
2170 struct df_urec_bb_info
*bb_info
)
2172 dflow
->block_info
[index
] = bb_info
;
2176 /* Free basic block info. */
2179 df_urec_free_bb_info (struct dataflow
*dflow
,
2180 basic_block bb ATTRIBUTE_UNUSED
,
2183 struct df_urec_bb_info
*bb_info
= (struct df_urec_bb_info
*) vbb_info
;
2186 BITMAP_FREE (bb_info
->gen
);
2187 BITMAP_FREE (bb_info
->kill
);
2188 BITMAP_FREE (bb_info
->in
);
2189 BITMAP_FREE (bb_info
->out
);
2190 BITMAP_FREE (bb_info
->earlyclobber
);
2191 pool_free (dflow
->block_pool
, bb_info
);
2196 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
2197 not touched unless the block is new. */
2200 df_urec_alloc (struct dataflow
*dflow
, bitmap blocks_to_rescan
,
2201 bitmap all_blocks ATTRIBUTE_UNUSED
)
2204 unsigned int bb_index
;
2206 struct df_urec_problem_data
*problem_data
2207 = (struct df_urec_problem_data
*) dflow
->problem_data
;
2209 if (!dflow
->block_pool
)
2210 dflow
->block_pool
= create_alloc_pool ("df_urec_block pool",
2211 sizeof (struct df_urec_bb_info
), 50);
2213 if (!dflow
->problem_data
)
2215 problem_data
= XNEW (struct df_urec_problem_data
);
2216 dflow
->problem_data
= problem_data
;
2218 problem_data
->earlyclobbers_found
= false;
2220 df_grow_bb_info (dflow
);
2222 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan
, 0, bb_index
, bi
)
2224 struct df_urec_bb_info
*bb_info
= df_urec_get_bb_info (dflow
, bb_index
);
2227 bitmap_clear (bb_info
->kill
);
2228 bitmap_clear (bb_info
->gen
);
2229 bitmap_clear (bb_info
->earlyclobber
);
2233 bb_info
= (struct df_urec_bb_info
*) pool_alloc (dflow
->block_pool
);
2234 df_urec_set_bb_info (dflow
, bb_index
, bb_info
);
2235 bb_info
->kill
= BITMAP_ALLOC (NULL
);
2236 bb_info
->gen
= BITMAP_ALLOC (NULL
);
2237 bb_info
->in
= BITMAP_ALLOC (NULL
);
2238 bb_info
->out
= BITMAP_ALLOC (NULL
);
2239 bb_info
->earlyclobber
= BITMAP_ALLOC (NULL
);
2245 /* The function modifies local info for register REG being changed in
2246 SETTER. DATA is used to pass the current basic block info. */
2249 df_urec_mark_reg_change (rtx reg
, rtx setter
, void *data
)
2254 struct df_urec_bb_info
*bb_info
= (struct df_urec_bb_info
*) data
;
2256 if (GET_CODE (reg
) == SUBREG
)
2257 reg
= SUBREG_REG (reg
);
2263 endregno
= regno
= REGNO (reg
);
2264 if (regno
< FIRST_PSEUDO_REGISTER
)
2266 endregno
+=hard_regno_nregs
[regno
][GET_MODE (reg
)];
2268 for (i
= regno
; i
< endregno
; i
++)
2270 bitmap_set_bit (bb_info
->kill
, i
);
2272 if (GET_CODE (setter
) != CLOBBER
)
2273 bitmap_set_bit (bb_info
->gen
, i
);
2275 bitmap_clear_bit (bb_info
->gen
, i
);
2280 bitmap_set_bit (bb_info
->kill
, regno
);
2282 if (GET_CODE (setter
) != CLOBBER
)
2283 bitmap_set_bit (bb_info
->gen
, regno
);
2285 bitmap_clear_bit (bb_info
->gen
, regno
);
2288 /* Classes of registers which could be early clobbered in the current
2291 static VEC(int,heap
) *earlyclobber_regclass
;
2293 /* This function finds and stores register classes that could be early
2294 clobbered in INSN. If any earlyclobber classes are found, the function
2295 returns TRUE, in all other cases it returns FALSE. */
2298 df_urec_check_earlyclobber (rtx insn
)
2303 extract_insn (insn
);
2305 VEC_truncate (int, earlyclobber_regclass
, 0);
2306 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2311 enum reg_class
class;
2312 const char *p
= recog_data
.constraints
[opno
];
2321 case '=': case '+': case '?':
2324 case 'm': case '<': case '>': case 'V': case 'o':
2325 case 'E': case 'F': case 'G': case 'H':
2326 case 's': case 'i': case 'n':
2327 case 'I': case 'J': case 'K': case 'L':
2328 case 'M': case 'N': case 'O': case 'P':
2330 case '0': case '1': case '2': case '3': case '4':
2331 case '5': case '6': case '7': case '8': case '9':
2332 /* These don't say anything we care about. */
2340 if (amp_p
&& class != NO_REGS
)
2346 VEC_iterate (int, earlyclobber_regclass
, i
, rc
);
2349 if (rc
== (int) class)
2353 /* We use VEC_quick_push here because
2354 earlyclobber_regclass holds no more than
2355 N_REG_CLASSES elements. */
2356 VEC_quick_push (int, earlyclobber_regclass
, (int) class);
2366 class = GENERAL_REGS
;
2370 class = REG_CLASS_FROM_CONSTRAINT (c
, p
);
2375 p
+= CONSTRAINT_LEN (c
, p
);
2382 /* The function checks that pseudo-register *X has a class
2383 intersecting with the class of pseudo-register could be early
2384 clobbered in the same insn.
2386 This function is a no-op if earlyclobber_regclass is empty.
2388 Reload can assign the same hard register to uninitialized
2389 pseudo-register and early clobbered pseudo-register in an insn if
2390 the pseudo-register is used first time in given BB and not lived at
2391 the BB start. To prevent this we don't change life information for
2392 such pseudo-registers. */
2395 df_urec_mark_reg_use_for_earlyclobber (rtx
*x
, void *data
)
2397 enum reg_class pref_class
, alt_class
;
2399 struct df_urec_bb_info
*bb_info
= (struct df_urec_bb_info
*) data
;
2401 if (REG_P (*x
) && REGNO (*x
) >= FIRST_PSEUDO_REGISTER
)
2406 if (bitmap_bit_p (bb_info
->kill
, regno
)
2407 || bitmap_bit_p (bb_info
->gen
, regno
))
2409 pref_class
= reg_preferred_class (regno
);
2410 alt_class
= reg_alternate_class (regno
);
2411 for (i
= 0; VEC_iterate (int, earlyclobber_regclass
, i
, rc
); i
++)
2413 if (reg_classes_intersect_p (rc
, pref_class
)
2415 && reg_classes_intersect_p (rc
, alt_class
)))
2417 bitmap_set_bit (bb_info
->earlyclobber
, regno
);
2425 /* The function processes all pseudo-registers in *X with the aid of
2426 previous function. */
2429 df_urec_mark_reg_use_for_earlyclobber_1 (rtx
*x
, void *data
)
2431 for_each_rtx (x
, df_urec_mark_reg_use_for_earlyclobber
, data
);
2435 /* Compute local uninitialized register info for basic block BB. */
2438 df_urec_bb_local_compute (struct dataflow
*dflow
, unsigned int bb_index
)
2440 struct df
*df
= dflow
->df
;
2441 basic_block bb
= BASIC_BLOCK (bb_index
);
2442 struct df_urec_bb_info
*bb_info
= df_urec_get_bb_info (dflow
, bb_index
);
2446 for (def
= df_get_artificial_defs (df
, bb_index
); def
; def
= def
->next_ref
)
2447 if (DF_REF_FLAGS (def
) & DF_REF_AT_TOP
)
2449 unsigned int regno
= DF_REF_REGNO (def
);
2450 bitmap_set_bit (bb_info
->gen
, regno
);
2453 FOR_BB_INSNS (bb
, insn
)
2457 note_stores (PATTERN (insn
), df_urec_mark_reg_change
, bb_info
);
2458 if (df_urec_check_earlyclobber (insn
))
2460 struct df_urec_problem_data
*problem_data
2461 = (struct df_urec_problem_data
*) dflow
->problem_data
;
2462 problem_data
->earlyclobbers_found
= true;
2463 note_uses (&PATTERN (insn
),
2464 df_urec_mark_reg_use_for_earlyclobber_1
, bb_info
);
2469 for (def
= df_get_artificial_defs (df
, bb_index
); def
; def
= def
->next_ref
)
2470 if ((DF_REF_FLAGS (def
) & DF_REF_AT_TOP
) == 0)
2472 unsigned int regno
= DF_REF_REGNO (def
);
2473 bitmap_set_bit (bb_info
->gen
, regno
);
2479 /* Compute local uninitialized register info. */
2482 df_urec_local_compute (struct dataflow
*dflow
,
2483 bitmap all_blocks ATTRIBUTE_UNUSED
,
2484 bitmap rescan_blocks
)
2486 unsigned int bb_index
;
2490 HARD_REG_SET zero
, stack_hard_regs
, used
;
2491 struct df_urec_problem_data
*problem_data
2492 = (struct df_urec_problem_data
*) dflow
->problem_data
;
2494 /* Any register that MAY be allocated to a register stack (like the
2495 387) is treated poorly. Each such register is marked as being
2496 live everywhere. This keeps the register allocator and the
2497 subsequent passes from doing anything useful with these values.
2499 FIXME: This seems like an incredibly poor idea. */
2501 CLEAR_HARD_REG_SET (zero
);
2502 CLEAR_HARD_REG_SET (stack_hard_regs
);
2503 for (i
= FIRST_STACK_REG
; i
<= LAST_STACK_REG
; i
++)
2504 SET_HARD_REG_BIT (stack_hard_regs
, i
);
2505 problem_data
->stack_regs
= BITMAP_ALLOC (NULL
);
2506 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
2508 COPY_HARD_REG_SET (used
, reg_class_contents
[reg_preferred_class (i
)]);
2509 IOR_HARD_REG_SET (used
, reg_class_contents
[reg_alternate_class (i
)]);
2510 AND_HARD_REG_SET (used
, stack_hard_regs
);
2511 GO_IF_HARD_REG_EQUAL (used
, zero
, skip
);
2512 bitmap_set_bit (problem_data
->stack_regs
, i
);
2518 /* We know that earlyclobber_regclass holds no more than
2519 N_REG_CLASSES elements. See df_urec_check_earlyclobber. */
2520 earlyclobber_regclass
= VEC_alloc (int, heap
, N_REG_CLASSES
);
2522 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks
, 0, bb_index
, bi
)
2524 df_urec_bb_local_compute (dflow
, bb_index
);
2527 VEC_free (int, heap
, earlyclobber_regclass
);
2531 /* Initialize the solution vectors. */
2534 df_urec_init (struct dataflow
*dflow
, bitmap all_blocks
)
2536 unsigned int bb_index
;
2539 EXECUTE_IF_SET_IN_BITMAP (all_blocks
, 0, bb_index
, bi
)
2541 struct df_urec_bb_info
*bb_info
= df_urec_get_bb_info (dflow
, bb_index
);
2543 bitmap_copy (bb_info
->out
, bb_info
->gen
);
2544 bitmap_clear (bb_info
->in
);
2549 /* Or in the stack regs, hard regs and early clobber regs into the
2550 ur_in sets of all of the blocks. */
2553 df_urec_local_finalize (struct dataflow
*dflow
, bitmap all_blocks
)
2555 struct df
*df
= dflow
->df
;
2556 struct dataflow
*lr_dflow
= df
->problems_by_index
[DF_LR
];
2557 bitmap tmp
= BITMAP_ALLOC (NULL
);
2559 unsigned int bb_index
;
2560 struct df_urec_problem_data
*problem_data
2561 = (struct df_urec_problem_data
*) dflow
->problem_data
;
2563 EXECUTE_IF_SET_IN_BITMAP (all_blocks
, 0, bb_index
, bi
)
2565 struct df_urec_bb_info
*bb_info
= df_urec_get_bb_info (dflow
, bb_index
);
2566 struct df_lr_bb_info
*bb_lr_info
= df_lr_get_bb_info (lr_dflow
, bb_index
);
2568 if (bb_index
!= ENTRY_BLOCK
&& bb_index
!= EXIT_BLOCK
)
2570 if (problem_data
->earlyclobbers_found
)
2571 bitmap_ior_into (bb_info
->in
, bb_info
->earlyclobber
);
2574 /* We can not use the same stack register for uninitialized
2575 pseudo-register and another living pseudo-register
2576 because if the uninitialized pseudo-register dies,
2577 subsequent pass reg-stack will be confused (it will
2578 believe that the other register dies). */
2579 bitmap_ior_into (bb_info
->in
, problem_data
->stack_regs
);
2580 bitmap_ior_into (bb_info
->out
, problem_data
->stack_regs
);
2584 /* No register may reach a location where it is not used. Thus
2585 we trim the rr result to the places where it is used. */
2586 bitmap_and_into (bb_info
->in
, bb_lr_info
->in
);
2587 bitmap_and_into (bb_info
->out
, bb_lr_info
->out
);
2590 /* Hard registers may still stick in the ur_out set, but not
2591 be in the ur_in set, if their only mention was in a call
2592 in this block. This is because a call kills in the lr
2593 problem but does not kill in the rr problem. To clean
2594 this up, we execute the transfer function on the lr_in
2595 set and then use that to knock bits out of ur_out. */
2596 bitmap_ior_and_compl (tmp
, bb_info
->gen
, bb_lr_info
->in
,
2598 bitmap_and_into (bb_info
->out
, tmp
);
2603 BITMAP_FREE (problem_data
->stack_regs
);
2609 /* Confluence function that ignores fake edges. */
2612 df_urec_confluence_n (struct dataflow
*dflow
, edge e
)
2614 bitmap op1
= df_urec_get_bb_info (dflow
, e
->dest
->index
)->in
;
2615 bitmap op2
= df_urec_get_bb_info (dflow
, e
->src
->index
)->out
;
2617 if (e
->flags
& EDGE_FAKE
)
2620 bitmap_ior_into (op1
, op2
);
2624 /* Transfer function. */
2627 df_urec_transfer_function (struct dataflow
*dflow
, int bb_index
)
2629 struct df_urec_bb_info
*bb_info
= df_urec_get_bb_info (dflow
, bb_index
);
2630 bitmap in
= bb_info
->in
;
2631 bitmap out
= bb_info
->out
;
2632 bitmap gen
= bb_info
->gen
;
2633 bitmap kill
= bb_info
->kill
;
2635 return bitmap_ior_and_compl (out
, gen
, in
, kill
);
2639 /* Free all storage associated with the problem. */
2642 df_urec_free (struct dataflow
*dflow
)
2644 if (dflow
->block_info
)
2648 for (i
= 0; i
< dflow
->block_info_size
; i
++)
2650 struct df_urec_bb_info
*bb_info
= df_urec_get_bb_info (dflow
, i
);
2653 BITMAP_FREE (bb_info
->gen
);
2654 BITMAP_FREE (bb_info
->kill
);
2655 BITMAP_FREE (bb_info
->in
);
2656 BITMAP_FREE (bb_info
->out
);
2657 BITMAP_FREE (bb_info
->earlyclobber
);
2661 free_alloc_pool (dflow
->block_pool
);
2663 dflow
->block_info_size
= 0;
2664 free (dflow
->block_info
);
2665 free (dflow
->problem_data
);
2671 /* Debugging info. */
2674 df_urec_dump (struct dataflow
*dflow
, FILE *file
)
2678 if (!dflow
->block_info
)
2681 fprintf (file
, "Undefined regs:\n");
2685 struct df_urec_bb_info
*bb_info
= df_urec_get_bb_info (dflow
, bb
->index
);
2686 df_print_bb_index (bb
, file
);
2691 fprintf (file
, " in \t");
2692 dump_bitmap (file
, bb_info
->in
);
2693 fprintf (file
, " gen \t");
2694 dump_bitmap (file
, bb_info
->gen
);
2695 fprintf (file
, " kill\t");
2696 dump_bitmap (file
, bb_info
->kill
);
2697 fprintf (file
, " ec\t");
2698 dump_bitmap (file
, bb_info
->earlyclobber
);
2699 fprintf (file
, " out \t");
2700 dump_bitmap (file
, bb_info
->out
);
2704 /* All of the information associated with every instance of the problem. */
2706 static struct df_problem problem_UREC
=
2708 DF_UREC
, /* Problem id. */
2709 DF_FORWARD
, /* Direction. */
2710 df_urec_alloc
, /* Allocate the problem specific data. */
2711 NULL
, /* Reset global information. */
2712 df_urec_free_bb_info
, /* Free basic block info. */
2713 df_urec_local_compute
, /* Local compute function. */
2714 df_urec_init
, /* Init the solution specific data. */
2715 df_iterative_dataflow
, /* Iterative solver. */
2716 NULL
, /* Confluence operator 0. */
2717 df_urec_confluence_n
, /* Confluence operator n. */
2718 df_urec_transfer_function
, /* Transfer function. */
2719 df_urec_local_finalize
, /* Finalize function. */
2720 df_urec_free
, /* Free all of the problem information. */
2721 df_urec_dump
, /* Debugging. */
2722 df_lr_add_problem
, /* Dependent problem. */
2723 0 /* Changeable flags. */
2727 /* Create a new DATAFLOW instance and add it to an existing instance
2728 of DF. The returned structure is what is used to get at the
2732 df_urec_add_problem (struct df
*df
, int flags
)
2734 return df_add_problem (df
, &problem_UREC
, flags
);
2739 /*----------------------------------------------------------------------------
2740 CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
2742 Link either the defs to the uses and / or the uses to the defs.
2744 These problems are set up like the other dataflow problems so that
2745 they nicely fit into the framework. They are much simpler and only
2746 involve a single traversal of instructions and an examination of
2747 the reaching defs information (the dependent problem).
2748 ----------------------------------------------------------------------------*/
2750 /* Create def-use or use-def chains. */
2753 df_chain_alloc (struct dataflow
*dflow
,
2754 bitmap blocks_to_rescan ATTRIBUTE_UNUSED
,
2755 bitmap all_blocks ATTRIBUTE_UNUSED
)
2758 struct df
*df
= dflow
->df
;
2761 /* Wholesale destruction of the old chains. */
2762 if (dflow
->block_pool
)
2763 free_alloc_pool (dflow
->block_pool
);
2765 dflow
->block_pool
= create_alloc_pool ("df_chain_chain_block pool",
2766 sizeof (struct df_link
), 100);
2768 if (dflow
->flags
& DF_DU_CHAIN
)
2770 df_reorganize_refs (&df
->def_info
);
2772 /* Clear out the pointers from the refs. */
2773 for (i
= 0; i
< DF_DEFS_SIZE (df
); i
++)
2775 struct df_ref
*ref
= df
->def_info
.refs
[i
];
2776 DF_REF_CHAIN (ref
) = NULL
;
2780 if (dflow
->flags
& DF_UD_CHAIN
)
2782 df_reorganize_refs (&df
->use_info
);
2783 for (i
= 0; i
< DF_USES_SIZE (df
); i
++)
2785 struct df_ref
*ref
= df
->use_info
.refs
[i
];
2786 DF_REF_CHAIN (ref
) = NULL
;
2792 /* Reset all def_use and use_def chains in INSN. */
2795 df_chain_insn_reset (struct dataflow
*dflow
, rtx insn
)
2797 struct df
*df
= dflow
->df
;
2798 unsigned int uid
= INSN_UID (insn
);
2799 struct df_insn_info
*insn_info
= NULL
;
2802 if (uid
< df
->insns_size
)
2803 insn_info
= DF_INSN_UID_GET (df
, uid
);
2807 if (dflow
->flags
& DF_DU_CHAIN
)
2809 ref
= insn_info
->defs
;
2813 ref
= ref
->next_ref
;
2817 if (dflow
->flags
& DF_UD_CHAIN
)
2819 ref
= insn_info
->uses
;
2823 ref
= ref
->next_ref
;
2830 /* Reset all def_use and use_def chains in basic block. */
2833 df_chain_bb_reset (struct dataflow
*dflow
, unsigned int bb_index
)
2835 struct df
*df
= dflow
->df
;
2837 basic_block bb
= BASIC_BLOCK (bb_index
);
2839 /* Some one deleted the basic block out from under us. */
2843 FOR_BB_INSNS (bb
, insn
)
2847 /* Record defs within INSN. */
2848 df_chain_insn_reset (dflow
, insn
);
2852 /* Get rid of any chains in artificial uses or defs. */
2853 if (dflow
->flags
& DF_DU_CHAIN
)
2856 def
= df_get_artificial_defs (df
, bb_index
);
2860 def
= def
->next_ref
;
2864 if (dflow
->flags
& DF_UD_CHAIN
)
2867 use
= df_get_artificial_uses (df
, bb_index
);
2871 use
= use
->next_ref
;
2877 /* Reset all of the chains when the set of basic blocks changes. */
2881 df_chain_reset (struct dataflow
*dflow
, bitmap blocks_to_clear
)
2884 unsigned int bb_index
;
2886 EXECUTE_IF_SET_IN_BITMAP (blocks_to_clear
, 0, bb_index
, bi
)
2888 df_chain_bb_reset (dflow
, bb_index
);
2891 free_alloc_pool (dflow
->block_pool
);
2892 dflow
->block_pool
= NULL
;
2896 /* Create the chains for a list of USEs. */
2899 df_chain_create_bb_process_use (struct dataflow
*dflow
,
2902 enum df_ref_flags top_flag
)
2904 struct df
*df
= dflow
->df
;
2906 unsigned int def_index
;
2910 /* Do not want to go through this for an uninitialized var. */
2911 unsigned int uregno
= DF_REF_REGNO (use
);
2912 int count
= DF_REG_DEF_GET (df
, uregno
)->n_refs
;
2915 if (top_flag
== (DF_REF_FLAGS (use
) & DF_REF_AT_TOP
))
2917 unsigned int first_index
= DF_REG_DEF_GET (df
, uregno
)->begin
;
2918 unsigned int last_index
= first_index
+ count
- 1;
2920 EXECUTE_IF_SET_IN_BITMAP (local_rd
, first_index
, def_index
, bi
)
2923 if (def_index
> last_index
)
2926 def
= DF_DEFS_GET (df
, def_index
);
2927 if (dflow
->flags
& DF_DU_CHAIN
)
2928 df_chain_create (dflow
, def
, use
);
2929 if (dflow
->flags
& DF_UD_CHAIN
)
2930 df_chain_create (dflow
, use
, def
);
2934 use
= use
->next_ref
;
2938 /* Reset the storage pool that the def-use or use-def chains have been
2939 allocated in. We do not need to re adjust the pointers in the refs,
2940 these have already been clean out.*/
2942 /* Create chains from reaching defs bitmaps for basic block BB. */
2944 df_chain_create_bb (struct dataflow
*dflow
,
2945 struct dataflow
*rd_dflow
,
2946 unsigned int bb_index
)
2948 basic_block bb
= BASIC_BLOCK (bb_index
);
2949 struct df_rd_bb_info
*bb_info
= df_rd_get_bb_info (rd_dflow
, bb_index
);
2951 bitmap cpy
= BITMAP_ALLOC (NULL
);
2952 struct df
*df
= dflow
->df
;
2955 bitmap_copy (cpy
, bb_info
->in
);
2957 /* Since we are going forwards, process the artificial uses first
2958 then the artificial defs second. */
2961 /* Create the chains for the artificial uses from the EH_USES at the
2962 beginning of the block. */
2963 df_chain_create_bb_process_use (dflow
, cpy
,
2964 df_get_artificial_uses (df
, bb
->index
),
2968 for (def
= df_get_artificial_defs (df
, bb_index
); def
; def
= def
->next_ref
)
2969 if (DF_REF_FLAGS (def
) & DF_REF_AT_TOP
)
2971 unsigned int dregno
= DF_REF_REGNO (def
);
2972 if (!(DF_REF_FLAGS (def
) & DF_REF_PARTIAL
))
2973 bitmap_clear_range (cpy
,
2974 DF_REG_DEF_GET (df
, dregno
)->begin
,
2975 DF_REG_DEF_GET (df
, dregno
)->n_refs
);
2976 bitmap_set_bit (cpy
, DF_REF_ID (def
));
2979 /* Process the regular instructions next. */
2980 FOR_BB_INSNS (bb
, insn
)
2983 unsigned int uid
= INSN_UID (insn
);
2988 /* Now scan the uses and link them up with the defs that remain
2989 in the cpy vector. */
2991 df_chain_create_bb_process_use (dflow
, cpy
,
2992 DF_INSN_UID_USES (df
, uid
), 0);
2994 /* Since we are going forwards, process the defs second. This
2995 pass only changes the bits in cpy. */
2996 for (def
= DF_INSN_UID_DEFS (df
, uid
); def
; def
= def
->next_ref
)
2998 unsigned int dregno
= DF_REF_REGNO (def
);
2999 if (!(DF_REF_FLAGS (def
) & DF_REF_PARTIAL
))
3000 bitmap_clear_range (cpy
,
3001 DF_REG_DEF_GET (df
, dregno
)->begin
,
3002 DF_REG_DEF_GET (df
, dregno
)->n_refs
);
3003 if (!(DF_REF_FLAGS (def
)
3004 & (DF_REF_MUST_CLOBBER
| DF_REF_MAY_CLOBBER
)))
3005 bitmap_set_bit (cpy
, DF_REF_ID (def
));
3009 /* Create the chains for the artificial uses of the hard registers
3010 at the end of the block. */
3011 df_chain_create_bb_process_use (dflow
, cpy
,
3012 df_get_artificial_uses (df
, bb
->index
), 0);
3015 /* Create def-use chains from reaching use bitmaps for basic blocks
3019 df_chain_finalize (struct dataflow
*dflow
, bitmap all_blocks
)
3021 unsigned int bb_index
;
3023 struct df
*df
= dflow
->df
;
3024 struct dataflow
*rd_dflow
= df
->problems_by_index
[DF_RD
];
3026 EXECUTE_IF_SET_IN_BITMAP (all_blocks
, 0, bb_index
, bi
)
3028 df_chain_create_bb (dflow
, rd_dflow
, bb_index
);
3033 /* Free all storage associated with the problem. */
3036 df_chain_free (struct dataflow
*dflow
)
3038 free_alloc_pool (dflow
->block_pool
);
3043 /* Debugging info. */
3046 df_chains_dump (struct dataflow
*dflow
, FILE *file
)
3048 struct df
*df
= dflow
->df
;
3051 if (dflow
->flags
& DF_DU_CHAIN
)
3053 fprintf (file
, "Def-use chains:\n");
3054 for (j
= 0; j
< df
->def_info
.bitmap_size
; j
++)
3056 struct df_ref
*def
= DF_DEFS_GET (df
, j
);
3059 fprintf (file
, "d%d bb %d luid %d insn %d reg %d ",
3060 j
, DF_REF_BBNO (def
),
3062 DF_INSN_LUID (df
, DF_REF_INSN (def
)):
3064 DF_REF_INSN (def
) ? DF_REF_INSN_UID (def
) : -1,
3065 DF_REF_REGNO (def
));
3066 if (def
->flags
& DF_REF_READ_WRITE
)
3067 fprintf (file
, "read/write ");
3068 df_chain_dump (DF_REF_CHAIN (def
), file
);
3069 fprintf (file
, "\n");
3074 if (dflow
->flags
& DF_UD_CHAIN
)
3076 fprintf (file
, "Use-def chains:\n");
3077 for (j
= 0; j
< df
->use_info
.bitmap_size
; j
++)
3079 struct df_ref
*use
= DF_USES_GET (df
, j
);
3082 fprintf (file
, "u%d bb %d luid %d insn %d reg %d ",
3083 j
, DF_REF_BBNO (use
),
3085 DF_INSN_LUID (df
, DF_REF_INSN (use
))
3087 DF_REF_INSN (DF_USES_GET (df
, j
)) ?
3088 DF_REF_INSN_UID (DF_USES_GET (df
,j
))
3090 DF_REF_REGNO (use
));
3091 if (use
->flags
& DF_REF_READ_WRITE
)
3092 fprintf (file
, "read/write ");
3093 if (use
->flags
& DF_REF_STRIPPED
)
3094 fprintf (file
, "stripped ");
3095 if (use
->flags
& DF_REF_IN_NOTE
)
3096 fprintf (file
, "note ");
3097 df_chain_dump (DF_REF_CHAIN (use
), file
);
3098 fprintf (file
, "\n");
3105 static struct df_problem problem_CHAIN
=
3107 DF_CHAIN
, /* Problem id. */
3108 DF_NONE
, /* Direction. */
3109 df_chain_alloc
, /* Allocate the problem specific data. */
3110 df_chain_reset
, /* Reset global information. */
3111 NULL
, /* Free basic block info. */
3112 NULL
, /* Local compute function. */
3113 NULL
, /* Init the solution specific data. */
3114 NULL
, /* Iterative solver. */
3115 NULL
, /* Confluence operator 0. */
3116 NULL
, /* Confluence operator n. */
3117 NULL
, /* Transfer function. */
3118 df_chain_finalize
, /* Finalize function. */
3119 df_chain_free
, /* Free all of the problem information. */
3120 df_chains_dump
, /* Debugging. */
3121 df_rd_add_problem
, /* Dependent problem. */
3122 0 /* Changeable flags. */
3126 /* Create a new DATAFLOW instance and add it to an existing instance
3127 of DF. The returned structure is what is used to get at the
3131 df_chain_add_problem (struct df
*df
, int flags
)
3133 return df_add_problem (df
, &problem_CHAIN
, flags
);
3137 /*----------------------------------------------------------------------------
3138 REGISTER INFORMATION
3140 This pass properly computes REG_DEAD and REG_UNUSED notes.
3142 If the DF_RI_LIFE flag is set the following vectors containing
3143 information about register usage are properly set: REG_N_REFS,
3144 REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH, REG_N_CALLS_CROSSED,
3145 REG_N_THROWING_CALLS_CROSSED and REG_BASIC_BLOCK.
3147 ----------------------------------------------------------------------------*/
#ifdef REG_DEAD_DEBUGGING
/* Debug helper: print PREFIX, the uid of INSN, and the rtl of NOTE to
   stderr.  */

static void
print_note (char *prefix, rtx insn, rtx note)
{
  fprintf (stderr, "%s %d ", prefix, INSN_UID (insn));
  print_rtl (stderr, note);
  fprintf (stderr, "\n");
}
#endif
3159 /* Allocate the lifetime information. */
3162 df_ri_alloc (struct dataflow
*dflow
,
3163 bitmap blocks_to_rescan ATTRIBUTE_UNUSED
,
3164 bitmap all_blocks ATTRIBUTE_UNUSED
)
3167 struct df
*df
= dflow
->df
;
3169 if (dflow
->flags
& DF_RI_LIFE
)
3171 max_regno
= max_reg_num ();
3172 allocate_reg_info (max_regno
, FALSE
, FALSE
);
3174 /* Reset all the data we'll collect. */
3175 for (i
= 0; i
< max_regno
; i
++)
3177 REG_N_SETS (i
) = DF_REG_DEF_COUNT (df
, i
);
3178 REG_N_REFS (i
) = DF_REG_USE_COUNT (df
, i
) + REG_N_SETS (i
);
3179 REG_N_DEATHS (i
) = 0;
3180 REG_N_CALLS_CROSSED (i
) = 0;
3181 REG_N_THROWING_CALLS_CROSSED (i
) = 0;
3182 REG_LIVE_LENGTH (i
) = 0;
3184 REG_BASIC_BLOCK (i
) = REG_BLOCK_UNKNOWN
;
3190 /* After reg-stack, the x86 floating point stack regs are difficult to
3191 analyze because of all of the pushes, pops and rotations. Thus, we
3192 just leave the notes alone. */
3195 df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED
)
3198 return (regstack_completed
3199 && IN_RANGE (regno
, FIRST_STACK_REG
, LAST_STACK_REG
));
3206 /* Remove all of the REG_DEAD or REG_UNUSED notes from INSN. */
3209 df_kill_notes (rtx insn
, int flags
)
3211 rtx
*pprev
= ®_NOTES (insn
);
3216 switch (REG_NOTE_KIND (link
))
3219 if (flags
& DF_RI_LIFE
)
3220 if (df_ignore_stack_reg (REGNO (XEXP (link
, 0))))
3221 REG_N_DEATHS (REGNO (XEXP (link
, 0)))++;
3225 if (!df_ignore_stack_reg (REGNO (XEXP (link
, 0))))
3227 rtx next
= XEXP (link
, 1);
3228 #ifdef REG_DEAD_DEBUGGING
3229 print_note ("deleting: ", insn
, link
);
3231 free_EXPR_LIST_node (link
);
3232 *pprev
= link
= next
;
3237 pprev
= &XEXP (link
, 1);
3245 /* Set the REG_UNUSED notes for the multiword hardreg defs in INSN
3246 based on the bits in LIVE. Do not generate notes for registers in
3247 artificial uses. DO_NOT_GEN is updated so that REG_DEAD notes are
3248 not generated if the reg is both read and written by the
3253 df_set_unused_notes_for_mw (rtx insn
, struct df_mw_hardreg
*mws
,
3254 bitmap live
, bitmap do_not_gen
,
3255 bitmap artificial_uses
, int flags
)
3257 bool all_dead
= true;
3258 struct df_link
*regs
= mws
->regs
;
3259 unsigned int regno
= DF_REF_REGNO (regs
->ref
);
3261 #ifdef REG_DEAD_DEBUGGING
3262 fprintf (stderr
, "mw unused looking at %d\n", DF_REF_REGNO (regs
->ref
));
3263 df_ref_debug (regs
->ref
, stderr
);
3267 unsigned int regno
= DF_REF_REGNO (regs
->ref
);
3268 if ((bitmap_bit_p (live
, regno
))
3269 || bitmap_bit_p (artificial_uses
, regno
))
3279 struct df_link
*regs
= mws
->regs
;
3280 rtx note
= alloc_EXPR_LIST (REG_UNUSED
, *DF_REF_LOC (regs
->ref
),
3282 REG_NOTES (insn
) = note
;
3283 #ifdef REG_DEAD_DEBUGGING
3284 print_note ("adding 1: ", insn
, note
);
3286 bitmap_set_bit (do_not_gen
, regno
);
3287 /* Only do this if the value is totally dead. */
3288 if (flags
& DF_RI_LIFE
)
3290 REG_N_DEATHS (regno
) ++;
3291 REG_LIVE_LENGTH (regno
)++;
3296 struct df_link
*regs
= mws
->regs
;
3299 struct df_ref
*ref
= regs
->ref
;
3301 regno
= DF_REF_REGNO (ref
);
3302 if ((!bitmap_bit_p (live
, regno
))
3303 && (!bitmap_bit_p (artificial_uses
, regno
)))
3305 rtx note
= alloc_EXPR_LIST (REG_UNUSED
, regno_reg_rtx
[regno
],
3307 REG_NOTES (insn
) = note
;
3308 #ifdef REG_DEAD_DEBUGGING
3309 print_note ("adding 2: ", insn
, note
);
3312 bitmap_set_bit (do_not_gen
, regno
);
3319 /* Set the REG_DEAD notes for the multiword hardreg use in INSN based
3320 on the bits in LIVE. DO_NOT_GEN is used to keep REG_DEAD notes
3321 from being set if the instruction both reads and writes the
3325 df_set_dead_notes_for_mw (rtx insn
, struct df_mw_hardreg
*mws
,
3326 bitmap live
, bitmap do_not_gen
,
3327 bitmap artificial_uses
, int flags
)
3329 bool all_dead
= true;
3330 struct df_link
*regs
= mws
->regs
;
3331 unsigned int regno
= DF_REF_REGNO (regs
->ref
);
3333 #ifdef REG_DEAD_DEBUGGING
3334 fprintf (stderr
, "mw looking at %d\n", DF_REF_REGNO (regs
->ref
));
3335 df_ref_debug (regs
->ref
, stderr
);
3339 unsigned int regno
= DF_REF_REGNO (regs
->ref
);
3340 if ((bitmap_bit_p (live
, regno
))
3341 || bitmap_bit_p (artificial_uses
, regno
))
3351 if (!bitmap_bit_p (do_not_gen
, regno
))
3353 /* Add a dead note for the entire multi word register. */
3354 struct df_link
*regs
= mws
->regs
;
3355 rtx note
= alloc_EXPR_LIST (REG_DEAD
, *DF_REF_LOC (regs
->ref
),
3357 REG_NOTES (insn
) = note
;
3358 #ifdef REG_DEAD_DEBUGGING
3359 print_note ("adding 1: ", insn
, note
);
3362 if (flags
& DF_RI_LIFE
)
3364 struct df_link
*regs
= mws
->regs
;
3367 struct df_ref
*ref
= regs
->ref
;
3368 regno
= DF_REF_REGNO (ref
);
3369 REG_N_DEATHS (regno
)++;
3377 struct df_link
*regs
= mws
->regs
;
3380 struct df_ref
*ref
= regs
->ref
;
3382 regno
= DF_REF_REGNO (ref
);
3383 if ((!bitmap_bit_p (live
, regno
))
3384 && (!bitmap_bit_p (artificial_uses
, regno
))
3385 && (!bitmap_bit_p (do_not_gen
, regno
)))
3387 rtx note
= alloc_EXPR_LIST (REG_DEAD
, regno_reg_rtx
[regno
],
3389 REG_NOTES (insn
) = note
;
3390 if (flags
& DF_RI_LIFE
)
3391 REG_N_DEATHS (regno
)++;
3392 #ifdef REG_DEAD_DEBUGGING
3393 print_note ("adding 2: ", insn
, note
);
3403 /* Create a REG_UNUSED note if necessary for DEF in INSN updating LIVE
3404 and DO_NOT_GEN. Do not generate notes for registers in artificial
3408 df_create_unused_note (basic_block bb
, rtx insn
, struct df_ref
*def
,
3409 bitmap live
, bitmap do_not_gen
, bitmap artificial_uses
,
3410 bitmap local_live
, bitmap local_processed
,
3411 int flags
, int luid
)
3413 unsigned int dregno
= DF_REF_REGNO (def
);
3415 #ifdef REG_DEAD_DEBUGGING
3416 fprintf (stderr
, " regular looking at def ");
3417 df_ref_debug (def
, stderr
);
3420 if (bitmap_bit_p (live
, dregno
))
3422 if (flags
& DF_RI_LIFE
)
3424 /* If we have seen this regno, then it has already been
3425 processed correctly with the per insn increment. If we
3426 have not seen it we need to add the length from here to
3427 the end of the block to the live length. */
3428 if (bitmap_bit_p (local_processed
, dregno
))
3430 if (!(DF_REF_FLAGS (def
) & DF_REF_PARTIAL
))
3431 bitmap_clear_bit (local_live
, dregno
);
3435 bitmap_set_bit (local_processed
, dregno
);
3436 REG_LIVE_LENGTH (dregno
) += luid
;
3440 else if ((!(DF_REF_FLAGS (def
) & DF_REF_MW_HARDREG
))
3441 && (!bitmap_bit_p (artificial_uses
, dregno
))
3442 && (!df_ignore_stack_reg (dregno
)))
3444 rtx reg
= GET_CODE (*DF_REF_LOC (def
)) == SUBREG
?
3445 SUBREG_REG (*DF_REF_LOC (def
)) : *DF_REF_LOC (def
);
3446 rtx note
= alloc_EXPR_LIST (REG_UNUSED
, reg
, REG_NOTES (insn
));
3447 REG_NOTES (insn
) = note
;
3448 #ifdef REG_DEAD_DEBUGGING
3449 print_note ("adding 3: ", insn
, note
);
3451 if (flags
& DF_RI_LIFE
)
3453 REG_N_DEATHS (dregno
) ++;
3454 REG_LIVE_LENGTH (dregno
)++;
3458 if ((flags
& DF_RI_LIFE
) && (dregno
>= FIRST_PSEUDO_REGISTER
))
3460 REG_FREQ (dregno
) += REG_FREQ_FROM_BB (bb
);
3461 if (REG_BASIC_BLOCK (dregno
) == REG_BLOCK_UNKNOWN
)
3462 REG_BASIC_BLOCK (dregno
) = bb
->index
;
3463 else if (REG_BASIC_BLOCK (dregno
) != bb
->index
)
3464 REG_BASIC_BLOCK (dregno
) = REG_BLOCK_GLOBAL
;
3467 if (!(DF_REF_FLAGS (def
) & (DF_REF_MUST_CLOBBER
+ DF_REF_MAY_CLOBBER
)))
3468 bitmap_set_bit (do_not_gen
, dregno
);
3470 /* Kill this register if it is not a subreg store. */
3471 if (!(DF_REF_FLAGS (def
) & DF_REF_PARTIAL
))
3472 bitmap_clear_bit (live
, dregno
);
3476 /* Recompute the REG_DEAD and REG_UNUSED notes and compute register
3477 info: lifetime, bb, and number of defs and uses for basic block
3478 BB. The three bitvectors are scratch regs used here. */
3481 df_ri_bb_compute (struct dataflow
*dflow
, unsigned int bb_index
,
3482 bitmap live
, bitmap do_not_gen
, bitmap artificial_uses
,
3483 bitmap local_live
, bitmap local_processed
, bitmap setjumps_crossed
)
3485 struct df
*df
= dflow
->df
;
3486 basic_block bb
= BASIC_BLOCK (bb_index
);
3492 bitmap_copy (live
, df_get_live_out (df
, bb
));
3493 bitmap_clear (artificial_uses
);
3495 if (dflow
->flags
& DF_RI_LIFE
)
3497 /* Process the regs live at the end of the block. Mark them as
3498 not local to any one basic block. */
3501 EXECUTE_IF_SET_IN_BITMAP (live
, 0, regno
, bi
)
3502 REG_BASIC_BLOCK (regno
) = REG_BLOCK_GLOBAL
;
3505 /* Process the artificial defs and uses at the bottom of the block
3506 to begin processing. */
3507 for (def
= df_get_artificial_defs (df
, bb_index
); def
; def
= def
->next_ref
)
3508 if ((DF_REF_FLAGS (def
) & DF_REF_AT_TOP
) == 0)
3509 bitmap_clear_bit (live
, DF_REF_REGNO (def
));
3511 for (use
= df_get_artificial_uses (df
, bb_index
); use
; use
= use
->next_ref
)
3512 if ((DF_REF_FLAGS (use
) & DF_REF_AT_TOP
) == 0)
3514 unsigned int regno
= DF_REF_REGNO (use
);
3515 bitmap_set_bit (live
, regno
);
3517 /* Notes are not generated for any of the artificial registers
3518 at the bottom of the block. */
3519 bitmap_set_bit (artificial_uses
, regno
);
3522 FOR_BB_INSNS_REVERSE (bb
, insn
)
3524 unsigned int uid
= INSN_UID (insn
);
3527 struct df_mw_hardreg
*mws
;
3532 if (dflow
->flags
& DF_RI_LIFE
)
3534 /* Increment the live_length for all of the registers that
3535 are are referenced in this block and live at this
3536 particular point. */
3539 EXECUTE_IF_SET_IN_BITMAP (local_live
, 0, regno
, bi
)
3541 REG_LIVE_LENGTH (regno
)++;
3546 bitmap_clear (do_not_gen
);
3547 df_kill_notes (insn
, dflow
->flags
);
3549 /* Process the defs. */
3552 if (dflow
->flags
& DF_RI_LIFE
)
3554 bool can_throw
= can_throw_internal (insn
);
3555 bool set_jump
= (find_reg_note (insn
, REG_SETJMP
, NULL
) != NULL
);
3556 EXECUTE_IF_SET_IN_BITMAP (live
, 0, regno
, bi
)
3558 REG_N_CALLS_CROSSED (regno
)++;
3560 REG_N_THROWING_CALLS_CROSSED (regno
)++;
3562 /* We have a problem with any pseudoreg that lives
3563 across the setjmp. ANSI says that if a user
3564 variable does not change in value between the
3565 setjmp and the longjmp, then the longjmp
3566 preserves it. This includes longjmp from a place
3567 where the pseudo appears dead. (In principle,
3568 the value still exists if it is in scope.) If
3569 the pseudo goes in a hard reg, some other value
3570 may occupy that hard reg where this pseudo is
3571 dead, thus clobbering the pseudo. Conclusion:
3572 such a pseudo must not go in a hard reg. */
3573 if (set_jump
&& regno
>= FIRST_PSEUDO_REGISTER
)
3574 bitmap_set_bit (setjumps_crossed
, regno
);
3578 /* We only care about real sets for calls. Clobbers only
3579 may clobber and cannot be depended on. */
3580 for (mws
= DF_INSN_UID_MWS (df
, uid
); mws
; mws
= mws
->next
)
3582 if ((mws
->type
== DF_REF_REG_DEF
)
3583 && !df_ignore_stack_reg (REGNO (mws
->mw_reg
)))
3584 df_set_unused_notes_for_mw (insn
, mws
, live
, do_not_gen
,
3585 artificial_uses
, dflow
->flags
);
3588 /* All of the defs except the return value are some sort of
3589 clobber. This code is for the return. */
3590 for (def
= DF_INSN_UID_DEFS (df
, uid
); def
; def
= def
->next_ref
)
3591 if (!(DF_REF_FLAGS (def
) & (DF_REF_MUST_CLOBBER
| DF_REF_MAY_CLOBBER
)))
3592 df_create_unused_note (bb
, insn
, def
, live
, do_not_gen
,
3593 artificial_uses
, local_live
,
3594 local_processed
, dflow
->flags
, luid
);
3600 for (mws
= DF_INSN_UID_MWS (df
, uid
); mws
; mws
= mws
->next
)
3602 if (mws
->type
== DF_REF_REG_DEF
)
3603 df_set_unused_notes_for_mw (insn
, mws
, live
, do_not_gen
,
3604 artificial_uses
, dflow
->flags
);
3607 for (def
= DF_INSN_UID_DEFS (df
, uid
); def
; def
= def
->next_ref
)
3608 df_create_unused_note (bb
, insn
, def
, live
, do_not_gen
,
3609 artificial_uses
, local_live
,
3610 local_processed
, dflow
->flags
, luid
);
3613 /* Process the uses. */
3614 for (mws
= DF_INSN_UID_MWS (df
, uid
); mws
; mws
= mws
->next
)
3616 if ((mws
->type
!= DF_REF_REG_DEF
)
3617 && !df_ignore_stack_reg (REGNO (mws
->mw_reg
)))
3618 df_set_dead_notes_for_mw (insn
, mws
, live
, do_not_gen
,
3619 artificial_uses
, dflow
->flags
);
3622 for (use
= DF_INSN_UID_USES (df
, uid
); use
; use
= use
->next_ref
)
3624 unsigned int uregno
= DF_REF_REGNO (use
);
3626 if ((dflow
->flags
& DF_RI_LIFE
) && (uregno
>= FIRST_PSEUDO_REGISTER
))
3628 REG_FREQ (uregno
) += REG_FREQ_FROM_BB (bb
);
3629 if (REG_BASIC_BLOCK (uregno
) == REG_BLOCK_UNKNOWN
)
3630 REG_BASIC_BLOCK (uregno
) = bb
->index
;
3631 else if (REG_BASIC_BLOCK (uregno
) != bb
->index
)
3632 REG_BASIC_BLOCK (uregno
) = REG_BLOCK_GLOBAL
;
3635 #ifdef REG_DEAD_DEBUGGING
3636 fprintf (stderr
, " regular looking at use ");
3637 df_ref_debug (use
, stderr
);
3639 if (!bitmap_bit_p (live
, uregno
))
3641 if ( (!(DF_REF_FLAGS (use
) & DF_REF_MW_HARDREG
))
3642 && (!bitmap_bit_p (do_not_gen
, uregno
))
3643 && (!bitmap_bit_p (artificial_uses
, uregno
))
3644 && (!(DF_REF_FLAGS (use
) & DF_REF_READ_WRITE
))
3645 && (!df_ignore_stack_reg (uregno
)))
3647 rtx reg
= GET_CODE (*DF_REF_LOC (use
)) == SUBREG
?
3648 SUBREG_REG (*DF_REF_LOC (use
)) : *DF_REF_LOC (use
);
3649 rtx note
= alloc_EXPR_LIST (REG_DEAD
, reg
, REG_NOTES (insn
));
3650 REG_NOTES (insn
) = note
;
3651 if (dflow
->flags
& DF_RI_LIFE
)
3652 REG_N_DEATHS (uregno
)++;
3654 #ifdef REG_DEAD_DEBUGGING
3655 print_note ("adding 4: ", insn
, note
);
3658 /* This register is now live. */
3659 bitmap_set_bit (live
, uregno
);
3661 if (dflow
->flags
& DF_RI_LIFE
)
3663 /* If we have seen this regno, then it has already
3664 been processed correctly with the per insn
3665 increment. If we have not seen it we set the bit
3666 so that begins to get processed locally. Note
3667 that we don't even get here if the variable was
3668 live at the end of the block since just a ref
3669 inside the block does not effect the
3671 REG_LIVE_LENGTH (uregno
) ++;
3672 bitmap_set_bit (local_live
, uregno
);
3673 bitmap_set_bit (local_processed
, uregno
);
3679 if (dflow
->flags
& DF_RI_LIFE
)
3681 /* Add the length of the block to all of the registers that were
3682 not referenced, but still live in this block. */
3685 bitmap_and_compl_into (live
, local_processed
);
3686 EXECUTE_IF_SET_IN_BITMAP (live
, 0, regno
, bi
)
3688 REG_LIVE_LENGTH (regno
) += luid
;
3690 bitmap_clear (local_processed
);
3691 bitmap_clear (local_live
);
3696 /* Compute register info: lifetime, bb, and number of defs and uses. */
3698 df_ri_compute (struct dataflow
*dflow
, bitmap all_blocks ATTRIBUTE_UNUSED
,
3699 bitmap blocks_to_scan
)
3701 unsigned int bb_index
;
3703 bitmap live
= BITMAP_ALLOC (NULL
);
3704 bitmap do_not_gen
= BITMAP_ALLOC (NULL
);
3705 bitmap artificial_uses
= BITMAP_ALLOC (NULL
);
3706 bitmap local_live
= NULL
;
3707 bitmap local_processed
= NULL
;
3708 bitmap setjumps_crossed
= NULL
;
3710 if (dflow
->flags
& DF_RI_LIFE
)
3712 local_live
= BITMAP_ALLOC (NULL
);
3713 local_processed
= BITMAP_ALLOC (NULL
);
3714 setjumps_crossed
= BITMAP_ALLOC (NULL
);
3718 #ifdef REG_DEAD_DEBUGGING
3719 df_lr_dump (dflow
->df
->problems_by_index
[DF_LR
], stderr
);
3720 print_rtl_with_bb (stderr
, get_insns());
3723 EXECUTE_IF_SET_IN_BITMAP (blocks_to_scan
, 0, bb_index
, bi
)
3725 df_ri_bb_compute (dflow
, bb_index
, live
, do_not_gen
, artificial_uses
,
3726 local_live
, local_processed
, setjumps_crossed
);
3730 BITMAP_FREE (do_not_gen
);
3731 BITMAP_FREE (artificial_uses
);
3732 if (dflow
->flags
& DF_RI_LIFE
)
3736 /* See the setjump comment in df_ri_bb_compute. */
3737 EXECUTE_IF_SET_IN_BITMAP (setjumps_crossed
, 0, regno
, bi
)
3739 REG_BASIC_BLOCK (regno
) = REG_BLOCK_UNKNOWN
;
3740 REG_LIVE_LENGTH (regno
) = -1;
3743 BITMAP_FREE (local_live
);
3744 BITMAP_FREE (local_processed
);
3745 BITMAP_FREE (setjumps_crossed
);
3750 /* Free all storage associated with the problem. */
3753 df_ri_free (struct dataflow
*dflow
)
3755 free (dflow
->problem_data
);
3760 /* Debugging info. */
3763 df_ri_dump (struct dataflow
*dflow
, FILE *file
)
3765 print_rtl_with_bb (file
, get_insns ());
3767 if (dflow
->flags
& DF_RI_LIFE
)
3769 fprintf (file
, "Register info:\n");
3770 dump_flow_info (file
, -1);
3774 /* All of the information associated every instance of the problem. */
3776 static struct df_problem problem_RI
=
3778 DF_RI
, /* Problem id. */
3779 DF_NONE
, /* Direction. */
3780 df_ri_alloc
, /* Allocate the problem specific data. */
3781 NULL
, /* Reset global information. */
3782 NULL
, /* Free basic block info. */
3783 df_ri_compute
, /* Local compute function. */
3784 NULL
, /* Init the solution specific data. */
3785 NULL
, /* Iterative solver. */
3786 NULL
, /* Confluence operator 0. */
3787 NULL
, /* Confluence operator n. */
3788 NULL
, /* Transfer function. */
3789 NULL
, /* Finalize function. */
3790 df_ri_free
, /* Free all of the problem information. */
3791 df_ri_dump
, /* Debugging. */
3793 /* Technically this is only dependent on the live registers problem
3794 but it will produce information if built one of uninitialized
3795 register problems (UR, UREC) is also run. */
3796 df_lr_add_problem
, /* Dependent problem. */
3797 0 /* Changeable flags. */
3801 /* Create a new DATAFLOW instance and add it to an existing instance
3802 of DF. The returned structure is what is used to get at the
3806 df_ri_add_problem (struct df
*df
, int flags
)
3808 return df_add_problem (df
, &problem_RI
, flags
);