/* Liveness for SSA trees.
   Copyright (C) 2003-2013 Free Software Foundation, Inc.
   Contributed by Andrew MacLeod <amacleod@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "hash-table.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-live.h"
#include "diagnostic-core.h"

#ifdef ENABLE_CHECKING
static void verify_live_on_entry (tree_live_info_p);
#endif

/* VARMAP maintains a mapping from SSA version number to real variables.

   All SSA_NAMES are divided into partitions.  Initially each ssa_name is the
   only member of its own partition.  Coalescing will attempt to group any
   ssa_names which occur in a copy or in a PHI node into the same partition.

   At the end of out-of-ssa, each partition becomes a "real" variable and is
   rewritten as a compiler variable.

   The var_map data structure is used to manage these partitions.  It allows
   partitions to be combined, and determines which partition belongs to what
   ssa_name or variable, and vice versa.  */
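
/* As a hedged illustration only (this helper is hypothetical and not used
   by the pass): the mapping is queried through var_to_partition and
   partition_to_var, and a partition index and its representative SSA name
   are expected to round-trip through those accessors.  */

static inline bool
partition_mapping_roundtrip_p (var_map map, tree ssa_var)
{
  int p = var_to_partition (map, ssa_var);
  if (p == NO_PARTITION)
    return false;
  /* The representative returned for P must itself map back to P.  */
  return var_to_partition (map, partition_to_var (map, p)) == p;
}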

/* Hashtable helpers.  */

struct tree_int_map_hasher : typed_noop_remove <tree_int_map>
{
  typedef tree_int_map value_type;
  typedef tree_int_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

inline hashval_t
tree_int_map_hasher::hash (const value_type *v)
{
  return tree_map_base_hash (v);
}

inline bool
tree_int_map_hasher::equal (const value_type *v, const compare_type *c)
{
  return tree_int_map_eq (v, c);
}

/* This routine will initialize the basevar fields of MAP.  */

static void
var_map_base_init (var_map map)
{
  int x, num_part;
  tree var;
  hash_table <tree_int_map_hasher> tree_to_index;
  struct tree_int_map *m, *mapstorage;

  num_part = num_var_partitions (map);
  tree_to_index.create (num_part);

  /* We can have at most num_part entries in the hash tables, so it's
     enough to allocate so many map elements once, saving some malloc
     calls.  */
  mapstorage = m = XNEWVEC (struct tree_int_map, num_part);

  /* If a base table already exists, clear it, otherwise create it.  */
  free (map->partition_to_base_index);
  map->partition_to_base_index = (int *) xmalloc (sizeof (int) * num_part);

  /* Build the base variable list, and point partitions at their bases.  */
  for (x = 0; x < num_part; x++)
    {
      struct tree_int_map **slot;
      unsigned baseindex;

      var = partition_to_var (map, x);
      if (SSA_NAME_VAR (var)
          && (!VAR_P (SSA_NAME_VAR (var))
              || !DECL_IGNORED_P (SSA_NAME_VAR (var))))
        m->base.from = SSA_NAME_VAR (var);
      else
        /* This restricts what anonymous SSA names we can coalesce
           as it restricts the sets we compute conflicts for.
           Using TREE_TYPE to generate sets is the easiest as
           type equivalency also holds for SSA names with the same
           underlying decl.

           Check gimple_can_coalesce_p when changing this code.  */
        m->base.from = (TYPE_CANONICAL (TREE_TYPE (var))
                        ? TYPE_CANONICAL (TREE_TYPE (var))
                        : TREE_TYPE (var));
      /* If base variable hasn't been seen, set it up.  */
      slot = tree_to_index.find_slot (m, INSERT);
      if (!*slot)
        {
          *slot = m;
          baseindex = m - mapstorage;
          m->to = baseindex;
          m++;
        }
      else
        baseindex = (*slot)->to;
      map->partition_to_base_index[x] = baseindex;
    }

  map->num_basevars = m - mapstorage;

  free (mapstorage);
  tree_to_index.dispose ();
}

/* Remove the base table in MAP.  */

static void
var_map_base_fini (var_map map)
{
  /* Free the basevar info if it is present.  */
  if (map->partition_to_base_index != NULL)
    {
      free (map->partition_to_base_index);
      map->partition_to_base_index = NULL;
      map->num_basevars = 0;
    }
}

/* Create a variable partition map of SIZE, initialize and return it.  */

var_map
init_var_map (int size)
{
  var_map map;

  map = (var_map) xmalloc (sizeof (struct _var_map));
  map->var_partition = partition_new (size);

  map->partition_to_view = NULL;
  map->view_to_partition = NULL;
  map->num_partitions = size;
  map->partition_size = size;
  map->num_basevars = 0;
  map->partition_to_base_index = NULL;
  return map;
}

/* Free memory associated with MAP.  */

void
delete_var_map (var_map map)
{
  var_map_base_fini (map);
  partition_delete (map->var_partition);
  free (map->partition_to_view);
  free (map->view_to_partition);
  free (map);
}

/* This function will combine the partitions in MAP for VAR1 and VAR2.  It
   returns the partition which represents the new partition.  If the two
   partitions cannot be combined, NO_PARTITION is returned.  */

int
var_union (var_map map, tree var1, tree var2)
{
  int p1, p2, p3;

  gcc_assert (TREE_CODE (var1) == SSA_NAME);
  gcc_assert (TREE_CODE (var2) == SSA_NAME);

  /* This is independent of partition_to_view.  If partition_to_view is
     on, then whichever one of these partitions is absorbed will never have a
     dereference into the partition_to_view array any more.  */

  p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
  p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));

  gcc_assert (p1 != NO_PARTITION);
  gcc_assert (p2 != NO_PARTITION);

  if (p1 == p2)
    p3 = p1;
  else
    p3 = partition_union (map->var_partition, p1, p2);

  if (map->partition_to_view)
    p3 = map->partition_to_view[p3];

  return p3;
}
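
/* A minimal usage sketch (the wrapper below is hypothetical, relying only
   on the documented contract above): attempt to coalesce the two sides of
   a copy and report whether they now share a partition.  */

static inline bool
try_union_copy_operands (var_map map, tree lhs, tree rhs)
{
  /* var_union returns NO_PARTITION when the operands can't be combined.  */
  return var_union (map, lhs, rhs) != NO_PARTITION;
}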

/* Compress the partition numbers in MAP such that they fall in the range
   0..(num_partitions-1) instead of wherever they turned out during
   the partitioning exercise.  This removes any references to unused
   partitions, thereby allowing bitmaps and other vectors to be much
   denser.

   This is implemented such that compaction doesn't affect partitioning.
   I.e., once partitions are created and possibly merged, running one
   or more different kinds of compaction will not affect the partitions
   themselves.  Their index might change, but all the same variables will
   still be members of the same partition group.  This allows work on reduced
   sets, and no loss of information when a larger set is later desired.

   In particular, coalescing can work on partitions which have 2 or more
   definitions, and then 'recompact' later to include all the single
   definitions for assignment to program variables.  */
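
/* For example, a consumer that wants dense indices before allocating
   per-partition vectors would typically do (a sketch, assuming MAP has
   already been populated and coalesced):

     partition_view_normal (map, true);
     n = num_var_partitions (map);    -- indices are now 0..n-1

   Only the numbering changes; partition membership is preserved.  */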

/* Set MAP back to the initial state of having no partition view.  Return a
   bitmap which has a bit set for each partition number which is in use in the
   varmap.  */

static bitmap
partition_view_init (var_map map)
{
  bitmap used;
  int tmp;
  unsigned int x;

  used = BITMAP_ALLOC (NULL);

  /* Already in a view?  Abandon the old one.  */
  if (map->partition_to_view)
    {
      free (map->partition_to_view);
      map->partition_to_view = NULL;
    }
  if (map->view_to_partition)
    {
      free (map->view_to_partition);
      map->view_to_partition = NULL;
    }

  /* Find out which partitions are actually referenced.  */
  for (x = 0; x < map->partition_size; x++)
    {
      tmp = partition_find (map->var_partition, x);
      if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
          && (!has_zero_uses (ssa_name (tmp))
              || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))))
        bitmap_set_bit (used, tmp);
    }

  map->num_partitions = map->partition_size;
  return used;
}

/* This routine will finalize the view data for MAP based on the partitions
   set in SELECTED.  This is either the same bitmap returned from
   partition_view_init, or a trimmed down version if some of those partitions
   were not desired in this view.  SELECTED is freed before returning.  */

static void
partition_view_fini (var_map map, bitmap selected)
{
  bitmap_iterator bi;
  unsigned count, i, x, limit;

  gcc_assert (selected);

  count = bitmap_count_bits (selected);
  limit = map->partition_size;

  /* If it's a one-to-one ratio, we don't need any view compaction.  */
  if (count < limit)
    {
      map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
      memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
      map->view_to_partition = (int *)xmalloc (count * sizeof (int));

      i = 0;
      /* Give each selected partition an index.  */
      EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
        {
          map->partition_to_view[x] = i;
          map->view_to_partition[i] = x;
          i++;
        }
      gcc_assert (i == count);
      map->num_partitions = i;
    }

  BITMAP_FREE (selected);
}

/* Create a partition view which includes all the used partitions in MAP.  If
   WANT_BASES is true, create the base variable map as well.  */

void
partition_view_normal (var_map map, bool want_bases)
{
  bitmap used;

  used = partition_view_init (map);
  partition_view_fini (map, used);

  if (want_bases)
    var_map_base_init (map);
  else
    var_map_base_fini (map);
}

/* Create a partition view in MAP which includes just partitions which occur in
   the bitmap ONLY.  If WANT_BASES is true, create the base variable map
   as well.  */

void
partition_view_bitmap (var_map map, bitmap only, bool want_bases)
{
  bitmap used;
  bitmap new_partitions = BITMAP_ALLOC (NULL);
  unsigned x, p;
  bitmap_iterator bi;

  used = partition_view_init (map);
  EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
    {
      p = partition_find (map->var_partition, x);
      gcc_assert (bitmap_bit_p (used, p));
      bitmap_set_bit (new_partitions, p);
    }
  partition_view_fini (map, new_partitions);
  BITMAP_FREE (used);

  if (want_bases)
    var_map_base_init (map);
  else
    var_map_base_fini (map);
}

static bitmap usedvars;

/* Mark VAR as used, so that it'll be preserved during rtl expansion.
   Returns true if VAR wasn't marked before.  */

static inline bool
set_is_used (tree var)
{
  return bitmap_set_bit (usedvars, DECL_UID (var));
}

/* Return true if VAR is marked as used.  */

static inline bool
is_used_p (tree var)
{
  return bitmap_bit_p (usedvars, DECL_UID (var));
}

static inline void mark_all_vars_used (tree *);

/* Helper function for mark_all_vars_used, called via walk_tree.  */

static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
  tree b;

  if (TREE_CODE (t) == SSA_NAME)
    {
      *walk_subtrees = 0;
      t = SSA_NAME_VAR (t);
      if (!t)
        return NULL;
    }

  if (IS_EXPR_CODE_CLASS (c)
      && (b = TREE_BLOCK (t)) != NULL)
    TREE_USED (b) = true;

  /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
     fields do not contain vars.  */
  if (TREE_CODE (t) == TARGET_MEM_REF)
    {
      mark_all_vars_used (&TMR_BASE (t));
      mark_all_vars_used (&TMR_INDEX (t));
      mark_all_vars_used (&TMR_INDEX2 (t));
      *walk_subtrees = 0;
      return NULL;
    }

  /* Only need to mark VAR_DECLS; parameters and return results are not
     eliminated as unused.  */
  if (TREE_CODE (t) == VAR_DECL)
    {
      /* When a global var becomes used for the first time also walk its
         initializer (non global ones don't have any).  */
      if (set_is_used (t) && is_global_var (t))
        mark_all_vars_used (&DECL_INITIAL (t));
    }
  /* remove_unused_scope_block_p requires information about labels
     which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
  else if (TREE_CODE (t) == LABEL_DECL)
    /* Although the TREE_USED values that the frontend uses would be
       acceptable (albeit slightly over-conservative) for our purposes,
       init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
       must re-compute it here.  */
    TREE_USED (t) = 1;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}

/* Mark the scope block SCOPE and its subblocks unused when they can be
   possibly eliminated if dead.  */

static void
mark_scope_block_unused (tree scope)
{
  tree t;

  TREE_USED (scope) = false;
  if (!(*debug_hooks->ignore_block) (scope))
    TREE_USED (scope) = true;
  for (t = BLOCK_SUBBLOCKS (scope); t; t = BLOCK_CHAIN (t))
    mark_scope_block_unused (t);
}

/* Look if the block is dead (by possibly eliminating its dead subblocks)
   and return true if so.
   Block is declared dead if:
     1) No statements are associated with it.
     2) Declares no live variables
     3) All subblocks are dead
        or there is precisely one subblock and the block
        has the same abstract origin as the outer block and declares
        no variables, so it is a pure wrapper.
   When we are not outputting full debug info, we also eliminate dead variables
   out of scope blocks to let them be recycled by GGC and to save copying work
   done by the inliner.  */

static bool
remove_unused_scope_block_p (tree scope)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  int nsubblocks = 0;

  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &DECL_CHAIN (*t);

      /* Debug info of nested function refers to the block of the
         function.  We might still call it even if all statements
         of the function it was nested into were eliminated.

         TODO: We can actually look into cgraph to see if function
         will be output to file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
        unused = false;

      /* If a decl has a value expr, we need to instantiate it
         regardless of debug info generation, to avoid codegen
         differences in memory overlap tests.  update_equiv_regs() may
         indirectly call validate_equiv_mem() to test whether a
         SET_DEST overlaps with others, and if the value expr changes
         by virtual register instantiation, we may end up with
         different results.  */
      else if (TREE_CODE (*t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*t))
        unused = false;

      /* Remove everything we don't generate debug info for.  */
      else if (DECL_IGNORED_P (*t))
        {
          *t = DECL_CHAIN (*t);
          next = t;
        }

      /* When we are outputting debug info, we usually want to output
         info about optimized-out variables in the scope blocks.
         Exception are the scope blocks not containing any instructions
         at all so user can't get into the scopes at first place.  */
      else if (is_used_p (*t))
        unused = false;
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
        /* For labels that are still used in the IL, the decision to
           preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
           risk having different ordering in debug vs. non-debug builds
           during inlining or versioning.
           A label appearing here (we have already checked DECL_IGNORED_P)
           should not be used in the IL unless it has been explicitly used
           before, so we use TREE_USED as an approximation.  */

        /* In principle, we should do the same here as for the debug case
           below, however, when debugging, there might be additional nested
           levels that keep an upper level with a label live, so we have to
           force this block to be considered used, too.  */
        unused = false;

      /* When we are not doing full debug info, we however can keep around
         only the used variables for cfgexpand's memory packing saving quite
         a lot of memory.

         For sake of -g3, we keep around those vars but we don't count this as
         use of block, so innermost block with no used vars and no instructions
         can be considered dead.  We only want to keep around blocks user can
         breakpoint into and ask about value of optimized out variables.

         Similarly we need to keep around types at least until all
         variables of all nested blocks are gone.  We track no
         information on whether given type is used or not, so we have
         to keep them even when not emitting debug information,
         otherwise we may end up remapping variables and their (local)
         types in different orders depending on whether debug
         information is being generated.  */

      else if (TREE_CODE (*t) == TYPE_DECL
               || debug_info_level == DINFO_LEVEL_NORMAL
               || debug_info_level == DINFO_LEVEL_VERBOSE)
        ;
      else
        {
          *t = DECL_CHAIN (*t);
          next = t;
        }
    }

  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
    if (remove_unused_scope_block_p (*t))
      {
        if (BLOCK_SUBBLOCKS (*t))
          {
            tree next = BLOCK_CHAIN (*t);
            tree supercontext = BLOCK_SUPERCONTEXT (*t);

            *t = BLOCK_SUBBLOCKS (*t);
            while (BLOCK_CHAIN (*t))
              {
                BLOCK_SUPERCONTEXT (*t) = supercontext;
                t = &BLOCK_CHAIN (*t);
              }
            BLOCK_CHAIN (*t) = next;
            BLOCK_SUPERCONTEXT (*t) = supercontext;
            t = &BLOCK_CHAIN (*t);
            nsubblocks++;
          }
        else
          *t = BLOCK_CHAIN (*t);
      }
    else
      {
        t = &BLOCK_CHAIN (*t);
        nsubblocks++;
      }

  if (!unused)
    ;
  /* Outer scope is always used.  */
  else if (!BLOCK_SUPERCONTEXT (scope)
           || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
    unused = false;
  /* Innermost blocks with no live variables nor statements can be always
     eliminated.  */
  else if (!nsubblocks)
    ;
  /* For terse debug info we can eliminate info on unused variables.  */
  else if (debug_info_level == DINFO_LEVEL_NONE
           || debug_info_level == DINFO_LEVEL_TERSE)
    {
      /* Even for -g0/-g1 don't prune outer scopes from artificial
         functions, otherwise diagnostics using tree_nonartificial_location
         will not be emitted properly.  */
      if (inlined_function_outer_scope_p (scope))
        {
          tree ao = scope;

          while (ao
                 && TREE_CODE (ao) == BLOCK
                 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
            ao = BLOCK_ABSTRACT_ORIGIN (ao);
          if (ao
              && TREE_CODE (ao) == FUNCTION_DECL
              && DECL_DECLARED_INLINE_P (ao)
              && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
            unused = false;
        }
    }
  else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
    unused = false;
  /* See if this block is important for representation of inlined function.
     Inlined functions are always represented by block with
     block_ultimate_origin being set to FUNCTION_DECL and DECL_SOURCE_LOCATION
     set.  */
  else if (inlined_function_outer_scope_p (scope))
    unused = false;
  else
    /* Verify that only blocks with source location set
       are entry points to the inlined functions.  */
    gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
                == UNKNOWN_LOCATION);

  TREE_USED (scope) = !unused;
  return unused;
}

/* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
   eliminated during the tree->rtl conversion process.  */

static inline void
mark_all_vars_used (tree *expr_p)
{
  walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
}

/* Helper function for clear_unused_block_pointer, called via walk_tree.  */

static tree
clear_unused_block_pointer_1 (tree *tp, int *, void *)
{
  if (EXPR_P (*tp) && TREE_BLOCK (*tp)
      && !TREE_USED (TREE_BLOCK (*tp)))
    TREE_SET_BLOCK (*tp, NULL);
  return NULL_TREE;
}

/* Set all block pointers in debug or clobber stmts to NULL if the block
   is unused, so that they will not be streamed out.  */

static void
clear_unused_block_pointer (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        unsigned i;
        tree b;
        gimple stmt = gsi_stmt (gsi);

        if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
          continue;
        b = gimple_block (stmt);
        if (b && !TREE_USED (b))
          gimple_set_block (stmt, NULL);
        for (i = 0; i < gimple_num_ops (stmt); i++)
          walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
                     NULL, NULL);
      }
}

/* Dump scope blocks starting at SCOPE to FILE.  INDENT is the
   indentation level and FLAGS is as in print_generic_expr.  */

static void
dump_scope_block (FILE *file, int indent, tree scope, int flags)
{
  tree var, t;
  unsigned int i;

  fprintf (file, "\n%*s{ Scope block #%i%s%s", indent, "", BLOCK_NUMBER (scope),
           TREE_USED (scope) ? "" : " (unused)",
           BLOCK_ABSTRACT (scope) ? " (abstract)" : "");
  if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
        {
          fprintf (file, " Originating from :");
          if (DECL_P (origin))
            print_generic_decl (file, origin, flags);
          else
            fprintf (file, "#%i", BLOCK_NUMBER (origin));
        }
    }
  fprintf (file, " \n");
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "\n");
    }
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
                          flags);
      fprintf (file, " (nonlocalized)\n");
    }
  for (t = BLOCK_SUBBLOCKS (scope); t; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n", indent, "");
}

/* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
   is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_block (tree scope, int flags)
{
  dump_scope_block (stderr, 0, scope, flags);
}

/* Dump the tree of lexical scopes of current_function_decl to FILE.
   FLAGS is as in print_generic_expr.  */

void
dump_scope_blocks (FILE *file, int flags)
{
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}

/* Dump the tree of lexical scopes of current_function_decl to stderr.
   FLAGS is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_blocks (int flags)
{
  dump_scope_blocks (stderr, flags);
}

/* Remove local variables that are not referenced in the IL.  */

void
remove_unused_locals (void)
{
  basic_block bb;
  tree var;
  unsigned srcidx, dstidx, num;
  bool have_local_clobbers = false;

  /* Removing declarations from lexical blocks when not optimizing is
     not only a waste of time, it actually causes differences in stack
     layout.  */
  if (!optimize)
    return;

  timevar_push (TV_REMOVE_UNUSED);

  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  usedvars = BITMAP_ALLOC (NULL);

  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree b = gimple_block (stmt);

          if (is_gimple_debug (stmt))
            continue;

          if (gimple_clobber_p (stmt))
            {
              have_local_clobbers = true;
              continue;
            }

          if (b)
            TREE_USED (b) = true;

          for (i = 0; i < gimple_num_ops (stmt); i++)
            mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
        }

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          use_operand_p arg_p;
          ssa_op_iter i;
          tree def;
          gimple phi = gsi_stmt (gsi);

          if (virtual_operand_p (gimple_phi_result (phi)))
            continue;

          def = gimple_phi_result (phi);
          mark_all_vars_used (&def);

          FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
            {
              tree arg = USE_FROM_PTR (arg_p);
              int index = PHI_ARG_INDEX_FROM_USE (arg_p);
              tree block =
                LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
              if (block != NULL)
                TREE_USED (block) = true;
              mark_all_vars_used (&arg);
            }
        }

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (LOCATION_BLOCK (e->goto_locus) != NULL)
          TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
    }

  /* We do a two-pass approach about the out-of-scope clobbers.  We want
     to remove them if they are the only references to a local variable,
     but we want to retain them when there's any other.  So the first pass
     ignores them, and the second pass (if there were any) tries to remove
     them.  */
  if (have_local_clobbers)
    FOR_EACH_BB (bb)
      {
        gimple_stmt_iterator gsi;

        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
          {
            gimple stmt = gsi_stmt (gsi);
            tree b = gimple_block (stmt);

            if (gimple_clobber_p (stmt))
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree base = get_base_address (lhs);
                /* Remove clobbers referencing unused vars, or clobbers
                   with MEM_REF lhs referencing uninitialized pointers.  */
                if ((TREE_CODE (base) == VAR_DECL && !is_used_p (base))
                    || (TREE_CODE (lhs) == MEM_REF
                        && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
                        && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))
                        && (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (lhs, 0)))
                            != PARM_DECL)))
                  {
                    unlink_stmt_vdef (stmt);
                    gsi_remove (&gsi, true);
                    release_defs (stmt);
                    continue;
                  }
                if (b)
                  TREE_USED (b) = true;
              }
            gsi_next (&gsi);
          }
      }

  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local and global vars from local_decls.  */
  num = vec_safe_length (cfun->local_decls);
  for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
    {
      var = (*cfun->local_decls)[srcidx];
      if (TREE_CODE (var) == VAR_DECL)
        {
          if (!is_used_p (var))
            {
              tree def;
              if (cfun->nonlocal_goto_save_area
                  && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
                cfun->nonlocal_goto_save_area = NULL;
              /* Release any default def associated with var.  */
              if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
                {
                  set_ssa_default_def (cfun, var, NULL_TREE);
                  release_ssa_name (def);
                }
              continue;
            }
        }
      if (TREE_CODE (var) == VAR_DECL
          && DECL_HARD_REGISTER (var)
          && !is_global_var (var))
        cfun->has_local_explicit_reg_vars = true;

      if (srcidx != dstidx)
        (*cfun->local_decls)[dstidx] = var;
      dstidx++;
    }
  if (dstidx != num)
    {
      statistics_counter_event (cfun, "unused VAR_DECLs removed", num - dstidx);
      cfun->local_decls->truncate (dstidx);
    }

  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl));
  clear_unused_block_pointer ();

  BITMAP_FREE (usedvars);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }

  timevar_pop (TV_REMOVE_UNUSED);
}

/* Obstack for global liveness info bitmaps.  We don't want to put these
   on the default obstack because these bitmaps can grow quite large and
   we'll hold on to all that memory until the end of the compiler run.
   As a bonus, delete_tree_live_info can destroy all the bitmaps by just
   releasing the whole obstack.  */
static bitmap_obstack liveness_bitmap_obstack;

/* Allocate and return a new live range information object based on MAP.  */

static tree_live_info_p
new_tree_live_info (var_map map)
{
  tree_live_info_p live;
  basic_block bb;

  live = XNEW (struct tree_live_info_d);
  live->map = map;
  live->num_blocks = last_basic_block;

  live->livein = XNEWVEC (bitmap_head, last_basic_block);
  FOR_EACH_BB (bb)
    bitmap_initialize (&live->livein[bb->index], &liveness_bitmap_obstack);

  live->liveout = XNEWVEC (bitmap_head, last_basic_block);
  FOR_EACH_BB (bb)
    bitmap_initialize (&live->liveout[bb->index], &liveness_bitmap_obstack);

  live->work_stack = XNEWVEC (int, last_basic_block);
  live->stack_top = live->work_stack;

  live->global = BITMAP_ALLOC (&liveness_bitmap_obstack);
  return live;
}

/* Free storage for live range info object LIVE.  */

void
delete_tree_live_info (tree_live_info_p live)
{
  bitmap_obstack_release (&liveness_bitmap_obstack);
  free (live->work_stack);
  free (live->liveout);
  free (live->livein);
  free (live);
}

/* Visit basic block BB and propagate any required live on entry bits from
   LIVE into the predecessors.  VISITED is the bitmap of visited blocks.
   TMP is a temporary work bitmap which is passed in to avoid reallocating
   it each time.  */

static void
loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited,
                 bitmap tmp)
{
  edge e;
  bool change;
  edge_iterator ei;
  basic_block pred_bb;
  bitmap loe;

  gcc_checking_assert (!bitmap_bit_p (visited, bb->index));
  bitmap_set_bit (visited, bb->index);

  loe = live_on_entry (live, bb);

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      pred_bb = e->src;
      if (pred_bb == ENTRY_BLOCK_PTR)
        continue;
      /* TMP is variables live-on-entry from BB that aren't defined in the
         predecessor block.  This should be the live on entry vars to pred.
         Note that liveout is the DEFs in a block while live on entry is
         being calculated.  */
      bitmap_and_compl (tmp, loe, &live->liveout[pred_bb->index]);

      /* Add these bits to live-on-entry for the pred.  If there are any
         changes, and pred_bb has been visited already, add it to the
         revisit stack.  */
      change = bitmap_ior_into (live_on_entry (live, pred_bb), tmp);
      if (bitmap_bit_p (visited, pred_bb->index) && change)
        {
          bitmap_clear_bit (visited, pred_bb->index);
          *(live->stack_top)++ = pred_bb->index;
        }
    }
}

/* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
   of all the variables.  */

static void
live_worklist (tree_live_info_p live)
{
  unsigned b;
  basic_block bb;
  sbitmap visited = sbitmap_alloc (last_basic_block + 1);
  bitmap tmp = BITMAP_ALLOC (&liveness_bitmap_obstack);

  bitmap_clear (visited);

  /* Visit all the blocks in reverse order and propagate live on entry values
     into the predecessors blocks.  */
  FOR_EACH_BB_REVERSE (bb)
    loe_visit_block (live, bb, visited, tmp);

  /* Process any blocks which require further iteration.  */
  while (live->stack_top != live->work_stack)
    {
      b = *--(live->stack_top);
      loe_visit_block (live, BASIC_BLOCK (b), visited, tmp);
    }

  BITMAP_FREE (tmp);
  sbitmap_free (visited);
}

/* Calculate the initial live on entry vector for SSA_NAME using immediate_use
   links.  Set the live on entry fields in LIVE.  Def's are marked temporarily
   in the liveout vector.  */

static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
  int p;
  gimple stmt;
  use_operand_p use;
  basic_block def_bb = NULL;
  imm_use_iterator imm_iter;
  bool global = false;

  p = var_to_partition (live->map, ssa_name);
  if (p == NO_PARTITION)
    return;

  stmt = SSA_NAME_DEF_STMT (ssa_name);
  if (stmt)
    {
      def_bb = gimple_bb (stmt);
      /* Mark defs in liveout bitmap temporarily.  */
      if (def_bb)
        bitmap_set_bit (&live->liveout[def_bb->index], p);
    }
  else
    def_bb = ENTRY_BLOCK_PTR;

  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
     add it to the list of live on entry blocks.  */
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
    {
      gimple use_stmt = USE_STMT (use);
      basic_block add_block = NULL;

      if (gimple_code (use_stmt) == GIMPLE_PHI)
        {
          /* Uses in PHI's are considered to be live at exit of the SRC block
             as this is where a copy would be inserted.  Check to see if it is
             defined in that block, or whether it's live on entry.  */
          int index = PHI_ARG_INDEX_FROM_USE (use);
          edge e = gimple_phi_arg_edge (use_stmt, index);
          if (e->src != ENTRY_BLOCK_PTR)
            {
              if (e->src != def_bb)
                add_block = e->src;
            }
        }
      else if (is_gimple_debug (use_stmt))
        continue;
      else
        {
          /* If it's not defined in this block, it's live on entry.  */
          basic_block use_bb = gimple_bb (use_stmt);
          if (use_bb != def_bb)
            add_block = use_bb;
        }

      /* If there was a live on entry use, set the bit.  */
      if (add_block)
        {
          global = true;
          bitmap_set_bit (&live->livein[add_block->index], p);
        }
    }

  /* If SSA_NAME is live on entry to at least one block, fill in all the live
     on entry blocks between the def and all the uses.  */
  if (global)
    bitmap_set_bit (live->global, p);
}
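
/* For example, given "x_3 = PHI <x_1 (BB2), x_2 (BB3)>", the use of x_1
   is treated as occurring at the bottom of BB2 rather than in the PHI's
   own block, since BB2's outgoing edge is where out-of-ssa would insert
   the copy.  */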

/* Calculate the live on exit vectors based on the entry info in LIVEINFO.  */

void
calculate_live_on_exit (tree_live_info_p liveinfo)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* live on entry calculations used liveout vectors for defs, clear them.  */
  FOR_EACH_BB (bb)
    bitmap_clear (&liveinfo->liveout[bb->index]);

  /* Set all the live-on-exit bits for uses in PHIs.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      size_t i;

      /* Mark the PHI arguments which are live on exit to the pred block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple phi = gsi_stmt (gsi);
          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree t = PHI_ARG_DEF (phi, i);
              int p;

              if (TREE_CODE (t) != SSA_NAME)
                continue;

              p = var_to_partition (liveinfo->map, t);
              if (p == NO_PARTITION)
                continue;
              e = gimple_phi_arg_edge (phi, i);
              if (e->src != ENTRY_BLOCK_PTR)
                bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
            }
        }

      /* Add each successor's live on entry to this block's live on exit.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->dest != EXIT_BLOCK_PTR)
          bitmap_ior_into (&liveinfo->liveout[bb->index],
                           live_on_entry (liveinfo, e->dest));
    }
}
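
/* Equivalently, once the PHI arguments are accounted for, this is the usual
   backward equation (a sketch of the invariant, not extra code):

     liveout (BB) = union over non-EXIT successors S of live_on_entry (S)
                    plus the PHI arguments flowing out of BB.  */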

/* Given partition map MAP, calculate all the live on entry bitmaps for
   each partition.  Return a new live info object.  */

tree_live_info_p
calculate_live_ranges (var_map map)
{
  tree var;
  unsigned i;
  tree_live_info_p live;

  bitmap_obstack_initialize (&liveness_bitmap_obstack);
  live = new_tree_live_info (map);
  for (i = 0; i < num_var_partitions (map); i++)
    {
      var = partition_to_var (map, i);
      if (var != NULL_TREE)
        set_var_live_on_entry (var, live);
    }

  live_worklist (live);

#ifdef ENABLE_CHECKING
  verify_live_on_entry (live);
#endif

  calculate_live_on_exit (live);
  return live;
}
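
/* A typical consumer drives the whole thing roughly as follows (a sketch;
   the real out-of-ssa pass also coalesces partitions in between):

     var_map map = init_var_map (num_ssa_names);
     ... record/merge partitions with var_union ...
     partition_view_normal (map, false);
     tree_live_info_p live = calculate_live_ranges (map);
     ... query live_on_entry (live, bb) / live_on_exit (live, bb) ...
     delete_tree_live_info (live);
     delete_var_map (map);  */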

/* Output partition map MAP to file F.  */

void
dump_var_map (FILE *f, var_map map)
{
  int t;
  unsigned x, y;
  int p;

  fprintf (f, "\nPartition map \n\n");

  for (x = 0; x < map->num_partitions; x++)
    {
      if (map->view_to_partition != NULL)
        p = map->view_to_partition[x];
      else
        p = x;

      if (ssa_name (p) == NULL_TREE
          || virtual_operand_p (ssa_name (p)))
        continue;

      t = 0;
      for (y = 1; y < num_ssa_names; y++)
        {
          p = partition_find (map->var_partition, y);
          if (map->partition_to_view)
            p = map->partition_to_view[p];
          if (p == (int)x)
            {
              if (t++ == 0)
                {
                  fprintf (f, "Partition %d (", x);
                  print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
                  fprintf (f, " - ");
                }
              fprintf (f, "%d ", y);
            }
        }
      if (t != 0)
        fprintf (f, ")\n");
    }
  fprintf (f, "\n");
}

/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (_var_map &ref)
{
  dump_var_map (stderr, &ref);
}

DEBUG_FUNCTION void
debug (_var_map *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}

/* Output live range info LIVE to file F, controlled by FLAG.  */

void
dump_live_info (FILE *f, tree_live_info_p live, int flag)
{
  basic_block bb;
  unsigned i;
  var_map map = live->map;
  bitmap_iterator bi;

  if ((flag & LIVEDUMP_ENTRY) && live->livein)
    {
      FOR_EACH_BB (bb)
        {
          fprintf (f, "\nLive on entry to BB%d : ", bb->index);
          EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
            {
              print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
              fprintf (f, "  ");
            }
          fprintf (f, "\n");
        }
    }

  if ((flag & LIVEDUMP_EXIT) && live->liveout)
    {
      FOR_EACH_BB (bb)
        {
          fprintf (f, "\nLive on exit from BB%d : ", bb->index);
          EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
            {
              print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
              fprintf (f, "  ");
            }
          fprintf (f, "\n");
        }
    }
}

/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (tree_live_info_d &ref)
{
  dump_live_info (stderr, &ref, 0);
}

DEBUG_FUNCTION void
debug (tree_live_info_d *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}

#ifdef ENABLE_CHECKING
/* Verify that SSA_VAR is a non-virtual SSA_NAME.  */

void
register_ssa_partition_check (tree ssa_var)
{
  gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
  if (virtual_operand_p (ssa_var))
    {
      fprintf (stderr, "Illegally registering a virtual SSA name :");
      print_generic_expr (stderr, ssa_var, TDF_SLIM);
      fprintf (stderr, " in the SSA->Normal phase.\n");
      internal_error ("SSA corruption");
    }
}

/* Verify that the info in LIVE matches the current cfg.  */

static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple stmt;
  basic_block bb;
  edge e;
  int num;
  edge_iterator ei;
  var_map map = live->map;

  /* Check for live on entry partitions and report those with a DEF in
     the program.  This will typically mean an optimization has done
     something wrong.  */
  bb = ENTRY_BLOCK_PTR;
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (e->dest == EXIT_BLOCK_PTR)
        continue;
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
        {
          basic_block tmp;
          tree d = NULL_TREE;
          bitmap loe;
          var = partition_to_var (map, i);
          stmt = SSA_NAME_DEF_STMT (var);
          tmp = gimple_bb (stmt);
          if (SSA_NAME_VAR (var))
            d = ssa_default_def (cfun, SSA_NAME_VAR (var));

          loe = live_on_entry (live, e->dest);
          if (loe && bitmap_bit_p (loe, i))
            {
              if (!gimple_nop_p (stmt))
                {
                  num++;
                  print_generic_expr (stderr, var, TDF_SLIM);
                  fprintf (stderr, " is defined ");
                  if (tmp)
                    fprintf (stderr, " in BB%d, ", tmp->index);
                  fprintf (stderr, "by:\n");
                  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
                  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
                           entry_block);
                  fprintf (stderr, " So it appears to have multiple defs.\n");
                }
              else
                {
                  if (d != var)
                    {
                      num++;
                      print_generic_expr (stderr, var, TDF_SLIM);
                      fprintf (stderr, " is live-on-entry to BB%d ",
                               entry_block);
                      if (d)
                        {
                          fprintf (stderr, " but is not the default def of ");
                          print_generic_expr (stderr, d, TDF_SLIM);
                          fprintf (stderr, "\n");
                        }
                      else
                        fprintf (stderr, " and there is no default def.\n");
                    }
                }
            }
          else
            if (d == var)
              {
                /* The only way this var shouldn't be marked live on entry is
                   if it occurs in a PHI argument of the block.  */
                size_t z;
                bool ok = false;
                gimple_stmt_iterator gsi;
                for (gsi = gsi_start_phis (e->dest);
                     !gsi_end_p (gsi) && !ok;
                     gsi_next (&gsi))
                  {
                    gimple phi = gsi_stmt (gsi);
                    for (z = 0; z < gimple_phi_num_args (phi); z++)
                      if (var == gimple_phi_arg_def (phi, z))
                        {
                          ok = true;
                          break;
                        }
                  }
                if (ok)
                  continue;
                num++;
                print_generic_expr (stderr, var, TDF_SLIM);
                fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
                         entry_block);
                fprintf (stderr, "but it is a default def so it should be.\n");
              }
        }
    }
  gcc_assert (num <= 0);
}
#endif