/* Liveness for SSA trees.
   Copyright (C) 2003-2013 Free Software Foundation, Inc.
   Contributed by Andrew MacLeod <amacleod@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "tm.h"
#include "tree.h"
#include "gimple-pretty-print.h"
#include "bitmap.h"
#include "tree-flow.h"
#include "timevar.h"
#include "dumpfile.h"
#include "tree-ssa-live.h"
#include "diagnostic-core.h"
#include "debug.h"
#include "flags.h"

#ifdef ENABLE_CHECKING
static void verify_live_on_entry (tree_live_info_p);
#endif

/* VARMAP maintains a mapping from SSA version number to real variables.

   All SSA_NAMEs are divided into partitions.  Initially each ssa_name is the
   only member of its own partition.  Coalescing will attempt to group any
   ssa_names which occur in a copy or in a PHI node into the same partition.

   At the end of out-of-ssa, each partition becomes a "real" variable and is
   rewritten as a compiler variable.

   The var_map data structure is used to manage these partitions.  It allows
   partitions to be combined, and determines which partition belongs to what
   ssa_name or variable, and vice versa.  */
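
/* A sketch of typical usage (illustrative only; the real drivers of this
   machinery are tree-ssa-coalesce.c and tree-outof-ssa.c).  NAME1 and NAME2
   stand for hypothetical copy-related SSA_NAMEs:

       var_map map = init_var_map (num_ssa_names);
       ...
       var_union (map, name1, name2);
       partition_view_normal (map, true);
       ...
       delete_var_map (map);  */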

/* Hashtable helpers.  */

struct tree_int_map_hasher : typed_noop_remove <tree_int_map>
{
  typedef tree_int_map value_type;
  typedef tree_int_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

inline hashval_t
tree_int_map_hasher::hash (const value_type *v)
{
  return tree_map_base_hash (v);
}

inline bool
tree_int_map_hasher::equal (const value_type *v, const compare_type *c)
{
  return tree_int_map_eq (v, c);
}

/* This routine will initialize the basevar fields of MAP.  */

static void
var_map_base_init (var_map map)
{
  int x, num_part;
  tree var;
  hash_table <tree_int_map_hasher> tree_to_index;
  struct tree_int_map *m, *mapstorage;

  num_part = num_var_partitions (map);
  tree_to_index.create (num_part);

  /* We can have at most num_part entries in the hash tables, so it's
     enough to allocate so many map elements once, saving some malloc
     calls.  */
  mapstorage = m = XNEWVEC (struct tree_int_map, num_part);

  /* If a base table already exists, clear it, otherwise create it.  */
  free (map->partition_to_base_index);
  map->partition_to_base_index = (int *) xmalloc (sizeof (int) * num_part);

  /* Build the base variable list, and point partitions at their bases.  */
  for (x = 0; x < num_part; x++)
    {
      struct tree_int_map **slot;
      unsigned baseindex;
      var = partition_to_var (map, x);
      if (SSA_NAME_VAR (var))
	m->base.from = SSA_NAME_VAR (var);
      else
	/* This restricts what anonymous SSA names we can coalesce
	   as it restricts the sets we compute conflicts for.
	   Using TREE_TYPE to generate sets is the easiest, as
	   type equivalency also holds for SSA names with the same
	   underlying decl.

	   Check gimple_can_coalesce_p when changing this code.  */
	m->base.from = (TYPE_CANONICAL (TREE_TYPE (var))
			? TYPE_CANONICAL (TREE_TYPE (var))
			: TREE_TYPE (var));
      /* If base variable hasn't been seen, set it up.  */
      slot = tree_to_index.find_slot (m, INSERT);
      if (!*slot)
	{
	  baseindex = m - mapstorage;
	  m->to = baseindex;
	  *slot = m;
	  m++;
	}
      else
	baseindex = (*slot)->to;
      map->partition_to_base_index[x] = baseindex;
    }

  map->num_basevars = m - mapstorage;

  free (mapstorage);
  tree_to_index.dispose ();
}

/* Remove the base table in MAP.  */

static void
var_map_base_fini (var_map map)
{
  /* Free the basevar info if it is present.  */
  if (map->partition_to_base_index != NULL)
    {
      free (map->partition_to_base_index);
      map->partition_to_base_index = NULL;
      map->num_basevars = 0;
    }
}

/* Create a variable partition map of SIZE, initialize and return it.  */

var_map
init_var_map (int size)
{
  var_map map;

  map = (var_map) xmalloc (sizeof (struct _var_map));
  map->var_partition = partition_new (size);

  map->partition_to_view = NULL;
  map->view_to_partition = NULL;
  map->num_partitions = size;
  map->partition_size = size;
  map->num_basevars = 0;
  map->partition_to_base_index = NULL;
  return map;
}

/* Free memory associated with MAP.  */

void
delete_var_map (var_map map)
{
  var_map_base_fini (map);
  partition_delete (map->var_partition);
  free (map->partition_to_view);
  free (map->view_to_partition);
  free (map);
}

/* This function will combine the partitions in MAP for VAR1 and VAR2.  It
   returns the partition which represents the new partition.  If the two
   partitions cannot be combined, NO_PARTITION is returned.  */

int
var_union (var_map map, tree var1, tree var2)
{
  int p1, p2, p3;

  gcc_assert (TREE_CODE (var1) == SSA_NAME);
  gcc_assert (TREE_CODE (var2) == SSA_NAME);

  /* This is independent of partition_to_view.  If partition_to_view is
     on, then whichever one of these partitions is absorbed will never have a
     dereference into the partition_to_view array any more.  */

  p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
  p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));

  gcc_assert (p1 != NO_PARTITION);
  gcc_assert (p2 != NO_PARTITION);

  if (p1 == p2)
    p3 = p1;
  else
    p3 = partition_union (map->var_partition, p1, p2);

  if (map->partition_to_view)
    p3 = map->partition_to_view[p3];

  return p3;
}

/* Compress the partition numbers in MAP such that they fall in the range
   0..(num_partitions-1) instead of wherever they turned out during
   the partitioning exercise.  This removes any references to unused
   partitions, thereby allowing bitmaps and other vectors to be much
   denser.

   This is implemented such that compaction doesn't affect partitioning.
   I.e., once partitions are created and possibly merged, running one
   or more different kinds of compaction will not affect the partitions
   themselves.  Their index might change, but all the same variables will
   still be members of the same partition group.  This allows work on reduced
   sets, and no loss of information when a larger set is later desired.

   In particular, coalescing can work on partitions which have 2 or more
   definitions, and then 'recompact' later to include all the single
   definitions for assignment to program variables.  */
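
/* A minimal sketch of the compaction contract described above (illustrative
   only): partition_view_init returns the set of partitions that are actually
   referenced, and partition_view_fini renumbers exactly the selected ones:

       bitmap used = partition_view_init (map);
       ... optionally clear bits in USED for unwanted partitions ...
       partition_view_fini (map, used);    frees USED

   partition_view_normal and partition_view_bitmap below are thin wrappers
   around this pair.  */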

/* Set MAP back to the initial state of having no partition view.  Return a
   bitmap which has a bit set for each partition number which is in use in the
   varmap.  */

static bitmap
partition_view_init (var_map map)
{
  bitmap used;
  int tmp;
  unsigned int x;

  used = BITMAP_ALLOC (NULL);

  /* Already in a view? Abandon the old one.  */
  if (map->partition_to_view)
    {
      free (map->partition_to_view);
      map->partition_to_view = NULL;
    }
  if (map->view_to_partition)
    {
      free (map->view_to_partition);
      map->view_to_partition = NULL;
    }

  /* Find out which partitions are actually referenced.  */
  for (x = 0; x < map->partition_size; x++)
    {
      tmp = partition_find (map->var_partition, x);
      if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
	  && (!has_zero_uses (ssa_name (tmp))
	      || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))))
	bitmap_set_bit (used, tmp);
    }

  map->num_partitions = map->partition_size;
  return used;
}

/* This routine will finalize the view data for MAP based on the partitions
   set in SELECTED.  This is either the same bitmap returned from
   partition_view_init, or a trimmed down version if some of those partitions
   were not desired in this view.  SELECTED is freed before returning.  */

static void
partition_view_fini (var_map map, bitmap selected)
{
  bitmap_iterator bi;
  unsigned count, i, x, limit;

  gcc_assert (selected);

  count = bitmap_count_bits (selected);
  limit = map->partition_size;

  /* If it's a one-to-one ratio, we don't need any view compaction.  */
  if (count < limit)
    {
      map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
      memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
      map->view_to_partition = (int *)xmalloc (count * sizeof (int));

      i = 0;
      /* Give each selected partition an index.  */
      EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
	{
	  map->partition_to_view[x] = i;
	  map->view_to_partition[i] = x;
	  i++;
	}
      gcc_assert (i == count);
      map->num_partitions = i;
    }

  BITMAP_FREE (selected);
}

/* Create a partition view which includes all the used partitions in MAP.  If
   WANT_BASES is true, create the base variable map as well.  */

void
partition_view_normal (var_map map, bool want_bases)
{
  bitmap used;

  used = partition_view_init (map);
  partition_view_fini (map, used);

  if (want_bases)
    var_map_base_init (map);
  else
    var_map_base_fini (map);
}

/* Create a partition view in MAP which includes just partitions which occur in
   the bitmap ONLY.  If WANT_BASES is true, create the base variable map
   as well.  */

void
partition_view_bitmap (var_map map, bitmap only, bool want_bases)
{
  bitmap used;
  bitmap new_partitions = BITMAP_ALLOC (NULL);
  unsigned x, p;
  bitmap_iterator bi;

  used = partition_view_init (map);
  EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
    {
      p = partition_find (map->var_partition, x);
      gcc_assert (bitmap_bit_p (used, p));
      bitmap_set_bit (new_partitions, p);
    }
  partition_view_fini (map, new_partitions);

  BITMAP_FREE (used);
  if (want_bases)
    var_map_base_init (map);
  else
    var_map_base_fini (map);
}

static bitmap usedvars;

/* Mark VAR as used, so that it'll be preserved during rtl expansion.
   Returns true if VAR wasn't marked before.  */

static inline bool
set_is_used (tree var)
{
  return bitmap_set_bit (usedvars, DECL_UID (var));
}

/* Return true if VAR is marked as used.  */

static inline bool
is_used_p (tree var)
{
  return bitmap_bit_p (usedvars, DECL_UID (var));
}

static inline void mark_all_vars_used (tree *);

/* Helper function for mark_all_vars_used, called via walk_tree.  */

static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
  tree b;

  if (TREE_CODE (t) == SSA_NAME)
    {
      *walk_subtrees = 0;
      t = SSA_NAME_VAR (t);
      if (!t)
	return NULL;
    }

  if (IS_EXPR_CODE_CLASS (c)
      && (b = TREE_BLOCK (t)) != NULL)
    TREE_USED (b) = true;

  /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
     fields do not contain vars.  */
  if (TREE_CODE (t) == TARGET_MEM_REF)
    {
      mark_all_vars_used (&TMR_BASE (t));
      mark_all_vars_used (&TMR_INDEX (t));
      mark_all_vars_used (&TMR_INDEX2 (t));
      *walk_subtrees = 0;
      return NULL;
    }

  /* Only need to mark VAR_DECLS; parameters and return results are not
     eliminated as unused.  */
  if (TREE_CODE (t) == VAR_DECL)
    {
      /* When a global var becomes used for the first time also walk its
	 initializer (non global ones don't have any).  */
      if (set_is_used (t) && is_global_var (t))
	mark_all_vars_used (&DECL_INITIAL (t));
    }
  /* remove_unused_scope_block_p requires information about labels
     which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
  else if (TREE_CODE (t) == LABEL_DECL)
    /* Although the TREE_USED values that the frontend uses would be
       acceptable (albeit slightly over-conservative) for our purposes,
       init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
       must re-compute it here.  */
    TREE_USED (t) = 1;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}

/* Mark the scope block SCOPE and its subblocks unused when they can be
   possibly eliminated if dead.  */

static void
mark_scope_block_unused (tree scope)
{
  tree t;
  TREE_USED (scope) = false;
  if (!(*debug_hooks->ignore_block) (scope))
    TREE_USED (scope) = true;
  for (t = BLOCK_SUBBLOCKS (scope); t; t = BLOCK_CHAIN (t))
    mark_scope_block_unused (t);
}

/* Look if the block is dead (by possibly eliminating its dead subblocks)
   and return true if so.
   Block is declared dead if:
     1) No statements are associated with it.
     2) It declares no live variables.
     3) All subblocks are dead, or there is precisely one subblock and the
	block has the same abstract origin as the outer block and declares
	no variables, so it is a pure wrapper.
   When we are not outputting full debug info, we also eliminate dead variables
   out of scope blocks to let them be recycled by GGC and to save copying work
   done by the inliner.  */
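
/* For instance (illustrative only), in

       void f (void) { { int tmp; } g (); }

   the inner braces introduce a BLOCK with no statements and, once TMP is
   found unused, no live variables either, so by rules 1 and 2 above the
   block is removed and any subblocks it had would be chained into F's
   outer block.  */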

static bool
remove_unused_scope_block_p (tree scope)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  int nsubblocks = 0;

  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &DECL_CHAIN (*t);

      /* Debug info of nested function refers to the block of the
	 function.  We might still call it even if all statements
	 of the function it was nested into were eliminated.

	 TODO: We can actually look into cgraph to see if function
	 will be output to file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
	unused = false;

      /* If a decl has a value expr, we need to instantiate it
	 regardless of debug info generation, to avoid codegen
	 differences in memory overlap tests.  update_equiv_regs() may
	 indirectly call validate_equiv_mem() to test whether a
	 SET_DEST overlaps with others, and if the value expr changes
	 by virtual register instantiation, we may end up with
	 different results.  */
      else if (TREE_CODE (*t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*t))
	unused = false;

      /* Remove everything we don't generate debug info for.  */
      else if (DECL_IGNORED_P (*t))
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}

      /* When we are outputting debug info, we usually want to output
	 info about optimized-out variables in the scope blocks.
	 Exception are the scope blocks not containing any instructions
	 at all so user can't get into the scopes at first place.  */
      else if (is_used_p (*t))
	unused = false;
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
	/* For labels that are still used in the IL, the decision to
	   preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
	   risk having different ordering in debug vs. non-debug builds
	   during inlining or versioning.
	   A label appearing here (we have already checked DECL_IGNORED_P)
	   should not be used in the IL unless it has been explicitly used
	   before, so we use TREE_USED as an approximation.  */
	/* In principle, we should do the same here as for the debug case
	   below, however, when debugging, there might be additional nested
	   levels that keep an upper level with a label live, so we have to
	   force this block to be considered used, too.  */
	unused = false;

      /* When we are not doing full debug info, we however can keep around
	 only the used variables for cfgexpand's memory packing, saving quite
	 a lot of memory.

	 For the sake of -g3, we keep around those vars but we don't count
	 this as use of block, so innermost block with no used vars and no
	 instructions can be considered dead.  We only want to keep around
	 blocks user can breakpoint into and ask about value of optimized
	 out variables.

	 Similarly we need to keep around types at least until all
	 variables of all nested blocks are gone.  We track no
	 information on whether given type is used or not, so we have
	 to keep them even when not emitting debug information,
	 otherwise we may end up remapping variables and their (local)
	 types in different orders depending on whether debug
	 information is being generated.  */

      else if (TREE_CODE (*t) == TYPE_DECL
	       || debug_info_level == DINFO_LEVEL_NORMAL
	       || debug_info_level == DINFO_LEVEL_VERBOSE)
	;
      else
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}
    }

  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
    if (remove_unused_scope_block_p (*t))
      {
	if (BLOCK_SUBBLOCKS (*t))
	  {
	    tree next = BLOCK_CHAIN (*t);
	    tree supercontext = BLOCK_SUPERCONTEXT (*t);

	    *t = BLOCK_SUBBLOCKS (*t);
	    while (BLOCK_CHAIN (*t))
	      {
		BLOCK_SUPERCONTEXT (*t) = supercontext;
		t = &BLOCK_CHAIN (*t);
	      }
	    BLOCK_CHAIN (*t) = next;
	    BLOCK_SUPERCONTEXT (*t) = supercontext;
	    t = &BLOCK_CHAIN (*t);
	    nsubblocks++;
	  }
	else
	  *t = BLOCK_CHAIN (*t);
      }
    else
      {
	t = &BLOCK_CHAIN (*t);
	nsubblocks++;
      }

  if (!unused)
    ;
  /* Outer scope is always used.  */
  else if (!BLOCK_SUPERCONTEXT (scope)
	   || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
    unused = false;
  /* Innermost blocks with no live variables nor statements can be always
     eliminated.  */
  else if (!nsubblocks)
    ;
  /* For terse debug info we can eliminate info on unused variables.  */
  else if (debug_info_level == DINFO_LEVEL_NONE
	   || debug_info_level == DINFO_LEVEL_TERSE)
    {
      /* Even for -g0/-g1 don't prune outer scopes from artificial
	 functions, otherwise diagnostics using tree_nonartificial_location
	 will not be emitted properly.  */
      if (inlined_function_outer_scope_p (scope))
	{
	  tree ao = scope;

	  while (ao
		 && TREE_CODE (ao) == BLOCK
		 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
	    ao = BLOCK_ABSTRACT_ORIGIN (ao);
	  if (ao
	      && TREE_CODE (ao) == FUNCTION_DECL
	      && DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    unused = false;
	}
    }
  else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
    unused = false;
  /* See if this block is important for representation of inlined function.
     Inlined functions are always represented by block with
     block_ultimate_origin being set to FUNCTION_DECL and DECL_SOURCE_LOCATION
     set.  */
  else if (inlined_function_outer_scope_p (scope))
    unused = false;
  else
    /* Verify that only blocks with source location set
       are entry points to the inlined functions.  */
    gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
		== UNKNOWN_LOCATION);

  TREE_USED (scope) = !unused;
  return unused;
}

/* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
   eliminated during the tree->rtl conversion process.  */

static void
mark_all_vars_used (tree *expr_p)
{
  walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
}

/* Helper function for clear_unused_block_pointer, called via walk_tree.  */

static tree
clear_unused_block_pointer_1 (tree *tp, int *, void *)
{
  if (EXPR_P (*tp) && TREE_BLOCK (*tp)
      && !TREE_USED (TREE_BLOCK (*tp)))
    TREE_SET_BLOCK (*tp, NULL);
  return NULL_TREE;
}

/* Set all block pointers in debug or clobber stmts to NULL if the block
   is unused, so that they will not be streamed out.  */

static void
clear_unused_block_pointer (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	unsigned i;
	tree b;
	gimple stmt = gsi_stmt (gsi);

	if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
	  continue;
	b = gimple_block (stmt);
	if (b && !TREE_USED (b))
	  gimple_set_block (stmt, NULL);
	for (i = 0; i < gimple_num_ops (stmt); i++)
	  walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
		     NULL, NULL);
      }
}

/* Dump scope blocks starting at SCOPE to FILE.  INDENT is the
   indentation level and FLAGS is as in print_generic_expr.  */

static void
dump_scope_block (FILE *file, int indent, tree scope, int flags)
{
  tree var, t;
  unsigned int i;

  fprintf (file, "\n%*s{ Scope block #%i%s%s", indent, "", BLOCK_NUMBER (scope),
	   TREE_USED (scope) ? "" : " (unused)",
	   BLOCK_ABSTRACT (scope) ? " (abstract)": "");
  if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
	{
	  fprintf (file, " Originating from :");
	  if (DECL_P (origin))
	    print_generic_decl (file, origin, flags);
	  else
	    fprintf (file, "#%i", BLOCK_NUMBER (origin));
	}
    }
  fprintf (file, " \n");
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "\n");
    }
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
			  flags);
      fprintf (file, " (nonlocalized)\n");
    }
  for (t = BLOCK_SUBBLOCKS (scope); t; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n", indent, "");
}

/* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
   is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_block (tree scope, int flags)
{
  dump_scope_block (stderr, 0, scope, flags);
}

/* Dump the tree of lexical scopes of current_function_decl to FILE.
   FLAGS is as in print_generic_expr.  */

void
dump_scope_blocks (FILE *file, int flags)
{
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}

/* Dump the tree of lexical scopes of current_function_decl to stderr.
   FLAGS is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_blocks (int flags)
{
  dump_scope_blocks (stderr, flags);
}

/* Remove local variables that are not referenced in the IL.  */

void
remove_unused_locals (void)
{
  basic_block bb;
  tree var;
  unsigned srcidx, dstidx, num;
  bool have_local_clobbers = false;

  /* Removing declarations from lexical blocks when not optimizing is
     not only a waste of time, it actually causes differences in stack
     layout.  */
  if (!optimize)
    return;

  timevar_push (TV_REMOVE_UNUSED);

  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  usedvars = BITMAP_ALLOC (NULL);

  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree b = gimple_block (stmt);

	  if (is_gimple_debug (stmt))
	    continue;

	  if (gimple_clobber_p (stmt))
	    {
	      have_local_clobbers = true;
	      continue;
	    }

	  if (b)
	    TREE_USED (b) = true;

	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
	}

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  use_operand_p arg_p;
	  ssa_op_iter i;
	  tree def;
	  gimple phi = gsi_stmt (gsi);

	  if (virtual_operand_p (gimple_phi_result (phi)))
	    continue;

	  def = gimple_phi_result (phi);
	  mark_all_vars_used (&def);

	  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
	    {
	      tree arg = USE_FROM_PTR (arg_p);
	      int index = PHI_ARG_INDEX_FROM_USE (arg_p);
	      tree block =
		LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
	      if (block != NULL_TREE)
		TREE_USED (block) = true;
	      mark_all_vars_used (&arg);
	    }
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (LOCATION_BLOCK (e->goto_locus) != NULL)
	  TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
    }

  /* We do a two-pass approach about the out-of-scope clobbers.  We want
     to remove them if they are the only references to a local variable,
     but we want to retain them when there's any other.  So the first pass
     ignores them, and the second pass (if there were any) tries to remove
     them.  */
  if (have_local_clobbers)
    FOR_EACH_BB (bb)
      {
	gimple_stmt_iterator gsi;

	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	  {
	    gimple stmt = gsi_stmt (gsi);
	    tree b = gimple_block (stmt);

	    if (gimple_clobber_p (stmt))
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree base = get_base_address (lhs);
		/* Remove clobbers referencing unused vars, or clobbers
		   with MEM_REF lhs referencing uninitialized pointers.  */
		if ((TREE_CODE (base) == VAR_DECL && !is_used_p (base))
		    || (TREE_CODE (lhs) == MEM_REF
			&& TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
			&& SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))
			&& (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (lhs, 0)))
			    != PARM_DECL)))
		  {
		    unlink_stmt_vdef (stmt);
		    gsi_remove (&gsi, true);
		    release_defs (stmt);
		    continue;
		  }
		if (b)
		  TREE_USED (b) = true;
	      }
	    gsi_next (&gsi);
	  }
      }

  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local and global vars from local_decls.  */
  num = vec_safe_length (cfun->local_decls);
  for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
    {
      var = (*cfun->local_decls)[srcidx];
      if (TREE_CODE (var) == VAR_DECL)
	{
	  if (!is_used_p (var))
	    {
	      tree def;
	      if (cfun->nonlocal_goto_save_area
		  && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
		cfun->nonlocal_goto_save_area = NULL;
	      /* Release any default def associated with var.  */
	      if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
		{
		  set_ssa_default_def (cfun, var, NULL_TREE);
		  release_ssa_name (def);
		}
	      continue;
	    }
	}
      if (TREE_CODE (var) == VAR_DECL
	  && DECL_HARD_REGISTER (var)
	  && !is_global_var (var))
	cfun->has_local_explicit_reg_vars = true;

      if (srcidx != dstidx)
	(*cfun->local_decls)[dstidx] = var;
      dstidx++;
    }
  if (dstidx != num)
    {
      statistics_counter_event (cfun, "unused VAR_DECLs removed", num - dstidx);
      cfun->local_decls->truncate (dstidx);
    }

  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl));
  clear_unused_block_pointer ();

  BITMAP_FREE (usedvars);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }

  timevar_pop (TV_REMOVE_UNUSED);
}

/* Obstack for global liveness info bitmaps.  We don't want to put these
   on the default obstack because these bitmaps can grow quite large and
   we'll hold on to all that memory until the end of the compiler run.
   As a bonus, delete_tree_live_info can destroy all the bitmaps by just
   releasing the whole obstack.  */
static bitmap_obstack liveness_bitmap_obstack;

/* Allocate and return a new live range information object based on MAP.  */

static tree_live_info_p
new_tree_live_info (var_map map)
{
  tree_live_info_p live;
  basic_block bb;

  live = XNEW (struct tree_live_info_d);
  live->map = map;
  live->num_blocks = last_basic_block;

  live->livein = XNEWVEC (bitmap_head, last_basic_block);
  FOR_EACH_BB (bb)
    bitmap_initialize (&live->livein[bb->index], &liveness_bitmap_obstack);

  live->liveout = XNEWVEC (bitmap_head, last_basic_block);
  FOR_EACH_BB (bb)
    bitmap_initialize (&live->liveout[bb->index], &liveness_bitmap_obstack);

  live->work_stack = XNEWVEC (int, last_basic_block);
  live->stack_top = live->work_stack;

  live->global = BITMAP_ALLOC (&liveness_bitmap_obstack);
  return live;
}

/* Free storage for live range info object LIVE.  */

void
delete_tree_live_info (tree_live_info_p live)
{
  bitmap_obstack_release (&liveness_bitmap_obstack);
  free (live->work_stack);
  free (live->liveout);
  free (live->livein);
  free (live);
}

/* Visit basic block BB and propagate any required live on entry bits from
   LIVE into the predecessors.  VISITED is the bitmap of visited blocks.
   TMP is a temporary work bitmap which is passed in to avoid reallocating
   it each time.  */

static void
loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited,
		 bitmap tmp)
{
  edge e;
  bool change;
  edge_iterator ei;
  basic_block pred_bb;
  bitmap loe;

  gcc_assert (!bitmap_bit_p (visited, bb->index));
  bitmap_set_bit (visited, bb->index);

  loe = live_on_entry (live, bb);

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      pred_bb = e->src;
      if (pred_bb == ENTRY_BLOCK_PTR)
	continue;
      /* TMP is variables live-on-entry from BB that aren't defined in the
	 predecessor block.  This should be the live on entry vars to pred.
	 Note that liveout is the DEFs in a block while live on entry is
	 being calculated.  */
      bitmap_and_compl (tmp, loe, &live->liveout[pred_bb->index]);

      /* Add these bits to live-on-entry for the pred.  If there are any
	 changes, and pred_bb has been visited already, add it to the
	 revisit stack.  */
      change = bitmap_ior_into (live_on_entry (live, pred_bb), tmp);
      if (bitmap_bit_p (visited, pred_bb->index) && change)
	{
	  bitmap_clear_bit (visited, pred_bb->index);
	  *(live->stack_top)++ = pred_bb->index;
	}
    }
}
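
/* In dataflow terms, the propagation above computes, for each non-entry
   predecessor P of BB (a sketch, using the temporary meaning of liveout
   noted in the comment inside the loop):

       LOE (P) |= LOE (BB) & ~DEF (P)

   and pushes P on the work stack whenever the set of an already-visited
   block changes.  */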

/* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
   of all the variables.  */

static void
live_worklist (tree_live_info_p live)
{
  unsigned b;
  basic_block bb;
  sbitmap visited = sbitmap_alloc (last_basic_block + 1);
  bitmap tmp = BITMAP_ALLOC (&liveness_bitmap_obstack);

  bitmap_clear (visited);

  /* Visit all the blocks in reverse order and propagate live on entry values
     into the predecessor blocks.  */
  FOR_EACH_BB_REVERSE (bb)
    loe_visit_block (live, bb, visited, tmp);

  /* Process any blocks which require further iteration.  */
  while (live->stack_top != live->work_stack)
    {
      b = *--(live->stack_top);
      loe_visit_block (live, BASIC_BLOCK (b), visited, tmp);
    }

  BITMAP_FREE (tmp);
  sbitmap_free (visited);
}

/* Calculate the initial live on entry vector for SSA_NAME using immediate_use
   links.  Set the live on entry fields in LIVE.  Defs are marked temporarily
   in the liveout vector.  */

static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
  int p;
  gimple stmt;
  use_operand_p use;
  basic_block def_bb = NULL;
  imm_use_iterator imm_iter;
  bool global = false;

  p = var_to_partition (live->map, ssa_name);
  if (p == NO_PARTITION)
    return;

  stmt = SSA_NAME_DEF_STMT (ssa_name);
  if (stmt)
    {
      def_bb = gimple_bb (stmt);
      /* Mark defs in liveout bitmap temporarily.  */
      if (def_bb)
	bitmap_set_bit (&live->liveout[def_bb->index], p);
    }
  else
    def_bb = ENTRY_BLOCK_PTR;

  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
     add it to the list of live on entry blocks.  */
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
    {
      gimple use_stmt = USE_STMT (use);
      basic_block add_block = NULL;

      if (gimple_code (use_stmt) == GIMPLE_PHI)
	{
	  /* Uses in PHIs are considered to be live at exit of the SRC block
	     as this is where a copy would be inserted.  Check to see if it is
	     defined in that block, or whether it's live on entry.  */
	  int index = PHI_ARG_INDEX_FROM_USE (use);
	  edge e = gimple_phi_arg_edge (use_stmt, index);
	  if (e->src != ENTRY_BLOCK_PTR)
	    {
	      if (e->src != def_bb)
		add_block = e->src;
	    }
	}
      else if (is_gimple_debug (use_stmt))
	continue;
      else
	{
	  /* If it's not defined in this block, it's live on entry.  */
	  basic_block use_bb = gimple_bb (use_stmt);
	  if (use_bb != def_bb)
	    add_block = use_bb;
	}

      /* If there was a live on entry use, set the bit.  */
      if (add_block)
	{
	  global = true;
	  bitmap_set_bit (&live->livein[add_block->index], p);
	}
    }

  /* If SSA_NAME is live on entry to at least one block, fill in all the live
     on entry blocks between the def and all the uses.  */
  if (global)
    bitmap_set_bit (live->global, p);
}
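
/* Worked example (illustrative only): if the name for partition P is
   defined in BB2 and used in BB5, the code above sets the P bit in
   liveout[2] (the temporary DEF mark) and in livein[5], and marks P
   global.  live_worklist then propagates the live-on-entry bit backwards
   from BB5, stopping at the defining block, so intervening blocks on
   BB2->BB5 paths also get P in their livein sets.  */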

/* Calculate the live on exit vectors based on the entry info in LIVEINFO.  */

static void
calculate_live_on_exit (tree_live_info_p liveinfo)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* live on entry calculations used liveout vectors for defs, clear them.  */
  FOR_EACH_BB (bb)
    bitmap_clear (&liveinfo->liveout[bb->index]);

  /* Set all the live-on-exit bits for uses in PHIs.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      size_t i;

      /* Mark the PHI arguments which are live on exit to the pred block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);
	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree t = PHI_ARG_DEF (phi, i);
	      int p;

	      if (TREE_CODE (t) != SSA_NAME)
		continue;

	      p = var_to_partition (liveinfo->map, t);
	      if (p == NO_PARTITION)
		continue;
	      e = gimple_phi_arg_edge (phi, i);
	      if (e->src != ENTRY_BLOCK_PTR)
		bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
	    }
	}

      /* Add each successor's live on entry to this block's live on exit.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->dest != EXIT_BLOCK_PTR)
	  bitmap_ior_into (&liveinfo->liveout[bb->index],
			   live_on_entry (liveinfo, e->dest));
    }
}
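
/* Equivalently (a sketch, not original text):

       liveout (BB) = UNION over non-exit successors S of LOE (S)
		      | PHI-argument uses occurring on edges out of BB

   with entry-block edges excluded as in the loops above.  */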

/* Given partition map MAP, calculate all the live on entry bitmaps for
   each partition.  Return a new live info object.  */

tree_live_info_p
calculate_live_ranges (var_map map)
{
  tree var;
  unsigned i;
  tree_live_info_p live;

  bitmap_obstack_initialize (&liveness_bitmap_obstack);
  live = new_tree_live_info (map);
  for (i = 0; i < num_var_partitions (map); i++)
    {
      var = partition_to_var (map, i);
      if (var != NULL_TREE)
	set_var_live_on_entry (var, live);
    }

  live_worklist (live);

#ifdef ENABLE_CHECKING
  verify_live_on_entry (live);
#endif

  calculate_live_on_exit (live);
  return live;
}
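
/* Illustrative sketch (not original text) of how the liveness API above is
   typically driven, e.g. by out-of-ssa coalescing:

       var_map map = init_var_map (num_ssa_names);
       ... populate and compact MAP ...
       tree_live_info_p live = calculate_live_ranges (map);
       ... query live_on_entry (live, bb) / live_on_exit (live, bb) ...
       delete_tree_live_info (live);
       delete_var_map (map);

   Note that delete_tree_live_info releases the whole liveness obstack, so
   LIVE must not be used afterwards.  */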

/* Output partition map MAP to file F.  */

void
dump_var_map (FILE *f, var_map map)
{
  int t;
  unsigned x, y;
  int p;

  fprintf (f, "\nPartition map \n\n");

  for (x = 0; x < map->num_partitions; x++)
    {
      if (map->view_to_partition != NULL)
	p = map->view_to_partition[x];
      else
	p = x;

      if (ssa_name (p) == NULL_TREE
	  || virtual_operand_p (ssa_name (p)))
	continue;

      t = 0;
      for (y = 1; y < num_ssa_names; y++)
	{
	  p = partition_find (map->var_partition, y);
	  if (map->partition_to_view)
	    p = map->partition_to_view[p];
	  if (p == (int)x)
	    {
	      if (t++ == 0)
		{
		  fprintf (f, "Partition %d (", x);
		  print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
		  fprintf (f, " - ");
		}
	      fprintf (f, "%d ", y);
	    }
	}
      if (t != 0)
	fprintf (f, ")\n");
    }
  fprintf (f, "\n");
}

/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (_var_map &ref)
{
  dump_var_map (stderr, &ref);
}

DEBUG_FUNCTION void
debug (_var_map *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}

/* Output live range info LIVE to file F, controlled by FLAG.  */

void
dump_live_info (FILE *f, tree_live_info_p live, int flag)
{
  basic_block bb;
  unsigned i;
  var_map map = live->map;
  bitmap_iterator bi;

  if ((flag & LIVEDUMP_ENTRY) && live->livein)
    {
      FOR_EACH_BB (bb)
	{
	  fprintf (f, "\nLive on entry to BB%d : ", bb->index);
	  EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
	    {
	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
	      fprintf (f, "  ");
	    }
	  fprintf (f, "\n");
	}
    }

  if ((flag & LIVEDUMP_EXIT) && live->liveout)
    {
      FOR_EACH_BB (bb)
	{
	  fprintf (f, "\nLive on exit from BB%d : ", bb->index);
	  EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
	    {
	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
	      fprintf (f, "  ");
	    }
	  fprintf (f, "\n");
	}
    }
}

/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (tree_live_info_d &ref)
{
  dump_live_info (stderr, &ref, 0);
}

DEBUG_FUNCTION void
debug (tree_live_info_d *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}

#ifdef ENABLE_CHECKING
/* Verify that SSA_VAR is a non-virtual SSA_NAME.  */

void
register_ssa_partition_check (tree ssa_var)
{
  gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
  if (virtual_operand_p (ssa_var))
    {
      fprintf (stderr, "Illegally registering a virtual SSA name :");
      print_generic_expr (stderr, ssa_var, TDF_SLIM);
      fprintf (stderr, " in the SSA->Normal phase.\n");
      internal_error ("SSA corruption");
    }
}

/* Verify that the info in LIVE matches the current cfg.  */

static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple stmt;
  basic_block bb;
  edge e;
  int num;
  edge_iterator ei;
  var_map map = live->map;

  /* Check for live on entry partitions and report those with a DEF in
     the program.  This will typically mean an optimization has done
     something wrong.  */
  bb = ENTRY_BLOCK_PTR;
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (e->dest == EXIT_BLOCK_PTR)
	continue;
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
	{
	  basic_block tmp;
	  tree d = NULL_TREE;
	  bitmap loe;
	  var = partition_to_var (map, i);
	  stmt = SSA_NAME_DEF_STMT (var);
	  tmp = gimple_bb (stmt);
	  if (SSA_NAME_VAR (var))
	    d = ssa_default_def (cfun, SSA_NAME_VAR (var));

	  loe = live_on_entry (live, e->dest);
	  if (loe && bitmap_bit_p (loe, i))
	    {
	      if (!gimple_nop_p (stmt))
		{
		  num++;
		  print_generic_expr (stderr, var, TDF_SLIM);
		  fprintf (stderr, " is defined ");
		  if (tmp)
		    fprintf (stderr, " in BB%d, ", tmp->index);
		  fprintf (stderr, "by:\n");
		  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
		  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
			   entry_block);
		  fprintf (stderr, " So it appears to have multiple defs.\n");
		}
	      else
		{
		  if (d != var)
		    {
		      num++;
		      print_generic_expr (stderr, var, TDF_SLIM);
		      fprintf (stderr, " is live-on-entry to BB%d ",
			       entry_block);
		      if (d)
			{
			  fprintf (stderr, " but is not the default def of ");
			  print_generic_expr (stderr, d, TDF_SLIM);
			  fprintf (stderr, "\n");
			}
		      else
			fprintf (stderr, " and there is no default def.\n");
		    }
		}
	    }
	  else
	    if (d == var)
	      {
		/* The only way this var shouldn't be marked live on entry is
		   if it occurs in a PHI argument of the block.  */
		size_t z;
		bool ok = false;
		gimple_stmt_iterator gsi;
		for (gsi = gsi_start_phis (e->dest);
		     !gsi_end_p (gsi) && !ok;
		     gsi_next (&gsi))
		  {
		    gimple phi = gsi_stmt (gsi);
		    for (z = 0; z < gimple_phi_num_args (phi); z++)
		      if (var == gimple_phi_arg_def (phi, z))
			{
			  ok = true;
			  break;
			}
		  }
		if (ok)
		  continue;
		num++;
		print_generic_expr (stderr, var, TDF_SLIM);
		fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
			 entry_block);
		fprintf (stderr, "but it is a default def so it should be.\n");
	      }
	}
    }
  gcc_assert (num <= 0);
}
#endif