/* Liveness for SSA trees.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.
   Contributed by Andrew MacLeod <amacleod@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "fold-const.h"
#include "gimple-pretty-print.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "insn-config.h"
#include "tree-ssa-live.h"
#include "diagnostic-core.h"
#include "ipa-utils.h"
#ifdef ENABLE_CHECKING
static void verify_live_on_entry (tree_live_info_p);
#endif
/* VARMAP maintains a mapping from SSA version number to real variables.

   All SSA_NAMEs are divided into partitions.  Initially each ssa_name is the
   only member of its own partition.  Coalescing will attempt to group any
   ssa_names which occur in a copy or in a PHI node into the same partition.

   At the end of out-of-ssa, each partition becomes a "real" variable and is
   rewritten as a compiler variable.

   The var_map data structure is used to manage these partitions.  It allows
   partitions to be combined, and determines which partition belongs to what
   ssa_name or variable, and vice versa.  */
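
/* A hypothetical usage sketch of this API (N1 and N2 stand for two
   arbitrary non-virtual SSA_NAME trees; all helpers below are declared
   in tree-ssa-live.h):

     var_map map = init_var_map (num_ssa_names);
     var_union (map, n1, n2);                -- coalesce N1 and N2
     int p = var_to_partition (map, n1);     -- now the same partition as N2
     tree rep = partition_to_var (map, p);   -- representative for rewriting
     delete_var_map (map);  */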
/* Remove the base table in MAP.  */

static void
var_map_base_fini (var_map map)
{
  /* Free the basevar info if it is present.  */
  if (map->partition_to_base_index != NULL)
    {
      free (map->partition_to_base_index);
      map->partition_to_base_index = NULL;
      map->num_basevars = 0;
    }
}
/* Create a variable partition map of SIZE, initialize and return it.  */

var_map
init_var_map (int size)
{
  var_map map;

  map = (var_map) xmalloc (sizeof (struct _var_map));
  map->var_partition = partition_new (size);

  map->partition_to_view = NULL;
  map->view_to_partition = NULL;
  map->num_partitions = size;
  map->partition_size = size;
  map->num_basevars = 0;
  map->partition_to_base_index = NULL;
  return map;
}
/* Free memory associated with MAP.  */

void
delete_var_map (var_map map)
{
  var_map_base_fini (map);
  partition_delete (map->var_partition);
  free (map->partition_to_view);
  free (map->view_to_partition);
  free (map);
}
/* This function will combine the partitions in MAP for VAR1 and VAR2.  It
   returns the partition which represents the new partition.  If the two
   partitions cannot be combined, NO_PARTITION is returned.  */

int
var_union (var_map map, tree var1, tree var2)
{
  int p1, p2, p3;

  gcc_assert (TREE_CODE (var1) == SSA_NAME);
  gcc_assert (TREE_CODE (var2) == SSA_NAME);

  /* This is independent of partition_to_view.  If partition_to_view is
     on, then whichever one of these partitions is absorbed will never have a
     dereference into the partition_to_view array any more.  */

  p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
  p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));

  gcc_assert (p1 != NO_PARTITION);
  gcc_assert (p2 != NO_PARTITION);

  if (p1 == p2)
    p3 = p1;
  else
    p3 = partition_union (map->var_partition, p1, p2);

  if (map->partition_to_view)
    p3 = map->partition_to_view[p3];

  return p3;
}
/* Compress the partition numbers in MAP such that they fall in the range
   0..(num_partitions-1) instead of wherever they turned out during
   the partitioning exercise.  This removes any references to unused
   partitions, thereby allowing bitmaps and other vectors to be much
   denser.

   This is implemented such that compaction doesn't affect partitioning.
   I.e., once partitions are created and possibly merged, running one
   or more different kinds of compaction will not affect the partitions
   themselves.  Their index might change, but all the same variables will
   still be members of the same partition group.  This allows work on reduced
   sets, and no loss of information when a larger set is later desired.

   In particular, coalescing can work on partitions which have 2 or more
   definitions, and then 'recompact' later to include all the single
   definitions for assignment to program variables.  */
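
/* For instance, a caller might narrow the view to a subset and later
   recompact (a sketch only; SUBSET is a hypothetical bitmap of wanted
   partitions):

     partition_view_normal (map);             -- view of all used partitions
     partition_view_bitmap (map, subset);     -- compact view of SUBSET only

   Neither call changes which ssa_names share a partition; only the dense
   0..num_partitions-1 indexes assigned to them differ.  */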
/* Set MAP back to the initial state of having no partition view.  Return a
   bitmap which has a bit set for each partition number which is in use in
   the varmap.  */

static bitmap
partition_view_init (var_map map)
{
  bitmap used;
  int tmp;
  unsigned int x;

  used = BITMAP_ALLOC (NULL);

  /* Already in a view? Abandon the old one.  */
  if (map->partition_to_view)
    {
      free (map->partition_to_view);
      map->partition_to_view = NULL;
    }
  if (map->view_to_partition)
    {
      free (map->view_to_partition);
      map->view_to_partition = NULL;
    }

  /* Find out which partitions are actually referenced.  */
  for (x = 0; x < map->partition_size; x++)
    {
      tmp = partition_find (map->var_partition, x);
      if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
	  && (!has_zero_uses (ssa_name (tmp))
	      || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))))
	bitmap_set_bit (used, tmp);
    }

  map->num_partitions = map->partition_size;
  return used;
}
/* This routine will finalize the view data for MAP based on the partitions
   set in SELECTED.  This is either the same bitmap returned from
   partition_view_init, or a trimmed down version if some of those partitions
   were not desired in this view.  SELECTED is freed before returning.  */

static void
partition_view_fini (var_map map, bitmap selected)
{
  bitmap_iterator bi;
  unsigned count, i, x, limit;

  gcc_assert (selected);

  count = bitmap_count_bits (selected);
  limit = map->partition_size;

  /* If it's a one-to-one ratio, we don't need any view compaction.  */
  if (count < limit)
    {
      map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
      memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
      map->view_to_partition = (int *)xmalloc (count * sizeof (int));

      i = 0;
      /* Give each selected partition an index.  */
      EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
	{
	  map->partition_to_view[x] = i;
	  map->view_to_partition[i] = x;
	  i++;
	}
      gcc_assert (i == count);
      map->num_partitions = i;
    }

  BITMAP_FREE (selected);
}
/* Create a partition view which includes all the used partitions in MAP.  */

void
partition_view_normal (var_map map)
{
  bitmap used;

  used = partition_view_init (map);
  partition_view_fini (map, used);

  var_map_base_fini (map);
}
/* Create a partition view in MAP which includes just partitions which occur
   in the bitmap ONLY.  */

void
partition_view_bitmap (var_map map, bitmap only)
{
  bitmap used;
  bitmap new_partitions = BITMAP_ALLOC (NULL);
  unsigned x, p;
  bitmap_iterator bi;

  used = partition_view_init (map);
  EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
    {
      p = partition_find (map->var_partition, x);
      gcc_assert (bitmap_bit_p (used, p));
      bitmap_set_bit (new_partitions, p);
    }
  partition_view_fini (map, new_partitions);

  var_map_base_fini (map);
}
static bitmap usedvars;
/* Mark VAR as used, so that it'll be preserved during rtl expansion.
   Returns true if VAR wasn't marked before.  */

static inline bool
set_is_used (tree var)
{
  return bitmap_set_bit (usedvars, DECL_UID (var));
}
/* Return true if VAR is marked as used.  */

static inline bool
is_used_p (tree var)
{
  return bitmap_bit_p (usedvars, DECL_UID (var));
}
static inline void mark_all_vars_used (tree *);
/* Helper function for mark_all_vars_used, called via walk_tree.  */

static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
  tree b;

  if (TREE_CODE (t) == SSA_NAME)
    {
      *walk_subtrees = 0;
      t = SSA_NAME_VAR (t);
      if (!t)
	return NULL;
    }

  if (IS_EXPR_CODE_CLASS (c)
      && (b = TREE_BLOCK (t)) != NULL)
    TREE_USED (b) = true;

  /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
     fields do not contain vars.  */
  if (TREE_CODE (t) == TARGET_MEM_REF)
    {
      mark_all_vars_used (&TMR_BASE (t));
      mark_all_vars_used (&TMR_INDEX (t));
      mark_all_vars_used (&TMR_INDEX2 (t));
      *walk_subtrees = 0;
      return NULL;
    }

  /* Only need to mark VAR_DECLS; parameters and return results are not
     eliminated as unused.  */
  if (TREE_CODE (t) == VAR_DECL)
    {
      /* When a global var becomes used for the first time also walk its
	 initializer (non global ones don't have any).  */
      if (set_is_used (t) && is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	mark_all_vars_used (&DECL_INITIAL (t));
    }
  /* remove_unused_scope_block_p requires information about labels
     which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
  else if (TREE_CODE (t) == LABEL_DECL)
    /* Although the TREE_USED values that the frontend uses would be
       acceptable (albeit slightly over-conservative) for our purposes,
       init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
       must re-compute it here.  */
    TREE_USED (t) = 1;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}
/* Mark the scope block SCOPE and its subblocks unused when they can possibly
   be eliminated if dead.  */

static void
mark_scope_block_unused (tree scope)
{
  tree t;
  TREE_USED (scope) = false;
  if (!(*debug_hooks->ignore_block) (scope))
    TREE_USED (scope) = true;
  for (t = BLOCK_SUBBLOCKS (scope); t; t = BLOCK_CHAIN (t))
    mark_scope_block_unused (t);
}
/* Look if the block is dead (by possibly eliminating its dead subblocks)
   and return true if so.
   Block is declared dead if:
     1) No statements are associated with it.
     2) It declares no live variables.
     3) All subblocks are dead,
	or there is precisely one subblock and the block
	has the same abstract origin as the outer block and declares
	no variables, so it is a pure wrapper.
   When we are not outputting full debug info, we also eliminate dead
   variables out of scope blocks to let them be recycled by GGC and to save
   copying work done by the inliner.  */
static bool
remove_unused_scope_block_p (tree scope, bool in_ctor_dtor_block)
{
  tree *t, *next;
  int nsubblocks = 0;
  bool unused = !TREE_USED (scope);

  /* For ipa-polymorphic-call.c purposes, preserve blocks:
     1) with BLOCK_ABSTRACT_ORIGIN of a ctor/dtor or their clones  */
  if (inlined_polymorphic_ctor_dtor_block_p (scope, true))
    {
      in_ctor_dtor_block = true;
      unused = false;
    }
  /* 2) inside such blocks, the outermost block with BLOCK_ABSTRACT_ORIGIN
     being a FUNCTION_DECL.  */
  else if (in_ctor_dtor_block
	   && BLOCK_ABSTRACT_ORIGIN (scope)
	   && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (scope)) == FUNCTION_DECL)
    {
      in_ctor_dtor_block = false;
      unused = false;
    }
  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &DECL_CHAIN (*t);

      /* Debug info of nested function refers to the block of the
	 function.  We might still call it even if all statements
	 of function it was nested into were eliminated.

	 TODO: We can actually look into cgraph to see if function
	 will be output to file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
	unused = false;

      /* If a decl has a value expr, we need to instantiate it
	 regardless of debug info generation, to avoid codegen
	 differences in memory overlap tests.  update_equiv_regs() may
	 indirectly call validate_equiv_mem() to test whether a
	 SET_DEST overlaps with others, and if the value expr changes
	 by virtual register instantiation, we may end up with
	 different results.  */
      else if (TREE_CODE (*t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*t))
	unused = false;

      /* Remove everything we don't generate debug info for.  */
      else if (DECL_IGNORED_P (*t))
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}

      /* When we are outputting debug info, we usually want to output
	 info about optimized-out variables in the scope blocks.
	 Exception are the scope blocks not containing any instructions
	 at all so user can't get into the scopes at first place.  */
      else if (is_used_p (*t))
	unused = false;
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
	/* For labels that are still used in the IL, the decision to
	   preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
	   risk having different ordering in debug vs. non-debug builds
	   during inlining or versioning.
	   A label appearing here (we have already checked DECL_IGNORED_P)
	   should not be used in the IL unless it has been explicitly used
	   before, so we use TREE_USED as an approximation.  */
	/* In principle, we should do the same here as for the debug case
	   below, however, when debugging, there might be additional nested
	   levels that keep an upper level with a label live, so we have to
	   force this block to be considered used, too.  */
	unused = false;

      /* When we are not doing full debug info, we however can keep around
	 only the used variables for cfgexpand's memory packing, saving quite
	 a lot of memory.

	 For sake of -g3, we keep around those vars but we don't count this as
	 use of block, so innermost block with no used vars and no instructions
	 can be considered dead.  We only want to keep around blocks user can
	 breakpoint into and ask about value of optimized out variables.

	 Similarly we need to keep around types at least until all
	 variables of all nested blocks are gone.  We track no
	 information on whether given type is used or not, so we have
	 to keep them even when not emitting debug information,
	 otherwise we may end up remapping variables and their (local)
	 types in different orders depending on whether debug
	 information is being generated.  */

      else if (TREE_CODE (*t) == TYPE_DECL
	       || debug_info_level == DINFO_LEVEL_NORMAL
	       || debug_info_level == DINFO_LEVEL_VERBOSE)
	;
      else
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}
    }
  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
    if (remove_unused_scope_block_p (*t, in_ctor_dtor_block))
      {
	if (BLOCK_SUBBLOCKS (*t))
	  {
	    tree next = BLOCK_CHAIN (*t);
	    tree supercontext = BLOCK_SUPERCONTEXT (*t);

	    *t = BLOCK_SUBBLOCKS (*t);
	    while (BLOCK_CHAIN (*t))
	      {
		BLOCK_SUPERCONTEXT (*t) = supercontext;
		t = &BLOCK_CHAIN (*t);
	      }
	    BLOCK_CHAIN (*t) = next;
	    BLOCK_SUPERCONTEXT (*t) = supercontext;
	    t = &BLOCK_CHAIN (*t);
	    nsubblocks++;
	  }
	else
	  *t = BLOCK_CHAIN (*t);
      }
    else
      {
	t = &BLOCK_CHAIN (*t);
	nsubblocks++;
      }
  if (!unused)
    ;
  /* Outer scope is always used.  */
  else if (!BLOCK_SUPERCONTEXT (scope)
	   || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
    unused = false;
  /* Innermost blocks with no live variables nor statements can be always
     eliminated.  */
  else if (!nsubblocks)
    ;
  /* When not generating debug info we can eliminate info on unused
     variables.  */
  else if (!flag_auto_profile && debug_info_level == DINFO_LEVEL_NONE)
    {
      /* Even for -g0 don't prune outer scopes from artificial
	 functions, otherwise diagnostics using tree_nonartificial_location
	 will not be emitted properly.  */
      if (inlined_function_outer_scope_p (scope))
	{
	  tree ao = scope;

	  while (ao
		 && TREE_CODE (ao) == BLOCK
		 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
	    ao = BLOCK_ABSTRACT_ORIGIN (ao);
	  if (ao
	      && TREE_CODE (ao) == FUNCTION_DECL
	      && DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    unused = false;
	}
    }
  else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
    unused = false;
  /* See if this block is important for representation of inlined function.
     Inlined functions are always represented by block with
     block_ultimate_origin being set to FUNCTION_DECL and DECL_SOURCE_LOCATION
     set.  */
  else if (inlined_function_outer_scope_p (scope))
    unused = false;
  else
    /* Verify that only blocks with source location set
       are entry points to the inlined functions.  */
    gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
		== UNKNOWN_LOCATION);

  TREE_USED (scope) = !unused;
  return unused;
}
/* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
   eliminated during the tree->rtl conversion process.  */

static inline void
mark_all_vars_used (tree *expr_p)
{
  walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
}
/* Helper function for clear_unused_block_pointer, called via walk_tree.  */

static tree
clear_unused_block_pointer_1 (tree *tp, int *, void *)
{
  if (EXPR_P (*tp) && TREE_BLOCK (*tp)
      && !TREE_USED (TREE_BLOCK (*tp)))
    TREE_SET_BLOCK (*tp, NULL);
  return NULL_TREE;
}
/* Set all block pointers in debug or clobber stmts to NULL if the block
   is unused, so that they will not be streamed out.  */

static void
clear_unused_block_pointer (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	unsigned i;
	tree b;
	gimple *stmt = gsi_stmt (gsi);

	if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
	  continue;
	b = gimple_block (stmt);
	if (b && !TREE_USED (b))
	  gimple_set_block (stmt, NULL);
	for (i = 0; i < gimple_num_ops (stmt); i++)
	  walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
		     NULL, NULL);
      }
}
/* Dump scope blocks starting at SCOPE to FILE.  INDENT is the
   indentation level and FLAGS is as in print_generic_expr.  */

static void
dump_scope_block (FILE *file, int indent, tree scope, int flags)
{
  tree var, t;
  unsigned int i;

  fprintf (file, "\n%*s{ Scope block #%i%s%s", indent, "", BLOCK_NUMBER (scope),
	   TREE_USED (scope) ? "" : " (unused)",
	   BLOCK_ABSTRACT (scope) ? " (abstract)": "");
  if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
	{
	  fprintf (file, " Originating from :");
	  if (DECL_P (origin))
	    print_generic_decl (file, origin, flags);
	  else
	    fprintf (file, "#%i", BLOCK_NUMBER (origin));
	}
    }
  fprintf (file, " \n");
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "\n");
    }
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
			  flags);
      fprintf (file, " (nonlocalized)\n");
    }
  for (t = BLOCK_SUBBLOCKS (scope); t; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n", indent, "");
}
/* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
   is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_block (tree scope, int flags)
{
  dump_scope_block (stderr, 0, scope, flags);
}
/* Dump the tree of lexical scopes of current_function_decl to FILE.
   FLAGS is as in print_generic_expr.  */

void
dump_scope_blocks (FILE *file, int flags)
{
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}
/* Dump the tree of lexical scopes of current_function_decl to stderr.
   FLAGS is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_blocks (int flags)
{
  dump_scope_blocks (stderr, flags);
}
/* Remove local variables that are not referenced in the IL.  */

void
remove_unused_locals (void)
{
  basic_block bb;
  tree var;
  unsigned srcidx, dstidx, num;
  bool have_local_clobbers = false;

  /* Removing declarations from lexical blocks when not optimizing is
     not only a waste of time, it actually causes differences in stack
     layout.  */
  if (!optimize)
    return;

  timevar_push (TV_REMOVE_UNUSED);

  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  usedvars = BITMAP_ALLOC (NULL);

  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree b = gimple_block (stmt);

	  if (is_gimple_debug (stmt))
	    continue;

	  if (gimple_clobber_p (stmt))
	    {
	      have_local_clobbers = true;
	      continue;
	    }

	  if (b)
	    TREE_USED (b) = true;

	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
	}
      for (gphi_iterator gpi = gsi_start_phis (bb);
	   !gsi_end_p (gpi);
	   gsi_next (&gpi))
	{
	  use_operand_p arg_p;
	  ssa_op_iter i;
	  tree def;
	  gphi *phi = gpi.phi ();

	  if (virtual_operand_p (gimple_phi_result (phi)))
	    continue;

	  def = gimple_phi_result (phi);
	  mark_all_vars_used (&def);

	  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
	    {
	      tree arg = USE_FROM_PTR (arg_p);
	      int index = PHI_ARG_INDEX_FROM_USE (arg_p);
	      tree block =
		LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
	      if (block != NULL)
		TREE_USED (block) = true;
	      mark_all_vars_used (&arg);
	    }
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (LOCATION_BLOCK (e->goto_locus) != NULL)
	  TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
    }
  /* We do a two-pass approach about the out-of-scope clobbers.  We want
     to remove them if they are the only references to a local variable,
     but we want to retain them when there's any other.  So the first pass
     ignores them, and the second pass (if there were any) tries to remove
     them.  */
  if (have_local_clobbers)
    FOR_EACH_BB_FN (bb, cfun)
      {
	gimple_stmt_iterator gsi;

	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	  {
	    gimple *stmt = gsi_stmt (gsi);
	    tree b = gimple_block (stmt);

	    if (gimple_clobber_p (stmt))
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree base = get_base_address (lhs);
		/* Remove clobbers referencing unused vars, or clobbers
		   with MEM_REF lhs referencing uninitialized pointers.  */
		if ((TREE_CODE (base) == VAR_DECL && !is_used_p (base))
		    || (TREE_CODE (lhs) == MEM_REF
			&& TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
			&& SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))
			&& (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (lhs, 0)))
			    != PARM_DECL)))
		  {
		    unlink_stmt_vdef (stmt);
		    gsi_remove (&gsi, true);
		    release_defs (stmt);
		    continue;
		  }
		if (b)
		  TREE_USED (b) = true;
	      }
	    gsi_next (&gsi);
	  }
      }
  if (cfun->has_simduid_loops)
    {
      struct loop *loop;
      FOR_EACH_LOOP (loop, 0)
	if (loop->simduid && !is_used_p (loop->simduid))
	  loop->simduid = NULL_TREE;
    }
  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local and global vars from local_decls.  */
  num = vec_safe_length (cfun->local_decls);
  for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
    {
      var = (*cfun->local_decls)[srcidx];
      if (TREE_CODE (var) == VAR_DECL)
	{
	  if (!is_used_p (var))
	    {
	      tree def;
	      if (cfun->nonlocal_goto_save_area
		  && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
		cfun->nonlocal_goto_save_area = NULL;
	      /* Release any default def associated with var.  */
	      if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
		{
		  set_ssa_default_def (cfun, var, NULL_TREE);
		  release_ssa_name (def);
		}
	      continue;
	    }
	}
      if (TREE_CODE (var) == VAR_DECL
	  && DECL_HARD_REGISTER (var)
	  && !is_global_var (var))
	cfun->has_local_explicit_reg_vars = true;

      if (srcidx != dstidx)
	(*cfun->local_decls)[dstidx] = var;
      dstidx++;
    }
  if (dstidx != num)
    {
      statistics_counter_event (cfun, "unused VAR_DECLs removed", num - dstidx);
      cfun->local_decls->truncate (dstidx);
    }
  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl), false);
  clear_unused_block_pointer ();

  BITMAP_FREE (usedvars);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }

  timevar_pop (TV_REMOVE_UNUSED);
}
/* Allocate and return a new live range information object based on MAP.  */

static tree_live_info_p
new_tree_live_info (var_map map)
{
  tree_live_info_p live;
  basic_block bb;

  live = XNEW (struct tree_live_info_d);
  live->map = map;
  live->num_blocks = last_basic_block_for_fn (cfun);

  bitmap_obstack_initialize (&live->livein_obstack);
  bitmap_obstack_initialize (&live->liveout_obstack);
  live->livein = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  FOR_EACH_BB_FN (bb, cfun)
    bitmap_initialize (&live->livein[bb->index], &live->livein_obstack);

  live->liveout = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  FOR_EACH_BB_FN (bb, cfun)
    bitmap_initialize (&live->liveout[bb->index], &live->liveout_obstack);

  live->work_stack = XNEWVEC (int, last_basic_block_for_fn (cfun));
  live->stack_top = live->work_stack;

  live->global = BITMAP_ALLOC (NULL);
  return live;
}
918 delete_tree_live_info (tree_live_info_p live
)
922 bitmap_obstack_release (&live
->livein_obstack
);
927 bitmap_obstack_release (&live
->liveout_obstack
);
928 free (live
->liveout
);
930 BITMAP_FREE (live
->global
);
931 free (live
->work_stack
);
/* Visit basic block BB and propagate any required live on entry bits from
   LIVE into the predecessors.  VISITED is the bitmap of visited blocks.  */
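
/* Restated as the dataflow step this function implements: for each
   predecessor PRED of BB other than the entry block,

     live-on-entry (PRED) |= live-on-entry (BB) & ~defs (PRED)

   where defs (PRED) is temporarily stored in the liveout vector; PRED is
   pushed back on the work stack whenever this adds new bits after PRED
   has already been visited.  */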
static void
loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited)
{
  edge e;
  bool change;
  edge_iterator ei;
  basic_block pred_bb;
  bitmap loe;

  gcc_checking_assert (!bitmap_bit_p (visited, bb->index));
  bitmap_set_bit (visited, bb->index);

  loe = live_on_entry (live, bb);

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      pred_bb = e->src;
      if (pred_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;
      /* Variables live-on-entry from BB that aren't defined in the
	 predecessor block.  This should be the live on entry vars to pred.
	 Note that liveout is the DEFs in a block while live on entry is
	 being defined in an upward visible way.  Add these bits to
	 live-on-entry for the pred.  If there are any changes, and pred_bb
	 has been visited already, add it to the revisit stack.  */
      change = bitmap_ior_and_compl_into (live_on_entry (live, pred_bb),
					  loe, &live->liveout[pred_bb->index]);
      if (change
	  && bitmap_bit_p (visited, pred_bb->index))
	{
	  bitmap_clear_bit (visited, pred_bb->index);
	  *(live->stack_top)++ = pred_bb->index;
	}
    }
}
/* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
   of all the variables.  */

static void
live_worklist (tree_live_info_p live)
{
  unsigned b;
  basic_block bb;
  sbitmap visited = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);

  bitmap_clear (visited);

  /* Visit all the blocks in reverse order and propagate live on entry values
     into the predecessor blocks.  */
  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    loe_visit_block (live, bb, visited);

  /* Process any blocks which require further iteration.  */
  while (live->stack_top != live->work_stack)
    {
      b = *--(live->stack_top);
      loe_visit_block (live, BASIC_BLOCK_FOR_FN (cfun, b), visited);
    }

  sbitmap_free (visited);
}
/* Calculate the initial live on entry vector for SSA_NAME using immediate_use
   links.  Set the live on entry fields in LIVE.  Defs are marked temporarily
   in the liveout vector.  */
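
/* Schematically, for the partition P of SSA_NAME with definition block D:

     liveout (D) |= { P }    -- temporary def marker, cleared later by
			        calculate_live_on_exit
     livein (B)  |= { P }    -- for each block B != D containing a real use;
			        for a PHI use, B is the source block of the
			        incoming edge

   and P is added to the global bitmap if any livein bit was set.  */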
static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
  int p;
  gimple *stmt;
  use_operand_p use;
  basic_block def_bb = NULL;
  imm_use_iterator imm_iter;
  bool global = false;

  p = var_to_partition (live->map, ssa_name);
  if (p == NO_PARTITION)
    return;

  stmt = SSA_NAME_DEF_STMT (ssa_name);
  if (stmt)
    {
      def_bb = gimple_bb (stmt);
      /* Mark defs in liveout bitmap temporarily.  */
      if (def_bb)
	bitmap_set_bit (&live->liveout[def_bb->index], p);
    }
  else
    def_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  /* An undefined local variable does not need to be very alive.  */
  if (ssa_undefined_value_p (ssa_name, false))
    return;

  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
     add it to the list of live on entry blocks.  */
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
    {
      gimple *use_stmt = USE_STMT (use);
      basic_block add_block = NULL;

      if (gimple_code (use_stmt) == GIMPLE_PHI)
	{
	  /* Uses in PHI's are considered to be live at exit of the SRC block
	     as this is where a copy would be inserted.  Check to see if it is
	     defined in that block, or whether it is live on entry.  */
	  int index = PHI_ARG_INDEX_FROM_USE (use);
	  edge e = gimple_phi_arg_edge (as_a <gphi *> (use_stmt), index);
	  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    {
	      if (e->src != def_bb)
		add_block = e->src;
	    }
	}
      else if (is_gimple_debug (use_stmt))
	continue;
      else
	{
	  /* If it's not defined in this block, it's live on entry.  */
	  basic_block use_bb = gimple_bb (use_stmt);
	  if (use_bb != def_bb)
	    add_block = use_bb;
	}

      /* If there was a live on entry use, set the bit.  */
      if (add_block)
	{
	  global = true;
	  bitmap_set_bit (&live->livein[add_block->index], p);
	}
    }

  /* If SSA_NAME is live on entry to at least one block, fill in all the live
     on entry blocks between the def and all the uses.  */
  if (global)
    bitmap_set_bit (live->global, p);
}
/* Calculate the live on exit vectors based on the entry info in LIVEINFO.  */
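
/* Equivalently, this computes for every block BB

     liveout (BB) = UNION over successors S of:  livein (S)
		    plus the partitions of PHI arguments in S that flow
		    in along the edge BB->S.  */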
static void
calculate_live_on_exit (tree_live_info_p liveinfo)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* live on entry calculations used liveout vectors for defs, clear them.  */
  FOR_EACH_BB_FN (bb, cfun)
    bitmap_clear (&liveinfo->liveout[bb->index]);

  /* Set all the live-on-exit bits for uses in PHIs.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator gsi;
      size_t i;

      /* Mark the PHI arguments which are live on exit to the pred block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree t = PHI_ARG_DEF (phi, i);
	      int p;

	      if (TREE_CODE (t) != SSA_NAME)
		continue;

	      p = var_to_partition (liveinfo->map, t);
	      if (p == NO_PARTITION)
		continue;
	      e = gimple_phi_arg_edge (phi, i);
	      if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
	    }
	}

      /* Add each successor's live on entry to this block's live on exit.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	  bitmap_ior_into (&liveinfo->liveout[bb->index],
			   live_on_entry (liveinfo, e->dest));
    }
}
/* Given partition map MAP, calculate all the live on entry bitmaps for
   each partition.  Return a new live info object.  */

tree_live_info_p
calculate_live_ranges (var_map map, bool want_livein)
{
  tree var;
  unsigned i;
  tree_live_info_p live;

  live = new_tree_live_info (map);
  for (i = 0; i < num_var_partitions (map); i++)
    {
      var = partition_to_var (map, i);
      if (var != NULL_TREE)
	set_var_live_on_entry (var, live);
    }

  live_worklist (live);

#ifdef ENABLE_CHECKING
  verify_live_on_entry (live);
#endif

  calculate_live_on_exit (live);

  if (!want_livein)
    {
      bitmap_obstack_release (&live->livein_obstack);
      free (live->livein);
      live->livein = NULL;
    }

  return live;
}
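
/* A typical lifecycle, sketched (hypothetical caller; the real drivers
   are the out-of-ssa passes):

     var_map map = init_var_map (num_ssa_names);
     partition_view_normal (map);
     tree_live_info_p live = calculate_live_ranges (map, true);
     -- query live_on_entry (live, bb) and live_on_exit (live, bb) --
     delete_tree_live_info (live);
     delete_var_map (map);  */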
/* Output partition map MAP to file F.  */

void
dump_var_map (FILE *f, var_map map)
{
  int t;
  unsigned x, y;
  int p;

  fprintf (f, "\nPartition map \n\n");

  for (x = 0; x < map->num_partitions; x++)
    {
      if (map->view_to_partition != NULL)
	p = map->view_to_partition[x];
      else
	p = x;

      if (ssa_name (p) == NULL_TREE
	  || virtual_operand_p (ssa_name (p)))
	continue;

      t = 0;
      for (y = 1; y < num_ssa_names; y++)
	{
	  p = partition_find (map->var_partition, y);
	  if (map->partition_to_view)
	    p = map->partition_to_view[p];
	  if (p == (int)x)
	    {
	      if (t++ == 0)
		{
		  fprintf (f, "Partition %d (", x);
		  print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
		  fprintf (f, " - ");
		}
	      fprintf (f, "%d ", y);
	    }
	}
      if (t != 0)
	fprintf (f, ")\n");
    }
  fprintf (f, "\n");
}
/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (_var_map &ref)
{
  dump_var_map (stderr, &ref);
}

DEBUG_FUNCTION void
debug (_var_map *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
/* Output live range info LIVE to file F, controlled by FLAG.  */

void
dump_live_info (FILE *f, tree_live_info_p live, int flag)
{
  basic_block bb;
  unsigned i;
  var_map map = live->map;
  bitmap_iterator bi;

  if ((flag & LIVEDUMP_ENTRY) && live->livein)
    {
      FOR_EACH_BB_FN (bb, cfun)
	{
	  fprintf (f, "\nLive on entry to BB%d : ", bb->index);
	  EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
	    {
	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
	      fprintf (f, "  ");
	    }
	  fprintf (f, "\n");
	}
    }

  if ((flag & LIVEDUMP_EXIT) && live->liveout)
    {
      FOR_EACH_BB_FN (bb, cfun)
	{
	  fprintf (f, "\nLive on exit from BB%d : ", bb->index);
	  EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
	    {
	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
	      fprintf (f, "  ");
	    }
	  fprintf (f, "\n");
	}
    }
}
/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (tree_live_info_d &ref)
{
  dump_live_info (stderr, &ref, 0);
}

DEBUG_FUNCTION void
debug (tree_live_info_d *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
#ifdef ENABLE_CHECKING
/* Verify that SSA_VAR is a non-virtual SSA_NAME.  */

void
register_ssa_partition_check (tree ssa_var)
{
  gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
  if (virtual_operand_p (ssa_var))
    {
      fprintf (stderr, "Illegally registering a virtual SSA name :");
      print_generic_expr (stderr, ssa_var, TDF_SLIM);
      fprintf (stderr, " in the SSA->Normal phase.\n");
      internal_error ("SSA corruption");
    }
}
/* Verify that the info in LIVE matches the current cfg.  */

static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple *stmt;
  basic_block bb;
  edge e;
  int num;
  edge_iterator ei;
  var_map map = live->map;

  /* Check for live on entry partitions and report those with a DEF in
     the program.  This will typically mean an optimization has done
     something wrong.  */
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	continue;
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
	{
	  basic_block tmp;
	  tree d = NULL_TREE;
	  bitmap loe;
	  var = partition_to_var (map, i);
	  stmt = SSA_NAME_DEF_STMT (var);
	  tmp = gimple_bb (stmt);
	  if (SSA_NAME_VAR (var))
	    d = ssa_default_def (cfun, SSA_NAME_VAR (var));

	  loe = live_on_entry (live, e->dest);
	  if (loe && bitmap_bit_p (loe, i))
	    {
	      if (!gimple_nop_p (stmt))
		{
		  num++;
		  print_generic_expr (stderr, var, TDF_SLIM);
		  fprintf (stderr, " is defined ");
		  if (tmp)
		    fprintf (stderr, " in BB%d, ", tmp->index);
		  fprintf (stderr, "by:\n");
		  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
		  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
			   entry_block);
		  fprintf (stderr, " So it appears to have multiple defs.\n");
		}
	      else
		{
		  if (d != var)
		    {
		      num++;
		      print_generic_expr (stderr, var, TDF_SLIM);
		      fprintf (stderr, " is live-on-entry to BB%d ",
			       entry_block);
		      if (d)
			{
			  fprintf (stderr, " but is not the default def of ");
			  print_generic_expr (stderr, d, TDF_SLIM);
			  fprintf (stderr, "\n");
			}
		      else
			fprintf (stderr, " and there is no default def.\n");
		    }
		}
	    }
	  else
	    if (d == var)
	      {
		/* An undefined local variable does not need to be very
		   alive.  */
		if (ssa_undefined_value_p (var, false))
		  continue;

		/* The only way this var shouldn't be marked live on entry is
		   if it occurs in a PHI argument of the block.  */
		size_t z;
		bool ok = false;
		gphi_iterator gsi;
		for (gsi = gsi_start_phis (e->dest);
		     !gsi_end_p (gsi) && !ok;
		     gsi_next (&gsi))
		  {
		    gphi *phi = gsi.phi ();
		    for (z = 0; z < gimple_phi_num_args (phi); z++)
		      if (var == gimple_phi_arg_def (phi, z))
			{
			  ok = true;
			  break;
			}
		  }
		if (ok)
		  continue;
		num++;
		print_generic_expr (stderr, var, TDF_SLIM);
		fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
			 entry_block);
		fprintf (stderr, "but it is a default def so it should be.\n");
	      }
	}
    }
  gcc_assert (num <= 0);
}
#endif
<= 0);