/* Liveness for SSA trees.
   Copyright (C) 2003-2023 Free Software Foundation, Inc.
   Contributed by Andrew MacLeod <amacleod@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "gimple-iterator.h"
#include "tree-ssa-live.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "gimple-walk.h"
static void verify_live_on_entry (tree_live_info_p);
/* VARMAP maintains a mapping from SSA version number to real variables.

   All SSA_NAMEs are divided into partitions.  Initially each ssa_name is the
   only member of its own partition.  Coalescing will attempt to group any
   ssa_names which occur in a copy or in a PHI node into the same partition.

   At the end of out-of-ssa, each partition becomes a "real" variable and is
   rewritten as a compiler variable.

   The var_map data structure is used to manage these partitions.  It allows
   partitions to be combined, and determines which partition belongs to what
   ssa_name or variable, and vice versa.  */
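/* For illustration, a minimal sketch of the lifecycle (a hypothetical
   caller; the real drivers are the out-of-SSA and coalescing code):

     var_map map = init_var_map (num_ssa_names, NULL);
     if (var_union (map, name1, name2) != NO_PARTITION)
       ;  // name1 and name2 now share a partition
     partition_view_normal (map);
     ...
     delete_var_map (map);

   Here name1/name2 stand for arbitrary related SSA_NAMEs; everything
   else matches the entry points defined in this file.  */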
/* Remove the base table in MAP.  */

static void
var_map_base_fini (var_map map)
{
  /* Free the basevar info if it is present.  */
  if (map->partition_to_base_index != NULL)
    {
      free (map->partition_to_base_index);
      map->partition_to_base_index = NULL;
      map->num_basevars = 0;
    }
}
/* Create a variable partition map of SIZE for region, initialize and return
   it.  Region is a loop if LOOP is non-NULL, otherwise it is the current
   function.  */

var_map
init_var_map (int size, class loop *loop)
{
  var_map map;

  map = (var_map) xmalloc (sizeof (struct _var_map));
  map->var_partition = partition_new (size);

  map->partition_to_view = NULL;
  map->view_to_partition = NULL;
  map->num_partitions = size;
  map->partition_size = size;
  map->num_basevars = 0;
  map->partition_to_base_index = NULL;
  map->vec_bbs = vNULL;
  if (loop)
    {
      map->bmp_bbs = BITMAP_ALLOC (NULL);
      map->outofssa_p = false;
      basic_block *bbs = get_loop_body_in_dom_order (loop);
      for (unsigned i = 0; i < loop->num_nodes; ++i)
        {
          bitmap_set_bit (map->bmp_bbs, bbs[i]->index);
          map->vec_bbs.safe_push (bbs[i]);
        }
      free (bbs);
    }
  else
    {
      map->bmp_bbs = NULL;
      map->outofssa_p = true;
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
        map->vec_bbs.safe_push (bb);
    }
  return map;
}
/* Free memory associated with MAP.  */

void
delete_var_map (var_map map)
{
  var_map_base_fini (map);
  partition_delete (map->var_partition);
  free (map->partition_to_view);
  free (map->view_to_partition);
  if (map->bmp_bbs)
    BITMAP_FREE (map->bmp_bbs);
  map->vec_bbs.release ();
  free (map);
}
/* This function will combine the partitions in MAP for VAR1 and VAR2.  It
   returns the partition which represents the new partition.  If the two
   partitions cannot be combined, NO_PARTITION is returned.  */

int
var_union (var_map map, tree var1, tree var2)
{
  int p1, p2, p3;

  gcc_assert (TREE_CODE (var1) == SSA_NAME);
  gcc_assert (TREE_CODE (var2) == SSA_NAME);

  /* This is independent of partition_to_view.  If partition_to_view is
     on, then whichever one of these partitions is absorbed will never have a
     dereference into the partition_to_view array any more.  */

  p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
  p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));
  gcc_assert (p1 != NO_PARTITION);
  gcc_assert (p2 != NO_PARTITION);

  if (p1 == p2)
    p3 = p1;
  else
    p3 = partition_union (map->var_partition, p1, p2);

  if (map->partition_to_view)
    p3 = map->partition_to_view[p3];

  return p3;
}
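/* For example (an illustrative sketch; the actual coalescing decisions
   are made elsewhere): given a GIMPLE copy  a_2 = b_1;  a caller may
   attempt  var_union (map, a_2, b_1)  and, if the result is not
   NO_PARTITION, treat both names as one variable from then on.  */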
/* Compress the partition numbers in MAP such that they fall in the range
   0..(num_partitions-1) instead of wherever they turned out during
   the partitioning exercise.  This removes any references to unused
   partitions, thereby allowing bitmaps and other vectors to be much
   denser.

   This is implemented such that compaction doesn't affect partitioning.
   I.e., once partitions are created and possibly merged, running one
   or more different kinds of compaction will not affect the partitions
   themselves.  Their index might change, but all the same variables will
   still be members of the same partition group.  This allows work on reduced
   sets, with no loss of information when a larger set is later desired.

   In particular, coalescing can work on partitions which have 2 or more
   definitions, and then 'recompact' later to include all the single
   definitions for assignment to program variables.  */
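/* A sketch of the compaction flow implemented below (these are internal
   helpers; external callers normally go through partition_view_normal
   or partition_view_bitmap):

     bitmap used = partition_view_init (map);  // all referenced partitions
     ... optionally trim USED down to an interesting subset ...
     partition_view_fini (map, used);          // install view, frees USED
*/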
/* Set MAP back to the initial state of having no partition view.  Return a
   bitmap which has a bit set for each partition number which is in use in
   the varmap.  */

static bitmap
partition_view_init (var_map map)
{
  bitmap used;
  int tmp;
  unsigned int x;

  used = BITMAP_ALLOC (NULL);

  /* Already in a view? Abandon the old one.  */
  if (map->partition_to_view)
    {
      free (map->partition_to_view);
      map->partition_to_view = NULL;
    }
  if (map->view_to_partition)
    {
      free (map->view_to_partition);
      map->view_to_partition = NULL;
    }

  /* Find out which partitions are actually referenced.  */
  for (x = 0; x < map->partition_size; x++)
    {
      tmp = partition_find (map->var_partition, x);
      if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
          && (!has_zero_uses (ssa_name (tmp))
              || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))
              || (SSA_NAME_VAR (ssa_name (tmp))
                  && !VAR_P (SSA_NAME_VAR (ssa_name (tmp))))))
        bitmap_set_bit (used, tmp);
    }

  map->num_partitions = map->partition_size;
  return used;
}
/* This routine will finalize the view data for MAP based on the partitions
   set in SELECTED.  This is either the same bitmap returned from
   partition_view_init, or a trimmed down version if some of those partitions
   were not desired in this view.  SELECTED is freed before returning.  */

static void
partition_view_fini (var_map map, bitmap selected)
{
  bitmap_iterator bi;
  unsigned count, i, x, limit;

  gcc_assert (selected);

  count = bitmap_count_bits (selected);
  limit = map->partition_size;

  /* If it's a one-to-one ratio, we don't need any view compaction.  */
  if (count < limit)
    {
      map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
      memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
      map->view_to_partition = (int *)xmalloc (count * sizeof (int));

      i = 0;
      /* Give each selected partition an index.  */
      EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
        {
          map->partition_to_view[x] = i;
          map->view_to_partition[i] = x;
          i++;
        }
      gcc_assert (i == count);
      map->num_partitions = i;
    }

  BITMAP_FREE (selected);
}
/* Create a partition view which includes all the used partitions in MAP.  */

void
partition_view_normal (var_map map)
{
  bitmap used;

  used = partition_view_init (map);
  partition_view_fini (map, used);

  var_map_base_fini (map);
}
/* Create a partition view in MAP which includes just partitions which occur
   in the bitmap ONLY.  */

void
partition_view_bitmap (var_map map, bitmap only)
{
  bitmap used;
  bitmap new_partitions = BITMAP_ALLOC (NULL);
  unsigned x, p;
  bitmap_iterator bi;

  used = partition_view_init (map);
  EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
    {
      p = partition_find (map->var_partition, x);
      gcc_assert (bitmap_bit_p (used, p));
      bitmap_set_bit (new_partitions, p);
    }
  partition_view_fini (map, new_partitions);
  BITMAP_FREE (used);

  var_map_base_fini (map);
}
static bitmap usedvars;
/* Mark VAR as used, so that it'll be preserved during rtl expansion.
   Returns true if VAR wasn't marked before.  */

static inline bool
set_is_used (tree var)
{
  return bitmap_set_bit (usedvars, DECL_UID (var));
}
/* Return true if VAR is marked as used.  */

static inline bool
is_used_p (tree var)
{
  return bitmap_bit_p (usedvars, DECL_UID (var));
}
static inline void mark_all_vars_used (tree *);
/* Helper function for mark_all_vars_used, called via walk_tree.  */

static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
  tree b;

  if (TREE_CODE (t) == SSA_NAME)
    {
      *walk_subtrees = 0;
      t = SSA_NAME_VAR (t);
      if (!t)
        return NULL;
    }

  if (IS_EXPR_CODE_CLASS (c)
      && (b = TREE_BLOCK (t)) != NULL)
    TREE_USED (b) = true;

  /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
     fields do not contain vars.  */
  if (TREE_CODE (t) == TARGET_MEM_REF)
    {
      mark_all_vars_used (&TMR_BASE (t));
      mark_all_vars_used (&TMR_INDEX (t));
      mark_all_vars_used (&TMR_INDEX2 (t));
      *walk_subtrees = 0;
      return NULL;
    }

  /* Only need to mark VAR_DECLS; parameters and return results are not
     eliminated as unused.  */
  if (VAR_P (t))
    {
      /* When a global var becomes used for the first time also walk its
         initializer (non global ones don't have any).  */
      if (set_is_used (t) && is_global_var (t)
          && DECL_CONTEXT (t) == current_function_decl)
        mark_all_vars_used (&DECL_INITIAL (t));
    }
  /* remove_unused_scope_block_p requires information about labels
     which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
  else if (TREE_CODE (t) == LABEL_DECL)
    /* Although the TREE_USED values that the frontend uses would be
       acceptable (albeit slightly over-conservative) for our purposes,
       init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
       must re-compute it here.  */
    TREE_USED (t) = 1;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}
/* Mark the scope block SCOPE and its subblocks unused when they can be
   possibly eliminated if dead.  */

static void
mark_scope_block_unused (tree scope)
{
  tree t;
  TREE_USED (scope) = false;
  if (!(*debug_hooks->ignore_block) (scope))
    TREE_USED (scope) = true;
  for (t = BLOCK_SUBBLOCKS (scope); t; t = BLOCK_CHAIN (t))
    mark_scope_block_unused (t);
}
/* Look if the block is dead (by possibly eliminating its dead subblocks)
   and return true if so.
   Block is declared dead if:
     1) No statements are associated with it.
     2) Declares no live variables
     3) All subblocks are dead
        or there is precisely one subblock and the block
        has the same abstract origin as the outer block and declares
        no variables, so it is a pure wrapper.
   When we are not outputting full debug info, we also eliminate dead
   variables out of scope blocks to let them be recycled by GGC and to save
   copying work done by the inliner.  */
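/* As an illustration of case 3's "pure wrapper" (a made-up example):

     BLOCK #1 (no vars, abstract origin O)
       BLOCK #2 (no vars, abstract origin O)   <- pure wrapper
         BLOCK #3 (vars: x)

   Block #2 declares nothing, has a single subblock and shares its outer
   block's origin, so it can be removed and #3 spliced into #1.  */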
static bool
remove_unused_scope_block_p (tree scope, bool in_ctor_dtor_block)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  int nsubblocks = 0;

  /* For ipa-polymorphic-call.cc purposes, preserve blocks:
     1) with BLOCK_ABSTRACT_ORIGIN of a ctor/dtor or their clones  */
  if (inlined_polymorphic_ctor_dtor_block_p (scope, true))
    {
      in_ctor_dtor_block = true;
      unused = false;
    }
  /* 2) inside such blocks, the outermost block with block_ultimate_origin
     being a FUNCTION_DECL.  */
  else if (in_ctor_dtor_block)
    {
      tree fn = block_ultimate_origin (scope);
      if (fn && TREE_CODE (fn) == FUNCTION_DECL)
        {
          in_ctor_dtor_block = false;
          unused = false;
        }
    }
  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &DECL_CHAIN (*t);

      /* Debug info of nested function refers to the block of the
         function.  We might still call it even if all statements
         of the function it was nested into were eliminated.

         TODO: We can actually look into cgraph to see if function
         will be output to file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
        unused = false;

      /* If a decl has a value expr, we need to instantiate it
         regardless of debug info generation, to avoid codegen
         differences in memory overlap tests.  update_equiv_regs() may
         indirectly call validate_equiv_mem() to test whether a
         SET_DEST overlaps with others, and if the value expr changes
         by virtual register instantiation, we may end up with
         different results.  */
      else if (VAR_P (*t) && DECL_HAS_VALUE_EXPR_P (*t))
        unused = false;

      /* Remove everything we don't generate debug info for.  */
      else if (DECL_IGNORED_P (*t))
        {
          *t = DECL_CHAIN (*t);
          next = t;
        }

      /* When we are outputting debug info, we usually want to output
         info about optimized-out variables in the scope blocks.
         Exception are the scope blocks not containing any instructions
         at all so user can't get into the scopes at first place.  */
      else if (is_used_p (*t))
        unused = false;
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
        /* For labels that are still used in the IL, the decision to
           preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
           risk having different ordering in debug vs. non-debug builds
           during inlining or versioning.
           A label appearing here (we have already checked DECL_IGNORED_P)
           should not be used in the IL unless it has been explicitly used
           before, so we use TREE_USED as an approximation.  */
        /* In principle, we should do the same here as for the debug case
           below, however, when debugging, there might be additional nested
           levels that keep an upper level with a label live, so we have to
           force this block to be considered used, too.  */
        unused = false;

      /* When we are not doing full debug info, we however can keep around
         only the used variables for cfgexpand's memory packing, saving quite
         a lot of memory.

         For the sake of -g3, we keep around those vars but we don't count
         this as use of block, so innermost block with no used vars and no
         instructions can be considered dead.  We only want to keep around
         blocks user can breakpoint into and ask about the value of
         optimized out variables.

         Similarly we need to keep around types at least until all
         variables of all nested blocks are gone.  We track no
         information on whether given type is used or not, so we have
         to keep them even when not emitting debug information,
         otherwise we may end up remapping variables and their (local)
         types in different orders depending on whether debug
         information is being generated.  */
      else if (TREE_CODE (*t) == TYPE_DECL
               || debug_info_level == DINFO_LEVEL_NORMAL
               || debug_info_level == DINFO_LEVEL_VERBOSE)
        ;
      else
        {
          *t = DECL_CHAIN (*t);
          next = t;
        }
    }
  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
    if (remove_unused_scope_block_p (*t, in_ctor_dtor_block))
      {
        if (BLOCK_SUBBLOCKS (*t))
          {
            tree next = BLOCK_CHAIN (*t);
            tree supercontext = BLOCK_SUPERCONTEXT (*t);

            *t = BLOCK_SUBBLOCKS (*t);
            while (BLOCK_CHAIN (*t))
              {
                BLOCK_SUPERCONTEXT (*t) = supercontext;
                t = &BLOCK_CHAIN (*t);
              }
            BLOCK_CHAIN (*t) = next;
            BLOCK_SUPERCONTEXT (*t) = supercontext;
            t = &BLOCK_CHAIN (*t);
            nsubblocks++;
          }
        else
          *t = BLOCK_CHAIN (*t);
      }
    else
      {
        t = &BLOCK_CHAIN (*t);
        nsubblocks++;
      }
  if (!unused)
    ;
  /* Outer scope is always used.  */
  else if (!BLOCK_SUPERCONTEXT (scope)
           || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
    unused = false;
  /* Innermost blocks with no live variables nor statements can be always
     eliminated.  */
  else if (!nsubblocks)
    ;
  /* When not generating debug info we can eliminate info on unused
     variables.  */
  else if (!flag_auto_profile
           && debug_info_level == DINFO_LEVEL_NONE
           && !optinfo_wants_inlining_info_p ())
    {
      /* Even for -g0 don't prune outer scopes from inlined functions,
         otherwise late diagnostics from such functions will not be
         emitted or suppressed properly.  */
      if (inlined_function_outer_scope_p (scope))
        {
          gcc_assert (TREE_CODE (BLOCK_ORIGIN (scope)) == FUNCTION_DECL);
          unused = false;
        }
    }
  else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
    unused = false;
  /* See if this block is important for representation of inlined
     function.  Inlined functions are always represented by block
     with block_ultimate_origin being set to FUNCTION_DECL and
     DECL_SOURCE_LOCATION set, unless they expand to nothing...  */
  else if (inlined_function_outer_scope_p (scope))
    unused = false;
  else
    /* Verify that only blocks with source location set
       are entry points to the inlined functions.  */
    gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
                == UNKNOWN_LOCATION);

  TREE_USED (scope) = !unused;
  return unused;
}
/* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
   eliminated during the tree->rtl conversion process.  */

static inline void
mark_all_vars_used (tree *expr_p)
{
  walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
}
/* Helper function for clear_unused_block_pointer, called via walk_tree.  */

static tree
clear_unused_block_pointer_1 (tree *tp, int *, void *)
{
  if (EXPR_P (*tp) && TREE_BLOCK (*tp)
      && !TREE_USED (TREE_BLOCK (*tp)))
    TREE_SET_BLOCK (*tp, NULL);
  return NULL_TREE;
}
/* Set all block pointers in debug or clobber stmts to NULL if the block
   is unused, so that they will not be streamed out.  */

static void
clear_unused_block_pointer (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        unsigned i;
        tree b;
        gimple *stmt;

      next:
        stmt = gsi_stmt (gsi);
        if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
          continue;
        b = gimple_block (stmt);
        if (b && !TREE_USED (b))
          {
            /* Elide debug marker stmts that have an associated BLOCK from an
               inline instance removed with also the outermost scope BLOCK of
               said inline instance removed.  If the outermost scope BLOCK of
               said inline instance is preserved use that in place of the
               removed BLOCK.  That keeps the marker associated to the correct
               inline instance (or no inline instance in case it was not from
               an inline instance).  */
            if (gimple_debug_nonbind_marker_p (stmt)
                && BLOCK_ABSTRACT_ORIGIN (b))
              {
                while (TREE_CODE (b) == BLOCK
                       && !inlined_function_outer_scope_p (b))
                  b = BLOCK_SUPERCONTEXT (b);
                if (TREE_CODE (b) == BLOCK)
                  {
                    if (TREE_USED (b))
                      {
                        gimple_set_block (stmt, b);
                        continue;
                      }
                    gsi_remove (&gsi, true);
                    if (gsi_end_p (gsi))
                      break;
                    goto next;
                  }
              }
            gimple_set_block (stmt, NULL);
          }
        for (i = 0; i < gimple_num_ops (stmt); i++)
          walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
                     NULL, NULL);
      }
}
/* Dump scope blocks starting at SCOPE to FILE.  INDENT is the
   indentation level and FLAGS is as in print_generic_expr.  */

static void
dump_scope_block (FILE *file, int indent, tree scope, dump_flags_t flags)
{
  tree var, t;
  unsigned int i;

  fprintf (file, "\n%*s{ Scope block #%i%s", indent, "", BLOCK_NUMBER (scope),
           TREE_USED (scope) ? "" : " (unused)");
  if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
        {
          fprintf (file, " Originating from :");
          if (DECL_P (origin))
            print_generic_decl (file, origin, flags);
          else
            fprintf (file, "#%i", BLOCK_NUMBER (origin));
        }
    }
  if (BLOCK_FRAGMENT_ORIGIN (scope))
    fprintf (file, " Fragment of : #%i",
             BLOCK_NUMBER (BLOCK_FRAGMENT_ORIGIN (scope)));
  else if (BLOCK_FRAGMENT_CHAIN (scope))
    {
      fprintf (file, " Fragment chain :");
      for (t = BLOCK_FRAGMENT_CHAIN (scope); t;
           t = BLOCK_FRAGMENT_CHAIN (t))
        fprintf (file, " #%i", BLOCK_NUMBER (t));
    }
  fprintf (file, " \n");
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "\n");
    }
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
                          flags);
      fprintf (file, " (nonlocalized)\n");
    }
  for (t = BLOCK_SUBBLOCKS (scope); t; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n", indent, "");
}
/* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
   is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_block (tree scope, dump_flags_t flags)
{
  dump_scope_block (stderr, 0, scope, flags);
}
/* Dump the tree of lexical scopes of current_function_decl to FILE.
   FLAGS is as in print_generic_expr.  */

void
dump_scope_blocks (FILE *file, dump_flags_t flags)
{
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}
/* Dump the tree of lexical scopes of current_function_decl to stderr.
   FLAGS is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_blocks (dump_flags_t flags)
{
  dump_scope_blocks (stderr, flags);
}
/* Remove local variables that are not referenced in the IL.  */

void
remove_unused_locals (void)
{
  basic_block bb;
  tree var;
  unsigned srcidx, dstidx, num;
  bool have_local_clobbers = false;

  /* Removing declarations from lexical blocks when not optimizing is
     not only a waste of time, it actually causes differences in stack
     layout.  */
  if (!optimize)
    return;

  timevar_push (TV_REMOVE_UNUSED);

  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  usedvars = BITMAP_ALLOC (NULL);
  auto_bitmap useddebug;
  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          tree b = gimple_block (stmt);

          /* If we wanted to mark the block referenced by the inline
             entry point marker as used, this would be a good spot to
             do it.  If the block is not otherwise used, the stmt will
             be cleaned up in clean_unused_block_pointer.  */
          if (is_gimple_debug (stmt))
            {
              if (gimple_debug_bind_p (stmt))
                {
                  tree var = gimple_debug_bind_get_var (stmt);
                  if (VAR_P (var))
                    {
                      if (!gimple_debug_bind_get_value (stmt))
                        /* Run the 2nd phase.  */
                        have_local_clobbers = true;
                      else
                        bitmap_set_bit (useddebug, DECL_UID (var));
                    }
                }
              continue;
            }

          if (gimple_clobber_p (stmt))
            {
              have_local_clobbers = true;
              continue;
            }

          if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
            {
              have_local_clobbers = true;
              continue;
            }

          if (b)
            TREE_USED (b) = true;

          for (i = 0; i < gimple_num_ops (stmt); i++)
            mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
        }

      for (gphi_iterator gpi = gsi_start_phis (bb);
           !gsi_end_p (gpi);
           gsi_next (&gpi))
        {
          use_operand_p arg_p;
          ssa_op_iter i;
          tree def;
          gphi *phi = gpi.phi ();

          if (virtual_operand_p (gimple_phi_result (phi)))
            continue;

          def = gimple_phi_result (phi);
          mark_all_vars_used (&def);

          FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
            {
              tree arg = USE_FROM_PTR (arg_p);
              int index = PHI_ARG_INDEX_FROM_USE (arg_p);
              tree block =
                LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
              if (block != NULL)
                TREE_USED (block) = true;
              mark_all_vars_used (&arg);
            }
        }

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (LOCATION_BLOCK (e->goto_locus) != NULL)
          TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
    }
  /* We do a two-pass approach on the out-of-scope clobbers.  We want
     to remove them if they are the only references to a local variable,
     but we want to retain them when there's any other.  So the first pass
     ignores them, and the second pass (if there were any) tries to remove
     them.  We do the same for .DEFERRED_INIT.  */
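  /* For instance (an illustrative sketch), if a local X's only remaining
     mention is its end-of-scope clobber

         X ={v} {CLOBBER};

     the first pass leaves X unmarked and the second pass deletes the
     clobber, so X itself can then be removed; a clobber of a variable
     with other uses is kept.  */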
  if (have_local_clobbers)
    FOR_EACH_BB_FN (bb, cfun)
      {
        gimple_stmt_iterator gsi;

        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
          {
            gimple *stmt = gsi_stmt (gsi);
            tree b = gimple_block (stmt);

            if (gimple_clobber_p (stmt))
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree base = get_base_address (lhs);
                /* Remove clobbers referencing unused vars, or clobbers
                   with MEM_REF lhs referencing uninitialized pointers.  */
                if ((VAR_P (base) && !is_used_p (base))
                    || (TREE_CODE (lhs) == MEM_REF
                        && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
                        && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))
                        && (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (lhs, 0)))
                            != PARM_DECL)))
                  {
                    unlink_stmt_vdef (stmt);
                    gsi_remove (&gsi, true);
                    release_defs (stmt);
                    continue;
                  }
                if (b)
                  TREE_USED (b) = true;
              }
            else if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
              {
                tree lhs = gimple_call_lhs (stmt);
                tree base = get_base_address (lhs);
                if (DECL_P (base) && !is_used_p (base))
                  {
                    unlink_stmt_vdef (stmt);
                    gsi_remove (&gsi, true);
                    release_defs (stmt);
                    continue;
                  }
                if (b)
                  TREE_USED (b) = true;
              }
            else if (gimple_debug_bind_p (stmt))
              {
                tree var = gimple_debug_bind_get_var (stmt);
                if (VAR_P (var)
                    && !bitmap_bit_p (useddebug, DECL_UID (var))
                    && !is_used_p (var))
                  {
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      fprintf (dump_file, "Dead debug bind reset to %u\n",
                               DECL_UID (var));
                    gsi_remove (&gsi, true);
                    continue;
                  }
              }
            gsi_next (&gsi);
          }
      }
  if (cfun->has_simduid_loops)
    {
      for (auto loop : loops_list (cfun, 0))
        if (loop->simduid && !is_used_p (loop->simduid))
          loop->simduid = NULL_TREE;
    }

  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local and global vars from local_decls.  */
  num = vec_safe_length (cfun->local_decls);
  for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
    {
      var = (*cfun->local_decls)[srcidx];
      if (VAR_P (var))
        {
          if (!is_used_p (var))
            {
              tree def;
              if (cfun->nonlocal_goto_save_area
                  && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
                cfun->nonlocal_goto_save_area = NULL;
              /* Release any default def associated with var.  */
              if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
                {
                  set_ssa_default_def (cfun, var, NULL_TREE);
                  release_ssa_name (def);
                }
              continue;
            }
        }
      if (VAR_P (var) && DECL_HARD_REGISTER (var) && !is_global_var (var))
        cfun->has_local_explicit_reg_vars = true;

      if (srcidx != dstidx)
        (*cfun->local_decls)[dstidx] = var;
      dstidx++;
    }
  if (dstidx != num)
    {
      statistics_counter_event (cfun, "unused VAR_DECLs removed",
                                num - dstidx);
      cfun->local_decls->truncate (dstidx);
    }

  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl),
                               polymorphic_ctor_dtor_p (current_function_decl,
                                                        true) != NULL_TREE);
  clear_unused_block_pointer ();

  BITMAP_FREE (usedvars);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }

  timevar_pop (TV_REMOVE_UNUSED);
}
/* Allocate and return a new live range information object based on MAP.  */

static tree_live_info_p
new_tree_live_info (var_map map)
{
  tree_live_info_p live;
  basic_block bb;

  live = XNEW (struct tree_live_info_d);
  live->map = map;
  live->num_blocks = last_basic_block_for_fn (cfun);

  bitmap_obstack_initialize (&live->livein_obstack);
  bitmap_obstack_initialize (&live->liveout_obstack);

  live->livein = XCNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  live->liveout = XCNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  for (unsigned i = 0; map->vec_bbs.iterate (i, &bb); ++i)
    {
      bitmap_initialize (&live->livein[bb->index], &live->livein_obstack);
      bitmap_initialize (&live->liveout[bb->index], &live->liveout_obstack);
    }

  live->work_stack = XNEWVEC (int, last_basic_block_for_fn (cfun));
  live->stack_top = live->work_stack;

  live->global = BITMAP_ALLOC (NULL);
  return live;
}
1024 delete_tree_live_info (tree_live_info_p live
)
1028 bitmap_obstack_release (&live
->livein_obstack
);
1029 free (live
->livein
);
1033 bitmap_obstack_release (&live
->liveout_obstack
);
1034 free (live
->liveout
);
1036 BITMAP_FREE (live
->global
);
1037 free (live
->work_stack
);
/* Visit basic block BB and propagate any required live on entry bits from
   LIVE into the predecessors.  VISITED is the bitmap of visited blocks.  */

static void
loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited)
{
  edge e;
  bool change;
  edge_iterator ei;
  basic_block pred_bb;
  bitmap loe;

  gcc_checking_assert (!bitmap_bit_p (visited, bb->index));
  bitmap_set_bit (visited, bb->index);

  loe = live_on_entry (live, bb);

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      pred_bb = e->src;
      if (!region_contains_p (live->map, pred_bb))
        continue;
      /* Variables live-on-entry from BB that aren't defined in the
         predecessor block.  This should be the live on entry vars to pred.
         (Note that at this point liveout holds the DEFs in a block, while
         live on entry holds the upward-exposed uses.)
         Add these bits to live-on-entry for the pred.  If there are any
         changes, and pred_bb has been visited already, add it to the
         revisit stack.  */
      change = bitmap_ior_and_compl_into (live_on_entry (live, pred_bb),
                                          loe, &live->liveout[pred_bb->index]);
      if (change
          && bitmap_bit_p (visited, pred_bb->index))
        {
          bitmap_clear_bit (visited, pred_bb->index);
          *(live->stack_top)++ = pred_bb->index;
        }
    }
}
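/* In dataflow terms, for each predecessor P of BB the loop above computes

     livein (P) |= livein (BB) & ~liveout (P)

   where liveout (P) temporarily holds the partitions defined in P (see
   set_var_live_on_entry).  Iteration continues until no livein set
   changes.  */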
/* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
   of all the variables.  */

static void
live_worklist (tree_live_info_p live)
{
  unsigned b;
  basic_block bb;
  auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);

  bitmap_clear (visited);

  /* Visit region's blocks in reverse order and propagate live on entry values
     into the predecessor blocks.  */
  for (unsigned i = live->map->vec_bbs.length () - 1;
       live->map->vec_bbs.iterate (i, &bb); --i)
    loe_visit_block (live, bb, visited);

  /* Process any blocks which require further iteration.  */
  while (live->stack_top != live->work_stack)
    {
      b = *--(live->stack_top);
      loe_visit_block (live, BASIC_BLOCK_FOR_FN (cfun, b), visited);
    }
}
/* Calculate the initial live on entry vector for SSA_NAME using immediate_use
   links.  Set the live on entry fields in LIVE.  Defs are marked temporarily
   in the liveout vector.  */

static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
  int p;
  gimple *stmt;
  use_operand_p use;
  basic_block def_bb = NULL;
  imm_use_iterator imm_iter;
  bool global = false;

  p = var_to_partition (live->map, ssa_name);
  if (p == NO_PARTITION)
    return;

  stmt = SSA_NAME_DEF_STMT (ssa_name);
  if (stmt)
    {
      def_bb = gimple_bb (stmt);
      /* Mark defs in liveout bitmap temporarily.  */
      if (def_bb && region_contains_p (live->map, def_bb))
        bitmap_set_bit (&live->liveout[def_bb->index], p);
    }
  else
    def_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  /* An undefined local variable does not need to be very alive.  */
  if (ssa_undefined_value_p (ssa_name, false))
    return;

  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
     add it to the list of live on entry blocks.  */
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
    {
      gimple *use_stmt = USE_STMT (use);
      basic_block add_block = NULL;

      if (gimple_code (use_stmt) == GIMPLE_PHI)
        {
          /* Uses in PHI's are considered to be live at exit of the SRC block
             as this is where a copy would be inserted.  Check to see if it is
             defined in that block, or whether it's live on entry.  */
          int index = PHI_ARG_INDEX_FROM_USE (use);
          edge e = gimple_phi_arg_edge (as_a <gphi *> (use_stmt), index);
          if (e->src != def_bb && region_contains_p (live->map, e->src))
            add_block = e->src;
        }
      else if (is_gimple_debug (use_stmt))
        continue;
      else
        {
          /* If it's not defined in this block, it's live on entry.  */
          basic_block use_bb = gimple_bb (use_stmt);
          if (use_bb != def_bb && region_contains_p (live->map, use_bb))
            add_block = use_bb;
        }

      /* If there was a live on entry use, set the bit.  */
      if (add_block)
        {
          global = true;
          bitmap_set_bit (&live->livein[add_block->index], p);
        }
    }

  /* If SSA_NAME is live on entry to at least one block, fill in all the live
     on entry blocks between the def and all the uses.  */
  if (global)
    bitmap_set_bit (live->global, p);
}
/* Calculate the live on exit vectors based on the entry info in LIVEINFO.  */

static void
calculate_live_on_exit (tree_live_info_p liveinfo)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* live on entry calculations used liveout vectors for defs, clear them.  */
  for (unsigned i = 0; liveinfo->map->vec_bbs.iterate (i, &bb); ++i)
    bitmap_clear (&liveinfo->liveout[bb->index]);

  /* Set all the live-on-exit bits for uses in PHIs.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator gsi;
      size_t i;

      /* Mark the PHI arguments which are live on exit to the pred block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          if (virtual_operand_p (gimple_phi_result (phi)))
            continue;
          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree t = PHI_ARG_DEF (phi, i);
              int p;

              if (TREE_CODE (t) != SSA_NAME)
                continue;

              p = var_to_partition (liveinfo->map, t);
              if (p == NO_PARTITION)
                continue;
              e = gimple_phi_arg_edge (phi, i);
              if (region_contains_p (liveinfo->map, e->src))
                bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
            }
        }

      if (!region_contains_p (liveinfo->map, bb))
        continue;

      /* Add each successor's live-on-entry to this block's live-on-exit.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (region_contains_p (liveinfo->map, e->dest))
          bitmap_ior_into (&liveinfo->liveout[bb->index],
                           live_on_entry (liveinfo, e->dest));
    }
}
/* Given partition map MAP, calculate all the live on entry bitmaps for
   each partition.  Return a new live info object.  */

tree_live_info_p
calculate_live_ranges (var_map map, bool want_livein)
{
  tree var;
  unsigned i;
  tree_live_info_p live;

  live = new_tree_live_info (map);
  for (i = 0; i < num_var_partitions (map); i++)
    {
      var = partition_to_var (map, i);
      if (var != NULL_TREE)
        set_var_live_on_entry (var, live);
    }

  live_worklist (live);

  if (flag_checking)
    verify_live_on_entry (live);

  calculate_live_on_exit (live);

  if (!want_livein)
    {
      bitmap_obstack_release (&live->livein_obstack);
      free (live->livein);
      live->livein = NULL;
    }

  return live;
}
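/* A minimal sketch of a typical caller (hypothetical; the real callers
   are in the coalescing and expansion machinery):

     var_map map = init_var_map (num_ssa_names, NULL);
     partition_view_normal (map);
     tree_live_info_p live = calculate_live_ranges (map, true);
     // ... query live_on_entry (live, bb) / live_on_exit (live, bb) ...
     delete_tree_live_info (live);
     delete_var_map (map);
*/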
/* Data structure for compute_live_vars* functions.  */

struct compute_live_vars_data {
  /* Vector of bitmaps for live vars indices at the end of basic blocks,
     indexed by bb->index.  ACTIVE[ENTRY_BLOCK] must be empty bitmap,
     ACTIVE[EXIT_BLOCK] is used for STOP_AFTER.  */
  vec<bitmap_head> active;
  /* Work bitmap of currently live variables.  */
  bitmap work;
  /* Set of interesting variables.  Variables with uids not in this
     hash_map are not tracked.  */
  live_vars_map *vars;
};
/* Callback for walk_stmt_load_store_addr_ops.  If OP is a VAR_DECL with
   uid set in DATA->vars, enter its corresponding index into bitmap
   DATA->work.  */

static bool
compute_live_vars_visit (gimple *, tree op, tree, void *pdata)
{
  compute_live_vars_data *data = (compute_live_vars_data *) pdata;
  op = get_base_address (op);
  if (op && VAR_P (op))
    if (unsigned int *v = data->vars->get (DECL_UID (op)))
      bitmap_set_bit (data->work, *v);
  return false;
}
/* Helper routine for compute_live_vars, calculating the sets of live
   variables at the end of BB, leaving the result in DATA->work.
   If STOP_AFTER is non-NULL, stop processing after that stmt.  */

static void
compute_live_vars_1 (basic_block bb, compute_live_vars_data *data,
                     gimple *stop_after)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit = compute_live_vars_visit;

  bitmap_clear (data->work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (data->work, &data->active[e->src->index]);

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), data, NULL, NULL, visit);
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          if (VAR_P (lhs))
            if (unsigned int *v = data->vars->get (DECL_UID (lhs)))
              bitmap_clear_bit (data->work, *v);
        }
      else if (!is_gimple_debug (stmt))
        walk_stmt_load_store_addr_ops (stmt, data, visit, visit, visit);
      if (stmt == stop_after)
        break;
    }
}
/* For function FN and live_vars_map (hash map from DECL_UIDs to a dense set
   of indexes of automatic variables) VARS, compute which of those variables
   are (might be) live at the end of each basic block.  */

vec<bitmap_head>
compute_live_vars (struct function *fn, live_vars_map *vars)
{
  vec<bitmap_head> active;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we for instance moved an address-taken
     operation upward, without also moving a dereference to it upwards.
     But it's conservatively correct as a variable never can hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  active.create (last_basic_block_for_fn (fn));
  active.quick_grow (last_basic_block_for_fn (fn));
  for (int i = 0; i < last_basic_block_for_fn (fn); i++)
    bitmap_initialize (&active[i], &bitmap_default_obstack);

  bitmap work = BITMAP_ALLOC (NULL);

  int *rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
  int n_bbs = pre_and_rev_post_order_compute_fn (fn, NULL, rpo, false);

  bool changed = true;
  compute_live_vars_data data = { active, work, vars };
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          compute_live_vars_1 (bb, &data, NULL);
          if (bitmap_ior_into (&active[bb->index], work))
            changed = true;
        }
    }

  free (rpo);
  BITMAP_FREE (work);

  return active;
}
/* For ACTIVE computed by compute_live_vars, compute a bitmap of variables
   live after the STOP_AFTER statement and return that bitmap.  */

bitmap
live_vars_at_stmt (vec<bitmap_head> &active, live_vars_map *vars,
                   gimple *stop_after)
{
  bitmap work = BITMAP_ALLOC (NULL);
  compute_live_vars_data data = { active, work, vars };
  basic_block bb = gimple_bb (stop_after);
  compute_live_vars_1 (bb, &data, stop_after);
  return work;
}
/* Destroy what compute_live_vars has returned when it is no longer needed.  */

void
destroy_live_vars (vec<bitmap_head> &active)
{
  unsigned len = active.length ();
  for (unsigned i = 0; i < len; i++)
    bitmap_clear (&active[i]);

  active.release ();
}
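/* Sketch of the intended calling convention for the compute_live_vars
   family (a hypothetical caller; see tree-ssa-live.h for live_vars_map):

     live_vars_map vars;
     unsigned idx = 0;
     // for each tracked automatic VAR_DECL 'decl':
     vars.put (DECL_UID (decl), idx++);

     vec<bitmap_head> active = compute_live_vars (cfun, &vars);
     bitmap live = live_vars_at_stmt (active, &vars, stmt);
     // ... bitmap_bit_p (live, index) tests liveness after STMT ...
     BITMAP_FREE (live);
     destroy_live_vars (active);
*/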
/* Output partition map MAP to file F.  */

void
dump_var_map (FILE *f, var_map map)
{
  int t;
  unsigned x, y;
  int p;

  fprintf (f, "\nPartition map \n\n");

  for (x = 0; x < map->num_partitions; x++)
    {
      if (map->view_to_partition != NULL)
        p = map->view_to_partition[x];
      else
        p = x;

      if (ssa_name (p) == NULL_TREE
          || virtual_operand_p (ssa_name (p)))
        continue;

      t = 0;
      for (y = 1; y < num_ssa_names; y++)
        {
          p = partition_find (map->var_partition, y);
          if (map->partition_to_view)
            p = map->partition_to_view[p];
          if (p == (int)x)
            {
              if (t++ == 0)
                {
                  fprintf (f, "Partition %d (", x);
                  print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
                  fprintf (f, " - ");
                }
              fprintf (f, "%d ", y);
            }
        }
      if (t != 0)
        fprintf (f, ")\n");
    }
  fprintf (f, "\n");
}
/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (_var_map &ref)
{
  dump_var_map (stderr, &ref);
}

DEBUG_FUNCTION void
debug (_var_map *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
/* Output live range info LIVE to file F, controlled by FLAG.  */

void
dump_live_info (FILE *f, tree_live_info_p live, int flag)
{
  basic_block bb;
  unsigned i;
  var_map map = live->map;
  bitmap_iterator bi;

  if ((flag & LIVEDUMP_ENTRY) && live->livein)
    {
      FOR_EACH_BB_FN (bb, cfun)
        {
          fprintf (f, "\nLive on entry to BB%d : ", bb->index);
          EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
            {
              print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
              fprintf (f, "  ");
            }
          fprintf (f, "\n");
        }
    }

  if ((flag & LIVEDUMP_EXIT) && live->liveout)
    {
      FOR_EACH_BB_FN (bb, cfun)
        {
          fprintf (f, "\nLive on exit from BB%d : ", bb->index);
          EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
            {
              print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
              fprintf (f, "  ");
            }
          fprintf (f, "\n");
        }
    }
}
/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (tree_live_info_d &ref)
{
  dump_live_info (stderr, &ref, 0);
}

DEBUG_FUNCTION void
debug (tree_live_info_d *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
/* Verify that the info in LIVE matches the current cfg.  */

static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple *stmt;
  basic_block bb;
  edge e;
  int num;
  edge_iterator ei;
  var_map map = live->map;

  /* Check for live on entry partitions and report those with a DEF in
     the program.  This will typically mean an optimization has done
     something wrong.  */
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (!region_contains_p (live->map, e->dest))
        continue;
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
        {
          basic_block tmp;
          tree d = NULL_TREE;
          bitmap loe;
          var = partition_to_var (map, i);
          stmt = SSA_NAME_DEF_STMT (var);
          tmp = gimple_bb (stmt);
          if (SSA_NAME_VAR (var))
            d = ssa_default_def (cfun, SSA_NAME_VAR (var));

          loe = live_on_entry (live, e->dest);
          if (loe && bitmap_bit_p (loe, i))
            {
              if (!gimple_nop_p (stmt))
                {
                  num++;
                  print_generic_expr (stderr, var, TDF_SLIM);
                  fprintf (stderr, " is defined ");
                  if (tmp)
                    fprintf (stderr, " in BB%d, ", tmp->index);
                  fprintf (stderr, "by:\n");
                  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
                  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
                           entry_block);
                  fprintf (stderr, " So it appears to have multiple defs.\n");
                }
              else if (d != var)
                {
                  num++;
                  print_generic_expr (stderr, var, TDF_SLIM);
                  fprintf (stderr, " is live-on-entry to BB%d ",
                           entry_block);
                  if (d)
                    {
                      fprintf (stderr, " but is not the default def of ");
                      print_generic_expr (stderr, d, TDF_SLIM);
                      fprintf (stderr, "\n");
                    }
                  else
                    fprintf (stderr, " and there is no default def.\n");
                }
            }
          else if (d == var)
            {
              /* An undefined local variable does not need to be very
                 alive.  */
              if (ssa_undefined_value_p (var, false))
                continue;

              /* The only way this var shouldn't be marked live on entry is
                 if it occurs in a PHI argument of the block.  */
              size_t z;
              bool ok = false;
              gphi_iterator gsi;
              for (gsi = gsi_start_phis (e->dest);
                   !gsi_end_p (gsi) && !ok;
                   gsi_next (&gsi))
                {
                  gphi *phi = gsi.phi ();
                  if (virtual_operand_p (gimple_phi_result (phi)))
                    continue;
                  for (z = 0; z < gimple_phi_num_args (phi); z++)
                    if (var == gimple_phi_arg_def (phi, z))
                      {
                        ok = true;
                        break;
                      }
                }
              if (ok)
                continue;
              /* Expand adds unused default defs for PARM_DECLs and
                 RESULT_DECLs.  They're ok.  */
              if (has_zero_uses (var)
                  && SSA_NAME_VAR (var)
                  && !VAR_P (SSA_NAME_VAR (var)))
                continue;
              num++;
              print_generic_expr (stderr, var, TDF_SLIM);
              fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
                       entry_block);
              fprintf (stderr, "but it is a default def so it should be.\n");
            }
        }
    }
  gcc_assert (num <= 0);
}
/* Virtual operand liveness analysis data init.  */

void
virtual_operand_live::init ()
{
  liveout = XCNEWVEC (tree, last_basic_block_for_fn (cfun) + 1);
  liveout[ENTRY_BLOCK] = ssa_default_def (cfun, gimple_vop (cfun));
}
/* Compute live-in of BB from cached live-out.  */

tree
virtual_operand_live::get_live_in (basic_block bb)
{
  /* A virtual PHI is a convenient cache for live-in.  */
  gphi *phi = get_virtual_phi (bb);
  if (phi)
    return gimple_phi_result (phi);

  if (!liveout)
    init ();

  /* Since we don't have a virtual PHI and we don't know whether there's
     a downstream virtual use (and thus PHIs are inserted where necessary)
     we now have to check each incoming edge live-out.  */
  edge_iterator ei;
  edge e;
  tree livein = NULL_TREE;
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (e->flags & EDGE_DFS_BACK)
      /* We can ignore backedges since if there's a def there it would
         have forced a PHI in the source because it also acts as a use
         downstream.  */
      continue;
    else if (!livein)
      livein = get_live_out (e->src);
    else if (get_live_out (e->src) != livein)
      /* When there's no virtual use downstream this indicates a point
         where we'd insert a PHI merging the different live virtual
         operands.  */
      return NULL_TREE;

  return livein;
}
/* Compute live-out of BB.  */

tree
virtual_operand_live::get_live_out (basic_block bb)
{
  if (!liveout)
    init ();

  if (liveout[bb->index])
    return liveout[bb->index];

  tree lo = NULL_TREE;
  for (auto gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_vdef (stmt))
        {
          lo = gimple_vdef (stmt);
          break;
        }
      if (gimple_vuse (stmt))
        {
          lo = gimple_vuse (stmt);
          break;
        }
    }
  if (!lo)
    lo = get_live_in (bb);
  liveout[bb->index] = lo;
  return lo;
}
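/* Usage sketch (a hypothetical caller; the class itself is declared in
   tree-ssa-live.h):

     virtual_operand_live vop_live;
     tree vuse = vop_live.get_live_out (bb);

   get_live_out lazily initializes and memoizes per-block results, so
   queries may be made in any block order.  */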