/* Liveness for SSA trees.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.
   Contributed by Andrew MacLeod <amacleod@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "ssa.h"
#include "alias.h"
#include "fold-const.h"
#include "gimple-pretty-print.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "timevar.h"
#include "dumpfile.h"
#include "tree-ssa-live.h"
#include "diagnostic-core.h"
#include "debug.h"
#include "tree-ssa.h"
#include "cgraph.h"
#include "ipa-utils.h"
#include "cfgloop.h"

#ifdef ENABLE_CHECKING
static void verify_live_on_entry (tree_live_info_p);
#endif

/* VARMAP maintains a mapping from SSA version number to real variables.

   All SSA_NAMEs are divided into partitions.  Initially each ssa_name is the
   only member of its own partition.  Coalescing will attempt to group any
   ssa_names which occur in a copy or in a PHI node into the same partition.

   At the end of out-of-ssa, each partition becomes a "real" variable and is
   rewritten as a compiler variable.

   The var_map data structure is used to manage these partitions.  It allows
   partitions to be combined, and determines which partition belongs to what
   ssa_name or variable, and vice versa.  */

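/* Illustrative sketch (not part of GCC; guarded out): how a client such as
   the out-of-SSA machinery might drive the var_map API defined below.  The
   SSA names N1 and N2 are placeholders.  */
#if 0
static void
example_var_map_usage (tree n1, tree n2)
{
  /* One partition slot per SSA version number.  */
  var_map map = init_var_map (num_ssa_names);

  /* Try to coalesce two names; var_union returns the representative
     partition, or NO_PARTITION if they cannot be combined.  */
  int rep = var_union (map, n1, n2);
  gcc_assert (rep != NO_PARTITION);

  /* Compact away unused partitions so partition-indexed bitmaps and
     vectors stay dense.  */
  partition_view_normal (map);

  delete_var_map (map);
}
#endif
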
/* Remove the base table in MAP.  */

static void
var_map_base_fini (var_map map)
{
  /* Free the basevar info if it is present.  */
  if (map->partition_to_base_index != NULL)
    {
      free (map->partition_to_base_index);
      map->partition_to_base_index = NULL;
      map->num_basevars = 0;
    }
}

/* Create a variable partition map of SIZE, initialize and return it.  */

var_map
init_var_map (int size)
{
  var_map map;

  map = (var_map) xmalloc (sizeof (struct _var_map));
  map->var_partition = partition_new (size);

  map->partition_to_view = NULL;
  map->view_to_partition = NULL;
  map->num_partitions = size;
  map->partition_size = size;
  map->num_basevars = 0;
  map->partition_to_base_index = NULL;
  return map;
}

/* Free memory associated with MAP.  */

void
delete_var_map (var_map map)
{
  var_map_base_fini (map);
  partition_delete (map->var_partition);
  free (map->partition_to_view);
  free (map->view_to_partition);
  free (map);
}

/* This function will combine the partitions in MAP for VAR1 and VAR2.  It
   returns the partition which represents the new partition.  If the two
   partitions cannot be combined, NO_PARTITION is returned.  */

int
var_union (var_map map, tree var1, tree var2)
{
  int p1, p2, p3;

  gcc_assert (TREE_CODE (var1) == SSA_NAME);
  gcc_assert (TREE_CODE (var2) == SSA_NAME);

  /* This is independent of partition_to_view.  If partition_to_view is
     on, then whichever one of these partitions is absorbed will never have a
     dereference into the partition_to_view array any more.  */

  p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
  p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));

  gcc_assert (p1 != NO_PARTITION);
  gcc_assert (p2 != NO_PARTITION);

  if (p1 == p2)
    p3 = p1;
  else
    p3 = partition_union (map->var_partition, p1, p2);

  if (map->partition_to_view)
    p3 = map->partition_to_view[p3];

  return p3;
}

/* Compress the partition numbers in MAP such that they fall in the range
   0..(num_partitions-1) instead of wherever they turned out during
   the partitioning exercise.  This removes any references to unused
   partitions, thereby allowing bitmaps and other vectors to be much
   denser.

   This is implemented such that compaction doesn't affect partitioning.
   I.e., once partitions are created and possibly merged, running one
   or more different kinds of compaction will not affect the partitions
   themselves.  Their index might change, but all the same variables will
   still be members of the same partition group.  This allows work on reduced
   sets, and no loss of information when a larger set is later desired.

   In particular, coalescing can work on partitions which have 2 or more
   definitions, and then 'recompact' later to include all the single
   definitions for assignment to program variables.  */

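/* Illustrative sketch (not part of GCC; guarded out): restricting the view
   to the partitions in a bitmap, then returning to the full view later.
   INTERESTING is a placeholder bitmap of partition numbers.  */
#if 0
static void
example_partition_views (var_map map, bitmap interesting)
{
  /* Renumber so that only partitions in INTERESTING are visible; their
     indices are compacted into 0..num_var_partitions (map) - 1.  */
  partition_view_bitmap (map, interesting);

  /* ... work on the reduced set ... */

  /* Recompact to include every used partition again.  The partitions
     themselves are unchanged; only their view indices differ.  */
  partition_view_normal (map);
}
#endif
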
/* Set MAP back to the initial state of having no partition view.  Return a
   bitmap which has a bit set for each partition number which is in use in the
   varmap.  */

static bitmap
partition_view_init (var_map map)
{
  bitmap used;
  int tmp;
  unsigned int x;

  used = BITMAP_ALLOC (NULL);

  /* Already in a view?  Abandon the old one.  */
  if (map->partition_to_view)
    {
      free (map->partition_to_view);
      map->partition_to_view = NULL;
    }
  if (map->view_to_partition)
    {
      free (map->view_to_partition);
      map->view_to_partition = NULL;
    }

  /* Find out which partitions are actually referenced.  */
  for (x = 0; x < map->partition_size; x++)
    {
      tmp = partition_find (map->var_partition, x);
      if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
	  && (!has_zero_uses (ssa_name (tmp))
	      || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))))
	bitmap_set_bit (used, tmp);
    }

  map->num_partitions = map->partition_size;
  return used;
}

/* This routine will finalize the view data for MAP based on the partitions
   set in SELECTED.  This is either the same bitmap returned from
   partition_view_init, or a trimmed down version if some of those partitions
   were not desired in this view.  SELECTED is freed before returning.  */

static void
partition_view_fini (var_map map, bitmap selected)
{
  bitmap_iterator bi;
  unsigned count, i, x, limit;

  gcc_assert (selected);

  count = bitmap_count_bits (selected);
  limit = map->partition_size;

  /* If it's a one-to-one ratio, we don't need any view compaction.  */
  if (count < limit)
    {
      map->partition_to_view = (int *) xmalloc (limit * sizeof (int));
      memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
      map->view_to_partition = (int *) xmalloc (count * sizeof (int));

      i = 0;
      /* Give each selected partition an index.  */
      EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
	{
	  map->partition_to_view[x] = i;
	  map->view_to_partition[i] = x;
	  i++;
	}
      gcc_assert (i == count);
      map->num_partitions = i;
    }

  BITMAP_FREE (selected);
}

/* Create a partition view which includes all the used partitions in MAP.  */

void
partition_view_normal (var_map map)
{
  bitmap used;

  used = partition_view_init (map);
  partition_view_fini (map, used);

  var_map_base_fini (map);
}

/* Create a partition view in MAP which includes just the partitions which
   occur in the bitmap ONLY.  */

void
partition_view_bitmap (var_map map, bitmap only)
{
  bitmap used;
  bitmap new_partitions = BITMAP_ALLOC (NULL);
  unsigned x, p;
  bitmap_iterator bi;

  used = partition_view_init (map);
  EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
    {
      p = partition_find (map->var_partition, x);
      gcc_assert (bitmap_bit_p (used, p));
      bitmap_set_bit (new_partitions, p);
    }
  partition_view_fini (map, new_partitions);

  var_map_base_fini (map);
}

static bitmap usedvars;

/* Mark VAR as used, so that it'll be preserved during rtl expansion.
   Returns true if VAR wasn't marked before.  */

static inline bool
set_is_used (tree var)
{
  return bitmap_set_bit (usedvars, DECL_UID (var));
}

/* Return true if VAR is marked as used.  */

static inline bool
is_used_p (tree var)
{
  return bitmap_bit_p (usedvars, DECL_UID (var));
}

static inline void mark_all_vars_used (tree *);

/* Helper function for mark_all_vars_used, called via walk_tree.  */

static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
  tree b;

  if (TREE_CODE (t) == SSA_NAME)
    {
      *walk_subtrees = 0;
      t = SSA_NAME_VAR (t);
      if (!t)
	return NULL;
    }

  if (IS_EXPR_CODE_CLASS (c)
      && (b = TREE_BLOCK (t)) != NULL)
    TREE_USED (b) = true;

  /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
     fields do not contain vars.  */
  if (TREE_CODE (t) == TARGET_MEM_REF)
    {
      mark_all_vars_used (&TMR_BASE (t));
      mark_all_vars_used (&TMR_INDEX (t));
      mark_all_vars_used (&TMR_INDEX2 (t));
      *walk_subtrees = 0;
      return NULL;
    }

  /* Only need to mark VAR_DECLS; parameters and return results are not
     eliminated as unused.  */
  if (TREE_CODE (t) == VAR_DECL)
    {
      /* When a global var becomes used for the first time also walk its
	 initializer (non global ones don't have any).  */
      if (set_is_used (t) && is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	mark_all_vars_used (&DECL_INITIAL (t));
    }
  /* remove_unused_scope_block_p requires information about labels
     which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
  else if (TREE_CODE (t) == LABEL_DECL)
    /* Although the TREE_USED values that the frontend uses would be
       acceptable (albeit slightly over-conservative) for our purposes,
       init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
       must re-compute it here.  */
    TREE_USED (t) = 1;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}

/* Mark the scope block SCOPE and its subblocks unused when they can be
   possibly eliminated if dead.  */

static void
mark_scope_block_unused (tree scope)
{
  tree t;
  TREE_USED (scope) = false;
  if (!(*debug_hooks->ignore_block) (scope))
    TREE_USED (scope) = true;
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    mark_scope_block_unused (t);
}

/* Look if the block is dead (by possibly eliminating its dead subblocks)
   and return true if so.
   A block is declared dead if:
     1) No statements are associated with it.
     2) It declares no live variables.
     3) All subblocks are dead,
	or there is precisely one subblock and the block
	has the same abstract origin as the outer block and declares
	no variables, so it is a pure wrapper.
   When we are not outputting full debug info, we also eliminate dead variables
   out of scope blocks to let them be recycled by GGC and to save copying work
   done by the inliner.  */

static bool
remove_unused_scope_block_p (tree scope, bool in_ctor_dtor_block)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  int nsubblocks = 0;

  /* For ipa-polymorphic-call.c purposes, preserve blocks:
     1) with BLOCK_ABSTRACT_ORIGIN of a ctor/dtor or their clones  */
  if (inlined_polymorphic_ctor_dtor_block_p (scope, true))
    {
      in_ctor_dtor_block = true;
      unused = false;
    }
  /* 2) inside such blocks, the outermost block with BLOCK_ABSTRACT_ORIGIN
     being a FUNCTION_DECL.  */
  else if (in_ctor_dtor_block
	   && BLOCK_ABSTRACT_ORIGIN (scope)
	   && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (scope)) == FUNCTION_DECL)
    {
      in_ctor_dtor_block = false;
      unused = false;
    }

  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &DECL_CHAIN (*t);

      /* Debug info of a nested function refers to the block of the
	 function.  We might still call it even if all statements
	 of the function it was nested into were eliminated.

	 TODO: We can actually look into cgraph to see if the function
	 will be output to a file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
	unused = false;

      /* If a decl has a value expr, we need to instantiate it
	 regardless of debug info generation, to avoid codegen
	 differences in memory overlap tests.  update_equiv_regs() may
	 indirectly call validate_equiv_mem() to test whether a
	 SET_DEST overlaps with others, and if the value expr changes
	 by virtual register instantiation, we may end up with
	 different results.  */
      else if (TREE_CODE (*t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*t))
	unused = false;

      /* Remove everything we don't generate debug info for.  */
      else if (DECL_IGNORED_P (*t))
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}

      /* When we are outputting debug info, we usually want to output
	 info about optimized-out variables in the scope blocks.
	 The exception is scope blocks not containing any instructions
	 at all, so the user can't get into those scopes in the first
	 place.  */
      else if (is_used_p (*t))
	unused = false;
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
	/* For labels that are still used in the IL, the decision to
	   preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
	   risk having different ordering in debug vs. non-debug builds
	   during inlining or versioning.
	   A label appearing here (we have already checked DECL_IGNORED_P)
	   should not be used in the IL unless it has been explicitly used
	   before, so we use TREE_USED as an approximation.  */
	/* In principle, we should do the same here as for the debug case
	   below; however, when debugging, there might be additional nested
	   levels that keep an upper level with a label live, so we have to
	   force this block to be considered used, too.  */
	unused = false;

      /* When we are not doing full debug info, we however can keep around
	 only the used variables for cfgexpand's memory packing, saving quite
	 a lot of memory.

	 For the sake of -g3, we keep around those vars but we don't count
	 this as a use of the block, so an innermost block with no used vars
	 and no instructions can be considered dead.  We only want to keep
	 around blocks the user can breakpoint into and ask about the value
	 of optimized out variables.

	 Similarly we need to keep around types at least until all
	 variables of all nested blocks are gone.  We track no
	 information on whether a given type is used or not, so we have
	 to keep them even when not emitting debug information,
	 otherwise we may end up remapping variables and their (local)
	 types in different orders depending on whether debug
	 information is being generated.  */

      else if (TREE_CODE (*t) == TYPE_DECL
	       || debug_info_level == DINFO_LEVEL_NORMAL
	       || debug_info_level == DINFO_LEVEL_VERBOSE)
	;
      else
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}
    }

  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
    if (remove_unused_scope_block_p (*t, in_ctor_dtor_block))
      {
	if (BLOCK_SUBBLOCKS (*t))
	  {
	    tree next = BLOCK_CHAIN (*t);
	    tree supercontext = BLOCK_SUPERCONTEXT (*t);

	    *t = BLOCK_SUBBLOCKS (*t);
	    while (BLOCK_CHAIN (*t))
	      {
		BLOCK_SUPERCONTEXT (*t) = supercontext;
		t = &BLOCK_CHAIN (*t);
	      }
	    BLOCK_CHAIN (*t) = next;
	    BLOCK_SUPERCONTEXT (*t) = supercontext;
	    t = &BLOCK_CHAIN (*t);
	    nsubblocks++;
	  }
	else
	  *t = BLOCK_CHAIN (*t);
      }
    else
      {
	t = &BLOCK_CHAIN (*t);
	nsubblocks++;
      }

  if (!unused)
    ;
  /* Outer scope is always used.  */
  else if (!BLOCK_SUPERCONTEXT (scope)
	   || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
    unused = false;
  /* Innermost blocks with no live variables nor statements can always be
     eliminated.  */
  else if (!nsubblocks)
    ;
  /* When not generating debug info we can eliminate info on unused
     variables.  */
  else if (!flag_auto_profile && debug_info_level == DINFO_LEVEL_NONE)
    {
      /* Even for -g0 don't prune outer scopes from artificial
	 functions, otherwise diagnostics using tree_nonartificial_location
	 will not be emitted properly.  */
      if (inlined_function_outer_scope_p (scope))
	{
	  tree ao = scope;

	  while (ao
		 && TREE_CODE (ao) == BLOCK
		 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
	    ao = BLOCK_ABSTRACT_ORIGIN (ao);
	  if (ao
	      && TREE_CODE (ao) == FUNCTION_DECL
	      && DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    unused = false;
	}
    }
  else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
    unused = false;
  /* See if this block is important for the representation of an inlined
     function.  Inlined functions are always represented by a block with
     block_ultimate_origin being set to a FUNCTION_DECL and
     DECL_SOURCE_LOCATION set...  */
  else if (inlined_function_outer_scope_p (scope))
    unused = false;
  else
    /* Verify that only blocks with source location set
       are entry points to the inlined functions.  */
    gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
		== UNKNOWN_LOCATION);

  TREE_USED (scope) = !unused;
  return unused;
}

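/* Illustrative example (not from the sources): for user code such as

       void f (void)
       {
	 {
	   int unused;
	 }
	 g ();
       }

   the inner lexical scope produces a BLOCK with no statements, no used
   variables and no subblocks, so by the rules above
   remove_unused_scope_block_p reports it dead and it is spliced out of
   the block tree.  */
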
/* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
   eliminated during the tree->rtl conversion process.  */

static inline void
mark_all_vars_used (tree *expr_p)
{
  walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
}

/* Helper function for clear_unused_block_pointer, called via walk_tree.  */

static tree
clear_unused_block_pointer_1 (tree *tp, int *, void *)
{
  if (EXPR_P (*tp) && TREE_BLOCK (*tp)
      && !TREE_USED (TREE_BLOCK (*tp)))
    TREE_SET_BLOCK (*tp, NULL);
  return NULL_TREE;
}

/* Set all block pointers in debug or clobber stmts to NULL if the block
   is unused, so that they will not be streamed out.  */

static void
clear_unused_block_pointer (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	unsigned i;
	tree b;
	gimple *stmt = gsi_stmt (gsi);

	if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
	  continue;
	b = gimple_block (stmt);
	if (b && !TREE_USED (b))
	  gimple_set_block (stmt, NULL);
	for (i = 0; i < gimple_num_ops (stmt); i++)
	  walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
		     NULL, NULL);
      }
}

/* Dump scope blocks starting at SCOPE to FILE.  INDENT is the
   indentation level and FLAGS is as in print_generic_expr.  */

static void
dump_scope_block (FILE *file, int indent, tree scope, int flags)
{
  tree var, t;
  unsigned int i;

  fprintf (file, "\n%*s{ Scope block #%i%s%s", indent, "",
	   BLOCK_NUMBER (scope),
	   TREE_USED (scope) ? "" : " (unused)",
	   BLOCK_ABSTRACT (scope) ? " (abstract)" : "");
  if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
	{
	  fprintf (file, " Originating from :");
	  if (DECL_P (origin))
	    print_generic_decl (file, origin, flags);
	  else
	    fprintf (file, "#%i", BLOCK_NUMBER (origin));
	}
    }
  fprintf (file, " \n");
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "\n");
    }
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
			  flags);
      fprintf (file, " (nonlocalized)\n");
    }
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n", indent, "");
}

/* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
   is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_block (tree scope, int flags)
{
  dump_scope_block (stderr, 0, scope, flags);
}

/* Dump the tree of lexical scopes of current_function_decl to FILE.
   FLAGS is as in print_generic_expr.  */

void
dump_scope_blocks (FILE *file, int flags)
{
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}

/* Dump the tree of lexical scopes of current_function_decl to stderr.
   FLAGS is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_blocks (int flags)
{
  dump_scope_blocks (stderr, flags);
}

/* Remove local variables that are not referenced in the IL.  */

void
remove_unused_locals (void)
{
  basic_block bb;
  tree var;
  unsigned srcidx, dstidx, num;
  bool have_local_clobbers = false;

  /* Removing declarations from lexical blocks when not optimizing is
     not only a waste of time, it actually causes differences in stack
     layout.  */
  if (!optimize)
    return;

  timevar_push (TV_REMOVE_UNUSED);

  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  usedvars = BITMAP_ALLOC (NULL);

  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree b = gimple_block (stmt);

	  if (is_gimple_debug (stmt))
	    continue;

	  if (gimple_clobber_p (stmt))
	    {
	      have_local_clobbers = true;
	      continue;
	    }

	  if (b)
	    TREE_USED (b) = true;

	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
	}

      for (gphi_iterator gpi = gsi_start_phis (bb);
	   !gsi_end_p (gpi);
	   gsi_next (&gpi))
	{
	  use_operand_p arg_p;
	  ssa_op_iter i;
	  tree def;
	  gphi *phi = gpi.phi ();

	  if (virtual_operand_p (gimple_phi_result (phi)))
	    continue;

	  def = gimple_phi_result (phi);
	  mark_all_vars_used (&def);

	  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
	    {
	      tree arg = USE_FROM_PTR (arg_p);
	      int index = PHI_ARG_INDEX_FROM_USE (arg_p);
	      tree block =
		LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
	      if (block != NULL)
		TREE_USED (block) = true;
	      mark_all_vars_used (&arg);
	    }
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (LOCATION_BLOCK (e->goto_locus) != NULL)
	  TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
    }

  /* We take a two-pass approach to the out-of-scope clobbers.  We want
     to remove them if they are the only references to a local variable,
     but we want to retain them when there are any other references.  So
     the first pass ignores them, and the second pass (if there were any)
     tries to remove them.  */
  if (have_local_clobbers)
    FOR_EACH_BB_FN (bb, cfun)
      {
	gimple_stmt_iterator gsi;

	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	  {
	    gimple *stmt = gsi_stmt (gsi);
	    tree b = gimple_block (stmt);

	    if (gimple_clobber_p (stmt))
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree base = get_base_address (lhs);
		/* Remove clobbers referencing unused vars, or clobbers
		   with MEM_REF lhs referencing uninitialized pointers.  */
		if ((TREE_CODE (base) == VAR_DECL && !is_used_p (base))
		    || (TREE_CODE (lhs) == MEM_REF
			&& TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
			&& SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))
			&& (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (lhs, 0)))
			    != PARM_DECL)))
		  {
		    unlink_stmt_vdef (stmt);
		    gsi_remove (&gsi, true);
		    release_defs (stmt);
		    continue;
		  }
		if (b)
		  TREE_USED (b) = true;
	      }
	    gsi_next (&gsi);
	  }
      }

  if (cfun->has_simduid_loops)
    {
      struct loop *loop;
      FOR_EACH_LOOP (loop, 0)
	if (loop->simduid && !is_used_p (loop->simduid))
	  loop->simduid = NULL_TREE;
    }

  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local and global vars from local_decls.  */
  num = vec_safe_length (cfun->local_decls);
  for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
    {
      var = (*cfun->local_decls)[srcidx];
      if (TREE_CODE (var) == VAR_DECL)
	{
	  if (!is_used_p (var))
	    {
	      tree def;
	      if (cfun->nonlocal_goto_save_area
		  && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
		cfun->nonlocal_goto_save_area = NULL;
	      /* Release any default def associated with var.  */
	      if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
		{
		  set_ssa_default_def (cfun, var, NULL_TREE);
		  release_ssa_name (def);
		}
	      continue;
	    }
	}
      if (TREE_CODE (var) == VAR_DECL
	  && DECL_HARD_REGISTER (var)
	  && !is_global_var (var))
	cfun->has_local_explicit_reg_vars = true;

      if (srcidx != dstidx)
	(*cfun->local_decls)[dstidx] = var;
      dstidx++;
    }
  if (dstidx != num)
    {
      statistics_counter_event (cfun, "unused VAR_DECLs removed", num - dstidx);
      cfun->local_decls->truncate (dstidx);
    }

  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl), false);
  clear_unused_block_pointer ();

  BITMAP_FREE (usedvars);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }

  timevar_pop (TV_REMOVE_UNUSED);
}

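/* Illustrative example (not from the sources): if earlier passes delete
   every statement mentioning a local, e.g. the dead store in

       int f (int x) { int dead = x + 1; return x; }

   then nothing in the IL references DEAD any more, so the CFG walk above
   never marks it in USEDVARS, and the final loop over cfun->local_decls
   removes both the declaration and any scope block kept only for it.  */
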
/* Allocate and return a new live range information object based on MAP.  */

static tree_live_info_p
new_tree_live_info (var_map map)
{
  tree_live_info_p live;
  basic_block bb;

  live = XNEW (struct tree_live_info_d);
  live->map = map;
  live->num_blocks = last_basic_block_for_fn (cfun);

  bitmap_obstack_initialize (&live->livein_obstack);
  bitmap_obstack_initialize (&live->liveout_obstack);
  live->livein = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  FOR_EACH_BB_FN (bb, cfun)
    bitmap_initialize (&live->livein[bb->index], &live->livein_obstack);

  live->liveout = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  FOR_EACH_BB_FN (bb, cfun)
    bitmap_initialize (&live->liveout[bb->index], &live->liveout_obstack);

  live->work_stack = XNEWVEC (int, last_basic_block_for_fn (cfun));
  live->stack_top = live->work_stack;

  live->global = BITMAP_ALLOC (NULL);
  return live;
}

/* Free storage for live range info object LIVE.  */

void
delete_tree_live_info (tree_live_info_p live)
{
  if (live->livein)
    {
      bitmap_obstack_release (&live->livein_obstack);
      free (live->livein);
    }
  if (live->liveout)
    {
      bitmap_obstack_release (&live->liveout_obstack);
      free (live->liveout);
    }
  BITMAP_FREE (live->global);
  free (live->work_stack);
  free (live);
}

/* Visit basic block BB and propagate any required live on entry bits from
   LIVE into the predecessors.  VISITED is the bitmap of visited blocks.  */

static void
loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited)
{
  edge e;
  bool change;
  edge_iterator ei;
  basic_block pred_bb;
  bitmap loe;

  gcc_checking_assert (!bitmap_bit_p (visited, bb->index));
  bitmap_set_bit (visited, bb->index);

  loe = live_on_entry (live, bb);

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      pred_bb = e->src;
      if (pred_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;
      /* Variables live-on-entry from BB that aren't defined in the
	 predecessor block.  This should be the live on entry vars to pred.
	 Note that liveout is the DEFs in a block while live on entry is
	 being calculated.
	 Add these bits to live-on-entry for the pred.  If there are any
	 changes, and pred_bb has been visited already, add it to the
	 revisit stack.  */
      change = bitmap_ior_and_compl_into (live_on_entry (live, pred_bb),
					  loe, &live->liveout[pred_bb->index]);
      if (change
	  && bitmap_bit_p (visited, pred_bb->index))
	{
	  bitmap_clear_bit (visited, pred_bb->index);
	  *(live->stack_top)++ = pred_bb->index;
	}
    }
}

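/* In dataflow terms, the propagation above computes, for each edge
   PRED -> BB:

     livein (PRED) |= livein (BB) & ~def (PRED)

   where def (PRED) is stored temporarily in the liveout vector (see
   set_var_live_on_entry below).  A predecessor is pushed back onto the
   work stack only when its livein set actually grew after it had
   already been visited.  */
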
/* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
   of all the variables.  */

static void
live_worklist (tree_live_info_p live)
{
  unsigned b;
  basic_block bb;
  sbitmap visited = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);

  bitmap_clear (visited);

  /* Visit all the blocks in reverse order and propagate live on entry values
     into the predecessor blocks.  */
  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    loe_visit_block (live, bb, visited);

  /* Process any blocks which require further iteration.  */
  while (live->stack_top != live->work_stack)
    {
      b = *--(live->stack_top);
      loe_visit_block (live, BASIC_BLOCK_FOR_FN (cfun, b), visited);
    }

  sbitmap_free (visited);
}

/* Calculate the initial live on entry vector for SSA_NAME using immediate_use
   links.  Set the live on entry fields in LIVE.  Defs are marked temporarily
   in the liveout vector.  */

static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
  int p;
  gimple *stmt;
  use_operand_p use;
  basic_block def_bb = NULL;
  imm_use_iterator imm_iter;
  bool global = false;

  p = var_to_partition (live->map, ssa_name);
  if (p == NO_PARTITION)
    return;

  stmt = SSA_NAME_DEF_STMT (ssa_name);
  if (stmt)
    {
      def_bb = gimple_bb (stmt);
      /* Mark defs in liveout bitmap temporarily.  */
      if (def_bb)
	bitmap_set_bit (&live->liveout[def_bb->index], p);
    }
  else
    def_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  /* An undefined local variable does not need to be very alive.  */
  if (ssa_undefined_value_p (ssa_name, false))
    return;

  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
     add it to the list of live on entry blocks.  */
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
    {
      gimple *use_stmt = USE_STMT (use);
      basic_block add_block = NULL;

      if (gimple_code (use_stmt) == GIMPLE_PHI)
	{
	  /* Uses in PHIs are considered to be live at exit of the SRC block
	     as this is where a copy would be inserted.  Check to see if it is
	     defined in that block, or whether it's live on entry.  */
	  int index = PHI_ARG_INDEX_FROM_USE (use);
	  edge e = gimple_phi_arg_edge (as_a <gphi *> (use_stmt), index);
	  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    {
	      if (e->src != def_bb)
		add_block = e->src;
	    }
	}
      else if (is_gimple_debug (use_stmt))
	continue;
      else
	{
	  /* If it's not defined in this block, it's live on entry.  */
	  basic_block use_bb = gimple_bb (use_stmt);
	  if (use_bb != def_bb)
	    add_block = use_bb;
	}

      /* If there was a live on entry use, set the bit.  */
      if (add_block)
	{
	  global = true;
	  bitmap_set_bit (&live->livein[add_block->index], p);
	}
    }

  /* If SSA_NAME is live on entry to at least one block, fill in all the live
     on entry blocks between the def and all the uses.  */
  if (global)
    bitmap_set_bit (live->global, p);
}

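/* Worked example (illustrative): given

       bb2:  a_1 = ...
       bb5:  ... = a_1

   with a path bb2 -> bb3 -> bb5, the def marks liveout[bb2] (the
   temporary def bit), and the use sets livein[bb5] and a_1's partition
   bit in the global bitmap; live_worklist later propagates the bit into
   livein[bb3], the block on the path between the def and the use.  */
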
/* Calculate the live on exit vectors based on the entry info in LIVEINFO.  */

static void
calculate_live_on_exit (tree_live_info_p liveinfo)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* Live on entry calculations used liveout vectors for defs, clear them.  */
  FOR_EACH_BB_FN (bb, cfun)
    bitmap_clear (&liveinfo->liveout[bb->index]);

  /* Set all the live-on-exit bits for uses in PHIs.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator gsi;
      size_t i;

      /* Mark the PHI arguments which are live on exit to the pred block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree t = PHI_ARG_DEF (phi, i);
	      int p;

	      if (TREE_CODE (t) != SSA_NAME)
		continue;

	      p = var_to_partition (liveinfo->map, t);
	      if (p == NO_PARTITION)
		continue;
	      e = gimple_phi_arg_edge (phi, i);
	      if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
	    }
	}

      /* Add each successor's live on entry to this block's live on exit.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	  bitmap_ior_into (&liveinfo->liveout[bb->index],
			   live_on_entry (liveinfo, e->dest));
    }
}

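/* Equivalently, for each block BB the loops above compute

     liveout (BB) = (union of livein (S) over each CFG successor S)
		    | (partitions of PHI arguments flowing out of BB)

   with a PHI argument counted as live on exit from the corresponding
   edge's source block, since that is where out-of-ssa would insert the
   copy.  */
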
/* Given partition map MAP, calculate all the live on entry bitmaps for
   each partition.  Return a new live info object.  */

tree_live_info_p
calculate_live_ranges (var_map map, bool want_livein)
{
  tree var;
  unsigned i;
  tree_live_info_p live;

  live = new_tree_live_info (map);
  for (i = 0; i < num_var_partitions (map); i++)
    {
      var = partition_to_var (map, i);
      if (var != NULL_TREE)
	set_var_live_on_entry (var, live);
    }

  live_worklist (live);

#ifdef ENABLE_CHECKING
  verify_live_on_entry (live);
#endif

  calculate_live_on_exit (live);

  if (!want_livein)
    {
      bitmap_obstack_release (&live->livein_obstack);
      free (live->livein);
      live->livein = NULL;
    }

  return live;
}

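/* Illustrative sketch (not part of GCC; guarded out): typical use of the
   live range API by a consumer such as the coalescer.  */
#if 0
static void
example_live_ranges (var_map map)
{
  basic_block bb;

  /* Request livein as well as liveout bitmaps.  */
  tree_live_info_p live = calculate_live_ranges (map, true);

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Partitions live on entry to / exit from BB.  */
      bitmap in = live_on_entry (live, bb);
      bitmap out = live_on_exit (live, bb);
      /* ... consume IN and OUT ... */
      (void) in;
      (void) out;
    }

  delete_tree_live_info (live);
}
#endif
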
/* Output partition map MAP to file F.  */

void
dump_var_map (FILE *f, var_map map)
{
  int t;
  unsigned x, y;
  int p;

  fprintf (f, "\nPartition map \n\n");

  for (x = 0; x < map->num_partitions; x++)
    {
      if (map->view_to_partition != NULL)
	p = map->view_to_partition[x];
      else
	p = x;

      if (ssa_name (p) == NULL_TREE
	  || virtual_operand_p (ssa_name (p)))
	continue;

      t = 0;
      for (y = 1; y < num_ssa_names; y++)
	{
	  p = partition_find (map->var_partition, y);
	  if (map->partition_to_view)
	    p = map->partition_to_view[p];
	  if (p == (int) x)
	    {
	      if (t++ == 0)
		{
		  fprintf (f, "Partition %d (", x);
		  print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
		  fprintf (f, " - ");
		}
	      fprintf (f, "%d ", y);
	    }
	}
      if (t != 0)
	fprintf (f, ")\n");
    }
  fprintf (f, "\n");
}

/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (_var_map &ref)
{
  dump_var_map (stderr, &ref);
}

DEBUG_FUNCTION void
debug (_var_map *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}

/* Output live range info LIVE to file F, controlled by FLAG.  */

void
dump_live_info (FILE *f, tree_live_info_p live, int flag)
{
  basic_block bb;
  unsigned i;
  var_map map = live->map;
  bitmap_iterator bi;

  if ((flag & LIVEDUMP_ENTRY) && live->livein)
    {
      FOR_EACH_BB_FN (bb, cfun)
	{
	  fprintf (f, "\nLive on entry to BB%d : ", bb->index);
	  EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
	    {
	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
	      fprintf (f, "  ");
	    }
	  fprintf (f, "\n");
	}
    }

  if ((flag & LIVEDUMP_EXIT) && live->liveout)
    {
      FOR_EACH_BB_FN (bb, cfun)
	{
	  fprintf (f, "\nLive on exit from BB%d : ", bb->index);
	  EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
	    {
	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
	      fprintf (f, "  ");
	    }
	  fprintf (f, "\n");
	}
    }
}

/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (tree_live_info_d &ref)
{
  dump_live_info (stderr, &ref, 0);
}

DEBUG_FUNCTION void
debug (tree_live_info_d *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}

#ifdef ENABLE_CHECKING
/* Verify that SSA_VAR is a non-virtual SSA_NAME.  */

void
register_ssa_partition_check (tree ssa_var)
{
  gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
  if (virtual_operand_p (ssa_var))
    {
      fprintf (stderr, "Illegally registering a virtual SSA name :");
      print_generic_expr (stderr, ssa_var, TDF_SLIM);
      fprintf (stderr, " in the SSA->Normal phase.\n");
      internal_error ("SSA corruption");
    }
}

/* Verify that the info in LIVE matches the current cfg.  */

static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple *stmt;
  basic_block bb;
  edge e;
  int num;
  edge_iterator ei;
  var_map map = live->map;

  /* Check for live on entry partitions and report those with a DEF in
     the program.  This will typically mean an optimization has done
     something wrong.  */
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	continue;
      for (i = 0; i < (unsigned) num_var_partitions (map); i++)
	{
	  basic_block tmp;
	  tree d = NULL_TREE;
	  bitmap loe;
	  var = partition_to_var (map, i);
	  stmt = SSA_NAME_DEF_STMT (var);
	  tmp = gimple_bb (stmt);
	  if (SSA_NAME_VAR (var))
	    d = ssa_default_def (cfun, SSA_NAME_VAR (var));

	  loe = live_on_entry (live, e->dest);
	  if (loe && bitmap_bit_p (loe, i))
	    {
	      if (!gimple_nop_p (stmt))
		{
		  num++;
		  print_generic_expr (stderr, var, TDF_SLIM);
		  fprintf (stderr, " is defined ");
		  if (tmp)
		    fprintf (stderr, " in BB%d, ", tmp->index);
		  fprintf (stderr, "by:\n");
		  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
		  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
			   entry_block);
		  fprintf (stderr, " So it appears to have multiple defs.\n");
		}
	      else
		{
		  if (d != var)
		    {
		      num++;
		      print_generic_expr (stderr, var, TDF_SLIM);
		      fprintf (stderr, " is live-on-entry to BB%d ",
			       entry_block);
		      if (d)
			{
			  fprintf (stderr, " but is not the default def of ");
			  print_generic_expr (stderr, d, TDF_SLIM);
			  fprintf (stderr, "\n");
			}
		      else
			fprintf (stderr, " and there is no default def.\n");
		    }
		}
	    }
	  else if (d == var)
	    {
	      /* An undefined local variable does not need to be very
		 alive.  */
	      if (ssa_undefined_value_p (var, false))
		continue;

	      /* The only way this var shouldn't be marked live on entry is
		 if it occurs in a PHI argument of the block.  */
	      size_t z;
	      bool ok = false;
	      gphi_iterator gsi;
	      for (gsi = gsi_start_phis (e->dest);
		   !gsi_end_p (gsi) && !ok;
		   gsi_next (&gsi))
		{
		  gphi *phi = gsi.phi ();
		  for (z = 0; z < gimple_phi_num_args (phi); z++)
		    if (var == gimple_phi_arg_def (phi, z))
		      {
			ok = true;
			break;
		      }
		}
	      if (ok)
		continue;
	      num++;
	      print_generic_expr (stderr, var, TDF_SLIM);
	      fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
		       entry_block);
	      fprintf (stderr, "but it is a default def so it should be.\n");
	    }
	}
    }
  gcc_assert (num <= 0);
}
#endif