change a function argument from rtx to rtx_insn *
[official-gcc.git] / gcc / tree-ssa-live.c
blob25b548b89c7ccdfcbd1a40cf73b8bee6d07c5292
1 /* Liveness for SSA trees.
2 Copyright (C) 2003-2015 Free Software Foundation, Inc.
3 Contributed by Andrew MacLeod <amacleod@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "rtl.h"
28 #include "ssa.h"
29 #include "alias.h"
30 #include "fold-const.h"
31 #include "gimple-pretty-print.h"
32 #include "internal-fn.h"
33 #include "gimple-iterator.h"
34 #include "flags.h"
35 #include "insn-config.h"
36 #include "expmed.h"
37 #include "dojump.h"
38 #include "explow.h"
39 #include "calls.h"
40 #include "emit-rtl.h"
41 #include "varasm.h"
42 #include "stmt.h"
43 #include "expr.h"
44 #include "tree-dfa.h"
45 #include "timevar.h"
46 #include "dumpfile.h"
47 #include "tree-ssa-live.h"
48 #include "diagnostic-core.h"
49 #include "debug.h"
50 #include "tree-ssa.h"
51 #include "cgraph.h"
52 #include "ipa-utils.h"
53 #include "cfgloop.h"
55 #ifdef ENABLE_CHECKING
56 static void verify_live_on_entry (tree_live_info_p);
57 #endif
60 /* VARMAP maintains a mapping from SSA version number to real variables.
62 All SSA_NAMES are divided into partitions. Initially each ssa_name is the
63 only member of it's own partition. Coalescing will attempt to group any
64 ssa_names which occur in a copy or in a PHI node into the same partition.
66 At the end of out-of-ssa, each partition becomes a "real" variable and is
67 rewritten as a compiler variable.
69 The var_map data structure is used to manage these partitions. It allows
70 partitions to be combined, and determines which partition belongs to what
71 ssa_name or variable, and vice versa. */
74 /* Remove the base table in MAP. */
76 static void
77 var_map_base_fini (var_map map)
79 /* Free the basevar info if it is present. */
80 if (map->partition_to_base_index != NULL)
82 free (map->partition_to_base_index);
83 map->partition_to_base_index = NULL;
84 map->num_basevars = 0;
87 /* Create a variable partition map of SIZE, initialize and return it. */
89 var_map
90 init_var_map (int size)
92 var_map map;
94 map = (var_map) xmalloc (sizeof (struct _var_map));
95 map->var_partition = partition_new (size);
97 map->partition_to_view = NULL;
98 map->view_to_partition = NULL;
99 map->num_partitions = size;
100 map->partition_size = size;
101 map->num_basevars = 0;
102 map->partition_to_base_index = NULL;
103 return map;
107 /* Free memory associated with MAP. */
109 void
110 delete_var_map (var_map map)
112 var_map_base_fini (map);
113 partition_delete (map->var_partition);
114 free (map->partition_to_view);
115 free (map->view_to_partition);
116 free (map);
120 /* This function will combine the partitions in MAP for VAR1 and VAR2. It
121 Returns the partition which represents the new partition. If the two
122 partitions cannot be combined, NO_PARTITION is returned. */
125 var_union (var_map map, tree var1, tree var2)
127 int p1, p2, p3;
129 gcc_assert (TREE_CODE (var1) == SSA_NAME);
130 gcc_assert (TREE_CODE (var2) == SSA_NAME);
132 /* This is independent of partition_to_view. If partition_to_view is
133 on, then whichever one of these partitions is absorbed will never have a
134 dereference into the partition_to_view array any more. */
136 p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
137 p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));
139 gcc_assert (p1 != NO_PARTITION);
140 gcc_assert (p2 != NO_PARTITION);
142 if (p1 == p2)
143 p3 = p1;
144 else
145 p3 = partition_union (map->var_partition, p1, p2);
147 if (map->partition_to_view)
148 p3 = map->partition_to_view[p3];
150 return p3;
154 /* Compress the partition numbers in MAP such that they fall in the range
155 0..(num_partitions-1) instead of wherever they turned out during
156 the partitioning exercise. This removes any references to unused
157 partitions, thereby allowing bitmaps and other vectors to be much
158 denser.
160 This is implemented such that compaction doesn't affect partitioning.
161 Ie., once partitions are created and possibly merged, running one
162 or more different kind of compaction will not affect the partitions
163 themselves. Their index might change, but all the same variables will
164 still be members of the same partition group. This allows work on reduced
165 sets, and no loss of information when a larger set is later desired.
167 In particular, coalescing can work on partitions which have 2 or more
168 definitions, and then 'recompact' later to include all the single
169 definitions for assignment to program variables. */
172 /* Set MAP back to the initial state of having no partition view. Return a
173 bitmap which has a bit set for each partition number which is in use in the
174 varmap. */
176 static bitmap
177 partition_view_init (var_map map)
179 bitmap used;
180 int tmp;
181 unsigned int x;
183 used = BITMAP_ALLOC (NULL);
185 /* Already in a view? Abandon the old one. */
186 if (map->partition_to_view)
188 free (map->partition_to_view);
189 map->partition_to_view = NULL;
191 if (map->view_to_partition)
193 free (map->view_to_partition);
194 map->view_to_partition = NULL;
197 /* Find out which partitions are actually referenced. */
198 for (x = 0; x < map->partition_size; x++)
200 tmp = partition_find (map->var_partition, x);
201 if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
202 && (!has_zero_uses (ssa_name (tmp))
203 || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))
204 || (SSA_NAME_VAR (ssa_name (tmp))
205 && !VAR_P (SSA_NAME_VAR (ssa_name (tmp))))))
206 bitmap_set_bit (used, tmp);
209 map->num_partitions = map->partition_size;
210 return used;
214 /* This routine will finalize the view data for MAP based on the partitions
215 set in SELECTED. This is either the same bitmap returned from
216 partition_view_init, or a trimmed down version if some of those partitions
217 were not desired in this view. SELECTED is freed before returning. */
219 static void
220 partition_view_fini (var_map map, bitmap selected)
222 bitmap_iterator bi;
223 unsigned count, i, x, limit;
225 gcc_assert (selected);
227 count = bitmap_count_bits (selected);
228 limit = map->partition_size;
230 /* If its a one-to-one ratio, we don't need any view compaction. */
231 if (count < limit)
233 map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
234 memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
235 map->view_to_partition = (int *)xmalloc (count * sizeof (int));
237 i = 0;
238 /* Give each selected partition an index. */
239 EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
241 map->partition_to_view[x] = i;
242 map->view_to_partition[i] = x;
243 i++;
245 gcc_assert (i == count);
246 map->num_partitions = i;
249 BITMAP_FREE (selected);
253 /* Create a partition view which includes all the used partitions in MAP. */
255 void
256 partition_view_normal (var_map map)
258 bitmap used;
260 used = partition_view_init (map);
261 partition_view_fini (map, used);
263 var_map_base_fini (map);
267 /* Create a partition view in MAP which includes just partitions which occur in
268 the bitmap ONLY. If WANT_BASES is true, create the base variable map
269 as well. */
271 void
272 partition_view_bitmap (var_map map, bitmap only)
274 bitmap used;
275 bitmap new_partitions = BITMAP_ALLOC (NULL);
276 unsigned x, p;
277 bitmap_iterator bi;
279 used = partition_view_init (map);
280 EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
282 p = partition_find (map->var_partition, x);
283 gcc_assert (bitmap_bit_p (used, p));
284 bitmap_set_bit (new_partitions, p);
286 partition_view_fini (map, new_partitions);
288 var_map_base_fini (map);
292 static bitmap usedvars;
294 /* Mark VAR as used, so that it'll be preserved during rtl expansion.
295 Returns true if VAR wasn't marked before. */
297 static inline bool
298 set_is_used (tree var)
300 return bitmap_set_bit (usedvars, DECL_UID (var));
303 /* Return true if VAR is marked as used. */
305 static inline bool
306 is_used_p (tree var)
308 return bitmap_bit_p (usedvars, DECL_UID (var));
311 static inline void mark_all_vars_used (tree *);
313 /* Helper function for mark_all_vars_used, called via walk_tree. */
315 static tree
316 mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
318 tree t = *tp;
319 enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
320 tree b;
322 if (TREE_CODE (t) == SSA_NAME)
324 *walk_subtrees = 0;
325 t = SSA_NAME_VAR (t);
326 if (!t)
327 return NULL;
330 if (IS_EXPR_CODE_CLASS (c)
331 && (b = TREE_BLOCK (t)) != NULL)
332 TREE_USED (b) = true;
334 /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
335 fields do not contain vars. */
336 if (TREE_CODE (t) == TARGET_MEM_REF)
338 mark_all_vars_used (&TMR_BASE (t));
339 mark_all_vars_used (&TMR_INDEX (t));
340 mark_all_vars_used (&TMR_INDEX2 (t));
341 *walk_subtrees = 0;
342 return NULL;
345 /* Only need to mark VAR_DECLS; parameters and return results are not
346 eliminated as unused. */
347 if (TREE_CODE (t) == VAR_DECL)
349 /* When a global var becomes used for the first time also walk its
350 initializer (non global ones don't have any). */
351 if (set_is_used (t) && is_global_var (t)
352 && DECL_CONTEXT (t) == current_function_decl)
353 mark_all_vars_used (&DECL_INITIAL (t));
355 /* remove_unused_scope_block_p requires information about labels
356 which are not DECL_IGNORED_P to tell if they might be used in the IL. */
357 else if (TREE_CODE (t) == LABEL_DECL)
358 /* Although the TREE_USED values that the frontend uses would be
359 acceptable (albeit slightly over-conservative) for our purposes,
360 init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
361 must re-compute it here. */
362 TREE_USED (t) = 1;
364 if (IS_TYPE_OR_DECL_P (t))
365 *walk_subtrees = 0;
367 return NULL;
370 /* Mark the scope block SCOPE and its subblocks unused when they can be
371 possibly eliminated if dead. */
373 static void
374 mark_scope_block_unused (tree scope)
376 tree t;
377 TREE_USED (scope) = false;
378 if (!(*debug_hooks->ignore_block) (scope))
379 TREE_USED (scope) = true;
380 for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
381 mark_scope_block_unused (t);
384 /* Look if the block is dead (by possibly eliminating its dead subblocks)
385 and return true if so.
386 Block is declared dead if:
387 1) No statements are associated with it.
388 2) Declares no live variables
389 3) All subblocks are dead
390 or there is precisely one subblocks and the block
391 has same abstract origin as outer block and declares
392 no variables, so it is pure wrapper.
393 When we are not outputting full debug info, we also eliminate dead variables
394 out of scope blocks to let them to be recycled by GGC and to save copying work
395 done by the inliner. */
397 static bool
398 remove_unused_scope_block_p (tree scope, bool in_ctor_dtor_block)
400 tree *t, *next;
401 bool unused = !TREE_USED (scope);
402 int nsubblocks = 0;
404 /* For ipa-polymorphic-call.c purposes, preserve blocks:
405 1) with BLOCK_ABSTRACT_ORIGIN of a ctor/dtor or their clones */
406 if (inlined_polymorphic_ctor_dtor_block_p (scope, true))
408 in_ctor_dtor_block = true;
409 unused = false;
411 /* 2) inside such blocks, the outermost block with BLOCK_ABSTRACT_ORIGIN
412 being a FUNCTION_DECL. */
413 else if (in_ctor_dtor_block
414 && BLOCK_ABSTRACT_ORIGIN (scope)
415 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (scope)) == FUNCTION_DECL)
417 in_ctor_dtor_block = false;
418 unused = false;
421 for (t = &BLOCK_VARS (scope); *t; t = next)
423 next = &DECL_CHAIN (*t);
425 /* Debug info of nested function refers to the block of the
426 function. We might stil call it even if all statements
427 of function it was nested into was elliminated.
429 TODO: We can actually look into cgraph to see if function
430 will be output to file. */
431 if (TREE_CODE (*t) == FUNCTION_DECL)
432 unused = false;
434 /* If a decl has a value expr, we need to instantiate it
435 regardless of debug info generation, to avoid codegen
436 differences in memory overlap tests. update_equiv_regs() may
437 indirectly call validate_equiv_mem() to test whether a
438 SET_DEST overlaps with others, and if the value expr changes
439 by virtual register instantiation, we may get end up with
440 different results. */
441 else if (TREE_CODE (*t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*t))
442 unused = false;
444 /* Remove everything we don't generate debug info for. */
445 else if (DECL_IGNORED_P (*t))
447 *t = DECL_CHAIN (*t);
448 next = t;
451 /* When we are outputting debug info, we usually want to output
452 info about optimized-out variables in the scope blocks.
453 Exception are the scope blocks not containing any instructions
454 at all so user can't get into the scopes at first place. */
455 else if (is_used_p (*t))
456 unused = false;
457 else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
458 /* For labels that are still used in the IL, the decision to
459 preserve them must not depend DEBUG_INFO_LEVEL, otherwise we
460 risk having different ordering in debug vs. non-debug builds
461 during inlining or versioning.
462 A label appearing here (we have already checked DECL_IGNORED_P)
463 should not be used in the IL unless it has been explicitly used
464 before, so we use TREE_USED as an approximation. */
465 /* In principle, we should do the same here as for the debug case
466 below, however, when debugging, there might be additional nested
467 levels that keep an upper level with a label live, so we have to
468 force this block to be considered used, too. */
469 unused = false;
471 /* When we are not doing full debug info, we however can keep around
472 only the used variables for cfgexpand's memory packing saving quite
473 a lot of memory.
475 For sake of -g3, we keep around those vars but we don't count this as
476 use of block, so innermost block with no used vars and no instructions
477 can be considered dead. We only want to keep around blocks user can
478 breakpoint into and ask about value of optimized out variables.
480 Similarly we need to keep around types at least until all
481 variables of all nested blocks are gone. We track no
482 information on whether given type is used or not, so we have
483 to keep them even when not emitting debug information,
484 otherwise we may end up remapping variables and their (local)
485 types in different orders depending on whether debug
486 information is being generated. */
488 else if (TREE_CODE (*t) == TYPE_DECL
489 || debug_info_level == DINFO_LEVEL_NORMAL
490 || debug_info_level == DINFO_LEVEL_VERBOSE)
492 else
494 *t = DECL_CHAIN (*t);
495 next = t;
499 for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
500 if (remove_unused_scope_block_p (*t, in_ctor_dtor_block))
502 if (BLOCK_SUBBLOCKS (*t))
504 tree next = BLOCK_CHAIN (*t);
505 tree supercontext = BLOCK_SUPERCONTEXT (*t);
507 *t = BLOCK_SUBBLOCKS (*t);
508 while (BLOCK_CHAIN (*t))
510 BLOCK_SUPERCONTEXT (*t) = supercontext;
511 t = &BLOCK_CHAIN (*t);
513 BLOCK_CHAIN (*t) = next;
514 BLOCK_SUPERCONTEXT (*t) = supercontext;
515 t = &BLOCK_CHAIN (*t);
516 nsubblocks ++;
518 else
519 *t = BLOCK_CHAIN (*t);
521 else
523 t = &BLOCK_CHAIN (*t);
524 nsubblocks ++;
528 if (!unused)
530 /* Outer scope is always used. */
531 else if (!BLOCK_SUPERCONTEXT (scope)
532 || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
533 unused = false;
534 /* Innermost blocks with no live variables nor statements can be always
535 eliminated. */
536 else if (!nsubblocks)
538 /* When not generating debug info we can eliminate info on unused
539 variables. */
540 else if (!flag_auto_profile && debug_info_level == DINFO_LEVEL_NONE)
542 /* Even for -g0 don't prune outer scopes from artificial
543 functions, otherwise diagnostics using tree_nonartificial_location
544 will not be emitted properly. */
545 if (inlined_function_outer_scope_p (scope))
547 tree ao = scope;
549 while (ao
550 && TREE_CODE (ao) == BLOCK
551 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
552 ao = BLOCK_ABSTRACT_ORIGIN (ao);
553 if (ao
554 && TREE_CODE (ao) == FUNCTION_DECL
555 && DECL_DECLARED_INLINE_P (ao)
556 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
557 unused = false;
560 else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
561 unused = false;
562 /* See if this block is important for representation of inlined function.
563 Inlined functions are always represented by block with
564 block_ultimate_origin being set to FUNCTION_DECL and DECL_SOURCE_LOCATION
565 set... */
566 else if (inlined_function_outer_scope_p (scope))
567 unused = false;
568 else
569 /* Verfify that only blocks with source location set
570 are entry points to the inlined functions. */
571 gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
572 == UNKNOWN_LOCATION);
574 TREE_USED (scope) = !unused;
575 return unused;
578 /* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
579 eliminated during the tree->rtl conversion process. */
581 static inline void
582 mark_all_vars_used (tree *expr_p)
584 walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
587 /* Helper function for clear_unused_block_pointer, called via walk_tree. */
589 static tree
590 clear_unused_block_pointer_1 (tree *tp, int *, void *)
592 if (EXPR_P (*tp) && TREE_BLOCK (*tp)
593 && !TREE_USED (TREE_BLOCK (*tp)))
594 TREE_SET_BLOCK (*tp, NULL);
595 return NULL_TREE;
598 /* Set all block pointer in debug or clobber stmt to NULL if the block
599 is unused, so that they will not be streamed out. */
601 static void
602 clear_unused_block_pointer (void)
604 basic_block bb;
605 gimple_stmt_iterator gsi;
607 FOR_EACH_BB_FN (bb, cfun)
608 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
610 unsigned i;
611 tree b;
612 gimple *stmt = gsi_stmt (gsi);
614 if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
615 continue;
616 b = gimple_block (stmt);
617 if (b && !TREE_USED (b))
618 gimple_set_block (stmt, NULL);
619 for (i = 0; i < gimple_num_ops (stmt); i++)
620 walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
621 NULL, NULL);
625 /* Dump scope blocks starting at SCOPE to FILE. INDENT is the
626 indentation level and FLAGS is as in print_generic_expr. */
628 static void
629 dump_scope_block (FILE *file, int indent, tree scope, int flags)
631 tree var, t;
632 unsigned int i;
634 fprintf (file, "\n%*s{ Scope block #%i%s%s",indent, "" , BLOCK_NUMBER (scope),
635 TREE_USED (scope) ? "" : " (unused)",
636 BLOCK_ABSTRACT (scope) ? " (abstract)": "");
637 if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
639 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
640 fprintf (file, " %s:%i", s.file, s.line);
642 if (BLOCK_ABSTRACT_ORIGIN (scope))
644 tree origin = block_ultimate_origin (scope);
645 if (origin)
647 fprintf (file, " Originating from :");
648 if (DECL_P (origin))
649 print_generic_decl (file, origin, flags);
650 else
651 fprintf (file, "#%i", BLOCK_NUMBER (origin));
654 fprintf (file, " \n");
655 for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
657 fprintf (file, "%*s", indent, "");
658 print_generic_decl (file, var, flags);
659 fprintf (file, "\n");
661 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
663 fprintf (file, "%*s",indent, "");
664 print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
665 flags);
666 fprintf (file, " (nonlocalized)\n");
668 for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
669 dump_scope_block (file, indent + 2, t, flags);
670 fprintf (file, "\n%*s}\n",indent, "");
673 /* Dump the tree of lexical scopes starting at SCOPE to stderr. FLAGS
674 is as in print_generic_expr. */
676 DEBUG_FUNCTION void
677 debug_scope_block (tree scope, int flags)
679 dump_scope_block (stderr, 0, scope, flags);
683 /* Dump the tree of lexical scopes of current_function_decl to FILE.
684 FLAGS is as in print_generic_expr. */
686 void
687 dump_scope_blocks (FILE *file, int flags)
689 dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
693 /* Dump the tree of lexical scopes of current_function_decl to stderr.
694 FLAGS is as in print_generic_expr. */
696 DEBUG_FUNCTION void
697 debug_scope_blocks (int flags)
699 dump_scope_blocks (stderr, flags);
702 /* Remove local variables that are not referenced in the IL. */
704 void
705 remove_unused_locals (void)
707 basic_block bb;
708 tree var;
709 unsigned srcidx, dstidx, num;
710 bool have_local_clobbers = false;
712 /* Removing declarations from lexical blocks when not optimizing is
713 not only a waste of time, it actually causes differences in stack
714 layout. */
715 if (!optimize)
716 return;
718 timevar_push (TV_REMOVE_UNUSED);
720 mark_scope_block_unused (DECL_INITIAL (current_function_decl));
722 usedvars = BITMAP_ALLOC (NULL);
724 /* Walk the CFG marking all referenced symbols. */
725 FOR_EACH_BB_FN (bb, cfun)
727 gimple_stmt_iterator gsi;
728 size_t i;
729 edge_iterator ei;
730 edge e;
732 /* Walk the statements. */
733 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
735 gimple *stmt = gsi_stmt (gsi);
736 tree b = gimple_block (stmt);
738 if (is_gimple_debug (stmt))
739 continue;
741 if (gimple_clobber_p (stmt))
743 have_local_clobbers = true;
744 continue;
747 if (b)
748 TREE_USED (b) = true;
750 for (i = 0; i < gimple_num_ops (stmt); i++)
751 mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
754 for (gphi_iterator gpi = gsi_start_phis (bb);
755 !gsi_end_p (gpi);
756 gsi_next (&gpi))
758 use_operand_p arg_p;
759 ssa_op_iter i;
760 tree def;
761 gphi *phi = gpi.phi ();
763 if (virtual_operand_p (gimple_phi_result (phi)))
764 continue;
766 def = gimple_phi_result (phi);
767 mark_all_vars_used (&def);
769 FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
771 tree arg = USE_FROM_PTR (arg_p);
772 int index = PHI_ARG_INDEX_FROM_USE (arg_p);
773 tree block =
774 LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
775 if (block != NULL)
776 TREE_USED (block) = true;
777 mark_all_vars_used (&arg);
781 FOR_EACH_EDGE (e, ei, bb->succs)
782 if (LOCATION_BLOCK (e->goto_locus) != NULL)
783 TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
786 /* We do a two-pass approach about the out-of-scope clobbers. We want
787 to remove them if they are the only references to a local variable,
788 but we want to retain them when there's any other. So the first pass
789 ignores them, and the second pass (if there were any) tries to remove
790 them. */
791 if (have_local_clobbers)
792 FOR_EACH_BB_FN (bb, cfun)
794 gimple_stmt_iterator gsi;
796 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
798 gimple *stmt = gsi_stmt (gsi);
799 tree b = gimple_block (stmt);
801 if (gimple_clobber_p (stmt))
803 tree lhs = gimple_assign_lhs (stmt);
804 tree base = get_base_address (lhs);
805 /* Remove clobbers referencing unused vars, or clobbers
806 with MEM_REF lhs referencing uninitialized pointers. */
807 if ((TREE_CODE (base) == VAR_DECL && !is_used_p (base))
808 || (TREE_CODE (lhs) == MEM_REF
809 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
810 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))
811 && (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (lhs, 0)))
812 != PARM_DECL)))
814 unlink_stmt_vdef (stmt);
815 gsi_remove (&gsi, true);
816 release_defs (stmt);
817 continue;
819 if (b)
820 TREE_USED (b) = true;
822 gsi_next (&gsi);
826 if (cfun->has_simduid_loops)
828 struct loop *loop;
829 FOR_EACH_LOOP (loop, 0)
830 if (loop->simduid && !is_used_p (loop->simduid))
831 loop->simduid = NULL_TREE;
834 cfun->has_local_explicit_reg_vars = false;
836 /* Remove unmarked local and global vars from local_decls. */
837 num = vec_safe_length (cfun->local_decls);
838 for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
840 var = (*cfun->local_decls)[srcidx];
841 if (TREE_CODE (var) == VAR_DECL)
843 if (!is_used_p (var))
845 tree def;
846 if (cfun->nonlocal_goto_save_area
847 && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
848 cfun->nonlocal_goto_save_area = NULL;
849 /* Release any default def associated with var. */
850 if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
852 set_ssa_default_def (cfun, var, NULL_TREE);
853 release_ssa_name (def);
855 continue;
858 if (TREE_CODE (var) == VAR_DECL
859 && DECL_HARD_REGISTER (var)
860 && !is_global_var (var))
861 cfun->has_local_explicit_reg_vars = true;
863 if (srcidx != dstidx)
864 (*cfun->local_decls)[dstidx] = var;
865 dstidx++;
867 if (dstidx != num)
869 statistics_counter_event (cfun, "unused VAR_DECLs removed", num - dstidx);
870 cfun->local_decls->truncate (dstidx);
873 remove_unused_scope_block_p (DECL_INITIAL (current_function_decl), false);
874 clear_unused_block_pointer ();
876 BITMAP_FREE (usedvars);
878 if (dump_file && (dump_flags & TDF_DETAILS))
880 fprintf (dump_file, "Scope blocks after cleanups:\n");
881 dump_scope_blocks (dump_file, dump_flags);
884 timevar_pop (TV_REMOVE_UNUSED);
887 /* Allocate and return a new live range information object base on MAP. */
889 static tree_live_info_p
890 new_tree_live_info (var_map map)
892 tree_live_info_p live;
893 basic_block bb;
895 live = XNEW (struct tree_live_info_d);
896 live->map = map;
897 live->num_blocks = last_basic_block_for_fn (cfun);
899 bitmap_obstack_initialize (&live->livein_obstack);
900 bitmap_obstack_initialize (&live->liveout_obstack);
901 live->livein = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
902 FOR_EACH_BB_FN (bb, cfun)
903 bitmap_initialize (&live->livein[bb->index], &live->livein_obstack);
905 live->liveout = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
906 FOR_EACH_BB_FN (bb, cfun)
907 bitmap_initialize (&live->liveout[bb->index], &live->liveout_obstack);
909 live->work_stack = XNEWVEC (int, last_basic_block_for_fn (cfun));
910 live->stack_top = live->work_stack;
912 live->global = BITMAP_ALLOC (NULL);
913 return live;
917 /* Free storage for live range info object LIVE. */
919 void
920 delete_tree_live_info (tree_live_info_p live)
922 if (live->livein)
924 bitmap_obstack_release (&live->livein_obstack);
925 free (live->livein);
927 if (live->liveout)
929 bitmap_obstack_release (&live->liveout_obstack);
930 free (live->liveout);
932 BITMAP_FREE (live->global);
933 free (live->work_stack);
934 free (live);
938 /* Visit basic block BB and propagate any required live on entry bits from
939 LIVE into the predecessors. VISITED is the bitmap of visited blocks.
940 TMP is a temporary work bitmap which is passed in to avoid reallocating
941 it each time. */
943 static void
944 loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited)
946 edge e;
947 bool change;
948 edge_iterator ei;
949 basic_block pred_bb;
950 bitmap loe;
952 gcc_checking_assert (!bitmap_bit_p (visited, bb->index));
953 bitmap_set_bit (visited, bb->index);
955 loe = live_on_entry (live, bb);
957 FOR_EACH_EDGE (e, ei, bb->preds)
959 pred_bb = e->src;
960 if (pred_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
961 continue;
962 /* Variables live-on-entry from BB that aren't defined in the
963 predecessor block. This should be the live on entry vars to pred.
964 Note that liveout is the DEFs in a block while live on entry is
965 being calculated.
966 Add these bits to live-on-entry for the pred. if there are any
967 changes, and pred_bb has been visited already, add it to the
968 revisit stack. */
969 change = bitmap_ior_and_compl_into (live_on_entry (live, pred_bb),
970 loe, &live->liveout[pred_bb->index]);
971 if (change
972 && bitmap_bit_p (visited, pred_bb->index))
974 bitmap_clear_bit (visited, pred_bb->index);
975 *(live->stack_top)++ = pred_bb->index;
981 /* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
982 of all the variables. */
984 static void
985 live_worklist (tree_live_info_p live)
987 unsigned b;
988 basic_block bb;
989 sbitmap visited = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
991 bitmap_clear (visited);
993 /* Visit all the blocks in reverse order and propagate live on entry values
994 into the predecessors blocks. */
995 FOR_EACH_BB_REVERSE_FN (bb, cfun)
996 loe_visit_block (live, bb, visited);
998 /* Process any blocks which require further iteration. */
999 while (live->stack_top != live->work_stack)
1001 b = *--(live->stack_top);
1002 loe_visit_block (live, BASIC_BLOCK_FOR_FN (cfun, b), visited);
1005 sbitmap_free (visited);
1009 /* Calculate the initial live on entry vector for SSA_NAME using immediate_use
1010 links. Set the live on entry fields in LIVE. Def's are marked temporarily
1011 in the liveout vector. */
1013 static void
1014 set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
1016 int p;
1017 gimple *stmt;
1018 use_operand_p use;
1019 basic_block def_bb = NULL;
1020 imm_use_iterator imm_iter;
1021 bool global = false;
1023 p = var_to_partition (live->map, ssa_name);
1024 if (p == NO_PARTITION)
1025 return;
1027 stmt = SSA_NAME_DEF_STMT (ssa_name);
1028 if (stmt)
1030 def_bb = gimple_bb (stmt);
1031 /* Mark defs in liveout bitmap temporarily. */
1032 if (def_bb)
1033 bitmap_set_bit (&live->liveout[def_bb->index], p);
1035 else
1036 def_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1038 /* An undefined local variable does not need to be very alive. */
1039 if (ssa_undefined_value_p (ssa_name, false))
1040 return;
1042 /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
1043 add it to the list of live on entry blocks. */
1044 FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
1046 gimple *use_stmt = USE_STMT (use);
1047 basic_block add_block = NULL;
1049 if (gimple_code (use_stmt) == GIMPLE_PHI)
1051 /* Uses in PHI's are considered to be live at exit of the SRC block
1052 as this is where a copy would be inserted. Check to see if it is
1053 defined in that block, or whether its live on entry. */
1054 int index = PHI_ARG_INDEX_FROM_USE (use);
1055 edge e = gimple_phi_arg_edge (as_a <gphi *> (use_stmt), index);
1056 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1058 if (e->src != def_bb)
1059 add_block = e->src;
1062 else if (is_gimple_debug (use_stmt))
1063 continue;
1064 else
1066 /* If its not defined in this block, its live on entry. */
1067 basic_block use_bb = gimple_bb (use_stmt);
1068 if (use_bb != def_bb)
1069 add_block = use_bb;
1072 /* If there was a live on entry use, set the bit. */
1073 if (add_block)
1075 global = true;
1076 bitmap_set_bit (&live->livein[add_block->index], p);
1080 /* If SSA_NAME is live on entry to at least one block, fill in all the live
1081 on entry blocks between the def and all the uses. */
1082 if (global)
1083 bitmap_set_bit (live->global, p);
1087 /* Calculate the live on exit vectors based on the entry info in LIVEINFO. */
1089 static void
1090 calculate_live_on_exit (tree_live_info_p liveinfo)
1092 basic_block bb;
1093 edge e;
1094 edge_iterator ei;
1096 /* live on entry calculations used liveout vectors for defs, clear them. */
1097 FOR_EACH_BB_FN (bb, cfun)
1098 bitmap_clear (&liveinfo->liveout[bb->index]);
1100 /* Set all the live-on-exit bits for uses in PHIs. */
1101 FOR_EACH_BB_FN (bb, cfun)
1103 gphi_iterator gsi;
1104 size_t i;
1106 /* Mark the PHI arguments which are live on exit to the pred block. */
1107 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1109 gphi *phi = gsi.phi ();
1110 for (i = 0; i < gimple_phi_num_args (phi); i++)
1112 tree t = PHI_ARG_DEF (phi, i);
1113 int p;
1115 if (TREE_CODE (t) != SSA_NAME)
1116 continue;
1118 p = var_to_partition (liveinfo->map, t);
1119 if (p == NO_PARTITION)
1120 continue;
1121 e = gimple_phi_arg_edge (phi, i);
1122 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1123 bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
1127 /* Add each successors live on entry to this bock live on exit. */
1128 FOR_EACH_EDGE (e, ei, bb->succs)
1129 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1130 bitmap_ior_into (&liveinfo->liveout[bb->index],
1131 live_on_entry (liveinfo, e->dest));
1136 /* Given partition map MAP, calculate all the live on entry bitmaps for
1137 each partition. Return a new live info object. */
1139 tree_live_info_p
1140 calculate_live_ranges (var_map map, bool want_livein)
1142 tree var;
1143 unsigned i;
1144 tree_live_info_p live;
1146 live = new_tree_live_info (map);
1147 for (i = 0; i < num_var_partitions (map); i++)
1149 var = partition_to_var (map, i);
1150 if (var != NULL_TREE)
1151 set_var_live_on_entry (var, live);
1154 live_worklist (live);
1156 #ifdef ENABLE_CHECKING
1157 verify_live_on_entry (live);
1158 #endif
1160 calculate_live_on_exit (live);
1162 if (!want_livein)
1164 bitmap_obstack_release (&live->livein_obstack);
1165 free (live->livein);
1166 live->livein = NULL;
1169 return live;
1173 /* Output partition map MAP to file F. */
1175 void
1176 dump_var_map (FILE *f, var_map map)
1178 int t;
1179 unsigned x, y;
1180 int p;
1182 fprintf (f, "\nPartition map \n\n");
1184 for (x = 0; x < map->num_partitions; x++)
1186 if (map->view_to_partition != NULL)
1187 p = map->view_to_partition[x];
1188 else
1189 p = x;
1191 if (ssa_name (p) == NULL_TREE
1192 || virtual_operand_p (ssa_name (p)))
1193 continue;
1195 t = 0;
1196 for (y = 1; y < num_ssa_names; y++)
1198 p = partition_find (map->var_partition, y);
1199 if (map->partition_to_view)
1200 p = map->partition_to_view[p];
1201 if (p == (int)x)
1203 if (t++ == 0)
1205 fprintf (f, "Partition %d (", x);
1206 print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
1207 fprintf (f, " - ");
1209 fprintf (f, "%d ", y);
1212 if (t != 0)
1213 fprintf (f, ")\n");
1215 fprintf (f, "\n");
1219 /* Generic dump for the above. */
1221 DEBUG_FUNCTION void
1222 debug (_var_map &ref)
1224 dump_var_map (stderr, &ref);
1227 DEBUG_FUNCTION void
1228 debug (_var_map *ptr)
1230 if (ptr)
1231 debug (*ptr);
1232 else
1233 fprintf (stderr, "<nil>\n");
1237 /* Output live range info LIVE to file F, controlled by FLAG. */
1239 void
1240 dump_live_info (FILE *f, tree_live_info_p live, int flag)
1242 basic_block bb;
1243 unsigned i;
1244 var_map map = live->map;
1245 bitmap_iterator bi;
1247 if ((flag & LIVEDUMP_ENTRY) && live->livein)
1249 FOR_EACH_BB_FN (bb, cfun)
1251 fprintf (f, "\nLive on entry to BB%d : ", bb->index);
1252 EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
1254 print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
1255 fprintf (f, " ");
1257 fprintf (f, "\n");
1261 if ((flag & LIVEDUMP_EXIT) && live->liveout)
1263 FOR_EACH_BB_FN (bb, cfun)
1265 fprintf (f, "\nLive on exit from BB%d : ", bb->index);
1266 EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
1268 print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
1269 fprintf (f, " ");
1271 fprintf (f, "\n");
1277 /* Generic dump for the above. */
1279 DEBUG_FUNCTION void
1280 debug (tree_live_info_d &ref)
1282 dump_live_info (stderr, &ref, 0);
1285 DEBUG_FUNCTION void
1286 debug (tree_live_info_d *ptr)
1288 if (ptr)
1289 debug (*ptr);
1290 else
1291 fprintf (stderr, "<nil>\n");
1295 #ifdef ENABLE_CHECKING
1296 /* Verify that SSA_VAR is a non-virtual SSA_NAME. */
1298 void
1299 register_ssa_partition_check (tree ssa_var)
1301 gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
1302 if (virtual_operand_p (ssa_var))
1304 fprintf (stderr, "Illegally registering a virtual SSA name :");
1305 print_generic_expr (stderr, ssa_var, TDF_SLIM);
1306 fprintf (stderr, " in the SSA->Normal phase.\n");
1307 internal_error ("SSA corruption");
1312 /* Verify that the info in LIVE matches the current cfg. */
1314 static void
1315 verify_live_on_entry (tree_live_info_p live)
1317 unsigned i;
1318 tree var;
1319 gimple *stmt;
1320 basic_block bb;
1321 edge e;
1322 int num;
1323 edge_iterator ei;
1324 var_map map = live->map;
1326 /* Check for live on entry partitions and report those with a DEF in
1327 the program. This will typically mean an optimization has done
1328 something wrong. */
1329 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1330 num = 0;
1331 FOR_EACH_EDGE (e, ei, bb->succs)
1333 int entry_block = e->dest->index;
1334 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1335 continue;
1336 for (i = 0; i < (unsigned)num_var_partitions (map); i++)
1338 basic_block tmp;
1339 tree d = NULL_TREE;
1340 bitmap loe;
1341 var = partition_to_var (map, i);
1342 stmt = SSA_NAME_DEF_STMT (var);
1343 tmp = gimple_bb (stmt);
1344 if (SSA_NAME_VAR (var))
1345 d = ssa_default_def (cfun, SSA_NAME_VAR (var));
1347 loe = live_on_entry (live, e->dest);
1348 if (loe && bitmap_bit_p (loe, i))
1350 if (!gimple_nop_p (stmt))
1352 num++;
1353 print_generic_expr (stderr, var, TDF_SLIM);
1354 fprintf (stderr, " is defined ");
1355 if (tmp)
1356 fprintf (stderr, " in BB%d, ", tmp->index);
1357 fprintf (stderr, "by:\n");
1358 print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
1359 fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
1360 entry_block);
1361 fprintf (stderr, " So it appears to have multiple defs.\n");
1363 else
1365 if (d != var)
1367 num++;
1368 print_generic_expr (stderr, var, TDF_SLIM);
1369 fprintf (stderr, " is live-on-entry to BB%d ",
1370 entry_block);
1371 if (d)
1373 fprintf (stderr, " but is not the default def of ");
1374 print_generic_expr (stderr, d, TDF_SLIM);
1375 fprintf (stderr, "\n");
1377 else
1378 fprintf (stderr, " and there is no default def.\n");
1382 else
1383 if (d == var)
1385 /* An undefined local variable does not need to be very
1386 alive. */
1387 if (ssa_undefined_value_p (var, false))
1388 continue;
1390 /* The only way this var shouldn't be marked live on entry is
1391 if it occurs in a PHI argument of the block. */
1392 size_t z;
1393 bool ok = false;
1394 gphi_iterator gsi;
1395 for (gsi = gsi_start_phis (e->dest);
1396 !gsi_end_p (gsi) && !ok;
1397 gsi_next (&gsi))
1399 gphi *phi = gsi.phi ();
1400 for (z = 0; z < gimple_phi_num_args (phi); z++)
1401 if (var == gimple_phi_arg_def (phi, z))
1403 ok = true;
1404 break;
1407 if (ok)
1408 continue;
1409 /* Expand adds unused default defs for PARM_DECLs and
1410 RESULT_DECLs. They're ok. */
1411 if (has_zero_uses (var)
1412 && SSA_NAME_VAR (var)
1413 && !VAR_P (SSA_NAME_VAR (var)))
1414 continue;
1415 num++;
1416 print_generic_expr (stderr, var, TDF_SLIM);
1417 fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
1418 entry_block);
1419 fprintf (stderr, "but it is a default def so it should be.\n");
1423 gcc_assert (num <= 0);
1425 #endif