/* Liveness for SSA trees.
   Copyright (C) 2003-2013 Free Software Foundation, Inc.
   Contributed by Andrew MacLeod <amacleod@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "gimple-pretty-print.h"
27 #include "bitmap.h"
28 #include "tree-flow.h"
29 #include "timevar.h"
30 #include "dumpfile.h"
31 #include "tree-ssa-live.h"
32 #include "diagnostic-core.h"
33 #include "debug.h"
34 #include "flags.h"
35 #include "gimple.h"
#ifdef ENABLE_CHECKING
static void verify_live_on_entry (tree_live_info_p);
#endif
/* VARMAP maintains a mapping from SSA version number to real variables.

   All SSA_NAMEs are divided into partitions.  Initially each ssa_name is the
   only member of its own partition.  Coalescing will attempt to group any
   ssa_names which occur in a copy or in a PHI node into the same partition.

   At the end of out-of-ssa, each partition becomes a "real" variable and is
   rewritten as a compiler variable.

   The var_map data structure is used to manage these partitions.  It allows
   partitions to be combined, and determines which partition belongs to what
   ssa_name or variable, and vice versa.  */
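/* As a rough orientation (an editorial sketch, not code from this file),
   a pass that consumes this API typically does something like:

     var_map map = init_var_map (num_ssa_names);
     ...decide which SSA names should share a partition...
     if (var_union (map, name1, name2) == NO_PARTITION)
       ...the two names could not be combined...
     partition_view_normal (map, true);
     int p = var_to_partition (map, name1);
     tree repr = partition_to_var (map, p);
     ...
     delete_var_map (map);

   name1 and name2 stand for arbitrary non-virtual SSA_NAMEs; everything
   else is the API defined below and in tree-ssa-live.h.  */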
/* This routine will initialize the basevar fields of MAP.  */

static void
var_map_base_init (var_map map)
{
  int x, num_part;
  tree var;
  htab_t tree_to_index;
  struct tree_int_map *m, *mapstorage;

  num_part = num_var_partitions (map);
  tree_to_index = htab_create (num_part, tree_map_base_hash,
                               tree_int_map_eq, NULL);
  /* We can have at most num_part entries in the hash tables, so it's
     enough to allocate so many map elements once, saving some malloc
     calls.  */
  mapstorage = m = XNEWVEC (struct tree_int_map, num_part);

  /* If a base table already exists, clear it, otherwise create it.  */
  free (map->partition_to_base_index);
  map->partition_to_base_index = (int *) xmalloc (sizeof (int) * num_part);

  /* Build the base variable list, and point partitions at their bases.  */
  for (x = 0; x < num_part; x++)
    {
      struct tree_int_map **slot;
      unsigned baseindex;
      var = partition_to_var (map, x);
      if (SSA_NAME_VAR (var))
        m->base.from = SSA_NAME_VAR (var);
      else
        /* This restricts what anonymous SSA names we can coalesce
           as it restricts the sets we compute conflicts for.
           Using TREE_TYPE to generate sets is the easiest, as
           type equivalency also holds for SSA names with the same
           underlying decl.

           Check gimple_can_coalesce_p when changing this code.  */
        m->base.from = (TYPE_CANONICAL (TREE_TYPE (var))
                        ? TYPE_CANONICAL (TREE_TYPE (var))
                        : TREE_TYPE (var));
      /* If base variable hasn't been seen, set it up.  */
      slot = (struct tree_int_map **) htab_find_slot (tree_to_index,
                                                      m, INSERT);
      if (!*slot)
        {
          baseindex = m - mapstorage;
          m->to = baseindex;
          *slot = m;
          m++;
        }
      else
        baseindex = (*slot)->to;
      map->partition_to_base_index[x] = baseindex;
    }

  map->num_basevars = m - mapstorage;

  free (mapstorage);
  htab_delete (tree_to_index);
}
/* Remove the base table in MAP.  */

static void
var_map_base_fini (var_map map)
{
  /* Free the basevar info if it is present.  */
  if (map->partition_to_base_index != NULL)
    {
      free (map->partition_to_base_index);
      map->partition_to_base_index = NULL;
      map->num_basevars = 0;
    }
}
/* Create a variable partition map of SIZE, initialize and return it.  */

var_map
init_var_map (int size)
{
  var_map map;

  map = (var_map) xmalloc (sizeof (struct _var_map));
  map->var_partition = partition_new (size);

  map->partition_to_view = NULL;
  map->view_to_partition = NULL;
  map->num_partitions = size;
  map->partition_size = size;
  map->num_basevars = 0;
  map->partition_to_base_index = NULL;
  return map;
}
/* Free memory associated with MAP.  */

void
delete_var_map (var_map map)
{
  var_map_base_fini (map);
  partition_delete (map->var_partition);
  free (map->partition_to_view);
  free (map->view_to_partition);
  free (map);
}
/* This function will combine the partitions in MAP for VAR1 and VAR2.  It
   returns the partition which represents the new partition.  If the two
   partitions cannot be combined, NO_PARTITION is returned.  */

int
var_union (var_map map, tree var1, tree var2)
{
  int p1, p2, p3;

  gcc_assert (TREE_CODE (var1) == SSA_NAME);
  gcc_assert (TREE_CODE (var2) == SSA_NAME);

  /* This is independent of partition_to_view.  If partition_to_view is
     on, then whichever one of these partitions is absorbed will never have a
     dereference into the partition_to_view array any more.  */

  p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
  p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));

  gcc_assert (p1 != NO_PARTITION);
  gcc_assert (p2 != NO_PARTITION);

  if (p1 == p2)
    p3 = p1;
  else
    p3 = partition_union (map->var_partition, p1, p2);

  if (map->partition_to_view)
    p3 = map->partition_to_view[p3];

  return p3;
}
/* Compress the partition numbers in MAP such that they fall in the range
   0..(num_partitions-1) instead of wherever they turned out during
   the partitioning exercise.  This removes any references to unused
   partitions, thereby allowing bitmaps and other vectors to be much
   denser.

   This is implemented such that compaction doesn't affect partitioning.
   I.e., once partitions are created and possibly merged, running one
   or more different kinds of compaction will not affect the partitions
   themselves.  Their index might change, but all the same variables will
   still be members of the same partition group.  This allows work on reduced
   sets, and no loss of information when a larger set is later desired.

   In particular, coalescing can work on partitions which have 2 or more
   definitions, and then 'recompact' later to include all the single
   definitions for assignment to program variables.  */
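/* An editorial sketch of the 'recompact' idea above (the INTERESTING
   bitmap is hypothetical; it would hold the SSA version numbers of
   interest):

     partition_view_normal (map, false);             view of all used partitions
     ...coalesce using the compact view indices...
     partition_view_bitmap (map, interesting, true); view of a subset only
     ...
     partition_view_normal (map, false);             back to the full view

   Each call renumbers the view indices, but the underlying partition
   membership established by var_union is unchanged.  */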
/* Set MAP back to the initial state of having no partition view.  Return a
   bitmap which has a bit set for each partition number which is in use in the
   varmap.  */

static bitmap
partition_view_init (var_map map)
{
  bitmap used;
  int tmp;
  unsigned int x;

  used = BITMAP_ALLOC (NULL);

  /* Already in a view?  Abandon the old one.  */
  if (map->partition_to_view)
    {
      free (map->partition_to_view);
      map->partition_to_view = NULL;
    }
  if (map->view_to_partition)
    {
      free (map->view_to_partition);
      map->view_to_partition = NULL;
    }

  /* Find out which partitions are actually referenced.  */
  for (x = 0; x < map->partition_size; x++)
    {
      tmp = partition_find (map->var_partition, x);
      if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
          && (!has_zero_uses (ssa_name (tmp))
              || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))))
        bitmap_set_bit (used, tmp);
    }

  map->num_partitions = map->partition_size;
  return used;
}
/* This routine will finalize the view data for MAP based on the partitions
   set in SELECTED.  This is either the same bitmap returned from
   partition_view_init, or a trimmed down version if some of those partitions
   were not desired in this view.  SELECTED is freed before returning.  */

static void
partition_view_fini (var_map map, bitmap selected)
{
  bitmap_iterator bi;
  unsigned count, i, x, limit;

  gcc_assert (selected);

  count = bitmap_count_bits (selected);
  limit = map->partition_size;

  /* If it's a one-to-one ratio, we don't need any view compaction.  */
  if (count < limit)
    {
      map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
      memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
      map->view_to_partition = (int *)xmalloc (count * sizeof (int));

      i = 0;
      /* Give each selected partition an index.  */
      EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
        {
          map->partition_to_view[x] = i;
          map->view_to_partition[i] = x;
          i++;
        }
      gcc_assert (i == count);
      map->num_partitions = i;
    }

  BITMAP_FREE (selected);
}
/* Create a partition view which includes all the used partitions in MAP.  If
   WANT_BASES is true, create the base variable map as well.  */

void
partition_view_normal (var_map map, bool want_bases)
{
  bitmap used;

  used = partition_view_init (map);
  partition_view_fini (map, used);

  if (want_bases)
    var_map_base_init (map);
  else
    var_map_base_fini (map);
}
/* Create a partition view in MAP which includes just partitions which occur in
   the bitmap ONLY.  If WANT_BASES is true, create the base variable map
   as well.  */

void
partition_view_bitmap (var_map map, bitmap only, bool want_bases)
{
  bitmap used;
  bitmap new_partitions = BITMAP_ALLOC (NULL);
  unsigned x, p;
  bitmap_iterator bi;

  used = partition_view_init (map);
  EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
    {
      p = partition_find (map->var_partition, x);
      gcc_assert (bitmap_bit_p (used, p));
      bitmap_set_bit (new_partitions, p);
    }
  partition_view_fini (map, new_partitions);

  if (want_bases)
    var_map_base_init (map);
  else
    var_map_base_fini (map);
}
static bitmap usedvars;

/* Mark VAR as used, so that it'll be preserved during rtl expansion.
   Returns true if VAR wasn't marked before.  */

static inline bool
set_is_used (tree var)
{
  return bitmap_set_bit (usedvars, DECL_UID (var));
}

/* Return true if VAR is marked as used.  */

static inline bool
is_used_p (tree var)
{
  return bitmap_bit_p (usedvars, DECL_UID (var));
}
static inline void mark_all_vars_used (tree *);

/* Helper function for mark_all_vars_used, called via walk_tree.  */

static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
  tree b;

  if (TREE_CODE (t) == SSA_NAME)
    {
      *walk_subtrees = 0;
      t = SSA_NAME_VAR (t);
      if (!t)
        return NULL;
    }

  if (IS_EXPR_CODE_CLASS (c)
      && (b = TREE_BLOCK (t)) != NULL)
    TREE_USED (b) = true;

  /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
     fields do not contain vars.  */
  if (TREE_CODE (t) == TARGET_MEM_REF)
    {
      mark_all_vars_used (&TMR_BASE (t));
      mark_all_vars_used (&TMR_INDEX (t));
      mark_all_vars_used (&TMR_INDEX2 (t));
      *walk_subtrees = 0;
      return NULL;
    }

  /* Only need to mark VAR_DECLs; parameters and return results are not
     eliminated as unused.  */
  if (TREE_CODE (t) == VAR_DECL)
    {
      /* When a global var becomes used for the first time also walk its
         initializer (non global ones don't have any).  */
      if (set_is_used (t) && is_global_var (t)
          && DECL_CONTEXT (t) == current_function_decl)
        mark_all_vars_used (&DECL_INITIAL (t));
    }
  /* remove_unused_scope_block_p requires information about labels
     which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
  else if (TREE_CODE (t) == LABEL_DECL)
    /* Although the TREE_USED values that the frontend uses would be
       acceptable (albeit slightly over-conservative) for our purposes,
       init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
       must re-compute it here.  */
    TREE_USED (t) = 1;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}
/* Mark the scope block SCOPE and its subblocks unused when they can be
   possibly eliminated if dead.  */

static void
mark_scope_block_unused (tree scope)
{
  tree t;
  TREE_USED (scope) = false;
  if (!(*debug_hooks->ignore_block) (scope))
    TREE_USED (scope) = true;
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    mark_scope_block_unused (t);
}
/* Check whether the block is dead (by possibly eliminating its dead subblocks)
   and return true if so.
   A block is declared dead if:
     1) No statements are associated with it.
     2) It declares no live variables.
     3) All subblocks are dead, or there is precisely one subblock and
        the block has the same abstract origin as the outer block and
        declares no variables, so it is a pure wrapper.
   When we are not outputting full debug info, we also eliminate dead variables
   out of scope blocks to let them be recycled by GGC and to save copying work
   done by the inliner.  */
static bool
remove_unused_scope_block_p (tree scope)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  int nsubblocks = 0;

  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &DECL_CHAIN (*t);

      /* Debug info of a nested function refers to the block of the
         function.  We might still call it even if all statements
         of the function it was nested into were eliminated.

         TODO: We can actually look into cgraph to see if the function
         will be output to file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
        unused = false;

      /* If a decl has a value expr, we need to instantiate it
         regardless of debug info generation, to avoid codegen
         differences in memory overlap tests.  update_equiv_regs() may
         indirectly call validate_equiv_mem() to test whether a
         SET_DEST overlaps with others, and if the value expr changes
         by virtual register instantiation, we may end up with
         different results.  */
      else if (TREE_CODE (*t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*t))
        unused = false;

      /* Remove everything we don't generate debug info for.  */
      else if (DECL_IGNORED_P (*t))
        {
          *t = DECL_CHAIN (*t);
          next = t;
        }

      /* When we are outputting debug info, we usually want to output
         info about optimized-out variables in the scope blocks.
         Exceptions are the scope blocks not containing any instructions
         at all, so the user can't get into the scopes in the first place.  */
      else if (is_used_p (*t))
        unused = false;
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
        /* For labels that are still used in the IL, the decision to
           preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
           risk having different ordering in debug vs. non-debug builds
           during inlining or versioning.
           A label appearing here (we have already checked DECL_IGNORED_P)
           should not be used in the IL unless it has been explicitly used
           before, so we use TREE_USED as an approximation.  */
        /* In principle, we should do the same here as for the debug case
           below, however, when debugging, there might be additional nested
           levels that keep an upper level with a label live, so we have to
           force this block to be considered used, too.  */
        unused = false;

      /* When we are not doing full debug info, we can however keep around
         only the used variables for cfgexpand's memory packing, saving quite
         a lot of memory.

         For the sake of -g3, we keep around those vars but we don't count
         this as use of the block, so an innermost block with no used vars
         and no instructions can be considered dead.  We only want to keep
         around blocks the user can breakpoint into and ask about the value
         of optimized-out variables.

         Similarly we need to keep around types at least until all
         variables of all nested blocks are gone.  We track no
         information on whether a given type is used or not, so we have
         to keep them even when not emitting debug information,
         otherwise we may end up remapping variables and their (local)
         types in different orders depending on whether debug
         information is being generated.  */

      else if (TREE_CODE (*t) == TYPE_DECL
               || debug_info_level == DINFO_LEVEL_NORMAL
               || debug_info_level == DINFO_LEVEL_VERBOSE)
        ;
      else
        {
          *t = DECL_CHAIN (*t);
          next = t;
        }
    }

  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
    if (remove_unused_scope_block_p (*t))
      {
        if (BLOCK_SUBBLOCKS (*t))
          {
            tree next = BLOCK_CHAIN (*t);
            tree supercontext = BLOCK_SUPERCONTEXT (*t);

            *t = BLOCK_SUBBLOCKS (*t);
            while (BLOCK_CHAIN (*t))
              {
                BLOCK_SUPERCONTEXT (*t) = supercontext;
                t = &BLOCK_CHAIN (*t);
              }
            BLOCK_CHAIN (*t) = next;
            BLOCK_SUPERCONTEXT (*t) = supercontext;
            t = &BLOCK_CHAIN (*t);
            nsubblocks++;
          }
        else
          *t = BLOCK_CHAIN (*t);
      }
    else
      {
        t = &BLOCK_CHAIN (*t);
        nsubblocks++;
      }

  if (!unused)
    ;
  /* Outer scope is always used.  */
  else if (!BLOCK_SUPERCONTEXT (scope)
           || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
    unused = false;
  /* Innermost blocks with no live variables nor statements can always
     be eliminated.  */
  else if (!nsubblocks)
    ;
  /* For terse debug info we can eliminate info on unused variables.  */
  else if (debug_info_level == DINFO_LEVEL_NONE
           || debug_info_level == DINFO_LEVEL_TERSE)
    {
      /* Even for -g0/-g1 don't prune outer scopes from artificial
         functions, otherwise diagnostics using tree_nonartificial_location
         will not be emitted properly.  */
      if (inlined_function_outer_scope_p (scope))
        {
          tree ao = scope;

          while (ao
                 && TREE_CODE (ao) == BLOCK
                 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
            ao = BLOCK_ABSTRACT_ORIGIN (ao);
          if (ao
              && TREE_CODE (ao) == FUNCTION_DECL
              && DECL_DECLARED_INLINE_P (ao)
              && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
            unused = false;
        }
    }
  else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
    unused = false;
  /* See if this block is important for the representation of an inlined
     function.  Inlined functions are always represented by a block with
     block_ultimate_origin being set to FUNCTION_DECL and DECL_SOURCE_LOCATION
     set...  */
  else if (inlined_function_outer_scope_p (scope))
    unused = false;
  else
    /* Verify that only blocks with source location set
       are entry points to the inlined functions.  */
    gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
                == UNKNOWN_LOCATION);

  TREE_USED (scope) = !unused;
  return unused;
}
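/* For instance (an illustrative source fragment, not taken from GCC's
   testsuite):

     void f (void)
     {
       g ();
       {
         int dead_v;      never referenced anywhere
       }
     }

   The inner block has no statements associated with it, and once dead_v
   has been removed it declares no live variables and has no subblocks,
   so it satisfies the criteria above and is eliminated.  */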
/* Mark all VAR_DECLs under *EXPR_P as used, so that they won't be
   eliminated during the tree->rtl conversion process.  */

static inline void
mark_all_vars_used (tree *expr_p)
{
  walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
}
/* Helper function for clear_unused_block_pointer, called via walk_tree.  */

static tree
clear_unused_block_pointer_1 (tree *tp, int *, void *)
{
  if (EXPR_P (*tp) && TREE_BLOCK (*tp)
      && !TREE_USED (TREE_BLOCK (*tp)))
    TREE_SET_BLOCK (*tp, NULL);
  if (TREE_CODE (*tp) == VAR_DECL && DECL_DEBUG_EXPR_IS_FROM (*tp))
    {
      tree debug_expr = DECL_DEBUG_EXPR (*tp);
      walk_tree (&debug_expr, clear_unused_block_pointer_1, NULL, NULL);
    }
  return NULL_TREE;
}
/* Set all block pointers in debug stmts to NULL if the block is unused,
   so that they will not be streamed out.  */

static void
clear_unused_block_pointer (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  tree t;
  unsigned i;

  FOR_EACH_LOCAL_DECL (cfun, i, t)
    if (TREE_CODE (t) == VAR_DECL && DECL_DEBUG_EXPR_IS_FROM (t))
      {
        tree debug_expr = DECL_DEBUG_EXPR (t);
        walk_tree (&debug_expr, clear_unused_block_pointer_1, NULL, NULL);
      }

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        unsigned i;
        tree b;
        gimple stmt = gsi_stmt (gsi);

        if (!is_gimple_debug (stmt))
          continue;
        b = gimple_block (stmt);
        if (b && !TREE_USED (b))
          gimple_set_block (stmt, NULL);
        for (i = 0; i < gimple_num_ops (stmt); i++)
          walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
                     NULL, NULL);
      }
}
/* Dump scope blocks starting at SCOPE to FILE.  INDENT is the
   indentation level and FLAGS is as in print_generic_expr.  */

static void
dump_scope_block (FILE *file, int indent, tree scope, int flags)
{
  tree var, t;
  unsigned int i;

  fprintf (file, "\n%*s{ Scope block #%i%s%s", indent, "", BLOCK_NUMBER (scope),
           TREE_USED (scope) ? "" : " (unused)",
           BLOCK_ABSTRACT (scope) ? " (abstract)": "");
  if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
        {
          fprintf (file, " Originating from :");
          if (DECL_P (origin))
            print_generic_decl (file, origin, flags);
          else
            fprintf (file, "#%i", BLOCK_NUMBER (origin));
        }
    }
  fprintf (file, " \n");
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "\n");
    }
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
                          flags);
      fprintf (file, " (nonlocalized)\n");
    }
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n", indent, "");
}
/* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
   is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_block (tree scope, int flags)
{
  dump_scope_block (stderr, 0, scope, flags);
}

/* Dump the tree of lexical scopes of current_function_decl to FILE.
   FLAGS is as in print_generic_expr.  */

void
dump_scope_blocks (FILE *file, int flags)
{
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}

/* Dump the tree of lexical scopes of current_function_decl to stderr.
   FLAGS is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_blocks (int flags)
{
  dump_scope_blocks (stderr, flags);
}
/* Remove local variables that are not referenced in the IL.  */

void
remove_unused_locals (void)
{
  basic_block bb;
  tree var;
  unsigned srcidx, dstidx, num;
  bool have_local_clobbers = false;

  /* Removing declarations from lexical blocks when not optimizing is
     not only a waste of time, it actually causes differences in stack
     layout.  */
  if (!optimize)
    return;

  timevar_push (TV_REMOVE_UNUSED);

  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  usedvars = BITMAP_ALLOC (NULL);

  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree b = gimple_block (stmt);

          if (is_gimple_debug (stmt))
            continue;

          if (gimple_clobber_p (stmt))
            {
              have_local_clobbers = true;
              continue;
            }

          if (b)
            TREE_USED (b) = true;

          for (i = 0; i < gimple_num_ops (stmt); i++)
            mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
        }

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          use_operand_p arg_p;
          ssa_op_iter i;
          tree def;
          gimple phi = gsi_stmt (gsi);

          if (virtual_operand_p (gimple_phi_result (phi)))
            continue;

          def = gimple_phi_result (phi);
          mark_all_vars_used (&def);

          FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
            {
              tree arg = USE_FROM_PTR (arg_p);
              int index = PHI_ARG_INDEX_FROM_USE (arg_p);
              tree block =
                LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
              if (block != NULL)
                TREE_USED (block) = true;
              mark_all_vars_used (&arg);
            }
        }

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (LOCATION_BLOCK (e->goto_locus) != NULL)
          TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
    }

  /* We take a two-pass approach to the out-of-scope clobbers.  We want
     to remove them if they are the only references to a local variable,
     but we want to retain them when there's any other reference.  So the
     first pass ignores them, and the second pass (if there were any)
     tries to remove them.  */
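  /* In textual GIMPLE a clobber looks roughly like this (illustrative):

       x ={v} {CLOBBER};

     It marks the end of x's lifetime.  If is_used_p (x) is false, i.e.
     the clobber is the only remaining reference to x, the pass below
     deletes the statement; otherwise the clobber is kept and its block
     is marked used like any other statement's block.  */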
  if (have_local_clobbers)
    FOR_EACH_BB (bb)
      {
        gimple_stmt_iterator gsi;

        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
          {
            gimple stmt = gsi_stmt (gsi);
            tree b = gimple_block (stmt);

            if (gimple_clobber_p (stmt))
              {
                tree lhs = gimple_assign_lhs (stmt);
                if (TREE_CODE (lhs) == VAR_DECL && !is_used_p (lhs))
                  {
                    unlink_stmt_vdef (stmt);
                    gsi_remove (&gsi, true);
                    release_defs (stmt);
                    continue;
                  }
                if (b)
                  TREE_USED (b) = true;
              }
            gsi_next (&gsi);
          }
      }

  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local and global vars from local_decls.  */
  num = vec_safe_length (cfun->local_decls);
  for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
    {
      var = (*cfun->local_decls)[srcidx];
      if (TREE_CODE (var) == VAR_DECL)
        {
          if (!is_used_p (var))
            {
              tree def;
              if (cfun->nonlocal_goto_save_area
                  && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
                cfun->nonlocal_goto_save_area = NULL;
              /* Release any default def associated with var.  */
              if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
                {
                  set_ssa_default_def (cfun, var, NULL_TREE);
                  release_ssa_name (def);
                }
              continue;
            }
        }
      if (TREE_CODE (var) == VAR_DECL
          && DECL_HARD_REGISTER (var)
          && !is_global_var (var))
        cfun->has_local_explicit_reg_vars = true;

      if (srcidx != dstidx)
        (*cfun->local_decls)[dstidx] = var;
      dstidx++;
    }
  if (dstidx != num)
    {
      statistics_counter_event (cfun, "unused VAR_DECLs removed", num - dstidx);
      cfun->local_decls->truncate (dstidx);
    }

  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl));
  clear_unused_block_pointer ();

  BITMAP_FREE (usedvars);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }

  timevar_pop (TV_REMOVE_UNUSED);
}
/* Obstack for global liveness info bitmaps.  We don't want to put these
   on the default obstack because these bitmaps can grow quite large and
   we'll hold on to all that memory until the end of the compiler run.
   As a bonus, delete_tree_live_info can destroy all the bitmaps by just
   releasing the whole obstack.  */
static bitmap_obstack liveness_bitmap_obstack;
/* Allocate and return a new live range information object based on MAP.  */

static tree_live_info_p
new_tree_live_info (var_map map)
{
  tree_live_info_p live;
  basic_block bb;

  live = XNEW (struct tree_live_info_d);
  live->map = map;
  live->num_blocks = last_basic_block;

  live->livein = XNEWVEC (bitmap_head, last_basic_block);
  FOR_EACH_BB (bb)
    bitmap_initialize (&live->livein[bb->index], &liveness_bitmap_obstack);

  live->liveout = XNEWVEC (bitmap_head, last_basic_block);
  FOR_EACH_BB (bb)
    bitmap_initialize (&live->liveout[bb->index], &liveness_bitmap_obstack);

  live->work_stack = XNEWVEC (int, last_basic_block);
  live->stack_top = live->work_stack;

  live->global = BITMAP_ALLOC (&liveness_bitmap_obstack);
  return live;
}
/* Free storage for live range info object LIVE.  */

void
delete_tree_live_info (tree_live_info_p live)
{
  bitmap_obstack_release (&liveness_bitmap_obstack);
  free (live->work_stack);
  free (live->liveout);
  free (live->livein);
  free (live);
}
/* Visit basic block BB and propagate any required live on entry bits from
   LIVE into the predecessors.  VISITED is the bitmap of visited blocks.
   TMP is a temporary work bitmap which is passed in to avoid reallocating
   it each time.  */

static void
loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited,
                 bitmap tmp)
{
  edge e;
  bool change;
  edge_iterator ei;
  basic_block pred_bb;
  bitmap loe;
  gcc_assert (!bitmap_bit_p (visited, bb->index));

  bitmap_set_bit (visited, bb->index);
  loe = live_on_entry (live, bb);

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      pred_bb = e->src;
      if (pred_bb == ENTRY_BLOCK_PTR)
        continue;
      /* TMP is variables live-on-entry from BB that aren't defined in the
         predecessor block.  This should be the live on entry vars to pred.
         Note that liveout is the DEFs in a block while live on entry is
         being calculated.  */
      bitmap_and_compl (tmp, loe, &live->liveout[pred_bb->index]);

      /* Add these bits to live-on-entry for the pred.  If there are any
         changes, and pred_bb has been visited already, add it to the
         revisit stack.  */
      change = bitmap_ior_into (live_on_entry (live, pred_bb), tmp);
      if (bitmap_bit_p (visited, pred_bb->index) && change)
        {
          bitmap_clear_bit (visited, pred_bb->index);
          *(live->stack_top)++ = pred_bb->index;
        }
    }
}
/* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
   of all the variables.  */

static void
live_worklist (tree_live_info_p live)
{
  unsigned b;
  basic_block bb;
  sbitmap visited = sbitmap_alloc (last_basic_block + 1);
  bitmap tmp = BITMAP_ALLOC (&liveness_bitmap_obstack);

  bitmap_clear (visited);

  /* Visit all the blocks in reverse order and propagate live on entry values
     into the predecessor blocks.  */
  FOR_EACH_BB_REVERSE (bb)
    loe_visit_block (live, bb, visited, tmp);

  /* Process any blocks which require further iteration.  */
  while (live->stack_top != live->work_stack)
    {
      b = *--(live->stack_top);
      loe_visit_block (live, BASIC_BLOCK (b), visited, tmp);
    }

  BITMAP_FREE (tmp);
  sbitmap_free (visited);
}
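/* An editorial worked example of the propagation above (not from the
   original sources).  Take a diamond CFG where partition P is defined
   in BB2 and used only in BB5:

            BB2     (defines P; liveout[BB2] temporarily records the def)
           /   \
         BB3   BB4
           \   /
            BB5     (uses P)

   set_var_live_on_entry (below) seeds livein[BB5] = { P }.  Because
   FOR_EACH_BB_REVERSE visits BB5 before BB3 and BB4, one pass usually
   suffices: visiting BB5 ORs P into livein[BB3] and livein[BB4], and
   visiting those blocks stops the propagation at BB2, since P is in
   liveout[BB2] (the def set) and is masked out by bitmap_and_compl.
   Any predecessor changed after it was already visited is pushed on
   the work stack and revisited.  */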
/* Calculate the initial live on entry vector for SSA_NAME using immediate_use
   links.  Set the live on entry fields in LIVE.  Defs are marked temporarily
   in the liveout vector.  */

static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
  int p;
  gimple stmt;
  use_operand_p use;
  basic_block def_bb = NULL;
  imm_use_iterator imm_iter;
  bool global = false;

  p = var_to_partition (live->map, ssa_name);
  if (p == NO_PARTITION)
    return;

  stmt = SSA_NAME_DEF_STMT (ssa_name);
  if (stmt)
    {
      def_bb = gimple_bb (stmt);
      /* Mark defs in liveout bitmap temporarily.  */
      if (def_bb)
        bitmap_set_bit (&live->liveout[def_bb->index], p);
    }
  else
    def_bb = ENTRY_BLOCK_PTR;

  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
     add it to the list of live on entry blocks.  */
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
    {
      gimple use_stmt = USE_STMT (use);
      basic_block add_block = NULL;

      if (gimple_code (use_stmt) == GIMPLE_PHI)
        {
          /* Uses in PHIs are considered to be live at exit of the SRC block
             as this is where a copy would be inserted.  Check to see if it is
             defined in that block, or whether it's live on entry.  */
          int index = PHI_ARG_INDEX_FROM_USE (use);
          edge e = gimple_phi_arg_edge (use_stmt, index);
          if (e->src != ENTRY_BLOCK_PTR)
            {
              if (e->src != def_bb)
                add_block = e->src;
            }
        }
      else if (is_gimple_debug (use_stmt))
        continue;
      else
        {
          /* If it's not defined in this block, it's live on entry.  */
          basic_block use_bb = gimple_bb (use_stmt);
          if (use_bb != def_bb)
            add_block = use_bb;
        }

      /* If there was a live on entry use, set the bit.  */
      if (add_block)
        {
          global = true;
          bitmap_set_bit (&live->livein[add_block->index], p);
        }
    }

  /* If SSA_NAME is live on entry to at least one block, fill in all the live
     on entry blocks between the def and all the uses.  */
  if (global)
    bitmap_set_bit (live->global, p);
}
/* Calculate the live on exit vectors based on the entry info in LIVEINFO.  */

void
calculate_live_on_exit (tree_live_info_p liveinfo)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* Live on entry calculations used liveout vectors for defs, clear them.  */
  FOR_EACH_BB (bb)
    bitmap_clear (&liveinfo->liveout[bb->index]);

  /* Set all the live-on-exit bits for uses in PHIs.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      size_t i;

      /* Mark the PHI arguments which are live on exit to the pred block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple phi = gsi_stmt (gsi);
          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree t = PHI_ARG_DEF (phi, i);
              int p;

              if (TREE_CODE (t) != SSA_NAME)
                continue;

              p = var_to_partition (liveinfo->map, t);
              if (p == NO_PARTITION)
                continue;
              e = gimple_phi_arg_edge (phi, i);
              if (e->src != ENTRY_BLOCK_PTR)
                bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
            }
        }

      /* Add each successor's live on entry to this block's live on exit.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->dest != EXIT_BLOCK_PTR)
          bitmap_ior_into (&liveinfo->liveout[bb->index],
                           live_on_entry (liveinfo, e->dest));
    }
}
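/* In dataflow terms the loop above computes, per block B (an editorial
   note, not from the original sources):

     liveout (B) = (union over S in succ (B) of livein (S))
                   union { partitions of PHI arguments on edges B -> S }

   PHI arguments are treated as uses at the exit of the edge's source
   block, since that is where out-of-ssa will insert its copies.  */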
/* Given partition map MAP, calculate all the live on entry bitmaps for
   each partition.  Return a new live info object.  */

tree_live_info_p
calculate_live_ranges (var_map map)
{
  tree var;
  unsigned i;
  tree_live_info_p live;

  bitmap_obstack_initialize (&liveness_bitmap_obstack);
  live = new_tree_live_info (map);
  for (i = 0; i < num_var_partitions (map); i++)
    {
      var = partition_to_var (map, i);
      if (var != NULL_TREE)
        set_var_live_on_entry (var, live);
    }

  live_worklist (live);

#ifdef ENABLE_CHECKING
  verify_live_on_entry (live);
#endif

  calculate_live_on_exit (live);
  return live;
}
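/* A minimal editorial sketch of a liveness consumer, assuming MAP was
   built and compacted as sketched earlier in this file (some_bb is a
   placeholder basic_block):

     tree_live_info_p live = calculate_live_ranges (map);
     if (dump_file && (dump_flags & TDF_DETAILS))
       dump_live_info (dump_file, live, LIVEDUMP_ALL);
     bitmap entry_bits = live_on_entry (live, some_bb);
     ...
     delete_tree_live_info (live);

   live_on_entry and the LIVEDUMP_* flags are declared in
   tree-ssa-live.h.  */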
/* Output partition map MAP to file F.  */

void
dump_var_map (FILE *f, var_map map)
{
  int t;
  unsigned x, y;
  int p;

  fprintf (f, "\nPartition map \n\n");

  for (x = 0; x < map->num_partitions; x++)
    {
      if (map->view_to_partition != NULL)
        p = map->view_to_partition[x];
      else
        p = x;

      if (ssa_name (p) == NULL_TREE
          || virtual_operand_p (ssa_name (p)))
        continue;

      t = 0;
      for (y = 1; y < num_ssa_names; y++)
        {
          p = partition_find (map->var_partition, y);
          if (map->partition_to_view)
            p = map->partition_to_view[p];
          if (p == (int)x)
            {
              if (t++ == 0)
                {
                  fprintf (f, "Partition %d (", x);
                  print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
                  fprintf (f, " - ");
                }
              fprintf (f, "%d ", y);
            }
        }
      if (t != 0)
        fprintf (f, ")\n");
    }
  fprintf (f, "\n");
}
/* Output live range info LIVE to file F, controlled by FLAG.  */

void
dump_live_info (FILE *f, tree_live_info_p live, int flag)
{
  basic_block bb;
  unsigned i;
  var_map map = live->map;
  bitmap_iterator bi;

  if ((flag & LIVEDUMP_ENTRY) && live->livein)
    {
      FOR_EACH_BB (bb)
        {
          fprintf (f, "\nLive on entry to BB%d : ", bb->index);
          EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
            {
              print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
              fprintf (f, " ");
            }
          fprintf (f, "\n");
        }
    }

  if ((flag & LIVEDUMP_EXIT) && live->liveout)
    {
      FOR_EACH_BB (bb)
        {
          fprintf (f, "\nLive on exit from BB%d : ", bb->index);
          EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
            {
              print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
              fprintf (f, " ");
            }
          fprintf (f, "\n");
        }
    }
}
#ifdef ENABLE_CHECKING
/* Verify that SSA_VAR is a non-virtual SSA_NAME.  */

void
register_ssa_partition_check (tree ssa_var)
{
  gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
  if (virtual_operand_p (ssa_var))
    {
      fprintf (stderr, "Illegally registering a virtual SSA name :");
      print_generic_expr (stderr, ssa_var, TDF_SLIM);
      fprintf (stderr, " in the SSA->Normal phase.\n");
      internal_error ("SSA corruption");
    }
}
/* Verify that the info in LIVE matches the current cfg.  */

static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple stmt;
  basic_block bb;
  edge e;
  int num;
  edge_iterator ei;
  var_map map = live->map;

  /* Check for live on entry partitions and report those with a DEF in
     the program.  This will typically mean an optimization has done
     something wrong.  */
  bb = ENTRY_BLOCK_PTR;
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (e->dest == EXIT_BLOCK_PTR)
        continue;
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
        {
          basic_block tmp;
          tree d = NULL_TREE;
          bitmap loe;
          var = partition_to_var (map, i);
          stmt = SSA_NAME_DEF_STMT (var);
          tmp = gimple_bb (stmt);
          if (SSA_NAME_VAR (var))
            d = ssa_default_def (cfun, SSA_NAME_VAR (var));

          loe = live_on_entry (live, e->dest);
          if (loe && bitmap_bit_p (loe, i))
            {
              if (!gimple_nop_p (stmt))
                {
                  num++;
                  print_generic_expr (stderr, var, TDF_SLIM);
                  fprintf (stderr, " is defined ");
                  if (tmp)
                    fprintf (stderr, " in BB%d, ", tmp->index);
                  fprintf (stderr, "by:\n");
                  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
                  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
                           entry_block);
                  fprintf (stderr, " So it appears to have multiple defs.\n");
                }
              else
                {
                  if (d != var)
                    {
                      num++;
                      print_generic_expr (stderr, var, TDF_SLIM);
                      fprintf (stderr, " is live-on-entry to BB%d ",
                               entry_block);
                      if (d)
                        {
                          fprintf (stderr, " but is not the default def of ");
                          print_generic_expr (stderr, d, TDF_SLIM);
                          fprintf (stderr, "\n");
                        }
                      else
                        fprintf (stderr, " and there is no default def.\n");
                    }
                }
            }
          else
            if (d == var)
              {
                /* The only way this var shouldn't be marked live on entry is
                   if it occurs in a PHI argument of the block.  */
                size_t z;
                bool ok = false;
                gimple_stmt_iterator gsi;
                for (gsi = gsi_start_phis (e->dest);
                     !gsi_end_p (gsi) && !ok;
                     gsi_next (&gsi))
                  {
                    gimple phi = gsi_stmt (gsi);
                    for (z = 0; z < gimple_phi_num_args (phi); z++)
                      if (var == gimple_phi_arg_def (phi, z))
                        {
                          ok = true;
                          break;
                        }
                  }
                if (ok)
                  continue;
                num++;
                print_generic_expr (stderr, var, TDF_SLIM);
                fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
                         entry_block);
                fprintf (stderr, "but it is a default def so it should be.\n");
              }
        }
    }
  gcc_assert (num <= 0);
}
#endif