/* Liveness for SSA trees.
   Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Andrew MacLeod <amacleod@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "bitmap.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-ssa-live.h"
#include "diagnostic-core.h"
#include "debug.h"
#include "flags.h"
#include "gimple.h"

#ifdef ENABLE_CHECKING
static void verify_live_on_entry (tree_live_info_p);
#endif

/* VARMAP maintains a mapping from SSA version number to real variables.

   All SSA_NAMEs are divided into partitions.  Initially each ssa_name is the
   only member of its own partition.  Coalescing will attempt to group any
   ssa_names which occur in a copy or in a PHI node into the same partition.

   At the end of out-of-ssa, each partition becomes a "real" variable and is
   rewritten as a compiler variable.

   The var_map data structure is used to manage these partitions.  It allows
   partitions to be combined, and determines which partition belongs to what
   ssa_name or variable, and vice versa.  */
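
/* A rough usage sketch of this API (illustrative only; the real driver is
   the out-of-ssa machinery, and NAME1/NAME2 are just stand-ins for
   SSA_NAMEs picked by the caller):

     var_map map = init_var_map (num_ssa_names);

     // Try to coalesce two SSA names into one partition.
     if (var_union (map, name1, name2) == NO_PARTITION)
       ;  // The two partitions could not be combined.

     // Compact away unused partitions and build the base variable table.
     partition_view_normal (map, true);

     for (x = 0; x < num_var_partitions (map); x++)
       ... partition_to_var (map, x) ...;

     delete_var_map (map);  */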

/* This routine will initialize the basevar fields of MAP.  */

static void
var_map_base_init (var_map map)
{
  int x, num_part, num;
  tree var;
  var_ann_t ann;

  num = 0;
  num_part = num_var_partitions (map);

  /* If a base table already exists, clear it, otherwise create it.  */
  if (map->partition_to_base_index != NULL)
    {
      free (map->partition_to_base_index);
      VEC_truncate (tree, map->basevars, 0);
    }
  else
    map->basevars = VEC_alloc (tree, heap, MAX (40, (num_part / 10)));

  map->partition_to_base_index = (int *) xmalloc (sizeof (int) * num_part);

  /* Build the base variable list, and point partitions at their bases.  */
  for (x = 0; x < num_part; x++)
    {
      var = partition_to_var (map, x);
      if (TREE_CODE (var) == SSA_NAME)
        var = SSA_NAME_VAR (var);
      ann = var_ann (var);
      /* If base variable hasn't been seen, set it up.  */
      if (!ann->base_var_processed)
        {
          ann->base_var_processed = 1;
          VAR_ANN_BASE_INDEX (ann) = num++;
          VEC_safe_push (tree, heap, map->basevars, var);
        }
      map->partition_to_base_index[x] = VAR_ANN_BASE_INDEX (ann);
    }

  map->num_basevars = num;

  /* Now clear the processed bit.  */
  for (x = 0; x < num; x++)
    {
      var = VEC_index (tree, map->basevars, x);
      var_ann (var)->base_var_processed = 0;
    }

#ifdef ENABLE_CHECKING
  for (x = 0; x < num_part; x++)
    {
      tree var2;
      var = SSA_NAME_VAR (partition_to_var (map, x));
      var2 = VEC_index (tree, map->basevars, basevar_index (map, x));
      gcc_assert (var == var2);
    }
#endif
}

/* Remove the base table in MAP.  */

static void
var_map_base_fini (var_map map)
{
  /* Free the basevar info if it is present.  */
  if (map->partition_to_base_index != NULL)
    {
      VEC_free (tree, heap, map->basevars);
      free (map->partition_to_base_index);
      map->partition_to_base_index = NULL;
      map->num_basevars = 0;
    }
}

/* Create a variable partition map of SIZE, initialize and return it.  */

var_map
init_var_map (int size)
{
  var_map map;

  map = (var_map) xmalloc (sizeof (struct _var_map));
  map->var_partition = partition_new (size);

  map->partition_to_view = NULL;
  map->view_to_partition = NULL;
  map->num_partitions = size;
  map->partition_size = size;
  map->num_basevars = 0;
  map->partition_to_base_index = NULL;
  map->basevars = NULL;
  return map;
}

/* Free memory associated with MAP.  */

void
delete_var_map (var_map map)
{
  var_map_base_fini (map);
  partition_delete (map->var_partition);
  if (map->partition_to_view)
    free (map->partition_to_view);
  if (map->view_to_partition)
    free (map->view_to_partition);
  free (map);
}

/* This function will combine the partitions in MAP for VAR1 and VAR2.  It
   returns the partition which represents the new partition.  If the two
   partitions cannot be combined, NO_PARTITION is returned.  */

int
var_union (var_map map, tree var1, tree var2)
{
  int p1, p2, p3;

  gcc_assert (TREE_CODE (var1) == SSA_NAME);
  gcc_assert (TREE_CODE (var2) == SSA_NAME);

  /* This is independent of partition_to_view.  If partition_to_view is
     on, then whichever one of these partitions is absorbed will never have a
     dereference into the partition_to_view array any more.  */

  p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
  p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));

  gcc_assert (p1 != NO_PARTITION);
  gcc_assert (p2 != NO_PARTITION);

  if (p1 == p2)
    p3 = p1;
  else
    p3 = partition_union (map->var_partition, p1, p2);

  if (map->partition_to_view)
    p3 = map->partition_to_view[p3];

  return p3;
}

/* Compress the partition numbers in MAP such that they fall in the range
   0..(num_partitions-1) instead of wherever they turned out during
   the partitioning exercise.  This removes any references to unused
   partitions, thereby allowing bitmaps and other vectors to be much
   denser.

   This is implemented such that compaction doesn't affect partitioning.
   I.e., once partitions are created and possibly merged, running one
   or more different kinds of compaction will not affect the partitions
   themselves.  Their index might change, but all the same variables will
   still be members of the same partition group.  This allows work on reduced
   sets, and no loss of information when a larger set is later desired.

   In particular, coalescing can work on partitions which have 2 or more
   definitions, and then 'recompact' later to include all the single
   definitions for assignment to program variables.  */
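
/* An illustrative sketch of such a recompaction (SOME_PARTITIONS is just a
   stand-in bitmap of the currently interesting partitions):

     // Work only on the partitions recorded in SOME_PARTITIONS.
     partition_view_bitmap (map, some_partitions, false);

     ... coalesce within this reduced view ...

     // Then widen the view back to every used partition, this time
     // building the base variable table as well.
     partition_view_normal (map, true);  */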

/* Set MAP back to the initial state of having no partition view.  Return a
   bitmap which has a bit set for each partition number which is in use in the
   varmap.  */

static bitmap
partition_view_init (var_map map)
{
  bitmap used;
  int tmp;
  unsigned int x;

  used = BITMAP_ALLOC (NULL);

  /* Already in a view?  Abandon the old one.  */
  if (map->partition_to_view)
    {
      free (map->partition_to_view);
      map->partition_to_view = NULL;
    }
  if (map->view_to_partition)
    {
      free (map->view_to_partition);
      map->view_to_partition = NULL;
    }

  /* Find out which partitions are actually referenced.  */
  for (x = 0; x < map->partition_size; x++)
    {
      tmp = partition_find (map->var_partition, x);
      if (ssa_name (tmp) != NULL_TREE && is_gimple_reg (ssa_name (tmp))
          && (!has_zero_uses (ssa_name (tmp))
              || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))))
        bitmap_set_bit (used, tmp);
    }

  map->num_partitions = map->partition_size;
  return used;
}

/* This routine will finalize the view data for MAP based on the partitions
   set in SELECTED.  This is either the same bitmap returned from
   partition_view_init, or a trimmed down version if some of those partitions
   were not desired in this view.  SELECTED is freed before returning.  */

static void
partition_view_fini (var_map map, bitmap selected)
{
  bitmap_iterator bi;
  unsigned count, i, x, limit;

  gcc_assert (selected);

  count = bitmap_count_bits (selected);
  limit = map->partition_size;

  /* If it's a one-to-one ratio, we don't need any view compaction.  */
  if (count < limit)
    {
      map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
      memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
      map->view_to_partition = (int *)xmalloc (count * sizeof (int));

      i = 0;
      /* Give each selected partition an index.  */
      EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
        {
          map->partition_to_view[x] = i;
          map->view_to_partition[i] = x;
          i++;
        }
      gcc_assert (i == count);
      map->num_partitions = i;
    }

  BITMAP_FREE (selected);
}

/* Create a partition view which includes all the used partitions in MAP.  If
   WANT_BASES is true, create the base variable map as well.  */

extern void
partition_view_normal (var_map map, bool want_bases)
{
  bitmap used;

  used = partition_view_init (map);
  partition_view_fini (map, used);

  if (want_bases)
    var_map_base_init (map);
  else
    var_map_base_fini (map);
}

/* Create a partition view in MAP which includes just partitions which occur in
   the bitmap ONLY.  If WANT_BASES is true, create the base variable map
   as well.  */

extern void
partition_view_bitmap (var_map map, bitmap only, bool want_bases)
{
  bitmap used;
  bitmap new_partitions = BITMAP_ALLOC (NULL);
  unsigned x, p;
  bitmap_iterator bi;

  used = partition_view_init (map);
  EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
    {
      p = partition_find (map->var_partition, x);
      gcc_assert (bitmap_bit_p (used, p));
      bitmap_set_bit (new_partitions, p);
    }
  partition_view_fini (map, new_partitions);

  BITMAP_FREE (used);
  if (want_bases)
    var_map_base_init (map);
  else
    var_map_base_fini (map);
}

static inline void mark_all_vars_used (tree *, void *data);

/* Helper function for mark_all_vars_used, called via walk_tree.  */

static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
  tree b;

  if (TREE_CODE (t) == SSA_NAME)
    t = SSA_NAME_VAR (t);

  if (IS_EXPR_CODE_CLASS (c)
      && (b = TREE_BLOCK (t)) != NULL)
    TREE_USED (b) = true;

  /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
     fields do not contain vars.  */
  if (TREE_CODE (t) == TARGET_MEM_REF)
    {
      mark_all_vars_used (&TMR_BASE (t), data);
      mark_all_vars_used (&TMR_INDEX (t), data);
      mark_all_vars_used (&TMR_INDEX2 (t), data);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Only need to mark VAR_DECLS; parameters and return results are not
     eliminated as unused.  */
  if (TREE_CODE (t) == VAR_DECL)
    {
      if (data != NULL && bitmap_clear_bit ((bitmap) data, DECL_UID (t)))
        mark_all_vars_used (&DECL_INITIAL (t), data);
      set_is_used (t);
    }
  /* remove_unused_scope_block_p requires information about labels
     which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
  if (TREE_CODE (t) == LABEL_DECL)
    /* Although the TREE_USED values that the frontend uses would be
       acceptable (albeit slightly over-conservative) for our purposes,
       init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
       must re-compute it here.  */
    TREE_USED (t) = 1;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}

/* Mark the scope block SCOPE and its subblocks unused, so that they can
   possibly be eliminated if dead.  */

static void
mark_scope_block_unused (tree scope)
{
  tree t;
  TREE_USED (scope) = false;
  if (!(*debug_hooks->ignore_block) (scope))
    TREE_USED (scope) = true;
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    mark_scope_block_unused (t);
}

/* Look if the block is dead (by possibly eliminating its dead subblocks)
   and return true if so.
   A block is declared dead if:
     1) No statements are associated with it.
     2) It declares no live variables.
     3) All subblocks are dead,
        or there is precisely one subblock and the block
        has the same abstract origin as the outer block and declares
        no variables, so it is a pure wrapper.
   When we are not outputting full debug info, we also eliminate dead variables
   out of scope blocks to let them be recycled by GGC and to save copying work
   done by the inliner.  */

static bool
remove_unused_scope_block_p (tree scope)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  int nsubblocks = 0;

  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &DECL_CHAIN (*t);

      /* Debug info of nested function refers to the block of the
         function.  We might still call it even if all statements
         of the function it was nested into were eliminated.

         TODO: We can actually look into cgraph to see if the function
         will be output to file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
        unused = false;

      /* If a decl has a value expr, we need to instantiate it
         regardless of debug info generation, to avoid codegen
         differences in memory overlap tests.  update_equiv_regs() may
         indirectly call validate_equiv_mem() to test whether a
         SET_DEST overlaps with others, and if the value expr changes
         by virtual register instantiation, we may end up with
         different results.  */
      else if (TREE_CODE (*t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*t))
        unused = false;

      /* Remove everything we don't generate debug info for.
         Don't remove larger vars though, because BLOCK_VARS are
         used also during expansion to determine which variables
         might share stack space.  */
      else if (DECL_IGNORED_P (*t) && is_gimple_reg (*t))
        {
          *t = DECL_CHAIN (*t);
          next = t;
        }

      /* When we are outputting debug info, we usually want to output
         info about optimized-out variables in the scope blocks.
         The exceptions are the scope blocks not containing any instructions
         at all, so the user can't get into those scopes in the first place.  */
      else if (var_ann (*t) != NULL && is_used_p (*t))
        unused = false;
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
        /* For labels that are still used in the IL, the decision to
           preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
           risk having different ordering in debug vs. non-debug builds
           during inlining or versioning.
           A label appearing here (we have already checked DECL_IGNORED_P)
           should not be used in the IL unless it has been explicitly used
           before, so we use TREE_USED as an approximation.  */
        /* In principle, we should do the same here as for the debug case
           below, however, when debugging, there might be additional nested
           levels that keep an upper level with a label live, so we have to
           force this block to be considered used, too.  */
        unused = false;

      /* When we are not doing full debug info, we however can keep around
         only the used variables for cfgexpand's memory packing, saving quite
         a lot of memory.

         For the sake of -g3, we keep around those vars but we don't count this
         as a use of the block, so an innermost block with no used vars and no
         instructions can be considered dead.  We only want to keep around
         blocks the user can breakpoint into and ask about the value of
         optimized out variables.

         Similarly we need to keep around types at least until all
         variables of all nested blocks are gone.  We track no
         information on whether a given type is used or not, so we have
         to keep them even when not emitting debug information,
         otherwise we may end up remapping variables and their (local)
         types in different orders depending on whether debug
         information is being generated.  */

      else if (TREE_CODE (*t) == TYPE_DECL
               || debug_info_level == DINFO_LEVEL_NORMAL
               || debug_info_level == DINFO_LEVEL_VERBOSE)
        ;
      else
        {
          *t = DECL_CHAIN (*t);
          next = t;
        }
    }

  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
    if (remove_unused_scope_block_p (*t))
      {
        if (BLOCK_SUBBLOCKS (*t))
          {
            tree next = BLOCK_CHAIN (*t);
            tree supercontext = BLOCK_SUPERCONTEXT (*t);

            *t = BLOCK_SUBBLOCKS (*t);
            while (BLOCK_CHAIN (*t))
              {
                BLOCK_SUPERCONTEXT (*t) = supercontext;
                t = &BLOCK_CHAIN (*t);
              }
            BLOCK_CHAIN (*t) = next;
            BLOCK_SUPERCONTEXT (*t) = supercontext;
            t = &BLOCK_CHAIN (*t);
            nsubblocks++;
          }
        else
          *t = BLOCK_CHAIN (*t);
      }
    else
      {
        t = &BLOCK_CHAIN (*t);
        nsubblocks++;
      }

  if (!unused)
    ;
  /* Outer scope is always used.  */
  else if (!BLOCK_SUPERCONTEXT (scope)
           || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
    unused = false;
  /* Innermost blocks with no live variables nor statements can be always
     eliminated.  */
  else if (!nsubblocks)
    ;
  /* For terse debug info we can eliminate info on unused variables.  */
  else if (debug_info_level == DINFO_LEVEL_NONE
           || debug_info_level == DINFO_LEVEL_TERSE)
    {
      /* Even for -g0/-g1 don't prune outer scopes from artificial
         functions, otherwise diagnostics using tree_nonartificial_location
         will not be emitted properly.  */
      if (inlined_function_outer_scope_p (scope))
        {
          tree ao = scope;

          while (ao
                 && TREE_CODE (ao) == BLOCK
                 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
            ao = BLOCK_ABSTRACT_ORIGIN (ao);
          if (ao
              && TREE_CODE (ao) == FUNCTION_DECL
              && DECL_DECLARED_INLINE_P (ao)
              && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
            unused = false;
        }
    }
  else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
    unused = false;
  /* See if this block is important for representation of the inlined
     function.  Inlined functions are always represented by a block with
     block_ultimate_origin being set to FUNCTION_DECL and DECL_SOURCE_LOCATION
     set...  */
  else if (inlined_function_outer_scope_p (scope))
    unused = false;
  else
    /* Verify that only blocks with source location set
       are entry points to the inlined functions.  */
    gcc_assert (BLOCK_SOURCE_LOCATION (scope) == UNKNOWN_LOCATION);

  TREE_USED (scope) = !unused;
  return unused;
}

/* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
   eliminated during the tree->rtl conversion process.  */

static inline void
mark_all_vars_used (tree *expr_p, void *data)
{
  walk_tree (expr_p, mark_all_vars_used_1, data, NULL);
}

/* Dump scope blocks starting at SCOPE to FILE.  INDENT is the
   indentation level and FLAGS is as in print_generic_expr.  */

static void
dump_scope_block (FILE *file, int indent, tree scope, int flags)
{
  tree var, t;
  unsigned int i;

  fprintf (file, "\n%*s{ Scope block #%i%s%s", indent, "", BLOCK_NUMBER (scope),
           TREE_USED (scope) ? "" : " (unused)",
           BLOCK_ABSTRACT (scope) ? " (abstract)": "");
  if (BLOCK_SOURCE_LOCATION (scope) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
        {
          fprintf (file, " Originating from :");
          if (DECL_P (origin))
            print_generic_decl (file, origin, flags);
          else
            fprintf (file, "#%i", BLOCK_NUMBER (origin));
        }
    }
  fprintf (file, " \n");
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
    {
      bool used = false;

      if (var_ann (var))
        used = is_used_p (var);

      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "%s\n", used ? "" : " (unused)");
    }
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
                          flags);
      fprintf (file, " (nonlocalized)\n");
    }
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n", indent, "");
}

/* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
   is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_block (tree scope, int flags)
{
  dump_scope_block (stderr, 0, scope, flags);
}

/* Dump the tree of lexical scopes of current_function_decl to FILE.
   FLAGS is as in print_generic_expr.  */

void
dump_scope_blocks (FILE *file, int flags)
{
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}

/* Dump the tree of lexical scopes of current_function_decl to stderr.
   FLAGS is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_blocks (int flags)
{
  dump_scope_blocks (stderr, flags);
}

/* Remove local variables that are not referenced in the IL.  */

void
remove_unused_locals (void)
{
  basic_block bb;
  tree var, t;
  referenced_var_iterator rvi;
  bitmap global_unused_vars = NULL;
  unsigned srcidx, dstidx, num;

  /* Removing declarations from lexical blocks when not optimizing is
     not only a waste of time, it actually causes differences in stack
     layout.  */
  if (!optimize)
    return;

  timevar_push (TV_REMOVE_UNUSED);

  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  /* Assume all locals are unused.  */
  FOR_EACH_REFERENCED_VAR (cfun, t, rvi)
    clear_is_used (t);

  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree b = gimple_block (stmt);

          if (is_gimple_debug (stmt))
            continue;

          if (b)
            TREE_USED (b) = true;

          for (i = 0; i < gimple_num_ops (stmt); i++)
            mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i), NULL);
        }

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          use_operand_p arg_p;
          ssa_op_iter i;
          tree def;
          gimple phi = gsi_stmt (gsi);

          /* No point processing globals.  */
          if (is_global_var (SSA_NAME_VAR (gimple_phi_result (phi))))
            continue;

          def = gimple_phi_result (phi);
          mark_all_vars_used (&def, NULL);

          FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
            {
              tree arg = USE_FROM_PTR (arg_p);
              mark_all_vars_used (&arg, NULL);
            }
        }

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->goto_locus)
          TREE_USED (e->goto_block) = true;
    }

  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local vars from local_decls.  */
  num = VEC_length (tree, cfun->local_decls);
  for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
    {
      var = VEC_index (tree, cfun->local_decls, srcidx);
      if (TREE_CODE (var) != FUNCTION_DECL
          && (!var_ann (var)
              || !is_used_p (var)))
        {
          if (is_global_var (var))
            {
              if (global_unused_vars == NULL)
                global_unused_vars = BITMAP_ALLOC (NULL);
              bitmap_set_bit (global_unused_vars, DECL_UID (var));
            }
          else
            continue;
        }
      else if (TREE_CODE (var) == VAR_DECL
               && DECL_HARD_REGISTER (var)
               && !is_global_var (var))
        cfun->has_local_explicit_reg_vars = true;

      if (srcidx != dstidx)
        VEC_replace (tree, cfun->local_decls, dstidx, var);
      dstidx++;
    }
  if (dstidx != num)
    VEC_truncate (tree, cfun->local_decls, dstidx);

  /* Remove unmarked global vars from local_decls.  */
  if (global_unused_vars != NULL)
    {
      tree var;
      unsigned ix;
      FOR_EACH_LOCAL_DECL (cfun, ix, var)
        if (TREE_CODE (var) == VAR_DECL
            && is_global_var (var)
            && var_ann (var) != NULL
            && is_used_p (var))
          mark_all_vars_used (&DECL_INITIAL (var), global_unused_vars);

      num = VEC_length (tree, cfun->local_decls);
      for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
        {
          var = VEC_index (tree, cfun->local_decls, srcidx);
          if (TREE_CODE (var) == VAR_DECL
              && is_global_var (var)
              && bitmap_bit_p (global_unused_vars, DECL_UID (var)))
            continue;

          if (srcidx != dstidx)
            VEC_replace (tree, cfun->local_decls, dstidx, var);
          dstidx++;
        }
      if (dstidx != num)
        VEC_truncate (tree, cfun->local_decls, dstidx);
      BITMAP_FREE (global_unused_vars);
    }

  /* Remove unused variables from REFERENCED_VARs.  */
  FOR_EACH_REFERENCED_VAR (cfun, t, rvi)
    if (!is_global_var (t)
        && TREE_CODE (t) != PARM_DECL
        && TREE_CODE (t) != RESULT_DECL
        && !is_used_p (t)
        && !var_ann (t)->is_heapvar)
      remove_referenced_var (t);
  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }

  timevar_pop (TV_REMOVE_UNUSED);
}

/* Allocate and return a new live range information object based on MAP.  */

static tree_live_info_p
new_tree_live_info (var_map map)
{
  tree_live_info_p live;
  unsigned x;

  live = (tree_live_info_p) xmalloc (sizeof (struct tree_live_info_d));
  live->map = map;
  live->num_blocks = last_basic_block;

  live->livein = (bitmap *)xmalloc (last_basic_block * sizeof (bitmap));
  for (x = 0; x < (unsigned)last_basic_block; x++)
    live->livein[x] = BITMAP_ALLOC (NULL);

  live->liveout = (bitmap *)xmalloc (last_basic_block * sizeof (bitmap));
  for (x = 0; x < (unsigned)last_basic_block; x++)
    live->liveout[x] = BITMAP_ALLOC (NULL);

  live->work_stack = XNEWVEC (int, last_basic_block);
  live->stack_top = live->work_stack;

  live->global = BITMAP_ALLOC (NULL);
  return live;
}

/* Free storage for live range info object LIVE.  */

void
delete_tree_live_info (tree_live_info_p live)
{
  int x;

  BITMAP_FREE (live->global);
  free (live->work_stack);

  for (x = live->num_blocks - 1; x >= 0; x--)
    BITMAP_FREE (live->liveout[x]);
  free (live->liveout);

  for (x = live->num_blocks - 1; x >= 0; x--)
    BITMAP_FREE (live->livein[x]);
  free (live->livein);

  free (live);
}

/* Visit basic block BB and propagate any required live on entry bits from
   LIVE into the predecessors.  VISITED is the bitmap of visited blocks.
   TMP is a temporary work bitmap which is passed in to avoid reallocating
   it each time.  */

static void
loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited,
                 bitmap tmp)
{
  edge e;
  bool change;
  edge_iterator ei;
  basic_block pred_bb;
  bitmap loe;
  gcc_assert (!TEST_BIT (visited, bb->index));

  SET_BIT (visited, bb->index);
  loe = live_on_entry (live, bb);

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      pred_bb = e->src;
      if (pred_bb == ENTRY_BLOCK_PTR)
        continue;
      /* TMP is variables live-on-entry from BB that aren't defined in the
         predecessor block.  This should be the live on entry vars to pred.
         Note that liveout is the DEFs in a block while live on entry is
         being calculated.  */
      bitmap_and_compl (tmp, loe, live->liveout[pred_bb->index]);

      /* Add these bits to live-on-entry for the pred.  If there are any
         changes, and pred_bb has been visited already, add it to the
         revisit stack.  */
      change = bitmap_ior_into (live_on_entry (live, pred_bb), tmp);
      if (TEST_BIT (visited, pred_bb->index) && change)
        {
          RESET_BIT (visited, pred_bb->index);
          *(live->stack_top)++ = pred_bb->index;
        }
    }
}

/* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
   of all the variables.  */

static void
live_worklist (tree_live_info_p live)
{
  unsigned b;
  basic_block bb;
  sbitmap visited = sbitmap_alloc (last_basic_block + 1);
  bitmap tmp = BITMAP_ALLOC (NULL);

  sbitmap_zero (visited);

  /* Visit all the blocks in reverse order and propagate live on entry values
     into the predecessor blocks.  */
  FOR_EACH_BB_REVERSE (bb)
    loe_visit_block (live, bb, visited, tmp);

  /* Process any blocks which require further iteration.  */
  while (live->stack_top != live->work_stack)
    {
      b = *--(live->stack_top);
      loe_visit_block (live, BASIC_BLOCK (b), visited, tmp);
    }

  BITMAP_FREE (tmp);
  sbitmap_free (visited);
}
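
/* In effect, loe_visit_block and live_worklist iterate the classic dataflow
   step, for each edge PRED_BB -> BB and each partition P:

     live-on-entry (PRED_BB) |= live-on-entry (BB) & ~liveout (PRED_BB)

   until no live-on-entry set changes, where liveout at this point still holds
   only the per-block defs recorded by set_var_live_on_entry below.  */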

/* Calculate the initial live on entry vector for SSA_NAME using immediate_use
   links.  Set the live on entry fields in LIVE.  Defs are marked temporarily
   in the liveout vector.  */

static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
  int p;
  gimple stmt;
  use_operand_p use;
  basic_block def_bb = NULL;
  imm_use_iterator imm_iter;
  bool global = false;

  p = var_to_partition (live->map, ssa_name);
  if (p == NO_PARTITION)
    return;

  stmt = SSA_NAME_DEF_STMT (ssa_name);
  if (stmt)
    {
      def_bb = gimple_bb (stmt);
      /* Mark defs in liveout bitmap temporarily.  */
      if (def_bb)
        bitmap_set_bit (live->liveout[def_bb->index], p);
    }
  else
    def_bb = ENTRY_BLOCK_PTR;

  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
     add it to the list of live on entry blocks.  */
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
    {
      gimple use_stmt = USE_STMT (use);
      basic_block add_block = NULL;

      if (gimple_code (use_stmt) == GIMPLE_PHI)
        {
          /* Uses in PHIs are considered to be live at exit of the SRC block
             as this is where a copy would be inserted.  Check to see if it is
             defined in that block, or whether it's live on entry.  */
          int index = PHI_ARG_INDEX_FROM_USE (use);
          edge e = gimple_phi_arg_edge (use_stmt, index);
          if (e->src != ENTRY_BLOCK_PTR)
            {
              if (e->src != def_bb)
                add_block = e->src;
            }
        }
      else if (is_gimple_debug (use_stmt))
        continue;
      else
        {
          /* If it's not defined in this block, it's live on entry.  */
          basic_block use_bb = gimple_bb (use_stmt);
          if (use_bb != def_bb)
            add_block = use_bb;
        }

      /* If there was a live on entry use, set the bit.  */
      if (add_block)
        {
          global = true;
          bitmap_set_bit (live->livein[add_block->index], p);
        }
    }

  /* If SSA_NAME is live on entry to at least one block, fill in all the live
     on entry blocks between the def and all the uses.  */
  if (global)
    bitmap_set_bit (live->global, p);
}

/* Calculate the live on exit vectors based on the entry info in LIVEINFO.  */

void
calculate_live_on_exit (tree_live_info_p liveinfo)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* Live on entry calculations used the liveout vectors for defs, clear them.  */
  FOR_EACH_BB (bb)
    bitmap_clear (liveinfo->liveout[bb->index]);

  /* Set all the live-on-exit bits for uses in PHIs.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      size_t i;

      /* Mark the PHI arguments which are live on exit to the pred block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple phi = gsi_stmt (gsi);
          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree t = PHI_ARG_DEF (phi, i);
              int p;

              if (TREE_CODE (t) != SSA_NAME)
                continue;

              p = var_to_partition (liveinfo->map, t);
              if (p == NO_PARTITION)
                continue;
              e = gimple_phi_arg_edge (phi, i);
              if (e->src != ENTRY_BLOCK_PTR)
                bitmap_set_bit (liveinfo->liveout[e->src->index], p);
            }
        }

      /* Add each successor's live on entry to this block's live on exit.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->dest != EXIT_BLOCK_PTR)
          bitmap_ior_into (liveinfo->liveout[bb->index],
                           live_on_entry (liveinfo, e->dest));
    }
}

/* Given partition map MAP, calculate all the live on entry bitmaps for
   each partition.  Return a new live info object.  */

tree_live_info_p
calculate_live_ranges (var_map map)
{
  tree var;
  unsigned i;
  tree_live_info_p live;

  live = new_tree_live_info (map);
  for (i = 0; i < num_var_partitions (map); i++)
    {
      var = partition_to_var (map, i);
      if (var != NULL_TREE)
        set_var_live_on_entry (var, live);
    }

  live_worklist (live);

#ifdef ENABLE_CHECKING
  verify_live_on_entry (live);
#endif

  calculate_live_on_exit (live);
  return live;
}
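
/* A rough sketch of the typical life cycle of this information
   (illustrative only):

     tree_live_info_p live = calculate_live_ranges (map);

     if (dump_file)
       dump_live_info (dump_file, live, LIVEDUMP_ENTRY | LIVEDUMP_EXIT);

     // Query the sets during coalescing, e.g. live on entry to block BB.
     bitmap loe = live_on_entry (live, bb);

     delete_tree_live_info (live);  */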

/* Output partition map MAP to file F.  */

void
dump_var_map (FILE *f, var_map map)
{
  int t;
  unsigned x, y;
  int p;

  fprintf (f, "\nPartition map \n\n");

  for (x = 0; x < map->num_partitions; x++)
    {
      if (map->view_to_partition != NULL)
        p = map->view_to_partition[x];
      else
        p = x;

      if (ssa_name (p) == NULL_TREE)
        continue;

      t = 0;
      for (y = 1; y < num_ssa_names; y++)
        {
          p = partition_find (map->var_partition, y);
          if (map->partition_to_view)
            p = map->partition_to_view[p];
          if (p == (int)x)
            {
              if (t++ == 0)
                {
                  fprintf (f, "Partition %d (", x);
                  print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
                  fprintf (f, " - ");
                }
              fprintf (f, "%d ", y);
            }
        }
      if (t != 0)
        fprintf (f, ")\n");
    }
  fprintf (f, "\n");
}

/* Output live range info LIVE to file F, controlled by FLAG.  */

void
dump_live_info (FILE *f, tree_live_info_p live, int flag)
{
  basic_block bb;
  unsigned i;
  var_map map = live->map;
  bitmap_iterator bi;

  if ((flag & LIVEDUMP_ENTRY) && live->livein)
    {
      FOR_EACH_BB (bb)
        {
          fprintf (f, "\nLive on entry to BB%d : ", bb->index);
          EXECUTE_IF_SET_IN_BITMAP (live->livein[bb->index], 0, i, bi)
            {
              print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
              fprintf (f, "  ");
            }
          fprintf (f, "\n");
        }
    }

  if ((flag & LIVEDUMP_EXIT) && live->liveout)
    {
      FOR_EACH_BB (bb)
        {
          fprintf (f, "\nLive on exit from BB%d : ", bb->index);
          EXECUTE_IF_SET_IN_BITMAP (live->liveout[bb->index], 0, i, bi)
            {
              print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
              fprintf (f, "  ");
            }
          fprintf (f, "\n");
        }
    }
}

struct GTY(()) numbered_tree_d
{
  tree t;
  int num;
};
typedef struct numbered_tree_d numbered_tree;

DEF_VEC_O (numbered_tree);
DEF_VEC_ALLOC_O (numbered_tree, heap);

/* Compare two declaration references by their DECL_UID / sequence number.
   Called via qsort.  */

static int
compare_decls_by_uid (const void *pa, const void *pb)
{
  const numbered_tree *nt_a = ((const numbered_tree *)pa);
  const numbered_tree *nt_b = ((const numbered_tree *)pb);

  if (DECL_UID (nt_a->t) != DECL_UID (nt_b->t))
    return DECL_UID (nt_a->t) - DECL_UID (nt_b->t);
  return nt_a->num - nt_b->num;
}

/* Called via walk_gimple_stmt / walk_gimple_op by dump_enumerated_decls.  */
static tree
dump_enumerated_decls_push (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  VEC (numbered_tree, heap) **list = (VEC (numbered_tree, heap) **) &wi->info;
  numbered_tree nt;

  if (!DECL_P (*tp))
    return NULL_TREE;
  nt.t = *tp;
  nt.num = VEC_length (numbered_tree, *list);
  VEC_safe_push (numbered_tree, heap, *list, &nt);
  *walk_subtrees = 0;
  return NULL_TREE;
}

/* Find all the declarations used by the current function, sort them by uid,
   and emit the sorted list.  Each declaration is tagged with a sequence
   number indicating when it was found during statement / tree walking,
   so that TDF_NOUID comparisons of anonymous declarations are still
   meaningful.  Where a declaration was encountered more than once, we
   emit only the sequence number of the first encounter.
   FILE is the dump file where to output the list and FLAGS is as in
   print_generic_expr.  */
void
dump_enumerated_decls (FILE *file, int flags)
{
  basic_block bb;
  struct walk_stmt_info wi;
  VEC (numbered_tree, heap) *decl_list = VEC_alloc (numbered_tree, heap, 40);

  memset (&wi, '\0', sizeof (wi));
  wi.info = (void*) decl_list;
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        if (!is_gimple_debug (gsi_stmt (gsi)))
          walk_gimple_stmt (&gsi, NULL, dump_enumerated_decls_push, &wi);
    }
  decl_list = (VEC (numbered_tree, heap) *) wi.info;
  VEC_qsort (numbered_tree, decl_list, compare_decls_by_uid);
  if (VEC_length (numbered_tree, decl_list))
    {
      unsigned ix;
      numbered_tree *ntp;
      tree last = NULL_TREE;

      fprintf (file, "Declarations used by %s, sorted by DECL_UID:\n",
               current_function_name ());
      FOR_EACH_VEC_ELT (numbered_tree, decl_list, ix, ntp)
        {
          if (ntp->t == last)
            continue;
          fprintf (file, "%d: ", ntp->num);
          print_generic_decl (file, ntp->t, flags);
          fprintf (file, "\n");
          last = ntp->t;
        }
    }
  VEC_free (numbered_tree, heap, decl_list);
}

#ifdef ENABLE_CHECKING
/* Verify that SSA_VAR is a non-virtual SSA_NAME.  */

void
register_ssa_partition_check (tree ssa_var)
{
  gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
  if (!is_gimple_reg (SSA_NAME_VAR (ssa_var)))
    {
      fprintf (stderr, "Illegally registering a virtual SSA name :");
      print_generic_expr (stderr, ssa_var, TDF_SLIM);
      fprintf (stderr, " in the SSA->Normal phase.\n");
      internal_error ("SSA corruption");
    }
}

/* Verify that the info in LIVE matches the current cfg.  */

static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple stmt;
  basic_block bb;
  edge e;
  int num;
  edge_iterator ei;
  var_map map = live->map;

  /* Check for live on entry partitions and report those with a DEF in
     the program.  This will typically mean an optimization has done
     something wrong.  */
  bb = ENTRY_BLOCK_PTR;
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (e->dest == EXIT_BLOCK_PTR)
        continue;
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
        {
          basic_block tmp;
          tree d;
          bitmap loe;
          var = partition_to_var (map, i);
          stmt = SSA_NAME_DEF_STMT (var);
          tmp = gimple_bb (stmt);
          d = gimple_default_def (cfun, SSA_NAME_VAR (var));

          loe = live_on_entry (live, e->dest);
          if (loe && bitmap_bit_p (loe, i))
            {
              if (!gimple_nop_p (stmt))
                {
                  num++;
                  print_generic_expr (stderr, var, TDF_SLIM);
                  fprintf (stderr, " is defined ");
                  if (tmp)
                    fprintf (stderr, " in BB%d, ", tmp->index);
                  fprintf (stderr, "by:\n");
                  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
                  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
                           entry_block);
                  fprintf (stderr, " So it appears to have multiple defs.\n");
                }
              else
                {
                  if (d != var)
                    {
                      num++;
                      print_generic_expr (stderr, var, TDF_SLIM);
                      fprintf (stderr, " is live-on-entry to BB%d ",
                               entry_block);
                      if (d)
                        {
                          fprintf (stderr, " but is not the default def of ");
                          print_generic_expr (stderr, d, TDF_SLIM);
                          fprintf (stderr, "\n");
                        }
                      else
                        fprintf (stderr, " and there is no default def.\n");
                    }
                }
            }
          else
            if (d == var)
              {
                /* The only way this var shouldn't be marked live on entry is
                   if it occurs in a PHI argument of the block.  */
                size_t z;
                bool ok = false;
                gimple_stmt_iterator gsi;
                for (gsi = gsi_start_phis (e->dest);
                     !gsi_end_p (gsi) && !ok;
                     gsi_next (&gsi))
                  {
                    gimple phi = gsi_stmt (gsi);
                    for (z = 0; z < gimple_phi_num_args (phi); z++)
                      if (var == gimple_phi_arg_def (phi, z))
                        {
                          ok = true;
                          break;
                        }
                  }
                if (ok)
                  continue;
                num++;
                print_generic_expr (stderr, var, TDF_SLIM);
                fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
                         entry_block);
                fprintf (stderr, "but it is a default def so it should be.\n");
              }
        }
    }
  gcc_assert (num <= 0);
}
#endif