2005-09-26 Daniel Berlin <dberlin@dberlin.org>
[official-gcc.git] / gcc / tree-ssa-alias.c
blobf72fd95dd1e2f70b4d45858060cdf58ccac7b84c
/* Alias analysis for trees.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "timevar.h"
#include "expr.h"
#include "ggc.h"
#include "langhooks.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "tree-gimple.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "tree-ssa-structalias.h"
#include "convert.h"
#include "params.h"
#include "ipa-type-escape.h"
#include "vec.h"
#include "bitmap.h"
50 /* Obstack used to hold grouping bitmaps and other temporary bitmaps used by
51 aliasing */
52 static bitmap_obstack alias_obstack;
54 /* 'true' after aliases have been computed (see compute_may_aliases). */
55 bool aliases_computed_p;
57 /* Structure to map a variable to its alias set and keep track of the
58 virtual operands that will be needed to represent it. */
59 struct alias_map_d
61 /* Variable and its alias set. */
62 tree var;
63 HOST_WIDE_INT set;
65 /* Total number of virtual operands that will be needed to represent
66 all the aliases of VAR. */
67 long total_alias_vops;
69 /* Nonzero if the aliases for this memory tag have been grouped
70 already. Used in group_aliases. */
71 unsigned int grouped_p : 1;
73 /* Set of variables aliased with VAR. This is the exact same
74 information contained in VAR_ANN (VAR)->MAY_ALIASES, but in
75 bitmap form to speed up alias grouping. */
76 bitmap may_aliases;
/* Counters used to display statistics on alias analysis.  Each pair
   counts queries made and queries resolved by the given mechanism.  */
struct alias_stats_d
{
  unsigned int alias_queries;
  unsigned int alias_mayalias;
  unsigned int alias_noalias;
  unsigned int simple_queries;
  unsigned int simple_resolved;
  unsigned int tbaa_queries;
  unsigned int tbaa_resolved;
  unsigned int structnoaddress_queries;
  unsigned int structnoaddress_resolved;
};
95 /* Local variables. */
96 static struct alias_stats_d alias_stats;
98 /* Local functions. */
99 static void compute_flow_insensitive_aliasing (struct alias_info *);
100 static void dump_alias_stats (FILE *);
101 static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT, bool);
102 static tree create_memory_tag (tree type, bool is_type_tag);
103 static tree get_tmt_for (tree, struct alias_info *);
104 static tree get_nmt_for (tree);
105 static void add_may_alias (tree, tree);
106 static void replace_may_alias (tree, size_t, tree);
107 static struct alias_info *init_alias_info (void);
108 static void delete_alias_info (struct alias_info *);
109 static void compute_flow_sensitive_aliasing (struct alias_info *);
110 static void setup_pointers_and_addressables (struct alias_info *);
111 static void create_global_var (void);
112 static void maybe_create_global_var (struct alias_info *ai);
113 static void group_aliases (struct alias_info *);
114 static void set_pt_anything (tree ptr);
116 /* Global declarations. */
118 /* Call clobbered variables in the function. If bit I is set, then
119 REFERENCED_VARS (I) is call-clobbered. */
120 bitmap call_clobbered_vars;
122 /* Addressable variables in the function. If bit I is set, then
123 REFERENCED_VARS (I) has had its address taken. Note that
124 CALL_CLOBBERED_VARS and ADDRESSABLE_VARS are not related. An
125 addressable variable is not necessarily call-clobbered (e.g., a
126 local addressable whose address does not escape) and not all
127 call-clobbered variables are addressable (e.g., a local static
128 variable). */
129 bitmap addressable_vars;
131 /* When the program has too many call-clobbered variables and call-sites,
132 this variable is used to represent the clobbering effects of function
133 calls. In these cases, all the call clobbered variables in the program
134 are forced to alias this variable. This reduces compile times by not
135 having to keep track of too many V_MAY_DEF expressions at call sites. */
136 tree global_var;
138 /* Return true if TAG can touch global memory. */
139 static bool
140 tag_marked_global (tree tag)
142 gcc_assert (MTAG_P (tag));
143 return MTAG_GLOBAL (tag);
146 /* Mark TAG, an alias tag, as possibly touching global memory. */
147 static void
148 mark_MTAG_GLOBAL (tree tag)
150 gcc_assert (MTAG_P (tag));
151 MTAG_GLOBAL (tag) = 1;
154 /* qsort comparison function to sort type/name tags by DECL_UID. */
156 static int
157 sort_tags_by_id (const void *pa, const void *pb)
159 tree a = *(tree *)pa;
160 tree b = *(tree *)pb;
162 return DECL_UID (a) - DECL_UID (b);
165 /* Initialize WORKLIST to contain those memory tags that are marked call
166 clobbered. */
168 static void
169 init_transitive_clobber_worklist (VEC (tree, heap) **worklist)
171 referenced_var_iterator rvi;
172 tree curr;
174 FOR_EACH_REFERENCED_VAR (curr, rvi)
176 if (MTAG_P (curr) && is_call_clobbered (curr))
177 VEC_safe_push (tree, heap, *worklist, curr);
181 /* Add ALIAS to WORKLIST if ALIAS is not already marked call
182 clobbered, and is a memory tag. */
184 static void
185 add_to_worklist (tree alias, VEC (tree, heap) **worklist)
187 if (MTAG_P (alias) && !is_call_clobbered (alias))
188 VEC_safe_push (tree, heap, *worklist, alias);
191 /* Mark aliases of TAG as call clobbered, and place any tags on the
192 alias list that were not already call clobbered on WORKLIST. */
194 static void
195 mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist)
197 unsigned int i;
198 varray_type ma;
200 if (!MTAG_P (tag))
201 return;
202 ma = may_aliases (tag);
203 if (!ma)
204 return;
205 for (i = 0; i < VARRAY_ACTIVE_SIZE (ma); i++)
207 tree entry = VARRAY_TREE (ma, i);
208 if (!unmodifiable_var_p (entry))
210 add_to_worklist (entry, worklist);
211 mark_call_clobbered (entry);
216 /* Tags containing global vars need to be marked as global.
217 Tags containing call clobbered vars need to be marked as call
218 clobbered. */
220 static void
221 compute_tag_properties (void)
223 referenced_var_iterator rvi;
224 tree tag;
225 bool changed = true;
226 VEC (tree, heap) *taglist = NULL;
228 FOR_EACH_REFERENCED_VAR (tag, rvi)
230 if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG)
231 continue;
232 VEC_safe_push (tree, heap, taglist, tag);
235 /* We sort the taglist by DECL_UID, for two reasons.
236 1. To get a sequential ordering to make the bitmap accesses
237 faster.
238 2. Because of the way we compute aliases, it's more likely that
239 an earlier tag is included in a later tag, and this will reduce
240 the number of iterations.
242 If we had a real tag graph, we would just topo-order it and be
243 done with it. */
244 qsort (VEC_address (tree, taglist),
245 VEC_length (tree, taglist),
246 sizeof (tree),
247 sort_tags_by_id);
249 /* Go through each tag not marked as global, and if it aliases
250 global vars, mark it global.
252 If the tag contains call clobbered vars, mark it call
253 clobbered. */
255 while (changed)
257 unsigned int k;
259 changed = false;
260 for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
262 varray_type ma;
263 unsigned int i;
265 if (is_call_clobbered (tag) && tag_marked_global (tag))
266 continue;
268 ma = may_aliases (tag);
269 if (!ma)
270 continue;
272 for (i = 0; i < VARRAY_ACTIVE_SIZE (ma); i++)
274 tree entry = VARRAY_TREE (ma, i);
276 /* Call clobbered entries cause the tag to be marked
277 call clobbered. */
278 if (is_call_clobbered (entry) && !is_call_clobbered (tag))
280 mark_call_clobbered (tag);
281 changed = true;
284 /* Global vars cause the tag to be marked global. */
285 if (is_global_var (entry) && !tag_marked_global (tag))
287 mark_MTAG_GLOBAL (tag);
288 changed = true;
293 VEC_free (tree, heap, taglist);
296 /* Set up the initial variable clobbers and globalness.
297 When this function completes, only tags whose aliases need to be
298 clobbered will be set clobbered. Tags clobbered because they
299 contain call clobbered vars are handled in compute_tag_properties. */
301 static void
302 set_initial_properties (struct alias_info *ai)
304 unsigned int i;
305 referenced_var_iterator rvi;
306 tree var;
308 FOR_EACH_REFERENCED_VAR (var, rvi)
310 if (is_global_var (var) && !var_can_have_subvars (var))
311 if (!unmodifiable_var_p (var))
312 mark_call_clobbered (var);
315 for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
317 tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
318 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
319 var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
321 if (pi->value_escapes_p || pi->pt_anything)
323 /* If PTR escapes or may point to anything, then its associated
324 memory tags and pointed-to variables are call-clobbered. */
325 if (pi->name_mem_tag)
326 mark_call_clobbered (pi->name_mem_tag);
328 if (v_ann->type_mem_tag)
329 mark_call_clobbered (v_ann->type_mem_tag);
331 if (pi->pt_vars)
333 bitmap_iterator bi;
334 unsigned int j;
335 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
336 if (!unmodifiable_var_p (referenced_var (j)))
337 mark_call_clobbered (referenced_var (j));
340 /* If the name tag is call clobbered, so is the type tag
341 associated with the base VAR_DECL. */
342 if (pi->name_mem_tag
343 && v_ann->type_mem_tag
344 && is_call_clobbered (pi->name_mem_tag))
345 mark_call_clobbered (v_ann->type_mem_tag);
347 if ((pi->pt_global_mem || pi->pt_anything) && pi->name_mem_tag)
348 mark_MTAG_GLOBAL (pi->name_mem_tag);
349 if ((pi->pt_global_mem || pi->pt_anything) && v_ann->type_mem_tag)
350 mark_MTAG_GLOBAL (v_ann->type_mem_tag);
353 /* Compute which variables need to be marked call clobbered because
354 their tag is call clobbered, and which tags need to be marked
355 global because they contain global variables. */
357 static void
358 compute_call_clobbered (struct alias_info *ai)
360 VEC (tree, heap) *worklist = NULL;
362 set_initial_properties (ai);
363 init_transitive_clobber_worklist (&worklist);
364 while (VEC_length (tree, worklist) != 0)
366 tree curr = VEC_pop (tree, worklist);
367 mark_call_clobbered (curr);
368 mark_aliases_call_clobbered (curr, &worklist);
370 VEC_free (tree, heap, worklist);
371 compute_tag_properties ();
374 /* Compute may-alias information for every variable referenced in function
375 FNDECL.
377 Alias analysis proceeds in 3 main phases:
379 1- Points-to and escape analysis.
381 This phase walks the use-def chains in the SSA web looking for three
382 things:
384 * Assignments of the form P_i = &VAR
385 * Assignments of the form P_i = malloc()
386 * Pointers and ADDR_EXPR that escape the current function.
388 The concept of 'escaping' is the same one used in the Java world. When
389 a pointer or an ADDR_EXPR escapes, it means that it has been exposed
390 outside of the current function. So, assignment to global variables,
391 function arguments and returning a pointer are all escape sites, as are
392 conversions between pointers and integers.
394 This is where we are currently limited. Since not everything is renamed
395 into SSA, we lose track of escape properties when a pointer is stashed
396 inside a field in a structure, for instance. In those cases, we are
397 assuming that the pointer does escape.
399 We use escape analysis to determine whether a variable is
400 call-clobbered. Simply put, if an ADDR_EXPR escapes, then the variable
401 is call-clobbered. If a pointer P_i escapes, then all the variables
402 pointed-to by P_i (and its memory tag) also escape.
404 2- Compute flow-sensitive aliases
406 We have two classes of memory tags. Memory tags associated with the
407 pointed-to data type of the pointers in the program. These tags are
408 called "type memory tag" (TMT). The other class are those associated
409 with SSA_NAMEs, called "name memory tag" (NMT). The basic idea is that
410 when adding operands for an INDIRECT_REF *P_i, we will first check
411 whether P_i has a name tag, if it does we use it, because that will have
412 more precise aliasing information. Otherwise, we use the standard type
413 tag.
415 In this phase, we go through all the pointers we found in points-to
416 analysis and create alias sets for the name memory tags associated with
417 each pointer P_i. If P_i escapes, we mark call-clobbered the variables
418 it points to and its tag.
421 3- Compute flow-insensitive aliases
423 This pass will compare the alias set of every type memory tag and every
424 addressable variable found in the program. Given a type memory tag TMT
425 and an addressable variable V. If the alias sets of TMT and V conflict
426 (as computed by may_alias_p), then V is marked as an alias tag and added
427 to the alias set of TMT.
429 For instance, consider the following function:
431 foo (int i)
433 int *p, a, b;
435 if (i > 10)
436 p = &a;
437 else
438 p = &b;
440 *p = 3;
441 a = b + 2;
442 return *p;
445 After aliasing analysis has finished, the type memory tag for pointer
446 'p' will have two aliases, namely variables 'a' and 'b'. Every time
447 pointer 'p' is dereferenced, we want to mark the operation as a
448 potential reference to 'a' and 'b'.
450 foo (int i)
452 int *p, a, b;
454 if (i_2 > 10)
455 p_4 = &a;
456 else
457 p_6 = &b;
458 # p_1 = PHI <p_4(1), p_6(2)>;
460 # a_7 = V_MAY_DEF <a_3>;
461 # b_8 = V_MAY_DEF <b_5>;
462 *p_1 = 3;
464 # a_9 = V_MAY_DEF <a_7>
465 # VUSE <b_8>
466 a_9 = b_8 + 2;
468 # VUSE <a_9>;
469 # VUSE <b_8>;
470 return *p_1;
473 In certain cases, the list of may aliases for a pointer may grow too
474 large. This may cause an explosion in the number of virtual operands
475 inserted in the code. Resulting in increased memory consumption and
476 compilation time.
478 When the number of virtual operands needed to represent aliased
479 loads and stores grows too large (configurable with @option{--param
480 max-aliased-vops}), alias sets are grouped to avoid severe
481 compile-time slow downs and memory consumption. See group_aliases. */
483 static void
484 compute_may_aliases (void)
486 struct alias_info *ai;
488 memset (&alias_stats, 0, sizeof (alias_stats));
490 /* Initialize aliasing information. */
491 ai = init_alias_info ();
493 /* For each pointer P_i, determine the sets of variables that P_i may
494 point-to. For every addressable variable V, determine whether the
495 address of V escapes the current function, making V call-clobbered
496 (i.e., whether &V is stored in a global variable or if its passed as a
497 function call argument). */
498 compute_points_to_sets (ai);
500 /* Collect all pointers and addressable variables, compute alias sets,
501 create memory tags for pointers and promote variables whose address is
502 not needed anymore. */
503 setup_pointers_and_addressables (ai);
505 /* Compute flow-sensitive, points-to based aliasing for all the name
506 memory tags. Note that this pass needs to be done before flow
507 insensitive analysis because it uses the points-to information
508 gathered before to mark call-clobbered type tags. */
509 compute_flow_sensitive_aliasing (ai);
511 /* Compute type-based flow-insensitive aliasing for all the type
512 memory tags. */
513 compute_flow_insensitive_aliasing (ai);
515 /* Determine if we need to enable alias grouping. */
516 if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
517 group_aliases (ai);
519 /* Compute call clobbering information. */
520 compute_call_clobbered (ai);
522 /* If the program has too many call-clobbered variables and/or function
523 calls, create .GLOBAL_VAR and use it to model call-clobbering
524 semantics at call sites. This reduces the number of virtual operands
525 considerably, improving compile times at the expense of lost
526 aliasing precision. */
527 maybe_create_global_var (ai);
529 /* Debugging dumps. */
530 if (dump_file)
532 dump_referenced_vars (dump_file);
533 if (dump_flags & TDF_STATS)
534 dump_alias_stats (dump_file);
535 dump_points_to_info (dump_file);
536 dump_alias_info (dump_file);
539 /* Deallocate memory used by aliasing data structures. */
540 delete_alias_info (ai);
543 block_stmt_iterator bsi;
544 basic_block bb;
545 FOR_EACH_BB (bb)
547 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
549 update_stmt_if_modified (bsi_stmt (bsi));
556 struct tree_opt_pass pass_may_alias =
558 "alias", /* name */
559 NULL, /* gate */
560 compute_may_aliases, /* execute */
561 NULL, /* sub */
562 NULL, /* next */
563 0, /* static_pass_number */
564 TV_TREE_MAY_ALIAS, /* tv_id */
565 PROP_cfg | PROP_ssa, /* properties_required */
566 PROP_alias, /* properties_provided */
567 0, /* properties_destroyed */
568 0, /* todo_flags_start */
569 TODO_dump_func | TODO_update_ssa
570 | TODO_ggc_collect | TODO_verify_ssa
571 | TODO_verify_stmts, /* todo_flags_finish */
572 0 /* letter */
576 /* Data structure used to count the number of dereferences to PTR
577 inside an expression. */
578 struct count_ptr_d
580 tree ptr;
581 unsigned count;
585 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
586 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
588 static tree
589 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
591 struct count_ptr_d *count_p = (struct count_ptr_d *) data;
593 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
594 pointer 'ptr' is *not* dereferenced, it is simply used to compute
595 the address of 'fld' as 'ptr + offsetof(fld)'. */
596 if (TREE_CODE (*tp) == ADDR_EXPR)
598 *walk_subtrees = 0;
599 return NULL_TREE;
602 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
603 count_p->count++;
605 return NULL_TREE;
609 /* Count the number of direct and indirect uses for pointer PTR in
610 statement STMT. The two counts are stored in *NUM_USES_P and
611 *NUM_DEREFS_P respectively. *IS_STORE_P is set to 'true' if at
612 least one of those dereferences is a store operation. */
614 void
615 count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
616 unsigned *num_derefs_p, bool *is_store)
618 ssa_op_iter i;
619 tree use;
621 *num_uses_p = 0;
622 *num_derefs_p = 0;
623 *is_store = false;
625 /* Find out the total number of uses of PTR in STMT. */
626 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
627 if (use == ptr)
628 (*num_uses_p)++;
630 /* Now count the number of indirect references to PTR. This is
631 truly awful, but we don't have much choice. There are no parent
632 pointers inside INDIRECT_REFs, so an expression like
633 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
634 find all the indirect and direct uses of x_1 inside. The only
635 shortcut we can take is the fact that GIMPLE only allows
636 INDIRECT_REFs inside the expressions below. */
637 if (TREE_CODE (stmt) == MODIFY_EXPR
638 || (TREE_CODE (stmt) == RETURN_EXPR
639 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
640 || TREE_CODE (stmt) == ASM_EXPR
641 || TREE_CODE (stmt) == CALL_EXPR)
643 tree lhs, rhs;
645 if (TREE_CODE (stmt) == MODIFY_EXPR)
647 lhs = TREE_OPERAND (stmt, 0);
648 rhs = TREE_OPERAND (stmt, 1);
650 else if (TREE_CODE (stmt) == RETURN_EXPR)
652 tree e = TREE_OPERAND (stmt, 0);
653 lhs = TREE_OPERAND (e, 0);
654 rhs = TREE_OPERAND (e, 1);
656 else if (TREE_CODE (stmt) == ASM_EXPR)
658 lhs = ASM_OUTPUTS (stmt);
659 rhs = ASM_INPUTS (stmt);
661 else
663 lhs = NULL_TREE;
664 rhs = stmt;
667 if (lhs && (TREE_CODE (lhs) == TREE_LIST || EXPR_P (lhs)))
669 struct count_ptr_d count;
670 count.ptr = ptr;
671 count.count = 0;
672 walk_tree (&lhs, count_ptr_derefs, &count, NULL);
673 *is_store = true;
674 *num_derefs_p = count.count;
677 if (rhs && (TREE_CODE (rhs) == TREE_LIST || EXPR_P (rhs)))
679 struct count_ptr_d count;
680 count.ptr = ptr;
681 count.count = 0;
682 walk_tree (&rhs, count_ptr_derefs, &count, NULL);
683 *num_derefs_p += count.count;
687 gcc_assert (*num_uses_p >= *num_derefs_p);
690 /* Initialize the data structures used for alias analysis. */
692 static struct alias_info *
693 init_alias_info (void)
695 struct alias_info *ai;
696 referenced_var_iterator rvi;
697 tree var;
699 bitmap_obstack_initialize (&alias_obstack);
700 ai = xcalloc (1, sizeof (struct alias_info));
701 ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
702 sbitmap_zero (ai->ssa_names_visited);
703 VARRAY_TREE_INIT (ai->processed_ptrs, 50, "processed_ptrs");
704 ai->written_vars = BITMAP_ALLOC (&alias_obstack);
705 ai->dereferenced_ptrs_store = BITMAP_ALLOC (&alias_obstack);
706 ai->dereferenced_ptrs_load = BITMAP_ALLOC (&alias_obstack);
708 /* If aliases have been computed before, clear existing information. */
709 if (aliases_computed_p)
711 unsigned i;
713 /* Similarly, clear the set of addressable variables. In this
714 case, we can just clear the set because addressability is
715 only computed here. */
716 bitmap_clear (addressable_vars);
718 /* Clear flow-insensitive alias information from each symbol. */
719 FOR_EACH_REFERENCED_VAR (var, rvi)
721 var_ann_t ann = var_ann (var);
723 ann->is_alias_tag = 0;
724 ann->may_aliases = NULL;
725 NUM_REFERENCES_CLEAR (ann);
727 /* Since we are about to re-discover call-clobbered
728 variables, clear the call-clobbered flag. Variables that
729 are intrinsically call-clobbered (globals, local statics,
730 etc) will not be marked by the aliasing code, so we can't
731 remove them from CALL_CLOBBERED_VARS.
733 NB: STRUCT_FIELDS are still call clobbered if they are for
734 a global variable, so we *don't* clear their call clobberedness
735 just because they are tags, though we will clear it if they
736 aren't for global variables. */
737 if (TREE_CODE (var) == NAME_MEMORY_TAG
738 || TREE_CODE (var) == TYPE_MEMORY_TAG
739 || !is_global_var (var))
740 clear_call_clobbered (var);
743 /* Clear flow-sensitive points-to information from each SSA name. */
744 for (i = 1; i < num_ssa_names; i++)
746 tree name = ssa_name (i);
748 if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
749 continue;
751 if (SSA_NAME_PTR_INFO (name))
753 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);
755 /* Clear all the flags but keep the name tag to
756 avoid creating new temporaries unnecessarily. If
757 this pointer is found to point to a subset or
758 superset of its former points-to set, then a new
759 tag will need to be created in create_name_tags. */
760 pi->pt_anything = 0;
761 pi->pt_null = 0;
762 pi->value_escapes_p = 0;
763 pi->is_dereferenced = 0;
764 if (pi->pt_vars)
765 bitmap_clear (pi->pt_vars);
770 /* Next time, we will need to reset alias information. */
771 aliases_computed_p = true;
773 return ai;
777 /* Deallocate memory used by alias analysis. */
779 static void
780 delete_alias_info (struct alias_info *ai)
782 size_t i;
783 referenced_var_iterator rvi;
784 tree var;
786 sbitmap_free (ai->ssa_names_visited);
787 ai->processed_ptrs = NULL;
789 for (i = 0; i < ai->num_addressable_vars; i++)
790 free (ai->addressable_vars[i]);
792 FOR_EACH_REFERENCED_VAR(var, rvi)
794 var_ann_t ann = var_ann (var);
795 NUM_REFERENCES_CLEAR (ann);
798 free (ai->addressable_vars);
800 for (i = 0; i < ai->num_pointers; i++)
801 free (ai->pointers[i]);
802 free (ai->pointers);
804 BITMAP_FREE (ai->written_vars);
805 BITMAP_FREE (ai->dereferenced_ptrs_store);
806 BITMAP_FREE (ai->dereferenced_ptrs_load);
807 bitmap_obstack_release (&alias_obstack);
808 free (ai);
810 delete_points_to_sets ();
813 /* Create name tags for all the pointers that have been dereferenced.
814 We only create a name tag for a pointer P if P is found to point to
815 a set of variables (so that we can alias them to *P) or if it is
816 the result of a call to malloc (which means that P cannot point to
817 anything else nor alias any other variable).
819 If two pointers P and Q point to the same set of variables, they
820 are assigned the same name tag. */
822 static void
823 create_name_tags (void)
825 size_t i;
826 VEC (tree, heap) *with_ptvars = NULL;
827 tree ptr;
829 /* Collect the list of pointers with a non-empty points to set. */
830 for (i = 1; i < num_ssa_names; i++)
832 tree ptr = ssa_name (i);
833 struct ptr_info_def *pi;
835 if (!ptr
836 || !POINTER_TYPE_P (TREE_TYPE (ptr))
837 || !SSA_NAME_PTR_INFO (ptr))
838 continue;
840 pi = SSA_NAME_PTR_INFO (ptr);
842 if (pi->pt_anything || !pi->is_dereferenced)
844 /* No name tags for pointers that have not been
845 dereferenced or point to an arbitrary location. */
846 pi->name_mem_tag = NULL_TREE;
847 continue;
850 /* Set pt_anything on the pointers without pt_vars filled in so
851 that they are assigned a type tag. */
853 if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
854 VEC_safe_push (tree, heap, with_ptvars, ptr);
855 else
856 set_pt_anything (ptr);
859 /* If we didn't find any pointers with pt_vars set, we're done. */
860 if (!with_ptvars)
861 return;
863 /* Now go through the pointers with pt_vars, and find a name tag
864 with the same pt_vars as this pointer, or create one if one
865 doesn't exist. */
866 for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
868 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
869 size_t j;
870 tree ptr2;
871 tree old_name_tag = pi->name_mem_tag;
873 /* If PTR points to a set of variables, check if we don't
874 have another pointer Q with the same points-to set before
875 creating a tag. If so, use Q's tag instead of creating a
876 new one.
878 This is important for not creating unnecessary symbols
879 and also for copy propagation. If we ever need to
880 propagate PTR into Q or vice-versa, we would run into
881 problems if they both had different name tags because
882 they would have different SSA version numbers (which
883 would force us to take the name tags in and out of SSA). */
884 for (j = 0; j < i && VEC_iterate (tree, with_ptvars, j, ptr2); j++)
886 struct ptr_info_def *qi = SSA_NAME_PTR_INFO (ptr2);
888 if (bitmap_equal_p (pi->pt_vars, qi->pt_vars))
890 pi->name_mem_tag = qi->name_mem_tag;
891 break;
895 /* If we didn't find a pointer with the same points-to set
896 as PTR, create a new name tag if needed. */
897 if (pi->name_mem_tag == NULL_TREE)
898 pi->name_mem_tag = get_nmt_for (ptr);
900 /* If the new name tag computed for PTR is different than
901 the old name tag that it used to have, then the old tag
902 needs to be removed from the IL, so we mark it for
903 renaming. */
904 if (old_name_tag && old_name_tag != pi->name_mem_tag)
905 mark_sym_for_renaming (old_name_tag);
907 TREE_THIS_VOLATILE (pi->name_mem_tag)
908 |= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
910 /* Mark the new name tag for renaming. */
911 mark_sym_for_renaming (pi->name_mem_tag);
914 VEC_free (tree, heap, with_ptvars);
918 /* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
919 the name memory tag (NMT) associated with P_i. If P_i escapes, then its
920 name tag and the variables it points-to are call-clobbered. Finally, if
921 P_i escapes and we could not determine where it points to, then all the
922 variables in the same alias set as *P_i are marked call-clobbered. This
923 is necessary because we must assume that P_i may take the address of any
924 variable in the same alias set. */
926 static void
927 compute_flow_sensitive_aliasing (struct alias_info *ai)
929 size_t i;
931 for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
933 tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
934 if (!find_what_p_points_to (ptr))
935 set_pt_anything (ptr);
938 create_name_tags ();
940 for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
942 unsigned j;
943 tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
944 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
945 var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
946 bitmap_iterator bi;
949 /* Set up aliasing information for PTR's name memory tag (if it has
950 one). Note that only pointers that have been dereferenced will
951 have a name memory tag. */
952 if (pi->name_mem_tag && pi->pt_vars)
953 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
955 add_may_alias (pi->name_mem_tag, referenced_var (j));
956 add_may_alias (v_ann->type_mem_tag, referenced_var (j));
962 /* Compute type-based alias sets. Traverse all the pointers and
963 addressable variables found in setup_pointers_and_addressables.
965 For every pointer P in AI->POINTERS and addressable variable V in
966 AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's type
967 memory tag (TMT) if their alias sets conflict. V is then marked as
968 an alias tag so that the operand scanner knows that statements
969 containing V have aliased operands. */
971 static void
972 compute_flow_insensitive_aliasing (struct alias_info *ai)
974 size_t i;
976 /* Initialize counter for the total number of virtual operands that
977 aliasing will introduce. When AI->TOTAL_ALIAS_VOPS goes beyond the
978 threshold set by --params max-alias-vops, we enable alias
979 grouping. */
980 ai->total_alias_vops = 0;
982 /* For every pointer P, determine which addressable variables may alias
983 with P's type memory tag. */
984 for (i = 0; i < ai->num_pointers; i++)
986 size_t j;
987 struct alias_map_d *p_map = ai->pointers[i];
988 tree tag = var_ann (p_map->var)->type_mem_tag;
989 var_ann_t tag_ann = var_ann (tag);
991 p_map->total_alias_vops = 0;
992 p_map->may_aliases = BITMAP_ALLOC (&alias_obstack);
994 for (j = 0; j < ai->num_addressable_vars; j++)
996 struct alias_map_d *v_map;
997 var_ann_t v_ann;
998 tree var;
999 bool tag_stored_p, var_stored_p;
1001 v_map = ai->addressable_vars[j];
1002 var = v_map->var;
1003 v_ann = var_ann (var);
1005 /* Skip memory tags and variables that have never been
1006 written to. We also need to check if the variables are
1007 call-clobbered because they may be overwritten by
1008 function calls.
1010 Note this is effectively random accessing elements in
1011 the sparse bitset, which can be highly inefficient.
1012 So we first check the call_clobbered status of the
1013 tag and variable before querying the bitmap. */
1014 tag_stored_p = is_call_clobbered (tag)
1015 || bitmap_bit_p (ai->written_vars, DECL_UID (tag));
1016 var_stored_p = is_call_clobbered (var)
1017 || bitmap_bit_p (ai->written_vars, DECL_UID (var));
1018 if (!tag_stored_p && !var_stored_p)
1019 continue;
1021 if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
1023 subvar_t svars;
1024 size_t num_tag_refs, num_var_refs;
1026 num_tag_refs = NUM_REFERENCES (tag_ann);
1027 num_var_refs = NUM_REFERENCES (v_ann);
1029 /* Add VAR to TAG's may-aliases set. */
1031 /* If this is an aggregate, we may have subvariables for it
1032 that need to be pointed to. */
1033 if (var_can_have_subvars (var)
1034 && (svars = get_subvars_for_var (var)))
1036 subvar_t sv;
1038 for (sv = svars; sv; sv = sv->next)
1040 add_may_alias (tag, sv->var);
1041 /* Update the bitmap used to represent TAG's alias set
1042 in case we need to group aliases. */
1043 bitmap_set_bit (p_map->may_aliases, DECL_UID (sv->var));
1046 else
1048 add_may_alias (tag, var);
1049 /* Update the bitmap used to represent TAG's alias set
1050 in case we need to group aliases. */
1051 bitmap_set_bit (p_map->may_aliases, DECL_UID (var));
1054 /* Update the total number of virtual operands due to
1055 aliasing. Since we are adding one more alias to TAG's
1056 may-aliases set, the total number of virtual operands due
1057 to aliasing will be increased by the number of references
1058 made to VAR and TAG (every reference to TAG will also
1059 count as a reference to VAR). */
1060 ai->total_alias_vops += (num_var_refs + num_tag_refs);
1061 p_map->total_alias_vops += (num_var_refs + num_tag_refs);
1068 /* Since this analysis is based exclusively on symbols, it fails to
1069 handle cases where two pointers P and Q have different memory
1070 tags with conflicting alias set numbers but no aliased symbols in
1071 common.
1073 For example, suppose that we have two memory tags TMT.1 and TMT.2
1074 such that
1076 may-aliases (TMT.1) = { a }
1077 may-aliases (TMT.2) = { b }
1079 and the alias set number of TMT.1 conflicts with that of TMT.2.
1080 Since they don't have symbols in common, loads and stores from
1081 TMT.1 and TMT.2 will seem independent of each other, which will
1082 lead to the optimizers making invalid transformations (see
1083 testsuite/gcc.c-torture/execute/pr15262-[12].c).
1085 To avoid this problem, we do a final traversal of AI->POINTERS
1086 looking for pairs of pointers that have no aliased symbols in
1087 common and yet have conflicting alias set numbers. */
1088 for (i = 0; i < ai->num_pointers; i++)
1090 size_t j;
1091 struct alias_map_d *p_map1 = ai->pointers[i];
1092 tree tag1 = var_ann (p_map1->var)->type_mem_tag;
1093 bitmap may_aliases1 = p_map1->may_aliases;
1095 for (j = i + 1; j < ai->num_pointers; j++)
1097 struct alias_map_d *p_map2 = ai->pointers[j];
1098 tree tag2 = var_ann (p_map2->var)->type_mem_tag;
1099 bitmap may_aliases2 = p_map2->may_aliases;
1101 /* If the pointers may not point to each other, do nothing. */
1102 if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
1103 continue;
1105 /* The two pointers may alias each other. If they already have
1106 symbols in common, do nothing. */
1107 if (bitmap_intersect_p (may_aliases1, may_aliases2))
1108 continue;
1110 if (!bitmap_empty_p (may_aliases2))
1112 unsigned int k;
1113 bitmap_iterator bi;
1115 /* Add all the aliases for TAG2 into TAG1's alias set.
1116 FIXME, update grouping heuristic counters. */
1117 EXECUTE_IF_SET_IN_BITMAP (may_aliases2, 0, k, bi)
1118 add_may_alias (tag1, referenced_var (k));
1119 bitmap_ior_into (may_aliases1, may_aliases2);
1121 else
1123 /* Since TAG2 does not have any aliases of its own, add
1124 TAG2 itself to the alias set of TAG1. */
1125 add_may_alias (tag1, tag2);
1126 bitmap_set_bit (may_aliases1, DECL_UID (tag2));
1131 if (dump_file)
1132 fprintf (dump_file, "\n%s: Total number of aliased vops: %ld\n",
1133 get_name (current_function_decl),
1134 ai->total_alias_vops);
1138 /* Comparison function for qsort used in group_aliases. */
1140 static int
1141 total_alias_vops_cmp (const void *p, const void *q)
1143 const struct alias_map_d **p1 = (const struct alias_map_d **)p;
1144 const struct alias_map_d **p2 = (const struct alias_map_d **)q;
1145 long n1 = (*p1)->total_alias_vops;
1146 long n2 = (*p2)->total_alias_vops;
1148 /* We want to sort in descending order. */
1149 return (n1 > n2 ? -1 : (n1 == n2) ? 0 : 1);
1152 /* Group all the aliases for TAG to make TAG represent all the
1153 variables in its alias set. Update the total number
1154 of virtual operands due to aliasing (AI->TOTAL_ALIAS_VOPS). This
1155 function will make TAG be the unique alias tag for all the
1156 variables in its may-aliases. So, given:
1158 may-aliases(TAG) = { V1, V2, V3 }
1160 This function will group the variables into:
1162 may-aliases(V1) = { TAG }
1163 may-aliases(V2) = { TAG }
1164 may-aliases(V2) = { TAG } */
1166 static void
1167 group_aliases_into (tree tag, bitmap tag_aliases, struct alias_info *ai)
1169 unsigned int i;
1170 var_ann_t tag_ann = var_ann (tag);
1171 size_t num_tag_refs = NUM_REFERENCES (tag_ann);
1172 bitmap_iterator bi;
1174 EXECUTE_IF_SET_IN_BITMAP (tag_aliases, 0, i, bi)
1176 tree var = referenced_var (i);
1177 var_ann_t ann = var_ann (var);
1179 /* Make TAG the unique alias of VAR. */
1180 ann->is_alias_tag = 0;
1181 ann->may_aliases = NULL;
1183 /* Note that VAR and TAG may be the same if the function has no
1184 addressable variables (see the discussion at the end of
1185 setup_pointers_and_addressables). */
1186 if (var != tag)
1187 add_may_alias (var, tag);
1189 /* Reduce total number of virtual operands contributed
1190 by TAG on behalf of VAR. Notice that the references to VAR
1191 itself won't be removed. We will merely replace them with
1192 references to TAG. */
1193 ai->total_alias_vops -= num_tag_refs;
1196 /* We have reduced the number of virtual operands that TAG makes on
1197 behalf of all the variables formerly aliased with it. However,
1198 we have also "removed" all the virtual operands for TAG itself,
1199 so we add them back. */
1200 ai->total_alias_vops += num_tag_refs;
1202 /* TAG no longer has any aliases. */
1203 tag_ann->may_aliases = NULL;
1207 /* Group may-aliases sets to reduce the number of virtual operands due
1208 to aliasing.
1210 1- Sort the list of pointers in decreasing number of contributed
1211 virtual operands.
1213 2- Take the first entry in AI->POINTERS and revert the role of
1214 the memory tag and its aliases. Usually, whenever an aliased
1215 variable Vi is found to alias with a memory tag T, we add Vi
1216 to the may-aliases set for T. Meaning that after alias
1217 analysis, we will have:
1219 may-aliases(T) = { V1, V2, V3, ..., Vn }
1221 This means that every statement that references T, will get 'n'
1222 virtual operands for each of the Vi tags. But, when alias
1223 grouping is enabled, we make T an alias tag and add it to the
1224 alias set of all the Vi variables:
1226 may-aliases(V1) = { T }
1227 may-aliases(V2) = { T }
1229 may-aliases(Vn) = { T }
1231 This has two effects: (a) statements referencing T will only get
1232 a single virtual operand, and, (b) all the variables Vi will now
1233 appear to alias each other. So, we lose alias precision to
1234 improve compile time. But, in theory, a program with such a high
1235 level of aliasing should not be very optimizable in the first
1236 place.
1238 3- Since variables may be in the alias set of more than one
1239 memory tag, the grouping done in step (2) needs to be extended
1240 to all the memory tags that have a non-empty intersection with
1241 the may-aliases set of tag T. For instance, if we originally
1242 had these may-aliases sets:
1244 may-aliases(T) = { V1, V2, V3 }
1245 may-aliases(R) = { V2, V4 }
1247 In step (2) we would have reverted the aliases for T as:
1249 may-aliases(V1) = { T }
1250 may-aliases(V2) = { T }
1251 may-aliases(V3) = { T }
1253 But note that now V2 is no longer aliased with R. We could
1254 add R to may-aliases(V2), but we are in the process of
1255 grouping aliases to reduce virtual operands so what we do is
1256 add V4 to the grouping to obtain:
1258 may-aliases(V1) = { T }
1259 may-aliases(V2) = { T }
1260 may-aliases(V3) = { T }
1261 may-aliases(V4) = { T }
1263 4- If the total number of virtual operands due to aliasing is
1264 still above the threshold set by max-alias-vops, go back to (2). */
1266 static void
1267 group_aliases (struct alias_info *ai)
1269 size_t i;
1271 /* Sort the POINTERS array in descending order of contributed
1272 virtual operands. */
1273 qsort (ai->pointers, ai->num_pointers, sizeof (struct alias_map_d *),
1274 total_alias_vops_cmp);
1276 /* For every pointer in AI->POINTERS, reverse the roles of its tag
1277 and the tag's may-aliases set. */
1278 for (i = 0; i < ai->num_pointers; i++)
1280 size_t j;
1281 tree tag1 = var_ann (ai->pointers[i]->var)->type_mem_tag;
1282 bitmap tag1_aliases = ai->pointers[i]->may_aliases;
1284 /* Skip tags that have been grouped already. */
1285 if (ai->pointers[i]->grouped_p)
1286 continue;
1288 /* See if TAG1 had any aliases in common with other type tags.
1289 If we find a TAG2 with common aliases with TAG1, add TAG2's
1290 aliases into TAG1. */
1291 for (j = i + 1; j < ai->num_pointers; j++)
1293 bitmap tag2_aliases = ai->pointers[j]->may_aliases;
1295 if (bitmap_intersect_p (tag1_aliases, tag2_aliases))
1297 tree tag2 = var_ann (ai->pointers[j]->var)->type_mem_tag;
1299 bitmap_ior_into (tag1_aliases, tag2_aliases);
1301 /* TAG2 does not need its aliases anymore. */
1302 bitmap_clear (tag2_aliases);
1303 var_ann (tag2)->may_aliases = NULL;
1305 /* TAG1 is the unique alias of TAG2. */
1306 add_may_alias (tag2, tag1);
1308 ai->pointers[j]->grouped_p = true;
1312 /* Now group all the aliases we collected into TAG1. */
1313 group_aliases_into (tag1, tag1_aliases, ai);
1315 /* If we've reduced total number of virtual operands below the
1316 threshold, stop. */
1317 if (ai->total_alias_vops < MAX_ALIASED_VOPS)
1318 break;
1321 /* Finally, all the variables that have been grouped cannot be in
1322 the may-alias set of name memory tags. Suppose that we have
1323 grouped the aliases in this code so that may-aliases(a) = TMT.20
1325 p_5 = &a;
1327 # a_9 = V_MAY_DEF <a_8>
1328 p_5->field = 0
1329 ... Several modifications to TMT.20 ...
1330 # VUSE <a_9>
1331 x_30 = p_5->field
1333 Since p_5 points to 'a', the optimizers will try to propagate 0
1334 into p_5->field, but that is wrong because there have been
1335 modifications to 'TMT.20' in between. To prevent this we have to
1336 replace 'a' with 'TMT.20' in the name tag of p_5. */
1337 for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
1339 size_t j;
1340 tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
1341 tree name_tag = SSA_NAME_PTR_INFO (ptr)->name_mem_tag;
1342 varray_type aliases;
1344 if (name_tag == NULL_TREE)
1345 continue;
1347 aliases = var_ann (name_tag)->may_aliases;
1348 for (j = 0; aliases && j < VARRAY_ACTIVE_SIZE (aliases); j++)
1350 tree alias = VARRAY_TREE (aliases, j);
1351 var_ann_t ann = var_ann (alias);
1353 if ((!MTAG_P (alias) || TREE_CODE (alias) == STRUCT_FIELD_TAG)
1354 && ann->may_aliases)
1356 tree new_alias;
1358 gcc_assert (VARRAY_ACTIVE_SIZE (ann->may_aliases) == 1);
1360 new_alias = VARRAY_TREE (ann->may_aliases, 0);
1361 replace_may_alias (name_tag, j, new_alias);
1366 if (dump_file)
1367 fprintf (dump_file,
1368 "%s: Total number of aliased vops after grouping: %ld%s\n",
1369 get_name (current_function_decl),
1370 ai->total_alias_vops,
1371 (ai->total_alias_vops < 0) ? " (negative values are OK)" : "");
1375 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
1377 static void
1378 create_alias_map_for (tree var, struct alias_info *ai)
1380 struct alias_map_d *alias_map;
1381 alias_map = xcalloc (1, sizeof (*alias_map));
1382 alias_map->var = var;
1383 alias_map->set = get_alias_set (var);
1384 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
1388 /* Create memory tags for all the dereferenced pointers and build the
1389 ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
1390 sets. Based on the address escape and points-to information collected
1391 earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
1392 variables whose address is not needed anymore. */
1394 static void
1395 setup_pointers_and_addressables (struct alias_info *ai)
1397 size_t n_vars, num_addressable_vars, num_pointers;
1398 referenced_var_iterator rvi;
1399 tree var;
1400 VEC (tree, heap) *varvec = NULL;
1401 safe_referenced_var_iterator srvi;
1403 /* Size up the arrays ADDRESSABLE_VARS and POINTERS. */
1404 num_addressable_vars = num_pointers = 0;
1406 FOR_EACH_REFERENCED_VAR (var, rvi)
1408 if (may_be_aliased (var))
1409 num_addressable_vars++;
1411 if (POINTER_TYPE_P (TREE_TYPE (var)))
1413 /* Since we don't keep track of volatile variables, assume that
1414 these pointers are used in indirect store operations. */
1415 if (TREE_THIS_VOLATILE (var))
1416 bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
1418 num_pointers++;
1422 /* Create ADDRESSABLE_VARS and POINTERS. Note that these arrays are
1423 always going to be slightly bigger than we actually need them
1424 because some TREE_ADDRESSABLE variables will be marked
1425 non-addressable below and only pointers with unique type tags are
1426 going to be added to POINTERS. */
1427 ai->addressable_vars = xcalloc (num_addressable_vars,
1428 sizeof (struct alias_map_d *));
1429 ai->pointers = xcalloc (num_pointers, sizeof (struct alias_map_d *));
1430 ai->num_addressable_vars = 0;
1431 ai->num_pointers = 0;
1433 /* Since we will be creating type memory tags within this loop, cache the
1434 value of NUM_REFERENCED_VARS to avoid processing the additional tags
1435 unnecessarily. */
1436 n_vars = num_referenced_vars;
1438 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
1440 var_ann_t v_ann = var_ann (var);
1441 subvar_t svars;
1443 /* Name memory tags already have flow-sensitive aliasing
1444 information, so they need not be processed by
1445 compute_flow_insensitive_aliasing. Similarly, type memory
1446 tags are already accounted for when we process their
1447 associated pointer.
1449 Structure fields, on the other hand, have to have some of this
1450 information processed for them, but it's pointless to mark them
1451 non-addressable (since they are fake variables anyway). */
1452 if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
1453 continue;
1455 /* Remove the ADDRESSABLE flag from every addressable variable whose
1456 address is not needed anymore. This is caused by the propagation
1457 of ADDR_EXPR constants into INDIRECT_REF expressions and the
1458 removal of dead pointer assignments done by the early scalar
1459 cleanup passes. */
1460 if (TREE_ADDRESSABLE (var))
1462 if (!bitmap_bit_p (addressable_vars, DECL_UID (var))
1463 && TREE_CODE (var) != RESULT_DECL
1464 && !is_global_var (var))
1466 bool okay_to_mark = true;
1468 /* Since VAR is now a regular GIMPLE register, we will need
1469 to rename VAR into SSA afterwards. */
1470 mark_sym_for_renaming (var);
1472 /* If VAR can have sub-variables, and any of its
1473 sub-variables has its address taken, then we cannot
1474 remove the addressable flag from VAR. */
1475 if (var_can_have_subvars (var)
1476 && (svars = get_subvars_for_var (var)))
1478 subvar_t sv;
1480 for (sv = svars; sv; sv = sv->next)
1482 if (bitmap_bit_p (addressable_vars, DECL_UID (sv->var)))
1483 okay_to_mark = false;
1484 mark_sym_for_renaming (sv->var);
1488 /* The address of VAR is not needed, remove the
1489 addressable bit, so that it can be optimized as a
1490 regular variable. */
1491 if (okay_to_mark)
1492 mark_non_addressable (var);
1496 /* Global variables and addressable locals may be aliased. Create an
1497 entry in ADDRESSABLE_VARS for VAR. */
1498 if (may_be_aliased (var))
1500 create_alias_map_for (var, ai);
1501 mark_sym_for_renaming (var);
1504 /* Add pointer variables that have been dereferenced to the POINTERS
1505 array and create a type memory tag for them. */
1506 if (POINTER_TYPE_P (TREE_TYPE (var)))
1508 if ((bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var))
1509 || bitmap_bit_p (ai->dereferenced_ptrs_load, DECL_UID (var))))
1511 tree tag;
1512 var_ann_t t_ann;
1514 /* If pointer VAR still doesn't have a memory tag
1515 associated with it, create it now or re-use an
1516 existing one. */
1517 tag = get_tmt_for (var, ai);
1518 t_ann = var_ann (tag);
1520 /* The type tag will need to be renamed into SSA
1521 afterwards. Note that we cannot do this inside
1522 get_tmt_for because aliasing may run multiple times
1523 and we only create type tags the first time. */
1524 mark_sym_for_renaming (tag);
1526 /* Similarly, if pointer VAR used to have another type
1527 tag, we will need to process it in the renamer to
1528 remove the stale virtual operands. */
1529 if (v_ann->type_mem_tag)
1530 mark_sym_for_renaming (v_ann->type_mem_tag);
1532 /* Associate the tag with pointer VAR. */
1533 v_ann->type_mem_tag = tag;
1535 /* If pointer VAR has been used in a store operation,
1536 then its memory tag must be marked as written-to. */
1537 if (bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var)))
1538 bitmap_set_bit (ai->written_vars, DECL_UID (tag));
1540 /* All the dereferences of pointer VAR count as
1541 references of TAG. Since TAG can be associated with
1542 several pointers, add the dereferences of VAR to the
1543 TAG. */
1544 NUM_REFERENCES_SET (t_ann,
1545 NUM_REFERENCES (t_ann)
1546 + NUM_REFERENCES (v_ann));
1548 else
1550 /* The pointer has not been dereferenced. If it had a
1551 type memory tag, remove it and mark the old tag for
1552 renaming to remove it out of the IL. */
1553 var_ann_t ann = var_ann (var);
1554 tree tag = ann->type_mem_tag;
1555 if (tag)
1557 mark_sym_for_renaming (tag);
1558 ann->type_mem_tag = NULL_TREE;
1563 VEC_free (tree, heap, varvec);
1567 /* Determine whether to use .GLOBAL_VAR to model call clobbering semantics. At
1568 every call site, we need to emit V_MAY_DEF expressions to represent the
1569 clobbering effects of the call for variables whose address escapes the
1570 current function.
1572 One approach is to group all call-clobbered variables into a single
1573 representative that is used as an alias of every call-clobbered variable
1574 (.GLOBAL_VAR). This works well, but it ties the optimizer hands because
1575 references to any call clobbered variable is a reference to .GLOBAL_VAR.
1577 The second approach is to emit a clobbering V_MAY_DEF for every
1578 call-clobbered variable at call sites. This is the preferred way in terms
1579 of optimization opportunities but it may create too many V_MAY_DEF operands
1580 if there are many call clobbered variables and function calls in the
1581 function.
1583 To decide whether or not to use .GLOBAL_VAR we multiply the number of
1584 function calls found by the number of call-clobbered variables. If that
1585 product is beyond a certain threshold, as determined by the parameterized
1586 values shown below, we use .GLOBAL_VAR.
1588 FIXME. This heuristic should be improved. One idea is to use several
1589 .GLOBAL_VARs of different types instead of a single one. The thresholds
1590 have been derived from a typical bootstrap cycle, including all target
1591 libraries. Compile times were found increase by ~1% compared to using
1592 .GLOBAL_VAR. */
1594 static void
1595 maybe_create_global_var (struct alias_info *ai)
1597 unsigned i, n_clobbered;
1598 bitmap_iterator bi;
1600 /* No need to create it, if we have one already. */
1601 if (global_var == NULL_TREE)
1603 /* Count all the call-clobbered variables. */
1604 n_clobbered = 0;
1605 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1607 n_clobbered++;
1610 /* If the number of virtual operands that would be needed to
1611 model all the call-clobbered variables is larger than
1612 GLOBAL_VAR_THRESHOLD, create .GLOBAL_VAR.
1614 Also create .GLOBAL_VAR if there are no call-clobbered
1615 variables and the program contains a mixture of pure/const
1616 and regular function calls. This is to avoid the problem
1617 described in PR 20115:
1619 int X;
1620 int func_pure (void) { return X; }
1621 int func_non_pure (int a) { X += a; }
1622 int foo ()
1624 int a = func_pure ();
1625 func_non_pure (a);
1626 a = func_pure ();
1627 return a;
1630 Since foo() has no call-clobbered variables, there is
1631 no relationship between the calls to func_pure and
1632 func_non_pure. Since func_pure has no side-effects, value
1633 numbering optimizations elide the second call to func_pure.
1634 So, if we have some pure/const and some regular calls in the
1635 program we create .GLOBAL_VAR to avoid missing these
1636 relations. */
1637 if (ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD
1638 || (n_clobbered == 0
1639 && ai->num_calls_found > 0
1640 && ai->num_pure_const_calls_found > 0
1641 && ai->num_calls_found > ai->num_pure_const_calls_found))
1642 create_global_var ();
1645 /* Mark all call-clobbered symbols for renaming. Since the initial
1646 rewrite into SSA ignored all call sites, we may need to rename
1647 .GLOBAL_VAR and the call-clobbered variables. */
1648 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1650 tree var = referenced_var (i);
1652 /* If the function has calls to clobbering functions and
1653 .GLOBAL_VAR has been created, make it an alias for all
1654 call-clobbered variables. */
1655 if (global_var && var != global_var)
1657 subvar_t svars;
1658 add_may_alias (var, global_var);
1659 if (var_can_have_subvars (var)
1660 && (svars = get_subvars_for_var (var)))
1662 subvar_t sv;
1663 for (sv = svars; sv; sv = sv->next)
1664 mark_sym_for_renaming (sv->var);
1668 mark_sym_for_renaming (var);
1673 /* Return TRUE if pointer PTR may point to variable VAR.
1675 MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR
1676 This is needed because when checking for type conflicts we are
1677 interested in the alias set of the memory location pointed-to by
1678 PTR. The alias set of PTR itself is irrelevant.
1680 VAR_ALIAS_SET is the alias set for VAR. */
1682 static bool
1683 may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
1684 tree var, HOST_WIDE_INT var_alias_set,
1685 bool alias_set_only)
1687 tree mem;
1688 var_ann_t m_ann;
1690 alias_stats.alias_queries++;
1691 alias_stats.simple_queries++;
1693 /* By convention, a variable cannot alias itself. */
1694 mem = var_ann (ptr)->type_mem_tag;
1695 if (mem == var)
1697 alias_stats.alias_noalias++;
1698 alias_stats.simple_resolved++;
1699 return false;
1702 /* If -fargument-noalias-global is >1, pointer arguments may
1703 not point to global variables. */
1704 if (flag_argument_noalias > 1 && is_global_var (var)
1705 && TREE_CODE (ptr) == PARM_DECL)
1707 alias_stats.alias_noalias++;
1708 alias_stats.simple_resolved++;
1709 return false;
1712 /* If either MEM or VAR is a read-only global and the other one
1713 isn't, then PTR cannot point to VAR. */
1714 if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
1715 || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
1717 alias_stats.alias_noalias++;
1718 alias_stats.simple_resolved++;
1719 return false;
1722 m_ann = var_ann (mem);
1724 gcc_assert (TREE_CODE (mem) == TYPE_MEMORY_TAG);
1726 alias_stats.tbaa_queries++;
1728 /* If the alias sets don't conflict then MEM cannot alias VAR. */
1729 if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
1731 alias_stats.alias_noalias++;
1732 alias_stats.tbaa_resolved++;
1733 return false;
1736 /* If var is a record or union type, ptr cannot point into var
1737 unless there is some operation explicit address operation in the
1738 program that can reference a field of the ptr's dereferenced
1739 type. This also assumes that the types of both var and ptr are
1740 contained within the compilation unit, and that there is no fancy
1741 addressing arithmetic associated with any of the types
1742 involved. */
1744 if ((mem_alias_set != 0) && (var_alias_set != 0))
1746 tree ptr_type = TREE_TYPE (ptr);
1747 tree var_type = TREE_TYPE (var);
1749 /* The star count is -1 if the type at the end of the pointer_to
1750 chain is not a record or union type. */
1751 if ((!alias_set_only) &&
1752 ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
1754 int ptr_star_count = 0;
1756 /* Ipa_type_escape_star_count_of_interesting_type is a little to
1757 restrictive for the pointer type, need to allow pointers to
1758 primitive types as long as those types cannot be pointers
1759 to everything. */
1760 while (POINTER_TYPE_P (ptr_type))
1761 /* Strip the *'s off. */
1763 ptr_type = TREE_TYPE (ptr_type);
1764 ptr_star_count++;
1767 /* There does not appear to be a better test to see if the
1768 pointer type was one of the pointer to everything
1769 types. */
1771 if (ptr_star_count > 0)
1773 alias_stats.structnoaddress_queries++;
1774 if (ipa_type_escape_field_does_not_clobber_p (var_type,
1775 TREE_TYPE (ptr)))
1777 alias_stats.structnoaddress_resolved++;
1778 alias_stats.alias_noalias++;
1779 return false;
1782 else if (ptr_star_count == 0)
1784 /* If ptr_type was not really a pointer to type, it cannot
1785 alias. */
1786 alias_stats.structnoaddress_queries++;
1787 alias_stats.structnoaddress_resolved++;
1788 alias_stats.alias_noalias++;
1789 return false;
1794 alias_stats.alias_mayalias++;
1795 return true;
1799 /* Add ALIAS to the set of variables that may alias VAR. */
1801 static void
1802 add_may_alias (tree var, tree alias)
1804 size_t i;
1805 var_ann_t v_ann = get_var_ann (var);
1806 var_ann_t a_ann = get_var_ann (alias);
1808 /* Don't allow self-referential aliases. */
1809 gcc_assert (var != alias);
1811 /* ALIAS must be addressable if it's being added to an alias set. */
1812 #if 1
1813 TREE_ADDRESSABLE (alias) = 1;
1814 #else
1815 gcc_assert (may_be_aliased (alias));
1816 #endif
1818 if (v_ann->may_aliases == NULL)
1819 VARRAY_TREE_INIT (v_ann->may_aliases, 2, "aliases");
1821 /* Avoid adding duplicates. */
1822 for (i = 0; i < VARRAY_ACTIVE_SIZE (v_ann->may_aliases); i++)
1823 if (alias == VARRAY_TREE (v_ann->may_aliases, i))
1824 return;
1826 VARRAY_PUSH_TREE (v_ann->may_aliases, alias);
1827 a_ann->is_alias_tag = 1;
1831 /* Replace alias I in the alias sets of VAR with NEW_ALIAS. */
1833 static void
1834 replace_may_alias (tree var, size_t i, tree new_alias)
1836 var_ann_t v_ann = var_ann (var);
1837 VARRAY_TREE (v_ann->may_aliases, i) = new_alias;
1841 /* Mark pointer PTR as pointing to an arbitrary memory location. */
1843 static void
1844 set_pt_anything (tree ptr)
1846 struct ptr_info_def *pi = get_ptr_info (ptr);
1848 pi->pt_anything = 1;
1849 pi->pt_vars = NULL;
1851 /* The pointer used to have a name tag, but we now found it pointing
1852 to an arbitrary location. The name tag needs to be renamed and
1853 disassociated from PTR. */
1854 if (pi->name_mem_tag)
1856 mark_sym_for_renaming (pi->name_mem_tag);
1857 pi->name_mem_tag = NULL_TREE;
1862 /* Return true if STMT is an "escape" site from the current function. Escape
1863 sites those statements which might expose the address of a variable
1864 outside the current function. STMT is an escape site iff:
1866 1- STMT is a function call, or
1867 2- STMT is an __asm__ expression, or
1868 3- STMT is an assignment to a non-local variable, or
1869 4- STMT is a return statement.
1871 AI points to the alias information collected so far. */
1873 bool
1874 is_escape_site (tree stmt, struct alias_info *ai)
1876 tree call = get_call_expr_in (stmt);
1877 if (call != NULL_TREE)
1879 ai->num_calls_found++;
1881 if (!TREE_SIDE_EFFECTS (call))
1882 ai->num_pure_const_calls_found++;
1884 return true;
1886 else if (TREE_CODE (stmt) == ASM_EXPR)
1887 return true;
1888 else if (TREE_CODE (stmt) == MODIFY_EXPR)
1890 tree lhs = TREE_OPERAND (stmt, 0);
1892 /* Get to the base of _REF nodes. */
1893 if (TREE_CODE (lhs) != SSA_NAME)
1894 lhs = get_base_address (lhs);
1896 /* If we couldn't recognize the LHS of the assignment, assume that it
1897 is a non-local store. */
1898 if (lhs == NULL_TREE)
1899 return true;
1901 /* If the RHS is a conversion between a pointer and an integer, the
1902 pointer escapes since we can't track the integer. */
1903 if ((TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
1904 || TREE_CODE (TREE_OPERAND (stmt, 1)) == CONVERT_EXPR
1905 || TREE_CODE (TREE_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
1906 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND
1907 (TREE_OPERAND (stmt, 1), 0)))
1908 && !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 1))))
1909 return true;
1911 /* If the LHS is an SSA name, it can't possibly represent a non-local
1912 memory store. */
1913 if (TREE_CODE (lhs) == SSA_NAME)
1914 return false;
1916 /* FIXME: LHS is not an SSA_NAME. Even if it's an assignment to a
1917 local variables we cannot be sure if it will escape, because we
1918 don't have information about objects not in SSA form. Need to
1919 implement something along the lines of
1921 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
1922 Midkiff, ``Escape analysis for java,'' in Proceedings of the
1923 Conference on Object-Oriented Programming Systems, Languages, and
1924 Applications (OOPSLA), pp. 1-19, 1999. */
1925 return true;
1927 else if (TREE_CODE (stmt) == RETURN_EXPR)
1928 return true;
1930 return false;
1934 /* Create a new memory tag of type TYPE.
1935 Does NOT push it into the current binding. */
1937 static tree
1938 create_tag_raw (enum tree_code code, tree type, const char *prefix)
1940 tree tmp_var;
1941 tree new_type;
1943 /* Make the type of the variable writable. */
1944 new_type = build_type_variant (type, 0, 0);
1945 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
1947 tmp_var = build_decl (code, create_tmp_var_name (prefix),
1948 type);
1949 /* Make the variable writable. */
1950 TREE_READONLY (tmp_var) = 0;
1952 /* It doesn't start out global. */
1953 MTAG_GLOBAL (tmp_var) = 0;
1954 TREE_STATIC (tmp_var) = 0;
1955 TREE_USED (tmp_var) = 1;
1957 return tmp_var;
1960 /* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
1961 is considered to represent all the pointers whose pointed-to types are
1962 in the same alias set class. Otherwise, the tag represents a single
1963 SSA_NAME pointer variable. */
1965 static tree
1966 create_memory_tag (tree type, bool is_type_tag)
1968 var_ann_t ann;
1969 tree tag = create_tag_raw (is_type_tag ? TYPE_MEMORY_TAG : NAME_MEMORY_TAG,
1970 type, (is_type_tag) ? "TMT" : "NMT");
1972 /* By default, memory tags are local variables. Alias analysis will
1973 determine whether they should be considered globals. */
1974 DECL_CONTEXT (tag) = current_function_decl;
1976 /* Memory tags are by definition addressable. */
1977 TREE_ADDRESSABLE (tag) = 1;
1979 ann = get_var_ann (tag);
1980 ann->type_mem_tag = NULL_TREE;
1982 /* Add the tag to the symbol table. */
1983 add_referenced_tmp_var (tag);
1985 return tag;
1989 /* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
1990 This is used if P_i has been found to point to a specific set of
1991 variables or to a non-aliased memory location like the address returned
1992 by malloc functions. */
1994 static tree
1995 get_nmt_for (tree ptr)
1997 struct ptr_info_def *pi = get_ptr_info (ptr);
1998 tree tag = pi->name_mem_tag;
2000 if (tag == NULL_TREE)
2001 tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
2002 return tag;
/* Return the type memory tag associated to pointer PTR.  A memory tag is an
   artificial variable that represents the memory location pointed-to by
   PTR.  It is used to model the effects of pointer de-references on
   addressable variables.

   AI points to the data gathered during alias analysis.  This function
   populates the array AI->POINTERS.  */

static tree
get_tmt_for (tree ptr, struct alias_info *ai)
{
  size_t i;
  tree tag;
  tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
  HOST_WIDE_INT tag_set = get_alias_set (tag_type);

  /* To avoid creating unnecessary memory tags, only create one memory tag
     per alias set class.  Note that it may be tempting to group
     memory tags based on conflicting alias sets instead of
     equivalence.  That would be wrong because alias sets are not
     necessarily transitive (as demonstrated by the libstdc++ test
     23_containers/vector/cons/4.cc).  Given three alias sets A, B, C
     such that conflicts (A, B) == true and conflicts (A, C) == true,
     it does not necessarily follow that conflicts (B, C) == true.  */
  for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
    {
      struct alias_map_d *curr = ai->pointers[i];
      tree curr_tag = var_ann (curr->var)->type_mem_tag;
      /* Reuse an existing tag only if both the alias set and the
	 read-only-ness of the pointed-to type match.  */
      if (tag_set == curr->set
	  && TYPE_READONLY (tag_type) == TYPE_READONLY (TREE_TYPE (curr_tag)))
	{
	  tag = curr_tag;
	  break;
	}
    }

  /* If VAR cannot alias with any of the existing memory tags, create a new
     tag for PTR and add it to the POINTERS array.  */
  if (tag == NULL_TREE)
    {
      struct alias_map_d *alias_map;

      /* If PTR did not have a type tag already, create a new TMT.*
	 artificial variable representing the memory location
	 pointed-to by PTR.  */
      if (var_ann (ptr)->type_mem_tag == NULL_TREE)
	tag = create_memory_tag (tag_type, true);
      else
	tag = var_ann (ptr)->type_mem_tag;

      /* Add PTR to the POINTERS array.  Note that we are not interested in
	 PTR's alias set.  Instead, we cache the alias set for the memory that
	 PTR points to.  */
      alias_map = xcalloc (1, sizeof (*alias_map));
      alias_map->var = ptr;
      alias_map->set = tag_set;
      ai->pointers[ai->num_pointers++] = alias_map;
    }

  /* If the pointed-to type is volatile, so is the tag.  */
  TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);

  /* Make sure that the type tag has the same alias set as the
     pointed-to type.  */
  gcc_assert (tag_set == get_alias_set (tag));

  /* If PTR's pointed-to type is read-only, then TAG's type must also
     be read-only.  */
  gcc_assert (TYPE_READONLY (tag_type) == TYPE_READONLY (TREE_TYPE (tag)));

  return tag;
}
2080 /* Create GLOBAL_VAR, an artificial global variable to act as a
2081 representative of all the variables that may be clobbered by function
2082 calls. */
2084 static void
2085 create_global_var (void)
2087 global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
2088 void_type_node);
2089 DECL_ARTIFICIAL (global_var) = 1;
2090 TREE_READONLY (global_var) = 0;
2091 DECL_EXTERNAL (global_var) = 1;
2092 TREE_STATIC (global_var) = 1;
2093 TREE_USED (global_var) = 1;
2094 DECL_CONTEXT (global_var) = NULL_TREE;
2095 TREE_THIS_VOLATILE (global_var) = 0;
2096 TREE_ADDRESSABLE (global_var) = 0;
2097 mark_call_clobbered (global_var);
2098 add_referenced_tmp_var (global_var);
2099 mark_sym_for_renaming (global_var);
2103 /* Dump alias statistics on FILE. */
2105 static void
2106 dump_alias_stats (FILE *file)
2108 const char *funcname
2109 = lang_hooks.decl_printable_name (current_function_decl, 2);
2110 fprintf (file, "\nAlias statistics for %s\n\n", funcname);
2111 fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
2112 fprintf (file, "Total alias mayalias results:\t%u\n",
2113 alias_stats.alias_mayalias);
2114 fprintf (file, "Total alias noalias results:\t%u\n",
2115 alias_stats.alias_noalias);
2116 fprintf (file, "Total simple queries:\t%u\n",
2117 alias_stats.simple_queries);
2118 fprintf (file, "Total simple resolved:\t%u\n",
2119 alias_stats.simple_resolved);
2120 fprintf (file, "Total TBAA queries:\t%u\n",
2121 alias_stats.tbaa_queries);
2122 fprintf (file, "Total TBAA resolved:\t%u\n",
2123 alias_stats.tbaa_resolved);
2124 fprintf (file, "Total non-addressable structure type queries:\t%u\n",
2125 alias_stats.structnoaddress_queries);
2126 fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
2127 alias_stats.structnoaddress_resolved);
/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  size_t i;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  referenced_var_iterator rvi;
  tree var;

  fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nDereferenced pointers\n\n");

  /* Dump every referenced variable that carries a type memory tag.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      var_ann_t ann = var_ann (var);
      if (ann->type_mem_tag)
	dump_variable (file, var);
    }

  fprintf (file, "\nType memory tags\n\n");

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (TREE_CODE (var) == TYPE_MEMORY_TAG)
	dump_variable (file, var);
    }

  fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);

  fprintf (file, "SSA_NAME pointers\n\n");
  /* SSA name 0 is unused; names may also be NULL or on the free list.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (ptr == NULL_TREE)
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (!SSA_NAME_IN_FREE_LIST (ptr)
	  && pi
	  && pi->name_mem_tag)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\nName memory tags\n\n");

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (TREE_CODE (var) == NAME_MEMORY_TAG)
	dump_variable (file, var);
    }

  fprintf (file, "\n");
}
/* Like dump_alias_info, but the output goes to stderr.  For use from
   a debugger.  */

void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
2208 /* Return the alias information associated with pointer T. It creates a
2209 new instance if none existed. */
2211 struct ptr_info_def *
2212 get_ptr_info (tree t)
2214 struct ptr_info_def *pi;
2216 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
2218 pi = SSA_NAME_PTR_INFO (t);
2219 if (pi == NULL)
2221 pi = ggc_alloc (sizeof (*pi));
2222 memset ((void *)pi, 0, sizeof (*pi));
2223 SSA_NAME_PTR_INFO (t) = pi;
2226 return pi;
/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  /* PI may be NULL if no points-to information was ever computed.  */
  if (pi)
    {
      if (pi->name_mem_tag)
	{
	  fprintf (file, ", name memory tag: ");
	  print_generic_expr (file, pi->name_mem_tag, dump_flags);
	}

      if (pi->is_dereferenced)
	fprintf (file, ", is dereferenced");

      if (pi->value_escapes_p)
	fprintf (file, ", its value escapes");

      if (pi->pt_anything)
	fprintf (file, ", points-to anything");

      if (pi->pt_null)
	fprintf (file, ", points-to NULL");

      if (pi->pt_vars)
	{
	  unsigned ix;
	  bitmap_iterator bi;

	  /* PT_VARS is a bitmap of referenced-variable indices.  */
	  fprintf (file, ", points-to vars: { ");
	  EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, ix, bi)
	    {
	      print_generic_expr (file, referenced_var (ix), dump_flags);
	      fprintf (file, " ");
	    }
	  fprintf (file, "}");
	}
    }

  fprintf (file, "\n");
}
2278 /* Dump points-to information for VAR into stderr. */
2280 void
2281 debug_points_to_info_for (tree var)
2283 dump_points_to_info_for (stderr, var);
/* Dump points-to information into FILE.  NOTE: This function is slow, as
   it needs to traverse the whole CFG looking for pointer SSA_NAMEs.  */

void
dump_points_to_info (FILE *file)
{
  basic_block bb;
  block_stmt_iterator si;
  ssa_op_iter iter;
  const char *fname =
    lang_hooks.decl_printable_name (current_function_decl, 2);
  referenced_var_iterator rvi;
  tree var;

  fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);

  /* First dump points-to information for the default definitions of
     pointer variables.  This is necessary because default definitions are
     not part of the code.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  tree def = default_def (var);
	  if (def)
	    dump_points_to_info_for (file, def);
	}
    }

  /* Dump points-to information for every pointer defined in the program.  */
  FOR_EACH_BB (bb)
    {
      tree phi;

      /* PHI results are definitions too.  */
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  tree ptr = PHI_RESULT (phi);
	  if (POINTER_TYPE_P (TREE_TYPE (ptr)))
	    dump_points_to_info_for (file, ptr);
	}

      /* Then every SSA definition in each statement of the block.  */
      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
	{
	  tree stmt = bsi_stmt (si);
	  tree def;
	  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
	    if (POINTER_TYPE_P (TREE_TYPE (def)))
	      dump_points_to_info_for (file, def);
	}
    }

  fprintf (file, "\n");
}
/* Dump points-to information for the current function to stderr.
   Debugger helper.  */

void
debug_points_to_info (void)
{
  dump_points_to_info (stderr);
}
2350 /* Dump to FILE the list of variables that may be aliasing VAR. */
2352 void
2353 dump_may_aliases_for (FILE *file, tree var)
2355 varray_type aliases;
2357 if (TREE_CODE (var) == SSA_NAME)
2358 var = SSA_NAME_VAR (var);
2360 aliases = var_ann (var)->may_aliases;
2361 if (aliases)
2363 size_t i;
2364 fprintf (file, "{ ");
2365 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
2367 print_generic_expr (file, VARRAY_TREE (aliases, i), dump_flags);
2368 fprintf (file, " ");
2370 fprintf (file, "}");
2375 /* Dump to stderr the list of variables that may be aliasing VAR. */
2377 void
2378 debug_may_aliases_for (tree var)
2380 dump_may_aliases_for (stderr, var);
/* Return true if VAR may be aliased.  The checks below are ordered:
   addressability first, then global visibility, then the static/local
   distinctions that only make sense once globals are excluded.  */

bool
may_be_aliased (tree var)
{
  /* Obviously.  */
  if (TREE_ADDRESSABLE (var))
    return true;

  /* Globally visible variables can have their addresses taken by other
     translation units.  Memory tags use MTAG_GLOBAL instead of
     DECL_EXTERNAL for that test.  */
  if (MTAG_P (var)
      && (MTAG_GLOBAL (var) || TREE_PUBLIC (var)))
    return true;
  else if (!MTAG_P (var)
      && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
    return true;

  /* Automatic variables can't have their addresses escape any other way.
     This must be after the check for global variables, as extern declarations
     do not have TREE_STATIC set.  */
  if (!TREE_STATIC (var))
    return false;

  /* If we're in unit-at-a-time mode, then we must have seen all occurrences
     of address-of operators, and so we can trust TREE_ADDRESSABLE.  Otherwise
     we can only be sure the variable isn't addressable if it's local to the
     current function.  */
  if (flag_unit_at_a_time)
    return false;
  if (decl_function_context (var) == current_function_decl)
    return false;

  /* A file-scope static seen without whole-unit knowledge: assume the
     worst.  */
  return true;
}
2421 /* Given two symbols return TRUE if one is in the alias set of the other. */
2422 bool
2423 is_aliased_with (tree tag, tree sym)
2425 size_t i;
2426 varray_type aliases;
2428 if (var_ann (sym)->is_alias_tag)
2430 aliases = var_ann (tag)->may_aliases;
2432 if (aliases == NULL)
2433 return false;
2435 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
2436 if (VARRAY_TREE (aliases, i) == sym)
2437 return true;
2439 else
2441 aliases = var_ann (sym)->may_aliases;
2443 if (aliases == NULL)
2444 return false;
2446 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
2447 if (VARRAY_TREE (aliases, i) == tag)
2448 return true;
2451 return false;
/* Add VAR to the list of may-aliases of PTR's type tag.  If PTR
   doesn't already have a type tag, create one.  */

void
add_type_alias (tree ptr, tree var)
{
  varray_type aliases;
  tree tag;
  var_ann_t ann = var_ann (ptr);
  subvar_t svars;
  VEC (tree, heap) *varvec = NULL;

  if (ann->type_mem_tag == NULL_TREE)
    {
      tree q = NULL_TREE;
      tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
      HOST_WIDE_INT tag_set = get_alias_set (tag_type);
      safe_referenced_var_iterator rvi;

      /* PTR doesn't have a type tag, create a new one and add VAR to
	 the new tag's alias set.

	 FIXME, This is slower than necessary.  We need to determine
	 whether there is another pointer Q with the same alias set as
	 PTR.  This could be sped up by having type tags associated
	 with types.  */
      FOR_EACH_REFERENCED_VAR_SAFE (q, varvec, rvi)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (q))
	      && tag_set == get_alias_set (TREE_TYPE (TREE_TYPE (q))))
	    {
	      /* Found another pointer Q with the same alias set as
		 the PTR's pointed-to type.  If Q has a type tag, use
		 it.  Otherwise, create a new memory tag for PTR.  */
	      var_ann_t ann1 = var_ann (q);
	      if (ann1->type_mem_tag)
		ann->type_mem_tag = ann1->type_mem_tag;
	      else
		ann->type_mem_tag = create_memory_tag (tag_type, true);
	      goto found_tag;
	    }
	}

      /* Couldn't find any other pointer with a type tag we could use.
	 Create a new memory tag for PTR.  */
      ann->type_mem_tag = create_memory_tag (tag_type, true);
    }

found_tag:
  /* If VAR is not already PTR's type tag, add it to the may-alias set
     for PTR's type tag.  */
  gcc_assert (var_ann (var)->type_mem_tag == NULL);
  tag = ann->type_mem_tag;

  /* If VAR has subvars, add the subvars to the tag instead of the
     actual var.  */
  if (var_can_have_subvars (var)
      && (svars = get_subvars_for_var (var)))
    {
      subvar_t sv;
      for (sv = svars; sv; sv = sv->next)
	add_may_alias (tag, sv->var);
    }
  else
    add_may_alias (tag, var);

  /* TAG and its set of aliases need to be marked for renaming.  */
  mark_sym_for_renaming (tag);
  if ((aliases = var_ann (tag)->may_aliases) != NULL)
    {
      size_t i;
      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
	mark_sym_for_renaming (VARRAY_TREE (aliases, i));
    }

  /* If we had grouped aliases, VAR may have aliases of its own.  Mark
     them for renaming as well.  Other statements referencing the
     aliases of VAR will need to be updated.  */
  if ((aliases = var_ann (var)->may_aliases) != NULL)
    {
      size_t i;
      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
	mark_sym_for_renaming (VARRAY_TREE (aliases, i));
    }
  VEC_free (tree, heap, varvec);
}
/* Create a new type tag for PTR.  Construct the may-alias list of this type
   tag so that it has the aliasing of VAR.

   Note, the set of aliases represented by the new type tag are not marked
   for renaming.  */

void
new_type_alias (tree ptr, tree var)
{
  var_ann_t p_ann = var_ann (ptr);
  tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
  var_ann_t v_ann = var_ann (var);
  tree tag;
  subvar_t svars;

  /* PTR must not already have a tag, and VAR must be a real symbol,
     not a memory tag.  */
  gcc_assert (p_ann->type_mem_tag == NULL_TREE);
  gcc_assert (!MTAG_P (var));

  /* Add VAR to the may-alias set of PTR's new type tag.  If VAR has
     subvars, add the subvars to the tag instead of the actual var.  */
  if (var_can_have_subvars (var)
      && (svars = get_subvars_for_var (var)))
    {
      subvar_t sv;

      tag = create_memory_tag (tag_type, true);
      p_ann->type_mem_tag = tag;

      for (sv = svars; sv; sv = sv->next)
	add_may_alias (tag, sv->var);
    }
  else
    {
      /* The following is based on code in add_stmt_operand to ensure that the
	 same defs/uses/vdefs/vuses will be found after replacing a reference
	 to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
	 is the address of var.  */
      varray_type aliases = v_ann->may_aliases;

      /* If VAR's only alias is itself a type tag, reuse that tag
	 directly instead of creating a new one.  */
      if ((aliases != NULL)
	  && (VARRAY_ACTIVE_SIZE (aliases) == 1))
	{
	  tree ali = VARRAY_TREE (aliases, 0);

	  if (TREE_CODE (ali) == TYPE_MEMORY_TAG)
	    {
	      p_ann->type_mem_tag = ali;
	      return;
	    }
	}

      tag = create_memory_tag (tag_type, true);
      p_ann->type_mem_tag = tag;

      /* Otherwise the tag aliases VAR itself, or everything VAR
	 aliases.  */
      if (aliases == NULL)
	add_may_alias (tag, var);
      else
	{
	  size_t i;

	  for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
	    add_may_alias (tag, VARRAY_TREE (aliases, i));
	}
    }
}
/* This represents the used range of a variable.  */

typedef struct used_part
{
  /* Lowest and highest bit offsets seen used.  MINUSED starts at
     INT_MAX and MAXUSED at 0 (see get_or_create_used_part_for), so the
     first recorded use narrows the range.  */
  HOST_WIDE_INT minused;
  HOST_WIDE_INT maxused;
  /* True if we have an explicit use/def of some portion of this variable,
     even if it is all of it. i.e. a.b = 5 or temp = a.b.  */
  bool explicit_uses;
  /* True if we have an implicit use/def of some portion of this
     variable.  Implicit uses occur when we can't tell what part we
     are referencing, and have to make conservative assumptions.  */
  bool implicit_uses;
} *used_part_t;
/* A hashtable of used_part structures, keyed by variable uid.  (Despite
   the original comment, this is an htab, not an array; see
   used_part_map_hash/used_part_map_eq below.)  */

static htab_t used_portions;

/* An entry in USED_PORTIONS: maps a variable's UID to its used_part.  */

struct used_part_map
{
  unsigned int uid;
  used_part_t to;
};
2636 /* Return true if the uid in the two used part maps are equal. */
2638 static int
2639 used_part_map_eq (const void *va, const void *vb)
2641 const struct used_part_map *a = va, *b = vb;
2642 return (a->uid == b->uid);
2645 /* Hash a from uid in a used_part_map. */
2647 static unsigned int
2648 used_part_map_hash (const void *item)
2650 return ((const struct used_part_map *)item)->uid;
2653 /* Free a used part map element. */
2655 static void
2656 free_used_part_map (void *item)
2658 free (((struct used_part_map *)item)->to);
2659 free (item);
2662 /* Lookup a used_part structure for a UID. */
2664 static used_part_t
2665 up_lookup (unsigned int uid)
2667 struct used_part_map *h, in;
2668 in.uid = uid;
2669 h = htab_find_with_hash (used_portions, &in, uid);
2670 if (!h)
2671 return NULL;
2672 return h->to;
/* Insert the pair UID, TO into the used part hashtable.  */

static void
up_insert (unsigned int uid, used_part_t to)
{
  struct used_part_map *h;
  void **loc;

  h = xmalloc (sizeof (struct used_part_map));
  h->uid = uid;
  h->to = to;
  loc = htab_find_slot_with_hash (used_portions, h,
				  uid, INSERT);
  /* If UID was already mapped, free the old map structure -- but NOT
     its TO field.  Callers (see find_used_portions) re-insert the same
     used_part they obtained via up_lookup through
     get_or_create_used_part_for, so freeing the old TO here would
     destroy the very structure being inserted.  */
  if (*loc != NULL)
    free (*loc);
  *(struct used_part_map **)  loc = h;
}
2694 /* Given a variable uid, UID, get or create the entry in the used portions
2695 table for the variable. */
2697 static used_part_t
2698 get_or_create_used_part_for (size_t uid)
2700 used_part_t up;
2701 if ((up = up_lookup (uid)) == NULL)
2703 up = xcalloc (1, sizeof (struct used_part));
2704 up->minused = INT_MAX;
2705 up->maxused = 0;
2706 up->explicit_uses = false;
2707 up->implicit_uses = false;
2710 return up;
/* Create and return a structure sub-variable for field FIELD of
   variable VAR.  */

static tree
create_sft (tree var, tree field)
{
  var_ann_t ann;
  tree subvar = create_tag_raw (STRUCT_FIELD_TAG, TREE_TYPE (field), "SFT");

  /* We need to copy the various flags from VAR to SUBVAR, so that
     they are is_global_var iff the original variable was.  Note that
     the global bit of the tag is taken from DECL_EXTERNAL of VAR.  */
  DECL_CONTEXT (subvar) = DECL_CONTEXT (var);
  MTAG_GLOBAL (subvar) = DECL_EXTERNAL (var);
  TREE_PUBLIC  (subvar) = TREE_PUBLIC (var);
  TREE_STATIC (subvar) = TREE_STATIC (var);
  TREE_READONLY (subvar) = TREE_READONLY (var);

  /* Add the new variable to REFERENCED_VARS.  */
  ann = get_var_ann (subvar);
  ann->type_mem_tag = NULL;
  add_referenced_tmp_var (subvar);

  return subvar;
}
2740 /* Given an aggregate VAR, create the subvariables that represent its
2741 fields. */
2743 static void
2744 create_overlap_variables_for (tree var)
2746 VEC(fieldoff_s,heap) *fieldstack = NULL;
2747 used_part_t up;
2748 size_t uid = DECL_UID (var);
2750 if (!up_lookup (uid))
2751 return;
2753 up = up_lookup (uid);
2754 push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL);
2755 if (VEC_length (fieldoff_s, fieldstack) != 0)
2757 subvar_t *subvars;
2758 fieldoff_s *fo;
2759 bool notokay = false;
2760 int fieldcount = 0;
2761 int i;
2762 HOST_WIDE_INT lastfooffset = -1;
2763 HOST_WIDE_INT lastfosize = -1;
2764 tree lastfotype = NULL_TREE;
2766 /* Not all fields have DECL_SIZE set, and those that don't, we don't
2767 know their size, and thus, can't handle.
2768 The same is true of fields with DECL_SIZE that is not an integer
2769 constant (such as variable sized fields).
2770 Fields with offsets which are not constant will have an offset < 0
2771 We *could* handle fields that are constant sized arrays, but
2772 currently don't. Doing so would require some extra changes to
2773 tree-ssa-operands.c. */
2775 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
2777 if (!DECL_SIZE (fo->field)
2778 || TREE_CODE (DECL_SIZE (fo->field)) != INTEGER_CST
2779 || fo->offset < 0)
2781 notokay = true;
2782 break;
2784 fieldcount++;
2787 /* The current heuristic we use is as follows:
2788 If the variable has no used portions in this function, no
2789 structure vars are created for it.
2790 Otherwise,
2791 If the variable has less than SALIAS_MAX_IMPLICIT_FIELDS,
2792 we always create structure vars for them.
2793 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
2794 some explicit uses, we create structure vars for them.
2795 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
2796 no explicit uses, we do not create structure vars for them.
2799 if (fieldcount >= SALIAS_MAX_IMPLICIT_FIELDS
2800 && !up->explicit_uses)
2802 if (dump_file && (dump_flags & TDF_DETAILS))
2804 fprintf (dump_file, "Variable ");
2805 print_generic_expr (dump_file, var, 0);
2806 fprintf (dump_file, " has no explicit uses in this function, and is > SALIAS_MAX_IMPLICIT_FIELDS, so skipping\n");
2808 notokay = true;
2811 /* Bail out, if we can't create overlap variables. */
2812 if (notokay)
2814 VEC_free (fieldoff_s, heap, fieldstack);
2815 return;
2818 /* Otherwise, create the variables. */
2819 subvars = lookup_subvars_for_var (var);
2821 sort_fieldstack (fieldstack);
2823 for (i = VEC_length (fieldoff_s, fieldstack);
2824 VEC_iterate (fieldoff_s, fieldstack, --i, fo);)
2826 subvar_t sv;
2827 HOST_WIDE_INT fosize;
2828 tree currfotype;
2830 fosize = TREE_INT_CST_LOW (DECL_SIZE (fo->field));
2831 currfotype = TREE_TYPE (fo->field);
2833 /* If this field isn't in the used portion,
2834 or it has the exact same offset and size as the last
2835 field, skip it. */
2837 if (((fo->offset <= up->minused
2838 && fo->offset + fosize <= up->minused)
2839 || fo->offset >= up->maxused)
2840 || (fo->offset == lastfooffset
2841 && fosize == lastfosize
2842 && currfotype == lastfotype))
2843 continue;
2844 sv = ggc_alloc (sizeof (struct subvar));
2845 sv->offset = fo->offset;
2846 sv->size = fosize;
2847 sv->next = *subvars;
2848 sv->var = create_sft (var, fo->field);
2850 if (dump_file)
2852 fprintf (dump_file, "structure field tag %s created for var %s",
2853 get_name (sv->var), get_name (var));
2854 fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
2855 sv->offset);
2856 fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
2857 sv->size);
2858 fprintf (dump_file, "\n");
2861 lastfotype = currfotype;
2862 lastfooffset = fo->offset;
2863 lastfosize = fosize;
2864 *subvars = sv;
2867 /* Once we have created subvars, the original is no longer call
2868 clobbered on its own. Its call clobbered status depends
2869 completely on the call clobbered status of the subvars.
2871 add_referenced_var in the above loop will take care of
2872 marking subvars of global variables as call clobbered for us
2873 to start, since they are global as well. */
2874 clear_call_clobbered (var);
2877 VEC_free (fieldoff_s, heap, fieldstack);
/* Find the conservative answer to the question of what portions of what
   structures are used by this statement.  We assume that if we have a
   component ref with a known size + offset, that we only need that part
   of the structure.  For unknown cases, or cases where we do something
   to the whole structure, we assume we need to create fields for the
   entire structure.  */

static tree
find_used_portions (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case COMPONENT_REF:
      {
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitpos;
	tree offset;
	enum machine_mode mode;
	int unsignedp;
	int volatilep;
	tree ref;
	ref = get_inner_reference (*tp, &bitsize, &bitpos, &offset, &mode,
				   &unsignedp, &volatilep, false);
	/* A reference to a decl with constant position and size: widen
	   the recorded range to include [bitpos, bitpos + bitsize).  */
	if (DECL_P (ref) && offset == NULL && bitsize != -1)
	  {
	    size_t uid = DECL_UID (ref);
	    used_part_t up;

	    up = get_or_create_used_part_for (uid);

	    if (bitpos <= up->minused)
	      up->minused = bitpos;
	    if ((bitpos + bitsize >= up->maxused))
	      up->maxused = bitpos + bitsize;

	    up->explicit_uses = true;
	    up_insert (uid, up);

	    *walk_subtrees = 0;
	    return NULL_TREE;
	  }
	else if (DECL_P (ref))
	  {
	    /* Non-constant position or size: conservatively mark the
	       whole variable as used.  */
	    if (DECL_SIZE (ref)
		&& var_can_have_subvars (ref)
		&& TREE_CODE (DECL_SIZE (ref)) == INTEGER_CST)
	      {
		used_part_t up;
		size_t uid = DECL_UID (ref);

		up = get_or_create_used_part_for (uid);

		up->minused = 0;
		up->maxused = TREE_INT_CST_LOW (DECL_SIZE (ref));

		up->implicit_uses = true;

		up_insert (uid, up);

		*walk_subtrees = 0;
		return NULL_TREE;
	      }
	  }
      }
      break;
    /* This is here to make sure we mark the entire base variable as used
       when you take its address.  Because our used portion analysis is
       simple, we aren't looking at casts or pointer arithmetic to see what
       happens when you take the address.  */
    case ADDR_EXPR:
      {
	tree var = get_base_address (TREE_OPERAND (*tp, 0));

	if (var
	    && DECL_P (var)
	    && DECL_SIZE (var)
	    && var_can_have_subvars (var)
	    && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
	  {
	    used_part_t up;
	    size_t uid = DECL_UID (var);

	    up = get_or_create_used_part_for (uid);

	    up->minused = 0;
	    up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
	    up->implicit_uses = true;

	    up_insert (uid, up);
	    *walk_subtrees = 0;
	    return NULL_TREE;
	  }
      }
      break;
    case VAR_DECL:
    case PARM_DECL:
      {
	/* A bare reference to the variable: the whole thing is used.  */
	tree var = *tp;
	if (DECL_SIZE (var)
	    && var_can_have_subvars (var)
	    && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
	  {
	    used_part_t up;
	    size_t uid = DECL_UID (var);

	    up = get_or_create_used_part_for (uid);

	    up->minused = 0;
	    up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
	    up->implicit_uses = true;

	    up_insert (uid, up);
	    *walk_subtrees = 0;
	    return NULL_TREE;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
/* Create structure field variables for structures used in this function.  */

static void
create_structure_vars (void)
{
  basic_block bb;
  safe_referenced_var_iterator rvi;
  VEC (tree, heap) *varvec = NULL;
  tree var;

  /* Collect, per variable uid, which bit ranges this function touches.  */
  used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
                               free_used_part_map);

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
					find_used_portions,
					NULL);
	}
    }
  /* Then create subvariables for every eligible aggregate.  */
  FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, rvi)
    {
      /* The C++ FE creates vars without DECL_SIZE set, for some reason.  */
      if (var
	  && DECL_SIZE (var)
	  && var_can_have_subvars (var)
	  && !MTAG_P (var)
	  && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
	create_overlap_variables_for (var);
    }
  htab_delete (used_portions);
  VEC_free (tree, heap, varvec);
}
3044 static bool
3045 gate_structure_vars (void)
3047 return flag_tree_salias != 0;
/* Pass descriptor for the structure-vars ("salias") pass.  */

struct tree_opt_pass pass_create_structure_vars =
{
  "salias",		 /* name */
  gate_structure_vars,	 /* gate */
  create_structure_vars, /* execute */
  NULL,			 /* sub */
  NULL,			 /* next */
  0,			 /* static_pass_number */
  0,			 /* tv_id */
  PROP_cfg,		 /* properties_required */
  0,			 /* properties_provided */
  0,			 /* properties_destroyed */
  0,			 /* todo_flags_start */
  TODO_dump_func,	 /* todo_flags_finish */
  0			 /* letter */
};