UUID stuff from Ryan Raasch
[cegcc.git] / cegcc / src / gcc-4.3.2 / gcc / tree-ssa-alias.c
blob94729bcce9edc295ea2c16d387419a177e84decf
1 /* Alias analysis for trees.
2 Copyright (C) 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "timevar.h"
31 #include "expr.h"
32 #include "ggc.h"
33 #include "langhooks.h"
34 #include "flags.h"
35 #include "function.h"
36 #include "diagnostic.h"
37 #include "tree-dump.h"
38 #include "tree-gimple.h"
39 #include "tree-flow.h"
40 #include "tree-inline.h"
41 #include "tree-pass.h"
42 #include "tree-ssa-structalias.h"
43 #include "convert.h"
44 #include "params.h"
45 #include "ipa-type-escape.h"
46 #include "vec.h"
47 #include "bitmap.h"
48 #include "vecprim.h"
49 #include "pointer-set.h"
50 #include "alloc-pool.h"
52 /* Broad overview of how aliasing works:
54 First we compute points-to sets, which is done in
55 tree-ssa-structalias.c
57 During points-to set constraint finding, a bunch of little bits of
58 information is collected.
59 This is not done because it is necessary for points-to, but because
60 points-to has to walk every statement anyway. The function performing
61 this collecting is update_alias_info.
63 Bits update_alias_info collects include:
64 1. Directly escaping variables and variables whose value escapes
65 (using is_escape_site). This is the set of variables and values that
66 escape prior to transitive closure of the clobbers.
67 2. The set of variables dereferenced on the LHS (into
68 dereferenced_ptr_stores)
69 3. The set of variables dereferenced on the RHS (into
70 dereferenced_ptr_loads)
71 4. The set of all pointers we saw.
72 5. The number of loads and stores for each variable
73 6. The number of statements touching memory
74 7. The set of address taken variables.
77 #1 is computed by a combination of is_escape_site, and counting the
78 number of uses/deref operators. This function properly accounts for
79 situations like &ptr->field, which is *not* a dereference.
81 After points-to sets are computed, the sets themselves still
82 contain points-to specific variables, such as a variable that says
83 the pointer points to anything, a variable that says the pointer
84 points to readonly memory, etc.
86 These are eliminated in a later phase, as we will see.
88 The rest of the phases are located in tree-ssa-alias.c
90 The next phase after points-to set computation is called
91 "setup_pointers_and_addressables"
93 This pass does 3 main things:
95 1. All variables that can have TREE_ADDRESSABLE removed safely (IE
96 non-globals whose address is not taken), have TREE_ADDRESSABLE
97 removed.
98 2. All variables that may be aliased (which is the set of addressable
99 variables and globals) at all, are marked for renaming, and have
100 symbol memory tags created for them.
101 3. All variables which are stored into have their SMT's added to
102 written vars.
105 After this function is run, all variables that will ever have an
106 SMT, have one, though its aliases are not filled in.
108 The next phase is to compute flow-insensitive aliasing, which in
109 our case, is a misnomer. It is really computing aliasing that
110 requires no transitive closure to be correct. In particular, it
111 uses stack vs non-stack, TBAA, etc, to determine whether two
112 symbols could *ever* alias. This phase works by going through all
113 the pointers we collected during update_alias_info, and for every
114 addressable variable in the program, seeing if they alias. If so,
115 the addressable variable is added to the symbol memory tag for the
116 pointer.
118 As part of this, we handle symbol memory tags that conflict but
119 have no aliases in common, by forcing them to have a symbol in
120 common (through unioning alias sets or adding one as an alias of
121 the other), or by adding one as an alias of another. The case of
122 conflicts with no aliases in common occurs mainly due to aliasing
123 we cannot see. In particular, it generally means we have a load
124 through a pointer whose value came from outside the function.
125 Without an addressable symbol to point to, they would get the wrong
126 answer.
128 After flow insensitive aliasing is computed, we compute name tags
129 (called compute_flow_sensitive_info). We walk each pointer we
130 collected and see if it has a usable points-to set. If so, we
131 generate a name tag using that pointer, and make an alias bitmap for
132 it. Name tags are shared between all things with the same alias
133 bitmap. The alias bitmap will be translated from what points-to
134 computed. In particular, the "anything" variable in points-to will be
135 transformed into a pruned set of SMT's and their aliases that
136 compute_flow_insensitive_aliasing computed.
137 Note that since 4.3, every pointer that points-to computed a solution for
138 will get a name tag (whereas before 4.3, only those whose set did
139 *not* include the anything variable would). At the point where name
140 tags are all assigned, symbol memory tags are dead, and could be
141 deleted, *except* on global variables. Global variables still use
142 symbol memory tags as of right now.
144 After name tags are computed, the set of clobbered variables is
145 transitively closed. In particular, we compute the set of clobbered
146 variables based on the initial set of clobbers, plus the aliases of
147 pointers which either escape, or have their value escape.
149 After this, maybe_create_global_var is run, which handles a corner
150 case where we have no call clobbered variables, but have pure and
151 non-pure functions.
153 Staring at this function, I now remember it is a hack for the fact
154 that we do not mark all globals in the program as call clobbered for a
155 function unless they are actually used in that function. Instead, we
156 only mark the set that is actually clobbered. As a result, you can
157 end up with situations where you have no call clobbered vars set.
159 After maybe_create_global_var, we set pointers with the REF_ALL flag
160 to have alias sets that include all clobbered
161 memory tags and variables.
163 After this, memory partitioning is computed (by the function
164 compute_memory_partitions) and alias sets are reworked accordingly.
166 Lastly, we delete partitions with no symbols, and clean up after
167 ourselves. */
169 /* Structure to map a variable to its alias set. */
170 struct alias_map_d
172 /* Variable and its alias set. */
173 tree var;
174 alias_set_type set;
/* Counters used to display statistics on alias analysis.  */
struct alias_stats_d
{
  /* Total number of aliasing queries and their outcomes.  */
  unsigned int alias_queries;
  unsigned int alias_mayalias;
  unsigned int alias_noalias;

  /* Queries resolved by the simple (non-TBAA) checks.  */
  unsigned int simple_queries;
  unsigned int simple_resolved;

  /* Queries resolved by type-based alias analysis.  */
  unsigned int tbaa_queries;
  unsigned int tbaa_resolved;

  /* Queries resolved because a structure's address is never taken.  */
  unsigned int structnoaddress_queries;
  unsigned int structnoaddress_resolved;
};
193 /* Local variables. */
194 static struct alias_stats_d alias_stats;
195 static bitmap_obstack alias_bitmap_obstack;
197 /* Local functions. */
198 static void compute_flow_insensitive_aliasing (struct alias_info *);
199 static void finalize_ref_all_pointers (struct alias_info *);
200 static void dump_alias_stats (FILE *);
201 static bool may_alias_p (tree, alias_set_type, tree, alias_set_type, bool);
202 static tree create_memory_tag (tree type, bool is_type_tag);
203 static tree get_smt_for (tree, struct alias_info *);
204 static tree get_nmt_for (tree);
205 static void add_may_alias (tree, tree);
206 static struct alias_info *init_alias_info (void);
207 static void delete_alias_info (struct alias_info *);
208 static void compute_flow_sensitive_aliasing (struct alias_info *);
209 static void setup_pointers_and_addressables (struct alias_info *);
210 static void create_global_var (void);
211 static void maybe_create_global_var (void);
212 static void set_pt_anything (tree);
214 void debug_mp_info (VEC(mem_sym_stats_t,heap) *);
216 static alloc_pool mem_sym_stats_pool;
218 /* Return memory reference stats for symbol VAR. Create a new slot in
219 cfun->gimple_df->mem_sym_stats if needed. */
221 static struct mem_sym_stats_d *
222 get_mem_sym_stats_for (tree var)
224 void **slot;
225 struct mem_sym_stats_d *stats;
226 struct pointer_map_t *map = gimple_mem_ref_stats (cfun)->mem_sym_stats;
228 gcc_assert (map);
230 slot = pointer_map_insert (map, var);
231 if (*slot == NULL)
233 stats = pool_alloc (mem_sym_stats_pool);
234 memset (stats, 0, sizeof (*stats));
235 stats->var = var;
236 *slot = (void *) stats;
238 else
239 stats = (struct mem_sym_stats_d *) *slot;
241 return stats;
245 /* Return memory reference statistics for variable VAR in function FN.
246 This is computed by alias analysis, but it is not kept
247 incrementally up-to-date. So, these stats are only accurate if
248 pass_may_alias has been run recently. If no alias information
249 exists, this function returns NULL. */
251 static mem_sym_stats_t
252 mem_sym_stats (struct function *fn, tree var)
254 void **slot;
255 struct pointer_map_t *stats_map = gimple_mem_ref_stats (fn)->mem_sym_stats;
257 if (stats_map == NULL)
258 return NULL;
260 slot = pointer_map_contains (stats_map, var);
261 if (slot == NULL)
262 return NULL;
264 return (mem_sym_stats_t) *slot;
268 /* Set MPT to be the memory partition associated with symbol SYM. */
270 static inline void
271 set_memory_partition (tree sym, tree mpt)
273 #if defined ENABLE_CHECKING
274 if (mpt)
275 gcc_assert (TREE_CODE (mpt) == MEMORY_PARTITION_TAG
276 && !is_gimple_reg (sym));
277 #endif
279 var_ann (sym)->mpt = mpt;
280 if (mpt)
282 if (MPT_SYMBOLS (mpt) == NULL)
283 MPT_SYMBOLS (mpt) = BITMAP_ALLOC (&alias_bitmap_obstack);
285 bitmap_set_bit (MPT_SYMBOLS (mpt), DECL_UID (sym));
287 /* MPT inherits the call-clobbering attributes from SYM. */
288 if (is_call_clobbered (sym))
290 MTAG_GLOBAL (mpt) = 1;
291 mark_call_clobbered (mpt, ESCAPE_IS_GLOBAL);
297 /* Mark variable VAR as being non-addressable. */
299 static void
300 mark_non_addressable (tree var)
302 tree mpt;
304 if (!TREE_ADDRESSABLE (var))
305 return;
307 mpt = memory_partition (var);
309 if (!MTAG_P (var))
310 var_ann (var)->call_clobbered = false;
312 bitmap_clear_bit (gimple_call_clobbered_vars (cfun), DECL_UID (var));
313 TREE_ADDRESSABLE (var) = 0;
315 if (mpt)
317 /* Note that it's possible for a symbol to have an associated
318 MPT and the MPT have a NULL empty set. During
319 init_alias_info, all MPTs get their sets cleared out, but the
320 symbols still point to the old MPTs that used to hold them.
321 This is done so that compute_memory_partitions can now which
322 symbols are losing or changing partitions and mark them for
323 renaming. */
324 if (MPT_SYMBOLS (mpt))
325 bitmap_clear_bit (MPT_SYMBOLS (mpt), DECL_UID (var));
326 set_memory_partition (var, NULL_TREE);
331 /* qsort comparison function to sort type/name tags by DECL_UID. */
333 static int
334 sort_tags_by_id (const void *pa, const void *pb)
336 const_tree const a = *(const_tree const *)pa;
337 const_tree const b = *(const_tree const *)pb;
339 return DECL_UID (a) - DECL_UID (b);
342 /* Initialize WORKLIST to contain those memory tags that are marked call
343 clobbered. Initialized WORKLIST2 to contain the reasons these
344 memory tags escaped. */
346 static void
347 init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
348 VEC (int, heap) **worklist2,
349 bitmap on_worklist)
351 referenced_var_iterator rvi;
352 tree curr;
354 FOR_EACH_REFERENCED_VAR (curr, rvi)
356 if (MTAG_P (curr) && is_call_clobbered (curr))
358 VEC_safe_push (tree, heap, *worklist, curr);
359 VEC_safe_push (int, heap, *worklist2,
360 var_ann (curr)->escape_mask);
361 bitmap_set_bit (on_worklist, DECL_UID (curr));
366 /* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
367 ALIAS is not already marked call clobbered, and is a memory
368 tag. */
370 static void
371 add_to_worklist (tree alias, VEC (tree, heap) **worklist,
372 VEC (int, heap) **worklist2, int reason,
373 bitmap on_worklist)
375 if (MTAG_P (alias) && !is_call_clobbered (alias)
376 && !bitmap_bit_p (on_worklist, DECL_UID (alias)))
378 VEC_safe_push (tree, heap, *worklist, alias);
379 VEC_safe_push (int, heap, *worklist2, reason);
380 bitmap_set_bit (on_worklist, DECL_UID (alias));
384 /* Mark aliases of TAG as call clobbered, and place any tags on the
385 alias list that were not already call clobbered on WORKLIST. */
387 static void
388 mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
389 VEC (int, heap) **worklist2,
390 bitmap on_worklist, bitmap queued)
392 bitmap aliases;
393 bitmap_iterator bi;
394 unsigned int i;
395 tree entry;
396 var_ann_t ta = var_ann (tag);
398 if (!MTAG_P (tag))
399 return;
400 aliases = may_aliases (tag);
401 if (!aliases)
402 return;
404 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
406 entry = referenced_var (i);
407 /* If you clobber one part of a structure, you
408 clobber the entire thing. While this does not make
409 the world a particularly nice place, it is necessary
410 in order to allow C/C++ tricks that involve
411 pointer arithmetic to work. */
412 if (TREE_CODE (entry) == STRUCT_FIELD_TAG)
413 bitmap_set_bit (queued, DECL_UID (SFT_PARENT_VAR (entry)));
414 else if (!unmodifiable_var_p (entry))
416 add_to_worklist (entry, worklist, worklist2, ta->escape_mask,
417 on_worklist);
418 mark_call_clobbered (entry, ta->escape_mask);
421 if (!bitmap_empty_p (queued))
423 EXECUTE_IF_SET_IN_BITMAP (queued, 0, i, bi)
425 subvar_t svars = get_subvars_for_var (referenced_var (i));
426 unsigned int i;
427 tree subvar;
429 for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
430 if (!unmodifiable_var_p (subvar))
431 mark_call_clobbered (subvar, ta->escape_mask);
433 bitmap_clear (queued);
437 /* Tags containing global vars need to be marked as global.
438 Tags containing call clobbered vars need to be marked as call
439 clobbered. */
441 static void
442 compute_tag_properties (void)
444 referenced_var_iterator rvi;
445 tree tag;
446 bool changed = true;
447 VEC (tree, heap) *taglist = NULL;
449 FOR_EACH_REFERENCED_VAR (tag, rvi)
451 if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG)
452 continue;
453 VEC_safe_push (tree, heap, taglist, tag);
456 /* We sort the taglist by DECL_UID, for two reasons.
457 1. To get a sequential ordering to make the bitmap accesses
458 faster.
459 2. Because of the way we compute aliases, it's more likely that
460 an earlier tag is included in a later tag, and this will reduce
461 the number of iterations.
463 If we had a real tag graph, we would just topo-order it and be
464 done with it. */
465 qsort (VEC_address (tree, taglist),
466 VEC_length (tree, taglist),
467 sizeof (tree),
468 sort_tags_by_id);
470 /* Go through each tag not marked as global, and if it aliases
471 global vars, mark it global.
473 If the tag contains call clobbered vars, mark it call
474 clobbered.
476 This loop iterates because tags may appear in the may-aliases
477 list of other tags when we group. */
479 while (changed)
481 unsigned int k;
483 changed = false;
484 for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
486 bitmap ma;
487 bitmap_iterator bi;
488 unsigned int i;
489 tree entry;
490 bool tagcc = is_call_clobbered (tag);
491 bool tagglobal = MTAG_GLOBAL (tag);
493 if (tagcc && tagglobal)
494 continue;
496 ma = may_aliases (tag);
497 if (!ma)
498 continue;
500 EXECUTE_IF_SET_IN_BITMAP (ma, 0, i, bi)
502 entry = referenced_var (i);
503 /* Call clobbered entries cause the tag to be marked
504 call clobbered. */
505 if (!tagcc && is_call_clobbered (entry))
507 mark_call_clobbered (tag, var_ann (entry)->escape_mask);
508 tagcc = true;
509 changed = true;
512 /* Global vars cause the tag to be marked global. */
513 if (!tagglobal && is_global_var (entry))
515 MTAG_GLOBAL (tag) = true;
516 changed = true;
517 tagglobal = true;
520 /* Early exit once both global and cc are set, since the
521 loop can't do any more than that. */
522 if (tagcc && tagglobal)
523 break;
527 VEC_free (tree, heap, taglist);
530 /* Set up the initial variable clobbers and globalness.
531 When this function completes, only tags whose aliases need to be
532 clobbered will be set clobbered. Tags clobbered because they
533 contain call clobbered vars are handled in compute_tag_properties. */
535 static void
536 set_initial_properties (struct alias_info *ai)
538 unsigned int i;
539 referenced_var_iterator rvi;
540 tree var;
541 tree ptr;
542 bitmap queued;
543 bool any_pt_anything = false;
544 enum escape_type pt_anything_mask = 0;
545 bitmap_iterator bi;
546 unsigned int j;
548 /* Temporary bitmap to avoid quadratic behavior in marking
549 call clobbers. */
550 queued = BITMAP_ALLOC (&alias_bitmap_obstack);
552 FOR_EACH_REFERENCED_VAR (var, rvi)
554 if (is_global_var (var)
555 && (!var_can_have_subvars (var)
556 || get_subvars_for_var (var) == NULL))
558 if (!unmodifiable_var_p (var))
559 mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
561 else if (TREE_CODE (var) == PARM_DECL
562 && gimple_default_def (cfun, var)
563 && POINTER_TYPE_P (TREE_TYPE (var)))
565 tree def = gimple_default_def (cfun, var);
566 get_ptr_info (def)->value_escapes_p = 1;
567 get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
571 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
573 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
574 tree tag = symbol_mem_tag (SSA_NAME_VAR (ptr));
576 if (pi->value_escapes_p)
578 /* If PTR escapes then its associated memory tags and
579 pointed-to variables are call-clobbered. */
580 if (pi->name_mem_tag)
581 mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);
583 if (tag)
584 mark_call_clobbered (tag, pi->escape_mask);
586 if (pi->pt_vars)
588 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
590 tree alias = referenced_var (j);
592 /* If you clobber one part of a structure, you
593 clobber the entire thing. While this does not make
594 the world a particularly nice place, it is necessary
595 in order to allow C/C++ tricks that involve
596 pointer arithmetic to work. */
597 if (TREE_CODE (alias) == STRUCT_FIELD_TAG)
599 alias = SFT_PARENT_VAR (alias);
600 if (!unmodifiable_var_p (alias))
602 bitmap_set_bit (queued, DECL_UID (alias));
603 var_ann (alias)->escape_mask |= pi->escape_mask;
606 else if (!unmodifiable_var_p (alias))
607 mark_call_clobbered (alias, pi->escape_mask);
610 else if (pi->pt_anything)
612 any_pt_anything = true;
613 pt_anything_mask |= pi->escape_mask;
617 /* If the name tag is call clobbered, so is the symbol tag
618 associated with the base VAR_DECL. */
619 if (pi->name_mem_tag
620 && tag
621 && is_call_clobbered (pi->name_mem_tag))
622 mark_call_clobbered (tag, pi->escape_mask);
624 /* Name tags and symbol tags that we don't know where they point
625 to, might point to global memory, and thus, are clobbered.
627 FIXME: This is not quite right. They should only be
628 clobbered if value_escapes_p is true, regardless of whether
629 they point to global memory or not.
630 So removing this code and fixing all the bugs would be nice.
631 It is the cause of a bunch of clobbering. */
632 if ((pi->pt_global_mem || pi->pt_anything)
633 && pi->is_dereferenced && pi->name_mem_tag)
635 mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
636 MTAG_GLOBAL (pi->name_mem_tag) = true;
639 if ((pi->pt_global_mem || pi->pt_anything)
640 && pi->is_dereferenced
641 && tag)
643 mark_call_clobbered (tag, ESCAPE_IS_GLOBAL);
644 MTAG_GLOBAL (tag) = true;
648 /* If a pointer to anything escaped we need to mark all addressable
649 variables call clobbered. */
650 if (any_pt_anything)
652 EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun),
653 0, j, bi)
655 tree var = referenced_var (j);
656 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
658 var = SFT_PARENT_VAR (var);
659 if (!unmodifiable_var_p (var))
661 bitmap_set_bit (queued, DECL_UID (var));
662 var_ann (var)->escape_mask |= pt_anything_mask;
665 else if (!unmodifiable_var_p (var))
666 mark_call_clobbered (var, pt_anything_mask);
670 /* Process variables we need to clobber all parts of. */
671 EXECUTE_IF_SET_IN_BITMAP (queued, 0, j, bi)
673 tree var = referenced_var (j);
674 subvar_t svars = get_subvars_for_var (var);
675 unsigned int i;
676 tree subvar;
677 enum escape_type mask = var_ann (var)->escape_mask;
679 for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
680 mark_call_clobbered (subvar, mask);
683 BITMAP_FREE (queued);
686 /* Compute which variables need to be marked call clobbered because
687 their tag is call clobbered, and which tags need to be marked
688 global because they contain global variables. */
690 static void
691 compute_call_clobbered (struct alias_info *ai)
693 VEC (tree, heap) *worklist = NULL;
694 VEC (int,heap) *worklist2 = NULL;
695 bitmap on_worklist, queued;
697 timevar_push (TV_CALL_CLOBBER);
698 on_worklist = BITMAP_ALLOC (NULL);
699 queued = BITMAP_ALLOC (NULL);
701 set_initial_properties (ai);
702 init_transitive_clobber_worklist (&worklist, &worklist2, on_worklist);
703 while (VEC_length (tree, worklist) != 0)
705 tree curr = VEC_pop (tree, worklist);
706 int reason = VEC_pop (int, worklist2);
708 bitmap_clear_bit (on_worklist, DECL_UID (curr));
709 mark_call_clobbered (curr, reason);
710 mark_aliases_call_clobbered (curr, &worklist, &worklist2,
711 on_worklist, queued);
713 VEC_free (tree, heap, worklist);
714 VEC_free (int, heap, worklist2);
715 BITMAP_FREE (on_worklist);
716 BITMAP_FREE (queued);
717 compute_tag_properties ();
718 timevar_pop (TV_CALL_CLOBBER);
722 /* Dump memory partition information to FILE. */
724 static void
725 dump_memory_partitions (FILE *file)
727 unsigned i, npart;
728 unsigned long nsyms;
729 tree mpt;
731 fprintf (file, "\nMemory partitions\n\n");
732 for (i = 0, npart = 0, nsyms = 0;
733 VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt);
734 i++)
736 if (mpt)
738 bitmap syms = MPT_SYMBOLS (mpt);
739 unsigned long n = (syms) ? bitmap_count_bits (syms) : 0;
741 fprintf (file, "#%u: ", i);
742 print_generic_expr (file, mpt, 0);
743 fprintf (file, ": %lu elements: ", n);
744 dump_decl_set (file, syms);
745 npart++;
746 nsyms += n;
750 fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms);
/* Dump memory partition information to stderr.  */

void
debug_memory_partitions (void)
{
  dump_memory_partitions (stderr);
}
763 /* Return true if memory partitioning is required given the memory
764 reference estimates in STATS. */
766 static inline bool
767 need_to_partition_p (struct mem_ref_stats_d *stats)
769 long num_vops = stats->num_vuses + stats->num_vdefs;
770 long avg_vops = CEIL (num_vops, stats->num_mem_stmts);
771 return (num_vops > (long) MAX_ALIASED_VOPS
772 && avg_vops > (long) AVG_ALIASED_VOPS);
776 /* Count the actual number of virtual operators in CFUN. Note that
777 this is only meaningful after virtual operands have been populated,
778 so it should be invoked at the end of compute_may_aliases.
780 The number of virtual operators are stored in *NUM_VDEFS_P and
781 *NUM_VUSES_P, the number of partitioned symbols in
782 *NUM_PARTITIONED_P and the number of unpartitioned symbols in
783 *NUM_UNPARTITIONED_P.
785 If any of these pointers is NULL the corresponding count is not
786 computed. */
788 static void
789 count_mem_refs (long *num_vuses_p, long *num_vdefs_p,
790 long *num_partitioned_p, long *num_unpartitioned_p)
792 block_stmt_iterator bsi;
793 basic_block bb;
794 long num_vdefs, num_vuses, num_partitioned, num_unpartitioned;
795 referenced_var_iterator rvi;
796 tree sym;
798 num_vuses = num_vdefs = num_partitioned = num_unpartitioned = 0;
800 if (num_vuses_p || num_vdefs_p)
801 FOR_EACH_BB (bb)
802 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
804 tree stmt = bsi_stmt (bsi);
805 if (stmt_references_memory_p (stmt))
807 num_vuses += NUM_SSA_OPERANDS (stmt, SSA_OP_VUSE);
808 num_vdefs += NUM_SSA_OPERANDS (stmt, SSA_OP_VDEF);
812 if (num_partitioned_p || num_unpartitioned_p)
813 FOR_EACH_REFERENCED_VAR (sym, rvi)
815 if (is_gimple_reg (sym))
816 continue;
818 if (memory_partition (sym))
819 num_partitioned++;
820 else
821 num_unpartitioned++;
824 if (num_vdefs_p)
825 *num_vdefs_p = num_vdefs;
827 if (num_vuses_p)
828 *num_vuses_p = num_vuses;
830 if (num_partitioned_p)
831 *num_partitioned_p = num_partitioned;
833 if (num_unpartitioned_p)
834 *num_unpartitioned_p = num_unpartitioned;
838 /* The list is sorted by increasing partitioning score (PSCORE).
839 This score is computed such that symbols with high scores are
840 those that are least likely to be partitioned. Given a symbol
841 MP->VAR, PSCORE(S) is the result of the following weighted sum
843 PSCORE(S) = FW * 64 + FR * 32
844 + DW * 16 + DR * 8
845 + IW * 4 + IR * 2
846 + NO_ALIAS
848 where
850 FW Execution frequency of writes to S
851 FR Execution frequency of reads from S
852 DW Number of direct writes to S
853 DR Number of direct reads from S
854 IW Number of indirect writes to S
855 IR Number of indirect reads from S
856 NO_ALIAS State of the NO_ALIAS* flags
858 The basic idea here is that symbols that are frequently
859 written-to in hot paths of the code are the last to be considered
860 for partitioning. */
862 static inline long
863 mem_sym_score (mem_sym_stats_t mp)
865 /* Unpartitionable SFTs are automatically thrown to the bottom of
866 the list. They are not stored in partitions, but they are used
867 for computing overall statistics. */
868 if (TREE_CODE (mp->var) == STRUCT_FIELD_TAG
869 && SFT_UNPARTITIONABLE_P (mp->var)
870 && !is_call_clobbered (mp->var))
871 return LONG_MAX;
873 return mp->frequency_writes * 64 + mp->frequency_reads * 32
874 + mp->num_direct_writes * 16 + mp->num_direct_reads * 8
875 + mp->num_indirect_writes * 4 + mp->num_indirect_reads * 2
876 + var_ann (mp->var)->noalias_state;
880 /* Dump memory reference stats for function CFUN to FILE. */
882 void
883 dump_mem_ref_stats (FILE *file)
885 long actual_num_vuses, actual_num_vdefs;
886 long num_partitioned, num_unpartitioned;
887 struct mem_ref_stats_d *stats;
889 stats = gimple_mem_ref_stats (cfun);
891 count_mem_refs (&actual_num_vuses, &actual_num_vdefs, &num_partitioned,
892 &num_unpartitioned);
894 fprintf (file, "\nMemory reference statistics for %s\n\n",
895 lang_hooks.decl_printable_name (current_function_decl, 2));
897 fprintf (file, "Number of memory statements: %ld\n",
898 stats->num_mem_stmts);
899 fprintf (file, "Number of call sites: %ld\n",
900 stats->num_call_sites);
901 fprintf (file, "Number of pure/const call sites: %ld\n",
902 stats->num_pure_const_call_sites);
903 fprintf (file, "Number of asm sites: %ld\n",
904 stats->num_asm_sites);
905 fprintf (file, "Estimated number of loads: %ld (%ld/stmt)\n",
906 stats->num_vuses,
907 (stats->num_mem_stmts)
908 ? CEIL (stats->num_vuses, stats->num_mem_stmts)
909 : 0);
910 fprintf (file, "Actual number of loads: %ld (%ld/stmt)\n",
911 actual_num_vuses,
912 (stats->num_mem_stmts)
913 ? CEIL (actual_num_vuses, stats->num_mem_stmts)
914 : 0);
916 if (actual_num_vuses > stats->num_vuses + (stats->num_vuses / 25))
917 fprintf (file, "\t(warning: estimation is lower by more than 25%%)\n");
919 fprintf (file, "Estimated number of stores: %ld (%ld/stmt)\n",
920 stats->num_vdefs,
921 (stats->num_mem_stmts)
922 ? CEIL (stats->num_vdefs, stats->num_mem_stmts)
923 : 0);
924 fprintf (file, "Actual number of stores: %ld (%ld/stmt)\n",
925 actual_num_vdefs,
926 (stats->num_mem_stmts)
927 ? CEIL (actual_num_vdefs, stats->num_mem_stmts)
928 : 0);
930 if (actual_num_vdefs > stats->num_vdefs + (stats->num_vdefs / 25))
931 fprintf (file, "\t(warning: estimation is lower by more than 25%%)\n");
933 fprintf (file, "Partitioning thresholds: MAX = %d AVG = %d "
934 "(%sNEED TO PARTITION)\n", MAX_ALIASED_VOPS, AVG_ALIASED_VOPS,
935 stats->num_mem_stmts && need_to_partition_p (stats) ? "" : "NO ");
936 fprintf (file, "Number of partitioned symbols: %ld\n", num_partitioned);
937 fprintf (file, "Number of unpartitioned symbols: %ld\n", num_unpartitioned);
/* Dump memory reference stats for function CFUN to stderr.  */

void
debug_mem_ref_stats (void)
{
  dump_mem_ref_stats (stderr);
}
950 /* Dump memory reference stats for variable VAR to FILE. */
952 static void
953 dump_mem_sym_stats (FILE *file, tree var)
955 mem_sym_stats_t stats = mem_sym_stats (cfun, var);
957 if (stats == NULL)
958 return;
960 fprintf (file, "read frequency: %6ld, write frequency: %6ld, "
961 "direct reads: %3ld, direct writes: %3ld, "
962 "indirect reads: %4ld, indirect writes: %4ld, symbol: ",
963 stats->frequency_reads, stats->frequency_writes,
964 stats->num_direct_reads, stats->num_direct_writes,
965 stats->num_indirect_reads, stats->num_indirect_writes);
966 print_generic_expr (file, stats->var, 0);
967 fprintf (file, ", tags: ");
968 dump_decl_set (file, stats->parent_tags);
972 /* Dump memory reference stats for variable VAR to stderr. */
974 void
975 debug_mem_sym_stats (tree var)
977 dump_mem_sym_stats (stderr, var);
980 /* Dump memory reference stats for variable VAR to FILE. For use
981 of tree-dfa.c:dump_variable. */
983 void
984 dump_mem_sym_stats_for_var (FILE *file, tree var)
986 mem_sym_stats_t stats = mem_sym_stats (cfun, var);
988 if (stats == NULL)
989 return;
991 fprintf (file, ", score: %ld", mem_sym_score (stats));
992 fprintf (file, ", direct reads: %ld", stats->num_direct_reads);
993 fprintf (file, ", direct writes: %ld", stats->num_direct_writes);
994 fprintf (file, ", indirect reads: %ld", stats->num_indirect_reads);
995 fprintf (file, ", indirect writes: %ld", stats->num_indirect_writes);
998 /* Dump memory reference stats for all memory symbols to FILE. */
1000 static void
1001 dump_all_mem_sym_stats (FILE *file)
1003 referenced_var_iterator rvi;
1004 tree sym;
1006 FOR_EACH_REFERENCED_VAR (sym, rvi)
1008 if (is_gimple_reg (sym))
1009 continue;
1011 dump_mem_sym_stats (file, sym);
/* Dump memory reference stats for all memory symbols to stderr.  */

void
debug_all_mem_sym_stats (void)
{
  dump_all_mem_sym_stats (stderr);
}
1025 /* Dump the MP_INFO array to FILE. */
1027 static void
1028 dump_mp_info (FILE *file, VEC(mem_sym_stats_t,heap) *mp_info)
1030 unsigned i;
1031 mem_sym_stats_t mp_p;
1033 for (i = 0; VEC_iterate (mem_sym_stats_t, mp_info, i, mp_p); i++)
1034 if (!mp_p->partitioned_p)
1035 dump_mem_sym_stats (file, mp_p->var);
1039 /* Dump the MP_INFO array to stderr. */
1041 void
1042 debug_mp_info (VEC(mem_sym_stats_t,heap) *mp_info)
1044 dump_mp_info (stderr, mp_info);
1048 /* Update memory reference stats for symbol VAR in statement STMT.
1049 NUM_DIRECT_READS and NUM_DIRECT_WRITES specify the number of times
1050 that VAR is read/written in STMT (indirect reads/writes are not
1051 recorded by this function, see compute_memory_partitions). */
1053 void
1054 update_mem_sym_stats_from_stmt (tree var, tree stmt, long num_direct_reads,
1055 long num_direct_writes)
1057 mem_sym_stats_t stats;
1059 gcc_assert (num_direct_reads >= 0 && num_direct_writes >= 0);
1061 stats = get_mem_sym_stats_for (var);
1063 stats->num_direct_reads += num_direct_reads;
1064 stats->frequency_reads += ((long) bb_for_stmt (stmt)->frequency
1065 * num_direct_reads);
1067 stats->num_direct_writes += num_direct_writes;
1068 stats->frequency_writes += ((long) bb_for_stmt (stmt)->frequency
1069 * num_direct_writes);
1073 /* Given two MP_INFO entries MP1 and MP2, return -1 if MP1->VAR should
1074 be partitioned before MP2->VAR, 0 if they are the same or 1 if
1075 MP1->VAR should be partitioned after MP2->VAR. */
1077 static inline int
1078 compare_mp_info_entries (mem_sym_stats_t mp1, mem_sym_stats_t mp2)
1080 long pscore1 = mem_sym_score (mp1);
1081 long pscore2 = mem_sym_score (mp2);
1083 if (pscore1 < pscore2)
1084 return -1;
1085 else if (pscore1 > pscore2)
1086 return 1;
1087 else
1088 return DECL_UID (mp1->var) - DECL_UID (mp2->var);
1092 /* Comparison routine for qsort. The list is sorted by increasing
1093 partitioning score (PSCORE). This score is computed such that
1094 symbols with high scores are those that are least likely to be
1095 partitioned. */
1097 static int
1098 mp_info_cmp (const void *p, const void *q)
1100 mem_sym_stats_t e1 = *((const mem_sym_stats_t *) p);
1101 mem_sym_stats_t e2 = *((const mem_sym_stats_t *) q);
1102 return compare_mp_info_entries (e1, e2);
1106 /* Sort the array of reference counts used to compute memory partitions.
1107 Elements are sorted in ascending order of execution frequency and
1108 descending order of virtual operators needed. */
1110 static inline void
1111 sort_mp_info (VEC(mem_sym_stats_t,heap) *list)
1113 unsigned num = VEC_length (mem_sym_stats_t, list);
1115 if (num < 2)
1116 return;
1118 if (num == 2)
1120 if (compare_mp_info_entries (VEC_index (mem_sym_stats_t, list, 0),
1121 VEC_index (mem_sym_stats_t, list, 1)) > 0)
1123 /* Swap elements if they are in the wrong order. */
1124 mem_sym_stats_t tmp = VEC_index (mem_sym_stats_t, list, 0);
1125 VEC_replace (mem_sym_stats_t, list, 0,
1126 VEC_index (mem_sym_stats_t, list, 1));
1127 VEC_replace (mem_sym_stats_t, list, 1, tmp);
1130 return;
1133 /* There are 3 or more elements, call qsort. */
1134 qsort (VEC_address (mem_sym_stats_t, list),
1135 VEC_length (mem_sym_stats_t, list),
1136 sizeof (mem_sym_stats_t),
1137 mp_info_cmp);
1141 /* Return the memory partition tag (MPT) associated with memory
1142 symbol SYM. */
1144 static tree
1145 get_mpt_for (tree sym)
1147 tree mpt;
1149 /* Don't create a new tag unnecessarily. */
1150 mpt = memory_partition (sym);
1151 if (mpt == NULL_TREE)
1153 mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT");
1154 TREE_ADDRESSABLE (mpt) = 0;
1155 add_referenced_var (mpt);
1156 VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt);
1157 gcc_assert (MPT_SYMBOLS (mpt) == NULL);
1158 set_memory_partition (sym, mpt);
1161 return mpt;
1165 /* Add MP_P->VAR to a memory partition and return the partition. */
1167 static tree
1168 find_partition_for (mem_sym_stats_t mp_p)
1170 unsigned i;
1171 VEC(tree,heap) *mpt_table;
1172 tree mpt;
1174 mpt_table = gimple_ssa_operands (cfun)->mpt_table;
1175 mpt = NULL_TREE;
1177 /* Find an existing partition for MP_P->VAR. */
1178 for (i = 0; VEC_iterate (tree, mpt_table, i, mpt); i++)
1180 mem_sym_stats_t mpt_stats;
1182 /* If MPT does not have any symbols yet, use it. */
1183 if (MPT_SYMBOLS (mpt) == NULL)
1184 break;
1186 /* Otherwise, see if MPT has common parent tags with MP_P->VAR,
1187 but avoid grouping clobbered variables with non-clobbered
1188 variables (otherwise, this tends to creates a single memory
1189 partition because other call-clobbered variables may have
1190 common parent tags with non-clobbered ones). */
1191 mpt_stats = get_mem_sym_stats_for (mpt);
1192 if (mp_p->parent_tags
1193 && mpt_stats->parent_tags
1194 && is_call_clobbered (mpt) == is_call_clobbered (mp_p->var)
1195 && bitmap_intersect_p (mpt_stats->parent_tags, mp_p->parent_tags))
1196 break;
1198 /* If no common parent tags are found, see if both MPT and
1199 MP_P->VAR are call-clobbered. */
1200 if (is_call_clobbered (mpt) && is_call_clobbered (mp_p->var))
1201 break;
1204 if (mpt == NULL_TREE)
1205 mpt = get_mpt_for (mp_p->var);
1206 else
1207 set_memory_partition (mp_p->var, mpt);
1209 mp_p->partitioned_p = true;
1211 mark_sym_for_renaming (mp_p->var);
1212 mark_sym_for_renaming (mpt);
1214 return mpt;
1218 /* Rewrite the alias set for TAG to use the newly created partitions.
1219 If TAG is NULL, rewrite the set of call-clobbered variables.
1220 NEW_ALIASES is a scratch bitmap to build the new set of aliases for
1221 TAG. */
1223 static void
1224 rewrite_alias_set_for (tree tag, bitmap new_aliases)
1226 bitmap_iterator bi;
1227 unsigned i;
1228 tree mpt, sym;
1230 EXECUTE_IF_SET_IN_BITMAP (MTAG_ALIASES (tag), 0, i, bi)
1232 sym = referenced_var (i);
1233 mpt = memory_partition (sym);
1234 if (mpt)
1235 bitmap_set_bit (new_aliases, DECL_UID (mpt));
1236 else
1237 bitmap_set_bit (new_aliases, DECL_UID (sym));
1240 /* Rebuild the may-alias array for TAG. */
1241 bitmap_copy (MTAG_ALIASES (tag), new_aliases);
1245 /* Determine how many virtual operands can be saved by partitioning
1246 MP_P->VAR into MPT. When a symbol S is thrown inside a partition
1247 P, every virtual operand that used to reference S will now
1248 reference P. Whether it reduces the number of virtual operands
1249 depends on:
1251 1- Direct references to S are never saved. Instead of the virtual
1252 operand to S, we will now have a virtual operand to P.
1254 2- Indirect references to S are reduced only for those memory tags
1255 holding S that already had other symbols partitioned into P.
1256 For instance, if a memory tag T has the alias set { a b S c },
1257 the first time we partition S into P, the alias set will become
1258 { a b P c }, so no virtual operands will be saved. However, if
1259 we now partition symbol 'c' into P, then the alias set for T
1260 will become { a b P }, so we will be saving one virtual operand
1261 for every indirect reference to 'c'.
1263 3- Is S is call-clobbered, we save as many virtual operands as
1264 call/asm sites exist in the code, but only if other
1265 call-clobbered symbols have been grouped into P. The first
1266 call-clobbered symbol that we group does not produce any
1267 savings.
1269 MEM_REF_STATS points to CFUN's memory reference information. */
1271 static void
1272 estimate_vop_reduction (struct mem_ref_stats_d *mem_ref_stats,
1273 mem_sym_stats_t mp_p, tree mpt)
1275 unsigned i;
1276 bitmap_iterator bi;
1277 mem_sym_stats_t mpt_stats;
1279 /* We should only get symbols with indirect references here. */
1280 gcc_assert (mp_p->num_indirect_reads > 0 || mp_p->num_indirect_writes > 0);
1282 /* Note that the only statistics we keep for MPT is the set of
1283 parent tags to know which memory tags have had alias members
1284 partitioned, and the indicator has_call_clobbered_vars.
1285 Reference counts are not important for MPT. */
1286 mpt_stats = get_mem_sym_stats_for (mpt);
1288 /* Traverse all the parent tags for MP_P->VAR. For every tag T, if
1289 partition P is already grouping aliases of T, then reduce the
1290 number of virtual operands by the number of direct references
1291 to T. */
1292 if (mp_p->parent_tags)
1294 if (mpt_stats->parent_tags == NULL)
1295 mpt_stats->parent_tags = BITMAP_ALLOC (&alias_bitmap_obstack);
1297 EXECUTE_IF_SET_IN_BITMAP (mp_p->parent_tags, 0, i, bi)
1299 if (bitmap_bit_p (mpt_stats->parent_tags, i))
1301 /* Partition MPT is already partitioning symbols in the
1302 alias set for TAG. This means that we are now saving
1303 1 virtual operand for every direct reference to TAG. */
1304 tree tag = referenced_var (i);
1305 mem_sym_stats_t tag_stats = mem_sym_stats (cfun, tag);
1306 mem_ref_stats->num_vuses -= tag_stats->num_direct_reads;
1307 mem_ref_stats->num_vdefs -= tag_stats->num_direct_writes;
1309 else
1311 /* This is the first symbol in tag I's alias set that is
1312 being grouped under MPT. We will not save any
1313 virtual operands this time, but record that MPT is
1314 grouping a symbol from TAG's alias set so that the
1315 next time we get the savings. */
1316 bitmap_set_bit (mpt_stats->parent_tags, i);
1321 /* If MP_P->VAR is call-clobbered, and MPT is already grouping
1322 call-clobbered symbols, then we will save as many virtual
1323 operands as asm/call sites there are. */
1324 if (is_call_clobbered (mp_p->var))
1326 if (mpt_stats->has_call_clobbered_vars)
1327 mem_ref_stats->num_vdefs -= mem_ref_stats->num_call_sites
1328 + mem_ref_stats->num_asm_sites;
1329 else
1330 mpt_stats->has_call_clobbered_vars = true;
1335 /* Helper for compute_memory_partitions. Transfer reference counts
1336 from pointers to their pointed-to sets. Counters for pointers were
1337 computed by update_alias_info. MEM_REF_STATS points to CFUN's
1338 memory reference information. */
1340 static void
1341 update_reference_counts (struct mem_ref_stats_d *mem_ref_stats)
1343 unsigned i;
1344 bitmap_iterator bi;
1345 mem_sym_stats_t sym_stats;
1347 for (i = 1; i < num_ssa_names; i++)
1349 tree ptr;
1350 struct ptr_info_def *pi;
1352 ptr = ssa_name (i);
1353 if (ptr
1354 && POINTER_TYPE_P (TREE_TYPE (ptr))
1355 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1356 && pi->is_dereferenced)
1358 unsigned j;
1359 bitmap_iterator bj;
1360 tree tag;
1361 mem_sym_stats_t ptr_stats, tag_stats;
1363 /* If PTR has flow-sensitive points-to information, use
1364 PTR's name tag, otherwise use the symbol tag associated
1365 with PTR's symbol. */
1366 if (pi->name_mem_tag)
1367 tag = pi->name_mem_tag;
1368 else
1369 tag = symbol_mem_tag (SSA_NAME_VAR (ptr));
1371 ptr_stats = get_mem_sym_stats_for (ptr);
1372 tag_stats = get_mem_sym_stats_for (tag);
1374 /* TAG has as many direct references as dereferences we
1375 found for its parent pointer. */
1376 tag_stats->num_direct_reads += ptr_stats->num_direct_reads;
1377 tag_stats->num_direct_writes += ptr_stats->num_direct_writes;
1379 /* All the dereferences of pointer PTR are considered direct
1380 references to PTR's memory tag (TAG). In turn,
1381 references to TAG will become virtual operands for every
1382 symbol in TAG's alias set. So, for every symbol ALIAS in
1383 TAG's alias set, add as many indirect references to ALIAS
1384 as direct references there are for TAG. */
1385 if (MTAG_ALIASES (tag))
1386 EXECUTE_IF_SET_IN_BITMAP (MTAG_ALIASES (tag), 0, j, bj)
1388 tree alias = referenced_var (j);
1389 sym_stats = get_mem_sym_stats_for (alias);
1391 /* All the direct references to TAG are indirect references
1392 to ALIAS. */
1393 sym_stats->num_indirect_reads += ptr_stats->num_direct_reads;
1394 sym_stats->num_indirect_writes += ptr_stats->num_direct_writes;
1395 sym_stats->frequency_reads += ptr_stats->frequency_reads;
1396 sym_stats->frequency_writes += ptr_stats->frequency_writes;
1398 /* Indicate that TAG is one of ALIAS's parent tags. */
1399 if (sym_stats->parent_tags == NULL)
1400 sym_stats->parent_tags = BITMAP_ALLOC (&alias_bitmap_obstack);
1401 bitmap_set_bit (sym_stats->parent_tags, DECL_UID (tag));
1406 /* Call-clobbered symbols are indirectly written at every
1407 call/asm site. */
1408 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
1410 tree sym = referenced_var (i);
1411 sym_stats = get_mem_sym_stats_for (sym);
1412 sym_stats->num_indirect_writes += mem_ref_stats->num_call_sites
1413 + mem_ref_stats->num_asm_sites;
1416 /* Addressable symbols are indirectly written at some ASM sites.
1417 Since only ASM sites that clobber memory actually affect
1418 addressable symbols, this is an over-estimation. */
1419 EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
1421 tree sym = referenced_var (i);
1422 sym_stats = get_mem_sym_stats_for (sym);
1423 sym_stats->num_indirect_writes += mem_ref_stats->num_asm_sites;
1428 /* Helper for compute_memory_partitions. Add all memory symbols to
1429 *MP_INFO_P and compute the initial estimate for the total number of
1430 virtual operands needed. MEM_REF_STATS points to CFUN's memory
1431 reference information. On exit, *TAGS_P will contain the list of
1432 memory tags whose alias set need to be rewritten after
1433 partitioning. */
1435 static void
1436 build_mp_info (struct mem_ref_stats_d *mem_ref_stats,
1437 VEC(mem_sym_stats_t,heap) **mp_info_p,
1438 VEC(tree,heap) **tags_p)
1440 tree var;
1441 referenced_var_iterator rvi;
1443 FOR_EACH_REFERENCED_VAR (var, rvi)
1445 mem_sym_stats_t sym_stats;
1446 tree old_mpt;
1448 /* We are only interested in memory symbols other than MPTs. */
1449 if (is_gimple_reg (var) || TREE_CODE (var) == MEMORY_PARTITION_TAG)
1450 continue;
1452 /* Collect memory tags into the TAGS array so that we can
1453 rewrite their alias sets after partitioning. */
1454 if (MTAG_P (var) && MTAG_ALIASES (var))
1455 VEC_safe_push (tree, heap, *tags_p, var);
1457 /* Since we are going to re-compute partitions, any symbols that
1458 used to belong to a partition must be detached from it and
1459 marked for renaming. */
1460 if ((old_mpt = memory_partition (var)) != NULL)
1462 mark_sym_for_renaming (old_mpt);
1463 set_memory_partition (var, NULL_TREE);
1464 mark_sym_for_renaming (var);
1467 sym_stats = get_mem_sym_stats_for (var);
1469 /* Add VAR's reference info to MP_INFO. Note that the only
1470 symbols that make sense to partition are those that have
1471 indirect references. If a symbol S is always directly
1472 referenced, partitioning it will not reduce the number of
1473 virtual operators. The only symbols that are profitable to
1474 partition are those that belong to alias sets and/or are
1475 call-clobbered. */
1476 if (sym_stats->num_indirect_reads > 0
1477 || sym_stats->num_indirect_writes > 0)
1478 VEC_safe_push (mem_sym_stats_t, heap, *mp_info_p, sym_stats);
1480 /* Update the number of estimated VOPS. Note that direct
1481 references to memory tags are always counted as indirect
1482 references to their alias set members, so if a memory tag has
1483 aliases, do not count its direct references to avoid double
1484 accounting. */
1485 if (!MTAG_P (var) || !MTAG_ALIASES (var))
1487 mem_ref_stats->num_vuses += sym_stats->num_direct_reads;
1488 mem_ref_stats->num_vdefs += sym_stats->num_direct_writes;
1491 mem_ref_stats->num_vuses += sym_stats->num_indirect_reads;
1492 mem_ref_stats->num_vdefs += sym_stats->num_indirect_writes;
1497 /* Compute memory partitions. A memory partition (MPT) is an
1498 arbitrary grouping of memory symbols, such that references to one
1499 member of the group is considered a reference to all the members of
1500 the group.
1502 As opposed to alias sets in memory tags, the grouping into
1503 partitions is completely arbitrary and only done to reduce the
1504 number of virtual operands. The only rule that needs to be
1505 observed when creating memory partitions is that given two memory
1506 partitions MPT.i and MPT.j, they must not contain symbols in
1507 common.
1509 Memory partitions are used when putting the program into Memory-SSA
1510 form. In particular, in Memory-SSA PHI nodes are not computed for
1511 individual memory symbols. They are computed for memory
1512 partitions. This reduces the amount of PHI nodes in the SSA graph
1513 at the expense of precision (i.e., it makes unrelated stores affect
1514 each other).
1516 However, it is possible to increase precision by changing this
1517 partitioning scheme. For instance, if the partitioning scheme is
1518 such that get_mpt_for is the identity function (that is,
1519 get_mpt_for (s) = s), this will result in ultimate precision at the
1520 expense of huge SSA webs.
1522 At the other extreme, a partitioning scheme that groups all the
1523 symbols in the same set results in minimal SSA webs and almost
1524 total loss of precision.
1526 The partitioning heuristic uses three parameters to decide the
1527 order in which symbols are processed. The list of symbols is
1528 sorted so that symbols that are more likely to be partitioned are
1529 near the top of the list:
1531 - Execution frequency. If a memory reference is in a frequently
1532 executed code path, grouping it into a partition may block useful
1533 transformations and cause sub-optimal code generation. So, the
1534 partition heuristic tries to avoid grouping symbols with high
1535 execution frequency scores. Execution frequency is taken
1536 directly from the basic blocks where every reference is made (see
1537 update_mem_sym_stats_from_stmt), which in turn uses the
1538 profile guided machinery, so if the program is compiled with PGO
1539 enabled, more accurate partitioning decisions will be made.
1541 - Number of references. Symbols with few references in the code,
1542 are partitioned before symbols with many references.
1544 - NO_ALIAS attributes. Symbols with any of the NO_ALIAS*
1545 attributes are partitioned after symbols marked MAY_ALIAS.
1547 Once the list is sorted, the partitioning proceeds as follows:
1549 1- For every symbol S in MP_INFO, create a new memory partition MP,
1550 if necessary. To avoid memory partitions that contain symbols
1551 from non-conflicting alias sets, memory partitions are
1552 associated to the memory tag that holds S in its alias set. So,
1553 when looking for a memory partition for S, the memory partition
1554 associated with one of the memory tags holding S is chosen. If
1555 none exists, a new one is created.
1557 2- Add S to memory partition MP.
1559 3- Reduce by 1 the number of VOPS for every memory tag holding S.
1561 4- If the total number of VOPS is less than MAX_ALIASED_VOPS or the
1562 average number of VOPS per statement is less than
1563 AVG_ALIASED_VOPS, stop. Otherwise, go to the next symbol in the
1564 list. */
1566 static void
1567 compute_memory_partitions (void)
1569 tree tag;
1570 unsigned i;
1571 mem_sym_stats_t mp_p;
1572 VEC(mem_sym_stats_t,heap) *mp_info;
1573 bitmap new_aliases;
1574 VEC(tree,heap) *tags;
1575 struct mem_ref_stats_d *mem_ref_stats;
1576 int prev_max_aliased_vops;
1577 #ifdef ENABLE_CHECKING
1578 referenced_var_iterator rvi;
1579 #endif
1581 mem_ref_stats = gimple_mem_ref_stats (cfun);
1582 gcc_assert (mem_ref_stats->num_vuses == 0 && mem_ref_stats->num_vdefs == 0);
1584 if (mem_ref_stats->num_mem_stmts == 0)
1585 return;
1587 timevar_push (TV_MEMORY_PARTITIONING);
1589 mp_info = NULL;
1590 tags = NULL;
/* MAX_ALIASED_VOPS may be raised below; the original value is
   restored at the DONE label before returning.  */
1591 prev_max_aliased_vops = MAX_ALIASED_VOPS;
1593 /* Since we clearly cannot lower the number of virtual operators
1594 below the total number of memory statements in the function, we
1595 may need to adjust MAX_ALIASED_VOPS beforehand. */
1596 if (MAX_ALIASED_VOPS < mem_ref_stats->num_mem_stmts)
1597 MAX_ALIASED_VOPS = mem_ref_stats->num_mem_stmts;
1599 /* Update reference stats for all the pointed-to variables and
1600 memory tags. */
1601 update_reference_counts (mem_ref_stats);
1603 /* Add all the memory symbols to MP_INFO. */
1604 build_mp_info (mem_ref_stats, &mp_info, &tags);
1606 /* No partitions required if we are below the threshold. */
1607 if (!need_to_partition_p (mem_ref_stats))
1609 if (dump_file)
1610 fprintf (dump_file, "\nMemory partitioning NOT NEEDED for %s\n",
1611 get_name (current_function_decl));
1612 goto done;
1615 /* Sort the MP_INFO array so that symbols that should be partitioned
1616 first are near the top of the list. */
1617 sort_mp_info (mp_info);
1619 if (dump_file)
1621 fprintf (dump_file, "\nMemory partitioning NEEDED for %s\n\n",
1622 get_name (current_function_decl));
1623 fprintf (dump_file, "Memory symbol references before partitioning:\n");
1624 dump_mp_info (dump_file, mp_info);
1627 /* Create partitions for variables in MP_INFO until we have enough
1628 to lower the total number of VOPS below MAX_ALIASED_VOPS or if
1629 the average number of VOPS per statement is below
1630 AVG_ALIASED_VOPS. */
1631 for (i = 0; VEC_iterate (mem_sym_stats_t, mp_info, i, mp_p); i++)
1633 tree mpt;
1635 /* If we are below the threshold, stop. */
1636 if (!need_to_partition_p (mem_ref_stats))
1637 break;
1639 /* SFTs that are marked unpartitionable should not be added to
1640 partitions. These SFTs are special because they mark the
1641 first SFT into a structure where a pointer is pointing to.
1642 This is needed by the operand scanner to find adjacent
1643 fields. See add_vars_for_offset for details. */
1644 if (TREE_CODE (mp_p->var) == STRUCT_FIELD_TAG
1645 && SFT_UNPARTITIONABLE_P (mp_p->var))
1647 subvar_t subvars;
1648 unsigned i;
1649 tree subvar;
1651 /* For call clobbered we can partition them because we
1652 are sure all subvars end up in the same partition. */
1653 if (!is_call_clobbered (mp_p->var))
1654 continue;
1656 mpt = find_partition_for (mp_p);
1657 estimate_vop_reduction (mem_ref_stats, mp_p, mpt);
1659 /* If we encounter a call-clobbered but unpartitionable SFT
1660 partition all SFTs of its parent variable. */
1661 subvars = get_subvars_for_var (SFT_PARENT_VAR (mp_p->var));
1662 for (i = 0; VEC_iterate (tree, subvars, i, subvar); ++i)
1664 if (!var_ann (subvar)->mpt)
1666 set_memory_partition (subvar, mpt);
1667 mark_sym_for_renaming (subvar);
1669 else
1670 gcc_assert (var_ann (subvar)->mpt == mpt);
1673 /* ??? We possibly underestimate the VOP reduction if
1674 we do not encounter all subvars before we are below
1675 the threshold. We could fix this by sorting in a way
1676 that all subvars of a var appear before all
1677 unpartitionable vars of it. */
1678 continue;
1681 /* We might encounter an already partitioned symbol due to
1682 the SFT handling above. Deal with that. */
1683 if (var_ann (mp_p->var)->mpt)
1684 mpt = var_ann (mp_p->var)->mpt;
1685 else
1686 mpt = find_partition_for (mp_p);
1687 estimate_vop_reduction (mem_ref_stats, mp_p, mpt);
1690 #ifdef ENABLE_CHECKING
1691 /* For all partitioned unpartitionable subvars make sure all
1692 subvars of its parent var are partitioned into the same partition. */
1693 FOR_EACH_REFERENCED_VAR (tag, rvi)
1694 if (TREE_CODE (tag) == STRUCT_FIELD_TAG
1695 && SFT_UNPARTITIONABLE_P (tag)
1696 && var_ann (tag)->mpt != NULL_TREE)
1698 subvar_t subvars;
1699 unsigned i;
1700 tree subvar;
1702 subvars = get_subvars_for_var (SFT_PARENT_VAR (tag));
1703 for (i = 0; VEC_iterate (tree, subvars, i, subvar); ++i)
1704 gcc_assert (var_ann (subvar)->mpt == var_ann (tag)->mpt);
1706 #endif
1708 /* After partitions have been created, rewrite alias sets to use
1709 them instead of the original symbols. This way, if the alias set
1710 was computed as { a b c d e f }, and the subset { b e f } was
1711 grouped into partition MPT.3, then the new alias set for the tag
1712 will be { a c d MPT.3 }.
1714 Note that this is not strictly necessary. The operand scanner
1715 will always check if a symbol belongs to a partition when adding
1716 virtual operands. However, by reducing the size of the alias
1717 sets to be scanned, the work needed inside the operand scanner is
1718 significantly reduced. */
1719 new_aliases = BITMAP_ALLOC (&alias_bitmap_obstack);
/* NEW_ALIASES is reused as scratch space for every tag, so it must
   be cleared between iterations.  */
1721 for (i = 0; VEC_iterate (tree, tags, i, tag); i++)
1723 rewrite_alias_set_for (tag, new_aliases);
1724 bitmap_clear (new_aliases);
1727 BITMAP_FREE (new_aliases);
1729 if (dump_file)
1731 fprintf (dump_file, "\nMemory symbol references after partitioning:\n");
1732 dump_mp_info (dump_file, mp_info);
1735 done:
1736 /* Free allocated memory. */
1737 VEC_free (mem_sym_stats_t, heap, mp_info);
1738 VEC_free (tree, heap, tags);
/* Restore the user-specified MAX_ALIASED_VOPS that may have been
   raised at function entry.  */
1740 MAX_ALIASED_VOPS = prev_max_aliased_vops;
1742 timevar_pop (TV_MEMORY_PARTITIONING);
1746 /* Compute may-alias information for every variable referenced in function
1747 FNDECL.
1749 Alias analysis proceeds in 3 main phases:
1751 1- Points-to and escape analysis.
1753 This phase walks the use-def chains in the SSA web looking for three
1754 things:
1756 * Assignments of the form P_i = &VAR
1757 * Assignments of the form P_i = malloc()
1758 * Pointers and ADDR_EXPR that escape the current function.
1760 The concept of 'escaping' is the same one used in the Java world. When
1761 a pointer or an ADDR_EXPR escapes, it means that it has been exposed
1762 outside of the current function. So, assignment to global variables,
1763 function arguments and returning a pointer are all escape sites, as are
1764 conversions between pointers and integers.
1766 This is where we are currently limited. Since not everything is renamed
1767 into SSA, we lose track of escape properties when a pointer is stashed
1768 inside a field in a structure, for instance. In those cases, we are
1769 assuming that the pointer does escape.
1771 We use escape analysis to determine whether a variable is
1772 call-clobbered. Simply put, if an ADDR_EXPR escapes, then the variable
1773 is call-clobbered. If a pointer P_i escapes, then all the variables
1774 pointed-to by P_i (and its memory tag) also escape.
1776 2- Compute flow-sensitive aliases
1778 We have two classes of memory tags. Memory tags associated with the
1779 pointed-to data type of the pointers in the program. These tags are
1780 called "symbol memory tag" (SMT). The other class are those associated
1781 with SSA_NAMEs, called "name memory tag" (NMT). The basic idea is that
1782 when adding operands for an INDIRECT_REF *P_i, we will first check
1783 whether P_i has a name tag, if it does we use it, because that will have
1784 more precise aliasing information. Otherwise, we use the standard symbol
1785 tag.
1787 In this phase, we go through all the pointers we found in points-to
1788 analysis and create alias sets for the name memory tags associated with
1789 each pointer P_i. If P_i escapes, we mark call-clobbered the variables
1790 it points to and its tag.
1793 3- Compute flow-insensitive aliases
1795 This pass will compare the alias set of every symbol memory tag and
1796 every addressable variable found in the program. Given a symbol
1797 memory tag SMT and an addressable variable V. If the alias sets of
1798 SMT and V conflict (as computed by may_alias_p), then V is marked
1799 as an alias tag and added to the alias set of SMT.
1801 For instance, consider the following function:
1803 foo (int i)
1805 int *p, a, b;
1807 if (i > 10)
1808 p = &a;
1809 else
1810 p = &b;
1812 *p = 3;
1813 a = b + 2;
1814 return *p;
1817 After aliasing analysis has finished, the symbol memory tag for pointer
1818 'p' will have two aliases, namely variables 'a' and 'b'. Every time
1819 pointer 'p' is dereferenced, we want to mark the operation as a
1820 potential reference to 'a' and 'b'.
1822 foo (int i)
1824 int *p, a, b;
1826 if (i_2 > 10)
1827 p_4 = &a;
1828 else
1829 p_6 = &b;
1830 # p_1 = PHI <p_4(1), p_6(2)>;
1832 # a_7 = VDEF <a_3>;
1833 # b_8 = VDEF <b_5>;
1834 *p_1 = 3;
1836 # a_9 = VDEF <a_7>
1837 # VUSE <b_8>
1838 a_9 = b_8 + 2;
1840 # VUSE <a_9>;
1841 # VUSE <b_8>;
1842 return *p_1;
1845 In certain cases, the list of may aliases for a pointer may grow too
1846 large. This may cause an explosion in the number of virtual operands
1847 inserted in the code. Resulting in increased memory consumption and
1848 compilation time.
1850 When the number of virtual operands needed to represent aliased
1851 loads and stores grows too large (configurable with option --param
1852 max-aliased-vops and --param avg-aliased-vops), alias sets are
1853 grouped to avoid severe compile-time slow downs and memory
1854 consumption. See compute_memory_partitions. */
1856 unsigned int
1857 compute_may_aliases (void)
1859 struct alias_info *ai;
1861 timevar_push (TV_TREE_MAY_ALIAS);
1863 memset (&alias_stats, 0, sizeof (alias_stats));
1865 /* Initialize aliasing information. */
1866 ai = init_alias_info ();
1868 /* For each pointer P_i, determine the sets of variables that P_i may
1869 point-to. For every addressable variable V, determine whether the
1870 address of V escapes the current function, making V call-clobbered
1871 (i.e., whether &V is stored in a global variable or if it's passed as a
1872 function call argument). */
1873 compute_points_to_sets (ai);
1875 /* Collect all pointers and addressable variables, compute alias sets,
1876 create memory tags for pointers and promote variables whose address is
1877 not needed anymore. */
1878 setup_pointers_and_addressables (ai);
1880 /* Compute type-based flow-insensitive aliasing for all the type
1881 memory tags. */
1882 compute_flow_insensitive_aliasing (ai);
1884 /* Compute flow-sensitive, points-to based aliasing for all the name
1885 memory tags. */
1886 compute_flow_sensitive_aliasing (ai);
1888 /* Compute call clobbering information. */
1889 compute_call_clobbered (ai);
1891 /* If the program makes no reference to global variables, but it
1892 contains a mixture of pure and non-pure functions, then we need
1893 to create use-def and def-def links between these functions to
1894 avoid invalid transformations on them. */
1895 maybe_create_global_var ();
1897 /* If the program contains ref-all pointers, finalize may-alias information
1898 for them. This pass needs to be run after call-clobbering information
1899 has been computed. */
1900 if (ai->ref_all_symbol_mem_tag)
1901 finalize_ref_all_pointers (ai);
1903 /* Compute memory partitions for every memory variable. */
1904 compute_memory_partitions ();
1906 /* Remove partitions with no symbols. Partitions may end up with an
1907 empty MPT_SYMBOLS set if a previous round of alias analysis
1908 needed to partition more symbols. Since we don't need those
1909 partitions anymore, remove them to free up the space. */
1911 tree mpt;
1912 unsigned i;
1913 VEC(tree,heap) *mpt_table;
1915 mpt_table = gimple_ssa_operands (cfun)->mpt_table;
1916 i = 0;
/* VEC_unordered_remove moves the last element into slot I, so I is
   only advanced when no removal happened.  */
1917 while (i < VEC_length (tree, mpt_table))
1919 mpt = VEC_index (tree, mpt_table, i);
1920 if (MPT_SYMBOLS (mpt) == NULL)
1921 VEC_unordered_remove (tree, mpt_table, i);
1922 else
1923 i++;
1927 /* Populate all virtual operands and newly promoted register operands. */
1929 block_stmt_iterator bsi;
1930 basic_block bb;
1931 FOR_EACH_BB (bb)
1932 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1933 update_stmt_if_modified (bsi_stmt (bsi));
1936 /* Debugging dumps. */
1937 if (dump_file)
1939 dump_mem_ref_stats (dump_file);
1940 dump_alias_info (dump_file);
1941 dump_points_to_info (dump_file);
1943 if (dump_flags & TDF_STATS)
1944 dump_alias_stats (dump_file);
1946 if (dump_flags & TDF_DETAILS)
1947 dump_referenced_vars (dump_file);
1950 /* Report strict aliasing violations. */
1951 strict_aliasing_warning_backend ();
1953 /* Deallocate memory used by aliasing data structures. */
1954 delete_alias_info (ai);
1956 if (need_ssa_update_p ())
1957 update_ssa (TODO_update_ssa);
1959 timevar_pop (TV_TREE_MAY_ALIAS);
/* NOTE(review): the return value presumably feeds the pass manager's
   TODO flags; 0 requests no additional work -- confirm against
   tree-pass.h.  */
1961 return 0;
1964 /* Data structure used to count the number of dereferences to PTR
1965 inside an expression. */
1966 struct count_ptr_d
/* Pointer whose dereferences are being counted.  */
1968 tree ptr;
/* Number of (ALIGN/MISALIGNED_)INDIRECT_REF nodes found whose
   operand is PTR; incremented by count_ptr_derefs.  */
1969 unsigned count;
1973 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
1974 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
1976 static tree
1977 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
1979 struct count_ptr_d *count_p = (struct count_ptr_d *) data;
1981 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
1982 pointer 'ptr' is *not* dereferenced, it is simply used to compute
1983 the address of 'fld' as 'ptr + offsetof(fld)'. */
1984 if (TREE_CODE (*tp) == ADDR_EXPR)
1986 *walk_subtrees = 0;
1987 return NULL_TREE;
1990 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
1991 count_p->count++;
1993 return NULL_TREE;
1997 /* Count the number of direct and indirect uses for pointer PTR in
1998 statement STMT. The number of direct uses is stored in
1999 *NUM_USES_P. Indirect references are counted separately depending
2000 on whether they are store or load operations. The counts are
2001 stored in *NUM_STORES_P and *NUM_LOADS_P. */
2003 void
2004 count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
2005 unsigned *num_loads_p, unsigned *num_stores_p)
2007 ssa_op_iter i;
2008 tree use;
2010 *num_uses_p = 0;
2011 *num_loads_p = 0;
2012 *num_stores_p = 0;
2014 /* Find out the total number of uses of PTR in STMT. */
2015 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
2016 if (use == ptr)
2017 (*num_uses_p)++;
2019 /* Now count the number of indirect references to PTR. This is
2020 truly awful, but we don't have much choice. There are no parent
2021 pointers inside INDIRECT_REFs, so an expression like
2022 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
2023 find all the indirect and direct uses of x_1 inside. The only
2024 shortcut we can take is the fact that GIMPLE only allows
2025 INDIRECT_REFs inside the expressions below. */
2026 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
2027 || (TREE_CODE (stmt) == RETURN_EXPR
2028 && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
2029 || TREE_CODE (stmt) == ASM_EXPR
2030 || TREE_CODE (stmt) == CALL_EXPR)
2032 tree lhs, rhs;
2034 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
2036 lhs = GIMPLE_STMT_OPERAND (stmt, 0);
2037 rhs = GIMPLE_STMT_OPERAND (stmt, 1);
2039 else if (TREE_CODE (stmt) == RETURN_EXPR)
2041 tree e = TREE_OPERAND (stmt, 0);
2042 lhs = GIMPLE_STMT_OPERAND (e, 0);
2043 rhs = GIMPLE_STMT_OPERAND (e, 1);
2045 else if (TREE_CODE (stmt) == ASM_EXPR)
2047 lhs = ASM_OUTPUTS (stmt);
2048 rhs = ASM_INPUTS (stmt);
2050 else
2052 lhs = NULL_TREE;
2053 rhs = stmt;
2056 if (lhs
2057 && (TREE_CODE (lhs) == TREE_LIST
2058 || EXPR_P (lhs)
2059 || GIMPLE_STMT_P (lhs)))
2061 struct count_ptr_d count;
2062 count.ptr = ptr;
2063 count.count = 0;
2064 walk_tree (&lhs, count_ptr_derefs, &count, NULL);
2065 *num_stores_p = count.count;
2068 if (rhs
2069 && (TREE_CODE (rhs) == TREE_LIST
2070 || EXPR_P (rhs)
2071 || GIMPLE_STMT_P (rhs)))
2073 struct count_ptr_d count;
2074 count.ptr = ptr;
2075 count.count = 0;
2076 walk_tree (&rhs, count_ptr_derefs, &count, NULL);
2077 *num_loads_p = count.count;
2081 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
2084 /* Remove memory references stats for function FN. */
2086 void
2087 delete_mem_ref_stats (struct function *fn)
2089 if (gimple_mem_ref_stats (fn)->mem_sym_stats)
2091 free_alloc_pool (mem_sym_stats_pool);
2092 pointer_map_destroy (gimple_mem_ref_stats (fn)->mem_sym_stats);
2094 gimple_mem_ref_stats (fn)->mem_sym_stats = NULL;
2098 /* Initialize memory reference stats. */
2100 static void
2101 init_mem_ref_stats (void)
2103 struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);
2105 mem_sym_stats_pool = create_alloc_pool ("Mem sym stats",
2106 sizeof (struct mem_sym_stats_d),
2107 100);
2108 memset (mem_ref_stats, 0, sizeof (struct mem_ref_stats_d));
2109 mem_ref_stats->mem_sym_stats = pointer_map_create ();
2113 /* Helper for init_alias_info. Reset existing aliasing information. */
2115 static void
2116 reset_alias_info (void)
2118 referenced_var_iterator rvi;
2119 tree var;
2120 unsigned i;
2121 bitmap active_nmts, all_nmts;
2123 /* Clear the set of addressable variables. We do not need to clear
2124 the TREE_ADDRESSABLE bit on every symbol because we are going to
2125 re-compute addressability here. */
2126 bitmap_clear (gimple_addressable_vars (cfun));
2128 active_nmts = BITMAP_ALLOC (&alias_bitmap_obstack);
2129 all_nmts = BITMAP_ALLOC (&alias_bitmap_obstack);
2131 /* Clear flow-insensitive alias information from each symbol. */
2132 FOR_EACH_REFERENCED_VAR (var, rvi)
2134 if (is_gimple_reg (var))
2135 continue;
2137 if (MTAG_P (var))
2138 MTAG_ALIASES (var) = NULL;
2140 /* Memory partition information will be computed from scratch. */
2141 if (TREE_CODE (var) == MEMORY_PARTITION_TAG)
2142 MPT_SYMBOLS (var) = NULL;
2144 /* Collect all the name tags to determine if we have any
2145 orphaned that need to be removed from the IL. A name tag
2146 will be orphaned if it is not associated with any active SSA
2147 name. */
2148 if (TREE_CODE (var) == NAME_MEMORY_TAG)
2149 bitmap_set_bit (all_nmts, DECL_UID (var));
2151 /* Since we are about to re-discover call-clobbered
2152 variables, clear the call-clobbered flag. Variables that
2153 are intrinsically call-clobbered (globals, local statics,
2154 etc) will not be marked by the aliasing code, so we can't
2155 remove them from CALL_CLOBBERED_VARS.
2157 NB: STRUCT_FIELDS are still call clobbered if they are for a
2158 global variable, so we *don't* clear their call clobberedness
2159 just because they are tags, though we will clear it if they
2160 aren't for global variables. */
2161 if (TREE_CODE (var) == NAME_MEMORY_TAG
2162 || TREE_CODE (var) == SYMBOL_MEMORY_TAG
2163 || TREE_CODE (var) == MEMORY_PARTITION_TAG
2164 || !is_global_var (var))
2165 clear_call_clobbered (var);
2168 /* Clear flow-sensitive points-to information from each SSA name. */
2169 for (i = 1; i < num_ssa_names; i++)
2171 tree name = ssa_name (i);
2173 if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
2174 continue;
2176 if (SSA_NAME_PTR_INFO (name))
2178 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);
2180 /* Clear all the flags but keep the name tag to
2181 avoid creating new temporaries unnecessarily. If
2182 this pointer is found to point to a subset or
2183 superset of its former points-to set, then a new
2184 tag will need to be created in create_name_tags. */
2185 pi->pt_anything = 0;
2186 pi->pt_null = 0;
2187 pi->value_escapes_p = 0;
2188 pi->is_dereferenced = 0;
2189 if (pi->pt_vars)
2190 bitmap_clear (pi->pt_vars);
2192 /* Add NAME's name tag to the set of active tags. */
2193 if (pi->name_mem_tag)
2194 bitmap_set_bit (active_nmts, DECL_UID (pi->name_mem_tag));
2198 /* Name memory tags that are no longer associated with an SSA name
2199 are considered stale and should be removed from the IL. All the
2200 name tags that are in the set ALL_NMTS but not in ACTIVE_NMTS are
2201 considered stale and marked for renaming. */
2202 bitmap_and_compl_into (all_nmts, active_nmts);
2203 mark_set_for_renaming (all_nmts);
2205 BITMAP_FREE (all_nmts);
2206 BITMAP_FREE (active_nmts);
2210 /* Initialize the data structures used for alias analysis. */
2212 static struct alias_info *
2213 init_alias_info (void)
2215 struct alias_info *ai;
2216 referenced_var_iterator rvi;
2217 tree var;
2219 ai = XCNEW (struct alias_info);
2220 ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
2221 sbitmap_zero (ai->ssa_names_visited);
2222 ai->processed_ptrs = VEC_alloc (tree, heap, 50);
2223 ai->written_vars = pointer_set_create ();
2224 ai->dereferenced_ptrs_store = pointer_set_create ();
2225 ai->dereferenced_ptrs_load = pointer_set_create ();
2227 /* Clear out all memory reference stats. */
2228 init_mem_ref_stats ();
2230 /* If aliases have been computed before, clear existing information. */
2231 if (gimple_aliases_computed_p (cfun))
2232 reset_alias_info ();
2233 else
2235 /* If this is the first time we compute aliasing information,
2236 every non-register symbol will need to be put into SSA form
2237 (the initial SSA form only operates on GIMPLE registers). */
2238 FOR_EACH_REFERENCED_VAR (var, rvi)
2239 if (!is_gimple_reg (var))
2240 mark_sym_for_renaming (var);
2243 /* Next time, we will need to reset alias information. */
2244 cfun->gimple_df->aliases_computed_p = true;
2245 if (alias_bitmap_obstack.elements != NULL)
2246 bitmap_obstack_release (&alias_bitmap_obstack);
2247 bitmap_obstack_initialize (&alias_bitmap_obstack);
2249 return ai;
2253 /* Deallocate memory used by alias analysis. */
2255 static void
2256 delete_alias_info (struct alias_info *ai)
2258 size_t i;
2260 sbitmap_free (ai->ssa_names_visited);
2262 VEC_free (tree, heap, ai->processed_ptrs);
2264 for (i = 0; i < ai->num_addressable_vars; i++)
2265 free (ai->addressable_vars[i]);
2266 free (ai->addressable_vars);
2268 for (i = 0; i < ai->num_pointers; i++)
2269 free (ai->pointers[i]);
2270 free (ai->pointers);
2272 pointer_set_destroy (ai->written_vars);
2273 pointer_set_destroy (ai->dereferenced_ptrs_store);
2274 pointer_set_destroy (ai->dereferenced_ptrs_load);
2275 free (ai);
2277 delete_mem_ref_stats (cfun);
2278 delete_points_to_sets ();
2282 /* Used for hashing to identify pointer infos with identical
2283 pt_vars bitmaps. */
2285 static int
2286 eq_ptr_info (const void *p1, const void *p2)
2288 const struct ptr_info_def *n1 = (const struct ptr_info_def *) p1;
2289 const struct ptr_info_def *n2 = (const struct ptr_info_def *) p2;
2290 return bitmap_equal_p (n1->pt_vars, n2->pt_vars);
2293 static hashval_t
2294 ptr_info_hash (const void *p)
2296 const struct ptr_info_def *n = (const struct ptr_info_def *) p;
2297 return bitmap_hash (n->pt_vars);
2301 /* Create name tags for all the pointers that have been dereferenced.
2302 We only create a name tag for a pointer P if P is found to point to
2303 a set of variables (so that we can alias them to *P) or if it is
2304 the result of a call to malloc (which means that P cannot point to
2305 anything else nor alias any other variable).
2307 If two pointers P and Q point to the same set of variables, they
2308 are assigned the same name tag. */
2310 static void
2311 create_name_tags (void)
2313 size_t i;
2314 VEC (tree, heap) *with_ptvars = NULL;
2315 tree ptr;
2316 htab_t ptr_hash;
2318 /* Collect the list of pointers with a non-empty points to set. */
2319 for (i = 1; i < num_ssa_names; i++)
2321 tree ptr = ssa_name (i);
2322 struct ptr_info_def *pi;
2324 if (!ptr
2325 || !POINTER_TYPE_P (TREE_TYPE (ptr))
2326 || !SSA_NAME_PTR_INFO (ptr))
2327 continue;
2329 pi = SSA_NAME_PTR_INFO (ptr);
2331 if (pi->pt_anything || !pi->is_dereferenced)
2333 /* No name tags for pointers that have not been
2334 dereferenced or point to an arbitrary location. */
2335 pi->name_mem_tag = NULL_TREE;
2336 continue;
2339 /* Set pt_anything on the pointers without pt_vars filled in so
2340 that they are assigned a symbol tag. */
2341 if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
2342 VEC_safe_push (tree, heap, with_ptvars, ptr);
2343 else
2344 set_pt_anything (ptr);
2347 /* If we didn't find any pointers with pt_vars set, we're done. */
2348 if (!with_ptvars)
2349 return;
2351 ptr_hash = htab_create (10, ptr_info_hash, eq_ptr_info, NULL);
2353 /* Now go through the pointers with pt_vars, and find a name tag
2354 with the same pt_vars as this pointer, or create one if one
2355 doesn't exist. */
2356 for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
2358 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2359 tree old_name_tag = pi->name_mem_tag;
2360 struct ptr_info_def **slot;
2362 /* If PTR points to a set of variables, check if we don't
2363 have another pointer Q with the same points-to set before
2364 creating a tag. If so, use Q's tag instead of creating a
2365 new one.
2367 This is important for not creating unnecessary symbols
2368 and also for copy propagation. If we ever need to
2369 propagate PTR into Q or vice-versa, we would run into
2370 problems if they both had different name tags because
2371 they would have different SSA version numbers (which
2372 would force us to take the name tags in and out of SSA). */
2373 slot = (struct ptr_info_def **) htab_find_slot (ptr_hash, pi, INSERT);
2374 if (*slot)
2375 pi->name_mem_tag = (*slot)->name_mem_tag;
2376 else
2378 *slot = pi;
2380 /* If we didn't find a pointer with the same points-to set
2381 as PTR, create a new name tag if needed. */
2382 if (pi->name_mem_tag == NULL_TREE)
2383 pi->name_mem_tag = get_nmt_for (ptr);
2386 /* If the new name tag computed for PTR is different than
2387 the old name tag that it used to have, then the old tag
2388 needs to be removed from the IL, so we mark it for
2389 renaming. */
2390 if (old_name_tag && old_name_tag != pi->name_mem_tag)
2391 mark_sym_for_renaming (old_name_tag);
2393 /* Inherit volatility from the pointed-to type. */
2394 TREE_THIS_VOLATILE (pi->name_mem_tag)
2395 |= TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
2397 /* Mark the new name tag for renaming. */
2398 mark_sym_for_renaming (pi->name_mem_tag);
2401 htab_delete (ptr_hash);
2403 VEC_free (tree, heap, with_ptvars);
2407 /* Union the alias set SET into the may-aliases for TAG. */
2409 static void
2410 union_alias_set_into (tree tag, bitmap set)
2412 bitmap ma = MTAG_ALIASES (tag);
2414 if (bitmap_empty_p (set))
2415 return;
2417 if (!ma)
2418 ma = MTAG_ALIASES (tag) = BITMAP_ALLOC (&alias_bitmap_obstack);
2419 bitmap_ior_into (ma, set);
2423 /* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
2424 the name memory tag (NMT) associated with P_i. If P_i escapes, then its
2425 name tag and the variables it points-to are call-clobbered. Finally, if
2426 P_i escapes and we could not determine where it points to, then all the
2427 variables in the same alias set as *P_i are marked call-clobbered. This
2428 is necessary because we must assume that P_i may take the address of any
2429 variable in the same alias set. */
2431 static void
2432 compute_flow_sensitive_aliasing (struct alias_info *ai)
2434 size_t i;
2435 tree ptr;
2437 timevar_push (TV_FLOW_SENSITIVE);
2438 set_used_smts ();
2440 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
2442 if (!find_what_p_points_to (ptr))
2443 set_pt_anything (ptr);
2446 create_name_tags ();
2448 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
2450 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2451 tree tag = symbol_mem_tag (SSA_NAME_VAR (ptr));
2453 /* Set up aliasing information for PTR's name memory tag (if it has
2454 one). Note that only pointers that have been dereferenced will
2455 have a name memory tag. */
2456 if (pi->name_mem_tag && pi->pt_vars)
2458 if (!bitmap_empty_p (pi->pt_vars))
2460 union_alias_set_into (pi->name_mem_tag, pi->pt_vars);
2461 union_alias_set_into (tag, pi->pt_vars);
2462 bitmap_clear_bit (MTAG_ALIASES (tag), DECL_UID (tag));
2464 /* It may be the case that this the tag uid was the only
2465 bit we had set in the aliases list, and in this case,
2466 we don't want to keep an empty bitmap, as this
2467 asserts in tree-ssa-operands.c . */
2468 if (bitmap_empty_p (MTAG_ALIASES (tag)))
2469 BITMAP_FREE (MTAG_ALIASES (tag));
2473 timevar_pop (TV_FLOW_SENSITIVE);
2477 /* Return TRUE if at least one symbol in TAG2's alias set is also
2478 present in TAG1's alias set. */
2480 static bool
2481 have_common_aliases_p (bitmap tag1aliases, bitmap tag2aliases)
2484 /* This is the old behavior of have_common_aliases_p, which is to
2485 return false if both sets are empty, or one set is and the other
2486 isn't. */
2487 if ((tag1aliases == NULL && tag2aliases != NULL)
2488 || (tag2aliases == NULL && tag1aliases != NULL)
2489 || (tag1aliases == NULL && tag2aliases == NULL))
2490 return false;
2492 return bitmap_intersect_p (tag1aliases, tag2aliases);
2495 /* Compute type-based alias sets. Traverse all the pointers and
2496 addressable variables found in setup_pointers_and_addressables.
2498 For every pointer P in AI->POINTERS and addressable variable V in
2499 AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's symbol
2500 memory tag (SMT) if their alias sets conflict. V is then marked as
2501 an aliased symbol so that the operand scanner knows that statements
2502 containing V have aliased operands. */
2504 static void
2505 compute_flow_insensitive_aliasing (struct alias_info *ai)
2507 size_t i;
2509 timevar_push (TV_FLOW_INSENSITIVE);
2510 /* For every pointer P, determine which addressable variables may alias
2511 with P's symbol memory tag. */
2512 for (i = 0; i < ai->num_pointers; i++)
2514 size_t j;
2515 struct alias_map_d *p_map = ai->pointers[i];
2516 tree tag = symbol_mem_tag (p_map->var);
2517 tree var;
2519 /* Call-clobbering information is not finalized yet at this point. */
2520 if (PTR_IS_REF_ALL (p_map->var))
2521 continue;
2523 for (j = 0; j < ai->num_addressable_vars; j++)
2525 struct alias_map_d *v_map;
2526 var_ann_t v_ann;
2527 bool tag_stored_p, var_stored_p;
2529 v_map = ai->addressable_vars[j];
2530 var = v_map->var;
2531 v_ann = var_ann (var);
2533 /* Skip memory tags and variables that have never been
2534 written to. We also need to check if the variables are
2535 call-clobbered because they may be overwritten by
2536 function calls. */
2537 tag_stored_p = pointer_set_contains (ai->written_vars, tag)
2538 || is_call_clobbered (tag);
2539 var_stored_p = pointer_set_contains (ai->written_vars, var)
2540 || is_call_clobbered (var);
2541 if (!tag_stored_p && !var_stored_p)
2542 continue;
2544 if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
2546 /* We should never have a var with subvars here, because
2547 they shouldn't get into the set of addressable vars */
2548 gcc_assert (!var_can_have_subvars (var)
2549 || get_subvars_for_var (var) == NULL);
2551 /* Add VAR to TAG's may-aliases set. */
2552 add_may_alias (tag, var);
2557 /* Since this analysis is based exclusively on symbols, it fails to
2558 handle cases where two pointers P and Q have different memory
2559 tags with conflicting alias set numbers but no aliased symbols in
2560 common.
2562 For example, suppose that we have two memory tags SMT.1 and SMT.2
2563 such that
2565 may-aliases (SMT.1) = { a }
2566 may-aliases (SMT.2) = { b }
2568 and the alias set number of SMT.1 conflicts with that of SMT.2.
2569 Since they don't have symbols in common, loads and stores from
2570 SMT.1 and SMT.2 will seem independent of each other, which will
2571 lead to the optimizers making invalid transformations (see
2572 testsuite/gcc.c-torture/execute/pr15262-[12].c).
2574 To avoid this problem, we do a final traversal of AI->POINTERS
2575 looking for pairs of pointers that have no aliased symbols in
2576 common and yet have conflicting alias set numbers. */
2577 for (i = 0; i < ai->num_pointers; i++)
2579 size_t j;
2580 struct alias_map_d *p_map1 = ai->pointers[i];
2581 tree tag1 = symbol_mem_tag (p_map1->var);
2582 bitmap may_aliases1 = MTAG_ALIASES (tag1);
2584 if (PTR_IS_REF_ALL (p_map1->var))
2585 continue;
2587 for (j = i + 1; j < ai->num_pointers; j++)
2589 struct alias_map_d *p_map2 = ai->pointers[j];
2590 tree tag2 = symbol_mem_tag (p_map2->var);
2591 bitmap may_aliases2 = may_aliases (tag2);
2593 if (PTR_IS_REF_ALL (p_map2->var))
2594 continue;
2596 /* If the pointers may not point to each other, do nothing. */
2597 if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
2598 continue;
2600 /* The two pointers may alias each other. If they already have
2601 symbols in common, do nothing. */
2602 if (have_common_aliases_p (may_aliases1, may_aliases2))
2603 continue;
2605 if (may_aliases2 && !bitmap_empty_p (may_aliases2))
2607 union_alias_set_into (tag1, may_aliases2);
2609 else
2611 /* Since TAG2 does not have any aliases of its own, add
2612 TAG2 itself to the alias set of TAG1. */
2613 add_may_alias (tag1, tag2);
2618 timevar_pop (TV_FLOW_INSENSITIVE);
2622 /* Finalize may-alias information for ref-all pointers. Traverse all
2623 the addressable variables found in setup_pointers_and_addressables.
2625 If flow-sensitive alias analysis has attached a name memory tag to
2626 a ref-all pointer, we will use it for the dereferences because that
2627 will have more precise aliasing information. But if there is no
2628 name tag, we will use a special symbol tag that aliases all the
2629 call-clobbered addressable variables. */
2631 static void
2632 finalize_ref_all_pointers (struct alias_info *ai)
2634 size_t i;
2636 /* First add the real call-clobbered variables. */
2637 for (i = 0; i < ai->num_addressable_vars; i++)
2639 tree var = ai->addressable_vars[i]->var;
2640 if (is_call_clobbered (var))
2641 add_may_alias (ai->ref_all_symbol_mem_tag, var);
2644 /* Then add the call-clobbered pointer memory tags. See
2645 compute_flow_insensitive_aliasing for the rationale. */
2646 for (i = 0; i < ai->num_pointers; i++)
2648 tree ptr = ai->pointers[i]->var, tag;
2649 /* Avoid adding to self and clean up. */
2650 if (PTR_IS_REF_ALL (ptr))
2652 struct ptr_info_def *pi = get_ptr_info (ptr);
2653 if (pi->is_dereferenced)
2654 pi->pt_anything = 0;
2655 continue;
2657 tag = symbol_mem_tag (ptr);
2658 if (is_call_clobbered (tag))
2659 add_may_alias (ai->ref_all_symbol_mem_tag, tag);
2665 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
2667 static void
2668 create_alias_map_for (tree var, struct alias_info *ai)
2670 struct alias_map_d *alias_map;
2671 alias_map = XCNEW (struct alias_map_d);
2672 alias_map->var = var;
2673 alias_map->set = get_alias_set (var);
2674 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
2678 /* Create memory tags for all the dereferenced pointers and build the
2679 ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
2680 sets. Based on the address escape and points-to information collected
2681 earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
2682 variables whose address is not needed anymore. */
2684 static void
2685 setup_pointers_and_addressables (struct alias_info *ai)
2687 size_t num_addressable_vars, num_pointers;
2688 referenced_var_iterator rvi;
2689 tree var;
2690 VEC (tree, heap) *varvec = NULL;
2691 safe_referenced_var_iterator srvi;
2693 /* Size up the arrays ADDRESSABLE_VARS and POINTERS. */
2694 num_addressable_vars = num_pointers = 0;
2696 FOR_EACH_REFERENCED_VAR (var, rvi)
2698 if (may_be_aliased (var))
2699 num_addressable_vars++;
2701 if (POINTER_TYPE_P (TREE_TYPE (var)))
2703 /* Since we don't keep track of volatile variables, assume that
2704 these pointers are used in indirect store operations. */
2705 if (TREE_THIS_VOLATILE (var))
2706 pointer_set_insert (ai->dereferenced_ptrs_store, var);
2708 num_pointers++;
2712 /* Create ADDRESSABLE_VARS and POINTERS. Note that these arrays are
2713 always going to be slightly bigger than we actually need them
2714 because some TREE_ADDRESSABLE variables will be marked
2715 non-addressable below and only pointers with unique symbol tags are
2716 going to be added to POINTERS. */
2717 ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
2718 ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
2719 ai->num_addressable_vars = 0;
2720 ai->num_pointers = 0;
2722 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
2724 subvar_t svars;
2726 /* Name memory tags already have flow-sensitive aliasing
2727 information, so they need not be processed by
2728 compute_flow_insensitive_aliasing. Similarly, symbol memory
2729 tags are already accounted for when we process their
2730 associated pointer.
2732 Structure fields, on the other hand, have to have some of this
2733 information processed for them, but it's pointless to mark them
2734 non-addressable (since they are fake variables anyway). */
2735 if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
2736 continue;
2738 /* Remove the ADDRESSABLE flag from every addressable variable whose
2739 address is not needed anymore. This is caused by the propagation
2740 of ADDR_EXPR constants into INDIRECT_REF expressions and the
2741 removal of dead pointer assignments done by the early scalar
2742 cleanup passes. */
2743 if (TREE_ADDRESSABLE (var))
2745 if (!bitmap_bit_p (gimple_addressable_vars (cfun), DECL_UID (var))
2746 && TREE_CODE (var) != RESULT_DECL
2747 && !is_global_var (var))
2749 bool okay_to_mark = true;
2751 /* Since VAR is now a regular GIMPLE register, we will need
2752 to rename VAR into SSA afterwards. */
2753 mark_sym_for_renaming (var);
2755 /* If VAR can have sub-variables, and any of its
2756 sub-variables has its address taken, then we cannot
2757 remove the addressable flag from VAR. */
2758 if (var_can_have_subvars (var)
2759 && (svars = get_subvars_for_var (var)))
2761 unsigned int i;
2762 tree subvar;
2764 for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
2766 if (bitmap_bit_p (gimple_addressable_vars (cfun),
2767 DECL_UID (subvar)))
2768 okay_to_mark = false;
2769 mark_sym_for_renaming (subvar);
2773 /* The address of VAR is not needed, remove the
2774 addressable bit, so that it can be optimized as a
2775 regular variable. */
2776 if (okay_to_mark)
2778 /* The memory partition holding VAR will no longer
2779 contain VAR, and statements referencing it will need
2780 to be updated. */
2781 if (memory_partition (var))
2782 mark_sym_for_renaming (memory_partition (var));
2784 mark_non_addressable (var);
2789 /* Global variables and addressable locals may be aliased. Create an
2790 entry in ADDRESSABLE_VARS for VAR. */
2791 if (may_be_aliased (var))
2793 if (!var_can_have_subvars (var)
2794 || get_subvars_for_var (var) == NULL)
2795 create_alias_map_for (var, ai);
2797 mark_sym_for_renaming (var);
2800 /* Add pointer variables that have been dereferenced to the POINTERS
2801 array and create a symbol memory tag for them. */
2802 if (POINTER_TYPE_P (TREE_TYPE (var)))
2804 if ((pointer_set_contains (ai->dereferenced_ptrs_store, var)
2805 || pointer_set_contains (ai->dereferenced_ptrs_load, var)))
2807 tree tag, old_tag;
2808 var_ann_t t_ann;
2810 /* If pointer VAR still doesn't have a memory tag
2811 associated with it, create it now or re-use an
2812 existing one. */
2813 tag = get_smt_for (var, ai);
2814 t_ann = var_ann (tag);
2816 /* The symbol tag will need to be renamed into SSA
2817 afterwards. Note that we cannot do this inside
2818 get_smt_for because aliasing may run multiple times
2819 and we only create symbol tags the first time. */
2820 mark_sym_for_renaming (tag);
2822 /* Similarly, if pointer VAR used to have another type
2823 tag, we will need to process it in the renamer to
2824 remove the stale virtual operands. */
2825 old_tag = symbol_mem_tag (var);
2826 if (old_tag)
2827 mark_sym_for_renaming (old_tag);
2829 /* Associate the tag with pointer VAR. */
2830 set_symbol_mem_tag (var, tag);
2832 /* If pointer VAR has been used in a store operation,
2833 then its memory tag must be marked as written-to. */
2834 if (pointer_set_contains (ai->dereferenced_ptrs_store, var))
2835 pointer_set_insert (ai->written_vars, tag);
2837 else
2839 /* The pointer has not been dereferenced. If it had a
2840 symbol memory tag, remove it and mark the old tag for
2841 renaming to remove it out of the IL. */
2842 tree tag = symbol_mem_tag (var);
2843 if (tag)
2845 mark_sym_for_renaming (tag);
2846 set_symbol_mem_tag (var, NULL_TREE);
2852 VEC_free (tree, heap, varvec);
2856 /* Determine whether to use .GLOBAL_VAR to model call clobbering
2857 semantics. If the function makes no references to global
2858 variables and contains at least one call to a non-pure function,
2859 then we need to mark the side-effects of the call using .GLOBAL_VAR
2860 to represent all possible global memory referenced by the callee. */
2862 static void
2863 maybe_create_global_var (void)
2865 /* No need to create it, if we have one already. */
2866 if (gimple_global_var (cfun) == NULL_TREE)
2868 struct mem_ref_stats_d *stats = gimple_mem_ref_stats (cfun);
2870 /* Create .GLOBAL_VAR if there are no call-clobbered
2871 variables and the program contains a mixture of pure/const
2872 and regular function calls. This is to avoid the problem
2873 described in PR 20115:
2875 int X;
2876 int func_pure (void) { return X; }
2877 int func_non_pure (int a) { X += a; }
2878 int foo ()
2880 int a = func_pure ();
2881 func_non_pure (a);
2882 a = func_pure ();
2883 return a;
2886 Since foo() has no call-clobbered variables, there is
2887 no relationship between the calls to func_pure and
2888 func_non_pure. Since func_pure has no side-effects, value
2889 numbering optimizations elide the second call to func_pure.
2890 So, if we have some pure/const and some regular calls in the
2891 program we create .GLOBAL_VAR to avoid missing these
2892 relations. */
2893 if (bitmap_empty_p (gimple_call_clobbered_vars (cfun))
2894 && stats->num_call_sites > 0
2895 && stats->num_pure_const_call_sites > 0
2896 && stats->num_call_sites > stats->num_pure_const_call_sites)
2897 create_global_var ();
2902 /* Return TRUE if pointer PTR may point to variable VAR.
2904 MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR
2905 This is needed because when checking for type conflicts we are
2906 interested in the alias set of the memory location pointed-to by
2907 PTR. The alias set of PTR itself is irrelevant.
2909 VAR_ALIAS_SET is the alias set for VAR. */
2911 static bool
2912 may_alias_p (tree ptr, alias_set_type mem_alias_set,
2913 tree var, alias_set_type var_alias_set,
2914 bool alias_set_only)
2916 tree mem;
2918 alias_stats.alias_queries++;
2919 alias_stats.simple_queries++;
2921 /* By convention, a variable cannot alias itself. */
2922 mem = symbol_mem_tag (ptr);
2923 if (mem == var)
2925 alias_stats.alias_noalias++;
2926 alias_stats.simple_resolved++;
2927 return false;
2930 /* If -fargument-noalias-global is > 2, pointer arguments may
2931 not point to anything else. */
2932 if (flag_argument_noalias > 2 && TREE_CODE (ptr) == PARM_DECL)
2934 alias_stats.alias_noalias++;
2935 alias_stats.simple_resolved++;
2936 return false;
2939 /* If -fargument-noalias-global is > 1, pointer arguments may
2940 not point to global variables. */
2941 if (flag_argument_noalias > 1 && is_global_var (var)
2942 && TREE_CODE (ptr) == PARM_DECL)
2944 alias_stats.alias_noalias++;
2945 alias_stats.simple_resolved++;
2946 return false;
2949 /* If either MEM or VAR is a read-only global and the other one
2950 isn't, then PTR cannot point to VAR. */
2951 if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
2952 || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
2954 alias_stats.alias_noalias++;
2955 alias_stats.simple_resolved++;
2956 return false;
2959 gcc_assert (TREE_CODE (mem) == SYMBOL_MEMORY_TAG);
2961 if (!DECL_NO_TBAA_P (ptr))
2963 alias_stats.tbaa_queries++;
2965 /* If the alias sets don't conflict then MEM cannot alias VAR. */
2966 if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
2968 alias_stats.alias_noalias++;
2969 alias_stats.tbaa_resolved++;
2970 return false;
2973 /* If VAR is a record or union type, PTR cannot point into VAR
2974 unless there is some explicit address operation in the
2975 program that can reference a field of the type pointed-to by
2976 PTR. This also assumes that the types of both VAR and PTR
2977 are contained within the compilation unit, and that there is
2978 no fancy addressing arithmetic associated with any of the
2979 types involved. */
2980 if (mem_alias_set != 0 && var_alias_set != 0)
2982 tree ptr_type = TREE_TYPE (ptr);
2983 tree var_type = TREE_TYPE (var);
2985 /* The star count is -1 if the type at the end of the
2986 pointer_to chain is not a record or union type. */
2987 if ((!alias_set_only) &&
2988 ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
2990 int ptr_star_count = 0;
2992 /* ipa_type_escape_star_count_of_interesting_type is a
2993 little too restrictive for the pointer type, need to
2994 allow pointers to primitive types as long as those
2995 types cannot be pointers to everything. */
2996 while (POINTER_TYPE_P (ptr_type))
2998 /* Strip the *s off. */
2999 ptr_type = TREE_TYPE (ptr_type);
3000 ptr_star_count++;
3003 /* There does not appear to be a better test to see if
3004 the pointer type was one of the pointer to everything
3005 types. */
3006 if (ptr_star_count > 0)
3008 alias_stats.structnoaddress_queries++;
3009 if (ipa_type_escape_field_does_not_clobber_p (var_type,
3010 TREE_TYPE (ptr)))
3012 alias_stats.structnoaddress_resolved++;
3013 alias_stats.alias_noalias++;
3014 return false;
3017 else if (ptr_star_count == 0)
3019 /* If PTR_TYPE was not really a pointer to type, it cannot
3020 alias. */
3021 alias_stats.structnoaddress_queries++;
3022 alias_stats.structnoaddress_resolved++;
3023 alias_stats.alias_noalias++;
3024 return false;
3030 alias_stats.alias_mayalias++;
3031 return true;
3035 /* Add ALIAS to the set of variables that may alias VAR. */
3037 static void
3038 add_may_alias (tree var, tree alias)
3040 /* Don't allow self-referential aliases. */
3041 gcc_assert (var != alias);
3043 /* ALIAS must be addressable if it's being added to an alias set. */
3044 #if 1
3045 TREE_ADDRESSABLE (alias) = 1;
3046 #else
3047 gcc_assert (may_be_aliased (alias));
3048 #endif
3050 /* VAR must be a symbol or a name tag. */
3051 gcc_assert (TREE_CODE (var) == SYMBOL_MEMORY_TAG
3052 || TREE_CODE (var) == NAME_MEMORY_TAG);
3054 if (MTAG_ALIASES (var) == NULL)
3055 MTAG_ALIASES (var) = BITMAP_ALLOC (&alias_bitmap_obstack);
3057 bitmap_set_bit (MTAG_ALIASES (var), DECL_UID (alias));
3061 /* Mark pointer PTR as pointing to an arbitrary memory location. */
3063 static void
3064 set_pt_anything (tree ptr)
3066 struct ptr_info_def *pi = get_ptr_info (ptr);
3068 pi->pt_anything = 1;
3069 pi->pt_vars = NULL;
3071 /* The pointer used to have a name tag, but we now found it pointing
3072 to an arbitrary location. The name tag needs to be renamed and
3073 disassociated from PTR. */
3074 if (pi->name_mem_tag)
3076 mark_sym_for_renaming (pi->name_mem_tag);
3077 pi->name_mem_tag = NULL_TREE;
3082 /* Return true if STMT is an "escape" site from the current function. Escape
3083 sites those statements which might expose the address of a variable
3084 outside the current function. STMT is an escape site iff:
3086 1- STMT is a function call, or
3087 2- STMT is an __asm__ expression, or
3088 3- STMT is an assignment to a non-local variable, or
3089 4- STMT is a return statement.
3091 Return the type of escape site found, if we found one, or NO_ESCAPE
3092 if none. */
3094 enum escape_type
3095 is_escape_site (tree stmt)
3097 tree call = get_call_expr_in (stmt);
3098 if (call != NULL_TREE)
3100 if (!TREE_SIDE_EFFECTS (call))
3101 return ESCAPE_TO_PURE_CONST;
3103 return ESCAPE_TO_CALL;
3105 else if (TREE_CODE (stmt) == ASM_EXPR)
3106 return ESCAPE_TO_ASM;
3107 else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
3109 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3111 /* Get to the base of _REF nodes. */
3112 if (TREE_CODE (lhs) != SSA_NAME)
3113 lhs = get_base_address (lhs);
3115 /* If we couldn't recognize the LHS of the assignment, assume that it
3116 is a non-local store. */
3117 if (lhs == NULL_TREE)
3118 return ESCAPE_UNKNOWN;
3120 if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == NOP_EXPR
3121 || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == CONVERT_EXPR
3122 || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
3124 tree from
3125 = TREE_TYPE (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0));
3126 tree to = TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 1));
3128 /* If the RHS is a conversion between a pointer and an integer, the
3129 pointer escapes since we can't track the integer. */
3130 if (POINTER_TYPE_P (from) && !POINTER_TYPE_P (to))
3131 return ESCAPE_BAD_CAST;
3133 /* Same if the RHS is a conversion between a regular pointer and a
3134 ref-all pointer since we can't track the SMT of the former. */
3135 if (POINTER_TYPE_P (from) && !TYPE_REF_CAN_ALIAS_ALL (from)
3136 && POINTER_TYPE_P (to) && TYPE_REF_CAN_ALIAS_ALL (to))
3137 return ESCAPE_BAD_CAST;
3140 /* If the LHS is an SSA name, it can't possibly represent a non-local
3141 memory store. */
3142 if (TREE_CODE (lhs) == SSA_NAME)
3143 return NO_ESCAPE;
3145 /* FIXME: LHS is not an SSA_NAME. Even if it's an assignment to a
3146 local variables we cannot be sure if it will escape, because we
3147 don't have information about objects not in SSA form. Need to
3148 implement something along the lines of
3150 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
3151 Midkiff, ``Escape analysis for java,'' in Proceedings of the
3152 Conference on Object-Oriented Programming Systems, Languages, and
3153 Applications (OOPSLA), pp. 1-19, 1999. */
3154 return ESCAPE_STORED_IN_GLOBAL;
3156 else if (TREE_CODE (stmt) == RETURN_EXPR)
3157 return ESCAPE_TO_RETURN;
3159 return NO_ESCAPE;
3162 /* Create a new memory tag of type TYPE.
3163 Does NOT push it into the current binding. */
3165 tree
3166 create_tag_raw (enum tree_code code, tree type, const char *prefix)
3168 tree tmp_var;
3170 tmp_var = build_decl (code, create_tmp_var_name (prefix), type);
3172 /* Make the variable writable. */
3173 TREE_READONLY (tmp_var) = 0;
3175 /* It doesn't start out global. */
3176 MTAG_GLOBAL (tmp_var) = 0;
3177 TREE_STATIC (tmp_var) = 0;
3178 TREE_USED (tmp_var) = 1;
3180 return tmp_var;
3183 /* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
3184 is considered to represent all the pointers whose pointed-to types are
3185 in the same alias set class. Otherwise, the tag represents a single
3186 SSA_NAME pointer variable. */
3188 static tree
3189 create_memory_tag (tree type, bool is_type_tag)
3191 tree tag = create_tag_raw (is_type_tag ? SYMBOL_MEMORY_TAG : NAME_MEMORY_TAG,
3192 type, (is_type_tag) ? "SMT" : "NMT");
3194 /* By default, memory tags are local variables. Alias analysis will
3195 determine whether they should be considered globals. */
3196 DECL_CONTEXT (tag) = current_function_decl;
3198 /* Memory tags are by definition addressable. */
3199 TREE_ADDRESSABLE (tag) = 1;
3201 set_symbol_mem_tag (tag, NULL_TREE);
3203 /* Add the tag to the symbol table. */
3204 add_referenced_var (tag);
3206 return tag;
3210 /* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
3211 This is used if P_i has been found to point to a specific set of
3212 variables or to a non-aliased memory location like the address returned
3213 by malloc functions. */
3215 static tree
3216 get_nmt_for (tree ptr)
3218 struct ptr_info_def *pi = get_ptr_info (ptr);
3219 tree tag = pi->name_mem_tag;
3221 if (tag == NULL_TREE)
3222 tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
3223 return tag;
/* Return the symbol memory tag associated to pointer PTR.  A memory
   tag is an artificial variable that represents the memory location
   pointed-to by PTR.  It is used to model the effects of pointer
   de-references on addressable variables.

   AI points to the data gathered during alias analysis.  This
   function populates the array AI->POINTERS.  */

static tree
get_smt_for (tree ptr, struct alias_info *ai)
{
  size_t i;
  tree tag;
  tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
  alias_set_type tag_set = get_alias_set (tag_type);

  /* We use a unique memory tag for all the ref-all pointers.  It is
     created lazily on the first ref-all pointer seen.  */
  if (PTR_IS_REF_ALL (ptr))
    {
      if (!ai->ref_all_symbol_mem_tag)
	ai->ref_all_symbol_mem_tag = create_memory_tag (void_type_node, true);
      return ai->ref_all_symbol_mem_tag;
    }

  /* To avoid creating unnecessary memory tags, only create one memory tag
     per alias set class.  Note that it may be tempting to group
     memory tags based on conflicting alias sets instead of
     equivalence.  That would be wrong because alias sets are not
     necessarily transitive (as demonstrated by the libstdc++ test
     23_containers/vector/cons/4.cc).  Given three alias sets A, B, C
     such that conflicts (A, B) == true and conflicts (A, C) == true,
     it does not necessarily follow that conflicts (B, C) == true.  */
  for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
    {
      struct alias_map_d *curr = ai->pointers[i];
      tree curr_tag = symbol_mem_tag (curr->var);
      if (tag_set == curr->set)
	{
	  /* Found an existing pointer with the same alias set class;
	     share its tag.  */
	  tag = curr_tag;
	  break;
	}
    }

  /* If VAR cannot alias with any of the existing memory tags, create a new
     tag for PTR and add it to the POINTERS array.  */
  if (tag == NULL_TREE)
    {
      struct alias_map_d *alias_map;

      /* If PTR did not have a symbol tag already, create a new SMT.*
	 artificial variable representing the memory location
	 pointed-to by PTR.  */
      tag = symbol_mem_tag (ptr);
      if (tag == NULL_TREE)
	tag = create_memory_tag (tag_type, true);

      /* Add PTR to the POINTERS array.  Note that we are not interested in
	 PTR's alias set.  Instead, we cache the alias set for the memory that
	 PTR points to.  */
      alias_map = XCNEW (struct alias_map_d);
      alias_map->var = ptr;
      alias_map->set = tag_set;
      ai->pointers[ai->num_pointers++] = alias_map;
    }

  /* If the pointed-to type is volatile, so is the tag.  */
  TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);

  /* Make sure that the symbol tag has the same alias set as the
     pointed-to type or at least accesses through the pointer will
     alias that set.  The latter can happen after the vectorizer
     created pointers of vector type.  */
  gcc_assert (tag_set == get_alias_set (tag)
	      || alias_set_subset_of (tag_set, get_alias_set (tag)));

  return tag;
}
3306 /* Create GLOBAL_VAR, an artificial global variable to act as a
3307 representative of all the variables that may be clobbered by function
3308 calls. */
3310 static void
3311 create_global_var (void)
3313 tree global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
3314 void_type_node);
3315 DECL_ARTIFICIAL (global_var) = 1;
3316 TREE_READONLY (global_var) = 0;
3317 DECL_EXTERNAL (global_var) = 1;
3318 TREE_STATIC (global_var) = 1;
3319 TREE_USED (global_var) = 1;
3320 DECL_CONTEXT (global_var) = NULL_TREE;
3321 TREE_THIS_VOLATILE (global_var) = 0;
3322 TREE_ADDRESSABLE (global_var) = 0;
3324 create_var_ann (global_var);
3325 mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
3326 add_referenced_var (global_var);
3327 mark_sym_for_renaming (global_var);
3328 cfun->gimple_df->global_var = global_var;
3332 /* Dump alias statistics on FILE. */
3334 static void
3335 dump_alias_stats (FILE *file)
3337 const char *funcname
3338 = lang_hooks.decl_printable_name (current_function_decl, 2);
3339 fprintf (file, "\nAlias statistics for %s\n\n", funcname);
3340 fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
3341 fprintf (file, "Total alias mayalias results:\t%u\n",
3342 alias_stats.alias_mayalias);
3343 fprintf (file, "Total alias noalias results:\t%u\n",
3344 alias_stats.alias_noalias);
3345 fprintf (file, "Total simple queries:\t%u\n",
3346 alias_stats.simple_queries);
3347 fprintf (file, "Total simple resolved:\t%u\n",
3348 alias_stats.simple_resolved);
3349 fprintf (file, "Total TBAA queries:\t%u\n",
3350 alias_stats.tbaa_queries);
3351 fprintf (file, "Total TBAA resolved:\t%u\n",
3352 alias_stats.tbaa_resolved);
3353 fprintf (file, "Total non-addressable structure type queries:\t%u\n",
3354 alias_stats.structnoaddress_queries);
3355 fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
3356 alias_stats.structnoaddress_resolved);
3360 /* Dump alias information on FILE. */
3362 void
3363 dump_alias_info (FILE *file)
3365 size_t i;
3366 const char *funcname
3367 = lang_hooks.decl_printable_name (current_function_decl, 2);
3368 referenced_var_iterator rvi;
3369 tree var;
3371 fprintf (file, "\nAlias information for %s\n\n", funcname);
3373 dump_memory_partitions (file);
3375 fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
3377 fprintf (file, "Aliased symbols\n\n");
3379 FOR_EACH_REFERENCED_VAR (var, rvi)
3381 if (may_be_aliased (var))
3382 dump_variable (file, var);
3385 fprintf (file, "\nDereferenced pointers\n\n");
3387 FOR_EACH_REFERENCED_VAR (var, rvi)
3388 if (symbol_mem_tag (var))
3389 dump_variable (file, var);
3391 fprintf (file, "\nSymbol memory tags\n\n");
3393 FOR_EACH_REFERENCED_VAR (var, rvi)
3395 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
3396 dump_variable (file, var);
3399 fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
3401 fprintf (file, "SSA_NAME pointers\n\n");
3402 for (i = 1; i < num_ssa_names; i++)
3404 tree ptr = ssa_name (i);
3405 struct ptr_info_def *pi;
3407 if (ptr == NULL_TREE)
3408 continue;
3410 pi = SSA_NAME_PTR_INFO (ptr);
3411 if (!SSA_NAME_IN_FREE_LIST (ptr)
3412 && pi
3413 && pi->name_mem_tag)
3414 dump_points_to_info_for (file, ptr);
3417 fprintf (file, "\nName memory tags\n\n");
3419 FOR_EACH_REFERENCED_VAR (var, rvi)
3421 if (TREE_CODE (var) == NAME_MEMORY_TAG)
3422 dump_variable (file, var);
3425 fprintf (file, "\n");
/* Dump alias information for the current function on stderr.  */

void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
3438 /* Return the alias information associated with pointer T. It creates a
3439 new instance if none existed. */
3441 struct ptr_info_def *
3442 get_ptr_info (tree t)
3444 struct ptr_info_def *pi;
3446 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
3448 pi = SSA_NAME_PTR_INFO (t);
3449 if (pi == NULL)
3451 pi = GGC_CNEW (struct ptr_info_def);
3452 SSA_NAME_PTR_INFO (t) = pi;
3455 return pi;
3459 /* Dump points-to information for SSA_NAME PTR into FILE. */
3461 void
3462 dump_points_to_info_for (FILE *file, tree ptr)
3464 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3466 print_generic_expr (file, ptr, dump_flags);
3468 if (pi)
3470 if (pi->name_mem_tag)
3472 fprintf (file, ", name memory tag: ");
3473 print_generic_expr (file, pi->name_mem_tag, dump_flags);
3476 if (pi->is_dereferenced)
3477 fprintf (file, ", is dereferenced");
3479 if (pi->value_escapes_p)
3480 fprintf (file, ", its value escapes");
3482 if (pi->pt_anything)
3483 fprintf (file, ", points-to anything");
3485 if (pi->pt_null)
3486 fprintf (file, ", points-to NULL");
3488 if (pi->pt_vars)
3490 fprintf (file, ", points-to vars: ");
3491 dump_decl_set (file, pi->pt_vars);
3495 fprintf (file, "\n");
3499 /* Dump points-to information for VAR into stderr. */
3501 void
3502 debug_points_to_info_for (tree var)
3504 dump_points_to_info_for (stderr, var);
3508 /* Dump points-to information into FILE. NOTE: This function is slow, as
3509 it needs to traverse the whole CFG looking for pointer SSA_NAMEs. */
3511 void
3512 dump_points_to_info (FILE *file)
3514 basic_block bb;
3515 block_stmt_iterator si;
3516 ssa_op_iter iter;
3517 const char *fname =
3518 lang_hooks.decl_printable_name (current_function_decl, 2);
3519 referenced_var_iterator rvi;
3520 tree var;
3522 fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
3524 /* First dump points-to information for the default definitions of
3525 pointer variables. This is necessary because default definitions are
3526 not part of the code. */
3527 FOR_EACH_REFERENCED_VAR (var, rvi)
3529 if (POINTER_TYPE_P (TREE_TYPE (var)))
3531 tree def = gimple_default_def (cfun, var);
3532 if (def)
3533 dump_points_to_info_for (file, def);
3537 /* Dump points-to information for every pointer defined in the program. */
3538 FOR_EACH_BB (bb)
3540 tree phi;
3542 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3544 tree ptr = PHI_RESULT (phi);
3545 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
3546 dump_points_to_info_for (file, ptr);
3549 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
3551 tree stmt = bsi_stmt (si);
3552 tree def;
3553 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
3554 if (TREE_CODE (def) == SSA_NAME
3555 && POINTER_TYPE_P (TREE_TYPE (def)))
3556 dump_points_to_info_for (file, def);
3560 fprintf (file, "\n");
/* Dump points-to information for the current function to stderr.  */

void
debug_points_to_info (void)
{
  dump_points_to_info (stderr);
}
3572 /* Dump to FILE the list of variables that may be aliasing VAR. */
3574 void
3575 dump_may_aliases_for (FILE *file, tree var)
3577 bitmap aliases;
3579 aliases = MTAG_ALIASES (var);
3580 if (aliases)
3582 bitmap_iterator bi;
3583 unsigned int i;
3584 tree al;
3586 fprintf (file, "{ ");
3587 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
3589 al = referenced_var (i);
3590 print_generic_expr (file, al, dump_flags);
3591 fprintf (file, " ");
3593 fprintf (file, "}");
3598 /* Dump to stderr the list of variables that may be aliasing VAR. */
3600 void
3601 debug_may_aliases_for (tree var)
3603 dump_may_aliases_for (stderr, var);
3607 /* Return true if VAR may be aliased. */
3609 bool
3610 may_be_aliased (tree var)
3612 /* Obviously. */
3613 if (TREE_ADDRESSABLE (var))
3614 return true;
3616 /* Globally visible variables can have their addresses taken by other
3617 translation units. */
3618 if (MTAG_P (var)
3619 && (MTAG_GLOBAL (var) || TREE_PUBLIC (var)))
3620 return true;
3621 else if (!MTAG_P (var)
3622 && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
3623 return true;
3625 /* Automatic variables can't have their addresses escape any other
3626 way. This must be after the check for global variables, as
3627 extern declarations do not have TREE_STATIC set. */
3628 if (!TREE_STATIC (var))
3629 return false;
3631 /* If we're in unit-at-a-time mode, then we must have seen all
3632 occurrences of address-of operators, and so we can trust
3633 TREE_ADDRESSABLE. Otherwise we can only be sure the variable
3634 isn't addressable if it's local to the current function. */
3635 if (flag_unit_at_a_time)
3636 return false;
3638 if (decl_function_context (var) == current_function_decl)
3639 return false;
3641 return true;
3644 /* The following is based on code in add_stmt_operand to ensure that the
3645 same defs/uses/vdefs/vuses will be found after replacing a reference
3646 to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
3647 is the address of var. Return a memtag for the ptr, after adding the
3648 proper may_aliases to it (which are the aliases of var, if it has any,
3649 or var itself). */
3651 static tree
3652 add_may_alias_for_new_tag (tree tag, tree var)
3654 bitmap aliases = NULL;
3656 if (MTAG_P (var))
3657 aliases = may_aliases (var);
3659 /* Case 1: |aliases| == 1 */
3660 if (aliases
3661 && bitmap_single_bit_set_p (aliases))
3663 tree ali = referenced_var (bitmap_first_set_bit (aliases));
3664 if (TREE_CODE (ali) == SYMBOL_MEMORY_TAG)
3665 return ali;
3668 /* Case 2: |aliases| == 0 */
3669 if (aliases == NULL)
3670 add_may_alias (tag, var);
3671 else
3673 /* Case 3: |aliases| > 1 */
3674 union_alias_set_into (tag, aliases);
3676 return tag;
/* Create a new symbol tag for PTR.  Construct the may-alias list of this type
   tag so that it has the aliasing of VAR, or of the relevant subvars of VAR
   according to the location accessed by EXPR.

   Note, the set of aliases represented by the new symbol tag are not marked
   for renaming.  */

void
new_type_alias (tree ptr, tree var, tree expr)
{
  tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
  tree tag;
  subvar_t svars;
  /* ALI tracks the tag that ends up associated with PTR; it may
     differ from TAG when add_may_alias_for_new_tag reuses an
     existing symbol tag ('Case 1').  */
  tree ali = NULL_TREE;
  HOST_WIDE_INT offset, size, maxsize;
  tree ref;
  VEC (tree, heap) *overlaps = NULL;
  unsigned int len, i;
  tree subvar;

  /* PTR must not have a symbol tag yet, and VAR must be a real
     variable, not a memory tag.  */
  gcc_assert (symbol_mem_tag (ptr) == NULL_TREE);
  gcc_assert (!MTAG_P (var));

  ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
  gcc_assert (ref);

  tag = create_memory_tag (tag_type, true);
  set_symbol_mem_tag (ptr, tag);

  /* Add VAR to the may-alias set of PTR's new symbol tag.  If VAR has
     subvars, add the subvars to the tag instead of the actual var.  */
  if (var_can_have_subvars (ref)
      && (svars = get_subvars_for_var (ref)))
    {
      /* Direct access to REF: only the subvars overlapping the
	 accessed [offset, offset+maxsize) range are relevant.  */
      for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
	{
	  bool exact;

	  if (overlap_subvar (offset, maxsize, subvar, &exact))
	    VEC_safe_push (tree, heap, overlaps, subvar);
	}
      gcc_assert (overlaps != NULL);
    }
  else if (var_can_have_subvars (var)
	   && (svars = get_subvars_for_var (var)))
    {
      /* If the REF is not a direct access to VAR (e.g., it is a dereference
	 of a pointer), we should scan the virtual operands of REF the same
	 way as tree-ssa-operands do.  At the moment, this is somewhat
	 difficult, so we just give up and add all the subvars of VAR.
	 On mem-ssa branch, the scanning for virtual operands have been
	 split from the rest of tree-ssa-operands, so it should be much
	 easier to fix this problem correctly once mem-ssa is merged.  */
      for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
	VEC_safe_push (tree, heap, overlaps, subvar);

      gcc_assert (overlaps != NULL);
    }
  else
    /* No subvars at all: alias the variable itself.  */
    ali = add_may_alias_for_new_tag (tag, var);

  len = VEC_length (tree, overlaps);
  if (len > 0)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "\nnumber of overlapping subvars = %u\n", len);

      if (len == 1)
	ali = add_may_alias_for_new_tag (tag, VEC_index (tree, overlaps, 0));
      else if (len > 1)
	{
	  unsigned int k;
	  tree sv_var;

	  for (k = 0; VEC_iterate (tree, overlaps, k, sv_var); k++)
	    {
	      ali = add_may_alias_for_new_tag (tag, sv_var);

	      if (ali != tag)
		{
		  /* Can happen only if 'Case 1' of add_may_alias_for_new_tag
		     took place.  Since more than one svar was found, we add
		     'ali' as one of the may_aliases of the new tag.  */
		  add_may_alias (tag, ali);
		  ali = tag;
		}
	    }
	}
      VEC_free (tree, heap, overlaps);
    }

  /* Record the final tag for PTR and inherit VAR's global status.  */
  set_symbol_mem_tag (ptr, ali);
  MTAG_GLOBAL (tag) = is_global_var (var);
}
/* This represents the used range of a variable.  */

typedef struct used_part
{
  /* Lowest byte/bit offset seen used in this variable.  */
  HOST_WIDE_INT minused;
  /* One past the highest offset seen used in this variable.  */
  HOST_WIDE_INT maxused;
  /* True if we have an explicit use/def of some portion of this variable,
     even if it is all of it. i.e. a.b = 5 or temp = a.b.  */
  bool explicit_uses;
  /* True if we have an implicit use/def of some portion of this
     variable.  Implicit uses occur when we can't tell what part we
     are referencing, and have to make conservative assumptions.  */
  bool implicit_uses;
  /* True if the structure is only written to or taken its address.  */
  bool write_only;
} *used_part_t;

/* An array of used_part structures, indexed by variable uid.  */

static htab_t used_portions;

/* Hash table entry mapping a variable's DECL_UID to its used_part
   record.  */
struct used_part_map
{
  /* DECL_UID of the variable this entry describes (hash key).  */
  unsigned int uid;
  /* The used-range record for that variable.  */
  used_part_t to;
};
3802 /* Return true if the uid in the two used part maps are equal. */
3804 static int
3805 used_part_map_eq (const void *va, const void *vb)
3807 const struct used_part_map *a = (const struct used_part_map *) va;
3808 const struct used_part_map *b = (const struct used_part_map *) vb;
3809 return (a->uid == b->uid);
3812 /* Hash a from uid in a used_part_map. */
3814 static unsigned int
3815 used_part_map_hash (const void *item)
3817 return ((const struct used_part_map *)item)->uid;
3820 /* Free a used part map element. */
3822 static void
3823 free_used_part_map (void *item)
3825 free (((struct used_part_map *)item)->to);
3826 free (item);
3829 /* Lookup a used_part structure for a UID. */
3831 static used_part_t
3832 up_lookup (unsigned int uid)
3834 struct used_part_map *h, in;
3835 in.uid = uid;
3836 h = (struct used_part_map *) htab_find_with_hash (used_portions, &in, uid);
3837 if (!h)
3838 return NULL;
3839 return h->to;
3842 /* Insert the pair UID, TO into the used part hashtable. */
3844 static void
3845 up_insert (unsigned int uid, used_part_t to)
3847 struct used_part_map *h;
3848 void **loc;
3850 h = XNEW (struct used_part_map);
3851 h->uid = uid;
3852 h->to = to;
3853 loc = htab_find_slot_with_hash (used_portions, h,
3854 uid, INSERT);
3855 if (*loc != NULL)
3856 free (*loc);
3857 *(struct used_part_map **) loc = h;
3861 /* Given a variable uid, UID, get or create the entry in the used portions
3862 table for the variable. */
3864 static used_part_t
3865 get_or_create_used_part_for (size_t uid)
3867 used_part_t up;
3868 if ((up = up_lookup (uid)) == NULL)
3870 up = XCNEW (struct used_part);
3871 up->minused = INT_MAX;
3872 up->maxused = 0;
3873 up->explicit_uses = false;
3874 up->implicit_uses = false;
3875 up->write_only = true;
3878 return up;
/* Create and return a structure sub-variable for field type FIELD at
   offset OFFSET, with size SIZE, of variable VAR.  If ALIAS_SET not
   -1 this field is non-addressable and we should use this alias set
   with this field.  BASE_FOR_COMPONENTS is true if this SFT stands
   for the start of a component group.  */

static tree
create_sft (tree var, tree field, unsigned HOST_WIDE_INT offset,
	    unsigned HOST_WIDE_INT size, alias_set_type alias_set,
	    bool base_for_components)
{
  tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");

  /* We need to copy the various flags from VAR to SUBVAR, so that
     they are is_global_var iff the original variable was.  */
  DECL_CONTEXT (subvar) = DECL_CONTEXT (var);
  MTAG_GLOBAL (subvar) = DECL_EXTERNAL (var);
  TREE_PUBLIC  (subvar) = TREE_PUBLIC (var);
  TREE_STATIC (subvar) = TREE_STATIC (var);
  TREE_READONLY (subvar) = TREE_READONLY (var);
  TREE_ADDRESSABLE (subvar) = TREE_ADDRESSABLE (var);

  /* Add the new variable to REFERENCED_VARS.  */
  set_symbol_mem_tag (subvar, NULL);
  add_referenced_var (subvar);

  /* Record which variable and which slice of it this SFT covers.  */
  SFT_PARENT_VAR (subvar) = var;
  SFT_OFFSET (subvar) = offset;
  SFT_SIZE (subvar) = size;
  SFT_ALIAS_SET (subvar) = alias_set;
  SFT_BASE_FOR_COMPONENTS_P (subvar) = base_for_components;
  SFT_UNPARTITIONABLE_P (subvar) = false;

  return subvar;
}
/* Given an aggregate VAR, create the subvariables that represent its
   fields.  */

static void
create_overlap_variables_for (tree var)
{
  VEC(fieldoff_s,heap) *fieldstack = NULL;
  used_part_t up;
  size_t uid = DECL_UID (var);

  /* Only variables with a recorded, non-write-only used range get
     subvariables.  */
  up = up_lookup (uid);
  if (!up
      || up->write_only)
    return;

  push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL,
			       TREE_TYPE (var));
  /* Make sure to not create SFTs for structs we won't generate variable
     infos for.  See tree-ssa-structalias.c:create_variable_info_for ().  */
  if (VEC_length (fieldoff_s, fieldstack) > 1
      && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
    {
      subvar_t *subvars;
      fieldoff_s *fo;
      bool notokay = false;
      int fieldcount = 0;
      int i;
      HOST_WIDE_INT lastfooffset = -1;
      HOST_WIDE_INT lastfosize = -1;
      tree lastfotype = NULL_TREE;

      /* Not all fields have DECL_SIZE set, and those that don't, we don't
	 know their size, and thus, can't handle.
	 The same is true of fields with DECL_SIZE that is not an integer
	 constant (such as variable sized fields).
	 Fields with offsets which are not constant will have an offset < 0
	 We *could* handle fields that are constant sized arrays, but
	 currently don't.  Doing so would require some extra changes to
	 tree-ssa-operands.c.  */

      for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
	{
	  if (!fo->size
	      || TREE_CODE (fo->size) != INTEGER_CST
	      || fo->offset < 0)
	    {
	      notokay = true;
	      break;
	    }
	  fieldcount++;
	}

      /* The current heuristic we use is as follows:
	 If the variable has no used portions in this function, no
	 structure vars are created for it.
	 Otherwise,
	   If the variable has less than SALIAS_MAX_IMPLICIT_FIELDS,
	   we always create structure vars for them.
	   If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
	   some explicit uses, we create structure vars for them.
	   If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
	   no explicit uses, we do not create structure vars for them.  */

      if (fieldcount >= SALIAS_MAX_IMPLICIT_FIELDS
	  && !up->explicit_uses)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Variable ");
	      print_generic_expr (dump_file, var, 0);
	      fprintf (dump_file, " has no explicit uses in this function, and is > SALIAS_MAX_IMPLICIT_FIELDS, so skipping\n");
	    }
	  notokay = true;
	}

      /* Bail out, if we can't create overlap variables.  */
      if (notokay)
	{
	  VEC_free (fieldoff_s, heap, fieldstack);
	  return;
	}

      /* Otherwise, create the variables.  */
      subvars = lookup_subvars_for_var (var);
      *subvars = VEC_alloc (tree, gc, VEC_length (fieldoff_s, fieldstack));

      sort_fieldstack (fieldstack);

      for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); ++i)
	{
	  HOST_WIDE_INT fosize;
	  tree currfotype, subvar;

	  fosize = TREE_INT_CST_LOW (fo->size);
	  currfotype = fo->type;

	  /* If this field isn't in the used portion,
	     or it has the exact same offset and size as the last
	     field, skip it.  Note that we always need the field at
	     offset 0 so we can properly handle pointers to the
	     structure.  */

	  if ((fo->offset != 0
	       && ((fo->offset <= up->minused
		    && fo->offset + fosize <= up->minused)
		   || fo->offset >= up->maxused))
	      || (fo->offset == lastfooffset
		  && fosize == lastfosize
		  && currfotype == lastfotype))
	    continue;
	  subvar = create_sft (var, fo->type, fo->offset,
			       fosize, fo->alias_set, fo->base_for_components);
	  VEC_quick_push (tree, *subvars, subvar);

	  if (dump_file)
	    {
	      fprintf (dump_file, "structure field tag %s created for var %s",
		       get_name (subvar), get_name (var));
	      fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
		       SFT_OFFSET (subvar));
	      fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
		       SFT_SIZE (subvar));
	      fprintf (dump_file, "\n");
	    }

	  /* Remember the last emitted field so duplicates (same offset,
	     size and type) can be skipped above.  */
	  lastfotype = currfotype;
	  lastfooffset = fo->offset;
	  lastfosize = fosize;
	}

      /* Once we have created subvars, the original is no longer call
	 clobbered on its own.  Its call clobbered status depends
	 completely on the call clobbered status of the subvars.

	 add_referenced_var in the above loop will take care of
	 marking subvars of global variables as call clobbered for us
	 to start, since they are global as well.  */
      clear_call_clobbered (var);
    }

  VEC_free (fieldoff_s, heap, fieldstack);
}
/* Find the conservative answer to the question of what portions of what
   structures are used by this statement.  We assume that if we have a
   component ref with a known size + offset, that we only need that part
   of the structure.  For unknown cases, or cases where we do something
   to the whole structure, we assume we need to create fields for the
   entire structure.

   walk_tree callback; LHS_P is non-NULL when the tree being visited
   sits on the left-hand side of an assignment (used to maintain the
   write_only flag).  */

static tree
find_used_portions (tree *tp, int *walk_subtrees, void *lhs_p)
{
  switch (TREE_CODE (*tp))
    {
    case GIMPLE_MODIFY_STMT:
      /* Recurse manually here to track whether the use is in the
	 LHS of an assignment.  */
      find_used_portions (&GIMPLE_STMT_OPERAND (*tp, 0), walk_subtrees, tp);
      return find_used_portions (&GIMPLE_STMT_OPERAND (*tp, 1),
				 walk_subtrees, NULL);
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
      {
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitmaxsize;
	HOST_WIDE_INT bitpos;
	tree ref;
	ref = get_ref_base_and_extent (*tp, &bitpos, &bitsize, &bitmaxsize);
	if (DECL_P (ref)
	    && var_can_have_subvars (ref)
	    && bitmaxsize != -1)
	  {
	    size_t uid = DECL_UID (ref);
	    used_part_t up;

	    up = get_or_create_used_part_for (uid);

	    /* Widen the recorded used range to include this access.  */
	    if (bitpos <= up->minused)
	      up->minused = bitpos;
	    if ((bitpos + bitmaxsize >= up->maxused))
	      up->maxused = bitpos + bitmaxsize;

	    /* An access whose exact extent is known is explicit;
	       otherwise we only know it conservatively.  */
	    if (bitsize == bitmaxsize)
	      up->explicit_uses = true;
	    else
	      up->implicit_uses = true;
	    if (!lhs_p)
	      up->write_only = false;
	    up_insert (uid, up);

	    *walk_subtrees = 0;
	    return NULL_TREE;
	  }
      }
      break;
      /* This is here to make sure we mark the entire base variable as used
	 when you take its address.  Because our used portion analysis is
	 simple, we aren't looking at casts or pointer arithmetic to see what
	 happens when you take the address.  */
    case ADDR_EXPR:
      {
	tree var = get_base_address (TREE_OPERAND (*tp, 0));

	if (var
	    && DECL_P (var)
	    && DECL_SIZE (var)
	    && var_can_have_subvars (var)
	    && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
	  {
	    used_part_t up;
	    size_t uid = DECL_UID (var);

	    up = get_or_create_used_part_for (uid);

	    /* Taking the address conservatively uses the whole
	       variable.  */
	    up->minused = 0;
	    up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
	    up->implicit_uses = true;
	    if (!lhs_p)
	      up->write_only = false;

	    up_insert (uid, up);
	    *walk_subtrees = 0;
	    return NULL_TREE;
	  }
      }
      break;
    case CALL_EXPR:
      {
	int i;
	int nargs = call_expr_nargs (*tp);

	/* Only address-of arguments can expose portions of a
	   variable to the callee; recurse into those.  */
	for (i = 0; i < nargs; i++)
	  {
	    tree *arg = &CALL_EXPR_ARG (*tp, i);
	    if (TREE_CODE (*arg) == ADDR_EXPR)
	      find_used_portions (arg, walk_subtrees, NULL);
	  }
	*walk_subtrees = 0;
	return NULL_TREE;
      }
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      {
	tree var = *tp;
	if (DECL_SIZE (var)
	    && var_can_have_subvars (var)
	    && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
	  {
	    used_part_t up;
	    size_t uid = DECL_UID (var);

	    up = get_or_create_used_part_for (uid);

	    /* A bare reference to the decl uses all of it.  */
	    up->minused = 0;
	    up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
	    up->implicit_uses = true;

	    up_insert (uid, up);
	    *walk_subtrees = 0;
	    return NULL_TREE;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
/* Create structure field variables for structures used in this function.

   First walks every statement and PHI argument with find_used_portions
   to record which byte ranges of each aggregate are actually touched,
   then creates overlap (subvariable) variables for each qualifying
   aggregate, and finally marks for update any statement that mentions a
   variable we split.  Returns TODO_rebuild_alias so alias information
   is recomputed over the new subvariables.  */

static unsigned int
create_structure_vars (void)
{
  basic_block bb;
  safe_referenced_var_iterator rvi;
  VEC (tree, heap) *varvec = NULL;
  tree var;

  /* Hash table mapping variable uid -> used portion info; filled in by
     find_used_portions during the walk below, consumed by
     create_overlap_variables_for, and freed at the end.  */
  used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
                               free_used_part_map);

  /* Record used portions for every tree reachable from a PHI argument
     or a statement in any basic block.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;
      tree phi;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          use_operand_p use;
          ssa_op_iter iter;

          FOR_EACH_PHI_ARG (use, phi, iter, SSA_OP_USE)
            {
              tree op = USE_FROM_PTR (use);
              walk_tree_without_duplicates (&op, find_used_portions,
                                            NULL);
            }
        }

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
        {
          walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
                                        find_used_portions,
                                        NULL);
        }
    }

  /* Create overlap variables for each aggregate whose size is a
     compile-time constant.  Memory tags are excluded.  */
  FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, rvi)
    {
      /* The C++ FE creates vars without DECL_SIZE set, for some reason.  */
      if (var
          && DECL_SIZE (var)
          && var_can_have_subvars (var)
          && !MTAG_P (var)
          && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
        create_overlap_variables_for (var);
    }
  htab_delete (used_portions);
  VEC_free (tree, heap, varvec);

  /* Update SSA operands of statements mentioning variables we split.  */
  if (gimple_in_ssa_p (cfun))
    FOR_EACH_BB (bb)
      {
        block_stmt_iterator bsi;
        for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
          {
            tree stmt = bsi_stmt (bsi);
            bool update = false;
            unsigned int i;
            bitmap_iterator bi;

            /* A statement needs rescanning if any symbol it stores,
               loads, or takes the address of now has subvariables.
               Each scan stops at the first such symbol.  */
            if (STORED_SYMS (stmt))
              EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
                {
                  tree sym = referenced_var_lookup (i);
                  if (get_subvars_for_var (sym))
                    {
                      update = true;
                      break;
                    }
                }

            if (LOADED_SYMS (stmt) && !update)
              EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt), 0, i, bi)
                {
                  tree sym = referenced_var_lookup (i);
                  if (get_subvars_for_var (sym))
                    {
                      update = true;
                      break;
                    }
                }

            if (stmt_ann (stmt)->addresses_taken && !update)
              EXECUTE_IF_SET_IN_BITMAP (stmt_ann (stmt)->addresses_taken,
                                        0, i, bi)
                {
                  tree sym = referenced_var_lookup (i);
                  if (get_subvars_for_var (sym))
                    {
                      update = true;
                      break;
                    }
                }

            if (update)
              update_stmt (stmt);
          }
      }

  return TODO_rebuild_alias;
}
4298 static bool
4299 gate_structure_vars (void)
4301 return flag_tree_salias != 0;
/* Pass descriptor for structure-field variable creation; gated on
   flag_tree_salias via gate_structure_vars, requires a CFG, and dumps
   the function afterwards.  */

struct tree_opt_pass pass_create_structure_vars =
{
  "salias",                     /* name */
  gate_structure_vars,          /* gate */
  create_structure_vars,        /* execute */
  NULL,                         /* sub */
  NULL,                         /* next */
  0,                            /* static_pass_number */
  0,                            /* tv_id */
  PROP_cfg,                     /* properties_required */
  0,                            /* properties_provided */
  0,                            /* properties_destroyed */
  0,                            /* todo_flags_start */
  TODO_dump_func,               /* todo_flags_finish */
  0                             /* letter */
};
4321 /* Reset the call_clobbered flags on our referenced vars. In
4322 theory, this only needs to be done for globals. */
4324 static unsigned int
4325 reset_cc_flags (void)
4327 tree var;
4328 referenced_var_iterator rvi;
4330 FOR_EACH_REFERENCED_VAR (var, rvi)
4331 var_ann (var)->call_clobbered = false;
4332 return 0;
/* Anonymous, always-on pass descriptor that resets call-clobbered
   flags; requires referenced vars and a CFG to already exist.  */

struct tree_opt_pass pass_reset_cc_flags =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  reset_cc_flags,                       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_referenced_vars |PROP_cfg,       /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};
4352 static bool
4353 gate_build_alias (void)
4355 return !gate_structure_vars();
/* Pass descriptor with no execute function: when the salias pass is
   disabled, it provides PROP_alias by scheduling TODO_rebuild_alias.
   Requires the CFG and SSA form.  */

struct tree_opt_pass pass_build_alias =
{
  "build_alias",                /* name */
  gate_build_alias,             /* gate */
  NULL,                         /* execute */
  NULL,                         /* sub */
  NULL,                         /* next */
  0,                            /* static_pass_number */
  0,                            /* tv_id */
  PROP_cfg | PROP_ssa,          /* properties_required */
  PROP_alias,                   /* properties_provided */
  0,                            /* properties_destroyed */
  0,                            /* todo_flags_start */
  TODO_rebuild_alias,           /* todo_flags_finish */
  0                             /* letter */
};