Daily bump.
[official-gcc.git] / gcc / tree-ssa-alias.c
blob1ff3821ed5ac59e72a8de1a5c663300449343103
1 /* Alias analysis for trees.
2 Copyright (C) 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "timevar.h"
31 #include "expr.h"
32 #include "ggc.h"
33 #include "langhooks.h"
34 #include "flags.h"
35 #include "function.h"
36 #include "diagnostic.h"
37 #include "tree-dump.h"
38 #include "tree-gimple.h"
39 #include "tree-flow.h"
40 #include "tree-inline.h"
41 #include "tree-pass.h"
42 #include "tree-ssa-structalias.h"
43 #include "convert.h"
44 #include "params.h"
45 #include "ipa-type-escape.h"
46 #include "vec.h"
47 #include "bitmap.h"
48 #include "vecprim.h"
49 #include "pointer-set.h"
50 #include "alloc-pool.h"
52 /* Broad overview of how aliasing works:
54 First we compute points-to sets, which is done in
55 tree-ssa-structalias.c
57 During points-to set constraint finding, a bunch of little bits of
58 information is collected.
59 This is not done because it is necessary for points-to, but because
60 points-to has to walk every statement anyway. The function performing
61 this collecting is update_alias_info.
63 Bits update_alias_info collects include:
64 1. Directly escaping variables and variables whose value escapes
65 (using is_escape_site). This is the set of variables and values that
66 escape prior to transitive closure of the clobbers.
67 2. The set of variables dereferenced on the LHS (into
68 dereferenced_ptr_stores)
69 3. The set of variables dereferenced on the RHS (into
70 dereferenced_ptr_loads)
71 4. The set of all pointers we saw.
72 5. The number of loads and stores for each variable
73 6. The number of statements touching memory
74 7. The set of address taken variables.
77 #1 is computed by a combination of is_escape_site, and counting the
78 number of uses/deref operators. This function properly accounts for
79 situations like &ptr->field, which is *not* a dereference.
81 After points-to sets are computed, the sets themselves still
82 contain points-to specific variables, such as a variable that says
83 the pointer points to anything, a variable that says the pointer
84 points to readonly memory, etc.
86 These are eliminated in a later phase, as we will see.
88 The rest of the phases are located in tree-ssa-alias.c
90 The next phase after points-to set computation is called
91 "setup_pointers_and_addressables"
93 This pass does 3 main things:
95 1. All variables that can have TREE_ADDRESSABLE removed safely (IE
96 non-globals whose address is not taken), have TREE_ADDRESSABLE
97 removed.
98 2. All variables that may be aliased (which is the set of addressable
99 variables and globals) at all, are marked for renaming, and have
100 symbol memory tags created for them.
101 3. All variables which are stored into have their SMT's added to
102 written vars.
105 After this function is run, all variables that will ever have an
106 SMT, have one, though its aliases are not filled in.
108 The next phase is to compute flow-insensitive aliasing, which in
   our case, is a misnomer.  It is really computing aliasing that
   requires no transitive closure to be correct.  In particular, it
   uses stack vs non-stack, TBAA, etc, to determine whether two
   symbols could *ever* alias.  This phase works by going through all
113 the pointers we collected during update_alias_info, and for every
114 addressable variable in the program, seeing if they alias. If so,
115 the addressable variable is added to the symbol memory tag for the
116 pointer.
118 As part of this, we handle symbol memory tags that conflict but
119 have no aliases in common, by forcing them to have a symbol in
   common (through unioning alias sets or adding one as an alias of
   the other).  The case of
122 conflicts with no aliases in common occurs mainly due to aliasing
123 we cannot see. In particular, it generally means we have a load
124 through a pointer whose value came from outside the function.
125 Without an addressable symbol to point to, they would get the wrong
126 answer.
128 After flow insensitive aliasing is computed, we compute name tags
129 (called compute_flow_sensitive_info). We walk each pointer we
130 collected and see if it has a usable points-to set. If so, we
131 generate a name tag using that pointer, and make an alias bitmap for
132 it. Name tags are shared between all things with the same alias
133 bitmap. The alias bitmap will be translated from what points-to
134 computed. In particular, the "anything" variable in points-to will be
135 transformed into a pruned set of SMT's and their aliases that
136 compute_flow_insensitive_aliasing computed.
137 Note that since 4.3, every pointer that points-to computed a solution for
138 will get a name tag (whereas before 4.3, only those whose set did
139 *not* include the anything variable would). At the point where name
140 tags are all assigned, symbol memory tags are dead, and could be
141 deleted, *except* on global variables. Global variables still use
142 symbol memory tags as of right now.
144 After name tags are computed, the set of clobbered variables is
145 transitively closed. In particular, we compute the set of clobbered
146 variables based on the initial set of clobbers, plus the aliases of
147 pointers which either escape, or have their value escape.
149 After this, maybe_create_global_var is run, which handles a corner
150 case where we have no call clobbered variables, but have pure and
151 non-pure functions.
153 Staring at this function, I now remember it is a hack for the fact
154 that we do not mark all globals in the program as call clobbered for a
155 function unless they are actually used in that function. Instead, we
156 only mark the set that is actually clobbered. As a result, you can
157 end up with situations where you have no call clobbered vars set.
159 After maybe_create_global_var, we set pointers with the REF_ALL flag
160 to have alias sets that include all clobbered
161 memory tags and variables.
163 After this, memory partitioning is computed (by the function
164 compute_memory_partitions) and alias sets are reworked accordingly.
166 Lastly, we delete partitions with no symbols, and clean up after
167 ourselves. */
169 /* Structure to map a variable to its alias set. */
170 struct alias_map_d
172 /* Variable and its alias set. */
173 tree var;
174 alias_set_type set;
178 /* Counters used to display statistics on alias analysis. */
179 struct alias_stats_d
181 unsigned int alias_queries;
182 unsigned int alias_mayalias;
183 unsigned int alias_noalias;
184 unsigned int simple_queries;
185 unsigned int simple_resolved;
186 unsigned int tbaa_queries;
187 unsigned int tbaa_resolved;
188 unsigned int structnoaddress_queries;
189 unsigned int structnoaddress_resolved;
193 /* Local variables. */
194 static struct alias_stats_d alias_stats;
195 static bitmap_obstack alias_bitmap_obstack;
197 /* Local functions. */
198 static void compute_flow_insensitive_aliasing (struct alias_info *);
199 static void finalize_ref_all_pointers (struct alias_info *);
200 static void dump_alias_stats (FILE *);
201 static bool may_alias_p (tree, alias_set_type, tree, alias_set_type, bool);
202 static tree create_memory_tag (tree type, bool is_type_tag);
203 static tree get_smt_for (tree, struct alias_info *);
204 static tree get_nmt_for (tree);
205 static void add_may_alias (tree, tree);
206 static struct alias_info *init_alias_info (void);
207 static void delete_alias_info (struct alias_info *);
208 static void compute_flow_sensitive_aliasing (struct alias_info *);
209 static void setup_pointers_and_addressables (struct alias_info *);
210 static void create_global_var (void);
211 static void maybe_create_global_var (void);
212 static void set_pt_anything (tree);
214 void debug_mp_info (VEC(mem_sym_stats_t,heap) *);
216 static alloc_pool mem_sym_stats_pool;
218 /* Return memory reference stats for symbol VAR. Create a new slot in
219 cfun->gimple_df->mem_sym_stats if needed. */
221 static struct mem_sym_stats_d *
222 get_mem_sym_stats_for (tree var)
224 void **slot;
225 struct mem_sym_stats_d *stats;
226 struct pointer_map_t *map = gimple_mem_ref_stats (cfun)->mem_sym_stats;
228 gcc_assert (map);
230 slot = pointer_map_insert (map, var);
231 if (*slot == NULL)
233 stats = pool_alloc (mem_sym_stats_pool);
234 memset (stats, 0, sizeof (*stats));
235 stats->var = var;
236 *slot = (void *) stats;
238 else
239 stats = (struct mem_sym_stats_d *) *slot;
241 return stats;
245 /* Set MPT to be the memory partition associated with symbol SYM. */
247 static inline void
248 set_memory_partition (tree sym, tree mpt)
250 #if defined ENABLE_CHECKING
251 if (mpt)
252 gcc_assert (TREE_CODE (mpt) == MEMORY_PARTITION_TAG
253 && !is_gimple_reg (sym));
254 #endif
256 var_ann (sym)->mpt = mpt;
257 if (mpt)
259 if (MPT_SYMBOLS (mpt) == NULL)
260 MPT_SYMBOLS (mpt) = BITMAP_ALLOC (&alias_bitmap_obstack);
262 bitmap_set_bit (MPT_SYMBOLS (mpt), DECL_UID (sym));
264 /* MPT inherits the call-clobbering attributes from SYM. */
265 if (is_call_clobbered (sym))
267 MTAG_GLOBAL (mpt) = 1;
268 mark_call_clobbered (mpt, ESCAPE_IS_GLOBAL);
274 /* Mark variable VAR as being non-addressable. */
276 static void
277 mark_non_addressable (tree var)
279 tree mpt;
281 if (!TREE_ADDRESSABLE (var))
282 return;
284 mpt = memory_partition (var);
286 if (!MTAG_P (var))
287 var_ann (var)->call_clobbered = false;
289 bitmap_clear_bit (gimple_call_clobbered_vars (cfun), DECL_UID (var));
290 TREE_ADDRESSABLE (var) = 0;
292 if (mpt)
294 /* Note that it's possible for a symbol to have an associated
295 MPT and the MPT have a NULL empty set. During
296 init_alias_info, all MPTs get their sets cleared out, but the
297 symbols still point to the old MPTs that used to hold them.
298 This is done so that compute_memory_partitions can now which
299 symbols are losing or changing partitions and mark them for
300 renaming. */
301 if (MPT_SYMBOLS (mpt))
302 bitmap_clear_bit (MPT_SYMBOLS (mpt), DECL_UID (var));
303 set_memory_partition (var, NULL_TREE);
308 /* qsort comparison function to sort type/name tags by DECL_UID. */
310 static int
311 sort_tags_by_id (const void *pa, const void *pb)
313 const_tree const a = *(const_tree const *)pa;
314 const_tree const b = *(const_tree const *)pb;
316 return DECL_UID (a) - DECL_UID (b);
319 /* Initialize WORKLIST to contain those memory tags that are marked call
320 clobbered. Initialized WORKLIST2 to contain the reasons these
321 memory tags escaped. */
323 static void
324 init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
325 VEC (int, heap) **worklist2,
326 bitmap on_worklist)
328 referenced_var_iterator rvi;
329 tree curr;
331 FOR_EACH_REFERENCED_VAR (curr, rvi)
333 if (MTAG_P (curr) && is_call_clobbered (curr))
335 VEC_safe_push (tree, heap, *worklist, curr);
336 VEC_safe_push (int, heap, *worklist2,
337 var_ann (curr)->escape_mask);
338 bitmap_set_bit (on_worklist, DECL_UID (curr));
343 /* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
344 ALIAS is not already marked call clobbered, and is a memory
345 tag. */
347 static void
348 add_to_worklist (tree alias, VEC (tree, heap) **worklist,
349 VEC (int, heap) **worklist2, int reason,
350 bitmap on_worklist)
352 if (MTAG_P (alias) && !is_call_clobbered (alias)
353 && !bitmap_bit_p (on_worklist, DECL_UID (alias)))
355 VEC_safe_push (tree, heap, *worklist, alias);
356 VEC_safe_push (int, heap, *worklist2, reason);
357 bitmap_set_bit (on_worklist, DECL_UID (alias));
361 /* Mark aliases of TAG as call clobbered, and place any tags on the
362 alias list that were not already call clobbered on WORKLIST. */
364 static void
365 mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
366 VEC (int, heap) **worklist2,
367 bitmap on_worklist, bitmap queued)
369 bitmap aliases;
370 bitmap_iterator bi;
371 unsigned int i;
372 tree entry;
373 var_ann_t ta = var_ann (tag);
375 if (!MTAG_P (tag))
376 return;
377 aliases = may_aliases (tag);
378 if (!aliases)
379 return;
381 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
383 entry = referenced_var (i);
384 /* If you clobber one part of a structure, you
385 clobber the entire thing. While this does not make
386 the world a particularly nice place, it is necessary
387 in order to allow C/C++ tricks that involve
388 pointer arithmetic to work. */
389 if (TREE_CODE (entry) == STRUCT_FIELD_TAG)
390 bitmap_set_bit (queued, DECL_UID (SFT_PARENT_VAR (entry)));
391 else if (!unmodifiable_var_p (entry))
393 add_to_worklist (entry, worklist, worklist2, ta->escape_mask,
394 on_worklist);
395 mark_call_clobbered (entry, ta->escape_mask);
398 if (!bitmap_empty_p (queued))
400 EXECUTE_IF_SET_IN_BITMAP (queued, 0, i, bi)
402 subvar_t svars;
403 svars = get_subvars_for_var (referenced_var (i));
404 for (; svars; svars = svars->next)
405 if (!unmodifiable_var_p (svars->var))
406 mark_call_clobbered (svars->var, ta->escape_mask);
408 bitmap_clear (queued);
412 /* Tags containing global vars need to be marked as global.
413 Tags containing call clobbered vars need to be marked as call
414 clobbered. */
416 static void
417 compute_tag_properties (void)
419 referenced_var_iterator rvi;
420 tree tag;
421 bool changed = true;
422 VEC (tree, heap) *taglist = NULL;
424 FOR_EACH_REFERENCED_VAR (tag, rvi)
426 if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG)
427 continue;
428 VEC_safe_push (tree, heap, taglist, tag);
431 /* We sort the taglist by DECL_UID, for two reasons.
432 1. To get a sequential ordering to make the bitmap accesses
433 faster.
434 2. Because of the way we compute aliases, it's more likely that
435 an earlier tag is included in a later tag, and this will reduce
436 the number of iterations.
438 If we had a real tag graph, we would just topo-order it and be
439 done with it. */
440 qsort (VEC_address (tree, taglist),
441 VEC_length (tree, taglist),
442 sizeof (tree),
443 sort_tags_by_id);
445 /* Go through each tag not marked as global, and if it aliases
446 global vars, mark it global.
448 If the tag contains call clobbered vars, mark it call
449 clobbered.
451 This loop iterates because tags may appear in the may-aliases
452 list of other tags when we group. */
454 while (changed)
456 unsigned int k;
458 changed = false;
459 for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
461 bitmap ma;
462 bitmap_iterator bi;
463 unsigned int i;
464 tree entry;
465 bool tagcc = is_call_clobbered (tag);
466 bool tagglobal = MTAG_GLOBAL (tag);
468 if (tagcc && tagglobal)
469 continue;
471 ma = may_aliases (tag);
472 if (!ma)
473 continue;
475 EXECUTE_IF_SET_IN_BITMAP (ma, 0, i, bi)
477 entry = referenced_var (i);
478 /* Call clobbered entries cause the tag to be marked
479 call clobbered. */
480 if (!tagcc && is_call_clobbered (entry))
482 mark_call_clobbered (tag, var_ann (entry)->escape_mask);
483 tagcc = true;
484 changed = true;
487 /* Global vars cause the tag to be marked global. */
488 if (!tagglobal && is_global_var (entry))
490 MTAG_GLOBAL (tag) = true;
491 changed = true;
492 tagglobal = true;
495 /* Early exit once both global and cc are set, since the
496 loop can't do any more than that. */
497 if (tagcc && tagglobal)
498 break;
502 VEC_free (tree, heap, taglist);
505 /* Set up the initial variable clobbers and globalness.
506 When this function completes, only tags whose aliases need to be
507 clobbered will be set clobbered. Tags clobbered because they
508 contain call clobbered vars are handled in compute_tag_properties. */
510 static void
511 set_initial_properties (struct alias_info *ai)
513 unsigned int i;
514 referenced_var_iterator rvi;
515 tree var;
516 tree ptr;
517 bitmap queued;
519 /* Temporary bitmap to avoid quadratic behavior in marking
520 call clobbers. */
521 queued = BITMAP_ALLOC (&alias_bitmap_obstack);
523 FOR_EACH_REFERENCED_VAR (var, rvi)
525 if (is_global_var (var)
526 && (!var_can_have_subvars (var)
527 || get_subvars_for_var (var) == NULL))
529 if (!unmodifiable_var_p (var))
530 mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
532 else if (TREE_CODE (var) == PARM_DECL
533 && gimple_default_def (cfun, var)
534 && POINTER_TYPE_P (TREE_TYPE (var)))
536 tree def = gimple_default_def (cfun, var);
537 get_ptr_info (def)->value_escapes_p = 1;
538 get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
542 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
544 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
545 tree tag = symbol_mem_tag (SSA_NAME_VAR (ptr));
547 if (pi->value_escapes_p)
549 /* If PTR escapes then its associated memory tags and
550 pointed-to variables are call-clobbered. */
551 if (pi->name_mem_tag)
552 mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);
554 if (tag)
555 mark_call_clobbered (tag, pi->escape_mask);
557 if (pi->pt_vars)
559 bitmap_iterator bi;
560 unsigned int j;
561 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
563 tree alias = referenced_var (j);
565 /* If you clobber one part of a structure, you
566 clobber the entire thing. While this does not make
567 the world a particularly nice place, it is necessary
568 in order to allow C/C++ tricks that involve
569 pointer arithmetic to work. */
570 if (TREE_CODE (alias) == STRUCT_FIELD_TAG)
571 bitmap_set_bit (queued, DECL_UID (SFT_PARENT_VAR (alias)));
572 else if (!unmodifiable_var_p (alias))
573 mark_call_clobbered (alias, pi->escape_mask);
575 /* Process variables we need to clobber all parts of. */
576 if (!bitmap_empty_p (queued))
578 EXECUTE_IF_SET_IN_BITMAP (queued, 0, j, bi)
580 subvar_t svars;
581 svars = get_subvars_for_var (referenced_var (j));
582 for (; svars; svars = svars->next)
583 if (!unmodifiable_var_p (svars->var))
584 mark_call_clobbered (svars->var, pi->escape_mask);
586 bitmap_clear (queued);
591 /* If the name tag is call clobbered, so is the symbol tag
592 associated with the base VAR_DECL. */
593 if (pi->name_mem_tag
594 && tag
595 && is_call_clobbered (pi->name_mem_tag))
596 mark_call_clobbered (tag, pi->escape_mask);
598 /* Name tags and symbol tags that we don't know where they point
599 to, might point to global memory, and thus, are clobbered.
601 FIXME: This is not quite right. They should only be
602 clobbered if value_escapes_p is true, regardless of whether
603 they point to global memory or not.
604 So removing this code and fixing all the bugs would be nice.
605 It is the cause of a bunch of clobbering. */
606 if ((pi->pt_global_mem || pi->pt_anything)
607 && pi->is_dereferenced && pi->name_mem_tag)
609 mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
610 MTAG_GLOBAL (pi->name_mem_tag) = true;
613 if ((pi->pt_global_mem || pi->pt_anything)
614 && pi->is_dereferenced
615 && tag)
617 mark_call_clobbered (tag, ESCAPE_IS_GLOBAL);
618 MTAG_GLOBAL (tag) = true;
622 BITMAP_FREE (queued);
625 /* Compute which variables need to be marked call clobbered because
626 their tag is call clobbered, and which tags need to be marked
627 global because they contain global variables. */
629 static void
630 compute_call_clobbered (struct alias_info *ai)
632 VEC (tree, heap) *worklist = NULL;
633 VEC (int,heap) *worklist2 = NULL;
634 bitmap on_worklist, queued;
636 timevar_push (TV_CALL_CLOBBER);
637 on_worklist = BITMAP_ALLOC (NULL);
638 queued = BITMAP_ALLOC (NULL);
640 set_initial_properties (ai);
641 init_transitive_clobber_worklist (&worklist, &worklist2, on_worklist);
642 while (VEC_length (tree, worklist) != 0)
644 tree curr = VEC_pop (tree, worklist);
645 int reason = VEC_pop (int, worklist2);
647 bitmap_clear_bit (on_worklist, DECL_UID (curr));
648 mark_call_clobbered (curr, reason);
649 mark_aliases_call_clobbered (curr, &worklist, &worklist2,
650 on_worklist, queued);
652 VEC_free (tree, heap, worklist);
653 VEC_free (int, heap, worklist2);
654 BITMAP_FREE (on_worklist);
655 BITMAP_FREE (queued);
656 compute_tag_properties ();
657 timevar_pop (TV_CALL_CLOBBER);
661 /* Dump memory partition information to FILE. */
663 static void
664 dump_memory_partitions (FILE *file)
666 unsigned i, npart;
667 unsigned long nsyms;
668 tree mpt;
670 fprintf (file, "\nMemory partitions\n\n");
671 for (i = 0, npart = 0, nsyms = 0;
672 VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt);
673 i++)
675 if (mpt)
677 bitmap syms = MPT_SYMBOLS (mpt);
678 unsigned long n = (syms) ? bitmap_count_bits (syms) : 0;
680 fprintf (file, "#%u: ", i);
681 print_generic_expr (file, mpt, 0);
682 fprintf (file, ": %lu elements: ", n);
683 dump_decl_set (file, syms);
684 npart++;
685 nsyms += n;
689 fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms);
693 /* Dump memory partition information to stderr. */
695 void
696 debug_memory_partitions (void)
698 dump_memory_partitions (stderr);
702 /* Return true if memory partitioning is required given the memory
703 reference estimates in STATS. */
705 static inline bool
706 need_to_partition_p (struct mem_ref_stats_d *stats)
708 long num_vops = stats->num_vuses + stats->num_vdefs;
709 long avg_vops = CEIL (num_vops, stats->num_mem_stmts);
710 return (num_vops > (long) MAX_ALIASED_VOPS
711 && avg_vops > (long) AVG_ALIASED_VOPS);
715 /* Count the actual number of virtual operators in CFUN. Note that
716 this is only meaningful after virtual operands have been populated,
717 so it should be invoked at the end of compute_may_aliases.
719 The number of virtual operators are stored in *NUM_VDEFS_P and
720 *NUM_VUSES_P, the number of partitioned symbols in
721 *NUM_PARTITIONED_P and the number of unpartitioned symbols in
722 *NUM_UNPARTITIONED_P.
724 If any of these pointers is NULL the corresponding count is not
725 computed. */
727 static void
728 count_mem_refs (long *num_vuses_p, long *num_vdefs_p,
729 long *num_partitioned_p, long *num_unpartitioned_p)
731 block_stmt_iterator bsi;
732 basic_block bb;
733 long num_vdefs, num_vuses, num_partitioned, num_unpartitioned;
734 referenced_var_iterator rvi;
735 tree sym;
737 num_vuses = num_vdefs = num_partitioned = num_unpartitioned = 0;
739 if (num_vuses_p || num_vdefs_p)
740 FOR_EACH_BB (bb)
741 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
743 tree stmt = bsi_stmt (bsi);
744 if (stmt_references_memory_p (stmt))
746 num_vuses += NUM_SSA_OPERANDS (stmt, SSA_OP_VUSE);
747 num_vdefs += NUM_SSA_OPERANDS (stmt, SSA_OP_VDEF);
751 if (num_partitioned_p || num_unpartitioned_p)
752 FOR_EACH_REFERENCED_VAR (sym, rvi)
754 if (is_gimple_reg (sym))
755 continue;
757 if (memory_partition (sym))
758 num_partitioned++;
759 else
760 num_unpartitioned++;
763 if (num_vdefs_p)
764 *num_vdefs_p = num_vdefs;
766 if (num_vuses_p)
767 *num_vuses_p = num_vuses;
769 if (num_partitioned_p)
770 *num_partitioned_p = num_partitioned;
772 if (num_unpartitioned_p)
773 *num_unpartitioned_p = num_unpartitioned;
777 /* Dump memory reference stats for function CFUN to FILE. */
779 void
780 dump_mem_ref_stats (FILE *file)
782 long actual_num_vuses, actual_num_vdefs;
783 long num_partitioned, num_unpartitioned;
784 struct mem_ref_stats_d *stats;
786 stats = gimple_mem_ref_stats (cfun);
788 count_mem_refs (&actual_num_vuses, &actual_num_vdefs, &num_partitioned,
789 &num_unpartitioned);
791 fprintf (file, "\nMemory reference statistics for %s\n\n",
792 lang_hooks.decl_printable_name (current_function_decl, 2));
794 fprintf (file, "Number of memory statements: %ld\n",
795 stats->num_mem_stmts);
796 fprintf (file, "Number of call sites: %ld\n",
797 stats->num_call_sites);
798 fprintf (file, "Number of pure/const call sites: %ld\n",
799 stats->num_pure_const_call_sites);
800 fprintf (file, "Number of asm sites: %ld\n",
801 stats->num_asm_sites);
802 fprintf (file, "Estimated number of loads: %ld (%ld/stmt)\n",
803 stats->num_vuses,
804 (stats->num_mem_stmts)
805 ? CEIL (stats->num_vuses, stats->num_mem_stmts)
806 : 0);
807 fprintf (file, "Actual number of loads: %ld (%ld/stmt)\n",
808 actual_num_vuses,
809 (stats->num_mem_stmts)
810 ? CEIL (actual_num_vuses, stats->num_mem_stmts)
811 : 0);
813 if (actual_num_vuses > stats->num_vuses + (stats->num_vuses / 25))
814 fprintf (file, "\t(warning: estimation is lower by more than 25%%)\n");
816 fprintf (file, "Estimated number of stores: %ld (%ld/stmt)\n",
817 stats->num_vdefs,
818 (stats->num_mem_stmts)
819 ? CEIL (stats->num_vdefs, stats->num_mem_stmts)
820 : 0);
821 fprintf (file, "Actual number of stores: %ld (%ld/stmt)\n",
822 actual_num_vdefs,
823 (stats->num_mem_stmts)
824 ? CEIL (actual_num_vdefs, stats->num_mem_stmts)
825 : 0);
827 if (actual_num_vdefs > stats->num_vdefs + (stats->num_vdefs / 25))
828 fprintf (file, "\t(warning: estimation is lower by more than 25%%)\n");
830 fprintf (file, "Partitioning thresholds: MAX = %d AVG = %d "
831 "(%sNEED TO PARTITION)\n", MAX_ALIASED_VOPS, AVG_ALIASED_VOPS,
832 stats->num_mem_stmts && need_to_partition_p (stats) ? "" : "NO ");
833 fprintf (file, "Number of partitioned symbols: %ld\n", num_partitioned);
834 fprintf (file, "Number of unpartitioned symbols: %ld\n", num_unpartitioned);
/* Dump memory reference stats for the current function to stderr.  */

void
debug_mem_ref_stats (void)
{
  dump_mem_ref_stats (stderr);
}
847 /* Dump memory reference stats for variable VAR to FILE. */
849 static void
850 dump_mem_sym_stats (FILE *file, tree var)
852 mem_sym_stats_t stats = mem_sym_stats (cfun, var);
854 if (stats == NULL)
855 return;
857 fprintf (file, "read frequency: %6ld, write frequency: %6ld, "
858 "direct reads: %3ld, direct writes: %3ld, "
859 "indirect reads: %4ld, indirect writes: %4ld, symbol: ",
860 stats->frequency_reads, stats->frequency_writes,
861 stats->num_direct_reads, stats->num_direct_writes,
862 stats->num_indirect_reads, stats->num_indirect_writes);
863 print_generic_expr (file, stats->var, 0);
864 fprintf (file, ", tags: ");
865 dump_decl_set (file, stats->parent_tags);
869 /* Dump memory reference stats for variable VAR to stderr. */
871 void
872 debug_mem_sym_stats (tree var)
874 dump_mem_sym_stats (stderr, var);
878 /* Dump memory reference stats for all memory symbols to FILE. */
880 static void
881 dump_all_mem_sym_stats (FILE *file)
883 referenced_var_iterator rvi;
884 tree sym;
886 FOR_EACH_REFERENCED_VAR (sym, rvi)
888 if (is_gimple_reg (sym))
889 continue;
891 dump_mem_sym_stats (file, sym);
/* Dump memory reference stats for all memory symbols to stderr.  */

void
debug_all_mem_sym_stats (void)
{
  dump_all_mem_sym_stats (stderr);
}
905 /* Dump the MP_INFO array to FILE. */
907 static void
908 dump_mp_info (FILE *file, VEC(mem_sym_stats_t,heap) *mp_info)
910 unsigned i;
911 mem_sym_stats_t mp_p;
913 for (i = 0; VEC_iterate (mem_sym_stats_t, mp_info, i, mp_p); i++)
914 if (!mp_p->partitioned_p)
915 dump_mem_sym_stats (file, mp_p->var);
919 /* Dump the MP_INFO array to stderr. */
921 void
922 debug_mp_info (VEC(mem_sym_stats_t,heap) *mp_info)
924 dump_mp_info (stderr, mp_info);
928 /* Update memory reference stats for symbol VAR in statement STMT.
929 NUM_DIRECT_READS and NUM_DIRECT_WRITES specify the number of times
930 that VAR is read/written in STMT (indirect reads/writes are not
931 recorded by this function, see compute_memory_partitions). */
933 void
934 update_mem_sym_stats_from_stmt (tree var, tree stmt, long num_direct_reads,
935 long num_direct_writes)
937 mem_sym_stats_t stats;
939 gcc_assert (num_direct_reads >= 0 && num_direct_writes >= 0);
941 stats = get_mem_sym_stats_for (var);
943 stats->num_direct_reads += num_direct_reads;
944 stats->frequency_reads += ((long) bb_for_stmt (stmt)->frequency
945 * num_direct_reads);
947 stats->num_direct_writes += num_direct_writes;
948 stats->frequency_writes += ((long) bb_for_stmt (stmt)->frequency
949 * num_direct_writes);
953 /* The list is sorted by increasing partitioning score (PSCORE).
954 This score is computed such that symbols with high scores are
955 those that are least likely to be partitioned. Given a symbol
956 MP->VAR, PSCORE(S) is the result of the following weighted sum
958 PSCORE(S) = FW * 64 + FR * 32
959 + DW * 16 + DR * 8
960 + IW * 4 + IR * 2
961 + NO_ALIAS
963 where
965 FW Execution frequency of writes to S
966 FR Execution frequency of reads from S
967 DW Number of direct writes to S
968 DR Number of direct reads from S
969 IW Number of indirect writes to S
970 IR Number of indirect reads from S
971 NO_ALIAS State of the NO_ALIAS* flags
973 The basic idea here is that symbols that are frequently
974 written-to in hot paths of the code are the last to be considered
975 for partitioning. */
977 static inline long
978 pscore (mem_sym_stats_t mp)
980 return mp->frequency_writes * 64 + mp->frequency_reads * 32
981 + mp->num_direct_writes * 16 + mp->num_direct_reads * 8
982 + mp->num_indirect_writes * 4 + mp->num_indirect_reads * 2
983 + var_ann (mp->var)->noalias_state;
987 /* Given two MP_INFO entries MP1 and MP2, return -1 if MP1->VAR should
988 be partitioned before MP2->VAR, 0 if they are the same or 1 if
989 MP1->VAR should be partitioned after MP2->VAR. */
991 static inline int
992 compare_mp_info_entries (mem_sym_stats_t mp1, mem_sym_stats_t mp2)
994 long pscore1 = pscore (mp1);
995 long pscore2 = pscore (mp2);
997 if (pscore1 < pscore2)
998 return -1;
999 else if (pscore1 > pscore2)
1000 return 1;
1001 else
1002 return 0;
1006 /* Comparison routine for qsort. The list is sorted by increasing
1007 partitioning score (PSCORE). This score is computed such that
1008 symbols with high scores are those that are least likely to be
1009 partitioned. */
1011 static int
1012 mp_info_cmp (const void *p, const void *q)
1014 mem_sym_stats_t e1 = *((const mem_sym_stats_t *) p);
1015 mem_sym_stats_t e2 = *((const mem_sym_stats_t *) q);
1016 return compare_mp_info_entries (e1, e2);
1020 /* Sort the array of reference counts used to compute memory partitions.
1021 Elements are sorted in ascending order of execution frequency and
1022 descending order of virtual operators needed. */
1024 static inline void
1025 sort_mp_info (VEC(mem_sym_stats_t,heap) *list)
1027 unsigned num = VEC_length (mem_sym_stats_t, list);
1029 if (num < 2)
1030 return;
1032 if (num == 2)
1034 if (compare_mp_info_entries (VEC_index (mem_sym_stats_t, list, 0),
1035 VEC_index (mem_sym_stats_t, list, 1)) > 0)
1037 /* Swap elements if they are in the wrong order. */
1038 mem_sym_stats_t tmp = VEC_index (mem_sym_stats_t, list, 0);
1039 VEC_replace (mem_sym_stats_t, list, 0,
1040 VEC_index (mem_sym_stats_t, list, 1));
1041 VEC_replace (mem_sym_stats_t, list, 1, tmp);
1044 return;
1047 /* There are 3 or more elements, call qsort. */
1048 qsort (VEC_address (mem_sym_stats_t, list),
1049 VEC_length (mem_sym_stats_t, list),
1050 sizeof (mem_sym_stats_t),
1051 mp_info_cmp);
1055 /* Return the memory partition tag (MPT) associated with memory
1056 symbol SYM. */
1058 static tree
1059 get_mpt_for (tree sym)
1061 tree mpt;
1063 /* Don't create a new tag unnecessarily. */
1064 mpt = memory_partition (sym);
1065 if (mpt == NULL_TREE)
1067 mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT");
1068 TREE_ADDRESSABLE (mpt) = 0;
1069 add_referenced_var (mpt);
1070 VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt);
1071 gcc_assert (MPT_SYMBOLS (mpt) == NULL);
1072 set_memory_partition (sym, mpt);
1075 return mpt;
/* Add MP_P->VAR to a memory partition and return the partition.  */

static tree
find_partition_for (mem_sym_stats_t mp_p)
{
  unsigned i;
  VEC(tree,heap) *mpt_table;
  tree mpt;

  mpt_table = gimple_ssa_operands (cfun)->mpt_table;
  mpt = NULL_TREE;

  /* Find an existing partition for MP_P->VAR.  */
  for (i = 0; VEC_iterate (tree, mpt_table, i, mpt); i++)
    {
      mem_sym_stats_t mpt_stats;

      /* If MPT does not have any symbols yet, use it.  */
      if (MPT_SYMBOLS (mpt) == NULL)
	break;

      /* Otherwise, see if MPT has common parent tags with MP_P->VAR,
	 but avoid grouping clobbered variables with non-clobbered
	 variables (otherwise, this tends to create a single memory
	 partition because other call-clobbered variables may have
	 common parent tags with non-clobbered ones).  */
      mpt_stats = get_mem_sym_stats_for (mpt);
      if (mp_p->parent_tags
	  && mpt_stats->parent_tags
	  && is_call_clobbered (mpt) == is_call_clobbered (mp_p->var)
	  && bitmap_intersect_p (mpt_stats->parent_tags, mp_p->parent_tags))
	break;

      /* If no common parent tags are found, see if both MPT and
	 MP_P->VAR are call-clobbered.  */
      if (is_call_clobbered (mpt) && is_call_clobbered (mp_p->var))
	break;
    }

  /* NOTE(review): the code below relies on VEC_iterate leaving MPT as
     NULL_TREE when the table is exhausted without a 'break' above, so
     a brand new partition is created in that case — confirm against
     the VEC_iterate definition in vec.h.  */
  if (mpt == NULL_TREE)
    mpt = get_mpt_for (mp_p->var);
  else
    set_memory_partition (mp_p->var, mpt);

  /* Record that MP_P->VAR now lives in a partition, and schedule both
     the symbol and the partition for SSA renaming.  */
  mp_p->partitioned_p = true;

  mark_sym_for_renaming (mp_p->var);
  mark_sym_for_renaming (mpt);

  return mpt;
}
1132 /* Rewrite the alias set for TAG to use the newly created partitions.
1133 If TAG is NULL, rewrite the set of call-clobbered variables.
1134 NEW_ALIASES is a scratch bitmap to build the new set of aliases for
1135 TAG. */
1137 static void
1138 rewrite_alias_set_for (tree tag, bitmap new_aliases)
1140 bitmap_iterator bi;
1141 unsigned i;
1142 tree mpt, sym;
1144 EXECUTE_IF_SET_IN_BITMAP (MTAG_ALIASES (tag), 0, i, bi)
1146 sym = referenced_var (i);
1147 mpt = memory_partition (sym);
1148 if (mpt)
1149 bitmap_set_bit (new_aliases, DECL_UID (mpt));
1150 else
1151 bitmap_set_bit (new_aliases, DECL_UID (sym));
1154 /* Rebuild the may-alias array for TAG. */
1155 bitmap_copy (MTAG_ALIASES (tag), new_aliases);
/* Determine how many virtual operands can be saved by partitioning
   MP_P->VAR into MPT.  When a symbol S is thrown inside a partition
   P, every virtual operand that used to reference S will now
   reference P.  Whether it reduces the number of virtual operands
   depends on:

   1- Direct references to S are never saved.  Instead of the virtual
      operand to S, we will now have a virtual operand to P.

   2- Indirect references to S are reduced only for those memory tags
      holding S that already had other symbols partitioned into P.
      For instance, if a memory tag T has the alias set { a b S c },
      the first time we partition S into P, the alias set will become
      { a b P c }, so no virtual operands will be saved.  However, if
      we now partition symbol 'c' into P, then the alias set for T
      will become { a b P }, so we will be saving one virtual operand
      for every indirect reference to 'c'.

   3- If S is call-clobbered, we save as many virtual operands as
      call/asm sites exist in the code, but only if other
      call-clobbered symbols have been grouped into P.  The first
      call-clobbered symbol that we group does not produce any
      savings.

   MEM_REF_STATS points to CFUN's memory reference information.  */

static void
estimate_vop_reduction (struct mem_ref_stats_d *mem_ref_stats,
                        mem_sym_stats_t mp_p, tree mpt)
{
  unsigned i;
  bitmap_iterator bi;
  mem_sym_stats_t mpt_stats;

  /* We should only get symbols with indirect references here.  */
  gcc_assert (mp_p->num_indirect_reads > 0 || mp_p->num_indirect_writes > 0);

  /* Note that the only statistics we keep for MPT is the set of
     parent tags to know which memory tags have had alias members
     partitioned, and the indicator has_call_clobbered_vars.
     Reference counts are not important for MPT.  */
  mpt_stats = get_mem_sym_stats_for (mpt);

  /* Traverse all the parent tags for MP_P->VAR.  For every tag T, if
     partition P is already grouping aliases of T, then reduce the
     number of virtual operands by the number of direct references
     to T.  */
  if (mp_p->parent_tags)
    {
      if (mpt_stats->parent_tags == NULL)
	mpt_stats->parent_tags = BITMAP_ALLOC (&alias_bitmap_obstack);

      EXECUTE_IF_SET_IN_BITMAP (mp_p->parent_tags, 0, i, bi)
	{
	  if (bitmap_bit_p (mpt_stats->parent_tags, i))
	    {
	      /* Partition MPT is already partitioning symbols in the
		 alias set for TAG.  This means that we are now saving
		 1 virtual operand for every direct reference to TAG.  */
	      tree tag = referenced_var (i);
	      mem_sym_stats_t tag_stats = mem_sym_stats (cfun, tag);
	      mem_ref_stats->num_vuses -= tag_stats->num_direct_reads;
	      mem_ref_stats->num_vdefs -= tag_stats->num_direct_writes;
	    }
	  else
	    {
	      /* This is the first symbol in tag I's alias set that is
		 being grouped under MPT.  We will not save any
		 virtual operands this time, but record that MPT is
		 grouping a symbol from TAG's alias set so that the
		 next time we get the savings.  */
	      bitmap_set_bit (mpt_stats->parent_tags, i);
	    }
	}
    }

  /* If MP_P->VAR is call-clobbered, and MPT is already grouping
     call-clobbered symbols, then we will save as many virtual
     operands as asm/call sites there are.  */
  if (is_call_clobbered (mp_p->var))
    {
      if (mpt_stats->has_call_clobbered_vars)
	mem_ref_stats->num_vdefs -= mem_ref_stats->num_call_sites
				    + mem_ref_stats->num_asm_sites;
      else
	/* First call-clobbered member of MPT: no savings yet, just
	   remember it for the next one (see point 3 above).  */
	mpt_stats->has_call_clobbered_vars = true;
    }
}
/* Helper for compute_memory_partitions.  Transfer reference counts
   from pointers to their pointed-to sets.  Counters for pointers were
   computed by update_alias_info.  MEM_REF_STATS points to CFUN's
   memory reference information.  */

static void
update_reference_counts (struct mem_ref_stats_d *mem_ref_stats)
{
  unsigned i;
  bitmap_iterator bi;
  mem_sym_stats_t sym_stats;

  /* Walk every SSA name looking for dereferenced pointers.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr;
      struct ptr_info_def *pi;

      ptr = ssa_name (i);
      if (ptr
	  && POINTER_TYPE_P (TREE_TYPE (ptr))
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->is_dereferenced)
	{
	  unsigned j;
	  bitmap_iterator bj;
	  tree tag;
	  mem_sym_stats_t ptr_stats, tag_stats;

	  /* If PTR has flow-sensitive points-to information, use
	     PTR's name tag, otherwise use the symbol tag associated
	     with PTR's symbol.  */
	  if (pi->name_mem_tag)
	    tag = pi->name_mem_tag;
	  else
	    tag = symbol_mem_tag (SSA_NAME_VAR (ptr));

	  ptr_stats = get_mem_sym_stats_for (ptr);
	  tag_stats = get_mem_sym_stats_for (tag);

	  /* TAG has as many direct references as dereferences we
	     found for its parent pointer.  */
	  tag_stats->num_direct_reads += ptr_stats->num_direct_reads;
	  tag_stats->num_direct_writes += ptr_stats->num_direct_writes;

	  /* All the dereferences of pointer PTR are considered direct
	     references to PTR's memory tag (TAG).  In turn,
	     references to TAG will become virtual operands for every
	     symbol in TAG's alias set.  So, for every symbol ALIAS in
	     TAG's alias set, add as many indirect references to ALIAS
	     as direct references there are for TAG.  */
	  if (MTAG_ALIASES (tag))
	    EXECUTE_IF_SET_IN_BITMAP (MTAG_ALIASES (tag), 0, j, bj)
	      {
		tree alias = referenced_var (j);
		sym_stats = get_mem_sym_stats_for (alias);

		/* All the direct references to TAG are indirect references
		   to ALIAS.  Execution frequencies propagate along too.  */
		sym_stats->num_indirect_reads += ptr_stats->num_direct_reads;
		sym_stats->num_indirect_writes += ptr_stats->num_direct_writes;
		sym_stats->frequency_reads += ptr_stats->frequency_reads;
		sym_stats->frequency_writes += ptr_stats->frequency_writes;

		/* Indicate that TAG is one of ALIAS's parent tags.  */
		if (sym_stats->parent_tags == NULL)
		  sym_stats->parent_tags = BITMAP_ALLOC (&alias_bitmap_obstack);
		bitmap_set_bit (sym_stats->parent_tags, DECL_UID (tag));
	      }
	}
    }

  /* Call-clobbered symbols are indirectly written at every
     call/asm site.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
    {
      tree sym = referenced_var (i);
      sym_stats = get_mem_sym_stats_for (sym);
      sym_stats->num_indirect_writes += mem_ref_stats->num_call_sites
					+ mem_ref_stats->num_asm_sites;
    }

  /* Addressable symbols are indirectly written at some ASM sites.
     Since only ASM sites that clobber memory actually affect
     addressable symbols, this is an over-estimation.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
    {
      tree sym = referenced_var (i);
      sym_stats = get_mem_sym_stats_for (sym);
      sym_stats->num_indirect_writes += mem_ref_stats->num_asm_sites;
    }
}
/* Helper for compute_memory_partitions.  Add all memory symbols to
   *MP_INFO_P and compute the initial estimate for the total number of
   virtual operands needed.  MEM_REF_STATS points to CFUN's memory
   reference information.  On exit, *TAGS_P will contain the list of
   memory tags whose alias sets need to be rewritten after
   partitioning.  */

static void
build_mp_info (struct mem_ref_stats_d *mem_ref_stats,
	       VEC(mem_sym_stats_t,heap) **mp_info_p,
	       VEC(tree,heap) **tags_p)
{
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      mem_sym_stats_t sym_stats;
      tree old_mpt;

      /* We are only interested in memory symbols other than MPTs.  */
      if (is_gimple_reg (var) || TREE_CODE (var) == MEMORY_PARTITION_TAG)
	continue;

      /* Collect memory tags into the TAGS array so that we can
	 rewrite their alias sets after partitioning.  */
      if (MTAG_P (var) && MTAG_ALIASES (var))
	VEC_safe_push (tree, heap, *tags_p, var);

      /* Since we are going to re-compute partitions, any symbols that
	 used to belong to a partition must be detached from it and
	 marked for renaming.  */
      if ((old_mpt = memory_partition (var)) != NULL)
	{
	  mark_sym_for_renaming (old_mpt);
	  set_memory_partition (var, NULL_TREE);
	  mark_sym_for_renaming (var);
	}

      sym_stats = get_mem_sym_stats_for (var);

      /* Add VAR's reference info to MP_INFO.  Note that the only
	 symbols that make sense to partition are those that have
	 indirect references.  If a symbol S is always directly
	 referenced, partitioning it will not reduce the number of
	 virtual operators.  The only symbols that are profitable to
	 partition are those that belong to alias sets and/or are
	 call-clobbered.  */
      if (sym_stats->num_indirect_reads > 0
	  || sym_stats->num_indirect_writes > 0)
	VEC_safe_push (mem_sym_stats_t, heap, *mp_info_p, sym_stats);

      /* Update the number of estimated VOPS.  Note that direct
	 references to memory tags are always counted as indirect
	 references to their alias set members, so if a memory tag has
	 aliases, do not count its direct references to avoid double
	 accounting.  */
      if (!MTAG_P (var) || !MTAG_ALIASES (var))
	{
	  mem_ref_stats->num_vuses += sym_stats->num_direct_reads;
	  mem_ref_stats->num_vdefs += sym_stats->num_direct_writes;
	}

      /* Indirect references are always accounted for.  */
      mem_ref_stats->num_vuses += sym_stats->num_indirect_reads;
      mem_ref_stats->num_vdefs += sym_stats->num_indirect_writes;
    }
}
/* Compute memory partitions.  A memory partition (MPT) is an
   arbitrary grouping of memory symbols, such that references to one
   member of the group is considered a reference to all the members of
   the group.

   As opposed to alias sets in memory tags, the grouping into
   partitions is completely arbitrary and only done to reduce the
   number of virtual operands.  The only rule that needs to be
   observed when creating memory partitions is that given two memory
   partitions MPT.i and MPT.j, they must not contain symbols in
   common.

   Memory partitions are used when putting the program into Memory-SSA
   form.  In particular, in Memory-SSA PHI nodes are not computed for
   individual memory symbols.  They are computed for memory
   partitions.  This reduces the amount of PHI nodes in the SSA graph
   at the expense of precision (i.e., it makes unrelated stores affect
   each other).

   However, it is possible to increase precision by changing this
   partitioning scheme.  For instance, if the partitioning scheme is
   such that get_mpt_for is the identity function (that is,
   get_mpt_for (s) = s), this will result in ultimate precision at the
   expense of huge SSA webs.

   At the other extreme, a partitioning scheme that groups all the
   symbols in the same set results in minimal SSA webs and almost
   total loss of precision.

   The partitioning heuristic uses three parameters to decide the
   order in which symbols are processed.  The list of symbols is
   sorted so that symbols that are more likely to be partitioned are
   near the top of the list:

   - Execution frequency.  If a memory reference is in a frequently
     executed code path, grouping it into a partition may block useful
     transformations and cause sub-optimal code generation.  So, the
     partition heuristic tries to avoid grouping symbols with high
     execution frequency scores.  Execution frequency is taken
     directly from the basic blocks where every reference is made (see
     update_mem_sym_stats_from_stmt), which in turn uses the
     profile guided machinery, so if the program is compiled with PGO
     enabled, more accurate partitioning decisions will be made.

   - Number of references.  Symbols with few references in the code,
     are partitioned before symbols with many references.

   - NO_ALIAS attributes.  Symbols with any of the NO_ALIAS*
     attributes are partitioned after symbols marked MAY_ALIAS.

   Once the list is sorted, the partitioning proceeds as follows:

   1- For every symbol S in MP_INFO, create a new memory partition MP,
      if necessary.  To avoid memory partitions that contain symbols
      from non-conflicting alias sets, memory partitions are
      associated to the memory tag that holds S in its alias set.  So,
      when looking for a memory partition for S, the memory partition
      associated with one of the memory tags holding S is chosen.  If
      none exists, a new one is created.

   2- Add S to memory partition MP.

   3- Reduce by 1 the number of VOPS for every memory tag holding S.

   4- If the total number of VOPS is less than MAX_ALIASED_VOPS or the
      average number of VOPS per statement is less than
      AVG_ALIASED_VOPS, stop.  Otherwise, go to the next symbol in the
      list.  */

static void
compute_memory_partitions (void)
{
  tree tag;
  unsigned i;
  mem_sym_stats_t mp_p;
  VEC(mem_sym_stats_t,heap) *mp_info;
  bitmap new_aliases;
  VEC(tree,heap) *tags;
  struct mem_ref_stats_d *mem_ref_stats;
  int prev_max_aliased_vops;

  mem_ref_stats = gimple_mem_ref_stats (cfun);
  gcc_assert (mem_ref_stats->num_vuses == 0 && mem_ref_stats->num_vdefs == 0);

  /* Nothing to do if the function makes no memory references.  */
  if (mem_ref_stats->num_mem_stmts == 0)
    return;

  timevar_push (TV_MEMORY_PARTITIONING);

  mp_info = NULL;
  tags = NULL;
  /* MAX_ALIASED_VOPS is a global --param; save it so it can be
     restored at 'done' below.  */
  prev_max_aliased_vops = MAX_ALIASED_VOPS;

  /* Since we clearly cannot lower the number of virtual operators
     below the total number of memory statements in the function, we
     may need to adjust MAX_ALIASED_VOPS beforehand.  */
  if (MAX_ALIASED_VOPS < mem_ref_stats->num_mem_stmts)
    MAX_ALIASED_VOPS = mem_ref_stats->num_mem_stmts;

  /* Update reference stats for all the pointed-to variables and
     memory tags.  */
  update_reference_counts (mem_ref_stats);

  /* Add all the memory symbols to MP_INFO.  */
  build_mp_info (mem_ref_stats, &mp_info, &tags);

  /* No partitions required if we are below the threshold.  */
  if (!need_to_partition_p (mem_ref_stats))
    {
      if (dump_file)
	fprintf (dump_file, "\nMemory partitioning NOT NEEDED for %s\n",
		 get_name (current_function_decl));
      goto done;
    }

  /* Sort the MP_INFO array so that symbols that should be partitioned
     first are near the top of the list.  */
  sort_mp_info (mp_info);

  if (dump_file)
    {
      fprintf (dump_file, "\nMemory partitioning NEEDED for %s\n\n",
	       get_name (current_function_decl));
      fprintf (dump_file, "Memory symbol references before partitioning:\n");
      dump_mp_info (dump_file, mp_info);
    }

  /* Create partitions for variables in MP_INFO until we have enough
     to lower the total number of VOPS below MAX_ALIASED_VOPS or if
     the average number of VOPS per statement is below
     AVG_ALIASED_VOPS.  */
  for (i = 0; VEC_iterate (mem_sym_stats_t, mp_info, i, mp_p); i++)
    {
      tree mpt;

      /* If we are below the threshold, stop.  */
      if (!need_to_partition_p (mem_ref_stats))
	break;

      mpt = find_partition_for (mp_p);
      estimate_vop_reduction (mem_ref_stats, mp_p, mpt);
    }

  /* After partitions have been created, rewrite alias sets to use
     them instead of the original symbols.  This way, if the alias set
     was computed as { a b c d e f }, and the subset { b e f } was
     grouped into partition MPT.3, then the new alias set for the tag
     will be { a c d MPT.3 }.

     Note that this is not strictly necessary.  The operand scanner
     will always check if a symbol belongs to a partition when adding
     virtual operands.  However, by reducing the size of the alias
     sets to be scanned, the work needed inside the operand scanner is
     significantly reduced.  */
  new_aliases = BITMAP_ALLOC (&alias_bitmap_obstack);

  for (i = 0; VEC_iterate (tree, tags, i, tag); i++)
    {
      rewrite_alias_set_for (tag, new_aliases);
      /* NEW_ALIASES is a scratch bitmap shared across tags; clear it
	 between iterations.  */
      bitmap_clear (new_aliases);
    }

  BITMAP_FREE (new_aliases);

  if (dump_file)
    {
      fprintf (dump_file, "\nMemory symbol references after partitioning:\n");
      dump_mp_info (dump_file, mp_info);
    }

done:
  /* Free allocated memory.  */
  VEC_free (mem_sym_stats_t, heap, mp_info);
  VEC_free (tree, heap, tags);

  /* Restore the --param value saved above.  */
  MAX_ALIASED_VOPS = prev_max_aliased_vops;

  timevar_pop (TV_MEMORY_PARTITIONING);
}
/* Compute may-alias information for every variable referenced in function
   FNDECL.

   Alias analysis proceeds in 3 main phases:

   1- Points-to and escape analysis.

   This phase walks the use-def chains in the SSA web looking for three
   things:

	* Assignments of the form P_i = &VAR
	* Assignments of the form P_i = malloc()
	* Pointers and ADDR_EXPR that escape the current function.

   The concept of 'escaping' is the same one used in the Java world.  When
   a pointer or an ADDR_EXPR escapes, it means that it has been exposed
   outside of the current function.  So, assignment to global variables,
   function arguments and returning a pointer are all escape sites, as are
   conversions between pointers and integers.

   This is where we are currently limited.  Since not everything is renamed
   into SSA, we lose track of escape properties when a pointer is stashed
   inside a field in a structure, for instance.  In those cases, we are
   assuming that the pointer does escape.

   We use escape analysis to determine whether a variable is
   call-clobbered.  Simply put, if an ADDR_EXPR escapes, then the variable
   is call-clobbered.  If a pointer P_i escapes, then all the variables
   pointed-to by P_i (and its memory tag) also escape.

   2- Compute flow-sensitive aliases

   We have two classes of memory tags.  Memory tags associated with the
   pointed-to data type of the pointers in the program.  These tags are
   called "symbol memory tag" (SMT).  The other class are those associated
   with SSA_NAMEs, called "name memory tag" (NMT).  The basic idea is that
   when adding operands for an INDIRECT_REF *P_i, we will first check
   whether P_i has a name tag, if it does we use it, because that will have
   more precise aliasing information.  Otherwise, we use the standard symbol
   tag.

   In this phase, we go through all the pointers we found in points-to
   analysis and create alias sets for the name memory tags associated with
   each pointer P_i.  If P_i escapes, we mark call-clobbered the variables
   it points to and its tag.


   3- Compute flow-insensitive aliases

   This pass will compare the alias set of every symbol memory tag and
   every addressable variable found in the program.  Given a symbol
   memory tag SMT and an addressable variable V.  If the alias sets of
   SMT and V conflict (as computed by may_alias_p), then V is marked
   as an alias tag and added to the alias set of SMT.

   For instance, consider the following function:

	    foo (int i)
	    {
	      int *p, a, b;

	      if (i > 10)
	        p = &a;
	      else
	        p = &b;

	      *p = 3;
	      a = b + 2;
	      return *p;
	    }

   After aliasing analysis has finished, the symbol memory tag for pointer
   'p' will have two aliases, namely variables 'a' and 'b'.  Every time
   pointer 'p' is dereferenced, we want to mark the operation as a
   potential reference to 'a' and 'b'.

	    foo (int i)
	    {
	      int *p, a, b;

	      if (i_2 > 10)
	        p_4 = &a;
	      else
	        p_6 = &b;
	      # p_1 = PHI <p_4(1), p_6(2)>;

	      # a_7 = VDEF <a_3>;
	      # b_8 = VDEF <b_5>;
	      *p_1 = 3;

	      # a_9 = VDEF <a_7>
	      # VUSE <b_8>
	      a_9 = b_8 + 2;

	      # VUSE <a_9>;
	      # VUSE <b_8>;
	      return *p_1;
	    }

   In certain cases, the list of may aliases for a pointer may grow too
   large.  This may cause an explosion in the number of virtual operands
   inserted in the code.  Resulting in increased memory consumption and
   compilation time.

   When the number of virtual operands needed to represent aliased
   loads and stores grows too large (configurable with option --param
   max-aliased-vops and --param avg-aliased-vops), alias sets are
   grouped to avoid severe compile-time slow downs and memory
   consumption.  See compute_memory_partitions.  */

unsigned int
compute_may_aliases (void)
{
  struct alias_info *ai;

  timevar_push (TV_TREE_MAY_ALIAS);

  memset (&alias_stats, 0, sizeof (alias_stats));

  /* Initialize aliasing information.  */
  ai = init_alias_info ();

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  For every addressable variable V, determine whether the
     address of V escapes the current function, making V call-clobbered
     (i.e., whether &V is stored in a global variable or if its passed as a
     function call argument).  */
  compute_points_to_sets (ai);

  /* Collect all pointers and addressable variables, compute alias sets,
     create memory tags for pointers and promote variables whose address is
     not needed anymore.  */
  setup_pointers_and_addressables (ai);

  /* Compute type-based flow-insensitive aliasing for all the type
     memory tags.  */
  compute_flow_insensitive_aliasing (ai);

  /* Compute flow-sensitive, points-to based aliasing for all the name
     memory tags.  */
  compute_flow_sensitive_aliasing (ai);

  /* Compute call clobbering information.  */
  compute_call_clobbered (ai);

  /* If the program makes no reference to global variables, but it
     contains a mixture of pure and non-pure functions, then we need
     to create use-def and def-def links between these functions to
     avoid invalid transformations on them.  */
  maybe_create_global_var ();

  /* If the program contains ref-all pointers, finalize may-alias information
     for them.  This pass needs to be run after call-clobbering information
     has been computed.  */
  if (ai->ref_all_symbol_mem_tag)
    finalize_ref_all_pointers (ai);

  /* Compute memory partitions for every memory variable.  */
  compute_memory_partitions ();

  /* Remove partitions with no symbols.  Partitions may end up with an
     empty MPT_SYMBOLS set if a previous round of alias analysis
     needed to partition more symbols.  Since we don't need those
     partitions anymore, remove them to free up the space.  */
  {
    tree mpt;
    unsigned i;
    VEC(tree,heap) *mpt_table;

    mpt_table = gimple_ssa_operands (cfun)->mpt_table;
    i = 0;
    /* Re-check the length every iteration: VEC_unordered_remove
       shrinks the vector in place and I is only advanced when the
       current element is kept.  */
    while (i < VEC_length (tree, mpt_table))
      {
	mpt = VEC_index (tree, mpt_table, i);
	if (MPT_SYMBOLS (mpt) == NULL)
	  VEC_unordered_remove (tree, mpt_table, i);
	else
	  i++;
      }
  }

  /* Populate all virtual operands and newly promoted register operands.  */
  {
    block_stmt_iterator bsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	update_stmt_if_modified (bsi_stmt (bsi));
  }

  /* Debugging dumps.  */
  if (dump_file)
    {
      dump_mem_ref_stats (dump_file);
      dump_alias_info (dump_file);
      dump_points_to_info (dump_file);

      if (dump_flags & TDF_STATS)
	dump_alias_stats (dump_file);

      if (dump_flags & TDF_DETAILS)
	dump_referenced_vars (dump_file);
    }

  /* Report strict aliasing violations.  */
  strict_aliasing_warning_backend ();

  /* Deallocate memory used by aliasing data structures.  */
  delete_alias_info (ai);

  if (need_ssa_update_p ())
    update_ssa (TODO_update_ssa);

  timevar_pop (TV_TREE_MAY_ALIAS);

  return 0;
}
/* Data structure used to count the number of dereferences to PTR
   inside an expression.  Passed as callback data to count_ptr_derefs
   via walk_tree.  */
struct count_ptr_d
{
  /* Pointer whose dereferences are being counted.  */
  tree ptr;

  /* Number of dereferences of PTR found so far.  */
  unsigned count;
};
1819 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
1820 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
1822 static tree
1823 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
1825 struct count_ptr_d *count_p = (struct count_ptr_d *) data;
1827 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
1828 pointer 'ptr' is *not* dereferenced, it is simply used to compute
1829 the address of 'fld' as 'ptr + offsetof(fld)'. */
1830 if (TREE_CODE (*tp) == ADDR_EXPR)
1832 *walk_subtrees = 0;
1833 return NULL_TREE;
1836 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
1837 count_p->count++;
1839 return NULL_TREE;
1843 /* Count the number of direct and indirect uses for pointer PTR in
1844 statement STMT. The number of direct uses is stored in
1845 *NUM_USES_P. Indirect references are counted separately depending
1846 on whether they are store or load operations. The counts are
1847 stored in *NUM_STORES_P and *NUM_LOADS_P. */
1849 void
1850 count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
1851 unsigned *num_loads_p, unsigned *num_stores_p)
1853 ssa_op_iter i;
1854 tree use;
1856 *num_uses_p = 0;
1857 *num_loads_p = 0;
1858 *num_stores_p = 0;
1860 /* Find out the total number of uses of PTR in STMT. */
1861 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
1862 if (use == ptr)
1863 (*num_uses_p)++;
1865 /* Now count the number of indirect references to PTR. This is
1866 truly awful, but we don't have much choice. There are no parent
1867 pointers inside INDIRECT_REFs, so an expression like
1868 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
1869 find all the indirect and direct uses of x_1 inside. The only
1870 shortcut we can take is the fact that GIMPLE only allows
1871 INDIRECT_REFs inside the expressions below. */
1872 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
1873 || (TREE_CODE (stmt) == RETURN_EXPR
1874 && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
1875 || TREE_CODE (stmt) == ASM_EXPR
1876 || TREE_CODE (stmt) == CALL_EXPR)
1878 tree lhs, rhs;
1880 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
1882 lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1883 rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1885 else if (TREE_CODE (stmt) == RETURN_EXPR)
1887 tree e = TREE_OPERAND (stmt, 0);
1888 lhs = GIMPLE_STMT_OPERAND (e, 0);
1889 rhs = GIMPLE_STMT_OPERAND (e, 1);
1891 else if (TREE_CODE (stmt) == ASM_EXPR)
1893 lhs = ASM_OUTPUTS (stmt);
1894 rhs = ASM_INPUTS (stmt);
1896 else
1898 lhs = NULL_TREE;
1899 rhs = stmt;
1902 if (lhs
1903 && (TREE_CODE (lhs) == TREE_LIST
1904 || EXPR_P (lhs)
1905 || GIMPLE_STMT_P (lhs)))
1907 struct count_ptr_d count;
1908 count.ptr = ptr;
1909 count.count = 0;
1910 walk_tree (&lhs, count_ptr_derefs, &count, NULL);
1911 *num_stores_p = count.count;
1914 if (rhs
1915 && (TREE_CODE (rhs) == TREE_LIST
1916 || EXPR_P (rhs)
1917 || GIMPLE_STMT_P (rhs)))
1919 struct count_ptr_d count;
1920 count.ptr = ptr;
1921 count.count = 0;
1922 walk_tree (&rhs, count_ptr_derefs, &count, NULL);
1923 *num_loads_p = count.count;
1927 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
1930 /* Remove memory references stats for function FN. */
1932 void
1933 delete_mem_ref_stats (struct function *fn)
1935 if (gimple_mem_ref_stats (fn)->mem_sym_stats)
1937 free_alloc_pool (mem_sym_stats_pool);
1938 pointer_map_destroy (gimple_mem_ref_stats (fn)->mem_sym_stats);
1940 gimple_mem_ref_stats (fn)->mem_sym_stats = NULL;
1944 /* Initialize memory reference stats. */
1946 static void
1947 init_mem_ref_stats (void)
1949 struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);
1951 mem_sym_stats_pool = create_alloc_pool ("Mem sym stats",
1952 sizeof (struct mem_sym_stats_d),
1953 100);
1954 memset (mem_ref_stats, 0, sizeof (struct mem_ref_stats_d));
1955 mem_ref_stats->mem_sym_stats = pointer_map_create ();
/* Helper for init_alias_info.  Reset existing aliasing information.  */

static void
reset_alias_info (void)
{
  referenced_var_iterator rvi;
  tree var;
  unsigned i;
  bitmap active_nmts, all_nmts;

  /* Clear the set of addressable variables.  We do not need to clear
     the TREE_ADDRESSABLE bit on every symbol because we are going to
     re-compute addressability here.  */
  bitmap_clear (gimple_addressable_vars (cfun));

  /* ACTIVE_NMTS collects name tags still referenced by some SSA name;
     ALL_NMTS collects every name tag in the function.  Their
     difference (computed at the end) is the set of stale tags.  */
  active_nmts = BITMAP_ALLOC (&alias_bitmap_obstack);
  all_nmts = BITMAP_ALLOC (&alias_bitmap_obstack);

  /* Clear flow-insensitive alias information from each symbol.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (is_gimple_reg (var))
	continue;

      if (MTAG_P (var))
	MTAG_ALIASES (var) = NULL;

      /* Memory partition information will be computed from scratch.  */
      if (TREE_CODE (var) == MEMORY_PARTITION_TAG)
	MPT_SYMBOLS (var) = NULL;

      /* Collect all the name tags to determine if we have any
	 orphaned that need to be removed from the IL.  A name tag
	 will be orphaned if it is not associated with any active SSA
	 name.  */
      if (TREE_CODE (var) == NAME_MEMORY_TAG)
	bitmap_set_bit (all_nmts, DECL_UID (var));

      /* Since we are about to re-discover call-clobbered
	 variables, clear the call-clobbered flag.  Variables that
	 are intrinsically call-clobbered (globals, local statics,
	 etc) will not be marked by the aliasing code, so we can't
	 remove them from CALL_CLOBBERED_VARS.

	 NB: STRUCT_FIELDS are still call clobbered if they are for a
	 global variable, so we *don't* clear their call clobberedness
	 just because they are tags, though we will clear it if they
	 aren't for global variables.  */
      if (TREE_CODE (var) == NAME_MEMORY_TAG
	  || TREE_CODE (var) == SYMBOL_MEMORY_TAG
	  || TREE_CODE (var) == MEMORY_PARTITION_TAG
	  || !is_global_var (var))
	clear_call_clobbered (var);
    }

  /* Clear flow-sensitive points-to information from each SSA name.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);

      if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
	continue;

      if (SSA_NAME_PTR_INFO (name))
	{
	  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);

	  /* Clear all the flags but keep the name tag to
	     avoid creating new temporaries unnecessarily.  If
	     this pointer is found to point to a subset or
	     superset of its former points-to set, then a new
	     tag will need to be created in create_name_tags.  */
	  pi->pt_anything = 0;
	  pi->pt_null = 0;
	  pi->value_escapes_p = 0;
	  pi->is_dereferenced = 0;
	  if (pi->pt_vars)
	    bitmap_clear (pi->pt_vars);

	  /* Add NAME's name tag to the set of active tags.  */
	  if (pi->name_mem_tag)
	    bitmap_set_bit (active_nmts, DECL_UID (pi->name_mem_tag));
	}
    }

  /* Name memory tags that are no longer associated with an SSA name
     are considered stale and should be removed from the IL.  All the
     name tags that are in the set ALL_NMTS but not in ACTIVE_NMTS are
     considered stale and marked for renaming.  */
  bitmap_and_compl_into (all_nmts, active_nmts);
  mark_set_for_renaming (all_nmts);

  BITMAP_FREE (all_nmts);
  BITMAP_FREE (active_nmts);
}
2056 /* Initialize the data structures used for alias analysis. */
/* Returns a freshly XCNEW'ed alias_info; ownership passes to the
   caller, which must release it with delete_alias_info.  */
2058 static struct alias_info *
2059 init_alias_info (void)
2061 struct alias_info *ai;
2062 referenced_var_iterator rvi;
2063 tree var;
2065 ai = XCNEW (struct alias_info);
2066 ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
2067 sbitmap_zero (ai->ssa_names_visited);
2068 ai->processed_ptrs = VEC_alloc (tree, heap, 50);
2069 ai->written_vars = pointer_set_create ();
2070 ai->dereferenced_ptrs_store = pointer_set_create ();
2071 ai->dereferenced_ptrs_load = pointer_set_create ();
2073 /* Clear out all memory reference stats. */
2074 init_mem_ref_stats ();
2076 /* If aliases have been computed before, clear existing information. */
2077 if (gimple_aliases_computed_p (cfun))
2078 reset_alias_info ();
2079 else
2081 /* If this is the first time we compute aliasing information,
2082 every non-register symbol will need to be put into SSA form
2083 (the initial SSA form only operates on GIMPLE registers). */
2084 FOR_EACH_REFERENCED_VAR (var, rvi)
2085 if (!is_gimple_reg (var))
2086 mark_sym_for_renaming (var);
2089 /* Next time, we will need to reset alias information. */
2090 cfun->gimple_df->aliases_computed_p = true;
/* Throw away every bitmap allocated on the obstack by a previous
   aliasing pass and start a fresh obstack for this run.  */
2091 if (alias_bitmap_obstack.elements != NULL)
2092 bitmap_obstack_release (&alias_bitmap_obstack);
2093 bitmap_obstack_initialize (&alias_bitmap_obstack);
2095 return ai;
2099 /* Deallocate memory used by alias analysis. */
/* Counterpart of init_alias_info: frees AI and everything it owns,
   then tears down the per-function memory-reference statistics and
   the points-to solver's data structures.  */
2101 static void
2102 delete_alias_info (struct alias_info *ai)
2104 size_t i;
2106 sbitmap_free (ai->ssa_names_visited);
2108 VEC_free (tree, heap, ai->processed_ptrs);
/* Each array slot was individually XCNEW'ed (see
   create_alias_map_for / get_smt_for callers), so free per-entry
   before freeing the array itself.  */
2110 for (i = 0; i < ai->num_addressable_vars; i++)
2111 free (ai->addressable_vars[i]);
2112 free (ai->addressable_vars);
2114 for (i = 0; i < ai->num_pointers; i++)
2115 free (ai->pointers[i]);
2116 free (ai->pointers);
2118 pointer_set_destroy (ai->written_vars);
2119 pointer_set_destroy (ai->dereferenced_ptrs_store);
2120 pointer_set_destroy (ai->dereferenced_ptrs_load);
2121 free (ai);
2123 delete_mem_ref_stats (cfun);
2124 delete_points_to_sets ();
2128 /* Used for hashing to identify pointer infos with identical
2129 pt_vars bitmaps. */
/* Equality callback for the PTR_HASH table built in
   create_name_tags: two ptr_info entries are considered equal iff
   their points-to bitmaps hold exactly the same members.  */
2131 static int
2132 eq_ptr_info (const void *p1, const void *p2)
2134 const struct ptr_info_def *n1 = (const struct ptr_info_def *) p1;
2135 const struct ptr_info_def *n2 = (const struct ptr_info_def *) p2;
2136 return bitmap_equal_p (n1->pt_vars, n2->pt_vars);
/* Hash callback paired with eq_ptr_info: hash a ptr_info entry by
   the contents of its pt_vars bitmap.  */
2139 static hashval_t
2140 ptr_info_hash (const void *p)
2142 const struct ptr_info_def *n = (const struct ptr_info_def *) p;
2143 return bitmap_hash (n->pt_vars);
2147 /* Create name tags for all the pointers that have been dereferenced.
2148 We only create a name tag for a pointer P if P is found to point to
2149 a set of variables (so that we can alias them to *P) or if it is
2150 the result of a call to malloc (which means that P cannot point to
2151 anything else nor alias any other variable).
2153 If two pointers P and Q point to the same set of variables, they
2154 are assigned the same name tag. */
2156 static void
2157 create_name_tags (void)
2159 size_t i;
2160 VEC (tree, heap) *with_ptvars = NULL;
2161 tree ptr;
2162 htab_t ptr_hash;
2164 /* Collect the list of pointers with a non-empty points to set. */
2165 for (i = 1; i < num_ssa_names; i++)
2167 tree ptr = ssa_name (i);
2168 struct ptr_info_def *pi;
2170 if (!ptr
2171 || !POINTER_TYPE_P (TREE_TYPE (ptr))
2172 || !SSA_NAME_PTR_INFO (ptr))
2173 continue;
2175 pi = SSA_NAME_PTR_INFO (ptr);
2177 if (pi->pt_anything || !pi->is_dereferenced)
2179 /* No name tags for pointers that have not been
2180 dereferenced or point to an arbitrary location. */
2181 pi->name_mem_tag = NULL_TREE;
2182 continue;
2185 /* Set pt_anything on the pointers without pt_vars filled in so
2186 that they are assigned a symbol tag. */
2187 if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
2188 VEC_safe_push (tree, heap, with_ptvars, ptr);
2189 else
2190 set_pt_anything (ptr);
2193 /* If we didn't find any pointers with pt_vars set, we're done. */
2194 if (!with_ptvars)
2195 return;
/* PTR_HASH maps a pt_vars bitmap to the first ptr_info seen with
   that bitmap; later pointers with an equal points-to set reuse the
   representative's name tag instead of creating a new one.  */
2197 ptr_hash = htab_create (10, ptr_info_hash, eq_ptr_info, NULL);
2199 /* Now go through the pointers with pt_vars, and find a name tag
2200 with the same pt_vars as this pointer, or create one if one
2201 doesn't exist. */
2202 for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
2204 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2205 tree old_name_tag = pi->name_mem_tag;
2206 struct ptr_info_def **slot;
2208 /* If PTR points to a set of variables, check if we don't
2209 have another pointer Q with the same points-to set before
2210 creating a tag. If so, use Q's tag instead of creating a
2211 new one.
2213 This is important for not creating unnecessary symbols
2214 and also for copy propagation. If we ever need to
2215 propagate PTR into Q or vice-versa, we would run into
2216 problems if they both had different name tags because
2217 they would have different SSA version numbers (which
2218 would force us to take the name tags in and out of SSA). */
2219 slot = (struct ptr_info_def **) htab_find_slot (ptr_hash, pi, INSERT);
2220 if (*slot)
2221 pi->name_mem_tag = (*slot)->name_mem_tag;
2222 else
2224 *slot = pi;
2226 /* If we didn't find a pointer with the same points-to set
2227 as PTR, create a new name tag if needed. */
2228 if (pi->name_mem_tag == NULL_TREE)
2229 pi->name_mem_tag = get_nmt_for (ptr);
2232 /* If the new name tag computed for PTR is different than
2233 the old name tag that it used to have, then the old tag
2234 needs to be removed from the IL, so we mark it for
2235 renaming. */
2236 if (old_name_tag && old_name_tag != pi->name_mem_tag)
2237 mark_sym_for_renaming (old_name_tag);
2239 /* Inherit volatility from the pointed-to type. */
2240 TREE_THIS_VOLATILE (pi->name_mem_tag)
2241 |= TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
2243 /* Mark the new name tag for renaming. */
2244 mark_sym_for_renaming (pi->name_mem_tag);
2247 htab_delete (ptr_hash);
2249 VEC_free (tree, heap, with_ptvars);
2253 /* Union the alias set SET into the may-aliases for TAG. */
2255 static void
2256 union_alias_set_into (tree tag, bitmap set)
2258 bitmap ma = MTAG_ALIASES (tag);
/* An empty SET would contribute nothing; bail out before allocating
   a may-aliases bitmap for TAG.  */
2260 if (bitmap_empty_p (set))
2261 return;
/* TAG's alias bitmap is allocated lazily on the per-pass alias
   obstack (released wholesale at the start of the next pass).  */
2263 if (!ma)
2264 ma = MTAG_ALIASES (tag) = BITMAP_ALLOC (&alias_bitmap_obstack);
2265 bitmap_ior_into (ma, set);
2269 /* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
2270 the name memory tag (NMT) associated with P_i. If P_i escapes, then its
2271 name tag and the variables it points-to are call-clobbered. Finally, if
2272 P_i escapes and we could not determine where it points to, then all the
2273 variables in the same alias set as *P_i are marked call-clobbered. This
2274 is necessary because we must assume that P_i may take the address of any
2275 variable in the same alias set. */
2277 static void
2278 compute_flow_sensitive_aliasing (struct alias_info *ai)
2280 size_t i;
2281 tree ptr;
2283 timevar_push (TV_FLOW_SENSITIVE);
2284 set_used_smts ();
/* Pointers for which the points-to solver produced no solution are
   conservatively marked as pointing anywhere.  */
2286 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
2288 if (!find_what_p_points_to (ptr))
2289 set_pt_anything (ptr);
2292 create_name_tags ();
2294 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
2296 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2297 tree tag = symbol_mem_tag (SSA_NAME_VAR (ptr));
2299 /* Set up aliasing information for PTR's name memory tag (if it has
2300 one). Note that only pointers that have been dereferenced will
2301 have a name memory tag. */
2302 if (pi->name_mem_tag && pi->pt_vars)
2304 if (!bitmap_empty_p (pi->pt_vars))
2306 union_alias_set_into (pi->name_mem_tag, pi->pt_vars);
2307 union_alias_set_into (tag, pi->pt_vars);
2308 bitmap_clear_bit (MTAG_ALIASES (tag), DECL_UID (tag));
2310 /* It may be the case that the tag's uid was the only
2311 bit we had set in the aliases list, and in this case,
2312 we don't want to keep an empty bitmap, as this
2313 asserts in tree-ssa-operands.c . */
2314 if (bitmap_empty_p (MTAG_ALIASES (tag)))
2315 BITMAP_FREE (MTAG_ALIASES (tag));
2319 timevar_pop (TV_FLOW_SENSITIVE);
2323 /* Return TRUE if at least one symbol in TAG2's alias set is also
2324 present in TAG1's alias set. */
/* A NULL bitmap argument represents an empty alias set, so any NULL
   operand makes the intersection empty by definition.  */
2326 static bool
2327 have_common_aliases_p (bitmap tag1aliases, bitmap tag2aliases)
2330 /* This is the old behavior of have_common_aliases_p, which is to
2331 return false if both sets are empty, or one set is and the other
2332 isn't. */
2333 if ((tag1aliases == NULL && tag2aliases != NULL)
2334 || (tag2aliases == NULL && tag1aliases != NULL)
2335 || (tag1aliases == NULL && tag2aliases == NULL))
2336 return false;
2338 return bitmap_intersect_p (tag1aliases, tag2aliases);
2341 /* Compute type-based alias sets. Traverse all the pointers and
2342 addressable variables found in setup_pointers_and_addressables.
2344 For every pointer P in AI->POINTERS and addressable variable V in
2345 AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's symbol
2346 memory tag (SMT) if their alias sets conflict. V is then marked as
2347 an aliased symbol so that the operand scanner knows that statements
2348 containing V have aliased operands. */
2350 static void
2351 compute_flow_insensitive_aliasing (struct alias_info *ai)
2353 size_t i;
2355 timevar_push (TV_FLOW_INSENSITIVE);
2356 /* For every pointer P, determine which addressable variables may alias
2357 with P's symbol memory tag. */
2358 for (i = 0; i < ai->num_pointers; i++)
2360 size_t j;
2361 struct alias_map_d *p_map = ai->pointers[i];
2362 tree tag = symbol_mem_tag (p_map->var);
2363 tree var;
2365 /* Call-clobbering information is not finalized yet at this point. */
2366 if (PTR_IS_REF_ALL (p_map->var))
2367 continue;
2369 for (j = 0; j < ai->num_addressable_vars; j++)
2371 struct alias_map_d *v_map;
2372 var_ann_t v_ann;
2373 bool tag_stored_p, var_stored_p;
2375 v_map = ai->addressable_vars[j];
2376 var = v_map->var;
/* NOTE(review): v_ann appears unused in the visible code below —
   candidate for removal; confirm against the full file.  */
2377 v_ann = var_ann (var);
2379 /* Skip memory tags and variables that have never been
2380 written to. We also need to check if the variables are
2381 call-clobbered because they may be overwritten by
2382 function calls. */
2383 tag_stored_p = pointer_set_contains (ai->written_vars, tag)
2384 || is_call_clobbered (tag);
2385 var_stored_p = pointer_set_contains (ai->written_vars, var)
2386 || is_call_clobbered (var);
2387 if (!tag_stored_p && !var_stored_p)
2388 continue;
2390 if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
2392 /* We should never have a var with subvars here, because
2393 they shouldn't get into the set of addressable vars */
2394 gcc_assert (!var_can_have_subvars (var)
2395 || get_subvars_for_var (var) == NULL);
2397 /* Add VAR to TAG's may-aliases set. */
2398 add_may_alias (tag, var);
2403 /* Since this analysis is based exclusively on symbols, it fails to
2404 handle cases where two pointers P and Q have different memory
2405 tags with conflicting alias set numbers but no aliased symbols in
2406 common.
2408 For example, suppose that we have two memory tags SMT.1 and SMT.2
2409 such that
2411 may-aliases (SMT.1) = { a }
2412 may-aliases (SMT.2) = { b }
2414 and the alias set number of SMT.1 conflicts with that of SMT.2.
2415 Since they don't have symbols in common, loads and stores from
2416 SMT.1 and SMT.2 will seem independent of each other, which will
2417 lead to the optimizers making invalid transformations (see
2418 testsuite/gcc.c-torture/execute/pr15262-[12].c).
2420 To avoid this problem, we do a final traversal of AI->POINTERS
2421 looking for pairs of pointers that have no aliased symbols in
2422 common and yet have conflicting alias set numbers. */
2423 for (i = 0; i < ai->num_pointers; i++)
2425 size_t j;
2426 struct alias_map_d *p_map1 = ai->pointers[i];
2427 tree tag1 = symbol_mem_tag (p_map1->var);
2428 bitmap may_aliases1 = MTAG_ALIASES (tag1);
2430 if (PTR_IS_REF_ALL (p_map1->var))
2431 continue;
/* Inner loop starts at i + 1: each unordered pair of pointers is
   examined exactly once.  */
2433 for (j = i + 1; j < ai->num_pointers; j++)
2435 struct alias_map_d *p_map2 = ai->pointers[j];
2436 tree tag2 = symbol_mem_tag (p_map2->var);
2437 bitmap may_aliases2 = may_aliases (tag2);
2439 if (PTR_IS_REF_ALL (p_map2->var))
2440 continue;
2442 /* If the pointers may not point to each other, do nothing. */
2443 if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
2444 continue;
2446 /* The two pointers may alias each other. If they already have
2447 symbols in common, do nothing. */
2448 if (have_common_aliases_p (may_aliases1, may_aliases2))
2449 continue;
2451 if (may_aliases2 && !bitmap_empty_p (may_aliases2))
2453 union_alias_set_into (tag1, may_aliases2);
2455 else
2457 /* Since TAG2 does not have any aliases of its own, add
2458 TAG2 itself to the alias set of TAG1. */
2459 add_may_alias (tag1, tag2);
2464 timevar_pop (TV_FLOW_INSENSITIVE);
2468 /* Finalize may-alias information for ref-all pointers. Traverse all
2469 the addressable variables found in setup_pointers_and_addressables.
2471 If flow-sensitive alias analysis has attached a name memory tag to
2472 a ref-all pointer, we will use it for the dereferences because that
2473 will have more precise aliasing information. But if there is no
2474 name tag, we will use a special symbol tag that aliases all the
2475 call-clobbered addressable variables. */
2477 static void
2478 finalize_ref_all_pointers (struct alias_info *ai)
2480 size_t i;
2482 /* First add the real call-clobbered variables. */
2483 for (i = 0; i < ai->num_addressable_vars; i++)
2485 tree var = ai->addressable_vars[i]->var;
2486 if (is_call_clobbered (var))
2487 add_may_alias (ai->ref_all_symbol_mem_tag, var);
2490 /* Then add the call-clobbered pointer memory tags. See
2491 compute_flow_insensitive_aliasing for the rationale. */
2492 for (i = 0; i < ai->num_pointers; i++)
2494 tree ptr = ai->pointers[i]->var, tag;
2495 /* Avoid adding to self and clean up. */
2496 if (PTR_IS_REF_ALL (ptr))
/* For a ref-all pointer that was actually dereferenced, drop the
   pt_anything mark so its name tag (if any) stays usable.  */
2498 struct ptr_info_def *pi = get_ptr_info (ptr);
2499 if (pi->is_dereferenced)
2500 pi->pt_anything = 0;
2501 continue;
2503 tag = symbol_mem_tag (ptr);
2504 if (is_call_clobbered (tag))
2505 add_may_alias (ai->ref_all_symbol_mem_tag, tag);
2511 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
/* No bounds check is needed here: AI->ADDRESSABLE_VARS was sized up
   front in setup_pointers_and_addressables by counting candidate
   variables before any entries are created.  */
2513 static void
2514 create_alias_map_for (tree var, struct alias_info *ai)
2516 struct alias_map_d *alias_map;
2517 alias_map = XCNEW (struct alias_map_d);
2518 alias_map->var = var;
2519 alias_map->set = get_alias_set (var);
2520 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
2524 /* Create memory tags for all the dereferenced pointers and build the
2525 ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
2526 sets. Based on the address escape and points-to information collected
2527 earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
2528 variables whose address is not needed anymore. */
2530 static void
2531 setup_pointers_and_addressables (struct alias_info *ai)
2533 size_t num_addressable_vars, num_pointers;
2534 referenced_var_iterator rvi;
2535 tree var;
2536 VEC (tree, heap) *varvec = NULL;
2537 safe_referenced_var_iterator srvi;
2539 /* Size up the arrays ADDRESSABLE_VARS and POINTERS. */
2540 num_addressable_vars = num_pointers = 0;
2542 FOR_EACH_REFERENCED_VAR (var, rvi)
2544 if (may_be_aliased (var))
2545 num_addressable_vars++;
2547 if (POINTER_TYPE_P (TREE_TYPE (var)))
2549 /* Since we don't keep track of volatile variables, assume that
2550 these pointers are used in indirect store operations. */
2551 if (TREE_THIS_VOLATILE (var))
2552 pointer_set_insert (ai->dereferenced_ptrs_store, var);
2554 num_pointers++;
2558 /* Create ADDRESSABLE_VARS and POINTERS. Note that these arrays are
2559 always going to be slightly bigger than we actually need them
2560 because some TREE_ADDRESSABLE variables will be marked
2561 non-addressable below and only pointers with unique symbol tags are
2562 going to be added to POINTERS. */
2563 ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
2564 ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
2565 ai->num_addressable_vars = 0;
2566 ai->num_pointers = 0;
/* Use the _SAFE iterator because the loop body may add/remove
   referenced vars (tags) while iterating.  */
2568 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
2570 subvar_t svars;
2572 /* Name memory tags already have flow-sensitive aliasing
2573 information, so they need not be processed by
2574 compute_flow_insensitive_aliasing. Similarly, symbol memory
2575 tags are already accounted for when we process their
2576 associated pointer.
2578 Structure fields, on the other hand, have to have some of this
2579 information processed for them, but it's pointless to mark them
2580 non-addressable (since they are fake variables anyway). */
2581 if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
2582 continue;
2584 /* Remove the ADDRESSABLE flag from every addressable variable whose
2585 address is not needed anymore. This is caused by the propagation
2586 of ADDR_EXPR constants into INDIRECT_REF expressions and the
2587 removal of dead pointer assignments done by the early scalar
2588 cleanup passes. */
2589 if (TREE_ADDRESSABLE (var))
2591 if (!bitmap_bit_p (gimple_addressable_vars (cfun), DECL_UID (var))
2592 && TREE_CODE (var) != RESULT_DECL
2593 && !is_global_var (var))
2595 bool okay_to_mark = true;
2597 /* Since VAR is now a regular GIMPLE register, we will need
2598 to rename VAR into SSA afterwards. */
2599 mark_sym_for_renaming (var);
2601 /* If VAR can have sub-variables, and any of its
2602 sub-variables has its address taken, then we cannot
2603 remove the addressable flag from VAR. */
2604 if (var_can_have_subvars (var)
2605 && (svars = get_subvars_for_var (var)))
2607 subvar_t sv;
2609 for (sv = svars; sv; sv = sv->next)
2611 if (bitmap_bit_p (gimple_addressable_vars (cfun),
2612 DECL_UID (sv->var)))
2613 okay_to_mark = false;
2614 mark_sym_for_renaming (sv->var);
2618 /* The address of VAR is not needed, remove the
2619 addressable bit, so that it can be optimized as a
2620 regular variable. */
2621 if (okay_to_mark)
2623 /* The memory partition holding VAR will no longer
2624 contain VAR, and statements referencing it will need
2625 to be updated. */
2626 if (memory_partition (var))
2627 mark_sym_for_renaming (memory_partition (var));
2629 mark_non_addressable (var);
2634 /* Global variables and addressable locals may be aliased. Create an
2635 entry in ADDRESSABLE_VARS for VAR. */
2636 if (may_be_aliased (var))
2638 if (!var_can_have_subvars (var)
2639 || get_subvars_for_var (var) == NULL)
2640 create_alias_map_for (var, ai);
2642 mark_sym_for_renaming (var);
2645 /* Add pointer variables that have been dereferenced to the POINTERS
2646 array and create a symbol memory tag for them. */
2647 if (POINTER_TYPE_P (TREE_TYPE (var)))
2649 if ((pointer_set_contains (ai->dereferenced_ptrs_store, var)
2650 || pointer_set_contains (ai->dereferenced_ptrs_load, var)))
2652 tree tag, old_tag;
2653 var_ann_t t_ann;
2655 /* If pointer VAR still doesn't have a memory tag
2656 associated with it, create it now or re-use an
2657 existing one. */
2658 tag = get_smt_for (var, ai);
/* NOTE(review): t_ann appears unused in the visible code below —
   candidate for removal; confirm against the full file.  */
2659 t_ann = var_ann (tag);
2661 /* The symbol tag will need to be renamed into SSA
2662 afterwards. Note that we cannot do this inside
2663 get_smt_for because aliasing may run multiple times
2664 and we only create symbol tags the first time. */
2665 mark_sym_for_renaming (tag);
2667 /* Similarly, if pointer VAR used to have another type
2668 tag, we will need to process it in the renamer to
2669 remove the stale virtual operands. */
2670 old_tag = symbol_mem_tag (var);
2671 if (old_tag)
2672 mark_sym_for_renaming (old_tag);
2674 /* Associate the tag with pointer VAR. */
2675 set_symbol_mem_tag (var, tag);
2677 /* If pointer VAR has been used in a store operation,
2678 then its memory tag must be marked as written-to. */
2679 if (pointer_set_contains (ai->dereferenced_ptrs_store, var))
2680 pointer_set_insert (ai->written_vars, tag);
2682 else
2684 /* The pointer has not been dereferenced. If it had a
2685 symbol memory tag, remove it and mark the old tag for
2686 renaming to remove it out of the IL. */
2687 tree tag = symbol_mem_tag (var);
2688 if (tag)
2690 mark_sym_for_renaming (tag);
2691 set_symbol_mem_tag (var, NULL_TREE);
2697 VEC_free (tree, heap, varvec);
2701 /* Determine whether to use .GLOBAL_VAR to model call clobbering
2702 semantics. If the function makes no references to global
2703 variables and contains at least one call to a non-pure function,
2704 then we need to mark the side-effects of the call using .GLOBAL_VAR
2705 to represent all possible global memory referenced by the callee. */
2707 static void
2708 maybe_create_global_var (void)
2710 /* No need to create it, if we have one already. */
2711 if (gimple_global_var (cfun) == NULL_TREE)
2713 struct mem_ref_stats_d *stats = gimple_mem_ref_stats (cfun);
2715 /* Create .GLOBAL_VAR if there are no call-clobbered
2716 variables and the program contains a mixture of pure/const
2717 and regular function calls. This is to avoid the problem
2718 described in PR 20115:
2720 int X;
2721 int func_pure (void) { return X; }
2722 int func_non_pure (int a) { X += a; }
2723 int foo ()
2725 int a = func_pure ();
2726 func_non_pure (a);
2727 a = func_pure ();
2728 return a;
2731 Since foo() has no call-clobbered variables, there is
2732 no relationship between the calls to func_pure and
2733 func_non_pure. Since func_pure has no side-effects, value
2734 numbering optimizations elide the second call to func_pure.
2735 So, if we have some pure/const and some regular calls in the
2736 program we create .GLOBAL_VAR to avoid missing these
2737 relations. */
/* The last conjunct requires at least one NON-pure call site,
   i.e. a true mixture of pure/const and regular calls.  */
2738 if (bitmap_count_bits (gimple_call_clobbered_vars (cfun)) == 0
2739 && stats->num_call_sites > 0
2740 && stats->num_pure_const_call_sites > 0
2741 && stats->num_call_sites > stats->num_pure_const_call_sites)
2742 create_global_var ();
2747 /* Return TRUE if pointer PTR may point to variable VAR.
2749 MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR
2750 This is needed because when checking for type conflicts we are
2751 interested in the alias set of the memory location pointed-to by
2752 PTR. The alias set of PTR itself is irrelevant.
2754 VAR_ALIAS_SET is the alias set for VAR.
   ALIAS_SET_ONLY, when true, restricts the answer to alias-set
   reasoning and skips the ipa-type-escape structure-field heuristics
   below (used when VAR is itself a memory tag rather than a real
   variable -- see the second loop in compute_flow_insensitive_aliasing). */
2756 static bool
2757 may_alias_p (tree ptr, alias_set_type mem_alias_set,
2758 tree var, alias_set_type var_alias_set,
2759 bool alias_set_only)
2761 tree mem;
2763 alias_stats.alias_queries++;
2764 alias_stats.simple_queries++;
2766 /* By convention, a variable cannot alias itself. */
2767 mem = symbol_mem_tag (ptr);
2768 if (mem == var)
2770 alias_stats.alias_noalias++;
2771 alias_stats.simple_resolved++;
2772 return false;
2775 /* If -fargument-noalias-global is > 2, pointer arguments may
2776 not point to anything else. */
2777 if (flag_argument_noalias > 2 && TREE_CODE (ptr) == PARM_DECL)
2779 alias_stats.alias_noalias++;
2780 alias_stats.simple_resolved++;
2781 return false;
2784 /* If -fargument-noalias-global is > 1, pointer arguments may
2785 not point to global variables. */
2786 if (flag_argument_noalias > 1 && is_global_var (var)
2787 && TREE_CODE (ptr) == PARM_DECL)
2789 alias_stats.alias_noalias++;
2790 alias_stats.simple_resolved++;
2791 return false;
2794 /* If either MEM or VAR is a read-only global and the other one
2795 isn't, then PTR cannot point to VAR. */
2796 if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
2797 || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
2799 alias_stats.alias_noalias++;
2800 alias_stats.simple_resolved++;
2801 return false;
2804 gcc_assert (TREE_CODE (mem) == SYMBOL_MEMORY_TAG);
/* Type-based disambiguation only applies when PTR is not marked as
   exempt from TBAA.  */
2806 if (!DECL_NO_TBAA_P (ptr))
2808 alias_stats.tbaa_queries++;
2810 /* If the alias sets don't conflict then MEM cannot alias VAR. */
2811 if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
2813 alias_stats.alias_noalias++;
2814 alias_stats.tbaa_resolved++;
2815 return false;
2818 /* If VAR is a record or union type, PTR cannot point into VAR
2819 unless there is some explicit address operation in the
2820 program that can reference a field of the type pointed-to by
2821 PTR. This also assumes that the types of both VAR and PTR
2822 are contained within the compilation unit, and that there is
2823 no fancy addressing arithmetic associated with any of the
2824 types involved. */
2825 if (mem_alias_set != 0 && var_alias_set != 0)
2827 tree ptr_type = TREE_TYPE (ptr);
2828 tree var_type = TREE_TYPE (var);
2830 /* The star count is -1 if the type at the end of the
2831 pointer_to chain is not a record or union type. */
2832 if ((!alias_set_only) &&
2833 ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
2835 int ptr_star_count = 0;
2837 /* ipa_type_escape_star_count_of_interesting_type is a
2838 little too restrictive for the pointer type, need to
2839 allow pointers to primitive types as long as those
2840 types cannot be pointers to everything. */
2841 while (POINTER_TYPE_P (ptr_type))
2843 /* Strip the *s off. */
2844 ptr_type = TREE_TYPE (ptr_type);
2845 ptr_star_count++;
2848 /* There does not appear to be a better test to see if
2849 the pointer type was one of the pointer to everything
2850 types. */
2851 if (ptr_star_count > 0)
2853 alias_stats.structnoaddress_queries++;
2854 if (ipa_type_escape_field_does_not_clobber_p (var_type,
2855 TREE_TYPE (ptr)))
2857 alias_stats.structnoaddress_resolved++;
2858 alias_stats.alias_noalias++;
2859 return false;
2862 else if (ptr_star_count == 0)
2864 /* If PTR_TYPE was not really a pointer to type, it cannot
2865 alias. */
2866 alias_stats.structnoaddress_queries++;
2867 alias_stats.structnoaddress_resolved++;
2868 alias_stats.alias_noalias++;
2869 return false;
/* Nothing above could disambiguate: conservatively report a
   possible alias.  */
2875 alias_stats.alias_mayalias++;
2876 return true;
2880 /* Add ALIAS to the set of variables that may alias VAR. */
2882 static void
2883 add_may_alias (tree var, tree alias)
2885 /* Don't allow self-referential aliases. */
2886 gcc_assert (var != alias);
2888 /* ALIAS must be addressable if it's being added to an alias set. */
/* NOTE(review): the `#if 1' path forces the flag instead of
   asserting the precondition (the `#else' branch), presumably a
   workaround for callers that add not-yet-addressable symbols --
   confirm before tightening to the assert.  */
2889 #if 1
2890 TREE_ADDRESSABLE (alias) = 1;
2891 #else
2892 gcc_assert (may_be_aliased (alias));
2893 #endif
2895 /* VAR must be a symbol or a name tag. */
2896 gcc_assert (TREE_CODE (var) == SYMBOL_MEMORY_TAG
2897 || TREE_CODE (var) == NAME_MEMORY_TAG);
/* Allocate VAR's may-alias bitmap lazily on the per-pass alias
   obstack, then record ALIAS by its DECL_UID.  */
2899 if (MTAG_ALIASES (var) == NULL)
2900 MTAG_ALIASES (var) = BITMAP_ALLOC (&alias_bitmap_obstack);
2902 bitmap_set_bit (MTAG_ALIASES (var), DECL_UID (alias));
2906 /* Mark pointer PTR as pointing to an arbitrary memory location. */
/* Drops any previously computed points-to set and detaches PTR's
   name memory tag, scheduling the stale tag for renaming so its
   virtual operands are removed from the IL.  */
2908 static void
2909 set_pt_anything (tree ptr)
2911 struct ptr_info_def *pi = get_ptr_info (ptr);
2913 pi->pt_anything = 1;
2914 pi->pt_vars = NULL;
2916 /* The pointer used to have a name tag, but we now found it pointing
2917 to an arbitrary location. The name tag needs to be renamed and
2918 disassociated from PTR. */
2919 if (pi->name_mem_tag)
2921 mark_sym_for_renaming (pi->name_mem_tag);
2922 pi->name_mem_tag = NULL_TREE;
2927 /* Return true if STMT is an "escape" site from the current function. Escape
2928 sites are those statements which might expose the address of a variable
2929 outside the current function. STMT is an escape site iff:
2931 1- STMT is a function call, or
2932 2- STMT is an __asm__ expression, or
2933 3- STMT is an assignment to a non-local variable, or
2934 4- STMT is a return statement.
2936 Return the type of escape site found, if we found one, or NO_ESCAPE
2937 if none. */
2939 enum escape_type
2940 is_escape_site (tree stmt)
2942 tree call = get_call_expr_in (stmt);
2943 if (call != NULL_TREE)
/* A call without side effects (pure/const) is distinguished so
   callers can treat it more optimistically.  */
2945 if (!TREE_SIDE_EFFECTS (call))
2946 return ESCAPE_TO_PURE_CONST;
2948 return ESCAPE_TO_CALL;
2950 else if (TREE_CODE (stmt) == ASM_EXPR)
2951 return ESCAPE_TO_ASM;
2952 else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
2954 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
2956 /* Get to the base of _REF nodes. */
2957 if (TREE_CODE (lhs) != SSA_NAME)
2958 lhs = get_base_address (lhs);
2960 /* If we couldn't recognize the LHS of the assignment, assume that it
2961 is a non-local store. */
2962 if (lhs == NULL_TREE)
2963 return ESCAPE_UNKNOWN;
2965 if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == NOP_EXPR
2966 || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == CONVERT_EXPR
2967 || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
2969 tree from
2970 = TREE_TYPE (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0));
2971 tree to = TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 1))
2973 /* If the RHS is a conversion between a pointer and an integer, the
2974 pointer escapes since we can't track the integer. */
2975 if (POINTER_TYPE_P (from) && !POINTER_TYPE_P (to))
2976 return ESCAPE_BAD_CAST;
2978 /* Same if the RHS is a conversion between a regular pointer and a
2979 ref-all pointer since we can't track the SMT of the former. */
2980 if (POINTER_TYPE_P (from) && !TYPE_REF_CAN_ALIAS_ALL (from)
2981 && POINTER_TYPE_P (to) && TYPE_REF_CAN_ALIAS_ALL (to))
2982 return ESCAPE_BAD_CAST;
2985 /* If the LHS is an SSA name, it can't possibly represent a non-local
2986 memory store. */
2987 if (TREE_CODE (lhs) == SSA_NAME)
2988 return NO_ESCAPE;
2990 /* FIXME: LHS is not an SSA_NAME. Even if it's an assignment to a
2991 local variables we cannot be sure if it will escape, because we
2992 don't have information about objects not in SSA form. Need to
2993 implement something along the lines of
2995 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
2996 Midkiff, ``Escape analysis for java,'' in Proceedings of the
2997 Conference on Object-Oriented Programming Systems, Languages, and
2998 Applications (OOPSLA), pp. 1-19, 1999. */
2999 return ESCAPE_STORED_IN_GLOBAL;
3001 else if (TREE_CODE (stmt) == RETURN_EXPR)
3002 return ESCAPE_TO_RETURN;
3004 return NO_ESCAPE;
3007 /* Create a new memory tag of type TYPE.
3008 Does NOT push it into the current binding. */
/* CODE selects the tag kind (e.g. SYMBOL_MEMORY_TAG or
   NAME_MEMORY_TAG); PREFIX seeds the generated temporary name.
   The caller is responsible for registering the tag (see
   create_memory_tag).  */
3010 tree
3011 create_tag_raw (enum tree_code code, tree type, const char *prefix)
3013 tree tmp_var;
3015 tmp_var = build_decl (code, create_tmp_var_name (prefix), type);
3017 /* Make the variable writable. */
3018 TREE_READONLY (tmp_var) = 0;
3020 /* It doesn't start out global. */
3021 MTAG_GLOBAL (tmp_var) = 0;
3022 TREE_STATIC (tmp_var) = 0;
3023 TREE_USED (tmp_var) = 1;
3025 return tmp_var;
3028 /* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
3029 is considered to represent all the pointers whose pointed-to types are
3030 in the same alias set class. Otherwise, the tag represents a single
3031 SSA_NAME pointer variable. */
3033 static tree
3034 create_memory_tag (tree type, bool is_type_tag)
/* "SMT" = symbol memory tag, "NMT" = name memory tag; the prefix is
   only used to build the artificial decl's name.  */
3036 tree tag = create_tag_raw (is_type_tag ? SYMBOL_MEMORY_TAG : NAME_MEMORY_TAG,
3037 type, (is_type_tag) ? "SMT" : "NMT");
3039 /* By default, memory tags are local variables. Alias analysis will
3040 determine whether they should be considered globals. */
3041 DECL_CONTEXT (tag) = current_function_decl;
3043 /* Memory tags are by definition addressable. */
3044 TREE_ADDRESSABLE (tag) = 1;
3046 set_symbol_mem_tag (tag, NULL_TREE);
3048 /* Add the tag to the symbol table. */
3049 add_referenced_var (tag);
3051 return tag;
3055 /* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
3056 This is used if P_i has been found to point to a specific set of
3057 variables or to a non-aliased memory location like the address returned
3058 by malloc functions. */
/* Note that a freshly created tag is NOT stored back into
   pi->name_mem_tag here; the caller (create_name_tags) records it.  */
3060 static tree
3061 get_nmt_for (tree ptr)
3063 struct ptr_info_def *pi = get_ptr_info (ptr);
3064 tree tag = pi->name_mem_tag;
3066 if (tag == NULL_TREE)
3067 tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
3068 return tag;
3072 /* Return the symbol memory tag associated to pointer PTR. A memory
3073 tag is an artificial variable that represents the memory location
3074 pointed-to by PTR. It is used to model the effects of pointer
3075 de-references on addressable variables.
3077 AI points to the data gathered during alias analysis. This
3078 function populates the array AI->POINTERS. */
3080 static tree
3081 get_smt_for (tree ptr, struct alias_info *ai)
3083 size_t i;
3084 tree tag;
3085 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
3086 alias_set_type tag_set = get_alias_set (tag_type);
3088 /* We use a unique memory tag for all the ref-all pointers. */
3089 if (PTR_IS_REF_ALL (ptr))
3091 if (!ai->ref_all_symbol_mem_tag)
3092 ai->ref_all_symbol_mem_tag = create_memory_tag (void_type_node, true);
3093 return ai->ref_all_symbol_mem_tag;
3096 /* To avoid creating unnecessary memory tags, only create one memory tag
3097 per alias set class. Note that it may be tempting to group
3098 memory tags based on conflicting alias sets instead of
3099 equivalence. That would be wrong because alias sets are not
3100 necessarily transitive (as demonstrated by the libstdc++ test
3101 23_containers/vector/cons/4.cc). Given three alias sets A, B, C
3102 such that conflicts (A, B) == true and conflicts (A, C) == true,
3103 it does not necessarily follow that conflicts (B, C) == true. */
3104 for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
3106 struct alias_map_d *curr = ai->pointers[i];
3107 tree curr_tag = symbol_mem_tag (curr->var);
3108 if (tag_set == curr->set)
3110 tag = curr_tag;
3111 break;
3115 /* If VAR cannot alias with any of the existing memory tags, create a new
3116 tag for PTR and add it to the POINTERS array. */
3117 if (tag == NULL_TREE)
3119 struct alias_map_d *alias_map;
3121 /* If PTR did not have a symbol tag already, create a new SMT.*
3122 artificial variable representing the memory location
3123 pointed-to by PTR. */
3124 tag = symbol_mem_tag (ptr);
3125 if (tag == NULL_TREE)
3126 tag = create_memory_tag (tag_type, true);
3128 /* Add PTR to the POINTERS array. Note that we are not interested in
3129 PTR's alias set. Instead, we cache the alias set for the memory that
3130 PTR points to. */
3131 alias_map = XCNEW (struct alias_map_d);
3132 alias_map->var = ptr;
3133 alias_map->set = tag_set;
3134 ai->pointers[ai->num_pointers++] = alias_map;
3137 /* If the pointed-to type is volatile, so is the tag. */
3138 TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);
3140 /* Make sure that the symbol tag has the same alias set as the
3141 pointed-to type. */
3142 gcc_assert (tag_set == get_alias_set (tag));
3144 return tag;
3148 /* Create GLOBAL_VAR, an artificial global variable to act as a
3149 representative of all the variables that may be clobbered by function
3150 calls. */
3152 static void
3153 create_global_var (void)
3155 tree global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
3156 void_type_node);
3157 DECL_ARTIFICIAL (global_var) = 1;
3158 TREE_READONLY (global_var) = 0;
3159 DECL_EXTERNAL (global_var) = 1;
3160 TREE_STATIC (global_var) = 1;
3161 TREE_USED (global_var) = 1;
3162 DECL_CONTEXT (global_var) = NULL_TREE;
3163 TREE_THIS_VOLATILE (global_var) = 0;
3164 TREE_ADDRESSABLE (global_var) = 0;
3166 create_var_ann (global_var);
3167 mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
3168 add_referenced_var (global_var);
3169 mark_sym_for_renaming (global_var);
3170 cfun->gimple_df->global_var = global_var;
3174 /* Dump alias statistics on FILE. */
3176 static void
3177 dump_alias_stats (FILE *file)
3179 const char *funcname
3180 = lang_hooks.decl_printable_name (current_function_decl, 2);
3181 fprintf (file, "\nAlias statistics for %s\n\n", funcname);
3182 fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
3183 fprintf (file, "Total alias mayalias results:\t%u\n",
3184 alias_stats.alias_mayalias);
3185 fprintf (file, "Total alias noalias results:\t%u\n",
3186 alias_stats.alias_noalias);
3187 fprintf (file, "Total simple queries:\t%u\n",
3188 alias_stats.simple_queries);
3189 fprintf (file, "Total simple resolved:\t%u\n",
3190 alias_stats.simple_resolved);
3191 fprintf (file, "Total TBAA queries:\t%u\n",
3192 alias_stats.tbaa_queries);
3193 fprintf (file, "Total TBAA resolved:\t%u\n",
3194 alias_stats.tbaa_resolved);
3195 fprintf (file, "Total non-addressable structure type queries:\t%u\n",
3196 alias_stats.structnoaddress_queries);
3197 fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
3198 alias_stats.structnoaddress_resolved);
3202 /* Dump alias information on FILE. */
3204 void
3205 dump_alias_info (FILE *file)
3207 size_t i;
3208 const char *funcname
3209 = lang_hooks.decl_printable_name (current_function_decl, 2);
3210 referenced_var_iterator rvi;
3211 tree var;
3213 fprintf (file, "\nAlias information for %s\n\n", funcname);
3215 dump_memory_partitions (file);
3217 fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
3219 fprintf (file, "Aliased symbols\n\n");
3221 FOR_EACH_REFERENCED_VAR (var, rvi)
3223 if (may_be_aliased (var))
3224 dump_variable (file, var);
3227 fprintf (file, "\nDereferenced pointers\n\n");
3229 FOR_EACH_REFERENCED_VAR (var, rvi)
3230 if (symbol_mem_tag (var))
3231 dump_variable (file, var);
3233 fprintf (file, "\nSymbol memory tags\n\n");
3235 FOR_EACH_REFERENCED_VAR (var, rvi)
3237 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
3238 dump_variable (file, var);
3241 fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
3243 fprintf (file, "SSA_NAME pointers\n\n");
3244 for (i = 1; i < num_ssa_names; i++)
3246 tree ptr = ssa_name (i);
3247 struct ptr_info_def *pi;
3249 if (ptr == NULL_TREE)
3250 continue;
3252 pi = SSA_NAME_PTR_INFO (ptr);
3253 if (!SSA_NAME_IN_FREE_LIST (ptr)
3254 && pi
3255 && pi->name_mem_tag)
3256 dump_points_to_info_for (file, ptr);
3259 fprintf (file, "\nName memory tags\n\n");
3261 FOR_EACH_REFERENCED_VAR (var, rvi)
3263 if (TREE_CODE (var) == NAME_MEMORY_TAG)
3264 dump_variable (file, var);
3267 fprintf (file, "\n");
3271 /* Dump alias information on stderr. */
3273 void
3274 debug_alias_info (void)
3276 dump_alias_info (stderr);
3280 /* Return the alias information associated with pointer T. It creates a
3281 new instance if none existed. */
3283 struct ptr_info_def *
3284 get_ptr_info (tree t)
3286 struct ptr_info_def *pi;
3288 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
3290 pi = SSA_NAME_PTR_INFO (t);
3291 if (pi == NULL)
3293 pi = GGC_CNEW (struct ptr_info_def);
3294 SSA_NAME_PTR_INFO (t) = pi;
3297 return pi;
3301 /* Dump points-to information for SSA_NAME PTR into FILE. */
3303 void
3304 dump_points_to_info_for (FILE *file, tree ptr)
3306 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3308 print_generic_expr (file, ptr, dump_flags);
3310 if (pi)
3312 if (pi->name_mem_tag)
3314 fprintf (file, ", name memory tag: ");
3315 print_generic_expr (file, pi->name_mem_tag, dump_flags);
3318 if (pi->is_dereferenced)
3319 fprintf (file, ", is dereferenced");
3321 if (pi->value_escapes_p)
3322 fprintf (file, ", its value escapes");
3324 if (pi->pt_anything)
3325 fprintf (file, ", points-to anything");
3327 if (pi->pt_null)
3328 fprintf (file, ", points-to NULL");
3330 if (pi->pt_vars)
3332 fprintf (file, ", points-to vars: ");
3333 dump_decl_set (file, pi->pt_vars);
3337 fprintf (file, "\n");
3341 /* Dump points-to information for VAR into stderr. */
3343 void
3344 debug_points_to_info_for (tree var)
3346 dump_points_to_info_for (stderr, var);
3350 /* Dump points-to information into FILE. NOTE: This function is slow, as
3351 it needs to traverse the whole CFG looking for pointer SSA_NAMEs. */
3353 void
3354 dump_points_to_info (FILE *file)
3356 basic_block bb;
3357 block_stmt_iterator si;
3358 ssa_op_iter iter;
3359 const char *fname =
3360 lang_hooks.decl_printable_name (current_function_decl, 2);
3361 referenced_var_iterator rvi;
3362 tree var;
3364 fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
3366 /* First dump points-to information for the default definitions of
3367 pointer variables. This is necessary because default definitions are
3368 not part of the code. */
3369 FOR_EACH_REFERENCED_VAR (var, rvi)
3371 if (POINTER_TYPE_P (TREE_TYPE (var)))
3373 tree def = gimple_default_def (cfun, var);
3374 if (def)
3375 dump_points_to_info_for (file, def);
3379 /* Dump points-to information for every pointer defined in the program. */
3380 FOR_EACH_BB (bb)
3382 tree phi;
3384 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3386 tree ptr = PHI_RESULT (phi);
3387 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
3388 dump_points_to_info_for (file, ptr);
3391 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
3393 tree stmt = bsi_stmt (si);
3394 tree def;
3395 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
3396 if (TREE_CODE (def) == SSA_NAME
3397 && POINTER_TYPE_P (TREE_TYPE (def)))
3398 dump_points_to_info_for (file, def);
3402 fprintf (file, "\n");
3406 /* Dump points-to info pointed to by PTO into STDERR. */
3408 void
3409 debug_points_to_info (void)
3411 dump_points_to_info (stderr);
3414 /* Dump to FILE the list of variables that may be aliasing VAR. */
3416 void
3417 dump_may_aliases_for (FILE *file, tree var)
3419 bitmap aliases;
3421 aliases = MTAG_ALIASES (var);
3422 if (aliases)
3424 bitmap_iterator bi;
3425 unsigned int i;
3426 tree al;
3428 fprintf (file, "{ ");
3429 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
3431 al = referenced_var (i);
3432 print_generic_expr (file, al, dump_flags);
3433 fprintf (file, " ");
3435 fprintf (file, "}");
3440 /* Dump to stderr the list of variables that may be aliasing VAR. */
3442 void
3443 debug_may_aliases_for (tree var)
3445 dump_may_aliases_for (stderr, var);
3449 /* Return true if VAR may be aliased. */
3451 bool
3452 may_be_aliased (tree var)
3454 /* Obviously. */
3455 if (TREE_ADDRESSABLE (var))
3456 return true;
3458 /* Globally visible variables can have their addresses taken by other
3459 translation units. */
3460 if (MTAG_P (var)
3461 && (MTAG_GLOBAL (var) || TREE_PUBLIC (var)))
3462 return true;
3463 else if (!MTAG_P (var)
3464 && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
3465 return true;
3467 /* Automatic variables can't have their addresses escape any other
3468 way. This must be after the check for global variables, as
3469 extern declarations do not have TREE_STATIC set. */
3470 if (!TREE_STATIC (var))
3471 return false;
3473 /* If we're in unit-at-a-time mode, then we must have seen all
3474 occurrences of address-of operators, and so we can trust
3475 TREE_ADDRESSABLE. Otherwise we can only be sure the variable
3476 isn't addressable if it's local to the current function. */
3477 if (flag_unit_at_a_time)
3478 return false;
3480 if (decl_function_context (var) == current_function_decl)
3481 return false;
3483 return true;
3486 /* The following is based on code in add_stmt_operand to ensure that the
3487 same defs/uses/vdefs/vuses will be found after replacing a reference
3488 to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
3489 is the address of var. Return a memtag for the ptr, after adding the
3490 proper may_aliases to it (which are the aliases of var, if it has any,
3491 or var itself). */
3493 static tree
3494 add_may_alias_for_new_tag (tree tag, tree var)
3496 bitmap aliases = NULL;
3498 if (MTAG_P (var))
3499 aliases = may_aliases (var);
3501 /* Case 1: |aliases| == 1 */
3502 if (aliases && bitmap_count_bits (aliases) == 1)
3504 tree ali = referenced_var (bitmap_first_set_bit (aliases));
3505 if (TREE_CODE (ali) == SYMBOL_MEMORY_TAG)
3506 return ali;
3509 /* Case 2: |aliases| == 0 */
3510 if (aliases == NULL)
3511 add_may_alias (tag, var);
3512 else
3514 /* Case 3: |aliases| > 1 */
3515 union_alias_set_into (tag, aliases);
3517 return tag;
3520 /* Create a new symbol tag for PTR. Construct the may-alias list of this type
3521 tag so that it has the aliasing of VAR, or of the relevant subvars of VAR
3522 according to the location accessed by EXPR.
3524 Note, the set of aliases represented by the new symbol tag are not marked
3525 for renaming. */
3527 void
3528 new_type_alias (tree ptr, tree var, tree expr)
3530 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
3531 tree tag;
3532 subvar_t svars;
3533 tree ali = NULL_TREE;
3534 HOST_WIDE_INT offset, size, maxsize;
3535 tree ref;
3536 VEC (tree, heap) *overlaps = NULL;
3537 subvar_t sv;
3538 unsigned int len;
3540 gcc_assert (symbol_mem_tag (ptr) == NULL_TREE);
3541 gcc_assert (!MTAG_P (var));
3543 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
3544 gcc_assert (ref);
3546 tag = create_memory_tag (tag_type, true);
3547 set_symbol_mem_tag (ptr, tag);
3549 /* Add VAR to the may-alias set of PTR's new symbol tag. If VAR has
3550 subvars, add the subvars to the tag instead of the actual var. */
3551 if (var_can_have_subvars (ref)
3552 && (svars = get_subvars_for_var (ref)))
3554 for (sv = svars; sv; sv = sv->next)
3556 bool exact;
3558 if (overlap_subvar (offset, maxsize, sv->var, &exact))
3559 VEC_safe_push (tree, heap, overlaps, sv->var);
3561 gcc_assert (overlaps != NULL);
3563 else if (var_can_have_subvars (var)
3564 && (svars = get_subvars_for_var (var)))
3566 /* If the REF is not a direct access to VAR (e.g., it is a dereference
3567 of a pointer), we should scan the virtual operands of REF the same
3568 way as tree-ssa-operands do. At the moment, this is somewhat
3569 difficult, so we just give up and add all the subvars of VAR.
3570 On mem-ssa branch, the scanning for virtual operands have been
3571 split from the rest of tree-ssa-operands, so it should be much
3572 easier to fix this problem correctly once mem-ssa is merged. */
3573 for (sv = svars; sv; sv = sv->next)
3574 VEC_safe_push (tree, heap, overlaps, sv->var);
3576 gcc_assert (overlaps != NULL);
3578 else
3579 ali = add_may_alias_for_new_tag (tag, var);
3581 len = VEC_length (tree, overlaps);
3582 if (len > 0)
3584 if (dump_file && (dump_flags & TDF_DETAILS))
3585 fprintf (dump_file, "\nnumber of overlapping subvars = %u\n", len);
3587 if (len == 1)
3588 ali = add_may_alias_for_new_tag (tag, VEC_index (tree, overlaps, 0));
3589 else if (len > 1)
3591 unsigned int k;
3592 tree sv_var;
3594 for (k = 0; VEC_iterate (tree, overlaps, k, sv_var); k++)
3596 ali = add_may_alias_for_new_tag (tag, sv_var);
3598 if (ali != tag)
3600 /* Can happen only if 'Case 1' of add_may_alias_for_new_tag
3601 took place. Since more than one svar was found, we add
3602 'ali' as one of the may_aliases of the new tag. */
3603 add_may_alias (tag, ali);
3604 ali = tag;
3608 VEC_free (tree, heap, overlaps);
3611 set_symbol_mem_tag (ptr, ali);
3612 TREE_READONLY (tag) = TREE_READONLY (var);
3613 MTAG_GLOBAL (tag) = is_global_var (var);
3616 /* This represents the used range of a variable. */
3618 typedef struct used_part
3620 HOST_WIDE_INT minused;
3621 HOST_WIDE_INT maxused;
3622 /* True if we have an explicit use/def of some portion of this variable,
3623 even if it is all of it. i.e. a.b = 5 or temp = a.b. */
3624 bool explicit_uses;
3625 /* True if we have an implicit use/def of some portion of this
3626 variable. Implicit uses occur when we can't tell what part we
3627 are referencing, and have to make conservative assumptions. */
3628 bool implicit_uses;
3629 /* True if the structure is only written to or taken its address. */
3630 bool write_only;
3631 } *used_part_t;
3633 /* An array of used_part structures, indexed by variable uid. */
3635 static htab_t used_portions;
3637 struct used_part_map
3639 unsigned int uid;
3640 used_part_t to;
3643 /* Return true if the uid in the two used part maps are equal. */
3645 static int
3646 used_part_map_eq (const void *va, const void *vb)
3648 const struct used_part_map *a = (const struct used_part_map *) va;
3649 const struct used_part_map *b = (const struct used_part_map *) vb;
3650 return (a->uid == b->uid);
3653 /* Hash a from uid in a used_part_map. */
3655 static unsigned int
3656 used_part_map_hash (const void *item)
3658 return ((const struct used_part_map *)item)->uid;
3661 /* Free a used part map element. */
3663 static void
3664 free_used_part_map (void *item)
3666 free (((struct used_part_map *)item)->to);
3667 free (item);
3670 /* Lookup a used_part structure for a UID. */
3672 static used_part_t
3673 up_lookup (unsigned int uid)
3675 struct used_part_map *h, in;
3676 in.uid = uid;
3677 h = (struct used_part_map *) htab_find_with_hash (used_portions, &in, uid);
3678 if (!h)
3679 return NULL;
3680 return h->to;
3683 /* Insert the pair UID, TO into the used part hashtable. */
3685 static void
3686 up_insert (unsigned int uid, used_part_t to)
3688 struct used_part_map *h;
3689 void **loc;
3691 h = XNEW (struct used_part_map);
3692 h->uid = uid;
3693 h->to = to;
3694 loc = htab_find_slot_with_hash (used_portions, h,
3695 uid, INSERT);
3696 if (*loc != NULL)
3697 free (*loc);
3698 *(struct used_part_map **) loc = h;
3702 /* Given a variable uid, UID, get or create the entry in the used portions
3703 table for the variable. */
3705 static used_part_t
3706 get_or_create_used_part_for (size_t uid)
3708 used_part_t up;
3709 if ((up = up_lookup (uid)) == NULL)
3711 up = XCNEW (struct used_part);
3712 up->minused = INT_MAX;
3713 up->maxused = 0;
3714 up->explicit_uses = false;
3715 up->implicit_uses = false;
3716 up->write_only = true;
3719 return up;
3723 /* Create and return a structure sub-variable for field type FIELD at
3724 offset OFFSET, with size SIZE, of variable VAR. If ALIAS_SET not
3725 -1 this field is non-addressable and we should use this alias set
3726 with this field. */
3728 static tree
3729 create_sft (tree var, tree field, unsigned HOST_WIDE_INT offset,
3730 unsigned HOST_WIDE_INT size, alias_set_type alias_set)
3732 tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");
3734 /* We need to copy the various flags from VAR to SUBVAR, so that
3735 they are is_global_var iff the original variable was. */
3736 DECL_CONTEXT (subvar) = DECL_CONTEXT (var);
3737 MTAG_GLOBAL (subvar) = DECL_EXTERNAL (var);
3738 TREE_PUBLIC (subvar) = TREE_PUBLIC (var);
3739 TREE_STATIC (subvar) = TREE_STATIC (var);
3740 TREE_READONLY (subvar) = TREE_READONLY (var);
3741 TREE_ADDRESSABLE (subvar) = TREE_ADDRESSABLE (var);
3743 /* Add the new variable to REFERENCED_VARS. */
3744 set_symbol_mem_tag (subvar, NULL);
3745 add_referenced_var (subvar);
3746 SFT_PARENT_VAR (subvar) = var;
3747 SFT_OFFSET (subvar) = offset;
3748 SFT_SIZE (subvar) = size;
3749 SFT_ALIAS_SET (subvar) = alias_set;
3750 return subvar;
3754 /* Given an aggregate VAR, create the subvariables that represent its
3755 fields. */
3757 static void
3758 create_overlap_variables_for (tree var)
3760 VEC(fieldoff_s,heap) *fieldstack = NULL;
3761 used_part_t up;
3762 size_t uid = DECL_UID (var);
3764 up = up_lookup (uid);
3765 if (!up
3766 || up->write_only)
3767 return;
3769 push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL,
3770 TREE_TYPE (var));
3771 if (VEC_length (fieldoff_s, fieldstack) != 0)
3773 subvar_t *subvars;
3774 fieldoff_s *fo;
3775 bool notokay = false;
3776 int fieldcount = 0;
3777 int i;
3778 HOST_WIDE_INT lastfooffset = -1;
3779 HOST_WIDE_INT lastfosize = -1;
3780 tree lastfotype = NULL_TREE;
3782 /* Not all fields have DECL_SIZE set, and those that don't, we don't
3783 know their size, and thus, can't handle.
3784 The same is true of fields with DECL_SIZE that is not an integer
3785 constant (such as variable sized fields).
3786 Fields with offsets which are not constant will have an offset < 0
3787 We *could* handle fields that are constant sized arrays, but
3788 currently don't. Doing so would require some extra changes to
3789 tree-ssa-operands.c. */
3791 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
3793 if (!fo->size
3794 || TREE_CODE (fo->size) != INTEGER_CST
3795 || fo->offset < 0)
3797 notokay = true;
3798 break;
3800 fieldcount++;
3803 /* The current heuristic we use is as follows:
3804 If the variable has no used portions in this function, no
3805 structure vars are created for it.
3806 Otherwise,
3807 If the variable has less than SALIAS_MAX_IMPLICIT_FIELDS,
3808 we always create structure vars for them.
3809 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
3810 some explicit uses, we create structure vars for them.
3811 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
3812 no explicit uses, we do not create structure vars for them.
3815 if (fieldcount >= SALIAS_MAX_IMPLICIT_FIELDS
3816 && !up->explicit_uses)
3818 if (dump_file && (dump_flags & TDF_DETAILS))
3820 fprintf (dump_file, "Variable ");
3821 print_generic_expr (dump_file, var, 0);
3822 fprintf (dump_file, " has no explicit uses in this function, and is > SALIAS_MAX_IMPLICIT_FIELDS, so skipping\n");
3824 notokay = true;
3827 /* Bail out, if we can't create overlap variables. */
3828 if (notokay)
3830 VEC_free (fieldoff_s, heap, fieldstack);
3831 return;
3834 /* Otherwise, create the variables. */
3835 subvars = lookup_subvars_for_var (var);
3837 sort_fieldstack (fieldstack);
3839 for (i = VEC_length (fieldoff_s, fieldstack);
3840 VEC_iterate (fieldoff_s, fieldstack, --i, fo);)
3842 subvar_t sv;
3843 HOST_WIDE_INT fosize;
3844 tree currfotype;
3846 fosize = TREE_INT_CST_LOW (fo->size);
3847 currfotype = fo->type;
3849 /* If this field isn't in the used portion,
3850 or it has the exact same offset and size as the last
3851 field, skip it. Note that we always need the field at
3852 offset 0 so we can properly handle pointers to the
3853 structure. */
3855 if ((fo->offset != 0
3856 && ((fo->offset <= up->minused
3857 && fo->offset + fosize <= up->minused)
3858 || fo->offset >= up->maxused))
3859 || (fo->offset == lastfooffset
3860 && fosize == lastfosize
3861 && currfotype == lastfotype))
3862 continue;
3863 sv = GGC_NEW (struct subvar);
3864 sv->next = *subvars;
3865 sv->var =
3866 create_sft (var, fo->type, fo->offset, fosize, fo->alias_set);
3868 if (dump_file)
3870 fprintf (dump_file, "structure field tag %s created for var %s",
3871 get_name (sv->var), get_name (var));
3872 fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
3873 SFT_OFFSET (sv->var));
3874 fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
3875 SFT_SIZE (sv->var));
3876 fprintf (dump_file, "\n");
3879 lastfotype = currfotype;
3880 lastfooffset = fo->offset;
3881 lastfosize = fosize;
3882 *subvars = sv;
3885 /* Once we have created subvars, the original is no longer call
3886 clobbered on its own. Its call clobbered status depends
3887 completely on the call clobbered status of the subvars.
3889 add_referenced_var in the above loop will take care of
3890 marking subvars of global variables as call clobbered for us
3891 to start, since they are global as well. */
3892 clear_call_clobbered (var);
3895 VEC_free (fieldoff_s, heap, fieldstack);
3899 /* Find the conservative answer to the question of what portions of what
3900 structures are used by this statement. We assume that if we have a
3901 component ref with a known size + offset, that we only need that part
3902 of the structure. For unknown cases, or cases where we do something
3903 to the whole structure, we assume we need to create fields for the
3904 entire structure. */
3906 static tree
3907 find_used_portions (tree *tp, int *walk_subtrees, void *lhs_p)
3909 switch (TREE_CODE (*tp))
3911 case GIMPLE_MODIFY_STMT:
3912 /* Recurse manually here to track whether the use is in the
3913 LHS of an assignment. */
3914 find_used_portions (&GIMPLE_STMT_OPERAND (*tp, 0), walk_subtrees, tp);
3915 return find_used_portions (&GIMPLE_STMT_OPERAND (*tp, 1),
3916 walk_subtrees, NULL);
3917 case REALPART_EXPR:
3918 case IMAGPART_EXPR:
3919 case COMPONENT_REF:
3920 case ARRAY_REF:
3922 HOST_WIDE_INT bitsize;
3923 HOST_WIDE_INT bitmaxsize;
3924 HOST_WIDE_INT bitpos;
3925 tree ref;
3926 ref = get_ref_base_and_extent (*tp, &bitpos, &bitsize, &bitmaxsize);
3927 if (DECL_P (ref)
3928 && var_can_have_subvars (ref)
3929 && bitmaxsize != -1)
3931 size_t uid = DECL_UID (ref);
3932 used_part_t up;
3934 up = get_or_create_used_part_for (uid);
3936 if (bitpos <= up->minused)
3937 up->minused = bitpos;
3938 if ((bitpos + bitmaxsize >= up->maxused))
3939 up->maxused = bitpos + bitmaxsize;
3941 if (bitsize == bitmaxsize)
3942 up->explicit_uses = true;
3943 else
3944 up->implicit_uses = true;
3945 if (!lhs_p)
3946 up->write_only = false;
3947 up_insert (uid, up);
3949 *walk_subtrees = 0;
3950 return NULL_TREE;
3953 break;
3954 /* This is here to make sure we mark the entire base variable as used
3955 when you take its address. Because our used portion analysis is
3956 simple, we aren't looking at casts or pointer arithmetic to see what
3957 happens when you take the address. */
3958 case ADDR_EXPR:
3960 tree var = get_base_address (TREE_OPERAND (*tp, 0));
3962 if (var
3963 && DECL_P (var)
3964 && DECL_SIZE (var)
3965 && var_can_have_subvars (var)
3966 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3968 used_part_t up;
3969 size_t uid = DECL_UID (var);
3971 up = get_or_create_used_part_for (uid);
3973 up->minused = 0;
3974 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3975 up->implicit_uses = true;
3976 if (!lhs_p)
3977 up->write_only = false;
3979 up_insert (uid, up);
3980 *walk_subtrees = 0;
3981 return NULL_TREE;
3984 break;
3985 case CALL_EXPR:
3987 int i;
3988 int nargs = call_expr_nargs (*tp);
3989 for (i = 0; i < nargs; i++)
3991 tree *arg = &CALL_EXPR_ARG (*tp, i);
3992 if (TREE_CODE (*arg) == ADDR_EXPR)
3993 find_used_portions (arg, walk_subtrees, NULL);
3995 *walk_subtrees = 0;
3996 return NULL_TREE;
3998 case VAR_DECL:
3999 case PARM_DECL:
4000 case RESULT_DECL:
4002 tree var = *tp;
4003 if (DECL_SIZE (var)
4004 && var_can_have_subvars (var)
4005 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
4007 used_part_t up;
4008 size_t uid = DECL_UID (var);
4010 up = get_or_create_used_part_for (uid);
4012 up->minused = 0;
4013 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
4014 up->implicit_uses = true;
4016 up_insert (uid, up);
4017 *walk_subtrees = 0;
4018 return NULL_TREE;
4021 break;
4023 default:
4024 break;
4027 return NULL_TREE;
4030 /* Create structure field variables for structures used in this function. */
4032 static unsigned int
4033 create_structure_vars (void)
4035 basic_block bb;
4036 safe_referenced_var_iterator rvi;
4037 VEC (tree, heap) *varvec = NULL;
4038 tree var;
4040 used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
4041 free_used_part_map);
4043 FOR_EACH_BB (bb)
4045 block_stmt_iterator bsi;
4046 tree phi;
4048 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4050 use_operand_p use;
4051 ssa_op_iter iter;
4053 FOR_EACH_PHI_ARG (use, phi, iter, SSA_OP_USE)
4055 tree op = USE_FROM_PTR (use);
4056 walk_tree_without_duplicates (&op, find_used_portions,
4057 NULL);
4061 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4063 walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
4064 find_used_portions,
4065 NULL);
4068 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, rvi)
4070 /* The C++ FE creates vars without DECL_SIZE set, for some reason. */
4071 if (var
4072 && DECL_SIZE (var)
4073 && var_can_have_subvars (var)
4074 && !MTAG_P (var)
4075 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
4076 create_overlap_variables_for (var);
4078 htab_delete (used_portions);
4079 VEC_free (tree, heap, varvec);
4081 /* Update SSA operands of statements mentioning variables we split. */
4082 if (gimple_in_ssa_p (cfun))
4083 FOR_EACH_BB (bb)
4085 block_stmt_iterator bsi;
4086 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4088 tree stmt = bsi_stmt (bsi);
4089 bool update = false;
4090 unsigned int i;
4091 bitmap_iterator bi;
4093 if (STORED_SYMS (stmt))
4094 EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
4096 tree sym = referenced_var_lookup (i);
4097 if (get_subvars_for_var (sym))
4099 update = true;
4100 break;
4104 if (LOADED_SYMS (stmt) && !update)
4105 EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt), 0, i, bi)
4107 tree sym = referenced_var_lookup (i);
4108 if (get_subvars_for_var (sym))
4110 update = true;
4111 break;
4115 if (stmt_ann (stmt)->addresses_taken && !update)
4116 EXECUTE_IF_SET_IN_BITMAP (stmt_ann (stmt)->addresses_taken,
4117 0, i, bi)
4119 tree sym = referenced_var_lookup (i);
4120 if (get_subvars_for_var (sym))
4122 update = true;
4123 break;
4127 if (update)
4128 update_stmt (stmt);
4132 return TODO_rebuild_alias;
4135 static bool
4136 gate_structure_vars (void)
4138 return flag_tree_salias != 0;
4141 struct tree_opt_pass pass_create_structure_vars =
4143 "salias", /* name */
4144 gate_structure_vars, /* gate */
4145 create_structure_vars, /* execute */
4146 NULL, /* sub */
4147 NULL, /* next */
4148 0, /* static_pass_number */
4149 0, /* tv_id */
4150 PROP_cfg, /* properties_required */
4151 0, /* properties_provided */
4152 0, /* properties_destroyed */
4153 0, /* todo_flags_start */
4154 TODO_dump_func, /* todo_flags_finish */
4155 0 /* letter */
4158 /* Reset the call_clobbered flags on our referenced vars. In
4159 theory, this only needs to be done for globals. */
4161 static unsigned int
4162 reset_cc_flags (void)
4164 tree var;
4165 referenced_var_iterator rvi;
4167 FOR_EACH_REFERENCED_VAR (var, rvi)
4168 var_ann (var)->call_clobbered = false;
4169 return 0;
4172 struct tree_opt_pass pass_reset_cc_flags =
4174 NULL, /* name */
4175 NULL, /* gate */
4176 reset_cc_flags, /* execute */
4177 NULL, /* sub */
4178 NULL, /* next */
4179 0, /* static_pass_number */
4180 0, /* tv_id */
4181 PROP_referenced_vars |PROP_cfg, /* properties_required */
4182 0, /* properties_provided */
4183 0, /* properties_destroyed */
4184 0, /* todo_flags_start */
4185 0, /* todo_flags_finish */
4186 0 /* letter */