gcc/tree-ssa-alias.c
1 /* Alias analysis for trees.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "timevar.h"
32 #include "expr.h"
33 #include "ggc.h"
34 #include "langhooks.h"
35 #include "flags.h"
36 #include "function.h"
37 #include "diagnostic.h"
38 #include "tree-dump.h"
39 #include "tree-gimple.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "tree-ssa-structalias.h"
44 #include "convert.h"
45 #include "params.h"
46 #include "ipa-type-escape.h"
47 #include "vec.h"
48 #include "bitmap.h"
49 #include "vecprim.h"
51 /* Obstack used to hold grouping bitmaps and other temporary bitmaps used by
52    aliasing.  */
53 static bitmap_obstack alias_obstack;
55 /* 'true' after aliases have been computed (see compute_may_aliases). */
56 bool aliases_computed_p;
58 /* Structure to map a variable to its alias set and keep track of the
59 virtual operands that will be needed to represent it. */
60 struct alias_map_d
62 /* Variable and its alias set. */
63 tree var;
64 HOST_WIDE_INT set;
66 /* Total number of virtual operands that will be needed to represent
67 all the aliases of VAR. */
68 long total_alias_vops;
70 /* Nonzero if the aliases for this memory tag have been grouped
71 already. Used in group_aliases. */
72 unsigned int grouped_p : 1;
74 /* Set of variables aliased with VAR. This is the exact same
75 information contained in VAR_ANN (VAR)->MAY_ALIASES, but in
76 bitmap form to speed up alias grouping. */
77 bitmap may_aliases;
81 /* Counters used to display statistics on alias analysis. */
82 struct alias_stats_d
84 unsigned int alias_queries;
85 unsigned int alias_mayalias;
86 unsigned int alias_noalias;
87 unsigned int simple_queries;
88 unsigned int simple_resolved;
89 unsigned int tbaa_queries;
90 unsigned int tbaa_resolved;
91 unsigned int structnoaddress_queries;
92 unsigned int structnoaddress_resolved;
96 /* Local variables. */
97 static struct alias_stats_d alias_stats;
99 /* Local functions. */
100 static void compute_flow_insensitive_aliasing (struct alias_info *);
101 static void finalize_ref_all_pointers (struct alias_info *);
102 static void dump_alias_stats (FILE *);
103 static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT, bool);
104 static tree create_memory_tag (tree type, bool is_type_tag);
105 static tree get_tmt_for (tree, struct alias_info *);
106 static tree get_nmt_for (tree);
107 static void add_may_alias (tree, tree);
108 static void replace_may_alias (tree, size_t, tree);
109 static struct alias_info *init_alias_info (void);
110 static void delete_alias_info (struct alias_info *);
111 static void compute_flow_sensitive_aliasing (struct alias_info *);
112 static void setup_pointers_and_addressables (struct alias_info *);
113 static void create_global_var (void);
114 static void maybe_create_global_var (struct alias_info *ai);
115 static void group_aliases (struct alias_info *);
116 static void set_pt_anything (tree ptr);
118 /* Global declarations. */
120 /* Call clobbered variables in the function. If bit I is set, then
121 REFERENCED_VARS (I) is call-clobbered. */
122 bitmap call_clobbered_vars;
124 /* Addressable variables in the function. If bit I is set, then
125 REFERENCED_VARS (I) has had its address taken. Note that
126 CALL_CLOBBERED_VARS and ADDRESSABLE_VARS are not related. An
127 addressable variable is not necessarily call-clobbered (e.g., a
128 local addressable whose address does not escape) and not all
129 call-clobbered variables are addressable (e.g., a local static
130 variable). */
131 bitmap addressable_vars;
133 /* When the program has too many call-clobbered variables and call-sites,
134 this variable is used to represent the clobbering effects of function
135 calls. In these cases, all the call clobbered variables in the program
136 are forced to alias this variable. This reduces compile times by not
137 having to keep track of too many V_MAY_DEF expressions at call sites. */
138 tree global_var;
140 /* qsort comparison function to sort type/name tags by DECL_UID. */
142 static int
143 sort_tags_by_id (const void *pa, const void *pb)
145 tree a = *(tree *)pa;
146 tree b = *(tree *)pb;
148 return DECL_UID (a) - DECL_UID (b);
151 /* Initialize WORKLIST to contain those memory tags that are marked call
152    clobbered.  Initialize WORKLIST2 to contain the reasons these
153 memory tags escaped. */
155 static void
156 init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
157 VEC (int, heap) **worklist2)
159 referenced_var_iterator rvi;
160 tree curr;
162 FOR_EACH_REFERENCED_VAR (curr, rvi)
164 if (MTAG_P (curr) && is_call_clobbered (curr))
166 VEC_safe_push (tree, heap, *worklist, curr);
167 VEC_safe_push (int, heap, *worklist2, var_ann (curr)->escape_mask);
172 /* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
173 ALIAS is not already marked call clobbered, and is a memory
174 tag. */
176 static void
177 add_to_worklist (tree alias, VEC (tree, heap) **worklist,
178 VEC (int, heap) **worklist2,
179 int reason)
181 if (MTAG_P (alias) && !is_call_clobbered (alias))
183 VEC_safe_push (tree, heap, *worklist, alias);
184 VEC_safe_push (int, heap, *worklist2, reason);
188 /* Mark aliases of TAG as call clobbered, and place any tags on the
189 alias list that were not already call clobbered on WORKLIST. */
191 static void
192 mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
193 VEC (int, heap) **worklist2)
195 unsigned int i;
196 VEC (tree, gc) *ma;
197 tree entry;
198 var_ann_t ta = var_ann (tag);
200 if (!MTAG_P (tag))
201 return;
202 ma = may_aliases (tag);
203 if (!ma)
204 return;
206 for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
208 if (!unmodifiable_var_p (entry))
210 add_to_worklist (entry, worklist, worklist2, ta->escape_mask);
211 mark_call_clobbered (entry, ta->escape_mask);
216 /* Tags containing global vars need to be marked as global.
217 Tags containing call clobbered vars need to be marked as call
218 clobbered. */
220 static void
221 compute_tag_properties (void)
223 referenced_var_iterator rvi;
224 tree tag;
225 bool changed = true;
226 VEC (tree, heap) *taglist = NULL;
228 FOR_EACH_REFERENCED_VAR (tag, rvi)
230 if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG)
231 continue;
232 VEC_safe_push (tree, heap, taglist, tag);
235 /* We sort the taglist by DECL_UID, for two reasons.
236 1. To get a sequential ordering to make the bitmap accesses
237 faster.
238 2. Because of the way we compute aliases, it's more likely that
239 an earlier tag is included in a later tag, and this will reduce
240 the number of iterations.
242 If we had a real tag graph, we would just topo-order it and be
243 done with it. */
244 qsort (VEC_address (tree, taglist),
245 VEC_length (tree, taglist),
246 sizeof (tree),
247 sort_tags_by_id);
249 /* Go through each tag not marked as global, and if it aliases
250 global vars, mark it global.
252 If the tag contains call clobbered vars, mark it call
253 clobbered.
255 This loop iterates because tags may appear in the may-aliases
256 list of other tags when we group. */
258 while (changed)
260 unsigned int k;
262 changed = false;
263 for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
265 VEC (tree, gc) *ma;
266 unsigned int i;
267 tree entry;
268 bool tagcc = is_call_clobbered (tag);
269 bool tagglobal = MTAG_GLOBAL (tag);
271 if (tagcc && tagglobal)
272 continue;
274 ma = may_aliases (tag);
275 if (!ma)
276 continue;
278 for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
280 /* Call clobbered entries cause the tag to be marked
281 call clobbered. */
282 if (!tagcc && is_call_clobbered (entry))
284 mark_call_clobbered (tag, var_ann (entry)->escape_mask);
285 tagcc = true;
286 changed = true;
289 /* Global vars cause the tag to be marked global. */
290 if (!tagglobal && is_global_var (entry))
292 MTAG_GLOBAL (tag) = true;
293 changed = true;
294 tagglobal = true;
297 /* Early exit once both global and cc are set, since the
298 loop can't do any more than that. */
299 if (tagcc && tagglobal)
300 break;
304 VEC_free (tree, heap, taglist);
307 /* Set up the initial variable clobbers and globalness.
308 When this function completes, only tags whose aliases need to be
309 clobbered will be set clobbered. Tags clobbered because they
310 contain call clobbered vars are handled in compute_tag_properties. */
312 static void
313 set_initial_properties (struct alias_info *ai)
315 unsigned int i;
316 referenced_var_iterator rvi;
317 tree var;
318 tree ptr;
320 FOR_EACH_REFERENCED_VAR (var, rvi)
322 if (is_global_var (var)
323 && (!var_can_have_subvars (var)
324 || get_subvars_for_var (var) == NULL))
326 if (!unmodifiable_var_p (var))
327 mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
329 else if (TREE_CODE (var) == PARM_DECL
330 && default_def (var)
331 && POINTER_TYPE_P (TREE_TYPE (var)))
333 tree def = default_def (var);
334 get_ptr_info (def)->value_escapes_p = 1;
335 get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
339 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
341 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
342 var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
344 if (pi->value_escapes_p)
346 /* If PTR escapes then its associated memory tags and
347 pointed-to variables are call-clobbered. */
348 if (pi->name_mem_tag)
349 mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);
351 if (v_ann->symbol_mem_tag)
352 mark_call_clobbered (v_ann->symbol_mem_tag, pi->escape_mask);
354 if (pi->pt_vars)
356 bitmap_iterator bi;
357 unsigned int j;
358 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
359 if (!unmodifiable_var_p (referenced_var (j)))
360 mark_call_clobbered (referenced_var (j), pi->escape_mask);
364 /* If the name tag is call clobbered, so is the symbol tag
365 associated with the base VAR_DECL. */
366 if (pi->name_mem_tag
367 && v_ann->symbol_mem_tag
368 && is_call_clobbered (pi->name_mem_tag))
369 mark_call_clobbered (v_ann->symbol_mem_tag, pi->escape_mask);
371 /* Name tags and symbol tags that we don't know where they point
372 to, might point to global memory, and thus, are clobbered.
374 FIXME: This is not quite right. They should only be
375 clobbered if value_escapes_p is true, regardless of whether
376 they point to global memory or not.
377 So removing this code and fixing all the bugs would be nice.
378 It is the cause of a bunch of clobbering. */
379 if ((pi->pt_global_mem || pi->pt_anything)
380 && pi->is_dereferenced && pi->name_mem_tag)
382 mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
383 MTAG_GLOBAL (pi->name_mem_tag) = true;
386 if ((pi->pt_global_mem || pi->pt_anything)
387 && pi->is_dereferenced
388 && v_ann->symbol_mem_tag)
390 mark_call_clobbered (v_ann->symbol_mem_tag, ESCAPE_IS_GLOBAL);
391 MTAG_GLOBAL (v_ann->symbol_mem_tag) = true;
397 /* This variable is set to true if we are updating the used alone
398 information for SMTs, or are in a pass that is going to break it
399 temporarily. */
400 bool updating_used_alone;
402 /* Compute which variables need to be marked call clobbered because
403 their tag is call clobbered, and which tags need to be marked
404 global because they contain global variables. */
406 static void
407 compute_call_clobbered (struct alias_info *ai)
409 VEC (tree, heap) *worklist = NULL;
410 VEC(int,heap) *worklist2 = NULL;
412 set_initial_properties (ai);
413 init_transitive_clobber_worklist (&worklist, &worklist2);
414 while (VEC_length (tree, worklist) != 0)
416 tree curr = VEC_pop (tree, worklist);
417 int reason = VEC_pop (int, worklist2);
419 mark_call_clobbered (curr, reason);
420 mark_aliases_call_clobbered (curr, &worklist, &worklist2);
422 VEC_free (tree, heap, worklist);
423 VEC_free (int, heap, worklist2);
424 compute_tag_properties ();
428 /* Helper for recalculate_used_alone. Return a conservatively correct
429 answer as to whether STMT may make a store on the LHS to SYM. */
431 static bool
432 lhs_may_store_to (tree stmt, tree sym ATTRIBUTE_UNUSED)
434 tree lhs = TREE_OPERAND (stmt, 0);
436 lhs = get_base_address (lhs);
438 if (!lhs)
439 return false;
441 if (TREE_CODE (lhs) == SSA_NAME)
442 return false;
443 /* We could do better here by looking at the type tag of LHS, but it
444 is unclear whether this is worth it. */
445 return true;
450 /* Recalculate the used_alone information for SMTs.  */
450 void
451 recalculate_used_alone (void)
453 VEC (tree, heap) *calls = NULL;
454 block_stmt_iterator bsi;
455 basic_block bb;
456 tree stmt;
457 size_t i;
458 referenced_var_iterator rvi;
459 tree var;
461 /* First, reset all the SMT used alone bits to zero. */
462 updating_used_alone = true;
463 FOR_EACH_REFERENCED_VAR (var, rvi)
464 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
466 SMT_OLD_USED_ALONE (var) = SMT_USED_ALONE (var);
467 SMT_USED_ALONE (var) = 0;
470 /* Walk all the statements.
471 Calls get put into a list of statements to update, since we will
472 need to update operands on them if we make any changes.
473    If we see a bare use of an SMT anywhere in a real virtual use or virtual
474 def, mark the SMT as used alone, and for renaming. */
475 FOR_EACH_BB (bb)
477 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
479 bool iscall = false;
480 ssa_op_iter iter;
482 stmt = bsi_stmt (bsi);
484 if (TREE_CODE (stmt) == CALL_EXPR
485 || (TREE_CODE (stmt) == MODIFY_EXPR
486 && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
488 iscall = true;
489 VEC_safe_push (tree, heap, calls, stmt);
492 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter,
493 SSA_OP_VUSE | SSA_OP_VIRTUAL_DEFS)
495 tree svar = var;
497 if (TREE_CODE (var) == SSA_NAME)
498 svar = SSA_NAME_VAR (var);
500 if (TREE_CODE (svar) == SYMBOL_MEMORY_TAG)
502 /* We only care about the LHS on calls. */
503 if (iscall && !lhs_may_store_to (stmt, svar))
504 continue;
506 if (!SMT_USED_ALONE (svar))
508 SMT_USED_ALONE (svar) = true;
510 /* Only need to mark for renaming if it wasn't
511 used alone before. */
512 if (!SMT_OLD_USED_ALONE (svar))
513 mark_sym_for_renaming (svar);
520 /* Update the operands on all the calls we saw. */
521 if (calls)
523 for (i = 0; VEC_iterate (tree, calls, i, stmt); i++)
524 update_stmt (stmt);
527   /* We need to mark for renaming the SMTs that are no longer used alone, so the
528 symbols go away, or else verification will be angry with us, even
529 though they are dead. */
530 FOR_EACH_REFERENCED_VAR (var, rvi)
531 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
533 if (SMT_OLD_USED_ALONE (var) && !SMT_USED_ALONE (var))
534 mark_sym_for_renaming (var);
537 VEC_free (tree, heap, calls);
538 updating_used_alone = false;
541 /* Compute may-alias information for every variable referenced in function
542 FNDECL.
544 Alias analysis proceeds in 3 main phases:
546 1- Points-to and escape analysis.
548 This phase walks the use-def chains in the SSA web looking for three
549 things:
551 * Assignments of the form P_i = &VAR
552 * Assignments of the form P_i = malloc()
553 * Pointers and ADDR_EXPR that escape the current function.
555 The concept of 'escaping' is the same one used in the Java world. When
556 a pointer or an ADDR_EXPR escapes, it means that it has been exposed
557 outside of the current function. So, assignment to global variables,
558 function arguments and returning a pointer are all escape sites, as are
559 conversions between pointers and integers.
561 This is where we are currently limited. Since not everything is renamed
562 into SSA, we lose track of escape properties when a pointer is stashed
563 inside a field in a structure, for instance. In those cases, we are
564 assuming that the pointer does escape.
566 We use escape analysis to determine whether a variable is
567 call-clobbered. Simply put, if an ADDR_EXPR escapes, then the variable
568 is call-clobbered. If a pointer P_i escapes, then all the variables
569 pointed-to by P_i (and its memory tag) also escape.
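     As an illustrative sketch (made-up code, not taken from any particular
     test case), each of the marked statements below is an escape site for
     the pointer argument P:

	int *global_p;

	int *bar (int *p)
	{
	  global_p = p;      <-- escapes via a store to a global variable.
	  foo (p);           <-- escapes as a function call argument.
	  return p;          <-- escapes through the return value.
	}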
571 2- Compute flow-sensitive aliases
573 We have two classes of memory tags. Memory tags associated with the
574 pointed-to data type of the pointers in the program. These tags are
575 called "symbol memory tag" (SMT). The other class are those associated
576 with SSA_NAMEs, called "name memory tag" (NMT). The basic idea is that
577 when adding operands for an INDIRECT_REF *P_i, we will first check
578 whether P_i has a name tag, if it does we use it, because that will have
579 more precise aliasing information. Otherwise, we use the standard symbol
580 tag.
582 In this phase, we go through all the pointers we found in points-to
583 analysis and create alias sets for the name memory tags associated with
584 each pointer P_i. If P_i escapes, we mark call-clobbered the variables
585 it points to and its tag.
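     For example (a hedged sketch with made-up SSA names), given

	p_1 = &a;
	*p_1 = 3;

     the name tag of p_1 points only to 'a', so the store through p_1 only
     needs virtual operands for 'a' instead of one for every variable in
     the alias set of p's symbol memory tag.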
588 3- Compute flow-insensitive aliases
590 This pass will compare the alias set of every symbol memory tag and
591 every addressable variable found in the program. Given a symbol
592      memory tag SMT and an addressable variable V, if the alias sets of
593 SMT and V conflict (as computed by may_alias_p), then V is marked
594 as an alias tag and added to the alias set of SMT.
596 For instance, consider the following function:
598 foo (int i)
600 int *p, a, b;
602 if (i > 10)
603 p = &a;
604 else
605 p = &b;
607 *p = 3;
608 a = b + 2;
609 return *p;
612 After aliasing analysis has finished, the symbol memory tag for pointer
613 'p' will have two aliases, namely variables 'a' and 'b'. Every time
614 pointer 'p' is dereferenced, we want to mark the operation as a
615 potential reference to 'a' and 'b'.
617 foo (int i)
619 int *p, a, b;
621 if (i_2 > 10)
622 p_4 = &a;
623 else
624 p_6 = &b;
625 # p_1 = PHI <p_4(1), p_6(2)>;
627 # a_7 = V_MAY_DEF <a_3>;
628 # b_8 = V_MAY_DEF <b_5>;
629 *p_1 = 3;
631 # a_9 = V_MAY_DEF <a_7>
632 # VUSE <b_8>
633 a_9 = b_8 + 2;
635 # VUSE <a_9>;
636 # VUSE <b_8>;
637 return *p_1;
640 In certain cases, the list of may aliases for a pointer may grow too
641 large. This may cause an explosion in the number of virtual operands
642    inserted in the code, resulting in increased memory consumption and
643 compilation time.
645 When the number of virtual operands needed to represent aliased
646 loads and stores grows too large (configurable with @option{--param
647 max-aliased-vops}), alias sets are grouped to avoid severe
648 compile-time slow downs and memory consumption. See group_aliases. */
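/* For reference, the grouping threshold mentioned above can be adjusted
   from the command line.  An illustrative invocation (the value 1000 is
   made up, not the default):

	gcc -O2 --param max-aliased-vops=1000 foo.c  */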
650 static unsigned int
651 compute_may_aliases (void)
653 struct alias_info *ai;
655 memset (&alias_stats, 0, sizeof (alias_stats));
657 /* Initialize aliasing information. */
658 ai = init_alias_info ();
660 /* For each pointer P_i, determine the sets of variables that P_i may
661 point-to. For every addressable variable V, determine whether the
662 address of V escapes the current function, making V call-clobbered
663      (i.e., whether &V is stored in a global variable or if it is passed as a
664 function call argument). */
665 compute_points_to_sets (ai);
667 /* Collect all pointers and addressable variables, compute alias sets,
668 create memory tags for pointers and promote variables whose address is
669 not needed anymore. */
670 setup_pointers_and_addressables (ai);
672 /* Compute flow-sensitive, points-to based aliasing for all the name
673 memory tags. Note that this pass needs to be done before flow
674 insensitive analysis because it uses the points-to information
675 gathered before to mark call-clobbered symbol tags. */
676 compute_flow_sensitive_aliasing (ai);
678 /* Compute type-based flow-insensitive aliasing for all the type
679 memory tags. */
680 compute_flow_insensitive_aliasing (ai);
682 /* Determine if we need to enable alias grouping. */
683 if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
684 group_aliases (ai);
686 /* Compute call clobbering information. */
687 compute_call_clobbered (ai);
689 /* If the program has too many call-clobbered variables and/or function
690 calls, create .GLOBAL_VAR and use it to model call-clobbering
691 semantics at call sites. This reduces the number of virtual operands
692 considerably, improving compile times at the expense of lost
693 aliasing precision. */
694 maybe_create_global_var (ai);
696 /* If the program contains ref-all pointers, finalize may-alias information
697 for them. This pass needs to be run after call-clobbering information
698 has been computed. */
699 if (ai->ref_all_symbol_mem_tag)
700 finalize_ref_all_pointers (ai);
702 /* Debugging dumps. */
703 if (dump_file)
705 dump_referenced_vars (dump_file);
706 if (dump_flags & TDF_STATS)
707 dump_alias_stats (dump_file);
708 dump_points_to_info (dump_file);
709 dump_alias_info (dump_file);
712 /* Deallocate memory used by aliasing data structures. */
713 delete_alias_info (ai);
715 updating_used_alone = true;
717 block_stmt_iterator bsi;
718 basic_block bb;
719 FOR_EACH_BB (bb)
721 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
723 update_stmt_if_modified (bsi_stmt (bsi));
727 recalculate_used_alone ();
728 updating_used_alone = false;
729 return 0;
733 struct tree_opt_pass pass_may_alias =
735 "alias", /* name */
736 NULL, /* gate */
737 compute_may_aliases, /* execute */
738 NULL, /* sub */
739 NULL, /* next */
740 0, /* static_pass_number */
741 TV_TREE_MAY_ALIAS, /* tv_id */
742 PROP_cfg | PROP_ssa, /* properties_required */
743 PROP_alias, /* properties_provided */
744 0, /* properties_destroyed */
745 0, /* todo_flags_start */
746 TODO_dump_func | TODO_update_ssa
747 | TODO_ggc_collect | TODO_verify_ssa
748 | TODO_verify_stmts, /* todo_flags_finish */
749 0 /* letter */
753 /* Data structure used to count the number of dereferences to PTR
754 inside an expression. */
755 struct count_ptr_d
757 tree ptr;
758 unsigned count;
762 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
763 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
765 static tree
766 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
768 struct count_ptr_d *count_p = (struct count_ptr_d *) data;
770 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
771      pointer 'ptr' is *not* dereferenced; it is simply used to compute
772 the address of 'fld' as 'ptr + offsetof(fld)'. */
773 if (TREE_CODE (*tp) == ADDR_EXPR)
775 *walk_subtrees = 0;
776 return NULL_TREE;
779 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
780 count_p->count++;
782 return NULL_TREE;
786 /* Count the number of direct and indirect uses for pointer PTR in
787 statement STMT. The two counts are stored in *NUM_USES_P and
788    *NUM_DEREFS_P respectively.  *IS_STORE is set to 'true' if at
789 least one of those dereferences is a store operation. */
791 void
792 count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
793 unsigned *num_derefs_p, bool *is_store)
795 ssa_op_iter i;
796 tree use;
798 *num_uses_p = 0;
799 *num_derefs_p = 0;
800 *is_store = false;
802 /* Find out the total number of uses of PTR in STMT. */
803 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
804 if (use == ptr)
805 (*num_uses_p)++;
807 /* Now count the number of indirect references to PTR. This is
808 truly awful, but we don't have much choice. There are no parent
809 pointers inside INDIRECT_REFs, so an expression like
810 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
811 find all the indirect and direct uses of x_1 inside. The only
812 shortcut we can take is the fact that GIMPLE only allows
813 INDIRECT_REFs inside the expressions below. */
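  /* As a hand-worked illustration of the example above, for the statement
     '*x_1 = foo (x_1, *x_1)' this function would compute *NUM_USES_P = 3
     (x_1 appears three times as an SSA use operand), *NUM_DEREFS_P = 2
     (the two INDIRECT_REFs of x_1) and *IS_STORE = true, since the
     dereference on the LHS is a store.  */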
814 if (TREE_CODE (stmt) == MODIFY_EXPR
815 || (TREE_CODE (stmt) == RETURN_EXPR
816 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
817 || TREE_CODE (stmt) == ASM_EXPR
818 || TREE_CODE (stmt) == CALL_EXPR)
820 tree lhs, rhs;
822 if (TREE_CODE (stmt) == MODIFY_EXPR)
824 lhs = TREE_OPERAND (stmt, 0);
825 rhs = TREE_OPERAND (stmt, 1);
827 else if (TREE_CODE (stmt) == RETURN_EXPR)
829 tree e = TREE_OPERAND (stmt, 0);
830 lhs = TREE_OPERAND (e, 0);
831 rhs = TREE_OPERAND (e, 1);
833 else if (TREE_CODE (stmt) == ASM_EXPR)
835 lhs = ASM_OUTPUTS (stmt);
836 rhs = ASM_INPUTS (stmt);
838 else
840 lhs = NULL_TREE;
841 rhs = stmt;
844 if (lhs && (TREE_CODE (lhs) == TREE_LIST || EXPR_P (lhs)))
846 struct count_ptr_d count;
847 count.ptr = ptr;
848 count.count = 0;
849 walk_tree (&lhs, count_ptr_derefs, &count, NULL);
850 *is_store = true;
851 *num_derefs_p = count.count;
854 if (rhs && (TREE_CODE (rhs) == TREE_LIST || EXPR_P (rhs)))
856 struct count_ptr_d count;
857 count.ptr = ptr;
858 count.count = 0;
859 walk_tree (&rhs, count_ptr_derefs, &count, NULL);
860 *num_derefs_p += count.count;
864 gcc_assert (*num_uses_p >= *num_derefs_p);
867 /* Initialize the data structures used for alias analysis. */
869 static struct alias_info *
870 init_alias_info (void)
872 struct alias_info *ai;
873 referenced_var_iterator rvi;
874 tree var;
876 bitmap_obstack_initialize (&alias_obstack);
877 ai = XCNEW (struct alias_info);
878 ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
879 sbitmap_zero (ai->ssa_names_visited);
880 ai->processed_ptrs = VEC_alloc (tree, heap, 50);
881 ai->written_vars = BITMAP_ALLOC (&alias_obstack);
882 ai->dereferenced_ptrs_store = BITMAP_ALLOC (&alias_obstack);
883 ai->dereferenced_ptrs_load = BITMAP_ALLOC (&alias_obstack);
885 /* If aliases have been computed before, clear existing information. */
886 if (aliases_computed_p)
888 unsigned i;
890 /* Similarly, clear the set of addressable variables. In this
891 case, we can just clear the set because addressability is
892 only computed here. */
893 bitmap_clear (addressable_vars);
895 /* Clear flow-insensitive alias information from each symbol. */
896 FOR_EACH_REFERENCED_VAR (var, rvi)
898 var_ann_t ann = var_ann (var);
900 ann->is_aliased = 0;
901 ann->may_aliases = NULL;
902 NUM_REFERENCES_CLEAR (ann);
904 /* Since we are about to re-discover call-clobbered
905 variables, clear the call-clobbered flag. Variables that
906 are intrinsically call-clobbered (globals, local statics,
907 etc) will not be marked by the aliasing code, so we can't
908 remove them from CALL_CLOBBERED_VARS.
910 NB: STRUCT_FIELDS are still call clobbered if they are for
911 a global variable, so we *don't* clear their call clobberedness
912 just because they are tags, though we will clear it if they
913 aren't for global variables. */
914 if (TREE_CODE (var) == NAME_MEMORY_TAG
915 || TREE_CODE (var) == SYMBOL_MEMORY_TAG
916 || !is_global_var (var))
917 clear_call_clobbered (var);
920 /* Clear flow-sensitive points-to information from each SSA name. */
921 for (i = 1; i < num_ssa_names; i++)
923 tree name = ssa_name (i);
925 if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
926 continue;
928 if (SSA_NAME_PTR_INFO (name))
930 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);
932 /* Clear all the flags but keep the name tag to
933 avoid creating new temporaries unnecessarily. If
934 this pointer is found to point to a subset or
935 superset of its former points-to set, then a new
936 tag will need to be created in create_name_tags. */
937 pi->pt_anything = 0;
938 pi->pt_null = 0;
939 pi->value_escapes_p = 0;
940 pi->is_dereferenced = 0;
941 if (pi->pt_vars)
942 bitmap_clear (pi->pt_vars);
947 /* Next time, we will need to reset alias information. */
948 aliases_computed_p = true;
950 return ai;
954 /* Deallocate memory used by alias analysis. */
956 static void
957 delete_alias_info (struct alias_info *ai)
959 size_t i;
960 referenced_var_iterator rvi;
961 tree var;
963 sbitmap_free (ai->ssa_names_visited);
964 VEC_free (tree, heap, ai->processed_ptrs);
966 for (i = 0; i < ai->num_addressable_vars; i++)
967 free (ai->addressable_vars[i]);
969 FOR_EACH_REFERENCED_VAR(var, rvi)
971 var_ann_t ann = var_ann (var);
972 NUM_REFERENCES_CLEAR (ann);
975 free (ai->addressable_vars);
977 for (i = 0; i < ai->num_pointers; i++)
978 free (ai->pointers[i]);
979 free (ai->pointers);
981 BITMAP_FREE (ai->written_vars);
982 BITMAP_FREE (ai->dereferenced_ptrs_store);
983 BITMAP_FREE (ai->dereferenced_ptrs_load);
984 bitmap_obstack_release (&alias_obstack);
985 free (ai);
987 delete_points_to_sets ();
990 /* Create name tags for all the pointers that have been dereferenced.
991 We only create a name tag for a pointer P if P is found to point to
992 a set of variables (so that we can alias them to *P) or if it is
993 the result of a call to malloc (which means that P cannot point to
994 anything else nor alias any other variable).
996 If two pointers P and Q point to the same set of variables, they
997 are assigned the same name tag. */
999 static void
1000 create_name_tags (void)
1002 size_t i;
1003 VEC (tree, heap) *with_ptvars = NULL;
1004 tree ptr;
1006 /* Collect the list of pointers with a non-empty points to set. */
1007 for (i = 1; i < num_ssa_names; i++)
1009 tree ptr = ssa_name (i);
1010 struct ptr_info_def *pi;
1012 if (!ptr
1013 || !POINTER_TYPE_P (TREE_TYPE (ptr))
1014 || !SSA_NAME_PTR_INFO (ptr))
1015 continue;
1017 pi = SSA_NAME_PTR_INFO (ptr);
1019 if (pi->pt_anything || !pi->is_dereferenced)
1021 /* No name tags for pointers that have not been
1022 dereferenced or point to an arbitrary location. */
1023 pi->name_mem_tag = NULL_TREE;
1024 continue;
1027 /* Set pt_anything on the pointers without pt_vars filled in so
1028 that they are assigned a symbol tag. */
1029 if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
1030 VEC_safe_push (tree, heap, with_ptvars, ptr);
1031 else
1032 set_pt_anything (ptr);
1035 /* If we didn't find any pointers with pt_vars set, we're done. */
1036 if (!with_ptvars)
1037 return;
1039 /* Now go through the pointers with pt_vars, and find a name tag
1040 with the same pt_vars as this pointer, or create one if one
1041 doesn't exist. */
1042 for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
1044 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
1045 size_t j;
1046 tree ptr2;
1047 tree old_name_tag = pi->name_mem_tag;
1049 /* If PTR points to a set of variables, check if we don't
1050 have another pointer Q with the same points-to set before
1051 creating a tag. If so, use Q's tag instead of creating a
1052 new one.
1054 This is important for not creating unnecessary symbols
1055 and also for copy propagation. If we ever need to
1056 propagate PTR into Q or vice-versa, we would run into
1057 problems if they both had different name tags because
1058 they would have different SSA version numbers (which
1059 would force us to take the name tags in and out of SSA). */
1060 for (j = 0; j < i && VEC_iterate (tree, with_ptvars, j, ptr2); j++)
1062 struct ptr_info_def *qi = SSA_NAME_PTR_INFO (ptr2);
1064 if (bitmap_equal_p (pi->pt_vars, qi->pt_vars))
1066 pi->name_mem_tag = qi->name_mem_tag;
1067 break;
1071 /* If we didn't find a pointer with the same points-to set
1072 as PTR, create a new name tag if needed. */
1073 if (pi->name_mem_tag == NULL_TREE)
1074 pi->name_mem_tag = get_nmt_for (ptr);
1076 /* If the new name tag computed for PTR is different than
1077 the old name tag that it used to have, then the old tag
1078 needs to be removed from the IL, so we mark it for
1079 renaming. */
1080 if (old_name_tag && old_name_tag != pi->name_mem_tag)
1081 mark_sym_for_renaming (old_name_tag);
1083 TREE_THIS_VOLATILE (pi->name_mem_tag)
1084 |= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
1086 /* Mark the new name tag for renaming. */
1087 mark_sym_for_renaming (pi->name_mem_tag);
1090 VEC_free (tree, heap, with_ptvars);
1094 /* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
1095 the name memory tag (NMT) associated with P_i. If P_i escapes, then its
1096 name tag and the variables it points-to are call-clobbered. Finally, if
1097 P_i escapes and we could not determine where it points to, then all the
1098 variables in the same alias set as *P_i are marked call-clobbered. This
1099 is necessary because we must assume that P_i may take the address of any
1100 variable in the same alias set. */
1102 static void
1103 compute_flow_sensitive_aliasing (struct alias_info *ai)
1105 size_t i;
1106 tree ptr;
1108 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
1110 if (!find_what_p_points_to (ptr))
1111 set_pt_anything (ptr);
1114 create_name_tags ();
1116 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
1118 unsigned j;
1119 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
1120 var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
1121 bitmap_iterator bi;
1124 /* Set up aliasing information for PTR's name memory tag (if it has
1125 one). Note that only pointers that have been dereferenced will
1126 have a name memory tag. */
1127 if (pi->name_mem_tag && pi->pt_vars)
1128 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
1130 add_may_alias (pi->name_mem_tag, referenced_var (j));
1131 add_may_alias (v_ann->symbol_mem_tag, referenced_var (j));
1137 /* Compute type-based alias sets. Traverse all the pointers and
1138 addressable variables found in setup_pointers_and_addressables.
1140 For every pointer P in AI->POINTERS and addressable variable V in
1141 AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's symbol
1142 memory tag (SMT) if their alias sets conflict. V is then marked as
1143 an alias tag so that the operand scanner knows that statements
1144 containing V have aliased operands. */
1146 static void
1147 compute_flow_insensitive_aliasing (struct alias_info *ai)
1149 size_t i;
1151 /* Initialize counter for the total number of virtual operands that
1152 aliasing will introduce. When AI->TOTAL_ALIAS_VOPS goes beyond the
1153      threshold set by --param max-aliased-vops, we enable alias
1154 grouping. */
1155 ai->total_alias_vops = 0;
1157 /* For every pointer P, determine which addressable variables may alias
1158 with P's symbol memory tag. */
1159 for (i = 0; i < ai->num_pointers; i++)
1161 size_t j;
1162 struct alias_map_d *p_map = ai->pointers[i];
1163 tree tag = var_ann (p_map->var)->symbol_mem_tag;
1164 var_ann_t tag_ann = var_ann (tag);
1165 tree var;
1167 /* Call-clobbering information is not finalized yet at this point. */
1168 if (PTR_IS_REF_ALL (p_map->var))
1169 continue;
1171 p_map->total_alias_vops = 0;
1172 p_map->may_aliases = BITMAP_ALLOC (&alias_obstack);
1174 /* Add any pre-existing may_aliases to the bitmap used to represent
1175 TAG's alias set in case we need to group aliases. */
1176 for (j = 0; VEC_iterate (tree, tag_ann->may_aliases, j, var); ++j)
1177 bitmap_set_bit (p_map->may_aliases, DECL_UID (var));
1179 for (j = 0; j < ai->num_addressable_vars; j++)
1181 struct alias_map_d *v_map;
1182 var_ann_t v_ann;
1183 bool tag_stored_p, var_stored_p;
1185 v_map = ai->addressable_vars[j];
1186 var = v_map->var;
1187 v_ann = var_ann (var);
1189 /* Skip memory tags and variables that have never been
1190 written to. We also need to check if the variables are
1191 call-clobbered because they may be overwritten by
1192 function calls.
1194         Note this is effectively randomly accessing elements in
1195 the sparse bitset, which can be highly inefficient.
1196 So we first check the call_clobbered status of the
1197 tag and variable before querying the bitmap. */
1198 tag_stored_p = is_call_clobbered (tag)
1199 || bitmap_bit_p (ai->written_vars, DECL_UID (tag));
1200 var_stored_p = is_call_clobbered (var)
1201 || bitmap_bit_p (ai->written_vars, DECL_UID (var));
1202 if (!tag_stored_p && !var_stored_p)
1203 continue;
1205 if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
1207 size_t num_tag_refs, num_var_refs;
1209 num_tag_refs = NUM_REFERENCES (tag_ann);
1210 num_var_refs = NUM_REFERENCES (v_ann);
1212 /* Add VAR to TAG's may-aliases set. */
1214 /* We should never have a var with subvars here, because
1215          they shouldn't get into the set of addressable vars.  */
1216 gcc_assert (!var_can_have_subvars (var)
1217 || get_subvars_for_var (var) == NULL);
1219 add_may_alias (tag, var);
1220 /* Update the bitmap used to represent TAG's alias set
1221 in case we need to group aliases. */
1222 bitmap_set_bit (p_map->may_aliases, DECL_UID (var));
1224 /* Update the total number of virtual operands due to
1225 aliasing. Since we are adding one more alias to TAG's
1226 may-aliases set, the total number of virtual operands due
1227 to aliasing will be increased by the number of references
1228 made to VAR and TAG (every reference to TAG will also
1229 count as a reference to VAR). */
1230 ai->total_alias_vops += (num_var_refs + num_tag_refs);
1231 p_map->total_alias_vops += (num_var_refs + num_tag_refs);
1238 /* Since this analysis is based exclusively on symbols, it fails to
1239 handle cases where two pointers P and Q have different memory
1240 tags with conflicting alias set numbers but no aliased symbols in
1241 common.
1243 For example, suppose that we have two memory tags SMT.1 and SMT.2
1244 such that
1246 may-aliases (SMT.1) = { a }
1247 may-aliases (SMT.2) = { b }
1249 and the alias set number of SMT.1 conflicts with that of SMT.2.
1250 Since they don't have symbols in common, loads and stores from
1251 SMT.1 and SMT.2 will seem independent of each other, which will
1252 lead to the optimizers making invalid transformations (see
1253 testsuite/gcc.c-torture/execute/pr15262-[12].c).
1255 To avoid this problem, we do a final traversal of AI->POINTERS
1256 looking for pairs of pointers that have no aliased symbols in
1257 common and yet have conflicting alias set numbers. */
1258 for (i = 0; i < ai->num_pointers; i++)
1260 size_t j;
1261 struct alias_map_d *p_map1 = ai->pointers[i];
1262 tree tag1 = var_ann (p_map1->var)->symbol_mem_tag;
1263 bitmap may_aliases1 = p_map1->may_aliases;
1265 if (PTR_IS_REF_ALL (p_map1->var))
1266 continue;
1268 for (j = i + 1; j < ai->num_pointers; j++)
1270 struct alias_map_d *p_map2 = ai->pointers[j];
1271 tree tag2 = var_ann (p_map2->var)->symbol_mem_tag;
1272 bitmap may_aliases2 = p_map2->may_aliases;
1274 if (PTR_IS_REF_ALL (p_map2->var))
1275 continue;
1277 /* If the pointers may not point to each other, do nothing. */
1278 if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
1279 continue;
1281 /* The two pointers may alias each other. If they already have
1282 symbols in common, do nothing. */
1283 if (bitmap_intersect_p (may_aliases1, may_aliases2))
1284 continue;
1286 if (!bitmap_empty_p (may_aliases2))
1288 unsigned int k;
1289 bitmap_iterator bi;
1291 /* Add all the aliases for TAG2 into TAG1's alias set.
1292 FIXME, update grouping heuristic counters. */
1293 EXECUTE_IF_SET_IN_BITMAP (may_aliases2, 0, k, bi)
1294 add_may_alias (tag1, referenced_var (k));
1295 bitmap_ior_into (may_aliases1, may_aliases2);
1297 else
1299 /* Since TAG2 does not have any aliases of its own, add
1300 TAG2 itself to the alias set of TAG1. */
1301 add_may_alias (tag1, tag2);
1302 bitmap_set_bit (may_aliases1, DECL_UID (tag2));
1307 if (dump_file)
1308 fprintf (dump_file, "\n%s: Total number of aliased vops: %ld\n",
1309 get_name (current_function_decl),
1310 ai->total_alias_vops);
1314 /* Finalize may-alias information for ref-all pointers. Traverse all
1315 the addressable variables found in setup_pointers_and_addressables.
1317 If flow-sensitive alias analysis has attached a name memory tag to
1318 a ref-all pointer, we will use it for the dereferences because that
1319 will have more precise aliasing information. But if there is no
1320 name tag, we will use a special symbol tag that aliases all the
1321 call-clobbered addressable variables. */
1323 static void
1324 finalize_ref_all_pointers (struct alias_info *ai)
1326 size_t i;
1328 if (global_var)
1329 add_may_alias (ai->ref_all_symbol_mem_tag, global_var);
1330 else
1332 /* First add the real call-clobbered variables. */
1333 for (i = 0; i < ai->num_addressable_vars; i++)
1335 tree var = ai->addressable_vars[i]->var;
1336 if (is_call_clobbered (var))
1337 add_may_alias (ai->ref_all_symbol_mem_tag, var);
1340 /* Then add the call-clobbered pointer memory tags. See
1341 compute_flow_insensitive_aliasing for the rationale. */
1342 for (i = 0; i < ai->num_pointers; i++)
1344 tree ptr = ai->pointers[i]->var, tag;
1345 if (PTR_IS_REF_ALL (ptr))
1346 continue;
1347 tag = var_ann (ptr)->symbol_mem_tag;
1348 if (is_call_clobbered (tag))
1349 add_may_alias (ai->ref_all_symbol_mem_tag, tag);
1355 /* Comparison function for qsort used in group_aliases. */
1357 static int
1358 total_alias_vops_cmp (const void *p, const void *q)
1360 const struct alias_map_d **p1 = (const struct alias_map_d **)p;
1361 const struct alias_map_d **p2 = (const struct alias_map_d **)q;
1362 long n1 = (*p1)->total_alias_vops;
1363 long n2 = (*p2)->total_alias_vops;
1365 /* We want to sort in descending order. */
1366 return (n1 > n2 ? -1 : (n1 == n2) ? 0 : 1);
1369 /* Group all the aliases for TAG to make TAG represent all the
1370 variables in its alias set. Update the total number
1371 of virtual operands due to aliasing (AI->TOTAL_ALIAS_VOPS). This
1372 function will make TAG be the unique alias tag for all the
1373 variables in its may-aliases. So, given:
1375 may-aliases(TAG) = { V1, V2, V3 }
1377 This function will group the variables into:
1379 may-aliases(V1) = { TAG }
1380 may-aliases(V2) = { TAG }
1381    may-aliases(V3) = { TAG }  */
1383 static void
1384 group_aliases_into (tree tag, bitmap tag_aliases, struct alias_info *ai)
1386 unsigned int i;
1387 var_ann_t tag_ann = var_ann (tag);
1388 size_t num_tag_refs = NUM_REFERENCES (tag_ann);
1389 bitmap_iterator bi;
1391 EXECUTE_IF_SET_IN_BITMAP (tag_aliases, 0, i, bi)
1393 tree var = referenced_var (i);
1394 var_ann_t ann = var_ann (var);
1396 /* Make TAG the unique alias of VAR. */
1397 ann->is_aliased = 0;
1398 ann->may_aliases = NULL;
1400 /* Note that VAR and TAG may be the same if the function has no
1401 addressable variables (see the discussion at the end of
1402 setup_pointers_and_addressables). */
1403 if (var != tag)
1404 add_may_alias (var, tag);
1406 /* Reduce total number of virtual operands contributed
1407 by TAG on behalf of VAR. Notice that the references to VAR
1408 itself won't be removed. We will merely replace them with
1409 references to TAG. */
1410 ai->total_alias_vops -= num_tag_refs;
1413 /* We have reduced the number of virtual operands that TAG makes on
1414 behalf of all the variables formerly aliased with it. However,
1415 we have also "removed" all the virtual operands for TAG itself,
1416 so we add them back. */
1417 ai->total_alias_vops += num_tag_refs;
1419 /* TAG no longer has any aliases. */
1420 tag_ann->may_aliases = NULL;
1424 /* Group may-aliases sets to reduce the number of virtual operands due
1425 to aliasing.
1427 1- Sort the list of pointers in decreasing number of contributed
1428 virtual operands.
1430 2- Take the first entry in AI->POINTERS and revert the role of
1431 the memory tag and its aliases. Usually, whenever an aliased
1432 variable Vi is found to alias with a memory tag T, we add Vi
1433 to the may-aliases set for T. Meaning that after alias
1434 analysis, we will have:
1436 may-aliases(T) = { V1, V2, V3, ..., Vn }
1438 This means that every statement that references T, will get 'n'
1439 virtual operands for each of the Vi tags. But, when alias
1440 grouping is enabled, we make T an alias tag and add it to the
1441 alias set of all the Vi variables:
1443 may-aliases(V1) = { T }
1444 may-aliases(V2) = { T }
1446 may-aliases(Vn) = { T }
1448 This has two effects: (a) statements referencing T will only get
1449 a single virtual operand, and, (b) all the variables Vi will now
1450 appear to alias each other. So, we lose alias precision to
1451 improve compile time. But, in theory, a program with such a high
1452 level of aliasing should not be very optimizable in the first
1453 place.
1455 3- Since variables may be in the alias set of more than one
1456 memory tag, the grouping done in step (2) needs to be extended
1457 to all the memory tags that have a non-empty intersection with
1458 the may-aliases set of tag T. For instance, if we originally
1459 had these may-aliases sets:
1461 may-aliases(T) = { V1, V2, V3 }
1462 may-aliases(R) = { V2, V4 }
1464 In step (2) we would have reverted the aliases for T as:
1466 may-aliases(V1) = { T }
1467 may-aliases(V2) = { T }
1468 may-aliases(V3) = { T }
1470 But note that now V2 is no longer aliased with R. We could
1471 add R to may-aliases(V2), but we are in the process of
1472 grouping aliases to reduce virtual operands so what we do is
1473 add V4 to the grouping to obtain:
1475 may-aliases(V1) = { T }
1476 may-aliases(V2) = { T }
1477 may-aliases(V3) = { T }
1478 may-aliases(V4) = { T }
1480 4- If the total number of virtual operands due to aliasing is
1481       still above the threshold set by max-aliased-vops, go back to (2).  */
1483 static void
1484 group_aliases (struct alias_info *ai)
1486 size_t i;
1487 tree ptr;
1489 /* Sort the POINTERS array in descending order of contributed
1490 virtual operands. */
1491 qsort (ai->pointers, ai->num_pointers, sizeof (struct alias_map_d *),
1492 total_alias_vops_cmp);
1494 /* For every pointer in AI->POINTERS, reverse the roles of its tag
1495 and the tag's may-aliases set. */
1496 for (i = 0; i < ai->num_pointers; i++)
1498 size_t j;
1499 tree tag1 = var_ann (ai->pointers[i]->var)->symbol_mem_tag;
1500 bitmap tag1_aliases = ai->pointers[i]->may_aliases;
1502 /* Skip tags that have been grouped already. */
1503 if (ai->pointers[i]->grouped_p)
1504 continue;
1506 /* See if TAG1 had any aliases in common with other symbol tags.
1507 If we find a TAG2 with common aliases with TAG1, add TAG2's
1508 aliases into TAG1. */
1509 for (j = i + 1; j < ai->num_pointers; j++)
1511 bitmap tag2_aliases = ai->pointers[j]->may_aliases;
1513 if (bitmap_intersect_p (tag1_aliases, tag2_aliases))
1515 tree tag2 = var_ann (ai->pointers[j]->var)->symbol_mem_tag;
1517 bitmap_ior_into (tag1_aliases, tag2_aliases);
1519 /* TAG2 does not need its aliases anymore. */
1520 bitmap_clear (tag2_aliases);
1521 var_ann (tag2)->may_aliases = NULL;
1523 /* TAG1 is the unique alias of TAG2. */
1524 add_may_alias (tag2, tag1);
1526 ai->pointers[j]->grouped_p = true;
1530 /* Now group all the aliases we collected into TAG1. */
1531 group_aliases_into (tag1, tag1_aliases, ai);
1533 /* If we've reduced total number of virtual operands below the
1534 threshold, stop. */
1535 if (ai->total_alias_vops < MAX_ALIASED_VOPS)
1536 break;
1539 /* Finally, all the variables that have been grouped cannot be in
1540 the may-alias set of name memory tags. Suppose that we have
1541 grouped the aliases in this code so that may-aliases(a) = SMT.20
1543 p_5 = &a;
1545 # a_9 = V_MAY_DEF <a_8>
1546 p_5->field = 0
1547 ... Several modifications to SMT.20 ...
1548 # VUSE <a_9>
1549 x_30 = p_5->field
1551 Since p_5 points to 'a', the optimizers will try to propagate 0
1552 into p_5->field, but that is wrong because there have been
1553 modifications to 'SMT.20' in between. To prevent this we have to
1554 replace 'a' with 'SMT.20' in the name tag of p_5. */
1555 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
1557 size_t j;
1558 tree name_tag = SSA_NAME_PTR_INFO (ptr)->name_mem_tag;
1559 VEC(tree,gc) *aliases;
1560 tree alias;
1562 if (name_tag == NULL_TREE)
1563 continue;
1565 aliases = var_ann (name_tag)->may_aliases;
1566 for (j = 0; VEC_iterate (tree, aliases, j, alias); j++)
1568 var_ann_t ann = var_ann (alias);
1570 if ((!MTAG_P (alias)
1571 || TREE_CODE (alias) == STRUCT_FIELD_TAG)
1572 && ann->may_aliases)
1574 tree new_alias;
1576 gcc_assert (VEC_length (tree, ann->may_aliases) == 1);
1578 new_alias = VEC_index (tree, ann->may_aliases, 0);
1579 replace_may_alias (name_tag, j, new_alias);
1584 if (dump_file)
1585 fprintf (dump_file,
1586 "%s: Total number of aliased vops after grouping: %ld%s\n",
1587 get_name (current_function_decl),
1588 ai->total_alias_vops,
1589 (ai->total_alias_vops < 0) ? " (negative values are OK)" : "");
1593 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
1595 static void
1596 create_alias_map_for (tree var, struct alias_info *ai)
1598 struct alias_map_d *alias_map;
1599 alias_map = XCNEW (struct alias_map_d);
1600 alias_map->var = var;
1601 alias_map->set = get_alias_set (var);
1602 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
1606 /* Create memory tags for all the dereferenced pointers and build the
1607 ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
1608 sets. Based on the address escape and points-to information collected
1609 earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
1610 variables whose address is not needed anymore. */
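/* A hedged illustration of the TREE_ADDRESSABLE clearing described above
   (made-up code):

	int a, x;
	int *p;
	p = &a;
	x = *p;

   Once the early cleanup passes propagate &a into the dereference (giving
   x = a) and remove the now-dead assignment to p, no ADDR_EXPR of 'a'
   remains, so its addressable flag can be cleared and 'a' can be rewritten
   into SSA form as a regular GIMPLE register.  */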
1612 static void
1613 setup_pointers_and_addressables (struct alias_info *ai)
1615 size_t n_vars, num_addressable_vars, num_pointers;
1616 referenced_var_iterator rvi;
1617 tree var;
1618 VEC (tree, heap) *varvec = NULL;
1619 safe_referenced_var_iterator srvi;
1621 /* Size up the arrays ADDRESSABLE_VARS and POINTERS. */
1622 num_addressable_vars = num_pointers = 0;
1624 FOR_EACH_REFERENCED_VAR (var, rvi)
1626 if (may_be_aliased (var))
1627 num_addressable_vars++;
1629 if (POINTER_TYPE_P (TREE_TYPE (var)))
1631 /* Since we don't keep track of volatile variables, assume that
1632 these pointers are used in indirect store operations. */
1633 if (TREE_THIS_VOLATILE (var))
1634 bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
1636 num_pointers++;
1640 /* Create ADDRESSABLE_VARS and POINTERS. Note that these arrays are
1641 always going to be slightly bigger than we actually need them
1642 because some TREE_ADDRESSABLE variables will be marked
1643 non-addressable below and only pointers with unique symbol tags are
1644 going to be added to POINTERS. */
1645 ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
1646 ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
1647 ai->num_addressable_vars = 0;
1648 ai->num_pointers = 0;
1650 /* Since we will be creating symbol memory tags within this loop,
1651 cache the value of NUM_REFERENCED_VARS to avoid processing the
1652 additional tags unnecessarily. */
1653 n_vars = num_referenced_vars;
1655 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
1657 var_ann_t v_ann = var_ann (var);
1658 subvar_t svars;
1660 /* Name memory tags already have flow-sensitive aliasing
1661 information, so they need not be processed by
1662 compute_flow_insensitive_aliasing. Similarly, symbol memory
1663 tags are already accounted for when we process their
1664 associated pointer.
1666 Structure fields, on the other hand, have to have some of this
1667 information processed for them, but it's pointless to mark them
1668 non-addressable (since they are fake variables anyway). */
1669 if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
1670 continue;
1672 /* Remove the ADDRESSABLE flag from every addressable variable whose
1673 address is not needed anymore. This is caused by the propagation
1674 of ADDR_EXPR constants into INDIRECT_REF expressions and the
1675 removal of dead pointer assignments done by the early scalar
1676 cleanup passes. */
1677 if (TREE_ADDRESSABLE (var))
1679 if (!bitmap_bit_p (addressable_vars, DECL_UID (var))
1680 && TREE_CODE (var) != RESULT_DECL
1681 && !is_global_var (var))
1683 bool okay_to_mark = true;
1685 /* Since VAR is now a regular GIMPLE register, we will need
1686 to rename VAR into SSA afterwards. */
1687 mark_sym_for_renaming (var);
1689 /* If VAR can have sub-variables, and any of its
1690 sub-variables has its address taken, then we cannot
1691 remove the addressable flag from VAR. */
1692 if (var_can_have_subvars (var)
1693 && (svars = get_subvars_for_var (var)))
1695 subvar_t sv;
1697 for (sv = svars; sv; sv = sv->next)
1699 if (bitmap_bit_p (addressable_vars, DECL_UID (sv->var)))
1700 okay_to_mark = false;
1701 mark_sym_for_renaming (sv->var);
1705 /* The address of VAR is not needed, remove the
1706 addressable bit, so that it can be optimized as a
1707 regular variable. */
1708 if (okay_to_mark)
1709 mark_non_addressable (var);
1713 /* Global variables and addressable locals may be aliased. Create an
1714 entry in ADDRESSABLE_VARS for VAR. */
1715 if (may_be_aliased (var)
1716 && (!var_can_have_subvars (var)
1717 || get_subvars_for_var (var) == NULL))
1719 create_alias_map_for (var, ai);
1720 mark_sym_for_renaming (var);
1723 /* Add pointer variables that have been dereferenced to the POINTERS
1724 array and create a symbol memory tag for them. */
1725 if (POINTER_TYPE_P (TREE_TYPE (var)))
1727 if ((bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var))
1728 || bitmap_bit_p (ai->dereferenced_ptrs_load, DECL_UID (var))))
1730 tree tag;
1731 var_ann_t t_ann;
1733 /* If pointer VAR still doesn't have a memory tag
1734 associated with it, create it now or re-use an
1735 existing one. */
1736 tag = get_tmt_for (var, ai);
1737 t_ann = var_ann (tag);
1739 /* The symbol tag will need to be renamed into SSA
1740 afterwards. Note that we cannot do this inside
1741 get_tmt_for because aliasing may run multiple times
1742 and we only create symbol tags the first time. */
1743 mark_sym_for_renaming (tag);
1745 /* Similarly, if pointer VAR used to have another type
1746 tag, we will need to process it in the renamer to
1747 remove the stale virtual operands. */
1748 if (v_ann->symbol_mem_tag)
1749 mark_sym_for_renaming (v_ann->symbol_mem_tag);
1751 /* Associate the tag with pointer VAR. */
1752 v_ann->symbol_mem_tag = tag;
1754 /* If pointer VAR has been used in a store operation,
1755 then its memory tag must be marked as written-to. */
1756 if (bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var)))
1757 bitmap_set_bit (ai->written_vars, DECL_UID (tag));
1759 /* All the dereferences of pointer VAR count as
1760 references of TAG. Since TAG can be associated with
1761 several pointers, add the dereferences of VAR to the
1762 TAG. */
1763 NUM_REFERENCES_SET (t_ann,
1764 NUM_REFERENCES (t_ann)
1765 + NUM_REFERENCES (v_ann));
1767 else
1769 /* The pointer has not been dereferenced. If it had a
1770 symbol memory tag, remove it and mark the old tag for
1771 renaming to remove it out of the IL. */
1772 var_ann_t ann = var_ann (var);
1773 tree tag = ann->symbol_mem_tag;
1774 if (tag)
1776 mark_sym_for_renaming (tag);
1777 ann->symbol_mem_tag = NULL_TREE;
1782 VEC_free (tree, heap, varvec);
1786 /* Determine whether to use .GLOBAL_VAR to model call clobbering semantics. At
1787 every call site, we need to emit V_MAY_DEF expressions to represent the
1788 clobbering effects of the call for variables whose address escapes the
1789 current function.
1791 One approach is to group all call-clobbered variables into a single
1792 representative that is used as an alias of every call-clobbered variable
1793    (.GLOBAL_VAR).  This works well, but it ties the optimizer's hands because
1794    a reference to any call-clobbered variable becomes a reference to .GLOBAL_VAR.
1796 The second approach is to emit a clobbering V_MAY_DEF for every
1797 call-clobbered variable at call sites. This is the preferred way in terms
1798 of optimization opportunities but it may create too many V_MAY_DEF operands
1799 if there are many call clobbered variables and function calls in the
1800 function.
1802 To decide whether or not to use .GLOBAL_VAR we multiply the number of
1803 function calls found by the number of call-clobbered variables. If that
1804 product is beyond a certain threshold, as determined by the parameterized
1805 values shown below, we use .GLOBAL_VAR.
1807 FIXME. This heuristic should be improved. One idea is to use several
1808 .GLOBAL_VARs of different types instead of a single one. The thresholds
1809 have been derived from a typical bootstrap cycle, including all target
1810    libraries.  Compile times were found to increase by ~1% compared to using
1811 .GLOBAL_VAR. */
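/* As a rough worked example (all numbers made up, assuming for illustration
   a GLOBAL_VAR_THRESHOLD of 500000): a function with 200 call sites and
   3000 call-clobbered variables would need 200 * 3000 = 600000 clobbering
   virtual operands, which exceeds the threshold, so .GLOBAL_VAR would be
   created instead.  */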
1813 static void
1814 maybe_create_global_var (struct alias_info *ai)
1816 unsigned i, n_clobbered;
1817 bitmap_iterator bi;
1819 /* No need to create it, if we have one already. */
1820 if (global_var == NULL_TREE)
1822 /* Count all the call-clobbered variables. */
1823 n_clobbered = 0;
1824 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1826 n_clobbered++;
1829 /* If the number of virtual operands that would be needed to
1830 model all the call-clobbered variables is larger than
1831 GLOBAL_VAR_THRESHOLD, create .GLOBAL_VAR.
1833 Also create .GLOBAL_VAR if there are no call-clobbered
1834 variables and the program contains a mixture of pure/const
1835 and regular function calls. This is to avoid the problem
1836 described in PR 20115:
1838 int X;
1839 int func_pure (void) { return X; }
1840 int func_non_pure (int a) { X += a; }
1841 int foo ()
1843 int a = func_pure ();
1844 func_non_pure (a);
1845 a = func_pure ();
1846 return a;
1849 Since foo() has no call-clobbered variables, there is
1850 no relationship between the calls to func_pure and
1851 func_non_pure. Since func_pure has no side-effects, value
1852 numbering optimizations elide the second call to func_pure.
1853 So, if we have some pure/const and some regular calls in the
1854 program we create .GLOBAL_VAR to avoid missing these
1855 relations. */
1856 if (ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD
1857 || (n_clobbered == 0
1858 && ai->num_calls_found > 0
1859 && ai->num_pure_const_calls_found > 0
1860 && ai->num_calls_found > ai->num_pure_const_calls_found))
1861 create_global_var ();
1864 /* Mark all call-clobbered symbols for renaming. Since the initial
1865 rewrite into SSA ignored all call sites, we may need to rename
1866 .GLOBAL_VAR and the call-clobbered variables. */
1867 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1869 tree var = referenced_var (i);
1871 /* If the function has calls to clobbering functions and
1872 .GLOBAL_VAR has been created, make it an alias for all
1873 call-clobbered variables. */
1874 if (global_var && var != global_var)
1876 add_may_alias (var, global_var);
1877 gcc_assert (!get_subvars_for_var (var));
1880 mark_sym_for_renaming (var);
1885 /* Return TRUE if pointer PTR may point to variable VAR.
1887 MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR.
1888 This is needed because when checking for type conflicts we are
1889 interested in the alias set of the memory location pointed-to by
1890 PTR. The alias set of PTR itself is irrelevant.
1892 VAR_ALIAS_SET is the alias set for VAR. */
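/* Illustrative sketch (not taken from any test case): given

     double d;
     int *p;

   the set passed as MEM_ALIAS_SET is the alias set of 'int' (what *p
   refers to), not the alias set of 'int *'.  Since 'int' and 'double'
   do not conflict under type-based aliasing, the query for (p, d) can
   be resolved to "no alias" by the alias_sets_conflict_p check below.  */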
1894 static bool
1895 may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
1896 tree var, HOST_WIDE_INT var_alias_set,
1897 bool alias_set_only)
1899 tree mem;
1901 alias_stats.alias_queries++;
1902 alias_stats.simple_queries++;
1904 /* By convention, a variable cannot alias itself. */
1905 mem = var_ann (ptr)->symbol_mem_tag;
1906 if (mem == var)
1908 alias_stats.alias_noalias++;
1909 alias_stats.simple_resolved++;
1910 return false;
1913 /* If flag_argument_noalias > 2 (-fargument-noalias-anything), pointer
1914 arguments may not point to anything else. */
1915 if (flag_argument_noalias > 2 && TREE_CODE (ptr) == PARM_DECL)
1917 alias_stats.alias_noalias++;
1918 alias_stats.simple_resolved++;
1919 return false;
1922 /* If flag_argument_noalias > 1 (-fargument-noalias-global or stronger),
1923 pointer arguments may not point to global variables. */
1924 if (flag_argument_noalias > 1 && is_global_var (var)
1925 && TREE_CODE (ptr) == PARM_DECL)
1927 alias_stats.alias_noalias++;
1928 alias_stats.simple_resolved++;
1929 return false;
1932 /* If either MEM or VAR is a read-only global and the other one
1933 isn't, then PTR cannot point to VAR. */
1934 if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
1935 || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
1937 alias_stats.alias_noalias++;
1938 alias_stats.simple_resolved++;
1939 return false;
1942 gcc_assert (TREE_CODE (mem) == SYMBOL_MEMORY_TAG);
1944 alias_stats.tbaa_queries++;
1946 /* If the alias sets don't conflict then MEM cannot alias VAR. */
1947 if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
1949 alias_stats.alias_noalias++;
1950 alias_stats.tbaa_resolved++;
1951 return false;
1954 /* If var is a record or union type, ptr cannot point into var
1955 unless there is some explicit address operation in the
1956 program that can reference a field of ptr's dereferenced
1957 type. This also assumes that the types of both var and ptr are
1958 contained within the compilation unit, and that there is no fancy
1959 addressing arithmetic associated with any of the types
1960 involved. */
1962 if ((mem_alias_set != 0) && (var_alias_set != 0))
1964 tree ptr_type = TREE_TYPE (ptr);
1965 tree var_type = TREE_TYPE (var);
1967 /* The star count is -1 if the type at the end of the pointer_to
1968 chain is not a record or union type. */
1969 if ((!alias_set_only) &&
1970 ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
1972 int ptr_star_count = 0;
1974 /* Ipa_type_escape_star_count_of_interesting_type is a little too
1975 restrictive for the pointer type; we need to allow pointers to
1976 primitive types as long as those types cannot be pointers
1977 to everything. */
1978 while (POINTER_TYPE_P (ptr_type))
1979 /* Strip the *'s off. */
1981 ptr_type = TREE_TYPE (ptr_type);
1982 ptr_star_count++;
1985 /* There does not appear to be a better test to see if the
1986 pointer type was one of the pointer to everything
1987 types. */
1989 if (ptr_star_count > 0)
1991 alias_stats.structnoaddress_queries++;
1992 if (ipa_type_escape_field_does_not_clobber_p (var_type,
1993 TREE_TYPE (ptr)))
1995 alias_stats.structnoaddress_resolved++;
1996 alias_stats.alias_noalias++;
1997 return false;
2000 else if (ptr_star_count == 0)
2002 /* If ptr_type was not really a pointer type, it cannot
2003 alias. */
2004 alias_stats.structnoaddress_queries++;
2005 alias_stats.structnoaddress_resolved++;
2006 alias_stats.alias_noalias++;
2007 return false;
2012 alias_stats.alias_mayalias++;
2013 return true;
2017 /* Add ALIAS to the set of variables that may alias VAR. */
2019 static void
2020 add_may_alias (tree var, tree alias)
2022 size_t i;
2023 var_ann_t v_ann = get_var_ann (var);
2024 var_ann_t a_ann = get_var_ann (alias);
2025 tree al;
2027 /* Don't allow self-referential aliases. */
2028 gcc_assert (var != alias);
2030 /* ALIAS must be addressable if it's being added to an alias set. */
2031 #if 1
2032 TREE_ADDRESSABLE (alias) = 1;
2033 #else
2034 gcc_assert (may_be_aliased (alias));
2035 #endif
2037 if (v_ann->may_aliases == NULL)
2038 v_ann->may_aliases = VEC_alloc (tree, gc, 2);
2040 /* Avoid adding duplicates. */
2041 for (i = 0; VEC_iterate (tree, v_ann->may_aliases, i, al); i++)
2042 if (alias == al)
2043 return;
2045 VEC_safe_push (tree, gc, v_ann->may_aliases, alias);
2046 a_ann->is_aliased = 1;
2050 /* Replace alias I in the alias sets of VAR with NEW_ALIAS. */
2052 static void
2053 replace_may_alias (tree var, size_t i, tree new_alias)
2055 var_ann_t v_ann = var_ann (var);
2056 VEC_replace (tree, v_ann->may_aliases, i, new_alias);
2060 /* Mark pointer PTR as pointing to an arbitrary memory location. */
2062 static void
2063 set_pt_anything (tree ptr)
2065 struct ptr_info_def *pi = get_ptr_info (ptr);
2067 pi->pt_anything = 1;
2068 pi->pt_vars = NULL;
2070 /* The pointer used to have a name tag, but we have now found it pointing
2071 to an arbitrary location. The name tag needs to be renamed and
2072 disassociated from PTR. */
2073 if (pi->name_mem_tag)
2075 mark_sym_for_renaming (pi->name_mem_tag);
2076 pi->name_mem_tag = NULL_TREE;
2081 /* Return true if STMT is an "escape" site from the current function. Escape
2082 sites are those statements which might expose the address of a variable
2083 outside the current function. STMT is an escape site iff:
2085 1- STMT is a function call, or
2086 2- STMT is an __asm__ expression, or
2087 3- STMT is an assignment to a non-local variable, or
2088 4- STMT is a return statement.
2090 AI points to the alias information collected so far.
2092 Return the type of escape site found, if we found one, or NO_ESCAPE
2093 if none. */
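/* A small illustrative example (a sketch only; the names are made up):

     int g;

     int *
     foo (int *p, int *q)
     {
       g = *q;      <- store to the global G: ESCAPE_STORED_IN_GLOBAL
       bar (p);     <- call site: ESCAPE_TO_CALL (or ESCAPE_TO_PURE_CONST
                       if bar has no side effects)
       return p;    <- RETURN_EXPR: ESCAPE_TO_RETURN
     }  */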
2095 enum escape_type
2096 is_escape_site (tree stmt, struct alias_info *ai)
2098 tree call = get_call_expr_in (stmt);
2099 if (call != NULL_TREE)
2101 ai->num_calls_found++;
2103 if (!TREE_SIDE_EFFECTS (call))
2105 ai->num_pure_const_calls_found++;
2106 return ESCAPE_TO_PURE_CONST;
2109 return ESCAPE_TO_CALL;
2111 else if (TREE_CODE (stmt) == ASM_EXPR)
2112 return ESCAPE_TO_ASM;
2113 else if (TREE_CODE (stmt) == MODIFY_EXPR)
2115 tree lhs = TREE_OPERAND (stmt, 0);
2117 /* Get to the base of _REF nodes. */
2118 if (TREE_CODE (lhs) != SSA_NAME)
2119 lhs = get_base_address (lhs);
2121 /* If we couldn't recognize the LHS of the assignment, assume that it
2122 is a non-local store. */
2123 if (lhs == NULL_TREE)
2124 return ESCAPE_UNKNOWN;
2126 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
2127 || TREE_CODE (TREE_OPERAND (stmt, 1)) == CONVERT_EXPR
2128 || TREE_CODE (TREE_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
2130 tree from = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (stmt, 1), 0));
2131 tree to = TREE_TYPE (TREE_OPERAND (stmt, 1));
2133 /* If the RHS is a conversion between a pointer and an integer, the
2134 pointer escapes since we can't track the integer. */
2135 if (POINTER_TYPE_P (from) && !POINTER_TYPE_P (to))
2136 return ESCAPE_BAD_CAST;
2138 /* Same if the RHS is a conversion between a regular pointer and a
2139 ref-all pointer since we can't track the SMT of the former. */
2140 if (POINTER_TYPE_P (from) && !TYPE_REF_CAN_ALIAS_ALL (from)
2141 && POINTER_TYPE_P (to) && TYPE_REF_CAN_ALIAS_ALL (to))
2142 return ESCAPE_BAD_CAST;
2145 /* If the LHS is an SSA name, it can't possibly represent a non-local
2146 memory store. */
2147 if (TREE_CODE (lhs) == SSA_NAME)
2148 return NO_ESCAPE;
2150 /* FIXME: LHS is not an SSA_NAME. Even if it is an assignment to a
2151 local variable, we cannot be sure whether it will escape, because we
2152 don't have information about objects not in SSA form. Need to
2153 implement something along the lines of
2155 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
2156 Midkiff, ``Escape analysis for java,'' in Proceedings of the
2157 Conference on Object-Oriented Programming Systems, Languages, and
2158 Applications (OOPSLA), pp. 1-19, 1999. */
2159 return ESCAPE_STORED_IN_GLOBAL;
2161 else if (TREE_CODE (stmt) == RETURN_EXPR)
2162 return ESCAPE_TO_RETURN;
2164 return NO_ESCAPE;
2167 /* Create a new memory tag of type TYPE.
2168 Does NOT push it into the current binding. */
2170 static tree
2171 create_tag_raw (enum tree_code code, tree type, const char *prefix)
2173 tree tmp_var;
2174 tree new_type;
2176 /* Make the type of the variable writable. */
2177 new_type = build_type_variant (type, 0, 0);
2178 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
2180 tmp_var = build_decl (code, create_tmp_var_name (prefix),
2181 type);
2182 /* Make the variable writable. */
2183 TREE_READONLY (tmp_var) = 0;
2185 /* It doesn't start out global. */
2186 MTAG_GLOBAL (tmp_var) = 0;
2187 TREE_STATIC (tmp_var) = 0;
2188 TREE_USED (tmp_var) = 1;
2190 return tmp_var;
2193 /* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
2194 is considered to represent all the pointers whose pointed-to types are
2195 in the same alias set class. Otherwise, the tag represents a single
2196 SSA_NAME pointer variable. */
2198 static tree
2199 create_memory_tag (tree type, bool is_type_tag)
2201 var_ann_t ann;
2202 tree tag = create_tag_raw (is_type_tag ? SYMBOL_MEMORY_TAG : NAME_MEMORY_TAG,
2203 type, (is_type_tag) ? "SMT" : "NMT");
2205 /* By default, memory tags are local variables. Alias analysis will
2206 determine whether they should be considered globals. */
2207 DECL_CONTEXT (tag) = current_function_decl;
2209 /* Memory tags are by definition addressable. */
2210 TREE_ADDRESSABLE (tag) = 1;
2212 ann = get_var_ann (tag);
2213 ann->symbol_mem_tag = NULL_TREE;
2215 /* Add the tag to the symbol table. */
2216 add_referenced_var (tag);
2218 return tag;
2222 /* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
2223 This is used if P_i has been found to point to a specific set of
2224 variables or to a non-aliased memory location like the address returned
2225 by malloc functions. */
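/* For example (illustrative only): after 'p_1 = malloc (n)', the name
   p_1 can be given its own NMT, because the memory it points to is not
   aliased by any pre-existing variable; this gives more precise virtual
   operands than falling back to the type-based symbol memory tag.  */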
2227 static tree
2228 get_nmt_for (tree ptr)
2230 struct ptr_info_def *pi = get_ptr_info (ptr);
2231 tree tag = pi->name_mem_tag;
2233 if (tag == NULL_TREE)
2234 tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
2235 return tag;
2239 /* Return the symbol memory tag associated with pointer PTR. A memory
2240 tag is an artificial variable that represents the memory location
2241 pointed-to by PTR. It is used to model the effects of pointer
2242 de-references on addressable variables.
2244 AI points to the data gathered during alias analysis. This
2245 function populates the array AI->POINTERS. */
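/* For instance (purely illustrative): if 'int *p' and 'int *q' are both
   dereferenced in the function, they can share a single SMT because
   their pointed-to type 'int' has the same alias set; a store through
   either pointer is then modeled as a V_MAY_DEF of that one tag.  */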
2247 static tree
2248 get_tmt_for (tree ptr, struct alias_info *ai)
2250 size_t i;
2251 tree tag;
2252 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2253 HOST_WIDE_INT tag_set = get_alias_set (tag_type);
2255 /* We use a unique memory tag for all the ref-all pointers. */
2256 if (PTR_IS_REF_ALL (ptr))
2258 if (!ai->ref_all_symbol_mem_tag)
2259 ai->ref_all_symbol_mem_tag = create_memory_tag (void_type_node, true);
2260 return ai->ref_all_symbol_mem_tag;
2263 /* To avoid creating unnecessary memory tags, only create one memory tag
2264 per alias set class. Note that it may be tempting to group
2265 memory tags based on conflicting alias sets instead of
2266 equivalence. That would be wrong because alias sets are not
2267 necessarily transitive (as demonstrated by the libstdc++ test
2268 23_containers/vector/cons/4.cc). Given three alias sets A, B, C
2269 such that conflicts (A, B) == true and conflicts (A, C) == true,
2270 it does not necessarily follow that conflicts (B, C) == true. */
2271 for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
2273 struct alias_map_d *curr = ai->pointers[i];
2274 tree curr_tag = var_ann (curr->var)->symbol_mem_tag;
2275 if (tag_set == curr->set)
2277 tag = curr_tag;
2278 break;
2282 /* If no existing memory tag has the same alias set as PTR's pointed-to
2283 type, create a new tag for PTR and add it to the POINTERS array. */
2284 if (tag == NULL_TREE)
2286 struct alias_map_d *alias_map;
2288 /* If PTR did not have a symbol tag already, create a new SMT, an
2289 artificial variable representing the memory location
2290 pointed-to by PTR. */
2291 if (var_ann (ptr)->symbol_mem_tag == NULL_TREE)
2292 tag = create_memory_tag (tag_type, true);
2293 else
2294 tag = var_ann (ptr)->symbol_mem_tag;
2296 /* Add PTR to the POINTERS array. Note that we are not interested in
2297 PTR's alias set. Instead, we cache the alias set for the memory that
2298 PTR points to. */
2299 alias_map = XCNEW (struct alias_map_d);
2300 alias_map->var = ptr;
2301 alias_map->set = tag_set;
2302 ai->pointers[ai->num_pointers++] = alias_map;
2305 /* If the pointed-to type is volatile, so is the tag. */
2306 TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);
2308 /* Make sure that the symbol tag has the same alias set as the
2309 pointed-to type. */
2310 gcc_assert (tag_set == get_alias_set (tag));
2312 return tag;
2316 /* Create GLOBAL_VAR, an artificial global variable to act as a
2317 representative of all the variables that may be clobbered by function
2318 calls. */
2320 static void
2321 create_global_var (void)
2323 global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
2324 void_type_node);
2325 DECL_ARTIFICIAL (global_var) = 1;
2326 TREE_READONLY (global_var) = 0;
2327 DECL_EXTERNAL (global_var) = 1;
2328 TREE_STATIC (global_var) = 1;
2329 TREE_USED (global_var) = 1;
2330 DECL_CONTEXT (global_var) = NULL_TREE;
2331 TREE_THIS_VOLATILE (global_var) = 0;
2332 TREE_ADDRESSABLE (global_var) = 0;
2334 create_var_ann (global_var);
2335 mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
2336 add_referenced_var (global_var);
2337 mark_sym_for_renaming (global_var);
2341 /* Dump alias statistics on FILE. */
2343 static void
2344 dump_alias_stats (FILE *file)
2346 const char *funcname
2347 = lang_hooks.decl_printable_name (current_function_decl, 2);
2348 fprintf (file, "\nAlias statistics for %s\n\n", funcname);
2349 fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
2350 fprintf (file, "Total alias mayalias results:\t%u\n",
2351 alias_stats.alias_mayalias);
2352 fprintf (file, "Total alias noalias results:\t%u\n",
2353 alias_stats.alias_noalias);
2354 fprintf (file, "Total simple queries:\t%u\n",
2355 alias_stats.simple_queries);
2356 fprintf (file, "Total simple resolved:\t%u\n",
2357 alias_stats.simple_resolved);
2358 fprintf (file, "Total TBAA queries:\t%u\n",
2359 alias_stats.tbaa_queries);
2360 fprintf (file, "Total TBAA resolved:\t%u\n",
2361 alias_stats.tbaa_resolved);
2362 fprintf (file, "Total non-addressable structure type queries:\t%u\n",
2363 alias_stats.structnoaddress_queries);
2364 fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
2365 alias_stats.structnoaddress_resolved);
2369 /* Dump alias information on FILE. */
2371 void
2372 dump_alias_info (FILE *file)
2374 size_t i;
2375 const char *funcname
2376 = lang_hooks.decl_printable_name (current_function_decl, 2);
2377 referenced_var_iterator rvi;
2378 tree var;
2380 fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
2382 fprintf (file, "Aliased symbols\n\n");
2384 FOR_EACH_REFERENCED_VAR (var, rvi)
2386 if (may_be_aliased (var))
2387 dump_variable (file, var);
2390 fprintf (file, "\nDereferenced pointers\n\n");
2392 FOR_EACH_REFERENCED_VAR (var, rvi)
2394 var_ann_t ann = var_ann (var);
2395 if (ann->symbol_mem_tag)
2396 dump_variable (file, var);
2399 fprintf (file, "\nSymbol memory tags\n\n");
2401 FOR_EACH_REFERENCED_VAR (var, rvi)
2403 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
2404 dump_variable (file, var);
2407 fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
2409 fprintf (file, "SSA_NAME pointers\n\n");
2410 for (i = 1; i < num_ssa_names; i++)
2412 tree ptr = ssa_name (i);
2413 struct ptr_info_def *pi;
2415 if (ptr == NULL_TREE)
2416 continue;
2418 pi = SSA_NAME_PTR_INFO (ptr);
2419 if (!SSA_NAME_IN_FREE_LIST (ptr)
2420 && pi
2421 && pi->name_mem_tag)
2422 dump_points_to_info_for (file, ptr);
2425 fprintf (file, "\nName memory tags\n\n");
2427 FOR_EACH_REFERENCED_VAR (var, rvi)
2429 if (TREE_CODE (var) == NAME_MEMORY_TAG)
2430 dump_variable (file, var);
2433 fprintf (file, "\n");
2437 /* Dump alias information on stderr. */
2439 void
2440 debug_alias_info (void)
2442 dump_alias_info (stderr);
2446 /* Return the alias information associated with pointer T. It creates a
2447 new instance if none existed. */
2449 struct ptr_info_def *
2450 get_ptr_info (tree t)
2452 struct ptr_info_def *pi;
2454 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
2456 pi = SSA_NAME_PTR_INFO (t);
2457 if (pi == NULL)
2459 pi = GGC_NEW (struct ptr_info_def);
2460 memset ((void *)pi, 0, sizeof (*pi));
2461 SSA_NAME_PTR_INFO (t) = pi;
2464 return pi;
2468 /* Dump points-to information for SSA_NAME PTR into FILE. */
2470 void
2471 dump_points_to_info_for (FILE *file, tree ptr)
2473 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2475 print_generic_expr (file, ptr, dump_flags);
2477 if (pi)
2479 if (pi->name_mem_tag)
2481 fprintf (file, ", name memory tag: ");
2482 print_generic_expr (file, pi->name_mem_tag, dump_flags);
2485 if (pi->is_dereferenced)
2486 fprintf (file, ", is dereferenced");
2488 if (pi->value_escapes_p)
2489 fprintf (file, ", its value escapes");
2491 if (pi->pt_anything)
2492 fprintf (file, ", points-to anything");
2494 if (pi->pt_null)
2495 fprintf (file, ", points-to NULL");
2497 if (pi->pt_vars)
2499 unsigned ix;
2500 bitmap_iterator bi;
2502 fprintf (file, ", points-to vars: { ");
2503 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, ix, bi)
2505 print_generic_expr (file, referenced_var (ix), dump_flags);
2506 fprintf (file, " ");
2508 fprintf (file, "}");
2512 fprintf (file, "\n");
2516 /* Dump points-to information for VAR into stderr. */
2518 void
2519 debug_points_to_info_for (tree var)
2521 dump_points_to_info_for (stderr, var);
2525 /* Dump points-to information into FILE. NOTE: This function is slow, as
2526 it needs to traverse the whole CFG looking for pointer SSA_NAMEs. */
2528 void
2529 dump_points_to_info (FILE *file)
2531 basic_block bb;
2532 block_stmt_iterator si;
2533 ssa_op_iter iter;
2534 const char *fname =
2535 lang_hooks.decl_printable_name (current_function_decl, 2);
2536 referenced_var_iterator rvi;
2537 tree var;
2539 fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
2541 /* First dump points-to information for the default definitions of
2542 pointer variables. This is necessary because default definitions are
2543 not part of the code. */
2544 FOR_EACH_REFERENCED_VAR (var, rvi)
2546 if (POINTER_TYPE_P (TREE_TYPE (var)))
2548 tree def = default_def (var);
2549 if (def)
2550 dump_points_to_info_for (file, def);
2554 /* Dump points-to information for every pointer defined in the program. */
2555 FOR_EACH_BB (bb)
2557 tree phi;
2559 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2561 tree ptr = PHI_RESULT (phi);
2562 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
2563 dump_points_to_info_for (file, ptr);
2566 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
2568 tree stmt = bsi_stmt (si);
2569 tree def;
2570 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
2571 if (POINTER_TYPE_P (TREE_TYPE (def)))
2572 dump_points_to_info_for (file, def);
2576 fprintf (file, "\n");
2580 /* Dump points-to information for the current function into stderr. */
2582 void
2583 debug_points_to_info (void)
2585 dump_points_to_info (stderr);
2588 /* Dump to FILE the list of variables that may be aliasing VAR. */
2590 void
2591 dump_may_aliases_for (FILE *file, tree var)
2593 VEC(tree, gc) *aliases;
2595 if (TREE_CODE (var) == SSA_NAME)
2596 var = SSA_NAME_VAR (var);
2598 aliases = var_ann (var)->may_aliases;
2599 if (aliases)
2601 size_t i;
2602 tree al;
2603 fprintf (file, "{ ");
2604 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2606 print_generic_expr (file, al, dump_flags);
2607 fprintf (file, " ");
2609 fprintf (file, "}");
2614 /* Dump to stderr the list of variables that may be aliasing VAR. */
2616 void
2617 debug_may_aliases_for (tree var)
2619 dump_may_aliases_for (stderr, var);
2622 /* Return true if VAR may be aliased. */
2624 bool
2625 may_be_aliased (tree var)
2627 /* Obviously. */
2628 if (TREE_ADDRESSABLE (var))
2629 return true;
2631 /* Globally visible variables can have their addresses taken by other
2632 translation units. */
2634 if (MTAG_P (var)
2635 && (MTAG_GLOBAL (var) || TREE_PUBLIC (var)))
2636 return true;
2637 else if (!MTAG_P (var)
2638 && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
2639 return true;
2641 /* Automatic variables can't have their addresses escape any other way.
2642 This must be after the check for global variables, as extern declarations
2643 do not have TREE_STATIC set. */
2644 if (!TREE_STATIC (var))
2645 return false;
2647 /* If we're in unit-at-a-time mode, then we must have seen all occurrences
2648 of address-of operators, and so we can trust TREE_ADDRESSABLE. Otherwise
2649 we can only be sure the variable isn't addressable if it's local to the
2650 current function. */
2651 if (flag_unit_at_a_time)
2652 return false;
2653 if (decl_function_context (var) == current_function_decl)
2654 return false;
2656 return true;
2660 /* Given two symbols, return TRUE if one is in the alias set of the other. */
2661 bool
2662 is_aliased_with (tree tag, tree sym)
2664 size_t i;
2665 VEC(tree,gc) *aliases;
2666 tree al;
2668 if (var_ann (sym)->is_aliased)
2670 aliases = var_ann (tag)->may_aliases;
2672 if (aliases == NULL)
2673 return false;
2675 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2676 if (al == sym)
2677 return true;
2679 else
2681 aliases = var_ann (sym)->may_aliases;
2683 if (aliases == NULL)
2684 return false;
2686 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2687 if (al == tag)
2688 return true;
2691 return false;
2695 /* Create a new symbol tag for PTR. Construct the may-alias list of this type
2696 tag so that it has the aliasing of VAR.
2698 Note that the set of aliases represented by the new symbol tag is not marked
2699 for renaming. */
2701 void
2702 new_type_alias (tree ptr, tree var)
2704 var_ann_t p_ann = var_ann (ptr);
2705 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2706 var_ann_t v_ann = var_ann (var);
2707 tree tag;
2708 subvar_t svars;
2710 gcc_assert (p_ann->symbol_mem_tag == NULL_TREE);
2711 gcc_assert (!MTAG_P (var));
2713 /* Add VAR to the may-alias set of PTR's new symbol tag. If VAR has
2714 subvars, add the subvars to the tag instead of the actual var. */
2715 if (var_can_have_subvars (var)
2716 && (svars = get_subvars_for_var (var)))
2718 subvar_t sv;
2720 tag = create_memory_tag (tag_type, true);
2721 p_ann->symbol_mem_tag = tag;
2723 for (sv = svars; sv; sv = sv->next)
2724 add_may_alias (tag, sv->var);
2726 else
2728 /* The following is based on code in add_stmt_operand to ensure that the
2729 same defs/uses/vdefs/vuses will be found after replacing a reference
2730 to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
2731 is the address of var. */
2732 VEC(tree, gc) *aliases = v_ann->may_aliases;
2734 if ((aliases != NULL)
2735 && (VEC_length (tree, aliases) == 1))
2737 tree ali = VEC_index (tree, aliases, 0);
2739 if (TREE_CODE (ali) == SYMBOL_MEMORY_TAG)
2741 p_ann->symbol_mem_tag = ali;
2742 return;
2746 tag = create_memory_tag (tag_type, true);
2747 p_ann->symbol_mem_tag = tag;
2749 if (aliases == NULL)
2750 add_may_alias (tag, var);
2751 else
2753 unsigned i;
2754 tree al;
2756 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2757 add_may_alias (tag, al);
2761 TREE_READONLY (tag) = TREE_READONLY (var);
2762 MTAG_GLOBAL (tag) = is_global_var (var);
2767 /* This represents the used range of a variable. */
2769 typedef struct used_part
2771 HOST_WIDE_INT minused;
2772 HOST_WIDE_INT maxused;
2773 /* True if we have an explicit use/def of some portion of this variable,
2774 even if it is all of it, e.g. a.b = 5 or temp = a.b. */
2775 bool explicit_uses;
2776 /* True if we have an implicit use/def of some portion of this
2777 variable. Implicit uses occur when we can't tell what part we
2778 are referencing, and have to make conservative assumptions. */
2779 bool implicit_uses;
2780 /* True if the structure is only written to or has its address taken. */
2781 bool write_only;
2782 } *used_part_t;
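/* Hypothetical illustration (the field layout is made up): after seeing
   's.b = 1', where field 'b' occupies bits [32, 64) of 's', the used_part
   entry for 's' would record minused == 32, maxused == 64 and
   explicit_uses == true, while write_only stays true until some part of
   's' is also read.  */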
2784 /* A hash table of used_part structures, indexed by variable uid. */
2786 static htab_t used_portions;
2788 struct used_part_map
2790 unsigned int uid;
2791 used_part_t to;
2794 /* Return true if the uids in the two used part maps are equal. */
2796 static int
2797 used_part_map_eq (const void *va, const void *vb)
2799 const struct used_part_map *a = (const struct used_part_map *) va;
2800 const struct used_part_map *b = (const struct used_part_map *) vb;
2801 return (a->uid == b->uid);
2804 /* Hash a used_part_map on its uid. */
2806 static unsigned int
2807 used_part_map_hash (const void *item)
2809 return ((const struct used_part_map *)item)->uid;
2812 /* Free a used part map element. */
2814 static void
2815 free_used_part_map (void *item)
2817 free (((struct used_part_map *)item)->to);
2818 free (item);
2821 /* Lookup a used_part structure for a UID. */
2823 static used_part_t
2824 up_lookup (unsigned int uid)
2826 struct used_part_map *h, in;
2827 in.uid = uid;
2828 h = (struct used_part_map *) htab_find_with_hash (used_portions, &in, uid);
2829 if (!h)
2830 return NULL;
2831 return h->to;
2834 /* Insert the pair UID, TO into the used part hashtable. */
2836 static void
2837 up_insert (unsigned int uid, used_part_t to)
2839 struct used_part_map *h;
2840 void **loc;
2842 h = XNEW (struct used_part_map);
2843 h->uid = uid;
2844 h->to = to;
2845 loc = htab_find_slot_with_hash (used_portions, h,
2846 uid, INSERT);
2847 if (*loc != NULL)
2848 free (*loc);
2849 *(struct used_part_map **) loc = h;
2853 /* Given a variable uid, UID, get or create the entry in the used portions
2854 table for the variable. */
2856 static used_part_t
2857 get_or_create_used_part_for (size_t uid)
2859 used_part_t up;
2860 if ((up = up_lookup (uid)) == NULL)
2862 up = XCNEW (struct used_part);
2863 up->minused = INT_MAX;
2864 up->maxused = 0;
2865 up->explicit_uses = false;
2866 up->implicit_uses = false;
2867 up->write_only = true;
2870 return up;
2874 /* Create and return a structure sub-variable for field type FIELD at
2875 offset OFFSET, with size SIZE, of variable VAR. */
2877 static tree
2878 create_sft (tree var, tree field, unsigned HOST_WIDE_INT offset,
2879 unsigned HOST_WIDE_INT size)
2881 var_ann_t ann;
2882 tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");
2884 /* We need to copy the various flags from VAR to SUBVAR, so that
2885 the new subvariable is is_global_var iff the original variable was. */
2886 DECL_CONTEXT (subvar) = DECL_CONTEXT (var);
2887 MTAG_GLOBAL (subvar) = DECL_EXTERNAL (var);
2888 TREE_PUBLIC (subvar) = TREE_PUBLIC (var);
2889 TREE_STATIC (subvar) = TREE_STATIC (var);
2890 TREE_READONLY (subvar) = TREE_READONLY (var);
2891 TREE_ADDRESSABLE (subvar) = TREE_ADDRESSABLE (var);
2893 /* Add the new variable to REFERENCED_VARS. */
2894 ann = get_var_ann (subvar);
2895 ann->symbol_mem_tag = NULL;
2896 add_referenced_var (subvar);
2897 SFT_PARENT_VAR (subvar) = var;
2898 SFT_OFFSET (subvar) = offset;
2899 SFT_SIZE (subvar) = size;
2900 return subvar;
2904 /* Given an aggregate VAR, create the subvariables that represent its
2905 fields. */
2907 static void
2908 create_overlap_variables_for (tree var)
2910 VEC(fieldoff_s,heap) *fieldstack = NULL;
2911 used_part_t up;
2912 size_t uid = DECL_UID (var);
2914 up = up_lookup (uid);
2915 if (!up
2916 || up->write_only)
2917 return;
2919 push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL);
2920 if (VEC_length (fieldoff_s, fieldstack) != 0)
2922 subvar_t *subvars;
2923 fieldoff_s *fo;
2924 bool notokay = false;
2925 int fieldcount = 0;
2926 int i;
2927 HOST_WIDE_INT lastfooffset = -1;
2928 HOST_WIDE_INT lastfosize = -1;
2929 tree lastfotype = NULL_TREE;
2931 /* Not all fields have DECL_SIZE set; for those that don't, we don't
2932 know their size and thus can't handle them.
2933 The same is true of fields whose DECL_SIZE is not an integer
2934 constant (such as variable-sized fields).
2935 Fields with offsets which are not constant will have an offset < 0.
2936 We *could* handle fields that are constant-sized arrays, but
2937 currently don't. Doing so would require some extra changes to
2938 tree-ssa-operands.c. */
2940 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
2942 if (!fo->size
2943 || TREE_CODE (fo->size) != INTEGER_CST
2944 || fo->offset < 0)
2946 notokay = true;
2947 break;
2949 fieldcount++;
2952 /* The current heuristic we use is as follows:
2953 If the variable has no used portions in this function, no
2954 structure vars are created for it.
2955 Otherwise,
2956 If the variable has fewer than SALIAS_MAX_IMPLICIT_FIELDS fields,
2957 we always create structure vars for it.
2958 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS fields and
2959 some explicit uses, we create structure vars for it.
2960 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS fields and
2961 no explicit uses, we do not create structure vars for it.
2964 if (fieldcount >= SALIAS_MAX_IMPLICIT_FIELDS
2965 && !up->explicit_uses)
2967 if (dump_file && (dump_flags & TDF_DETAILS))
2969 fprintf (dump_file, "Variable ");
2970 print_generic_expr (dump_file, var, 0);
2971 fprintf (dump_file, " has no explicit uses in this function, and is > SALIAS_MAX_IMPLICIT_FIELDS, so skipping\n");
2973 notokay = true;
2976 /* Bail out, if we can't create overlap variables. */
2977 if (notokay)
2979 VEC_free (fieldoff_s, heap, fieldstack);
2980 return;
2983 /* Otherwise, create the variables. */
2984 subvars = lookup_subvars_for_var (var);
2986 sort_fieldstack (fieldstack);
2988 for (i = VEC_length (fieldoff_s, fieldstack);
2989 VEC_iterate (fieldoff_s, fieldstack, --i, fo);)
2991 subvar_t sv;
2992 HOST_WIDE_INT fosize;
2993 tree currfotype;
2995 fosize = TREE_INT_CST_LOW (fo->size);
2996 currfotype = fo->type;
2998 /* If this field isn't in the used portion,
2999 or it has the exact same offset and size as the last
3000 field, skip it. */
3002 if (((fo->offset <= up->minused
3003 && fo->offset + fosize <= up->minused)
3004 || fo->offset >= up->maxused)
3005 || (fo->offset == lastfooffset
3006 && fosize == lastfosize
3007 && currfotype == lastfotype))
3008 continue;
3009 sv = GGC_NEW (struct subvar);
3010 sv->next = *subvars;
3011 sv->var = create_sft (var, fo->type, fo->offset, fosize);
3013 if (dump_file)
3015 fprintf (dump_file, "structure field tag %s created for var %s",
3016 get_name (sv->var), get_name (var));
3017 fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
3018 SFT_OFFSET (sv->var));
3019 fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
3020 SFT_SIZE (sv->var));
3021 fprintf (dump_file, "\n");
3024 lastfotype = currfotype;
3025 lastfooffset = fo->offset;
3026 lastfosize = fosize;
3027 *subvars = sv;
3030 /* Once we have created subvars, the original is no longer call
3031 clobbered on its own. Its call clobbered status depends
3032 completely on the call clobbered status of the subvars.
3034 add_referenced_var in the above loop will take care of
3035 marking subvars of global variables as call clobbered for us
3036 to start, since they are global as well. */
3037 clear_call_clobbered (var);
3040 VEC_free (fieldoff_s, heap, fieldstack);
3044 /* Find the conservative answer to the question of what portions of what
3045 structures are used by this statement. We assume that if we have a
3046 component ref with a known size + offset, we only need that part
3047 of the structure. For unknown cases, or cases where we do something
3048 to the whole structure, we assume we need to create fields for the
3049 entire structure. */
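/* A short sketch of the intent (hypothetical struct and statements):

     struct S { int a; int b; } x, y;
     int *p;

     x.a = 0;     <- COMPONENT_REF with a known extent: only the bytes of
                     field 'a' are recorded, as an explicit use.
     y = x;       <- bare uses of whole variables: all of 'x' and 'y' are
                     recorded as implicit uses.
     p = &x.b;    <- taking an address: conservatively, all of 'x' is
                     recorded as an implicit use.  */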
3051 static tree
3052 find_used_portions (tree *tp, int *walk_subtrees, void *lhs_p)
3054 switch (TREE_CODE (*tp))
3056 case MODIFY_EXPR:
3057 /* Recurse manually here to track whether the use is in the
3058 LHS of an assignment. */
3059 find_used_portions (&TREE_OPERAND (*tp, 0), walk_subtrees, tp);
3060 return find_used_portions (&TREE_OPERAND (*tp, 1), walk_subtrees, NULL);
3061 case REALPART_EXPR:
3062 case IMAGPART_EXPR:
3063 case COMPONENT_REF:
3064 case ARRAY_REF:
3066 HOST_WIDE_INT bitsize;
3067 HOST_WIDE_INT bitmaxsize;
3068 HOST_WIDE_INT bitpos;
3069 tree ref;
3070 ref = get_ref_base_and_extent (*tp, &bitpos, &bitsize, &bitmaxsize);
3071 if (DECL_P (ref)
3072 && var_can_have_subvars (ref)
3073 && bitmaxsize != -1)
3075 size_t uid = DECL_UID (ref);
3076 used_part_t up;
3078 up = get_or_create_used_part_for (uid);
3080 if (bitpos <= up->minused)
3081 up->minused = bitpos;
3082 if ((bitpos + bitmaxsize >= up->maxused))
3083 up->maxused = bitpos + bitmaxsize;
3085 if (bitsize == bitmaxsize)
3086 up->explicit_uses = true;
3087 else
3088 up->implicit_uses = true;
3089 if (!lhs_p)
3090 up->write_only = false;
3091 up_insert (uid, up);
3093 *walk_subtrees = 0;
3094 return NULL_TREE;
3097 break;
3098 /* This is here to make sure we mark the entire base variable as used
3099 when you take its address. Because our used portion analysis is
3100 simple, we aren't looking at casts or pointer arithmetic to see what
3101 happens when you take the address. */
3102 case ADDR_EXPR:
3104 tree var = get_base_address (TREE_OPERAND (*tp, 0));
3106 if (var
3107 && DECL_P (var)
3108 && DECL_SIZE (var)
3109 && var_can_have_subvars (var)
3110 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3112 used_part_t up;
3113 size_t uid = DECL_UID (var);
3115 up = get_or_create_used_part_for (uid);
3117 up->minused = 0;
3118 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3119 up->implicit_uses = true;
3120 if (!lhs_p)
3121 up->write_only = false;
3123 up_insert (uid, up);
3124 *walk_subtrees = 0;
3125 return NULL_TREE;
3128 break;
3129 case CALL_EXPR:
3131 tree *arg;
3132 for (arg = &TREE_OPERAND (*tp, 1); *arg; arg = &TREE_CHAIN (*arg))
3134 if (TREE_CODE (TREE_VALUE (*arg)) != ADDR_EXPR)
3135 find_used_portions (&TREE_VALUE (*arg), walk_subtrees, NULL);
3137 *walk_subtrees = 0;
3138 return NULL_TREE;
3140 case VAR_DECL:
3141 case PARM_DECL:
3142 case RESULT_DECL:
3144 tree var = *tp;
3145 if (DECL_SIZE (var)
3146 && var_can_have_subvars (var)
3147 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3149 used_part_t up;
3150 size_t uid = DECL_UID (var);
3152 up = get_or_create_used_part_for (uid);
3154 up->minused = 0;
3155 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3156 up->implicit_uses = true;
3158 up_insert (uid, up);
3159 *walk_subtrees = 0;
3160 return NULL_TREE;
3163 break;
3165 default:
3166 break;
3169 return NULL_TREE;
3172 /* Create structure field variables for structures used in this function. */
3174 static unsigned int
3175 create_structure_vars (void)
3177 basic_block bb;
3178 safe_referenced_var_iterator rvi;
3179 VEC (tree, heap) *varvec = NULL;
3180 tree var;
3182 used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
3183 free_used_part_map);
3185 FOR_EACH_BB (bb)
3187 block_stmt_iterator bsi;
3188 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3190 walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
3191 find_used_portions,
3192 NULL);
3195 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, rvi)
3197 /* The C++ FE creates vars without DECL_SIZE set, for some reason. */
3198 if (var
3199 && DECL_SIZE (var)
3200 && var_can_have_subvars (var)
3201 && !MTAG_P (var)
3202 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3203 create_overlap_variables_for (var);
3205 htab_delete (used_portions);
3206 VEC_free (tree, heap, varvec);
3207 return 0;
3210 static bool
3211 gate_structure_vars (void)
3213 return flag_tree_salias != 0;
3216 struct tree_opt_pass pass_create_structure_vars =
3218 "salias", /* name */
3219 gate_structure_vars, /* gate */
3220 create_structure_vars, /* execute */
3221 NULL, /* sub */
3222 NULL, /* next */
3223 0, /* static_pass_number */
3224 0, /* tv_id */
3225 PROP_cfg, /* properties_required */
3226 0, /* properties_provided */
3227 0, /* properties_destroyed */
3228 0, /* todo_flags_start */
3229 TODO_dump_func, /* todo_flags_finish */
3230 0 /* letter */
3233 /* Reset the DECL_CALL_CLOBBERED flags on our referenced vars. In
3234 theory, this only needs to be done for globals. */
3236 static unsigned int
3237 reset_cc_flags (void)
3239 tree var;
3240 referenced_var_iterator rvi;
3242 FOR_EACH_REFERENCED_VAR (var, rvi)
3243 DECL_CALL_CLOBBERED (var) = false;
3244 return 0;
3247 struct tree_opt_pass pass_reset_cc_flags =
3249 NULL, /* name */
3250 NULL, /* gate */
3251 reset_cc_flags, /* execute */
3252 NULL, /* sub */
3253 NULL, /* next */
3254 0, /* static_pass_number */
3255 0, /* tv_id */
3256 PROP_referenced_vars |PROP_cfg, /* properties_required */
3257 0, /* properties_provided */
3258 0, /* properties_destroyed */
3259 0, /* todo_flags_start */
3260 0, /* todo_flags_finish */
3261 0 /* letter */