Privatize SSA variables into gimple_df.
[official-gcc.git] / gcc / tree-ssa-alias.c
blob65a8417b9d329c33804466defb9a45c6d6e6c9ef
1 /* Alias analysis for trees.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "timevar.h"
32 #include "expr.h"
33 #include "ggc.h"
34 #include "langhooks.h"
35 #include "flags.h"
36 #include "function.h"
37 #include "diagnostic.h"
38 #include "tree-dump.h"
39 #include "tree-gimple.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "tree-ssa-structalias.h"
44 #include "convert.h"
45 #include "params.h"
46 #include "ipa-type-escape.h"
47 #include "vec.h"
48 #include "bitmap.h"
49 #include "vecprim.h"
/* Obstack used to hold grouping bitmaps and other temporary bitmaps used by
   aliasing.  All bitmaps allocated on it live for the duration of one alias
   computation and are released in bulk by delete_alias_info.  */
static bitmap_obstack alias_obstack;
55 /* Structure to map a variable to its alias set and keep track of the
56 virtual operands that will be needed to represent it. */
57 struct alias_map_d
59 /* Variable and its alias set. */
60 tree var;
61 HOST_WIDE_INT set;
63 /* Total number of virtual operands that will be needed to represent
64 all the aliases of VAR. */
65 long total_alias_vops;
67 /* Nonzero if the aliases for this memory tag have been grouped
68 already. Used in group_aliases. */
69 unsigned int grouped_p : 1;
71 /* Set of variables aliased with VAR. This is the exact same
72 information contained in VAR_ANN (VAR)->MAY_ALIASES, but in
73 bitmap form to speed up alias grouping. */
74 bitmap may_aliases;
/* Counters used to display statistics on alias analysis.  */
struct alias_stats_d
{
  unsigned int alias_queries;
  unsigned int alias_mayalias;
  unsigned int alias_noalias;
  unsigned int simple_queries;
  unsigned int simple_resolved;
  unsigned int tbaa_queries;
  unsigned int tbaa_resolved;
  unsigned int structnoaddress_queries;
  unsigned int structnoaddress_resolved;
};


/* Local variables.  */
static struct alias_stats_d alias_stats;
/* Local functions.  Forward declarations for the static helpers defined
   later in this file.  */
static void compute_flow_insensitive_aliasing (struct alias_info *);
static void finalize_ref_all_pointers (struct alias_info *);
static void dump_alias_stats (FILE *);
static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT, bool);
static tree create_memory_tag (tree type, bool is_type_tag);
static tree get_tmt_for (tree, struct alias_info *);
static tree get_nmt_for (tree);
static void add_may_alias (tree, tree);
static void replace_may_alias (tree, size_t, tree);
static struct alias_info *init_alias_info (void);
static void delete_alias_info (struct alias_info *);
static void compute_flow_sensitive_aliasing (struct alias_info *);
static void setup_pointers_and_addressables (struct alias_info *);
static void create_global_var (void);
static void maybe_create_global_var (struct alias_info *ai);
static void group_aliases (struct alias_info *);
static void set_pt_anything (tree ptr);
115 /* Global declarations. */
117 /* qsort comparison function to sort type/name tags by DECL_UID. */
119 static int
120 sort_tags_by_id (const void *pa, const void *pb)
122 tree a = *(tree *)pa;
123 tree b = *(tree *)pb;
125 return DECL_UID (a) - DECL_UID (b);
128 /* Initialize WORKLIST to contain those memory tags that are marked call
129 clobbered. Initialized WORKLIST2 to contain the reasons these
130 memory tags escaped. */
132 static void
133 init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
134 VEC (int, heap) **worklist2)
136 referenced_var_iterator rvi;
137 tree curr;
139 FOR_EACH_REFERENCED_VAR (curr, rvi)
141 if (MTAG_P (curr) && is_call_clobbered (curr))
143 VEC_safe_push (tree, heap, *worklist, curr);
144 VEC_safe_push (int, heap, *worklist2, var_ann (curr)->escape_mask);
149 /* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
150 ALIAS is not already marked call clobbered, and is a memory
151 tag. */
153 static void
154 add_to_worklist (tree alias, VEC (tree, heap) **worklist,
155 VEC (int, heap) **worklist2,
156 int reason)
158 if (MTAG_P (alias) && !is_call_clobbered (alias))
160 VEC_safe_push (tree, heap, *worklist, alias);
161 VEC_safe_push (int, heap, *worklist2, reason);
165 /* Mark aliases of TAG as call clobbered, and place any tags on the
166 alias list that were not already call clobbered on WORKLIST. */
168 static void
169 mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
170 VEC (int, heap) **worklist2)
172 unsigned int i;
173 VEC (tree, gc) *ma;
174 tree entry;
175 var_ann_t ta = var_ann (tag);
177 if (!MTAG_P (tag))
178 return;
179 ma = may_aliases (tag);
180 if (!ma)
181 return;
183 for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
185 if (!unmodifiable_var_p (entry))
187 add_to_worklist (entry, worklist, worklist2, ta->escape_mask);
188 mark_call_clobbered (entry, ta->escape_mask);
193 /* Tags containing global vars need to be marked as global.
194 Tags containing call clobbered vars need to be marked as call
195 clobbered. */
197 static void
198 compute_tag_properties (void)
200 referenced_var_iterator rvi;
201 tree tag;
202 bool changed = true;
203 VEC (tree, heap) *taglist = NULL;
205 FOR_EACH_REFERENCED_VAR (tag, rvi)
207 if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG)
208 continue;
209 VEC_safe_push (tree, heap, taglist, tag);
212 /* We sort the taglist by DECL_UID, for two reasons.
213 1. To get a sequential ordering to make the bitmap accesses
214 faster.
215 2. Because of the way we compute aliases, it's more likely that
216 an earlier tag is included in a later tag, and this will reduce
217 the number of iterations.
219 If we had a real tag graph, we would just topo-order it and be
220 done with it. */
221 qsort (VEC_address (tree, taglist),
222 VEC_length (tree, taglist),
223 sizeof (tree),
224 sort_tags_by_id);
226 /* Go through each tag not marked as global, and if it aliases
227 global vars, mark it global.
229 If the tag contains call clobbered vars, mark it call
230 clobbered.
232 This loop iterates because tags may appear in the may-aliases
233 list of other tags when we group. */
235 while (changed)
237 unsigned int k;
239 changed = false;
240 for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
242 VEC (tree, gc) *ma;
243 unsigned int i;
244 tree entry;
245 bool tagcc = is_call_clobbered (tag);
246 bool tagglobal = MTAG_GLOBAL (tag);
248 if (tagcc && tagglobal)
249 continue;
251 ma = may_aliases (tag);
252 if (!ma)
253 continue;
255 for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
257 /* Call clobbered entries cause the tag to be marked
258 call clobbered. */
259 if (!tagcc && is_call_clobbered (entry))
261 mark_call_clobbered (tag, var_ann (entry)->escape_mask);
262 tagcc = true;
263 changed = true;
266 /* Global vars cause the tag to be marked global. */
267 if (!tagglobal && is_global_var (entry))
269 MTAG_GLOBAL (tag) = true;
270 changed = true;
271 tagglobal = true;
274 /* Early exit once both global and cc are set, since the
275 loop can't do any more than that. */
276 if (tagcc && tagglobal)
277 break;
281 VEC_free (tree, heap, taglist);
284 /* Set up the initial variable clobbers and globalness.
285 When this function completes, only tags whose aliases need to be
286 clobbered will be set clobbered. Tags clobbered because they
287 contain call clobbered vars are handled in compute_tag_properties. */
289 static void
290 set_initial_properties (struct alias_info *ai)
292 unsigned int i;
293 referenced_var_iterator rvi;
294 tree var;
295 tree ptr;
297 FOR_EACH_REFERENCED_VAR (var, rvi)
299 if (is_global_var (var)
300 && (!var_can_have_subvars (var)
301 || get_subvars_for_var (var) == NULL))
303 if (!unmodifiable_var_p (var))
304 mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
306 else if (TREE_CODE (var) == PARM_DECL
307 && gimple_default_def (cfun, var)
308 && POINTER_TYPE_P (TREE_TYPE (var)))
310 tree def = gimple_default_def (cfun, var);
311 get_ptr_info (def)->value_escapes_p = 1;
312 get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
316 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
318 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
319 var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
321 if (pi->value_escapes_p)
323 /* If PTR escapes then its associated memory tags and
324 pointed-to variables are call-clobbered. */
325 if (pi->name_mem_tag)
326 mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);
328 if (v_ann->symbol_mem_tag)
329 mark_call_clobbered (v_ann->symbol_mem_tag, pi->escape_mask);
331 if (pi->pt_vars)
333 bitmap_iterator bi;
334 unsigned int j;
335 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
336 if (!unmodifiable_var_p (referenced_var (j)))
337 mark_call_clobbered (referenced_var (j), pi->escape_mask);
341 /* If the name tag is call clobbered, so is the symbol tag
342 associated with the base VAR_DECL. */
343 if (pi->name_mem_tag
344 && v_ann->symbol_mem_tag
345 && is_call_clobbered (pi->name_mem_tag))
346 mark_call_clobbered (v_ann->symbol_mem_tag, pi->escape_mask);
348 /* Name tags and symbol tags that we don't know where they point
349 to, might point to global memory, and thus, are clobbered.
351 FIXME: This is not quite right. They should only be
352 clobbered if value_escapes_p is true, regardless of whether
353 they point to global memory or not.
354 So removing this code and fixing all the bugs would be nice.
355 It is the cause of a bunch of clobbering. */
356 if ((pi->pt_global_mem || pi->pt_anything)
357 && pi->is_dereferenced && pi->name_mem_tag)
359 mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
360 MTAG_GLOBAL (pi->name_mem_tag) = true;
363 if ((pi->pt_global_mem || pi->pt_anything)
364 && pi->is_dereferenced
365 && v_ann->symbol_mem_tag)
367 mark_call_clobbered (v_ann->symbol_mem_tag, ESCAPE_IS_GLOBAL);
368 MTAG_GLOBAL (v_ann->symbol_mem_tag) = true;
/* This variable is set to true if we are updating the used alone
   information for SMTs, or are in a pass that is going to break it
   temporarily.  Set and cleared in recalculate_used_alone and around
   the statement-update loop in compute_may_aliases.  */
bool updating_used_alone;
379 /* Compute which variables need to be marked call clobbered because
380 their tag is call clobbered, and which tags need to be marked
381 global because they contain global variables. */
383 static void
384 compute_call_clobbered (struct alias_info *ai)
386 VEC (tree, heap) *worklist = NULL;
387 VEC(int,heap) *worklist2 = NULL;
389 set_initial_properties (ai);
390 init_transitive_clobber_worklist (&worklist, &worklist2);
391 while (VEC_length (tree, worklist) != 0)
393 tree curr = VEC_pop (tree, worklist);
394 int reason = VEC_pop (int, worklist2);
396 mark_call_clobbered (curr, reason);
397 mark_aliases_call_clobbered (curr, &worklist, &worklist2);
399 VEC_free (tree, heap, worklist);
400 VEC_free (int, heap, worklist2);
401 compute_tag_properties ();
405 /* Helper for recalculate_used_alone. Return a conservatively correct
406 answer as to whether STMT may make a store on the LHS to SYM. */
408 static bool
409 lhs_may_store_to (tree stmt, tree sym ATTRIBUTE_UNUSED)
411 tree lhs = TREE_OPERAND (stmt, 0);
413 lhs = get_base_address (lhs);
415 if (!lhs)
416 return false;
418 if (TREE_CODE (lhs) == SSA_NAME)
419 return false;
420 /* We could do better here by looking at the type tag of LHS, but it
421 is unclear whether this is worth it. */
422 return true;
425 /* Recalculate the used_alone information for SMTs . */
427 void
428 recalculate_used_alone (void)
430 VEC (tree, heap) *calls = NULL;
431 block_stmt_iterator bsi;
432 basic_block bb;
433 tree stmt;
434 size_t i;
435 referenced_var_iterator rvi;
436 tree var;
438 /* First, reset all the SMT used alone bits to zero. */
439 updating_used_alone = true;
440 FOR_EACH_REFERENCED_VAR (var, rvi)
441 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
443 SMT_OLD_USED_ALONE (var) = SMT_USED_ALONE (var);
444 SMT_USED_ALONE (var) = 0;
447 /* Walk all the statements.
448 Calls get put into a list of statements to update, since we will
449 need to update operands on them if we make any changes.
450 If we see a bare use of a SMT anywhere in a real virtual use or virtual
451 def, mark the SMT as used alone, and for renaming. */
452 FOR_EACH_BB (bb)
454 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
456 bool iscall = false;
457 ssa_op_iter iter;
459 stmt = bsi_stmt (bsi);
461 if (TREE_CODE (stmt) == CALL_EXPR
462 || (TREE_CODE (stmt) == MODIFY_EXPR
463 && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
465 iscall = true;
466 VEC_safe_push (tree, heap, calls, stmt);
469 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter,
470 SSA_OP_VUSE | SSA_OP_VIRTUAL_DEFS)
472 tree svar = var;
474 if (TREE_CODE (var) == SSA_NAME)
475 svar = SSA_NAME_VAR (var);
477 if (TREE_CODE (svar) == SYMBOL_MEMORY_TAG)
479 /* We only care about the LHS on calls. */
480 if (iscall && !lhs_may_store_to (stmt, svar))
481 continue;
483 if (!SMT_USED_ALONE (svar))
485 SMT_USED_ALONE (svar) = true;
487 /* Only need to mark for renaming if it wasn't
488 used alone before. */
489 if (!SMT_OLD_USED_ALONE (svar))
490 mark_sym_for_renaming (svar);
497 /* Update the operands on all the calls we saw. */
498 if (calls)
500 for (i = 0; VEC_iterate (tree, calls, i, stmt); i++)
501 update_stmt (stmt);
504 /* We need to mark SMT's that are no longer used for renaming so the
505 symbols go away, or else verification will be angry with us, even
506 though they are dead. */
507 FOR_EACH_REFERENCED_VAR (var, rvi)
508 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
510 if (SMT_OLD_USED_ALONE (var) && !SMT_USED_ALONE (var))
511 mark_sym_for_renaming (var);
514 VEC_free (tree, heap, calls);
515 updating_used_alone = false;
518 /* Compute may-alias information for every variable referenced in function
519 FNDECL.
521 Alias analysis proceeds in 3 main phases:
523 1- Points-to and escape analysis.
525 This phase walks the use-def chains in the SSA web looking for three
526 things:
528 * Assignments of the form P_i = &VAR
529 * Assignments of the form P_i = malloc()
530 * Pointers and ADDR_EXPR that escape the current function.
532 The concept of 'escaping' is the same one used in the Java world. When
533 a pointer or an ADDR_EXPR escapes, it means that it has been exposed
534 outside of the current function. So, assignment to global variables,
535 function arguments and returning a pointer are all escape sites, as are
536 conversions between pointers and integers.
538 This is where we are currently limited. Since not everything is renamed
539 into SSA, we lose track of escape properties when a pointer is stashed
540 inside a field in a structure, for instance. In those cases, we are
541 assuming that the pointer does escape.
543 We use escape analysis to determine whether a variable is
544 call-clobbered. Simply put, if an ADDR_EXPR escapes, then the variable
545 is call-clobbered. If a pointer P_i escapes, then all the variables
546 pointed-to by P_i (and its memory tag) also escape.
548 2- Compute flow-sensitive aliases
550 We have two classes of memory tags. Memory tags associated with the
551 pointed-to data type of the pointers in the program. These tags are
552 called "symbol memory tag" (SMT). The other class are those associated
553 with SSA_NAMEs, called "name memory tag" (NMT). The basic idea is that
554 when adding operands for an INDIRECT_REF *P_i, we will first check
555 whether P_i has a name tag, if it does we use it, because that will have
556 more precise aliasing information. Otherwise, we use the standard symbol
557 tag.
559 In this phase, we go through all the pointers we found in points-to
560 analysis and create alias sets for the name memory tags associated with
561 each pointer P_i. If P_i escapes, we mark call-clobbered the variables
562 it points to and its tag.
565 3- Compute flow-insensitive aliases
567 This pass will compare the alias set of every symbol memory tag and
568 every addressable variable found in the program. Given a symbol
569 memory tag SMT and an addressable variable V. If the alias sets of
570 SMT and V conflict (as computed by may_alias_p), then V is marked
571 as an alias tag and added to the alias set of SMT.
573 For instance, consider the following function:
575 foo (int i)
577 int *p, a, b;
579 if (i > 10)
580 p = &a;
581 else
582 p = &b;
584 *p = 3;
585 a = b + 2;
586 return *p;
589 After aliasing analysis has finished, the symbol memory tag for pointer
590 'p' will have two aliases, namely variables 'a' and 'b'. Every time
591 pointer 'p' is dereferenced, we want to mark the operation as a
592 potential reference to 'a' and 'b'.
594 foo (int i)
596 int *p, a, b;
598 if (i_2 > 10)
599 p_4 = &a;
600 else
601 p_6 = &b;
602 # p_1 = PHI <p_4(1), p_6(2)>;
604 # a_7 = V_MAY_DEF <a_3>;
605 # b_8 = V_MAY_DEF <b_5>;
606 *p_1 = 3;
608 # a_9 = V_MAY_DEF <a_7>
609 # VUSE <b_8>
610 a_9 = b_8 + 2;
612 # VUSE <a_9>;
613 # VUSE <b_8>;
614 return *p_1;
617 In certain cases, the list of may aliases for a pointer may grow too
618 large. This may cause an explosion in the number of virtual operands
619 inserted in the code. Resulting in increased memory consumption and
620 compilation time.
622 When the number of virtual operands needed to represent aliased
623 loads and stores grows too large (configurable with @option{--param
624 max-aliased-vops}), alias sets are grouped to avoid severe
625 compile-time slow downs and memory consumption. See group_aliases. */
627 static unsigned int
628 compute_may_aliases (void)
630 struct alias_info *ai;
632 memset (&alias_stats, 0, sizeof (alias_stats));
634 /* Initialize aliasing information. */
635 ai = init_alias_info ();
637 /* For each pointer P_i, determine the sets of variables that P_i may
638 point-to. For every addressable variable V, determine whether the
639 address of V escapes the current function, making V call-clobbered
640 (i.e., whether &V is stored in a global variable or if its passed as a
641 function call argument). */
642 compute_points_to_sets (ai);
644 /* Collect all pointers and addressable variables, compute alias sets,
645 create memory tags for pointers and promote variables whose address is
646 not needed anymore. */
647 setup_pointers_and_addressables (ai);
649 /* Compute flow-sensitive, points-to based aliasing for all the name
650 memory tags. Note that this pass needs to be done before flow
651 insensitive analysis because it uses the points-to information
652 gathered before to mark call-clobbered symbol tags. */
653 compute_flow_sensitive_aliasing (ai);
655 /* Compute type-based flow-insensitive aliasing for all the type
656 memory tags. */
657 compute_flow_insensitive_aliasing (ai);
659 /* Compute call clobbering information. */
660 compute_call_clobbered (ai);
662 /* Determine if we need to enable alias grouping. */
663 if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
664 group_aliases (ai);
666 /* If the program has too many call-clobbered variables and/or function
667 calls, create .GLOBAL_VAR and use it to model call-clobbering
668 semantics at call sites. This reduces the number of virtual operands
669 considerably, improving compile times at the expense of lost
670 aliasing precision. */
671 maybe_create_global_var (ai);
673 /* If the program contains ref-all pointers, finalize may-alias information
674 for them. This pass needs to be run after call-clobbering information
675 has been computed. */
676 if (ai->ref_all_symbol_mem_tag)
677 finalize_ref_all_pointers (ai);
679 /* Debugging dumps. */
680 if (dump_file)
682 dump_referenced_vars (dump_file);
683 if (dump_flags & TDF_STATS)
684 dump_alias_stats (dump_file);
685 dump_points_to_info (dump_file);
686 dump_alias_info (dump_file);
689 /* Deallocate memory used by aliasing data structures. */
690 delete_alias_info (ai);
692 updating_used_alone = true;
694 block_stmt_iterator bsi;
695 basic_block bb;
696 FOR_EACH_BB (bb)
698 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
700 update_stmt_if_modified (bsi_stmt (bsi));
704 recalculate_used_alone ();
705 updating_used_alone = false;
706 return 0;
710 struct tree_opt_pass pass_may_alias =
712 "alias", /* name */
713 NULL, /* gate */
714 compute_may_aliases, /* execute */
715 NULL, /* sub */
716 NULL, /* next */
717 0, /* static_pass_number */
718 TV_TREE_MAY_ALIAS, /* tv_id */
719 PROP_cfg | PROP_ssa, /* properties_required */
720 PROP_alias, /* properties_provided */
721 0, /* properties_destroyed */
722 0, /* todo_flags_start */
723 TODO_dump_func | TODO_update_ssa
724 | TODO_ggc_collect | TODO_verify_ssa
725 | TODO_verify_stmts, /* todo_flags_finish */
726 0 /* letter */
730 /* Data structure used to count the number of dereferences to PTR
731 inside an expression. */
732 struct count_ptr_d
734 tree ptr;
735 unsigned count;
739 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
740 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
742 static tree
743 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
745 struct count_ptr_d *count_p = (struct count_ptr_d *) data;
747 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
748 pointer 'ptr' is *not* dereferenced, it is simply used to compute
749 the address of 'fld' as 'ptr + offsetof(fld)'. */
750 if (TREE_CODE (*tp) == ADDR_EXPR)
752 *walk_subtrees = 0;
753 return NULL_TREE;
756 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
757 count_p->count++;
759 return NULL_TREE;
763 /* Count the number of direct and indirect uses for pointer PTR in
764 statement STMT. The two counts are stored in *NUM_USES_P and
765 *NUM_DEREFS_P respectively. *IS_STORE_P is set to 'true' if at
766 least one of those dereferences is a store operation. */
768 void
769 count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
770 unsigned *num_derefs_p, bool *is_store)
772 ssa_op_iter i;
773 tree use;
775 *num_uses_p = 0;
776 *num_derefs_p = 0;
777 *is_store = false;
779 /* Find out the total number of uses of PTR in STMT. */
780 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
781 if (use == ptr)
782 (*num_uses_p)++;
784 /* Now count the number of indirect references to PTR. This is
785 truly awful, but we don't have much choice. There are no parent
786 pointers inside INDIRECT_REFs, so an expression like
787 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
788 find all the indirect and direct uses of x_1 inside. The only
789 shortcut we can take is the fact that GIMPLE only allows
790 INDIRECT_REFs inside the expressions below. */
791 if (TREE_CODE (stmt) == MODIFY_EXPR
792 || (TREE_CODE (stmt) == RETURN_EXPR
793 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
794 || TREE_CODE (stmt) == ASM_EXPR
795 || TREE_CODE (stmt) == CALL_EXPR)
797 tree lhs, rhs;
799 if (TREE_CODE (stmt) == MODIFY_EXPR)
801 lhs = TREE_OPERAND (stmt, 0);
802 rhs = TREE_OPERAND (stmt, 1);
804 else if (TREE_CODE (stmt) == RETURN_EXPR)
806 tree e = TREE_OPERAND (stmt, 0);
807 lhs = TREE_OPERAND (e, 0);
808 rhs = TREE_OPERAND (e, 1);
810 else if (TREE_CODE (stmt) == ASM_EXPR)
812 lhs = ASM_OUTPUTS (stmt);
813 rhs = ASM_INPUTS (stmt);
815 else
817 lhs = NULL_TREE;
818 rhs = stmt;
821 if (lhs && (TREE_CODE (lhs) == TREE_LIST || EXPR_P (lhs)))
823 struct count_ptr_d count;
824 count.ptr = ptr;
825 count.count = 0;
826 walk_tree (&lhs, count_ptr_derefs, &count, NULL);
827 *is_store = true;
828 *num_derefs_p = count.count;
831 if (rhs && (TREE_CODE (rhs) == TREE_LIST || EXPR_P (rhs)))
833 struct count_ptr_d count;
834 count.ptr = ptr;
835 count.count = 0;
836 walk_tree (&rhs, count_ptr_derefs, &count, NULL);
837 *num_derefs_p += count.count;
841 gcc_assert (*num_uses_p >= *num_derefs_p);
844 /* Initialize the data structures used for alias analysis. */
846 static struct alias_info *
847 init_alias_info (void)
849 struct alias_info *ai;
850 referenced_var_iterator rvi;
851 tree var;
853 bitmap_obstack_initialize (&alias_obstack);
854 ai = XCNEW (struct alias_info);
855 ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
856 sbitmap_zero (ai->ssa_names_visited);
857 ai->processed_ptrs = VEC_alloc (tree, heap, 50);
858 ai->written_vars = BITMAP_ALLOC (&alias_obstack);
859 ai->dereferenced_ptrs_store = BITMAP_ALLOC (&alias_obstack);
860 ai->dereferenced_ptrs_load = BITMAP_ALLOC (&alias_obstack);
862 /* If aliases have been computed before, clear existing information. */
863 if (gimple_aliases_computed_p (cfun))
865 unsigned i;
867 /* Similarly, clear the set of addressable variables. In this
868 case, we can just clear the set because addressability is
869 only computed here. */
870 bitmap_clear (gimple_addressable_vars (cfun));
872 /* Clear flow-insensitive alias information from each symbol. */
873 FOR_EACH_REFERENCED_VAR (var, rvi)
875 var_ann_t ann = var_ann (var);
877 ann->is_aliased = 0;
878 ann->may_aliases = NULL;
879 NUM_REFERENCES_CLEAR (ann);
881 /* Since we are about to re-discover call-clobbered
882 variables, clear the call-clobbered flag. Variables that
883 are intrinsically call-clobbered (globals, local statics,
884 etc) will not be marked by the aliasing code, so we can't
885 remove them from CALL_CLOBBERED_VARS.
887 NB: STRUCT_FIELDS are still call clobbered if they are for
888 a global variable, so we *don't* clear their call clobberedness
889 just because they are tags, though we will clear it if they
890 aren't for global variables. */
891 if (TREE_CODE (var) == NAME_MEMORY_TAG
892 || TREE_CODE (var) == SYMBOL_MEMORY_TAG
893 || !is_global_var (var))
894 clear_call_clobbered (var);
897 /* Clear flow-sensitive points-to information from each SSA name. */
898 for (i = 1; i < num_ssa_names; i++)
900 tree name = ssa_name (i);
902 if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
903 continue;
905 if (SSA_NAME_PTR_INFO (name))
907 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);
909 /* Clear all the flags but keep the name tag to
910 avoid creating new temporaries unnecessarily. If
911 this pointer is found to point to a subset or
912 superset of its former points-to set, then a new
913 tag will need to be created in create_name_tags. */
914 pi->pt_anything = 0;
915 pi->pt_null = 0;
916 pi->value_escapes_p = 0;
917 pi->is_dereferenced = 0;
918 if (pi->pt_vars)
919 bitmap_clear (pi->pt_vars);
924 /* Next time, we will need to reset alias information. */
925 cfun->gimple_df->aliases_computed_p = true;
927 return ai;
931 /* Deallocate memory used by alias analysis. */
933 static void
934 delete_alias_info (struct alias_info *ai)
936 size_t i;
937 referenced_var_iterator rvi;
938 tree var;
940 sbitmap_free (ai->ssa_names_visited);
941 VEC_free (tree, heap, ai->processed_ptrs);
943 for (i = 0; i < ai->num_addressable_vars; i++)
944 free (ai->addressable_vars[i]);
946 FOR_EACH_REFERENCED_VAR(var, rvi)
948 var_ann_t ann = var_ann (var);
949 NUM_REFERENCES_CLEAR (ann);
952 free (ai->addressable_vars);
954 for (i = 0; i < ai->num_pointers; i++)
955 free (ai->pointers[i]);
956 free (ai->pointers);
958 BITMAP_FREE (ai->written_vars);
959 BITMAP_FREE (ai->dereferenced_ptrs_store);
960 BITMAP_FREE (ai->dereferenced_ptrs_load);
961 bitmap_obstack_release (&alias_obstack);
962 free (ai);
964 delete_points_to_sets ();
967 /* Used for hashing to identify pointer infos with identical
968 pt_vars bitmaps. */
969 static int
970 eq_ptr_info (const void *p1, const void *p2)
972 const struct ptr_info_def *n1 = (const struct ptr_info_def *) p1;
973 const struct ptr_info_def *n2 = (const struct ptr_info_def *) p2;
974 return bitmap_equal_p (n1->pt_vars, n2->pt_vars);
977 static hashval_t
978 ptr_info_hash (const void *p)
980 const struct ptr_info_def *n = (const struct ptr_info_def *) p;
981 return bitmap_hash (n->pt_vars);
984 /* Create name tags for all the pointers that have been dereferenced.
985 We only create a name tag for a pointer P if P is found to point to
986 a set of variables (so that we can alias them to *P) or if it is
987 the result of a call to malloc (which means that P cannot point to
988 anything else nor alias any other variable).
990 If two pointers P and Q point to the same set of variables, they
991 are assigned the same name tag. */
993 static void
994 create_name_tags (void)
996 size_t i;
997 VEC (tree, heap) *with_ptvars = NULL;
998 tree ptr;
999 htab_t ptr_hash;
1001 /* Collect the list of pointers with a non-empty points to set. */
1002 for (i = 1; i < num_ssa_names; i++)
1004 tree ptr = ssa_name (i);
1005 struct ptr_info_def *pi;
1007 if (!ptr
1008 || !POINTER_TYPE_P (TREE_TYPE (ptr))
1009 || !SSA_NAME_PTR_INFO (ptr))
1010 continue;
1012 pi = SSA_NAME_PTR_INFO (ptr);
1014 if (pi->pt_anything || !pi->is_dereferenced)
1016 /* No name tags for pointers that have not been
1017 dereferenced or point to an arbitrary location. */
1018 pi->name_mem_tag = NULL_TREE;
1019 continue;
1022 /* Set pt_anything on the pointers without pt_vars filled in so
1023 that they are assigned a symbol tag. */
1024 if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
1025 VEC_safe_push (tree, heap, with_ptvars, ptr);
1026 else
1027 set_pt_anything (ptr);
1030 /* If we didn't find any pointers with pt_vars set, we're done. */
1031 if (!with_ptvars)
1032 return;
1034 ptr_hash = htab_create (10, ptr_info_hash, eq_ptr_info, NULL);
1035 /* Now go through the pointers with pt_vars, and find a name tag
1036 with the same pt_vars as this pointer, or create one if one
1037 doesn't exist. */
1038 for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
1040 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
1041 tree old_name_tag = pi->name_mem_tag;
1042 struct ptr_info_def **slot;
1044 /* If PTR points to a set of variables, check if we don't
1045 have another pointer Q with the same points-to set before
1046 creating a tag. If so, use Q's tag instead of creating a
1047 new one.
1049 This is important for not creating unnecessary symbols
1050 and also for copy propagation. If we ever need to
1051 propagate PTR into Q or vice-versa, we would run into
1052 problems if they both had different name tags because
1053 they would have different SSA version numbers (which
1054 would force us to take the name tags in and out of SSA). */
1056 slot = (struct ptr_info_def **) htab_find_slot (ptr_hash, pi, INSERT);
1057 if (*slot)
1058 pi->name_mem_tag = (*slot)->name_mem_tag;
1059 else
1061 *slot = pi;
1062 /* If we didn't find a pointer with the same points-to set
1063 as PTR, create a new name tag if needed. */
1064 if (pi->name_mem_tag == NULL_TREE)
1065 pi->name_mem_tag = get_nmt_for (ptr);
1068 /* If the new name tag computed for PTR is different than
1069 the old name tag that it used to have, then the old tag
1070 needs to be removed from the IL, so we mark it for
1071 renaming. */
1072 if (old_name_tag && old_name_tag != pi->name_mem_tag)
1073 mark_sym_for_renaming (old_name_tag);
1075 TREE_THIS_VOLATILE (pi->name_mem_tag)
1076 |= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
1078 /* Mark the new name tag for renaming. */
1079 mark_sym_for_renaming (pi->name_mem_tag);
1081 htab_delete (ptr_hash);
1083 VEC_free (tree, heap, with_ptvars);
1087 /* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
1088 the name memory tag (NMT) associated with P_i. If P_i escapes, then its
1089 name tag and the variables it points-to are call-clobbered. Finally, if
1090 P_i escapes and we could not determine where it points to, then all the
1091 variables in the same alias set as *P_i are marked call-clobbered. This
1092 is necessary because we must assume that P_i may take the address of any
1093 variable in the same alias set. */
1095 static void
1096 compute_flow_sensitive_aliasing (struct alias_info *ai)
1098 size_t i;
1099 tree ptr;
1101 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
1103 if (!find_what_p_points_to (ptr))
1104 set_pt_anything (ptr);
1107 create_name_tags ();
1109 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
1111 unsigned j;
1112 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
1113 var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
1114 bitmap_iterator bi;
1117 /* Set up aliasing information for PTR's name memory tag (if it has
1118 one). Note that only pointers that have been dereferenced will
1119 have a name memory tag. */
1120 if (pi->name_mem_tag && pi->pt_vars)
1121 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
1123 add_may_alias (pi->name_mem_tag, referenced_var (j));
1124 add_may_alias (v_ann->symbol_mem_tag, referenced_var (j));
1130 /* Compute type-based alias sets. Traverse all the pointers and
1131 addressable variables found in setup_pointers_and_addressables.
1133 For every pointer P in AI->POINTERS and addressable variable V in
1134 AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's symbol
1135 memory tag (SMT) if their alias sets conflict. V is then marked as
1136 an alias tag so that the operand scanner knows that statements
1137 containing V have aliased operands. */
1139 static void
1140 compute_flow_insensitive_aliasing (struct alias_info *ai)
1142 size_t i;
1144 /* Initialize counter for the total number of virtual operands that
1145 aliasing will introduce. When AI->TOTAL_ALIAS_VOPS goes beyond the
1146 threshold set by --params max-alias-vops, we enable alias
1147 grouping. */
1148 ai->total_alias_vops = 0;
1150 /* For every pointer P, determine which addressable variables may alias
1151 with P's symbol memory tag. */
1152 for (i = 0; i < ai->num_pointers; i++)
1154 size_t j;
1155 struct alias_map_d *p_map = ai->pointers[i];
1156 tree tag = var_ann (p_map->var)->symbol_mem_tag;
1157 var_ann_t tag_ann = var_ann (tag);
1158 tree var;
1160 /* Call-clobbering information is not finalized yet at this point. */
1161 if (PTR_IS_REF_ALL (p_map->var))
1162 continue;
1164 p_map->total_alias_vops = 0;
1165 p_map->may_aliases = BITMAP_ALLOC (&alias_obstack);
1167 /* Add any pre-existing may_aliases to the bitmap used to represent
1168 TAG's alias set in case we need to group aliases. */
1169 for (j = 0; VEC_iterate (tree, tag_ann->may_aliases, j, var); ++j)
1170 bitmap_set_bit (p_map->may_aliases, DECL_UID (var));
1172 for (j = 0; j < ai->num_addressable_vars; j++)
1174 struct alias_map_d *v_map;
1175 var_ann_t v_ann;
1176 bool tag_stored_p, var_stored_p;
1178 v_map = ai->addressable_vars[j];
1179 var = v_map->var;
1180 v_ann = var_ann (var);
1182 /* Skip memory tags and variables that have never been
1183 written to. We also need to check if the variables are
1184 call-clobbered because they may be overwritten by
1185 function calls.
1187 Note this is effectively random accessing elements in
1188 the sparse bitset, which can be highly inefficient.
1189 So we first check the call_clobbered status of the
1190 tag and variable before querying the bitmap. */
1191 tag_stored_p = is_call_clobbered (tag)
1192 || bitmap_bit_p (ai->written_vars, DECL_UID (tag));
1193 var_stored_p = is_call_clobbered (var)
1194 || bitmap_bit_p (ai->written_vars, DECL_UID (var));
1195 if (!tag_stored_p && !var_stored_p)
1196 continue;
1198 if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
1200 size_t num_tag_refs, num_var_refs;
1202 num_tag_refs = NUM_REFERENCES (tag_ann);
1203 num_var_refs = NUM_REFERENCES (v_ann);
1205 /* Add VAR to TAG's may-aliases set. */
1207 /* We should never have a var with subvars here, because
1208 they shouldn't get into the set of addressable vars */
1209 gcc_assert (!var_can_have_subvars (var)
1210 || get_subvars_for_var (var) == NULL);
1212 add_may_alias (tag, var);
1213 /* Update the bitmap used to represent TAG's alias set
1214 in case we need to group aliases. */
1215 bitmap_set_bit (p_map->may_aliases, DECL_UID (var));
1217 /* Update the total number of virtual operands due to
1218 aliasing. Since we are adding one more alias to TAG's
1219 may-aliases set, the total number of virtual operands due
1220 to aliasing will be increased by the number of references
1221 made to VAR and TAG (every reference to TAG will also
1222 count as a reference to VAR). */
1223 ai->total_alias_vops += (num_var_refs + num_tag_refs);
1224 p_map->total_alias_vops += (num_var_refs + num_tag_refs);
1231 /* Since this analysis is based exclusively on symbols, it fails to
1232 handle cases where two pointers P and Q have different memory
1233 tags with conflicting alias set numbers but no aliased symbols in
1234 common.
1236 For example, suppose that we have two memory tags SMT.1 and SMT.2
1237 such that
1239 may-aliases (SMT.1) = { a }
1240 may-aliases (SMT.2) = { b }
1242 and the alias set number of SMT.1 conflicts with that of SMT.2.
1243 Since they don't have symbols in common, loads and stores from
1244 SMT.1 and SMT.2 will seem independent of each other, which will
1245 lead to the optimizers making invalid transformations (see
1246 testsuite/gcc.c-torture/execute/pr15262-[12].c).
1248 To avoid this problem, we do a final traversal of AI->POINTERS
1249 looking for pairs of pointers that have no aliased symbols in
1250 common and yet have conflicting alias set numbers. */
1251 for (i = 0; i < ai->num_pointers; i++)
1253 size_t j;
1254 struct alias_map_d *p_map1 = ai->pointers[i];
1255 tree tag1 = var_ann (p_map1->var)->symbol_mem_tag;
1256 bitmap may_aliases1 = p_map1->may_aliases;
1258 if (PTR_IS_REF_ALL (p_map1->var))
1259 continue;
1261 for (j = i + 1; j < ai->num_pointers; j++)
1263 struct alias_map_d *p_map2 = ai->pointers[j];
1264 tree tag2 = var_ann (p_map2->var)->symbol_mem_tag;
1265 bitmap may_aliases2 = p_map2->may_aliases;
1267 if (PTR_IS_REF_ALL (p_map2->var))
1268 continue;
1270 /* If the pointers may not point to each other, do nothing. */
1271 if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
1272 continue;
1274 /* The two pointers may alias each other. If they already have
1275 symbols in common, do nothing. */
1276 if (bitmap_intersect_p (may_aliases1, may_aliases2))
1277 continue;
1279 if (!bitmap_empty_p (may_aliases2))
1281 unsigned int k;
1282 bitmap_iterator bi;
1284 /* Add all the aliases for TAG2 into TAG1's alias set.
1285 FIXME, update grouping heuristic counters. */
1286 EXECUTE_IF_SET_IN_BITMAP (may_aliases2, 0, k, bi)
1287 add_may_alias (tag1, referenced_var (k));
1288 bitmap_ior_into (may_aliases1, may_aliases2);
1290 else
1292 /* Since TAG2 does not have any aliases of its own, add
1293 TAG2 itself to the alias set of TAG1. */
1294 add_may_alias (tag1, tag2);
1295 bitmap_set_bit (may_aliases1, DECL_UID (tag2));
1300 if (dump_file)
1301 fprintf (dump_file, "\n%s: Total number of aliased vops: %ld\n",
1302 get_name (current_function_decl),
1303 ai->total_alias_vops);
1307 /* Finalize may-alias information for ref-all pointers. Traverse all
1308 the addressable variables found in setup_pointers_and_addressables.
1310 If flow-sensitive alias analysis has attached a name memory tag to
1311 a ref-all pointer, we will use it for the dereferences because that
1312 will have more precise aliasing information. But if there is no
1313 name tag, we will use a special symbol tag that aliases all the
1314 call-clobbered addressable variables. */
1316 static void
1317 finalize_ref_all_pointers (struct alias_info *ai)
1319 size_t i;
1321 if (gimple_global_var (cfun))
1322 add_may_alias (ai->ref_all_symbol_mem_tag, gimple_global_var (cfun));
1323 else
1325 /* First add the real call-clobbered variables. */
1326 for (i = 0; i < ai->num_addressable_vars; i++)
1328 tree var = ai->addressable_vars[i]->var;
1329 if (is_call_clobbered (var))
1330 add_may_alias (ai->ref_all_symbol_mem_tag, var);
1333 /* Then add the call-clobbered pointer memory tags. See
1334 compute_flow_insensitive_aliasing for the rationale. */
1335 for (i = 0; i < ai->num_pointers; i++)
1337 tree ptr = ai->pointers[i]->var, tag;
1338 if (PTR_IS_REF_ALL (ptr))
1339 continue;
1340 tag = var_ann (ptr)->symbol_mem_tag;
1341 if (is_call_clobbered (tag))
1342 add_may_alias (ai->ref_all_symbol_mem_tag, tag);
1348 /* Comparison function for qsort used in group_aliases. */
1350 static int
1351 total_alias_vops_cmp (const void *p, const void *q)
1353 const struct alias_map_d **p1 = (const struct alias_map_d **)p;
1354 const struct alias_map_d **p2 = (const struct alias_map_d **)q;
1355 long n1 = (*p1)->total_alias_vops;
1356 long n2 = (*p2)->total_alias_vops;
1358 /* We want to sort in descending order. */
1359 return (n1 > n2 ? -1 : (n1 == n2) ? 0 : 1);
1362 /* Group all the aliases for TAG to make TAG represent all the
1363 variables in its alias set. Update the total number
1364 of virtual operands due to aliasing (AI->TOTAL_ALIAS_VOPS). This
1365 function will make TAG be the unique alias tag for all the
1366 variables in its may-aliases. So, given:
1368 may-aliases(TAG) = { V1, V2, V3 }
1370 This function will group the variables into:
1372 may-aliases(V1) = { TAG }
1373 may-aliases(V2) = { TAG }
1374 may-aliases(V2) = { TAG } */
1376 static void
1377 group_aliases_into (tree tag, bitmap tag_aliases, struct alias_info *ai)
1379 unsigned int i;
1380 var_ann_t tag_ann = var_ann (tag);
1381 size_t num_tag_refs = NUM_REFERENCES (tag_ann);
1382 bitmap_iterator bi;
1384 EXECUTE_IF_SET_IN_BITMAP (tag_aliases, 0, i, bi)
1386 tree var = referenced_var (i);
1387 var_ann_t ann = var_ann (var);
1389 /* Make TAG the unique alias of VAR. */
1390 ann->is_aliased = 0;
1391 ann->may_aliases = NULL;
1393 /* Note that VAR and TAG may be the same if the function has no
1394 addressable variables (see the discussion at the end of
1395 setup_pointers_and_addressables). */
1396 if (var != tag)
1397 add_may_alias (var, tag);
1399 /* Reduce total number of virtual operands contributed
1400 by TAG on behalf of VAR. Notice that the references to VAR
1401 itself won't be removed. We will merely replace them with
1402 references to TAG. */
1403 ai->total_alias_vops -= num_tag_refs;
1406 /* We have reduced the number of virtual operands that TAG makes on
1407 behalf of all the variables formerly aliased with it. However,
1408 we have also "removed" all the virtual operands for TAG itself,
1409 so we add them back. */
1410 ai->total_alias_vops += num_tag_refs;
1412 /* TAG no longer has any aliases. */
1413 tag_ann->may_aliases = NULL;
1416 /* Simple comparison function for qsort that sorts based on pointer
1417 address. */
1419 static int
1420 tree_pointer_compare (const void *pa, const void *pb)
1422 const tree a = *((const tree *)pa);
1423 const tree b = *((const tree *)pb);
1425 return b - a;
1429 /* Replacing may aliases in name tags during grouping can up with the
1430 same SMT multiple times in the may_alias list. It's quicker to
1431 just remove them post-hoc than it is to avoid them during
1432 replacement. Thus, this routine sorts the may-alias list and
1433 removes duplicates. */
1435 static void
1436 compact_name_tags (void)
1438 referenced_var_iterator rvi;
1439 tree var;
1441 FOR_EACH_REFERENCED_VAR (var, rvi)
1443 if (TREE_CODE (var) == NAME_MEMORY_TAG)
1445 VEC(tree, gc) *aliases, *new_aliases;
1446 tree alias, last_alias;
1447 int i;
1449 last_alias = NULL;
1450 aliases = var_ann (var)->may_aliases;
1451 new_aliases = NULL;
1453 if (VEC_length (tree, aliases) > 1)
1455 bool changed = false;
1456 qsort (VEC_address (tree, aliases), VEC_length (tree, aliases),
1457 sizeof (tree), tree_pointer_compare);
1459 for (i = 0; VEC_iterate (tree, aliases, i, alias); i++)
1461 if (alias == last_alias)
1463 changed = true;
1464 continue;
1467 VEC_safe_push (tree, gc, new_aliases, alias);
1468 last_alias = alias;
1471 /* Only replace the array if something has changed. */
1472 if (changed)
1474 VEC_free (tree, gc, aliases);
1475 var_ann (var)->may_aliases = new_aliases;
1477 else
1478 VEC_free (tree, gc, new_aliases);
1484 /* Group may-aliases sets to reduce the number of virtual operands due
1485 to aliasing.
1487 1- Sort the list of pointers in decreasing number of contributed
1488 virtual operands.
1490 2- Take the first entry in AI->POINTERS and revert the role of
1491 the memory tag and its aliases. Usually, whenever an aliased
1492 variable Vi is found to alias with a memory tag T, we add Vi
1493 to the may-aliases set for T. Meaning that after alias
1494 analysis, we will have:
1496 may-aliases(T) = { V1, V2, V3, ..., Vn }
1498 This means that every statement that references T, will get 'n'
1499 virtual operands for each of the Vi tags. But, when alias
1500 grouping is enabled, we make T an alias tag and add it to the
1501 alias set of all the Vi variables:
1503 may-aliases(V1) = { T }
1504 may-aliases(V2) = { T }
1506 may-aliases(Vn) = { T }
1508 This has two effects: (a) statements referencing T will only get
1509 a single virtual operand, and, (b) all the variables Vi will now
1510 appear to alias each other. So, we lose alias precision to
1511 improve compile time. But, in theory, a program with such a high
1512 level of aliasing should not be very optimizable in the first
1513 place.
1515 3- Since variables may be in the alias set of more than one
1516 memory tag, the grouping done in step (2) needs to be extended
1517 to all the memory tags that have a non-empty intersection with
1518 the may-aliases set of tag T. For instance, if we originally
1519 had these may-aliases sets:
1521 may-aliases(T) = { V1, V2, V3 }
1522 may-aliases(R) = { V2, V4 }
1524 In step (2) we would have reverted the aliases for T as:
1526 may-aliases(V1) = { T }
1527 may-aliases(V2) = { T }
1528 may-aliases(V3) = { T }
1530 But note that now V2 is no longer aliased with R. We could
1531 add R to may-aliases(V2), but we are in the process of
1532 grouping aliases to reduce virtual operands so what we do is
1533 add V4 to the grouping to obtain:
1535 may-aliases(V1) = { T }
1536 may-aliases(V2) = { T }
1537 may-aliases(V3) = { T }
1538 may-aliases(V4) = { T }
1540 4- If the total number of virtual operands due to aliasing is
1541 still above the threshold set by max-alias-vops, go back to (2). */
1543 static void
1544 group_aliases (struct alias_info *ai)
1546 size_t i;
1547 tree ptr;
1549 /* Sort the POINTERS array in descending order of contributed
1550 virtual operands. */
1551 qsort (ai->pointers, ai->num_pointers, sizeof (struct alias_map_d *),
1552 total_alias_vops_cmp);
1554 /* For every pointer in AI->POINTERS, reverse the roles of its tag
1555 and the tag's may-aliases set. */
1556 for (i = 0; i < ai->num_pointers; i++)
1558 size_t j;
1559 tree tag1 = var_ann (ai->pointers[i]->var)->symbol_mem_tag;
1560 bitmap tag1_aliases = ai->pointers[i]->may_aliases;
1562 /* Skip tags that have been grouped already. */
1563 if (ai->pointers[i]->grouped_p)
1564 continue;
1566 /* See if TAG1 had any aliases in common with other symbol tags.
1567 If we find a TAG2 with common aliases with TAG1, add TAG2's
1568 aliases into TAG1. */
1569 for (j = i + 1; j < ai->num_pointers; j++)
1571 bitmap tag2_aliases = ai->pointers[j]->may_aliases;
1573 if (bitmap_intersect_p (tag1_aliases, tag2_aliases))
1575 tree tag2 = var_ann (ai->pointers[j]->var)->symbol_mem_tag;
1577 bitmap_ior_into (tag1_aliases, tag2_aliases);
1579 /* TAG2 does not need its aliases anymore. */
1580 bitmap_clear (tag2_aliases);
1581 var_ann (tag2)->may_aliases = NULL;
1583 /* TAG1 is the unique alias of TAG2. */
1584 add_may_alias (tag2, tag1);
1586 ai->pointers[j]->grouped_p = true;
1590 /* Now group all the aliases we collected into TAG1. */
1591 group_aliases_into (tag1, tag1_aliases, ai);
1593 /* If we've reduced total number of virtual operands below the
1594 threshold, stop. */
1595 if (ai->total_alias_vops < MAX_ALIASED_VOPS)
1596 break;
1599 /* Finally, all the variables that have been grouped cannot be in
1600 the may-alias set of name memory tags. Suppose that we have
1601 grouped the aliases in this code so that may-aliases(a) = SMT.20
1603 p_5 = &a;
1605 # a_9 = V_MAY_DEF <a_8>
1606 p_5->field = 0
1607 ... Several modifications to SMT.20 ...
1608 # VUSE <a_9>
1609 x_30 = p_5->field
1611 Since p_5 points to 'a', the optimizers will try to propagate 0
1612 into p_5->field, but that is wrong because there have been
1613 modifications to 'SMT.20' in between. To prevent this we have to
1614 replace 'a' with 'SMT.20' in the name tag of p_5. */
1615 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
1617 size_t j;
1618 tree name_tag = SSA_NAME_PTR_INFO (ptr)->name_mem_tag;
1619 VEC(tree,gc) *aliases;
1620 tree alias;
1622 if (name_tag == NULL_TREE)
1623 continue;
1625 aliases = var_ann (name_tag)->may_aliases;
1626 for (j = 0; VEC_iterate (tree, aliases, j, alias); j++)
1628 var_ann_t ann = var_ann (alias);
1630 if ((!MTAG_P (alias)
1631 || TREE_CODE (alias) == STRUCT_FIELD_TAG)
1632 && ann->may_aliases)
1634 tree new_alias;
1636 gcc_assert (VEC_length (tree, ann->may_aliases) == 1);
1638 new_alias = VEC_index (tree, ann->may_aliases, 0);
1639 replace_may_alias (name_tag, j, new_alias);
1644 compact_name_tags ();
1646 if (dump_file)
1647 fprintf (dump_file,
1648 "%s: Total number of aliased vops after grouping: %ld%s\n",
1649 get_name (current_function_decl),
1650 ai->total_alias_vops,
1651 (ai->total_alias_vops < 0) ? " (negative values are OK)" : "");
1655 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
1657 static void
1658 create_alias_map_for (tree var, struct alias_info *ai)
1660 struct alias_map_d *alias_map;
1661 alias_map = XCNEW (struct alias_map_d);
1662 alias_map->var = var;
1663 alias_map->set = get_alias_set (var);
1664 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
1668 /* Create memory tags for all the dereferenced pointers and build the
1669 ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
1670 sets. Based on the address escape and points-to information collected
1671 earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
1672 variables whose address is not needed anymore. */
1674 static void
1675 setup_pointers_and_addressables (struct alias_info *ai)
1677 size_t n_vars, num_addressable_vars, num_pointers;
1678 referenced_var_iterator rvi;
1679 tree var;
1680 VEC (tree, heap) *varvec = NULL;
1681 safe_referenced_var_iterator srvi;
1683 /* Size up the arrays ADDRESSABLE_VARS and POINTERS. */
1684 num_addressable_vars = num_pointers = 0;
1686 FOR_EACH_REFERENCED_VAR (var, rvi)
1688 if (may_be_aliased (var))
1689 num_addressable_vars++;
1691 if (POINTER_TYPE_P (TREE_TYPE (var)))
1693 /* Since we don't keep track of volatile variables, assume that
1694 these pointers are used in indirect store operations. */
1695 if (TREE_THIS_VOLATILE (var))
1696 bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
1698 num_pointers++;
1702 /* Create ADDRESSABLE_VARS and POINTERS. Note that these arrays are
1703 always going to be slightly bigger than we actually need them
1704 because some TREE_ADDRESSABLE variables will be marked
1705 non-addressable below and only pointers with unique symbol tags are
1706 going to be added to POINTERS. */
1707 ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
1708 ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
1709 ai->num_addressable_vars = 0;
1710 ai->num_pointers = 0;
1712 /* Since we will be creating symbol memory tags within this loop,
1713 cache the value of NUM_REFERENCED_VARS to avoid processing the
1714 additional tags unnecessarily. */
1715 n_vars = num_referenced_vars;
1717 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
1719 var_ann_t v_ann = var_ann (var);
1720 subvar_t svars;
1722 /* Name memory tags already have flow-sensitive aliasing
1723 information, so they need not be processed by
1724 compute_flow_insensitive_aliasing. Similarly, symbol memory
1725 tags are already accounted for when we process their
1726 associated pointer.
1728 Structure fields, on the other hand, have to have some of this
1729 information processed for them, but it's pointless to mark them
1730 non-addressable (since they are fake variables anyway). */
1731 if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
1732 continue;
1734 /* Remove the ADDRESSABLE flag from every addressable variable whose
1735 address is not needed anymore. This is caused by the propagation
1736 of ADDR_EXPR constants into INDIRECT_REF expressions and the
1737 removal of dead pointer assignments done by the early scalar
1738 cleanup passes. */
1739 if (TREE_ADDRESSABLE (var))
1741 if (!bitmap_bit_p (gimple_addressable_vars (cfun), DECL_UID (var))
1742 && TREE_CODE (var) != RESULT_DECL
1743 && !is_global_var (var))
1745 bool okay_to_mark = true;
1747 /* Since VAR is now a regular GIMPLE register, we will need
1748 to rename VAR into SSA afterwards. */
1749 mark_sym_for_renaming (var);
1751 /* If VAR can have sub-variables, and any of its
1752 sub-variables has its address taken, then we cannot
1753 remove the addressable flag from VAR. */
1754 if (var_can_have_subvars (var)
1755 && (svars = get_subvars_for_var (var)))
1757 subvar_t sv;
1759 for (sv = svars; sv; sv = sv->next)
1761 if (bitmap_bit_p (gimple_addressable_vars (cfun),
1762 DECL_UID (sv->var)))
1763 okay_to_mark = false;
1764 mark_sym_for_renaming (sv->var);
1768 /* The address of VAR is not needed, remove the
1769 addressable bit, so that it can be optimized as a
1770 regular variable. */
1771 if (okay_to_mark)
1772 mark_non_addressable (var);
1776 /* Global variables and addressable locals may be aliased. Create an
1777 entry in ADDRESSABLE_VARS for VAR. */
1778 if (may_be_aliased (var)
1779 && (!var_can_have_subvars (var)
1780 || get_subvars_for_var (var) == NULL))
1782 create_alias_map_for (var, ai);
1783 mark_sym_for_renaming (var);
1786 /* Add pointer variables that have been dereferenced to the POINTERS
1787 array and create a symbol memory tag for them. */
1788 if (POINTER_TYPE_P (TREE_TYPE (var)))
1790 if ((bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var))
1791 || bitmap_bit_p (ai->dereferenced_ptrs_load, DECL_UID (var))))
1793 tree tag;
1794 var_ann_t t_ann;
1796 /* If pointer VAR still doesn't have a memory tag
1797 associated with it, create it now or re-use an
1798 existing one. */
1799 tag = get_tmt_for (var, ai);
1800 t_ann = var_ann (tag);
1802 /* The symbol tag will need to be renamed into SSA
1803 afterwards. Note that we cannot do this inside
1804 get_tmt_for because aliasing may run multiple times
1805 and we only create symbol tags the first time. */
1806 mark_sym_for_renaming (tag);
1808 /* Similarly, if pointer VAR used to have another type
1809 tag, we will need to process it in the renamer to
1810 remove the stale virtual operands. */
1811 if (v_ann->symbol_mem_tag)
1812 mark_sym_for_renaming (v_ann->symbol_mem_tag);
1814 /* Associate the tag with pointer VAR. */
1815 v_ann->symbol_mem_tag = tag;
1817 /* If pointer VAR has been used in a store operation,
1818 then its memory tag must be marked as written-to. */
1819 if (bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var)))
1820 bitmap_set_bit (ai->written_vars, DECL_UID (tag));
1822 /* All the dereferences of pointer VAR count as
1823 references of TAG. Since TAG can be associated with
1824 several pointers, add the dereferences of VAR to the
1825 TAG. */
1826 NUM_REFERENCES_SET (t_ann,
1827 NUM_REFERENCES (t_ann)
1828 + NUM_REFERENCES (v_ann));
1830 else
1832 /* The pointer has not been dereferenced. If it had a
1833 symbol memory tag, remove it and mark the old tag for
1834 renaming to remove it out of the IL. */
1835 var_ann_t ann = var_ann (var);
1836 tree tag = ann->symbol_mem_tag;
1837 if (tag)
1839 mark_sym_for_renaming (tag);
1840 ann->symbol_mem_tag = NULL_TREE;
1845 VEC_free (tree, heap, varvec);
1849 /* Determine whether to use .GLOBAL_VAR to model call clobbering semantics. At
1850 every call site, we need to emit V_MAY_DEF expressions to represent the
1851 clobbering effects of the call for variables whose address escapes the
1852 current function.
1854 One approach is to group all call-clobbered variables into a single
1855 representative that is used as an alias of every call-clobbered variable
1856 (.GLOBAL_VAR). This works well, but it ties the optimizer hands because
1857 references to any call clobbered variable is a reference to .GLOBAL_VAR.
1859 The second approach is to emit a clobbering V_MAY_DEF for every
1860 call-clobbered variable at call sites. This is the preferred way in terms
1861 of optimization opportunities but it may create too many V_MAY_DEF operands
1862 if there are many call clobbered variables and function calls in the
1863 function.
1865 To decide whether or not to use .GLOBAL_VAR we multiply the number of
1866 function calls found by the number of call-clobbered variables. If that
1867 product is beyond a certain threshold, as determined by the parameterized
1868 values shown below, we use .GLOBAL_VAR.
1870 FIXME. This heuristic should be improved. One idea is to use several
1871 .GLOBAL_VARs of different types instead of a single one. The thresholds
1872 have been derived from a typical bootstrap cycle, including all target
1873 libraries. Compile times were found increase by ~1% compared to using
1874 .GLOBAL_VAR. */
1876 static void
1877 maybe_create_global_var (struct alias_info *ai)
1879 unsigned i, n_clobbered;
1880 bitmap_iterator bi;
1882 /* No need to create it, if we have one already. */
1883 if (gimple_global_var (cfun) == NULL_TREE)
1885 /* Count all the call-clobbered variables. */
1886 n_clobbered = 0;
1887 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
1889 n_clobbered++;
1892 /* If the number of virtual operands that would be needed to
1893 model all the call-clobbered variables is larger than
1894 GLOBAL_VAR_THRESHOLD, create .GLOBAL_VAR.
1896 Also create .GLOBAL_VAR if there are no call-clobbered
1897 variables and the program contains a mixture of pure/const
1898 and regular function calls. This is to avoid the problem
1899 described in PR 20115:
1901 int X;
1902 int func_pure (void) { return X; }
1903 int func_non_pure (int a) { X += a; }
1904 int foo ()
1906 int a = func_pure ();
1907 func_non_pure (a);
1908 a = func_pure ();
1909 return a;
1912 Since foo() has no call-clobbered variables, there is
1913 no relationship between the calls to func_pure and
1914 func_non_pure. Since func_pure has no side-effects, value
1915 numbering optimizations elide the second call to func_pure.
1916 So, if we have some pure/const and some regular calls in the
1917 program we create .GLOBAL_VAR to avoid missing these
1918 relations. */
1919 if (ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD
1920 || (n_clobbered == 0
1921 && ai->num_calls_found > 0
1922 && ai->num_pure_const_calls_found > 0
1923 && ai->num_calls_found > ai->num_pure_const_calls_found))
1924 create_global_var ();
1927 /* Mark all call-clobbered symbols for renaming. Since the initial
1928 rewrite into SSA ignored all call sites, we may need to rename
1929 .GLOBAL_VAR and the call-clobbered variables. */
1930 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
1932 tree var = referenced_var (i);
1934 /* If the function has calls to clobbering functions and
1935 .GLOBAL_VAR has been created, make it an alias for all
1936 call-clobbered variables. */
1937 if (gimple_global_var (cfun) && var != gimple_global_var (cfun))
1939 add_may_alias (var, gimple_global_var (cfun));
1940 gcc_assert (!get_subvars_for_var (var));
1943 mark_sym_for_renaming (var);
1948 /* Return TRUE if pointer PTR may point to variable VAR.
1950 MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR
1951 This is needed because when checking for type conflicts we are
1952 interested in the alias set of the memory location pointed-to by
1953 PTR. The alias set of PTR itself is irrelevant.
1955 VAR_ALIAS_SET is the alias set for VAR. */
1957 static bool
1958 may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
1959 tree var, HOST_WIDE_INT var_alias_set,
1960 bool alias_set_only)
1962 tree mem;
1964 alias_stats.alias_queries++;
1965 alias_stats.simple_queries++;
1967 /* By convention, a variable cannot alias itself. */
1968 mem = var_ann (ptr)->symbol_mem_tag;
1969 if (mem == var)
1971 alias_stats.alias_noalias++;
1972 alias_stats.simple_resolved++;
1973 return false;
1976 /* If -fargument-noalias-global is > 2, pointer arguments may
1977 not point to anything else. */
1978 if (flag_argument_noalias > 2 && TREE_CODE (ptr) == PARM_DECL)
1980 alias_stats.alias_noalias++;
1981 alias_stats.simple_resolved++;
1982 return false;
1985 /* If -fargument-noalias-global is > 1, pointer arguments may
1986 not point to global variables. */
1987 if (flag_argument_noalias > 1 && is_global_var (var)
1988 && TREE_CODE (ptr) == PARM_DECL)
1990 alias_stats.alias_noalias++;
1991 alias_stats.simple_resolved++;
1992 return false;
1995 /* If either MEM or VAR is a read-only global and the other one
1996 isn't, then PTR cannot point to VAR. */
1997 if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
1998 || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
2000 alias_stats.alias_noalias++;
2001 alias_stats.simple_resolved++;
2002 return false;
2005 gcc_assert (TREE_CODE (mem) == SYMBOL_MEMORY_TAG);
2007 alias_stats.tbaa_queries++;
2009 /* If the alias sets don't conflict then MEM cannot alias VAR. */
2010 if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
2012 alias_stats.alias_noalias++;
2013 alias_stats.tbaa_resolved++;
2014 return false;
2017 /* If var is a record or union type, ptr cannot point into var
2018 unless there is some operation explicit address operation in the
2019 program that can reference a field of the ptr's dereferenced
2020 type. This also assumes that the types of both var and ptr are
2021 contained within the compilation unit, and that there is no fancy
2022 addressing arithmetic associated with any of the types
2023 involved. */
2025 if ((mem_alias_set != 0) && (var_alias_set != 0))
2027 tree ptr_type = TREE_TYPE (ptr);
2028 tree var_type = TREE_TYPE (var);
2030 /* The star count is -1 if the type at the end of the pointer_to
2031 chain is not a record or union type. */
2032 if ((!alias_set_only) &&
2033 ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
2035 int ptr_star_count = 0;
2037 /* Ipa_type_escape_star_count_of_interesting_type is a little to
2038 restrictive for the pointer type, need to allow pointers to
2039 primitive types as long as those types cannot be pointers
2040 to everything. */
2041 while (POINTER_TYPE_P (ptr_type))
2042 /* Strip the *'s off. */
2044 ptr_type = TREE_TYPE (ptr_type);
2045 ptr_star_count++;
2048 /* There does not appear to be a better test to see if the
2049 pointer type was one of the pointer to everything
2050 types. */
2052 if (ptr_star_count > 0)
2054 alias_stats.structnoaddress_queries++;
2055 if (ipa_type_escape_field_does_not_clobber_p (var_type,
2056 TREE_TYPE (ptr)))
2058 alias_stats.structnoaddress_resolved++;
2059 alias_stats.alias_noalias++;
2060 return false;
2063 else if (ptr_star_count == 0)
2065 /* If ptr_type was not really a pointer to type, it cannot
2066 alias. */
2067 alias_stats.structnoaddress_queries++;
2068 alias_stats.structnoaddress_resolved++;
2069 alias_stats.alias_noalias++;
2070 return false;
2075 alias_stats.alias_mayalias++;
2076 return true;
/* Add ALIAS to the set of variables that may alias VAR.  Marks ALIAS
   addressable and records in ALIAS's annotation that it appears in
   some may-alias set.  Duplicate entries are silently ignored.  */

static void
add_may_alias (tree var, tree alias)
{
  size_t i;
  var_ann_t v_ann = get_var_ann (var);
  var_ann_t a_ann = get_var_ann (alias);
  tree al;

  /* Don't allow self-referential aliases.  */
  gcc_assert (var != alias);

  /* ALIAS must be addressable if it's being added to an alias set.
     NOTE(review): the #if 1 path forces TREE_ADDRESSABLE instead of
     asserting may_be_aliased (alias); presumably some callers add
     aliases whose addressability has not been established yet --
     confirm before switching to the assert.  */
#if 1
  TREE_ADDRESSABLE (alias) = 1;
#else
  gcc_assert (may_be_aliased (alias));
#endif

  /* Lazily allocate the may-alias vector on first insertion.  */
  if (v_ann->may_aliases == NULL)
    v_ann->may_aliases = VEC_alloc (tree, gc, 2);

  /* Avoid adding duplicates.  */
  for (i = 0; VEC_iterate (tree, v_ann->may_aliases, i, al); i++)
    if (alias == al)
      return;

  VEC_safe_push (tree, gc, v_ann->may_aliases, alias);
  a_ann->is_aliased = 1;
}
2113 /* Replace alias I in the alias sets of VAR with NEW_ALIAS. */
2115 static void
2116 replace_may_alias (tree var, size_t i, tree new_alias)
2118 var_ann_t v_ann = var_ann (var);
2119 VEC_replace (tree, v_ann->may_aliases, i, new_alias);
/* Mark pointer PTR as pointing to an arbitrary memory location.
   Clears any previously computed points-to set and disassociates
   PTR's name memory tag, if it had one.  */

static void
set_pt_anything (tree ptr)
{
  struct ptr_info_def *pi = get_ptr_info (ptr);

  pi->pt_anything = 1;
  pi->pt_vars = NULL;

  /* The pointer used to have a name tag, but we now found it pointing
     to an arbitrary location.  The name tag needs to be renamed and
     disassociated from PTR.  */
  if (pi->name_mem_tag)
    {
      mark_sym_for_renaming (pi->name_mem_tag);
      pi->name_mem_tag = NULL_TREE;
    }
}
/* Return true if STMT is an "escape" site from the current function.  Escape
   sites are those statements which might expose the address of a variable
   outside the current function.  STMT is an escape site iff:

   	1- STMT is a function call, or
	2- STMT is an __asm__ expression, or
	3- STMT is an assignment to a non-local variable, or
	4- STMT is a return statement.

   Return the type of escape site found, if we found one, or NO_ESCAPE
   if none.  */

enum escape_type
is_escape_site (tree stmt)
{
  tree call = get_call_expr_in (stmt);
  if (call != NULL_TREE)
    {
      /* Calls without side effects (pure/const) cannot store the
	 address anywhere visible.  */
      if (!TREE_SIDE_EFFECTS (call))
	return ESCAPE_TO_PURE_CONST;

      return ESCAPE_TO_CALL;
    }
  else if (TREE_CODE (stmt) == ASM_EXPR)
    return ESCAPE_TO_ASM;
  else if (TREE_CODE (stmt) == MODIFY_EXPR)
    {
      tree lhs = TREE_OPERAND (stmt, 0);

      /* Get to the base of _REF nodes.  */
      if (TREE_CODE (lhs) != SSA_NAME)
	lhs = get_base_address (lhs);

      /* If we couldn't recognize the LHS of the assignment, assume that it
	 is a non-local store.  */
      if (lhs == NULL_TREE)
	return ESCAPE_UNKNOWN;

      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
	  || TREE_CODE (TREE_OPERAND (stmt, 1)) == CONVERT_EXPR
	  || TREE_CODE (TREE_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
	{
	  tree from = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (stmt, 1), 0));
	  tree to = TREE_TYPE (TREE_OPERAND (stmt, 1));

	  /* If the RHS is a conversion between a pointer and an integer, the
	     pointer escapes since we can't track the integer.  */
	  if (POINTER_TYPE_P (from) && !POINTER_TYPE_P (to))
	    return ESCAPE_BAD_CAST;

	  /* Same if the RHS is a conversion between a regular pointer and a
	     ref-all pointer since we can't track the SMT of the former.  */
	  if (POINTER_TYPE_P (from) && !TYPE_REF_CAN_ALIAS_ALL (from)
	      && POINTER_TYPE_P (to) && TYPE_REF_CAN_ALIAS_ALL (to))
	    return ESCAPE_BAD_CAST;
	}

      /* If the LHS is an SSA name, it can't possibly represent a non-local
	 memory store.  */
      if (TREE_CODE (lhs) == SSA_NAME)
	return NO_ESCAPE;

      /* FIXME: LHS is not an SSA_NAME.  Even if it's an assignment to a
	 local variables we cannot be sure if it will escape, because we
	 don't have information about objects not in SSA form.  Need to
	 implement something along the lines of

	 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
	 Midkiff, ``Escape analysis for java,'' in Proceedings of the
	 Conference on Object-Oriented Programming Systems, Languages, and
	 Applications (OOPSLA), pp. 1-19, 1999.  */
      return ESCAPE_STORED_IN_GLOBAL;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR)
    return ESCAPE_TO_RETURN;

  return NO_ESCAPE;
}
/* Create a new memory tag of type TYPE.
   Does NOT push it into the current binding.  CODE selects the kind of
   tag decl (SYMBOL_MEMORY_TAG, NAME_MEMORY_TAG or STRUCT_FIELD_TAG);
   PREFIX names the temporary.  */

static tree
create_tag_raw (enum tree_code code, tree type, const char *prefix)
{
  tree tmp_var;
  tree new_type;

  /* Make the type of the variable writable.
     NOTE(review): NEW_TYPE is computed here but build_decl below is
     passed the original TYPE, so this unqualified variant appears to
     be unused -- looks like either dead code or the wrong argument;
     confirm intent before changing, as alias sets hang off types.  */
  new_type = build_type_variant (type, 0, 0);
  TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);

  tmp_var = build_decl (code, create_tmp_var_name (prefix),
			type);
  /* Make the variable writable.  */
  TREE_READONLY (tmp_var) = 0;

  /* It doesn't start out global.  */
  MTAG_GLOBAL (tmp_var) = 0;
  TREE_STATIC (tmp_var) = 0;
  TREE_USED (tmp_var) = 1;

  return tmp_var;
}
/* Create a new memory tag of type TYPE.  If IS_TYPE_TAG is true, the tag
   is considered to represent all the pointers whose pointed-to types are
   in the same alias set class.  Otherwise, the tag represents a single
   SSA_NAME pointer variable.  The new tag is entered into the
   referenced-variables table.  */

static tree
create_memory_tag (tree type, bool is_type_tag)
{
  var_ann_t ann;
  tree tag = create_tag_raw (is_type_tag ? SYMBOL_MEMORY_TAG : NAME_MEMORY_TAG,
			     type, (is_type_tag) ? "SMT" : "NMT");

  /* By default, memory tags are local variables.  Alias analysis will
     determine whether they should be considered globals.  */
  DECL_CONTEXT (tag) = current_function_decl;

  /* Memory tags are by definition addressable.  */
  TREE_ADDRESSABLE (tag) = 1;

  ann = get_var_ann (tag);
  ann->symbol_mem_tag = NULL_TREE;

  /* Add the tag to the symbol table.  */
  add_referenced_var (tag);

  return tag;
}
/* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
   This is used if P_i has been found to point to a specific set of
   variables or to a non-aliased memory location like the address returned
   by malloc functions.  */

static tree
get_nmt_for (tree ptr)
{
  struct ptr_info_def *pi = get_ptr_info (ptr);
  tree tag = pi->name_mem_tag;

  /* NOTE(review): a freshly created tag is returned but not stored into
     PI->NAME_MEM_TAG here; presumably the caller records it -- confirm
     against callers before relying on this for memoization.  */
  if (tag == NULL_TREE)
    tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
  return tag;
}
/* Return the symbol memory tag associated to pointer PTR.  A memory
   tag is an artificial variable that represents the memory location
   pointed-to by PTR.  It is used to model the effects of pointer
   de-references on addressable variables.

   AI points to the data gathered during alias analysis.  This
   function populates the array AI->POINTERS.  */

static tree
get_tmt_for (tree ptr, struct alias_info *ai)
{
  size_t i;
  tree tag;
  tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
  HOST_WIDE_INT tag_set = get_alias_set (tag_type);

  /* We use a unique memory tag for all the ref-all pointers.  */
  if (PTR_IS_REF_ALL (ptr))
    {
      if (!ai->ref_all_symbol_mem_tag)
	ai->ref_all_symbol_mem_tag = create_memory_tag (void_type_node, true);
      return ai->ref_all_symbol_mem_tag;
    }

  /* To avoid creating unnecessary memory tags, only create one memory tag
     per alias set class.  Note that it may be tempting to group
     memory tags based on conflicting alias sets instead of
     equivalence.  That would be wrong because alias sets are not
     necessarily transitive (as demonstrated by the libstdc++ test
     23_containers/vector/cons/4.cc).  Given three alias sets A, B, C
     such that conflicts (A, B) == true and conflicts (A, C) == true,
     it does not necessarily follow that conflicts (B, C) == true.  */
  for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
    {
      struct alias_map_d *curr = ai->pointers[i];
      tree curr_tag = var_ann (curr->var)->symbol_mem_tag;
      if (tag_set == curr->set)
	{
	  tag = curr_tag;
	  break;
	}
    }

  /* If VAR cannot alias with any of the existing memory tags, create a new
     tag for PTR and add it to the POINTERS array.  */
  if (tag == NULL_TREE)
    {
      struct alias_map_d *alias_map;

      /* If PTR did not have a symbol tag already, create a new SMT.*
	 artificial variable representing the memory location
	 pointed-to by PTR.  */
      if (var_ann (ptr)->symbol_mem_tag == NULL_TREE)
	tag = create_memory_tag (tag_type, true);
      else
	tag = var_ann (ptr)->symbol_mem_tag;

      /* Add PTR to the POINTERS array.  Note that we are not interested in
	 PTR's alias set.  Instead, we cache the alias set for the memory that
	 PTR points to.  */
      alias_map = XCNEW (struct alias_map_d);
      alias_map->var = ptr;
      alias_map->set = tag_set;
      ai->pointers[ai->num_pointers++] = alias_map;
    }

  /* If the pointed-to type is volatile, so is the tag.  */
  TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);

  /* Make sure that the symbol tag has the same alias set as the
     pointed-to type.  */
  gcc_assert (tag_set == get_alias_set (tag));

  return tag;
}
/* Create GLOBAL_VAR, an artificial global variable to act as a
   representative of all the variables that may be clobbered by function
   calls.  The decl is stored in cfun->gimple_df->global_var and marked
   call-clobbered and for SSA renaming.  */

static void
create_global_var (void)
{
  tree global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
				void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  /* Mark it external and static so it is treated as a global symbol.  */
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;

  create_var_ann (global_var);
  mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
  add_referenced_var (global_var);
  mark_sym_for_renaming (global_var);
  cfun->gimple_df->global_var = global_var;
}
/* Dump alias statistics on FILE.  Prints the counters accumulated in
   the file-static ALIAS_STATS structure for the current function.  */

static void
dump_alias_stats (FILE *file)
{
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  fprintf (file, "\nAlias statistics for %s\n\n", funcname);
  fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
  fprintf (file, "Total alias mayalias results:\t%u\n",
	   alias_stats.alias_mayalias);
  fprintf (file, "Total alias noalias results:\t%u\n",
	   alias_stats.alias_noalias);
  fprintf (file, "Total simple queries:\t%u\n",
	   alias_stats.simple_queries);
  fprintf (file, "Total simple resolved:\t%u\n",
	   alias_stats.simple_resolved);
  fprintf (file, "Total TBAA queries:\t%u\n",
	   alias_stats.tbaa_queries);
  fprintf (file, "Total TBAA resolved:\t%u\n",
	   alias_stats.tbaa_resolved);
  fprintf (file, "Total non-addressable structure type queries:\t%u\n",
	   alias_stats.structnoaddress_queries);
  fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
	   alias_stats.structnoaddress_resolved);
}
/* Dump alias information on FILE: flow-insensitive data (aliased
   symbols, dereferenced pointers, symbol memory tags) followed by
   flow-sensitive data (SSA_NAME pointers and name memory tags).  */

void
dump_alias_info (FILE *file)
{
  size_t i;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  referenced_var_iterator rvi;
  tree var;

  fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nDereferenced pointers\n\n");

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      var_ann_t ann = var_ann (var);
      if (ann->symbol_mem_tag)
	dump_variable (file, var);
    }

  fprintf (file, "\nSymbol memory tags\n\n");

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
	dump_variable (file, var);
    }

  fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);

  fprintf (file, "SSA_NAME pointers\n\n");
  /* SSA name 0 is never used; start at 1.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (ptr == NULL_TREE)
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (!SSA_NAME_IN_FREE_LIST (ptr)
	  && pi
	  && pi->name_mem_tag)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\nName memory tags\n\n");

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (TREE_CODE (var) == NAME_MEMORY_TAG)
	dump_variable (file, var);
    }

  fprintf (file, "\n");
}
/* Dump alias information on stderr.  Convenience wrapper for use from
   the debugger.  */

void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
/* Return the alias information associated with pointer T.  It creates a
   new instance if none existed.  T must be an SSA_NAME of pointer type
   (asserted below).  */

struct ptr_info_def *
get_ptr_info (tree t)
{
  struct ptr_info_def *pi;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));

  pi = SSA_NAME_PTR_INFO (t);
  if (pi == NULL)
    {
      /* Allocate a zero-initialized record in GC memory and cache it
	 on the SSA name.  */
      pi = GGC_CNEW (struct ptr_info_def);
      SSA_NAME_PTR_INFO (t) = pi;
    }

  return pi;
}
/* Dump points-to information for SSA_NAME PTR into FILE.  Prints the
   name tag, flags and points-to set recorded in PTR's ptr_info, if
   any.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    {
      if (pi->name_mem_tag)
	{
	  fprintf (file, ", name memory tag: ");
	  print_generic_expr (file, pi->name_mem_tag, dump_flags);
	}

      if (pi->is_dereferenced)
	fprintf (file, ", is dereferenced");

      if (pi->value_escapes_p)
	fprintf (file, ", its value escapes");

      if (pi->pt_anything)
	fprintf (file, ", points-to anything");

      if (pi->pt_null)
	fprintf (file, ", points-to NULL");

      if (pi->pt_vars)
	{
	  unsigned ix;
	  bitmap_iterator bi;

	  fprintf (file, ", points-to vars: { ");
	  EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, ix, bi)
	    {
	      print_generic_expr (file, referenced_var (ix), dump_flags);
	      fprintf (file, " ");
	    }
	  fprintf (file, "}");
	}
    }

  fprintf (file, "\n");
}
/* Dump points-to information for VAR into stderr.  Convenience wrapper
   for use from the debugger.  */

void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}
/* Dump points-to information into FILE.  NOTE: This function is slow, as
   it needs to traverse the whole CFG looking for pointer SSA_NAMEs.  */

void
dump_points_to_info (FILE *file)
{
  basic_block bb;
  block_stmt_iterator si;
  ssa_op_iter iter;
  const char *fname =
    lang_hooks.decl_printable_name (current_function_decl, 2);
  referenced_var_iterator rvi;
  tree var;

  fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);

  /* First dump points-to information for the default definitions of
     pointer variables.  This is necessary because default definitions are
     not part of the code.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  tree def = gimple_default_def (cfun, var);
	  if (def)
	    dump_points_to_info_for (file, def);
	}
    }

  /* Dump points-to information for every pointer defined in the program.  */
  FOR_EACH_BB (bb)
    {
      tree phi;

      /* PHI results can define pointers too.  */
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  tree ptr = PHI_RESULT (phi);
	  if (POINTER_TYPE_P (TREE_TYPE (ptr)))
	    dump_points_to_info_for (file, ptr);
	}

      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
	{
	  tree stmt = bsi_stmt (si);
	  tree def;
	  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
	    if (POINTER_TYPE_P (TREE_TYPE (def)))
	      dump_points_to_info_for (file, def);
	}
    }

  fprintf (file, "\n");
}
/* Dump points-to information for the current function into stderr.
   Convenience wrapper for use from the debugger.  */

void
debug_points_to_info (void)
{
  dump_points_to_info (stderr);
}
/* Dump to FILE the list of variables that may be aliasing VAR.  If VAR
   is an SSA_NAME, the underlying symbol's alias set is dumped.  */

void
dump_may_aliases_for (FILE *file, tree var)
{
  VEC(tree, gc) *aliases;

  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);

  aliases = var_ann (var)->may_aliases;
  if (aliases)
    {
      size_t i;
      tree al;
      fprintf (file, "{ ");
      for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
	{
	  print_generic_expr (file, al, dump_flags);
	  fprintf (file, " ");
	}
      fprintf (file, "}");
    }
}
/* Dump to stderr the list of variables that may be aliasing VAR.
   Convenience wrapper for use from the debugger.  */

void
debug_may_aliases_for (tree var)
{
  dump_may_aliases_for (stderr, var);
}
/* Return true if VAR may be aliased.  The order of the checks below is
   significant: addressability first, then global visibility, then the
   automatic-variable early exit (which must follow the global checks --
   see the comment inline).  */

bool
may_be_aliased (tree var)
{
  /* Obviously.  */
  if (TREE_ADDRESSABLE (var))
    return true;

  /* Globally visible variables can have their addresses taken by other
     translation units.  Memory tags carry their own "global" flag.  */
  if (MTAG_P (var)
      && (MTAG_GLOBAL (var) || TREE_PUBLIC (var)))
    return true;
  else if (!MTAG_P (var)
	   && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
    return true;

  /* Automatic variables can't have their addresses escape any other way.
     This must be after the check for global variables, as extern declarations
     do not have TREE_STATIC set.  */
  if (!TREE_STATIC (var))
    return false;

  /* If we're in unit-at-a-time mode, then we must have seen all occurrences
     of address-of operators, and so we can trust TREE_ADDRESSABLE.  Otherwise
     we can only be sure the variable isn't addressable if it's local to the
     current function.  */
  if (flag_unit_at_a_time)
    return false;
  if (decl_function_context (var) == current_function_decl)
    return false;

  return true;
}
/* Given two symbols return TRUE if one is in the alias set of the other.
   If SYM is known to be aliased, search TAG's may-alias set for SYM;
   otherwise search SYM's may-alias set for TAG.  */

bool
is_aliased_with (tree tag, tree sym)
{
  size_t i;
  VEC(tree,gc) *aliases;
  tree al;

  if (var_ann (sym)->is_aliased)
    {
      aliases = var_ann (tag)->may_aliases;

      if (aliases == NULL)
	return false;

      for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
	if (al == sym)
	  return true;
    }
  else
    {
      aliases = var_ann (sym)->may_aliases;

      if (aliases == NULL)
	return false;

      for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
	if (al == tag)
	  return true;
    }

  return false;
}
/* The following is based on code in add_stmt_operand to ensure that the
   same defs/uses/vdefs/vuses will be found after replacing a reference
   to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
   is the address of var.  Return a memtag for the ptr, after adding the
   proper may_aliases to it (which are the aliases of var, if it has any,
   or var itself).  */

static tree
add_may_alias_for_new_tag (tree tag, tree var)
{
  var_ann_t v_ann = var_ann (var);
  VEC(tree, gc) *aliases = v_ann->may_aliases;

  /* Case 1: |aliases| == 1.  If VAR's single alias is itself a symbol
     memory tag, reuse that tag instead of TAG.  */
  if ((aliases != NULL)
      && (VEC_length (tree, aliases) == 1))
    {
      tree ali = VEC_index (tree, aliases, 0);

      if (TREE_CODE (ali) == SYMBOL_MEMORY_TAG)
	return ali;
    }

  /* Case 2: |aliases| == 0 -- VAR itself becomes TAG's alias.  */
  if (aliases == NULL)
    add_may_alias (tag, var);
  else
    {
      /* Case 3: |aliases| > 1 -- copy VAR's whole alias set to TAG.  */
      unsigned i;
      tree al;

      for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
	add_may_alias (tag, al);
    }

  return tag;
}
/* Create a new symbol tag for PTR.  Construct the may-alias list of this type
   tag so that it has the aliasing of VAR, or of the relevant subvars of VAR
   according to the location accessed by EXPR.

   Note, the set of aliases represented by the new symbol tag are not marked
   for renaming.  */

void
new_type_alias (tree ptr, tree var, tree expr)
{
  var_ann_t p_ann = var_ann (ptr);
  tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
  tree tag;
  subvar_t svars;
  tree ali = NULL_TREE;
  HOST_WIDE_INT offset, size, maxsize;
  tree ref;
  VEC (tree, heap) *overlaps = NULL;
  subvar_t sv;
  unsigned int len;

  /* PTR must not already have a symbol tag, and VAR must be a real
     variable, not a memory tag.  */
  gcc_assert (p_ann->symbol_mem_tag == NULL_TREE);
  gcc_assert (!MTAG_P (var));

  ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
  gcc_assert (ref);

  tag = create_memory_tag (tag_type, true);
  p_ann->symbol_mem_tag = tag;

  /* Add VAR to the may-alias set of PTR's new symbol tag.  If VAR has
     subvars, add the subvars to the tag instead of the actual var.  */
  if (var_can_have_subvars (ref)
      && (svars = get_subvars_for_var (ref)))
    {
      /* Collect only the subvars that overlap the accessed extent.  */
      for (sv = svars; sv; sv = sv->next)
	{
	  bool exact;

	  if (overlap_subvar (offset, maxsize, sv->var, &exact))
	    VEC_safe_push (tree, heap, overlaps, sv->var);
	}
      gcc_assert (overlaps != NULL);
    }
  else if (var_can_have_subvars (var)
	   && (svars = get_subvars_for_var (var)))
    {
      /* If the REF is not a direct access to VAR (e.g., it is a dereference
	 of a pointer), we should scan the virtual operands of REF the same
	 way as tree-ssa-operands do.  At the moment, this is somewhat
	 difficult, so we just give up and add all the subvars of VAR.
	 On mem-ssa branch, the scanning for virtual operands have been
	 split from the rest of tree-ssa-operands, so it should be much
	 easier to fix this problem correctly once mem-ssa is merged.  */
      for (sv = svars; sv; sv = sv->next)
	VEC_safe_push (tree, heap, overlaps, sv->var);

      gcc_assert (overlaps != NULL);
    }
  else
    ali = add_may_alias_for_new_tag (tag, var);

  len = VEC_length (tree, overlaps);
  if (len > 0)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "\nnumber of overlapping subvars = %u\n", len);

      if (len == 1)
	ali = add_may_alias_for_new_tag (tag, VEC_index (tree, overlaps, 0));
      else if (len > 1)
	{
	  unsigned int k;
	  tree sv_var;

	  for (k = 0; VEC_iterate (tree, overlaps, k, sv_var); k++)
	    {
	      ali = add_may_alias_for_new_tag (tag, sv_var);

	      if (ali != tag)
		{
		  /* Can happen only if 'Case 1' of add_may_alias_for_new_tag
		     took place.  Since more than one svar was found, we add
		     'ali' as one of the may_aliases of the new tag.  */
		  add_may_alias (tag, ali);
		  ali = tag;
		}
	    }
	}
      VEC_free (tree, heap, overlaps);
    }

  p_ann->symbol_mem_tag = ali;
  TREE_READONLY (tag) = TREE_READONLY (var);
  MTAG_GLOBAL (tag) = is_global_var (var);
}
/* This represents the used range of a variable.  Offsets are bit
   positions as returned by get_ref_base_and_extent.  */

typedef struct used_part
{
  /* Smallest bit offset of the variable known to be accessed.  */
  HOST_WIDE_INT minused;
  /* One past the largest bit offset known to be accessed.  */
  HOST_WIDE_INT maxused;
  /* True if we have an explicit use/def of some portion of this variable,
     even if it is all of it. i.e. a.b = 5 or temp = a.b.  */
  bool explicit_uses;
  /* True if we have an implicit use/def of some portion of this
     variable.  Implicit uses occur when we can't tell what part we
     are referencing, and have to make conservative assumptions.  */
  bool implicit_uses;
  /* True if the structure is only written to or taken its address.  */
  bool write_only;
} *used_part_t;
/* A hash table mapping variable uids to used_part structures.  (The
   original comment said "array"; used_portions is created with
   htab_create below.)  */

static htab_t used_portions;

/* One entry of USED_PORTIONS: maps a DECL_UID to its used_part.  */

struct used_part_map
{
  /* DECL_UID of the variable described by TO.  */
  unsigned int uid;
  /* The used-portion record for that variable.  */
  used_part_t to;
};
2913 /* Return true if the uid in the two used part maps are equal. */
2915 static int
2916 used_part_map_eq (const void *va, const void *vb)
2918 const struct used_part_map *a = (const struct used_part_map *) va;
2919 const struct used_part_map *b = (const struct used_part_map *) vb;
2920 return (a->uid == b->uid);
2923 /* Hash a from uid in a used_part_map. */
2925 static unsigned int
2926 used_part_map_hash (const void *item)
2928 return ((const struct used_part_map *)item)->uid;
2931 /* Free a used part map element. */
2933 static void
2934 free_used_part_map (void *item)
2936 free (((struct used_part_map *)item)->to);
2937 free (item);
2940 /* Lookup a used_part structure for a UID. */
2942 static used_part_t
2943 up_lookup (unsigned int uid)
2945 struct used_part_map *h, in;
2946 in.uid = uid;
2947 h = (struct used_part_map *) htab_find_with_hash (used_portions, &in, uid);
2948 if (!h)
2949 return NULL;
2950 return h->to;
/* Insert the pair UID, TO into the used part hashtable.  If an entry
   for UID already exists, its map cell is freed and replaced.
   NOTE(review): only the old map cell is freed here, not its used_part
   record -- this looks safe because callers pass the record obtained
   from get_or_create_used_part_for (the same pointer), but confirm no
   caller inserts a fresh record over an existing uid.  */

static void
up_insert (unsigned int uid, used_part_t to)
{
  struct used_part_map *h;
  void **loc;

  h = XNEW (struct used_part_map);
  h->uid = uid;
  h->to = to;
  loc = htab_find_slot_with_hash (used_portions, h,
				  uid, INSERT);
  if (*loc != NULL)
    free (*loc);
  *(struct used_part_map **) loc = h;
}
2972 /* Given a variable uid, UID, get or create the entry in the used portions
2973 table for the variable. */
2975 static used_part_t
2976 get_or_create_used_part_for (size_t uid)
2978 used_part_t up;
2979 if ((up = up_lookup (uid)) == NULL)
2981 up = XCNEW (struct used_part);
2982 up->minused = INT_MAX;
2983 up->maxused = 0;
2984 up->explicit_uses = false;
2985 up->implicit_uses = false;
2986 up->write_only = true;
2989 return up;
/* Create and return a structure sub-variable for field type FIELD at
   offset OFFSET, with size SIZE, of variable VAR.  The new tag is
   registered in REFERENCED_VARS and linked back to VAR.  */

static tree
create_sft (tree var, tree field, unsigned HOST_WIDE_INT offset,
	    unsigned HOST_WIDE_INT size)
{
  var_ann_t ann;
  tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");

  /* We need to copy the various flags from VAR to SUBVAR, so that
     they are is_global_var iff the original variable was.
     NOTE(review): MTAG_GLOBAL is seeded from DECL_EXTERNAL only;
     TREE_PUBLIC/TREE_STATIC are copied as separate flags below --
     confirm that combination matches is_global_var's expectations.  */
  DECL_CONTEXT (subvar) = DECL_CONTEXT (var);
  MTAG_GLOBAL (subvar) = DECL_EXTERNAL (var);
  TREE_PUBLIC (subvar) = TREE_PUBLIC (var);
  TREE_STATIC (subvar) = TREE_STATIC (var);
  TREE_READONLY (subvar) = TREE_READONLY (var);
  TREE_ADDRESSABLE (subvar) = TREE_ADDRESSABLE (var);

  /* Add the new variable to REFERENCED_VARS.  */
  ann = get_var_ann (subvar);
  ann->symbol_mem_tag = NULL;
  add_referenced_var (subvar);
  SFT_PARENT_VAR (subvar) = var;
  SFT_OFFSET (subvar) = offset;
  SFT_SIZE (subvar) = size;
  return subvar;
}
/* Given an aggregate VAR, create the subvariables that represent its
   fields.  Nothing is done when VAR has no recorded used portion, is
   write-only, has un-analyzable fields, or trips the implicit-field
   heuristic described inline.  */

static void
create_overlap_variables_for (tree var)
{
  VEC(fieldoff_s,heap) *fieldstack = NULL;
  used_part_t up;
  size_t uid = DECL_UID (var);

  /* No recorded uses, or only writes/address-takings: no subvars.  */
  up = up_lookup (uid);
  if (!up
      || up->write_only)
    return;

  push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL);
  if (VEC_length (fieldoff_s, fieldstack) != 0)
    {
      subvar_t *subvars;
      fieldoff_s *fo;
      bool notokay = false;
      int fieldcount = 0;
      int i;
      HOST_WIDE_INT lastfooffset = -1;
      HOST_WIDE_INT lastfosize = -1;
      tree lastfotype = NULL_TREE;

      /* Not all fields have DECL_SIZE set, and those that don't, we don't
	 know their size, and thus, can't handle.
	 The same is true of fields with DECL_SIZE that is not an integer
	 constant (such as variable sized fields).
	 Fields with offsets which are not constant will have an offset < 0
	 We *could* handle fields that are constant sized arrays, but
	 currently don't.  Doing so would require some extra changes to
	 tree-ssa-operands.c.  */

      for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
	{
	  if (!fo->size
	      || TREE_CODE (fo->size) != INTEGER_CST
	      || fo->offset < 0)
	    {
	      notokay = true;
	      break;
	    }
	  fieldcount++;
	}

      /* The current heuristic we use is as follows:
	 If the variable has no used portions in this function, no
	 structure vars are created for it.
	 Otherwise,
	 If the variable has less than SALIAS_MAX_IMPLICIT_FIELDS,
	 we always create structure vars for them.
	 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
	 some explicit uses, we create structure vars for them.
	 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
	 no explicit uses, we do not create structure vars for them.  */

      if (fieldcount >= SALIAS_MAX_IMPLICIT_FIELDS
	  && !up->explicit_uses)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Variable ");
	      print_generic_expr (dump_file, var, 0);
	      fprintf (dump_file, " has no explicit uses in this function, and is > SALIAS_MAX_IMPLICIT_FIELDS, so skipping\n");
	    }
	  notokay = true;
	}

      /* Bail out, if we can't create overlap variables.  */
      if (notokay)
	{
	  VEC_free (fieldoff_s, heap, fieldstack);
	  return;
	}

      /* Otherwise, create the variables.  */
      subvars = lookup_subvars_for_var (var);

      sort_fieldstack (fieldstack);

      /* Walk the sorted field stack from the end down, creating one SFT
	 per distinct (offset, size, type) that intersects the used
	 portion of VAR.  */
      for (i = VEC_length (fieldoff_s, fieldstack);
	   VEC_iterate (fieldoff_s, fieldstack, --i, fo);)
	{
	  subvar_t sv;
	  HOST_WIDE_INT fosize;
	  tree currfotype;

	  fosize = TREE_INT_CST_LOW (fo->size);
	  currfotype = fo->type;

	  /* If this field isn't in the used portion,
	     or it has the exact same offset and size as the last
	     field, skip it.  */

	  if (((fo->offset <= up->minused
		&& fo->offset + fosize <= up->minused)
	       || fo->offset >= up->maxused)
	      || (fo->offset == lastfooffset
		  && fosize == lastfosize
		  && currfotype == lastfotype))
	    continue;
	  sv = GGC_NEW (struct subvar);
	  sv->next = *subvars;
	  sv->var = create_sft (var, fo->type, fo->offset, fosize);

	  if (dump_file)
	    {
	      fprintf (dump_file, "structure field tag %s created for var %s",
		       get_name (sv->var), get_name (var));
	      fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
		       SFT_OFFSET (sv->var));
	      fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
		       SFT_SIZE (sv->var));
	      fprintf (dump_file, "\n");
	    }

	  lastfotype = currfotype;
	  lastfooffset = fo->offset;
	  lastfosize = fosize;
	  *subvars = sv;
	}

      /* Once we have created subvars, the original is no longer call
	 clobbered on its own.  Its call clobbered status depends
	 completely on the call clobbered status of the subvars.

	 add_referenced_var in the above loop will take care of
	 marking subvars of global variables as call clobbered for us
	 to start, since they are global as well.  */
      clear_call_clobbered (var);
    }

  VEC_free (fieldoff_s, heap, fieldstack);
}
/* Find the conservative answer to the question of what portions of what
   structures are used by this statement.  We assume that if we have a
   component ref with a known size + offset, that we only need that part
   of the structure.  For unknown cases, or cases where we do something
   to the whole structure, we assume we need to create fields for the
   entire structure.

   This is a walk_tree callback.  LHS_P is non-NULL when the current
   subtree is the LHS of an assignment (see the MODIFY_EXPR case); it is
   used to keep the write_only flag accurate.  */

static tree
find_used_portions (tree *tp, int *walk_subtrees, void *lhs_p)
{
  switch (TREE_CODE (*tp))
    {
    case MODIFY_EXPR:
      /* Recurse manually here to track whether the use is in the
	 LHS of an assignment.  */
      find_used_portions (&TREE_OPERAND (*tp, 0), walk_subtrees, tp);
      return find_used_portions (&TREE_OPERAND (*tp, 1), walk_subtrees, NULL);
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
      {
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitmaxsize;
	HOST_WIDE_INT bitpos;
	tree ref;
	ref = get_ref_base_and_extent (*tp, &bitpos, &bitsize, &bitmaxsize);
	if (DECL_P (ref)
	    && var_can_have_subvars (ref)
	    && bitmaxsize != -1)
	  {
	    size_t uid = DECL_UID (ref);
	    used_part_t up;

	    up = get_or_create_used_part_for (uid);

	    /* Widen the recorded used range to cover this access.  */
	    if (bitpos <= up->minused)
	      up->minused = bitpos;
	    if ((bitpos + bitmaxsize >= up->maxused))
	      up->maxused = bitpos + bitmaxsize;

	    /* An access with exactly-known extent is explicit;
	       otherwise we must be conservative.  */
	    if (bitsize == bitmaxsize)
	      up->explicit_uses = true;
	    else
	      up->implicit_uses = true;
	    if (!lhs_p)
	      up->write_only = false;
	    up_insert (uid, up);
	  }

	*walk_subtrees = 0;
	return NULL_TREE;
      }
      break;
      /* This is here to make sure we mark the entire base variable as used
	 when you take its address.  Because our used portion analysis is
	 simple, we aren't looking at casts or pointer arithmetic to see what
	 happens when you take the address.  */
    case ADDR_EXPR:
      {
	tree var = get_base_address (TREE_OPERAND (*tp, 0));

	if (var
	    && DECL_P (var)
	    && DECL_SIZE (var)
	    && var_can_have_subvars (var)
	    && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
	  {
	    used_part_t up;
	    size_t uid = DECL_UID (var);

	    up = get_or_create_used_part_for (uid);

	    /* Taking the address uses the whole variable.  */
	    up->minused = 0;
	    up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
	    up->implicit_uses = true;
	    if (!lhs_p)
	      up->write_only = false;

	    up_insert (uid, up);
	    *walk_subtrees = 0;
	    return NULL_TREE;
	  }
      }
      break;
    case CALL_EXPR:
      {
	tree *arg;
	/* Walk call arguments, but leave ADDR_EXPR arguments to the
	   ADDR_EXPR case via normal tree walking.  */
	for (arg = &TREE_OPERAND (*tp, 1); *arg; arg = &TREE_CHAIN (*arg))
	  {
	    if (TREE_CODE (TREE_VALUE (*arg)) != ADDR_EXPR)
	      find_used_portions (&TREE_VALUE (*arg), walk_subtrees, NULL);
	  }
	*walk_subtrees = 0;
	return NULL_TREE;
      }
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      {
	tree var = *tp;
	if (DECL_SIZE (var)
	    && var_can_have_subvars (var)
	    && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
	  {
	    used_part_t up;
	    size_t uid = DECL_UID (var);

	    up = get_or_create_used_part_for (uid);

	    /* A bare reference to the decl uses the whole variable.  */
	    up->minused = 0;
	    up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
	    up->implicit_uses = true;

	    up_insert (uid, up);
	    *walk_subtrees = 0;
	    return NULL_TREE;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
3291 /* Create structure field variables for structures used in this function. */
3293 static unsigned int
3294 create_structure_vars (void)
3296 basic_block bb;
3297 safe_referenced_var_iterator rvi;
3298 VEC (tree, heap) *varvec = NULL;
3299 tree var;
3301 used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
3302 free_used_part_map);
3304 FOR_EACH_BB (bb)
3306 block_stmt_iterator bsi;
3307 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3309 walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
3310 find_used_portions,
3311 NULL);
3314 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, rvi)
3316 /* The C++ FE creates vars without DECL_SIZE set, for some reason. */
3317 if (var
3318 && DECL_SIZE (var)
3319 && var_can_have_subvars (var)
3320 && !MTAG_P (var)
3321 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3322 create_overlap_variables_for (var);
3324 htab_delete (used_portions);
3325 VEC_free (tree, heap, varvec);
3326 return 0;
3329 static bool
3330 gate_structure_vars (void)
3332 return flag_tree_salias != 0;
3335 struct tree_opt_pass pass_create_structure_vars =
3337 "salias", /* name */
3338 gate_structure_vars, /* gate */
3339 create_structure_vars, /* execute */
3340 NULL, /* sub */
3341 NULL, /* next */
3342 0, /* static_pass_number */
3343 0, /* tv_id */
3344 PROP_cfg, /* properties_required */
3345 0, /* properties_provided */
3346 0, /* properties_destroyed */
3347 0, /* todo_flags_start */
3348 TODO_dump_func, /* todo_flags_finish */
3349 0 /* letter */
3352 /* Reset the DECL_CALL_CLOBBERED flags on our referenced vars. In
3353 theory, this only needs to be done for globals. */
3355 static unsigned int
3356 reset_cc_flags (void)
3358 tree var;
3359 referenced_var_iterator rvi;
3361 FOR_EACH_REFERENCED_VAR (var, rvi)
3362 DECL_CALL_CLOBBERED (var) = false;
3363 return 0;
3366 struct tree_opt_pass pass_reset_cc_flags =
3368 NULL, /* name */
3369 NULL, /* gate */
3370 reset_cc_flags, /* execute */
3371 NULL, /* sub */
3372 NULL, /* next */
3373 0, /* static_pass_number */
3374 0, /* tv_id */
3375 PROP_referenced_vars |PROP_cfg, /* properties_required */
3376 0, /* properties_provided */
3377 0, /* properties_destroyed */
3378 0, /* todo_flags_start */
3379 0, /* todo_flags_finish */
3380 0 /* letter */