1 /* Alias analysis for trees.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "timevar.h"
32 #include "expr.h"
33 #include "ggc.h"
34 #include "langhooks.h"
35 #include "flags.h"
36 #include "function.h"
37 #include "diagnostic.h"
38 #include "tree-dump.h"
39 #include "tree-gimple.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "tree-ssa-structalias.h"
44 #include "convert.h"
45 #include "params.h"
46 #include "ipa-type-escape.h"
47 #include "vec.h"
48 #include "bitmap.h"
50 /* Obstack used to hold grouping bitmaps and other temporary bitmaps used by
51 aliasing */
52 static bitmap_obstack alias_obstack;
54 /* 'true' after aliases have been computed (see compute_may_aliases). */
55 bool aliases_computed_p;
57 /* Structure to map a variable to its alias set and keep track of the
58 virtual operands that will be needed to represent it. */
59 struct alias_map_d
61 /* Variable and its alias set. */
62 tree var;
63 HOST_WIDE_INT set;
65 /* Total number of virtual operands that will be needed to represent
66 all the aliases of VAR. */
67 long total_alias_vops;
69 /* Nonzero if the aliases for this memory tag have been grouped
70 already. Used in group_aliases. */
71 unsigned int grouped_p : 1;
73 /* Set of variables aliased with VAR. This is the exact same
74 information contained in VAR_ANN (VAR)->MAY_ALIASES, but in
75 bitmap form to speed up alias grouping. */
76 bitmap may_aliases;
80 /* Counters used to display statistics on alias analysis. */
81 struct alias_stats_d
83 unsigned int alias_queries;
84 unsigned int alias_mayalias;
85 unsigned int alias_noalias;
86 unsigned int simple_queries;
87 unsigned int simple_resolved;
88 unsigned int tbaa_queries;
89 unsigned int tbaa_resolved;
90 unsigned int structnoaddress_queries;
91 unsigned int structnoaddress_resolved;
95 /* Local variables. */
96 static struct alias_stats_d alias_stats;
98 /* Local functions. */
99 static void compute_flow_insensitive_aliasing (struct alias_info *);
100 static void dump_alias_stats (FILE *);
101 static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT, bool);
102 static tree create_memory_tag (tree type, bool is_type_tag);
103 static tree get_tmt_for (tree, struct alias_info *);
104 static tree get_nmt_for (tree);
105 static void add_may_alias (tree, tree);
106 static void replace_may_alias (tree, size_t, tree);
107 static struct alias_info *init_alias_info (void);
108 static void delete_alias_info (struct alias_info *);
109 static void compute_flow_sensitive_aliasing (struct alias_info *);
110 static void setup_pointers_and_addressables (struct alias_info *);
111 static void create_global_var (void);
112 static void maybe_create_global_var (struct alias_info *ai);
113 static void group_aliases (struct alias_info *);
114 static void set_pt_anything (tree ptr);
116 /* Global declarations. */
118 /* Call clobbered variables in the function. If bit I is set, then
119 REFERENCED_VARS (I) is call-clobbered. */
120 bitmap call_clobbered_vars;
122 /* Addressable variables in the function. If bit I is set, then
123 REFERENCED_VARS (I) has had its address taken. Note that
124 CALL_CLOBBERED_VARS and ADDRESSABLE_VARS are not related. An
125 addressable variable is not necessarily call-clobbered (e.g., a
126 local addressable whose address does not escape) and not all
127 call-clobbered variables are addressable (e.g., a local static
128 variable). */
129 bitmap addressable_vars;
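/* To make the distinction above concrete, a minimal sketch (purely
   illustrative, not code from this pass):

     void f (void)
     {
       static int counter;       <- call-clobbered (a local static) but
                                     never has its address taken
       int local, *p = &local;   <- 'local' is addressable, but since
       *p = counter;                &local never escapes f(), it need
     }                              not be call-clobbered  */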
131 /* When the program has too many call-clobbered variables and call-sites,
132 this variable is used to represent the clobbering effects of function
133 calls. In these cases, all the call clobbered variables in the program
134 are forced to alias this variable. This reduces compile times by not
135 having to keep track of too many V_MAY_DEF expressions at call sites. */
136 tree global_var;
138 DEF_VEC_I(int);
139 DEF_VEC_ALLOC_I(int,heap);
141 /* qsort comparison function to sort type/name tags by DECL_UID. */
143 static int
144 sort_tags_by_id (const void *pa, const void *pb)
146 tree a = *(tree *)pa;
147 tree b = *(tree *)pb;
149 return DECL_UID (a) - DECL_UID (b);
152 /* Initialize WORKLIST to contain those memory tags that are marked call
153 clobbered. Initialize WORKLIST2 to contain the reasons these
154 memory tags escaped. */
156 static void
157 init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
158 VEC (int, heap) **worklist2)
160 referenced_var_iterator rvi;
161 tree curr;
163 FOR_EACH_REFERENCED_VAR (curr, rvi)
165 if (MTAG_P (curr) && is_call_clobbered (curr))
167 VEC_safe_push (tree, heap, *worklist, curr);
168 VEC_safe_push (int, heap, *worklist2, var_ann (curr)->escape_mask);
173 /* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
174 ALIAS is not already marked call clobbered, and is a memory
175 tag. */
177 static void
178 add_to_worklist (tree alias, VEC (tree, heap) **worklist,
179 VEC (int, heap) **worklist2,
180 int reason)
182 if (MTAG_P (alias) && !is_call_clobbered (alias))
184 VEC_safe_push (tree, heap, *worklist, alias);
185 VEC_safe_push (int, heap, *worklist2, reason);
189 /* Mark aliases of TAG as call clobbered, and place any tags on the
190 alias list that were not already call clobbered on WORKLIST. */
192 static void
193 mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
194 VEC (int, heap) **worklist2)
196 unsigned int i;
197 VEC (tree, gc) *ma;
198 tree entry;
199 var_ann_t ta = var_ann (tag);
201 if (!MTAG_P (tag))
202 return;
203 ma = may_aliases (tag);
204 if (!ma)
205 return;
207 for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
209 if (!unmodifiable_var_p (entry))
211 add_to_worklist (entry, worklist, worklist2, ta->escape_mask);
212 mark_call_clobbered (entry, ta->escape_mask);
217 /* Tags containing global vars need to be marked as global.
218 Tags containing call clobbered vars need to be marked as call
219 clobbered. */
221 static void
222 compute_tag_properties (void)
224 referenced_var_iterator rvi;
225 tree tag;
226 bool changed = true;
227 VEC (tree, heap) *taglist = NULL;
229 FOR_EACH_REFERENCED_VAR (tag, rvi)
231 if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG)
232 continue;
233 VEC_safe_push (tree, heap, taglist, tag);
236 /* We sort the taglist by DECL_UID, for two reasons.
237 1. To get a sequential ordering to make the bitmap accesses
238 faster.
239 2. Because of the way we compute aliases, it's more likely that
240 an earlier tag is included in a later tag, and this will reduce
241 the number of iterations.
243 If we had a real tag graph, we would just topo-order it and be
244 done with it. */
245 qsort (VEC_address (tree, taglist),
246 VEC_length (tree, taglist),
247 sizeof (tree),
248 sort_tags_by_id);
250 /* Go through each tag not marked as global, and if it aliases
251 global vars, mark it global.
253 If the tag contains call clobbered vars, mark it call
254 clobbered.
256 This loop iterates because tags may appear in the may-aliases
257 list of other tags when we group. */
259 while (changed)
261 unsigned int k;
263 changed = false;
264 for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
266 VEC (tree, gc) *ma;
267 unsigned int i;
268 tree entry;
269 bool tagcc = is_call_clobbered (tag);
270 bool tagglobal = MTAG_GLOBAL (tag);
272 if (tagcc && tagglobal)
273 continue;
275 ma = may_aliases (tag);
276 if (!ma)
277 continue;
279 for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
281 /* Call clobbered entries cause the tag to be marked
282 call clobbered. */
283 if (!tagcc && is_call_clobbered (entry))
285 mark_call_clobbered (tag, var_ann (entry)->escape_mask);
286 tagcc = true;
287 changed = true;
290 /* Global vars cause the tag to be marked global. */
291 if (!tagglobal && is_global_var (entry))
293 MTAG_GLOBAL (tag) = true;
294 changed = true;
295 tagglobal = true;
298 /* Early exit once both global and cc are set, since the
299 loop can't do any more than that. */
300 if (tagcc && tagglobal)
301 break;
305 VEC_free (tree, heap, taglist);
308 /* Set up the initial variable clobbers and globalness.
309 When this function completes, only tags whose aliases need to be
310 clobbered will be set clobbered. Tags clobbered because they
311 contain call clobbered vars are handled in compute_tag_properties. */
313 static void
314 set_initial_properties (struct alias_info *ai)
316 unsigned int i;
317 referenced_var_iterator rvi;
318 tree var;
320 FOR_EACH_REFERENCED_VAR (var, rvi)
322 if (is_global_var (var)
323 && (!var_can_have_subvars (var)
324 || get_subvars_for_var (var) == NULL))
326 if (!unmodifiable_var_p (var))
327 mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
329 else if (TREE_CODE (var) == PARM_DECL
330 && default_def (var)
331 && POINTER_TYPE_P (TREE_TYPE (var)))
333 tree def = default_def (var);
334 get_ptr_info (def)->value_escapes_p = 1;
335 get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
339 for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
341 tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
342 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
343 var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
345 if (pi->value_escapes_p)
347 /* If PTR escapes then its associated memory tags and
348 pointed-to variables are call-clobbered. */
349 if (pi->name_mem_tag)
350 mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);
352 if (v_ann->symbol_mem_tag)
353 mark_call_clobbered (v_ann->symbol_mem_tag, pi->escape_mask);
355 if (pi->pt_vars)
357 bitmap_iterator bi;
358 unsigned int j;
359 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
360 if (!unmodifiable_var_p (referenced_var (j)))
361 mark_call_clobbered (referenced_var (j), pi->escape_mask);
365 /* If the name tag is call clobbered, so is the symbol tag
366 associated with the base VAR_DECL. */
367 if (pi->name_mem_tag
368 && v_ann->symbol_mem_tag
369 && is_call_clobbered (pi->name_mem_tag))
370 mark_call_clobbered (v_ann->symbol_mem_tag, pi->escape_mask);
372 /* Name tags and symbol tags whose targets we do not know might
373 point to global memory and are thus clobbered.
375 FIXME: This is not quite right. They should only be
376 clobbered if value_escapes_p is true, regardless of whether
377 they point to global memory or not.
378 So removing this code and fixing all the bugs would be nice.
379 It is the cause of a bunch of clobbering. */
380 if ((pi->pt_global_mem || pi->pt_anything)
381 && pi->is_dereferenced && pi->name_mem_tag)
383 mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
384 MTAG_GLOBAL (pi->name_mem_tag) = true;
387 if ((pi->pt_global_mem || pi->pt_anything)
388 && pi->is_dereferenced
389 && v_ann->symbol_mem_tag)
391 mark_call_clobbered (v_ann->symbol_mem_tag, ESCAPE_IS_GLOBAL);
392 MTAG_GLOBAL (v_ann->symbol_mem_tag) = true;
398 /* This variable is set to true if we are updating the used alone
399 information for SMTs, or are in a pass that is going to break it
400 temporarily. */
401 bool updating_used_alone;
403 /* Compute which variables need to be marked call clobbered because
404 their tag is call clobbered, and which tags need to be marked
405 global because they contain global variables. */
407 static void
408 compute_call_clobbered (struct alias_info *ai)
410 VEC (tree, heap) *worklist = NULL;
411 VEC(int,heap) *worklist2 = NULL;
413 set_initial_properties (ai);
414 init_transitive_clobber_worklist (&worklist, &worklist2);
415 while (VEC_length (tree, worklist) != 0)
417 tree curr = VEC_pop (tree, worklist);
418 int reason = VEC_pop (int, worklist2);
420 mark_call_clobbered (curr, reason);
421 mark_aliases_call_clobbered (curr, &worklist, &worklist2);
423 VEC_free (tree, heap, worklist);
424 VEC_free (int, heap, worklist2);
425 compute_tag_properties ();
429 /* Recalculate the used_alone information for SMTs. */
431 void
432 recalculate_used_alone (void)
434 VEC (tree, heap) *calls = NULL;
435 block_stmt_iterator bsi;
436 basic_block bb;
437 tree stmt;
438 size_t i;
439 referenced_var_iterator rvi;
440 tree var;
442 /* First, reset all the SMT used alone bits to zero. */
443 updating_used_alone = true;
444 FOR_EACH_REFERENCED_VAR (var, rvi)
445 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
446 SMT_USED_ALONE (var) = 0;
448 /* Walk all the statements.
449 Calls get put into a list of statements to update, since we will
450 need to update operands on them if we make any changes.
451 If we see a bare use of an SMT anywhere in a real virtual use or virtual
452 def, mark the SMT as used alone, and for renaming. */
453 FOR_EACH_BB (bb)
455 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
457 stmt = bsi_stmt (bsi);
458 if (TREE_CODE (stmt) == CALL_EXPR
459 || (TREE_CODE (stmt) == MODIFY_EXPR
460 && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
461 VEC_safe_push (tree, heap, calls, stmt);
462 else
464 ssa_op_iter iter;
466 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter,
467 SSA_OP_VUSE | SSA_OP_VIRTUAL_DEFS)
469 tree svar = var;
471 if (TREE_CODE (var) == SSA_NAME)
472 svar = SSA_NAME_VAR (var);
474 if (TREE_CODE (svar) == SYMBOL_MEMORY_TAG)
476 if (!SMT_USED_ALONE (svar))
478 SMT_USED_ALONE (svar) = true;
479 mark_sym_for_renaming (svar);
487 /* Update the operands on all the calls we saw. */
488 if (calls)
490 for (i = 0; VEC_iterate (tree, calls, i, stmt); i++)
491 update_stmt (stmt);
493 VEC_free (tree, heap, calls);
494 updating_used_alone = false;
497 /* Compute may-alias information for every variable referenced in function
498 FNDECL.
500 Alias analysis proceeds in 3 main phases:
502 1- Points-to and escape analysis.
504 This phase walks the use-def chains in the SSA web looking for three
505 things:
507 * Assignments of the form P_i = &VAR
508 * Assignments of the form P_i = malloc()
509 * Pointers and ADDR_EXPR that escape the current function.
511 The concept of 'escaping' is the same one used in the Java world. When
512 a pointer or an ADDR_EXPR escapes, it means that it has been exposed
513 outside of the current function. So, assignment to global variables,
514 function arguments and returning a pointer are all escape sites, as are
515 conversions between pointers and integers.
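      For instance (an illustrative fragment, not taken from any testcase):

        int *G;

        int *foo (int *p, int x)
        {
          G = p;        <- p escapes through a store to a global
          bar (&x);     <- &x escapes as a call argument
          return p;     <- p escapes through the return value
        }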
517 This is where we are currently limited. Since not everything is renamed
518 into SSA, we lose track of escape properties when a pointer is stashed
519 inside a field in a structure, for instance. In those cases, we are
520 assuming that the pointer does escape.
522 We use escape analysis to determine whether a variable is
523 call-clobbered. Simply put, if an ADDR_EXPR escapes, then the variable
524 is call-clobbered. If a pointer P_i escapes, then all the variables
525 pointed-to by P_i (and its memory tag) also escape.
527 2- Compute flow-sensitive aliases
529 We have two classes of memory tags. Tags associated with the
530 pointed-to data type of the pointers in the program are called
531 "symbol memory tags" (SMT). The other class is the tags associated
532 with SSA_NAMEs, called "name memory tags" (NMT). The basic idea is
533 that when adding operands for an INDIRECT_REF *P_i, we first check
534 whether P_i has a name tag; if it does, we use it, because it has
535 more precise aliasing information. Otherwise, we use the standard
536 symbol tag.
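      For instance (sketch): given

        p_1 = &a;
        ...
        *p_1 = 0;

      the virtual operands for '*p_1 = 0' are taken from p_1's name tag
      if it has one (here it would point to { a }); only when no name tag
      exists do we fall back to p's symbol memory tag.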
538 In this phase, we go through all the pointers we found in points-to
539 analysis and create alias sets for the name memory tags associated with
540 each pointer P_i. If P_i escapes, we mark call-clobbered the variables
541 it points to and its tag.
544 3- Compute flow-insensitive aliases
546 This pass will compare the alias set of every symbol memory tag and
547 every addressable variable found in the program. Given a symbol
548 memory tag SMT and an addressable variable V. If the alias sets of
549 SMT and V conflict (as computed by may_alias_p), then V is marked
550 as an alias tag and added to the alias set of SMT.
552 For instance, consider the following function:
554 foo (int i)
556 int *p, a, b;
558 if (i > 10)
559 p = &a;
560 else
561 p = &b;
563 *p = 3;
564 a = b + 2;
565 return *p;
568 After aliasing analysis has finished, the symbol memory tag for pointer
569 'p' will have two aliases, namely variables 'a' and 'b'. Every time
570 pointer 'p' is dereferenced, we want to mark the operation as a
571 potential reference to 'a' and 'b'.
573 foo (int i)
575 int *p, a, b;
577 if (i_2 > 10)
578 p_4 = &a;
579 else
580 p_6 = &b;
581 # p_1 = PHI <p_4(1), p_6(2)>;
583 # a_7 = V_MAY_DEF <a_3>;
584 # b_8 = V_MAY_DEF <b_5>;
585 *p_1 = 3;
587 # a_9 = V_MAY_DEF <a_7>
588 # VUSE <b_8>
589 a_9 = b_8 + 2;
591 # VUSE <a_9>;
592 # VUSE <b_8>;
593 return *p_1;
596 In certain cases, the list of may aliases for a pointer may grow too
597 large. This may cause an explosion in the number of virtual operands
598 inserted in the code, resulting in increased memory consumption and
599 compilation time.
601 When the number of virtual operands needed to represent aliased
602 loads and stores grows too large (configurable with @option{--param
603 max-aliased-vops}), alias sets are grouped to avoid severe
604 compile-time slow downs and memory consumption. See group_aliases. */
606 static unsigned int
607 compute_may_aliases (void)
609 struct alias_info *ai;
611 memset (&alias_stats, 0, sizeof (alias_stats));
613 /* Initialize aliasing information. */
614 ai = init_alias_info ();
616 /* For each pointer P_i, determine the sets of variables that P_i may
617 point-to. For every addressable variable V, determine whether the
618 address of V escapes the current function, making V call-clobbered
619 (i.e., whether &V is stored in a global variable or if it is passed as a
620 function call argument). */
621 compute_points_to_sets (ai);
623 /* Collect all pointers and addressable variables, compute alias sets,
624 create memory tags for pointers and promote variables whose address is
625 not needed anymore. */
626 setup_pointers_and_addressables (ai);
628 /* Compute flow-sensitive, points-to based aliasing for all the name
629 memory tags. Note that this pass needs to be done before flow
630 insensitive analysis because it uses the points-to information
631 gathered before to mark call-clobbered symbol tags. */
632 compute_flow_sensitive_aliasing (ai);
634 /* Compute type-based flow-insensitive aliasing for all the type
635 memory tags. */
636 compute_flow_insensitive_aliasing (ai);
638 /* Determine if we need to enable alias grouping. */
639 if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
640 group_aliases (ai);
642 /* Compute call clobbering information. */
643 compute_call_clobbered (ai);
645 /* If the program has too many call-clobbered variables and/or function
646 calls, create .GLOBAL_VAR and use it to model call-clobbering
647 semantics at call sites. This reduces the number of virtual operands
648 considerably, improving compile times at the expense of lost
649 aliasing precision. */
650 maybe_create_global_var (ai);
652 /* Debugging dumps. */
653 if (dump_file)
655 dump_referenced_vars (dump_file);
656 if (dump_flags & TDF_STATS)
657 dump_alias_stats (dump_file);
658 dump_points_to_info (dump_file);
659 dump_alias_info (dump_file);
662 /* Deallocate memory used by aliasing data structures. */
663 delete_alias_info (ai);
665 updating_used_alone = true;
667 block_stmt_iterator bsi;
668 basic_block bb;
669 FOR_EACH_BB (bb)
671 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
673 update_stmt_if_modified (bsi_stmt (bsi));
677 recalculate_used_alone ();
678 updating_used_alone = false;
679 return 0;
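/* Note: the MAX_ALIASED_VOPS threshold used above comes from a --param,
   so the point at which alias grouping kicks in can be tuned from the
   command line, e.g. (sketch):

     gcc -O2 --param max-aliased-vops=500 file.c  */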
683 struct tree_opt_pass pass_may_alias =
685 "alias", /* name */
686 NULL, /* gate */
687 compute_may_aliases, /* execute */
688 NULL, /* sub */
689 NULL, /* next */
690 0, /* static_pass_number */
691 TV_TREE_MAY_ALIAS, /* tv_id */
692 PROP_cfg | PROP_ssa, /* properties_required */
693 PROP_alias, /* properties_provided */
694 0, /* properties_destroyed */
695 0, /* todo_flags_start */
696 TODO_dump_func | TODO_update_ssa
697 | TODO_ggc_collect | TODO_verify_ssa
698 | TODO_verify_stmts, /* todo_flags_finish */
699 0 /* letter */
703 /* Data structure used to count the number of dereferences to PTR
704 inside an expression. */
705 struct count_ptr_d
707 tree ptr;
708 unsigned count;
712 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
713 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
715 static tree
716 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
718 struct count_ptr_d *count_p = (struct count_ptr_d *) data;
720 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
721 pointer 'ptr' is *not* dereferenced, it is simply used to compute
722 the address of 'fld' as 'ptr + offsetof(fld)'. */
723 if (TREE_CODE (*tp) == ADDR_EXPR)
725 *walk_subtrees = 0;
726 return NULL_TREE;
729 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
730 count_p->count++;
732 return NULL_TREE;
736 /* Count the number of direct and indirect uses for pointer PTR in
737 statement STMT. The two counts are stored in *NUM_USES_P and
738 *NUM_DEREFS_P respectively. *IS_STORE_P is set to 'true' if at
739 least one of those dereferences is a store operation. */
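/* For example (a sketch): for the statement '*p_1 = foo (p_1)', this
   would yield *NUM_USES_P == 2 (both occurrences of p_1), *NUM_DEREFS_P
   == 1 (the store through *p_1) and *IS_STORE_P == true, since the only
   dereference appears on the LHS.  */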
741 void
742 count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
743 unsigned *num_derefs_p, bool *is_store)
745 ssa_op_iter i;
746 tree use;
748 *num_uses_p = 0;
749 *num_derefs_p = 0;
750 *is_store = false;
752 /* Find out the total number of uses of PTR in STMT. */
753 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
754 if (use == ptr)
755 (*num_uses_p)++;
757 /* Now count the number of indirect references to PTR. This is
758 truly awful, but we don't have much choice. There are no parent
759 pointers inside INDIRECT_REFs, so an expression like
760 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
761 find all the indirect and direct uses of x_1 inside. The only
762 shortcut we can take is the fact that GIMPLE only allows
763 INDIRECT_REFs inside the expressions below. */
764 if (TREE_CODE (stmt) == MODIFY_EXPR
765 || (TREE_CODE (stmt) == RETURN_EXPR
766 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
767 || TREE_CODE (stmt) == ASM_EXPR
768 || TREE_CODE (stmt) == CALL_EXPR)
770 tree lhs, rhs;
772 if (TREE_CODE (stmt) == MODIFY_EXPR)
774 lhs = TREE_OPERAND (stmt, 0);
775 rhs = TREE_OPERAND (stmt, 1);
777 else if (TREE_CODE (stmt) == RETURN_EXPR)
779 tree e = TREE_OPERAND (stmt, 0);
780 lhs = TREE_OPERAND (e, 0);
781 rhs = TREE_OPERAND (e, 1);
783 else if (TREE_CODE (stmt) == ASM_EXPR)
785 lhs = ASM_OUTPUTS (stmt);
786 rhs = ASM_INPUTS (stmt);
788 else
790 lhs = NULL_TREE;
791 rhs = stmt;
794 if (lhs && (TREE_CODE (lhs) == TREE_LIST || EXPR_P (lhs)))
796 struct count_ptr_d count;
797 count.ptr = ptr;
798 count.count = 0;
799 walk_tree (&lhs, count_ptr_derefs, &count, NULL);
800 *is_store = true;
801 *num_derefs_p = count.count;
804 if (rhs && (TREE_CODE (rhs) == TREE_LIST || EXPR_P (rhs)))
806 struct count_ptr_d count;
807 count.ptr = ptr;
808 count.count = 0;
809 walk_tree (&rhs, count_ptr_derefs, &count, NULL);
810 *num_derefs_p += count.count;
814 gcc_assert (*num_uses_p >= *num_derefs_p);
817 /* Initialize the data structures used for alias analysis. */
819 static struct alias_info *
820 init_alias_info (void)
822 struct alias_info *ai;
823 referenced_var_iterator rvi;
824 tree var;
826 bitmap_obstack_initialize (&alias_obstack);
827 ai = XCNEW (struct alias_info);
828 ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
829 sbitmap_zero (ai->ssa_names_visited);
830 VARRAY_TREE_INIT (ai->processed_ptrs, 50, "processed_ptrs");
831 ai->written_vars = BITMAP_ALLOC (&alias_obstack);
832 ai->dereferenced_ptrs_store = BITMAP_ALLOC (&alias_obstack);
833 ai->dereferenced_ptrs_load = BITMAP_ALLOC (&alias_obstack);
835 /* If aliases have been computed before, clear existing information. */
836 if (aliases_computed_p)
838 unsigned i;
840 /* Similarly, clear the set of addressable variables. In this
841 case, we can just clear the set because addressability is
842 only computed here. */
843 bitmap_clear (addressable_vars);
845 /* Clear flow-insensitive alias information from each symbol. */
846 FOR_EACH_REFERENCED_VAR (var, rvi)
848 var_ann_t ann = var_ann (var);
850 ann->is_aliased = 0;
851 ann->may_aliases = NULL;
852 NUM_REFERENCES_CLEAR (ann);
854 /* Since we are about to re-discover call-clobbered
855 variables, clear the call-clobbered flag. Variables that
856 are intrinsically call-clobbered (globals, local statics,
857 etc) will not be marked by the aliasing code, so we can't
858 remove them from CALL_CLOBBERED_VARS.
860 NB: STRUCT_FIELDS are still call clobbered if they are for
861 a global variable, so we *don't* clear their call clobberedness
862 just because they are tags, though we will clear it if they
863 aren't for global variables. */
864 if (TREE_CODE (var) == NAME_MEMORY_TAG
865 || TREE_CODE (var) == SYMBOL_MEMORY_TAG
866 || !is_global_var (var))
867 clear_call_clobbered (var);
870 /* Clear flow-sensitive points-to information from each SSA name. */
871 for (i = 1; i < num_ssa_names; i++)
873 tree name = ssa_name (i);
875 if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
876 continue;
878 if (SSA_NAME_PTR_INFO (name))
880 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);
882 /* Clear all the flags but keep the name tag to
883 avoid creating new temporaries unnecessarily. If
884 this pointer is found to point to a subset or
885 superset of its former points-to set, then a new
886 tag will need to be created in create_name_tags. */
887 pi->pt_anything = 0;
888 pi->pt_null = 0;
889 pi->value_escapes_p = 0;
890 pi->is_dereferenced = 0;
891 if (pi->pt_vars)
892 bitmap_clear (pi->pt_vars);
897 /* Next time, we will need to reset alias information. */
898 aliases_computed_p = true;
900 return ai;
904 /* Deallocate memory used by alias analysis. */
906 static void
907 delete_alias_info (struct alias_info *ai)
909 size_t i;
910 referenced_var_iterator rvi;
911 tree var;
913 sbitmap_free (ai->ssa_names_visited);
914 ai->processed_ptrs = NULL;
916 for (i = 0; i < ai->num_addressable_vars; i++)
917 free (ai->addressable_vars[i]);
919 FOR_EACH_REFERENCED_VAR(var, rvi)
921 var_ann_t ann = var_ann (var);
922 NUM_REFERENCES_CLEAR (ann);
925 free (ai->addressable_vars);
927 for (i = 0; i < ai->num_pointers; i++)
928 free (ai->pointers[i]);
929 free (ai->pointers);
931 BITMAP_FREE (ai->written_vars);
932 BITMAP_FREE (ai->dereferenced_ptrs_store);
933 BITMAP_FREE (ai->dereferenced_ptrs_load);
934 bitmap_obstack_release (&alias_obstack);
935 free (ai);
937 delete_points_to_sets ();
940 /* Create name tags for all the pointers that have been dereferenced.
941 We only create a name tag for a pointer P if P is found to point to
942 a set of variables (so that we can alias them to *P) or if it is
943 the result of a call to malloc (which means that P cannot point to
944 anything else nor alias any other variable).
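   For instance (sketch):

     p_1 = &a;
     *p_1 = 0;          <- p_1 points to { a } and is dereferenced,
                           so it gets a name tag
     q_2 = baz ();
     *q_2 = 0;          <- q_2 may point anywhere (pt_anything), so it
                           keeps only its symbol tag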
946 If two pointers P and Q point to the same set of variables, they
947 are assigned the same name tag. */
949 static void
950 create_name_tags (void)
952 size_t i;
953 VEC (tree, heap) *with_ptvars = NULL;
954 tree ptr;
956 /* Collect the list of pointers with a non-empty points to set. */
957 for (i = 1; i < num_ssa_names; i++)
959 tree ptr = ssa_name (i);
960 struct ptr_info_def *pi;
962 if (!ptr
963 || !POINTER_TYPE_P (TREE_TYPE (ptr))
964 || !SSA_NAME_PTR_INFO (ptr))
965 continue;
967 pi = SSA_NAME_PTR_INFO (ptr);
969 if (pi->pt_anything || !pi->is_dereferenced)
971 /* No name tags for pointers that have not been
972 dereferenced or point to an arbitrary location. */
973 pi->name_mem_tag = NULL_TREE;
974 continue;
977 /* Set pt_anything on the pointers without pt_vars filled in so
978 that they are assigned a symbol tag. */
979 if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
980 VEC_safe_push (tree, heap, with_ptvars, ptr);
981 else
982 set_pt_anything (ptr);
985 /* If we didn't find any pointers with pt_vars set, we're done. */
986 if (!with_ptvars)
987 return;
989 /* Now go through the pointers with pt_vars, and find a name tag
990 with the same pt_vars as this pointer, or create one if one
991 doesn't exist. */
992 for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
994 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
995 size_t j;
996 tree ptr2;
997 tree old_name_tag = pi->name_mem_tag;
999 /* If PTR points to a set of variables, check if we don't
1000 have another pointer Q with the same points-to set before
1001 creating a tag. If so, use Q's tag instead of creating a
1002 new one.
1004 This is important for not creating unnecessary symbols
1005 and also for copy propagation. If we ever need to
1006 propagate PTR into Q or vice-versa, we would run into
1007 problems if they both had different name tags because
1008 they would have different SSA version numbers (which
1009 would force us to take the name tags in and out of SSA). */
1010 for (j = 0; j < i && VEC_iterate (tree, with_ptvars, j, ptr2); j++)
1012 struct ptr_info_def *qi = SSA_NAME_PTR_INFO (ptr2);
1014 if (bitmap_equal_p (pi->pt_vars, qi->pt_vars))
1016 pi->name_mem_tag = qi->name_mem_tag;
1017 break;
1021 /* If we didn't find a pointer with the same points-to set
1022 as PTR, create a new name tag if needed. */
1023 if (pi->name_mem_tag == NULL_TREE)
1024 pi->name_mem_tag = get_nmt_for (ptr);
1026 /* If the new name tag computed for PTR is different than
1027 the old name tag that it used to have, then the old tag
1028 needs to be removed from the IL, so we mark it for
1029 renaming. */
1030 if (old_name_tag && old_name_tag != pi->name_mem_tag)
1031 mark_sym_for_renaming (old_name_tag);
1033 TREE_THIS_VOLATILE (pi->name_mem_tag)
1034 |= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
1036 /* Mark the new name tag for renaming. */
1037 mark_sym_for_renaming (pi->name_mem_tag);
1040 VEC_free (tree, heap, with_ptvars);
1044 /* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
1045 the name memory tag (NMT) associated with P_i. If P_i escapes, then its
1046 name tag and the variables it points-to are call-clobbered. Finally, if
1047 P_i escapes and we could not determine where it points to, then all the
1048 variables in the same alias set as *P_i are marked call-clobbered. This
1049 is necessary because we must assume that P_i may take the address of any
1050 variable in the same alias set. */
1052 static void
1053 compute_flow_sensitive_aliasing (struct alias_info *ai)
1055 size_t i;
1057 for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
1059 tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
1060 if (!find_what_p_points_to (ptr))
1061 set_pt_anything (ptr);
1064 create_name_tags ();
1066 for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
1068 unsigned j;
1069 tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
1070 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
1071 var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
1072 bitmap_iterator bi;
1075 /* Set up aliasing information for PTR's name memory tag (if it has
1076 one). Note that only pointers that have been dereferenced will
1077 have a name memory tag. */
1078 if (pi->name_mem_tag && pi->pt_vars)
1079 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
1081 add_may_alias (pi->name_mem_tag, referenced_var (j));
1082 add_may_alias (v_ann->symbol_mem_tag, referenced_var (j));
1088 /* Compute type-based alias sets. Traverse all the pointers and
1089 addressable variables found in setup_pointers_and_addressables.
1091 For every pointer P in AI->POINTERS and addressable variable V in
1092 AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's symbol
1093 memory tag (SMT) if their alias sets conflict. V is then marked as
1094 an alias tag so that the operand scanner knows that statements
1095 containing V have aliased operands. */
1097 static void
1098 compute_flow_insensitive_aliasing (struct alias_info *ai)
1100 size_t i;
1102 /* Initialize counter for the total number of virtual operands that
1103 aliasing will introduce. When AI->TOTAL_ALIAS_VOPS goes beyond the
1104 threshold set by --param max-aliased-vops, we enable alias
1105 grouping. */
1106 ai->total_alias_vops = 0;
1108 /* For every pointer P, determine which addressable variables may alias
1109 with P's symbol memory tag. */
1110 for (i = 0; i < ai->num_pointers; i++)
1112 size_t j;
1113 struct alias_map_d *p_map = ai->pointers[i];
1114 tree tag = var_ann (p_map->var)->symbol_mem_tag;
1115 var_ann_t tag_ann = var_ann (tag);
1117 p_map->total_alias_vops = 0;
1118 p_map->may_aliases = BITMAP_ALLOC (&alias_obstack);
1120 for (j = 0; j < ai->num_addressable_vars; j++)
1122 struct alias_map_d *v_map;
1123 var_ann_t v_ann;
1124 tree var;
1125 bool tag_stored_p, var_stored_p;
1127 v_map = ai->addressable_vars[j];
1128 var = v_map->var;
1129 v_ann = var_ann (var);
1131 /* Skip memory tags and variables that have never been
1132 written to. We also need to check if the variables are
1133 call-clobbered because they may be overwritten by
1134 function calls.
1136 Note this effectively amounts to randomly accessing elements in
1137 the sparse bitset, which can be highly inefficient.
1138 So we first check the call_clobbered status of the
1139 tag and variable before querying the bitmap. */
1140 tag_stored_p = is_call_clobbered (tag)
1141 || bitmap_bit_p (ai->written_vars, DECL_UID (tag));
1142 var_stored_p = is_call_clobbered (var)
1143 || bitmap_bit_p (ai->written_vars, DECL_UID (var));
1144 if (!tag_stored_p && !var_stored_p)
1145 continue;
1147 if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
1149 size_t num_tag_refs, num_var_refs;
1151 num_tag_refs = NUM_REFERENCES (tag_ann);
1152 num_var_refs = NUM_REFERENCES (v_ann);
1154 /* Add VAR to TAG's may-aliases set. */
1156 /* We should never have a var with subvars here, because
1157 they shouldn't get into the set of addressable vars */
1158 gcc_assert (!var_can_have_subvars (var)
1159 || get_subvars_for_var (var) == NULL);
1161 add_may_alias (tag, var);
1162 /* Update the bitmap used to represent TAG's alias set
1163 in case we need to group aliases. */
1164 bitmap_set_bit (p_map->may_aliases, DECL_UID (var));
1166 /* Update the total number of virtual operands due to
1167 aliasing. Since we are adding one more alias to TAG's
1168 may-aliases set, the total number of virtual operands due
1169 to aliasing will be increased by the number of references
1170 made to VAR and TAG (every reference to TAG will also
1171 count as a reference to VAR). */
1172 ai->total_alias_vops += (num_var_refs + num_tag_refs);
1173 p_map->total_alias_vops += (num_var_refs + num_tag_refs);
1180 /* Since this analysis is based exclusively on symbols, it fails to
1181 handle cases where two pointers P and Q have different memory
1182 tags with conflicting alias set numbers but no aliased symbols in
1183 common.
1185 For example, suppose that we have two memory tags SMT.1 and SMT.2
1186 such that
1188 may-aliases (SMT.1) = { a }
1189 may-aliases (SMT.2) = { b }
1191 and the alias set number of SMT.1 conflicts with that of SMT.2.
1192 Since they don't have symbols in common, loads and stores from
1193 SMT.1 and SMT.2 will seem independent of each other, which will
1194 lead to the optimizers making invalid transformations (see
1195 testsuite/gcc.c-torture/execute/pr15262-[12].c).
1197 To avoid this problem, we do a final traversal of AI->POINTERS
1198 looking for pairs of pointers that have no aliased symbols in
1199 common and yet have conflicting alias set numbers. */
1200 for (i = 0; i < ai->num_pointers; i++)
1202 size_t j;
1203 struct alias_map_d *p_map1 = ai->pointers[i];
1204 tree tag1 = var_ann (p_map1->var)->symbol_mem_tag;
1205 bitmap may_aliases1 = p_map1->may_aliases;
1207 for (j = i + 1; j < ai->num_pointers; j++)
1209 struct alias_map_d *p_map2 = ai->pointers[j];
1210 tree tag2 = var_ann (p_map2->var)->symbol_mem_tag;
1211 bitmap may_aliases2 = p_map2->may_aliases;
1213 /* If the pointers may not point to each other, do nothing. */
1214 if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
1215 continue;
1217 /* The two pointers may alias each other. If they already have
1218 symbols in common, do nothing. */
1219 if (bitmap_intersect_p (may_aliases1, may_aliases2))
1220 continue;
1222 if (!bitmap_empty_p (may_aliases2))
1224 unsigned int k;
1225 bitmap_iterator bi;
1227 /* Add all the aliases for TAG2 into TAG1's alias set.
1228 FIXME, update grouping heuristic counters. */
1229 EXECUTE_IF_SET_IN_BITMAP (may_aliases2, 0, k, bi)
1230 add_may_alias (tag1, referenced_var (k));
1231 bitmap_ior_into (may_aliases1, may_aliases2);
1233 else
1235 /* Since TAG2 does not have any aliases of its own, add
1236 TAG2 itself to the alias set of TAG1. */
1237 add_may_alias (tag1, tag2);
1238 bitmap_set_bit (may_aliases1, DECL_UID (tag2));
1243 if (dump_file)
1244 fprintf (dump_file, "\n%s: Total number of aliased vops: %ld\n",
1245 get_name (current_function_decl),
1246 ai->total_alias_vops);
1250 /* Comparison function for qsort used in group_aliases. */
1252 static int
1253 total_alias_vops_cmp (const void *p, const void *q)
1255 const struct alias_map_d **p1 = (const struct alias_map_d **)p;
1256 const struct alias_map_d **p2 = (const struct alias_map_d **)q;
1257 long n1 = (*p1)->total_alias_vops;
1258 long n2 = (*p2)->total_alias_vops;
1260 /* We want to sort in descending order. */
1261 return (n1 > n2 ? -1 : (n1 == n2) ? 0 : 1);
1264 /* Group all the aliases for TAG to make TAG represent all the
1265 variables in its alias set. Update the total number
1266 of virtual operands due to aliasing (AI->TOTAL_ALIAS_VOPS). This
1267 function will make TAG be the unique alias tag for all the
1268 variables in its may-aliases. So, given:
1270 may-aliases(TAG) = { V1, V2, V3 }
1272 This function will group the variables into:
1274 may-aliases(V1) = { TAG }
1275 may-aliases(V2) = { TAG }
1276 may-aliases(V3) = { TAG } */
1278 static void
1279 group_aliases_into (tree tag, bitmap tag_aliases, struct alias_info *ai)
1281 unsigned int i;
1282 var_ann_t tag_ann = var_ann (tag);
1283 size_t num_tag_refs = NUM_REFERENCES (tag_ann);
1284 bitmap_iterator bi;
1286 EXECUTE_IF_SET_IN_BITMAP (tag_aliases, 0, i, bi)
1288 tree var = referenced_var (i);
1289 var_ann_t ann = var_ann (var);
1291 /* Make TAG the unique alias of VAR. */
1292 ann->is_aliased = 0;
1293 ann->may_aliases = NULL;
1295 /* Note that VAR and TAG may be the same if the function has no
1296 addressable variables (see the discussion at the end of
1297 setup_pointers_and_addressables). */
1298 if (var != tag)
1299 add_may_alias (var, tag);
1301 /* Reduce total number of virtual operands contributed
1302 by TAG on behalf of VAR. Notice that the references to VAR
1303 itself won't be removed. We will merely replace them with
1304 references to TAG. */
1305 ai->total_alias_vops -= num_tag_refs;
1308 /* We have reduced the number of virtual operands that TAG makes on
1309 behalf of all the variables formerly aliased with it. However,
1310 we have also "removed" all the virtual operands for TAG itself,
1311 so we add them back. */
1312 ai->total_alias_vops += num_tag_refs;
1314 /* TAG no longer has any aliases. */
1315 tag_ann->may_aliases = NULL;
1319 /* Group may-aliases sets to reduce the number of virtual operands due
1320 to aliasing.
1322 1- Sort the list of pointers in decreasing number of contributed
1323 virtual operands.
1325 2- Take the first entry in AI->POINTERS and reverse the roles of
1326 the memory tag and its aliases. Usually, whenever an aliased
1327 variable Vi is found to alias with a memory tag T, we add Vi
1328 to the may-aliases set for T. Meaning that after alias
1329 analysis, we will have:
1331 may-aliases(T) = { V1, V2, V3, ..., Vn }
1333 This means that every statement that references T, will get 'n'
1334 virtual operands for each of the Vi tags. But, when alias
1335 grouping is enabled, we make T an alias tag and add it to the
1336 alias set of all the Vi variables:
1338 may-aliases(V1) = { T }
1339 may-aliases(V2) = { T }
1341 may-aliases(Vn) = { T }
1343 This has two effects: (a) statements referencing T will only get
1344 a single virtual operand, and, (b) all the variables Vi will now
1345 appear to alias each other. So, we lose alias precision to
1346 improve compile time. But, in theory, a program with such a high
1347 level of aliasing should not be very optimizable in the first
1348 place.
1350 3- Since variables may be in the alias set of more than one
1351 memory tag, the grouping done in step (2) needs to be extended
1352 to all the memory tags that have a non-empty intersection with
1353 the may-aliases set of tag T. For instance, if we originally
1354 had these may-aliases sets:
1356 may-aliases(T) = { V1, V2, V3 }
1357 may-aliases(R) = { V2, V4 }
1359 In step (2) we would have reversed the aliases for T as:
1361 may-aliases(V1) = { T }
1362 may-aliases(V2) = { T }
1363 may-aliases(V3) = { T }
1365 But note that now V2 is no longer aliased with R. We could
1366 add R to may-aliases(V2), but we are in the process of
1367 grouping aliases to reduce virtual operands so what we do is
1368 add V4 to the grouping to obtain:
1370 may-aliases(V1) = { T }
1371 may-aliases(V2) = { T }
1372 may-aliases(V3) = { T }
1373 may-aliases(V4) = { T }
1375 4- If the total number of virtual operands due to aliasing is
1376 still above the threshold set by max-aliased-vops, go back to (2). */
1378 static void
1379 group_aliases (struct alias_info *ai)
1381 size_t i;
1383 /* Sort the POINTERS array in descending order of contributed
1384 virtual operands. */
1385 qsort (ai->pointers, ai->num_pointers, sizeof (struct alias_map_d *),
1386 total_alias_vops_cmp);
1388 /* For every pointer in AI->POINTERS, reverse the roles of its tag
1389 and the tag's may-aliases set. */
1390 for (i = 0; i < ai->num_pointers; i++)
1392 size_t j;
1393 tree tag1 = var_ann (ai->pointers[i]->var)->symbol_mem_tag;
1394 bitmap tag1_aliases = ai->pointers[i]->may_aliases;
1396 /* Skip tags that have been grouped already. */
1397 if (ai->pointers[i]->grouped_p)
1398 continue;
1400 /* See if TAG1 had any aliases in common with other symbol tags.
1401 If we find a TAG2 with common aliases with TAG1, add TAG2's
1402 aliases into TAG1. */
1403 for (j = i + 1; j < ai->num_pointers; j++)
1405 bitmap tag2_aliases = ai->pointers[j]->may_aliases;
1407 if (bitmap_intersect_p (tag1_aliases, tag2_aliases))
1409 tree tag2 = var_ann (ai->pointers[j]->var)->symbol_mem_tag;
1411 bitmap_ior_into (tag1_aliases, tag2_aliases);
1413 /* TAG2 does not need its aliases anymore. */
1414 bitmap_clear (tag2_aliases);
1415 var_ann (tag2)->may_aliases = NULL;
1417 /* TAG1 is the unique alias of TAG2. */
1418 add_may_alias (tag2, tag1);
1420 ai->pointers[j]->grouped_p = true;
1424 /* Now group all the aliases we collected into TAG1. */
1425 group_aliases_into (tag1, tag1_aliases, ai);
1427 /* If we've reduced total number of virtual operands below the
1428 threshold, stop. */
1429 if (ai->total_alias_vops < MAX_ALIASED_VOPS)
1430 break;
1433 /* Finally, all the variables that have been grouped cannot be in
1434 the may-alias set of name memory tags. Suppose that we have
1435 grouped the aliases in this code so that may-aliases(a) = SMT.20
1437 p_5 = &a;
1439 # a_9 = V_MAY_DEF <a_8>
1440 p_5->field = 0
1441 ... Several modifications to SMT.20 ...
1442 # VUSE <a_9>
1443 x_30 = p_5->field
1445 Since p_5 points to 'a', the optimizers will try to propagate 0
1446 into p_5->field, but that is wrong because there have been
1447 modifications to 'SMT.20' in between. To prevent this we have to
1448 replace 'a' with 'SMT.20' in the name tag of p_5. */
1449 for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
1451 size_t j;
1452 tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
1453 tree name_tag = SSA_NAME_PTR_INFO (ptr)->name_mem_tag;
1454 VEC(tree,gc) *aliases;
1455 tree alias;
1457 if (name_tag == NULL_TREE)
1458 continue;
1460 aliases = var_ann (name_tag)->may_aliases;
1461 for (j = 0; VEC_iterate (tree, aliases, j, alias); j++)
1463 var_ann_t ann = var_ann (alias);
1465 if ((!MTAG_P (alias)
1466 || TREE_CODE (alias) == STRUCT_FIELD_TAG)
1467 && ann->may_aliases)
1469 tree new_alias;
1471 gcc_assert (VEC_length (tree, ann->may_aliases) == 1);
1473 new_alias = VEC_index (tree, ann->may_aliases, 0);
1474 replace_may_alias (name_tag, j, new_alias);
1479 if (dump_file)
1480 fprintf (dump_file,
1481 "%s: Total number of aliased vops after grouping: %ld%s\n",
1482 get_name (current_function_decl),
1483 ai->total_alias_vops,
1484 (ai->total_alias_vops < 0) ? " (negative values are OK)" : "");
1488 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
1490 static void
1491 create_alias_map_for (tree var, struct alias_info *ai)
1493 struct alias_map_d *alias_map;
1494 alias_map = XCNEW (struct alias_map_d);
1495 alias_map->var = var;
1496 alias_map->set = get_alias_set (var);
1497 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
1501 /* Create memory tags for all the dereferenced pointers and build the
1502 ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
1503 sets. Based on the address escape and points-to information collected
1504 earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
1505 variables whose address is not needed anymore. */
1507 static void
1508 setup_pointers_and_addressables (struct alias_info *ai)
1510 size_t n_vars, num_addressable_vars, num_pointers;
1511 referenced_var_iterator rvi;
1512 tree var;
1513 VEC (tree, heap) *varvec = NULL;
1514 safe_referenced_var_iterator srvi;
1516 /* Size up the arrays ADDRESSABLE_VARS and POINTERS. */
1517 num_addressable_vars = num_pointers = 0;
1519 FOR_EACH_REFERENCED_VAR (var, rvi)
1521 if (may_be_aliased (var))
1522 num_addressable_vars++;
1524 if (POINTER_TYPE_P (TREE_TYPE (var)))
1526 /* Since we don't keep track of volatile variables, assume that
1527 these pointers are used in indirect store operations. */
1528 if (TREE_THIS_VOLATILE (var))
1529 bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
1531 num_pointers++;
1535 /* Create ADDRESSABLE_VARS and POINTERS. Note that these arrays are
1536 always going to be slightly bigger than we actually need
1537 because some TREE_ADDRESSABLE variables will be marked
1538 non-addressable below and only pointers with unique symbol tags are
1539 going to be added to POINTERS. */
1540 ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
1541 ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
1542 ai->num_addressable_vars = 0;
1543 ai->num_pointers = 0;
1545 /* Since we will be creating symbol memory tags within this loop,
1546 cache the value of NUM_REFERENCED_VARS to avoid processing the
1547 additional tags unnecessarily. */
1548 n_vars = num_referenced_vars;
1550 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
1552 var_ann_t v_ann = var_ann (var);
1553 subvar_t svars;
1555 /* Name memory tags already have flow-sensitive aliasing
1556 information, so they need not be processed by
1557 compute_flow_insensitive_aliasing. Similarly, symbol memory
1558 tags are already accounted for when we process their
1559 associated pointer.
1561 Structure fields, on the other hand, have to have some of this
1562 information processed for them, but it's pointless to mark them
1563 non-addressable (since they are fake variables anyway). */
1564 if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
1565 continue;
1567 /* Remove the ADDRESSABLE flag from every addressable variable whose
1568 address is not needed anymore. This is caused by the propagation
1569 of ADDR_EXPR constants into INDIRECT_REF expressions and the
1570 removal of dead pointer assignments done by the early scalar
1571 cleanup passes. */
1572 if (TREE_ADDRESSABLE (var))
1574 if (!bitmap_bit_p (addressable_vars, DECL_UID (var))
1575 && TREE_CODE (var) != RESULT_DECL
1576 && !is_global_var (var))
1578 bool okay_to_mark = true;
1580 /* Since VAR is now a regular GIMPLE register, we will need
1581 to rename VAR into SSA afterwards. */
1582 mark_sym_for_renaming (var);
1584 /* If VAR can have sub-variables, and any of its
1585 sub-variables has its address taken, then we cannot
1586 remove the addressable flag from VAR. */
1587 if (var_can_have_subvars (var)
1588 && (svars = get_subvars_for_var (var)))
1590 subvar_t sv;
1592 for (sv = svars; sv; sv = sv->next)
1594 if (bitmap_bit_p (addressable_vars, DECL_UID (sv->var)))
1595 okay_to_mark = false;
1596 mark_sym_for_renaming (sv->var);
1600 /* The address of VAR is not needed, remove the
1601 addressable bit, so that it can be optimized as a
1602 regular variable. */
1603 if (okay_to_mark)
1604 mark_non_addressable (var);
1608 /* Global variables and addressable locals may be aliased. Create an
1609 entry in ADDRESSABLE_VARS for VAR. */
1610 if (may_be_aliased (var)
1611 && (!var_can_have_subvars (var)
1612 || get_subvars_for_var (var) == NULL))
1614 create_alias_map_for (var, ai);
1615 mark_sym_for_renaming (var);
1618 /* Add pointer variables that have been dereferenced to the POINTERS
1619 array and create a symbol memory tag for them. */
1620 if (POINTER_TYPE_P (TREE_TYPE (var)))
1622 if ((bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var))
1623 || bitmap_bit_p (ai->dereferenced_ptrs_load, DECL_UID (var))))
1625 tree tag;
1626 var_ann_t t_ann;
1628 /* If pointer VAR still doesn't have a memory tag
1629 associated with it, create it now or re-use an
1630 existing one. */
1631 tag = get_tmt_for (var, ai);
1632 t_ann = var_ann (tag);
1634 /* The symbol tag will need to be renamed into SSA
1635 afterwards. Note that we cannot do this inside
1636 get_tmt_for because aliasing may run multiple times
1637 and we only create symbol tags the first time. */
1638 mark_sym_for_renaming (tag);
1640 /* Similarly, if pointer VAR used to have another type
1641 tag, we will need to process it in the renamer to
1642 remove the stale virtual operands. */
1643 if (v_ann->symbol_mem_tag)
1644 mark_sym_for_renaming (v_ann->symbol_mem_tag);
1646 /* Associate the tag with pointer VAR. */
1647 v_ann->symbol_mem_tag = tag;
1649 /* If pointer VAR has been used in a store operation,
1650 then its memory tag must be marked as written-to. */
1651 if (bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var)))
1652 bitmap_set_bit (ai->written_vars, DECL_UID (tag));
1654 /* All the dereferences of pointer VAR count as
1655 references of TAG. Since TAG can be associated with
1656 several pointers, add the dereferences of VAR to the
1657 TAG. */
1658 NUM_REFERENCES_SET (t_ann,
1659 NUM_REFERENCES (t_ann)
1660 + NUM_REFERENCES (v_ann));
1662 else
1664 /* The pointer has not been dereferenced. If it had a
1665 symbol memory tag, remove it and mark the old tag for
1666 renaming to remove it from the IL. */
1667 var_ann_t ann = var_ann (var);
1668 tree tag = ann->symbol_mem_tag;
1669 if (tag)
1671 mark_sym_for_renaming (tag);
1672 ann->symbol_mem_tag = NULL_TREE;
1677 VEC_free (tree, heap, varvec);
1681 /* Determine whether to use .GLOBAL_VAR to model call clobbering semantics. At
1682 every call site, we need to emit V_MAY_DEF expressions to represent the
1683 clobbering effects of the call for variables whose address escapes the
1684 current function.
1686 One approach is to group all call-clobbered variables into a single
1687 representative that is used as an alias of every call-clobbered variable
1688 (.GLOBAL_VAR). This works well, but it ties the optimizer's hands because
1689 every reference to any call-clobbered variable becomes a reference to .GLOBAL_VAR.
1691 The second approach is to emit a clobbering V_MAY_DEF for every
1692 call-clobbered variable at call sites. This is the preferred way in terms
1693 of optimization opportunities but it may create too many V_MAY_DEF operands
1694 if there are many call clobbered variables and function calls in the
1695 function.
1697 To decide whether or not to use .GLOBAL_VAR we multiply the number of
1698 function calls found by the number of call-clobbered variables. If that
1699 product is beyond a certain threshold, as determined by the parameterized
1700 values shown below, we use .GLOBAL_VAR.
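   For instance (illustrative numbers only): a function with 40 call
   sites and 300 call-clobbered variables would need on the order of
   40 * 300 = 12000 V_MAY_DEFs just to model the calls; once that product
   reaches GLOBAL_VAR_THRESHOLD (a --param, global-var-threshold), we
   switch to the single .GLOBAL_VAR representation instead.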
1702 FIXME. This heuristic should be improved. One idea is to use several
1703 .GLOBAL_VARs of different types instead of a single one. The thresholds
1704 have been derived from a typical bootstrap cycle, including all target
1705 libraries. Compile times were found to increase by ~1% compared to using
1706 .GLOBAL_VAR. */
1708 static void
1709 maybe_create_global_var (struct alias_info *ai)
1711 unsigned i, n_clobbered;
1712 bitmap_iterator bi;
1714 /* No need to create it if we have one already. */
1715 if (global_var == NULL_TREE)
1717 /* Count all the call-clobbered variables. */
1718 n_clobbered = 0;
1719 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1721 n_clobbered++;
1724 /* If the number of virtual operands that would be needed to
1725 model all the call-clobbered variables is larger than
1726 GLOBAL_VAR_THRESHOLD, create .GLOBAL_VAR.
1728 Also create .GLOBAL_VAR if there are no call-clobbered
1729 variables and the program contains a mixture of pure/const
1730 and regular function calls. This is to avoid the problem
1731 described in PR 20115:
1733 int X;
1734 int func_pure (void) { return X; }
1735 int func_non_pure (int a) { X += a; }
1736 int foo ()
1738 int a = func_pure ();
1739 func_non_pure (a);
1740 a = func_pure ();
1741 return a;
1744 Since foo() has no call-clobbered variables, there is
1745 no relationship between the calls to func_pure and
1746 func_non_pure. Since func_pure has no side-effects, value
1747 numbering optimizations elide the second call to func_pure.
1748 So, if we have some pure/const and some regular calls in the
1749 program we create .GLOBAL_VAR to avoid missing these
1750 relations. */
1751 if (ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD
1752 || (n_clobbered == 0
1753 && ai->num_calls_found > 0
1754 && ai->num_pure_const_calls_found > 0
1755 && ai->num_calls_found > ai->num_pure_const_calls_found))
1756 create_global_var ();
1759 /* Mark all call-clobbered symbols for renaming. Since the initial
1760 rewrite into SSA ignored all call sites, we may need to rename
1761 .GLOBAL_VAR and the call-clobbered variables. */
1762 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1764 tree var = referenced_var (i);
1766 /* If the function has calls to clobbering functions and
1767 .GLOBAL_VAR has been created, make it an alias for all
1768 call-clobbered variables. */
1769 if (global_var && var != global_var)
1771 add_may_alias (var, global_var);
1772 gcc_assert (!get_subvars_for_var (var));
1775 mark_sym_for_renaming (var);
1780 /* Return TRUE if pointer PTR may point to variable VAR.
1782 MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR.
1783 This is needed because when checking for type conflicts we are
1784 interested in the alias set of the memory location pointed-to by
1785 PTR. The alias set of PTR itself is irrelevant.
1787 VAR_ALIAS_SET is the alias set for VAR. */
1789 static bool
1790 may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
1791 tree var, HOST_WIDE_INT var_alias_set,
1792 bool alias_set_only)
1794 tree mem;
1796 alias_stats.alias_queries++;
1797 alias_stats.simple_queries++;
1799 /* By convention, a variable cannot alias itself. */
1800 mem = var_ann (ptr)->symbol_mem_tag;
1801 if (mem == var)
1803 alias_stats.alias_noalias++;
1804 alias_stats.simple_resolved++;
1805 return false;
1808 /* If -fargument-noalias-global was given (flag_argument_noalias > 1),
1809 pointer arguments may not point to global variables. */
1810 if (flag_argument_noalias > 1 && is_global_var (var)
1811 && TREE_CODE (ptr) == PARM_DECL)
1813 alias_stats.alias_noalias++;
1814 alias_stats.simple_resolved++;
1815 return false;
1818 /* If either MEM or VAR is a read-only global and the other one
1819 isn't, then PTR cannot point to VAR. */
1820 if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
1821 || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
1823 alias_stats.alias_noalias++;
1824 alias_stats.simple_resolved++;
1825 return false;
1828 gcc_assert (TREE_CODE (mem) == SYMBOL_MEMORY_TAG);
1830 alias_stats.tbaa_queries++;
1832 /* If the alias sets don't conflict then MEM cannot alias VAR. */
1833 if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
1835 alias_stats.alias_noalias++;
1836 alias_stats.tbaa_resolved++;
1837 return false;
1840 /* If var is a record or union type, ptr cannot point into var
1841 unless there is some explicit address operation in the
1842 program that can reference a field of the ptr's dereferenced
1843 type. This also assumes that the types of both var and ptr are
1844 contained within the compilation unit, and that there is no fancy
1845 addressing arithmetic associated with any of the types
1846 involved. */
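/* As an illustration (assuming both types are fully visible to this
   compilation unit):

     struct S { int f; } s;
     int *p;

   If the program never takes the address of a field of S that P's
   dereferenced type could refer to (no '&s.f' anywhere), the type-escape
   information used below lets us conclude that P cannot point into S,
   even though the alias sets of 'int' and 'struct S' conflict.  */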
1848 if ((mem_alias_set != 0) && (var_alias_set != 0))
1850 tree ptr_type = TREE_TYPE (ptr);
1851 tree var_type = TREE_TYPE (var);
1853 /* The star count is -1 if the type at the end of the pointer_to
1854 chain is not a record or union type. */
1855 if ((!alias_set_only) &&
1856 ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
1858 int ptr_star_count = 0;
1860 /* Ipa_type_escape_star_count_of_interesting_type is a little too
1861 restrictive for the pointer type; we need to allow pointers to
1862 primitive types as long as those types cannot be pointers
1863 to everything. */
1864 while (POINTER_TYPE_P (ptr_type))
1865 /* Strip the *'s off. */
1867 ptr_type = TREE_TYPE (ptr_type);
1868 ptr_star_count++;
1871 /* There does not appear to be a better test to see if the
1872 pointer type was one of the pointer to everything
1873 types. */
1875 if (ptr_star_count > 0)
1877 alias_stats.structnoaddress_queries++;
1878 if (ipa_type_escape_field_does_not_clobber_p (var_type,
1879 TREE_TYPE (ptr)))
1881 alias_stats.structnoaddress_resolved++;
1882 alias_stats.alias_noalias++;
1883 return false;
1886 else if (ptr_star_count == 0)
1888 /* If ptr_type was not really a pointer type, it cannot
1889 alias. */
1890 alias_stats.structnoaddress_queries++;
1891 alias_stats.structnoaddress_resolved++;
1892 alias_stats.alias_noalias++;
1893 return false;
1898 alias_stats.alias_mayalias++;
1899 return true;
1903 /* Add ALIAS to the set of variables that may alias VAR. */
1905 static void
1906 add_may_alias (tree var, tree alias)
1908 size_t i;
1909 var_ann_t v_ann = get_var_ann (var);
1910 var_ann_t a_ann = get_var_ann (alias);
1911 tree al;
1913 /* Don't allow self-referential aliases. */
1914 gcc_assert (var != alias);
1916 /* ALIAS must be addressable if it's being added to an alias set. */
1917 #if 1
1918 TREE_ADDRESSABLE (alias) = 1;
1919 #else
1920 gcc_assert (may_be_aliased (alias));
1921 #endif
1923 if (v_ann->may_aliases == NULL)
1924 v_ann->may_aliases = VEC_alloc (tree, gc, 2);
1926 /* Avoid adding duplicates. */
1927 for (i = 0; VEC_iterate (tree, v_ann->may_aliases, i, al); i++)
1928 if (alias == al)
1929 return;
1931 VEC_safe_push (tree, gc, v_ann->may_aliases, alias);
1932 a_ann->is_aliased = 1;
1936 /* Replace alias I in the alias sets of VAR with NEW_ALIAS. */
1938 static void
1939 replace_may_alias (tree var, size_t i, tree new_alias)
1941 var_ann_t v_ann = var_ann (var);
1942 VEC_replace (tree, v_ann->may_aliases, i, new_alias);
1946 /* Mark pointer PTR as pointing to an arbitrary memory location. */
1948 static void
1949 set_pt_anything (tree ptr)
1951 struct ptr_info_def *pi = get_ptr_info (ptr);
1953 pi->pt_anything = 1;
1954 pi->pt_vars = NULL;
1956 /* The pointer used to have a name tag, but we now found it pointing
1957 to an arbitrary location. The name tag needs to be renamed and
1958 disassociated from PTR. */
1959 if (pi->name_mem_tag)
1961 mark_sym_for_renaming (pi->name_mem_tag);
1962 pi->name_mem_tag = NULL_TREE;
1967 /* Return true if STMT is an "escape" site from the current function. Escape
1968 sites are those statements which might expose the address of a variable
1969 outside the current function. STMT is an escape site iff:
1971 1- STMT is a function call, or
1972 2- STMT is an __asm__ expression, or
1973 3- STMT is an assignment to a non-local variable, or
1974 4- STMT is a return statement.
1976 AI points to the alias information collected so far.
1978 Return the type of escape site found, if we found one, or NO_ESCAPE
1979 if none. */
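/* Illustrative user-level examples of each escape class (not part of
   this pass):

     foo (p);                   1- call, ESCAPE_TO_CALL
     __asm__ ("" : : "r" (p));  2- asm, ESCAPE_TO_ASM
     global_ptr = p;            3- store to a non-local, ESCAPE_STORED_IN_GLOBAL
     return p;                  4- return, ESCAPE_TO_RETURN

   A conversion such as 'i = (long) p' is classified separately as
   ESCAPE_BAD_CAST, since the pointer can no longer be tracked.  */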
1981 enum escape_type
1982 is_escape_site (tree stmt, struct alias_info *ai)
1984 tree call = get_call_expr_in (stmt);
1985 if (call != NULL_TREE)
1987 ai->num_calls_found++;
1989 if (!TREE_SIDE_EFFECTS (call))
1991 ai->num_pure_const_calls_found++;
1992 return ESCAPE_TO_PURE_CONST;
1995 return ESCAPE_TO_CALL;
1997 else if (TREE_CODE (stmt) == ASM_EXPR)
1998 return ESCAPE_TO_ASM;
1999 else if (TREE_CODE (stmt) == MODIFY_EXPR)
2001 tree lhs = TREE_OPERAND (stmt, 0);
2003 /* Get to the base of _REF nodes. */
2004 if (TREE_CODE (lhs) != SSA_NAME)
2005 lhs = get_base_address (lhs);
2007 /* If we couldn't recognize the LHS of the assignment, assume that it
2008 is a non-local store. */
2009 if (lhs == NULL_TREE)
2010 return ESCAPE_UNKNOWN;
2012 /* If the RHS is a conversion between a pointer and an integer, the
2013 pointer escapes since we can't track the integer. */
2014 if ((TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
2015 || TREE_CODE (TREE_OPERAND (stmt, 1)) == CONVERT_EXPR
2016 || TREE_CODE (TREE_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
2017 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND
2018 (TREE_OPERAND (stmt, 1), 0)))
2019 && !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 1))))
2020 return ESCAPE_BAD_CAST;
2022 /* If the LHS is an SSA name, it can't possibly represent a non-local
2023 memory store. */
2024 if (TREE_CODE (lhs) == SSA_NAME)
2025 return NO_ESCAPE;
2027 /* FIXME: LHS is not an SSA_NAME. Even if it's an assignment to a
2028 local variable, we cannot be sure whether it will escape, because we
2029 don't have information about objects not in SSA form. Need to
2030 implement something along the lines of
2032 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
2033 Midkiff, ``Escape analysis for java,'' in Proceedings of the
2034 Conference on Object-Oriented Programming Systems, Languages, and
2035 Applications (OOPSLA), pp. 1-19, 1999. */
2036 return ESCAPE_STORED_IN_GLOBAL;
2038 else if (TREE_CODE (stmt) == RETURN_EXPR)
2039 return ESCAPE_TO_RETURN;
2041 return NO_ESCAPE;
2044 /* Create a new tag with tree code CODE and type TYPE, using PREFIX to
2045 name it. Does NOT push it into the current binding. */
2047 static tree
2048 create_tag_raw (enum tree_code code, tree type, const char *prefix)
2050 tree tmp_var;
2051 tree new_type;
2053 /* Make the type of the variable writable. */
2054 new_type = build_type_variant (type, 0, 0);
2055 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
2057 tmp_var = build_decl (code, create_tmp_var_name (prefix),
2058 type);
2059 /* Make the variable writable. */
2060 TREE_READONLY (tmp_var) = 0;
2062 /* It doesn't start out global. */
2063 MTAG_GLOBAL (tmp_var) = 0;
2064 TREE_STATIC (tmp_var) = 0;
2065 TREE_USED (tmp_var) = 1;
2067 return tmp_var;
2070 /* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
2071 is considered to represent all the pointers whose pointed-to types are
2072 in the same alias set class. Otherwise, the tag represents a single
2073 SSA_NAME pointer variable. */
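/* For illustration only: given

     int *p, *q;
     *p = 1;
     *q = 2;

   one symbol memory tag (SMT) created with IS_TYPE_TAG true can stand for
   the memory dereferenced through both P and Q, because their pointed-to
   types share an alias set.  If flow-sensitive analysis later proves that
   a particular SSA name p_1 points to a known set of variables, a name
   memory tag (NMT) is created for p_1 alone (see get_nmt_for below).  */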
2075 static tree
2076 create_memory_tag (tree type, bool is_type_tag)
2078 var_ann_t ann;
2079 tree tag = create_tag_raw (is_type_tag ? SYMBOL_MEMORY_TAG : NAME_MEMORY_TAG,
2080 type, (is_type_tag) ? "SMT" : "NMT");
2082 /* By default, memory tags are local variables. Alias analysis will
2083 determine whether they should be considered globals. */
2084 DECL_CONTEXT (tag) = current_function_decl;
2086 /* Memory tags are by definition addressable. */
2087 TREE_ADDRESSABLE (tag) = 1;
2089 ann = get_var_ann (tag);
2090 ann->symbol_mem_tag = NULL_TREE;
2092 /* Add the tag to the symbol table. */
2093 add_referenced_tmp_var (tag);
2095 return tag;
2099 /* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
2100 This is used if P_i has been found to point to a specific set of
2101 variables or to a non-aliased memory location like the address returned
2102 by malloc functions. */
2104 static tree
2105 get_nmt_for (tree ptr)
2107 struct ptr_info_def *pi = get_ptr_info (ptr);
2108 tree tag = pi->name_mem_tag;
2110 if (tag == NULL_TREE)
2111 tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
2112 return tag;
2116 /* Return the symbol memory tag associated to pointer PTR. A memory
2117 tag is an artificial variable that represents the memory location
2118 pointed-to by PTR. It is used to model the effects of pointer
2119 de-references on addressable variables.
2121 AI points to the data gathered during alias analysis. This
2122 function populates the array AI->POINTERS. */
2124 static tree
2125 get_tmt_for (tree ptr, struct alias_info *ai)
2127 size_t i;
2128 tree tag;
2129 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2130 HOST_WIDE_INT tag_set = get_alias_set (tag_type);
2132 /* To avoid creating unnecessary memory tags, only create one memory tag
2133 per alias set class. Note that it may be tempting to group
2134 memory tags based on conflicting alias sets instead of
2135 equivalence. That would be wrong because alias sets are not
2136 necessarily transitive (as demonstrated by the libstdc++ test
2137 23_containers/vector/cons/4.cc). Given three alias sets A, B, C
2138 such that conflicts (A, B) == true and conflicts (A, C) == true,
2139 it does not necessarily follow that conflicts (B, C) == true. */
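/* Illustrative example of the non-transitivity mentioned above: let A be
   the alias set of 'struct { int i; double d; }', B the alias set of
   'int' and C the alias set of 'double'.  A conflicts with B and A
   conflicts with C, because the structure contains members of both
   types, yet B does not conflict with C.  */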
2140 for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
2142 struct alias_map_d *curr = ai->pointers[i];
2143 tree curr_tag = var_ann (curr->var)->symbol_mem_tag;
2144 if (tag_set == curr->set)
2146 tag = curr_tag;
2147 break;
2151 /* If no existing memory tag has the same alias set as PTR's pointed-to
2152 type, create a new tag for PTR and add it to the POINTERS array. */
2153 if (tag == NULL_TREE)
2155 struct alias_map_d *alias_map;
2157 /* If PTR did not have a symbol tag already, create a new SMT, an
2158 artificial variable representing the memory location
2159 pointed-to by PTR. */
2160 if (var_ann (ptr)->symbol_mem_tag == NULL_TREE)
2161 tag = create_memory_tag (tag_type, true);
2162 else
2163 tag = var_ann (ptr)->symbol_mem_tag;
2165 /* Add PTR to the POINTERS array. Note that we are not interested in
2166 PTR's alias set. Instead, we cache the alias set for the memory that
2167 PTR points to. */
2168 alias_map = XCNEW (struct alias_map_d);
2169 alias_map->var = ptr;
2170 alias_map->set = tag_set;
2171 ai->pointers[ai->num_pointers++] = alias_map;
2174 /* If the pointed-to type is volatile, so is the tag. */
2175 TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);
2177 /* Make sure that the symbol tag has the same alias set as the
2178 pointed-to type. */
2179 gcc_assert (tag_set == get_alias_set (tag));
2181 return tag;
2185 /* Create GLOBAL_VAR, an artificial global variable to act as a
2186 representative of all the variables that may be clobbered by function
2187 calls. */
2189 static void
2190 create_global_var (void)
2192 global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
2193 void_type_node);
2194 DECL_ARTIFICIAL (global_var) = 1;
2195 TREE_READONLY (global_var) = 0;
2196 DECL_EXTERNAL (global_var) = 1;
2197 TREE_STATIC (global_var) = 1;
2198 TREE_USED (global_var) = 1;
2199 DECL_CONTEXT (global_var) = NULL_TREE;
2200 TREE_THIS_VOLATILE (global_var) = 0;
2201 TREE_ADDRESSABLE (global_var) = 0;
2203 create_var_ann (global_var);
2204 mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
2205 add_referenced_tmp_var (global_var);
2206 mark_sym_for_renaming (global_var);
2210 /* Dump alias statistics on FILE. */
2212 static void
2213 dump_alias_stats (FILE *file)
2215 const char *funcname
2216 = lang_hooks.decl_printable_name (current_function_decl, 2);
2217 fprintf (file, "\nAlias statistics for %s\n\n", funcname);
2218 fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
2219 fprintf (file, "Total alias mayalias results:\t%u\n",
2220 alias_stats.alias_mayalias);
2221 fprintf (file, "Total alias noalias results:\t%u\n",
2222 alias_stats.alias_noalias);
2223 fprintf (file, "Total simple queries:\t%u\n",
2224 alias_stats.simple_queries);
2225 fprintf (file, "Total simple resolved:\t%u\n",
2226 alias_stats.simple_resolved);
2227 fprintf (file, "Total TBAA queries:\t%u\n",
2228 alias_stats.tbaa_queries);
2229 fprintf (file, "Total TBAA resolved:\t%u\n",
2230 alias_stats.tbaa_resolved);
2231 fprintf (file, "Total non-addressable structure type queries:\t%u\n",
2232 alias_stats.structnoaddress_queries);
2233 fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
2234 alias_stats.structnoaddress_resolved);
2238 /* Dump alias information on FILE. */
2240 void
2241 dump_alias_info (FILE *file)
2243 size_t i;
2244 const char *funcname
2245 = lang_hooks.decl_printable_name (current_function_decl, 2);
2246 referenced_var_iterator rvi;
2247 tree var;
2249 fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
2251 fprintf (file, "Aliased symbols\n\n");
2253 FOR_EACH_REFERENCED_VAR (var, rvi)
2255 if (may_be_aliased (var))
2256 dump_variable (file, var);
2259 fprintf (file, "\nDereferenced pointers\n\n");
2261 FOR_EACH_REFERENCED_VAR (var, rvi)
2263 var_ann_t ann = var_ann (var);
2264 if (ann->symbol_mem_tag)
2265 dump_variable (file, var);
2268 fprintf (file, "\nSymbol memory tags\n\n");
2270 FOR_EACH_REFERENCED_VAR (var, rvi)
2272 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
2273 dump_variable (file, var);
2276 fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
2278 fprintf (file, "SSA_NAME pointers\n\n");
2279 for (i = 1; i < num_ssa_names; i++)
2281 tree ptr = ssa_name (i);
2282 struct ptr_info_def *pi;
2284 if (ptr == NULL_TREE)
2285 continue;
2287 pi = SSA_NAME_PTR_INFO (ptr);
2288 if (!SSA_NAME_IN_FREE_LIST (ptr)
2289 && pi
2290 && pi->name_mem_tag)
2291 dump_points_to_info_for (file, ptr);
2294 fprintf (file, "\nName memory tags\n\n");
2296 FOR_EACH_REFERENCED_VAR (var, rvi)
2298 if (TREE_CODE (var) == NAME_MEMORY_TAG)
2299 dump_variable (file, var);
2302 fprintf (file, "\n");
2306 /* Dump alias information on stderr. */
2308 void
2309 debug_alias_info (void)
2311 dump_alias_info (stderr);
2315 /* Return the alias information associated with pointer T. It creates a
2316 new instance if none existed. */
2318 struct ptr_info_def *
2319 get_ptr_info (tree t)
2321 struct ptr_info_def *pi;
2323 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
2325 pi = SSA_NAME_PTR_INFO (t);
2326 if (pi == NULL)
2328 pi = GGC_NEW (struct ptr_info_def);
2329 memset ((void *)pi, 0, sizeof (*pi));
2330 SSA_NAME_PTR_INFO (t) = pi;
2333 return pi;
2337 /* Dump points-to information for SSA_NAME PTR into FILE. */
2339 void
2340 dump_points_to_info_for (FILE *file, tree ptr)
2342 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2344 print_generic_expr (file, ptr, dump_flags);
2346 if (pi)
2348 if (pi->name_mem_tag)
2350 fprintf (file, ", name memory tag: ");
2351 print_generic_expr (file, pi->name_mem_tag, dump_flags);
2354 if (pi->is_dereferenced)
2355 fprintf (file, ", is dereferenced");
2357 if (pi->value_escapes_p)
2358 fprintf (file, ", its value escapes");
2360 if (pi->pt_anything)
2361 fprintf (file, ", points-to anything");
2363 if (pi->pt_null)
2364 fprintf (file, ", points-to NULL");
2366 if (pi->pt_vars)
2368 unsigned ix;
2369 bitmap_iterator bi;
2371 fprintf (file, ", points-to vars: { ");
2372 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, ix, bi)
2374 print_generic_expr (file, referenced_var (ix), dump_flags);
2375 fprintf (file, " ");
2377 fprintf (file, "}");
2381 fprintf (file, "\n");
2385 /* Dump points-to information for VAR into stderr. */
2387 void
2388 debug_points_to_info_for (tree var)
2390 dump_points_to_info_for (stderr, var);
2394 /* Dump points-to information into FILE. NOTE: This function is slow, as
2395 it needs to traverse the whole CFG looking for pointer SSA_NAMEs. */
2397 void
2398 dump_points_to_info (FILE *file)
2400 basic_block bb;
2401 block_stmt_iterator si;
2402 ssa_op_iter iter;
2403 const char *fname =
2404 lang_hooks.decl_printable_name (current_function_decl, 2);
2405 referenced_var_iterator rvi;
2406 tree var;
2408 fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
2410 /* First dump points-to information for the default definitions of
2411 pointer variables. This is necessary because default definitions are
2412 not part of the code. */
2413 FOR_EACH_REFERENCED_VAR (var, rvi)
2415 if (POINTER_TYPE_P (TREE_TYPE (var)))
2417 tree def = default_def (var);
2418 if (def)
2419 dump_points_to_info_for (file, def);
2423 /* Dump points-to information for every pointer defined in the program. */
2424 FOR_EACH_BB (bb)
2426 tree phi;
2428 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2430 tree ptr = PHI_RESULT (phi);
2431 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
2432 dump_points_to_info_for (file, ptr);
2435 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
2437 tree stmt = bsi_stmt (si);
2438 tree def;
2439 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
2440 if (POINTER_TYPE_P (TREE_TYPE (def)))
2441 dump_points_to_info_for (file, def);
2445 fprintf (file, "\n");
2449 /* Dump points-to information into stderr. */
2451 void
2452 debug_points_to_info (void)
2454 dump_points_to_info (stderr);
2457 /* Dump to FILE the list of variables that may be aliasing VAR. */
2459 void
2460 dump_may_aliases_for (FILE *file, tree var)
2462 VEC(tree, gc) *aliases;
2464 if (TREE_CODE (var) == SSA_NAME)
2465 var = SSA_NAME_VAR (var);
2467 aliases = var_ann (var)->may_aliases;
2468 if (aliases)
2470 size_t i;
2471 tree al;
2472 fprintf (file, "{ ");
2473 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2475 print_generic_expr (file, al, dump_flags);
2476 fprintf (file, " ");
2478 fprintf (file, "}");
2483 /* Dump to stderr the list of variables that may be aliasing VAR. */
2485 void
2486 debug_may_aliases_for (tree var)
2488 dump_may_aliases_for (stderr, var);
2491 /* Return true if VAR may be aliased. */
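/* For example (illustrative only): a local 'int x;' whose address is
   passed to another function, as in 'foo (&x);', is TREE_ADDRESSABLE and
   therefore may be aliased, whereas a file-scope 'static int s;' whose
   address is never taken is not, provided the whole translation unit has
   been seen (flag_unit_at_a_time).  */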
2493 bool
2494 may_be_aliased (tree var)
2496 /* Obviously. */
2497 if (TREE_ADDRESSABLE (var))
2498 return true;
2500 /* Globally visible variables can have their addresses taken by other
2501 translation units. */
2503 if (MTAG_P (var)
2504 && (MTAG_GLOBAL (var) || TREE_PUBLIC (var)))
2505 return true;
2506 else if (!MTAG_P (var)
2507 && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
2508 return true;
2510 /* Automatic variables can't have their addresses escape any other way.
2511 This must be after the check for global variables, as extern declarations
2512 do not have TREE_STATIC set. */
2513 if (!TREE_STATIC (var))
2514 return false;
2516 /* If we're in unit-at-a-time mode, then we must have seen all occurrences
2517 of address-of operators, and so we can trust TREE_ADDRESSABLE. Otherwise
2518 we can only be sure the variable isn't addressable if it's local to the
2519 current function. */
2520 if (flag_unit_at_a_time)
2521 return false;
2522 if (decl_function_context (var) == current_function_decl)
2523 return false;
2525 return true;
2529 /* Given two symbols, return TRUE if one is in the alias set of the other. */
2530 bool
2531 is_aliased_with (tree tag, tree sym)
2533 size_t i;
2534 VEC(tree,gc) *aliases;
2535 tree al;
2537 if (var_ann (sym)->is_aliased)
2539 aliases = var_ann (tag)->may_aliases;
2541 if (aliases == NULL)
2542 return false;
2544 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2545 if (al == sym)
2546 return true;
2548 else
2550 aliases = var_ann (sym)->may_aliases;
2552 if (aliases == NULL)
2553 return false;
2555 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2556 if (al == tag)
2557 return true;
2560 return false;
2564 /* Add VAR to the list of may-aliases of PTR's symbol tag. If PTR
2565 doesn't already have a symbol tag, create one. */
2567 void
2568 add_type_alias (tree ptr, tree var)
2570 VEC(tree, gc) *aliases;
2571 tree tag, al;
2572 var_ann_t ann = var_ann (ptr);
2573 subvar_t svars;
2574 VEC (tree, heap) *varvec = NULL;
2575 unsigned i;
2577 if (ann->symbol_mem_tag == NULL_TREE)
2579 tree q = NULL_TREE;
2580 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2581 HOST_WIDE_INT tag_set = get_alias_set (tag_type);
2582 safe_referenced_var_iterator rvi;
2584 /* PTR doesn't have a symbol tag, create a new one and add VAR to
2585 the new tag's alias set.
2587 FIXME, This is slower than necessary. We need to determine
2588 whether there is another pointer Q with the same alias set as
2589 PTR. This could be sped up by having symbol tags associated
2590 with types. */
2591 FOR_EACH_REFERENCED_VAR_SAFE (q, varvec, rvi)
2593 if (POINTER_TYPE_P (TREE_TYPE (q))
2594 && tag_set == get_alias_set (TREE_TYPE (TREE_TYPE (q))))
2596 /* Found another pointer Q with the same alias set as
2597 the PTR's pointed-to type. If Q has a symbol tag, use
2598 it. Otherwise, create a new memory tag for PTR. */
2599 var_ann_t ann1 = var_ann (q);
2600 if (ann1->symbol_mem_tag)
2601 ann->symbol_mem_tag = ann1->symbol_mem_tag;
2602 else
2603 ann->symbol_mem_tag = create_memory_tag (tag_type, true);
2604 goto found_tag;
2608 /* Couldn't find any other pointer with a symbol tag we could use.
2609 Create a new memory tag for PTR. */
2610 ann->symbol_mem_tag = create_memory_tag (tag_type, true);
2613 found_tag:
2614 /* If VAR is not already PTR's symbol tag, add it to the may-alias set
2615 for PTR's symbol tag. */
2616 gcc_assert (!MTAG_P (var));
2617 tag = ann->symbol_mem_tag;
2619 /* If VAR has subvars, add the subvars to the tag instead of the
2620 actual var. */
2621 if (var_can_have_subvars (var)
2622 && (svars = get_subvars_for_var (var)))
2624 subvar_t sv;
2625 for (sv = svars; sv; sv = sv->next)
2626 add_may_alias (tag, sv->var);
2628 else
2629 add_may_alias (tag, var);
2631 /* TAG and its set of aliases need to be marked for renaming. */
2632 mark_sym_for_renaming (tag);
2633 if ((aliases = var_ann (tag)->may_aliases) != NULL)
2635 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2636 mark_sym_for_renaming (al);
2639 /* If we had grouped aliases, VAR may have aliases of its own. Mark
2640 them for renaming as well. Other statements referencing the
2641 aliases of VAR will need to be updated. */
2642 if ((aliases = var_ann (var)->may_aliases) != NULL)
2644 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2645 mark_sym_for_renaming (al);
2647 VEC_free (tree, heap, varvec);
2651 /* Create a new symbol tag for PTR. Construct the may-alias list of this type
2652 tag so that it has the aliasing of VAR.
2654 Note, the set of aliases represented by the new symbol tag is not marked
2655 for renaming. */
2657 void
2658 new_type_alias (tree ptr, tree var)
2660 var_ann_t p_ann = var_ann (ptr);
2661 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2662 var_ann_t v_ann = var_ann (var);
2663 tree tag;
2664 subvar_t svars;
2666 gcc_assert (p_ann->symbol_mem_tag == NULL_TREE);
2667 gcc_assert (!MTAG_P (var));
2669 /* Add VAR to the may-alias set of PTR's new symbol tag. If VAR has
2670 subvars, add the subvars to the tag instead of the actual var. */
2671 if (var_can_have_subvars (var)
2672 && (svars = get_subvars_for_var (var)))
2674 subvar_t sv;
2676 tag = create_memory_tag (tag_type, true);
2677 p_ann->symbol_mem_tag = tag;
2679 for (sv = svars; sv; sv = sv->next)
2680 add_may_alias (tag, sv->var);
2682 else
2684 /* The following is based on code in add_stmt_operand to ensure that the
2685 same defs/uses/vdefs/vuses will be found after replacing a reference
2686 to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
2687 is the address of var. */
2688 VEC(tree, gc) *aliases = v_ann->may_aliases;
2690 if ((aliases != NULL)
2691 && (VEC_length (tree, aliases) == 1))
2693 tree ali = VEC_index (tree, aliases, 0);
2695 if (TREE_CODE (ali) == SYMBOL_MEMORY_TAG)
2697 p_ann->symbol_mem_tag = ali;
2698 return;
2702 tag = create_memory_tag (tag_type, true);
2703 p_ann->symbol_mem_tag = tag;
2705 if (aliases == NULL)
2706 add_may_alias (tag, var);
2707 else
2709 unsigned i;
2710 tree al;
2712 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2713 add_may_alias (tag, al);
2717 TREE_READONLY (tag) = TREE_READONLY (var);
2718 MTAG_GLOBAL (tag) = is_global_var (var);
2723 /* This represents the used range of a variable. */
2725 typedef struct used_part
2727 HOST_WIDE_INT minused;
2728 HOST_WIDE_INT maxused;
2729 /* True if we have an explicit use/def of some portion of this variable,
2730 even if it is all of it, e.g. a.b = 5 or temp = a.b. */
2731 bool explicit_uses;
2732 /* True if we have an implicit use/def of some portion of this
2733 variable. Implicit uses occur when we can't tell what part we
2734 are referencing, and have to make conservative assumptions. */
2735 bool implicit_uses;
2736 /* True if the structure is only written to or has its address taken. */
2737 bool write_only;
2738 } *used_part_t;
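/* For instance (illustrative only), a structure that is only ever stored
   to, as in 'x.a = 1; x.b = 2;', keeps write_only set and is skipped by
   create_overlap_variables_for below, while a read such as 'tmp = x.b'
   clears write_only and records an explicit use of the range occupied by
   field b.  */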
2740 /* A hash table of used_part structures, indexed by variable uid. */
2742 static htab_t used_portions;
2744 struct used_part_map
2746 unsigned int uid;
2747 used_part_t to;
2750 /* Return true if the uids in the two used part maps are equal. */
2752 static int
2753 used_part_map_eq (const void *va, const void *vb)
2755 const struct used_part_map *a = (const struct used_part_map *) va;
2756 const struct used_part_map *b = (const struct used_part_map *) vb;
2757 return (a->uid == b->uid);
2760 /* Hash a used_part_map on its uid. */
2762 static unsigned int
2763 used_part_map_hash (const void *item)
2765 return ((const struct used_part_map *)item)->uid;
2768 /* Free a used part map element. */
2770 static void
2771 free_used_part_map (void *item)
2773 free (((struct used_part_map *)item)->to);
2774 free (item);
2777 /* Lookup a used_part structure for a UID. */
2779 static used_part_t
2780 up_lookup (unsigned int uid)
2782 struct used_part_map *h, in;
2783 in.uid = uid;
2784 h = (struct used_part_map *) htab_find_with_hash (used_portions, &in, uid);
2785 if (!h)
2786 return NULL;
2787 return h->to;
2790 /* Insert the pair UID, TO into the used part hashtable. */
2792 static void
2793 up_insert (unsigned int uid, used_part_t to)
2795 struct used_part_map *h;
2796 void **loc;
2798 h = XNEW (struct used_part_map);
2799 h->uid = uid;
2800 h->to = to;
2801 loc = htab_find_slot_with_hash (used_portions, h,
2802 uid, INSERT);
2803 if (*loc != NULL)
2804 free (*loc);
2805 *(struct used_part_map **) loc = h;
2809 /* Given a variable uid, UID, get or create the entry in the used portions
2810 table for the variable. */
2812 static used_part_t
2813 get_or_create_used_part_for (size_t uid)
2815 used_part_t up;
2816 if ((up = up_lookup (uid)) == NULL)
2818 up = XCNEW (struct used_part);
2819 up->minused = INT_MAX;
2820 up->maxused = 0;
2821 up->explicit_uses = false;
2822 up->implicit_uses = false;
2823 up->write_only = true;
2826 return up;
2830 /* Create and return a structure sub-variable for field type FIELD at
2831 offset OFFSET, with size SIZE, of variable VAR. */
2833 static tree
2834 create_sft (tree var, tree field, unsigned HOST_WIDE_INT offset,
2835 unsigned HOST_WIDE_INT size)
2837 var_ann_t ann;
2838 tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");
2840 /* We need to copy the various flags from VAR to SUBVAR, so that
2841 SUBVAR is is_global_var iff the original variable was. */
2842 DECL_CONTEXT (subvar) = DECL_CONTEXT (var);
2843 MTAG_GLOBAL (subvar) = DECL_EXTERNAL (var);
2844 TREE_PUBLIC (subvar) = TREE_PUBLIC (var);
2845 TREE_STATIC (subvar) = TREE_STATIC (var);
2846 TREE_READONLY (subvar) = TREE_READONLY (var);
2848 /* Add the new variable to REFERENCED_VARS. */
2849 ann = get_var_ann (subvar);
2850 ann->symbol_mem_tag = NULL;
2851 add_referenced_tmp_var (subvar);
2852 SFT_PARENT_VAR (subvar) = var;
2853 SFT_OFFSET (subvar) = offset;
2854 SFT_SIZE (subvar) = size;
2855 return subvar;
2859 /* Given an aggregate VAR, create the subvariables that represent its
2860 fields. */
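/* A sketch of the result, assuming 32-bit ints: for

     struct pair { int x; int y; } p;
     p.x = 1;
     tmp = p.y;

   two structure field tags are created, one covering p.x (offset 0, size
   32) and one covering p.y (offset 32, size 32), so the two accesses no
   longer appear to touch the same memory.  */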
2862 static void
2863 create_overlap_variables_for (tree var)
2865 VEC(fieldoff_s,heap) *fieldstack = NULL;
2866 used_part_t up;
2867 size_t uid = DECL_UID (var);
2869 up = up_lookup (uid);
2870 if (!up
2871 || up->write_only)
2872 return;
2874 push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL);
2875 if (VEC_length (fieldoff_s, fieldstack) != 0)
2877 subvar_t *subvars;
2878 fieldoff_s *fo;
2879 bool notokay = false;
2880 int fieldcount = 0;
2881 int i;
2882 HOST_WIDE_INT lastfooffset = -1;
2883 HOST_WIDE_INT lastfosize = -1;
2884 tree lastfotype = NULL_TREE;
2886 /* Not all fields have DECL_SIZE set; for those that don't, we don't
2887 know their size and thus can't handle them.
2888 The same is true of fields whose DECL_SIZE is not an integer
2889 constant (such as variable-sized fields).
2890 Fields with offsets which are not constant will have an offset < 0.
2891 We *could* handle fields that are constant-sized arrays, but
2892 currently don't. Doing so would require some extra changes to
2893 tree-ssa-operands.c. */
2895 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
2897 if (!fo->size
2898 || TREE_CODE (fo->size) != INTEGER_CST
2899 || fo->offset < 0)
2901 notokay = true;
2902 break;
2904 fieldcount++;
2907 /* The current heuristic we use is as follows:
2908 If the variable has no used portions in this function, no
2909 structure vars are created for it.
2910 Otherwise,
2911 If the variable has fewer than SALIAS_MAX_IMPLICIT_FIELDS fields,
2912 we always create structure vars for it.
2913 If the variable has SALIAS_MAX_IMPLICIT_FIELDS fields or more, and
2914 some explicit uses, we create structure vars for it.
2915 If the variable has SALIAS_MAX_IMPLICIT_FIELDS fields or more, and
2916 no explicit uses, we do not create structure vars for it. */
2919 if (fieldcount >= SALIAS_MAX_IMPLICIT_FIELDS
2920 && !up->explicit_uses)
2922 if (dump_file && (dump_flags & TDF_DETAILS))
2924 fprintf (dump_file, "Variable ");
2925 print_generic_expr (dump_file, var, 0);
2926 fprintf (dump_file, " has no explicit uses in this function, and is > SALIAS_MAX_IMPLICIT_FIELDS, so skipping\n");
2928 notokay = true;
2931 /* Bail out, if we can't create overlap variables. */
2932 if (notokay)
2934 VEC_free (fieldoff_s, heap, fieldstack);
2935 return;
2938 /* Otherwise, create the variables. */
2939 subvars = lookup_subvars_for_var (var);
2941 sort_fieldstack (fieldstack);
2943 for (i = VEC_length (fieldoff_s, fieldstack);
2944 VEC_iterate (fieldoff_s, fieldstack, --i, fo);)
2946 subvar_t sv;
2947 HOST_WIDE_INT fosize;
2948 tree currfotype;
2950 fosize = TREE_INT_CST_LOW (fo->size);
2951 currfotype = fo->type;
2953 /* If this field isn't in the used portion,
2954 or it has the exact same offset and size as the last
2955 field, skip it. */
2957 if (((fo->offset <= up->minused
2958 && fo->offset + fosize <= up->minused)
2959 || fo->offset >= up->maxused)
2960 || (fo->offset == lastfooffset
2961 && fosize == lastfosize
2962 && currfotype == lastfotype))
2963 continue;
2964 sv = GGC_NEW (struct subvar);
2965 sv->next = *subvars;
2966 sv->var = create_sft (var, fo->type, fo->offset, fosize);
2968 if (dump_file)
2970 fprintf (dump_file, "structure field tag %s created for var %s",
2971 get_name (sv->var), get_name (var));
2972 fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
2973 SFT_OFFSET (sv->var));
2974 fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
2975 SFT_SIZE (sv->var));
2976 fprintf (dump_file, "\n");
2979 lastfotype = currfotype;
2980 lastfooffset = fo->offset;
2981 lastfosize = fosize;
2982 *subvars = sv;
2985 /* Once we have created subvars, the original is no longer call
2986 clobbered on its own. Its call clobbered status depends
2987 completely on the call clobbered status of the subvars.
2989 add_referenced_tmp_var (called from create_sft in the loop above) will take care of
2990 marking subvars of global variables as call clobbered for us
2991 to start, since they are global as well. */
2992 clear_call_clobbered (var);
2995 VEC_free (fieldoff_s, heap, fieldstack);
2999 /* Find the conservative answer to the question of what portions of what
3000 structures are used by this statement. We assume that if we have a
3001 component ref with a known size + offset, we only need that part
3002 of the structure. For unknown cases, or cases where we do something
3003 to the whole structure, we assume we need to create fields for the
3004 entire structure. */
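/* Illustrative user code for the cases handled below: 'a.b = 5' and
   'tmp = a.b' are component references with a known offset and size, so
   only the range occupied by field b is recorded (an explicit use).
   Taking the address, as in 'ptr = &a', or referencing 'a' as a whole,
   records the entire variable as implicitly used.  Addresses passed
   directly as call arguments ('foo (&a)') are deliberately skipped by
   the CALL_EXPR case.  */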
3006 static tree
3007 find_used_portions (tree *tp, int *walk_subtrees, void *lhs_p)
3009 switch (TREE_CODE (*tp))
3011 case MODIFY_EXPR:
3012 /* Recurse manually here to track whether the use is in the
3013 LHS of an assignment. */
3014 find_used_portions (&TREE_OPERAND (*tp, 0), walk_subtrees, tp);
3015 return find_used_portions (&TREE_OPERAND (*tp, 1), walk_subtrees, NULL);
3016 case REALPART_EXPR:
3017 case IMAGPART_EXPR:
3018 case COMPONENT_REF:
3019 case ARRAY_REF:
3021 HOST_WIDE_INT bitsize;
3022 HOST_WIDE_INT bitmaxsize;
3023 HOST_WIDE_INT bitpos;
3024 tree ref;
3025 ref = get_ref_base_and_extent (*tp, &bitpos, &bitsize, &bitmaxsize);
3026 if (DECL_P (ref)
3027 && var_can_have_subvars (ref)
3028 && bitmaxsize != -1)
3030 size_t uid = DECL_UID (ref);
3031 used_part_t up;
3033 up = get_or_create_used_part_for (uid);
3035 if (bitpos <= up->minused)
3036 up->minused = bitpos;
3037 if ((bitpos + bitmaxsize >= up->maxused))
3038 up->maxused = bitpos + bitmaxsize;
3040 if (bitsize == bitmaxsize)
3041 up->explicit_uses = true;
3042 else
3043 up->implicit_uses = true;
3044 if (!lhs_p)
3045 up->write_only = false;
3046 up_insert (uid, up);
3048 *walk_subtrees = 0;
3049 return NULL_TREE;
3052 break;
3053 /* This is here to make sure we mark the entire base variable as used
3054 when you take its address. Because our used portion analysis is
3055 simple, we aren't looking at casts or pointer arithmetic to see what
3056 happens when you take the address. */
3057 case ADDR_EXPR:
3059 tree var = get_base_address (TREE_OPERAND (*tp, 0));
3061 if (var
3062 && DECL_P (var)
3063 && DECL_SIZE (var)
3064 && var_can_have_subvars (var)
3065 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3067 used_part_t up;
3068 size_t uid = DECL_UID (var);
3070 up = get_or_create_used_part_for (uid);
3072 up->minused = 0;
3073 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3074 up->implicit_uses = true;
3075 if (!lhs_p)
3076 up->write_only = false;
3078 up_insert (uid, up);
3079 *walk_subtrees = 0;
3080 return NULL_TREE;
3083 break;
3084 case CALL_EXPR:
3086 tree *arg;
3087 for (arg = &TREE_OPERAND (*tp, 1); *arg; arg = &TREE_CHAIN (*arg))
3089 if (TREE_CODE (TREE_VALUE (*arg)) != ADDR_EXPR)
3090 find_used_portions (&TREE_VALUE (*arg), walk_subtrees, NULL);
3092 *walk_subtrees = 0;
3093 return NULL_TREE;
3095 case VAR_DECL:
3096 case PARM_DECL:
3097 case RESULT_DECL:
3099 tree var = *tp;
3100 if (DECL_SIZE (var)
3101 && var_can_have_subvars (var)
3102 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3104 used_part_t up;
3105 size_t uid = DECL_UID (var);
3107 up = get_or_create_used_part_for (uid);
3109 up->minused = 0;
3110 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3111 up->implicit_uses = true;
3113 up_insert (uid, up);
3114 *walk_subtrees = 0;
3115 return NULL_TREE;
3118 break;
3120 default:
3121 break;
3124 return NULL_TREE;
3127 /* Create structure field variables for structures used in this function. */
3129 static unsigned int
3130 create_structure_vars (void)
3132 basic_block bb;
3133 safe_referenced_var_iterator rvi;
3134 VEC (tree, heap) *varvec = NULL;
3135 tree var;
3137 used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
3138 free_used_part_map);
3140 FOR_EACH_BB (bb)
3142 block_stmt_iterator bsi;
3143 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3145 walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
3146 find_used_portions,
3147 NULL);
3150 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, rvi)
3152 /* The C++ FE creates vars without DECL_SIZE set, for some reason. */
3153 if (var
3154 && DECL_SIZE (var)
3155 && var_can_have_subvars (var)
3156 && !MTAG_P (var)
3157 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3158 create_overlap_variables_for (var);
3160 htab_delete (used_portions);
3161 VEC_free (tree, heap, varvec);
3162 return 0;
3165 static bool
3166 gate_structure_vars (void)
3168 return flag_tree_salias != 0;
3171 struct tree_opt_pass pass_create_structure_vars =
3173 "salias", /* name */
3174 gate_structure_vars, /* gate */
3175 create_structure_vars, /* execute */
3176 NULL, /* sub */
3177 NULL, /* next */
3178 0, /* static_pass_number */
3179 0, /* tv_id */
3180 PROP_cfg, /* properties_required */
3181 0, /* properties_provided */
3182 0, /* properties_destroyed */
3183 0, /* todo_flags_start */
3184 TODO_dump_func, /* todo_flags_finish */
3185 0 /* letter */