* src/sh/sysv.S: Fix register numbers in the FDE for
[official-gcc.git] / gcc / tree-ssa-alias.c
bloba32c8439657584b4caca43baa67ecd3b545eec65
1 /* Alias analysis for trees.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "timevar.h"
32 #include "expr.h"
33 #include "ggc.h"
34 #include "langhooks.h"
35 #include "flags.h"
36 #include "function.h"
37 #include "diagnostic.h"
38 #include "tree-dump.h"
39 #include "tree-gimple.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "tree-ssa-structalias.h"
44 #include "convert.h"
45 #include "params.h"
46 #include "ipa-type-escape.h"
47 #include "vec.h"
48 #include "bitmap.h"
50 /* Obstack used to hold grouping bitmaps and other temporary bitmaps used by
51 aliasing */
52 static bitmap_obstack alias_obstack;
54 /* 'true' after aliases have been computed (see compute_may_aliases). */
55 bool aliases_computed_p;
/* Structure to map a variable to its alias set and keep track of the
   virtual operands that will be needed to represent it.  */
struct alias_map_d
{
  /* Variable and its alias set.  */
  tree var;
  HOST_WIDE_INT set;

  /* Total number of virtual operands that will be needed to represent
     all the aliases of VAR.  */
  long total_alias_vops;

  /* Nonzero if the aliases for this memory tag have been grouped
     already.  Used in group_aliases.  */
  unsigned int grouped_p : 1;

  /* Set of variables aliased with VAR.  This is the exact same
     information contained in VAR_ANN (VAR)->MAY_ALIASES, but in
     bitmap form to speed up alias grouping.  */
  bitmap may_aliases;
};
/* Counters used to display statistics on alias analysis.  This
   structure is zeroed at the start of every compute_may_aliases run
   and dumped when TDF_STATS is requested.  */
struct alias_stats_d
{
  unsigned int alias_queries;
  unsigned int alias_mayalias;
  unsigned int alias_noalias;
  unsigned int simple_queries;
  unsigned int simple_resolved;
  unsigned int tbaa_queries;
  unsigned int tbaa_resolved;
  unsigned int structnoaddress_queries;
  unsigned int structnoaddress_resolved;
};
95 /* Local variables. */
96 static struct alias_stats_d alias_stats;
98 /* Local functions. */
99 static void compute_flow_insensitive_aliasing (struct alias_info *);
100 static void dump_alias_stats (FILE *);
101 static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT, bool);
102 static tree create_memory_tag (tree type, bool is_type_tag);
103 static tree get_tmt_for (tree, struct alias_info *);
104 static tree get_nmt_for (tree);
105 static void add_may_alias (tree, tree);
106 static void replace_may_alias (tree, size_t, tree);
107 static struct alias_info *init_alias_info (void);
108 static void delete_alias_info (struct alias_info *);
109 static void compute_flow_sensitive_aliasing (struct alias_info *);
110 static void setup_pointers_and_addressables (struct alias_info *);
111 static void create_global_var (void);
112 static void maybe_create_global_var (struct alias_info *ai);
113 static void group_aliases (struct alias_info *);
114 static void set_pt_anything (tree ptr);
116 /* Global declarations. */
118 /* Call clobbered variables in the function. If bit I is set, then
119 REFERENCED_VARS (I) is call-clobbered. */
120 bitmap call_clobbered_vars;
122 /* Addressable variables in the function. If bit I is set, then
123 REFERENCED_VARS (I) has had its address taken. Note that
124 CALL_CLOBBERED_VARS and ADDRESSABLE_VARS are not related. An
125 addressable variable is not necessarily call-clobbered (e.g., a
126 local addressable whose address does not escape) and not all
127 call-clobbered variables are addressable (e.g., a local static
128 variable). */
129 bitmap addressable_vars;
131 /* When the program has too many call-clobbered variables and call-sites,
132 this variable is used to represent the clobbering effects of function
133 calls. In these cases, all the call clobbered variables in the program
134 are forced to alias this variable. This reduces compile times by not
135 having to keep track of too many V_MAY_DEF expressions at call sites. */
136 tree global_var;
138 DEF_VEC_I(int);
139 DEF_VEC_ALLOC_I(int,heap);
141 /* qsort comparison function to sort type/name tags by DECL_UID. */
143 static int
144 sort_tags_by_id (const void *pa, const void *pb)
146 tree a = *(tree *)pa;
147 tree b = *(tree *)pb;
149 return DECL_UID (a) - DECL_UID (b);
/* Initialize WORKLIST to contain those memory tags that are marked call
   clobbered.  Initialized WORKLIST2 to contain the reasons these
   memory tags escaped.  */

static void
init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
				  VEC (int, heap) **worklist2)
{
  referenced_var_iterator rvi;
  tree curr;

  FOR_EACH_REFERENCED_VAR (curr, rvi)
    {
      if (MTAG_P (curr) && is_call_clobbered (curr))
	{
	  /* The two vectors run in parallel: element I of WORKLIST2 is
	     the escape reason for element I of WORKLIST.  */
	  VEC_safe_push (tree, heap, *worklist, curr);
	  VEC_safe_push (int, heap, *worklist2, var_ann (curr)->escape_mask);
	}
    }
}
173 /* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
174 ALIAS is not already marked call clobbered, and is a memory
175 tag. */
177 static void
178 add_to_worklist (tree alias, VEC (tree, heap) **worklist,
179 VEC (int, heap) **worklist2,
180 int reason)
182 if (MTAG_P (alias) && !is_call_clobbered (alias))
184 VEC_safe_push (tree, heap, *worklist, alias);
185 VEC_safe_push (int, heap, *worklist2, reason);
/* Mark aliases of TAG as call clobbered, and place any tags on the
   alias list that were not already call clobbered on WORKLIST.  */

static void
mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
			     VEC (int, heap) **worklist2)
{
  unsigned int i;
  VEC (tree, gc) *ma;
  tree entry;
  var_ann_t ta = var_ann (tag);

  /* Nothing to do for non-tags or tags with no may-alias list.  */
  if (!MTAG_P (tag))
    return;

  ma = may_aliases (tag);
  if (!ma)
    return;

  for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
    {
      /* Unmodifiable variables can never be clobbered, so skip them.  */
      if (!unmodifiable_var_p (entry))
	{
	  /* Propagate TAG's escape reason to each alias; the worklist
	     push makes clobbering transitive through nested tags.  */
	  add_to_worklist (entry, worklist, worklist2, ta->escape_mask);
	  mark_call_clobbered (entry, ta->escape_mask);
	}
    }
}
/* Tags containing global vars need to be marked as global.
   Tags containing call clobbered vars need to be marked as call
   clobbered. */

static void
compute_tag_properties (void)
{
  referenced_var_iterator rvi;
  tree tag;
  bool changed = true;
  VEC (tree, heap) *taglist = NULL;

  /* Collect every memory tag except struct field tags.  */
  FOR_EACH_REFERENCED_VAR (tag, rvi)
    {
      if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG)
	continue;
      VEC_safe_push (tree, heap, taglist, tag);
    }

  /* We sort the taglist by DECL_UID, for two reasons.
     1. To get a sequential ordering to make the bitmap accesses
     faster.
     2. Because of the way we compute aliases, it's more likely that
     an earlier tag is included in a later tag, and this will reduce
     the number of iterations.

     If we had a real tag graph, we would just topo-order it and be
     done with it.  */
  qsort (VEC_address (tree, taglist),
	 VEC_length (tree, taglist),
	 sizeof (tree),
	 sort_tags_by_id);

  /* Go through each tag not marked as global, and if it aliases
     global vars, mark it global.

     If the tag contains call clobbered vars, mark it call
     clobbered.

     This loop iterates because tags may appear in the may-aliases
     list of other tags when we group.  */

  while (changed)
    {
      unsigned int k;

      changed = false;
      for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
	{
	  VEC (tree, gc) *ma;
	  unsigned int i;
	  tree entry;
	  bool tagcc = is_call_clobbered (tag);
	  bool tagglobal = MTAG_GLOBAL (tag);

	  /* Nothing left to learn about this tag.  */
	  if (tagcc && tagglobal)
	    continue;

	  ma = may_aliases (tag);
	  if (!ma)
	    continue;

	  for (i = 0; VEC_iterate (tree, ma, i, entry); i++)
	    {
	      /* Call clobbered entries cause the tag to be marked
		 call clobbered.  */
	      if (!tagcc && is_call_clobbered (entry))
		{
		  mark_call_clobbered (tag, var_ann (entry)->escape_mask);
		  tagcc = true;
		  changed = true;
		}

	      /* Global vars cause the tag to be marked global.  */
	      if (!tagglobal && is_global_var (entry))
		{
		  MTAG_GLOBAL (tag) = true;
		  changed = true;
		  tagglobal = true;
		}

	      /* Early exit once both global and cc are set, since the
		 loop can't do any more than that.  */
	      if (tagcc && tagglobal)
		break;
	    }
	}
    }

  VEC_free (tree, heap, taglist);
}
/* Set up the initial variable clobbers and globalness.
   When this function completes, only tags whose aliases need to be
   clobbered will be set clobbered.  Tags clobbered because they
   contain call clobbered vars are handled in compute_tag_properties.  */

static void
set_initial_properties (struct alias_info *ai)
{
  unsigned int i;
  referenced_var_iterator rvi;
  tree var;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      /* Globals without subvariables are clobbered directly; globals
	 with subvars are presumably handled through their subvars
	 (TODO confirm against get_subvars_for_var callers).  */
      if (is_global_var (var)
	  && (!var_can_have_subvars (var)
	      || get_subvars_for_var (var) == NULL))
	{
	  if (!unmodifiable_var_p (var))
	    mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
	}
      else if (TREE_CODE (var) == PARM_DECL
	       && default_def (var)
	       && POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  /* Incoming pointer arguments are treated as escaping: the
	     caller may retain other copies of the pointed-to data.  */
	  tree def = default_def (var);
	  get_ptr_info (def)->value_escapes_p = 1;
	  get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
	}
    }

  for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
    {
      tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));

      if (pi->value_escapes_p)
	{
	  /* If PTR escapes then its associated memory tags and
	     pointed-to variables are call-clobbered.  */
	  if (pi->name_mem_tag)
	    mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);

	  if (v_ann->type_mem_tag)
	    mark_call_clobbered (v_ann->type_mem_tag, pi->escape_mask);

	  if (pi->pt_vars)
	    {
	      bitmap_iterator bi;
	      unsigned int j;
	      EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
		if (!unmodifiable_var_p (referenced_var (j)))
		  mark_call_clobbered (referenced_var (j), pi->escape_mask);
	    }
	}

      /* If the name tag is call clobbered, so is the type tag
	 associated with the base VAR_DECL.  */
      if (pi->name_mem_tag
	  && v_ann->type_mem_tag
	  && is_call_clobbered (pi->name_mem_tag))
	mark_call_clobbered (v_ann->type_mem_tag, pi->escape_mask);

      /* Name tags and type tags that we don't know where they point
	 to, might point to global memory, and thus, are clobbered.

	 FIXME:  This is not quite right.  They should only be
	 clobbered if value_escapes_p is true, regardless of whether
	 they point to global memory or not.
	 So removing this code and fixing all the bugs would be nice.
	 It is the cause of a bunch of clobbering.  */
      if ((pi->pt_global_mem || pi->pt_anything)
	  && pi->is_dereferenced && pi->name_mem_tag)
	{
	  mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
	  MTAG_GLOBAL (pi->name_mem_tag) = true;
	}

      if ((pi->pt_global_mem || pi->pt_anything)
	  && pi->is_dereferenced && v_ann->type_mem_tag)
	{
	  mark_call_clobbered (v_ann->type_mem_tag, ESCAPE_IS_GLOBAL);
	  MTAG_GLOBAL (v_ann->type_mem_tag) = true;
	}
    }
}
395 /* This variable is set to true if we are updating the used alone
396 information for TMT's, or are in a pass that is going to break it
397 temporarily. */
399 bool updating_used_alone;
/* Compute which variables need to be marked call clobbered because
   their tag is call clobbered, and which tags need to be marked
   global because they contain global variables.  */

static void
compute_call_clobbered (struct alias_info *ai)
{
  VEC (tree, heap) *worklist = NULL;
  VEC(int,heap) *worklist2 = NULL;

  /* Seed the clobbered set, then propagate clobbering transitively
     through the tag alias lists until the worklist empties.  */
  set_initial_properties (ai);
  init_transitive_clobber_worklist (&worklist, &worklist2);
  while (VEC_length (tree, worklist) != 0)
    {
      tree curr = VEC_pop (tree, worklist);
      /* WORKLIST2 is popped in lockstep: it holds the escape reason
	 for the corresponding WORKLIST entry.  */
      int reason = VEC_pop (int, worklist2);

      mark_call_clobbered (curr, reason);
      mark_aliases_call_clobbered (curr, &worklist, &worklist2);
    }
  VEC_free (tree, heap, worklist);
  VEC_free (int, heap, worklist2);
  compute_tag_properties ();
}
/* Recalculate the used_alone information for TMT's . */

void
recalculate_used_alone (void)
{
  VEC (tree, heap) *calls = NULL;
  block_stmt_iterator bsi;
  basic_block bb;
  tree stmt;
  size_t i;
  referenced_var_iterator rvi;
  tree var;

  /* First, reset all the TMT used alone bits to zero.  */
  updating_used_alone = true;
  FOR_EACH_REFERENCED_VAR (var, rvi)
    if (TREE_CODE (var) == TYPE_MEMORY_TAG)
      TMT_USED_ALONE (var) = 0;

  /* Walk all the statements.
     Calls get put into a list of statements to update, since we will
     need to update operands on them if we make any changes.
     If we see a bare use of a TMT anywhere in a real virtual use or virtual
     def, mark the TMT as used alone, and for renaming. */

  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  stmt = bsi_stmt (bsi);
	  /* A call is either a bare CALL_EXPR or the RHS of a
	     MODIFY_EXPR; defer those for the update loop below.  */
	  if (TREE_CODE (stmt) == CALL_EXPR
	      || (TREE_CODE (stmt) == MODIFY_EXPR
		  && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
	    VEC_safe_push (tree, heap, calls, stmt);
	  else
	    {
	      ssa_op_iter iter;

	      FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter,
					 SSA_OP_VUSE | SSA_OP_VIRTUAL_DEFS)
		{
		  /* Virtual operands may be SSA names; look through to
		     the underlying symbol before checking for a TMT.  */
		  tree svar = var;

		  if(TREE_CODE (var) == SSA_NAME)
		    svar = SSA_NAME_VAR (var);

		  if (TREE_CODE (svar) == TYPE_MEMORY_TAG)
		    {
		      if (!TMT_USED_ALONE (svar))
			{
			  TMT_USED_ALONE (svar) = true;
			  mark_sym_for_renaming (svar);
			}
		    }
		}
	    }
	}
    }

  /* Update the operands on all the calls we saw.  */
  if (calls)
    {
      for (i = 0; VEC_iterate (tree, calls, i, stmt); i++)
	update_stmt (stmt);
    }
  VEC_free (tree, heap, calls);
  updating_used_alone = false;
}
496 /* Compute may-alias information for every variable referenced in function
497 FNDECL.
499 Alias analysis proceeds in 3 main phases:
501 1- Points-to and escape analysis.
503 This phase walks the use-def chains in the SSA web looking for three
504 things:
506 * Assignments of the form P_i = &VAR
507 * Assignments of the form P_i = malloc()
508 * Pointers and ADDR_EXPR that escape the current function.
510 The concept of 'escaping' is the same one used in the Java world. When
511 a pointer or an ADDR_EXPR escapes, it means that it has been exposed
512 outside of the current function. So, assignment to global variables,
513 function arguments and returning a pointer are all escape sites, as are
514 conversions between pointers and integers.
516 This is where we are currently limited. Since not everything is renamed
517 into SSA, we lose track of escape properties when a pointer is stashed
518 inside a field in a structure, for instance. In those cases, we are
519 assuming that the pointer does escape.
521 We use escape analysis to determine whether a variable is
522 call-clobbered. Simply put, if an ADDR_EXPR escapes, then the variable
523 is call-clobbered. If a pointer P_i escapes, then all the variables
524 pointed-to by P_i (and its memory tag) also escape.
526 2- Compute flow-sensitive aliases
528 We have two classes of memory tags. Memory tags associated with the
529 pointed-to data type of the pointers in the program. These tags are
530 called "type memory tag" (TMT). The other class are those associated
531 with SSA_NAMEs, called "name memory tag" (NMT). The basic idea is that
532 when adding operands for an INDIRECT_REF *P_i, we will first check
533 whether P_i has a name tag, if it does we use it, because that will have
534 more precise aliasing information. Otherwise, we use the standard type
535 tag.
537 In this phase, we go through all the pointers we found in points-to
538 analysis and create alias sets for the name memory tags associated with
539 each pointer P_i. If P_i escapes, we mark call-clobbered the variables
540 it points to and its tag.
543 3- Compute flow-insensitive aliases
545 This pass will compare the alias set of every type memory tag and every
546 addressable variable found in the program. Given a type memory tag TMT
547 and an addressable variable V. If the alias sets of TMT and V conflict
548 (as computed by may_alias_p), then V is marked as an alias tag and added
549 to the alias set of TMT.
551 For instance, consider the following function:
553 foo (int i)
555 int *p, a, b;
557 if (i > 10)
558 p = &a;
559 else
560 p = &b;
562 *p = 3;
563 a = b + 2;
564 return *p;
567 After aliasing analysis has finished, the type memory tag for pointer
568 'p' will have two aliases, namely variables 'a' and 'b'. Every time
569 pointer 'p' is dereferenced, we want to mark the operation as a
570 potential reference to 'a' and 'b'.
572 foo (int i)
574 int *p, a, b;
576 if (i_2 > 10)
577 p_4 = &a;
578 else
579 p_6 = &b;
580 # p_1 = PHI <p_4(1), p_6(2)>;
582 # a_7 = V_MAY_DEF <a_3>;
583 # b_8 = V_MAY_DEF <b_5>;
584 *p_1 = 3;
586 # a_9 = V_MAY_DEF <a_7>
587 # VUSE <b_8>
588 a_9 = b_8 + 2;
590 # VUSE <a_9>;
591 # VUSE <b_8>;
592 return *p_1;
595 In certain cases, the list of may aliases for a pointer may grow too
596 large. This may cause an explosion in the number of virtual operands
597 inserted in the code. Resulting in increased memory consumption and
598 compilation time.
600 When the number of virtual operands needed to represent aliased
601 loads and stores grows too large (configurable with @option{--param
602 max-aliased-vops}), alias sets are grouped to avoid severe
603 compile-time slow downs and memory consumption. See group_aliases. */
static void
compute_may_aliases (void)
{
  struct alias_info *ai;

  memset (&alias_stats, 0, sizeof (alias_stats));

  /* Initialize aliasing information.  */
  ai = init_alias_info ();

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  For every addressable variable V, determine whether the
     address of V escapes the current function, making V call-clobbered
     (i.e., whether &V is stored in a global variable or if its passed as a
     function call argument).  */
  compute_points_to_sets (ai);

  /* Collect all pointers and addressable variables, compute alias sets,
     create memory tags for pointers and promote variables whose address is
     not needed anymore.  */
  setup_pointers_and_addressables (ai);

  /* Compute flow-sensitive, points-to based aliasing for all the name
     memory tags.  Note that this pass needs to be done before flow
     insensitive analysis because it uses the points-to information
     gathered before to mark call-clobbered type tags.  */
  compute_flow_sensitive_aliasing (ai);

  /* Compute type-based flow-insensitive aliasing for all the type
     memory tags.  */
  compute_flow_insensitive_aliasing (ai);

  /* Determine if we need to enable alias grouping.  */
  if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
    group_aliases (ai);

  /* Compute call clobbering information.  */
  compute_call_clobbered (ai);

  /* If the program has too many call-clobbered variables and/or function
     calls, create .GLOBAL_VAR and use it to model call-clobbering
     semantics at call sites.  This reduces the number of virtual operands
     considerably, improving compile times at the expense of lost
     aliasing precision.  */
  maybe_create_global_var (ai);

  /* Debugging dumps.  */
  if (dump_file)
    {
      dump_referenced_vars (dump_file);
      if (dump_flags & TDF_STATS)
	dump_alias_stats (dump_file);
      dump_points_to_info (dump_file);
      dump_alias_info (dump_file);
    }

  /* Deallocate memory used by aliasing data structures.  */
  delete_alias_info (ai);

  /* Flush pending statement modifications before recomputing the
     used-alone bits; the flag guards the recomputation window.  */
  updating_used_alone = true;
  {
    block_stmt_iterator bsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      {
	for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	  {
	    update_stmt_if_modified (bsi_stmt (bsi));
	  }
      }
  }
  recalculate_used_alone ();
  updating_used_alone = false;
}
/* Pass descriptor for the may-alias pass; the entry point is
   compute_may_aliases above.  */
struct tree_opt_pass pass_may_alias =
{
  "alias",				/* name */
  NULL,					/* gate */
  compute_may_aliases,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_MAY_ALIAS,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  PROP_alias,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_update_ssa
    | TODO_ggc_collect | TODO_verify_ssa
    | TODO_verify_stmts,		/* todo_flags_finish */
  0					/* letter */
};
/* Data structure used to count the number of dereferences to PTR
   inside an expression.  */
struct count_ptr_d
{
  /* The pointer being searched for.  */
  tree ptr;

  /* Number of INDIRECT_REFs of PTR found (incremented by
     count_ptr_derefs).  */
  unsigned count;
};
710 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
711 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
713 static tree
714 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
716 struct count_ptr_d *count_p = (struct count_ptr_d *) data;
718 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
719 pointer 'ptr' is *not* dereferenced, it is simply used to compute
720 the address of 'fld' as 'ptr + offsetof(fld)'. */
721 if (TREE_CODE (*tp) == ADDR_EXPR)
723 *walk_subtrees = 0;
724 return NULL_TREE;
727 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
728 count_p->count++;
730 return NULL_TREE;
/* Count the number of direct and indirect uses for pointer PTR in
   statement STMT.  The two counts are stored in *NUM_USES_P and
   *NUM_DEREFS_P respectively.  *IS_STORE_P is set to 'true' if at
   least one of those dereferences is a store operation.  */

void
count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
		       unsigned *num_derefs_p, bool *is_store)
{
  ssa_op_iter i;
  tree use;

  *num_uses_p = 0;
  *num_derefs_p = 0;
  *is_store = false;

  /* Find out the total number of uses of PTR in STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
    if (use == ptr)
      (*num_uses_p)++;

  /* Now count the number of indirect references to PTR.  This is
     truly awful, but we don't have much choice.  There are no parent
     pointers inside INDIRECT_REFs, so an expression like
     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
     find all the indirect and direct uses of x_1 inside.  The only
     shortcut we can take is the fact that GIMPLE only allows
     INDIRECT_REFs inside the expressions below.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR
      || (TREE_CODE (stmt) == RETURN_EXPR
	  && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
      || TREE_CODE (stmt) == ASM_EXPR
      || TREE_CODE (stmt) == CALL_EXPR)
    {
      tree lhs, rhs;

      /* Split STMT into an LHS and RHS so each side can be walked
	 separately; only LHS dereferences count as stores.  */
      if (TREE_CODE (stmt) == MODIFY_EXPR)
	{
	  lhs = TREE_OPERAND (stmt, 0);
	  rhs = TREE_OPERAND (stmt, 1);
	}
      else if (TREE_CODE (stmt) == RETURN_EXPR)
	{
	  tree e = TREE_OPERAND (stmt, 0);
	  lhs = TREE_OPERAND (e, 0);
	  rhs = TREE_OPERAND (e, 1);
	}
      else if (TREE_CODE (stmt) == ASM_EXPR)
	{
	  lhs = ASM_OUTPUTS (stmt);
	  rhs = ASM_INPUTS (stmt);
	}
      else
	{
	  /* A bare CALL_EXPR: everything is on the "RHS".  */
	  lhs = NULL_TREE;
	  rhs = stmt;
	}

      if (lhs && (TREE_CODE (lhs) == TREE_LIST || EXPR_P (lhs)))
	{
	  struct count_ptr_d count;
	  count.ptr = ptr;
	  count.count = 0;
	  walk_tree (&lhs, count_ptr_derefs, &count, NULL);
	  *is_store = true;
	  *num_derefs_p = count.count;
	}

      if (rhs && (TREE_CODE (rhs) == TREE_LIST || EXPR_P (rhs)))
	{
	  struct count_ptr_d count;
	  count.ptr = ptr;
	  count.count = 0;
	  walk_tree (&rhs, count_ptr_derefs, &count, NULL);
	  *num_derefs_p += count.count;
	}
    }

  gcc_assert (*num_uses_p >= *num_derefs_p);
}
/* Initialize the data structures used for alias analysis.  */

static struct alias_info *
init_alias_info (void)
{
  struct alias_info *ai;
  referenced_var_iterator rvi;
  tree var;

  bitmap_obstack_initialize (&alias_obstack);
  ai = XCNEW (struct alias_info);
  ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
  sbitmap_zero (ai->ssa_names_visited);
  VARRAY_TREE_INIT (ai->processed_ptrs, 50, "processed_ptrs");
  ai->written_vars = BITMAP_ALLOC (&alias_obstack);
  ai->dereferenced_ptrs_store = BITMAP_ALLOC (&alias_obstack);
  ai->dereferenced_ptrs_load = BITMAP_ALLOC (&alias_obstack);

  /* If aliases have been computed before, clear existing information.  */
  if (aliases_computed_p)
    {
      unsigned i;

      /* Similarly, clear the set of addressable variables.  In this
	 case, we can just clear the set because addressability is
	 only computed here.  */
      bitmap_clear (addressable_vars);

      /* Clear flow-insensitive alias information from each symbol.  */
      FOR_EACH_REFERENCED_VAR (var, rvi)
	{
	  var_ann_t ann = var_ann (var);

	  ann->is_aliased = 0;
	  ann->may_aliases = NULL;
	  NUM_REFERENCES_CLEAR (ann);

	  /* Since we are about to re-discover call-clobbered
	     variables, clear the call-clobbered flag.  Variables that
	     are intrinsically call-clobbered (globals, local statics,
	     etc) will not be marked by the aliasing code, so we can't
	     remove them from CALL_CLOBBERED_VARS.

	     NB: STRUCT_FIELDS are still call clobbered if they are for
	     a global variable, so we *don't* clear their call clobberedness
	     just because they are tags, though we will clear it if they
	     aren't for global variables.  */
	  if (TREE_CODE (var) == NAME_MEMORY_TAG
	      || TREE_CODE (var) == TYPE_MEMORY_TAG
	      || !is_global_var (var))
	    clear_call_clobbered (var);
	}

      /* Clear flow-sensitive points-to information from each SSA name.  */
      for (i = 1; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);

	  if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
	    continue;

	  if (SSA_NAME_PTR_INFO (name))
	    {
	      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);

	      /* Clear all the flags but keep the name tag to
		 avoid creating new temporaries unnecessarily.  If
		 this pointer is found to point to a subset or
		 superset of its former points-to set, then a new
		 tag will need to be created in create_name_tags.  */
	      pi->pt_anything = 0;
	      pi->pt_null = 0;
	      pi->value_escapes_p = 0;
	      pi->is_dereferenced = 0;
	      if (pi->pt_vars)
		bitmap_clear (pi->pt_vars);
	    }
	}
    }

  /* Next time, we will need to reset alias information.  */
  aliases_computed_p = true;

  return ai;
}
/* Deallocate memory used by alias analysis.  */

static void
delete_alias_info (struct alias_info *ai)
{
  size_t i;
  referenced_var_iterator rvi;
  tree var;

  sbitmap_free (ai->ssa_names_visited);
  ai->processed_ptrs = NULL;

  /* Free the per-variable alias maps before the arrays that hold
     them.  */
  for (i = 0; i < ai->num_addressable_vars; i++)
    free (ai->addressable_vars[i]);

  FOR_EACH_REFERENCED_VAR(var, rvi)
    {
      var_ann_t ann = var_ann (var);
      NUM_REFERENCES_CLEAR (ann);
    }

  free (ai->addressable_vars);

  for (i = 0; i < ai->num_pointers; i++)
    free (ai->pointers[i]);
  free (ai->pointers);

  /* The bitmaps live on ALIAS_OBSTACK; release the obstack after
     freeing them.  */
  BITMAP_FREE (ai->written_vars);
  BITMAP_FREE (ai->dereferenced_ptrs_store);
  BITMAP_FREE (ai->dereferenced_ptrs_load);
  bitmap_obstack_release (&alias_obstack);
  free (ai);

  delete_points_to_sets ();
}
/* Create name tags for all the pointers that have been dereferenced.
   We only create a name tag for a pointer P if P is found to point to
   a set of variables (so that we can alias them to *P) or if it is
   the result of a call to malloc (which means that P cannot point to
   anything else nor alias any other variable).

   If two pointers P and Q point to the same set of variables, they
   are assigned the same name tag.  */

static void
create_name_tags (void)
{
  size_t i;
  VEC (tree, heap) *with_ptvars = NULL;
  tree ptr;

  /* Collect the list of pointers with a non-empty points to set.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (!ptr
	  || !POINTER_TYPE_P (TREE_TYPE (ptr))
	  || !SSA_NAME_PTR_INFO (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);

      if (pi->pt_anything || !pi->is_dereferenced)
	{
	  /* No name tags for pointers that have not been
	     dereferenced or point to an arbitrary location.  */
	  pi->name_mem_tag = NULL_TREE;
	  continue;
	}

      /* Set pt_anything on the pointers without pt_vars filled in so
	 that they are assigned a type tag.  */

      if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
	VEC_safe_push (tree, heap, with_ptvars, ptr);
      else
	set_pt_anything (ptr);
    }

  /* If we didn't find any pointers with pt_vars set, we're done.  */
  if (!with_ptvars)
    return;

  /* Now go through the pointers with pt_vars, and find a name tag
     with the same pt_vars as this pointer, or create one if one
     doesn't exist.  */
  for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      size_t j;
      tree ptr2;
      tree old_name_tag = pi->name_mem_tag;

      /* If PTR points to a set of variables, check if we don't
	 have another pointer Q with the same points-to set before
	 creating a tag.  If so, use Q's tag instead of creating a
	 new one.

	 This is important for not creating unnecessary symbols
	 and also for copy propagation.  If we ever need to
	 propagate PTR into Q or vice-versa, we would run into
	 problems if they both had different name tags because
	 they would have different SSA version numbers (which
	 would force us to take the name tags in and out of SSA).  */
      for (j = 0; j < i && VEC_iterate (tree, with_ptvars, j, ptr2); j++)
	{
	  struct ptr_info_def *qi = SSA_NAME_PTR_INFO (ptr2);

	  if (bitmap_equal_p (pi->pt_vars, qi->pt_vars))
	    {
	      pi->name_mem_tag = qi->name_mem_tag;
	      break;
	    }
	}

      /* If we didn't find a pointer with the same points-to set
	 as PTR, create a new name tag if needed.  */
      if (pi->name_mem_tag == NULL_TREE)
	pi->name_mem_tag = get_nmt_for (ptr);

      /* If the new name tag computed for PTR is different than
	 the old name tag that it used to have, then the old tag
	 needs to be removed from the IL, so we mark it for
	 renaming.  */
      if (old_name_tag && old_name_tag != pi->name_mem_tag)
	mark_sym_for_renaming (old_name_tag);

      /* A tag for a volatile pointed-to type must itself be
	 volatile.  */
      TREE_THIS_VOLATILE (pi->name_mem_tag)
	|= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));

      /* Mark the new name tag for renaming.  */
      mark_sym_for_renaming (pi->name_mem_tag);
    }

  VEC_free (tree, heap, with_ptvars);
}
/* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
   the name memory tag (NMT) associated with P_i.  If P_i escapes, then its
   name tag and the variables it points-to are call-clobbered.  Finally, if
   P_i escapes and we could not determine where it points to, then all the
   variables in the same alias set as *P_i are marked call-clobbered.  This
   is necessary because we must assume that P_i may take the address of any
   variable in the same alias set.  */

static void
compute_flow_sensitive_aliasing (struct alias_info *ai)
{
  size_t i;

  /* Pointers whose points-to set could not be determined fall back
     to pt_anything before name tags are created.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
    {
      tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
      if (!find_what_p_points_to (ptr))
	set_pt_anything (ptr);
    }

  create_name_tags ();

  for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
    {
      unsigned j;
      tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
      bitmap_iterator bi;

      /* Set up aliasing information for PTR's name memory tag (if it has
	 one).  Note that only pointers that have been dereferenced will
	 have a name memory tag.  */
      if (pi->name_mem_tag && pi->pt_vars)
	EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
	  {
	    add_may_alias (pi->name_mem_tag, referenced_var (j));
	    add_may_alias (v_ann->type_mem_tag, referenced_var (j));
	  }
    }
}
1087 /* Compute type-based alias sets. Traverse all the pointers and
1088 addressable variables found in setup_pointers_and_addressables.
1090 For every pointer P in AI->POINTERS and addressable variable V in
1091 AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's type
1092 memory tag (TMT) if their alias sets conflict. V is then marked as
1093 an alias tag so that the operand scanner knows that statements
1094 containing V have aliased operands. */
1096 static void
1097 compute_flow_insensitive_aliasing (struct alias_info *ai)
1099 size_t i;
1101 /* Initialize counter for the total number of virtual operands that
1102 aliasing will introduce. When AI->TOTAL_ALIAS_VOPS goes beyond the
1103 threshold set by --params max-alias-vops, we enable alias
1104 grouping. */
1105 ai->total_alias_vops = 0;
1107 /* For every pointer P, determine which addressable variables may alias
1108 with P's type memory tag. */
1109 for (i = 0; i < ai->num_pointers; i++)
1111 size_t j;
1112 struct alias_map_d *p_map = ai->pointers[i];
1113 tree tag = var_ann (p_map->var)->type_mem_tag;
1114 var_ann_t tag_ann = var_ann (tag);
1116 p_map->total_alias_vops = 0;
1117 p_map->may_aliases = BITMAP_ALLOC (&alias_obstack);
/* Inner loop: pair this pointer's tag against every addressable
   variable collected by setup_pointers_and_addressables.  */
1119 for (j = 0; j < ai->num_addressable_vars; j++)
1121 struct alias_map_d *v_map;
1122 var_ann_t v_ann;
1123 tree var;
1124 bool tag_stored_p, var_stored_p;
1126 v_map = ai->addressable_vars[j];
1127 var = v_map->var;
1128 v_ann = var_ann (var);
1130 /* Skip memory tags and variables that have never been
1131 written to. We also need to check if the variables are
1132 call-clobbered because they may be overwritten by
1133 function calls.
1135 Note this is effectively random accessing elements in
1136 the sparse bitset, which can be highly inefficient.
1137 So we first check the call_clobbered status of the
1138 tag and variable before querying the bitmap. */
1139 tag_stored_p = is_call_clobbered (tag)
1140 || bitmap_bit_p (ai->written_vars, DECL_UID (tag));
1141 var_stored_p = is_call_clobbered (var)
1142 || bitmap_bit_p (ai->written_vars, DECL_UID (var));
1143 if (!tag_stored_p && !var_stored_p)
1144 continue;
1146 if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
1148 size_t num_tag_refs, num_var_refs;
1150 num_tag_refs = NUM_REFERENCES (tag_ann);
1151 num_var_refs = NUM_REFERENCES (v_ann);
1153 /* Add VAR to TAG's may-aliases set. */
1155 /* We should never have a var with subvars here, because
1156 they shouldn't get into the set of addressable vars */
1157 gcc_assert (!var_can_have_subvars (var)
1158 || get_subvars_for_var (var) == NULL);
1160 add_may_alias (tag, var);
1161 /* Update the bitmap used to represent TAG's alias set
1162 in case we need to group aliases. */
1163 bitmap_set_bit (p_map->may_aliases, DECL_UID (var));
1165 /* Update the total number of virtual operands due to
1166 aliasing. Since we are adding one more alias to TAG's
1167 may-aliases set, the total number of virtual operands due
1168 to aliasing will be increased by the number of references
1169 made to VAR and TAG (every reference to TAG will also
1170 count as a reference to VAR). */
1171 ai->total_alias_vops += (num_var_refs + num_tag_refs);
1172 p_map->total_alias_vops += (num_var_refs + num_tag_refs);
1179 /* Since this analysis is based exclusively on symbols, it fails to
1180 handle cases where two pointers P and Q have different memory
1181 tags with conflicting alias set numbers but no aliased symbols in
1182 common.
1184 For example, suppose that we have two memory tags TMT.1 and TMT.2
1185 such that
1187 may-aliases (TMT.1) = { a }
1188 may-aliases (TMT.2) = { b }
1190 and the alias set number of TMT.1 conflicts with that of TMT.2.
1191 Since they don't have symbols in common, loads and stores from
1192 TMT.1 and TMT.2 will seem independent of each other, which will
1193 lead to the optimizers making invalid transformations (see
1194 testsuite/gcc.c-torture/execute/pr15262-[12].c).
1196 To avoid this problem, we do a final traversal of AI->POINTERS
1197 looking for pairs of pointers that have no aliased symbols in
1198 common and yet have conflicting alias set numbers. */
1199 for (i = 0; i < ai->num_pointers; i++)
1201 size_t j;
1202 struct alias_map_d *p_map1 = ai->pointers[i];
1203 tree tag1 = var_ann (p_map1->var)->type_mem_tag;
1204 bitmap may_aliases1 = p_map1->may_aliases;
/* Only pairs (i, j) with j > i need checking; the relation is
   symmetric.  */
1206 for (j = i + 1; j < ai->num_pointers; j++)
1208 struct alias_map_d *p_map2 = ai->pointers[j];
1209 tree tag2 = var_ann (p_map2->var)->type_mem_tag;
1210 bitmap may_aliases2 = p_map2->may_aliases;
1212 /* If the pointers may not point to each other, do nothing. */
1213 if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
1214 continue;
1216 /* The two pointers may alias each other. If they already have
1217 symbols in common, do nothing. */
1218 if (bitmap_intersect_p (may_aliases1, may_aliases2))
1219 continue;
1221 if (!bitmap_empty_p (may_aliases2))
1223 unsigned int k;
1224 bitmap_iterator bi;
1226 /* Add all the aliases for TAG2 into TAG1's alias set.
1227 FIXME, update grouping heuristic counters. */
1228 EXECUTE_IF_SET_IN_BITMAP (may_aliases2, 0, k, bi)
1229 add_may_alias (tag1, referenced_var (k));
1230 bitmap_ior_into (may_aliases1, may_aliases2);
1232 else
1234 /* Since TAG2 does not have any aliases of its own, add
1235 TAG2 itself to the alias set of TAG1. */
1236 add_may_alias (tag1, tag2);
1237 bitmap_set_bit (may_aliases1, DECL_UID (tag2));
1242 if (dump_file)
1243 fprintf (dump_file, "\n%s: Total number of aliased vops: %ld\n",
1244 get_name (current_function_decl),
1245 ai->total_alias_vops);
1249 /* Comparison function for qsort used in group_aliases. */
1251 static int
1252 total_alias_vops_cmp (const void *p, const void *q)
1254 const struct alias_map_d **p1 = (const struct alias_map_d **)p;
1255 const struct alias_map_d **p2 = (const struct alias_map_d **)q;
1256 long n1 = (*p1)->total_alias_vops;
1257 long n2 = (*p2)->total_alias_vops;
1259 /* We want to sort in descending order. */
1260 return (n1 > n2 ? -1 : (n1 == n2) ? 0 : 1);
1263 /* Group all the aliases for TAG to make TAG represent all the
1264 variables in its alias set. Update the total number
1265 of virtual operands due to aliasing (AI->TOTAL_ALIAS_VOPS). This
1266 function will make TAG be the unique alias tag for all the
1267 variables in its may-aliases. So, given:
1269 may-aliases(TAG) = { V1, V2, V3 }
1271 This function will group the variables into:
1273 may-aliases(V1) = { TAG }
1274 may-aliases(V2) = { TAG }
1275 may-aliases(V3) = { TAG } */
1277 static void
1278 group_aliases_into (tree tag, bitmap tag_aliases, struct alias_info *ai)
1280 unsigned int i;
1281 var_ann_t tag_ann = var_ann (tag);
1282 size_t num_tag_refs = NUM_REFERENCES (tag_ann);
1283 bitmap_iterator bi;
/* Invert the relation: instead of TAG listing every variable it may
   alias, make every such variable list TAG as its only alias.  */
1285 EXECUTE_IF_SET_IN_BITMAP (tag_aliases, 0, i, bi)
1287 tree var = referenced_var (i);
1288 var_ann_t ann = var_ann (var);
1290 /* Make TAG the unique alias of VAR. */
1291 ann->is_aliased = 0;
1292 ann->may_aliases = NULL;
1294 /* Note that VAR and TAG may be the same if the function has no
1295 addressable variables (see the discussion at the end of
1296 setup_pointers_and_addressables). */
1297 if (var != tag)
1298 add_may_alias (var, tag);
1300 /* Reduce total number of virtual operands contributed
1301 by TAG on behalf of VAR. Notice that the references to VAR
1302 itself won't be removed. We will merely replace them with
1303 references to TAG. */
1304 ai->total_alias_vops -= num_tag_refs;
1307 /* We have reduced the number of virtual operands that TAG makes on
1308 behalf of all the variables formerly aliased with it. However,
1309 we have also "removed" all the virtual operands for TAG itself,
1310 so we add them back. */
1311 ai->total_alias_vops += num_tag_refs;
1313 /* TAG no longer has any aliases. */
1314 tag_ann->may_aliases = NULL;
1318 /* Group may-aliases sets to reduce the number of virtual operands due
1319 to aliasing.
1321 1- Sort the list of pointers in decreasing number of contributed
1322 virtual operands.
1324 2- Take the first entry in AI->POINTERS and revert the role of
1325 the memory tag and its aliases. Usually, whenever an aliased
1326 variable Vi is found to alias with a memory tag T, we add Vi
1327 to the may-aliases set for T. Meaning that after alias
1328 analysis, we will have:
1330 may-aliases(T) = { V1, V2, V3, ..., Vn }
1332 This means that every statement that references T, will get 'n'
1333 virtual operands for each of the Vi tags. But, when alias
1334 grouping is enabled, we make T an alias tag and add it to the
1335 alias set of all the Vi variables:
1337 may-aliases(V1) = { T }
1338 may-aliases(V2) = { T }
1340 may-aliases(Vn) = { T }
1342 This has two effects: (a) statements referencing T will only get
1343 a single virtual operand, and, (b) all the variables Vi will now
1344 appear to alias each other. So, we lose alias precision to
1345 improve compile time. But, in theory, a program with such a high
1346 level of aliasing should not be very optimizable in the first
1347 place.
1349 3- Since variables may be in the alias set of more than one
1350 memory tag, the grouping done in step (2) needs to be extended
1351 to all the memory tags that have a non-empty intersection with
1352 the may-aliases set of tag T. For instance, if we originally
1353 had these may-aliases sets:
1355 may-aliases(T) = { V1, V2, V3 }
1356 may-aliases(R) = { V2, V4 }
1358 In step (2) we would have reverted the aliases for T as:
1360 may-aliases(V1) = { T }
1361 may-aliases(V2) = { T }
1362 may-aliases(V3) = { T }
1364 But note that now V2 is no longer aliased with R. We could
1365 add R to may-aliases(V2), but we are in the process of
1366 grouping aliases to reduce virtual operands so what we do is
1367 add V4 to the grouping to obtain:
1369 may-aliases(V1) = { T }
1370 may-aliases(V2) = { T }
1371 may-aliases(V3) = { T }
1372 may-aliases(V4) = { T }
1374 4- If the total number of virtual operands due to aliasing is
1375 still above the threshold set by max-alias-vops, go back to (2). */
1377 static void
1378 group_aliases (struct alias_info *ai)
1380 size_t i;
1382 /* Sort the POINTERS array in descending order of contributed
1383 virtual operands. */
1384 qsort (ai->pointers, ai->num_pointers, sizeof (struct alias_map_d *),
1385 total_alias_vops_cmp);
1387 /* For every pointer in AI->POINTERS, reverse the roles of its tag
1388 and the tag's may-aliases set. */
1389 for (i = 0; i < ai->num_pointers; i++)
1391 size_t j;
1392 tree tag1 = var_ann (ai->pointers[i]->var)->type_mem_tag;
1393 bitmap tag1_aliases = ai->pointers[i]->may_aliases;
1395 /* Skip tags that have been grouped already. */
1396 if (ai->pointers[i]->grouped_p)
1397 continue;
1399 /* See if TAG1 had any aliases in common with other type tags.
1400 If we find a TAG2 with common aliases with TAG1, add TAG2's
1401 aliases into TAG1. */
1402 for (j = i + 1; j < ai->num_pointers; j++)
1404 bitmap tag2_aliases = ai->pointers[j]->may_aliases;
1406 if (bitmap_intersect_p (tag1_aliases, tag2_aliases))
1408 tree tag2 = var_ann (ai->pointers[j]->var)->type_mem_tag;
/* Merge TAG2's aliases into the group being built around TAG1
   (step 3 of the algorithm described above).  */
1410 bitmap_ior_into (tag1_aliases, tag2_aliases);
1412 /* TAG2 does not need its aliases anymore. */
1413 bitmap_clear (tag2_aliases);
1414 var_ann (tag2)->may_aliases = NULL;
1416 /* TAG1 is the unique alias of TAG2. */
1417 add_may_alias (tag2, tag1);
1419 ai->pointers[j]->grouped_p = true;
1423 /* Now group all the aliases we collected into TAG1. */
1424 group_aliases_into (tag1, tag1_aliases, ai);
1426 /* If we've reduced total number of virtual operands below the
1427 threshold, stop. */
1428 if (ai->total_alias_vops < MAX_ALIASED_VOPS)
1429 break;
1432 /* Finally, all the variables that have been grouped cannot be in
1433 the may-alias set of name memory tags. Suppose that we have
1434 grouped the aliases in this code so that may-aliases(a) = TMT.20
1436 p_5 = &a;
1438 # a_9 = V_MAY_DEF <a_8>
1439 p_5->field = 0
1440 ... Several modifications to TMT.20 ...
1441 # VUSE <a_9>
1442 x_30 = p_5->field
1444 Since p_5 points to 'a', the optimizers will try to propagate 0
1445 into p_5->field, but that is wrong because there have been
1446 modifications to 'TMT.20' in between. To prevent this we have to
1447 replace 'a' with 'TMT.20' in the name tag of p_5. */
1448 for (i = 0; i < VARRAY_ACTIVE_SIZE (ai->processed_ptrs); i++)
1450 size_t j;
1451 tree ptr = VARRAY_TREE (ai->processed_ptrs, i);
1452 tree name_tag = SSA_NAME_PTR_INFO (ptr)->name_mem_tag;
1453 VEC(tree,gc) *aliases;
1454 tree alias;
1456 if (name_tag == NULL_TREE)
1457 continue;
1459 aliases = var_ann (name_tag)->may_aliases;
1460 for (j = 0; VEC_iterate (tree, aliases, j, alias); j++)
1462 var_ann_t ann = var_ann (alias);
1464 if ((!MTAG_P (alias)
1465 || TREE_CODE (alias) == STRUCT_FIELD_TAG)
1466 && ann->may_aliases)
1468 tree new_alias;
/* A grouped variable has exactly one alias left: its group tag.  */
1470 gcc_assert (VEC_length (tree, ann->may_aliases) == 1);
1472 new_alias = VEC_index (tree, ann->may_aliases, 0);
1473 replace_may_alias (name_tag, j, new_alias);
1478 if (dump_file)
1479 fprintf (dump_file,
1480 "%s: Total number of aliased vops after grouping: %ld%s\n",
1481 get_name (current_function_decl),
1482 ai->total_alias_vops,
1483 (ai->total_alias_vops < 0) ? " (negative values are OK)" : "");
1487 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
1489 static void
1490 create_alias_map_for (tree var, struct alias_info *ai)
1492 struct alias_map_d *alias_map;
1493 alias_map = XCNEW (struct alias_map_d);
1494 alias_map->var = var;
1495 alias_map->set = get_alias_set (var);
1496 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
1500 /* Create memory tags for all the dereferenced pointers and build the
1501 ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
1502 sets. Based on the address escape and points-to information collected
1503 earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
1504 variables whose address is not needed anymore. */
1506 static void
1507 setup_pointers_and_addressables (struct alias_info *ai)
1509 size_t n_vars, num_addressable_vars, num_pointers;
1510 referenced_var_iterator rvi;
1511 tree var;
1512 VEC (tree, heap) *varvec = NULL;
1513 safe_referenced_var_iterator srvi;
1515 /* Size up the arrays ADDRESSABLE_VARS and POINTERS. */
1516 num_addressable_vars = num_pointers = 0;
1518 FOR_EACH_REFERENCED_VAR (var, rvi)
1520 if (may_be_aliased (var))
1521 num_addressable_vars++;
1523 if (POINTER_TYPE_P (TREE_TYPE (var)))
1525 /* Since we don't keep track of volatile variables, assume that
1526 these pointers are used in indirect store operations. */
1527 if (TREE_THIS_VOLATILE (var))
1528 bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
1530 num_pointers++;
1534 /* Create ADDRESSABLE_VARS and POINTERS. Note that these arrays are
1535 always going to be slightly bigger than we actually need them
1536 because some TREE_ADDRESSABLE variables will be marked
1537 non-addressable below and only pointers with unique type tags are
1538 going to be added to POINTERS. */
1539 ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
1540 ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
1541 ai->num_addressable_vars = 0;
1542 ai->num_pointers = 0;
1544 /* Since we will be creating type memory tags within this loop, cache the
1545 value of NUM_REFERENCED_VARS to avoid processing the additional tags
1546 unnecessarily. */
1547 n_vars = num_referenced_vars;
/* Main loop: classify each referenced variable as addressable and/or a
   dereferenced pointer, clearing stale TREE_ADDRESSABLE flags along
   the way.  The "safe" iterator tolerates additions to the referenced
   vars list made while creating tags.  */
1549 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
1551 var_ann_t v_ann = var_ann (var);
1552 subvar_t svars;
1554 /* Name memory tags already have flow-sensitive aliasing
1555 information, so they need not be processed by
1556 compute_flow_insensitive_aliasing. Similarly, type memory
1557 tags are already accounted for when we process their
1558 associated pointer.
1560 Structure fields, on the other hand, have to have some of this
1561 information processed for them, but it's pointless to mark them
1562 non-addressable (since they are fake variables anyway). */
1563 if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
1564 continue;
1566 /* Remove the ADDRESSABLE flag from every addressable variable whose
1567 address is not needed anymore. This is caused by the propagation
1568 of ADDR_EXPR constants into INDIRECT_REF expressions and the
1569 removal of dead pointer assignments done by the early scalar
1570 cleanup passes. */
1571 if (TREE_ADDRESSABLE (var))
1573 if (!bitmap_bit_p (addressable_vars, DECL_UID (var))
1574 && TREE_CODE (var) != RESULT_DECL
1575 && !is_global_var (var))
1577 bool okay_to_mark = true;
1579 /* Since VAR is now a regular GIMPLE register, we will need
1580 to rename VAR into SSA afterwards. */
1581 mark_sym_for_renaming (var);
1583 /* If VAR can have sub-variables, and any of its
1584 sub-variables has its address taken, then we cannot
1585 remove the addressable flag from VAR. */
1586 if (var_can_have_subvars (var)
1587 && (svars = get_subvars_for_var (var)))
1589 subvar_t sv;
1591 for (sv = svars; sv; sv = sv->next)
1593 if (bitmap_bit_p (addressable_vars, DECL_UID (sv->var)))
1594 okay_to_mark = false;
1595 mark_sym_for_renaming (sv->var);
1599 /* The address of VAR is not needed, remove the
1600 addressable bit, so that it can be optimized as a
1601 regular variable. */
1602 if (okay_to_mark)
1603 mark_non_addressable (var);
1607 /* Global variables and addressable locals may be aliased. Create an
1608 entry in ADDRESSABLE_VARS for VAR. */
1609 if (may_be_aliased (var)
1610 && (!var_can_have_subvars (var)
1611 || get_subvars_for_var (var) == NULL))
1613 create_alias_map_for (var, ai);
1614 mark_sym_for_renaming (var);
1617 /* Add pointer variables that have been dereferenced to the POINTERS
1618 array and create a type memory tag for them. */
1619 if (POINTER_TYPE_P (TREE_TYPE (var)))
1621 if ((bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var))
1622 || bitmap_bit_p (ai->dereferenced_ptrs_load, DECL_UID (var))))
1624 tree tag;
1625 var_ann_t t_ann;
1627 /* If pointer VAR still doesn't have a memory tag
1628 associated with it, create it now or re-use an
1629 existing one. */
1630 tag = get_tmt_for (var, ai);
1631 t_ann = var_ann (tag);
1633 /* The type tag will need to be renamed into SSA
1634 afterwards. Note that we cannot do this inside
1635 get_tmt_for because aliasing may run multiple times
1636 and we only create type tags the first time. */
1637 mark_sym_for_renaming (tag);
1639 /* Similarly, if pointer VAR used to have another type
1640 tag, we will need to process it in the renamer to
1641 remove the stale virtual operands. */
1642 if (v_ann->type_mem_tag)
1643 mark_sym_for_renaming (v_ann->type_mem_tag);
1645 /* Associate the tag with pointer VAR. */
1646 v_ann->type_mem_tag = tag;
1648 /* If pointer VAR has been used in a store operation,
1649 then its memory tag must be marked as written-to. */
1650 if (bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var)))
1651 bitmap_set_bit (ai->written_vars, DECL_UID (tag));
1653 /* All the dereferences of pointer VAR count as
1654 references of TAG. Since TAG can be associated with
1655 several pointers, add the dereferences of VAR to the
1656 TAG. */
1657 NUM_REFERENCES_SET (t_ann,
1658 NUM_REFERENCES (t_ann)
1659 + NUM_REFERENCES (v_ann));
1661 else
1663 /* The pointer has not been dereferenced. If it had a
1664 type memory tag, remove it and mark the old tag for
1665 renaming to remove it out of the IL. */
1666 var_ann_t ann = var_ann (var);
1667 tree tag = ann->type_mem_tag;
1668 if (tag)
1670 mark_sym_for_renaming (tag);
1671 ann->type_mem_tag = NULL_TREE;
/* Release the scratch vector used by the safe iterator.  */
1676 VEC_free (tree, heap, varvec);
1680 /* Determine whether to use .GLOBAL_VAR to model call clobbering semantics. At
1681 every call site, we need to emit V_MAY_DEF expressions to represent the
1682 clobbering effects of the call for variables whose address escapes the
1683 current function.
1685 One approach is to group all call-clobbered variables into a single
1686 representative that is used as an alias of every call-clobbered variable
1687 (.GLOBAL_VAR). This works well, but it ties the optimizer hands because
1688 references to any call clobbered variable is a reference to .GLOBAL_VAR.
1690 The second approach is to emit a clobbering V_MAY_DEF for every
1691 call-clobbered variable at call sites. This is the preferred way in terms
1692 of optimization opportunities but it may create too many V_MAY_DEF operands
1693 if there are many call clobbered variables and function calls in the
1694 function.
1696 To decide whether or not to use .GLOBAL_VAR we multiply the number of
1697 function calls found by the number of call-clobbered variables. If that
1698 product is beyond a certain threshold, as determined by the parameterized
1699 values shown below, we use .GLOBAL_VAR.
1701 FIXME. This heuristic should be improved. One idea is to use several
1702 .GLOBAL_VARs of different types instead of a single one. The thresholds
1703 have been derived from a typical bootstrap cycle, including all target
1704 libraries. Compile times were found to increase by ~1% compared to using
1705 .GLOBAL_VAR. */
1707 static void
1708 maybe_create_global_var (struct alias_info *ai)
1710 unsigned i, n_clobbered;
1711 bitmap_iterator bi;
1713 /* No need to create it, if we have one already. */
1714 if (global_var == NULL_TREE)
1716 /* Count all the call-clobbered variables. */
1717 n_clobbered = 0;
1718 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1720 n_clobbered++;
1723 /* If the number of virtual operands that would be needed to
1724 model all the call-clobbered variables is larger than
1725 GLOBAL_VAR_THRESHOLD, create .GLOBAL_VAR.
1727 Also create .GLOBAL_VAR if there are no call-clobbered
1728 variables and the program contains a mixture of pure/const
1729 and regular function calls. This is to avoid the problem
1730 described in PR 20115:
1732 int X;
1733 int func_pure (void) { return X; }
1734 int func_non_pure (int a) { X += a; }
1735 int foo ()
1737 int a = func_pure ();
1738 func_non_pure (a);
1739 a = func_pure ();
1740 return a;
1743 Since foo() has no call-clobbered variables, there is
1744 no relationship between the calls to func_pure and
1745 func_non_pure. Since func_pure has no side-effects, value
1746 numbering optimizations elide the second call to func_pure.
1747 So, if we have some pure/const and some regular calls in the
1748 program we create .GLOBAL_VAR to avoid missing these
1749 relations. */
1750 if (ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD
1751 || (n_clobbered == 0
1752 && ai->num_calls_found > 0
1753 && ai->num_pure_const_calls_found > 0
1754 && ai->num_calls_found > ai->num_pure_const_calls_found))
1755 create_global_var ();
1758 /* Mark all call-clobbered symbols for renaming. Since the initial
1759 rewrite into SSA ignored all call sites, we may need to rename
1760 .GLOBAL_VAR and the call-clobbered variables. */
1761 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1763 tree var = referenced_var (i);
1765 /* If the function has calls to clobbering functions and
1766 .GLOBAL_VAR has been created, make it an alias for all
1767 call-clobbered variables. */
1768 if (global_var && var != global_var)
1770 add_may_alias (var, global_var);
/* Variables with sub-variables are never call-clobbered here.  */
1771 gcc_assert (!get_subvars_for_var (var));
1774 mark_sym_for_renaming (var);
1779 /* Return TRUE if pointer PTR may point to variable VAR.
1781 MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR
1782 This is needed because when checking for type conflicts we are
1783 interested in the alias set of the memory location pointed-to by
1784 PTR. The alias set of PTR itself is irrelevant.
1786 VAR_ALIAS_SET is the alias set for VAR. */
1788 static bool
1789 may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
1790 tree var, HOST_WIDE_INT var_alias_set,
1791 bool alias_set_only)
1793 tree mem;
/* Every query, resolved or not, is counted for the alias statistics
   dump.  */
1795 alias_stats.alias_queries++;
1796 alias_stats.simple_queries++;
1798 /* By convention, a variable cannot alias itself. */
1799 mem = var_ann (ptr)->type_mem_tag;
1800 if (mem == var)
1802 alias_stats.alias_noalias++;
1803 alias_stats.simple_resolved++;
1804 return false;
1807 /* If -fargument-noalias-global is >1, pointer arguments may
1808 not point to global variables. */
1809 if (flag_argument_noalias > 1 && is_global_var (var)
1810 && TREE_CODE (ptr) == PARM_DECL)
1812 alias_stats.alias_noalias++;
1813 alias_stats.simple_resolved++;
1814 return false;
1817 /* If either MEM or VAR is a read-only global and the other one
1818 isn't, then PTR cannot point to VAR. */
1819 if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
1820 || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
1822 alias_stats.alias_noalias++;
1823 alias_stats.simple_resolved++;
1824 return false;
1827 gcc_assert (TREE_CODE (mem) == TYPE_MEMORY_TAG);
1829 alias_stats.tbaa_queries++;
1831 /* If the alias sets don't conflict then MEM cannot alias VAR. */
1832 if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
1834 alias_stats.alias_noalias++;
1835 alias_stats.tbaa_resolved++;
1836 return false;
1839 /* If var is a record or union type, ptr cannot point into var
1840 unless there is some operation explicit address operation in the
1841 program that can reference a field of the ptr's dereferenced
1842 type. This also assumes that the types of both var and ptr are
1843 contained within the compilation unit, and that there is no fancy
1844 addressing arithmetic associated with any of the types
1845 involved. */
1847 if ((mem_alias_set != 0) && (var_alias_set != 0))
1849 tree ptr_type = TREE_TYPE (ptr);
1850 tree var_type = TREE_TYPE (var);
1852 /* The star count is -1 if the type at the end of the pointer_to
1853 chain is not a record or union type. */
1854 if ((!alias_set_only) &&
1855 ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
1857 int ptr_star_count = 0;
1859 /* Ipa_type_escape_star_count_of_interesting_type is a little too
1860 restrictive for the pointer type, need to allow pointers to
1861 primitive types as long as those types cannot be pointers
1862 to everything. */
1863 while (POINTER_TYPE_P (ptr_type))
1864 /* Strip the *'s off. */
1866 ptr_type = TREE_TYPE (ptr_type);
1867 ptr_star_count++;
1870 /* There does not appear to be a better test to see if the
1871 pointer type was one of the pointer to everything
1872 types. */
1874 if (ptr_star_count > 0)
1876 alias_stats.structnoaddress_queries++;
1877 if (ipa_type_escape_field_does_not_clobber_p (var_type,
1878 TREE_TYPE (ptr)))
1880 alias_stats.structnoaddress_resolved++;
1881 alias_stats.alias_noalias++;
1882 return false;
1885 else if (ptr_star_count == 0)
1887 /* If ptr_type was not really a pointer to type, it cannot
1888 alias. */
1889 alias_stats.structnoaddress_queries++;
1890 alias_stats.structnoaddress_resolved++;
1891 alias_stats.alias_noalias++;
1892 return false;
/* All quick disambiguation tests failed: conservatively assume the
   pointer and the variable may alias.  */
1897 alias_stats.alias_mayalias++;
1898 return true;
1902 /* Add ALIAS to the set of variables that may alias VAR. */
1904 static void
1905 add_may_alias (tree var, tree alias)
1907 size_t i;
1908 var_ann_t v_ann = get_var_ann (var);
1909 var_ann_t a_ann = get_var_ann (alias);
1910 tree al;
1912 /* Don't allow self-referential aliases. */
1913 gcc_assert (var != alias);
1915 /* ALIAS must be addressable if it's being added to an alias set. */
/* NOTE(review): the '#if 1' branch forces the flag instead of
   asserting it; the assert in the '#else' arm appears to be the
   intended long-term check — confirm before changing.  */
1916 #if 1
1917 TREE_ADDRESSABLE (alias) = 1;
1918 #else
1919 gcc_assert (may_be_aliased (alias));
1920 #endif
/* Lazily allocate the may-aliases vector on first insertion.  */
1922 if (v_ann->may_aliases == NULL)
1923 v_ann->may_aliases = VEC_alloc (tree, gc, 2);
1925 /* Avoid adding duplicates. */
1926 for (i = 0; VEC_iterate (tree, v_ann->may_aliases, i, al); i++)
1927 if (alias == al)
1928 return;
1930 VEC_safe_push (tree, gc, v_ann->may_aliases, alias);
1931 a_ann->is_aliased = 1;
1935 /* Replace alias I in the alias sets of VAR with NEW_ALIAS. */
1937 static void
1938 replace_may_alias (tree var, size_t i, tree new_alias)
1940 var_ann_t v_ann = var_ann (var);
1941 VEC_replace (tree, v_ann->may_aliases, i, new_alias);
1945 /* Mark pointer PTR as pointing to an arbitrary memory location. */
1947 static void
1948 set_pt_anything (tree ptr)
1950 struct ptr_info_def *pi = get_ptr_info (ptr);
1952 pi->pt_anything = 1;
1953 pi->pt_vars = NULL;
1955 /* The pointer used to have a name tag, but we now found it pointing
1956 to an arbitrary location. The name tag needs to be renamed and
1957 disassociated from PTR. */
1958 if (pi->name_mem_tag)
1960 mark_sym_for_renaming (pi->name_mem_tag);
1961 pi->name_mem_tag = NULL_TREE;
1966 /* Return true if STMT is an "escape" site from the current function. Escape
1967 sites are those statements which might expose the address of a variable
1968 outside the current function. STMT is an escape site iff:
1970 1- STMT is a function call, or
1971 2- STMT is an __asm__ expression, or
1972 3- STMT is an assignment to a non-local variable, or
1973 4- STMT is a return statement.
1975 AI points to the alias information collected so far.
1977 Return the type of escape site found, if we found one, or NO_ESCAPE
1978 if none. */
1980 enum escape_type
1981 is_escape_site (tree stmt, struct alias_info *ai)
/* A call expression anywhere in STMT makes it an escape site; the
   call counters in AI feed the .GLOBAL_VAR heuristic in
   maybe_create_global_var.  */
1983 tree call = get_call_expr_in (stmt);
1984 if (call != NULL_TREE)
1986 ai->num_calls_found++;
1988 if (!TREE_SIDE_EFFECTS (call))
1990 ai->num_pure_const_calls_found++;
1991 return ESCAPE_TO_PURE_CONST;
1994 return ESCAPE_TO_CALL;
1996 else if (TREE_CODE (stmt) == ASM_EXPR)
1997 return ESCAPE_TO_ASM;
1998 else if (TREE_CODE (stmt) == MODIFY_EXPR)
2000 tree lhs = TREE_OPERAND (stmt, 0);
2002 /* Get to the base of _REF nodes. */
2003 if (TREE_CODE (lhs) != SSA_NAME)
2004 lhs = get_base_address (lhs);
2006 /* If we couldn't recognize the LHS of the assignment, assume that it
2007 is a non-local store. */
2008 if (lhs == NULL_TREE)
2009 return ESCAPE_UNKNOWN;
2011 /* If the RHS is a conversion between a pointer and an integer, the
2012 pointer escapes since we can't track the integer. */
2013 if ((TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
2014 || TREE_CODE (TREE_OPERAND (stmt, 1)) == CONVERT_EXPR
2015 || TREE_CODE (TREE_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
2016 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND
2017 (TREE_OPERAND (stmt, 1), 0)))
2018 && !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 1))))
2019 return ESCAPE_BAD_CAST;
2021 /* If the LHS is an SSA name, it can't possibly represent a non-local
2022 memory store. */
2023 if (TREE_CODE (lhs) == SSA_NAME)
2024 return NO_ESCAPE;
2026 /* FIXME: LHS is not an SSA_NAME. Even if it's an assignment to a
2027 local variables we cannot be sure if it will escape, because we
2028 don't have information about objects not in SSA form. Need to
2029 implement something along the lines of
2031 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
2032 Midkiff, ``Escape analysis for java,'' in Proceedings of the
2033 Conference on Object-Oriented Programming Systems, Languages, and
2034 Applications (OOPSLA), pp. 1-19, 1999. */
2035 return ESCAPE_STORED_IN_GLOBAL;
2037 else if (TREE_CODE (stmt) == RETURN_EXPR)
2038 return ESCAPE_TO_RETURN;
/* None of the escape conditions matched.  */
2040 return NO_ESCAPE;
2043 /* Create a new memory tag of type TYPE.
2044 Does NOT push it into the current binding. */
2046 static tree
2047 create_tag_raw (enum tree_code code, tree type, const char *prefix)
2049 tree tmp_var;
2050 tree new_type;
2052 /* Make the type of the variable writable. */
2053 new_type = build_type_variant (type, 0, 0);
2054 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
/* NOTE(review): build_decl is passed TYPE, not the writable NEW_TYPE
   constructed above — confirm whether this is intentional or NEW_TYPE
   was meant to be used here.  */
2056 tmp_var = build_decl (code, create_tmp_var_name (prefix),
2057 type);
2058 /* Make the variable writable. */
2059 TREE_READONLY (tmp_var) = 0;
2061 /* It doesn't start out global. */
2062 MTAG_GLOBAL (tmp_var) = 0;
2063 TREE_STATIC (tmp_var) = 0;
2064 TREE_USED (tmp_var) = 1;
2066 return tmp_var;
2069 /* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
2070 is considered to represent all the pointers whose pointed-to types are
2071 in the same alias set class. Otherwise, the tag represents a single
2072 SSA_NAME pointer variable. */
2074 static tree
2075 create_memory_tag (tree type, bool is_type_tag)
2077 var_ann_t ann;
2078 tree tag = create_tag_raw (is_type_tag ? TYPE_MEMORY_TAG : NAME_MEMORY_TAG,
2079 type, (is_type_tag) ? "TMT" : "NMT");
2081 /* By default, memory tags are local variables. Alias analysis will
2082 determine whether they should be considered globals. */
2083 DECL_CONTEXT (tag) = current_function_decl;
2085 /* Memory tags are by definition addressable. */
2086 TREE_ADDRESSABLE (tag) = 1;
2088 ann = get_var_ann (tag);
2089 ann->type_mem_tag = NULL_TREE;
2091 /* Add the tag to the symbol table. */
2092 add_referenced_tmp_var (tag);
2094 return tag;
2098 /* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
2099 This is used if P_i has been found to point to a specific set of
2100 variables or to a non-aliased memory location like the address returned
2101 by malloc functions. */
2103 static tree
2104 get_nmt_for (tree ptr)
2106 struct ptr_info_def *pi = get_ptr_info (ptr);
2107 tree tag = pi->name_mem_tag;
2109 if (tag == NULL_TREE)
2110 tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
2111 return tag;
2115 /* Return the type memory tag associated to pointer PTR. A memory tag is an
2116 artificial variable that represents the memory location pointed-to by
2117 PTR. It is used to model the effects of pointer de-references on
2118 addressable variables.
2120 AI points to the data gathered during alias analysis. This function
2121 populates the array AI->POINTERS. */
2123 static tree
2124 get_tmt_for (tree ptr, struct alias_info *ai)
2126 size_t i;
2127 tree tag;
2128 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2129 HOST_WIDE_INT tag_set = get_alias_set (tag_type);
2131 /* To avoid creating unnecessary memory tags, only create one memory tag
2132 per alias set class. Note that it may be tempting to group
2133 memory tags based on conflicting alias sets instead of
2134 equivalence. That would be wrong because alias sets are not
2135 necessarily transitive (as demonstrated by the libstdc++ test
2136 23_containers/vector/cons/4.cc). Given three alias sets A, B, C
2137 such that conflicts (A, B) == true and conflicts (A, C) == true,
2138 it does not necessarily follow that conflicts (B, C) == true. */
2139 for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
2141 struct alias_map_d *curr = ai->pointers[i];
2142 tree curr_tag = var_ann (curr->var)->type_mem_tag;
2143 if (tag_set == curr->set)
2145 tag = curr_tag;
2146 break;
2150 /* If VAR cannot alias with any of the existing memory tags, create a new
2151 tag for PTR and add it to the POINTERS array. */
2152 if (tag == NULL_TREE)
2154 struct alias_map_d *alias_map;
2156 /* If PTR did not have a type tag already, create a new TMT.*
2157 artificial variable representing the memory location
2158 pointed-to by PTR. */
2159 if (var_ann (ptr)->type_mem_tag == NULL_TREE)
2160 tag = create_memory_tag (tag_type, true);
2161 else
2162 tag = var_ann (ptr)->type_mem_tag;
2164 /* Add PTR to the POINTERS array. Note that we are not interested in
2165 PTR's alias set. Instead, we cache the alias set for the memory that
2166 PTR points to. */
2167 alias_map = XCNEW (struct alias_map_d);
2168 alias_map->var = ptr;
2169 alias_map->set = tag_set;
2170 ai->pointers[ai->num_pointers++] = alias_map;
2173 /* If the pointed-to type is volatile, so is the tag. */
2174 TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);
2176 /* Make sure that the type tag has the same alias set as the
2177 pointed-to type. */
2178 gcc_assert (tag_set == get_alias_set (tag));
2180 return tag;
2184 /* Create GLOBAL_VAR, an artificial global variable to act as a
2185 representative of all the variables that may be clobbered by function
2186 calls. */
2188 static void
2189 create_global_var (void)
2191 global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
2192 void_type_node);
2193 DECL_ARTIFICIAL (global_var) = 1;
2194 TREE_READONLY (global_var) = 0;
2195 DECL_EXTERNAL (global_var) = 1;
2196 TREE_STATIC (global_var) = 1;
2197 TREE_USED (global_var) = 1;
2198 DECL_CONTEXT (global_var) = NULL_TREE;
2199 TREE_THIS_VOLATILE (global_var) = 0;
2200 TREE_ADDRESSABLE (global_var) = 0;
2202 create_var_ann (global_var);
2203 mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
2204 add_referenced_tmp_var (global_var);
2205 mark_sym_for_renaming (global_var);
2209 /* Dump alias statistics on FILE. */
2211 static void
2212 dump_alias_stats (FILE *file)
2214 const char *funcname
2215 = lang_hooks.decl_printable_name (current_function_decl, 2);
2216 fprintf (file, "\nAlias statistics for %s\n\n", funcname);
2217 fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
2218 fprintf (file, "Total alias mayalias results:\t%u\n",
2219 alias_stats.alias_mayalias);
2220 fprintf (file, "Total alias noalias results:\t%u\n",
2221 alias_stats.alias_noalias);
2222 fprintf (file, "Total simple queries:\t%u\n",
2223 alias_stats.simple_queries);
2224 fprintf (file, "Total simple resolved:\t%u\n",
2225 alias_stats.simple_resolved);
2226 fprintf (file, "Total TBAA queries:\t%u\n",
2227 alias_stats.tbaa_queries);
2228 fprintf (file, "Total TBAA resolved:\t%u\n",
2229 alias_stats.tbaa_resolved);
2230 fprintf (file, "Total non-addressable structure type queries:\t%u\n",
2231 alias_stats.structnoaddress_queries);
2232 fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
2233 alias_stats.structnoaddress_resolved);
2237 /* Dump alias information on FILE. */
2239 void
2240 dump_alias_info (FILE *file)
2242 size_t i;
2243 const char *funcname
2244 = lang_hooks.decl_printable_name (current_function_decl, 2);
2245 referenced_var_iterator rvi;
2246 tree var;
2248 fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
2250 fprintf (file, "Aliased symbols\n\n");
2252 FOR_EACH_REFERENCED_VAR (var, rvi)
2254 if (may_be_aliased (var))
2255 dump_variable (file, var);
2258 fprintf (file, "\nDereferenced pointers\n\n");
2260 FOR_EACH_REFERENCED_VAR (var, rvi)
2262 var_ann_t ann = var_ann (var);
2263 if (ann->type_mem_tag)
2264 dump_variable (file, var);
2267 fprintf (file, "\nType memory tags\n\n");
2269 FOR_EACH_REFERENCED_VAR (var, rvi)
2271 if (TREE_CODE (var) == TYPE_MEMORY_TAG)
2272 dump_variable (file, var);
2275 fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
2277 fprintf (file, "SSA_NAME pointers\n\n");
2278 for (i = 1; i < num_ssa_names; i++)
2280 tree ptr = ssa_name (i);
2281 struct ptr_info_def *pi;
2283 if (ptr == NULL_TREE)
2284 continue;
2286 pi = SSA_NAME_PTR_INFO (ptr);
2287 if (!SSA_NAME_IN_FREE_LIST (ptr)
2288 && pi
2289 && pi->name_mem_tag)
2290 dump_points_to_info_for (file, ptr);
2293 fprintf (file, "\nName memory tags\n\n");
2295 FOR_EACH_REFERENCED_VAR (var, rvi)
2297 if (TREE_CODE (var) == NAME_MEMORY_TAG)
2298 dump_variable (file, var);
2301 fprintf (file, "\n");
/* Dump alias information on stderr.  */

void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
2314 /* Return the alias information associated with pointer T. It creates a
2315 new instance if none existed. */
2317 struct ptr_info_def *
2318 get_ptr_info (tree t)
2320 struct ptr_info_def *pi;
2322 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
2324 pi = SSA_NAME_PTR_INFO (t);
2325 if (pi == NULL)
2327 pi = GGC_NEW (struct ptr_info_def);
2328 memset ((void *)pi, 0, sizeof (*pi));
2329 SSA_NAME_PTR_INFO (t) = pi;
2332 return pi;
2336 /* Dump points-to information for SSA_NAME PTR into FILE. */
2338 void
2339 dump_points_to_info_for (FILE *file, tree ptr)
2341 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2343 print_generic_expr (file, ptr, dump_flags);
2345 if (pi)
2347 if (pi->name_mem_tag)
2349 fprintf (file, ", name memory tag: ");
2350 print_generic_expr (file, pi->name_mem_tag, dump_flags);
2353 if (pi->is_dereferenced)
2354 fprintf (file, ", is dereferenced");
2356 if (pi->value_escapes_p)
2357 fprintf (file, ", its value escapes");
2359 if (pi->pt_anything)
2360 fprintf (file, ", points-to anything");
2362 if (pi->pt_null)
2363 fprintf (file, ", points-to NULL");
2365 if (pi->pt_vars)
2367 unsigned ix;
2368 bitmap_iterator bi;
2370 fprintf (file, ", points-to vars: { ");
2371 EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, ix, bi)
2373 print_generic_expr (file, referenced_var (ix), dump_flags);
2374 fprintf (file, " ");
2376 fprintf (file, "}");
2380 fprintf (file, "\n");
2384 /* Dump points-to information for VAR into stderr. */
2386 void
2387 debug_points_to_info_for (tree var)
2389 dump_points_to_info_for (stderr, var);
2393 /* Dump points-to information into FILE. NOTE: This function is slow, as
2394 it needs to traverse the whole CFG looking for pointer SSA_NAMEs. */
2396 void
2397 dump_points_to_info (FILE *file)
2399 basic_block bb;
2400 block_stmt_iterator si;
2401 ssa_op_iter iter;
2402 const char *fname =
2403 lang_hooks.decl_printable_name (current_function_decl, 2);
2404 referenced_var_iterator rvi;
2405 tree var;
2407 fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
2409 /* First dump points-to information for the default definitions of
2410 pointer variables. This is necessary because default definitions are
2411 not part of the code. */
2412 FOR_EACH_REFERENCED_VAR (var, rvi)
2414 if (POINTER_TYPE_P (TREE_TYPE (var)))
2416 tree def = default_def (var);
2417 if (def)
2418 dump_points_to_info_for (file, def);
2422 /* Dump points-to information for every pointer defined in the program. */
2423 FOR_EACH_BB (bb)
2425 tree phi;
2427 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2429 tree ptr = PHI_RESULT (phi);
2430 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
2431 dump_points_to_info_for (file, ptr);
2434 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
2436 tree stmt = bsi_stmt (si);
2437 tree def;
2438 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
2439 if (POINTER_TYPE_P (TREE_TYPE (def)))
2440 dump_points_to_info_for (file, def);
2444 fprintf (file, "\n");
/* Dump points-to information for the current function into stderr.  */

void
debug_points_to_info (void)
{
  dump_points_to_info (stderr);
}
2456 /* Dump to FILE the list of variables that may be aliasing VAR. */
2458 void
2459 dump_may_aliases_for (FILE *file, tree var)
2461 VEC(tree, gc) *aliases;
2463 if (TREE_CODE (var) == SSA_NAME)
2464 var = SSA_NAME_VAR (var);
2466 aliases = var_ann (var)->may_aliases;
2467 if (aliases)
2469 size_t i;
2470 tree al;
2471 fprintf (file, "{ ");
2472 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2474 print_generic_expr (file, al, dump_flags);
2475 fprintf (file, " ");
2477 fprintf (file, "}");
2482 /* Dump to stderr the list of variables that may be aliasing VAR. */
2484 void
2485 debug_may_aliases_for (tree var)
2487 dump_may_aliases_for (stderr, var);
2490 /* Return true if VAR may be aliased. */
2492 bool
2493 may_be_aliased (tree var)
2495 /* Obviously. */
2496 if (TREE_ADDRESSABLE (var))
2497 return true;
2499 /* Globally visible variables can have their addresses taken by other
2500 translation units. */
2502 if (MTAG_P (var)
2503 && (MTAG_GLOBAL (var) || TREE_PUBLIC (var)))
2504 return true;
2505 else if (!MTAG_P (var)
2506 && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
2507 return true;
2509 /* Automatic variables can't have their addresses escape any other way.
2510 This must be after the check for global variables, as extern declarations
2511 do not have TREE_STATIC set. */
2512 if (!TREE_STATIC (var))
2513 return false;
2515 /* If we're in unit-at-a-time mode, then we must have seen all occurrences
2516 of address-of operators, and so we can trust TREE_ADDRESSABLE. Otherwise
2517 we can only be sure the variable isn't addressable if it's local to the
2518 current function. */
2519 if (flag_unit_at_a_time)
2520 return false;
2521 if (decl_function_context (var) == current_function_decl)
2522 return false;
2524 return true;
2528 /* Given two symbols return TRUE if one is in the alias set of the other. */
2529 bool
2530 is_aliased_with (tree tag, tree sym)
2532 size_t i;
2533 VEC(tree,gc) *aliases;
2534 tree al;
2536 if (var_ann (sym)->is_aliased)
2538 aliases = var_ann (tag)->may_aliases;
2540 if (aliases == NULL)
2541 return false;
2543 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2544 if (al == sym)
2545 return true;
2547 else
2549 aliases = var_ann (sym)->may_aliases;
2551 if (aliases == NULL)
2552 return false;
2554 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2555 if (al == tag)
2556 return true;
2559 return false;
2563 /* Add VAR to the list of may-aliases of PTR's type tag.  If PTR
2564 doesn't already have a type tag, create one.  */
2566 void
2567 add_type_alias (tree ptr, tree var)
2569 VEC(tree, gc) *aliases;
2570 tree tag, al;
2571 var_ann_t ann = var_ann (ptr);
2572 subvar_t svars;
2573 VEC (tree, heap) *varvec = NULL;
2574 unsigned i;
/* If PTR has no type tag yet, try to share the tag of another pointer
   whose pointed-to type is in the same alias set before creating one.  */
2576 if (ann->type_mem_tag == NULL_TREE)
2578 tree q = NULL_TREE;
2579 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2580 HOST_WIDE_INT tag_set = get_alias_set (tag_type);
2581 safe_referenced_var_iterator rvi;
2583 /* PTR doesn't have a type tag, create a new one and add VAR to
2584 the new tag's alias set.
2586 FIXME, This is slower than necessary.  We need to determine
2587 whether there is another pointer Q with the same alias set as
2588 PTR.  This could be sped up by having type tags associated
2589 with types.  */
2590 FOR_EACH_REFERENCED_VAR_SAFE (q, varvec, rvi)
2592 if (POINTER_TYPE_P (TREE_TYPE (q))
2593 && tag_set == get_alias_set (TREE_TYPE (TREE_TYPE (q))))
2595 /* Found another pointer Q with the same alias set as
2596 the PTR's pointed-to type.  If Q has a type tag, use
2597 it.  Otherwise, create a new memory tag for PTR.  */
2598 var_ann_t ann1 = var_ann (q);
2599 if (ann1->type_mem_tag)
2600 ann->type_mem_tag = ann1->type_mem_tag;
2601 else
2602 ann->type_mem_tag = create_memory_tag (tag_type, true);
2603 goto found_tag;
2607 /* Couldn't find any other pointer with a type tag we could use.
2608 Create a new memory tag for PTR.  */
2609 ann->type_mem_tag = create_memory_tag (tag_type, true);
/* Reached via the goto above when a shared tag was found, and by
   fall-through when a fresh tag was just created (or already existed).  */
2612 found_tag:
2613 /* If VAR is not already PTR's type tag, add it to the may-alias set
2614 for PTR's type tag.  */
2615 gcc_assert (!MTAG_P (var));
2616 tag = ann->type_mem_tag;
2618 /* If VAR has subvars, add the subvars to the tag instead of the
2619 actual var.  */
2620 if (var_can_have_subvars (var)
2621 && (svars = get_subvars_for_var (var)))
2623 subvar_t sv;
2624 for (sv = svars; sv; sv = sv->next)
2625 add_may_alias (tag, sv->var);
2627 else
2628 add_may_alias (tag, var);
2630 /* TAG and its set of aliases need to be marked for renaming.  */
2631 mark_sym_for_renaming (tag);
2632 if ((aliases = var_ann (tag)->may_aliases) != NULL)
2634 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2635 mark_sym_for_renaming (al);
2638 /* If we had grouped aliases, VAR may have aliases of its own.  Mark
2639 them for renaming as well.  Other statements referencing the
2640 aliases of VAR will need to be updated.  */
2641 if ((aliases = var_ann (var)->may_aliases) != NULL)
2643 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2644 mark_sym_for_renaming (al);
/* VARVEC was populated by FOR_EACH_REFERENCED_VAR_SAFE above; release it.  */
2646 VEC_free (tree, heap, varvec);
2650 /* Create a new type tag for PTR. Construct the may-alias list of this type
2651 tag so that it has the aliasing of VAR.
2653 Note, the set of aliases represented by the new type tag are not marked
2654 for renaming. */
2656 void
2657 new_type_alias (tree ptr, tree var)
2659 var_ann_t p_ann = var_ann (ptr);
2660 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
2661 var_ann_t v_ann = var_ann (var);
2662 tree tag;
2663 subvar_t svars;
2665 gcc_assert (p_ann->type_mem_tag == NULL_TREE);
2666 gcc_assert (!MTAG_P (var));
2668 /* Add VAR to the may-alias set of PTR's new type tag. If VAR has
2669 subvars, add the subvars to the tag instead of the actual var. */
2670 if (var_can_have_subvars (var)
2671 && (svars = get_subvars_for_var (var)))
2673 subvar_t sv;
2675 tag = create_memory_tag (tag_type, true);
2676 p_ann->type_mem_tag = tag;
2678 for (sv = svars; sv; sv = sv->next)
2679 add_may_alias (tag, sv->var);
2681 else
2683 /* The following is based on code in add_stmt_operand to ensure that the
2684 same defs/uses/vdefs/vuses will be found after replacing a reference
2685 to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
2686 is the address of var. */
2687 VEC(tree, gc) *aliases = v_ann->may_aliases;
2689 if ((aliases != NULL)
2690 && (VEC_length (tree, aliases) == 1))
2692 tree ali = VEC_index (tree, aliases, 0);
2694 if (TREE_CODE (ali) == TYPE_MEMORY_TAG)
2696 p_ann->type_mem_tag = ali;
2697 return;
2701 tag = create_memory_tag (tag_type, true);
2702 p_ann->type_mem_tag = tag;
2704 if (aliases == NULL)
2705 add_may_alias (tag, var);
2706 else
2708 unsigned i;
2709 tree al;
2711 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
2712 add_may_alias (tag, al);
2719 /* This represents the used range of a variable.  */
2721 typedef struct used_part
/* Lowest bit offset of any recorded use.  Starts at INT_MAX (see
   get_or_create_used_part_for) and is lowered as uses are found.  */
2723 HOST_WIDE_INT minused;
/* One past the highest bit offset used (find_used_portions stores
   bitpos + bitmaxsize here).  */
2724 HOST_WIDE_INT maxused;
2725 /* True if we have an explicit use/def of some portion of this variable,
2726 even if it is all of it. i.e. a.b = 5 or temp = a.b.  */
2727 bool explicit_uses;
2728 /* True if we have an implicit use/def of some portion of this
2729 variable.  Implicit uses occur when we can't tell what part we
2730 are referencing, and have to make conservative assumptions.  */
2731 bool implicit_uses;
2732 /* True if the structure is only written to or taken its address.  */
2733 bool write_only;
2734 } *used_part_t;
/* A hash table of used_part structures, keyed by variable uid (created
   with htab_create in create_structure_vars; entries are used_part_map).  */
2738 static htab_t used_portions;
/* Hash table entry: pairs a variable uid with its used_part record.  */
2740 struct used_part_map
2742 unsigned int uid;
2743 used_part_t to;
2746 /* Return true if the uid in the two used part maps are equal. */
2748 static int
2749 used_part_map_eq (const void *va, const void *vb)
2751 const struct used_part_map *a = (const struct used_part_map *) va;
2752 const struct used_part_map *b = (const struct used_part_map *) vb;
2753 return (a->uid == b->uid);
2756 /* Hash a from uid in a used_part_map. */
2758 static unsigned int
2759 used_part_map_hash (const void *item)
2761 return ((const struct used_part_map *)item)->uid;
2764 /* Free a used part map element. */
2766 static void
2767 free_used_part_map (void *item)
2769 free (((struct used_part_map *)item)->to);
2770 free (item);
2773 /* Lookup a used_part structure for a UID. */
2775 static used_part_t
2776 up_lookup (unsigned int uid)
2778 struct used_part_map *h, in;
2779 in.uid = uid;
2780 h = (struct used_part_map *) htab_find_with_hash (used_portions, &in, uid);
2781 if (!h)
2782 return NULL;
2783 return h->to;
2786 /* Insert the pair UID, TO into the used part hashtable. */
2788 static void
2789 up_insert (unsigned int uid, used_part_t to)
2791 struct used_part_map *h;
2792 void **loc;
2794 h = XNEW (struct used_part_map);
2795 h->uid = uid;
2796 h->to = to;
2797 loc = htab_find_slot_with_hash (used_portions, h,
2798 uid, INSERT);
2799 if (*loc != NULL)
2800 free (*loc);
2801 *(struct used_part_map **) loc = h;
2805 /* Given a variable uid, UID, get or create the entry in the used portions
2806 table for the variable. */
2808 static used_part_t
2809 get_or_create_used_part_for (size_t uid)
2811 used_part_t up;
2812 if ((up = up_lookup (uid)) == NULL)
2814 up = XCNEW (struct used_part);
2815 up->minused = INT_MAX;
2816 up->maxused = 0;
2817 up->explicit_uses = false;
2818 up->implicit_uses = false;
2819 up->write_only = true;
2822 return up;
2826 /* Create and return a structure sub-variable for field type FIELD at
2827 offset OFFSET, with size SIZE, of variable VAR. */
2829 static tree
2830 create_sft (tree var, tree field, unsigned HOST_WIDE_INT offset,
2831 unsigned HOST_WIDE_INT size)
2833 var_ann_t ann;
2834 tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");
2836 /* We need to copy the various flags from VAR to SUBVAR, so that
2837 they are is_global_var iff the original variable was. */
2838 DECL_CONTEXT (subvar) = DECL_CONTEXT (var);
2839 MTAG_GLOBAL (subvar) = DECL_EXTERNAL (var);
2840 TREE_PUBLIC (subvar) = TREE_PUBLIC (var);
2841 TREE_STATIC (subvar) = TREE_STATIC (var);
2842 TREE_READONLY (subvar) = TREE_READONLY (var);
2844 /* Add the new variable to REFERENCED_VARS. */
2845 ann = get_var_ann (subvar);
2846 ann->type_mem_tag = NULL;
2847 add_referenced_tmp_var (subvar);
2848 SFT_PARENT_VAR (subvar) = var;
2849 SFT_OFFSET (subvar) = offset;
2850 SFT_SIZE (subvar) = size;
2851 return subvar;
2855 /* Given an aggregate VAR, create the subvariables that represent its
2856 fields.  */
2858 static void
2859 create_overlap_variables_for (tree var)
2861 VEC(fieldoff_s,heap) *fieldstack = NULL;
2862 used_part_t up;
2863 size_t uid = DECL_UID (var);
/* Subvars are only worthwhile for variables with a recorded read; a
   write-only (or unused) aggregate gets none.  */
2865 up = up_lookup (uid);
2866 if (!up
2867 || up->write_only)
2868 return;
2870 push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL);
2871 if (VEC_length (fieldoff_s, fieldstack) != 0)
2873 subvar_t *subvars;
2874 fieldoff_s *fo;
2875 bool notokay = false;
2876 int fieldcount = 0;
2877 int i;
2878 HOST_WIDE_INT lastfooffset = -1;
2879 HOST_WIDE_INT lastfosize = -1;
2880 tree lastfotype = NULL_TREE;
2882 /* Not all fields have DECL_SIZE set, and those that don't, we don't
2883 know their size, and thus, can't handle.
2884 The same is true of fields with DECL_SIZE that is not an integer
2885 constant (such as variable sized fields).
2886 Fields with offsets which are not constant will have an offset < 0
2887 We *could* handle fields that are constant sized arrays, but
2888 currently don't.  Doing so would require some extra changes to
2889 tree-ssa-operands.c.  */
2891 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
2893 if (!fo->size
2894 || TREE_CODE (fo->size) != INTEGER_CST
2895 || fo->offset < 0)
2897 notokay = true;
2898 break;
2900 fieldcount++;
2903 /* The current heuristic we use is as follows:
2904 If the variable has no used portions in this function, no
2905 structure vars are created for it.
2906 Otherwise,
2907 If the variable has less than SALIAS_MAX_IMPLICIT_FIELDS,
2908 we always create structure vars for them.
2909 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
2910 some explicit uses, we create structure vars for them.
2911 If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
2912 no explicit uses, we do not create structure vars for them.
2915 if (fieldcount >= SALIAS_MAX_IMPLICIT_FIELDS
2916 && !up->explicit_uses)
2918 if (dump_file && (dump_flags & TDF_DETAILS))
2920 fprintf (dump_file, "Variable ");
2921 print_generic_expr (dump_file, var, 0);
2922 fprintf (dump_file, " has no explicit uses in this function, and is > SALIAS_MAX_IMPLICIT_FIELDS, so skipping\n");
2924 notokay = true;
2927 /* Bail out, if we can't create overlap variables.  */
2928 if (notokay)
2930 VEC_free (fieldoff_s, heap, fieldstack);
2931 return;
2934 /* Otherwise, create the variables.  */
2935 subvars = lookup_subvars_for_var (var);
2937 sort_fieldstack (fieldstack);
/* Walk the just-sorted fields from the last element downward,
   prepending each new subvar to the per-variable list.  */
2939 for (i = VEC_length (fieldoff_s, fieldstack);
2940 VEC_iterate (fieldoff_s, fieldstack, --i, fo);)
2942 subvar_t sv;
2943 HOST_WIDE_INT fosize;
2944 tree currfotype;
2946 fosize = TREE_INT_CST_LOW (fo->size);
2947 currfotype = fo->type;
2949 /* If this field isn't in the used portion,
2950 or it has the exact same offset and size as the last
2951 field, skip it.  */
2953 if (((fo->offset <= up->minused
2954 && fo->offset + fosize <= up->minused)
2955 || fo->offset >= up->maxused)
2956 || (fo->offset == lastfooffset
2957 && fosize == lastfosize
2958 && currfotype == lastfotype))
2959 continue;
2960 sv = GGC_NEW (struct subvar);
2961 sv->next = *subvars;
2962 sv->var = create_sft (var, fo->type, fo->offset, fosize);
2964 if (dump_file)
2966 fprintf (dump_file, "structure field tag %s created for var %s",
2967 get_name (sv->var), get_name (var));
2968 fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
2969 SFT_OFFSET (sv->var));
2970 fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
2971 SFT_SIZE (sv->var));
2972 fprintf (dump_file, "\n");
2975 lastfotype = currfotype;
2976 lastfooffset = fo->offset;
2977 lastfosize = fosize;
2978 *subvars = sv;
2981 /* Once we have created subvars, the original is no longer call
2982 clobbered on its own.  Its call clobbered status depends
2983 completely on the call clobbered status of the subvars.
2985 add_referenced_var in the above loop will take care of
2986 marking subvars of global variables as call clobbered for us
2987 to start, since they are global as well.  */
2988 clear_call_clobbered (var);
2991 VEC_free (fieldoff_s, heap, fieldstack);
2995 /* Find the conservative answer to the question of what portions of what
2996 structures are used by this statement.  We assume that if we have a
2997 component ref with a known size + offset, that we only need that part
2998 of the structure.  For unknown cases, or cases where we do something
2999 to the whole structure, we assume we need to create fields for the
3000 entire structure.  */
/* walk_tree callback.  LHS_P is non-NULL when *TP was reached from the
   LHS of a MODIFY_EXPR (see the manual recursion below): only uses with
   LHS_P == NULL clear the write_only flag.  */
3002 static tree
3003 find_used_portions (tree *tp, int *walk_subtrees, void *lhs_p)
3005 switch (TREE_CODE (*tp))
3007 case MODIFY_EXPR:
3008 /* Recurse manually here to track whether the use is in the
3009 LHS of an assignment.  */
3010 find_used_portions (&TREE_OPERAND (*tp, 0), walk_subtrees, tp);
3011 return find_used_portions (&TREE_OPERAND (*tp, 1), walk_subtrees, NULL);
3012 case REALPART_EXPR:
3013 case IMAGPART_EXPR:
3014 case COMPONENT_REF:
3015 case ARRAY_REF:
3017 HOST_WIDE_INT bitsize;
3018 HOST_WIDE_INT bitmaxsize;
3019 HOST_WIDE_INT bitpos;
3020 tree ref;
/* A reference with a known base, offset and extent widens the used
   range [minused, maxused) of the base variable.  */
3021 ref = get_ref_base_and_extent (*tp, &bitpos, &bitsize, &bitmaxsize);
3022 if (DECL_P (ref)
3023 && var_can_have_subvars (ref)
3024 && bitmaxsize != -1)
3026 size_t uid = DECL_UID (ref);
3027 used_part_t up;
3029 up = get_or_create_used_part_for (uid);
3031 if (bitpos <= up->minused)
3032 up->minused = bitpos;
3033 if ((bitpos + bitmaxsize >= up->maxused))
3034 up->maxused = bitpos + bitmaxsize;
/* An exact extent is an explicit use of that part; a larger
   conservative extent only counts as an implicit use.  */
3036 if (bitsize == bitmaxsize)
3037 up->explicit_uses = true;
3038 else
3039 up->implicit_uses = true;
3040 if (!lhs_p)
3041 up->write_only = false;
3042 up_insert (uid, up);
3044 *walk_subtrees = 0;
3045 return NULL_TREE;
3048 break;
3049 /* This is here to make sure we mark the entire base variable as used
3050 when you take its address. Because our used portion analysis is
3051 simple, we aren't looking at casts or pointer arithmetic to see what
3052 happens when you take the address.  */
/* NOTE(review): unlike the reference case above, taking the address does
   not clear write_only here -- confirm that is intended.  */
3053 case ADDR_EXPR:
3055 tree var = get_base_address (TREE_OPERAND (*tp, 0));
3057 if (var
3058 && DECL_P (var)
3059 && DECL_SIZE (var)
3060 && var_can_have_subvars (var)
3061 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3063 used_part_t up;
3064 size_t uid = DECL_UID (var);
3066 up = get_or_create_used_part_for (uid);
/* The whole variable is conservatively considered used.  */
3068 up->minused = 0;
3069 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3070 up->implicit_uses = true;
3072 up_insert (uid, up);
3073 *walk_subtrees = 0;
3074 return NULL_TREE;
3077 break;
3078 case VAR_DECL:
3079 case PARM_DECL:
3080 case RESULT_DECL:
/* A bare reference to the whole decl: mark the entire variable as
   implicitly used.  */
3082 tree var = *tp;
3083 if (DECL_SIZE (var)
3084 && var_can_have_subvars (var)
3085 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3087 used_part_t up;
3088 size_t uid = DECL_UID (var);
3090 up = get_or_create_used_part_for (uid);
3092 up->minused = 0;
3093 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
3094 up->implicit_uses = true;
3096 up_insert (uid, up);
3097 *walk_subtrees = 0;
3098 return NULL_TREE;
3101 break;
3103 default:
3104 break;
3107 return NULL_TREE;
3110 /* Create structure field variables for structures used in this function. */
3112 static void
3113 create_structure_vars (void)
3115 basic_block bb;
3116 safe_referenced_var_iterator rvi;
3117 VEC (tree, heap) *varvec = NULL;
3118 tree var;
3120 used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
3121 free_used_part_map);
3123 FOR_EACH_BB (bb)
3125 block_stmt_iterator bsi;
3126 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3128 walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
3129 find_used_portions,
3130 NULL);
3133 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, rvi)
3135 /* The C++ FE creates vars without DECL_SIZE set, for some reason. */
3136 if (var
3137 && DECL_SIZE (var)
3138 && var_can_have_subvars (var)
3139 && !MTAG_P (var)
3140 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
3141 create_overlap_variables_for (var);
3143 htab_delete (used_portions);
3144 VEC_free (tree, heap, varvec);
3148 static bool
3149 gate_structure_vars (void)
3151 return flag_tree_salias != 0;
3154 struct tree_opt_pass pass_create_structure_vars =
3156 "salias", /* name */
3157 gate_structure_vars, /* gate */
3158 create_structure_vars, /* execute */
3159 NULL, /* sub */
3160 NULL, /* next */
3161 0, /* static_pass_number */
3162 0, /* tv_id */
3163 PROP_cfg, /* properties_required */
3164 0, /* properties_provided */
3165 0, /* properties_destroyed */
3166 0, /* todo_flags_start */
3167 TODO_dump_func, /* todo_flags_finish */
3168 0 /* letter */