gcc/tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "flags.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "output.h"
34 #include "errors.h"
35 #include "diagnostic.h"
36 #include "tree.h"
37 #include "c-common.h"
38 #include "tree-flow.h"
39 #include "tree-inline.h"
40 #include "varray.h"
41 #include "c-tree.h"
42 #include "gimple.h"
43 #include "hashtab.h"
44 #include "function.h"
45 #include "cgraph.h"
46 #include "tree-pass.h"
47 #include "timevar.h"
48 #include "alloc-pool.h"
49 #include "splay-tree.h"
50 #include "params.h"
51 #include "tree-ssa-structalias.h"
52 #include "cgraph.h"
53 #include "alias.h"
54 #include "pointer-set.h"
56 /* The idea behind this analyzer is to generate set constraints from the
57 program, then solve the resulting constraints in order to generate the
58 points-to sets.
60 Set constraints are a way of modeling program analysis problems that
61 involve sets. They consist of an inclusion constraint language,
62 describing the variables (each variable is a set) and operations that
63 are involved on the variables, and a set of rules that derive facts
64 from these operations. To solve a system of set constraints, you derive
65 all possible facts under the rules, which gives you the correct sets
66 as a consequence.
68 See "Efficient Field-sensitive pointer analysis for C" by David
69 J. Pearce, Paul H. J. Kelly and Chris Hankin, at
70 http://citeseer.ist.psu.edu/pearce04efficient.html
72 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
73 of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
74 http://citeseer.ist.psu.edu/heintze01ultrafast.html
76 There are three types of real constraint expressions, DEREF,
77 ADDRESSOF, and SCALAR. Each constraint expression consists
78 of a constraint type, a variable, and an offset.
80 SCALAR is a constraint expression type used to represent x, whether
81 it appears on the LHS or the RHS of a statement.
82 DEREF is a constraint expression type used to represent *x, whether
83 it appears on the LHS or the RHS of a statement.
84 ADDRESSOF is a constraint expression used to represent &x, whether
85 it appears on the LHS or the RHS of a statement.
87 Each pointer variable in the program is assigned an integer id, and
88 each field of a structure variable is assigned an integer id as well.
90 Structure variables are linked to their list of fields through a "next
91 field" in each variable that points to the next field in offset
92 order.
93 Each variable for a structure field has
95 1. "size", that tells the size in bits of that field.
96 2. "fullsize", that tells the size in bits of the entire structure.
97 3. "offset", that tells the offset in bits from the beginning of the
98 structure to this field.
100 Thus,
101 struct f
102 {
103 int a;
104 int b;
105 } foo;
106 int *bar;
108 looks like
110 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
111 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
112 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
115 In order to solve the system of set constraints, the following is
116 done:
118 1. Each constraint variable x has a solution set associated with it,
119 Sol(x).
121 2. Constraints are separated into direct, copy, and complex.
122 Direct constraints are ADDRESSOF constraints that require no extra
123 processing, such as P = &Q
124 Copy constraints are those of the form P = Q.
125 Complex constraints are all the constraints involving dereferences
126 and offsets (including offsetted copies).
128 3. All direct constraints of the form P = &Q are processed, such
129 that Q is added to Sol(P)
131 4. All complex constraints for a given constraint variable are stored in a
132 linked list attached to that variable's node.
134 5. A directed graph is built out of the copy constraints. Each
135 constraint variable is a node in the graph, and an edge from
136 Q to P is added for each copy constraint of the form P = Q
138 6. The graph is then walked, and solution sets are
139 propagated along the copy edges, such that an edge from Q to P
140 causes Sol(P) <- Sol(P) union Sol(Q).
142 7. As we visit each node, all complex constraints associated with
143 that node are processed by adding appropriate copy edges to the graph, or the
144 appropriate variables to the solution set.
146 8. The process of walking the graph is iterated until no solution
147 sets change.
149 Prior to walking the graph in steps 6 and 7, we perform static
150 cycle elimination on the constraint graph, as well
151 as off-line variable substitution.
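   As a small worked example (the names here are purely illustrative), a
   fragment such as

     p = &a;
     q = p;
     r = *q;

   produces the constraints p = &a (direct), q = p (copy) and r = *q
   (complex).  Step 3 puts a into Sol(p), step 6 propagates it along the
   copy edge from p to q, and step 7 then processes r = *q by adding a
   copy edge from a to r, so that Sol(r) ends up containing Sol(a).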
153 TODO: Adding offsets to pointer-to-structures can be handled (i.e. not punted
154 on and turned into anything), but isn't. You can just see what offset
155 inside the pointed-to struct it's going to access.
157 TODO: Constant bounded arrays can be handled as if they were structs of the
158 same number of elements.
160 TODO: Modeling heap and incoming pointers becomes much better if we
161 add fields to them as we discover them, which we could do.
163 TODO: We could handle unions, but to be honest, it's probably not
164 worth the pain or slowdown. */
166 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
167 htab_t heapvar_for_stmt;
169 static bool use_field_sensitive = true;
170 static int in_ipa_mode = 0;
172 /* Used for predecessor bitmaps. */
173 static bitmap_obstack predbitmap_obstack;
175 /* Used for points-to sets. */
176 static bitmap_obstack pta_obstack;
178 /* Used for oldsolution members of variables. */
179 static bitmap_obstack oldpta_obstack;
181 /* Used for per-solver-iteration bitmaps. */
182 static bitmap_obstack iteration_obstack;
184 static unsigned int create_variable_info_for (tree, const char *);
185 typedef struct constraint_graph *constraint_graph_t;
186 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
188 DEF_VEC_P(constraint_t);
189 DEF_VEC_ALLOC_P(constraint_t,heap);
191 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
192 if (a) \
193 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
195 static struct constraint_stats
197 unsigned int total_vars;
198 unsigned int nonpointer_vars;
199 unsigned int unified_vars_static;
200 unsigned int unified_vars_dynamic;
201 unsigned int iterations;
202 unsigned int num_edges;
203 unsigned int num_implicit_edges;
204 unsigned int points_to_sets_created;
205 } stats;
207 struct variable_info
209 /* ID of this variable */
210 unsigned int id;
212 /* True if this is a variable created by the constraint analysis, such as
213 heap variables and constraints we had to break up. */
214 unsigned int is_artificial_var:1;
216 /* True if this is a special variable whose solution set should not be
217 changed. */
218 unsigned int is_special_var:1;
220 /* True for variables whose size is not known or variable. */
221 unsigned int is_unknown_size_var:1;
223 /* True for (sub-)fields that represent a whole variable. */
224 unsigned int is_full_var : 1;
226 /* True if this is a heap variable. */
227 unsigned int is_heap_var:1;
229 /* True if we may not use TBAA to prune references to this
230 variable. This is used for C++ placement new. */
231 unsigned int no_tbaa_pruning : 1;
233 /* Variable id this was collapsed to due to type unsafety. Zero if
234 this variable was not collapsed. This should be unused completely
235 after build_succ_graph, or something is broken. */
236 unsigned int collapsed_to;
238 /* A link to the variable for the next field in this structure. */
239 struct variable_info *next;
241 /* Offset of this variable, in bits, from the base variable */
242 unsigned HOST_WIDE_INT offset;
244 /* Size of the variable, in bits. */
245 unsigned HOST_WIDE_INT size;
247 /* Full size of the base variable, in bits. */
248 unsigned HOST_WIDE_INT fullsize;
250 /* Name of this variable */
251 const char *name;
253 /* Tree that this variable is associated with. */
254 tree decl;
256 /* Points-to set for this variable. */
257 bitmap solution;
259 /* Old points-to set for this variable. */
260 bitmap oldsolution;
262 typedef struct variable_info *varinfo_t;
264 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
265 static varinfo_t lookup_vi_for_tree (tree);
267 /* Pool of variable info structures. */
268 static alloc_pool variable_info_pool;
270 DEF_VEC_P(varinfo_t);
272 DEF_VEC_ALLOC_P(varinfo_t, heap);
274 /* Table of variable info structures for constraint variables.
275 Indexed directly by variable info id. */
276 static VEC(varinfo_t,heap) *varmap;
278 /* Return the varmap element N */
280 static inline varinfo_t
281 get_varinfo (unsigned int n)
283 return VEC_index (varinfo_t, varmap, n);
286 /* Return the varmap element N, following the collapsed_to link. */
288 static inline varinfo_t
289 get_varinfo_fc (unsigned int n)
291 varinfo_t v = VEC_index (varinfo_t, varmap, n);
293 if (v->collapsed_to != 0)
294 return get_varinfo (v->collapsed_to);
295 return v;
298 /* Static IDs for the special variables. */
299 enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
300 escaped_id = 3, nonlocal_id = 4, callused_id = 5, integer_id = 6 };
302 /* Variable that represents the unknown pointer. */
303 static varinfo_t var_anything;
304 static tree anything_tree;
306 /* Variable that represents the NULL pointer. */
307 static varinfo_t var_nothing;
308 static tree nothing_tree;
310 /* Variable that represents read only memory. */
311 static varinfo_t var_readonly;
312 static tree readonly_tree;
314 /* Variable that represents escaped memory. */
315 static varinfo_t var_escaped;
316 static tree escaped_tree;
318 /* Variable that represents nonlocal memory. */
319 static varinfo_t var_nonlocal;
320 static tree nonlocal_tree;
322 /* Variable that represents call-used memory. */
323 static varinfo_t var_callused;
324 static tree callused_tree;
326 /* Variable that represents integers. This is used for when people do things
327 like &0->a.b. */
328 static varinfo_t var_integer;
329 static tree integer_tree;
331 /* Lookup a heap var for FROM, and return it if we find one. */
333 static tree
334 heapvar_lookup (tree from)
336 struct tree_map *h, in;
337 in.base.from = from;
339 h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
340 htab_hash_pointer (from));
341 if (h)
342 return h->to;
343 return NULL_TREE;
346 /* Insert a mapping FROM->TO in the heap var for statement
347 hashtable. */
349 static void
350 heapvar_insert (tree from, tree to)
352 struct tree_map *h;
353 void **loc;
355 h = GGC_NEW (struct tree_map);
356 h->hash = htab_hash_pointer (from);
357 h->base.from = from;
358 h->to = to;
359 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
360 *(struct tree_map **) loc = h;
363 /* Return a new variable info structure for the variable T, with id ID
364 and name NAME. */
366 static varinfo_t
367 new_var_info (tree t, unsigned int id, const char *name)
369 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
370 tree var;
372 ret->id = id;
373 ret->name = name;
374 ret->decl = t;
375 ret->is_artificial_var = false;
376 ret->is_heap_var = false;
377 ret->is_special_var = false;
378 ret->is_unknown_size_var = false;
379 ret->is_full_var = false;
380 var = t;
381 if (TREE_CODE (var) == SSA_NAME)
382 var = SSA_NAME_VAR (var);
383 ret->no_tbaa_pruning = (DECL_P (var)
384 && POINTER_TYPE_P (TREE_TYPE (var))
385 && DECL_NO_TBAA_P (var));
386 ret->solution = BITMAP_ALLOC (&pta_obstack);
387 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
388 ret->next = NULL;
389 ret->collapsed_to = 0;
390 return ret;
393 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
395 /* An expression that appears in a constraint. */
397 struct constraint_expr
399 /* Constraint type. */
400 constraint_expr_type type;
402 /* Variable we are referring to in the constraint. */
403 unsigned int var;
405 /* Offset, in bits, of this constraint from the beginning of
406 variables it ends up referring to.
408 In other words, in a deref constraint, we would deref, get the result set,
409 then add OFFSET to each member. */
410 unsigned HOST_WIDE_INT offset;
413 typedef struct constraint_expr ce_s;
414 DEF_VEC_O(ce_s);
415 DEF_VEC_ALLOC_O(ce_s, heap);
416 static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool);
417 static void get_constraint_for (tree, VEC(ce_s, heap) **);
418 static void do_deref (VEC (ce_s, heap) **);
420 /* Our set constraints are made up of two constraint expressions, one
421 LHS, and one RHS.
423 As described in the introduction, our set constraints each represent an
424 operation between set valued variables.
426 struct constraint
428 struct constraint_expr lhs;
429 struct constraint_expr rhs;
432 /* List of constraints that we use to build the constraint graph from. */
434 static VEC(constraint_t,heap) *constraints;
435 static alloc_pool constraint_pool;
438 DEF_VEC_I(int);
439 DEF_VEC_ALLOC_I(int, heap);
441 /* The constraint graph is represented as an array of bitmaps
442 containing successor nodes. */
444 struct constraint_graph
446 /* Size of this graph, which may be different than the number of
447 nodes in the variable map. */
448 unsigned int size;
450 /* Explicit successors of each node. */
451 bitmap *succs;
453 /* Implicit predecessors of each node (Used for variable
454 substitution). */
455 bitmap *implicit_preds;
457 /* Explicit predecessors of each node (Used for variable substitution). */
458 bitmap *preds;
460 /* Indirect cycle representatives, or -1 if the node has no indirect
461 cycles. */
462 int *indirect_cycles;
464 /* Representative node for a node. rep[a] == a unless the node has
465 been unified. */
466 unsigned int *rep;
468 /* Equivalence class representative for a label. This is used for
469 variable substitution. */
470 int *eq_rep;
472 /* Pointer equivalence label for a node. All nodes with the same
473 pointer equivalence label can be unified together at some point
474 (either during constraint optimization or after the constraint
475 graph is built). */
476 unsigned int *pe;
478 /* Pointer equivalence representative for a label. This is used to
479 handle nodes that are pointer equivalent but not location
480 equivalent. We can unite these once the addressof constraints
481 are transformed into initial points-to sets. */
482 int *pe_rep;
484 /* Pointer equivalence label for each node, used during variable
485 substitution. */
486 unsigned int *pointer_label;
488 /* Location equivalence label for each node, used during location
489 equivalence finding. */
490 unsigned int *loc_label;
492 /* Pointed-by set for each node, used during location equivalence
493 finding. This is pointed-by rather than pointed-to, because it
494 is constructed using the predecessor graph. */
495 bitmap *pointed_by;
497 /* Points to sets for pointer equivalence. This is *not* the actual
498 points-to sets for nodes. */
499 bitmap *points_to;
501 /* Bitmap of nodes where the bit is set if the node is a direct
502 node. Used for variable substitution. */
503 sbitmap direct_nodes;
505 /* Bitmap of nodes where the bit is set if the node is address
506 taken. Used for variable substitution. */
507 bitmap address_taken;
509 /* True if points_to bitmap for this node is stored in the hash
510 table. */
511 sbitmap pt_used;
513 /* Number of incoming edges remaining to be processed by pointer
514 equivalence.
515 Used for variable substitution. */
516 unsigned int *number_incoming;
519 /* Vector of complex constraints for each graph node. Complex
520 constraints are those involving dereferences or offsets that are
521 not 0. */
522 VEC(constraint_t,heap) **complex;
525 static constraint_graph_t graph;
527 /* During variable substitution and the offline version of indirect
528 cycle finding, we create nodes to represent dereferences and
529 address taken constraints. These represent where these start and
530 end. */
531 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
532 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
534 /* Return the representative node for NODE, if NODE has been unioned
535 with another NODE.
536 This function performs path compression along the way to finding
537 the representative. */
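/* For example, if the rep chain is 3 -> 2 -> 1, find (3) returns 1 and
   rewrites both rep[3] and rep[2] to 1, so later lookups resolve in a
   single step. */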
539 static unsigned int
540 find (unsigned int node)
542 gcc_assert (node < graph->size);
543 if (graph->rep[node] != node)
544 return graph->rep[node] = find (graph->rep[node]);
545 return node;
548 /* Union the FROM node into the TO node.
549 Note that at some point in the future, we may want to do
550 union-by-rank, in which case we are going to have to return the
551 node we unified to. */
553 static bool
554 unite (unsigned int to, unsigned int from)
556 gcc_assert (to < graph->size && from < graph->size);
557 if (to != from && graph->rep[from] != to)
559 graph->rep[from] = to;
560 return true;
562 return false;
565 /* Create a new constraint consisting of LHS and RHS expressions. */
567 static constraint_t
568 new_constraint (const struct constraint_expr lhs,
569 const struct constraint_expr rhs)
571 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
572 ret->lhs = lhs;
573 ret->rhs = rhs;
574 return ret;
577 /* Print out constraint C to FILE. */
579 void
580 dump_constraint (FILE *file, constraint_t c)
582 if (c->lhs.type == ADDRESSOF)
583 fprintf (file, "&");
584 else if (c->lhs.type == DEREF)
585 fprintf (file, "*");
586 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
587 if (c->lhs.offset != 0)
588 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
589 fprintf (file, " = ");
590 if (c->rhs.type == ADDRESSOF)
591 fprintf (file, "&");
592 else if (c->rhs.type == DEREF)
593 fprintf (file, "*");
594 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
595 if (c->rhs.offset != 0)
596 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
597 fprintf (file, "\n");
600 /* Print out constraint C to stderr. */
602 void
603 debug_constraint (constraint_t c)
605 dump_constraint (stderr, c);
608 /* Print out all constraints to FILE */
610 void
611 dump_constraints (FILE *file)
613 int i;
614 constraint_t c;
615 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
616 dump_constraint (file, c);
619 /* Print out all constraints to stderr. */
621 void
622 debug_constraints (void)
624 dump_constraints (stderr);
627 /* Print out to FILE the edge in the constraint graph that is created by
628 constraint c. The edge may have a label, depending on the type of
629 constraint that it represents. If complex1, e.g. a = *b, then the label
630 is "=*"; if complex2, e.g. *a = b, then the label is "*="; if
631 complex with an offset, e.g. a = b + 8, then the label is "+".
632 Otherwise the edge has no label. */
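/* For instance, the constraint a = *b (with illustrative variables a and b)
   is printed as  "b" -> "a"  [ label="=*" ] ;  in the dot output. */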
634 void
635 dump_constraint_edge (FILE *file, constraint_t c)
637 if (c->rhs.type != ADDRESSOF)
639 const char *src = get_varinfo_fc (c->rhs.var)->name;
640 const char *dst = get_varinfo_fc (c->lhs.var)->name;
641 fprintf (file, " \"%s\" -> \"%s\" ", src, dst);
642 /* Due to preprocessing of constraints, instructions like *a = *b are
643 illegal; thus, we do not have to handle such cases. */
644 if (c->lhs.type == DEREF)
645 fprintf (file, " [ label=\"*=\" ] ;\n");
646 else if (c->rhs.type == DEREF)
647 fprintf (file, " [ label=\"=*\" ] ;\n");
648 else
650 /* We must check the case where the constraint is an offset.
651 In this case, it is treated as a complex constraint. */
652 if (c->rhs.offset != c->lhs.offset)
653 fprintf (file, " [ label=\"+\" ] ;\n");
654 else
655 fprintf (file, " ;\n");
660 /* Print the constraint graph in dot format. */
662 void
663 dump_constraint_graph (FILE *file)
665 unsigned int i=0, size;
666 constraint_t c;
668 /* Only print the graph if it has already been initialized: */
669 if (!graph)
670 return;
672 /* Print the constraints used to produce the constraint graph. The
673 constraints will be printed as comments in the dot file: */
674 fprintf (file, "\n\n/* Constraints used in the constraint graph:\n");
675 dump_constraints (file);
676 fprintf (file, "*/\n");
678 /* Prints the header of the dot file: */
679 fprintf (file, "\n\n// The constraint graph in dot format:\n");
680 fprintf (file, "strict digraph {\n");
681 fprintf (file, " node [\n shape = box\n ]\n");
682 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
683 fprintf (file, "\n // List of nodes in the constraint graph:\n");
685 /* The next lines print the nodes in the graph. In order to get the
686 number of nodes in the graph, we must choose the minimum between the
687 vector VEC (varinfo_t, varmap) and graph->size. If the graph has not
688 yet been initialized, then graph->size == 0, otherwise we must only
689 read nodes that have an entry in VEC (varinfo_t, varmap). */
690 size = VEC_length (varinfo_t, varmap);
691 size = size < graph->size ? size : graph->size;
692 for (i = 0; i < size; i++)
694 const char *name = get_varinfo_fc (graph->rep[i])->name;
695 fprintf (file, " \"%s\" ;\n", name);
698 /* Go over the list of constraints printing the edges in the constraint
699 graph. */
700 fprintf (file, "\n // The constraint edges:\n");
701 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
702 if (c)
703 dump_constraint_edge (file, c);
705 /* Prints the tail of the dot file. For now, only the closing bracket. */
706 fprintf (file, "}\n\n\n");
709 /* Print out the constraint graph to stderr. */
711 void
712 debug_constraint_graph (void)
714 dump_constraint_graph (stderr);
717 /* SOLVER FUNCTIONS
719 The solver is a simple worklist solver that works on the following
720 algorithm:
722 sbitmap changed_nodes = all zeroes;
723 changed_count = 0;
724 For each node that is not already collapsed:
725 changed_count++;
726 set bit in changed nodes
728 while (changed_count > 0)
730 compute topological ordering for constraint graph
732 find and collapse cycles in the constraint graph (updating
733 changed if necessary)
735 for each node (n) in the graph in topological order:
736 changed_count--;
738 Process each complex constraint associated with the node,
739 updating changed if necessary.
741 For each outgoing edge from n, propagate the solution from n to
742 the destination of the edge, updating changed as necessary.
744 } */
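/* For the p/q/r example from the introduction, step 3 seeds
   Sol(p) = { a }; the first pass over the graph propagates { a } along
   the copy edge from p to q, processing the complex constraint r = *q
   then adds an edge from a to r, and a later pass propagates Sol(a)
   into Sol(r).  Once no solution set changes, the worklist is empty and
   solving stops. */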
746 /* Return true if two constraint expressions A and B are equal. */
748 static bool
749 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
751 return a.type == b.type && a.var == b.var && a.offset == b.offset;
754 /* Return true if constraint expression A is less than constraint expression
755 B. This is just arbitrary, but consistent, in order to give them an
756 ordering. */
758 static bool
759 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
761 if (a.type == b.type)
763 if (a.var == b.var)
764 return a.offset < b.offset;
765 else
766 return a.var < b.var;
768 else
769 return a.type < b.type;
772 /* Return true if constraint A is less than constraint B. This is just
773 arbitrary, but consistent, in order to give them an ordering. */
775 static bool
776 constraint_less (const constraint_t a, const constraint_t b)
778 if (constraint_expr_less (a->lhs, b->lhs))
779 return true;
780 else if (constraint_expr_less (b->lhs, a->lhs))
781 return false;
782 else
783 return constraint_expr_less (a->rhs, b->rhs);
786 /* Return true if two constraints A and B are equal. */
788 static bool
789 constraint_equal (struct constraint a, struct constraint b)
791 return constraint_expr_equal (a.lhs, b.lhs)
792 && constraint_expr_equal (a.rhs, b.rhs);
796 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
798 static constraint_t
799 constraint_vec_find (VEC(constraint_t,heap) *vec,
800 struct constraint lookfor)
802 unsigned int place;
803 constraint_t found;
805 if (vec == NULL)
806 return NULL;
808 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
809 if (place >= VEC_length (constraint_t, vec))
810 return NULL;
811 found = VEC_index (constraint_t, vec, place);
812 if (!constraint_equal (*found, lookfor))
813 return NULL;
814 return found;
817 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
819 static void
820 constraint_set_union (VEC(constraint_t,heap) **to,
821 VEC(constraint_t,heap) **from)
823 int i;
824 constraint_t c;
826 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
828 if (constraint_vec_find (*to, *c) == NULL)
830 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
831 constraint_less);
832 VEC_safe_insert (constraint_t, heap, *to, place, c);
837 /* Take a solution set SET, add OFFSET to each member of the set, and
838 overwrite SET with the result when done. */
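/* For instance, with the struct layout from the introduction, adding an
   offset of 32 to the set { foo.a } yields { foo.b }, while artificial,
   unknown-size and single-field variables are copied over unchanged. */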
840 static void
841 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
843 bitmap result = BITMAP_ALLOC (&iteration_obstack);
844 unsigned int i;
845 bitmap_iterator bi;
847 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
849 varinfo_t vi = get_varinfo (i);
851 /* If this is a variable with just one field just set its bit
852 in the result. */
853 if (vi->is_artificial_var
854 || vi->is_unknown_size_var
855 || vi->is_full_var)
856 bitmap_set_bit (result, i);
857 else
859 unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset;
860 varinfo_t v = first_vi_for_offset (vi, fieldoffset);
861 /* If the result is outside of the variable use the last field. */
862 if (!v)
864 v = vi;
865 while (v->next != NULL)
866 v = v->next;
868 bitmap_set_bit (result, v->id);
869 /* If the result is not exactly at fieldoffset include the next
870 field as well. See get_constraint_for_ptr_offset for more
871 rationale. */
872 if (v->offset != fieldoffset
873 && v->next != NULL)
874 bitmap_set_bit (result, v->next->id);
878 bitmap_copy (set, result);
879 BITMAP_FREE (result);
882 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
883 process. */
885 static bool
886 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
888 if (inc == 0)
889 return bitmap_ior_into (to, from);
890 else
892 bitmap tmp;
893 bool res;
895 tmp = BITMAP_ALLOC (&iteration_obstack);
896 bitmap_copy (tmp, from);
897 solution_set_add (tmp, inc);
898 res = bitmap_ior_into (to, tmp);
899 BITMAP_FREE (tmp);
900 return res;
904 /* Insert constraint C into the list of complex constraints for graph
905 node VAR. */
907 static void
908 insert_into_complex (constraint_graph_t graph,
909 unsigned int var, constraint_t c)
911 VEC (constraint_t, heap) *complex = graph->complex[var];
912 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
913 constraint_less);
915 /* Only insert constraints that do not already exist. */
916 if (place >= VEC_length (constraint_t, complex)
917 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
918 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
922 /* Condense two variable nodes into a single variable node, by moving
923 all associated info from FROM to TO. */
925 static void
926 merge_node_constraints (constraint_graph_t graph, unsigned int to,
927 unsigned int from)
929 unsigned int i;
930 constraint_t c;
932 gcc_assert (find (from) == to);
934 /* Move all complex constraints from src node into to node */
935 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
937 /* In complex constraints for node src, we may have either
938 a = *src or *src = a, or an offsetted constraint, which is
939 always added to the rhs node's constraints. */
941 if (c->rhs.type == DEREF)
942 c->rhs.var = to;
943 else if (c->lhs.type == DEREF)
944 c->lhs.var = to;
945 else
946 c->rhs.var = to;
948 constraint_set_union (&graph->complex[to], &graph->complex[from]);
949 VEC_free (constraint_t, heap, graph->complex[from]);
950 graph->complex[from] = NULL;
954 /* Remove edges involving NODE from GRAPH. */
956 static void
957 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
959 if (graph->succs[node])
960 BITMAP_FREE (graph->succs[node]);
963 /* Merge GRAPH nodes FROM and TO into node TO. */
965 static void
966 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
967 unsigned int from)
969 if (graph->indirect_cycles[from] != -1)
971 /* If we have indirect cycles with the from node, and we have
972 none on the to node, the to node has indirect cycles from the
973 from node now that they are unified.
974 If indirect cycles exist on both, unify the nodes that they
975 are in a cycle with, since we know they are in a cycle with
976 each other. */
977 if (graph->indirect_cycles[to] == -1)
978 graph->indirect_cycles[to] = graph->indirect_cycles[from];
981 /* Merge all the successor edges. */
982 if (graph->succs[from])
984 if (!graph->succs[to])
985 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
986 bitmap_ior_into (graph->succs[to],
987 graph->succs[from]);
990 clear_edges_for_node (graph, from);
994 /* Add an implicit predecessor edge to GRAPH, going from TO to FROM if
995 it doesn't exist in the graph already. */
997 static void
998 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
999 unsigned int from)
1001 if (to == from)
1002 return;
1004 if (!graph->implicit_preds[to])
1005 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1007 if (bitmap_set_bit (graph->implicit_preds[to], from))
1008 stats.num_implicit_edges++;
1011 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1012 it doesn't exist in the graph already. */
1015 static void
1016 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1017 unsigned int from)
1019 if (!graph->preds[to])
1020 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1021 bitmap_set_bit (graph->preds[to], from);
1024 /* Add a graph edge to GRAPH, going from FROM to TO if
1025 it doesn't exist in the graph already.
1026 Return false if the edge already existed, true otherwise. */
1028 static bool
1029 add_graph_edge (constraint_graph_t graph, unsigned int to,
1030 unsigned int from)
1032 if (to == from)
1034 return false;
1036 else
1038 bool r = false;
1040 if (!graph->succs[from])
1041 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1042 if (bitmap_set_bit (graph->succs[from], to))
1044 r = true;
1045 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1046 stats.num_edges++;
1048 return r;
1053 /* Return true if {DEST.SRC} is an existing graph edge in GRAPH. */
1055 static bool
1056 valid_graph_edge (constraint_graph_t graph, unsigned int src,
1057 unsigned int dest)
1059 return (graph->succs[dest]
1060 && bitmap_bit_p (graph->succs[dest], src));
1063 /* Initialize the constraint graph structure to contain SIZE nodes. */
1065 static void
1066 init_graph (unsigned int size)
1068 unsigned int j;
1070 graph = XCNEW (struct constraint_graph);
1071 graph->size = size;
1072 graph->succs = XCNEWVEC (bitmap, graph->size);
1073 graph->indirect_cycles = XNEWVEC (int, graph->size);
1074 graph->rep = XNEWVEC (unsigned int, graph->size);
1075 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
1076 graph->pe = XCNEWVEC (unsigned int, graph->size);
1077 graph->pe_rep = XNEWVEC (int, graph->size);
1079 for (j = 0; j < graph->size; j++)
1081 graph->rep[j] = j;
1082 graph->pe_rep[j] = -1;
1083 graph->indirect_cycles[j] = -1;
1087 /* Build the constraint graph, adding only predecessor edges right now. */
1089 static void
1090 build_pred_graph (void)
1092 int i;
1093 constraint_t c;
1094 unsigned int j;
1096 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1097 graph->preds = XCNEWVEC (bitmap, graph->size);
1098 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1099 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1100 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1101 graph->points_to = XCNEWVEC (bitmap, graph->size);
1102 graph->eq_rep = XNEWVEC (int, graph->size);
1103 graph->direct_nodes = sbitmap_alloc (graph->size);
1104 graph->pt_used = sbitmap_alloc (graph->size);
1105 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1106 graph->number_incoming = XCNEWVEC (unsigned int, graph->size);
1107 sbitmap_zero (graph->direct_nodes);
1108 sbitmap_zero (graph->pt_used);
1110 for (j = 0; j < FIRST_REF_NODE; j++)
1112 if (!get_varinfo (j)->is_special_var)
1113 SET_BIT (graph->direct_nodes, j);
1116 for (j = 0; j < graph->size; j++)
1117 graph->eq_rep[j] = -1;
1119 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1120 graph->indirect_cycles[j] = -1;
1122 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1124 struct constraint_expr lhs = c->lhs;
1125 struct constraint_expr rhs = c->rhs;
1126 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
1127 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
1129 if (lhs.type == DEREF)
1131 /* *x = y. */
1132 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1133 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1135 else if (rhs.type == DEREF)
1137 /* x = *y */
1138 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1139 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1140 else
1141 RESET_BIT (graph->direct_nodes, lhsvar);
1143 else if (rhs.type == ADDRESSOF)
1145 /* x = &y */
1146 if (graph->points_to[lhsvar] == NULL)
1147 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1148 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1150 if (graph->pointed_by[rhsvar] == NULL)
1151 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1152 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1154 /* Implicitly, *x = y */
1155 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1157 RESET_BIT (graph->direct_nodes, rhsvar);
1158 bitmap_set_bit (graph->address_taken, rhsvar);
1160 else if (lhsvar > anything_id
1161 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1163 /* x = y */
1164 add_pred_graph_edge (graph, lhsvar, rhsvar);
1165 /* Implicitly, *x = *y */
1166 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1167 FIRST_REF_NODE + rhsvar);
1169 else if (lhs.offset != 0 || rhs.offset != 0)
1171 if (rhs.offset != 0)
1172 RESET_BIT (graph->direct_nodes, lhs.var);
1173 else if (lhs.offset != 0)
1174 RESET_BIT (graph->direct_nodes, rhs.var);
1179 /* Build the constraint graph, adding successor edges. */
1181 static void
1182 build_succ_graph (void)
1184 int i;
1185 constraint_t c;
1187 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1189 struct constraint_expr lhs;
1190 struct constraint_expr rhs;
1191 unsigned int lhsvar;
1192 unsigned int rhsvar;
1194 if (!c)
1195 continue;
1197 lhs = c->lhs;
1198 rhs = c->rhs;
1199 lhsvar = find (get_varinfo_fc (lhs.var)->id);
1200 rhsvar = find (get_varinfo_fc (rhs.var)->id);
1202 if (lhs.type == DEREF)
1204 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1205 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1207 else if (rhs.type == DEREF)
1209 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1210 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1212 else if (rhs.type == ADDRESSOF)
1214 /* x = &y */
1215 gcc_assert (find (get_varinfo_fc (rhs.var)->id)
1216 == get_varinfo_fc (rhs.var)->id);
1217 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1219 else if (lhsvar > anything_id
1220 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1222 add_graph_edge (graph, lhsvar, rhsvar);
1228 /* Changed variables on the last iteration. */
1229 static unsigned int changed_count;
1230 static sbitmap changed;
1232 DEF_VEC_I(unsigned);
1233 DEF_VEC_ALLOC_I(unsigned,heap);
1236 /* Strongly Connected Component visitation info. */
1238 struct scc_info
1240 sbitmap visited;
1241 sbitmap deleted;
1242 unsigned int *dfs;
1243 unsigned int *node_mapping;
1244 int current_index;
1245 VEC(unsigned,heap) *scc_stack;
1249 /* Recursive routine to find strongly connected components in GRAPH.
1250 SI is the SCC info to store the information in, and N is the id of current
1251 graph node we are processing.
1253 This is Tarjan's strongly connected component finding algorithm, as
1254 modified by Nuutila to keep only non-root nodes on the stack.
1255 The algorithm can be found in "On finding the strongly connected
1256 components in a directed graph" by Esko Nuutila and Eljas
1257 Soisalon-Soininen, in Information Processing Letters volume 49,
1258 number 1, pages 9-14. */
1260 static void
1261 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1263 unsigned int i;
1264 bitmap_iterator bi;
1265 unsigned int my_dfs;
1267 SET_BIT (si->visited, n);
1268 si->dfs[n] = si->current_index ++;
1269 my_dfs = si->dfs[n];
1271 /* Visit all the successors. */
1272 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1274 unsigned int w;
1276 if (i > LAST_REF_NODE)
1277 break;
1279 w = find (i);
1280 if (TEST_BIT (si->deleted, w))
1281 continue;
1283 if (!TEST_BIT (si->visited, w))
1284 scc_visit (graph, si, w);
1286 unsigned int t = find (w);
1287 unsigned int nnode = find (n);
1288 gcc_assert (nnode == n);
1290 if (si->dfs[t] < si->dfs[nnode])
1291 si->dfs[n] = si->dfs[t];
1295 /* See if any components have been identified. */
1296 if (si->dfs[n] == my_dfs)
1298 if (VEC_length (unsigned, si->scc_stack) > 0
1299 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1301 bitmap scc = BITMAP_ALLOC (NULL);
1302 bool have_ref_node = n >= FIRST_REF_NODE;
1303 unsigned int lowest_node;
1304 bitmap_iterator bi;
1306 bitmap_set_bit (scc, n);
1308 while (VEC_length (unsigned, si->scc_stack) != 0
1309 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1311 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1313 bitmap_set_bit (scc, w);
1314 if (w >= FIRST_REF_NODE)
1315 have_ref_node = true;
1318 lowest_node = bitmap_first_set_bit (scc);
1319 gcc_assert (lowest_node < FIRST_REF_NODE);
1321 /* Collapse the SCC nodes into a single node, and mark the
1322 indirect cycles. */
1323 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1325 if (i < FIRST_REF_NODE)
1327 if (unite (lowest_node, i))
1328 unify_nodes (graph, lowest_node, i, false);
1330 else
1332 unite (lowest_node, i);
1333 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1337 SET_BIT (si->deleted, n);
1339 else
1340 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1343 /* Unify node FROM into node TO, updating the changed count if
1344 necessary when UPDATE_CHANGED is true. */
1346 static void
1347 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1348 bool update_changed)
1351 gcc_assert (to != from && find (to) == to);
1352 if (dump_file && (dump_flags & TDF_DETAILS))
1353 fprintf (dump_file, "Unifying %s to %s\n",
1354 get_varinfo (from)->name,
1355 get_varinfo (to)->name);
1357 if (update_changed)
1358 stats.unified_vars_dynamic++;
1359 else
1360 stats.unified_vars_static++;
1362 merge_graph_nodes (graph, to, from);
1363 merge_node_constraints (graph, to, from);
1365 if (get_varinfo (from)->no_tbaa_pruning)
1366 get_varinfo (to)->no_tbaa_pruning = true;
1368 /* Mark TO as changed if FROM was changed. If TO was already marked
1369 as changed, decrease the changed count. */
1371 if (update_changed && TEST_BIT (changed, from))
1373 RESET_BIT (changed, from);
1374 if (!TEST_BIT (changed, to))
1375 SET_BIT (changed, to);
1376 else
1378 gcc_assert (changed_count > 0);
1379 changed_count--;
1382 if (get_varinfo (from)->solution)
1384 /* If the solution changes because of the merging, we need to mark
1385 the variable as changed. */
1386 if (bitmap_ior_into (get_varinfo (to)->solution,
1387 get_varinfo (from)->solution))
1389 if (update_changed && !TEST_BIT (changed, to))
1391 SET_BIT (changed, to);
1392 changed_count++;
1396 BITMAP_FREE (get_varinfo (from)->solution);
1397 BITMAP_FREE (get_varinfo (from)->oldsolution);
1399 if (stats.iterations > 0)
1401 BITMAP_FREE (get_varinfo (to)->oldsolution);
1402 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
1405 if (valid_graph_edge (graph, to, to))
1407 if (graph->succs[to])
1408 bitmap_clear_bit (graph->succs[to], to);
1412 /* Information needed to compute the topological ordering of a graph. */
1414 struct topo_info
1416 /* sbitmap of visited nodes. */
1417 sbitmap visited;
1418 /* Array that stores the topological order of the graph, *in
1419 reverse*. */
1420 VEC(unsigned,heap) *topo_order;
1424 /* Initialize and return a topological info structure. */
1426 static struct topo_info *
1427 init_topo_info (void)
1429 size_t size = graph->size;
1430 struct topo_info *ti = XNEW (struct topo_info);
1431 ti->visited = sbitmap_alloc (size);
1432 sbitmap_zero (ti->visited);
1433 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1434 return ti;
1438 /* Free the topological sort info pointed to by TI. */
1440 static void
1441 free_topo_info (struct topo_info *ti)
1443 sbitmap_free (ti->visited);
1444 VEC_free (unsigned, heap, ti->topo_order);
1445 free (ti);
1448 /* Visit the graph in topological order, and store the order in the
1449 topo_info structure. */
1451 static void
1452 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1453 unsigned int n)
1455 bitmap_iterator bi;
1456 unsigned int j;
1458 SET_BIT (ti->visited, n);
1460 if (graph->succs[n])
1461 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1463 if (!TEST_BIT (ti->visited, j))
1464 topo_visit (graph, ti, j);
1467 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1470 /* Return true if variable N + OFFSET is a legal field of N. */
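/* E.g. with the layout from the introduction, an offset of 32 from foo.a
   stays within foo's fullsize of 64 and is considered safe, whereas the
   same offset from foo.b (32 + 32 = 64) is not. */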
1472 static bool
1473 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1475 varinfo_t ninfo = get_varinfo (n);
1477 /* For things we've globbed to single variables, any offset into the
1478 variable acts like the entire variable, so that it becomes offset
1479 0. */
1480 if (ninfo->is_special_var
1481 || ninfo->is_artificial_var
1482 || ninfo->is_unknown_size_var
1483 || ninfo->is_full_var)
1485 *offset = 0;
1486 return true;
1488 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1491 /* Process a constraint C that represents x = *y, using DELTA as the
1492 starting solution. */
1494 static void
1495 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1496 bitmap delta)
1498 unsigned int lhs = c->lhs.var;
1499 bool flag = false;
1500 bitmap sol = get_varinfo (lhs)->solution;
1501 unsigned int j;
1502 bitmap_iterator bi;
1504 if (bitmap_bit_p (delta, anything_id))
1506 flag |= bitmap_set_bit (sol, anything_id);
1507 goto done;
1510 /* For x = *ESCAPED and x = *CALLUSED we want to compute the
1511 reachability set of the rhs var. As a pointer to a sub-field
1512 of a variable can also reach all other fields of the variable
1513 we simply have to expand the solution to contain all sub-fields
1514 if one sub-field is contained. */
1515 if (c->rhs.var == escaped_id
1516 || c->rhs.var == callused_id)
1518 bitmap vars = NULL;
1519 /* In a first pass record all variables whose sub-fields we need
1520 to add. This avoids quadratic behavior. */
1521 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1523 varinfo_t v = get_varinfo (j);
1524 if (v->is_full_var)
1525 continue;
1527 v = lookup_vi_for_tree (v->decl);
1528 if (v->next != NULL)
1530 if (vars == NULL)
1531 vars = BITMAP_ALLOC (NULL);
1532 bitmap_set_bit (vars, v->id);
1535 /* In the second pass now do the addition to the solution and
1536 to speed up solving add it to the delta as well. */
1537 if (vars != NULL)
1539 EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
1541 varinfo_t v = get_varinfo (j);
1542 for (; v != NULL; v = v->next)
1544 if (bitmap_set_bit (sol, v->id))
1546 flag = true;
1547 bitmap_set_bit (delta, v->id);
1551 BITMAP_FREE (vars);
1555 /* For each variable j in delta (Sol(y)), add
1556 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1557 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1559 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
1560 if (type_safe (j, &roffset))
1562 varinfo_t v;
1563 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1564 unsigned int t;
1566 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1567 /* If the access is outside of the variable we can ignore it. */
1568 if (!v)
1569 continue;
1570 t = find (v->id);
1572 /* Adding edges from the special vars is pointless.
1573 They don't have sets that can change. */
1574 if (get_varinfo (t)->is_special_var)
1575 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1576 /* Merging the solution from ESCAPED needlessly increases
1577 the set. Use ESCAPED as representative instead.
1578 Same for CALLUSED. */
1579 else if (get_varinfo (t)->id == escaped_id
1580 || get_varinfo (t)->id == callused_id)
1581 flag |= bitmap_set_bit (sol, get_varinfo (t)->id);
1582 else if (add_graph_edge (graph, lhs, t))
1583 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1587 done:
1588 /* If the LHS solution changed, mark the var as changed. */
1589 if (flag)
1591 get_varinfo (lhs)->solution = sol;
1592 if (!TEST_BIT (changed, lhs))
1594 SET_BIT (changed, lhs);
1595 changed_count++;
1600 /* Process a constraint C that represents *x = y. */
1602 static void
1603 do_ds_constraint (constraint_t c, bitmap delta)
1605 unsigned int rhs = c->rhs.var;
1606 bitmap sol = get_varinfo (rhs)->solution;
1607 unsigned int j;
1608 bitmap_iterator bi;
1610 if (bitmap_bit_p (sol, anything_id))
1612 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1614 varinfo_t jvi = get_varinfo (j);
1615 unsigned int t;
1616 unsigned int loff = c->lhs.offset;
1617 unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
1618 varinfo_t v;
1620 v = get_varinfo (j);
1621 if (!v->is_full_var)
1623 v = first_vi_for_offset (v, fieldoffset);
1624 /* If the access is outside of the variable we can ignore it. */
1625 if (!v)
1626 continue;
1628 t = find (v->id);
1630 if (bitmap_set_bit (get_varinfo (t)->solution, anything_id)
1631 && !TEST_BIT (changed, t))
1633 SET_BIT (changed, t);
1634 changed_count++;
1637 return;
1640 /* For each member j of delta (Sol(x)), add an edge from y to j and
1641 union Sol(y) into Sol(j) */
1642 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1644 unsigned HOST_WIDE_INT loff = c->lhs.offset;
1645 if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
1647 varinfo_t v;
1648 unsigned int t;
1649 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1650 bitmap tmp;
1652 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1653 /* If the access is outside of the variable we can ignore it. */
1654 if (!v)
1655 continue;
1656 t = find (v->id);
1657 tmp = get_varinfo (t)->solution;
1659 if (set_union_with_increment (tmp, sol, 0))
1661 get_varinfo (t)->solution = tmp;
1662 if (t == rhs)
1663 sol = get_varinfo (rhs)->solution;
1664 if (!TEST_BIT (changed, t))
1666 SET_BIT (changed, t);
1667 changed_count++;
1674 /* Handle a non-simple constraint (simple meaning it requires no
1675 iteration), i.e. *x = &y, x = *y, *x = y, and x = y with offsets involved. */
1677 static void
1678 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1680 if (c->lhs.type == DEREF)
1682 if (c->rhs.type == ADDRESSOF)
1684 gcc_unreachable();
1686 else
1688 /* *x = y */
1689 do_ds_constraint (c, delta);
1692 else if (c->rhs.type == DEREF)
1694 /* x = *y */
1695 if (!(get_varinfo (c->lhs.var)->is_special_var))
1696 do_sd_constraint (graph, c, delta);
1698 else
1700 bitmap tmp;
1701 bitmap solution;
1702 bool flag = false;
1704 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1705 solution = get_varinfo (c->rhs.var)->solution;
1706 tmp = get_varinfo (c->lhs.var)->solution;
1708 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1710 if (flag)
1712 get_varinfo (c->lhs.var)->solution = tmp;
1713 if (!TEST_BIT (changed, c->lhs.var))
1715 SET_BIT (changed, c->lhs.var);
1716 changed_count++;
1722 /* Initialize and return a new SCC info structure. */
1724 static struct scc_info *
1725 init_scc_info (size_t size)
1727 struct scc_info *si = XNEW (struct scc_info);
1728 size_t i;
1730 si->current_index = 0;
1731 si->visited = sbitmap_alloc (size);
1732 sbitmap_zero (si->visited);
1733 si->deleted = sbitmap_alloc (size);
1734 sbitmap_zero (si->deleted);
1735 si->node_mapping = XNEWVEC (unsigned int, size);
1736 si->dfs = XCNEWVEC (unsigned int, size);
1738 for (i = 0; i < size; i++)
1739 si->node_mapping[i] = i;
1741 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1742 return si;
1745 /* Free an SCC info structure pointed to by SI */
1747 static void
1748 free_scc_info (struct scc_info *si)
1750 sbitmap_free (si->visited);
1751 sbitmap_free (si->deleted);
1752 free (si->node_mapping);
1753 free (si->dfs);
1754 VEC_free (unsigned, heap, si->scc_stack);
1755 free (si);
1759 /* Find indirect cycles in GRAPH that occur, using strongly connected
1760 components, and note them in the indirect cycles map.
1762 This technique comes from Ben Hardekopf and Calvin Lin,
1763 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1764 Lines of Code", submitted to PLDI 2007. */
1766 static void
1767 find_indirect_cycles (constraint_graph_t graph)
1769 unsigned int i;
1770 unsigned int size = graph->size;
1771 struct scc_info *si = init_scc_info (size);
1773 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1774 if (!TEST_BIT (si->visited, i) && find (i) == i)
1775 scc_visit (graph, si, i);
1777 free_scc_info (si);
1780 /* Compute a topological ordering for GRAPH, and store the result in the
1781 topo_info structure TI. */
1783 static void
1784 compute_topo_order (constraint_graph_t graph,
1785 struct topo_info *ti)
1787 unsigned int i;
1788 unsigned int size = graph->size;
1790 for (i = 0; i != size; ++i)
1791 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1792 topo_visit (graph, ti, i);
1795 /* Structure used for hash value numbering of pointer equivalence
1796 classes. */
1798 typedef struct equiv_class_label
1800 unsigned int equivalence_class;
1801 bitmap labels;
1802 hashval_t hashcode;
1803 } *equiv_class_label_t;
1804 typedef const struct equiv_class_label *const_equiv_class_label_t;
1806 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1807 classes. */
1808 static htab_t pointer_equiv_class_table;
1810 /* A hashtable for mapping a bitmap of labels->location equivalence
1811 classes. */
1812 static htab_t location_equiv_class_table;
1814 /* Hash function for an equiv_class_label_t. */
1816 static hashval_t
1817 equiv_class_label_hash (const void *p)
1819 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1820 return ecl->hashcode;
1823 /* Equality function for two equiv_class_label_t's. */
1825 static int
1826 equiv_class_label_eq (const void *p1, const void *p2)
1828 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1829 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1830 return bitmap_equal_p (eql1->labels, eql2->labels);
1833 /* Look up an equivalence class in TABLE by the bitmap of LABELS it
1834 contains. */
1836 static unsigned int
1837 equiv_class_lookup (htab_t table, bitmap labels)
1839 void **slot;
1840 struct equiv_class_label ecl;
1842 ecl.labels = labels;
1843 ecl.hashcode = bitmap_hash (labels);
1845 slot = htab_find_slot_with_hash (table, &ecl,
1846 ecl.hashcode, NO_INSERT);
1847 if (!slot)
1848 return 0;
1849 else
1850 return ((equiv_class_label_t) *slot)->equivalence_class;
1854 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1855 to TABLE. */
1857 static void
1858 equiv_class_add (htab_t table, unsigned int equivalence_class,
1859 bitmap labels)
1861 void **slot;
1862 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1864 ecl->labels = labels;
1865 ecl->equivalence_class = equivalence_class;
1866 ecl->hashcode = bitmap_hash (labels);
1868 slot = htab_find_slot_with_hash (table, ecl,
1869 ecl->hashcode, INSERT);
1870 gcc_assert (!*slot);
1871 *slot = (void *) ecl;
1874 /* Perform offline variable substitution.
1876 This is a worst case quadratic time way of identifying variables
1877 that must have equivalent points-to sets, including those caused by
1878 static cycles, and single entry subgraphs, in the constraint graph.
1880 The technique is described in "Exploiting Pointer and Location
1881 Equivalence to Optimize Pointer Analysis", in the 14th International
1882 Static Analysis Symposium (SAS), August 2007. It is known as the
1883 "HU" algorithm, and is equivalent to value numbering the collapsed
1884 constraint graph including evaluating unions.
1886 The general method of finding equivalence classes is as follows:
1887 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1888 Initialize all non-REF nodes to be direct nodes.
1889 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1890 variable}
1891 For each constraint containing the dereference, we also do the same
1892 thing.
1894 We then compute SCC's in the graph and unify nodes in the same SCC,
1895 including pts sets.
1897 For each non-collapsed node x:
1898 Visit all unvisited explicit incoming edges.
1899 Ignoring all non-pointers, set pts(x) = Union of pts(y) for all y
1900 such that y->x.
1901 Lookup the equivalence class for pts(x).
1902 If we found one, equivalence_class(x) = found class.
1903 Otherwise, equivalence_class(x) = new class, and new_class is
1904 added to the lookup table.
1906 All direct nodes with the same equivalence class can be replaced
1907 with a single representative node.
1908 All unlabeled nodes (label == 0) are not pointers and all edges
1909 involving them can be eliminated.
1910 We perform these optimizations during rewrite_constraints
1912 In addition to pointer equivalence class finding, we also perform
1913 location equivalence class finding. This is the set of variables
1914 that always appear together in points-to sets. We use this to
1915 compress the size of the points-to sets. */
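/* As a rough illustration, for the constraints a = &x, b = a and c = a
   (illustrative names), pts(b) and pts(c) are both exactly pts(a), since
   each has a single incoming copy edge from a.  All three therefore get
   the same pointer equivalence label, and the direct nodes among them can
   be collapsed to a single representative. */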
1917 /* Current maximum pointer equivalence class id. */
1918 static int pointer_equiv_class;
1920 /* Current maximum location equivalence class id. */
1921 static int location_equiv_class;
1923 /* Recursive routine to find strongly connected components in GRAPH,
1924 and label its nodes with DFS numbers. */
1926 static void
1927 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1929 unsigned int i;
1930 bitmap_iterator bi;
1931 unsigned int my_dfs;
1933 gcc_assert (si->node_mapping[n] == n);
1934 SET_BIT (si->visited, n);
1935 si->dfs[n] = si->current_index ++;
1936 my_dfs = si->dfs[n];
1938 /* Visit all the successors. */
1939 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1941 unsigned int w = si->node_mapping[i];
1943 if (TEST_BIT (si->deleted, w))
1944 continue;
1946 if (!TEST_BIT (si->visited, w))
1947 condense_visit (graph, si, w);
1949 unsigned int t = si->node_mapping[w];
1950 unsigned int nnode = si->node_mapping[n];
1951 gcc_assert (nnode == n);
1953 if (si->dfs[t] < si->dfs[nnode])
1954 si->dfs[n] = si->dfs[t];
1958 /* Visit all the implicit predecessors. */
1959 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
1961 unsigned int w = si->node_mapping[i];
1963 if (TEST_BIT (si->deleted, w))
1964 continue;
1966 if (!TEST_BIT (si->visited, w))
1967 condense_visit (graph, si, w);
1969 unsigned int t = si->node_mapping[w];
1970 unsigned int nnode = si->node_mapping[n];
1971 gcc_assert (nnode == n);
1973 if (si->dfs[t] < si->dfs[nnode])
1974 si->dfs[n] = si->dfs[t];
1978 /* See if any components have been identified. */
1979 if (si->dfs[n] == my_dfs)
1981 while (VEC_length (unsigned, si->scc_stack) != 0
1982 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1984 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1985 si->node_mapping[w] = n;
1987 if (!TEST_BIT (graph->direct_nodes, w))
1988 RESET_BIT (graph->direct_nodes, n);
1990 /* Unify our nodes. */
1991 if (graph->preds[w])
1993 if (!graph->preds[n])
1994 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1995 bitmap_ior_into (graph->preds[n], graph->preds[w]);
1997 if (graph->implicit_preds[w])
1999 if (!graph->implicit_preds[n])
2000 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2001 bitmap_ior_into (graph->implicit_preds[n],
2002 graph->implicit_preds[w]);
2004 if (graph->points_to[w])
2006 if (!graph->points_to[n])
2007 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2008 bitmap_ior_into (graph->points_to[n],
2009 graph->points_to[w]);
2011 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2013 unsigned int rep = si->node_mapping[i];
2014 graph->number_incoming[rep]++;
2017 SET_BIT (si->deleted, n);
2019 else
2020 VEC_safe_push (unsigned, heap, si->scc_stack, n);
2023 /* Label pointer equivalences. */
2025 static void
2026 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2028 unsigned int i;
2029 bitmap_iterator bi;
2030 SET_BIT (si->visited, n);
2032 if (!graph->points_to[n])
2033 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2035 /* Label and union our incoming edges' points-to sets. */
2036 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2038 unsigned int w = si->node_mapping[i];
2039 if (!TEST_BIT (si->visited, w))
2040 label_visit (graph, si, w);
2042 /* Skip unused edges */
2043 if (w == n || graph->pointer_label[w] == 0)
2045 graph->number_incoming[w]--;
2046 continue;
2048 if (graph->points_to[w])
2049 bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
2051 /* If all incoming edges to w have been processed and
2052 graph->points_to[w] was not stored in the hash table, we can
2053 free it. */
2054 graph->number_incoming[w]--;
2055 if (!graph->number_incoming[w] && !TEST_BIT (graph->pt_used, w))
2057 BITMAP_FREE (graph->points_to[w]);
2060 /* Indirect nodes get fresh variables. */
2061 if (!TEST_BIT (graph->direct_nodes, n))
2062 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2064 if (!bitmap_empty_p (graph->points_to[n]))
2066 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
2067 graph->points_to[n]);
2068 if (!label)
2070 SET_BIT (graph->pt_used, n);
2071 label = pointer_equiv_class++;
2072 equiv_class_add (pointer_equiv_class_table,
2073 label, graph->points_to[n]);
2075 graph->pointer_label[n] = label;
2079 /* Perform offline variable substitution, discovering equivalence
2080 classes, and eliminating non-pointer variables. */
2082 static struct scc_info *
2083 perform_var_substitution (constraint_graph_t graph)
2085 unsigned int i;
2086 unsigned int size = graph->size;
2087 struct scc_info *si = init_scc_info (size);
2089 bitmap_obstack_initialize (&iteration_obstack);
2090 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
2091 equiv_class_label_eq, free);
2092 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
2093 equiv_class_label_eq, free);
2094 pointer_equiv_class = 1;
2095 location_equiv_class = 1;
2097 /* Condense the nodes, which means to find SCC's, count incoming
2098 predecessors, and unite nodes in SCC's. */
2099 for (i = 0; i < FIRST_REF_NODE; i++)
2100 if (!TEST_BIT (si->visited, si->node_mapping[i]))
2101 condense_visit (graph, si, si->node_mapping[i]);
2103 sbitmap_zero (si->visited);
2104 /* Actually label the nodes for pointer equivalences. */
2105 for (i = 0; i < FIRST_REF_NODE; i++)
2106 if (!TEST_BIT (si->visited, si->node_mapping[i]))
2107 label_visit (graph, si, si->node_mapping[i]);
2109 /* Calculate location equivalence labels. */
2110 for (i = 0; i < FIRST_REF_NODE; i++)
2112 bitmap pointed_by;
2113 bitmap_iterator bi;
2114 unsigned int j;
2115 unsigned int label;
2117 if (!graph->pointed_by[i])
2118 continue;
2119 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2121 /* Translate the pointed-by mapping for pointer equivalence
2122 labels. */
2123 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2125 bitmap_set_bit (pointed_by,
2126 graph->pointer_label[si->node_mapping[j]]);
2128 /* The original pointed_by is now dead. */
2129 BITMAP_FREE (graph->pointed_by[i]);
2131 /* Look up the location equivalence label if one exists, or make
2132 one otherwise. */
2133 label = equiv_class_lookup (location_equiv_class_table,
2134 pointed_by);
2135 if (label == 0)
2137 label = location_equiv_class++;
2138 equiv_class_add (location_equiv_class_table,
2139 label, pointed_by);
2141 else
2143 if (dump_file && (dump_flags & TDF_DETAILS))
2144 fprintf (dump_file, "Found location equivalence for node %s\n",
2145 get_varinfo (i)->name);
2146 BITMAP_FREE (pointed_by);
2148 graph->loc_label[i] = label;
2152 if (dump_file && (dump_flags & TDF_DETAILS))
2153 for (i = 0; i < FIRST_REF_NODE; i++)
2155 bool direct_node = TEST_BIT (graph->direct_nodes, i);
2156 fprintf (dump_file,
2157 "Equivalence classes for %s node id %d:%s are pointer: %d"
2158 ", location:%d\n",
2159 direct_node ? "Direct node" : "Indirect node", i,
2160 get_varinfo (i)->name,
2161 graph->pointer_label[si->node_mapping[i]],
2162 graph->loc_label[si->node_mapping[i]]);
2165 /* Quickly eliminate our non-pointer variables. */
2167 for (i = 0; i < FIRST_REF_NODE; i++)
2169 unsigned int node = si->node_mapping[i];
2171 if (graph->pointer_label[node] == 0)
2173 if (dump_file && (dump_flags & TDF_DETAILS))
2174 fprintf (dump_file,
2175 "%s is a non-pointer variable, eliminating edges.\n",
2176 get_varinfo (node)->name);
2177 stats.nonpointer_vars++;
2178 clear_edges_for_node (graph, node);
2182 return si;
2185 /* Free information that was only necessary for variable
2186 substitution. */
2188 static void
2189 free_var_substitution_info (struct scc_info *si)
2191 free_scc_info (si);
2192 free (graph->pointer_label);
2193 free (graph->loc_label);
2194 free (graph->pointed_by);
2195 free (graph->points_to);
2196 free (graph->number_incoming);
2197 free (graph->eq_rep);
2198 sbitmap_free (graph->direct_nodes);
2199 sbitmap_free (graph->pt_used);
2200 htab_delete (pointer_equiv_class_table);
2201 htab_delete (location_equiv_class_table);
2202 bitmap_obstack_release (&iteration_obstack);
2205 /* Return an existing node that is equivalent to NODE, which has
2206 equivalence class LABEL, if one exists. Return NODE otherwise. */
2208 static unsigned int
2209 find_equivalent_node (constraint_graph_t graph,
2210 unsigned int node, unsigned int label)
2212 /* If the address version of this variable is unused, we can
2213 substitute it for anything else with the same label.
2214 Otherwise, we know the pointers are equivalent, but not the
2215 locations, and we can unite them later. */
2217 if (!bitmap_bit_p (graph->address_taken, node))
2219 gcc_assert (label < graph->size);
2221 if (graph->eq_rep[label] != -1)
2223 /* Unify the two variables since we know they are equivalent. */
2224 if (unite (graph->eq_rep[label], node))
2225 unify_nodes (graph, graph->eq_rep[label], node, false);
2226 return graph->eq_rep[label];
2228 else
2230 graph->eq_rep[label] = node;
2231 graph->pe_rep[label] = node;
2234 else
2236 gcc_assert (label < graph->size);
2237 graph->pe[node] = label;
2238 if (graph->pe_rep[label] == -1)
2239 graph->pe_rep[label] = node;
2242 return node;
2245 /* Unite pointer equivalent but not location equivalent nodes in
2246 GRAPH. This may only be performed once variable substitution is
2247 finished. */
2249 static void
2250 unite_pointer_equivalences (constraint_graph_t graph)
2252 unsigned int i;
2254 /* Go through the pointer equivalences and unite them to their
2255 representative, if they aren't already. */
2256 for (i = 0; i < FIRST_REF_NODE; i++)
2258 unsigned int label = graph->pe[i];
2259 if (label)
2261 int label_rep = graph->pe_rep[label];
2263 if (label_rep == -1)
2264 continue;
2266 label_rep = find (label_rep);
2267 if (label_rep >= 0 && unite (label_rep, find (i)))
2268 unify_nodes (graph, label_rep, i, false);
2273 /* Move complex constraints to the GRAPH nodes they belong to. */
2275 static void
2276 move_complex_constraints (constraint_graph_t graph)
2278 int i;
2279 constraint_t c;
2281 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2283 if (c)
2285 struct constraint_expr lhs = c->lhs;
2286 struct constraint_expr rhs = c->rhs;
2288 if (lhs.type == DEREF)
2290 insert_into_complex (graph, lhs.var, c);
2292 else if (rhs.type == DEREF)
2294 if (!(get_varinfo (lhs.var)->is_special_var))
2295 insert_into_complex (graph, rhs.var, c);
2297 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2298 && (lhs.offset != 0 || rhs.offset != 0))
2300 insert_into_complex (graph, rhs.var, c);
2307 /* Optimize and rewrite complex constraints while performing
2308 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2309 result of perform_var_substitution. */
2311 static void
2312 rewrite_constraints (constraint_graph_t graph,
2313 struct scc_info *si)
2315 int i;
2316 unsigned int j;
2317 constraint_t c;
2319 for (j = 0; j < graph->size; j++)
2320 gcc_assert (find (j) == j);
2322 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2324 struct constraint_expr lhs = c->lhs;
2325 struct constraint_expr rhs = c->rhs;
2326 unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
2327 unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
2328 unsigned int lhsnode, rhsnode;
2329 unsigned int lhslabel, rhslabel;
2331 lhsnode = si->node_mapping[lhsvar];
2332 rhsnode = si->node_mapping[rhsvar];
2333 lhslabel = graph->pointer_label[lhsnode];
2334 rhslabel = graph->pointer_label[rhsnode];
2336 /* See if it is really a non-pointer variable, and if so, ignore
2337 the constraint. */
2338 if (lhslabel == 0)
2340 if (dump_file && (dump_flags & TDF_DETAILS))
2343 fprintf (dump_file, "%s is a non-pointer variable,"
2344 "ignoring constraint:",
2345 get_varinfo (lhs.var)->name);
2346 dump_constraint (dump_file, c);
2348 VEC_replace (constraint_t, constraints, i, NULL);
2349 continue;
2352 if (rhslabel == 0)
2354 if (dump_file && (dump_flags & TDF_DETAILS))
2357 fprintf (dump_file, "%s is a non-pointer variable,"
2358 "ignoring constraint:",
2359 get_varinfo (rhs.var)->name);
2360 dump_constraint (dump_file, c);
2362 VEC_replace (constraint_t, constraints, i, NULL);
2363 continue;
2366 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2367 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2368 c->lhs.var = lhsvar;
2369 c->rhs.var = rhsvar;
2374 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2375 part of an SCC, false otherwise. */
2377 static bool
2378 eliminate_indirect_cycles (unsigned int node)
2380 if (graph->indirect_cycles[node] != -1
2381 && !bitmap_empty_p (get_varinfo (node)->solution))
2383 unsigned int i;
2384 VEC(unsigned,heap) *queue = NULL;
2385 int queuepos;
2386 unsigned int to = find (graph->indirect_cycles[node]);
2387 bitmap_iterator bi;
2389 /* We can't touch the solution set and call unify_nodes
2390 at the same time, because unify_nodes is going to do
2391 bitmap unions into it. */
2393 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2395 if (find (i) == i && i != to)
2397 if (unite (to, i))
2398 VEC_safe_push (unsigned, heap, queue, i);
2402 for (queuepos = 0;
2403 VEC_iterate (unsigned, queue, queuepos, i);
2404 queuepos++)
2406 unify_nodes (graph, to, i, true);
2408 VEC_free (unsigned, heap, queue);
2409 return true;
2411 return false;
2414 /* Solve the constraint graph GRAPH using our worklist solver.
2415 This is based on the PW* family of solvers from the "Efficient Field
2416 Sensitive Pointer Analysis for C" paper.
2417 It works by iterating over all the graph nodes, processing the complex
2418 constraints and propagating the copy constraints, until everything stops
2419 changing. This corresponds to steps 6-8 in the solving list given above. */
2421 static void
2422 solve_graph (constraint_graph_t graph)
2424 unsigned int size = graph->size;
2425 unsigned int i;
2426 bitmap pts;
2428 changed_count = 0;
2429 changed = sbitmap_alloc (size);
2430 sbitmap_zero (changed);
2432 /* Mark all initial non-collapsed nodes as changed. */
2433 for (i = 0; i < size; i++)
2435 varinfo_t ivi = get_varinfo (i);
2436 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2437 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2438 || VEC_length (constraint_t, graph->complex[i]) > 0))
2440 SET_BIT (changed, i);
2441 changed_count++;
2445 /* Allocate a bitmap to be used to store the changed bits. */
2446 pts = BITMAP_ALLOC (&pta_obstack);
2448 while (changed_count > 0)
2450 unsigned int i;
2451 struct topo_info *ti = init_topo_info ();
2452 stats.iterations++;
2454 bitmap_obstack_initialize (&iteration_obstack);
2456 compute_topo_order (graph, ti);
2458 while (VEC_length (unsigned, ti->topo_order) != 0)
2461 i = VEC_pop (unsigned, ti->topo_order);
2463 /* If this variable is not a representative, skip it. */
2464 if (find (i) != i)
2465 continue;
2467 /* In certain indirect cycle cases, we may merge this
2468 variable to another. */
2469 if (eliminate_indirect_cycles (i) && find (i) != i)
2470 continue;
2472 /* If the node has changed, we need to process the
2473 complex constraints and outgoing edges again. */
2474 if (TEST_BIT (changed, i))
2476 unsigned int j;
2477 constraint_t c;
2478 bitmap solution;
2479 VEC(constraint_t,heap) *complex = graph->complex[i];
2480 bool solution_empty;
2482 RESET_BIT (changed, i);
2483 changed_count--;
2485 /* Compute the changed set of solution bits. */
2486 bitmap_and_compl (pts, get_varinfo (i)->solution,
2487 get_varinfo (i)->oldsolution);
2489 if (bitmap_empty_p (pts))
2490 continue;
2492 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2494 solution = get_varinfo (i)->solution;
2495 solution_empty = bitmap_empty_p (solution);
2497 /* Process the complex constraints */
2498 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2500 /* XXX: This is going to unsort the constraints in
2501 some cases, which will occasionally add duplicate
2502 constraints during unification. This does not
2503 affect correctness. */
2504 c->lhs.var = find (c->lhs.var);
2505 c->rhs.var = find (c->rhs.var);
2507 /* The only complex constraint that can change our
2508 solution to non-empty, given an empty solution,
2509 is a constraint where the lhs side is receiving
2510 some set from elsewhere. */
2511 if (!solution_empty || c->lhs.type != DEREF)
2512 do_complex_constraint (graph, c, pts);
2515 solution_empty = bitmap_empty_p (solution);
2517 if (!solution_empty
2518 /* Do not propagate the ESCAPED/CALLUSED solutions. */
2519 && i != escaped_id
2520 && i != callused_id)
2522 bitmap_iterator bi;
2524 /* Propagate solution to all successors. */
2525 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2526 0, j, bi)
2528 bitmap tmp;
2529 bool flag;
2531 unsigned int to = find (j);
2532 tmp = get_varinfo (to)->solution;
2533 flag = false;
2535 /* Don't try to propagate to ourselves. */
2536 if (to == i)
2537 continue;
2539 flag = set_union_with_increment (tmp, pts, 0);
2541 if (flag)
2543 get_varinfo (to)->solution = tmp;
2544 if (!TEST_BIT (changed, to))
2546 SET_BIT (changed, to);
2547 changed_count++;
2554 free_topo_info (ti);
2555 bitmap_obstack_release (&iteration_obstack);
2558 BITMAP_FREE (pts);
2559 sbitmap_free (changed);
2560 bitmap_obstack_release (&oldpta_obstack);
2563 /* Map from trees to variable infos. */
2564 static struct pointer_map_t *vi_for_tree;
2567 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2569 static void
2570 insert_vi_for_tree (tree t, varinfo_t vi)
2572 void **slot = pointer_map_insert (vi_for_tree, t);
2573 gcc_assert (vi);
2574 gcc_assert (*slot == NULL);
2575 *slot = vi;
2578 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2579 exist in the map, return NULL; otherwise return the varinfo we found. */
2581 static varinfo_t
2582 lookup_vi_for_tree (tree t)
2584 void **slot = pointer_map_contains (vi_for_tree, t);
2585 if (slot == NULL)
2586 return NULL;
2588 return (varinfo_t) *slot;
2591 /* Return a printable name for DECL. */
2593 static const char *
2594 alias_get_name (tree decl)
2596 const char *res = get_name (decl);
2597 char *temp;
2598 int num_printed = 0;
2600 if (res != NULL)
2601 return res;
2603 res = "NULL";
2604 if (!dump_file)
2605 return res;
2607 if (TREE_CODE (decl) == SSA_NAME)
2609 num_printed = asprintf (&temp, "%s_%u",
2610 alias_get_name (SSA_NAME_VAR (decl)),
2611 SSA_NAME_VERSION (decl));
2613 else if (DECL_P (decl))
2615 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2617 if (num_printed > 0)
2619 res = ggc_strdup (temp);
2620 free (temp);
2622 return res;
2625 /* Find the variable id for tree T in the map.
2626 If T doesn't exist in the map, create an entry for it and return it. */
2628 static varinfo_t
2629 get_vi_for_tree (tree t)
2631 void **slot = pointer_map_contains (vi_for_tree, t);
2632 if (slot == NULL)
2633 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2635 return (varinfo_t) *slot;
2638 /* Get a constraint expression for a new temporary variable. */
2640 static struct constraint_expr
2641 get_constraint_exp_for_temp (tree t)
2643 struct constraint_expr cexpr;
2645 gcc_assert (SSA_VAR_P (t));
2647 cexpr.type = SCALAR;
2648 cexpr.var = get_vi_for_tree (t)->id;
2649 cexpr.offset = 0;
2651 return cexpr;
2654 /* Get a constraint expression vector from an SSA_VAR_P node.
2655 If address_p is true, the result will have its address taken. */
2657 static void
2658 get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p)
2660 struct constraint_expr cexpr;
2661 varinfo_t vi;
2663 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2664 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2666 /* For parameters, get at the points-to set for the actual parm
2667 decl. */
2668 if (TREE_CODE (t) == SSA_NAME
2669 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2670 && SSA_NAME_IS_DEFAULT_DEF (t))
2672 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2673 return;
2676 vi = get_vi_for_tree (t);
2677 cexpr.var = vi->id;
2678 cexpr.type = SCALAR;
2679 cexpr.offset = 0;
2680 /* If we determine the result is "anything", and we know this is readonly,
2681 say it points to readonly memory instead. */
2682 if (cexpr.var == anything_id && TREE_READONLY (t))
2684 gcc_unreachable ();
2685 cexpr.type = ADDRESSOF;
2686 cexpr.var = readonly_id;
2689 /* If we are not taking the address of the constraint expr, add all
2690 sub-fields of the variable as well. */
2691 if (!address_p)
2693 for (; vi; vi = vi->next)
2695 cexpr.var = vi->id;
2696 VEC_safe_push (ce_s, heap, *results, &cexpr);
2698 return;
2701 VEC_safe_push (ce_s, heap, *results, &cexpr);
2704 /* Process constraint T, performing various simplifications and then
2705 adding it to our list of overall constraints. */
2707 static void
2708 process_constraint (constraint_t t)
2710 struct constraint_expr rhs = t->rhs;
2711 struct constraint_expr lhs = t->lhs;
2713 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2714 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2716 /* ANYTHING == ANYTHING is pointless. */
2717 if (lhs.var == anything_id && rhs.var == anything_id)
2718 return;
2720 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING) */
2721 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
2723 rhs = t->lhs;
2724 t->lhs = t->rhs;
2725 t->rhs = rhs;
2726 process_constraint (t);
2728 /* This can happen in our IR with things like n->a = *p */
2729 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2731 /* Split into tmp = *rhs, *lhs = tmp */
2732 tree rhsdecl = get_varinfo (rhs.var)->decl;
2733 tree pointertype = TREE_TYPE (rhsdecl);
2734 tree pointedtotype = TREE_TYPE (pointertype);
2735 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2736 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
2738 process_constraint (new_constraint (tmplhs, rhs));
2739 process_constraint (new_constraint (lhs, tmplhs));
2741 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2743 /* Split into tmp = &rhs, *lhs = tmp */
2744 tree rhsdecl = get_varinfo (rhs.var)->decl;
2745 tree pointertype = TREE_TYPE (rhsdecl);
2746 tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp");
2747 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
2749 process_constraint (new_constraint (tmplhs, rhs));
2750 process_constraint (new_constraint (lhs, tmplhs));
2752 else
2754 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2755 VEC_safe_push (constraint_t, heap, constraints, t);
2759 /* Return true if T is a variable of a type that could contain
2760 pointers. */
2762 static bool
2763 could_have_pointers (tree t)
2765 tree type = TREE_TYPE (t);
2767 if (POINTER_TYPE_P (type)
2768 || AGGREGATE_TYPE_P (type)
2769 || TREE_CODE (type) == COMPLEX_TYPE)
2770 return true;
2772 return false;
2775 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2776 structure. */
2778 static HOST_WIDE_INT
2779 bitpos_of_field (const tree fdecl)
2782 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2783 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2784 return -1;
2786 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * 8
2787 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2791 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
2792 resulting constraint expressions in *RESULTS. */
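/* As an illustration (a hypothetical example, assuming an LP64 target):

     struct S { int *f0; int *f8; } s;
     int **q = &s.f0 + 1;

   The constraints computed for &s.f0 are ADDRESSOF expressions for the
   sub-field s.f0; adding the 64-bit offset rewrites them to refer to
   the sub-field overlapping that offset, here s.f8.  If PTR is a plain
   pointer SSA name instead, the resulting SCALAR expressions simply
   carry the offset.  */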
2794 static void
2795 get_constraint_for_ptr_offset (tree ptr, tree offset,
2796 VEC (ce_s, heap) **results)
2798 struct constraint_expr *c;
2799 unsigned int j, n;
2800 unsigned HOST_WIDE_INT rhsunitoffset, rhsoffset;
2802 /* If we do not do field-sensitive PTA adding offsets to pointers
2803 does not change the points-to solution. */
2804 if (!use_field_sensitive)
2806 get_constraint_for (ptr, results);
2807 return;
2810 /* If the offset is not a non-negative integer constant that fits
2811 in a HOST_WIDE_INT, we have to fall back to a conservative
2812 solution which includes all sub-fields of all pointed-to
2813 variables of ptr.
2814 ??? As we do not have the ability to express this, fall back
2815 to anything. */
2816 if (!host_integerp (offset, 1))
2818 struct constraint_expr temp;
2819 temp.var = anything_id;
2820 temp.type = SCALAR;
2821 temp.offset = 0;
2822 VEC_safe_push (ce_s, heap, *results, &temp);
2823 return;
2826 /* Make sure the bit-offset also fits. */
2827 rhsunitoffset = TREE_INT_CST_LOW (offset);
2828 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
2829 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
2831 struct constraint_expr temp;
2832 temp.var = anything_id;
2833 temp.type = SCALAR;
2834 temp.offset = 0;
2835 VEC_safe_push (ce_s, heap, *results, &temp);
2836 return;
2839 get_constraint_for (ptr, results);
2840 if (rhsoffset == 0)
2841 return;
2843 /* As we are eventually appending to the solution do not use
2844 VEC_iterate here. */
2845 n = VEC_length (ce_s, *results);
2846 for (j = 0; j < n; j++)
2848 varinfo_t curr;
2849 c = VEC_index (ce_s, *results, j);
2850 curr = get_varinfo (c->var);
2852 if (c->type == ADDRESSOF
2853 && !curr->is_full_var)
2855 varinfo_t temp, curr = get_varinfo (c->var);
2857 /* Search the sub-field which overlaps with the
2858 pointed-to offset. As we deal with positive offsets
2859 only, we can start the search from the current variable. */
2860 temp = first_vi_for_offset (curr, curr->offset + rhsoffset);
2862 /* If the result is outside of the variable we have to provide
2863 a conservative result, as the variable is still reachable
2864 from the resulting pointer (even though it technically
2865 cannot point to anything). The last sub-field is such
2866 a conservative result.
2867 ??? If we always had a sub-field for &object + 1 then
2868 we could represent this in a more precise way. */
2869 if (temp == NULL)
2871 temp = curr;
2872 while (temp->next != NULL)
2873 temp = temp->next;
2874 continue;
2877 /* If the found variable is not exactly at the pointed to
2878 result, we have to include the next variable in the
2879 solution as well. Otherwise two increments by offset / 2
2880 do not result in the same or a conservative superset
2881 solution. */
2882 if (temp->offset != curr->offset + rhsoffset
2883 && temp->next != NULL)
2885 struct constraint_expr c2;
2886 c2.var = temp->next->id;
2887 c2.type = ADDRESSOF;
2888 c2.offset = 0;
2889 VEC_safe_push (ce_s, heap, *results, &c2);
2891 c->var = temp->id;
2892 c->offset = 0;
2894 else if (c->type == ADDRESSOF
2895 /* If this varinfo represents a full variable just use it. */
2896 && curr->is_full_var)
2897 c->offset = 0;
2898 else
2899 c->offset = rhsoffset;
2904 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
2905 If address_p is true the result will have its address taken. */
2907 static void
2908 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
2909 bool address_p)
2911 tree orig_t = t;
2912 HOST_WIDE_INT bitsize = -1;
2913 HOST_WIDE_INT bitmaxsize = -1;
2914 HOST_WIDE_INT bitpos;
2915 tree forzero;
2916 struct constraint_expr *result;
2918 /* Some people like to do cute things like take the address of
2919 &0->a.b */
2920 forzero = t;
2921 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2922 forzero = TREE_OPERAND (forzero, 0);
2924 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2926 struct constraint_expr temp;
2928 temp.offset = 0;
2929 temp.var = integer_id;
2930 temp.type = SCALAR;
2931 VEC_safe_push (ce_s, heap, *results, &temp);
2932 return;
2935 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2937 /* Pretend to take the address of the base, we'll take care of
2938 adding the required subset of sub-fields below. */
2939 get_constraint_for_1 (t, results, true);
2940 gcc_assert (VEC_length (ce_s, *results) == 1);
2941 result = VEC_last (ce_s, *results);
2943 /* This can also happen due to weird offsetof type macros. */
2944 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2945 result->type = SCALAR;
2947 if (result->type == SCALAR
2948 && get_varinfo (result->var)->is_full_var)
2949 /* For single-field vars do not bother about the offset. */
2950 result->offset = 0;
2951 else if (result->type == SCALAR)
2953 /* In languages like C, you can access one past the end of an
2954 array. You aren't allowed to dereference it, so we can
2955 ignore this constraint. When we handle pointer subtraction,
2956 we may have to do something cute here. */
2958 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result->var)->fullsize
2959 && bitmaxsize != 0)
2961 /* It's also not true that the constraint will actually start at the
2962 right offset; it may start in some padding. We only care about
2963 setting the constraint to the first actual field it touches, so
2964 walk to find it. */
2965 struct constraint_expr cexpr = *result;
2966 varinfo_t curr;
2967 VEC_pop (ce_s, *results);
2968 cexpr.offset = 0;
2969 for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
2971 if (ranges_overlap_p (curr->offset, curr->size,
2972 bitpos, bitmaxsize))
2974 cexpr.var = curr->id;
2975 VEC_safe_push (ce_s, heap, *results, &cexpr);
2976 if (address_p)
2977 break;
2980 /* If we are going to take the address of this field then
2981 to be able to compute reachability correctly add at least
2982 the last field of the variable. */
2983 if (address_p
2984 && VEC_length (ce_s, *results) == 0)
2986 curr = get_varinfo (cexpr.var);
2987 while (curr->next != NULL)
2988 curr = curr->next;
2989 cexpr.var = curr->id;
2990 VEC_safe_push (ce_s, heap, *results, &cexpr);
2992 else
2993 /* Assert that we found *some* field there; the user can't be
2994 accessing *only* padding. The one exception is an access one
2995 past the end of an array embedded in a struct, which may end
2996 up touching only padding, hence the array-ref test below. */
2997 gcc_assert (VEC_length (ce_s, *results) >= 1
2998 || ref_contains_array_ref (orig_t));
3000 else if (bitmaxsize == 0)
3002 if (dump_file && (dump_flags & TDF_DETAILS))
3003 fprintf (dump_file, "Access to zero-sized part of variable,"
3004 "ignoring\n");
3006 else
3007 if (dump_file && (dump_flags & TDF_DETAILS))
3008 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3010 else if (bitmaxsize == -1)
3012 /* We can't handle DEREF constraints with unknown size, we'll
3013 get the wrong answer. Punt and return anything. */
3014 result->var = anything_id;
3015 result->offset = 0;
3017 else
3018 result->offset = bitpos;
3022 /* Dereference the constraint expression CONS, and return the result.
3023 DEREF (ADDRESSOF) = SCALAR
3024 DEREF (SCALAR) = DEREF
3025 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3026 This is needed so that we can handle dereferencing DEREF constraints. */
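/* For example, for a memory reference *p the constraints for p are
   computed first (SCALAR p) and do_deref then turns them into DEREF p.
   Dereferencing a constraint that is already a DEREF introduces the
   temporary, i.e. tmp = *p; result = *tmp.  */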
3028 static void
3029 do_deref (VEC (ce_s, heap) **constraints)
3031 struct constraint_expr *c;
3032 unsigned int i = 0;
3034 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
3036 if (c->type == SCALAR)
3037 c->type = DEREF;
3038 else if (c->type == ADDRESSOF)
3039 c->type = SCALAR;
3040 else if (c->type == DEREF)
3042 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
3043 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
3044 process_constraint (new_constraint (tmplhs, *c));
3045 c->var = tmplhs.var;
3047 else
3048 gcc_unreachable ();
3052 /* Given a tree T, return the constraint expression for it. */
3054 static void
3055 get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
3057 struct constraint_expr temp;
3059 /* x = integer is all glommed to a single variable, which doesn't
3060 point to anything by itself. That is, of course, unless it is an
3061 integer constant being treated as a pointer, in which case, we
3062 will return that this is really the addressof anything. This
3063 happens below, since it will fall into the default case. The only
3064 case we know something about an integer treated like a pointer is
3065 when it is the NULL pointer, and then we just say it points to
3066 NULL. */
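/* Illustratively, for "p = (int *) 0;" the RHS constraint becomes
   ADDRESSOF NOTHING, while for "p = (int *) 0x1234;" the constant is
   not handled by any case below and falls through to the default,
   giving ADDRESSOF ANYTHING.  */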
3067 if (TREE_CODE (t) == INTEGER_CST
3068 && integer_zerop (t))
3070 temp.var = nothing_id;
3071 temp.type = ADDRESSOF;
3072 temp.offset = 0;
3073 VEC_safe_push (ce_s, heap, *results, &temp);
3074 return;
3077 /* String constants are read-only. */
3078 if (TREE_CODE (t) == STRING_CST)
3080 temp.var = readonly_id;
3081 temp.type = SCALAR;
3082 temp.offset = 0;
3083 VEC_safe_push (ce_s, heap, *results, &temp);
3084 return;
3087 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3089 case tcc_expression:
3091 switch (TREE_CODE (t))
3093 case ADDR_EXPR:
3095 struct constraint_expr *c;
3096 unsigned int i;
3097 tree exp = TREE_OPERAND (t, 0);
3099 get_constraint_for_1 (exp, results, true);
3101 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
3103 if (c->type == DEREF)
3104 c->type = SCALAR;
3105 else
3106 c->type = ADDRESSOF;
3108 return;
3110 break;
3111 default:;
3113 break;
3115 case tcc_reference:
3117 switch (TREE_CODE (t))
3119 case INDIRECT_REF:
3121 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
3122 do_deref (results);
3123 return;
3125 case ARRAY_REF:
3126 case ARRAY_RANGE_REF:
3127 case COMPONENT_REF:
3128 get_constraint_for_component_ref (t, results, address_p);
3129 return;
3130 default:;
3132 break;
3134 case tcc_exceptional:
3136 switch (TREE_CODE (t))
3138 case SSA_NAME:
3140 get_constraint_for_ssa_var (t, results, address_p);
3141 return;
3143 default:;
3145 break;
3147 case tcc_declaration:
3149 get_constraint_for_ssa_var (t, results, address_p);
3150 return;
3152 default:;
3155 /* The default fallback is a constraint from anything. */
3156 temp.type = ADDRESSOF;
3157 temp.var = anything_id;
3158 temp.offset = 0;
3159 VEC_safe_push (ce_s, heap, *results, &temp);
3162 /* Given a gimple tree T, return the constraint expression vector for it. */
3164 static void
3165 get_constraint_for (tree t, VEC (ce_s, heap) **results)
3167 gcc_assert (VEC_length (ce_s, *results) == 0);
3169 get_constraint_for_1 (t, results, false);
3172 /* Handle the structure copy case where we have a simple structure copy
3173 between LHS and RHS that is of SIZE (in bits)
3175 For each field of the lhs variable (lhsfield)
3176 For each field of the rhs variable at lhsfield.offset (rhsfield)
3177 add the constraint lhsfield = rhsfield
3179 If we fail due to some kind of type unsafety or other thing we
3180 can't handle, return false. We expect the caller to collapse the
3181 variable in that case. */
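/* Illustratively, for a hypothetical

     struct T { int *a; int *b; } x, y;
     ...
     x = y;

   this produces the field-wise constraints x.a = y.a and x.b = y.b,
   pairing each field of the LHS with the RHS field found at the same
   offset.  */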
3183 static bool
3184 do_simple_structure_copy (const struct constraint_expr lhs,
3185 const struct constraint_expr rhs,
3186 const unsigned HOST_WIDE_INT size)
3188 varinfo_t p = get_varinfo (lhs.var);
3189 unsigned HOST_WIDE_INT pstart, last;
3190 pstart = p->offset;
3191 last = p->offset + size;
3192 for (; p && p->offset < last; p = p->next)
3194 varinfo_t q;
3195 struct constraint_expr templhs = lhs;
3196 struct constraint_expr temprhs = rhs;
3197 unsigned HOST_WIDE_INT fieldoffset;
3199 templhs.var = p->id;
3200 q = get_varinfo (temprhs.var);
3201 fieldoffset = p->offset - pstart;
3202 q = first_vi_for_offset (q, q->offset + fieldoffset);
3203 if (!q)
3204 return false;
3205 temprhs.var = q->id;
3206 process_constraint (new_constraint (templhs, temprhs));
3208 return true;
3212 /* Handle the structure copy case where we have a structure copy between an
3213 aggregate on the LHS and a dereference of a pointer on the RHS
3214 that is of SIZE (in bits)
3216 For each field of the lhs variable (lhsfield)
3217 rhs.offset = lhsfield->offset
3218 add the constraint lhsfield = rhs
3221 static void
3222 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
3223 const struct constraint_expr rhs,
3224 const unsigned HOST_WIDE_INT size)
3226 varinfo_t p = get_varinfo (lhs.var);
3227 unsigned HOST_WIDE_INT pstart,last;
3228 pstart = p->offset;
3229 last = p->offset + size;
3231 for (; p && p->offset < last; p = p->next)
3233 varinfo_t q;
3234 struct constraint_expr templhs = lhs;
3235 struct constraint_expr temprhs = rhs;
3236 unsigned HOST_WIDE_INT fieldoffset;
3239 if (templhs.type == SCALAR)
3240 templhs.var = p->id;
3241 else
3242 templhs.offset = p->offset;
3244 q = get_varinfo (temprhs.var);
3245 fieldoffset = p->offset - pstart;
3246 temprhs.offset += fieldoffset;
3247 process_constraint (new_constraint (templhs, temprhs));
3251 /* Handle the structure copy case where we have a structure copy
3252 between an aggregate on the RHS and a dereference of a pointer on
3253 the LHS that is of SIZE (in bits)
3255 For each field of the rhs variable (rhsfield)
3256 lhs.offset = rhsfield->offset
3257 add the constraint lhs = rhsfield
3260 static void
3261 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
3262 const struct constraint_expr rhs,
3263 const unsigned HOST_WIDE_INT size)
3265 varinfo_t p = get_varinfo (rhs.var);
3266 unsigned HOST_WIDE_INT pstart,last;
3267 pstart = p->offset;
3268 last = p->offset + size;
3270 for (; p && p->offset < last; p = p->next)
3272 varinfo_t q;
3273 struct constraint_expr templhs = lhs;
3274 struct constraint_expr temprhs = rhs;
3275 unsigned HOST_WIDE_INT fieldoffset;
3278 if (temprhs.type == SCALAR)
3279 temprhs.var = p->id;
3280 else
3281 temprhs.offset = p->offset;
3283 q = get_varinfo (templhs.var);
3284 fieldoffset = p->offset - pstart;
3285 templhs.offset += fieldoffset;
3286 process_constraint (new_constraint (templhs, temprhs));
3290 /* Sometimes, frontends like to give us bad type information. This
3291 function will collapse all the fields from VAR to the end of VAR,
3292 into VAR, so that we treat those fields as a single variable.
3293 We return the variable they were collapsed into. */
3295 static unsigned int
3296 collapse_rest_of_var (unsigned int var)
3298 varinfo_t currvar = get_varinfo (var);
3299 varinfo_t field;
3301 for (field = currvar->next; field; field = field->next)
3303 if (dump_file)
3304 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
3305 field->name, currvar->name);
3307 gcc_assert (field->collapsed_to == 0);
3308 field->collapsed_to = currvar->id;
3311 currvar->next = NULL;
3312 currvar->size = currvar->fullsize - currvar->offset;
3314 return currvar->id;
3317 /* Handle aggregate copies by expanding into copies of the respective
3318 fields of the structures. */
3320 static void
3321 do_structure_copy (tree lhsop, tree rhsop)
3323 struct constraint_expr lhs, rhs, tmp;
3324 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3325 varinfo_t p;
3326 unsigned HOST_WIDE_INT lhssize;
3327 unsigned HOST_WIDE_INT rhssize;
3329 /* Pretend we are taking the address of the constraint exprs.
3330 We deal with walking the sub-fields ourselves. */
3331 get_constraint_for_1 (lhsop, &lhsc, true);
3332 get_constraint_for_1 (rhsop, &rhsc, true);
3333 gcc_assert (VEC_length (ce_s, lhsc) == 1);
3334 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3335 lhs = *(VEC_last (ce_s, lhsc));
3336 rhs = *(VEC_last (ce_s, rhsc));
3338 VEC_free (ce_s, heap, lhsc);
3339 VEC_free (ce_s, heap, rhsc);
3341 /* If we have special var = x, swap it around. */
3342 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
3344 tmp = lhs;
3345 lhs = rhs;
3346 rhs = tmp;
3349 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
3350 possible it's something we could handle. However, most cases falling
3351 into this are dealing with transparent unions, which are slightly
3352 weird. */
3353 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
3355 rhs.type = ADDRESSOF;
3356 rhs.var = anything_id;
3359 /* If the RHS is a special var, or an addressof, set all the LHS fields to
3360 that special var. */
3361 if (rhs.var <= integer_id)
3363 for (p = get_varinfo (lhs.var); p; p = p->next)
3365 struct constraint_expr templhs = lhs;
3366 struct constraint_expr temprhs = rhs;
3368 if (templhs.type == SCALAR )
3369 templhs.var = p->id;
3370 else
3371 templhs.offset += p->offset;
3372 process_constraint (new_constraint (templhs, temprhs));
3375 else
3377 tree rhstype = TREE_TYPE (rhsop);
3378 tree lhstype = TREE_TYPE (lhsop);
3379 tree rhstypesize;
3380 tree lhstypesize;
3382 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
3383 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
3385 /* If we have variably sized types on the rhs or lhs, and a deref
3386 constraint, add the constraint, lhsconstraint = &ANYTHING.
3387 This is conservatively correct because either the lhs is an unknown
3388 sized var (if the constraint is SCALAR), or the lhs is a DEREF
3389 constraint, and every variable it can point to must be unknown sized
3390 anyway, so we don't need to worry about fields at all. */
3391 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
3392 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
3394 rhs.var = anything_id;
3395 rhs.type = ADDRESSOF;
3396 rhs.offset = 0;
3397 process_constraint (new_constraint (lhs, rhs));
3398 return;
3401 /* The size only really matters insofar as we don't set more or less of
3402 the variable. If we hit an unknown size var, the size should be the
3403 whole darn thing. */
3404 if (get_varinfo (rhs.var)->is_unknown_size_var)
3405 rhssize = ~0;
3406 else
3407 rhssize = TREE_INT_CST_LOW (rhstypesize);
3409 if (get_varinfo (lhs.var)->is_unknown_size_var)
3410 lhssize = ~0;
3411 else
3412 lhssize = TREE_INT_CST_LOW (lhstypesize);
3415 if (rhs.type == SCALAR && lhs.type == SCALAR)
3417 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
3419 lhs.var = collapse_rest_of_var (lhs.var);
3420 rhs.var = collapse_rest_of_var (rhs.var);
3421 lhs.offset = 0;
3422 rhs.offset = 0;
3423 lhs.type = SCALAR;
3424 rhs.type = SCALAR;
3425 process_constraint (new_constraint (lhs, rhs));
3428 else if (lhs.type != DEREF && rhs.type == DEREF)
3429 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3430 else if (lhs.type == DEREF && rhs.type != DEREF)
3431 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3432 else
3434 tree pointedtotype = lhstype;
3435 tree tmpvar;
3437 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
3438 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
3439 do_structure_copy (tmpvar, rhsop);
3440 do_structure_copy (lhsop, tmpvar);
3445 /* Create a constraint ID = OP. */
3447 static void
3448 make_constraint_to (unsigned id, tree op)
3450 VEC(ce_s, heap) *rhsc = NULL;
3451 struct constraint_expr *c;
3452 struct constraint_expr includes;
3453 unsigned int j;
3455 includes.var = id;
3456 includes.offset = 0;
3457 includes.type = SCALAR;
3459 get_constraint_for (op, &rhsc);
3460 for (j = 0; VEC_iterate (ce_s, rhsc, j, c); j++)
3461 process_constraint (new_constraint (includes, *c));
3462 VEC_free (ce_s, heap, rhsc);
3465 /* Make constraints necessary to make OP escape. */
3467 static void
3468 make_escape_constraint (tree op)
3470 make_constraint_to (escaped_id, op);
3473 /* For non-IPA mode, generate constraints necessary for a call on the
3474 RHS. */
3476 static void
3477 handle_rhs_call (gimple stmt)
3479 unsigned i;
3481 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3483 tree arg = gimple_call_arg (stmt, i);
3485 /* Find those pointers being passed, and make sure they end up
3486 pointing to anything. */
3487 if (could_have_pointers (arg))
3488 make_escape_constraint (arg);
3491 /* The static chain escapes as well. */
3492 if (gimple_call_chain (stmt))
3493 make_escape_constraint (gimple_call_chain (stmt));
3496 /* For non-IPA mode, generate constraints necessary for a call
3497 that returns a pointer and assigns it to LHS. This simply makes
3498 the LHS point to global and escaped variables. */
3500 static void
3501 handle_lhs_call (tree lhs, int flags)
3503 VEC(ce_s, heap) *lhsc = NULL;
3504 struct constraint_expr rhsc;
3505 unsigned int j;
3506 struct constraint_expr *lhsp;
3508 get_constraint_for (lhs, &lhsc);
3510 if (flags & ECF_MALLOC)
3512 tree heapvar = heapvar_lookup (lhs);
3513 varinfo_t vi;
3515 if (heapvar == NULL)
3517 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
3518 DECL_EXTERNAL (heapvar) = 1;
3519 get_var_ann (heapvar)->is_heapvar = 1;
3520 if (gimple_referenced_vars (cfun))
3521 add_referenced_var (heapvar);
3522 heapvar_insert (lhs, heapvar);
3525 rhsc.var = create_variable_info_for (heapvar,
3526 alias_get_name (heapvar));
3527 vi = get_varinfo (rhsc.var);
3528 vi->is_artificial_var = 1;
3529 vi->is_heap_var = 1;
3530 rhsc.type = ADDRESSOF;
3531 rhsc.offset = 0;
3533 else
3535 rhsc.var = escaped_id;
3536 rhsc.offset = 0;
3537 rhsc.type = ADDRESSOF;
3539 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3540 process_constraint (new_constraint (*lhsp, rhsc));
3541 VEC_free (ce_s, heap, lhsc);
3544 /* For non-IPA mode, generate constraints necessary for a call of a
3545 const function that returns a pointer in the statement STMT. */
3547 static void
3548 handle_const_call (gimple stmt)
3550 tree lhs = gimple_call_lhs (stmt);
3551 VEC(ce_s, heap) *lhsc = NULL;
3552 struct constraint_expr rhsc;
3553 unsigned int j, k;
3554 struct constraint_expr *lhsp;
3555 tree tmpvar;
3556 struct constraint_expr tmpc;
3558 get_constraint_for (lhs, &lhsc);
3560 /* If this is a nested function then it can return anything. */
3561 if (gimple_call_chain (stmt))
3563 rhsc.var = anything_id;
3564 rhsc.offset = 0;
3565 rhsc.type = ADDRESSOF;
3566 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3567 process_constraint (new_constraint (*lhsp, rhsc));
3568 VEC_free (ce_s, heap, lhsc);
3569 return;
3572 /* We always use a temporary here; otherwise we end up with a quadratic
3573 amount of constraints for
3574 large_struct = const_call (large_struct);
3575 in field-sensitive PTA. */
3576 tmpvar = create_tmp_var_raw (ptr_type_node, "consttmp");
3577 tmpc = get_constraint_exp_for_temp (tmpvar);
3579 /* May return addresses of globals. */
3580 rhsc.var = nonlocal_id;
3581 rhsc.offset = 0;
3582 rhsc.type = ADDRESSOF;
3583 process_constraint (new_constraint (tmpc, rhsc));
3585 /* May return arguments. */
3586 for (k = 0; k < gimple_call_num_args (stmt); ++k)
3588 tree arg = gimple_call_arg (stmt, k);
3590 if (could_have_pointers (arg))
3592 VEC(ce_s, heap) *argc = NULL;
3593 struct constraint_expr *argp;
3594 int i;
3596 get_constraint_for (arg, &argc);
3597 for (i = 0; VEC_iterate (ce_s, argc, i, argp); i++)
3598 process_constraint (new_constraint (tmpc, *argp));
3599 VEC_free (ce_s, heap, argc);
3603 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3604 process_constraint (new_constraint (*lhsp, tmpc));
3606 VEC_free (ce_s, heap, lhsc);
3609 /* For non-IPA mode, generate constraints necessary for a call to a
3610 pure function in statement STMT. */
3612 static void
3613 handle_pure_call (gimple stmt)
3615 unsigned i;
3617 /* Memory reached from pointer arguments is call-used. */
3618 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3620 tree arg = gimple_call_arg (stmt, i);
3622 if (could_have_pointers (arg))
3623 make_constraint_to (callused_id, arg);
3626 /* The static chain is used as well. */
3627 if (gimple_call_chain (stmt))
3628 make_constraint_to (callused_id, gimple_call_chain (stmt));
3630 /* If the call returns a pointer it may point to reachable memory
3631 from the arguments. Not so for malloc functions though. */
3632 if (gimple_call_lhs (stmt)
3633 && could_have_pointers (gimple_call_lhs (stmt))
3634 && !(gimple_call_flags (stmt) & ECF_MALLOC))
3636 tree lhs = gimple_call_lhs (stmt);
3637 VEC(ce_s, heap) *lhsc = NULL;
3638 struct constraint_expr rhsc;
3639 struct constraint_expr *lhsp;
3640 unsigned j;
3642 get_constraint_for (lhs, &lhsc);
3644 /* If this is a nested function then it can return anything. */
3645 if (gimple_call_chain (stmt))
3647 rhsc.var = anything_id;
3648 rhsc.offset = 0;
3649 rhsc.type = ADDRESSOF;
3650 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3651 process_constraint (new_constraint (*lhsp, rhsc));
3652 VEC_free (ce_s, heap, lhsc);
3653 return;
3656 /* Else just add the call-used memory here. Escaped variables
3657 and globals will be dealt with in handle_lhs_call. */
3658 rhsc.var = callused_id;
3659 rhsc.offset = 0;
3660 rhsc.type = ADDRESSOF;
3661 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3662 process_constraint (new_constraint (*lhsp, rhsc));
3663 VEC_free (ce_s, heap, lhsc);
3667 /* Walk statement T setting up aliasing constraints according to the
3668 references found in T. This function is the main part of the
3669 constraint builder. */
3672 static void
3673 find_func_aliases (gimple origt)
3675 gimple t = origt;
3676 VEC(ce_s, heap) *lhsc = NULL;
3677 VEC(ce_s, heap) *rhsc = NULL;
3678 struct constraint_expr *c;
3679 enum escape_type stmt_escape_type;
3681 /* Now build constraints expressions. */
3682 if (gimple_code (t) == GIMPLE_PHI)
3684 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (gimple_phi_result (t))));
3686 /* Only care about pointers and structures containing
3687 pointers. */
3688 if (could_have_pointers (gimple_phi_result (t)))
3690 size_t i;
3691 unsigned int j;
3693 /* For a phi node, assign all the arguments to
3694 the result. */
3695 get_constraint_for (gimple_phi_result (t), &lhsc);
3696 for (i = 0; i < gimple_phi_num_args (t); i++)
3698 tree rhstype;
3699 tree strippedrhs = PHI_ARG_DEF (t, i);
3701 STRIP_NOPS (strippedrhs);
3702 rhstype = TREE_TYPE (strippedrhs);
3703 get_constraint_for (gimple_phi_arg_def (t, i), &rhsc);
3705 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3707 struct constraint_expr *c2;
3708 while (VEC_length (ce_s, rhsc) > 0)
3710 c2 = VEC_last (ce_s, rhsc);
3711 process_constraint (new_constraint (*c, *c2));
3712 VEC_pop (ce_s, rhsc);
3718 /* In IPA mode, we need to generate constraints to pass call
3719 arguments through their calls. There are two cases: either a
3720 GIMPLE_CALL returning a value, or just a plain GIMPLE_CALL when
3721 it does not return one.
3723 In non-ipa mode, we need to generate constraints for each
3724 pointer passed by address. */
3725 else if (is_gimple_call (t))
3727 if (!in_ipa_mode)
3729 int flags = gimple_call_flags (t);
3731 /* Const functions can return their arguments and addresses
3732 of global memory but not of escaped memory. */
3733 if (flags & ECF_CONST)
3735 if (gimple_call_lhs (t)
3736 && could_have_pointers (gimple_call_lhs (t)))
3737 handle_const_call (t);
3739 /* Pure functions can return addresses in and of memory
3740 reachable from their arguments, but they are not an escape
3741 point for reachable memory of their arguments. */
3742 else if (flags & ECF_PURE)
3744 handle_pure_call (t);
3745 if (gimple_call_lhs (t)
3746 && could_have_pointers (gimple_call_lhs (t)))
3747 handle_lhs_call (gimple_call_lhs (t), flags);
3749 else
3751 handle_rhs_call (t);
3752 if (gimple_call_lhs (t)
3753 && could_have_pointers (gimple_call_lhs (t)))
3754 handle_lhs_call (gimple_call_lhs (t), flags);
3757 else
3759 tree lhsop;
3760 varinfo_t fi;
3761 int i = 1;
3762 size_t j;
3763 tree decl;
3765 lhsop = gimple_call_lhs (t);
3766 decl = gimple_call_fndecl (t);
3768 /* If we can directly resolve the function being called, do so.
3769 Otherwise, it must be some sort of indirect expression that
3770 we should still be able to handle. */
3771 if (decl)
3772 fi = get_vi_for_tree (decl);
3773 else
3775 decl = gimple_call_fn (t);
3776 fi = get_vi_for_tree (decl);
3779 /* Assign all the passed arguments to the appropriate incoming
3780 parameters of the function. */
3781 for (j = 0; j < gimple_call_num_args (t); j++)
3783 struct constraint_expr lhs ;
3784 struct constraint_expr *rhsp;
3785 tree arg = gimple_call_arg (t, j);
3787 get_constraint_for (arg, &rhsc);
3788 if (TREE_CODE (decl) != FUNCTION_DECL)
3790 lhs.type = DEREF;
3791 lhs.var = fi->id;
3792 lhs.offset = i;
3794 else
3796 lhs.type = SCALAR;
3797 lhs.var = first_vi_for_offset (fi, i)->id;
3798 lhs.offset = 0;
3800 while (VEC_length (ce_s, rhsc) != 0)
3802 rhsp = VEC_last (ce_s, rhsc);
3803 process_constraint (new_constraint (lhs, *rhsp));
3804 VEC_pop (ce_s, rhsc);
3806 i++;
3809 /* If we are returning a value, assign it to the result. */
3810 if (lhsop)
3812 struct constraint_expr rhs;
3813 struct constraint_expr *lhsp;
3814 unsigned int j = 0;
3816 get_constraint_for (lhsop, &lhsc);
3817 if (TREE_CODE (decl) != FUNCTION_DECL)
3819 rhs.type = DEREF;
3820 rhs.var = fi->id;
3821 rhs.offset = i;
3823 else
3825 rhs.type = SCALAR;
3826 rhs.var = first_vi_for_offset (fi, i)->id;
3827 rhs.offset = 0;
3829 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3830 process_constraint (new_constraint (*lhsp, rhs));
3834 /* Otherwise, just a regular assignment statement. Only care about
3835 operations with a pointer result; others are dealt with as escape
3836 points if they have pointer operands. */
3837 else if (is_gimple_assign (t)
3838 && could_have_pointers (gimple_assign_lhs (t)))
3840 /* Otherwise, just a regular assignment statement. */
3841 tree lhsop = gimple_assign_lhs (t);
3842 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
3844 if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
3845 do_structure_copy (lhsop, rhsop);
3846 else
3848 unsigned int j;
3849 struct constraint_expr temp;
3850 get_constraint_for (lhsop, &lhsc);
3852 if (gimple_assign_rhs_code (t) == POINTER_PLUS_EXPR)
3853 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
3854 gimple_assign_rhs2 (t), &rhsc);
3855 else if ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
3856 && !(POINTER_TYPE_P (gimple_expr_type (t))
3857 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
3858 || gimple_assign_single_p (t))
3859 get_constraint_for (rhsop, &rhsc);
3860 else
3862 temp.type = ADDRESSOF;
3863 temp.var = anything_id;
3864 temp.offset = 0;
3865 VEC_safe_push (ce_s, heap, rhsc, &temp);
3867 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3869 struct constraint_expr *c2;
3870 unsigned int k;
3872 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3873 process_constraint (new_constraint (*c, *c2));
3877 else if (gimple_code (t) == GIMPLE_CHANGE_DYNAMIC_TYPE)
3879 unsigned int j;
3881 get_constraint_for (gimple_cdt_location (t), &lhsc);
3882 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
3883 get_varinfo (c->var)->no_tbaa_pruning = true;
3886 stmt_escape_type = is_escape_site (t);
3887 if (stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
3889 gcc_assert (is_gimple_assign (t));
3890 if (gimple_assign_rhs_code (t) == ADDR_EXPR)
3892 tree rhs = gimple_assign_rhs1 (t);
3893 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3894 if (base
3895 && (!DECL_P (base)
3896 || !is_global_var (base)))
3897 make_escape_constraint (rhs);
3899 else if (get_gimple_rhs_class (gimple_assign_rhs_code (t))
3900 == GIMPLE_SINGLE_RHS)
3902 if (could_have_pointers (gimple_assign_rhs1 (t)))
3903 make_escape_constraint (gimple_assign_rhs1 (t));
3905 else
3906 gcc_unreachable ();
3908 else if (stmt_escape_type == ESCAPE_BAD_CAST)
3910 gcc_assert (is_gimple_assign (t));
3911 gcc_assert (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
3912 || gimple_assign_rhs_code (t) == VIEW_CONVERT_EXPR);
3913 make_escape_constraint (gimple_assign_rhs1 (t));
3915 else if (stmt_escape_type == ESCAPE_TO_ASM)
3917 unsigned i;
3918 for (i = 0; i < gimple_asm_noutputs (t); ++i)
3920 tree op = TREE_VALUE (gimple_asm_output_op (t, i));
3921 if (op && could_have_pointers (op))
3922 /* Strictly we'd only need the constraints from ESCAPED and
3923 NONLOCAL. */
3924 make_escape_constraint (op);
3926 for (i = 0; i < gimple_asm_ninputs (t); ++i)
3928 tree op = TREE_VALUE (gimple_asm_input_op (t, i));
3929 if (op && could_have_pointers (op))
3930 /* Strictly we'd only need the constraint to ESCAPED. */
3931 make_escape_constraint (op);
3935 /* After promoting variables and computing aliasing we will
3936 need to re-scan most statements. FIXME: Try to minimize the
3937 number of statements re-scanned. It's not really necessary to
3938 re-scan *all* statements. */
3939 if (!in_ipa_mode)
3940 gimple_set_modified (origt, true);
3941 VEC_free (ce_s, heap, rhsc);
3942 VEC_free (ce_s, heap, lhsc);
3946 /* Find the first varinfo in the same variable as START that overlaps with
3947 OFFSET.
3948 Effectively, walk the chain of fields for the variable START to find the
3949 first field that overlaps with OFFSET.
3950 Return NULL if we can't find one. */
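/* For example, if START has fields at bit offsets 0, 64 and 96 with
   sizes 64, 32 and 32, an OFFSET of 70 falls inside the second field
   and that varinfo is returned, while an OFFSET of 200 is past the
   last field and NULL is returned.  */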
3952 static varinfo_t
3953 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3955 varinfo_t curr = start;
3956 while (curr)
3958 /* We may not find a variable in the field list with the actual
3959 offset when we have glommed a structure to a variable.
3960 In that case, however, offset should still be within the size
3961 of the variable. */
3962 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3963 return curr;
3964 curr = curr->next;
3966 return NULL;
3970 /* Insert the varinfo FIELD into the field list for BASE, at the front
3971 of the list. */
3973 static void
3974 insert_into_field_list (varinfo_t base, varinfo_t field)
3976 varinfo_t prev = base;
3977 varinfo_t curr = base->next;
3979 field->next = curr;
3980 prev->next = field;
3983 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3984 offset. */
3986 static void
3987 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3989 varinfo_t prev = base;
3990 varinfo_t curr = base->next;
3992 if (curr == NULL)
3994 prev->next = field;
3995 field->next = NULL;
3997 else
3999 while (curr)
4001 if (field->offset <= curr->offset)
4002 break;
4003 prev = curr;
4004 curr = curr->next;
4006 field->next = prev->next;
4007 prev->next = field;
4011 /* This structure is used during pushing fields onto the fieldstack
4012 to track the offset of the field, since bitpos_of_field gives it
4013 relative to its immediate containing type, and we want it relative
4014 to the ultimate containing object. */
4016 struct fieldoff
4019 /* Offset from the base of the ultimate containing object to this field. */
4019 HOST_WIDE_INT offset;
4021 /* Size, in bits, of the field. */
4022 unsigned HOST_WIDE_INT size;
4024 unsigned has_unknown_size : 1;
4026 unsigned may_have_pointers : 1;
4028 typedef struct fieldoff fieldoff_s;
4030 DEF_VEC_O(fieldoff_s);
4031 DEF_VEC_ALLOC_O(fieldoff_s,heap);
4033 /* qsort comparison function for two fieldoff's PA and PB */
4035 static int
4036 fieldoff_compare (const void *pa, const void *pb)
4038 const fieldoff_s *foa = (const fieldoff_s *)pa;
4039 const fieldoff_s *fob = (const fieldoff_s *)pb;
4040 unsigned HOST_WIDE_INT foasize, fobsize;
4042 if (foa->offset < fob->offset)
4043 return -1;
4044 else if (foa->offset > fob->offset)
4045 return 1;
4047 foasize = foa->size;
4048 fobsize = fob->size;
4049 if (foasize < fobsize)
4050 return -1;
4051 else if (foasize > fobsize)
4052 return 1;
4053 return 0;
4056 /* Sort a fieldstack according to the field offset and sizes. */
4057 static void
4058 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
4060 qsort (VEC_address (fieldoff_s, fieldstack),
4061 VEC_length (fieldoff_s, fieldstack),
4062 sizeof (fieldoff_s),
4063 fieldoff_compare);
4066 /* Return true if V is a tree that we can have subvars for.
4067 Normally, this is any aggregate type. Also complex
4068 types which are not gimple registers can have subvars. */
4070 static inline bool
4071 var_can_have_subvars (const_tree v)
4073 /* Volatile variables should never have subvars. */
4074 if (TREE_THIS_VOLATILE (v))
4075 return false;
4077 /* Non-decls and memory tags can never have subvars. */
4078 if (!DECL_P (v) || MTAG_P (v))
4079 return false;
4081 /* Aggregates without overlapping fields can have subvars. */
4082 if (TREE_CODE (TREE_TYPE (v)) == RECORD_TYPE)
4083 return true;
4085 return false;
4088 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
4089 the fields of TYPE onto fieldstack, recording their offsets along
4090 the way.
4092 OFFSET is used to keep track of the offset in this entire
4093 structure, rather than just the immediately containing structure.
4094 Returns the number of fields pushed. */
4096 static int
4097 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4098 HOST_WIDE_INT offset)
4100 tree field;
4101 int count = 0;
4103 if (TREE_CODE (type) != RECORD_TYPE)
4104 return 0;
4106 /* If the vector of fields is growing too big, bail out early.
4107 Callers check that VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, so
4108 make sure that check fails by not pushing any more fields. */
4109 if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
4110 return 0;
4112 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4113 if (TREE_CODE (field) == FIELD_DECL)
4115 bool push = false;
4116 int pushed = 0;
4117 HOST_WIDE_INT foff = bitpos_of_field (field);
4119 if (!var_can_have_subvars (field)
4120 || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4121 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
4122 push = true;
4123 else if (!(pushed = push_fields_onto_fieldstack
4124 (TREE_TYPE (field), fieldstack, offset + foff))
4125 && (DECL_SIZE (field)
4126 && !integer_zerop (DECL_SIZE (field))))
4127 /* Empty structures may have a nonzero size, as in C++. So
4128 if we did not push any subfields and the size is
4129 nonzero, push the field itself onto the stack. */
4130 push = true;
4132 if (push)
4134 fieldoff_s *pair = NULL;
4135 bool has_unknown_size = false;
4137 if (!VEC_empty (fieldoff_s, *fieldstack))
4138 pair = VEC_last (fieldoff_s, *fieldstack);
4140 if (!DECL_SIZE (field)
4141 || !host_integerp (DECL_SIZE (field), 1))
4142 has_unknown_size = true;
4144 /* If adjacent fields do not contain pointers, merge them. */
4145 if (pair
4146 && !pair->may_have_pointers
4147 && !could_have_pointers (field)
4148 && !pair->has_unknown_size
4149 && !has_unknown_size
4150 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
4152 pair = VEC_last (fieldoff_s, *fieldstack);
4153 pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
4155 else
4157 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4158 pair->offset = offset + foff;
4159 pair->has_unknown_size = has_unknown_size;
4160 if (!has_unknown_size)
4161 pair->size = TREE_INT_CST_LOW (DECL_SIZE (field));
4162 else
4163 pair->size = -1;
4164 pair->may_have_pointers = could_have_pointers (field);
4165 count++;
4168 else
4169 count += pushed;
4172 return count;
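/* Illustrative sketch, not part of the original source: for a hypothetical

       struct T { int a; int b; char *p; };

   on a 32-bit int target, A and B are adjacent and contain no pointers, so
   they are merged into a single fieldoff_s with offset 0 and size 64, while
   P gets its own entry with may_have_pointers set; the call would therefore
   return a count of 2.  */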
4175 /* Create a constraint ID = &FROM. */
4177 static void
4178 make_constraint_from (varinfo_t vi, int from)
4180 struct constraint_expr lhs, rhs;
4182 lhs.var = vi->id;
4183 lhs.offset = 0;
4184 lhs.type = SCALAR;
4186 rhs.var = from;
4187 rhs.offset = 0;
4188 rhs.type = ADDRESSOF;
4189 process_constraint (new_constraint (lhs, rhs));
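/* Illustrative sketch, not part of the original source: a call such as

       make_constraint_from (vi, escaped_id);

   produces the constraint VI = &ESCAPED, i.e. it adds the ESCAPED variable
   to VI's points-to set.  This is how global variables are conservatively
   seeded in create_variable_info_for below.  */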
4192 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4193 if it is a varargs function. */
4195 static unsigned int
4196 count_num_arguments (tree decl, bool *is_varargs)
4198 unsigned int i = 0;
4199 tree t;
4201 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
4202 t;
4203 t = TREE_CHAIN (t))
4205 if (TREE_VALUE (t) == void_type_node)
4206 break;
4207 i++;
4210 if (!t)
4211 *is_varargs = true;
4212 return i;
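/* Illustrative sketch, not part of the original source: for a prototyped
   declaration such as

       int f (int, char *);

   the TYPE_ARG_TYPES chain ends in void_type_node, so the function returns
   2 and leaves *IS_VARARGS alone, whereas for

       int g (int, ...);

   the chain has no void_type_node terminator, the loop runs off its end and
   *IS_VARARGS is set to true.  */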
4215 /* Create the variable info node for function DECL, using NAME, and
4216 return the index of the variable we've created for the function. */
4218 static unsigned int
4219 create_function_info_for (tree decl, const char *name)
4221 unsigned int index = VEC_length (varinfo_t, varmap);
4222 varinfo_t vi;
4223 tree arg;
4224 unsigned int i;
4225 bool is_varargs = false;
4227 /* Create the variable info. */
4229 vi = new_var_info (decl, index, name);
4230 vi->decl = decl;
4231 vi->offset = 0;
4232 vi->size = 1;
4233 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
4234 insert_vi_for_tree (vi->decl, vi);
4235 VEC_safe_push (varinfo_t, heap, varmap, vi);
4237 stats.total_vars++;
4239 /* If it's varargs, we don't know how many arguments it has, so we
4240 can't do much. */
4241 if (is_varargs)
4243 vi->fullsize = ~0;
4244 vi->size = ~0;
4245 vi->is_unknown_size_var = true;
4246 return index;
4250 arg = DECL_ARGUMENTS (decl);
4252 /* Set up variables for each argument. */
4253 for (i = 1; i < vi->fullsize; i++)
4255 varinfo_t argvi;
4256 const char *newname;
4257 char *tempname;
4258 unsigned int newindex;
4259 tree argdecl = decl;
4261 if (arg)
4262 argdecl = arg;
4264 newindex = VEC_length (varinfo_t, varmap);
4265 asprintf (&tempname, "%s.arg%d", name, i-1);
4266 newname = ggc_strdup (tempname);
4267 free (tempname);
4269 argvi = new_var_info (argdecl, newindex, newname);
4270 argvi->decl = argdecl;
4271 VEC_safe_push (varinfo_t, heap, varmap, argvi);
4272 argvi->offset = i;
4273 argvi->size = 1;
4274 argvi->is_full_var = true;
4275 argvi->fullsize = vi->fullsize;
4276 insert_into_field_list_sorted (vi, argvi);
4277 stats.total_vars ++;
4278 if (arg)
4280 insert_vi_for_tree (arg, argvi);
4281 arg = TREE_CHAIN (arg);
4285 /* Create a variable for the return var. */
4286 if (DECL_RESULT (decl) != NULL
4287 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
4289 varinfo_t resultvi;
4290 const char *newname;
4291 char *tempname;
4292 unsigned int newindex;
4293 tree resultdecl = decl;
4295 vi->fullsize ++;
4297 if (DECL_RESULT (decl))
4298 resultdecl = DECL_RESULT (decl);
4300 newindex = VEC_length (varinfo_t, varmap);
4301 asprintf (&tempname, "%s.result", name);
4302 newname = ggc_strdup (tempname);
4303 free (tempname);
4305 resultvi = new_var_info (resultdecl, newindex, newname);
4306 resultvi->decl = resultdecl;
4307 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
4308 resultvi->offset = i;
4309 resultvi->size = 1;
4310 resultvi->fullsize = vi->fullsize;
4311 resultvi->is_full_var = true;
4312 insert_into_field_list_sorted (vi, resultvi);
4313 stats.total_vars ++;
4314 if (DECL_RESULT (decl))
4315 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
4317 return index;
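/* Illustrative sketch, not part of the original source: in IPA mode a
   function such as

       int *foo (int *a, int *b);

   is modeled as one blob with fields FOO at offset 0, FOO.ARG0 at offset 1,
   FOO.ARG1 at offset 2 and FOO.RESULT at offset 3, so constraints can name
   a particular parameter or the return value as an offset from the
   function's varinfo.  */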
4321 /* Return true if FIELDSTACK contains fields that overlap.
4322 FIELDSTACK is assumed to be sorted by offset. */
4324 static bool
4325 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
4327 fieldoff_s *fo = NULL;
4328 unsigned int i;
4329 HOST_WIDE_INT lastoffset = -1;
4331 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4333 if (fo->offset == lastoffset)
4334 return true;
4335 lastoffset = fo->offset;
4337 return false;
4340 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
4341 This will also create any varinfo structures necessary for fields
4342 of DECL. */
4344 static unsigned int
4345 create_variable_info_for (tree decl, const char *name)
4347 unsigned int index = VEC_length (varinfo_t, varmap);
4348 varinfo_t vi;
4349 tree decl_type = TREE_TYPE (decl);
4350 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
4351 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
4352 VEC (fieldoff_s,heap) *fieldstack = NULL;
4354 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
4355 return create_function_info_for (decl, name);
4357 if (var_can_have_subvars (decl) && use_field_sensitive
4358 && (!var_ann (decl)
4359 || var_ann (decl)->noalias_state == 0)
4360 && (!var_ann (decl)
4361 || !var_ann (decl)->is_heapvar))
4362 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
4364 /* If the variable doesn't have subvars, we may end up needing to
4365 sort the field list and create fake variables for all the
4366 fields. */
4367 vi = new_var_info (decl, index, name);
4368 vi->decl = decl;
4369 vi->offset = 0;
4370 if (!declsize
4371 || !host_integerp (declsize, 1))
4373 vi->is_unknown_size_var = true;
4374 vi->fullsize = ~0;
4375 vi->size = ~0;
4377 else
4379 vi->fullsize = TREE_INT_CST_LOW (declsize);
4380 vi->size = vi->fullsize;
4383 insert_vi_for_tree (vi->decl, vi);
4384 VEC_safe_push (varinfo_t, heap, varmap, vi);
4385 if (is_global && (!flag_whole_program || !in_ipa_mode)
4386 && could_have_pointers (decl))
4388 if (var_ann (decl)
4389 && var_ann (decl)->noalias_state == NO_ALIAS_ANYTHING)
4390 make_constraint_from (vi, vi->id);
4391 else
4392 make_constraint_from (vi, escaped_id);
4395 stats.total_vars++;
4396 if (use_field_sensitive
4397 && !vi->is_unknown_size_var
4398 && var_can_have_subvars (decl)
4399 && VEC_length (fieldoff_s, fieldstack) > 1
4400 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
4402 unsigned int newindex = VEC_length (varinfo_t, varmap);
4403 fieldoff_s *fo = NULL;
4404 bool notokay = false;
4405 unsigned int i;
4407 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4409 if (fo->has_unknown_size
4410 || fo->offset < 0)
4412 notokay = true;
4413 break;
4417 /* We can't sort them if we have a field with a variable sized type,
4418 which will make notokay = true. In that case, we are going to return
4419 without creating varinfos for the fields anyway, so sorting them is a
4420 waste to boot. */
4421 if (!notokay)
4423 sort_fieldstack (fieldstack);
4424 /* Due to some C++ FE issues, like PR 22488, we might end up with
4425 what appear to be overlapping fields even though they,
4426 in reality, do not overlap. Until the C++ FE is fixed,
4427 we will simply disable field-sensitivity for these cases. */
4428 notokay = check_for_overlaps (fieldstack);
4432 if (VEC_length (fieldoff_s, fieldstack) != 0)
4433 fo = VEC_index (fieldoff_s, fieldstack, 0);
4435 if (fo == NULL || notokay)
4437 vi->is_unknown_size_var = 1;
4438 vi->fullsize = ~0;
4439 vi->size = ~0;
4440 vi->is_full_var = true;
4441 VEC_free (fieldoff_s, heap, fieldstack);
4442 return index;
4445 vi->size = fo->size;
4446 vi->offset = fo->offset;
4447 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
4448 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
4449 i--)
4451 varinfo_t newvi;
4452 const char *newname = "NULL";
4453 char *tempname;
4455 newindex = VEC_length (varinfo_t, varmap);
4456 if (dump_file)
4458 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
4459 "+" HOST_WIDE_INT_PRINT_DEC,
4460 vi->name, fo->offset, fo->size);
4461 newname = ggc_strdup (tempname);
4462 free (tempname);
4464 newvi = new_var_info (decl, newindex, newname);
4465 newvi->offset = fo->offset;
4466 newvi->size = fo->size;
4467 newvi->fullsize = vi->fullsize;
4468 insert_into_field_list (vi, newvi);
4469 VEC_safe_push (varinfo_t, heap, varmap, newvi);
4470 if (is_global && (!flag_whole_program || !in_ipa_mode)
4471 && fo->may_have_pointers)
4472 make_constraint_from (newvi, escaped_id);
4474 stats.total_vars++;
4477 else
4478 vi->is_full_var = true;
4480 VEC_free (fieldoff_s, heap, fieldstack);
4482 return index;
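/* Illustrative sketch, not part of the original source: for a file-scope
   variable

       struct { int len; char *buf; } g;

   with field-sensitivity enabled this creates two varinfos, the base one
   covering LEN and a subvariable for BUF, and because G is global (and we
   are not in whole-program IPA mode) the pointer-carrying fields are also
   seeded with constraints of the form FIELD = &ESCAPED.  */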
4485 /* Print out the points-to solution for VAR to FILE. */
4487 void
4488 dump_solution_for_var (FILE *file, unsigned int var)
4490 varinfo_t vi = get_varinfo (var);
4491 unsigned int i;
4492 bitmap_iterator bi;
4494 if (find (var) != var)
4496 varinfo_t vipt = get_varinfo (find (var));
4497 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4499 else
4501 fprintf (file, "%s = { ", vi->name);
4502 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4504 fprintf (file, "%s ", get_varinfo (i)->name);
4506 fprintf (file, "}");
4507 if (vi->no_tbaa_pruning)
4508 fprintf (file, " no-tbaa-pruning");
4509 fprintf (file, "\n");
4513 /* Print the points-to solution for VAR to stdout. */
4515 void
4516 debug_solution_for_var (unsigned int var)
4518 dump_solution_for_var (stdout, var);
4521 /* Create varinfo structures for all of the variables in the
4522 function for intraprocedural mode. */
4524 static void
4525 intra_create_variable_infos (void)
4527 tree t;
4528 struct constraint_expr lhs, rhs;
4530 /* For each incoming pointer argument ARG, create the constraint ARG
4531 = NONLOCAL, or ARG = &PARM_NOALIAS if flag_argument_noalias is set. */
4532 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
4534 varinfo_t p;
4536 if (!could_have_pointers (t))
4537 continue;
4539 /* If flag_argument_noalias is set, then the pointer
4540 arguments of the function are guaranteed not to point to each
4541 other. In that case, create an artificial variable PARM_NOALIAS
4542 and the constraint ARG = &PARM_NOALIAS. */
4543 if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
4545 varinfo_t vi;
4546 tree heapvar = heapvar_lookup (t);
4548 lhs.offset = 0;
4549 lhs.type = SCALAR;
4550 lhs.var = get_vi_for_tree (t)->id;
4552 if (heapvar == NULL_TREE)
4554 var_ann_t ann;
4555 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
4556 "PARM_NOALIAS");
4557 DECL_EXTERNAL (heapvar) = 1;
4558 if (gimple_referenced_vars (cfun))
4559 add_referenced_var (heapvar);
4561 heapvar_insert (t, heapvar);
4563 ann = get_var_ann (heapvar);
4564 ann->is_heapvar = 1;
4565 if (flag_argument_noalias == 1)
4566 ann->noalias_state = NO_ALIAS;
4567 else if (flag_argument_noalias == 2)
4568 ann->noalias_state = NO_ALIAS_GLOBAL;
4569 else if (flag_argument_noalias == 3)
4570 ann->noalias_state = NO_ALIAS_ANYTHING;
4571 else
4572 gcc_unreachable ();
4575 vi = get_vi_for_tree (heapvar);
4576 vi->is_artificial_var = 1;
4577 vi->is_heap_var = 1;
4578 rhs.var = vi->id;
4579 rhs.type = ADDRESSOF;
4580 rhs.offset = 0;
4581 for (p = get_varinfo (lhs.var); p; p = p->next)
4583 struct constraint_expr temp = lhs;
4584 temp.var = p->id;
4585 process_constraint (new_constraint (temp, rhs));
4588 else
4590 varinfo_t arg_vi = get_vi_for_tree (t);
4592 for (p = arg_vi; p; p = p->next)
4593 make_constraint_from (p, nonlocal_id);
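/* Illustrative sketch, not part of the original source: for

       void foo (int *p);

   the default constraint generated here is P = &NONLOCAL, i.e. on entry P
   may point to any nonlocal memory.  With a nonzero flag_argument_noalias a
   fresh PARM_NOALIAS heap variable is created instead and the constraint
   P = &PARM_NOALIAS is used, with noalias_state recording how strong the
   no-alias guarantee is.  */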
4598 /* Structure used to put solution bitmaps in a hashtable so they can
4599 be shared among variables with the same points-to set. */
4601 typedef struct shared_bitmap_info
4603 bitmap pt_vars;
4604 hashval_t hashcode;
4605 } *shared_bitmap_info_t;
4606 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
4608 static htab_t shared_bitmap_table;
4610 /* Hash function for a shared_bitmap_info_t */
4612 static hashval_t
4613 shared_bitmap_hash (const void *p)
4615 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
4616 return bi->hashcode;
4619 /* Equality function for two shared_bitmap_info_t's. */
4621 static int
4622 shared_bitmap_eq (const void *p1, const void *p2)
4624 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
4625 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
4626 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
4629 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
4630 existing instance if there is one, NULL otherwise. */
4632 static bitmap
4633 shared_bitmap_lookup (bitmap pt_vars)
4635 void **slot;
4636 struct shared_bitmap_info sbi;
4638 sbi.pt_vars = pt_vars;
4639 sbi.hashcode = bitmap_hash (pt_vars);
4641 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
4642 sbi.hashcode, NO_INSERT);
4643 if (!slot)
4644 return NULL;
4645 else
4646 return ((shared_bitmap_info_t) *slot)->pt_vars;
4650 /* Add a bitmap to the shared bitmap hashtable. */
4652 static void
4653 shared_bitmap_add (bitmap pt_vars)
4655 void **slot;
4656 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
4658 sbi->pt_vars = pt_vars;
4659 sbi->hashcode = bitmap_hash (pt_vars);
4661 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
4662 sbi->hashcode, INSERT);
4663 gcc_assert (!*slot);
4664 *slot = (void *) sbi;
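/* Illustrative sketch, not part of the original source, of the intended
   lookup-then-add pattern, mirroring its use in find_what_p_points_to
   below:

       bitmap existing = shared_bitmap_lookup (solution);
       if (!existing)
         shared_bitmap_add (solution);
       else
         ... use EXISTING and discard SOLUTION ...

   so that pointers with identical points-to sets share one bitmap instead
   of each keeping its own copy.  */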
4668 /* Set bits in INTO corresponding to the variable uids in solution set
4669 FROM, which came from variable PTR.
4670 For variables that are actually dereferenced, we also use type
4671 based alias analysis to prune the points-to sets.
4672 IS_DEREFED is true if PTR was directly dereferenced, which we use to
4673 help determine whether we are allowed to prune using TBAA.
4674 If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
4675 the from set. */
4677 static void
4678 set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
4679 bool no_tbaa_pruning)
4681 unsigned int i;
4682 bitmap_iterator bi;
4684 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
4686 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4688 varinfo_t vi = get_varinfo (i);
4690 /* The only artificial variables that are allowed in a may-alias
4691 set are heap variables. */
4692 if (vi->is_artificial_var && !vi->is_heap_var)
4693 continue;
4695 if (TREE_CODE (vi->decl) == VAR_DECL
4696 || TREE_CODE (vi->decl) == PARM_DECL
4697 || TREE_CODE (vi->decl) == RESULT_DECL)
4699 /* Just add VI->DECL to the alias set.
4700 Don't type prune artificial vars or points-to sets
4701 for pointers that have not been dereferenced or with
4702 type-based pruning disabled. */
4703 if (vi->is_artificial_var
4704 || !is_derefed
4705 || no_tbaa_pruning)
4706 bitmap_set_bit (into, DECL_UID (vi->decl));
4707 else
4709 alias_set_type var_alias_set, mem_alias_set;
4710 var_alias_set = get_alias_set (vi->decl);
4711 mem_alias_set = get_alias_set (TREE_TYPE (TREE_TYPE (ptr)));
4712 if (may_alias_p (SSA_NAME_VAR (ptr), mem_alias_set,
4713 vi->decl, var_alias_set, true))
4714 bitmap_set_bit (into, DECL_UID (vi->decl));
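/* Illustrative sketch, not part of the original source: if PTR is a
   dereferenced "int *" and VI->DECL is a plain "float" variable, their
   alias sets normally do not conflict under C's type-based rules, so
   may_alias_p can reject the pair and the float is pruned from PTR's
   points-to set; when IS_DEREFED is false or NO_TBAA_PRUNING is set the
   bit is added without any such check.  */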
4721 static bool have_alias_info = false;
4723 /* Given a pointer variable P, fill in its points-to set, or return
4724 false if we can't.
4725 Rather than return false for variables that point to anything, we
4726 instead find the corresponding SMT, and merge in its aliases. In
4727 addition to these aliases, we also set the bits for the SMT's
4728 themselves and their subsets, as SMT's are still in use by
4729 non-SSA_NAME's, and pruning may eliminate every one of their
4730 aliases. In such a case, if we did not include the right set of
4731 SMT's in the points-to set of the variable, we'd end up with
4732 statements that appear not to conflict even though they should. */
4734 bool
4735 find_what_p_points_to (tree p)
4737 tree lookup_p = p;
4738 varinfo_t vi;
4740 if (!have_alias_info)
4741 return false;
4743 /* For parameters, get at the points-to set for the actual parm
4744 decl. */
4745 if (TREE_CODE (p) == SSA_NAME
4746 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4747 && SSA_NAME_IS_DEFAULT_DEF (p))
4748 lookup_p = SSA_NAME_VAR (p);
4750 vi = lookup_vi_for_tree (lookup_p);
4751 if (vi)
4753 if (vi->is_artificial_var)
4754 return false;
4756 /* See if this is a field or a structure. */
4757 if (vi->size != vi->fullsize)
4759 /* Nothing currently asks about structure fields directly,
4760 but when something does, we need code here to hand back the
4761 points-to set. */
4762 return false;
4764 else
4766 struct ptr_info_def *pi = get_ptr_info (p);
4767 unsigned int i;
4768 bitmap_iterator bi;
4769 bool was_pt_anything = false;
4770 bitmap finished_solution;
4771 bitmap result;
4773 if (!pi->memory_tag_needed)
4774 return false;
4776 /* This variable may have been collapsed, let's get the real
4777 variable. */
4778 vi = get_varinfo (find (vi->id));
4780 /* Translate artificial variables into SSA_NAME_PTR_INFO
4781 attributes. */
4782 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4784 varinfo_t vi = get_varinfo (i);
4786 if (vi->is_artificial_var)
4788 /* FIXME. READONLY should be handled better so that
4789 flow insensitive aliasing can disregard writable
4790 aliases. */
4791 if (vi->id == nothing_id)
4792 pi->pt_null = 1;
4793 else if (vi->id == anything_id
4794 || vi->id == nonlocal_id
4795 || vi->id == escaped_id
4796 || vi->id == callused_id)
4797 was_pt_anything = 1;
4798 else if (vi->id == readonly_id)
4799 was_pt_anything = 1;
4800 else if (vi->id == integer_id)
4801 was_pt_anything = 1;
4802 else if (vi->is_heap_var)
4803 pi->pt_global_mem = 1;
4807 /* Instead of doing extra work, simply do not create
4808 points-to information for pt_anything pointers. This
4809 will cause the operand scanner to fall back to the
4810 type-based SMT and its aliases, which is the best
4811 we could do here for the points-to set as well. */
4812 if (was_pt_anything)
4813 return false;
4815 /* Share the final set of variables when possible. */
4816 finished_solution = BITMAP_GGC_ALLOC ();
4817 stats.points_to_sets_created++;
4819 set_uids_in_ptset (p, finished_solution, vi->solution,
4820 pi->is_dereferenced,
4821 vi->no_tbaa_pruning);
4822 result = shared_bitmap_lookup (finished_solution);
4824 if (!result)
4826 shared_bitmap_add (finished_solution);
4827 pi->pt_vars = finished_solution;
4829 else
4831 pi->pt_vars = result;
4832 bitmap_clear (finished_solution);
4835 if (bitmap_empty_p (pi->pt_vars))
4836 pi->pt_vars = NULL;
4838 return true;
4842 return false;
4845 /* Mark the ESCAPED solution as call-clobbered. Return false if
4846 pt_anything escaped, in which case all locals that have their
4847 address taken need to be marked call-clobbered as well. */
4849 bool
4850 clobber_what_escaped (void)
4852 varinfo_t vi;
4853 unsigned int i;
4854 bitmap_iterator bi;
4856 if (!have_alias_info)
4857 return false;
4859 /* This variable may have been collapsed, let's get the real
4860 variable for escaped_id. */
4861 vi = get_varinfo (find (escaped_id));
4863 /* If call-used memory escapes we need to include it in the
4864 set of escaped variables. This can happen if a pure
4865 function returns a pointer and this pointer escapes. */
4866 if (bitmap_bit_p (vi->solution, callused_id))
4868 varinfo_t cu_vi = get_varinfo (find (callused_id));
4869 bitmap_ior_into (vi->solution, cu_vi->solution);
4872 /* Mark variables in the solution call-clobbered. */
4873 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4875 varinfo_t vi = get_varinfo (i);
4877 if (vi->is_artificial_var)
4879 /* nothing_id and readonly_id do not cause any
4880 call clobber ops. For anything_id and integer_id
4881 we need to clobber all addressable vars. */
4882 if (vi->id == anything_id
4883 || vi->id == integer_id)
4884 return false;
4887 /* Only artificial heap-vars are further interesting. */
4888 if (vi->is_artificial_var && !vi->is_heap_var)
4889 continue;
4891 if ((TREE_CODE (vi->decl) == VAR_DECL
4892 || TREE_CODE (vi->decl) == PARM_DECL
4893 || TREE_CODE (vi->decl) == RESULT_DECL)
4894 && !unmodifiable_var_p (vi->decl))
4895 mark_call_clobbered (vi->decl, ESCAPE_TO_CALL);
4898 return true;
4901 /* Compute the call-used variables. */
4903 void
4904 compute_call_used_vars (void)
4906 varinfo_t vi;
4907 unsigned int i;
4908 bitmap_iterator bi;
4909 bool has_anything_id = false;
4911 if (!have_alias_info)
4912 return;
4914 /* This variable may have been collapsed, let's get the real
4915 variable for callused_id. */
4916 vi = get_varinfo (find (callused_id));
4918 /* Mark variables in the solution call-used. */
4919 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4921 varinfo_t vi = get_varinfo (i);
4923 if (vi->is_artificial_var)
4925 /* For anything_id and integer_id we need to make
4926 all local addressable vars call-used. */
4927 if (vi->id == anything_id
4928 || vi->id == integer_id)
4929 has_anything_id = true;
4932 /* Only artificial heap-vars are further interesting. */
4933 if (vi->is_artificial_var && !vi->is_heap_var)
4934 continue;
4936 if ((TREE_CODE (vi->decl) == VAR_DECL
4937 || TREE_CODE (vi->decl) == PARM_DECL
4938 || TREE_CODE (vi->decl) == RESULT_DECL)
4939 && !unmodifiable_var_p (vi->decl))
4940 bitmap_set_bit (gimple_call_used_vars (cfun), DECL_UID (vi->decl));
4943 /* If anything is call-used, add all addressable locals to the set. */
4944 if (has_anything_id)
4945 bitmap_ior_into (gimple_call_used_vars (cfun),
4946 gimple_addressable_vars (cfun));
4950 /* Dump points-to information to OUTFILE. */
4952 void
4953 dump_sa_points_to_info (FILE *outfile)
4955 unsigned int i;
4957 fprintf (outfile, "\nPoints-to sets\n\n");
4959 if (dump_flags & TDF_STATS)
4961 fprintf (outfile, "Stats:\n");
4962 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
4963 fprintf (outfile, "Non-pointer vars: %d\n",
4964 stats.nonpointer_vars);
4965 fprintf (outfile, "Statically unified vars: %d\n",
4966 stats.unified_vars_static);
4967 fprintf (outfile, "Dynamically unified vars: %d\n",
4968 stats.unified_vars_dynamic);
4969 fprintf (outfile, "Iterations: %d\n", stats.iterations);
4970 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
4971 fprintf (outfile, "Number of implicit edges: %d\n",
4972 stats.num_implicit_edges);
4975 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
4976 dump_solution_for_var (outfile, i);
4980 /* Debug points-to information to stderr. */
4982 void
4983 debug_sa_points_to_info (void)
4985 dump_sa_points_to_info (stderr);
4989 /* Initialize the always-existing constraint variables for NULL,
4990 ANYTHING, READONLY, ESCAPED, NONLOCAL, CALLUSED, and INTEGER. */
4992 static void
4993 init_base_vars (void)
4995 struct constraint_expr lhs, rhs;
4997 /* Create the NULL variable, used to represent that a variable points
4998 to NULL. */
4999 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
5000 var_nothing = new_var_info (nothing_tree, nothing_id, "NULL");
5001 insert_vi_for_tree (nothing_tree, var_nothing);
5002 var_nothing->is_artificial_var = 1;
5003 var_nothing->offset = 0;
5004 var_nothing->size = ~0;
5005 var_nothing->fullsize = ~0;
5006 var_nothing->is_special_var = 1;
5007 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
5009 /* Create the ANYTHING variable, used to represent that a variable
5010 points to some unknown piece of memory. */
5011 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
5012 var_anything = new_var_info (anything_tree, anything_id, "ANYTHING");
5013 insert_vi_for_tree (anything_tree, var_anything);
5014 var_anything->is_artificial_var = 1;
5015 var_anything->size = ~0;
5016 var_anything->offset = 0;
5017 var_anything->next = NULL;
5018 var_anything->fullsize = ~0;
5019 var_anything->is_special_var = 1;
5021 /* Anything points to anything. This makes deref constraints just
5022 work in the presence of linked lists and other p = *p type loops,
5023 by saying that *ANYTHING = ANYTHING. */
5024 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
5025 lhs.type = SCALAR;
5026 lhs.var = anything_id;
5027 lhs.offset = 0;
5028 rhs.type = ADDRESSOF;
5029 rhs.var = anything_id;
5030 rhs.offset = 0;
5032 /* This specifically does not use process_constraint because
5033 process_constraint ignores all anything = anything constraints, since all
5034 but this one are redundant. */
5035 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
5037 /* Create the READONLY variable, used to represent that a variable
5038 points to readonly memory. */
5039 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
5040 var_readonly = new_var_info (readonly_tree, readonly_id, "READONLY");
5041 var_readonly->is_artificial_var = 1;
5042 var_readonly->offset = 0;
5043 var_readonly->size = ~0;
5044 var_readonly->fullsize = ~0;
5045 var_readonly->next = NULL;
5046 var_readonly->is_special_var = 1;
5047 insert_vi_for_tree (readonly_tree, var_readonly);
5048 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
5050 /* readonly memory points to anything, in order to make deref
5051 easier. In reality, it points to anything the particular
5052 readonly variable can point to, but we don't track this
5053 separately. */
5054 lhs.type = SCALAR;
5055 lhs.var = readonly_id;
5056 lhs.offset = 0;
5057 rhs.type = ADDRESSOF;
5058 rhs.var = readonly_id; /* FIXME */
5059 rhs.offset = 0;
5060 process_constraint (new_constraint (lhs, rhs));
5062 /* Create the ESCAPED variable, used to represent the set of escaped
5063 memory. */
5064 escaped_tree = create_tmp_var_raw (void_type_node, "ESCAPED");
5065 var_escaped = new_var_info (escaped_tree, escaped_id, "ESCAPED");
5066 insert_vi_for_tree (escaped_tree, var_escaped);
5067 var_escaped->is_artificial_var = 1;
5068 var_escaped->offset = 0;
5069 var_escaped->size = ~0;
5070 var_escaped->fullsize = ~0;
5071 var_escaped->is_special_var = 0;
5072 VEC_safe_push (varinfo_t, heap, varmap, var_escaped);
5073 gcc_assert (VEC_index (varinfo_t, varmap, 3) == var_escaped);
5075 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
5076 lhs.type = SCALAR;
5077 lhs.var = escaped_id;
5078 lhs.offset = 0;
5079 rhs.type = DEREF;
5080 rhs.var = escaped_id;
5081 rhs.offset = 0;
5082 process_constraint (new_constraint (lhs, rhs));
5084 /* Create the NONLOCAL variable, used to represent the set of nonlocal
5085 memory. */
5086 nonlocal_tree = create_tmp_var_raw (void_type_node, "NONLOCAL");
5087 var_nonlocal = new_var_info (nonlocal_tree, nonlocal_id, "NONLOCAL");
5088 insert_vi_for_tree (nonlocal_tree, var_nonlocal);
5089 var_nonlocal->is_artificial_var = 1;
5090 var_nonlocal->offset = 0;
5091 var_nonlocal->size = ~0;
5092 var_nonlocal->fullsize = ~0;
5093 var_nonlocal->is_special_var = 1;
5094 VEC_safe_push (varinfo_t, heap, varmap, var_nonlocal);
5096 /* Nonlocal memory points to escaped (which includes nonlocal),
5097 in order to make deref easier. */
5098 lhs.type = SCALAR;
5099 lhs.var = nonlocal_id;
5100 lhs.offset = 0;
5101 rhs.type = ADDRESSOF;
5102 rhs.var = escaped_id;
5103 rhs.offset = 0;
5104 process_constraint (new_constraint (lhs, rhs));
5106 /* Create the CALLUSED variable, used to represent the set of call-used
5107 memory. */
5108 callused_tree = create_tmp_var_raw (void_type_node, "CALLUSED");
5109 var_callused = new_var_info (callused_tree, callused_id, "CALLUSED");
5110 insert_vi_for_tree (callused_tree, var_callused);
5111 var_callused->is_artificial_var = 1;
5112 var_callused->offset = 0;
5113 var_callused->size = ~0;
5114 var_callused->fullsize = ~0;
5115 var_callused->is_special_var = 0;
5116 VEC_safe_push (varinfo_t, heap, varmap, var_callused);
5118 /* CALLUSED = *CALLUSED, because call-used is may-deref'd at calls, etc. */
5119 lhs.type = SCALAR;
5120 lhs.var = callused_id;
5121 lhs.offset = 0;
5122 rhs.type = DEREF;
5123 rhs.var = callused_id;
5124 rhs.offset = 0;
5125 process_constraint (new_constraint (lhs, rhs));
5127 /* Create the INTEGER variable, used to represent that a variable points
5128 to an INTEGER. */
5129 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
5130 var_integer = new_var_info (integer_tree, integer_id, "INTEGER");
5131 insert_vi_for_tree (integer_tree, var_integer);
5132 var_integer->is_artificial_var = 1;
5133 var_integer->size = ~0;
5134 var_integer->fullsize = ~0;
5135 var_integer->offset = 0;
5136 var_integer->next = NULL;
5137 var_integer->is_special_var = 1;
5138 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
5140 /* INTEGER = ANYTHING, because we don't know where a dereference of
5141 a random integer will point to. */
5142 lhs.type = SCALAR;
5143 lhs.var = integer_id;
5144 lhs.offset = 0;
5145 rhs.type = ADDRESSOF;
5146 rhs.var = anything_id;
5147 rhs.offset = 0;
5148 process_constraint (new_constraint (lhs, rhs));
5150 /* *ESCAPED = &ESCAPED. This is true because we have to assume
5151 everything pointed to by escaped can also point to escaped. */
5152 lhs.type = DEREF;
5153 lhs.var = escaped_id;
5154 lhs.offset = 0;
5155 rhs.type = ADDRESSOF;
5156 rhs.var = escaped_id;
5157 rhs.offset = 0;
5158 process_constraint (new_constraint (lhs, rhs));
5160 /* *ESCAPED = &NONLOCAL. This is true because we have to assume
5161 everything pointed to by escaped can also point to nonlocal. */
5162 lhs.type = DEREF;
5163 lhs.var = escaped_id;
5164 lhs.offset = 0;
5165 rhs.type = ADDRESSOF;
5166 rhs.var = nonlocal_id;
5167 rhs.offset = 0;
5168 process_constraint (new_constraint (lhs, rhs));
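/* Recap, not part of the original source, of the special variables set up
   above and the seed constraints that give them their meaning:

       NOTHING   - the pointer is NULL
       ANYTHING  - may point anywhere          (ANYTHING = &ANYTHING)
       READONLY  - points into readonly memory (READONLY = &READONLY, FIXME)
       ESCAPED   - memory that has escaped     (ESCAPED = *ESCAPED,
                                                *ESCAPED = &ESCAPED, &NONLOCAL)
       NONLOCAL  - memory not local here       (NONLOCAL = &ESCAPED)
       CALLUSED  - memory used by callees      (CALLUSED = *CALLUSED)
       INTEGER   - came from an integer        (INTEGER = &ANYTHING)  */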
5171 /* Initialize things necessary to perform PTA */
5173 static void
5174 init_alias_vars (void)
5176 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
5178 bitmap_obstack_initialize (&pta_obstack);
5179 bitmap_obstack_initialize (&oldpta_obstack);
5180 bitmap_obstack_initialize (&predbitmap_obstack);
5182 constraint_pool = create_alloc_pool ("Constraint pool",
5183 sizeof (struct constraint), 30);
5184 variable_info_pool = create_alloc_pool ("Variable info pool",
5185 sizeof (struct variable_info), 30);
5186 constraints = VEC_alloc (constraint_t, heap, 8);
5187 varmap = VEC_alloc (varinfo_t, heap, 8);
5188 vi_for_tree = pointer_map_create ();
5190 memset (&stats, 0, sizeof (stats));
5191 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
5192 shared_bitmap_eq, free);
5193 init_base_vars ();
5196 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
5197 predecessor edges. */
5199 static void
5200 remove_preds_and_fake_succs (constraint_graph_t graph)
5202 unsigned int i;
5204 /* Clear the implicit ref and address nodes from the successor
5205 lists. */
5206 for (i = 0; i < FIRST_REF_NODE; i++)
5208 if (graph->succs[i])
5209 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
5210 FIRST_REF_NODE * 2);
5213 /* Free the successor list for the non-ref nodes. */
5214 for (i = FIRST_REF_NODE; i < graph->size; i++)
5216 if (graph->succs[i])
5217 BITMAP_FREE (graph->succs[i]);
5220 /* Now shrink the successor list to the current number of variables,
5221 and blow away the predecessor bitmaps. */
5222 graph->size = VEC_length (varinfo_t, varmap);
5223 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
5225 free (graph->implicit_preds);
5226 graph->implicit_preds = NULL;
5227 free (graph->preds);
5228 graph->preds = NULL;
5229 bitmap_obstack_release (&predbitmap_obstack);
5232 /* Compute the set of variables we can't TBAA prune. */
5234 static void
5235 compute_tbaa_pruning (void)
5237 unsigned int size = VEC_length (varinfo_t, varmap);
5238 unsigned int i;
5239 bool any;
5241 changed_count = 0;
5242 changed = sbitmap_alloc (size);
5243 sbitmap_zero (changed);
5245 /* Mark all initial no_tbaa_pruning nodes as changed. */
5246 any = false;
5247 for (i = 0; i < size; ++i)
5249 varinfo_t ivi = get_varinfo (i);
5251 if (find (i) == i && ivi->no_tbaa_pruning)
5253 any = true;
5254 if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
5255 || VEC_length (constraint_t, graph->complex[i]) > 0)
5257 SET_BIT (changed, i);
5258 ++changed_count;
5263 while (changed_count > 0)
5265 struct topo_info *ti = init_topo_info ();
5266 ++stats.iterations;
5268 compute_topo_order (graph, ti);
5270 while (VEC_length (unsigned, ti->topo_order) != 0)
5272 bitmap_iterator bi;
5274 i = VEC_pop (unsigned, ti->topo_order);
5276 /* If this variable is not a representative, skip it. */
5277 if (find (i) != i)
5278 continue;
5280 /* If the node has changed, we need to process the complex
5281 constraints and outgoing edges again. */
5282 if (TEST_BIT (changed, i))
5284 unsigned int j;
5285 constraint_t c;
5286 VEC(constraint_t,heap) *complex = graph->complex[i];
5288 RESET_BIT (changed, i);
5289 --changed_count;
5291 /* Process the complex copy constraints. */
5292 for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
5294 if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
5296 varinfo_t lhsvi = get_varinfo (find (c->lhs.var));
5298 if (!lhsvi->no_tbaa_pruning)
5300 lhsvi->no_tbaa_pruning = true;
5301 if (!TEST_BIT (changed, lhsvi->id))
5303 SET_BIT (changed, lhsvi->id);
5304 ++changed_count;
5310 /* Propagate to all successors. */
5311 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
5313 unsigned int to = find (j);
5314 varinfo_t tovi = get_varinfo (to);
5316 /* Don't propagate to ourselves. */
5317 if (to == i)
5318 continue;
5320 if (!tovi->no_tbaa_pruning)
5322 tovi->no_tbaa_pruning = true;
5323 if (!TEST_BIT (changed, to))
5325 SET_BIT (changed, to);
5326 ++changed_count;
5333 free_topo_info (ti);
5336 sbitmap_free (changed);
5338 if (any)
5340 for (i = 0; i < size; ++i)
5342 varinfo_t ivi = get_varinfo (i);
5343 varinfo_t ivip = get_varinfo (find (i));
5345 if (ivip->no_tbaa_pruning)
5347 tree var = ivi->decl;
5349 if (TREE_CODE (var) == SSA_NAME)
5350 var = SSA_NAME_VAR (var);
5352 if (POINTER_TYPE_P (TREE_TYPE (var)))
5354 DECL_NO_TBAA_P (var) = 1;
5356 /* Tell the RTL layer that this pointer can alias
5357 anything. */
5358 DECL_POINTER_ALIAS_SET (var) = 0;
5365 /* Create points-to sets for the current function. See the comments
5366 at the start of the file for an algorithmic overview. */
5368 void
5369 compute_points_to_sets (void)
5371 struct scc_info *si;
5372 basic_block bb;
5374 timevar_push (TV_TREE_PTA);
5376 init_alias_vars ();
5377 init_alias_heapvars ();
5379 intra_create_variable_infos ();
5381 /* Now walk all statements and derive aliases. */
5382 FOR_EACH_BB (bb)
5384 gimple_stmt_iterator gsi;
5386 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5388 gimple phi = gsi_stmt (gsi);
5390 if (is_gimple_reg (gimple_phi_result (phi)))
5391 find_func_aliases (phi);
5394 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
5396 gimple stmt = gsi_stmt (gsi);
5398 find_func_aliases (stmt);
5400 /* The information in GIMPLE_CHANGE_DYNAMIC_TYPE statements
5401 has now been captured, and we can remove them. */
5402 if (gimple_code (stmt) == GIMPLE_CHANGE_DYNAMIC_TYPE)
5403 gsi_remove (&gsi, true);
5404 else
5405 gsi_next (&gsi);
5410 if (dump_file)
5412 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5413 dump_constraints (dump_file);
5416 if (dump_file)
5417 fprintf (dump_file,
5418 "\nCollapsing static cycles and doing variable "
5419 "substitution\n");
5421 init_graph (VEC_length (varinfo_t, varmap) * 2);
5423 if (dump_file)
5424 fprintf (dump_file, "Building predecessor graph\n");
5425 build_pred_graph ();
5427 if (dump_file)
5428 fprintf (dump_file, "Detecting pointer and location "
5429 "equivalences\n");
5430 si = perform_var_substitution (graph);
5432 if (dump_file)
5433 fprintf (dump_file, "Rewriting constraints and unifying "
5434 "variables\n");
5435 rewrite_constraints (graph, si);
5436 free_var_substitution_info (si);
5438 build_succ_graph ();
5440 if (dump_file && (dump_flags & TDF_GRAPH))
5441 dump_constraint_graph (dump_file);
5443 move_complex_constraints (graph);
5445 if (dump_file)
5446 fprintf (dump_file, "Uniting pointer but not location equivalent "
5447 "variables\n");
5448 unite_pointer_equivalences (graph);
5450 if (dump_file)
5451 fprintf (dump_file, "Finding indirect cycles\n");
5452 find_indirect_cycles (graph);
5454 /* Implicit nodes and predecessors are no longer necessary at this
5455 point. */
5456 remove_preds_and_fake_succs (graph);
5458 if (dump_file)
5459 fprintf (dump_file, "Solving graph\n");
5461 solve_graph (graph);
5463 compute_tbaa_pruning ();
5465 if (dump_file)
5466 dump_sa_points_to_info (dump_file);
5468 have_alias_info = true;
5470 timevar_pop (TV_TREE_PTA);
5474 /* Delete created points-to sets. */
5476 void
5477 delete_points_to_sets (void)
5479 unsigned int i;
5481 htab_delete (shared_bitmap_table);
5482 if (dump_file && (dump_flags & TDF_STATS))
5483 fprintf (dump_file, "Points to sets created:%d\n",
5484 stats.points_to_sets_created);
5486 pointer_map_destroy (vi_for_tree);
5487 bitmap_obstack_release (&pta_obstack);
5488 VEC_free (constraint_t, heap, constraints);
5490 for (i = 0; i < graph->size; i++)
5491 VEC_free (constraint_t, heap, graph->complex[i]);
5492 free (graph->complex);
5494 free (graph->rep);
5495 free (graph->succs);
5496 free (graph->pe);
5497 free (graph->pe_rep);
5498 free (graph->indirect_cycles);
5499 free (graph);
5501 VEC_free (varinfo_t, heap, varmap);
5502 free_alloc_pool (variable_info_pool);
5503 free_alloc_pool (constraint_pool);
5504 have_alias_info = false;
5507 /* Return true if we should execute IPA PTA. */
5508 static bool
5509 gate_ipa_pta (void)
5511 return (flag_ipa_pta
5512 /* Don't bother doing anything if the program has errors. */
5513 && !(errorcount || sorrycount));
5516 /* Execute the driver for IPA PTA. */
5517 static unsigned int
5518 ipa_pta_execute (void)
5520 struct cgraph_node *node;
5521 struct scc_info *si;
5523 in_ipa_mode = 1;
5524 init_alias_heapvars ();
5525 init_alias_vars ();
5527 for (node = cgraph_nodes; node; node = node->next)
5529 if (!node->analyzed || cgraph_is_master_clone (node))
5531 unsigned int varid;
5533 varid = create_function_info_for (node->decl,
5534 cgraph_node_name (node));
5535 if (node->local.externally_visible)
5537 varinfo_t fi = get_varinfo (varid);
5538 for (; fi; fi = fi->next)
5539 make_constraint_from (fi, anything_id);
5543 for (node = cgraph_nodes; node; node = node->next)
5545 if (node->analyzed && cgraph_is_master_clone (node))
5547 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
5548 basic_block bb;
5549 tree old_func_decl = current_function_decl;
5550 if (dump_file)
5551 fprintf (dump_file,
5552 "Generating constraints for %s\n",
5553 cgraph_node_name (node));
5554 push_cfun (func);
5555 current_function_decl = node->decl;
5557 FOR_EACH_BB_FN (bb, func)
5559 gimple_stmt_iterator gsi;
5561 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
5562 gsi_next (&gsi))
5564 gimple phi = gsi_stmt (gsi);
5566 if (is_gimple_reg (gimple_phi_result (phi)))
5567 find_func_aliases (phi);
5570 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5571 find_func_aliases (gsi_stmt (gsi));
5573 current_function_decl = old_func_decl;
5574 pop_cfun ();
5576 else
5578 /* Make it point to anything. */
5582 if (dump_file)
5584 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5585 dump_constraints (dump_file);
5588 if (dump_file)
5589 fprintf (dump_file,
5590 "\nCollapsing static cycles and doing variable "
5591 "substitution:\n");
5593 init_graph (VEC_length (varinfo_t, varmap) * 2);
5594 build_pred_graph ();
5595 si = perform_var_substitution (graph);
5596 rewrite_constraints (graph, si);
5597 free_var_substitution_info (si);
5599 build_succ_graph ();
5600 move_complex_constraints (graph);
5601 unite_pointer_equivalences (graph);
5602 find_indirect_cycles (graph);
5604 /* Implicit nodes and predecessors are no longer necessary at this
5605 point. */
5606 remove_preds_and_fake_succs (graph);
5608 if (dump_file)
5609 fprintf (dump_file, "\nSolving graph\n");
5611 solve_graph (graph);
5613 if (dump_file)
5614 dump_sa_points_to_info (dump_file);
5616 in_ipa_mode = 0;
5617 delete_alias_heapvars ();
5618 delete_points_to_sets ();
5619 return 0;
5622 struct simple_ipa_opt_pass pass_ipa_pta =
5625 SIMPLE_IPA_PASS,
5626 "pta", /* name */
5627 gate_ipa_pta, /* gate */
5628 ipa_pta_execute, /* execute */
5629 NULL, /* sub */
5630 NULL, /* next */
5631 0, /* static_pass_number */
5632 TV_IPA_PTA, /* tv_id */
5633 0, /* properties_required */
5634 0, /* properties_provided */
5635 0, /* properties_destroyed */
5636 0, /* todo_flags_start */
5637 TODO_update_ssa /* todo_flags_finish */
5641 /* Initialize the heapvar-for-statement mapping. */
5642 void
5643 init_alias_heapvars (void)
5645 if (!heapvar_for_stmt)
5646 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
5647 NULL);
5650 void
5651 delete_alias_heapvars (void)
5653 htab_delete (heapvar_for_stmt);
5654 heapvar_for_stmt = NULL;
5657 #include "gt-tree-ssa-structalias.h"