gcc/tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "flags.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "output.h"
34 #include "tree.h"
35 #include "c-common.h"
36 #include "tree-flow.h"
37 #include "tree-inline.h"
38 #include "varray.h"
39 #include "c-tree.h"
40 #include "diagnostic.h"
41 #include "toplev.h"
42 #include "gimple.h"
43 #include "hashtab.h"
44 #include "function.h"
45 #include "cgraph.h"
46 #include "tree-pass.h"
47 #include "timevar.h"
48 #include "alloc-pool.h"
49 #include "splay-tree.h"
50 #include "params.h"
51 #include "tree-ssa-structalias.h"
52 #include "cgraph.h"
53 #include "alias.h"
54 #include "pointer-set.h"
56 /* The idea behind this analyzer is to generate set constraints from the
57 program, then solve the resulting constraints in order to generate the
58 points-to sets.
60 Set constraints are a way of modeling program analysis problems that
61 involve sets. They consist of an inclusion constraint language,
62 describing the variables (each variable is a set) and operations that
63 are involved on the variables, and a set of rules that derive facts
64 from these operations. To solve a system of set constraints, you derive
65 all possible facts under the rules, which gives you the correct sets
66 as a consequence.
68 See "Efficient Field-sensitive pointer analysis for C" by "David
69 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
70 http://citeseer.ist.psu.edu/pearce04efficient.html
72 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
73 of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
74 http://citeseer.ist.psu.edu/heintze01ultrafast.html
76 There are three types of real constraint expressions, DEREF,
77 ADDRESSOF, and SCALAR. Each constraint expression consists
78 of a constraint type, a variable, and an offset.
80 SCALAR is a constraint expression type used to represent x, whether
81 it appears on the LHS or the RHS of a statement.
82 DEREF is a constraint expression type used to represent *x, whether
83 it appears on the LHS or the RHS of a statement.
84 ADDRESSOF is a constraint expression used to represent &x, whether
85 it appears on the LHS or the RHS of a statement.
87 Each pointer variable in the program is assigned an integer id, and
88 each field of a structure variable is assigned an integer id as well.
90 Structure variables are linked to their list of fields through a "next
91 field" in each variable that points to the next field in offset
92 order.
93 Each variable for a structure field has
95 1. "size", that tells the size in bits of that field.
96 2. "fullsize, that tells the size in bits of the entire structure.
97 3. "offset", that tells the offset in bits from the beginning of the
98 structure to this field.
100 Thus,
101 struct f
102 {
103 int a;
104 int b;
105 } foo;
106 int *bar;
108 looks like
110 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
111 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
112 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
115 In order to solve the system of set constraints, the following is
116 done:
118 1. Each constraint variable x has a solution set associated with it,
119 Sol(x).
121 2. Constraints are separated into direct, copy, and complex.
122 Direct constraints are ADDRESSOF constraints that require no extra
123 processing, such as P = &Q
124 Copy constraints are those of the form P = Q.
125 Complex constraints are all the constraints involving dereferences
126 and offsets (including offsetted copies).
128 3. All direct constraints of the form P = &Q are processed, such
129 that Q is added to Sol(P)
131 4. All complex constraints for a given constraint variable are stored in a
132 linked list attached to that variable's node.
134 5. A directed graph is built out of the copy constraints. Each
135 constraint variable is a node in the graph, and an edge from
136 Q to P is added for each copy constraint of the form P = Q
138 6. The graph is then walked, and solution sets are
139 propagated along the copy edges, such that an edge from Q to P
140 causes Sol(P) <- Sol(P) union Sol(Q).
142 7. As we visit each node, all complex constraints associated with
143 that node are processed by adding appropriate copy edges to the graph, or the
144 appropriate variables to the solution set.
146 8. The process of walking the graph is iterated until no solution
147 sets change.
149 Prior to walking the graph in steps 6 and 7, we perform static
150 cycle elimination on the constraint graph, as well
151 as off-line variable substitution.
153 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
154 on and turned into anything), but isn't. You can just see what offset
155 inside the pointed-to struct it's going to access.
157 TODO: Constant bounded arrays can be handled as if they were structs of the
158 same number of elements.
160 TODO: Modeling heap and incoming pointers becomes much better if we
161 add fields to them as we discover them, which we could do.
163 TODO: We could handle unions, but to be honest, it's probably not
164 worth the pain or slowdown. */
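/* As an illustrative sketch of the scheme above (the variable names and
   the constraint spelling below are hypothetical, chosen only for
   exposition), a fragment such as

       int x;
       int *p, *r, **q;
       p = &x;         produces the direct constraint    p = &x
       q = &p;         produces the direct constraint    q = &p
       *q = p;         produces the complex constraint   *q = p
       r = *q;         produces the complex constraint   r = *q

   is solved as in steps 1-8: Sol(p) starts as {x} and Sol(q) as {p};
   processing *q = p unions Sol(p) into Sol(p) (via the member p of
   Sol(q)), and processing r = *q copies Sol(p) into Sol(r), so the
   final solution is Sol(p) = {x}, Sol(q) = {p}, Sol(r) = {x}.  */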
166 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
167 htab_t heapvar_for_stmt;
169 static bool use_field_sensitive = true;
170 static int in_ipa_mode = 0;
172 /* Used for predecessor bitmaps. */
173 static bitmap_obstack predbitmap_obstack;
175 /* Used for points-to sets. */
176 static bitmap_obstack pta_obstack;
178 /* Used for oldsolution members of variables. */
179 static bitmap_obstack oldpta_obstack;
181 /* Used for per-solver-iteration bitmaps. */
182 static bitmap_obstack iteration_obstack;
184 static unsigned int create_variable_info_for (tree, const char *);
185 typedef struct constraint_graph *constraint_graph_t;
186 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
188 DEF_VEC_P(constraint_t);
189 DEF_VEC_ALLOC_P(constraint_t,heap);
191 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
192 if (a) \
193 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
195 static struct constraint_stats
197 unsigned int total_vars;
198 unsigned int nonpointer_vars;
199 unsigned int unified_vars_static;
200 unsigned int unified_vars_dynamic;
201 unsigned int iterations;
202 unsigned int num_edges;
203 unsigned int num_implicit_edges;
204 unsigned int points_to_sets_created;
205 } stats;
207 struct variable_info
209 /* ID of this variable */
210 unsigned int id;
212 /* True if this is a variable created by the constraint analysis, such as
213 heap variables and constraints we had to break up. */
214 unsigned int is_artificial_var:1;
216 /* True if this is a special variable whose solution set should not be
217 changed. */
218 unsigned int is_special_var:1;
220 /* True for variables whose size is not known or variable. */
221 unsigned int is_unknown_size_var:1;
223 /* True for (sub-)fields that represent a whole variable. */
224 unsigned int is_full_var : 1;
226 /* True if this is a heap variable. */
227 unsigned int is_heap_var:1;
229 /* True if we may not use TBAA to prune references to this
230 variable. This is used for C++ placement new. */
231 unsigned int no_tbaa_pruning : 1;
233 /* Variable id this was collapsed to due to type unsafety. Zero if
234 this variable was not collapsed. This should be unused completely
235 after build_succ_graph, or something is broken. */
236 unsigned int collapsed_to;
238 /* A link to the variable for the next field in this structure. */
239 struct variable_info *next;
241 /* Offset of this variable, in bits, from the base variable */
242 unsigned HOST_WIDE_INT offset;
244 /* Size of the variable, in bits. */
245 unsigned HOST_WIDE_INT size;
247 /* Full size of the base variable, in bits. */
248 unsigned HOST_WIDE_INT fullsize;
250 /* Name of this variable */
251 const char *name;
253 /* Tree that this variable is associated with. */
254 tree decl;
256 /* Points-to set for this variable. */
257 bitmap solution;
259 /* Old points-to set for this variable. */
260 bitmap oldsolution;
262 typedef struct variable_info *varinfo_t;
264 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
265 static varinfo_t lookup_vi_for_tree (tree);
267 /* Pool of variable info structures. */
268 static alloc_pool variable_info_pool;
270 DEF_VEC_P(varinfo_t);
272 DEF_VEC_ALLOC_P(varinfo_t, heap);
274 /* Table of variable info structures for constraint variables.
275 Indexed directly by variable info id. */
276 static VEC(varinfo_t,heap) *varmap;
278 /* Return the varmap element N */
280 static inline varinfo_t
281 get_varinfo (unsigned int n)
283 return VEC_index (varinfo_t, varmap, n);
286 /* Return the varmap element N, following the collapsed_to link. */
288 static inline varinfo_t
289 get_varinfo_fc (unsigned int n)
291 varinfo_t v = VEC_index (varinfo_t, varmap, n);
293 if (v->collapsed_to != 0)
294 return get_varinfo (v->collapsed_to);
295 return v;
298 /* Static IDs for the special variables. */
299 enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
300 escaped_id = 3, nonlocal_id = 4, callused_id = 5, integer_id = 6 };
302 /* Variable that represents the unknown pointer. */
303 static varinfo_t var_anything;
304 static tree anything_tree;
306 /* Variable that represents the NULL pointer. */
307 static varinfo_t var_nothing;
308 static tree nothing_tree;
310 /* Variable that represents read only memory. */
311 static varinfo_t var_readonly;
312 static tree readonly_tree;
314 /* Variable that represents escaped memory. */
315 static varinfo_t var_escaped;
316 static tree escaped_tree;
318 /* Variable that represents nonlocal memory. */
319 static varinfo_t var_nonlocal;
320 static tree nonlocal_tree;
322 /* Variable that represents call-used memory. */
323 static varinfo_t var_callused;
324 static tree callused_tree;
326 /* Variable that represents integers. This is used for when people do things
327 like &0->a.b. */
328 static varinfo_t var_integer;
329 static tree integer_tree;
331 /* Lookup a heap var for FROM, and return it if we find one. */
333 static tree
334 heapvar_lookup (tree from)
336 struct tree_map *h, in;
337 in.base.from = from;
339 h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
340 htab_hash_pointer (from));
341 if (h)
342 return h->to;
343 return NULL_TREE;
346 /* Insert a mapping FROM->TO in the heap var for statement
347 hashtable. */
349 static void
350 heapvar_insert (tree from, tree to)
352 struct tree_map *h;
353 void **loc;
355 h = GGC_NEW (struct tree_map);
356 h->hash = htab_hash_pointer (from);
357 h->base.from = from;
358 h->to = to;
359 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
360 *(struct tree_map **) loc = h;
363 /* Return a new variable info structure for the variable named NAME,
364 associated with tree T and given id ID. */
366 static varinfo_t
367 new_var_info (tree t, unsigned int id, const char *name)
369 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
370 tree var;
372 ret->id = id;
373 ret->name = name;
374 ret->decl = t;
375 ret->is_artificial_var = false;
376 ret->is_heap_var = false;
377 ret->is_special_var = false;
378 ret->is_unknown_size_var = false;
379 ret->is_full_var = false;
380 var = t;
381 if (TREE_CODE (var) == SSA_NAME)
382 var = SSA_NAME_VAR (var);
383 ret->no_tbaa_pruning = (DECL_P (var)
384 && POINTER_TYPE_P (TREE_TYPE (var))
385 && DECL_NO_TBAA_P (var));
386 ret->solution = BITMAP_ALLOC (&pta_obstack);
387 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
388 ret->next = NULL;
389 ret->collapsed_to = 0;
390 return ret;
393 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
395 /* An expression that appears in a constraint. */
397 struct constraint_expr
399 /* Constraint type. */
400 constraint_expr_type type;
402 /* Variable we are referring to in the constraint. */
403 unsigned int var;
405 /* Offset, in bits, of this constraint from the beginning of
406 the variables it ends up referring to.
408 IOW, in a deref constraint, we would deref, get the result set,
409 then add OFFSET to each member. */
410 unsigned HOST_WIDE_INT offset;
413 typedef struct constraint_expr ce_s;
414 DEF_VEC_O(ce_s);
415 DEF_VEC_ALLOC_O(ce_s, heap);
416 static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool);
417 static void get_constraint_for (tree, VEC(ce_s, heap) **);
418 static void do_deref (VEC (ce_s, heap) **);
420 /* Our set constraints are made up of two constraint expressions, one
421 LHS, and one RHS.
423 As described in the introduction, our set constraints each represent an
424 operation between set valued variables. */
426 struct constraint
428 struct constraint_expr lhs;
429 struct constraint_expr rhs;
432 /* List of constraints that we use to build the constraint graph from. */
434 static VEC(constraint_t,heap) *constraints;
435 static alloc_pool constraint_pool;
438 DEF_VEC_I(int);
439 DEF_VEC_ALLOC_I(int, heap);
441 /* The constraint graph is represented as an array of bitmaps
442 containing successor nodes. */
444 struct constraint_graph
446 /* Size of this graph, which may be different than the number of
447 nodes in the variable map. */
448 unsigned int size;
450 /* Explicit successors of each node. */
451 bitmap *succs;
453 /* Implicit predecessors of each node (Used for variable
454 substitution). */
455 bitmap *implicit_preds;
457 /* Explicit predecessors of each node (Used for variable substitution). */
458 bitmap *preds;
460 /* Indirect cycle representatives, or -1 if the node has no indirect
461 cycles. */
462 int *indirect_cycles;
464 /* Representative node for a node. rep[a] == a unless the node has
465 been unified. */
466 unsigned int *rep;
468 /* Equivalence class representative for a label. This is used for
469 variable substitution. */
470 int *eq_rep;
472 /* Pointer equivalence label for a node. All nodes with the same
473 pointer equivalence label can be unified together at some point
474 (either during constraint optimization or after the constraint
475 graph is built). */
476 unsigned int *pe;
478 /* Pointer equivalence representative for a label. This is used to
479 handle nodes that are pointer equivalent but not location
480 equivalent. We can unite these once the addressof constraints
481 are transformed into initial points-to sets. */
482 int *pe_rep;
484 /* Pointer equivalence label for each node, used during variable
485 substitution. */
486 unsigned int *pointer_label;
488 /* Location equivalence label for each node, used during location
489 equivalence finding. */
490 unsigned int *loc_label;
492 /* Pointed-by set for each node, used during location equivalence
493 finding. This is pointed-by rather than pointed-to, because it
494 is constructed using the predecessor graph. */
495 bitmap *pointed_by;
497 /* Points to sets for pointer equivalence. This is *not* the actual
498 points-to sets for nodes. */
499 bitmap *points_to;
501 /* Bitmap of nodes where the bit is set if the node is a direct
502 node. Used for variable substitution. */
503 sbitmap direct_nodes;
505 /* Bitmap of nodes where the bit is set if the node is address
506 taken. Used for variable substitution. */
507 bitmap address_taken;
509 /* Vector of complex constraints for each graph node. Complex
510 constraints are those involving dereferences or offsets that are
511 not 0. */
512 VEC(constraint_t,heap) **complex;
515 static constraint_graph_t graph;
517 /* During variable substitution and the offline version of indirect
518 cycle finding, we create nodes to represent dereferences and
519 address taken constraints. These represent where these start and
520 end. */
521 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
522 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
524 /* Return the representative node for NODE, if NODE has been unioned
525 with another NODE.
526 This function performs path compression along the way to finding
527 the representative. */
529 static unsigned int
530 find (unsigned int node)
532 gcc_assert (node < graph->size);
533 if (graph->rep[node] != node)
534 return graph->rep[node] = find (graph->rep[node]);
535 return node;
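/* A small illustration of the path compression performed above (the
   concrete rep contents are hypothetical): if graph->rep were
   5 -> 3 -> 1 -> 1, then find (5) walks up to the representative 1 and,
   while unwinding, rewrites graph->rep[5] and graph->rep[3] to 1, so
   subsequent lookups of those nodes are answered in one step.  */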
538 /* Union the TO and FROM nodes into the TO node.
539 Note that at some point in the future, we may want to do
540 union-by-rank, in which case we are going to have to return the
541 node we unified to. */
543 static bool
544 unite (unsigned int to, unsigned int from)
546 gcc_assert (to < graph->size && from < graph->size);
547 if (to != from && graph->rep[from] != to)
549 graph->rep[from] = to;
550 return true;
552 return false;
555 /* Create a new constraint consisting of LHS and RHS expressions. */
557 static constraint_t
558 new_constraint (const struct constraint_expr lhs,
559 const struct constraint_expr rhs)
561 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
562 ret->lhs = lhs;
563 ret->rhs = rhs;
564 return ret;
567 /* Print out constraint C to FILE. */
569 void
570 dump_constraint (FILE *file, constraint_t c)
572 if (c->lhs.type == ADDRESSOF)
573 fprintf (file, "&");
574 else if (c->lhs.type == DEREF)
575 fprintf (file, "*");
576 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
577 if (c->lhs.offset != 0)
578 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
579 fprintf (file, " = ");
580 if (c->rhs.type == ADDRESSOF)
581 fprintf (file, "&");
582 else if (c->rhs.type == DEREF)
583 fprintf (file, "*");
584 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
585 if (c->rhs.offset != 0)
586 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
587 fprintf (file, "\n");
590 /* Print out constraint C to stderr. */
592 void
593 debug_constraint (constraint_t c)
595 dump_constraint (stderr, c);
598 /* Print out all constraints to FILE */
600 void
601 dump_constraints (FILE *file)
603 int i;
604 constraint_t c;
605 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
606 dump_constraint (file, c);
609 /* Print out all constraints to stderr. */
611 void
612 debug_constraints (void)
614 dump_constraints (stderr);
617 /* Print out to FILE the edge in the constraint graph that is created by
618 constraint c. The edge may have a label, depending on the type of
619 constraint that it represents. If complex1, e.g.: a = *b, then the label
620 is "=*", if complex2, e.g.: *a = b, then the label is "*=", if
621 complex with an offset, e.g.: a = b + 8, then the label is "+".
622 Otherwise the edge has no label. */
624 void
625 dump_constraint_edge (FILE *file, constraint_t c)
627 if (c->rhs.type != ADDRESSOF)
629 const char *src = get_varinfo_fc (c->rhs.var)->name;
630 const char *dst = get_varinfo_fc (c->lhs.var)->name;
631 fprintf (file, " \"%s\" -> \"%s\" ", src, dst);
632 /* Due to preprocessing of constraints, instructions like *a = *b are
633 illegal; thus, we do not have to handle such cases. */
634 if (c->lhs.type == DEREF)
635 fprintf (file, " [ label=\"*=\" ] ;\n");
636 else if (c->rhs.type == DEREF)
637 fprintf (file, " [ label=\"=*\" ] ;\n");
638 else
640 /* We must check the case where the constraint is an offset.
641 In this case, it is treated as a complex constraint. */
642 if (c->rhs.offset != c->lhs.offset)
643 fprintf (file, " [ label=\"+\" ] ;\n");
644 else
645 fprintf (file, " ;\n");
650 /* Print the constraint graph in dot format. */
652 void
653 dump_constraint_graph (FILE *file)
655 unsigned int i=0, size;
656 constraint_t c;
658 /* Only print the graph if it has already been initialized: */
659 if (!graph)
660 return;
662 /* Print the constraints used to produce the constraint graph. The
663 constraints will be printed as comments in the dot file: */
664 fprintf (file, "\n\n/* Constraints used in the constraint graph:\n");
665 dump_constraints (file);
666 fprintf (file, "*/\n");
668 /* Prints the header of the dot file: */
669 fprintf (file, "\n\n// The constraint graph in dot format:\n");
670 fprintf (file, "strict digraph {\n");
671 fprintf (file, " node [\n shape = box\n ]\n");
672 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
673 fprintf (file, "\n // List of nodes in the constraint graph:\n");
675 /* The next lines print the nodes in the graph. In order to get the
676 number of nodes in the graph, we must take the minimum of the length of
677 the vector VEC (varinfo_t, varmap) and graph->size. If the graph has not
678 yet been initialized, then graph->size == 0, otherwise we must only
679 read nodes that have an entry in VEC (varinfo_t, varmap). */
680 size = VEC_length (varinfo_t, varmap);
681 size = size < graph->size ? size : graph->size;
682 for (i = 0; i < size; i++)
684 const char *name = get_varinfo_fc (graph->rep[i])->name;
685 fprintf (file, " \"%s\" ;\n", name);
688 /* Go over the list of constraints printing the edges in the constraint
689 graph. */
690 fprintf (file, "\n // The constraint edges:\n");
691 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
692 if (c)
693 dump_constraint_edge (file, c);
695 /* Prints the tail of the dot file. For now, only the closing bracket. */
696 fprintf (file, "}\n\n\n");
699 /* Print out the constraint graph to stderr. */
701 void
702 debug_constraint_graph (void)
704 dump_constraint_graph (stderr);
707 /* SOLVER FUNCTIONS
709 The solver is a simple worklist solver that works on the following
710 algorithm:
712 sbitmap changed_nodes = all zeroes;
713 changed_count = 0;
714 For each node that is not already collapsed:
715 changed_count++;
716 set bit in changed nodes
718 while (changed_count > 0)
720 compute topological ordering for constraint graph
722 find and collapse cycles in the constraint graph (updating
723 changed if necessary)
725 for each node (n) in the graph in topological order:
726 changed_count--;
728 Process each complex constraint associated with the node,
729 updating changed if necessary.
731 For each outgoing edge from n, propagate the solution from n to
732 the destination of the edge, updating changed as necessary.
734 } */
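/* A minimal sketch of how the loop above converges (the names a, b, c
   and x are hypothetical): with copy edges a -> b and b -> c and an
   initial Sol(a) = {x}, one pass over the topological order propagates
   {x} into Sol(b) and then into Sol(c); once no solution set changes
   any more, changed_count drops to zero and the worklist loop stops.  */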
736 /* Return true if two constraint expressions A and B are equal. */
738 static bool
739 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
741 return a.type == b.type && a.var == b.var && a.offset == b.offset;
744 /* Return true if constraint expression A is less than constraint expression
745 B. This is just arbitrary, but consistent, in order to give them an
746 ordering. */
748 static bool
749 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
751 if (a.type == b.type)
753 if (a.var == b.var)
754 return a.offset < b.offset;
755 else
756 return a.var < b.var;
758 else
759 return a.type < b.type;
762 /* Return true if constraint A is less than constraint B. This is just
763 arbitrary, but consistent, in order to give them an ordering. */
765 static bool
766 constraint_less (const constraint_t a, const constraint_t b)
768 if (constraint_expr_less (a->lhs, b->lhs))
769 return true;
770 else if (constraint_expr_less (b->lhs, a->lhs))
771 return false;
772 else
773 return constraint_expr_less (a->rhs, b->rhs);
776 /* Return true if two constraints A and B are equal. */
778 static bool
779 constraint_equal (struct constraint a, struct constraint b)
781 return constraint_expr_equal (a.lhs, b.lhs)
782 && constraint_expr_equal (a.rhs, b.rhs);
786 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
788 static constraint_t
789 constraint_vec_find (VEC(constraint_t,heap) *vec,
790 struct constraint lookfor)
792 unsigned int place;
793 constraint_t found;
795 if (vec == NULL)
796 return NULL;
798 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
799 if (place >= VEC_length (constraint_t, vec))
800 return NULL;
801 found = VEC_index (constraint_t, vec, place);
802 if (!constraint_equal (*found, lookfor))
803 return NULL;
804 return found;
807 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
809 static void
810 constraint_set_union (VEC(constraint_t,heap) **to,
811 VEC(constraint_t,heap) **from)
813 int i;
814 constraint_t c;
816 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
818 if (constraint_vec_find (*to, *c) == NULL)
820 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
821 constraint_less);
822 VEC_safe_insert (constraint_t, heap, *to, place, c);
827 /* Take a solution set SET, add OFFSET to each member of the set, and
828 overwrite SET with the result when done. */
830 static void
831 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
833 bitmap result = BITMAP_ALLOC (&iteration_obstack);
834 unsigned int i;
835 bitmap_iterator bi;
837 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
839 varinfo_t vi = get_varinfo (i);
841 /* If this is a variable with just one field just set its bit
842 in the result. */
843 if (vi->is_artificial_var
844 || vi->is_unknown_size_var
845 || vi->is_full_var)
846 bitmap_set_bit (result, i);
847 else
849 unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset;
850 varinfo_t v = first_vi_for_offset (vi, fieldoffset);
851 /* If the result is outside of the variable use the last field. */
852 if (!v)
854 v = vi;
855 while (v->next != NULL)
856 v = v->next;
858 bitmap_set_bit (result, v->id);
859 /* If the result is not exactly at fieldoffset include the next
860 field as well. See get_constraint_for_ptr_offset for more
861 rationale. */
862 if (v->offset != fieldoffset
863 && v->next != NULL)
864 bitmap_set_bit (result, v->next->id);
868 bitmap_copy (set, result);
869 BITMAP_FREE (result);
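/* A worked example of the function above, reusing the struct f layout
   from the overview comment (foo.a at offset 0, foo.b at offset 32,
   both 32 bits wide): with SET = {foo.a} and OFFSET = 32 the result is
   {foo.b}; with OFFSET = 16 the access does not start exactly at a
   field boundary, so both foo.a and the following field foo.b are
   included, as described in the comments above.  */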
872 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
873 process. */
875 static bool
876 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
878 if (inc == 0)
879 return bitmap_ior_into (to, from);
880 else
882 bitmap tmp;
883 bool res;
885 tmp = BITMAP_ALLOC (&iteration_obstack);
886 bitmap_copy (tmp, from);
887 solution_set_add (tmp, inc);
888 res = bitmap_ior_into (to, tmp);
889 BITMAP_FREE (tmp);
890 return res;
894 /* Insert constraint C into the list of complex constraints for graph
895 node VAR. */
897 static void
898 insert_into_complex (constraint_graph_t graph,
899 unsigned int var, constraint_t c)
901 VEC (constraint_t, heap) *complex = graph->complex[var];
902 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
903 constraint_less);
905 /* Only insert constraints that do not already exist. */
906 if (place >= VEC_length (constraint_t, complex)
907 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
908 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
912 /* Condense two variable nodes into a single variable node, by moving
913 all associated info from FROM to TO. */
915 static void
916 merge_node_constraints (constraint_graph_t graph, unsigned int to,
917 unsigned int from)
919 unsigned int i;
920 constraint_t c;
922 gcc_assert (find (from) == to);
924 /* Move all complex constraints from node FROM into node TO. */
925 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
927 /* In complex constraints for node FROM, we may have either
928 a = *FROM or *FROM = a, or an offsetted constraint, which are
929 always added to the rhs node's constraints. */
931 if (c->rhs.type == DEREF)
932 c->rhs.var = to;
933 else if (c->lhs.type == DEREF)
934 c->lhs.var = to;
935 else
936 c->rhs.var = to;
938 constraint_set_union (&graph->complex[to], &graph->complex[from]);
939 VEC_free (constraint_t, heap, graph->complex[from]);
940 graph->complex[from] = NULL;
944 /* Remove edges involving NODE from GRAPH. */
946 static void
947 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
949 if (graph->succs[node])
950 BITMAP_FREE (graph->succs[node]);
953 /* Merge GRAPH nodes FROM and TO into node TO. */
955 static void
956 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
957 unsigned int from)
959 if (graph->indirect_cycles[from] != -1)
961 /* If we have indirect cycles with the from node, and we have
962 none on the to node, the to node has indirect cycles from the
963 from node now that they are unified.
964 If indirect cycles exist on both, unify the nodes that they
965 are in a cycle with, since we know they are in a cycle with
966 each other. */
967 if (graph->indirect_cycles[to] == -1)
968 graph->indirect_cycles[to] = graph->indirect_cycles[from];
971 /* Merge all the successor edges. */
972 if (graph->succs[from])
974 if (!graph->succs[to])
975 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
976 bitmap_ior_into (graph->succs[to],
977 graph->succs[from]);
980 clear_edges_for_node (graph, from);
984 /* Add an implicit predecessor edge to GRAPH, going from TO to FROM if
985 it doesn't exist in the graph already. */
987 static void
988 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
989 unsigned int from)
991 if (to == from)
992 return;
994 if (!graph->implicit_preds[to])
995 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
997 if (bitmap_set_bit (graph->implicit_preds[to], from))
998 stats.num_implicit_edges++;
1001 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1002 it doesn't exist in the graph already. */
1005 static void
1006 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1007 unsigned int from)
1009 if (!graph->preds[to])
1010 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1011 bitmap_set_bit (graph->preds[to], from);
1014 /* Add a graph edge to GRAPH, going from FROM to TO if
1015 it doesn't exist in the graph already.
1016 Return false if the edge already existed, true otherwise. */
1018 static bool
1019 add_graph_edge (constraint_graph_t graph, unsigned int to,
1020 unsigned int from)
1022 if (to == from)
1024 return false;
1026 else
1028 bool r = false;
1030 if (!graph->succs[from])
1031 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1032 if (bitmap_set_bit (graph->succs[from], to))
1034 r = true;
1035 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1036 stats.num_edges++;
1038 return r;
1043 /* Return true if {DEST, SRC} is an existing graph edge in GRAPH. */
1045 static bool
1046 valid_graph_edge (constraint_graph_t graph, unsigned int src,
1047 unsigned int dest)
1049 return (graph->succs[dest]
1050 && bitmap_bit_p (graph->succs[dest], src));
1053 /* Initialize the constraint graph structure to contain SIZE nodes. */
1055 static void
1056 init_graph (unsigned int size)
1058 unsigned int j;
1060 graph = XCNEW (struct constraint_graph);
1061 graph->size = size;
1062 graph->succs = XCNEWVEC (bitmap, graph->size);
1063 graph->indirect_cycles = XNEWVEC (int, graph->size);
1064 graph->rep = XNEWVEC (unsigned int, graph->size);
1065 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
1066 graph->pe = XCNEWVEC (unsigned int, graph->size);
1067 graph->pe_rep = XNEWVEC (int, graph->size);
1069 for (j = 0; j < graph->size; j++)
1071 graph->rep[j] = j;
1072 graph->pe_rep[j] = -1;
1073 graph->indirect_cycles[j] = -1;
1077 /* Build the constraint graph, adding only predecessor edges right now. */
1079 static void
1080 build_pred_graph (void)
1082 int i;
1083 constraint_t c;
1084 unsigned int j;
1086 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1087 graph->preds = XCNEWVEC (bitmap, graph->size);
1088 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1089 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1090 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1091 graph->points_to = XCNEWVEC (bitmap, graph->size);
1092 graph->eq_rep = XNEWVEC (int, graph->size);
1093 graph->direct_nodes = sbitmap_alloc (graph->size);
1094 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1095 sbitmap_zero (graph->direct_nodes);
1097 for (j = 0; j < FIRST_REF_NODE; j++)
1099 if (!get_varinfo (j)->is_special_var)
1100 SET_BIT (graph->direct_nodes, j);
1103 for (j = 0; j < graph->size; j++)
1104 graph->eq_rep[j] = -1;
1106 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1107 graph->indirect_cycles[j] = -1;
1109 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1111 struct constraint_expr lhs = c->lhs;
1112 struct constraint_expr rhs = c->rhs;
1113 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
1114 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
1116 if (lhs.type == DEREF)
1118 /* *x = y. */
1119 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1120 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1122 else if (rhs.type == DEREF)
1124 /* x = *y */
1125 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1126 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1127 else
1128 RESET_BIT (graph->direct_nodes, lhsvar);
1130 else if (rhs.type == ADDRESSOF)
1132 /* x = &y */
1133 if (graph->points_to[lhsvar] == NULL)
1134 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1135 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1137 if (graph->pointed_by[rhsvar] == NULL)
1138 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1139 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1141 /* Implicitly, *x = y */
1142 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1144 RESET_BIT (graph->direct_nodes, rhsvar);
1145 bitmap_set_bit (graph->address_taken, rhsvar);
1147 else if (lhsvar > anything_id
1148 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1150 /* x = y */
1151 add_pred_graph_edge (graph, lhsvar, rhsvar);
1152 /* Implicitly, *x = *y */
1153 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1154 FIRST_REF_NODE + rhsvar);
1156 else if (lhs.offset != 0 || rhs.offset != 0)
1158 if (rhs.offset != 0)
1159 RESET_BIT (graph->direct_nodes, lhs.var);
1160 else if (lhs.offset != 0)
1161 RESET_BIT (graph->direct_nodes, rhs.var);
1166 /* Build the constraint graph, adding successor edges. */
1168 static void
1169 build_succ_graph (void)
1171 int i;
1172 constraint_t c;
1174 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1176 struct constraint_expr lhs;
1177 struct constraint_expr rhs;
1178 unsigned int lhsvar;
1179 unsigned int rhsvar;
1181 if (!c)
1182 continue;
1184 lhs = c->lhs;
1185 rhs = c->rhs;
1186 lhsvar = find (get_varinfo_fc (lhs.var)->id);
1187 rhsvar = find (get_varinfo_fc (rhs.var)->id);
1189 if (lhs.type == DEREF)
1191 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1192 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1194 else if (rhs.type == DEREF)
1196 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1197 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1199 else if (rhs.type == ADDRESSOF)
1201 /* x = &y */
1202 gcc_assert (find (get_varinfo_fc (rhs.var)->id)
1203 == get_varinfo_fc (rhs.var)->id);
1204 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1206 else if (lhsvar > anything_id
1207 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1209 add_graph_edge (graph, lhsvar, rhsvar);
1215 /* Changed variables on the last iteration. */
1216 static unsigned int changed_count;
1217 static sbitmap changed;
1219 DEF_VEC_I(unsigned);
1220 DEF_VEC_ALLOC_I(unsigned,heap);
1223 /* Strongly Connected Component visitation info. */
1225 struct scc_info
1227 sbitmap visited;
1228 sbitmap deleted;
1229 unsigned int *dfs;
1230 unsigned int *node_mapping;
1231 int current_index;
1232 VEC(unsigned,heap) *scc_stack;
1236 /* Recursive routine to find strongly connected components in GRAPH.
1237 SI is the SCC info to store the information in, and N is the id of current
1238 graph node we are processing.
1240 This is Tarjan's strongly connected component finding algorithm, as
1241 modified by Nuutila to keep only non-root nodes on the stack.
1242 The algorithm can be found in "On finding the strongly connected
1243 components in a directed graph" by Esko Nuutila and Eljas
1244 Soisalon-Soininen, in Information Processing Letters volume 49,
1245 number 1, pages 9-14. */
1247 static void
1248 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1250 unsigned int i;
1251 bitmap_iterator bi;
1252 unsigned int my_dfs;
1254 SET_BIT (si->visited, n);
1255 si->dfs[n] = si->current_index ++;
1256 my_dfs = si->dfs[n];
1258 /* Visit all the successors. */
1259 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1261 unsigned int w;
1263 if (i > LAST_REF_NODE)
1264 break;
1266 w = find (i);
1267 if (TEST_BIT (si->deleted, w))
1268 continue;
1270 if (!TEST_BIT (si->visited, w))
1271 scc_visit (graph, si, w);
1273 unsigned int t = find (w);
1274 unsigned int nnode = find (n);
1275 gcc_assert (nnode == n);
1277 if (si->dfs[t] < si->dfs[nnode])
1278 si->dfs[n] = si->dfs[t];
1282 /* See if any components have been identified. */
1283 if (si->dfs[n] == my_dfs)
1285 if (VEC_length (unsigned, si->scc_stack) > 0
1286 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1288 bitmap scc = BITMAP_ALLOC (NULL);
1289 bool have_ref_node = n >= FIRST_REF_NODE;
1290 unsigned int lowest_node;
1291 bitmap_iterator bi;
1293 bitmap_set_bit (scc, n);
1295 while (VEC_length (unsigned, si->scc_stack) != 0
1296 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1298 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1300 bitmap_set_bit (scc, w);
1301 if (w >= FIRST_REF_NODE)
1302 have_ref_node = true;
1305 lowest_node = bitmap_first_set_bit (scc);
1306 gcc_assert (lowest_node < FIRST_REF_NODE);
1308 /* Collapse the SCC nodes into a single node, and mark the
1309 indirect cycles. */
1310 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1312 if (i < FIRST_REF_NODE)
1314 if (unite (lowest_node, i))
1315 unify_nodes (graph, lowest_node, i, false);
1317 else
1319 unite (lowest_node, i);
1320 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1324 SET_BIT (si->deleted, n);
1326 else
1327 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1330 /* Unify node FROM into node TO, updating the changed count if
1331 necessary when UPDATE_CHANGED is true. */
1333 static void
1334 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1335 bool update_changed)
1338 gcc_assert (to != from && find (to) == to);
1339 if (dump_file && (dump_flags & TDF_DETAILS))
1340 fprintf (dump_file, "Unifying %s to %s\n",
1341 get_varinfo (from)->name,
1342 get_varinfo (to)->name);
1344 if (update_changed)
1345 stats.unified_vars_dynamic++;
1346 else
1347 stats.unified_vars_static++;
1349 merge_graph_nodes (graph, to, from);
1350 merge_node_constraints (graph, to, from);
1352 if (get_varinfo (from)->no_tbaa_pruning)
1353 get_varinfo (to)->no_tbaa_pruning = true;
1355 /* Mark TO as changed if FROM was changed. If TO was already marked
1356 as changed, decrease the changed count. */
1358 if (update_changed && TEST_BIT (changed, from))
1360 RESET_BIT (changed, from);
1361 if (!TEST_BIT (changed, to))
1362 SET_BIT (changed, to);
1363 else
1365 gcc_assert (changed_count > 0);
1366 changed_count--;
1369 if (get_varinfo (from)->solution)
1371 /* If the solution changes because of the merging, we need to mark
1372 the variable as changed. */
1373 if (bitmap_ior_into (get_varinfo (to)->solution,
1374 get_varinfo (from)->solution))
1376 if (update_changed && !TEST_BIT (changed, to))
1378 SET_BIT (changed, to);
1379 changed_count++;
1383 BITMAP_FREE (get_varinfo (from)->solution);
1384 BITMAP_FREE (get_varinfo (from)->oldsolution);
1386 if (stats.iterations > 0)
1388 BITMAP_FREE (get_varinfo (to)->oldsolution);
1389 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
1392 if (valid_graph_edge (graph, to, to))
1394 if (graph->succs[to])
1395 bitmap_clear_bit (graph->succs[to], to);
1399 /* Information needed to compute the topological ordering of a graph. */
1401 struct topo_info
1403 /* sbitmap of visited nodes. */
1404 sbitmap visited;
1405 /* Array that stores the topological order of the graph, *in
1406 reverse*. */
1407 VEC(unsigned,heap) *topo_order;
1411 /* Initialize and return a topological info structure. */
1413 static struct topo_info *
1414 init_topo_info (void)
1416 size_t size = graph->size;
1417 struct topo_info *ti = XNEW (struct topo_info);
1418 ti->visited = sbitmap_alloc (size);
1419 sbitmap_zero (ti->visited);
1420 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1421 return ti;
1425 /* Free the topological sort info pointed to by TI. */
1427 static void
1428 free_topo_info (struct topo_info *ti)
1430 sbitmap_free (ti->visited);
1431 VEC_free (unsigned, heap, ti->topo_order);
1432 free (ti);
1435 /* Visit the graph in topological order, and store the order in the
1436 topo_info structure. */
1438 static void
1439 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1440 unsigned int n)
1442 bitmap_iterator bi;
1443 unsigned int j;
1445 SET_BIT (ti->visited, n);
1447 if (graph->succs[n])
1448 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1450 if (!TEST_BIT (ti->visited, j))
1451 topo_visit (graph, ti, j);
1454 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1457 /* Return true if variable N + OFFSET is a legal field of N. */
1459 static bool
1460 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1462 varinfo_t ninfo = get_varinfo (n);
1464 /* For things we've globbed to single variables, any offset into the
1465 variable acts like the entire variable, so that it becomes offset
1466 0. */
1467 if (ninfo->is_special_var
1468 || ninfo->is_artificial_var
1469 || ninfo->is_unknown_size_var
1470 || ninfo->is_full_var)
1472 *offset = 0;
1473 return true;
1475 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1478 /* Process a constraint C that represents x = *y, using DELTA as the
1479 starting solution. */
1481 static void
1482 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1483 bitmap delta)
1485 unsigned int lhs = c->lhs.var;
1486 bool flag = false;
1487 bitmap sol = get_varinfo (lhs)->solution;
1488 unsigned int j;
1489 bitmap_iterator bi;
1491 if (bitmap_bit_p (delta, anything_id))
1493 flag |= bitmap_set_bit (sol, anything_id);
1494 goto done;
1497 /* For x = *ESCAPED and x = *CALLUSED we want to compute the
1498 reachability set of the rhs var. As a pointer to a sub-field
1499 of a variable can also reach all other fields of the variable
1500 we simply have to expand the solution to contain all sub-fields
1501 if one sub-field is contained. */
1502 if (c->rhs.var == escaped_id
1503 || c->rhs.var == callused_id)
1505 bitmap vars = NULL;
1506 /* In a first pass record all variables we need to add all
1507 sub-fields of. This avoids quadratic behavior. */
1508 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1510 varinfo_t v = get_varinfo (j);
1511 if (v->is_full_var)
1512 continue;
1514 v = lookup_vi_for_tree (v->decl);
1515 if (v->next != NULL)
1517 if (vars == NULL)
1518 vars = BITMAP_ALLOC (NULL);
1519 bitmap_set_bit (vars, v->id);
1522 /* In the second pass now do the addition to the solution and
1523 to speed up solving add it to the delta as well. */
1524 if (vars != NULL)
1526 EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
1528 varinfo_t v = get_varinfo (j);
1529 for (; v != NULL; v = v->next)
1531 if (bitmap_set_bit (sol, v->id))
1533 flag = true;
1534 bitmap_set_bit (delta, v->id);
1538 BITMAP_FREE (vars);
1542 /* For each variable j in delta (Sol(y)), add
1543 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1544 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1546 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
1547 if (type_safe (j, &roffset))
1549 varinfo_t v;
1550 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1551 unsigned int t;
1553 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1554 /* If the access is outside of the variable we can ignore it. */
1555 if (!v)
1556 continue;
1557 t = find (v->id);
1559 /* Adding edges from the special vars is pointless.
1560 They don't have sets that can change. */
1561 if (get_varinfo (t)->is_special_var)
1562 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1563 /* Merging the solution from ESCAPED needlessly increases
1564 the set. Use ESCAPED as representative instead.
1565 Same for CALLUSED. */
1566 else if (get_varinfo (t)->id == escaped_id
1567 || get_varinfo (t)->id == callused_id)
1568 flag |= bitmap_set_bit (sol, get_varinfo (t)->id);
1569 else if (add_graph_edge (graph, lhs, t))
1570 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1574 done:
1575 /* If the LHS solution changed, mark the var as changed. */
1576 if (flag)
1578 get_varinfo (lhs)->solution = sol;
1579 if (!TEST_BIT (changed, lhs))
1581 SET_BIT (changed, lhs);
1582 changed_count++;
1587 /* Process a constraint C that represents *x = y. */
1589 static void
1590 do_ds_constraint (constraint_t c, bitmap delta)
1592 unsigned int rhs = c->rhs.var;
1593 bitmap sol = get_varinfo (rhs)->solution;
1594 unsigned int j;
1595 bitmap_iterator bi;
1597 if (bitmap_bit_p (sol, anything_id))
1599 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1601 varinfo_t jvi = get_varinfo (j);
1602 unsigned int t;
1603 unsigned int loff = c->lhs.offset;
1604 unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
1605 varinfo_t v;
1607 v = get_varinfo (j);
1608 if (!v->is_full_var)
1610 v = first_vi_for_offset (v, fieldoffset);
1611 /* If the access is outside of the variable we can ignore it. */
1612 if (!v)
1613 continue;
1615 t = find (v->id);
1617 if (bitmap_set_bit (get_varinfo (t)->solution, anything_id)
1618 && !TEST_BIT (changed, t))
1620 SET_BIT (changed, t);
1621 changed_count++;
1624 return;
1627 /* For each member j of delta (Sol(x)), add an edge from y to j and
1628 union Sol(y) into Sol(j) */
1629 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1631 unsigned HOST_WIDE_INT loff = c->lhs.offset;
1632 if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
1634 varinfo_t v;
1635 unsigned int t;
1636 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1637 bitmap tmp;
1639 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1640 /* If the access is outside of the variable we can ignore it. */
1641 if (!v)
1642 continue;
1643 t = find (v->id);
1644 tmp = get_varinfo (t)->solution;
1646 if (set_union_with_increment (tmp, sol, 0))
1648 get_varinfo (t)->solution = tmp;
1649 if (t == rhs)
1650 sol = get_varinfo (rhs)->solution;
1651 if (!TEST_BIT (changed, t))
1653 SET_BIT (changed, t);
1654 changed_count++;
1661 /* Handle a non-simple constraint (simple meaning it requires no iteration),
1662 IE *x = &y, x = *y, *x = y, and x = y with offsets involved. */
1664 static void
1665 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1667 if (c->lhs.type == DEREF)
1669 if (c->rhs.type == ADDRESSOF)
1671 gcc_unreachable();
1673 else
1675 /* *x = y */
1676 do_ds_constraint (c, delta);
1679 else if (c->rhs.type == DEREF)
1681 /* x = *y */
1682 if (!(get_varinfo (c->lhs.var)->is_special_var))
1683 do_sd_constraint (graph, c, delta);
1685 else
1687 bitmap tmp;
1688 bitmap solution;
1689 bool flag = false;
1691 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1692 solution = get_varinfo (c->rhs.var)->solution;
1693 tmp = get_varinfo (c->lhs.var)->solution;
1695 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1697 if (flag)
1699 get_varinfo (c->lhs.var)->solution = tmp;
1700 if (!TEST_BIT (changed, c->lhs.var))
1702 SET_BIT (changed, c->lhs.var);
1703 changed_count++;
1709 /* Initialize and return a new SCC info structure. */
1711 static struct scc_info *
1712 init_scc_info (size_t size)
1714 struct scc_info *si = XNEW (struct scc_info);
1715 size_t i;
1717 si->current_index = 0;
1718 si->visited = sbitmap_alloc (size);
1719 sbitmap_zero (si->visited);
1720 si->deleted = sbitmap_alloc (size);
1721 sbitmap_zero (si->deleted);
1722 si->node_mapping = XNEWVEC (unsigned int, size);
1723 si->dfs = XCNEWVEC (unsigned int, size);
1725 for (i = 0; i < size; i++)
1726 si->node_mapping[i] = i;
1728 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1729 return si;
1732 /* Free an SCC info structure pointed to by SI */
1734 static void
1735 free_scc_info (struct scc_info *si)
1737 sbitmap_free (si->visited);
1738 sbitmap_free (si->deleted);
1739 free (si->node_mapping);
1740 free (si->dfs);
1741 VEC_free (unsigned, heap, si->scc_stack);
1742 free (si);
1746 /* Find indirect cycles in GRAPH that occur, using strongly connected
1747 components, and note them in the indirect cycles map.
1749 This technique comes from Ben Hardekopf and Calvin Lin,
1750 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1751 Lines of Code", submitted to PLDI 2007. */
1753 static void
1754 find_indirect_cycles (constraint_graph_t graph)
1756 unsigned int i;
1757 unsigned int size = graph->size;
1758 struct scc_info *si = init_scc_info (size);
1760 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1761 if (!TEST_BIT (si->visited, i) && find (i) == i)
1762 scc_visit (graph, si, i);
1764 free_scc_info (si);
1767 /* Compute a topological ordering for GRAPH, and store the result in the
1768 topo_info structure TI. */
1770 static void
1771 compute_topo_order (constraint_graph_t graph,
1772 struct topo_info *ti)
1774 unsigned int i;
1775 unsigned int size = graph->size;
1777 for (i = 0; i != size; ++i)
1778 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1779 topo_visit (graph, ti, i);
1782 /* Structure used for hash value numbering of pointer equivalence
1783 classes. */
1785 typedef struct equiv_class_label
1787 unsigned int equivalence_class;
1788 bitmap labels;
1789 hashval_t hashcode;
1790 } *equiv_class_label_t;
1791 typedef const struct equiv_class_label *const_equiv_class_label_t;
1793 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1794 classes. */
1795 static htab_t pointer_equiv_class_table;
1797 /* A hashtable for mapping a bitmap of labels->location equivalence
1798 classes. */
1799 static htab_t location_equiv_class_table;
1801 /* Hash function for an equiv_class_label_t. */
1803 static hashval_t
1804 equiv_class_label_hash (const void *p)
1806 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1807 return ecl->hashcode;
1810 /* Equality function for two equiv_class_label_t's. */
1812 static int
1813 equiv_class_label_eq (const void *p1, const void *p2)
1815 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1816 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1817 return bitmap_equal_p (eql1->labels, eql2->labels);
1820 /* Lookup an equivalence class in TABLE by the bitmap of LABELS it
1821 contains. */
1823 static unsigned int
1824 equiv_class_lookup (htab_t table, bitmap labels)
1826 void **slot;
1827 struct equiv_class_label ecl;
1829 ecl.labels = labels;
1830 ecl.hashcode = bitmap_hash (labels);
1832 slot = htab_find_slot_with_hash (table, &ecl,
1833 ecl.hashcode, NO_INSERT);
1834 if (!slot)
1835 return 0;
1836 else
1837 return ((equiv_class_label_t) *slot)->equivalence_class;
1841 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1842 to TABLE. */
1844 static void
1845 equiv_class_add (htab_t table, unsigned int equivalence_class,
1846 bitmap labels)
1848 void **slot;
1849 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1851 ecl->labels = labels;
1852 ecl->equivalence_class = equivalence_class;
1853 ecl->hashcode = bitmap_hash (labels);
1855 slot = htab_find_slot_with_hash (table, ecl,
1856 ecl->hashcode, INSERT);
1857 gcc_assert (!*slot);
1858 *slot = (void *) ecl;
1861 /* Perform offline variable substitution.
1863 This is a worst case quadratic time way of identifying variables
1864 that must have equivalent points-to sets, including those caused by
1865 static cycles, and single entry subgraphs, in the constraint graph.
1867 The technique is described in "Exploiting Pointer and Location
1868 Equivalence to Optimize Pointer Analysis", in the 14th International
1869 Static Analysis Symposium (SAS), August 2007. It is known as the
1870 "HU" algorithm, and is equivalent to value numbering the collapsed
1871 constraint graph including evaluating unions.
1873 The general method of finding equivalence classes is as follows:
1874 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1875 Initialize all non-REF nodes to be direct nodes.
1876 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1877 variable}
1878 For each constraint containing the dereference, we also do the same
1879 thing.
1881 We then compute SCC's in the graph and unify nodes in the same SCC,
1882 including pts sets.
1884 For each non-collapsed node x:
1885 Visit all unvisited explicit incoming edges.
1886 Ignoring all non-pointers, set pts(x) = Union of pts(y) for all y
1887 where y->x.
1888 Lookup the equivalence class for pts(x).
1889 If we found one, equivalence_class(x) = found class.
1890 Otherwise, equivalence_class(x) = new class, and new_class is
1891 added to the lookup table.
1893 All direct nodes with the same equivalence class can be replaced
1894 with a single representative node.
1895 All unlabeled nodes (label == 0) are not pointers and all edges
1896 involving them can be eliminated.
1897 We perform these optimizations during rewrite_constraints
1899 In addition to pointer equivalence class finding, we also perform
1900 location equivalence class finding. This is the set of variables
1901 that always appear together in points-to sets. We use this to
1902 compress the size of the points-to sets. */
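/* A small illustrative example of the labeling (the variables a, b and
   x are hypothetical): if the only address-of constraints are a = &x
   and b = &x, then during labeling both a and b end up with the
   points-to set {x}, receive the same pointer equivalence label, and
   can therefore share a single representative node; a node that never
   receives a label (label == 0) is known not to be a pointer and its
   edges can be removed, as noted above.  */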
1904 /* Current maximum pointer equivalence class id. */
1905 static int pointer_equiv_class;
1907 /* Current maximum location equivalence class id. */
1908 static int location_equiv_class;
1910 /* Recursive routine to find strongly connected components in GRAPH,
1911 and label its nodes with DFS numbers. */
1913 static void
1914 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1916 unsigned int i;
1917 bitmap_iterator bi;
1918 unsigned int my_dfs;
1920 gcc_assert (si->node_mapping[n] == n);
1921 SET_BIT (si->visited, n);
1922 si->dfs[n] = si->current_index ++;
1923 my_dfs = si->dfs[n];
1925 /* Visit all the successors. */
1926 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1928 unsigned int w = si->node_mapping[i];
1930 if (TEST_BIT (si->deleted, w))
1931 continue;
1933 if (!TEST_BIT (si->visited, w))
1934 condense_visit (graph, si, w);
1936 unsigned int t = si->node_mapping[w];
1937 unsigned int nnode = si->node_mapping[n];
1938 gcc_assert (nnode == n);
1940 if (si->dfs[t] < si->dfs[nnode])
1941 si->dfs[n] = si->dfs[t];
1945 /* Visit all the implicit predecessors. */
1946 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
1948 unsigned int w = si->node_mapping[i];
1950 if (TEST_BIT (si->deleted, w))
1951 continue;
1953 if (!TEST_BIT (si->visited, w))
1954 condense_visit (graph, si, w);
1956 unsigned int t = si->node_mapping[w];
1957 unsigned int nnode = si->node_mapping[n];
1958 gcc_assert (nnode == n);
1960 if (si->dfs[t] < si->dfs[nnode])
1961 si->dfs[n] = si->dfs[t];
1965 /* See if any components have been identified. */
1966 if (si->dfs[n] == my_dfs)
1968 while (VEC_length (unsigned, si->scc_stack) != 0
1969 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1971 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1972 si->node_mapping[w] = n;
1974 if (!TEST_BIT (graph->direct_nodes, w))
1975 RESET_BIT (graph->direct_nodes, n);
1977 /* Unify our nodes. */
1978 if (graph->preds[w])
1980 if (!graph->preds[n])
1981 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1982 bitmap_ior_into (graph->preds[n], graph->preds[w]);
1984 if (graph->implicit_preds[w])
1986 if (!graph->implicit_preds[n])
1987 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1988 bitmap_ior_into (graph->implicit_preds[n],
1989 graph->implicit_preds[w]);
1991 if (graph->points_to[w])
1993 if (!graph->points_to[n])
1994 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
1995 bitmap_ior_into (graph->points_to[n],
1996 graph->points_to[w]);
1999 SET_BIT (si->deleted, n);
2001 else
2002 VEC_safe_push (unsigned, heap, si->scc_stack, n);
2005 /* Label pointer equivalences. */
2007 static void
2008 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2010 unsigned int i;
2011 bitmap_iterator bi;
2012 SET_BIT (si->visited, n);
2014 if (!graph->points_to[n])
2015 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
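/* The pointer equivalence label of N is determined solely by the union
of the points-to sets flowing into it: nodes with equal unions get equal
labels, and a node whose union remains empty keeps label 0, i.e. it is
not a pointer. */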
2017 /* Label and union our incoming edges' points-to sets. */
2018 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2020 unsigned int w = si->node_mapping[i];
2021 if (!TEST_BIT (si->visited, w))
2022 label_visit (graph, si, w);
2024 /* Skip unused edges */
2025 if (w == n || graph->pointer_label[w] == 0)
2026 continue;
2028 if (graph->points_to[w])
2029 bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
2031 /* Indirect nodes get fresh variables. */
2032 if (!TEST_BIT (graph->direct_nodes, n))
2033 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2035 if (!bitmap_empty_p (graph->points_to[n]))
2037 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
2038 graph->points_to[n]);
2039 if (!label)
2041 label = pointer_equiv_class++;
2042 equiv_class_add (pointer_equiv_class_table,
2043 label, graph->points_to[n]);
2045 graph->pointer_label[n] = label;
2049 /* Perform offline variable substitution, discovering equivalence
2050 classes, and eliminating non-pointer variables. */
2052 static struct scc_info *
2053 perform_var_substitution (constraint_graph_t graph)
2055 unsigned int i;
2056 unsigned int size = graph->size;
2057 struct scc_info *si = init_scc_info (size);
2059 bitmap_obstack_initialize (&iteration_obstack);
2060 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
2061 equiv_class_label_eq, free);
2062 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
2063 equiv_class_label_eq, free);
2064 pointer_equiv_class = 1;
2065 location_equiv_class = 1;
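/* Label 0 is reserved to mean "no equivalence class" (a non-pointer),
so both counters start at 1. */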
2067 /* Condense the nodes, which means to find SCC's, count incoming
2068 predecessors, and unite nodes in SCC's. */
2069 for (i = 0; i < FIRST_REF_NODE; i++)
2070 if (!TEST_BIT (si->visited, si->node_mapping[i]))
2071 condense_visit (graph, si, si->node_mapping[i]);
2073 sbitmap_zero (si->visited);
2074 /* Actually label the nodes for pointer equivalences. */
2075 for (i = 0; i < FIRST_REF_NODE; i++)
2076 if (!TEST_BIT (si->visited, si->node_mapping[i]))
2077 label_visit (graph, si, si->node_mapping[i]);
2079 /* Calculate location equivalence labels. */
2080 for (i = 0; i < FIRST_REF_NODE; i++)
2082 bitmap pointed_by;
2083 bitmap_iterator bi;
2084 unsigned int j;
2085 unsigned int label;
2087 if (!graph->pointed_by[i])
2088 continue;
2089 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2091 /* Translate the pointed-by mapping for pointer equivalence
2092 labels. */
2093 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2095 bitmap_set_bit (pointed_by,
2096 graph->pointer_label[si->node_mapping[j]]);
2098 /* The original pointed_by is now dead. */
2099 BITMAP_FREE (graph->pointed_by[i]);
2101 /* Look up the location equivalence label if one exists, or make
2102 one otherwise. */
2103 label = equiv_class_lookup (location_equiv_class_table,
2104 pointed_by);
2105 if (label == 0)
2107 label = location_equiv_class++;
2108 equiv_class_add (location_equiv_class_table,
2109 label, pointed_by);
2111 else
2113 if (dump_file && (dump_flags & TDF_DETAILS))
2114 fprintf (dump_file, "Found location equivalence for node %s\n",
2115 get_varinfo (i)->name);
2116 BITMAP_FREE (pointed_by);
2118 graph->loc_label[i] = label;
2122 if (dump_file && (dump_flags & TDF_DETAILS))
2123 for (i = 0; i < FIRST_REF_NODE; i++)
2125 bool direct_node = TEST_BIT (graph->direct_nodes, i);
2126 fprintf (dump_file,
2127 "Equivalence classes for %s node id %d:%s are pointer: %d"
2128 ", location:%d\n",
2129 direct_node ? "Direct node" : "Indirect node", i,
2130 get_varinfo (i)->name,
2131 graph->pointer_label[si->node_mapping[i]],
2132 graph->loc_label[si->node_mapping[i]]);
2135 /* Quickly eliminate our non-pointer variables. */
2137 for (i = 0; i < FIRST_REF_NODE; i++)
2139 unsigned int node = si->node_mapping[i];
2141 if (graph->pointer_label[node] == 0)
2143 if (dump_file && (dump_flags & TDF_DETAILS))
2144 fprintf (dump_file,
2145 "%s is a non-pointer variable, eliminating edges.\n",
2146 get_varinfo (node)->name);
2147 stats.nonpointer_vars++;
2148 clear_edges_for_node (graph, node);
2152 return si;
2155 /* Free information that was only necessary for variable
2156 substitution. */
2158 static void
2159 free_var_substitution_info (struct scc_info *si)
2161 free_scc_info (si);
2162 free (graph->pointer_label);
2163 free (graph->loc_label);
2164 free (graph->pointed_by);
2165 free (graph->points_to);
2166 free (graph->eq_rep);
2167 sbitmap_free (graph->direct_nodes);
2168 htab_delete (pointer_equiv_class_table);
2169 htab_delete (location_equiv_class_table);
2170 bitmap_obstack_release (&iteration_obstack);
2173 /* Return an existing node that is equivalent to NODE, which has
2174 equivalence class LABEL, if one exists. Return NODE otherwise. */
2176 static unsigned int
2177 find_equivalent_node (constraint_graph_t graph,
2178 unsigned int node, unsigned int label)
2180 /* If the address version of this variable is unused, we can
2181 substitute it for anything else with the same label.
2182 Otherwise, we know the pointers are equivalent, but not the
2183 locations, and we can unite them later. */
2185 if (!bitmap_bit_p (graph->address_taken, node))
2187 gcc_assert (label < graph->size);
2189 if (graph->eq_rep[label] != -1)
2191 /* Unify the two variables since we know they are equivalent. */
2192 if (unite (graph->eq_rep[label], node))
2193 unify_nodes (graph, graph->eq_rep[label], node, false);
2194 return graph->eq_rep[label];
2196 else
2198 graph->eq_rep[label] = node;
2199 graph->pe_rep[label] = node;
2202 else
2204 gcc_assert (label < graph->size);
2205 graph->pe[node] = label;
2206 if (graph->pe_rep[label] == -1)
2207 graph->pe_rep[label] = node;
2210 return node;
2213 /* Unite pointer equivalent but not location equivalent nodes in
2214 GRAPH. This may only be performed once variable substitution is
2215 finished. */
2217 static void
2218 unite_pointer_equivalences (constraint_graph_t graph)
2220 unsigned int i;
2222 /* Go through the pointer equivalences and unite them to their
2223 representative, if they aren't already. */
2224 for (i = 0; i < FIRST_REF_NODE; i++)
2226 unsigned int label = graph->pe[i];
2227 if (label)
2229 int label_rep = graph->pe_rep[label];
2231 if (label_rep == -1)
2232 continue;
2234 label_rep = find (label_rep);
2235 if (label_rep >= 0 && unite (label_rep, find (i)))
2236 unify_nodes (graph, label_rep, i, false);
2241 /* Move complex constraints to the GRAPH nodes they belong to. */
2243 static void
2244 move_complex_constraints (constraint_graph_t graph)
2246 int i;
2247 constraint_t c;
2249 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2251 if (c)
2253 struct constraint_expr lhs = c->lhs;
2254 struct constraint_expr rhs = c->rhs;
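/* Stores (*lhs = ...) are attached to the lhs variable, loads
(... = *rhs) to the rhs variable, and copies involving non-zero
offsets are treated as complex as well. */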
2256 if (lhs.type == DEREF)
2258 insert_into_complex (graph, lhs.var, c);
2260 else if (rhs.type == DEREF)
2262 if (!(get_varinfo (lhs.var)->is_special_var))
2263 insert_into_complex (graph, rhs.var, c);
2265 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2266 && (lhs.offset != 0 || rhs.offset != 0))
2268 insert_into_complex (graph, rhs.var, c);
2275 /* Optimize and rewrite complex constraints while performing
2276 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2277 result of perform_var_substitution. */
2279 static void
2280 rewrite_constraints (constraint_graph_t graph,
2281 struct scc_info *si)
2283 int i;
2284 unsigned int j;
2285 constraint_t c;
2287 for (j = 0; j < graph->size; j++)
2288 gcc_assert (find (j) == j);
2290 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2292 struct constraint_expr lhs = c->lhs;
2293 struct constraint_expr rhs = c->rhs;
2294 unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
2295 unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
2296 unsigned int lhsnode, rhsnode;
2297 unsigned int lhslabel, rhslabel;
2299 lhsnode = si->node_mapping[lhsvar];
2300 rhsnode = si->node_mapping[rhsvar];
2301 lhslabel = graph->pointer_label[lhsnode];
2302 rhslabel = graph->pointer_label[rhsnode];
2304 /* See if it is really a non-pointer variable, and if so, ignore
2305 the constraint. */
2306 if (lhslabel == 0)
2308 if (dump_file && (dump_flags & TDF_DETAILS))
2311 fprintf (dump_file, "%s is a non-pointer variable, "
2312 "ignoring constraint:",
2313 get_varinfo (lhs.var)->name);
2314 dump_constraint (dump_file, c);
2316 VEC_replace (constraint_t, constraints, i, NULL);
2317 continue;
2320 if (rhslabel == 0)
2322 if (dump_file && (dump_flags & TDF_DETAILS))
2325 fprintf (dump_file, "%s is a non-pointer variable, "
2326 "ignoring constraint:",
2327 get_varinfo (rhs.var)->name);
2328 dump_constraint (dump_file, c);
2330 VEC_replace (constraint_t, constraints, i, NULL);
2331 continue;
2334 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2335 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2336 c->lhs.var = lhsvar;
2337 c->rhs.var = rhsvar;
2342 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2343 part of an SCC, false otherwise. */
2345 static bool
2346 eliminate_indirect_cycles (unsigned int node)
2348 if (graph->indirect_cycles[node] != -1
2349 && !bitmap_empty_p (get_varinfo (node)->solution))
2351 unsigned int i;
2352 VEC(unsigned,heap) *queue = NULL;
2353 int queuepos;
2354 unsigned int to = find (graph->indirect_cycles[node]);
2355 bitmap_iterator bi;
2357 /* We can't touch the solution set and call unify_nodes
2358 at the same time, because unify_nodes is going to do
2359 bitmap unions into it. */
2361 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2363 if (find (i) == i && i != to)
2365 if (unite (to, i))
2366 VEC_safe_push (unsigned, heap, queue, i);
2370 for (queuepos = 0;
2371 VEC_iterate (unsigned, queue, queuepos, i);
2372 queuepos++)
2374 unify_nodes (graph, to, i, true);
2376 VEC_free (unsigned, heap, queue);
2377 return true;
2379 return false;
2382 /* Solve the constraint graph GRAPH using our worklist solver.
2383 This is based on the PW* family of solvers from the "Efficient Field
2384 Sensitive Pointer Analysis for C" paper.
2385 It works by iterating over all the graph nodes, processing the complex
2386 constraints and propagating the copy constraints, until everything stops
2387 changing. This corresponds to steps 6-8 in the solving list given above. */
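/* Concretely, each iteration computes a topological order of the graph
and, for every node whose solution gained new bits, re-runs that node's
complex constraints on just those new bits and copies them along the
node's successor edges. */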
2389 static void
2390 solve_graph (constraint_graph_t graph)
2392 unsigned int size = graph->size;
2393 unsigned int i;
2394 bitmap pts;
2396 changed_count = 0;
2397 changed = sbitmap_alloc (size);
2398 sbitmap_zero (changed);
2400 /* Mark all initial non-collapsed nodes as changed. */
2401 for (i = 0; i < size; i++)
2403 varinfo_t ivi = get_varinfo (i);
2404 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2405 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2406 || VEC_length (constraint_t, graph->complex[i]) > 0))
2408 SET_BIT (changed, i);
2409 changed_count++;
2413 /* Allocate a bitmap to be used to store the changed bits. */
2414 pts = BITMAP_ALLOC (&pta_obstack);
2416 while (changed_count > 0)
2418 unsigned int i;
2419 struct topo_info *ti = init_topo_info ();
2420 stats.iterations++;
2422 bitmap_obstack_initialize (&iteration_obstack);
2424 compute_topo_order (graph, ti);
2426 while (VEC_length (unsigned, ti->topo_order) != 0)
2429 i = VEC_pop (unsigned, ti->topo_order);
2431 /* If this variable is not a representative, skip it. */
2432 if (find (i) != i)
2433 continue;
2435 /* In certain indirect cycle cases, we may merge this
2436 variable to another. */
2437 if (eliminate_indirect_cycles (i) && find (i) != i)
2438 continue;
2440 /* If the node has changed, we need to process the
2441 complex constraints and outgoing edges again. */
2442 if (TEST_BIT (changed, i))
2444 unsigned int j;
2445 constraint_t c;
2446 bitmap solution;
2447 VEC(constraint_t,heap) *complex = graph->complex[i];
2448 bool solution_empty;
2450 RESET_BIT (changed, i);
2451 changed_count--;
2453 /* Compute the changed set of solution bits. */
2454 bitmap_and_compl (pts, get_varinfo (i)->solution,
2455 get_varinfo (i)->oldsolution);
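/* PTS now holds only the bits added to the solution since this node
was last processed; the solver propagates just this delta. */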
2457 if (bitmap_empty_p (pts))
2458 continue;
2460 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2462 solution = get_varinfo (i)->solution;
2463 solution_empty = bitmap_empty_p (solution);
2465 /* Process the complex constraints */
2466 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2468 /* XXX: This is going to unsort the constraints in
2469 some cases, which will occasionally add duplicate
2470 constraints during unification. This does not
2471 affect correctness. */
2472 c->lhs.var = find (c->lhs.var);
2473 c->rhs.var = find (c->rhs.var);
2475 /* The only complex constraint that can change our
2476 solution to non-empty, given an empty solution,
2477 is a constraint where the lhs side is receiving
2478 some set from elsewhere. */
2479 if (!solution_empty || c->lhs.type != DEREF)
2480 do_complex_constraint (graph, c, pts);
2483 solution_empty = bitmap_empty_p (solution);
2485 if (!solution_empty
2486 /* Do not propagate the ESCAPED/CALLUSED solutions. */
2487 && i != escaped_id
2488 && i != callused_id)
2490 bitmap_iterator bi;
2492 /* Propagate solution to all successors. */
2493 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2494 0, j, bi)
2496 bitmap tmp;
2497 bool flag;
2499 unsigned int to = find (j);
2500 tmp = get_varinfo (to)->solution;
2501 flag = false;
2503 /* Don't try to propagate to ourselves. */
2504 if (to == i)
2505 continue;
2507 flag = set_union_with_increment (tmp, pts, 0);
2509 if (flag)
2511 get_varinfo (to)->solution = tmp;
2512 if (!TEST_BIT (changed, to))
2514 SET_BIT (changed, to);
2515 changed_count++;
2522 free_topo_info (ti);
2523 bitmap_obstack_release (&iteration_obstack);
2526 BITMAP_FREE (pts);
2527 sbitmap_free (changed);
2528 bitmap_obstack_release (&oldpta_obstack);
2531 /* Map from trees to variable infos. */
2532 static struct pointer_map_t *vi_for_tree;
2535 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2537 static void
2538 insert_vi_for_tree (tree t, varinfo_t vi)
2540 void **slot = pointer_map_insert (vi_for_tree, t);
2541 gcc_assert (vi);
2542 gcc_assert (*slot == NULL);
2543 *slot = vi;
2546 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2547 exist in the map, return NULL, otherwise, return the varinfo we found. */
2549 static varinfo_t
2550 lookup_vi_for_tree (tree t)
2552 void **slot = pointer_map_contains (vi_for_tree, t);
2553 if (slot == NULL)
2554 return NULL;
2556 return (varinfo_t) *slot;
2559 /* Return a printable name for DECL */
2561 static const char *
2562 alias_get_name (tree decl)
2564 const char *res = get_name (decl);
2565 char *temp;
2566 int num_printed = 0;
2568 if (res != NULL)
2569 return res;
2571 res = "NULL";
2572 if (!dump_file)
2573 return res;
2575 if (TREE_CODE (decl) == SSA_NAME)
2577 num_printed = asprintf (&temp, "%s_%u",
2578 alias_get_name (SSA_NAME_VAR (decl)),
2579 SSA_NAME_VERSION (decl));
2581 else if (DECL_P (decl))
2583 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2585 if (num_printed > 0)
2587 res = ggc_strdup (temp);
2588 free (temp);
2590 return res;
2593 /* Find the variable info for tree T in the map.
2594 If T doesn't exist in the map, create an entry for it and return it. */
2596 static varinfo_t
2597 get_vi_for_tree (tree t)
2599 void **slot = pointer_map_contains (vi_for_tree, t);
2600 if (slot == NULL)
2601 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2603 return (varinfo_t) *slot;
2606 /* Get a constraint expression for a new temporary variable. */
2608 static struct constraint_expr
2609 get_constraint_exp_for_temp (tree t)
2611 struct constraint_expr cexpr;
2613 gcc_assert (SSA_VAR_P (t));
2615 cexpr.type = SCALAR;
2616 cexpr.var = get_vi_for_tree (t)->id;
2617 cexpr.offset = 0;
2619 return cexpr;
2622 /* Get a constraint expression vector from an SSA_VAR_P node.
2623 If address_p is true, the result will have its address taken. */
2625 static void
2626 get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p)
2628 struct constraint_expr cexpr;
2629 varinfo_t vi;
2631 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2632 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2634 /* For parameters, get at the points-to set for the actual parm
2635 decl. */
2636 if (TREE_CODE (t) == SSA_NAME
2637 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2638 && SSA_NAME_IS_DEFAULT_DEF (t))
2640 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2641 return;
2644 vi = get_vi_for_tree (t);
2645 cexpr.var = vi->id;
2646 cexpr.type = SCALAR;
2647 cexpr.offset = 0;
2648 /* If we determine the result is "anything", and we know this is readonly,
2649 say it points to readonly memory instead. */
2650 if (cexpr.var == anything_id && TREE_READONLY (t))
2652 gcc_unreachable ();
2653 cexpr.type = ADDRESSOF;
2654 cexpr.var = readonly_id;
2657 /* If we are not taking the address of the constraint expr, add all
2658 sub-fields of the variable as well. */
2659 if (!address_p)
2661 for (; vi; vi = vi->next)
2663 cexpr.var = vi->id;
2664 VEC_safe_push (ce_s, heap, *results, &cexpr);
2666 return;
2669 VEC_safe_push (ce_s, heap, *results, &cexpr);
2672 /* Process constraint T, performing various simplifications and then
2673 adding it to our list of overall constraints. */
2675 static void
2676 process_constraint (constraint_t t)
2678 struct constraint_expr rhs = t->rhs;
2679 struct constraint_expr lhs = t->lhs;
2681 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2682 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2684 /* ANYTHING == ANYTHING is pointless. */
2685 if (lhs.var == anything_id && rhs.var == anything_id)
2686 return;
2688 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING. */
2689 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
2691 rhs = t->lhs;
2692 t->lhs = t->rhs;
2693 t->rhs = rhs;
2694 process_constraint (t);
2696 /* This can happen in our IR with things like n->a = *p */
2697 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2699 /* Split into tmp = *rhs, *lhs = tmp */
2700 tree rhsdecl = get_varinfo (rhs.var)->decl;
2701 tree pointertype = TREE_TYPE (rhsdecl);
2702 tree pointedtotype = TREE_TYPE (pointertype);
2703 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2704 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
2706 process_constraint (new_constraint (tmplhs, rhs));
2707 process_constraint (new_constraint (lhs, tmplhs));
2709 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2711 /* Split into tmp = &rhs, *lhs = tmp */
2712 tree rhsdecl = get_varinfo (rhs.var)->decl;
2713 tree pointertype = TREE_TYPE (rhsdecl);
2714 tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp");
2715 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
2717 process_constraint (new_constraint (tmplhs, rhs));
2718 process_constraint (new_constraint (lhs, tmplhs));
2720 else
2722 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2723 VEC_safe_push (constraint_t, heap, constraints, t);
2727 /* Return true if T is a variable of a type that could contain
2728 pointers. */
2730 static bool
2731 could_have_pointers (tree t)
2733 tree type = TREE_TYPE (t);
2735 if (POINTER_TYPE_P (type)
2736 || AGGREGATE_TYPE_P (type))
2737 return true;
2739 return false;
2742 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2743 structure. */
2745 static HOST_WIDE_INT
2746 bitpos_of_field (const tree fdecl)
2749 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2750 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2751 return -1;
2753 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * 8
2754 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2758 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
2759 resulting constraint expressions in *RESULTS. */
2761 static void
2762 get_constraint_for_ptr_offset (tree ptr, tree offset,
2763 VEC (ce_s, heap) **results)
2765 struct constraint_expr *c;
2766 unsigned int j, n;
2767 unsigned HOST_WIDE_INT rhsunitoffset, rhsoffset;
2769 /* If we do not do field-sensitive PTA, adding offsets to pointers
2770 does not change the points-to solution. */
2771 if (!use_field_sensitive)
2773 get_constraint_for (ptr, results);
2774 return;
2777 /* If the offset is not a non-negative integer constant that fits
2778 in a HOST_WIDE_INT, we have to fall back to a conservative
2779 solution which includes all sub-fields of all pointed-to
2780 variables of ptr.
2781 ??? As we do not have the ability to express this, fall back
2782 to anything. */
2783 if (!host_integerp (offset, 1))
2785 struct constraint_expr temp;
2786 temp.var = anything_id;
2787 temp.type = SCALAR;
2788 temp.offset = 0;
2789 VEC_safe_push (ce_s, heap, *results, &temp);
2790 return;
2793 /* Make sure the bit-offset also fits. */
2794 rhsunitoffset = TREE_INT_CST_LOW (offset);
2795 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
2796 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
2798 struct constraint_expr temp;
2799 temp.var = anything_id;
2800 temp.type = SCALAR;
2801 temp.offset = 0;
2802 VEC_safe_push (ce_s, heap, *results, &temp);
2803 return;
2806 get_constraint_for (ptr, results);
2807 if (rhsoffset == 0)
2808 return;
2810 /* As we are eventually appending to the solution do not use
2811 VEC_iterate here. */
2812 n = VEC_length (ce_s, *results);
2813 for (j = 0; j < n; j++)
2815 varinfo_t curr;
2816 c = VEC_index (ce_s, *results, j);
2817 curr = get_varinfo (c->var);
2819 if (c->type == ADDRESSOF
2820 && !curr->is_full_var)
2822 varinfo_t temp, curr = get_varinfo (c->var);
2824 /* Search the sub-field which overlaps with the
2825 pointed-to offset. As we deal with positive offsets
2826 only, we can start the search from the current variable. */
2827 temp = first_vi_for_offset (curr, curr->offset + rhsoffset);
2829 /* If the result is outside of the variable we have to provide
2830 a conservative result, as the variable is still reachable
2831 from the resulting pointer (even though it technically
2832 cannot point to anything). The last sub-field is such
2833 a conservative result.
2834 ??? If we always had a sub-field for &object + 1 then
2835 we could represent this in a more precise way. */
2836 if (temp == NULL)
2838 temp = curr;
2839 while (temp->next != NULL)
2840 temp = temp->next;
2841 continue;
2844 /* If the found variable is not exactly at the pointed to
2845 result, we have to include the next variable in the
2846 solution as well. Otherwise two increments by offset / 2
2847 do not result in the same or a conservative superset
2848 solution. */
2849 if (temp->offset != curr->offset + rhsoffset
2850 && temp->next != NULL)
2852 struct constraint_expr c2;
2853 c2.var = temp->next->id;
2854 c2.type = ADDRESSOF;
2855 c2.offset = 0;
2856 VEC_safe_push (ce_s, heap, *results, &c2);
2858 c->var = temp->id;
2859 c->offset = 0;
2861 else if (c->type == ADDRESSOF
2862 /* If this varinfo represents a full variable just use it. */
2863 && curr->is_full_var)
2864 c->offset = 0;
2865 else
2866 c->offset = rhsoffset;
2871 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
2872 If address_p is true, the result will have its address taken. */
2874 static void
2875 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
2876 bool address_p)
2878 tree orig_t = t;
2879 HOST_WIDE_INT bitsize = -1;
2880 HOST_WIDE_INT bitmaxsize = -1;
2881 HOST_WIDE_INT bitpos;
2882 tree forzero;
2883 struct constraint_expr *result;
2885 /* Some people like to do cute things like take the address of
2886 &0->a.b */
2887 forzero = t;
2888 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2889 forzero = TREE_OPERAND (forzero, 0);
2891 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2893 struct constraint_expr temp;
2895 temp.offset = 0;
2896 temp.var = integer_id;
2897 temp.type = SCALAR;
2898 VEC_safe_push (ce_s, heap, *results, &temp);
2899 return;
2902 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2904 /* Pretend to take the address of the base, we'll take care of
2905 adding the required subset of sub-fields below. */
2906 get_constraint_for_1 (t, results, true);
2907 gcc_assert (VEC_length (ce_s, *results) == 1);
2908 result = VEC_last (ce_s, *results);
2910 /* This can also happen due to weird offsetof type macros. */
2911 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2912 result->type = SCALAR;
2914 if (result->type == SCALAR
2915 && get_varinfo (result->var)->is_full_var)
2916 /* For single-field vars do not bother about the offset. */
2917 result->offset = 0;
2918 else if (result->type == SCALAR)
2920 /* In languages like C, you can access one past the end of an
2921 array. You aren't allowed to dereference it, so we can
2922 ignore this constraint. When we handle pointer subtraction,
2923 we may have to do something cute here. */
2925 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result->var)->fullsize
2926 && bitmaxsize != 0)
2928 /* It's also not true that the constraint will actually start at the
2929 right offset; it may start in some padding. We only care about
2930 setting the constraint to the first actual field it touches, so
2931 walk to find it. */
2932 struct constraint_expr cexpr = *result;
2933 varinfo_t curr;
2934 VEC_pop (ce_s, *results);
2935 cexpr.offset = 0;
2936 for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
2938 if (ranges_overlap_p (curr->offset, curr->size,
2939 bitpos, bitmaxsize))
2941 cexpr.var = curr->id;
2942 VEC_safe_push (ce_s, heap, *results, &cexpr);
2943 if (address_p)
2944 break;
2947 /* If we are going to take the address of this field then
2948 to be able to compute reachability correctly add at least
2949 the last field of the variable. */
2950 if (address_p
2951 && VEC_length (ce_s, *results) == 0)
2953 curr = get_varinfo (cexpr.var);
2954 while (curr->next != NULL)
2955 curr = curr->next;
2956 cexpr.var = curr->id;
2957 VEC_safe_push (ce_s, heap, *results, &cexpr);
2959 else
2960 /* Assert that we found *some* field there. The user couldn't be
2961 accessing *only* padding. */
2962 /* Still the user could access one past the end of an array
2963 embedded in a struct resulting in accessing *only* padding. */
2964 gcc_assert (VEC_length (ce_s, *results) >= 1
2965 || ref_contains_array_ref (orig_t));
2967 else if (bitmaxsize == 0)
2969 if (dump_file && (dump_flags & TDF_DETAILS))
2970 fprintf (dump_file, "Access to zero-sized part of variable, "
2971 "ignoring\n");
2973 else
2974 if (dump_file && (dump_flags & TDF_DETAILS))
2975 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
2977 else if (bitmaxsize == -1)
2979 /* We can't handle DEREF constraints with unknown size, we'll
2980 get the wrong answer. Punt and return anything. */
2981 result->var = anything_id;
2982 result->offset = 0;
2984 else
2985 result->offset = bitpos;
2989 /* Dereference the constraint expression CONS, and return the result.
2990 DEREF (ADDRESSOF) = SCALAR
2991 DEREF (SCALAR) = DEREF
2992 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2993 This is needed so that we can handle dereferencing DEREF constraints. */
2995 static void
2996 do_deref (VEC (ce_s, heap) **constraints)
2998 struct constraint_expr *c;
2999 unsigned int i = 0;
3001 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
3003 if (c->type == SCALAR)
3004 c->type = DEREF;
3005 else if (c->type == ADDRESSOF)
3006 c->type = SCALAR;
3007 else if (c->type == DEREF)
3009 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
3010 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
3011 process_constraint (new_constraint (tmplhs, *c));
3012 c->var = tmplhs.var;
3014 else
3015 gcc_unreachable ();
3019 /* Given a tree T, return the constraint expression for it. */
3021 static void
3022 get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
3024 struct constraint_expr temp;
3026 /* x = integer is all glommed to a single variable, which doesn't
3027 point to anything by itself. That is, of course, unless it is an
3028 integer constant being treated as a pointer, in which case, we
3029 will return that this is really the addressof anything. This
3030 happens below, since it will fall into the default case. The only
3031 case we know something about an integer treated like a pointer is
3032 when it is the NULL pointer, and then we just say it points to
3033 NULL. */
3034 if (TREE_CODE (t) == INTEGER_CST
3035 && integer_zerop (t))
3037 temp.var = nothing_id;
3038 temp.type = ADDRESSOF;
3039 temp.offset = 0;
3040 VEC_safe_push (ce_s, heap, *results, &temp);
3041 return;
3044 /* String constants are read-only. */
3045 if (TREE_CODE (t) == STRING_CST)
3047 temp.var = readonly_id;
3048 temp.type = SCALAR;
3049 temp.offset = 0;
3050 VEC_safe_push (ce_s, heap, *results, &temp);
3051 return;
3054 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3056 case tcc_expression:
3058 switch (TREE_CODE (t))
3060 case ADDR_EXPR:
3062 struct constraint_expr *c;
3063 unsigned int i;
3064 tree exp = TREE_OPERAND (t, 0);
3066 get_constraint_for_1 (exp, results, true);
3068 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
3070 if (c->type == DEREF)
3071 c->type = SCALAR;
3072 else
3073 c->type = ADDRESSOF;
3075 return;
3077 break;
3078 default:;
3080 break;
3082 case tcc_reference:
3084 switch (TREE_CODE (t))
3086 case INDIRECT_REF:
3088 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
3089 do_deref (results);
3090 return;
3092 case ARRAY_REF:
3093 case ARRAY_RANGE_REF:
3094 case COMPONENT_REF:
3095 get_constraint_for_component_ref (t, results, address_p);
3096 return;
3097 default:;
3099 break;
3101 case tcc_exceptional:
3103 switch (TREE_CODE (t))
3105 case SSA_NAME:
3107 get_constraint_for_ssa_var (t, results, address_p);
3108 return;
3110 default:;
3112 break;
3114 case tcc_declaration:
3116 get_constraint_for_ssa_var (t, results, address_p);
3117 return;
3119 default:;
3122 /* The default fallback is a constraint from anything. */
3123 temp.type = ADDRESSOF;
3124 temp.var = anything_id;
3125 temp.offset = 0;
3126 VEC_safe_push (ce_s, heap, *results, &temp);
3129 /* Given a gimple tree T, return the constraint expression vector for it. */
3131 static void
3132 get_constraint_for (tree t, VEC (ce_s, heap) **results)
3134 gcc_assert (VEC_length (ce_s, *results) == 0);
3136 get_constraint_for_1 (t, results, false);
3139 /* Handle the structure copy case where we have a simple structure copy
3140 between LHS and RHS that is of SIZE (in bits)
3142 For each field of the lhs variable (lhsfield)
3143 For each field of the rhs variable at lhsfield.offset (rhsfield)
3144 add the constraint lhsfield = rhsfield
3146 If we fail due to some kind of type unsafety or other thing we
3147 can't handle, return false. We expect the caller to collapse the
3148 variable in that case. */
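/* For instance, an assignment x = y where both are structures with two
pointer fields a and b expands into the field constraints x.a = y.a and
x.b = y.b. */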
3150 static bool
3151 do_simple_structure_copy (const struct constraint_expr lhs,
3152 const struct constraint_expr rhs,
3153 const unsigned HOST_WIDE_INT size)
3155 varinfo_t p = get_varinfo (lhs.var);
3156 unsigned HOST_WIDE_INT pstart, last;
3157 pstart = p->offset;
3158 last = p->offset + size;
3159 for (; p && p->offset < last; p = p->next)
3161 varinfo_t q;
3162 struct constraint_expr templhs = lhs;
3163 struct constraint_expr temprhs = rhs;
3164 unsigned HOST_WIDE_INT fieldoffset;
3166 templhs.var = p->id;
3167 q = get_varinfo (temprhs.var);
3168 fieldoffset = p->offset - pstart;
3169 q = first_vi_for_offset (q, q->offset + fieldoffset);
3170 if (!q)
3171 return false;
3172 temprhs.var = q->id;
3173 process_constraint (new_constraint (templhs, temprhs));
3175 return true;
3179 /* Handle the structure copy case where we have a structure copy between an
3180 aggregate on the LHS and a dereference of a pointer on the RHS
3181 that is of SIZE (in bits)
3183 For each field of the lhs variable (lhsfield)
3184 rhs.offset = lhsfield->offset
3185 add the constraint lhsfield = rhs
3188 static void
3189 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
3190 const struct constraint_expr rhs,
3191 const unsigned HOST_WIDE_INT size)
3193 varinfo_t p = get_varinfo (lhs.var);
3194 unsigned HOST_WIDE_INT pstart,last;
3195 pstart = p->offset;
3196 last = p->offset + size;
3198 for (; p && p->offset < last; p = p->next)
3200 varinfo_t q;
3201 struct constraint_expr templhs = lhs;
3202 struct constraint_expr temprhs = rhs;
3203 unsigned HOST_WIDE_INT fieldoffset;
3206 if (templhs.type == SCALAR)
3207 templhs.var = p->id;
3208 else
3209 templhs.offset = p->offset;
3211 q = get_varinfo (temprhs.var);
3212 fieldoffset = p->offset - pstart;
3213 temprhs.offset += fieldoffset;
3214 process_constraint (new_constraint (templhs, temprhs));
3218 /* Handle the structure copy case where we have a structure copy
3219 between an aggregate on the RHS and a dereference of a pointer on
3220 the LHS that is of SIZE (in bits)
3222 For each field of the rhs variable (rhsfield)
3223 lhs.offset = rhsfield->offset
3224 add the constraint lhs = rhsfield
3227 static void
3228 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
3229 const struct constraint_expr rhs,
3230 const unsigned HOST_WIDE_INT size)
3232 varinfo_t p = get_varinfo (rhs.var);
3233 unsigned HOST_WIDE_INT pstart,last;
3234 pstart = p->offset;
3235 last = p->offset + size;
3237 for (; p && p->offset < last; p = p->next)
3239 varinfo_t q;
3240 struct constraint_expr templhs = lhs;
3241 struct constraint_expr temprhs = rhs;
3242 unsigned HOST_WIDE_INT fieldoffset;
3245 if (temprhs.type == SCALAR)
3246 temprhs.var = p->id;
3247 else
3248 temprhs.offset = p->offset;
3250 q = get_varinfo (templhs.var);
3251 fieldoffset = p->offset - pstart;
3252 templhs.offset += fieldoffset;
3253 process_constraint (new_constraint (templhs, temprhs));
3257 /* Sometimes, frontends like to give us bad type information. This
3258 function will collapse all the fields from VAR to the end of VAR,
3259 into VAR, so that we treat those fields as a single variable.
3260 We return the variable they were collapsed into. */
3262 static unsigned int
3263 collapse_rest_of_var (unsigned int var)
3265 varinfo_t currvar = get_varinfo (var);
3266 varinfo_t field;
3268 for (field = currvar->next; field; field = field->next)
3270 if (dump_file)
3271 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
3272 field->name, currvar->name);
3274 gcc_assert (field->collapsed_to == 0);
3275 field->collapsed_to = currvar->id;
3278 currvar->next = NULL;
3279 currvar->size = currvar->fullsize - currvar->offset;
3281 return currvar->id;
3284 /* Handle aggregate copies by expanding into copies of the respective
3285 fields of the structures. */
3287 static void
3288 do_structure_copy (tree lhsop, tree rhsop)
3290 struct constraint_expr lhs, rhs, tmp;
3291 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3292 varinfo_t p;
3293 unsigned HOST_WIDE_INT lhssize;
3294 unsigned HOST_WIDE_INT rhssize;
3296 /* Pretend we are taking the address of the constraint exprs.
3297 We deal with walking the sub-fields ourselves. */
3298 get_constraint_for_1 (lhsop, &lhsc, true);
3299 get_constraint_for_1 (rhsop, &rhsc, true);
3300 gcc_assert (VEC_length (ce_s, lhsc) == 1);
3301 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3302 lhs = *(VEC_last (ce_s, lhsc));
3303 rhs = *(VEC_last (ce_s, rhsc));
3305 VEC_free (ce_s, heap, lhsc);
3306 VEC_free (ce_s, heap, rhsc);
3308 /* If we have special var = x, swap it around. */
3309 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
3311 tmp = lhs;
3312 lhs = rhs;
3313 rhs = tmp;
3316 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
3317 possible it's something we could handle. However, most cases falling
3318 into this are dealing with transparent unions, which are slightly
3319 weird. */
3320 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
3322 rhs.type = ADDRESSOF;
3323 rhs.var = anything_id;
3326 /* If the RHS is a special var, or an addressof, set all the LHS fields to
3327 that special var. */
3328 if (rhs.var <= integer_id)
3330 for (p = get_varinfo (lhs.var); p; p = p->next)
3332 struct constraint_expr templhs = lhs;
3333 struct constraint_expr temprhs = rhs;
3335 if (templhs.type == SCALAR )
3336 templhs.var = p->id;
3337 else
3338 templhs.offset += p->offset;
3339 process_constraint (new_constraint (templhs, temprhs));
3342 else
3344 tree rhstype = TREE_TYPE (rhsop);
3345 tree lhstype = TREE_TYPE (lhsop);
3346 tree rhstypesize;
3347 tree lhstypesize;
3349 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
3350 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
3352 /* If we have a variably sized types on the rhs or lhs, and a deref
3353 constraint, add the constraint, lhsconstraint = &ANYTHING.
3354 This is conservatively correct because either the lhs is an unknown
3355 sized var (if the constraint is SCALAR), or the lhs is a DEREF
3356 constraint, and every variable it can point to must be unknown sized
3357 anyway, so we don't need to worry about fields at all. */
3358 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
3359 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
3361 rhs.var = anything_id;
3362 rhs.type = ADDRESSOF;
3363 rhs.offset = 0;
3364 process_constraint (new_constraint (lhs, rhs));
3365 return;
3368 /* The size only really matters insofar as we don't set more or less of
3369 the variable. If we hit an unknown size var, the size should be the
3370 whole darn thing. */
3371 if (get_varinfo (rhs.var)->is_unknown_size_var)
3372 rhssize = ~0;
3373 else
3374 rhssize = TREE_INT_CST_LOW (rhstypesize);
3376 if (get_varinfo (lhs.var)->is_unknown_size_var)
3377 lhssize = ~0;
3378 else
3379 lhssize = TREE_INT_CST_LOW (lhstypesize);
3382 if (rhs.type == SCALAR && lhs.type == SCALAR)
3384 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
3386 lhs.var = collapse_rest_of_var (lhs.var);
3387 rhs.var = collapse_rest_of_var (rhs.var);
3388 lhs.offset = 0;
3389 rhs.offset = 0;
3390 lhs.type = SCALAR;
3391 rhs.type = SCALAR;
3392 process_constraint (new_constraint (lhs, rhs));
3395 else if (lhs.type != DEREF && rhs.type == DEREF)
3396 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3397 else if (lhs.type == DEREF && rhs.type != DEREF)
3398 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3399 else
3401 tree pointedtotype = lhstype;
3402 tree tmpvar;
3404 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
3405 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
3406 do_structure_copy (tmpvar, rhsop);
3407 do_structure_copy (lhsop, tmpvar);
3412 /* Create a constraint ID = OP. */
3414 static void
3415 make_constraint_to (unsigned id, tree op)
3417 VEC(ce_s, heap) *rhsc = NULL;
3418 struct constraint_expr *c;
3419 struct constraint_expr includes;
3420 unsigned int j;
3422 includes.var = id;
3423 includes.offset = 0;
3424 includes.type = SCALAR;
3426 get_constraint_for (op, &rhsc);
3427 for (j = 0; VEC_iterate (ce_s, rhsc, j, c); j++)
3428 process_constraint (new_constraint (includes, *c));
3429 VEC_free (ce_s, heap, rhsc);
3432 /* Make constraints necessary to make OP escape. */
3434 static void
3435 make_escape_constraint (tree op)
3437 make_constraint_to (escaped_id, op);
3440 /* For non-IPA mode, generate constraints necessary for a call on the
3441 RHS. */
3443 static void
3444 handle_rhs_call (gimple stmt)
3446 unsigned i;
3448 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3450 tree arg = gimple_call_arg (stmt, i);
3452 /* Find those pointers being passed, and make sure they end up
3453 pointing to anything. */
3454 if (could_have_pointers (arg))
3455 make_escape_constraint (arg);
3458 /* The static chain escapes as well. */
3459 if (gimple_call_chain (stmt))
3460 make_escape_constraint (gimple_call_chain (stmt));
3463 /* For non-IPA mode, generate constraints necessary for a call
3464 that returns a pointer and assigns it to LHS. This simply makes
3465 the LHS point to global and escaped variables. */
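/* For malloc-like calls (ECF_MALLOC) we instead create (or reuse) an
artificial HEAP variable and make the LHS point to that. */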
3467 static void
3468 handle_lhs_call (tree lhs, int flags)
3470 VEC(ce_s, heap) *lhsc = NULL;
3471 struct constraint_expr rhsc;
3472 unsigned int j;
3473 struct constraint_expr *lhsp;
3475 get_constraint_for (lhs, &lhsc);
3477 if (flags & ECF_MALLOC)
3479 tree heapvar = heapvar_lookup (lhs);
3480 varinfo_t vi;
3482 if (heapvar == NULL)
3484 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
3485 DECL_EXTERNAL (heapvar) = 1;
3486 get_var_ann (heapvar)->is_heapvar = 1;
3487 if (gimple_referenced_vars (cfun))
3488 add_referenced_var (heapvar);
3489 heapvar_insert (lhs, heapvar);
3492 rhsc.var = create_variable_info_for (heapvar,
3493 alias_get_name (heapvar));
3494 vi = get_varinfo (rhsc.var);
3495 vi->is_artificial_var = 1;
3496 vi->is_heap_var = 1;
3497 rhsc.type = ADDRESSOF;
3498 rhsc.offset = 0;
3500 else
3502 rhsc.var = escaped_id;
3503 rhsc.offset = 0;
3504 rhsc.type = ADDRESSOF;
3506 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3507 process_constraint (new_constraint (*lhsp, rhsc));
3508 VEC_free (ce_s, heap, lhsc);
3511 /* For non-IPA mode, generate constraints necessary for a call of a
3512 const function that returns a pointer in the statement STMT. */
3514 static void
3515 handle_const_call (gimple stmt)
3517 tree lhs = gimple_call_lhs (stmt);
3518 VEC(ce_s, heap) *lhsc = NULL;
3519 struct constraint_expr rhsc;
3520 unsigned int j, k;
3521 struct constraint_expr *lhsp;
3522 tree tmpvar;
3523 struct constraint_expr tmpc;
3525 get_constraint_for (lhs, &lhsc);
3527 /* If this is a nested function then it can return anything. */
3528 if (gimple_call_chain (stmt))
3530 rhsc.var = anything_id;
3531 rhsc.offset = 0;
3532 rhsc.type = ADDRESSOF;
3533 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3534 process_constraint (new_constraint (*lhsp, rhsc));
3535 VEC_free (ce_s, heap, lhsc);
3536 return;
3539 /* We always use a temporary here, otherwise we end up with a quadratic
3540 amount of constraints for
3541 large_struct = const_call (large_struct);
3542 in field-sensitive PTA. */
3543 tmpvar = create_tmp_var_raw (ptr_type_node, "consttmp");
3544 tmpc = get_constraint_exp_for_temp (tmpvar);
3546 /* May return addresses of globals. */
3547 rhsc.var = nonlocal_id;
3548 rhsc.offset = 0;
3549 rhsc.type = ADDRESSOF;
3550 process_constraint (new_constraint (tmpc, rhsc));
3552 /* May return arguments. */
3553 for (k = 0; k < gimple_call_num_args (stmt); ++k)
3555 tree arg = gimple_call_arg (stmt, k);
3557 if (could_have_pointers (arg))
3559 VEC(ce_s, heap) *argc = NULL;
3560 struct constraint_expr *argp;
3561 int i;
3563 get_constraint_for (arg, &argc);
3564 for (i = 0; VEC_iterate (ce_s, argc, i, argp); i++)
3565 process_constraint (new_constraint (tmpc, *argp));
3566 VEC_free (ce_s, heap, argc);
3570 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3571 process_constraint (new_constraint (*lhsp, tmpc));
3573 VEC_free (ce_s, heap, lhsc);
3576 /* For non-IPA mode, generate constraints necessary for a call to a
3577 pure function in statement STMT. */
3579 static void
3580 handle_pure_call (gimple stmt)
3582 unsigned i;
3584 /* Memory reached from pointer arguments is call-used. */
3585 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3587 tree arg = gimple_call_arg (stmt, i);
3589 if (could_have_pointers (arg))
3590 make_constraint_to (callused_id, arg);
3593 /* The static chain is used as well. */
3594 if (gimple_call_chain (stmt))
3595 make_constraint_to (callused_id, gimple_call_chain (stmt));
3597 /* If the call returns a pointer it may point to reachable memory
3598 from the arguments. Not so for malloc functions though. */
3599 if (gimple_call_lhs (stmt)
3600 && could_have_pointers (gimple_call_lhs (stmt))
3601 && !(gimple_call_flags (stmt) & ECF_MALLOC))
3603 tree lhs = gimple_call_lhs (stmt);
3604 VEC(ce_s, heap) *lhsc = NULL;
3605 struct constraint_expr rhsc;
3606 struct constraint_expr *lhsp;
3607 unsigned j;
3609 get_constraint_for (lhs, &lhsc);
3611 /* If this is a nested function then it can return anything. */
3612 if (gimple_call_chain (stmt))
3614 rhsc.var = anything_id;
3615 rhsc.offset = 0;
3616 rhsc.type = ADDRESSOF;
3617 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3618 process_constraint (new_constraint (*lhsp, rhsc));
3619 VEC_free (ce_s, heap, lhsc);
3620 return;
3623 /* Else just add the call-used memory here. Escaped variables
3624 and globals will be dealt with in handle_lhs_call. */
3625 rhsc.var = callused_id;
3626 rhsc.offset = 0;
3627 rhsc.type = ADDRESSOF;
3628 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3629 process_constraint (new_constraint (*lhsp, rhsc));
3630 VEC_free (ce_s, heap, lhsc);
3634 /* Walk statement T setting up aliasing constraints according to the
3635 references found in T. This function is the main part of the
3636 constraint builder. */
3639 static void
3640 find_func_aliases (gimple origt)
3642 gimple t = origt;
3643 VEC(ce_s, heap) *lhsc = NULL;
3644 VEC(ce_s, heap) *rhsc = NULL;
3645 struct constraint_expr *c;
3646 enum escape_type stmt_escape_type;
3648 /* Now build constraints expressions. */
3649 if (gimple_code (t) == GIMPLE_PHI)
3651 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (gimple_phi_result (t))));
3653 /* Only care about pointers and structures containing
3654 pointers. */
3655 if (could_have_pointers (gimple_phi_result (t)))
3657 size_t i;
3658 unsigned int j;
3660 /* For a phi node, assign all the arguments to
3661 the result. */
3662 get_constraint_for (gimple_phi_result (t), &lhsc);
3663 for (i = 0; i < gimple_phi_num_args (t); i++)
3665 tree rhstype;
3666 tree strippedrhs = PHI_ARG_DEF (t, i);
3668 STRIP_NOPS (strippedrhs);
3669 rhstype = TREE_TYPE (strippedrhs);
3670 get_constraint_for (gimple_phi_arg_def (t, i), &rhsc);
3672 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3674 struct constraint_expr *c2;
3675 while (VEC_length (ce_s, rhsc) > 0)
3677 c2 = VEC_last (ce_s, rhsc);
3678 process_constraint (new_constraint (*c, *c2));
3679 VEC_pop (ce_s, rhsc);
3685 /* In IPA mode, we need to generate constraints to pass call
3686 arguments through their calls. There are two cases,
3687 either a GIMPLE_CALL returning a value, or just a plain
3688 GIMPLE_CALL when it does not return one.
3690 In non-ipa mode, we need to generate constraints for each
3691 pointer passed by address. */
3692 else if (is_gimple_call (t))
3694 if (!in_ipa_mode)
3696 int flags = gimple_call_flags (t);
3698 /* Const functions can return their arguments and addresses
3699 of global memory but not of escaped memory. */
3700 if (flags & ECF_CONST)
3702 if (gimple_call_lhs (t)
3703 && could_have_pointers (gimple_call_lhs (t)))
3704 handle_const_call (t);
3706 /* Pure functions can return addresses in and of memory
3707 reachable from their arguments, but they are not an escape
3708 point for reachable memory of their arguments. */
3709 else if (flags & ECF_PURE)
3711 handle_pure_call (t);
3712 if (gimple_call_lhs (t)
3713 && could_have_pointers (gimple_call_lhs (t)))
3714 handle_lhs_call (gimple_call_lhs (t), flags);
3716 else
3718 handle_rhs_call (t);
3719 if (gimple_call_lhs (t)
3720 && could_have_pointers (gimple_call_lhs (t)))
3721 handle_lhs_call (gimple_call_lhs (t), flags);
3724 else
3726 tree lhsop;
3727 varinfo_t fi;
3728 int i = 1;
3729 size_t j;
3730 tree decl;
3732 lhsop = gimple_call_lhs (t);
3733 decl = gimple_call_fndecl (t);
3735 /* If we can directly resolve the function being called, do so.
3736 Otherwise, it must be some sort of indirect expression that
3737 we should still be able to handle. */
3738 if (decl)
3739 fi = get_vi_for_tree (decl);
3740 else
3742 decl = gimple_call_fn (t);
3743 fi = get_vi_for_tree (decl);
3746 /* Assign all the passed arguments to the appropriate incoming
3747 parameters of the function. */
3748 for (j = 0; j < gimple_call_num_args (t); j++)
3750 struct constraint_expr lhs ;
3751 struct constraint_expr *rhsp;
3752 tree arg = gimple_call_arg (t, j);
3754 get_constraint_for (arg, &rhsc);
3755 if (TREE_CODE (decl) != FUNCTION_DECL)
3757 lhs.type = DEREF;
3758 lhs.var = fi->id;
3759 lhs.offset = i;
3761 else
3763 lhs.type = SCALAR;
3764 lhs.var = first_vi_for_offset (fi, i)->id;
3765 lhs.offset = 0;
3767 while (VEC_length (ce_s, rhsc) != 0)
3769 rhsp = VEC_last (ce_s, rhsc);
3770 process_constraint (new_constraint (lhs, *rhsp));
3771 VEC_pop (ce_s, rhsc);
3773 i++;
3776 /* If we are returning a value, assign it to the result. */
3777 if (lhsop)
3779 struct constraint_expr rhs;
3780 struct constraint_expr *lhsp;
3781 unsigned int j = 0;
3783 get_constraint_for (lhsop, &lhsc);
3784 if (TREE_CODE (decl) != FUNCTION_DECL)
3786 rhs.type = DEREF;
3787 rhs.var = fi->id;
3788 rhs.offset = i;
3790 else
3792 rhs.type = SCALAR;
3793 rhs.var = first_vi_for_offset (fi, i)->id;
3794 rhs.offset = 0;
3796 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3797 process_constraint (new_constraint (*lhsp, rhs));
3801 /* Otherwise, just a regular assignment statement. Only care about
3802 operations with a pointer result; others are dealt with as escape
3803 points if they have pointer operands. */
3804 else if (is_gimple_assign (t)
3805 && could_have_pointers (gimple_assign_lhs (t)))
3807 /* Otherwise, just a regular assignment statement. */
3808 tree lhsop = gimple_assign_lhs (t);
3809 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
3811 if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
3812 do_structure_copy (lhsop, rhsop);
3813 else
3815 unsigned int j;
3816 struct constraint_expr temp;
3817 get_constraint_for (lhsop, &lhsc);
3819 if (gimple_assign_rhs_code (t) == POINTER_PLUS_EXPR)
3820 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
3821 gimple_assign_rhs2 (t), &rhsc);
3822 else if ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
3823 && !(POINTER_TYPE_P (gimple_expr_type (t))
3824 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
3825 || gimple_assign_single_p (t))
3826 get_constraint_for (rhsop, &rhsc);
3827 else
3829 temp.type = ADDRESSOF;
3830 temp.var = anything_id;
3831 temp.offset = 0;
3832 VEC_safe_push (ce_s, heap, rhsc, &temp);
3834 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3836 struct constraint_expr *c2;
3837 unsigned int k;
3839 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3840 process_constraint (new_constraint (*c, *c2));
3844 else if (gimple_code (t) == GIMPLE_CHANGE_DYNAMIC_TYPE)
3846 unsigned int j;
3848 get_constraint_for (gimple_cdt_location (t), &lhsc);
3849 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
3850 get_varinfo (c->var)->no_tbaa_pruning = true;
3853 stmt_escape_type = is_escape_site (t);
3854 if (stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
3856 gcc_assert (is_gimple_assign (t));
3857 if (gimple_assign_rhs_code (t) == ADDR_EXPR)
3859 tree rhs = gimple_assign_rhs1 (t);
3860 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3861 if (base
3862 && (!DECL_P (base)
3863 || !is_global_var (base)))
3864 make_escape_constraint (rhs);
3866 else if (get_gimple_rhs_class (gimple_assign_rhs_code (t))
3867 == GIMPLE_SINGLE_RHS)
3869 if (could_have_pointers (gimple_assign_rhs1 (t)))
3870 make_escape_constraint (gimple_assign_rhs1 (t));
3872 else
3873 gcc_unreachable ();
3875 else if (stmt_escape_type == ESCAPE_BAD_CAST)
3877 gcc_assert (is_gimple_assign (t));
3878 gcc_assert (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
3879 || gimple_assign_rhs_code (t) == VIEW_CONVERT_EXPR);
3880 make_escape_constraint (gimple_assign_rhs1 (t));
3882 else if (stmt_escape_type == ESCAPE_TO_ASM)
3884 unsigned i;
3885 for (i = 0; i < gimple_asm_noutputs (t); ++i)
3887 tree op = TREE_VALUE (gimple_asm_output_op (t, i));
3888 if (op && could_have_pointers (op))
3889 /* Strictly we'd only need the constraints from ESCAPED and
3890 NONLOCAL. */
3891 make_escape_constraint (op);
3893 for (i = 0; i < gimple_asm_ninputs (t); ++i)
3895 tree op = TREE_VALUE (gimple_asm_input_op (t, i));
3896 if (op && could_have_pointers (op))
3897 /* Strictly we'd only need the constraint to ESCAPED. */
3898 make_escape_constraint (op);
3902 /* After promoting variables and computing aliasing we will
3903 need to re-scan most statements. FIXME: Try to minimize the
3904 number of statements re-scanned. It's not really necessary to
3905 re-scan *all* statements. */
3906 if (!in_ipa_mode)
3907 gimple_set_modified (origt, true);
3908 VEC_free (ce_s, heap, rhsc);
3909 VEC_free (ce_s, heap, lhsc);
3913 /* Find the first varinfo in the same variable as START that overlaps with
3914 OFFSET.
3915 Effectively, walk the chain of fields for the variable START to find the
3916 first field that overlaps with OFFSET.
3917 Return NULL if we can't find one. */
3919 static varinfo_t
3920 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3922 varinfo_t curr = start;
3923 while (curr)
3925 /* We may not find a variable in the field list with the actual
3926 offset when we have glommed a structure to a variable.
3927 In that case, however, offset should still be within the size
3928 of the variable. */
3929 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3930 return curr;
3931 curr = curr->next;
3933 return NULL;
3937 /* Insert the varinfo FIELD into the field list for BASE, at the front
3938 of the list. */
3940 static void
3941 insert_into_field_list (varinfo_t base, varinfo_t field)
3943 varinfo_t prev = base;
3944 varinfo_t curr = base->next;
3946 field->next = curr;
3947 prev->next = field;
3950 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3951 offset. */
3953 static void
3954 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3956 varinfo_t prev = base;
3957 varinfo_t curr = base->next;
3959 if (curr == NULL)
3961 prev->next = field;
3962 field->next = NULL;
3964 else
3966 while (curr)
3968 if (field->offset <= curr->offset)
3969 break;
3970 prev = curr;
3971 curr = curr->next;
3973 field->next = prev->next;
3974 prev->next = field;
3978 /* This structure is used during pushing fields onto the fieldstack
3979 to track the offset of the field, since bitpos_of_field gives it
3980 relative to its immediate containing type, and we want it relative
3981 to the ultimate containing object. */
3983 struct fieldoff
3985 /* Offset from the base of the base containing object to this field. */
3986 HOST_WIDE_INT offset;
3988 /* Size, in bits, of the field. */
3989 unsigned HOST_WIDE_INT size;
3991 unsigned has_unknown_size : 1;
3993 unsigned may_have_pointers : 1;
3995 typedef struct fieldoff fieldoff_s;
3997 DEF_VEC_O(fieldoff_s);
3998 DEF_VEC_ALLOC_O(fieldoff_s,heap);
4000 /* qsort comparison function for two fieldoff's PA and PB */
4002 static int
4003 fieldoff_compare (const void *pa, const void *pb)
4005 const fieldoff_s *foa = (const fieldoff_s *)pa;
4006 const fieldoff_s *fob = (const fieldoff_s *)pb;
4007 unsigned HOST_WIDE_INT foasize, fobsize;
4009 if (foa->offset < fob->offset)
4010 return -1;
4011 else if (foa->offset > fob->offset)
4012 return 1;
4014 foasize = foa->size;
4015 fobsize = fob->size;
4016 if (foasize < fobsize)
4017 return -1;
4018 else if (foasize > fobsize)
4019 return 1;
4020 return 0;
4023 /* Sort a fieldstack according to the field offset and sizes. */
4024 static void
4025 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
4027 qsort (VEC_address (fieldoff_s, fieldstack),
4028 VEC_length (fieldoff_s, fieldstack),
4029 sizeof (fieldoff_s),
4030 fieldoff_compare);
4033 /* Return true if V is a tree that we can have subvars for.
4034 Normally, this is any aggregate type. Also complex
4035 types which are not gimple registers can have subvars. */
4037 static inline bool
4038 var_can_have_subvars (const_tree v)
4040 /* Volatile variables should never have subvars. */
4041 if (TREE_THIS_VOLATILE (v))
4042 return false;
4044 /* Non decls or memory tags can never have subvars. */
4045 if (!DECL_P (v) || MTAG_P (v))
4046 return false;
4048 /* Aggregates without overlapping fields can have subvars. */
4049 if (TREE_CODE (TREE_TYPE (v)) == RECORD_TYPE)
4050 return true;
4052 return false;
4055 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
4056 the fields of TYPE onto fieldstack, recording their offsets along
4057 the way.
4059 OFFSET is used to keep track of the offset in this entire
4060 structure, rather than just the immediately containing structure.
4061 Returns the number of fields pushed. */
4063 static int
4064 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4065 HOST_WIDE_INT offset)
4067 tree field;
4068 int count = 0;
4070 if (TREE_CODE (type) != RECORD_TYPE)
4071 return 0;
4073 /* If the vector of fields is growing too big, bail out early.
4074 Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, so
4075 make sure that check fails once the limit is exceeded. */
4076 if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
4077 return 0;
4079 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4080 if (TREE_CODE (field) == FIELD_DECL)
4082 bool push = false;
4083 int pushed = 0;
4084 HOST_WIDE_INT foff = bitpos_of_field (field);
4086 if (!var_can_have_subvars (field)
4087 || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4088 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
4089 push = true;
4090 else if (!(pushed = push_fields_onto_fieldstack
4091 (TREE_TYPE (field), fieldstack, offset + foff))
4092 && (DECL_SIZE (field)
4093 && !integer_zerop (DECL_SIZE (field))))
4094 /* Empty structures may have actual size, like in C++. So
4095 if we didn't push any subfields and the size is
4096 nonzero, push the field itself onto the stack. */
4097 push = true;
4099 if (push)
4101 fieldoff_s *pair = NULL;
4102 bool has_unknown_size = false;
4104 if (!VEC_empty (fieldoff_s, *fieldstack))
4105 pair = VEC_last (fieldoff_s, *fieldstack);
4107 if (!DECL_SIZE (field)
4108 || !host_integerp (DECL_SIZE (field), 1))
4109 has_unknown_size = true;
4111 /* If adjacent fields do not contain pointers merge them. */
4112 if (pair
4113 && !pair->may_have_pointers
4114 && !could_have_pointers (field)
4115 && !pair->has_unknown_size
4116 && !has_unknown_size
4117 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
4119 pair = VEC_last (fieldoff_s, *fieldstack);
4120 pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
4122 else
4124 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4125 pair->offset = offset + foff;
4126 pair->has_unknown_size = has_unknown_size;
4127 if (!has_unknown_size)
4128 pair->size = TREE_INT_CST_LOW (DECL_SIZE (field));
4129 else
4130 pair->size = -1;
4131 pair->may_have_pointers = could_have_pointers (field);
4132 count++;
4135 else
4136 count += pushed;
4139 return count;
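/* A small worked example (hypothetical types, for illustration only):

       struct inner { char c; char d; };
       struct outer { struct inner in; int *p; };

   Pushing the fields of "outer" at offset 0 recurses into "in"; since
   neither "c" nor "d" may contain pointers, they are adjacent and have
   known sizes, the two entries are merged into one.  The resulting
   fieldstack is roughly (offset 0, size 16, no pointers) plus an entry
   for "p" with may_have_pointers set; the exact offset of "p" depends
   on the target's layout rules.  */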
4142 /* Create a constraint ID = &FROM. */
4144 static void
4145 make_constraint_from (varinfo_t vi, int from)
4147 struct constraint_expr lhs, rhs;
4149 lhs.var = vi->id;
4150 lhs.offset = 0;
4151 lhs.type = SCALAR;
4153 rhs.var = from;
4154 rhs.offset = 0;
4155 rhs.type = ADDRESSOF;
4156 process_constraint (new_constraint (lhs, rhs));
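/* For instance, make_constraint_from (vi, escaped_id) yields the
   constraint VI = &ESCAPED, the form used further below to let global
   variables point into escaped memory.  */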
4159 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4160 if it is a varargs function. */
4162 static unsigned int
4163 count_num_arguments (tree decl, bool *is_varargs)
4165 unsigned int i = 0;
4166 tree t;
4168 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
4170 t = TREE_CHAIN (t))
4172 if (TREE_VALUE (t) == void_type_node)
4173 break;
4174 i++;
4177 if (!t)
4178 *is_varargs = true;
4179 return i;
4182 /* Create the function info node for DECL, using NAME, and return the
4183 index of the variable we've created for the function. */
4185 static unsigned int
4186 create_function_info_for (tree decl, const char *name)
4188 unsigned int index = VEC_length (varinfo_t, varmap);
4189 varinfo_t vi;
4190 tree arg;
4191 unsigned int i;
4192 bool is_varargs = false;
4194 /* Create the variable info. */
4196 vi = new_var_info (decl, index, name);
4197 vi->decl = decl;
4198 vi->offset = 0;
4199 vi->size = 1;
4200 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
4201 insert_vi_for_tree (vi->decl, vi);
4202 VEC_safe_push (varinfo_t, heap, varmap, vi);
4204 stats.total_vars++;
4206 /* If it's varargs, we don't know how many arguments it has, so we
4207 can't do much. */
4208 if (is_varargs)
4210 vi->fullsize = ~0;
4211 vi->size = ~0;
4212 vi->is_unknown_size_var = true;
4213 return index;
4217 arg = DECL_ARGUMENTS (decl);
4219 /* Set up variables for each argument. */
4220 for (i = 1; i < vi->fullsize; i++)
4222 varinfo_t argvi;
4223 const char *newname;
4224 char *tempname;
4225 unsigned int newindex;
4226 tree argdecl = decl;
4228 if (arg)
4229 argdecl = arg;
4231 newindex = VEC_length (varinfo_t, varmap);
4232 asprintf (&tempname, "%s.arg%d", name, i-1);
4233 newname = ggc_strdup (tempname);
4234 free (tempname);
4236 argvi = new_var_info (argdecl, newindex, newname);
4237 argvi->decl = argdecl;
4238 VEC_safe_push (varinfo_t, heap, varmap, argvi);
4239 argvi->offset = i;
4240 argvi->size = 1;
4241 argvi->is_full_var = true;
4242 argvi->fullsize = vi->fullsize;
4243 insert_into_field_list_sorted (vi, argvi);
4244 stats.total_vars ++;
4245 if (arg)
4247 insert_vi_for_tree (arg, argvi);
4248 arg = TREE_CHAIN (arg);
4252 /* Create a variable for the return var. */
4253 if (DECL_RESULT (decl) != NULL
4254 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
4256 varinfo_t resultvi;
4257 const char *newname;
4258 char *tempname;
4259 unsigned int newindex;
4260 tree resultdecl = decl;
4262 vi->fullsize ++;
4264 if (DECL_RESULT (decl))
4265 resultdecl = DECL_RESULT (decl);
4267 newindex = VEC_length (varinfo_t, varmap);
4268 asprintf (&tempname, "%s.result", name);
4269 newname = ggc_strdup (tempname);
4270 free (tempname);
4272 resultvi = new_var_info (resultdecl, newindex, newname);
4273 resultvi->decl = resultdecl;
4274 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
4275 resultvi->offset = i;
4276 resultvi->size = 1;
4277 resultvi->fullsize = vi->fullsize;
4278 resultvi->is_full_var = true;
4279 insert_into_field_list_sorted (vi, resultvi);
4280 stats.total_vars ++;
4281 if (DECL_RESULT (decl))
4282 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
4284 return index;
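/* Sketch of the layout built above (the function name "foo" is
   hypothetical; the names follow the asprintf patterns used here):

       foo          offset 0    the function itself
       foo.arg0     offset 1    first parameter
       foo.arg1     offset 2    second parameter
       ...
       foo.result   last slot   the return value, unless the function
                                is varargs or returns void

   Every slot has size 1, so constraints for calls can address a
   particular parameter or the result by its field offset.  */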
4288 /* Return true if FIELDSTACK contains fields that overlap.
4289 FIELDSTACK is assumed to be sorted by offset. */
4291 static bool
4292 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
4294 fieldoff_s *fo = NULL;
4295 unsigned int i;
4296 HOST_WIDE_INT lastoffset = -1;
4298 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4300 if (fo->offset == lastoffset)
4301 return true;
4302 lastoffset = fo->offset;
4304 return false;
4307 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
4308 This will also create any varinfo structures necessary for fields
4309 of DECL. */
4311 static unsigned int
4312 create_variable_info_for (tree decl, const char *name)
4314 unsigned int index = VEC_length (varinfo_t, varmap);
4315 varinfo_t vi;
4316 tree decl_type = TREE_TYPE (decl);
4317 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
4318 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
4319 VEC (fieldoff_s,heap) *fieldstack = NULL;
4321 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
4322 return create_function_info_for (decl, name);
4324 if (var_can_have_subvars (decl) && use_field_sensitive
4325 && (!var_ann (decl)
4326 || var_ann (decl)->noalias_state == 0)
4327 && (!var_ann (decl)
4328 || !var_ann (decl)->is_heapvar))
4329 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
4331 /* If the variable doesn't have subvars, we may end up needing to
4332 sort the field list and create fake variables for all the
4333 fields. */
4334 vi = new_var_info (decl, index, name);
4335 vi->decl = decl;
4336 vi->offset = 0;
4337 if (!declsize
4338 || !host_integerp (declsize, 1))
4340 vi->is_unknown_size_var = true;
4341 vi->fullsize = ~0;
4342 vi->size = ~0;
4344 else
4346 vi->fullsize = TREE_INT_CST_LOW (declsize);
4347 vi->size = vi->fullsize;
4350 insert_vi_for_tree (vi->decl, vi);
4351 VEC_safe_push (varinfo_t, heap, varmap, vi);
4352 if (is_global && (!flag_whole_program || !in_ipa_mode)
4353 && could_have_pointers (decl))
4355 if (var_ann (decl)
4356 && var_ann (decl)->noalias_state == NO_ALIAS_ANYTHING)
4357 make_constraint_from (vi, vi->id);
4358 else
4359 make_constraint_from (vi, escaped_id);
4362 stats.total_vars++;
4363 if (use_field_sensitive
4364 && !vi->is_unknown_size_var
4365 && var_can_have_subvars (decl)
4366 && VEC_length (fieldoff_s, fieldstack) > 1
4367 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
4369 unsigned int newindex = VEC_length (varinfo_t, varmap);
4370 fieldoff_s *fo = NULL;
4371 bool notokay = false;
4372 unsigned int i;
4374 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4376 if (fo->has_unknown_size
4377 || fo->offset < 0)
4379 notokay = true;
4380 break;
4384 /* We can't sort them if we have a field with a variable sized type,
4385 which will make notokay = true. In that case, we are going to return
4386 without creating varinfos for the fields anyway, so sorting them is a
4387 waste to boot. */
4388 if (!notokay)
4390 sort_fieldstack (fieldstack);
4391 /* Due to some C++ FE issues, like PR 22488, we might end up
4392 with what appear to be overlapping fields even though they,
4393 in reality, do not overlap. Until the C++ FE is fixed,
4394 we will simply disable field-sensitivity for these cases. */
4395 notokay = check_for_overlaps (fieldstack);
4399 if (VEC_length (fieldoff_s, fieldstack) != 0)
4400 fo = VEC_index (fieldoff_s, fieldstack, 0);
4402 if (fo == NULL || notokay)
4404 vi->is_unknown_size_var = 1;
4405 vi->fullsize = ~0;
4406 vi->size = ~0;
4407 vi->is_full_var = true;
4408 VEC_free (fieldoff_s, heap, fieldstack);
4409 return index;
4412 vi->size = fo->size;
4413 vi->offset = fo->offset;
4414 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
4415 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
4416 i--)
4418 varinfo_t newvi;
4419 const char *newname = "NULL";
4420 char *tempname;
4422 newindex = VEC_length (varinfo_t, varmap);
4423 if (dump_file)
4425 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
4426 "+" HOST_WIDE_INT_PRINT_DEC,
4427 vi->name, fo->offset, fo->size);
4428 newname = ggc_strdup (tempname);
4429 free (tempname);
4431 newvi = new_var_info (decl, newindex, newname);
4432 newvi->offset = fo->offset;
4433 newvi->size = fo->size;
4434 newvi->fullsize = vi->fullsize;
4435 insert_into_field_list (vi, newvi);
4436 VEC_safe_push (varinfo_t, heap, varmap, newvi);
4437 if (is_global && (!flag_whole_program || !in_ipa_mode)
4438 && fo->may_have_pointers)
4439 make_constraint_from (newvi, escaped_id);
4441 stats.total_vars++;
4444 else
4445 vi->is_full_var = true;
4447 VEC_free (fieldoff_s, heap, fieldstack);
4449 return index;
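/* Rough illustration (hypothetical declaration, not from the sources):
   for a global

       struct { int *p; int *q; } g;

   with field sensitivity enabled the code above creates the base
   varinfo for "g" (covering the first field) plus an extra varinfo for
   the second field, chained through the field list; because "g" is
   global and its fields may contain pointers, each of them receives a
   VAR = &ESCAPED constraint.  With too many fields, or an unknown
   size, a single full-sized varinfo is used instead.  */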
4452 /* Print out the points-to solution for VAR to FILE. */
4454 void
4455 dump_solution_for_var (FILE *file, unsigned int var)
4457 varinfo_t vi = get_varinfo (var);
4458 unsigned int i;
4459 bitmap_iterator bi;
4461 if (find (var) != var)
4463 varinfo_t vipt = get_varinfo (find (var));
4464 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4466 else
4468 fprintf (file, "%s = { ", vi->name);
4469 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4471 fprintf (file, "%s ", get_varinfo (i)->name);
4473 fprintf (file, "}");
4474 if (vi->no_tbaa_pruning)
4475 fprintf (file, " no-tbaa-pruning");
4476 fprintf (file, "\n");
4480 /* Print the points-to solution for VAR to stdout. */
4482 void
4483 debug_solution_for_var (unsigned int var)
4485 dump_solution_for_var (stdout, var);
4488 /* Create varinfo structures for all of the variables in the
4489 function for intraprocedural mode. */
4491 static void
4492 intra_create_variable_infos (void)
4494 tree t;
4495 struct constraint_expr lhs, rhs;
4497 /* For each incoming pointer argument arg, create the constraint ARG
4498 = &NONLOCAL, or ARG = &PARM_NOALIAS if flag_argument_noalias is set. */
4499 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
4501 varinfo_t p;
4503 if (!could_have_pointers (t))
4504 continue;
4506 /* If flag_argument_noalias is set, then the function's pointer
4507 arguments are guaranteed not to point to each other. In that
4508 case, create an artificial variable PARM_NOALIAS and the
4509 constraint ARG = &PARM_NOALIAS. */
4510 if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
4512 varinfo_t vi;
4513 tree heapvar = heapvar_lookup (t);
4515 lhs.offset = 0;
4516 lhs.type = SCALAR;
4517 lhs.var = get_vi_for_tree (t)->id;
4519 if (heapvar == NULL_TREE)
4521 var_ann_t ann;
4522 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
4523 "PARM_NOALIAS");
4524 DECL_EXTERNAL (heapvar) = 1;
4525 if (gimple_referenced_vars (cfun))
4526 add_referenced_var (heapvar);
4528 heapvar_insert (t, heapvar);
4530 ann = get_var_ann (heapvar);
4531 ann->is_heapvar = 1;
4532 if (flag_argument_noalias == 1)
4533 ann->noalias_state = NO_ALIAS;
4534 else if (flag_argument_noalias == 2)
4535 ann->noalias_state = NO_ALIAS_GLOBAL;
4536 else if (flag_argument_noalias == 3)
4537 ann->noalias_state = NO_ALIAS_ANYTHING;
4538 else
4539 gcc_unreachable ();
4542 vi = get_vi_for_tree (heapvar);
4543 vi->is_artificial_var = 1;
4544 vi->is_heap_var = 1;
4545 rhs.var = vi->id;
4546 rhs.type = ADDRESSOF;
4547 rhs.offset = 0;
4548 for (p = get_varinfo (lhs.var); p; p = p->next)
4550 struct constraint_expr temp = lhs;
4551 temp.var = p->id;
4552 process_constraint (new_constraint (temp, rhs));
4555 else
4557 varinfo_t arg_vi = get_vi_for_tree (t);
4559 for (p = arg_vi; p; p = p->next)
4560 make_constraint_from (p, nonlocal_id);
4564 /* Add a constraint for the incoming static chain parameter. */
4565 if (cfun->static_chain_decl != NULL_TREE)
4567 varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);
4569 for (p = chain_vi; p; p = p->next)
4570 make_constraint_from (p, nonlocal_id);
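/* Hedged example of what this produces (assume one pointer parameter
   "p" and default flags): the constraint

       p = &NONLOCAL

   i.e. the incoming pointer may point to any nonlocal memory.  With
   flag_argument_noalias the right-hand side becomes the address of an
   artificial PARM_NOALIAS heap variable instead, modelling the
   guarantee that distinct pointer arguments do not alias.  */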
4574 /* Structure used to put solution bitmaps in a hashtable so they can
4575 be shared among variables with the same points-to set. */
4577 typedef struct shared_bitmap_info
4579 bitmap pt_vars;
4580 hashval_t hashcode;
4581 } *shared_bitmap_info_t;
4582 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
4584 static htab_t shared_bitmap_table;
4586 /* Hash function for a shared_bitmap_info_t */
4588 static hashval_t
4589 shared_bitmap_hash (const void *p)
4591 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
4592 return bi->hashcode;
4595 /* Equality function for two shared_bitmap_info_t's. */
4597 static int
4598 shared_bitmap_eq (const void *p1, const void *p2)
4600 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
4601 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
4602 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
4605 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
4606 existing instance if there is one, NULL otherwise. */
4608 static bitmap
4609 shared_bitmap_lookup (bitmap pt_vars)
4611 void **slot;
4612 struct shared_bitmap_info sbi;
4614 sbi.pt_vars = pt_vars;
4615 sbi.hashcode = bitmap_hash (pt_vars);
4617 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
4618 sbi.hashcode, NO_INSERT);
4619 if (!slot)
4620 return NULL;
4621 else
4622 return ((shared_bitmap_info_t) *slot)->pt_vars;
4626 /* Add a bitmap to the shared bitmap hashtable. */
4628 static void
4629 shared_bitmap_add (bitmap pt_vars)
4631 void **slot;
4632 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
4634 sbi->pt_vars = pt_vars;
4635 sbi->hashcode = bitmap_hash (pt_vars);
4637 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
4638 sbi->hashcode, INSERT);
4639 gcc_assert (!*slot);
4640 *slot = (void *) sbi;
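/* Usage note: find_what_p_points_to below first builds a candidate
   points-to bitmap, then calls shared_bitmap_lookup; on a hit the
   freshly built bitmap is dropped and the shared copy reused, so
   pointers with identical solutions share one bitmap.  */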
4644 /* Set bits in INTO corresponding to the variable uids in solution set
4645 FROM, which came from variable PTR.
4646 For variables that are actually dereferenced, we also use type
4647 based alias analysis to prune the points-to sets.
4648 IS_DEREFED is true if PTR was directly dereferenced, which we use to
4649 help determine whether we are allowed to prune using TBAA.
4650 If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
4651 the from set. Returns the number of pruned variables. */
4653 static unsigned
4654 set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
4655 bool no_tbaa_pruning)
4657 unsigned int i;
4658 bitmap_iterator bi;
4659 unsigned pruned = 0;
4661 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
4663 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4665 varinfo_t vi = get_varinfo (i);
4667 /* The only artificial variables that are allowed in a may-alias
4668 set are heap variables. */
4669 if (vi->is_artificial_var && !vi->is_heap_var)
4670 continue;
4672 if (TREE_CODE (vi->decl) == VAR_DECL
4673 || TREE_CODE (vi->decl) == PARM_DECL
4674 || TREE_CODE (vi->decl) == RESULT_DECL)
4676 /* Just add VI->DECL to the alias set.
4677 Don't type prune artificial vars or points-to sets
4678 for pointers that have not been dereferenced or with
4679 type-based pruning disabled. */
4680 if (vi->is_artificial_var
4681 || !is_derefed
4682 || no_tbaa_pruning)
4683 bitmap_set_bit (into, DECL_UID (vi->decl));
4684 else
4686 alias_set_type var_alias_set, mem_alias_set;
4687 var_alias_set = get_alias_set (vi->decl);
4688 mem_alias_set = get_alias_set (TREE_TYPE (TREE_TYPE (ptr)));
4689 if (may_alias_p (SSA_NAME_VAR (ptr), mem_alias_set,
4690 vi->decl, var_alias_set, true))
4691 bitmap_set_bit (into, DECL_UID (vi->decl));
4692 else
4693 ++pruned;
4698 return pruned;
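/* Illustrative effect of the pruning above (hypothetical code): when
   an "int *" is dereferenced and its solution contains a variable of
   type double, may_alias_p rejects the pair under strict-aliasing
   rules, the double variable is left out of the alias set, and the
   returned pruned count feeds the -Wstrict-aliasing machinery in
   find_what_p_points_to.  */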
4702 static bool have_alias_info = false;
4704 /* Emit a note for the pointer initialization point DEF. */
4706 static void
4707 emit_pointer_definition (tree ptr, bitmap visited)
4709 gimple def = SSA_NAME_DEF_STMT (ptr);
4710 if (gimple_code (def) == GIMPLE_PHI)
4712 use_operand_p argp;
4713 ssa_op_iter oi;
4715 FOR_EACH_PHI_ARG (argp, def, oi, SSA_OP_USE)
4717 tree arg = USE_FROM_PTR (argp);
4718 if (TREE_CODE (arg) == SSA_NAME)
4720 if (bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
4721 emit_pointer_definition (arg, visited);
4723 else
4724 inform (0, "initialized from %qE", arg);
4727 else if (!gimple_nop_p (def))
4728 inform (gimple_location (def), "initialized from here");
4731 /* Emit a strict aliasing warning for dereferencing the pointer PTR. */
4733 static void
4734 emit_alias_warning (tree ptr)
4736 gimple use;
4737 imm_use_iterator ui;
4738 unsigned warned = 0;
4740 FOR_EACH_IMM_USE_STMT (use, ui, ptr)
4742 tree deref = NULL_TREE;
4744 if (gimple_has_lhs (use))
4746 tree lhs = get_base_address (gimple_get_lhs (use));
4747 if (lhs
4748 && INDIRECT_REF_P (lhs)
4749 && TREE_OPERAND (lhs, 0) == ptr)
4750 deref = lhs;
4752 if (gimple_assign_single_p (use))
4754 tree rhs = get_base_address (gimple_assign_rhs1 (use));
4755 if (rhs
4756 && INDIRECT_REF_P (rhs)
4757 && TREE_OPERAND (rhs, 0) == ptr)
4758 deref = rhs;
4760 else if (is_gimple_call (use))
4762 unsigned i;
4763 for (i = 0; i < gimple_call_num_args (use); ++i)
4765 tree op = get_base_address (gimple_call_arg (use, i));
4766 if (op
4767 && INDIRECT_REF_P (op)
4768 && TREE_OPERAND (op, 0) == ptr)
4769 deref = op;
4772 if (deref
4773 && !TREE_NO_WARNING (deref))
4775 TREE_NO_WARNING (deref) = 1;
4776 warning_at (gimple_location (use), OPT_Wstrict_aliasing,
4777 "dereferencing pointer %qD does break strict-aliasing "
4778 "rules", SSA_NAME_VAR (ptr));
4779 ++warned;
4782 if (warned > 0)
4784 bitmap visited = BITMAP_ALLOC (NULL);
4785 emit_pointer_definition (ptr, visited);
4786 BITMAP_FREE (visited);
4790 /* Given a pointer variable P, fill in its points-to set, or return
4791 false if we can't.
4792 Rather than return false for variables that point to anything, we
4793 instead find the corresponding SMT, and merge in its aliases. In
4794 addition to these aliases, we also set the bits for the SMT's
4795 themselves and their subsets, as SMT's are still in use by
4796 non-SSA_NAME's, and pruning may eliminate every one of their
4797 aliases. In such a case, if we did not include the right set of
4798 SMT's in the points-to set of the variable, we'd end up with
4799 statements that do not conflict but should. */
4801 bool
4802 find_what_p_points_to (tree p)
4804 tree lookup_p = p;
4805 varinfo_t vi;
4807 if (!have_alias_info)
4808 return false;
4810 /* For parameters, get at the points-to set for the actual parm
4811 decl. */
4812 if (TREE_CODE (p) == SSA_NAME
4813 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4814 && SSA_NAME_IS_DEFAULT_DEF (p))
4815 lookup_p = SSA_NAME_VAR (p);
4817 vi = lookup_vi_for_tree (lookup_p);
4818 if (vi)
4820 if (vi->is_artificial_var)
4821 return false;
4823 /* See if this is a field or a structure. */
4824 if (vi->size != vi->fullsize)
4826 /* Nothing currently asks about structure fields directly,
4827 but if something does, we need code here to hand back the
4828 points-to set. */
4829 return false;
4831 else
4833 struct ptr_info_def *pi = get_ptr_info (p);
4834 unsigned int i, pruned;
4835 bitmap_iterator bi;
4836 bool was_pt_anything = false;
4837 bitmap finished_solution;
4838 bitmap result;
4840 if (!pi->memory_tag_needed)
4841 return false;
4843 /* This variable may have been collapsed, let's get the real
4844 variable. */
4845 vi = get_varinfo (find (vi->id));
4847 /* Translate artificial variables into SSA_NAME_PTR_INFO
4848 attributes. */
4849 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4851 varinfo_t vi = get_varinfo (i);
4853 if (vi->is_artificial_var)
4855 /* FIXME. READONLY should be handled better so that
4856 flow insensitive aliasing can disregard writable
4857 aliases. */
4858 if (vi->id == nothing_id)
4859 pi->pt_null = 1;
4860 else if (vi->id == anything_id
4861 || vi->id == nonlocal_id
4862 || vi->id == escaped_id
4863 || vi->id == callused_id)
4864 was_pt_anything = 1;
4865 else if (vi->id == readonly_id)
4866 was_pt_anything = 1;
4867 else if (vi->id == integer_id)
4868 was_pt_anything = 1;
4869 else if (vi->is_heap_var)
4870 pi->pt_global_mem = 1;
4874 /* Instead of doing extra work, simply do not create
4875 points-to information for pt_anything pointers. This
4876 will cause the operand scanner to fall back to the
4877 type-based SMT and its aliases, which is the best
4878 we could do here for the points-to set as well. */
4879 if (was_pt_anything)
4880 return false;
4882 /* Share the final set of variables when possible. */
4883 finished_solution = BITMAP_GGC_ALLOC ();
4884 stats.points_to_sets_created++;
4886 pruned = set_uids_in_ptset (p, finished_solution, vi->solution,
4887 pi->is_dereferenced,
4888 vi->no_tbaa_pruning);
4889 result = shared_bitmap_lookup (finished_solution);
4891 if (!result)
4893 shared_bitmap_add (finished_solution);
4894 pi->pt_vars = finished_solution;
4896 else
4898 pi->pt_vars = result;
4899 bitmap_clear (finished_solution);
4902 if (bitmap_empty_p (pi->pt_vars))
4904 pi->pt_vars = NULL;
4905 if (pruned > 0
4906 && pi->is_dereferenced
4907 && warn_strict_aliasing > 0
4908 && !SSA_NAME_IS_DEFAULT_DEF (p))
4910 if (dump_file && dump_flags & TDF_DETAILS)
4912 fprintf (dump_file, "alias warning for ");
4913 print_generic_expr (dump_file, p, 0);
4914 fprintf (dump_file, "\n");
4916 emit_alias_warning (p);
4920 return true;
4924 return false;
4927 /* Mark the ESCAPED solution as call clobbered. Returns false if
4928 pt_anything escaped which needs all locals that have their address
4929 taken marked call clobbered as well. */
4931 bool
4932 clobber_what_escaped (void)
4934 varinfo_t vi;
4935 unsigned int i;
4936 bitmap_iterator bi;
4938 if (!have_alias_info)
4939 return false;
4941 /* This variable may have been collapsed, let's get the real
4942 variable for escaped_id. */
4943 vi = get_varinfo (find (escaped_id));
4945 /* If call-used memory escapes we need to include it in the
4946 set of escaped variables. This can happen if a pure
4947 function returns a pointer and this pointer escapes. */
4948 if (bitmap_bit_p (vi->solution, callused_id))
4950 varinfo_t cu_vi = get_varinfo (find (callused_id));
4951 bitmap_ior_into (vi->solution, cu_vi->solution);
4954 /* Mark variables in the solution call-clobbered. */
4955 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4957 varinfo_t vi = get_varinfo (i);
4959 if (vi->is_artificial_var)
4961 /* nothing_id and readonly_id do not cause any
4962 call clobber ops. For anything_id and integer_id
4963 we need to clobber all addressable vars. */
4964 if (vi->id == anything_id
4965 || vi->id == integer_id)
4966 return false;
4969 /* Only artificial heap-vars are further interesting. */
4970 if (vi->is_artificial_var && !vi->is_heap_var)
4971 continue;
4973 if ((TREE_CODE (vi->decl) == VAR_DECL
4974 || TREE_CODE (vi->decl) == PARM_DECL
4975 || TREE_CODE (vi->decl) == RESULT_DECL)
4976 && !unmodifiable_var_p (vi->decl))
4977 mark_call_clobbered (vi->decl, ESCAPE_TO_CALL);
4980 return true;
4983 /* Compute the call-used variables. */
4985 void
4986 compute_call_used_vars (void)
4988 varinfo_t vi;
4989 unsigned int i;
4990 bitmap_iterator bi;
4991 bool has_anything_id = false;
4993 if (!have_alias_info)
4994 return;
4996 /* This variable may have been collapsed, let's get the real
4997 variable for callused_id. */
4998 vi = get_varinfo (find (callused_id));
5000 /* Mark variables in the solution call-used. */
5001 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5003 varinfo_t vi = get_varinfo (i);
5005 if (vi->is_artificial_var)
5007 /* For anything_id and integer_id we need to make
5008 all local addressable vars call-used. */
5009 if (vi->id == anything_id
5010 || vi->id == integer_id)
5011 has_anything_id = true;
5014 /* Only artificial heap-vars are further interesting. */
5015 if (vi->is_artificial_var && !vi->is_heap_var)
5016 continue;
5018 if ((TREE_CODE (vi->decl) == VAR_DECL
5019 || TREE_CODE (vi->decl) == PARM_DECL
5020 || TREE_CODE (vi->decl) == RESULT_DECL)
5021 && !unmodifiable_var_p (vi->decl))
5022 bitmap_set_bit (gimple_call_used_vars (cfun), DECL_UID (vi->decl));
5025 /* If anything is call-used, add all addressable locals to the set. */
5026 if (has_anything_id)
5027 bitmap_ior_into (gimple_call_used_vars (cfun),
5028 gimple_addressable_vars (cfun));
5032 /* Dump points-to information to OUTFILE. */
5034 void
5035 dump_sa_points_to_info (FILE *outfile)
5037 unsigned int i;
5039 fprintf (outfile, "\nPoints-to sets\n\n");
5041 if (dump_flags & TDF_STATS)
5043 fprintf (outfile, "Stats:\n");
5044 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
5045 fprintf (outfile, "Non-pointer vars: %d\n",
5046 stats.nonpointer_vars);
5047 fprintf (outfile, "Statically unified vars: %d\n",
5048 stats.unified_vars_static);
5049 fprintf (outfile, "Dynamically unified vars: %d\n",
5050 stats.unified_vars_dynamic);
5051 fprintf (outfile, "Iterations: %d\n", stats.iterations);
5052 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
5053 fprintf (outfile, "Number of implicit edges: %d\n",
5054 stats.num_implicit_edges);
5057 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
5058 dump_solution_for_var (outfile, i);
5062 /* Debug points-to information to stderr. */
5064 void
5065 debug_sa_points_to_info (void)
5067 dump_sa_points_to_info (stderr);
5071 /* Initialize the always-existing constraint variables for NULL,
5072 ANYTHING, READONLY, ESCAPED, NONLOCAL, CALLUSED, and INTEGER. */
5074 static void
5075 init_base_vars (void)
5077 struct constraint_expr lhs, rhs;
5079 /* Create the NULL variable, used to represent that a variable points
5080 to NULL. */
5081 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
5082 var_nothing = new_var_info (nothing_tree, nothing_id, "NULL");
5083 insert_vi_for_tree (nothing_tree, var_nothing);
5084 var_nothing->is_artificial_var = 1;
5085 var_nothing->offset = 0;
5086 var_nothing->size = ~0;
5087 var_nothing->fullsize = ~0;
5088 var_nothing->is_special_var = 1;
5089 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
5091 /* Create the ANYTHING variable, used to represent that a variable
5092 points to some unknown piece of memory. */
5093 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
5094 var_anything = new_var_info (anything_tree, anything_id, "ANYTHING");
5095 insert_vi_for_tree (anything_tree, var_anything);
5096 var_anything->is_artificial_var = 1;
5097 var_anything->size = ~0;
5098 var_anything->offset = 0;
5099 var_anything->next = NULL;
5100 var_anything->fullsize = ~0;
5101 var_anything->is_special_var = 1;
5103 /* Anything points to anything. This makes deref constraints just
5104 work in the presence of linked list and other p = *p type loops,
5105 by saying that *ANYTHING = ANYTHING. */
5106 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
5107 lhs.type = SCALAR;
5108 lhs.var = anything_id;
5109 lhs.offset = 0;
5110 rhs.type = ADDRESSOF;
5111 rhs.var = anything_id;
5112 rhs.offset = 0;
5114 /* This specifically does not use process_constraint because
5115 process_constraint ignores all anything = anything constraints, since all
5116 but this one are redundant. */
5117 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
5119 /* Create the READONLY variable, used to represent that a variable
5120 points to readonly memory. */
5121 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
5122 var_readonly = new_var_info (readonly_tree, readonly_id, "READONLY");
5123 var_readonly->is_artificial_var = 1;
5124 var_readonly->offset = 0;
5125 var_readonly->size = ~0;
5126 var_readonly->fullsize = ~0;
5127 var_readonly->next = NULL;
5128 var_readonly->is_special_var = 1;
5129 insert_vi_for_tree (readonly_tree, var_readonly);
5130 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
5132 /* readonly memory points to anything, in order to make deref
5133 easier. In reality, it points to anything the particular
5134 readonly variable can point to, but we don't track this
5135 separately. */
5136 lhs.type = SCALAR;
5137 lhs.var = readonly_id;
5138 lhs.offset = 0;
5139 rhs.type = ADDRESSOF;
5140 rhs.var = readonly_id; /* FIXME */
5141 rhs.offset = 0;
5142 process_constraint (new_constraint (lhs, rhs));
5144 /* Create the ESCAPED variable, used to represent the set of escaped
5145 memory. */
5146 escaped_tree = create_tmp_var_raw (void_type_node, "ESCAPED");
5147 var_escaped = new_var_info (escaped_tree, escaped_id, "ESCAPED");
5148 insert_vi_for_tree (escaped_tree, var_escaped);
5149 var_escaped->is_artificial_var = 1;
5150 var_escaped->offset = 0;
5151 var_escaped->size = ~0;
5152 var_escaped->fullsize = ~0;
5153 var_escaped->is_special_var = 0;
5154 VEC_safe_push (varinfo_t, heap, varmap, var_escaped);
5155 gcc_assert (VEC_index (varinfo_t, varmap, 3) == var_escaped);
5157 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
5158 lhs.type = SCALAR;
5159 lhs.var = escaped_id;
5160 lhs.offset = 0;
5161 rhs.type = DEREF;
5162 rhs.var = escaped_id;
5163 rhs.offset = 0;
5164 process_constraint (new_constraint (lhs, rhs));
5166 /* Create the NONLOCAL variable, used to represent the set of nonlocal
5167 memory. */
5168 nonlocal_tree = create_tmp_var_raw (void_type_node, "NONLOCAL");
5169 var_nonlocal = new_var_info (nonlocal_tree, nonlocal_id, "NONLOCAL");
5170 insert_vi_for_tree (nonlocal_tree, var_nonlocal);
5171 var_nonlocal->is_artificial_var = 1;
5172 var_nonlocal->offset = 0;
5173 var_nonlocal->size = ~0;
5174 var_nonlocal->fullsize = ~0;
5175 var_nonlocal->is_special_var = 1;
5176 VEC_safe_push (varinfo_t, heap, varmap, var_nonlocal);
5178 /* Nonlocal memory points to escaped (which includes nonlocal),
5179 in order to make deref easier. */
5180 lhs.type = SCALAR;
5181 lhs.var = nonlocal_id;
5182 lhs.offset = 0;
5183 rhs.type = ADDRESSOF;
5184 rhs.var = escaped_id;
5185 rhs.offset = 0;
5186 process_constraint (new_constraint (lhs, rhs));
5188 /* Create the CALLUSED variable, used to represent the set of call-used
5189 memory. */
5190 callused_tree = create_tmp_var_raw (void_type_node, "CALLUSED");
5191 var_callused = new_var_info (callused_tree, callused_id, "CALLUSED");
5192 insert_vi_for_tree (callused_tree, var_callused);
5193 var_callused->is_artificial_var = 1;
5194 var_callused->offset = 0;
5195 var_callused->size = ~0;
5196 var_callused->fullsize = ~0;
5197 var_callused->is_special_var = 0;
5198 VEC_safe_push (varinfo_t, heap, varmap, var_callused);
5200 /* CALLUSED = *CALLUSED, because call-used is may-deref'd at calls, etc. */
5201 lhs.type = SCALAR;
5202 lhs.var = callused_id;
5203 lhs.offset = 0;
5204 rhs.type = DEREF;
5205 rhs.var = callused_id;
5206 rhs.offset = 0;
5207 process_constraint (new_constraint (lhs, rhs));
5209 /* Create the INTEGER variable, used to represent that a variable points
5210 to an INTEGER. */
5211 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
5212 var_integer = new_var_info (integer_tree, integer_id, "INTEGER");
5213 insert_vi_for_tree (integer_tree, var_integer);
5214 var_integer->is_artificial_var = 1;
5215 var_integer->size = ~0;
5216 var_integer->fullsize = ~0;
5217 var_integer->offset = 0;
5218 var_integer->next = NULL;
5219 var_integer->is_special_var = 1;
5220 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
5222 /* INTEGER = ANYTHING, because we don't know where a dereference of
5223 a random integer will point to. */
5224 lhs.type = SCALAR;
5225 lhs.var = integer_id;
5226 lhs.offset = 0;
5227 rhs.type = ADDRESSOF;
5228 rhs.var = anything_id;
5229 rhs.offset = 0;
5230 process_constraint (new_constraint (lhs, rhs));
5232 /* *ESCAPED = &ESCAPED. This is true because we have to assume
5233 everything pointed to by escaped can also point to escaped. */
5234 lhs.type = DEREF;
5235 lhs.var = escaped_id;
5236 lhs.offset = 0;
5237 rhs.type = ADDRESSOF;
5238 rhs.var = escaped_id;
5239 rhs.offset = 0;
5240 process_constraint (new_constraint (lhs, rhs));
5242 /* *ESCAPED = &NONLOCAL. This is true because we have to assume
5243 everything pointed to by escaped can also point to nonlocal. */
5244 lhs.type = DEREF;
5245 lhs.var = escaped_id;
5246 lhs.offset = 0;
5247 rhs.type = ADDRESSOF;
5248 rhs.var = nonlocal_id;
5249 rhs.offset = 0;
5250 process_constraint (new_constraint (lhs, rhs));
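/* Summary of the base constraints created above:

       ANYTHING = &ANYTHING    anything points to anything
       READONLY = &READONLY    stand-in for constant memory
       ESCAPED  = *ESCAPED     escaped memory may be deref'd at calls
       NONLOCAL = &ESCAPED     nonlocal memory points into escaped
       CALLUSED = *CALLUSED    likewise for call-used memory
       INTEGER  = &ANYTHING    dereferencing a random integer
       *ESCAPED = &ESCAPED     what escaped points to also escapes
       *ESCAPED = &NONLOCAL    and may point to nonlocal memory  */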
5253 /* Initialize things necessary to perform PTA. */
5255 static void
5256 init_alias_vars (void)
5258 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
5260 bitmap_obstack_initialize (&pta_obstack);
5261 bitmap_obstack_initialize (&oldpta_obstack);
5262 bitmap_obstack_initialize (&predbitmap_obstack);
5264 constraint_pool = create_alloc_pool ("Constraint pool",
5265 sizeof (struct constraint), 30);
5266 variable_info_pool = create_alloc_pool ("Variable info pool",
5267 sizeof (struct variable_info), 30);
5268 constraints = VEC_alloc (constraint_t, heap, 8);
5269 varmap = VEC_alloc (varinfo_t, heap, 8);
5270 vi_for_tree = pointer_map_create ();
5272 memset (&stats, 0, sizeof (stats));
5273 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
5274 shared_bitmap_eq, free);
5275 init_base_vars ();
5278 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
5279 predecessor edges. */
5281 static void
5282 remove_preds_and_fake_succs (constraint_graph_t graph)
5284 unsigned int i;
5286 /* Clear the implicit ref and address nodes from the successor
5287 lists. */
5288 for (i = 0; i < FIRST_REF_NODE; i++)
5290 if (graph->succs[i])
5291 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
5292 FIRST_REF_NODE * 2);
5295 /* Free the successor list for the non-ref nodes. */
5296 for (i = FIRST_REF_NODE; i < graph->size; i++)
5298 if (graph->succs[i])
5299 BITMAP_FREE (graph->succs[i]);
5302 /* Now shrink the successor list to the current number of variables,
5303 and blow away the predecessor bitmaps. */
5304 graph->size = VEC_length (varinfo_t, varmap);
5305 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
5307 free (graph->implicit_preds);
5308 graph->implicit_preds = NULL;
5309 free (graph->preds);
5310 graph->preds = NULL;
5311 bitmap_obstack_release (&predbitmap_obstack);
5314 /* Compute the set of variables we can't TBAA prune. */
5316 static void
5317 compute_tbaa_pruning (void)
5319 unsigned int size = VEC_length (varinfo_t, varmap);
5320 unsigned int i;
5321 bool any;
5323 changed_count = 0;
5324 changed = sbitmap_alloc (size);
5325 sbitmap_zero (changed);
5327 /* Mark all initial no_tbaa_pruning nodes as changed. */
5328 any = false;
5329 for (i = 0; i < size; ++i)
5331 varinfo_t ivi = get_varinfo (i);
5333 if (find (i) == i && ivi->no_tbaa_pruning)
5335 any = true;
5336 if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
5337 || VEC_length (constraint_t, graph->complex[i]) > 0)
5339 SET_BIT (changed, i);
5340 ++changed_count;
5345 while (changed_count > 0)
5347 struct topo_info *ti = init_topo_info ();
5348 ++stats.iterations;
5350 compute_topo_order (graph, ti);
5352 while (VEC_length (unsigned, ti->topo_order) != 0)
5354 bitmap_iterator bi;
5356 i = VEC_pop (unsigned, ti->topo_order);
5358 /* If this variable is not a representative, skip it. */
5359 if (find (i) != i)
5360 continue;
5362 /* If the node has changed, we need to process the complex
5363 constraints and outgoing edges again. */
5364 if (TEST_BIT (changed, i))
5366 unsigned int j;
5367 constraint_t c;
5368 VEC(constraint_t,heap) *complex = graph->complex[i];
5370 RESET_BIT (changed, i);
5371 --changed_count;
5373 /* Process the complex copy constraints. */
5374 for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
5376 if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
5378 varinfo_t lhsvi = get_varinfo (find (c->lhs.var));
5380 if (!lhsvi->no_tbaa_pruning)
5382 lhsvi->no_tbaa_pruning = true;
5383 if (!TEST_BIT (changed, lhsvi->id))
5385 SET_BIT (changed, lhsvi->id);
5386 ++changed_count;
5392 /* Propagate to all successors. */
5393 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
5395 unsigned int to = find (j);
5396 varinfo_t tovi = get_varinfo (to);
5398 /* Don't propagate to ourselves. */
5399 if (to == i)
5400 continue;
5402 if (!tovi->no_tbaa_pruning)
5404 tovi->no_tbaa_pruning = true;
5405 if (!TEST_BIT (changed, to))
5407 SET_BIT (changed, to);
5408 ++changed_count;
5415 free_topo_info (ti);
5418 sbitmap_free (changed);
5420 if (any)
5422 for (i = 0; i < size; ++i)
5424 varinfo_t ivi = get_varinfo (i);
5425 varinfo_t ivip = get_varinfo (find (i));
5427 if (ivip->no_tbaa_pruning)
5429 tree var = ivi->decl;
5431 if (TREE_CODE (var) == SSA_NAME)
5432 var = SSA_NAME_VAR (var);
5434 if (POINTER_TYPE_P (TREE_TYPE (var)))
5436 DECL_NO_TBAA_P (var) = 1;
5438 /* Tell the RTL layer that this pointer can alias
5439 anything. */
5440 DECL_POINTER_ALIAS_SET (var) = 0;
5447 /* Create points-to sets for the current function. See the comments
5448 at the start of the file for an algorithmic overview. */
5450 void
5451 compute_points_to_sets (void)
5453 struct scc_info *si;
5454 basic_block bb;
5456 timevar_push (TV_TREE_PTA);
5458 init_alias_vars ();
5459 init_alias_heapvars ();
5461 intra_create_variable_infos ();
5463 /* Now walk all statements and derive aliases. */
5464 FOR_EACH_BB (bb)
5466 gimple_stmt_iterator gsi;
5468 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5470 gimple phi = gsi_stmt (gsi);
5472 if (is_gimple_reg (gimple_phi_result (phi)))
5473 find_func_aliases (phi);
5476 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
5478 gimple stmt = gsi_stmt (gsi);
5480 find_func_aliases (stmt);
5482 /* The information in GIMPLE_CHANGE_DYNAMIC_TYPE statements
5483 has now been captured, and we can remove them. */
5484 if (gimple_code (stmt) == GIMPLE_CHANGE_DYNAMIC_TYPE)
5485 gsi_remove (&gsi, true);
5486 else
5487 gsi_next (&gsi);
5492 if (dump_file)
5494 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5495 dump_constraints (dump_file);
5498 if (dump_file)
5499 fprintf (dump_file,
5500 "\nCollapsing static cycles and doing variable "
5501 "substitution\n");
5503 init_graph (VEC_length (varinfo_t, varmap) * 2);
5505 if (dump_file)
5506 fprintf (dump_file, "Building predecessor graph\n");
5507 build_pred_graph ();
5509 if (dump_file)
5510 fprintf (dump_file, "Detecting pointer and location "
5511 "equivalences\n");
5512 si = perform_var_substitution (graph);
5514 if (dump_file)
5515 fprintf (dump_file, "Rewriting constraints and unifying "
5516 "variables\n");
5517 rewrite_constraints (graph, si);
5518 free_var_substitution_info (si);
5520 build_succ_graph ();
5522 if (dump_file && (dump_flags & TDF_GRAPH))
5523 dump_constraint_graph (dump_file);
5525 move_complex_constraints (graph);
5527 if (dump_file)
5528 fprintf (dump_file, "Uniting pointer but not location equivalent "
5529 "variables\n");
5530 unite_pointer_equivalences (graph);
5532 if (dump_file)
5533 fprintf (dump_file, "Finding indirect cycles\n");
5534 find_indirect_cycles (graph);
5536 /* Implicit nodes and predecessors are no longer necessary at this
5537 point. */
5538 remove_preds_and_fake_succs (graph);
5540 if (dump_file)
5541 fprintf (dump_file, "Solving graph\n");
5543 solve_graph (graph);
5545 compute_tbaa_pruning ();
5547 if (dump_file)
5548 dump_sa_points_to_info (dump_file);
5550 have_alias_info = true;
5552 timevar_pop (TV_TREE_PTA);
5556 /* Delete created points-to sets. */
5558 void
5559 delete_points_to_sets (void)
5561 unsigned int i;
5563 htab_delete (shared_bitmap_table);
5564 if (dump_file && (dump_flags & TDF_STATS))
5565 fprintf (dump_file, "Points to sets created:%d\n",
5566 stats.points_to_sets_created);
5568 pointer_map_destroy (vi_for_tree);
5569 bitmap_obstack_release (&pta_obstack);
5570 VEC_free (constraint_t, heap, constraints);
5572 for (i = 0; i < graph->size; i++)
5573 VEC_free (constraint_t, heap, graph->complex[i]);
5574 free (graph->complex);
5576 free (graph->rep);
5577 free (graph->succs);
5578 free (graph->pe);
5579 free (graph->pe_rep);
5580 free (graph->indirect_cycles);
5581 free (graph);
5583 VEC_free (varinfo_t, heap, varmap);
5584 free_alloc_pool (variable_info_pool);
5585 free_alloc_pool (constraint_pool);
5586 have_alias_info = false;
5589 /* Return true if we should execute IPA PTA. */
5590 static bool
5591 gate_ipa_pta (void)
5593 return (flag_ipa_pta
5594 /* Don't bother doing anything if the program has errors. */
5595 && !(errorcount || sorrycount));
5598 /* Execute the driver for IPA PTA. */
5599 static unsigned int
5600 ipa_pta_execute (void)
5602 struct cgraph_node *node;
5603 struct scc_info *si;
5605 in_ipa_mode = 1;
5606 init_alias_heapvars ();
5607 init_alias_vars ();
5609 for (node = cgraph_nodes; node; node = node->next)
5611 if (!node->analyzed || cgraph_is_master_clone (node))
5613 unsigned int varid;
5615 varid = create_function_info_for (node->decl,
5616 cgraph_node_name (node));
5617 if (node->local.externally_visible)
5619 varinfo_t fi = get_varinfo (varid);
5620 for (; fi; fi = fi->next)
5621 make_constraint_from (fi, anything_id);
5625 for (node = cgraph_nodes; node; node = node->next)
5627 if (node->analyzed && cgraph_is_master_clone (node))
5629 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
5630 basic_block bb;
5631 tree old_func_decl = current_function_decl;
5632 if (dump_file)
5633 fprintf (dump_file,
5634 "Generating constraints for %s\n",
5635 cgraph_node_name (node));
5636 push_cfun (func);
5637 current_function_decl = node->decl;
5639 FOR_EACH_BB_FN (bb, func)
5641 gimple_stmt_iterator gsi;
5643 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
5644 gsi_next (&gsi))
5646 gimple phi = gsi_stmt (gsi);
5648 if (is_gimple_reg (gimple_phi_result (phi)))
5649 find_func_aliases (phi);
5652 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5653 find_func_aliases (gsi_stmt (gsi));
5655 current_function_decl = old_func_decl;
5656 pop_cfun ();
5658 else
5660 /* Make it point to anything. */
5664 if (dump_file)
5666 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5667 dump_constraints (dump_file);
5670 if (dump_file)
5671 fprintf (dump_file,
5672 "\nCollapsing static cycles and doing variable "
5673 "substitution:\n");
5675 init_graph (VEC_length (varinfo_t, varmap) * 2);
5676 build_pred_graph ();
5677 si = perform_var_substitution (graph);
5678 rewrite_constraints (graph, si);
5679 free_var_substitution_info (si);
5681 build_succ_graph ();
5682 move_complex_constraints (graph);
5683 unite_pointer_equivalences (graph);
5684 find_indirect_cycles (graph);
5686 /* Implicit nodes and predecessors are no longer necessary at this
5687 point. */
5688 remove_preds_and_fake_succs (graph);
5690 if (dump_file)
5691 fprintf (dump_file, "\nSolving graph\n");
5693 solve_graph (graph);
5695 if (dump_file)
5696 dump_sa_points_to_info (dump_file);
5698 in_ipa_mode = 0;
5699 delete_alias_heapvars ();
5700 delete_points_to_sets ();
5701 return 0;
5704 struct simple_ipa_opt_pass pass_ipa_pta =
5707 SIMPLE_IPA_PASS,
5708 "pta", /* name */
5709 gate_ipa_pta, /* gate */
5710 ipa_pta_execute, /* execute */
5711 NULL, /* sub */
5712 NULL, /* next */
5713 0, /* static_pass_number */
5714 TV_IPA_PTA, /* tv_id */
5715 0, /* properties_required */
5716 0, /* properties_provided */
5717 0, /* properties_destroyed */
5718 0, /* todo_flags_start */
5719 TODO_update_ssa /* todo_flags_finish */
5723 /* Initialize the heapvar for statement mapping. */
5724 void
5725 init_alias_heapvars (void)
5727 if (!heapvar_for_stmt)
5728 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
5729 NULL);
5732 void
5733 delete_alias_heapvars (void)
5735 htab_delete (heapvar_for_stmt);
5736 heapvar_for_stmt = NULL;
5739 #include "gt-tree-ssa-structalias.h"