gcc/tree-ssa-structalias.c
/* Tree based points-to analysis
   Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dberlin@dberlin.org>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "obstack.h"
#include "bitmap.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "errors.h"
#include "diagnostic.h"
#include "tree.h"
#include "c-common.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "varray.h"
#include "c-tree.h"
#include "tree-gimple.h"
#include "hashtab.h"
#include "function.h"
#include "cgraph.h"
#include "tree-pass.h"
#include "timevar.h"
#include "alloc-pool.h"
#include "splay-tree.h"
#include "params.h"
#include "tree-ssa-structalias.h"
#include "cgraph.h"
#include "alias.h"
#include "pointer-set.h"
/* The idea behind this analyzer is to generate set constraints from the
   program, then solve the resulting constraints in order to generate the
   points-to sets.

   Set constraints are a way of modeling program analysis problems that
   involve sets.  They consist of an inclusion constraint language,
   describing the variables (each variable is a set) and operations that
   are involved on the variables, and a set of rules that derive facts
   from these operations.  To solve a system of set constraints, you derive
   all possible facts under the rules, which gives you the correct sets
   as a consequence.

   See "Efficient Field-sensitive pointer analysis for C" by David
   J. Pearce and Paul H. J. Kelly and Chris Hankin, at
   http://citeseer.ist.psu.edu/pearce04efficient.html

   Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
   of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
   http://citeseer.ist.psu.edu/heintze01ultrafast.html

   There are three types of real constraint expressions, DEREF,
   ADDRESSOF, and SCALAR.  Each constraint expression consists
   of a constraint type, a variable, and an offset.

   SCALAR is a constraint expression type used to represent x, whether
   it appears on the LHS or the RHS of a statement.
   DEREF is a constraint expression type used to represent *x, whether
   it appears on the LHS or the RHS of a statement.
   ADDRESSOF is a constraint expression used to represent &x, whether
   it appears on the LHS or the RHS of a statement.

   Each pointer variable in the program is assigned an integer id, and
   each field of a structure variable is assigned an integer id as well.

   Structure variables are linked to their list of fields through a "next
   field" in each variable that points to the next field in offset
   order.
   Each variable for a structure field has

     1. "size", that tells the size in bits of that field.
     2. "fullsize", that tells the size in bits of the entire structure.
     3. "offset", that tells the offset in bits from the beginning of the
        structure to this field.

   Thus,
   struct f
   {
     int a;
     int b;
   } foo;
   int *bar;

   looks like

   foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
   foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
   bar -> id 3, size 32, offset 0, fullsize 32, next NULL


   In order to solve the system of set constraints, the following is
   done:

   1. Each constraint variable x has a solution set associated with it,
   Sol(x).

   2. Constraints are separated into direct, copy, and complex.
   Direct constraints are ADDRESSOF constraints that require no extra
   processing, such as P = &Q.
   Copy constraints are those of the form P = Q.
   Complex constraints are all the constraints involving dereferences
   and offsets (including offsetted copies).

   3. All direct constraints of the form P = &Q are processed, such
   that Q is added to Sol(P).

   4. All complex constraints for a given constraint variable are stored in a
   linked list attached to that variable's node.

   5. A directed graph is built out of the copy constraints.  Each
   constraint variable is a node in the graph, and an edge from
   Q to P is added for each copy constraint of the form P = Q.

   6. The graph is then walked, and solution sets are
   propagated along the copy edges, such that an edge from Q to P
   causes Sol(P) <- Sol(P) union Sol(Q).

   7. As we visit each node, all complex constraints associated with
   that node are processed by adding appropriate copy edges to the graph, or the
   appropriate variables to the solution set.

   8. The process of walking the graph is iterated until no solution
   sets change.

   Prior to walking the graph in steps 6 and 7, we perform static
   cycle elimination on the constraint graph, as well
   as off-line variable substitution.

   TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
   on and turned into anything), but isn't.  You can just see what offset
   inside the pointed-to struct it's going to access.

   TODO: Constant bounded arrays can be handled as if they were structs of the
   same number of elements.

   TODO: Modeling heap and incoming pointers becomes much better if we
   add fields to them as we discover them, which we could do.

   TODO: We could handle unions, but to be honest, it's probably not
   worth the pain or slowdown.  */
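/* For illustration only (this example is not part of the original
   sources; the variable names and ids are hypothetical): given the
   fragment

     int a, b;
     int *p, **q;

     p = &a;        ->   p = &a      (ADDRESSOF on the RHS, a direct constraint)
     q = &p;        ->   q = &p
     *q = &b;       ->   *q = &b     (DEREF on the LHS, a complex constraint)
     p = *q;        ->   p = *q      (DEREF on the RHS, a complex constraint)

   solving the constraints yields Sol(q) = { p } and, because the member p
   of Sol(q) receives &b through the *q constraint, Sol(p) = { a, b }.  */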
static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
htab_t heapvar_for_stmt;

static bool use_field_sensitive = true;
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps. */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets.  */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables. */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps.  */
static bitmap_obstack iteration_obstack;

static unsigned int create_variable_info_for (tree, const char *);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

DEF_VEC_P(constraint_t);
DEF_VEC_ALLOC_P(constraint_t,heap);

#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)

static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;
struct variable_info
{
  /* ID of this variable  */
  unsigned int id;

  /* Name of this variable */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Offset of this variable, in bits, from the base variable  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* A link to the variable for the next field in this structure.  */
  struct variable_info *next;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var:1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var:1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var:1;

  /* True for variables that have unions somewhere in them.  */
  unsigned int has_union:1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var:1;

  /* True if we may not use TBAA to prune references to this
     variable.  This is used for C++ placement new.  */
  unsigned int no_tbaa_pruning : 1;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  */
  bitmap oldsolution;

  /* Variable id this was collapsed to due to type unsafety.  This
     should be unused completely after build_succ_graph, or something
     is broken.  */
  struct variable_info *collapsed_to;
};
typedef struct variable_info *varinfo_t;

static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);

/* Pool of variable info structures.  */
static alloc_pool variable_info_pool;

DEF_VEC_P(varinfo_t);

DEF_VEC_ALLOC_P(varinfo_t, heap);

/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id.  */
static VEC(varinfo_t,heap) *varmap;
/* Return the varmap element N */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return VEC_index (varinfo_t, varmap, n);
}

/* Return the varmap element N, following the collapsed_to link.  */

static inline varinfo_t
get_varinfo_fc (unsigned int n)
{
  varinfo_t v = VEC_index (varinfo_t, varmap, n);

  if (v->collapsed_to)
    return v->collapsed_to;
  return v;
}

/* Static IDs for the special variables.  */
enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
       escaped_id = 3, nonlocal_id = 4, callused_id = 5, integer_id = 6 };
/* Variable that represents the unknown pointer.  */
static varinfo_t var_anything;
static tree anything_tree;

/* Variable that represents the NULL pointer.  */
static varinfo_t var_nothing;
static tree nothing_tree;

/* Variable that represents read only memory.  */
static varinfo_t var_readonly;
static tree readonly_tree;

/* Variable that represents escaped memory.  */
static varinfo_t var_escaped;
static tree escaped_tree;

/* Variable that represents nonlocal memory.  */
static varinfo_t var_nonlocal;
static tree nonlocal_tree;

/* Variable that represents call-used memory.  */
static varinfo_t var_callused;
static tree callused_tree;

/* Variable that represents integers.  This is used for when people do things
   like &0->a.b.  */
static varinfo_t var_integer;
static tree integer_tree;
/* Lookup a heap var for FROM, and return it if we find one.  */

static tree
heapvar_lookup (tree from)
{
  struct tree_map *h, in;
  in.base.from = from;

  h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
					       htab_hash_pointer (from));
  if (h)
    return h->to;
  return NULL_TREE;
}

/* Insert a mapping FROM->TO in the heap var for statement
   hashtable.  */

static void
heapvar_insert (tree from, tree to)
{
  struct tree_map *h;
  void **loc;

  h = GGC_NEW (struct tree_map);
  h->hash = htab_hash_pointer (from);
  h->base.from = from;
  h->to = to;
  loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
  *(struct tree_map **) loc = h;
}
/* Return a new variable info structure for a variable with id ID,
   named NAME, and associated with tree T.  */

static varinfo_t
new_var_info (tree t, unsigned int id, const char *name)
{
  varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
  tree var;

  ret->id = id;
  ret->name = name;
  ret->decl = t;
  ret->is_artificial_var = false;
  ret->is_heap_var = false;
  ret->is_special_var = false;
  ret->is_unknown_size_var = false;
  ret->has_union = false;
  var = t;
  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);
  ret->no_tbaa_pruning = (DECL_P (var)
			  && POINTER_TYPE_P (TREE_TYPE (var))
			  && DECL_NO_TBAA_P (var));
  ret->solution = BITMAP_ALLOC (&pta_obstack);
  ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
  ret->next = NULL;
  ret->collapsed_to = NULL;
  return ret;
}
typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.  */
  unsigned HOST_WIDE_INT offset;
};

typedef struct constraint_expr ce_s;
DEF_VEC_O(ce_s);
DEF_VEC_ALLOC_O(ce_s, heap);
static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool);
static void get_constraint_for (tree, VEC(ce_s, heap) **);
static void do_deref (VEC (ce_s, heap) **);
/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/
struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from.  */

static VEC(constraint_t,heap) *constraints;
static alloc_pool constraint_pool;


DEF_VEC_I(int);
DEF_VEC_ALLOC_I(int, heap);
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map.  */
  unsigned int size;

  /* Explicit successors of each node. */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution). */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution).  */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified.  */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution.  */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built).  */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets.  */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution.  */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding.  */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph.  */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes.  */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution.  */
  bitmap address_taken;

  /* True if points_to bitmap for this node is stored in the hash
     table.  */
  sbitmap pt_used;

  /* Number of incoming edges remaining to be processed by pointer
     equivalence.
     Used for variable substitution.  */
  unsigned int *number_incoming;


  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  */
  VEC(constraint_t,heap) **complex;
};

static constraint_graph_t graph;
/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  */
#define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
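/* A worked example with hypothetical numbers: if varmap currently holds
   10 variables with ids 0 through 9, then FIRST_REF_NODE is 10, the REF
   node standing for "*x" where x has id 3 is FIRST_REF_NODE + 3 = 13,
   and LAST_REF_NODE is 10 + (10 - 1) = 19, so the offline graphs use
   node ids 0 through 19.  */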
/* Return the representative node for NODE, if NODE has been unioned
   with another NODE.
   This function performs path compression along the way to finding
   the representative.  */

static unsigned int
find (unsigned int node)
{
  gcc_assert (node < graph->size);
  if (graph->rep[node] != node)
    return graph->rep[node] = find (graph->rep[node]);
  return node;
}

/* Union the TO and FROM nodes into the TO node.
   Note that at some point in the future, we may want to do
   union-by-rank, in which case we are going to have to return the
   node we unified to.  */

static bool
unite (unsigned int to, unsigned int from)
{
  gcc_assert (to < graph->size && from < graph->size);
  if (to != from && graph->rep[from] != to)
    {
      graph->rep[from] = to;
      return true;
    }
  return false;
}
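/* An example of the path compression done by find (the contents of rep
   are hypothetical): starting from graph->rep = { 0, 0, 1, 2 }, find (3)
   walks 3 -> 2 -> 1 -> 0 and rewrites the array to { 0, 0, 0, 0 } on the
   way back out, so later lookups are constant time.  A subsequent
   unite (0, 3) returns false, since node 3 already has representative 0.  */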
/* Create a new constraint consisting of LHS and RHS expressions.  */

static constraint_t
new_constraint (const struct constraint_expr lhs,
		const struct constraint_expr rhs)
{
  constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
  ret->lhs = lhs;
  ret->rhs = rhs;
  return ret;
}

/* Print out constraint C to FILE.  */

void
dump_constraint (FILE *file, constraint_t c)
{
  if (c->lhs.type == ADDRESSOF)
    fprintf (file, "&");
  else if (c->lhs.type == DEREF)
    fprintf (file, "*");
  fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
  if (c->lhs.offset != 0)
    fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
  fprintf (file, " = ");
  if (c->rhs.type == ADDRESSOF)
    fprintf (file, "&");
  else if (c->rhs.type == DEREF)
    fprintf (file, "*");
  fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
  if (c->rhs.offset != 0)
    fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
  fprintf (file, "\n");
}

/* Print out constraint C to stderr.  */

void
debug_constraint (constraint_t c)
{
  dump_constraint (stderr, c);
}

/* Print out all constraints to FILE */

void
dump_constraints (FILE *file)
{
  int i;
  constraint_t c;
  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    dump_constraint (file, c);
}

/* Print out all constraints to stderr.  */

void
debug_constraints (void)
{
  dump_constraints (stderr);
}
/* SOLVER FUNCTIONS

   The solver is a simple worklist solver, that works on the following
   algorithm:

   sbitmap changed_nodes = all zeroes;
   changed_count = 0;
   For each node that is not already collapsed:
       changed_count++;
       set bit in changed nodes

   while (changed_count > 0)
   {
     compute topological ordering for constraint graph

     find and collapse cycles in the constraint graph (updating
     changed if necessary)

     for each node (n) in the graph in topological order:
       changed_count--;

       Process each complex constraint associated with the node,
       updating changed if necessary.

       For each outgoing edge from n, propagate the solution from n to
       the destination of the edge, updating changed as necessary.

   } */
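/* A short, hypothetical trace of the algorithm above for the constraints
   p = &a, q = p and r = *q:

     direct processing:          Sol(p) = { a }
     copy edge p -> q:           Sol(q) <- Sol(q) U Sol(p) = { a }
     complex constraint r = *q:  for the member a of Sol(q), a copy edge
                                 a -> r is added and Sol(r) <- Sol(a) = { }

   The next iteration changes no solution set, so the worklist drains and
   the final sets are Sol(p) = Sol(q) = { a } and Sol(r) = { }.  */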
655 /* Return true if two constraint expressions A and B are equal. */
657 static bool
658 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
660 return a.type == b.type && a.var == b.var && a.offset == b.offset;
663 /* Return true if constraint expression A is less than constraint expression
664 B. This is just arbitrary, but consistent, in order to give them an
665 ordering. */
667 static bool
668 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
670 if (a.type == b.type)
672 if (a.var == b.var)
673 return a.offset < b.offset;
674 else
675 return a.var < b.var;
677 else
678 return a.type < b.type;
681 /* Return true if constraint A is less than constraint B. This is just
682 arbitrary, but consistent, in order to give them an ordering. */
684 static bool
685 constraint_less (const constraint_t a, const constraint_t b)
687 if (constraint_expr_less (a->lhs, b->lhs))
688 return true;
689 else if (constraint_expr_less (b->lhs, a->lhs))
690 return false;
691 else
692 return constraint_expr_less (a->rhs, b->rhs);
695 /* Return true if two constraints A and B are equal. */
697 static bool
698 constraint_equal (struct constraint a, struct constraint b)
700 return constraint_expr_equal (a.lhs, b.lhs)
701 && constraint_expr_equal (a.rhs, b.rhs);
705 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
707 static constraint_t
708 constraint_vec_find (VEC(constraint_t,heap) *vec,
709 struct constraint lookfor)
711 unsigned int place;
712 constraint_t found;
714 if (vec == NULL)
715 return NULL;
717 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
718 if (place >= VEC_length (constraint_t, vec))
719 return NULL;
720 found = VEC_index (constraint_t, vec, place);
721 if (!constraint_equal (*found, lookfor))
722 return NULL;
723 return found;
726 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
728 static void
729 constraint_set_union (VEC(constraint_t,heap) **to,
730 VEC(constraint_t,heap) **from)
732 int i;
733 constraint_t c;
735 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
737 if (constraint_vec_find (*to, *c) == NULL)
739 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
740 constraint_less);
741 VEC_safe_insert (constraint_t, heap, *to, place, c);
746 /* Take a solution set SET, add OFFSET to each member of the set, and
747 overwrite SET with the result when done. */
749 static void
750 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
752 bitmap result = BITMAP_ALLOC (&iteration_obstack);
753 unsigned int i;
754 bitmap_iterator bi;
756 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
758 /* If this is a properly sized variable, only add offset if it's
759 less than end. Otherwise, it is globbed to a single
760 variable. */
762 if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
764 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
765 varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
766 if (!v)
767 continue;
768 bitmap_set_bit (result, v->id);
770 else if (get_varinfo (i)->is_artificial_var
771 || get_varinfo (i)->has_union
772 || get_varinfo (i)->is_unknown_size_var)
774 bitmap_set_bit (result, i);
778 bitmap_copy (set, result);
779 BITMAP_FREE (result);
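/* A worked example for solution_set_add, reusing the layout from the
   overview comment at the top of the file (foo.a id 1 offset 0, foo.b
   id 2 offset 32, fullsize 64): adding offset 32 to the set { foo.a }
   looks up the field at offset 0 + 32 = 32 and yields { foo.b }, while
   adding 32 to { foo.b } would reach offset 64, which is outside
   fullsize, so foo.b simply drops out of the result.  */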
782 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
783 process. */
785 static bool
786 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
788 if (inc == 0)
789 return bitmap_ior_into (to, from);
790 else
792 bitmap tmp;
793 bool res;
795 tmp = BITMAP_ALLOC (&iteration_obstack);
796 bitmap_copy (tmp, from);
797 solution_set_add (tmp, inc);
798 res = bitmap_ior_into (to, tmp);
799 BITMAP_FREE (tmp);
800 return res;
804 /* Insert constraint C into the list of complex constraints for graph
805 node VAR. */
807 static void
808 insert_into_complex (constraint_graph_t graph,
809 unsigned int var, constraint_t c)
811 VEC (constraint_t, heap) *complex = graph->complex[var];
812 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
813 constraint_less);
815 /* Only insert constraints that do not already exist. */
816 if (place >= VEC_length (constraint_t, complex)
817 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
818 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
/* Condense two variable nodes into a single variable node, by moving
   all associated info from FROM to TO.  */
825 static void
826 merge_node_constraints (constraint_graph_t graph, unsigned int to,
827 unsigned int from)
829 unsigned int i;
830 constraint_t c;
832 gcc_assert (find (from) == to);
834 /* Move all complex constraints from src node into to node */
835 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
      /* In complex constraints for node FROM, we may have either
         a = *FROM, *FROM = a, or an offsetted constraint; offsetted
         constraints are always added to the RHS node's constraints.  */
841 if (c->rhs.type == DEREF)
842 c->rhs.var = to;
843 else if (c->lhs.type == DEREF)
844 c->lhs.var = to;
845 else
846 c->rhs.var = to;
848 constraint_set_union (&graph->complex[to], &graph->complex[from]);
849 VEC_free (constraint_t, heap, graph->complex[from]);
850 graph->complex[from] = NULL;
854 /* Remove edges involving NODE from GRAPH. */
856 static void
857 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
859 if (graph->succs[node])
860 BITMAP_FREE (graph->succs[node]);
863 /* Merge GRAPH nodes FROM and TO into node TO. */
865 static void
866 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
867 unsigned int from)
869 if (graph->indirect_cycles[from] != -1)
871 /* If we have indirect cycles with the from node, and we have
872 none on the to node, the to node has indirect cycles from the
873 from node now that they are unified.
874 If indirect cycles exist on both, unify the nodes that they
875 are in a cycle with, since we know they are in a cycle with
876 each other. */
877 if (graph->indirect_cycles[to] == -1)
878 graph->indirect_cycles[to] = graph->indirect_cycles[from];
881 /* Merge all the successor edges. */
882 if (graph->succs[from])
884 if (!graph->succs[to])
885 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
886 bitmap_ior_into (graph->succs[to],
887 graph->succs[from]);
890 clear_edges_for_node (graph, from);
894 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
895 it doesn't exist in the graph already. */
897 static void
898 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
899 unsigned int from)
901 if (to == from)
902 return;
904 if (!graph->implicit_preds[to])
905 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
907 if (!bitmap_bit_p (graph->implicit_preds[to], from))
909 stats.num_implicit_edges++;
910 bitmap_set_bit (graph->implicit_preds[to], from);
/* Add a predecessor graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  */
918 static void
919 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
920 unsigned int from)
922 if (!graph->preds[to])
923 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
924 if (!bitmap_bit_p (graph->preds[to], from))
925 bitmap_set_bit (graph->preds[to], from);
928 /* Add a graph edge to GRAPH, going from FROM to TO if
929 it doesn't exist in the graph already.
930 Return false if the edge already existed, true otherwise. */
932 static bool
933 add_graph_edge (constraint_graph_t graph, unsigned int to,
934 unsigned int from)
936 if (to == from)
938 return false;
940 else
942 bool r = false;
944 if (!graph->succs[from])
945 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
946 if (!bitmap_bit_p (graph->succs[from], to))
948 r = true;
949 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
950 stats.num_edges++;
951 bitmap_set_bit (graph->succs[from], to);
953 return r;
/* Return true if {DEST, SRC} is an existing graph edge in GRAPH.  */
960 static bool
961 valid_graph_edge (constraint_graph_t graph, unsigned int src,
962 unsigned int dest)
964 return (graph->succs[dest]
965 && bitmap_bit_p (graph->succs[dest], src));
968 /* Initialize the constraint graph structure to contain SIZE nodes. */
970 static void
971 init_graph (unsigned int size)
973 unsigned int j;
975 graph = XCNEW (struct constraint_graph);
976 graph->size = size;
977 graph->succs = XCNEWVEC (bitmap, graph->size);
978 graph->indirect_cycles = XNEWVEC (int, graph->size);
979 graph->rep = XNEWVEC (unsigned int, graph->size);
980 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
981 graph->pe = XCNEWVEC (unsigned int, graph->size);
982 graph->pe_rep = XNEWVEC (int, graph->size);
984 for (j = 0; j < graph->size; j++)
986 graph->rep[j] = j;
987 graph->pe_rep[j] = -1;
988 graph->indirect_cycles[j] = -1;
992 /* Build the constraint graph, adding only predecessor edges right now. */
994 static void
995 build_pred_graph (void)
997 int i;
998 constraint_t c;
999 unsigned int j;
1001 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1002 graph->preds = XCNEWVEC (bitmap, graph->size);
1003 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1004 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1005 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1006 graph->points_to = XCNEWVEC (bitmap, graph->size);
1007 graph->eq_rep = XNEWVEC (int, graph->size);
1008 graph->direct_nodes = sbitmap_alloc (graph->size);
1009 graph->pt_used = sbitmap_alloc (graph->size);
1010 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1011 graph->number_incoming = XCNEWVEC (unsigned int, graph->size);
1012 sbitmap_zero (graph->direct_nodes);
1013 sbitmap_zero (graph->pt_used);
1015 for (j = 0; j < FIRST_REF_NODE; j++)
1017 if (!get_varinfo (j)->is_special_var)
1018 SET_BIT (graph->direct_nodes, j);
1021 for (j = 0; j < graph->size; j++)
1022 graph->eq_rep[j] = -1;
1024 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1025 graph->indirect_cycles[j] = -1;
1027 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1029 struct constraint_expr lhs = c->lhs;
1030 struct constraint_expr rhs = c->rhs;
1031 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
1032 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
1034 if (lhs.type == DEREF)
1036 /* *x = y. */
1037 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1038 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1040 else if (rhs.type == DEREF)
1042 /* x = *y */
1043 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1044 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1045 else
1046 RESET_BIT (graph->direct_nodes, lhsvar);
1048 else if (rhs.type == ADDRESSOF)
1050 /* x = &y */
1051 if (graph->points_to[lhsvar] == NULL)
1052 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1053 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1055 if (graph->pointed_by[rhsvar] == NULL)
1056 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1057 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1059 /* Implicitly, *x = y */
1060 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1062 RESET_BIT (graph->direct_nodes, rhsvar);
1063 bitmap_set_bit (graph->address_taken, rhsvar);
1065 else if (lhsvar > anything_id
1066 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1068 /* x = y */
1069 add_pred_graph_edge (graph, lhsvar, rhsvar);
1070 /* Implicitly, *x = *y */
1071 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1072 FIRST_REF_NODE + rhsvar);
1074 else if (lhs.offset != 0 || rhs.offset != 0)
1076 if (rhs.offset != 0)
1077 RESET_BIT (graph->direct_nodes, lhs.var);
1078 else if (lhs.offset != 0)
1079 RESET_BIT (graph->direct_nodes, rhs.var);
1084 /* Build the constraint graph, adding successor edges. */
1086 static void
1087 build_succ_graph (void)
1089 int i;
1090 constraint_t c;
1092 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1094 struct constraint_expr lhs;
1095 struct constraint_expr rhs;
1096 unsigned int lhsvar;
1097 unsigned int rhsvar;
1099 if (!c)
1100 continue;
1102 lhs = c->lhs;
1103 rhs = c->rhs;
1104 lhsvar = find (get_varinfo_fc (lhs.var)->id);
1105 rhsvar = find (get_varinfo_fc (rhs.var)->id);
1107 if (lhs.type == DEREF)
1109 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1110 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1112 else if (rhs.type == DEREF)
1114 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1115 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1117 else if (rhs.type == ADDRESSOF)
1119 /* x = &y */
1120 gcc_assert (find (get_varinfo_fc (rhs.var)->id)
1121 == get_varinfo_fc (rhs.var)->id);
1122 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1124 else if (lhsvar > anything_id
1125 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1127 add_graph_edge (graph, lhsvar, rhsvar);
1133 /* Changed variables on the last iteration. */
1134 static unsigned int changed_count;
1135 static sbitmap changed;
1137 DEF_VEC_I(unsigned);
1138 DEF_VEC_ALLOC_I(unsigned,heap);
1141 /* Strongly Connected Component visitation info. */
1143 struct scc_info
1145 sbitmap visited;
1146 sbitmap deleted;
1147 unsigned int *dfs;
1148 unsigned int *node_mapping;
1149 int current_index;
1150 VEC(unsigned,heap) *scc_stack;
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */
1165 static void
1166 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1168 unsigned int i;
1169 bitmap_iterator bi;
1170 unsigned int my_dfs;
1172 SET_BIT (si->visited, n);
1173 si->dfs[n] = si->current_index ++;
1174 my_dfs = si->dfs[n];
1176 /* Visit all the successors. */
1177 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1179 unsigned int w;
1181 if (i > LAST_REF_NODE)
1182 break;
1184 w = find (i);
1185 if (TEST_BIT (si->deleted, w))
1186 continue;
1188 if (!TEST_BIT (si->visited, w))
1189 scc_visit (graph, si, w);
1191 unsigned int t = find (w);
1192 unsigned int nnode = find (n);
1193 gcc_assert (nnode == n);
1195 if (si->dfs[t] < si->dfs[nnode])
1196 si->dfs[n] = si->dfs[t];
1200 /* See if any components have been identified. */
1201 if (si->dfs[n] == my_dfs)
1203 if (VEC_length (unsigned, si->scc_stack) > 0
1204 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1206 bitmap scc = BITMAP_ALLOC (NULL);
1207 bool have_ref_node = n >= FIRST_REF_NODE;
1208 unsigned int lowest_node;
1209 bitmap_iterator bi;
1211 bitmap_set_bit (scc, n);
1213 while (VEC_length (unsigned, si->scc_stack) != 0
1214 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1216 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1218 bitmap_set_bit (scc, w);
1219 if (w >= FIRST_REF_NODE)
1220 have_ref_node = true;
1223 lowest_node = bitmap_first_set_bit (scc);
1224 gcc_assert (lowest_node < FIRST_REF_NODE);
1226 /* Collapse the SCC nodes into a single node, and mark the
1227 indirect cycles. */
1228 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1230 if (i < FIRST_REF_NODE)
1232 if (unite (lowest_node, i))
1233 unify_nodes (graph, lowest_node, i, false);
1235 else
1237 unite (lowest_node, i);
1238 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1242 SET_BIT (si->deleted, n);
1244 else
1245 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1248 /* Unify node FROM into node TO, updating the changed count if
1249 necessary when UPDATE_CHANGED is true. */
1251 static void
1252 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1253 bool update_changed)
1256 gcc_assert (to != from && find (to) == to);
1257 if (dump_file && (dump_flags & TDF_DETAILS))
1258 fprintf (dump_file, "Unifying %s to %s\n",
1259 get_varinfo (from)->name,
1260 get_varinfo (to)->name);
1262 if (update_changed)
1263 stats.unified_vars_dynamic++;
1264 else
1265 stats.unified_vars_static++;
1267 merge_graph_nodes (graph, to, from);
1268 merge_node_constraints (graph, to, from);
1270 if (get_varinfo (from)->no_tbaa_pruning)
1271 get_varinfo (to)->no_tbaa_pruning = true;
1273 /* Mark TO as changed if FROM was changed. If TO was already marked
1274 as changed, decrease the changed count. */
1276 if (update_changed && TEST_BIT (changed, from))
1278 RESET_BIT (changed, from);
1279 if (!TEST_BIT (changed, to))
1280 SET_BIT (changed, to);
1281 else
1283 gcc_assert (changed_count > 0);
1284 changed_count--;
1287 if (get_varinfo (from)->solution)
1289 /* If the solution changes because of the merging, we need to mark
1290 the variable as changed. */
1291 if (bitmap_ior_into (get_varinfo (to)->solution,
1292 get_varinfo (from)->solution))
1294 if (update_changed && !TEST_BIT (changed, to))
1296 SET_BIT (changed, to);
1297 changed_count++;
1301 BITMAP_FREE (get_varinfo (from)->solution);
1302 BITMAP_FREE (get_varinfo (from)->oldsolution);
1304 if (stats.iterations > 0)
1306 BITMAP_FREE (get_varinfo (to)->oldsolution);
1307 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
1310 if (valid_graph_edge (graph, to, to))
1312 if (graph->succs[to])
1313 bitmap_clear_bit (graph->succs[to], to);
1317 /* Information needed to compute the topological ordering of a graph. */
1319 struct topo_info
1321 /* sbitmap of visited nodes. */
1322 sbitmap visited;
1323 /* Array that stores the topological order of the graph, *in
1324 reverse*. */
1325 VEC(unsigned,heap) *topo_order;
1329 /* Initialize and return a topological info structure. */
1331 static struct topo_info *
1332 init_topo_info (void)
1334 size_t size = graph->size;
1335 struct topo_info *ti = XNEW (struct topo_info);
1336 ti->visited = sbitmap_alloc (size);
1337 sbitmap_zero (ti->visited);
1338 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1339 return ti;
1343 /* Free the topological sort info pointed to by TI. */
1345 static void
1346 free_topo_info (struct topo_info *ti)
1348 sbitmap_free (ti->visited);
1349 VEC_free (unsigned, heap, ti->topo_order);
1350 free (ti);
1353 /* Visit the graph in topological order, and store the order in the
1354 topo_info structure. */
1356 static void
1357 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1358 unsigned int n)
1360 bitmap_iterator bi;
1361 unsigned int j;
1363 SET_BIT (ti->visited, n);
1365 if (graph->succs[n])
1366 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1368 if (!TEST_BIT (ti->visited, j))
1369 topo_visit (graph, ti, j);
1372 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1375 /* Return true if variable N + OFFSET is a legal field of N. */
1377 static bool
1378 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1380 varinfo_t ninfo = get_varinfo (n);
1382 /* For things we've globbed to single variables, any offset into the
1383 variable acts like the entire variable, so that it becomes offset
1384 0. */
1385 if (ninfo->is_special_var
1386 || ninfo->is_artificial_var
1387 || ninfo->is_unknown_size_var)
1389 *offset = 0;
1390 return true;
1392 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1395 /* Process a constraint C that represents x = *y, using DELTA as the
1396 starting solution. */
1398 static void
1399 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1400 bitmap delta)
1402 unsigned int lhs = c->lhs.var;
1403 bool flag = false;
1404 bitmap sol = get_varinfo (lhs)->solution;
1405 unsigned int j;
1406 bitmap_iterator bi;
1408 if (bitmap_bit_p (delta, anything_id))
1410 flag = !bitmap_bit_p (sol, anything_id);
1411 if (flag)
1412 bitmap_set_bit (sol, anything_id);
1413 goto done;
1416 /* For each variable j in delta (Sol(y)), add
1417 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1418 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1420 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
1421 if (type_safe (j, &roffset))
1423 varinfo_t v;
1424 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1425 unsigned int t;
1427 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1428 if (!v)
1429 continue;
1430 t = find (v->id);
1432 /* Adding edges from the special vars is pointless.
1433 They don't have sets that can change. */
1434 if (get_varinfo (t)->is_special_var)
1435 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1436 /* Merging the solution from ESCAPED needlessly increases
1437 the set. Use ESCAPED as representative instead.
1438 Same for CALLUSED. */
1439 else if ((get_varinfo (t)->id == escaped_id
1440 || get_varinfo (t)->id == callused_id)
1441 && !bitmap_bit_p (sol, get_varinfo (t)->id))
1443 bitmap_set_bit (sol, get_varinfo (t)->id);
1444 flag = true;
1446 else if (add_graph_edge (graph, lhs, t))
1447 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1451 done:
1452 /* If the LHS solution changed, mark the var as changed. */
1453 if (flag)
1455 get_varinfo (lhs)->solution = sol;
1456 if (!TEST_BIT (changed, lhs))
1458 SET_BIT (changed, lhs);
1459 changed_count++;
1464 /* Process a constraint C that represents *x = y. */
1466 static void
1467 do_ds_constraint (constraint_t c, bitmap delta)
1469 unsigned int rhs = c->rhs.var;
1470 bitmap sol = get_varinfo (rhs)->solution;
1471 unsigned int j;
1472 bitmap_iterator bi;
1474 if (bitmap_bit_p (sol, anything_id))
1476 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1478 varinfo_t jvi = get_varinfo (j);
1479 unsigned int t;
1480 unsigned int loff = c->lhs.offset;
1481 unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
1482 varinfo_t v;
1484 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1485 if (!v)
1486 continue;
1487 t = find (v->id);
1489 if (!bitmap_bit_p (get_varinfo (t)->solution, anything_id))
1491 bitmap_set_bit (get_varinfo (t)->solution, anything_id);
1492 if (!TEST_BIT (changed, t))
1494 SET_BIT (changed, t);
1495 changed_count++;
1499 return;
1502 /* For each member j of delta (Sol(x)), add an edge from y to j and
1503 union Sol(y) into Sol(j) */
1504 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1506 unsigned HOST_WIDE_INT loff = c->lhs.offset;
1507 if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
1509 varinfo_t v;
1510 unsigned int t;
1511 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1512 bitmap tmp;
1514 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1515 if (!v)
1516 continue;
1517 t = find (v->id);
1518 tmp = get_varinfo (t)->solution;
1520 if (set_union_with_increment (tmp, sol, 0))
1522 get_varinfo (t)->solution = tmp;
1523 if (t == rhs)
1524 sol = get_varinfo (rhs)->solution;
1525 if (!TEST_BIT (changed, t))
1527 SET_BIT (changed, t);
1528 changed_count++;
/* Handle a non-simple constraint, i.e. one that requires iteration:
   *x = &y, x = *y, *x = y, and x = y with offsets involved.  */
1538 static void
1539 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1541 if (c->lhs.type == DEREF)
1543 if (c->rhs.type == ADDRESSOF)
1545 gcc_unreachable();
1547 else
1549 /* *x = y */
1550 do_ds_constraint (c, delta);
1553 else if (c->rhs.type == DEREF)
1555 /* x = *y */
1556 if (!(get_varinfo (c->lhs.var)->is_special_var))
1557 do_sd_constraint (graph, c, delta);
1559 else
1561 bitmap tmp;
1562 bitmap solution;
1563 bool flag = false;
1565 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1566 solution = get_varinfo (c->rhs.var)->solution;
1567 tmp = get_varinfo (c->lhs.var)->solution;
1569 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1571 if (flag)
1573 get_varinfo (c->lhs.var)->solution = tmp;
1574 if (!TEST_BIT (changed, c->lhs.var))
1576 SET_BIT (changed, c->lhs.var);
1577 changed_count++;
1583 /* Initialize and return a new SCC info structure. */
1585 static struct scc_info *
1586 init_scc_info (size_t size)
1588 struct scc_info *si = XNEW (struct scc_info);
1589 size_t i;
1591 si->current_index = 0;
1592 si->visited = sbitmap_alloc (size);
1593 sbitmap_zero (si->visited);
1594 si->deleted = sbitmap_alloc (size);
1595 sbitmap_zero (si->deleted);
1596 si->node_mapping = XNEWVEC (unsigned int, size);
1597 si->dfs = XCNEWVEC (unsigned int, size);
1599 for (i = 0; i < size; i++)
1600 si->node_mapping[i] = i;
1602 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1603 return si;
1606 /* Free an SCC info structure pointed to by SI */
1608 static void
1609 free_scc_info (struct scc_info *si)
1611 sbitmap_free (si->visited);
1612 sbitmap_free (si->deleted);
1613 free (si->node_mapping);
1614 free (si->dfs);
1615 VEC_free (unsigned, heap, si->scc_stack);
1616 free (si);
/* Find the indirect cycles that occur in GRAPH, using strongly connected
   components, and note them in the indirect cycles map.

   This technique comes from Ben Hardekopf and Calvin Lin,
   "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
   Lines of Code", submitted to PLDI 2007.  */
1627 static void
1628 find_indirect_cycles (constraint_graph_t graph)
1630 unsigned int i;
1631 unsigned int size = graph->size;
1632 struct scc_info *si = init_scc_info (size);
1634 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1635 if (!TEST_BIT (si->visited, i) && find (i) == i)
1636 scc_visit (graph, si, i);
1638 free_scc_info (si);
1641 /* Compute a topological ordering for GRAPH, and store the result in the
1642 topo_info structure TI. */
1644 static void
1645 compute_topo_order (constraint_graph_t graph,
1646 struct topo_info *ti)
1648 unsigned int i;
1649 unsigned int size = graph->size;
1651 for (i = 0; i != size; ++i)
1652 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1653 topo_visit (graph, ti, i);
/* Structure used for hash value numbering of pointer equivalence
   classes.  */
1659 typedef struct equiv_class_label
1661 unsigned int equivalence_class;
1662 bitmap labels;
1663 hashval_t hashcode;
1664 } *equiv_class_label_t;
1665 typedef const struct equiv_class_label *const_equiv_class_label_t;
1667 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1668 classes. */
1669 static htab_t pointer_equiv_class_table;
1671 /* A hashtable for mapping a bitmap of labels->location equivalence
1672 classes. */
1673 static htab_t location_equiv_class_table;
/* Hash function for an equiv_class_label_t.  */
1677 static hashval_t
1678 equiv_class_label_hash (const void *p)
1680 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1681 return ecl->hashcode;
1684 /* Equality function for two equiv_class_label_t's. */
1686 static int
1687 equiv_class_label_eq (const void *p1, const void *p2)
1689 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1690 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1691 return bitmap_equal_p (eql1->labels, eql2->labels);
/* Look up an equivalence class in TABLE by the bitmap of LABELS it
   contains.  */
1697 static unsigned int
1698 equiv_class_lookup (htab_t table, bitmap labels)
1700 void **slot;
1701 struct equiv_class_label ecl;
1703 ecl.labels = labels;
1704 ecl.hashcode = bitmap_hash (labels);
1706 slot = htab_find_slot_with_hash (table, &ecl,
1707 ecl.hashcode, NO_INSERT);
1708 if (!slot)
1709 return 0;
1710 else
1711 return ((equiv_class_label_t) *slot)->equivalence_class;
1715 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1716 to TABLE. */
1718 static void
1719 equiv_class_add (htab_t table, unsigned int equivalence_class,
1720 bitmap labels)
1722 void **slot;
1723 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1725 ecl->labels = labels;
1726 ecl->equivalence_class = equivalence_class;
1727 ecl->hashcode = bitmap_hash (labels);
1729 slot = htab_find_slot_with_hash (table, ecl,
1730 ecl->hashcode, INSERT);
1731 gcc_assert (!*slot);
1732 *slot = (void *) ecl;
/* Perform offline variable substitution.

   This is a worst case quadratic time way of identifying variables
   that must have equivalent points-to sets, including those caused by
   static cycles, and single entry subgraphs, in the constraint graph.

   The technique is described in "Exploiting Pointer and Location
   Equivalence to Optimize Pointer Analysis.  In the 14th International
   Static Analysis Symposium (SAS), August 2007."  It is known as the
   "HU" algorithm, and is equivalent to value numbering the collapsed
   constraint graph including evaluating unions.

   The general method of finding equivalence classes is as follows:
   Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
   Initialize all non-REF nodes to be direct nodes.
   For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
   variable}.
   For each constraint containing the dereference, we also do the same
   thing.

   We then compute SCC's in the graph and unify nodes in the same SCC,
   including pts sets.

   For each non-collapsed node x:
    Visit all unvisited explicit incoming edges.
    Ignoring all non-pointers, set pts(x) = Union of pts(y) for y
    where y->x.
    Lookup the equivalence class for pts(x).
    If we found one, equivalence_class(x) = found class.
    Otherwise, equivalence_class(x) = new class, and new_class is
    added to the lookup table.

   All direct nodes with the same equivalence class can be replaced
   with a single representative node.
   All unlabeled nodes (label == 0) are not pointers and all edges
   involving them can be eliminated.
   We perform these optimizations during rewrite_constraints.

   In addition to pointer equivalence class finding, we also perform
   location equivalence class finding.  This is the set of variables
   that always appear together in points-to sets.  We use this to
   compress the size of the points-to sets.  */
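/* A small, hypothetical example of the labeling described above: given

     a = &x;   b = a;   c = a;

   the nodes for b and c each compute pts = { x } from their single
   incoming edge, receive the same pointer equivalence label, and can
   therefore be collapsed to a single representative.  A variable that is
   never assigned anything pointer-like keeps an empty pts set, stays at
   label 0, and has all of its edges eliminated as a non-pointer below.  */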
1778 /* Current maximum pointer equivalence class id. */
1779 static int pointer_equiv_class;
1781 /* Current maximum location equivalence class id. */
1782 static int location_equiv_class;
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  */
1787 static void
1788 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1790 unsigned int i;
1791 bitmap_iterator bi;
1792 unsigned int my_dfs;
1794 gcc_assert (si->node_mapping[n] == n);
1795 SET_BIT (si->visited, n);
1796 si->dfs[n] = si->current_index ++;
1797 my_dfs = si->dfs[n];
1799 /* Visit all the successors. */
1800 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1802 unsigned int w = si->node_mapping[i];
1804 if (TEST_BIT (si->deleted, w))
1805 continue;
1807 if (!TEST_BIT (si->visited, w))
1808 condense_visit (graph, si, w);
1810 unsigned int t = si->node_mapping[w];
1811 unsigned int nnode = si->node_mapping[n];
1812 gcc_assert (nnode == n);
1814 if (si->dfs[t] < si->dfs[nnode])
1815 si->dfs[n] = si->dfs[t];
1819 /* Visit all the implicit predecessors. */
1820 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
1822 unsigned int w = si->node_mapping[i];
1824 if (TEST_BIT (si->deleted, w))
1825 continue;
1827 if (!TEST_BIT (si->visited, w))
1828 condense_visit (graph, si, w);
1830 unsigned int t = si->node_mapping[w];
1831 unsigned int nnode = si->node_mapping[n];
1832 gcc_assert (nnode == n);
1834 if (si->dfs[t] < si->dfs[nnode])
1835 si->dfs[n] = si->dfs[t];
1839 /* See if any components have been identified. */
1840 if (si->dfs[n] == my_dfs)
1842 while (VEC_length (unsigned, si->scc_stack) != 0
1843 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1845 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1846 si->node_mapping[w] = n;
1848 if (!TEST_BIT (graph->direct_nodes, w))
1849 RESET_BIT (graph->direct_nodes, n);
1851 /* Unify our nodes. */
1852 if (graph->preds[w])
1854 if (!graph->preds[n])
1855 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1856 bitmap_ior_into (graph->preds[n], graph->preds[w]);
1858 if (graph->implicit_preds[w])
1860 if (!graph->implicit_preds[n])
1861 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1862 bitmap_ior_into (graph->implicit_preds[n],
1863 graph->implicit_preds[w]);
1865 if (graph->points_to[w])
1867 if (!graph->points_to[n])
1868 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
1869 bitmap_ior_into (graph->points_to[n],
1870 graph->points_to[w]);
1872 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1874 unsigned int rep = si->node_mapping[i];
1875 graph->number_incoming[rep]++;
1878 SET_BIT (si->deleted, n);
1880 else
1881 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1884 /* Label pointer equivalences. */
1886 static void
1887 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1889 unsigned int i;
1890 bitmap_iterator bi;
1891 SET_BIT (si->visited, n);
1893 if (!graph->points_to[n])
1894 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
  /* Label and union our incoming edges' points-to sets.  */
1897 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1899 unsigned int w = si->node_mapping[i];
1900 if (!TEST_BIT (si->visited, w))
1901 label_visit (graph, si, w);
1903 /* Skip unused edges */
1904 if (w == n || graph->pointer_label[w] == 0)
1906 graph->number_incoming[w]--;
1907 continue;
1909 if (graph->points_to[w])
1910 bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
1912 /* If all incoming edges to w have been processed and
1913 graph->points_to[w] was not stored in the hash table, we can
1914 free it. */
1915 graph->number_incoming[w]--;
1916 if (!graph->number_incoming[w] && !TEST_BIT (graph->pt_used, w))
1918 BITMAP_FREE (graph->points_to[w]);
1921 /* Indirect nodes get fresh variables. */
1922 if (!TEST_BIT (graph->direct_nodes, n))
1923 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
1925 if (!bitmap_empty_p (graph->points_to[n]))
1927 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
1928 graph->points_to[n]);
1929 if (!label)
1931 SET_BIT (graph->pt_used, n);
1932 label = pointer_equiv_class++;
1933 equiv_class_add (pointer_equiv_class_table,
1934 label, graph->points_to[n]);
1936 graph->pointer_label[n] = label;
1940 /* Perform offline variable substitution, discovering equivalence
1941 classes, and eliminating non-pointer variables. */
1943 static struct scc_info *
1944 perform_var_substitution (constraint_graph_t graph)
1946 unsigned int i;
1947 unsigned int size = graph->size;
1948 struct scc_info *si = init_scc_info (size);
1950 bitmap_obstack_initialize (&iteration_obstack);
1951 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
1952 equiv_class_label_eq, free);
1953 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
1954 equiv_class_label_eq, free);
1955 pointer_equiv_class = 1;
1956 location_equiv_class = 1;
1958 /* Condense the nodes, which means to find SCC's, count incoming
1959 predecessors, and unite nodes in SCC's. */
1960 for (i = 0; i < FIRST_REF_NODE; i++)
1961 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1962 condense_visit (graph, si, si->node_mapping[i]);
1964 sbitmap_zero (si->visited);
  /* Actually label the nodes for pointer equivalences.  */
1966 for (i = 0; i < FIRST_REF_NODE; i++)
1967 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1968 label_visit (graph, si, si->node_mapping[i]);
1970 /* Calculate location equivalence labels. */
1971 for (i = 0; i < FIRST_REF_NODE; i++)
1973 bitmap pointed_by;
1974 bitmap_iterator bi;
1975 unsigned int j;
1976 unsigned int label;
1978 if (!graph->pointed_by[i])
1979 continue;
1980 pointed_by = BITMAP_ALLOC (&iteration_obstack);
1982 /* Translate the pointed-by mapping for pointer equivalence
1983 labels. */
1984 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
1986 bitmap_set_bit (pointed_by,
1987 graph->pointer_label[si->node_mapping[j]]);
1989 /* The original pointed_by is now dead. */
1990 BITMAP_FREE (graph->pointed_by[i]);
1992 /* Look up the location equivalence label if one exists, or make
1993 one otherwise. */
1994 label = equiv_class_lookup (location_equiv_class_table,
1995 pointed_by);
1996 if (label == 0)
1998 label = location_equiv_class++;
1999 equiv_class_add (location_equiv_class_table,
2000 label, pointed_by);
2002 else
2004 if (dump_file && (dump_flags & TDF_DETAILS))
2005 fprintf (dump_file, "Found location equivalence for node %s\n",
2006 get_varinfo (i)->name);
2007 BITMAP_FREE (pointed_by);
2009 graph->loc_label[i] = label;
2013 if (dump_file && (dump_flags & TDF_DETAILS))
2014 for (i = 0; i < FIRST_REF_NODE; i++)
2016 bool direct_node = TEST_BIT (graph->direct_nodes, i);
2017 fprintf (dump_file,
2018 "Equivalence classes for %s node id %d:%s are pointer: %d"
2019 ", location:%d\n",
2020 direct_node ? "Direct node" : "Indirect node", i,
2021 get_varinfo (i)->name,
2022 graph->pointer_label[si->node_mapping[i]],
2023 graph->loc_label[si->node_mapping[i]]);
2026 /* Quickly eliminate our non-pointer variables. */
2028 for (i = 0; i < FIRST_REF_NODE; i++)
2030 unsigned int node = si->node_mapping[i];
2032 if (graph->pointer_label[node] == 0)
2034 if (dump_file && (dump_flags & TDF_DETAILS))
2035 fprintf (dump_file,
2036 "%s is a non-pointer variable, eliminating edges.\n",
2037 get_varinfo (node)->name);
2038 stats.nonpointer_vars++;
2039 clear_edges_for_node (graph, node);
2043 return si;
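/* A small illustration of what the labelling above buys us (a sketch,
   not taken from a real dump): if two variables x and y end up with
   identical points-to label sets, they receive the same pointer
   equivalence label and may later be collapsed into a single node by
   find_equivalent_node / unite_pointer_equivalences; a variable whose
   pointer label stays 0 provably points to nothing, and all of its
   edges are removed right here.  */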
2046 /* Free information that was only necessary for variable
2047 substitution. */
2049 static void
2050 free_var_substitution_info (struct scc_info *si)
2052 free_scc_info (si);
2053 free (graph->pointer_label);
2054 free (graph->loc_label);
2055 free (graph->pointed_by);
2056 free (graph->points_to);
2057 free (graph->number_incoming);
2058 free (graph->eq_rep);
2059 sbitmap_free (graph->direct_nodes);
2060 sbitmap_free (graph->pt_used);
2061 htab_delete (pointer_equiv_class_table);
2062 htab_delete (location_equiv_class_table);
2063 bitmap_obstack_release (&iteration_obstack);
2066 /* Return an existing node that is equivalent to NODE, which has
2067 equivalence class LABEL, if one exists. Return NODE otherwise. */
2069 static unsigned int
2070 find_equivalent_node (constraint_graph_t graph,
2071 unsigned int node, unsigned int label)
2073 /* If the address version of this variable is unused, we can
2074 substitute it for anything else with the same label.
2075 Otherwise, we know the pointers are equivalent, but not the
2076 locations, and we can unite them later. */
2078 if (!bitmap_bit_p (graph->address_taken, node))
2080 gcc_assert (label < graph->size);
2082 if (graph->eq_rep[label] != -1)
2084 /* Unify the two variables since we know they are equivalent. */
2085 if (unite (graph->eq_rep[label], node))
2086 unify_nodes (graph, graph->eq_rep[label], node, false);
2087 return graph->eq_rep[label];
2089 else
2091 graph->eq_rep[label] = node;
2092 graph->pe_rep[label] = node;
2095 else
2097 gcc_assert (label < graph->size);
2098 graph->pe[node] = label;
2099 if (graph->pe_rep[label] == -1)
2100 graph->pe_rep[label] = node;
2103 return node;
2106 /* Unite pointer equivalent but not location equivalent nodes in
2107 GRAPH. This may only be performed once variable substitution is
2108 finished. */
2110 static void
2111 unite_pointer_equivalences (constraint_graph_t graph)
2113 unsigned int i;
2115 /* Go through the pointer equivalences and unite them to their
2116 representative, if they aren't already. */
2117 for (i = 0; i < FIRST_REF_NODE; i++)
2119 unsigned int label = graph->pe[i];
2120 if (label)
2122 int label_rep = graph->pe_rep[label];
2124 if (label_rep == -1)
2125 continue;
2127 label_rep = find (label_rep);
2128 if (label_rep >= 0 && unite (label_rep, find (i)))
2129 unify_nodes (graph, label_rep, i, false);
2134 /* Move complex constraints to the GRAPH nodes they belong to. */
2136 static void
2137 move_complex_constraints (constraint_graph_t graph)
2139 int i;
2140 constraint_t c;
2142 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2144 if (c)
2146 struct constraint_expr lhs = c->lhs;
2147 struct constraint_expr rhs = c->rhs;
2149 if (lhs.type == DEREF)
2151 insert_into_complex (graph, lhs.var, c);
2153 else if (rhs.type == DEREF)
2155 if (!(get_varinfo (lhs.var)->is_special_var))
2156 insert_into_complex (graph, rhs.var, c);
2158 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2159 && (lhs.offset != 0 || rhs.offset != 0))
2161 insert_into_complex (graph, rhs.var, c);
2168 /* Optimize and rewrite complex constraints while performing
2169 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2170 result of perform_var_substitution.  */
2172 static void
2173 rewrite_constraints (constraint_graph_t graph,
2174 struct scc_info *si)
2176 int i;
2177 unsigned int j;
2178 constraint_t c;
2180 for (j = 0; j < graph->size; j++)
2181 gcc_assert (find (j) == j);
2183 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2185 struct constraint_expr lhs = c->lhs;
2186 struct constraint_expr rhs = c->rhs;
2187 unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
2188 unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
2189 unsigned int lhsnode, rhsnode;
2190 unsigned int lhslabel, rhslabel;
2192 lhsnode = si->node_mapping[lhsvar];
2193 rhsnode = si->node_mapping[rhsvar];
2194 lhslabel = graph->pointer_label[lhsnode];
2195 rhslabel = graph->pointer_label[rhsnode];
2197 /* See if it is really a non-pointer variable, and if so, ignore
2198 the constraint. */
2199 if (lhslabel == 0)
2201 if (dump_file && (dump_flags & TDF_DETAILS))
2204 fprintf (dump_file, "%s is a non-pointer variable, "
2205 "ignoring constraint:",
2206 get_varinfo (lhs.var)->name);
2207 dump_constraint (dump_file, c);
2209 VEC_replace (constraint_t, constraints, i, NULL);
2210 continue;
2213 if (rhslabel == 0)
2215 if (dump_file && (dump_flags & TDF_DETAILS))
2218 fprintf (dump_file, "%s is a non-pointer variable, "
2219 "ignoring constraint:",
2220 get_varinfo (rhs.var)->name);
2221 dump_constraint (dump_file, c);
2223 VEC_replace (constraint_t, constraints, i, NULL);
2224 continue;
2227 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2228 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2229 c->lhs.var = lhsvar;
2230 c->rhs.var = rhsvar;
2235 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2236 part of an SCC, false otherwise. */
2238 static bool
2239 eliminate_indirect_cycles (unsigned int node)
2241 if (graph->indirect_cycles[node] != -1
2242 && !bitmap_empty_p (get_varinfo (node)->solution))
2244 unsigned int i;
2245 VEC(unsigned,heap) *queue = NULL;
2246 int queuepos;
2247 unsigned int to = find (graph->indirect_cycles[node]);
2248 bitmap_iterator bi;
2250 /* We can't touch the solution set and call unify_nodes
2251 at the same time, because unify_nodes is going to do
2252 bitmap unions into it. */
2254 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2256 if (find (i) == i && i != to)
2258 if (unite (to, i))
2259 VEC_safe_push (unsigned, heap, queue, i);
2263 for (queuepos = 0;
2264 VEC_iterate (unsigned, queue, queuepos, i);
2265 queuepos++)
2267 unify_nodes (graph, to, i, true);
2269 VEC_free (unsigned, heap, queue);
2270 return true;
2272 return false;
2275 /* Solve the constraint graph GRAPH using our worklist solver.
2276 This is based on the PW* family of solvers from the "Efficient Field
2277 Sensitive Pointer Analysis for C" paper.
2278 It works by iterating over all the graph nodes, processing the complex
2279 constraints and propagating the copy constraints, until everything stops
2280 changing.  This corresponds to steps 6-8 in the solving list given above.  */
2282 static void
2283 solve_graph (constraint_graph_t graph)
2285 unsigned int size = graph->size;
2286 unsigned int i;
2287 bitmap pts;
2289 changed_count = 0;
2290 changed = sbitmap_alloc (size);
2291 sbitmap_zero (changed);
2293 /* Mark all initial non-collapsed nodes as changed. */
2294 for (i = 0; i < size; i++)
2296 varinfo_t ivi = get_varinfo (i);
2297 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2298 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2299 || VEC_length (constraint_t, graph->complex[i]) > 0))
2301 SET_BIT (changed, i);
2302 changed_count++;
2306 /* Allocate a bitmap to be used to store the changed bits. */
2307 pts = BITMAP_ALLOC (&pta_obstack);
2309 while (changed_count > 0)
2311 unsigned int i;
2312 struct topo_info *ti = init_topo_info ();
2313 stats.iterations++;
2315 bitmap_obstack_initialize (&iteration_obstack);
2317 compute_topo_order (graph, ti);
2319 while (VEC_length (unsigned, ti->topo_order) != 0)
2322 i = VEC_pop (unsigned, ti->topo_order);
2324 /* If this variable is not a representative, skip it. */
2325 if (find (i) != i)
2326 continue;
2328 /* In certain indirect cycle cases, we may merge this
2329 variable to another. */
2330 if (eliminate_indirect_cycles (i) && find (i) != i)
2331 continue;
2333 /* If the node has changed, we need to process the
2334 complex constraints and outgoing edges again. */
2335 if (TEST_BIT (changed, i))
2337 unsigned int j;
2338 constraint_t c;
2339 bitmap solution;
2340 VEC(constraint_t,heap) *complex = graph->complex[i];
2341 bool solution_empty;
2343 RESET_BIT (changed, i);
2344 changed_count--;
2346 /* Compute the changed set of solution bits. */
2347 bitmap_and_compl (pts, get_varinfo (i)->solution,
2348 get_varinfo (i)->oldsolution);
2350 if (bitmap_empty_p (pts))
2351 continue;
2353 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2355 solution = get_varinfo (i)->solution;
2356 solution_empty = bitmap_empty_p (solution);
2358 /* Process the complex constraints */
2359 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2361 /* XXX: This is going to unsort the constraints in
2362 some cases, which will occasionally add duplicate
2363 constraints during unification. This does not
2364 affect correctness. */
2365 c->lhs.var = find (c->lhs.var);
2366 c->rhs.var = find (c->rhs.var);
2368 /* The only complex constraint that can change our
2369 solution to non-empty, given an empty solution,
2370 is a constraint where the lhs side is receiving
2371 some set from elsewhere. */
2372 if (!solution_empty || c->lhs.type != DEREF)
2373 do_complex_constraint (graph, c, pts);
2376 solution_empty = bitmap_empty_p (solution);
2378 if (!solution_empty
2379 /* Do not propagate the ESCAPED/CALLUSED solutions. */
2380 && i != escaped_id
2381 && i != callused_id)
2383 bitmap_iterator bi;
2385 /* Propagate solution to all successors. */
2386 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2387 0, j, bi)
2389 bitmap tmp;
2390 bool flag;
2392 unsigned int to = find (j);
2393 tmp = get_varinfo (to)->solution;
2394 flag = false;
2396 /* Don't try to propagate to ourselves. */
2397 if (to == i)
2398 continue;
2400 flag = set_union_with_increment (tmp, pts, 0);
2402 if (flag)
2404 get_varinfo (to)->solution = tmp;
2405 if (!TEST_BIT (changed, to))
2407 SET_BIT (changed, to);
2408 changed_count++;
2415 free_topo_info (ti);
2416 bitmap_obstack_release (&iteration_obstack);
2419 BITMAP_FREE (pts);
2420 sbitmap_free (changed);
2421 bitmap_obstack_release (&oldpta_obstack);
2424 /* Map from trees to variable infos. */
2425 static struct pointer_map_t *vi_for_tree;
2428 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2430 static void
2431 insert_vi_for_tree (tree t, varinfo_t vi)
2433 void **slot = pointer_map_insert (vi_for_tree, t);
2434 gcc_assert (vi);
2435 gcc_assert (*slot == NULL);
2436 *slot = vi;
2439 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2440 exist in the map, return NULL; otherwise return the varinfo we found.  */
2442 static varinfo_t
2443 lookup_vi_for_tree (tree t)
2445 void **slot = pointer_map_contains (vi_for_tree, t);
2446 if (slot == NULL)
2447 return NULL;
2449 return (varinfo_t) *slot;
2452 /* Return a printable name for DECL */
2454 static const char *
2455 alias_get_name (tree decl)
2457 const char *res = get_name (decl);
2458 char *temp;
2459 int num_printed = 0;
2461 if (res != NULL)
2462 return res;
2464 res = "NULL";
2465 if (!dump_file)
2466 return res;
2468 if (TREE_CODE (decl) == SSA_NAME)
2470 num_printed = asprintf (&temp, "%s_%u",
2471 alias_get_name (SSA_NAME_VAR (decl)),
2472 SSA_NAME_VERSION (decl));
2474 else if (DECL_P (decl))
2476 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2478 if (num_printed > 0)
2480 res = ggc_strdup (temp);
2481 free (temp);
2483 return res;
2486 /* Find the variable id for tree T in the map.
2487 If T doesn't exist in the map, create an entry for it and return it. */
2489 static varinfo_t
2490 get_vi_for_tree (tree t)
2492 void **slot = pointer_map_contains (vi_for_tree, t);
2493 if (slot == NULL)
2494 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2496 return (varinfo_t) *slot;
2499 /* Get a constraint expression for a new temporary variable. */
2501 static struct constraint_expr
2502 get_constraint_exp_for_temp (tree t)
2504 struct constraint_expr cexpr;
2506 gcc_assert (SSA_VAR_P (t));
2508 cexpr.type = SCALAR;
2509 cexpr.var = get_vi_for_tree (t)->id;
2510 cexpr.offset = 0;
2512 return cexpr;
2515 /* Get a constraint expression vector from an SSA_VAR_P node.
2516 If address_p is true, the result will have its address taken.  */
2518 static void
2519 get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p)
2521 struct constraint_expr cexpr;
2522 varinfo_t vi;
2524 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2525 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2527 /* For parameters, get at the points-to set for the actual parm
2528 decl. */
2529 if (TREE_CODE (t) == SSA_NAME
2530 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2531 && SSA_NAME_IS_DEFAULT_DEF (t))
2533 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2534 return;
2537 vi = get_vi_for_tree (t);
2538 cexpr.var = vi->id;
2539 cexpr.type = SCALAR;
2540 cexpr.offset = 0;
2541 /* If we determine the result is "anything", and we know this is readonly,
2542 say it points to readonly memory instead. */
2543 if (cexpr.var == anything_id && TREE_READONLY (t))
2545 gcc_unreachable ();
2546 cexpr.type = ADDRESSOF;
2547 cexpr.var = readonly_id;
2550 /* If we are not taking the address of the constraint expr, add all
2551 sub-fields of the variable as well.  */
2552 if (!address_p)
2554 for (; vi; vi = vi->next)
2556 cexpr.var = vi->id;
2557 VEC_safe_push (ce_s, heap, *results, &cexpr);
2559 return;
2562 VEC_safe_push (ce_s, heap, *results, &cexpr);
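/* For example (illustrative only): if "s" is a structure variable with
   sub-variables "s.a" and "s.b", asking for its constraints with
   address_p false yields one SCALAR expression per sub-field,
   { s.a, s.b }, whereas with address_p true only the single expression
   for "s" itself is pushed, since the caller will walk whatever
   sub-fields it needs.  */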
2565 /* Process constraint T, performing various simplifications and then
2566 adding it to our list of overall constraints. */
2568 static void
2569 process_constraint (constraint_t t)
2571 struct constraint_expr rhs = t->rhs;
2572 struct constraint_expr lhs = t->lhs;
2574 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2575 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2577 if (!use_field_sensitive)
2579 t->rhs.offset = 0;
2580 t->lhs.offset = 0;
2583 /* ANYTHING == ANYTHING is pointless. */
2584 if (lhs.var == anything_id && rhs.var == anything_id)
2585 return;
2587 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING.  */
2588 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
2590 rhs = t->lhs;
2591 t->lhs = t->rhs;
2592 t->rhs = rhs;
2593 process_constraint (t);
2595 /* This can happen in our IR with things like n->a = *p */
2596 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2598 /* Split into tmp = *rhs, *lhs = tmp */
2599 tree rhsdecl = get_varinfo (rhs.var)->decl;
2600 tree pointertype = TREE_TYPE (rhsdecl);
2601 tree pointedtotype = TREE_TYPE (pointertype);
2602 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2603 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
2605 process_constraint (new_constraint (tmplhs, rhs));
2606 process_constraint (new_constraint (lhs, tmplhs));
2608 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2610 /* Split into tmp = &rhs, *lhs = tmp */
2611 tree rhsdecl = get_varinfo (rhs.var)->decl;
2612 tree pointertype = TREE_TYPE (rhsdecl);
2613 tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp");
2614 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
2616 process_constraint (new_constraint (tmplhs, rhs));
2617 process_constraint (new_constraint (lhs, tmplhs));
2619 else
2621 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2622 VEC_safe_push (constraint_t, heap, constraints, t);
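/* To make the rewrites above concrete, a statement like "n->a = *p"
   (DEREF on both sides) is, roughly, split through a temporary as

       doubledereftmp = *p;
       n->a = doubledereftmp;

   i.e. the constraints "doubledereftmp = DEREF (p)" followed by
   "DEREF (n) = doubledereftmp", each of which the solver can handle
   directly; "*x = &y" is split the same way through a "derefaddrtmp"
   temporary.  This merely restates the cases handled above.  */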
2626 /* Return true if T is a variable of a type that could contain
2627 pointers. */
2629 static bool
2630 could_have_pointers (tree t)
2632 tree type = TREE_TYPE (t);
2634 if (POINTER_TYPE_P (type)
2635 || AGGREGATE_TYPE_P (type)
2636 || TREE_CODE (type) == COMPLEX_TYPE)
2637 return true;
2639 return false;
2642 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2643 structure. */
2645 static unsigned HOST_WIDE_INT
2646 bitpos_of_field (const tree fdecl)
2649 if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
2650 || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
2651 return -1;
2653 return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
2654 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
2658 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
2659 If address_p is true, the result will have its address taken.  */
2661 static void
2662 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
2663 bool address_p)
2665 tree orig_t = t;
2666 HOST_WIDE_INT bitsize = -1;
2667 HOST_WIDE_INT bitmaxsize = -1;
2668 HOST_WIDE_INT bitpos;
2669 tree forzero;
2670 struct constraint_expr *result;
2672 /* Some people like to do cute things like take the address of
2673 &0->a.b */
2674 forzero = t;
2675 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2676 forzero = TREE_OPERAND (forzero, 0);
2678 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2680 struct constraint_expr temp;
2682 temp.offset = 0;
2683 temp.var = integer_id;
2684 temp.type = SCALAR;
2685 VEC_safe_push (ce_s, heap, *results, &temp);
2686 return;
2689 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2691 /* Pretend to take the address of the base; we'll take care of
2692 adding the required subset of sub-fields below. */
2693 get_constraint_for_1 (t, results, true);
2694 result = VEC_last (ce_s, *results);
2696 gcc_assert (VEC_length (ce_s, *results) == 1);
2698 /* This can also happen due to weird offsetof type macros. */
2699 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2700 result->type = SCALAR;
2702 if (result->type == SCALAR)
2704 /* In languages like C, you can access one past the end of an
2705 array. You aren't allowed to dereference it, so we can
2706 ignore this constraint. When we handle pointer subtraction,
2707 we may have to do something cute here. */
2709 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result->var)->fullsize
2710 && bitmaxsize != 0)
2712 /* It's also not true that the constraint will actually start at the
2713 right offset; it may start in some padding.  We only care about
2714 setting the constraint to the first actual field it touches, so
2715 walk to find it. */
2716 struct constraint_expr cexpr = *result;
2717 varinfo_t curr;
2718 VEC_pop (ce_s, *results);
2719 cexpr.offset = 0;
2720 for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
2722 if (ranges_overlap_p (curr->offset, curr->size,
2723 bitpos, bitmaxsize))
2725 cexpr.var = curr->id;
2726 VEC_safe_push (ce_s, heap, *results, &cexpr);
2727 if (address_p)
2728 break;
2731 /* assert that we found *some* field there. The user couldn't be
2732 accessing *only* padding. */
2733 /* Still the user could access one past the end of an array
2734 embedded in a struct resulting in accessing *only* padding. */
2735 gcc_assert (VEC_length (ce_s, *results) >= 1
2736 || ref_contains_array_ref (orig_t));
2738 else if (bitmaxsize == 0)
2740 if (dump_file && (dump_flags & TDF_DETAILS))
2741 fprintf (dump_file, "Access to zero-sized part of variable, "
2742 "ignoring\n");
2744 else
2745 if (dump_file && (dump_flags & TDF_DETAILS))
2746 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
2748 else if (bitmaxsize == -1)
2750 /* We can't handle DEREF constraints with unknown size, we'll
2751 get the wrong answer. Punt and return anything. */
2752 result->var = anything_id;
2753 result->offset = 0;
2755 else
2756 result->offset = bitpos;
2760 /* Dereference the constraint expression CONS, and return the result.
2761 DEREF (ADDRESSOF) = SCALAR
2762 DEREF (SCALAR) = DEREF
2763 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2764 This is needed so that we can handle dereferencing DEREF constraints. */
2766 static void
2767 do_deref (VEC (ce_s, heap) **constraints)
2769 struct constraint_expr *c;
2770 unsigned int i = 0;
2772 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
2774 if (c->type == SCALAR)
2775 c->type = DEREF;
2776 else if (c->type == ADDRESSOF)
2777 c->type = SCALAR;
2778 else if (c->type == DEREF)
2780 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
2781 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
2782 process_constraint (new_constraint (tmplhs, *c));
2783 c->var = tmplhs.var;
2785 else
2786 gcc_unreachable ();
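/* As a rough illustration of the rules above: dereferencing the
   constraint vector for "p" (the single SCALAR expression "p") yields
   DEREF "p", standing for "*p"; dereferencing "&x" (ADDRESSOF "x")
   collapses back to SCALAR "x"; and for a double dereference such as
   "**p" the existing DEREF element is first copied into a fresh
   "dereftmp" variable so the result can again be expressed as a
   single-level DEREF of that temporary.  */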
2790 /* Given a tree T, return the constraint expression for it. */
2792 static void
2793 get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
2795 struct constraint_expr temp;
2797 /* x = integer is all glommed to a single variable, which doesn't
2798 point to anything by itself. That is, of course, unless it is an
2799 integer constant being treated as a pointer, in which case, we
2800 will return that this is really the addressof anything. This
2801 happens below, since it will fall into the default case. The only
2802 case we know something about an integer treated like a pointer is
2803 when it is the NULL pointer, and then we just say it points to
2804 NULL. */
2805 if (TREE_CODE (t) == INTEGER_CST
2806 && integer_zerop (t))
2808 temp.var = nothing_id;
2809 temp.type = ADDRESSOF;
2810 temp.offset = 0;
2811 VEC_safe_push (ce_s, heap, *results, &temp);
2812 return;
2815 /* String constants are read-only. */
2816 if (TREE_CODE (t) == STRING_CST)
2818 temp.var = readonly_id;
2819 temp.type = SCALAR;
2820 temp.offset = 0;
2821 VEC_safe_push (ce_s, heap, *results, &temp);
2822 return;
2825 switch (TREE_CODE_CLASS (TREE_CODE (t)))
2827 case tcc_expression:
2828 case tcc_vl_exp:
2830 switch (TREE_CODE (t))
2832 case ADDR_EXPR:
2834 struct constraint_expr *c;
2835 unsigned int i;
2836 tree exp = TREE_OPERAND (t, 0);
2838 get_constraint_for_1 (exp, results, true);
2840 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
2842 if (c->type == DEREF)
2843 c->type = SCALAR;
2844 else
2845 c->type = ADDRESSOF;
2847 return;
2849 break;
2850 case CALL_EXPR:
2851 /* XXX: In interprocedural mode, if we didn't have the
2852 body, we would need to add *each pointer argument =
2853 &ANYTHING.
2854 if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
2856 varinfo_t vi;
2857 tree heapvar = heapvar_lookup (t);
2859 if (heapvar == NULL)
2861 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
2862 DECL_EXTERNAL (heapvar) = 1;
2863 get_var_ann (heapvar)->is_heapvar = 1;
2864 if (gimple_referenced_vars (cfun))
2865 add_referenced_var (heapvar);
2866 heapvar_insert (t, heapvar);
2869 temp.var = create_variable_info_for (heapvar,
2870 alias_get_name (heapvar));
2872 vi = get_varinfo (temp.var);
2873 vi->is_artificial_var = 1;
2874 vi->is_heap_var = 1;
2875 temp.type = ADDRESSOF;
2876 temp.offset = 0;
2877 VEC_safe_push (ce_s, heap, *results, &temp);
2878 return;
2880 else
2882 temp.var = anything_id;
2883 temp.type = SCALAR;
2884 temp.offset = 0;
2885 VEC_safe_push (ce_s, heap, *results, &temp);
2886 return;
2888 break;
2889 default:
2891 temp.type = ADDRESSOF;
2892 temp.var = anything_id;
2893 temp.offset = 0;
2894 VEC_safe_push (ce_s, heap, *results, &temp);
2895 return;
2899 case tcc_reference:
2901 switch (TREE_CODE (t))
2903 case INDIRECT_REF:
2905 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
2906 do_deref (results);
2907 return;
2909 case ARRAY_REF:
2910 case ARRAY_RANGE_REF:
2911 case COMPONENT_REF:
2912 get_constraint_for_component_ref (t, results, address_p);
2913 return;
2914 default:
2916 temp.type = ADDRESSOF;
2917 temp.var = anything_id;
2918 temp.offset = 0;
2919 VEC_safe_push (ce_s, heap, *results, &temp);
2920 return;
2924 case tcc_unary:
2926 switch (TREE_CODE (t))
2928 CASE_CONVERT:
2930 tree op = TREE_OPERAND (t, 0);
2932 /* Casts from non-pointer to pointer are bad news for us.
2933 Anything else we see through.  */
2934 if (!(POINTER_TYPE_P (TREE_TYPE (t))
2935 && ! POINTER_TYPE_P (TREE_TYPE (op))))
2937 get_constraint_for_1 (op, results, address_p);
2938 return;
2941 /* FALLTHRU */
2943 default:
2945 temp.type = ADDRESSOF;
2946 temp.var = anything_id;
2947 temp.offset = 0;
2948 VEC_safe_push (ce_s, heap, *results, &temp);
2949 return;
2953 case tcc_exceptional:
2955 switch (TREE_CODE (t))
2957 case PHI_NODE:
2959 get_constraint_for_1 (PHI_RESULT (t), results, address_p);
2960 return;
2962 break;
2963 case SSA_NAME:
2965 get_constraint_for_ssa_var (t, results, address_p);
2966 return;
2968 break;
2969 default:
2971 temp.type = ADDRESSOF;
2972 temp.var = anything_id;
2973 temp.offset = 0;
2974 VEC_safe_push (ce_s, heap, *results, &temp);
2975 return;
2979 case tcc_declaration:
2981 get_constraint_for_ssa_var (t, results, address_p);
2982 return;
2984 default:
2986 temp.type = ADDRESSOF;
2987 temp.var = anything_id;
2988 temp.offset = 0;
2989 VEC_safe_push (ce_s, heap, *results, &temp);
2990 return;
2995 /* Given a gimple tree T, return the constraint expression vector for it. */
2997 static void
2998 get_constraint_for (tree t, VEC (ce_s, heap) **results)
3000 gcc_assert (VEC_length (ce_s, *results) == 0);
3002 get_constraint_for_1 (t, results, false);
3005 /* Handle the structure copy case where we have a simple structure copy
3006 between LHS and RHS that is of SIZE (in bits)
3008 For each field of the lhs variable (lhsfield)
3009 For each field of the rhs variable at lhsfield.offset (rhsfield)
3010 add the constraint lhsfield = rhsfield
3012 If we fail due to some kind of type unsafety or other thing we
3013 can't handle, return false. We expect the caller to collapse the
3014 variable in that case. */
3016 static bool
3017 do_simple_structure_copy (const struct constraint_expr lhs,
3018 const struct constraint_expr rhs,
3019 const unsigned HOST_WIDE_INT size)
3021 varinfo_t p = get_varinfo (lhs.var);
3022 unsigned HOST_WIDE_INT pstart, last;
3023 pstart = p->offset;
3024 last = p->offset + size;
3025 for (; p && p->offset < last; p = p->next)
3027 varinfo_t q;
3028 struct constraint_expr templhs = lhs;
3029 struct constraint_expr temprhs = rhs;
3030 unsigned HOST_WIDE_INT fieldoffset;
3032 templhs.var = p->id;
3033 q = get_varinfo (temprhs.var);
3034 fieldoffset = p->offset - pstart;
3035 q = first_vi_for_offset (q, q->offset + fieldoffset);
3036 if (!q)
3037 return false;
3038 temprhs.var = q->id;
3039 process_constraint (new_constraint (templhs, temprhs));
3041 return true;
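/* For illustration only: given two variables of a type such as

       struct pair { int *first; int *second; };

   a copy "a = b" between two "struct pair" objects is expanded, field
   by field, into the constraints "a.first = b.first" and
   "a.second = b.second", matching up fields by their offset from the
   start of the structure as described above.  */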
3045 /* Handle the structure copy case where we have a structure copy between an
3046 aggregate on the LHS and a dereference of a pointer on the RHS
3047 that is of SIZE (in bits)
3049 For each field of the lhs variable (lhsfield)
3050 rhs.offset = lhsfield->offset
3051 add the constraint lhsfield = rhs
3054 static void
3055 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
3056 const struct constraint_expr rhs,
3057 const unsigned HOST_WIDE_INT size)
3059 varinfo_t p = get_varinfo (lhs.var);
3060 unsigned HOST_WIDE_INT pstart,last;
3061 pstart = p->offset;
3062 last = p->offset + size;
3064 for (; p && p->offset < last; p = p->next)
3066 varinfo_t q;
3067 struct constraint_expr templhs = lhs;
3068 struct constraint_expr temprhs = rhs;
3069 unsigned HOST_WIDE_INT fieldoffset;
3072 if (templhs.type == SCALAR)
3073 templhs.var = p->id;
3074 else
3075 templhs.offset = p->offset;
3077 q = get_varinfo (temprhs.var);
3078 fieldoffset = p->offset - pstart;
3079 temprhs.offset += fieldoffset;
3080 process_constraint (new_constraint (templhs, temprhs));
3084 /* Handle the structure copy case where we have a structure copy
3085 between an aggregate on the RHS and a dereference of a pointer on
3086 the LHS that is of SIZE (in bits)
3088 For each field of the rhs variable (rhsfield)
3089 lhs.offset = rhsfield->offset
3090 add the constraint lhs = rhsfield
3093 static void
3094 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
3095 const struct constraint_expr rhs,
3096 const unsigned HOST_WIDE_INT size)
3098 varinfo_t p = get_varinfo (rhs.var);
3099 unsigned HOST_WIDE_INT pstart,last;
3100 pstart = p->offset;
3101 last = p->offset + size;
3103 for (; p && p->offset < last; p = p->next)
3105 varinfo_t q;
3106 struct constraint_expr templhs = lhs;
3107 struct constraint_expr temprhs = rhs;
3108 unsigned HOST_WIDE_INT fieldoffset;
3111 if (temprhs.type == SCALAR)
3112 temprhs.var = p->id;
3113 else
3114 temprhs.offset = p->offset;
3116 q = get_varinfo (templhs.var);
3117 fieldoffset = p->offset - pstart;
3118 templhs.offset += fieldoffset;
3119 process_constraint (new_constraint (templhs, temprhs));
3123 /* Sometimes, frontends like to give us bad type information. This
3124 function will collapse all the fields from VAR to the end of VAR,
3125 into VAR, so that we treat those fields as a single variable.
3126 We return the variable they were collapsed into. */
3128 static unsigned int
3129 collapse_rest_of_var (unsigned int var)
3131 varinfo_t currvar = get_varinfo (var);
3132 varinfo_t field;
3134 for (field = currvar->next; field; field = field->next)
3136 if (dump_file)
3137 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
3138 field->name, currvar->name);
3140 gcc_assert (!field->collapsed_to);
3141 field->collapsed_to = currvar;
3144 currvar->next = NULL;
3145 currvar->size = currvar->fullsize - currvar->offset;
3147 return currvar->id;
3150 /* Handle aggregate copies by expanding into copies of the respective
3151 fields of the structures. */
3153 static void
3154 do_structure_copy (tree lhsop, tree rhsop)
3156 struct constraint_expr lhs, rhs, tmp;
3157 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3158 varinfo_t p;
3159 unsigned HOST_WIDE_INT lhssize;
3160 unsigned HOST_WIDE_INT rhssize;
3162 /* Pretend we are taking the address of the constraint exprs.
3163 We deal with walking the sub-fields ourselves. */
3164 get_constraint_for_1 (lhsop, &lhsc, true);
3165 get_constraint_for_1 (rhsop, &rhsc, true);
3166 gcc_assert (VEC_length (ce_s, lhsc) == 1);
3167 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3168 lhs = *(VEC_last (ce_s, lhsc));
3169 rhs = *(VEC_last (ce_s, rhsc));
3171 VEC_free (ce_s, heap, lhsc);
3172 VEC_free (ce_s, heap, rhsc);
3174 /* If we have special var = x, swap it around. */
3175 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
3177 tmp = lhs;
3178 lhs = rhs;
3179 rhs = tmp;
3182 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
3183 possible it's something we could handle. However, most cases falling
3184 into this are dealing with transparent unions, which are slightly
3185 weird. */
3186 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
3188 rhs.type = ADDRESSOF;
3189 rhs.var = anything_id;
3192 /* If the RHS is a special var, or an addressof, set all the LHS fields to
3193 that special var. */
3194 if (rhs.var <= integer_id)
3196 for (p = get_varinfo (lhs.var); p; p = p->next)
3198 struct constraint_expr templhs = lhs;
3199 struct constraint_expr temprhs = rhs;
3201 if (templhs.type == SCALAR )
3202 templhs.var = p->id;
3203 else
3204 templhs.offset += p->offset;
3205 process_constraint (new_constraint (templhs, temprhs));
3208 else
3210 tree rhstype = TREE_TYPE (rhsop);
3211 tree lhstype = TREE_TYPE (lhsop);
3212 tree rhstypesize;
3213 tree lhstypesize;
3215 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
3216 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
3218 /* If we have variably sized types on the rhs or lhs, and a deref
3219 constraint, add the constraint, lhsconstraint = &ANYTHING.
3220 This is conservatively correct because either the lhs is an unknown
3221 sized var (if the constraint is SCALAR), or the lhs is a DEREF
3222 constraint, and every variable it can point to must be unknown sized
3223 anyway, so we don't need to worry about fields at all. */
3224 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
3225 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
3227 rhs.var = anything_id;
3228 rhs.type = ADDRESSOF;
3229 rhs.offset = 0;
3230 process_constraint (new_constraint (lhs, rhs));
3231 return;
3234 /* The size only really matters insofar as we don't set more or less of
3235 the variable. If we hit an unknown size var, the size should be the
3236 whole darn thing. */
3237 if (get_varinfo (rhs.var)->is_unknown_size_var)
3238 rhssize = ~0;
3239 else
3240 rhssize = TREE_INT_CST_LOW (rhstypesize);
3242 if (get_varinfo (lhs.var)->is_unknown_size_var)
3243 lhssize = ~0;
3244 else
3245 lhssize = TREE_INT_CST_LOW (lhstypesize);
3248 if (rhs.type == SCALAR && lhs.type == SCALAR)
3250 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
3252 lhs.var = collapse_rest_of_var (lhs.var);
3253 rhs.var = collapse_rest_of_var (rhs.var);
3254 lhs.offset = 0;
3255 rhs.offset = 0;
3256 lhs.type = SCALAR;
3257 rhs.type = SCALAR;
3258 process_constraint (new_constraint (lhs, rhs));
3261 else if (lhs.type != DEREF && rhs.type == DEREF)
3262 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3263 else if (lhs.type == DEREF && rhs.type != DEREF)
3264 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3265 else
3267 tree pointedtotype = lhstype;
3268 tree tmpvar;
3270 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
3271 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
3272 do_structure_copy (tmpvar, rhsop);
3273 do_structure_copy (lhsop, tmpvar);
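/* Summarizing the dispatch above with a sketch: "a = b" between two
   aggregates goes through do_simple_structure_copy, "a = *p" through
   do_rhs_deref_structure_copy, "*p = b" through
   do_lhs_deref_structure_copy, and "*p = *q" is rewritten through a
   "structcopydereftmp" temporary so that only single-level
   dereferences remain.  */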
3279 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3280 Expressions of the type PTR + CST can be handled in two ways:
3282 1- If the constraint for PTR is ADDRESSOF for a non-structure
3283 variable, then we can use it directly because adding or
3284 subtracting a constant may not alter the original ADDRESSOF
3285 constraint (i.e., pointer arithmetic may not legally go outside
3286 an object's boundaries).
3288 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3289 then if CST is a compile-time constant that can be used as an
3290 offset, we can determine which sub-variable will be pointed-to
3291 by the expression.
3293 Return true if the expression is handled. For any other kind of
3294 expression, return false so that each operand can be added as a
3295 separate constraint by the caller. */
3297 static bool
3298 handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
3300 tree op0, op1;
3301 struct constraint_expr *c, *c2;
3302 unsigned int i = 0;
3303 unsigned int j = 0;
3304 VEC (ce_s, heap) *temp = NULL;
3305 unsigned HOST_WIDE_INT rhsunitoffset, rhsoffset;
3307 if (TREE_CODE (expr) != POINTER_PLUS_EXPR)
3308 return false;
3310 op0 = TREE_OPERAND (expr, 0);
3311 op1 = TREE_OPERAND (expr, 1);
3312 gcc_assert (POINTER_TYPE_P (TREE_TYPE (op0)));
3314 /* If the offset is not a non-negative integer constant that fits
3315 in a HOST_WIDE_INT, we cannot handle it here. */
3316 if (!host_integerp (op1, 1))
3317 return false;
3319 /* Make sure the bit-offset also fits. */
3320 rhsunitoffset = TREE_INT_CST_LOW (op1);
3321 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
3322 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3323 return false;
3325 get_constraint_for (op0, &temp);
3327 for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
3328 for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
3330 if (c2->type == ADDRESSOF && rhsoffset != 0)
3332 varinfo_t temp = get_varinfo (c2->var);
3334 /* An access one past the end of an array is valid,
3335 so simply punt on accesses we cannot resolve. */
3336 temp = first_vi_for_offset (temp, rhsoffset);
3337 if (temp == NULL)
3338 continue;
3339 c2->var = temp->id;
3340 c2->offset = 0;
3342 else
3343 c2->offset = rhsoffset;
3344 process_constraint (new_constraint (*c, *c2));
3347 VEC_free (ce_s, heap, temp);
3349 return true;
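/* A rough sketch of case 2 above: if the constraint computed for PTR
   is an ADDRESSOF of a structure variable such as

       struct S { int *a; int *b; } s;

   and CST corresponds to the offset of field "b", then instead of
   keeping the raw offset we look up, via first_vi_for_offset, the
   sub-variable covering "s.b" and make the result an ADDRESSOF of
   that sub-variable with offset 0.  If no sub-variable covers the
   offset (e.g. an access one past the end of an array) the pair is
   simply skipped, as noted in the loop above.  */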
3352 /* Create a constraint ID = OP. */
3354 static void
3355 make_constraint_to (unsigned id, tree op)
3357 VEC(ce_s, heap) *rhsc = NULL;
3358 struct constraint_expr *c;
3359 struct constraint_expr includes;
3360 unsigned int j;
3362 includes.var = id;
3363 includes.offset = 0;
3364 includes.type = SCALAR;
3366 get_constraint_for (op, &rhsc);
3367 for (j = 0; VEC_iterate (ce_s, rhsc, j, c); j++)
3368 process_constraint (new_constraint (includes, *c));
3369 VEC_free (ce_s, heap, rhsc);
3372 /* Make constraints necessary to make OP escape. */
3374 static void
3375 make_escape_constraint (tree op)
3377 make_constraint_to (escaped_id, op);
3380 /* For non-IPA mode, generate constraints necessary for a call on the
3381 RHS. */
3383 static void
3384 handle_rhs_call (tree rhs)
3386 tree arg;
3387 call_expr_arg_iterator iter;
3389 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhs)
3390 /* Find those pointers being passed, and make sure they end up
3391 pointing to anything. */
3392 if (could_have_pointers (arg))
3393 make_escape_constraint (arg);
3395 /* The static chain escapes as well. */
3396 if (CALL_EXPR_STATIC_CHAIN (rhs))
3397 make_escape_constraint (CALL_EXPR_STATIC_CHAIN (rhs));
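/* In other words, for a call such as "foo (p, &x)" in non-IPA mode the
   loop above emits, roughly, the constraints "ESCAPED = p" and
   "ESCAPED = &x" for every pointer-typed or pointer-containing
   argument, so that anything reachable from the arguments is treated
   as escaping into the callee.  */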
3400 /* For non-IPA mode, generate constraints necessary for a call
3401 that returns a pointer and assigns it to LHS. This simply makes
3402 the LHS point to global and escaped variables. */
3404 static void
3405 handle_lhs_call (tree lhs, int flags)
3407 VEC(ce_s, heap) *lhsc = NULL;
3408 struct constraint_expr rhsc;
3409 unsigned int j;
3410 struct constraint_expr *lhsp;
3412 get_constraint_for (lhs, &lhsc);
3414 if (flags & ECF_MALLOC)
3416 tree heapvar = heapvar_lookup (lhs);
3417 varinfo_t vi;
3419 if (heapvar == NULL)
3421 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
3422 DECL_EXTERNAL (heapvar) = 1;
3423 get_var_ann (heapvar)->is_heapvar = 1;
3424 if (gimple_referenced_vars (cfun))
3425 add_referenced_var (heapvar);
3426 heapvar_insert (lhs, heapvar);
3429 rhsc.var = create_variable_info_for (heapvar,
3430 alias_get_name (heapvar));
3431 vi = get_varinfo (rhsc.var);
3432 vi->is_artificial_var = 1;
3433 vi->is_heap_var = 1;
3434 rhsc.type = ADDRESSOF;
3435 rhsc.offset = 0;
3437 else
3439 rhsc.var = escaped_id;
3440 rhsc.offset = 0;
3441 rhsc.type = ADDRESSOF;
3443 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3444 process_constraint (new_constraint (*lhsp, rhsc));
3445 VEC_free (ce_s, heap, lhsc);
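/* Put differently: for "p = malloc (n)" the LHS is made to point to a
   fresh artificial HEAP variable, while for other pointer-returning
   calls the LHS is simply made to point to ESCAPED, i.e. to whatever
   may already have leaked out of this function.  This only restates
   the two branches above.  */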
3448 /* For non-IPA mode, generate constraints necessary for a call of a
3449 const function that returns a pointer in the statement STMT. */
3451 static void
3452 handle_const_call (tree stmt)
3454 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3455 tree call = get_call_expr_in (stmt);
3456 VEC(ce_s, heap) *lhsc = NULL;
3457 struct constraint_expr rhsc;
3458 unsigned int j;
3459 struct constraint_expr *lhsp;
3460 tree arg, tmpvar;
3461 call_expr_arg_iterator iter;
3462 struct constraint_expr tmpc;
3464 get_constraint_for (lhs, &lhsc);
3466 /* If this is a nested function then it can return anything. */
3467 if (CALL_EXPR_STATIC_CHAIN (call))
3469 rhsc.var = anything_id;
3470 rhsc.offset = 0;
3471 rhsc.type = ADDRESSOF;
3472 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3473 process_constraint (new_constraint (*lhsp, rhsc));
3474 VEC_free (ce_s, heap, lhsc);
3475 return;
3478 /* We always use a temporary here; otherwise we end up with a quadratic
3479 amount of constraints for
3480 large_struct = const_call (large_struct);
3481 in field-sensitive PTA. */
3482 tmpvar = create_tmp_var_raw (ptr_type_node, "consttmp");
3483 tmpc = get_constraint_exp_for_temp (tmpvar);
3485 /* May return addresses of globals. */
3486 rhsc.var = nonlocal_id;
3487 rhsc.offset = 0;
3488 rhsc.type = ADDRESSOF;
3489 process_constraint (new_constraint (tmpc, rhsc));
3491 /* May return arguments. */
3492 FOR_EACH_CALL_EXPR_ARG (arg, iter, call)
3493 if (could_have_pointers (arg))
3495 VEC(ce_s, heap) *argc = NULL;
3496 struct constraint_expr *argp;
3497 int i;
3499 get_constraint_for (arg, &argc);
3500 for (i = 0; VEC_iterate (ce_s, argc, i, argp); i++)
3501 process_constraint (new_constraint (tmpc, *argp));
3502 VEC_free (ce_s, heap, argc);
3505 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3506 process_constraint (new_constraint (*lhsp, tmpc));
3508 VEC_free (ce_s, heap, lhsc);
3511 /* For non-IPA mode, generate constraints necessary for a call to a
3512 pure function in statement STMT. */
3514 static void
3515 handle_pure_call (tree stmt)
3517 tree call = get_call_expr_in (stmt);
3518 tree arg;
3519 call_expr_arg_iterator iter;
3521 /* Memory reached from pointer arguments is call-used. */
3522 FOR_EACH_CALL_EXPR_ARG (arg, iter, call)
3523 if (could_have_pointers (arg))
3524 make_constraint_to (callused_id, arg);
3526 /* The static chain is used as well. */
3527 if (CALL_EXPR_STATIC_CHAIN (call))
3528 make_constraint_to (callused_id, CALL_EXPR_STATIC_CHAIN (call));
3530 /* If the call returns a pointer it may point to memory reachable
3531 from the arguments.  Not so for malloc functions though.  */
3532 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3533 && could_have_pointers (GIMPLE_STMT_OPERAND (stmt, 0))
3534 && !(call_expr_flags (call) & ECF_MALLOC))
3536 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3537 VEC(ce_s, heap) *lhsc = NULL;
3538 struct constraint_expr rhsc;
3539 struct constraint_expr *lhsp;
3540 unsigned j;
3542 get_constraint_for (lhs, &lhsc);
3544 /* If this is a nested function then it can return anything. */
3545 if (CALL_EXPR_STATIC_CHAIN (call))
3547 rhsc.var = anything_id;
3548 rhsc.offset = 0;
3549 rhsc.type = ADDRESSOF;
3550 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3551 process_constraint (new_constraint (*lhsp, rhsc));
3552 VEC_free (ce_s, heap, lhsc);
3553 return;
3556 /* Else just add the call-used memory here. Escaped variables
3557 and globals will be dealt with in handle_lhs_call. */
3558 rhsc.var = callused_id;
3559 rhsc.offset = 0;
3560 rhsc.type = ADDRESSOF;
3561 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3562 process_constraint (new_constraint (*lhsp, rhsc));
3563 VEC_free (ce_s, heap, lhsc);
3567 /* Walk statement T setting up aliasing constraints according to the
3568 references found in T. This function is the main part of the
3569 constraint builder.  */
3572 static void
3573 find_func_aliases (tree origt)
3575 tree call, t = origt;
3576 VEC(ce_s, heap) *lhsc = NULL;
3577 VEC(ce_s, heap) *rhsc = NULL;
3578 struct constraint_expr *c;
3579 enum escape_type stmt_escape_type;
3581 if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
3582 t = TREE_OPERAND (t, 0);
3584 /* Now build constraints expressions. */
3585 if (TREE_CODE (t) == PHI_NODE)
3587 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))));
3589 /* Only care about pointers and structures containing
3590 pointers. */
3591 if (could_have_pointers (PHI_RESULT (t)))
3593 int i;
3594 unsigned int j;
3596 /* For a phi node, assign all the arguments to
3597 the result. */
3598 get_constraint_for (PHI_RESULT (t), &lhsc);
3599 for (i = 0; i < PHI_NUM_ARGS (t); i++)
3601 tree rhstype;
3602 tree strippedrhs = PHI_ARG_DEF (t, i);
3604 STRIP_NOPS (strippedrhs);
3605 rhstype = TREE_TYPE (strippedrhs);
3606 get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);
3608 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3610 struct constraint_expr *c2;
3611 while (VEC_length (ce_s, rhsc) > 0)
3613 c2 = VEC_last (ce_s, rhsc);
3614 process_constraint (new_constraint (*c, *c2));
3615 VEC_pop (ce_s, rhsc);
3621 /* In IPA mode, we need to generate constraints to pass call
3622 arguments through their calls. There are two cases, either a
3623 GIMPLE_MODIFY_STMT when we are returning a value, or just a plain
3624 CALL_EXPR when we are not.
3626 In non-ipa mode, we need to generate constraints for each
3627 pointer passed by address. */
3628 else if ((call = get_call_expr_in (t)) != NULL_TREE)
3630 int flags = call_expr_flags (call);
3631 if (!in_ipa_mode)
3633 /* Const functions can return their arguments and addresses
3634 of global memory but not of escaped memory. */
3635 if (flags & ECF_CONST)
3637 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT
3638 && could_have_pointers (GIMPLE_STMT_OPERAND (t, 1)))
3639 handle_const_call (t);
3641 else if (flags & ECF_PURE)
3643 handle_pure_call (t);
3644 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT
3645 && could_have_pointers (GIMPLE_STMT_OPERAND (t, 1)))
3646 handle_lhs_call (GIMPLE_STMT_OPERAND (t, 0), flags);
3648 /* Pure functions can return addresses in and of memory
3649 reachable from their arguments, but they are not an escape
3650 point for reachable memory of their arguments. But as we
3651 do not compute call-used memory separately we cannot do
3652 something special here. */
3653 else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3655 handle_rhs_call (GIMPLE_STMT_OPERAND (t, 1));
3656 if (could_have_pointers (GIMPLE_STMT_OPERAND (t, 1)))
3657 handle_lhs_call (GIMPLE_STMT_OPERAND (t, 0), flags);
3659 else
3660 handle_rhs_call (t);
3662 else
3664 tree lhsop;
3665 tree rhsop;
3666 tree arg;
3667 call_expr_arg_iterator iter;
3668 varinfo_t fi;
3669 int i = 1;
3670 tree decl;
3671 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3673 lhsop = GIMPLE_STMT_OPERAND (t, 0);
3674 rhsop = GIMPLE_STMT_OPERAND (t, 1);
3676 else
3678 lhsop = NULL;
3679 rhsop = t;
3681 decl = get_callee_fndecl (rhsop);
3683 /* If we can directly resolve the function being called, do so.
3684 Otherwise, it must be some sort of indirect expression that
3685 we should still be able to handle. */
3686 if (decl)
3688 fi = get_vi_for_tree (decl);
3690 else
3692 decl = CALL_EXPR_FN (rhsop);
3693 fi = get_vi_for_tree (decl);
3696 /* Assign all the passed arguments to the appropriate incoming
3697 parameters of the function. */
3699 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhsop)
3701 struct constraint_expr lhs;
3702 struct constraint_expr *rhsp;
3704 get_constraint_for (arg, &rhsc);
3705 if (TREE_CODE (decl) != FUNCTION_DECL)
3707 lhs.type = DEREF;
3708 lhs.var = fi->id;
3709 lhs.offset = i;
3711 else
3713 lhs.type = SCALAR;
3714 lhs.var = first_vi_for_offset (fi, i)->id;
3715 lhs.offset = 0;
3717 while (VEC_length (ce_s, rhsc) != 0)
3719 rhsp = VEC_last (ce_s, rhsc);
3720 process_constraint (new_constraint (lhs, *rhsp));
3721 VEC_pop (ce_s, rhsc);
3723 i++;
3726 /* If we are returning a value, assign it to the result. */
3727 if (lhsop)
3729 struct constraint_expr rhs;
3730 struct constraint_expr *lhsp;
3731 unsigned int j = 0;
3733 get_constraint_for (lhsop, &lhsc);
3734 if (TREE_CODE (decl) != FUNCTION_DECL)
3736 rhs.type = DEREF;
3737 rhs.var = fi->id;
3738 rhs.offset = i;
3740 else
3742 rhs.type = SCALAR;
3743 rhs.var = first_vi_for_offset (fi, i)->id;
3744 rhs.offset = 0;
3746 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3747 process_constraint (new_constraint (*lhsp, rhs));
3751 /* Otherwise, just a regular assignment statement. */
3752 else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3754 tree lhsop = GIMPLE_STMT_OPERAND (t, 0);
3755 tree rhsop = GIMPLE_STMT_OPERAND (t, 1);
3756 int i;
3758 if (AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
3759 && AGGREGATE_TYPE_P (TREE_TYPE (rhsop)))
3761 do_structure_copy (lhsop, rhsop);
3763 else
3765 /* Only care about operations with pointers, structures
3766 containing pointers, dereferences, and call expressions. */
3767 if (could_have_pointers (lhsop)
3768 || TREE_CODE (rhsop) == CALL_EXPR)
3770 get_constraint_for (lhsop, &lhsc);
3771 switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
3773 /* An RHS that consists of unary operations,
3774 exceptional types, or bare decls/constants gets
3775 handled directly by get_constraint_for.  */
3776 case tcc_reference:
3777 case tcc_declaration:
3778 case tcc_constant:
3779 case tcc_exceptional:
3780 case tcc_expression:
3781 case tcc_vl_exp:
3782 case tcc_unary:
3784 unsigned int j;
3786 get_constraint_for (rhsop, &rhsc);
3787 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3789 struct constraint_expr *c2;
3790 unsigned int k;
3792 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3793 process_constraint (new_constraint (*c, *c2));
3797 break;
3799 case tcc_binary:
3801 /* For pointer arithmetic of the form
3802 PTR + CST, we can simply use PTR's
3803 constraint because pointer arithmetic is
3804 not allowed to go out of bounds. */
3805 if (handle_ptr_arith (lhsc, rhsop))
3806 break;
3808 /* FALLTHRU */
3810 /* Otherwise, walk each operand. Notice that we
3811 can't use the operand interface because we need
3812 to process expressions other than simple operands
3813 (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR). */
3814 default:
3815 for (i = 0; i < TREE_OPERAND_LENGTH (rhsop); i++)
3817 tree op = TREE_OPERAND (rhsop, i);
3818 unsigned int j;
3820 gcc_assert (VEC_length (ce_s, rhsc) == 0);
3821 get_constraint_for (op, &rhsc);
3822 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3824 struct constraint_expr *c2;
3825 while (VEC_length (ce_s, rhsc) > 0)
3827 c2 = VEC_last (ce_s, rhsc);
3828 process_constraint (new_constraint (*c, *c2));
3829 VEC_pop (ce_s, rhsc);
3837 else if (TREE_CODE (t) == CHANGE_DYNAMIC_TYPE_EXPR)
3839 unsigned int j;
3841 get_constraint_for (CHANGE_DYNAMIC_TYPE_LOCATION (t), &lhsc);
3842 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
3843 get_varinfo (c->var)->no_tbaa_pruning = true;
3846 stmt_escape_type = is_escape_site (t);
3847 if (stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
3849 tree rhs;
3850 gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
3851 rhs = GIMPLE_STMT_OPERAND (t, 1);
3852 if (TREE_CODE (rhs) == ADDR_EXPR)
3854 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3855 if (base
3856 && (!DECL_P (base)
3857 || !is_global_var (base)))
3858 make_escape_constraint (rhs);
3860 else if (TREE_CODE (rhs) == SSA_NAME
3861 && POINTER_TYPE_P (TREE_TYPE (rhs)))
3862 make_escape_constraint (rhs);
3863 else if (could_have_pointers (rhs))
3864 make_escape_constraint (rhs);
3866 else if (stmt_escape_type == ESCAPE_BAD_CAST)
3868 tree rhs;
3869 gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
3870 rhs = GIMPLE_STMT_OPERAND (t, 1);
3871 gcc_assert (CONVERT_EXPR_P (rhs)
3872 || TREE_CODE (rhs) == VIEW_CONVERT_EXPR);
3873 rhs = TREE_OPERAND (rhs, 0);
3874 make_escape_constraint (rhs);
3876 else if (stmt_escape_type == ESCAPE_TO_ASM)
3878 tree link;
3879 int i;
3880 for (i = 0, link = ASM_OUTPUTS (t); link; i++, link = TREE_CHAIN (link))
3882 tree op = TREE_VALUE (link);
3883 if (op && could_have_pointers (op))
3884 /* Strictly we'd only need the constraints from ESCAPED and
3885 NONLOCAL. */
3886 make_escape_constraint (op);
3888 for (i = 0, link = ASM_INPUTS (t); link; i++, link = TREE_CHAIN (link))
3890 tree op = TREE_VALUE (link);
3891 if (op && could_have_pointers (op))
3892 /* Strictly we'd only need the constraint to ESCAPED. */
3893 make_escape_constraint (op);
3897 /* After promoting variables and computing aliasing we will
3898 need to re-scan most statements. FIXME: Try to minimize the
3899 number of statements re-scanned. It's not really necessary to
3900 re-scan *all* statements. */
3901 if (!in_ipa_mode)
3902 mark_stmt_modified (origt);
3903 VEC_free (ce_s, heap, rhsc);
3904 VEC_free (ce_s, heap, lhsc);
3908 /* Find the first varinfo in the same variable as START that overlaps with
3909 OFFSET.
3910 Effectively, walk the chain of fields for the variable START to find the
3911 first field that overlaps with OFFSET.
3912 Return NULL if we can't find one. */
3914 static varinfo_t
3915 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3917 varinfo_t curr = start;
3918 while (curr)
3920 /* We may not find a variable in the field list with the actual
3921 offset when we have glommed a structure to a variable.
3922 In that case, however, offset should still be within the size
3923 of the variable. */
3924 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3925 return curr;
3926 curr = curr->next;
3928 return NULL;
3932 /* Insert the varinfo FIELD into the field list for BASE, at the front
3933 of the list. */
3935 static void
3936 insert_into_field_list (varinfo_t base, varinfo_t field)
3938 varinfo_t prev = base;
3939 varinfo_t curr = base->next;
3941 field->next = curr;
3942 prev->next = field;
3945 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3946 offset. */
3948 static void
3949 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3951 varinfo_t prev = base;
3952 varinfo_t curr = base->next;
3954 if (curr == NULL)
3956 prev->next = field;
3957 field->next = NULL;
3959 else
3961 while (curr)
3963 if (field->offset <= curr->offset)
3964 break;
3965 prev = curr;
3966 curr = curr->next;
3968 field->next = prev->next;
3969 prev->next = field;
3973 /* This structure is used during pushing fields onto the fieldstack
3974 to track the offset of the field, since bitpos_of_field gives it
3975 relative to its immediate containing type, and we want it relative
3976 to the ultimate containing object. */
3978 struct fieldoff
3980 /* Type of the field. */
3981 tree type;
3983 /* Size, in bits, of the field. */
3984 tree size;
3986 /* Field. */
3987 tree decl;
3989 /* Offset from the start of the ultimate containing object to this field.  */
3990 HOST_WIDE_INT offset;
3992 typedef struct fieldoff fieldoff_s;
3994 DEF_VEC_O(fieldoff_s);
3995 DEF_VEC_ALLOC_O(fieldoff_s,heap);
3997 /* qsort comparison function for two fieldoff's PA and PB */
3999 static int
4000 fieldoff_compare (const void *pa, const void *pb)
4002 const fieldoff_s *foa = (const fieldoff_s *)pa;
4003 const fieldoff_s *fob = (const fieldoff_s *)pb;
4004 unsigned HOST_WIDE_INT foasize, fobsize;
4006 if (foa->offset < fob->offset)
4007 return -1;
4008 else if (foa->offset > fob->offset)
4009 return 1;
4011 foasize = TREE_INT_CST_LOW (foa->size);
4012 fobsize = TREE_INT_CST_LOW (fob->size);
4013 if (foasize < fobsize)
4014 return -1;
4015 else if (foasize > fobsize)
4016 return 1;
4017 return 0;
4020 /* Sort a fieldstack according to the field offset and sizes. */
4021 static void
4022 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
4024 qsort (VEC_address (fieldoff_s, fieldstack),
4025 VEC_length (fieldoff_s, fieldstack),
4026 sizeof (fieldoff_s),
4027 fieldoff_compare);
4030 /* Return true if V is a tree that we can have subvars for.
4031 Normally, this is any aggregate type. Also complex
4032 types which are not gimple registers can have subvars. */
4034 static inline bool
4035 var_can_have_subvars (const_tree v)
4037 /* Volatile variables should never have subvars. */
4038 if (TREE_THIS_VOLATILE (v))
4039 return false;
4041 /* Non decls or memory tags can never have subvars. */
4042 if (!DECL_P (v) || MTAG_P (v))
4043 return false;
4045 /* Aggregates without overlapping fields can have subvars. */
4046 if (TREE_CODE (TREE_TYPE (v)) == RECORD_TYPE)
4047 return true;
4049 return false;
4052 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
4053 the fields of TYPE onto fieldstack, recording their offsets along
4054 the way.
4056 OFFSET is used to keep track of the offset in this entire
4057 structure, rather than just the immediately containing structure.
4058 Returns the number of fields pushed.
4060 HAS_UNION is set to true if we find a union type as a field of
4061 TYPE. */
4063 static int
4064 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4065 HOST_WIDE_INT offset, bool *has_union)
4067 tree field;
4068 int count = 0;
4070 if (TREE_CODE (type) != RECORD_TYPE)
4071 return 0;
4073 /* If the vector of fields is growing too big, bail out early.
4074 Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, so make
4075 sure this fails.  */
4076 if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
4077 return 0;
4079 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4080 if (TREE_CODE (field) == FIELD_DECL)
4082 bool push = false;
4083 int pushed = 0;
4085 if (has_union
4086 && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4087 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
4088 *has_union = true;
4090 if (!var_can_have_subvars (field))
4091 push = true;
4092 else if (!(pushed = push_fields_onto_fieldstack
4093 (TREE_TYPE (field),
4094 fieldstack,
4095 offset + bitpos_of_field (field),
4096 has_union))
4097 && (DECL_SIZE (field)
4098 && !integer_zerop (DECL_SIZE (field))))
4099 /* Empty structures may have actual size, like in C++.  So
4100 if we didn't push any subfields and the size is
4101 nonzero, push the field onto the stack.  */
4102 push = true;
4104 if (push)
4106 fieldoff_s *pair;
4108 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4109 pair->type = TREE_TYPE (field);
4110 pair->size = DECL_SIZE (field);
4111 pair->decl = field;
4112 pair->offset = offset + bitpos_of_field (field);
4113 count++;
4115 else
4116 count += pushed;
4119 return count;
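/* Worked example, illustrative only and assuming 32-bit int with no
   padding: for

     struct inner { int a; int b; };
     struct outer { struct inner i; int c; };

   calling push_fields_onto_fieldstack on struct outer pushes three
   entries with bit offsets and sizes (0, 32) for i.a, (32, 32) for i.b
   and (64, 32) for c, because the recursive call flattens the nested
   struct inner instead of pushing it as a single field.  */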
4122 /* Create a constraint ID = &FROM. */
4124 static void
4125 make_constraint_from (varinfo_t vi, int from)
4127 struct constraint_expr lhs, rhs;
4129 lhs.var = vi->id;
4130 lhs.offset = 0;
4131 lhs.type = SCALAR;
4133 rhs.var = from;
4134 rhs.offset = 0;
4135 rhs.type = ADDRESSOF;
4136 process_constraint (new_constraint (lhs, rhs));
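/* For example, make_constraint_from (vi, escaped_id), as used below for
   global variables, records the constraint VI = &ESCAPED, i.e. VI may
   point to escaped memory.  */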
4139 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4140 if it is a varargs function. */
4142 static unsigned int
4143 count_num_arguments (tree decl, bool *is_varargs)
4145 unsigned int i = 0;
4146 tree t;
4148 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
4149 t;
4150 t = TREE_CHAIN (t))
4152 if (TREE_VALUE (t) == void_type_node)
4153 break;
4154 i++;
4157 if (!t)
4158 *is_varargs = true;
4159 return i;
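/* Worked example, illustrative only and assuming the usual
   TYPE_ARG_TYPES representation in which a prototyped, non-variadic
   parameter list is terminated by void_type_node:

     int f (int, char *);    returns 2, *IS_VARARGS untouched
     int g (int, ...);       returns 1 and sets *IS_VARARGS
     int h ();               unprototyped: returns 0 and sets *IS_VARARGS  */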
4162 /* Create the variable info node for function DECL, using NAME, and
4163 return the index of the variable we've created for it. */
4165 static unsigned int
4166 create_function_info_for (tree decl, const char *name)
4168 unsigned int index = VEC_length (varinfo_t, varmap);
4169 varinfo_t vi;
4170 tree arg;
4171 unsigned int i;
4172 bool is_varargs = false;
4174 /* Create the variable info. */
4176 vi = new_var_info (decl, index, name);
4177 vi->decl = decl;
4178 vi->offset = 0;
4179 vi->has_union = 0;
4180 vi->size = 1;
4181 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
4182 insert_vi_for_tree (vi->decl, vi);
4183 VEC_safe_push (varinfo_t, heap, varmap, vi);
4185 stats.total_vars++;
4187 /* If it's varargs, we don't know how many arguments it has, so we
4188 can't do much.  */
4190 if (is_varargs)
4192 vi->fullsize = ~0;
4193 vi->size = ~0;
4194 vi->is_unknown_size_var = true;
4195 return index;
4199 arg = DECL_ARGUMENTS (decl);
4201 /* Set up variables for each argument. */
4202 for (i = 1; i < vi->fullsize; i++)
4204 varinfo_t argvi;
4205 const char *newname;
4206 char *tempname;
4207 unsigned int newindex;
4208 tree argdecl = decl;
4210 if (arg)
4211 argdecl = arg;
4213 newindex = VEC_length (varinfo_t, varmap);
4214 asprintf (&tempname, "%s.arg%d", name, i-1);
4215 newname = ggc_strdup (tempname);
4216 free (tempname);
4218 argvi = new_var_info (argdecl, newindex, newname);
4219 argvi->decl = argdecl;
4220 VEC_safe_push (varinfo_t, heap, varmap, argvi);
4221 argvi->offset = i;
4222 argvi->size = 1;
4223 argvi->fullsize = vi->fullsize;
4224 argvi->has_union = false;
4225 insert_into_field_list_sorted (vi, argvi);
4226 stats.total_vars ++;
4227 if (arg)
4229 insert_vi_for_tree (arg, argvi);
4230 arg = TREE_CHAIN (arg);
4234 /* Create a variable for the return var. */
4235 if (DECL_RESULT (decl) != NULL
4236 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
4238 varinfo_t resultvi;
4239 const char *newname;
4240 char *tempname;
4241 unsigned int newindex;
4242 tree resultdecl = decl;
4244 vi->fullsize ++;
4246 if (DECL_RESULT (decl))
4247 resultdecl = DECL_RESULT (decl);
4249 newindex = VEC_length (varinfo_t, varmap);
4250 asprintf (&tempname, "%s.result", name);
4251 newname = ggc_strdup (tempname);
4252 free (tempname);
4254 resultvi = new_var_info (resultdecl, newindex, newname);
4255 resultvi->decl = resultdecl;
4256 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
4257 resultvi->offset = i;
4258 resultvi->size = 1;
4259 resultvi->fullsize = vi->fullsize;
4260 resultvi->has_union = false;
4261 insert_into_field_list_sorted (vi, resultvi);
4262 stats.total_vars ++;
4263 if (DECL_RESULT (decl))
4264 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
4266 return index;
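/* Worked example, illustrative only: for

     int *foo (int *a, int *b);

   this creates the variables foo (offset 0), foo.arg0 (offset 1),
   foo.arg1 (offset 2) and foo.result (offset 3), each of size 1, so
   that constraints can address individual parameters and the return
   value as offsets from foo.  */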
4270 /* Return true if FIELDSTACK contains fields that overlap.
4271 FIELDSTACK is assumed to be sorted by offset. */
4273 static bool
4274 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
4276 fieldoff_s *fo = NULL;
4277 unsigned int i;
4278 HOST_WIDE_INT lastoffset = -1;
4280 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4282 if (fo->offset == lastoffset)
4283 return true;
4284 lastoffset = fo->offset;
4286 return false;
4289 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
4290 This will also create any varinfo structures necessary for fields
4291 of DECL. */
4293 static unsigned int
4294 create_variable_info_for (tree decl, const char *name)
4296 unsigned int index = VEC_length (varinfo_t, varmap);
4297 varinfo_t vi;
4298 tree decltype = TREE_TYPE (decl);
4299 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
4300 bool notokay = false;
4301 bool hasunion;
4302 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
4303 VEC (fieldoff_s,heap) *fieldstack = NULL;
4305 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
4306 return create_function_info_for (decl, name);
4308 hasunion = TREE_CODE (decltype) == UNION_TYPE
4309 || TREE_CODE (decltype) == QUAL_UNION_TYPE;
4310 if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
4312 push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion);
4313 if (hasunion)
4315 VEC_free (fieldoff_s, heap, fieldstack);
4316 notokay = true;
4320 /* If the variable doesn't have subvars, we may end up needing to
4321 sort the field list and create fake variables for all the
4322 fields. */
4323 vi = new_var_info (decl, index, name);
4324 vi->decl = decl;
4325 vi->offset = 0;
4326 vi->has_union = hasunion;
4327 if (!declsize
4328 || TREE_CODE (declsize) != INTEGER_CST
4329 || TREE_CODE (decltype) == UNION_TYPE
4330 || TREE_CODE (decltype) == QUAL_UNION_TYPE)
4332 vi->is_unknown_size_var = true;
4333 vi->fullsize = ~0;
4334 vi->size = ~0;
4336 else
4338 vi->fullsize = TREE_INT_CST_LOW (declsize);
4339 vi->size = vi->fullsize;
4342 insert_vi_for_tree (vi->decl, vi);
4343 VEC_safe_push (varinfo_t, heap, varmap, vi);
4344 if (is_global && (!flag_whole_program || !in_ipa_mode)
4345 && could_have_pointers (decl))
4346 make_constraint_from (vi, escaped_id);
4348 stats.total_vars++;
4349 if (use_field_sensitive
4350 && !notokay
4351 && !vi->is_unknown_size_var
4352 && var_can_have_subvars (decl)
4353 && VEC_length (fieldoff_s, fieldstack) > 1
4354 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
4356 unsigned int newindex = VEC_length (varinfo_t, varmap);
4357 fieldoff_s *fo = NULL;
4358 unsigned int i;
4360 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4362 if (! fo->size
4363 || TREE_CODE (fo->size) != INTEGER_CST
4364 || fo->offset < 0)
4366 notokay = true;
4367 break;
4371 /* We can't sort them if we have a field with a variable sized type,
4372 which will make notokay = true. In that case, we are going to return
4373 without creating varinfos for the fields anyway, so sorting them is a
4374 waste to boot. */
4375 if (!notokay)
4377 sort_fieldstack (fieldstack);
4378 /* Due to some C++ FE issues, like PR 22488, we might end up with
4379 what appear to be overlapping fields even though they,
4380 in reality, do not overlap. Until the C++ FE is fixed,
4381 we will simply disable field-sensitivity for these cases. */
4382 notokay = check_for_overlaps (fieldstack);
4386 if (VEC_length (fieldoff_s, fieldstack) != 0)
4387 fo = VEC_index (fieldoff_s, fieldstack, 0);
4389 if (fo == NULL || notokay)
4391 vi->is_unknown_size_var = 1;
4392 vi->fullsize = ~0;
4393 vi->size = ~0;
4394 VEC_free (fieldoff_s, heap, fieldstack);
4395 return index;
4398 vi->size = TREE_INT_CST_LOW (fo->size);
4399 vi->offset = fo->offset;
4400 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
4401 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
4402 i--)
4404 varinfo_t newvi;
4405 const char *newname = "NULL";
4406 char *tempname;
4408 newindex = VEC_length (varinfo_t, varmap);
4409 if (dump_file)
4411 if (fo->decl)
4412 asprintf (&tempname, "%s.%s",
4413 vi->name, alias_get_name (fo->decl));
4414 else
4415 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC,
4416 vi->name, fo->offset);
4417 newname = ggc_strdup (tempname);
4418 free (tempname);
4420 newvi = new_var_info (decl, newindex, newname);
4421 newvi->offset = fo->offset;
4422 newvi->size = TREE_INT_CST_LOW (fo->size);
4423 newvi->fullsize = vi->fullsize;
4424 insert_into_field_list (vi, newvi);
4425 VEC_safe_push (varinfo_t, heap, varmap, newvi);
4426 if (is_global && (!flag_whole_program || !in_ipa_mode)
4427 && (!fo->decl || could_have_pointers (fo->decl)))
4428 make_constraint_from (newvi, escaped_id);
4430 stats.total_vars++;
4434 VEC_free (fieldoff_s, heap, fieldstack);
4436 return index;
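/* Worked example, illustrative only and assuming 64-bit pointers: for a
   global

     struct { int *p; int *q; } s;

   with field-sensitivity enabled this creates a variable for s covering
   the first field (offset 0, size 64) and a second variable for the
   field at offset 64 (named s.q when dumping is enabled), both with
   fullsize 128; each normally also gets the constraint VAR = &ESCAPED
   because s is a global that may contain pointers.  */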
4439 /* Print out the points-to solution for VAR to FILE. */
4441 void
4442 dump_solution_for_var (FILE *file, unsigned int var)
4444 varinfo_t vi = get_varinfo (var);
4445 unsigned int i;
4446 bitmap_iterator bi;
4448 if (find (var) != var)
4450 varinfo_t vipt = get_varinfo (find (var));
4451 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4453 else
4455 fprintf (file, "%s = { ", vi->name);
4456 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4458 fprintf (file, "%s ", get_varinfo (i)->name);
4460 fprintf (file, "}");
4461 if (vi->no_tbaa_pruning)
4462 fprintf (file, " no-tbaa-pruning");
4463 fprintf (file, "\n");
4467 /* Print the points-to solution for VAR to stdout. */
4469 void
4470 debug_solution_for_var (unsigned int var)
4472 dump_solution_for_var (stdout, var);
4475 /* Create varinfo structures for all of the variables in the
4476 function for intraprocedural mode. */
4478 static void
4479 intra_create_variable_infos (void)
4481 tree t;
4482 struct constraint_expr lhs, rhs;
4484 /* For each incoming pointer argument ARG, create the constraint ARG = &NONLOCAL,
4485 or ARG = &PARM_NOALIAS for a fresh dummy variable if flag_argument_noalias is set. */
4486 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
4488 varinfo_t p;
4490 if (!could_have_pointers (t))
4491 continue;
4493 /* If flag_argument_noalias is set, then function pointer
4494 arguments are guaranteed not to point to each other. In that
4495 case, create an artificial variable PARM_NOALIAS and the
4496 constraint ARG = &PARM_NOALIAS. */
4497 if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
4499 varinfo_t vi;
4500 tree heapvar = heapvar_lookup (t);
4502 lhs.offset = 0;
4503 lhs.type = SCALAR;
4504 lhs.var = get_vi_for_tree (t)->id;
4506 if (heapvar == NULL_TREE)
4508 var_ann_t ann;
4509 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
4510 "PARM_NOALIAS");
4511 DECL_EXTERNAL (heapvar) = 1;
4512 if (gimple_referenced_vars (cfun))
4513 add_referenced_var (heapvar);
4515 heapvar_insert (t, heapvar);
4517 ann = get_var_ann (heapvar);
4518 if (flag_argument_noalias == 1)
4519 ann->noalias_state = NO_ALIAS;
4520 else if (flag_argument_noalias == 2)
4521 ann->noalias_state = NO_ALIAS_GLOBAL;
4522 else if (flag_argument_noalias == 3)
4523 ann->noalias_state = NO_ALIAS_ANYTHING;
4524 else
4525 gcc_unreachable ();
4528 vi = get_vi_for_tree (heapvar);
4529 vi->is_artificial_var = 1;
4530 vi->is_heap_var = 1;
4531 rhs.var = vi->id;
4532 rhs.type = ADDRESSOF;
4533 rhs.offset = 0;
4534 for (p = get_varinfo (lhs.var); p; p = p->next)
4536 struct constraint_expr temp = lhs;
4537 temp.var = p->id;
4538 process_constraint (new_constraint (temp, rhs));
4541 else
4543 varinfo_t arg_vi = get_vi_for_tree (t);
4545 for (p = arg_vi; p; p = p->next)
4546 make_constraint_from (p, nonlocal_id);
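/* Worked example, illustrative only: for

     void f (int *p);

   the loop above generates the constraint p = &NONLOCAL, i.e. the
   incoming pointer may point to any nonlocal memory.  When
   flag_argument_noalias is nonzero it instead creates a PARM_NOALIAS
   heap variable and generates p = &PARM_NOALIAS.  */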
4551 /* Structure used to put solution bitmaps in a hashtable so they can
4552 be shared among variables with the same points-to set. */
4554 typedef struct shared_bitmap_info
4556 bitmap pt_vars;
4557 hashval_t hashcode;
4558 } *shared_bitmap_info_t;
4559 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
4561 static htab_t shared_bitmap_table;
4563 /* Hash function for a shared_bitmap_info_t */
4565 static hashval_t
4566 shared_bitmap_hash (const void *p)
4568 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
4569 return bi->hashcode;
4572 /* Equality function for two shared_bitmap_info_t's. */
4574 static int
4575 shared_bitmap_eq (const void *p1, const void *p2)
4577 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
4578 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
4579 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
4582 /* Look up a bitmap in the shared bitmap hashtable, and return an already
4583 existing instance if there is one, NULL otherwise. */
4585 static bitmap
4586 shared_bitmap_lookup (bitmap pt_vars)
4588 void **slot;
4589 struct shared_bitmap_info sbi;
4591 sbi.pt_vars = pt_vars;
4592 sbi.hashcode = bitmap_hash (pt_vars);
4594 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
4595 sbi.hashcode, NO_INSERT);
4596 if (!slot)
4597 return NULL;
4598 else
4599 return ((shared_bitmap_info_t) *slot)->pt_vars;
4603 /* Add a bitmap to the shared bitmap hashtable. */
4605 static void
4606 shared_bitmap_add (bitmap pt_vars)
4608 void **slot;
4609 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
4611 sbi->pt_vars = pt_vars;
4612 sbi->hashcode = bitmap_hash (pt_vars);
4614 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
4615 sbi->hashcode, INSERT);
4616 gcc_assert (!*slot);
4617 *slot = (void *) sbi;
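/* Sketch of the intended usage pattern, mirroring find_what_p_points_to
   below: build a candidate bitmap, look it up, and either register it
   or reuse the already-shared copy:

     bitmap result = shared_bitmap_lookup (candidate);
     if (!result)
       shared_bitmap_add (candidate);
     else
       candidate = result;

   so that variables with identical points-to sets share one bitmap.  */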
4621 /* Set bits in INTO corresponding to the variable uids in solution set
4622 FROM, which came from variable PTR.
4623 For variables that are actually dereferenced, we also use type
4624 based alias analysis to prune the points-to sets.
4625 IS_DEREFED is true if PTR was directly dereferenced, which we use to
4626 help determine whether we are allowed to prune using TBAA.
4627 If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
4628 the from set. */
4630 static void
4631 set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
4632 bool no_tbaa_pruning)
4634 unsigned int i;
4635 bitmap_iterator bi;
4637 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
4639 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4641 varinfo_t vi = get_varinfo (i);
4643 /* The only artificial variables that are allowed in a may-alias
4644 set are heap variables. */
4645 if (vi->is_artificial_var && !vi->is_heap_var)
4646 continue;
4648 if (TREE_CODE (vi->decl) == VAR_DECL
4649 || TREE_CODE (vi->decl) == PARM_DECL
4650 || TREE_CODE (vi->decl) == RESULT_DECL)
4652 /* Just add VI->DECL to the alias set.
4653 Don't type prune artificial vars or points-to sets
4654 for pointers that have not been dereferenced or with
4655 type-based pruning disabled. */
4656 if (vi->is_artificial_var
4657 || !is_derefed
4658 || no_tbaa_pruning)
4659 bitmap_set_bit (into, DECL_UID (vi->decl));
4660 else
4662 alias_set_type var_alias_set, mem_alias_set;
4663 var_alias_set = get_alias_set (vi->decl);
4664 mem_alias_set = get_alias_set (TREE_TYPE (TREE_TYPE (ptr)));
4665 if (may_alias_p (SSA_NAME_VAR (ptr), mem_alias_set,
4666 vi->decl, var_alias_set, true))
4667 bitmap_set_bit (into, DECL_UID (vi->decl));
4674 static bool have_alias_info = false;
4676 /* The list of SMT's that are in use by our pointer variables. This
4677 is the set of SMT's for all pointers that can point to anything. */
4678 static bitmap used_smts;
4680 /* Because the ordering of points-to set calculation and SMT
4681 calculation is somewhat co-dependent, we can't just calculate SMT
4682 used info whenever we want; we have to calculate it around the time
4683 that find_what_p_points_to is called. */
4685 /* Mark which SMT's are in use by points-to anything variables. */
4687 void
4688 set_used_smts (void)
4690 int i;
4691 varinfo_t vi;
4692 used_smts = BITMAP_ALLOC (&pta_obstack);
4694 for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); i++)
4696 tree var = vi->decl;
4697 varinfo_t withsolution = get_varinfo (find (i));
4698 tree smt;
4699 var_ann_t va;
4700 struct ptr_info_def *pi = NULL;
4702 /* For parm decls, the pointer info may be under the default
4703 def. */
4704 if (TREE_CODE (vi->decl) == PARM_DECL
4705 && gimple_default_def (cfun, var))
4706 pi = SSA_NAME_PTR_INFO (gimple_default_def (cfun, var));
4707 else if (TREE_CODE (var) == SSA_NAME)
4708 pi = SSA_NAME_PTR_INFO (var);
4710 /* Skip the special variables and those that can't be aliased. */
4711 if (vi->is_special_var
4712 || !SSA_VAR_P (var)
4713 || (pi && !pi->memory_tag_needed)
4714 || (TREE_CODE (var) == VAR_DECL && !may_be_aliased (var))
4715 || !POINTER_TYPE_P (TREE_TYPE (var)))
4716 continue;
4718 if (TREE_CODE (var) == SSA_NAME)
4719 var = SSA_NAME_VAR (var);
4721 va = var_ann (var);
4722 if (!va)
4723 continue;
4725 smt = va->symbol_mem_tag;
4726 if (smt && bitmap_bit_p (withsolution->solution, anything_id))
4727 bitmap_set_bit (used_smts, DECL_UID (smt));
4731 /* Given a pointer variable P, fill in its points-to set, or return
4732 false if we can't.
4733 Rather than return false for variables that point-to anything, we
4734 instead find the corresponding SMT, and merge in its aliases. In
4735 addition to these aliases, we also set the bits for the SMT's
4736 themselves and their subsets, as SMT's are still in use by
4737 non-SSA_NAME's, and pruning may eliminate every one of their
4738 aliases. In such a case, if we did not include the right set of
4739 SMT's in the points-to set of the variable, we'd end up with
4740 statements that do not conflict but should. */
4742 bool
4743 find_what_p_points_to (tree p)
4745 tree lookup_p = p;
4746 varinfo_t vi;
4748 if (!have_alias_info)
4749 return false;
4751 /* For parameters, get at the points-to set for the actual parm
4752 decl. */
4753 if (TREE_CODE (p) == SSA_NAME
4754 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4755 && SSA_NAME_IS_DEFAULT_DEF (p))
4756 lookup_p = SSA_NAME_VAR (p);
4758 vi = lookup_vi_for_tree (lookup_p);
4759 if (vi)
4761 if (vi->is_artificial_var)
4762 return false;
4764 /* See if this is a field or a structure. */
4765 if (vi->size != vi->fullsize)
4767 /* Nothing currently asks about structure fields directly,
4768 but when something does, we will need code here to hand back
4769 the points-to set. */
4770 return false;
4772 else
4774 struct ptr_info_def *pi = get_ptr_info (p);
4775 unsigned int i;
4776 bitmap_iterator bi;
4777 bool was_pt_anything = false;
4778 bitmap finished_solution;
4779 bitmap result;
4781 if (!pi->memory_tag_needed)
4782 return false;
4784 /* This variable may have been collapsed, let's get the real
4785 variable. */
4786 vi = get_varinfo (find (vi->id));
4788 /* Translate artificial variables into SSA_NAME_PTR_INFO
4789 attributes. */
4790 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4792 varinfo_t vi = get_varinfo (i);
4794 if (vi->is_artificial_var)
4796 /* FIXME. READONLY should be handled better so that
4797 flow insensitive aliasing can disregard writable
4798 aliases. */
4799 if (vi->id == nothing_id)
4800 pi->pt_null = 1;
4801 else if (vi->id == anything_id
4802 || vi->id == nonlocal_id
4803 || vi->id == escaped_id
4804 || vi->id == callused_id)
4805 was_pt_anything = 1;
4806 else if (vi->id == readonly_id)
4807 was_pt_anything = 1;
4808 else if (vi->id == integer_id)
4809 was_pt_anything = 1;
4810 else if (vi->is_heap_var)
4811 pi->pt_global_mem = 1;
4815 /* Instead of doing extra work, simply do not create
4816 points-to information for pt_anything pointers. This
4817 will cause the operand scanner to fall back to the
4818 type-based SMT and its aliases, which is the best
4819 we could do here for the points-to set as well. */
4820 if (was_pt_anything)
4821 return false;
4823 /* Share the final set of variables when possible. */
4824 finished_solution = BITMAP_GGC_ALLOC ();
4825 stats.points_to_sets_created++;
4827 set_uids_in_ptset (p, finished_solution, vi->solution,
4828 pi->is_dereferenced,
4829 vi->no_tbaa_pruning);
4830 result = shared_bitmap_lookup (finished_solution);
4832 if (!result)
4834 shared_bitmap_add (finished_solution);
4835 pi->pt_vars = finished_solution;
4837 else
4839 pi->pt_vars = result;
4840 bitmap_clear (finished_solution);
4843 if (bitmap_empty_p (pi->pt_vars))
4844 pi->pt_vars = NULL;
4846 return true;
4850 return false;
4853 /* Mark the ESCAPED solution as call clobbered. Returns false if
4854 pt_anything escaped, in which case all locals that have their
4855 address taken need to be marked call clobbered as well. */
4857 bool
4858 clobber_what_escaped (void)
4860 varinfo_t vi;
4861 unsigned int i;
4862 bitmap_iterator bi;
4864 if (!have_alias_info)
4865 return false;
4867 /* This variable may have been collapsed, let's get the real
4868 variable for escaped_id. */
4869 vi = get_varinfo (find (escaped_id));
4871 /* If call-used memory escapes we need to include it in the
4872 set of escaped variables. This can happen if a pure
4873 function returns a pointer and this pointer escapes. */
4874 if (bitmap_bit_p (vi->solution, callused_id))
4876 varinfo_t cu_vi = get_varinfo (find (callused_id));
4877 bitmap_ior_into (vi->solution, cu_vi->solution);
4880 /* Mark variables in the solution call-clobbered. */
4881 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4883 varinfo_t vi = get_varinfo (i);
4885 if (vi->is_artificial_var)
4887 /* nothing_id and readonly_id do not cause any
4888 call clobber ops. For anything_id and integer_id
4889 we need to clobber all addressable vars. */
4890 if (vi->id == anything_id
4891 || vi->id == integer_id)
4892 return false;
4895 /* Only artificial heap-vars are further interesting. */
4896 if (vi->is_artificial_var && !vi->is_heap_var)
4897 continue;
4899 if ((TREE_CODE (vi->decl) == VAR_DECL
4900 || TREE_CODE (vi->decl) == PARM_DECL
4901 || TREE_CODE (vi->decl) == RESULT_DECL)
4902 && !unmodifiable_var_p (vi->decl))
4903 mark_call_clobbered (vi->decl, ESCAPE_TO_CALL);
4906 return true;
4909 /* Compute the call-used variables. */
4911 void
4912 compute_call_used_vars (void)
4914 varinfo_t vi;
4915 unsigned int i;
4916 bitmap_iterator bi;
4917 bool has_anything_id = false;
4919 if (!have_alias_info)
4920 return;
4922 /* This variable may have been collapsed, let's get the real
4923 variable for callused_id. */
4924 vi = get_varinfo (find (callused_id));
4926 /* Mark variables in the solution call-used. */
4927 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4929 varinfo_t vi = get_varinfo (i);
4931 if (vi->is_artificial_var)
4933 /* For anything_id and integer_id we need to make
4934 all local addressable vars call-used. */
4935 if (vi->id == anything_id
4936 || vi->id == integer_id)
4937 has_anything_id = true;
4940 /* Only artificial heap-vars are further interesting. */
4941 if (vi->is_artificial_var && !vi->is_heap_var)
4942 continue;
4944 if ((TREE_CODE (vi->decl) == VAR_DECL
4945 || TREE_CODE (vi->decl) == PARM_DECL
4946 || TREE_CODE (vi->decl) == RESULT_DECL)
4947 && !unmodifiable_var_p (vi->decl))
4948 bitmap_set_bit (gimple_call_used_vars (cfun), DECL_UID (vi->decl));
4951 /* If anything is call-used, add all addressable locals to the set. */
4952 if (has_anything_id)
4953 bitmap_ior_into (gimple_call_used_vars (cfun),
4954 gimple_addressable_vars (cfun));
4958 /* Dump points-to information to OUTFILE. */
4960 void
4961 dump_sa_points_to_info (FILE *outfile)
4963 unsigned int i;
4965 fprintf (outfile, "\nPoints-to sets\n\n");
4967 if (dump_flags & TDF_STATS)
4969 fprintf (outfile, "Stats:\n");
4970 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
4971 fprintf (outfile, "Non-pointer vars: %d\n",
4972 stats.nonpointer_vars);
4973 fprintf (outfile, "Statically unified vars: %d\n",
4974 stats.unified_vars_static);
4975 fprintf (outfile, "Dynamically unified vars: %d\n",
4976 stats.unified_vars_dynamic);
4977 fprintf (outfile, "Iterations: %d\n", stats.iterations);
4978 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
4979 fprintf (outfile, "Number of implicit edges: %d\n",
4980 stats.num_implicit_edges);
4983 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
4984 dump_solution_for_var (outfile, i);
4988 /* Debug points-to information to stderr. */
4990 void
4991 debug_sa_points_to_info (void)
4993 dump_sa_points_to_info (stderr);
4997 /* Initialize the always-existing constraint variables for NULL,
4998 ANYTHING, READONLY, ESCAPED, NONLOCAL, CALLUSED and INTEGER. */
5000 static void
5001 init_base_vars (void)
5003 struct constraint_expr lhs, rhs;
5005 /* Create the NULL variable, used to represent that a variable points
5006 to NULL. */
5007 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
5008 var_nothing = new_var_info (nothing_tree, nothing_id, "NULL");
5009 insert_vi_for_tree (nothing_tree, var_nothing);
5010 var_nothing->is_artificial_var = 1;
5011 var_nothing->offset = 0;
5012 var_nothing->size = ~0;
5013 var_nothing->fullsize = ~0;
5014 var_nothing->is_special_var = 1;
5015 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
5017 /* Create the ANYTHING variable, used to represent that a variable
5018 points to some unknown piece of memory. */
5019 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
5020 var_anything = new_var_info (anything_tree, anything_id, "ANYTHING");
5021 insert_vi_for_tree (anything_tree, var_anything);
5022 var_anything->is_artificial_var = 1;
5023 var_anything->size = ~0;
5024 var_anything->offset = 0;
5025 var_anything->next = NULL;
5026 var_anything->fullsize = ~0;
5027 var_anything->is_special_var = 1;
5029 /* Anything points to anything. This makes deref constraints just
5030 work in the presence of linked lists and other p = *p type loops,
5031 by saying that *ANYTHING = ANYTHING. */
5032 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
5033 lhs.type = SCALAR;
5034 lhs.var = anything_id;
5035 lhs.offset = 0;
5036 rhs.type = ADDRESSOF;
5037 rhs.var = anything_id;
5038 rhs.offset = 0;
5040 /* This specifically does not use process_constraint because
5041 process_constraint ignores all anything = anything constraints, since all
5042 but this one are redundant. */
5043 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
5045 /* Create the READONLY variable, used to represent that a variable
5046 points to readonly memory. */
5047 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
5048 var_readonly = new_var_info (readonly_tree, readonly_id, "READONLY");
5049 var_readonly->is_artificial_var = 1;
5050 var_readonly->offset = 0;
5051 var_readonly->size = ~0;
5052 var_readonly->fullsize = ~0;
5053 var_readonly->next = NULL;
5054 var_readonly->is_special_var = 1;
5055 insert_vi_for_tree (readonly_tree, var_readonly);
5056 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
5058 /* readonly memory points to anything, in order to make deref
5059 easier. In reality, it points to anything the particular
5060 readonly variable can point to, but we don't track this
5061 separately. */
5062 lhs.type = SCALAR;
5063 lhs.var = readonly_id;
5064 lhs.offset = 0;
5065 rhs.type = ADDRESSOF;
5066 rhs.var = readonly_id; /* FIXME */
5067 rhs.offset = 0;
5068 process_constraint (new_constraint (lhs, rhs));
5070 /* Create the ESCAPED variable, used to represent the set of escaped
5071 memory. */
5072 escaped_tree = create_tmp_var_raw (void_type_node, "ESCAPED");
5073 var_escaped = new_var_info (escaped_tree, escaped_id, "ESCAPED");
5074 insert_vi_for_tree (escaped_tree, var_escaped);
5075 var_escaped->is_artificial_var = 1;
5076 var_escaped->offset = 0;
5077 var_escaped->size = ~0;
5078 var_escaped->fullsize = ~0;
5079 var_escaped->is_special_var = 0;
5080 VEC_safe_push (varinfo_t, heap, varmap, var_escaped);
5081 gcc_assert (VEC_index (varinfo_t, varmap, 3) == var_escaped);
5083 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
5084 lhs.type = SCALAR;
5085 lhs.var = escaped_id;
5086 lhs.offset = 0;
5087 rhs.type = DEREF;
5088 rhs.var = escaped_id;
5089 rhs.offset = 0;
5090 process_constraint (new_constraint (lhs, rhs));
5092 /* Create the NONLOCAL variable, used to represent the set of nonlocal
5093 memory. */
5094 nonlocal_tree = create_tmp_var_raw (void_type_node, "NONLOCAL");
5095 var_nonlocal = new_var_info (nonlocal_tree, nonlocal_id, "NONLOCAL");
5096 insert_vi_for_tree (nonlocal_tree, var_nonlocal);
5097 var_nonlocal->is_artificial_var = 1;
5098 var_nonlocal->offset = 0;
5099 var_nonlocal->size = ~0;
5100 var_nonlocal->fullsize = ~0;
5101 var_nonlocal->is_special_var = 1;
5102 VEC_safe_push (varinfo_t, heap, varmap, var_nonlocal);
5104 /* Nonlocal memory points to escaped (which includes nonlocal),
5105 in order to make deref easier. */
5106 lhs.type = SCALAR;
5107 lhs.var = nonlocal_id;
5108 lhs.offset = 0;
5109 rhs.type = ADDRESSOF;
5110 rhs.var = escaped_id;
5111 rhs.offset = 0;
5112 process_constraint (new_constraint (lhs, rhs));
5114 /* Create the CALLUSED variable, used to represent the set of call-used
5115 memory. */
5116 callused_tree = create_tmp_var_raw (void_type_node, "CALLUSED");
5117 var_callused = new_var_info (callused_tree, callused_id, "CALLUSED");
5118 insert_vi_for_tree (callused_tree, var_callused);
5119 var_callused->is_artificial_var = 1;
5120 var_callused->offset = 0;
5121 var_callused->size = ~0;
5122 var_callused->fullsize = ~0;
5123 var_callused->is_special_var = 0;
5124 VEC_safe_push (varinfo_t, heap, varmap, var_callused);
5126 /* CALLUSED = *CALLUSED, because call-used is may-deref'd at calls, etc. */
5127 lhs.type = SCALAR;
5128 lhs.var = callused_id;
5129 lhs.offset = 0;
5130 rhs.type = DEREF;
5131 rhs.var = callused_id;
5132 rhs.offset = 0;
5133 process_constraint (new_constraint (lhs, rhs));
5135 /* Create the INTEGER variable, used to represent that a variable points
5136 to an INTEGER. */
5137 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
5138 var_integer = new_var_info (integer_tree, integer_id, "INTEGER");
5139 insert_vi_for_tree (integer_tree, var_integer);
5140 var_integer->is_artificial_var = 1;
5141 var_integer->size = ~0;
5142 var_integer->fullsize = ~0;
5143 var_integer->offset = 0;
5144 var_integer->next = NULL;
5145 var_integer->is_special_var = 1;
5146 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
5148 /* INTEGER = ANYTHING, because we don't know where a dereference of
5149 a random integer will point to. */
5150 lhs.type = SCALAR;
5151 lhs.var = integer_id;
5152 lhs.offset = 0;
5153 rhs.type = ADDRESSOF;
5154 rhs.var = anything_id;
5155 rhs.offset = 0;
5156 process_constraint (new_constraint (lhs, rhs));
5158 /* *ESCAPED = &ESCAPED. This is true because we have to assume
5159 everything pointed to by escaped can also point to escaped. */
5160 lhs.type = DEREF;
5161 lhs.var = escaped_id;
5162 lhs.offset = 0;
5163 rhs.type = ADDRESSOF;
5164 rhs.var = escaped_id;
5165 rhs.offset = 0;
5166 process_constraint (new_constraint (lhs, rhs));
5168 /* *ESCAPED = &NONLOCAL. This is true because we have to assume
5169 everything pointed to by escaped can also point to nonlocal. */
5170 lhs.type = DEREF;
5171 lhs.var = escaped_id;
5172 lhs.offset = 0;
5173 rhs.type = ADDRESSOF;
5174 rhs.var = nonlocal_id;
5175 rhs.offset = 0;
5176 process_constraint (new_constraint (lhs, rhs));
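/* In the constraint notation used throughout this file, init_base_vars
   has now seeded the solver with:

     ANYTHING = &ANYTHING
     READONLY = &READONLY
     ESCAPED  = *ESCAPED
     NONLOCAL = &ESCAPED
     CALLUSED = *CALLUSED
     INTEGER  = &ANYTHING
     *ESCAPED = &ESCAPED
     *ESCAPED = &NONLOCAL

   and every later constraint builds on these.  */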
5179 /* Initialize things necessary to perform PTA */
5181 static void
5182 init_alias_vars (void)
5184 bitmap_obstack_initialize (&pta_obstack);
5185 bitmap_obstack_initialize (&oldpta_obstack);
5186 bitmap_obstack_initialize (&predbitmap_obstack);
5188 constraint_pool = create_alloc_pool ("Constraint pool",
5189 sizeof (struct constraint), 30);
5190 variable_info_pool = create_alloc_pool ("Variable info pool",
5191 sizeof (struct variable_info), 30);
5192 constraints = VEC_alloc (constraint_t, heap, 8);
5193 varmap = VEC_alloc (varinfo_t, heap, 8);
5194 vi_for_tree = pointer_map_create ();
5196 memset (&stats, 0, sizeof (stats));
5197 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
5198 shared_bitmap_eq, free);
5199 init_base_vars ();
5202 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
5203 predecessor edges. */
5205 static void
5206 remove_preds_and_fake_succs (constraint_graph_t graph)
5208 unsigned int i;
5210 /* Clear the implicit ref and address nodes from the successor
5211 lists. */
5212 for (i = 0; i < FIRST_REF_NODE; i++)
5214 if (graph->succs[i])
5215 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
5216 FIRST_REF_NODE * 2);
5219 /* Free the successor list for the ref nodes. */
5220 for (i = FIRST_REF_NODE; i < graph->size; i++)
5222 if (graph->succs[i])
5223 BITMAP_FREE (graph->succs[i]);
5226 /* Now reallocate the successor list to its new size, and blow away
5227 the predecessor bitmaps. */
5228 graph->size = VEC_length (varinfo_t, varmap);
5229 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
5231 free (graph->implicit_preds);
5232 graph->implicit_preds = NULL;
5233 free (graph->preds);
5234 graph->preds = NULL;
5235 bitmap_obstack_release (&predbitmap_obstack);
5238 /* Compute the set of variables we can't TBAA prune. */
5240 static void
5241 compute_tbaa_pruning (void)
5243 unsigned int size = VEC_length (varinfo_t, varmap);
5244 unsigned int i;
5245 bool any;
5247 changed_count = 0;
5248 changed = sbitmap_alloc (size);
5249 sbitmap_zero (changed);
5251 /* Mark all initial no_tbaa_pruning nodes as changed. */
5252 any = false;
5253 for (i = 0; i < size; ++i)
5255 varinfo_t ivi = get_varinfo (i);
5257 if (find (i) == i && ivi->no_tbaa_pruning)
5259 any = true;
5260 if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
5261 || VEC_length (constraint_t, graph->complex[i]) > 0)
5263 SET_BIT (changed, i);
5264 ++changed_count;
5269 while (changed_count > 0)
5271 struct topo_info *ti = init_topo_info ();
5272 ++stats.iterations;
5274 compute_topo_order (graph, ti);
5276 while (VEC_length (unsigned, ti->topo_order) != 0)
5278 bitmap_iterator bi;
5280 i = VEC_pop (unsigned, ti->topo_order);
5282 /* If this variable is not a representative, skip it. */
5283 if (find (i) != i)
5284 continue;
5286 /* If the node has changed, we need to process the complex
5287 constraints and outgoing edges again. */
5288 if (TEST_BIT (changed, i))
5290 unsigned int j;
5291 constraint_t c;
5292 VEC(constraint_t,heap) *complex = graph->complex[i];
5294 RESET_BIT (changed, i);
5295 --changed_count;
5297 /* Process the complex copy constraints. */
5298 for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
5300 if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
5302 varinfo_t lhsvi = get_varinfo (find (c->lhs.var));
5304 if (!lhsvi->no_tbaa_pruning)
5306 lhsvi->no_tbaa_pruning = true;
5307 if (!TEST_BIT (changed, lhsvi->id))
5309 SET_BIT (changed, lhsvi->id);
5310 ++changed_count;
5316 /* Propagate to all successors. */
5317 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
5319 unsigned int to = find (j);
5320 varinfo_t tovi = get_varinfo (to);
5322 /* Don't propagate to ourselves. */
5323 if (to == i)
5324 continue;
5326 if (!tovi->no_tbaa_pruning)
5328 tovi->no_tbaa_pruning = true;
5329 if (!TEST_BIT (changed, to))
5331 SET_BIT (changed, to);
5332 ++changed_count;
5339 free_topo_info (ti);
5342 sbitmap_free (changed);
5344 if (any)
5346 for (i = 0; i < size; ++i)
5348 varinfo_t ivi = get_varinfo (i);
5349 varinfo_t ivip = get_varinfo (find (i));
5351 if (ivip->no_tbaa_pruning)
5353 tree var = ivi->decl;
5355 if (TREE_CODE (var) == SSA_NAME)
5356 var = SSA_NAME_VAR (var);
5358 if (POINTER_TYPE_P (TREE_TYPE (var)))
5360 DECL_NO_TBAA_P (var) = 1;
5362 /* Tell the RTL layer that this pointer can alias
5363 anything. */
5364 DECL_POINTER_ALIAS_SET (var) = 0;
5371 /* Create points-to sets for the current function. See the comments
5372 at the start of the file for an algorithmic overview. */
5374 void
5375 compute_points_to_sets (void)
5377 struct scc_info *si;
5378 basic_block bb;
5380 timevar_push (TV_TREE_PTA);
5382 init_alias_vars ();
5383 init_alias_heapvars ();
5385 intra_create_variable_infos ();
5387 /* Now walk all statements and derive aliases. */
5388 FOR_EACH_BB (bb)
5390 block_stmt_iterator bsi;
5391 tree phi;
5393 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5394 if (is_gimple_reg (PHI_RESULT (phi)))
5395 find_func_aliases (phi);
5397 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
5399 tree stmt = bsi_stmt (bsi);
5401 find_func_aliases (stmt);
5403 /* The information in CHANGE_DYNAMIC_TYPE_EXPR nodes has now
5404 been captured, and we can remove them. */
5405 if (TREE_CODE (stmt) == CHANGE_DYNAMIC_TYPE_EXPR)
5406 bsi_remove (&bsi, true);
5407 else
5408 bsi_next (&bsi);
5413 if (dump_file)
5415 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5416 dump_constraints (dump_file);
5419 if (dump_file)
5420 fprintf (dump_file,
5421 "\nCollapsing static cycles and doing variable "
5422 "substitution\n");
5424 init_graph (VEC_length (varinfo_t, varmap) * 2);
5426 if (dump_file)
5427 fprintf (dump_file, "Building predecessor graph\n");
5428 build_pred_graph ();
5430 if (dump_file)
5431 fprintf (dump_file, "Detecting pointer and location "
5432 "equivalences\n");
5433 si = perform_var_substitution (graph);
5435 if (dump_file)
5436 fprintf (dump_file, "Rewriting constraints and unifying "
5437 "variables\n");
5438 rewrite_constraints (graph, si);
5439 free_var_substitution_info (si);
5441 build_succ_graph ();
5442 move_complex_constraints (graph);
5444 if (dump_file)
5445 fprintf (dump_file, "Uniting pointer but not location equivalent "
5446 "variables\n");
5447 unite_pointer_equivalences (graph);
5449 if (dump_file)
5450 fprintf (dump_file, "Finding indirect cycles\n");
5451 find_indirect_cycles (graph);
5453 /* Implicit nodes and predecessors are no longer necessary at this
5454 point. */
5455 remove_preds_and_fake_succs (graph);
5457 if (dump_file)
5458 fprintf (dump_file, "Solving graph\n");
5460 solve_graph (graph);
5462 compute_tbaa_pruning ();
5464 if (dump_file)
5465 dump_sa_points_to_info (dump_file);
5467 have_alias_info = true;
5469 timevar_pop (TV_TREE_PTA);
5473 /* Delete created points-to sets. */
5475 void
5476 delete_points_to_sets (void)
5478 unsigned int i;
5480 htab_delete (shared_bitmap_table);
5481 if (dump_file && (dump_flags & TDF_STATS))
5482 fprintf (dump_file, "Points to sets created:%d\n",
5483 stats.points_to_sets_created);
5485 pointer_map_destroy (vi_for_tree);
5486 bitmap_obstack_release (&pta_obstack);
5487 VEC_free (constraint_t, heap, constraints);
5489 for (i = 0; i < graph->size; i++)
5490 VEC_free (constraint_t, heap, graph->complex[i]);
5491 free (graph->complex);
5493 free (graph->rep);
5494 free (graph->succs);
5495 free (graph->pe);
5496 free (graph->pe_rep);
5497 free (graph->indirect_cycles);
5498 free (graph);
5500 VEC_free (varinfo_t, heap, varmap);
5501 free_alloc_pool (variable_info_pool);
5502 free_alloc_pool (constraint_pool);
5503 have_alias_info = false;
5506 /* Return true if we should execute IPA PTA. */
5507 static bool
5508 gate_ipa_pta (void)
5510 return (flag_unit_at_a_time != 0
5511 && flag_ipa_pta
5512 /* Don't bother doing anything if the program has errors. */
5513 && !(errorcount || sorrycount));
5516 /* Execute the driver for IPA PTA. */
5517 static unsigned int
5518 ipa_pta_execute (void)
5520 struct cgraph_node *node;
5521 struct scc_info *si;
5523 in_ipa_mode = 1;
5524 init_alias_heapvars ();
5525 init_alias_vars ();
5527 for (node = cgraph_nodes; node; node = node->next)
5529 if (!node->analyzed || cgraph_is_master_clone (node))
5531 unsigned int varid;
5533 varid = create_function_info_for (node->decl,
5534 cgraph_node_name (node));
5535 if (node->local.externally_visible)
5537 varinfo_t fi = get_varinfo (varid);
5538 for (; fi; fi = fi->next)
5539 make_constraint_from (fi, anything_id);
5543 for (node = cgraph_nodes; node; node = node->next)
5545 if (node->analyzed && cgraph_is_master_clone (node))
5547 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
5548 basic_block bb;
5549 tree old_func_decl = current_function_decl;
5550 if (dump_file)
5551 fprintf (dump_file,
5552 "Generating constraints for %s\n",
5553 cgraph_node_name (node));
5554 push_cfun (func);
5555 current_function_decl = node->decl;
5557 FOR_EACH_BB_FN (bb, func)
5559 block_stmt_iterator bsi;
5560 tree phi;
5562 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5564 if (is_gimple_reg (PHI_RESULT (phi)))
5566 find_func_aliases (phi);
5570 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5572 tree stmt = bsi_stmt (bsi);
5573 find_func_aliases (stmt);
5576 current_function_decl = old_func_decl;
5577 pop_cfun ();
5579 else
5581 /* Make it point to anything. */
5585 if (dump_file)
5587 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5588 dump_constraints (dump_file);
5591 if (dump_file)
5592 fprintf (dump_file,
5593 "\nCollapsing static cycles and doing variable "
5594 "substitution:\n");
5596 init_graph (VEC_length (varinfo_t, varmap) * 2);
5597 build_pred_graph ();
5598 si = perform_var_substitution (graph);
5599 rewrite_constraints (graph, si);
5600 free_var_substitution_info (si);
5602 build_succ_graph ();
5603 move_complex_constraints (graph);
5604 unite_pointer_equivalences (graph);
5605 find_indirect_cycles (graph);
5607 /* Implicit nodes and predecessors are no longer necessary at this
5608 point. */
5609 remove_preds_and_fake_succs (graph);
5611 if (dump_file)
5612 fprintf (dump_file, "\nSolving graph\n");
5614 solve_graph (graph);
5616 if (dump_file)
5617 dump_sa_points_to_info (dump_file);
5619 in_ipa_mode = 0;
5620 delete_alias_heapvars ();
5621 delete_points_to_sets ();
5622 return 0;
5625 struct simple_ipa_opt_pass pass_ipa_pta =
5628 SIMPLE_IPA_PASS,
5629 "pta", /* name */
5630 gate_ipa_pta, /* gate */
5631 ipa_pta_execute, /* execute */
5632 NULL, /* sub */
5633 NULL, /* next */
5634 0, /* static_pass_number */
5635 TV_IPA_PTA, /* tv_id */
5636 0, /* properties_required */
5637 0, /* properties_provided */
5638 0, /* properties_destroyed */
5639 0, /* todo_flags_start */
5640 TODO_update_ssa /* todo_flags_finish */
5644 /* Initialize the heapvar for statement mapping. */
5645 void
5646 init_alias_heapvars (void)
5648 if (!heapvar_for_stmt)
5649 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
5650 NULL);
5653 void
5654 delete_alias_heapvars (void)
5656 htab_delete (heapvar_for_stmt);
5657 heapvar_for_stmt = NULL;
5661 #include "gt-tree-ssa-structalias.h"