gcc/tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "flags.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "output.h"
34 #include "errors.h"
35 #include "diagnostic.h"
36 #include "tree.h"
37 #include "c-common.h"
38 #include "tree-flow.h"
39 #include "tree-inline.h"
40 #include "varray.h"
41 #include "c-tree.h"
42 #include "tree-gimple.h"
43 #include "hashtab.h"
44 #include "function.h"
45 #include "cgraph.h"
46 #include "tree-pass.h"
47 #include "timevar.h"
48 #include "alloc-pool.h"
49 #include "splay-tree.h"
50 #include "params.h"
51 #include "tree-ssa-structalias.h"
52 #include "cgraph.h"
53 #include "alias.h"
54 #include "pointer-set.h"
56 /* The idea behind this analyzer is to generate set constraints from the
57 program, then solve the resulting constraints in order to generate the
58 points-to sets.
60 Set constraints are a way of modeling program analysis problems that
61 involve sets. They consist of an inclusion constraint language,
62 describing the variables (each variable is a set) and operations that
63 are involved on the variables, and a set of rules that derive facts
64 from these operations. To solve a system of set constraints, you derive
65 all possible facts under the rules, which gives you the correct sets
66 as a consequence.
68 See "Efficient Field-sensitive pointer analysis for C" by "David
69 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
70 http://citeseer.ist.psu.edu/pearce04efficient.html
72 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
73 of C Code in a Second" by ""Nevin Heintze and Olivier Tardieu" at
74 http://citeseer.ist.psu.edu/heintze01ultrafast.html
76 There are three types of real constraint expressions, DEREF,
77 ADDRESSOF, and SCALAR. Each constraint expression consists
78 of a constraint type, a variable, and an offset.
80 SCALAR is a constraint expression type used to represent x, whether
81 it appears on the LHS or the RHS of a statement.
82 DEREF is a constraint expression type used to represent *x, whether
83 it appears on the LHS or the RHS of a statement.
84 ADDRESSOF is a constraint expression used to represent &x, whether
85 it appears on the LHS or the RHS of a statement.
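As an illustrative sketch (using made-up variables, not constraints
dumped by this pass): for a statement "p = &a" the LHS is the SCALAR
expression for p and the RHS is the ADDRESSOF expression for a; for
"p = q" both sides are SCALAR expressions; for "p = *q" the RHS is a
DEREF expression; and for "*p = q" the LHS is a DEREF expression.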
87 Each pointer variable in the program is assigned an integer id, and
88 each field of a structure variable is assigned an integer id as well.
90 Structure variables are linked to their list of fields through a "next
91 field" in each variable that points to the next field in offset
92 order.
93 Each variable for a structure field has
95 1. "size", that tells the size in bits of that field.
96 2. "fullsize, that tells the size in bits of the entire structure.
97 3. "offset", that tells the offset in bits from the beginning of the
98 structure to this field.
100 Thus,
101 struct f
102 {
103 int a;
104 int b;
105 } foo;
106 int *bar;
108 looks like
110 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
111 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
112 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
115 In order to solve the system of set constraints, the following is
116 done:
118 1. Each constraint variable x has a solution set associated with it,
119 Sol(x).
121 2. Constraints are separated into direct, copy, and complex.
122 Direct constraints are ADDRESSOF constraints that require no extra
123 processing, such as P = &Q
124 Copy constraints are those of the form P = Q.
125 Complex constraints are all the constraints involving dereferences
126 and offsets (including offsetted copies).
128 3. All direct constraints of the form P = &Q are processed, such
129 that Q is added to Sol(P)
131 4. All complex constraints for a given constraint variable are stored in a
132 linked list attached to that variable's node.
134 5. A directed graph is built out of the copy constraints. Each
135 constraint variable is a node in the graph, and an edge from
136 Q to P is added for each copy constraint of the form P = Q
138 6. The graph is then walked, and solution sets are
139 propagated along the copy edges, such that an edge from Q to P
140 causes Sol(P) <- Sol(P) union Sol(Q).
142 7. As we visit each node, all complex constraints associated with
143 that node are processed by adding appropriate copy edges to the graph, or the
144 appropriate variables to the solution set.
146 8. The process of walking the graph is iterated until no solution
147 sets change.
149 Prior to walking the graph in steps 6 and 7, we perform static
150 cycle elimination on the constraint graph, as well
151 as off-line variable substitution.
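As a small worked example (hypothetical variables, not actual solver
output): given the constraints p = &a and q = p, step 3 puts a into
Sol(p), step 5 adds a copy edge from p to q, and the propagation in
step 6 gives Sol(q) = {a}. A further walk changes no solution set,
so step 8 terminates the iteration.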
153 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
154 on and turned into anything), but isn't. You can just see what offset
155 inside the pointed-to struct it's going to access.
157 TODO: Constant bounded arrays can be handled as if they were structs of the
158 same number of elements.
160 TODO: Modeling heap and incoming pointers becomes much better if we
161 add fields to them as we discover them, which we could do.
163 TODO: We could handle unions, but to be honest, it's probably not
164 worth the pain or slowdown. */
166 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
167 htab_t heapvar_for_stmt;
169 static bool use_field_sensitive = true;
170 static int in_ipa_mode = 0;
172 /* Used for predecessor bitmaps. */
173 static bitmap_obstack predbitmap_obstack;
175 /* Used for points-to sets. */
176 static bitmap_obstack pta_obstack;
178 /* Used for oldsolution members of variables. */
179 static bitmap_obstack oldpta_obstack;
181 /* Used for per-solver-iteration bitmaps. */
182 static bitmap_obstack iteration_obstack;
184 static unsigned int create_variable_info_for (tree, const char *);
185 typedef struct constraint_graph *constraint_graph_t;
186 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
188 DEF_VEC_P(constraint_t);
189 DEF_VEC_ALLOC_P(constraint_t,heap);
191 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
192 if (a) \
193 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
195 static struct constraint_stats
197 unsigned int total_vars;
198 unsigned int nonpointer_vars;
199 unsigned int unified_vars_static;
200 unsigned int unified_vars_dynamic;
201 unsigned int iterations;
202 unsigned int num_edges;
203 unsigned int num_implicit_edges;
204 unsigned int points_to_sets_created;
205 } stats;
207 struct variable_info
209 /* ID of this variable */
210 unsigned int id;
212 /* True if this is a variable created by the constraint analysis, such as
213 heap variables and constraints we had to break up. */
214 unsigned int is_artificial_var:1;
216 /* True if this is a special variable whose solution set should not be
217 changed. */
218 unsigned int is_special_var:1;
220 /* True for variables whose size is not known or variable. */
221 unsigned int is_unknown_size_var:1;
223 /* True for variables that have unions somewhere in them. */
224 unsigned int has_union:1;
226 /* True if this is a heap variable. */
227 unsigned int is_heap_var:1;
229 /* True if we may not use TBAA to prune references to this
230 variable. This is used for C++ placement new. */
231 unsigned int no_tbaa_pruning : 1;
233 /* Variable id this was collapsed to due to type unsafety. Zero if
234 this variable was not collapsed. This should be unused completely
235 after build_succ_graph, or something is broken. */
236 unsigned int collapsed_to;
238 /* A link to the variable for the next field in this structure. */
239 struct variable_info *next;
241 /* Offset of this variable, in bits, from the base variable */
242 unsigned HOST_WIDE_INT offset;
244 /* Size of the variable, in bits. */
245 unsigned HOST_WIDE_INT size;
247 /* Full size of the base variable, in bits. */
248 unsigned HOST_WIDE_INT fullsize;
250 /* Name of this variable */
251 const char *name;
253 /* Tree that this variable is associated with. */
254 tree decl;
256 /* Points-to set for this variable. */
257 bitmap solution;
259 /* Old points-to set for this variable. */
260 bitmap oldsolution;
262 typedef struct variable_info *varinfo_t;
264 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
265 static varinfo_t lookup_vi_for_tree (tree);
267 /* Pool of variable info structures. */
268 static alloc_pool variable_info_pool;
270 DEF_VEC_P(varinfo_t);
272 DEF_VEC_ALLOC_P(varinfo_t, heap);
274 /* Table of variable info structures for constraint variables.
275 Indexed directly by variable info id. */
276 static VEC(varinfo_t,heap) *varmap;
278 /* Return the varmap element N */
280 static inline varinfo_t
281 get_varinfo (unsigned int n)
283 return VEC_index (varinfo_t, varmap, n);
286 /* Return the varmap element N, following the collapsed_to link. */
288 static inline varinfo_t
289 get_varinfo_fc (unsigned int n)
291 varinfo_t v = VEC_index (varinfo_t, varmap, n);
293 if (v->collapsed_to != 0)
294 return get_varinfo (v->collapsed_to);
295 return v;
298 /* Static IDs for the special variables. */
299 enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
300 escaped_id = 3, nonlocal_id = 4, callused_id = 5, integer_id = 6 };
302 /* Variable that represents the unknown pointer. */
303 static varinfo_t var_anything;
304 static tree anything_tree;
306 /* Variable that represents the NULL pointer. */
307 static varinfo_t var_nothing;
308 static tree nothing_tree;
310 /* Variable that represents read only memory. */
311 static varinfo_t var_readonly;
312 static tree readonly_tree;
314 /* Variable that represents escaped memory. */
315 static varinfo_t var_escaped;
316 static tree escaped_tree;
318 /* Variable that represents nonlocal memory. */
319 static varinfo_t var_nonlocal;
320 static tree nonlocal_tree;
322 /* Variable that represents call-used memory. */
323 static varinfo_t var_callused;
324 static tree callused_tree;
326 /* Variable that represents integers. This is used when people do things
327 like &0->a.b. */
328 static varinfo_t var_integer;
329 static tree integer_tree;
331 /* Lookup a heap var for FROM, and return it if we find one. */
333 static tree
334 heapvar_lookup (tree from)
336 struct tree_map *h, in;
337 in.base.from = from;
339 h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
340 htab_hash_pointer (from));
341 if (h)
342 return h->to;
343 return NULL_TREE;
346 /* Insert a mapping FROM->TO in the heap var for statement
347 hashtable. */
349 static void
350 heapvar_insert (tree from, tree to)
352 struct tree_map *h;
353 void **loc;
355 h = GGC_NEW (struct tree_map);
356 h->hash = htab_hash_pointer (from);
357 h->base.from = from;
358 h->to = to;
359 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
360 *(struct tree_map **) loc = h;
363 /* Return a new variable info structure for a variable named NAME,
364 associated with tree T and using id ID. */
366 static varinfo_t
367 new_var_info (tree t, unsigned int id, const char *name)
369 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
370 tree var;
372 ret->id = id;
373 ret->name = name;
374 ret->decl = t;
375 ret->is_artificial_var = false;
376 ret->is_heap_var = false;
377 ret->is_special_var = false;
378 ret->is_unknown_size_var = false;
379 ret->has_union = false;
380 var = t;
381 if (TREE_CODE (var) == SSA_NAME)
382 var = SSA_NAME_VAR (var);
383 ret->no_tbaa_pruning = (DECL_P (var)
384 && POINTER_TYPE_P (TREE_TYPE (var))
385 && DECL_NO_TBAA_P (var));
386 ret->solution = BITMAP_ALLOC (&pta_obstack);
387 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
388 ret->next = NULL;
389 ret->collapsed_to = 0;
390 return ret;
393 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
395 /* An expression that appears in a constraint. */
397 struct constraint_expr
399 /* Constraint type. */
400 constraint_expr_type type;
402 /* Variable we are referring to in the constraint. */
403 unsigned int var;
405 /* Offset, in bits, of this constraint from the beginning of
406 variables it ends up referring to.
408 IOW, in a deref constraint, we would deref, get the result set,
409 then add OFFSET to each member. */
410 unsigned HOST_WIDE_INT offset;
413 typedef struct constraint_expr ce_s;
414 DEF_VEC_O(ce_s);
415 DEF_VEC_ALLOC_O(ce_s, heap);
416 static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool);
417 static void get_constraint_for (tree, VEC(ce_s, heap) **);
418 static void do_deref (VEC (ce_s, heap) **);
420 /* Our set constraints are made up of two constraint expressions, one
421 LHS, and one RHS.
423 As described in the introduction, our set constraints each represent an
424 operation between set valued variables.
425 */
426 struct constraint
428 struct constraint_expr lhs;
429 struct constraint_expr rhs;
432 /* List of constraints that we use to build the constraint graph from. */
434 static VEC(constraint_t,heap) *constraints;
435 static alloc_pool constraint_pool;
438 DEF_VEC_I(int);
439 DEF_VEC_ALLOC_I(int, heap);
441 /* The constraint graph is represented as an array of bitmaps
442 containing successor nodes. */
444 struct constraint_graph
446 /* Size of this graph, which may be different than the number of
447 nodes in the variable map. */
448 unsigned int size;
450 /* Explicit successors of each node. */
451 bitmap *succs;
453 /* Implicit predecessors of each node (Used for variable
454 substitution). */
455 bitmap *implicit_preds;
457 /* Explicit predecessors of each node (Used for variable substitution). */
458 bitmap *preds;
460 /* Indirect cycle representatives, or -1 if the node has no indirect
461 cycles. */
462 int *indirect_cycles;
464 /* Representative node for a node. rep[a] == a unless the node has
465 been unified. */
466 unsigned int *rep;
468 /* Equivalence class representative for a label. This is used for
469 variable substitution. */
470 int *eq_rep;
472 /* Pointer equivalence label for a node. All nodes with the same
473 pointer equivalence label can be unified together at some point
474 (either during constraint optimization or after the constraint
475 graph is built). */
476 unsigned int *pe;
478 /* Pointer equivalence representative for a label. This is used to
479 handle nodes that are pointer equivalent but not location
480 equivalent. We can unite these once the addressof constraints
481 are transformed into initial points-to sets. */
482 int *pe_rep;
484 /* Pointer equivalence label for each node, used during variable
485 substitution. */
486 unsigned int *pointer_label;
488 /* Location equivalence label for each node, used during location
489 equivalence finding. */
490 unsigned int *loc_label;
492 /* Pointed-by set for each node, used during location equivalence
493 finding. This is pointed-by rather than pointed-to, because it
494 is constructed using the predecessor graph. */
495 bitmap *pointed_by;
497 /* Points to sets for pointer equivalence. This is *not* the actual
498 points-to sets for nodes. */
499 bitmap *points_to;
501 /* Bitmap of nodes where the bit is set if the node is a direct
502 node. Used for variable substitution. */
503 sbitmap direct_nodes;
505 /* Bitmap of nodes where the bit is set if the node is address
506 taken. Used for variable substitution. */
507 bitmap address_taken;
509 /* True if points_to bitmap for this node is stored in the hash
510 table. */
511 sbitmap pt_used;
513 /* Number of incoming edges remaining to be processed by pointer
514 equivalence.
515 Used for variable substitution. */
516 unsigned int *number_incoming;
519 /* Vector of complex constraints for each graph node. Complex
520 constraints are those involving dereferences or offsets that are
521 not 0. */
522 VEC(constraint_t,heap) **complex;
525 static constraint_graph_t graph;
527 /* During variable substitution and the offline version of indirect
528 cycle finding, we create nodes to represent dereferences and
529 address taken constraints. These represent where these start and
530 end. */
531 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
532 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
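/* For instance (a sketch of the layout implied by the macros above): if
   varmap holds N variables, the REF node that stands for the dereference
   of the variable with id I is FIRST_REF_NODE + I, so REF node ids occupy
   the range [N, 2N - 1], i.e. up to LAST_REF_NODE. */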
534 /* Return the representative node for NODE, if NODE has been unioned
535 with another NODE.
536 This function performs path compression along the way to finding
537 the representative. */
539 static unsigned int
540 find (unsigned int node)
542 gcc_assert (node < graph->size);
543 if (graph->rep[node] != node)
544 return graph->rep[node] = find (graph->rep[node]);
545 return node;
548 /* Union the TO and FROM nodes into the TO node.
549 Note that at some point in the future, we may want to do
550 union-by-rank, in which case we are going to have to return the
551 node we unified to. */
553 static bool
554 unite (unsigned int to, unsigned int from)
556 gcc_assert (to < graph->size && from < graph->size);
557 if (to != from && graph->rep[from] != to)
559 graph->rep[from] = to;
560 return true;
562 return false;
565 /* Create a new constraint consisting of LHS and RHS expressions. */
567 static constraint_t
568 new_constraint (const struct constraint_expr lhs,
569 const struct constraint_expr rhs)
571 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
572 ret->lhs = lhs;
573 ret->rhs = rhs;
574 return ret;
577 /* Print out constraint C to FILE. */
579 void
580 dump_constraint (FILE *file, constraint_t c)
582 if (c->lhs.type == ADDRESSOF)
583 fprintf (file, "&");
584 else if (c->lhs.type == DEREF)
585 fprintf (file, "*");
586 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
587 if (c->lhs.offset != 0)
588 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
589 fprintf (file, " = ");
590 if (c->rhs.type == ADDRESSOF)
591 fprintf (file, "&");
592 else if (c->rhs.type == DEREF)
593 fprintf (file, "*");
594 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
595 if (c->rhs.offset != 0)
596 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
597 fprintf (file, "\n");
600 /* Print out constraint C to stderr. */
602 void
603 debug_constraint (constraint_t c)
605 dump_constraint (stderr, c);
608 /* Print out all constraints to FILE */
610 void
611 dump_constraints (FILE *file)
613 int i;
614 constraint_t c;
615 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
616 dump_constraint (file, c);
619 /* Print out all constraints to stderr. */
621 void
622 debug_constraints (void)
624 dump_constraints (stderr);
627 /* SOLVER FUNCTIONS
629 The solver is a simple worklist solver, that works on the following
630 algorithm:
632 sbitmap changed_nodes = all zeroes;
633 changed_count = 0;
634 For each node that is not already collapsed:
635 changed_count++;
636 set bit in changed nodes
638 while (changed_count > 0)
640 compute topological ordering for constraint graph
642 find and collapse cycles in the constraint graph (updating
643 changed if necessary)
645 for each node (n) in the graph in topological order:
646 changed_count--;
648 Process each complex constraint associated with the node,
649 updating changed if necessary.
651 For each outgoing edge from n, propagate the solution from n to
652 the destination of the edge, updating changed as necessary.
654 } */
656 /* Return true if two constraint expressions A and B are equal. */
658 static bool
659 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
661 return a.type == b.type && a.var == b.var && a.offset == b.offset;
664 /* Return true if constraint expression A is less than constraint expression
665 B. This is just arbitrary, but consistent, in order to give them an
666 ordering. */
668 static bool
669 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
671 if (a.type == b.type)
673 if (a.var == b.var)
674 return a.offset < b.offset;
675 else
676 return a.var < b.var;
678 else
679 return a.type < b.type;
682 /* Return true if constraint A is less than constraint B. This is just
683 arbitrary, but consistent, in order to give them an ordering. */
685 static bool
686 constraint_less (const constraint_t a, const constraint_t b)
688 if (constraint_expr_less (a->lhs, b->lhs))
689 return true;
690 else if (constraint_expr_less (b->lhs, a->lhs))
691 return false;
692 else
693 return constraint_expr_less (a->rhs, b->rhs);
696 /* Return true if two constraints A and B are equal. */
698 static bool
699 constraint_equal (struct constraint a, struct constraint b)
701 return constraint_expr_equal (a.lhs, b.lhs)
702 && constraint_expr_equal (a.rhs, b.rhs);
706 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
708 static constraint_t
709 constraint_vec_find (VEC(constraint_t,heap) *vec,
710 struct constraint lookfor)
712 unsigned int place;
713 constraint_t found;
715 if (vec == NULL)
716 return NULL;
718 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
719 if (place >= VEC_length (constraint_t, vec))
720 return NULL;
721 found = VEC_index (constraint_t, vec, place);
722 if (!constraint_equal (*found, lookfor))
723 return NULL;
724 return found;
727 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
729 static void
730 constraint_set_union (VEC(constraint_t,heap) **to,
731 VEC(constraint_t,heap) **from)
733 int i;
734 constraint_t c;
736 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
738 if (constraint_vec_find (*to, *c) == NULL)
740 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
741 constraint_less);
742 VEC_safe_insert (constraint_t, heap, *to, place, c);
747 /* Take a solution set SET, add OFFSET to each member of the set, and
748 overwrite SET with the result when done. */
750 static void
751 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
753 bitmap result = BITMAP_ALLOC (&iteration_obstack);
754 unsigned int i;
755 bitmap_iterator bi;
757 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
759 /* If this is a properly sized variable, only add offset if it's
760 less than end. Otherwise, it is globbed to a single
761 variable. */
763 if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
765 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
766 varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
767 if (!v)
768 continue;
769 bitmap_set_bit (result, v->id);
771 else if (get_varinfo (i)->is_artificial_var
772 || get_varinfo (i)->has_union
773 || get_varinfo (i)->is_unknown_size_var)
775 bitmap_set_bit (result, i);
779 bitmap_copy (set, result);
780 BITMAP_FREE (result);
783 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
784 process. */
786 static bool
787 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
789 if (inc == 0)
790 return bitmap_ior_into (to, from);
791 else
793 bitmap tmp;
794 bool res;
796 tmp = BITMAP_ALLOC (&iteration_obstack);
797 bitmap_copy (tmp, from);
798 solution_set_add (tmp, inc);
799 res = bitmap_ior_into (to, tmp);
800 BITMAP_FREE (tmp);
801 return res;
805 /* Insert constraint C into the list of complex constraints for graph
806 node VAR. */
808 static void
809 insert_into_complex (constraint_graph_t graph,
810 unsigned int var, constraint_t c)
812 VEC (constraint_t, heap) *complex = graph->complex[var];
813 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
814 constraint_less);
816 /* Only insert constraints that do not already exist. */
817 if (place >= VEC_length (constraint_t, complex)
818 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
819 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
823 /* Condense two variable nodes into a single variable node, by moving
824 all associated info from SRC to TO. */
826 static void
827 merge_node_constraints (constraint_graph_t graph, unsigned int to,
828 unsigned int from)
830 unsigned int i;
831 constraint_t c;
833 gcc_assert (find (from) == to);
835 /* Move all complex constraints from src node into to node */
836 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
838 /* In complex constraints for node src, we may have a = *src,
839 *src = a, or an offsetted constraint; these are always added
840 to the rhs node's constraints. */
842 if (c->rhs.type == DEREF)
843 c->rhs.var = to;
844 else if (c->lhs.type == DEREF)
845 c->lhs.var = to;
846 else
847 c->rhs.var = to;
849 constraint_set_union (&graph->complex[to], &graph->complex[from]);
850 VEC_free (constraint_t, heap, graph->complex[from]);
851 graph->complex[from] = NULL;
855 /* Remove edges involving NODE from GRAPH. */
857 static void
858 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
860 if (graph->succs[node])
861 BITMAP_FREE (graph->succs[node]);
864 /* Merge GRAPH nodes FROM and TO into node TO. */
866 static void
867 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
868 unsigned int from)
870 if (graph->indirect_cycles[from] != -1)
872 /* If we have indirect cycles with the from node, and we have
873 none on the to node, the to node has indirect cycles from the
874 from node now that they are unified.
875 If indirect cycles exist on both, unify the nodes that they
876 are in a cycle with, since we know they are in a cycle with
877 each other. */
878 if (graph->indirect_cycles[to] == -1)
879 graph->indirect_cycles[to] = graph->indirect_cycles[from];
882 /* Merge all the successor edges. */
883 if (graph->succs[from])
885 if (!graph->succs[to])
886 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
887 bitmap_ior_into (graph->succs[to],
888 graph->succs[from]);
891 clear_edges_for_node (graph, from);
895 /* Add an implicit predecessor graph edge to GRAPH, going from TO to FROM if
896 it doesn't exist in the graph already. */
898 static void
899 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
900 unsigned int from)
902 if (to == from)
903 return;
905 if (!graph->implicit_preds[to])
906 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
908 if (bitmap_set_bit (graph->implicit_preds[to], from))
909 stats.num_implicit_edges++;
912 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
913 it doesn't exist in the graph already. */
916 static void
917 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
918 unsigned int from)
920 if (!graph->preds[to])
921 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
922 bitmap_set_bit (graph->preds[to], from);
925 /* Add a graph edge to GRAPH, going from FROM to TO if
926 it doesn't exist in the graph already.
927 Return false if the edge already existed, true otherwise. */
929 static bool
930 add_graph_edge (constraint_graph_t graph, unsigned int to,
931 unsigned int from)
933 if (to == from)
935 return false;
937 else
939 bool r = false;
941 if (!graph->succs[from])
942 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
943 if (bitmap_set_bit (graph->succs[from], to))
945 r = true;
946 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
947 stats.num_edges++;
949 return r;
954 /* Return true if {DEST, SRC} is an existing graph edge in GRAPH. */
956 static bool
957 valid_graph_edge (constraint_graph_t graph, unsigned int src,
958 unsigned int dest)
960 return (graph->succs[dest]
961 && bitmap_bit_p (graph->succs[dest], src));
964 /* Initialize the constraint graph structure to contain SIZE nodes. */
966 static void
967 init_graph (unsigned int size)
969 unsigned int j;
971 graph = XCNEW (struct constraint_graph);
972 graph->size = size;
973 graph->succs = XCNEWVEC (bitmap, graph->size);
974 graph->indirect_cycles = XNEWVEC (int, graph->size);
975 graph->rep = XNEWVEC (unsigned int, graph->size);
976 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
977 graph->pe = XCNEWVEC (unsigned int, graph->size);
978 graph->pe_rep = XNEWVEC (int, graph->size);
980 for (j = 0; j < graph->size; j++)
982 graph->rep[j] = j;
983 graph->pe_rep[j] = -1;
984 graph->indirect_cycles[j] = -1;
988 /* Build the constraint graph, adding only predecessor edges right now. */
990 static void
991 build_pred_graph (void)
993 int i;
994 constraint_t c;
995 unsigned int j;
997 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
998 graph->preds = XCNEWVEC (bitmap, graph->size);
999 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1000 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1001 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1002 graph->points_to = XCNEWVEC (bitmap, graph->size);
1003 graph->eq_rep = XNEWVEC (int, graph->size);
1004 graph->direct_nodes = sbitmap_alloc (graph->size);
1005 graph->pt_used = sbitmap_alloc (graph->size);
1006 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1007 graph->number_incoming = XCNEWVEC (unsigned int, graph->size);
1008 sbitmap_zero (graph->direct_nodes);
1009 sbitmap_zero (graph->pt_used);
1011 for (j = 0; j < FIRST_REF_NODE; j++)
1013 if (!get_varinfo (j)->is_special_var)
1014 SET_BIT (graph->direct_nodes, j);
1017 for (j = 0; j < graph->size; j++)
1018 graph->eq_rep[j] = -1;
1020 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1021 graph->indirect_cycles[j] = -1;
1023 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1025 struct constraint_expr lhs = c->lhs;
1026 struct constraint_expr rhs = c->rhs;
1027 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
1028 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
1030 if (lhs.type == DEREF)
1032 /* *x = y. */
1033 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1034 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1036 else if (rhs.type == DEREF)
1038 /* x = *y */
1039 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1040 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1041 else
1042 RESET_BIT (graph->direct_nodes, lhsvar);
1044 else if (rhs.type == ADDRESSOF)
1046 /* x = &y */
1047 if (graph->points_to[lhsvar] == NULL)
1048 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1049 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1051 if (graph->pointed_by[rhsvar] == NULL)
1052 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1053 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1055 /* Implicitly, *x = y */
1056 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1058 RESET_BIT (graph->direct_nodes, rhsvar);
1059 bitmap_set_bit (graph->address_taken, rhsvar);
1061 else if (lhsvar > anything_id
1062 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1064 /* x = y */
1065 add_pred_graph_edge (graph, lhsvar, rhsvar);
1066 /* Implicitly, *x = *y */
1067 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1068 FIRST_REF_NODE + rhsvar);
1070 else if (lhs.offset != 0 || rhs.offset != 0)
1072 if (rhs.offset != 0)
1073 RESET_BIT (graph->direct_nodes, lhs.var);
1074 else if (lhs.offset != 0)
1075 RESET_BIT (graph->direct_nodes, rhs.var);
1080 /* Build the constraint graph, adding successor edges. */
1082 static void
1083 build_succ_graph (void)
1085 int i;
1086 constraint_t c;
1088 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1090 struct constraint_expr lhs;
1091 struct constraint_expr rhs;
1092 unsigned int lhsvar;
1093 unsigned int rhsvar;
1095 if (!c)
1096 continue;
1098 lhs = c->lhs;
1099 rhs = c->rhs;
1100 lhsvar = find (get_varinfo_fc (lhs.var)->id);
1101 rhsvar = find (get_varinfo_fc (rhs.var)->id);
1103 if (lhs.type == DEREF)
1105 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1106 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1108 else if (rhs.type == DEREF)
1110 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1111 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1113 else if (rhs.type == ADDRESSOF)
1115 /* x = &y */
1116 gcc_assert (find (get_varinfo_fc (rhs.var)->id)
1117 == get_varinfo_fc (rhs.var)->id);
1118 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1120 else if (lhsvar > anything_id
1121 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1123 add_graph_edge (graph, lhsvar, rhsvar);
1129 /* Changed variables on the last iteration. */
1130 static unsigned int changed_count;
1131 static sbitmap changed;
1133 DEF_VEC_I(unsigned);
1134 DEF_VEC_ALLOC_I(unsigned,heap);
1137 /* Strongly Connected Component visitation info. */
1139 struct scc_info
1141 sbitmap visited;
1142 sbitmap deleted;
1143 unsigned int *dfs;
1144 unsigned int *node_mapping;
1145 int current_index;
1146 VEC(unsigned,heap) *scc_stack;
1150 /* Recursive routine to find strongly connected components in GRAPH.
1151 SI is the SCC info to store the information in, and N is the id of current
1152 graph node we are processing.
1154 This is Tarjan's strongly connected component finding algorithm, as
1155 modified by Nuutila to keep only non-root nodes on the stack.
1156 The algorithm can be found in "On finding the strongly connected
1157 components in a directed graph" by Esko Nuutila and Eljas
1158 Soisalon-Soininen, in Information Processing Letters volume 49,
1159 number 1, pages 9-14. */
1161 static void
1162 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1164 unsigned int i;
1165 bitmap_iterator bi;
1166 unsigned int my_dfs;
1168 SET_BIT (si->visited, n);
1169 si->dfs[n] = si->current_index ++;
1170 my_dfs = si->dfs[n];
1172 /* Visit all the successors. */
1173 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1175 unsigned int w;
1177 if (i > LAST_REF_NODE)
1178 break;
1180 w = find (i);
1181 if (TEST_BIT (si->deleted, w))
1182 continue;
1184 if (!TEST_BIT (si->visited, w))
1185 scc_visit (graph, si, w);
1187 unsigned int t = find (w);
1188 unsigned int nnode = find (n);
1189 gcc_assert (nnode == n);
1191 if (si->dfs[t] < si->dfs[nnode])
1192 si->dfs[n] = si->dfs[t];
1196 /* See if any components have been identified. */
1197 if (si->dfs[n] == my_dfs)
1199 if (VEC_length (unsigned, si->scc_stack) > 0
1200 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1202 bitmap scc = BITMAP_ALLOC (NULL);
1203 bool have_ref_node = n >= FIRST_REF_NODE;
1204 unsigned int lowest_node;
1205 bitmap_iterator bi;
1207 bitmap_set_bit (scc, n);
1209 while (VEC_length (unsigned, si->scc_stack) != 0
1210 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1212 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1214 bitmap_set_bit (scc, w);
1215 if (w >= FIRST_REF_NODE)
1216 have_ref_node = true;
1219 lowest_node = bitmap_first_set_bit (scc);
1220 gcc_assert (lowest_node < FIRST_REF_NODE);
1222 /* Collapse the SCC nodes into a single node, and mark the
1223 indirect cycles. */
1224 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1226 if (i < FIRST_REF_NODE)
1228 if (unite (lowest_node, i))
1229 unify_nodes (graph, lowest_node, i, false);
1231 else
1233 unite (lowest_node, i);
1234 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1238 SET_BIT (si->deleted, n);
1240 else
1241 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1244 /* Unify node FROM into node TO, updating the changed count if
1245 necessary when UPDATE_CHANGED is true. */
1247 static void
1248 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1249 bool update_changed)
1252 gcc_assert (to != from && find (to) == to);
1253 if (dump_file && (dump_flags & TDF_DETAILS))
1254 fprintf (dump_file, "Unifying %s to %s\n",
1255 get_varinfo (from)->name,
1256 get_varinfo (to)->name);
1258 if (update_changed)
1259 stats.unified_vars_dynamic++;
1260 else
1261 stats.unified_vars_static++;
1263 merge_graph_nodes (graph, to, from);
1264 merge_node_constraints (graph, to, from);
1266 if (get_varinfo (from)->no_tbaa_pruning)
1267 get_varinfo (to)->no_tbaa_pruning = true;
1269 /* Mark TO as changed if FROM was changed. If TO was already marked
1270 as changed, decrease the changed count. */
1272 if (update_changed && TEST_BIT (changed, from))
1274 RESET_BIT (changed, from);
1275 if (!TEST_BIT (changed, to))
1276 SET_BIT (changed, to);
1277 else
1279 gcc_assert (changed_count > 0);
1280 changed_count--;
1283 if (get_varinfo (from)->solution)
1285 /* If the solution changes because of the merging, we need to mark
1286 the variable as changed. */
1287 if (bitmap_ior_into (get_varinfo (to)->solution,
1288 get_varinfo (from)->solution))
1290 if (update_changed && !TEST_BIT (changed, to))
1292 SET_BIT (changed, to);
1293 changed_count++;
1297 BITMAP_FREE (get_varinfo (from)->solution);
1298 BITMAP_FREE (get_varinfo (from)->oldsolution);
1300 if (stats.iterations > 0)
1302 BITMAP_FREE (get_varinfo (to)->oldsolution);
1303 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
1306 if (valid_graph_edge (graph, to, to))
1308 if (graph->succs[to])
1309 bitmap_clear_bit (graph->succs[to], to);
1313 /* Information needed to compute the topological ordering of a graph. */
1315 struct topo_info
1317 /* sbitmap of visited nodes. */
1318 sbitmap visited;
1319 /* Array that stores the topological order of the graph, *in
1320 reverse*. */
1321 VEC(unsigned,heap) *topo_order;
1325 /* Initialize and return a topological info structure. */
1327 static struct topo_info *
1328 init_topo_info (void)
1330 size_t size = graph->size;
1331 struct topo_info *ti = XNEW (struct topo_info);
1332 ti->visited = sbitmap_alloc (size);
1333 sbitmap_zero (ti->visited);
1334 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1335 return ti;
1339 /* Free the topological sort info pointed to by TI. */
1341 static void
1342 free_topo_info (struct topo_info *ti)
1344 sbitmap_free (ti->visited);
1345 VEC_free (unsigned, heap, ti->topo_order);
1346 free (ti);
1349 /* Visit the graph in topological order, and store the order in the
1350 topo_info structure. */
1352 static void
1353 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1354 unsigned int n)
1356 bitmap_iterator bi;
1357 unsigned int j;
1359 SET_BIT (ti->visited, n);
1361 if (graph->succs[n])
1362 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1364 if (!TEST_BIT (ti->visited, j))
1365 topo_visit (graph, ti, j);
1368 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1371 /* Return true if variable N + OFFSET is a legal field of N. */
1373 static bool
1374 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1376 varinfo_t ninfo = get_varinfo (n);
1378 /* For things we've globbed to single variables, any offset into the
1379 variable acts like the entire variable, so that it becomes offset
1380 0. */
1381 if (ninfo->is_special_var
1382 || ninfo->is_artificial_var
1383 || ninfo->is_unknown_size_var)
1385 *offset = 0;
1386 return true;
1388 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1391 /* Process a constraint C that represents x = *y, using DELTA as the
1392 starting solution. */
1394 static void
1395 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1396 bitmap delta)
1398 unsigned int lhs = c->lhs.var;
1399 bool flag = false;
1400 bitmap sol = get_varinfo (lhs)->solution;
1401 unsigned int j;
1402 bitmap_iterator bi;
1404 if (bitmap_bit_p (delta, anything_id))
1406 flag |= bitmap_set_bit (sol, anything_id);
1407 goto done;
1410 /* For x = *ESCAPED and x = *CALLUSED we want to compute the
1411 reachability set of the rhs var. As a pointer to a sub-field
1412 of a variable can also reach all other fields of the variable
1413 we simply have to expand the solution to contain all sub-fields
1414 if one sub-field is contained. */
1415 if (c->rhs.var == escaped_id
1416 || c->rhs.var == callused_id)
1418 bitmap vars = NULL;
1419 /* In a first pass, record all variables whose sub-fields we
1420 need to add. This avoids quadratic behavior. */
1421 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1423 varinfo_t v = lookup_vi_for_tree (get_varinfo (j)->decl);
1424 if (v->next != NULL)
1426 if (vars == NULL)
1427 vars = BITMAP_ALLOC (NULL);
1428 bitmap_set_bit (vars, v->id);
1431 /* In the second pass now do the addition to the solution and
1432 to speed up solving add it to the delta as well. */
1433 if (vars != NULL)
1435 EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
1437 varinfo_t v = get_varinfo (j);
1438 for (; v != NULL; v = v->next)
1440 if (bitmap_set_bit (sol, v->id))
1442 flag = true;
1443 bitmap_set_bit (delta, v->id);
1447 BITMAP_FREE (vars);
1451 /* For each variable j in delta (Sol(y)), add
1452 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1453 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1455 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
1456 if (type_safe (j, &roffset))
1458 varinfo_t v;
1459 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1460 unsigned int t;
1462 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1463 if (!v)
1464 continue;
1465 t = find (v->id);
1467 /* Adding edges from the special vars is pointless.
1468 They don't have sets that can change. */
1469 if (get_varinfo (t)->is_special_var)
1470 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1471 /* Merging the solution from ESCAPED needlessly increases
1472 the set. Use ESCAPED as representative instead.
1473 Same for CALLUSED. */
1474 else if (get_varinfo (t)->id == escaped_id
1475 || get_varinfo (t)->id == callused_id)
1476 flag |= bitmap_set_bit (sol, get_varinfo (t)->id);
1477 else if (add_graph_edge (graph, lhs, t))
1478 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1482 done:
1483 /* If the LHS solution changed, mark the var as changed. */
1484 if (flag)
1486 get_varinfo (lhs)->solution = sol;
1487 if (!TEST_BIT (changed, lhs))
1489 SET_BIT (changed, lhs);
1490 changed_count++;
1495 /* Process a constraint C that represents *x = y. */
1497 static void
1498 do_ds_constraint (constraint_t c, bitmap delta)
1500 unsigned int rhs = c->rhs.var;
1501 bitmap sol = get_varinfo (rhs)->solution;
1502 unsigned int j;
1503 bitmap_iterator bi;
1505 if (bitmap_bit_p (sol, anything_id))
1507 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1509 varinfo_t jvi = get_varinfo (j);
1510 unsigned int t;
1511 unsigned int loff = c->lhs.offset;
1512 unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
1513 varinfo_t v;
1515 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1516 if (!v)
1517 continue;
1518 t = find (v->id);
1520 if (bitmap_set_bit (get_varinfo (t)->solution, anything_id)
1521 && !TEST_BIT (changed, t))
1523 SET_BIT (changed, t);
1524 changed_count++;
1527 return;
1530 /* For each member j of delta (Sol(x)), add an edge from y to j and
1531 union Sol(y) into Sol(j) */
1532 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1534 unsigned HOST_WIDE_INT loff = c->lhs.offset;
1535 if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
1537 varinfo_t v;
1538 unsigned int t;
1539 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1540 bitmap tmp;
1542 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1543 if (!v)
1544 continue;
1545 t = find (v->id);
1546 tmp = get_varinfo (t)->solution;
1548 if (set_union_with_increment (tmp, sol, 0))
1550 get_varinfo (t)->solution = tmp;
1551 if (t == rhs)
1552 sol = get_varinfo (rhs)->solution;
1553 if (!TEST_BIT (changed, t))
1555 SET_BIT (changed, t);
1556 changed_count++;
1563 /* Handle a non-simple constraint (simple meaning one that requires no iteration),
1564 IE *x = &y, x = *y, *x = y, and x = y with offsets involved. */
1566 static void
1567 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1569 if (c->lhs.type == DEREF)
1571 if (c->rhs.type == ADDRESSOF)
1573 gcc_unreachable();
1575 else
1577 /* *x = y */
1578 do_ds_constraint (c, delta);
1581 else if (c->rhs.type == DEREF)
1583 /* x = *y */
1584 if (!(get_varinfo (c->lhs.var)->is_special_var))
1585 do_sd_constraint (graph, c, delta);
1587 else
1589 bitmap tmp;
1590 bitmap solution;
1591 bool flag = false;
1593 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1594 solution = get_varinfo (c->rhs.var)->solution;
1595 tmp = get_varinfo (c->lhs.var)->solution;
1597 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1599 if (flag)
1601 get_varinfo (c->lhs.var)->solution = tmp;
1602 if (!TEST_BIT (changed, c->lhs.var))
1604 SET_BIT (changed, c->lhs.var);
1605 changed_count++;
1611 /* Initialize and return a new SCC info structure. */
1613 static struct scc_info *
1614 init_scc_info (size_t size)
1616 struct scc_info *si = XNEW (struct scc_info);
1617 size_t i;
1619 si->current_index = 0;
1620 si->visited = sbitmap_alloc (size);
1621 sbitmap_zero (si->visited);
1622 si->deleted = sbitmap_alloc (size);
1623 sbitmap_zero (si->deleted);
1624 si->node_mapping = XNEWVEC (unsigned int, size);
1625 si->dfs = XCNEWVEC (unsigned int, size);
1627 for (i = 0; i < size; i++)
1628 si->node_mapping[i] = i;
1630 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1631 return si;
1634 /* Free an SCC info structure pointed to by SI */
1636 static void
1637 free_scc_info (struct scc_info *si)
1639 sbitmap_free (si->visited);
1640 sbitmap_free (si->deleted);
1641 free (si->node_mapping);
1642 free (si->dfs);
1643 VEC_free (unsigned, heap, si->scc_stack);
1644 free (si);
1648 /* Find indirect cycles in GRAPH that occur, using strongly connected
1649 components, and note them in the indirect cycles map.
1651 This technique comes from Ben Hardekopf and Calvin Lin,
1652 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1653 Lines of Code", submitted to PLDI 2007. */
1655 static void
1656 find_indirect_cycles (constraint_graph_t graph)
1658 unsigned int i;
1659 unsigned int size = graph->size;
1660 struct scc_info *si = init_scc_info (size);
1662 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1663 if (!TEST_BIT (si->visited, i) && find (i) == i)
1664 scc_visit (graph, si, i);
1666 free_scc_info (si);
1669 /* Compute a topological ordering for GRAPH, and store the result in the
1670 topo_info structure TI. */
1672 static void
1673 compute_topo_order (constraint_graph_t graph,
1674 struct topo_info *ti)
1676 unsigned int i;
1677 unsigned int size = graph->size;
1679 for (i = 0; i != size; ++i)
1680 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1681 topo_visit (graph, ti, i);
1684 /* Structure used for hash value numbering of pointer equivalence
1685 classes. */
1687 typedef struct equiv_class_label
1689 unsigned int equivalence_class;
1690 bitmap labels;
1691 hashval_t hashcode;
1692 } *equiv_class_label_t;
1693 typedef const struct equiv_class_label *const_equiv_class_label_t;
1695 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1696 classes. */
1697 static htab_t pointer_equiv_class_table;
1699 /* A hashtable for mapping a bitmap of labels->location equivalence
1700 classes. */
1701 static htab_t location_equiv_class_table;
1703 /* Hash function for an equiv_class_label_t. */
1705 static hashval_t
1706 equiv_class_label_hash (const void *p)
1708 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1709 return ecl->hashcode;
1712 /* Equality function for two equiv_class_label_t's. */
1714 static int
1715 equiv_class_label_eq (const void *p1, const void *p2)
1717 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1718 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1719 return bitmap_equal_p (eql1->labels, eql2->labels);
1722 /* Look up an equivalence class in TABLE by the bitmap of LABELS it
1723 contains. */
1725 static unsigned int
1726 equiv_class_lookup (htab_t table, bitmap labels)
1728 void **slot;
1729 struct equiv_class_label ecl;
1731 ecl.labels = labels;
1732 ecl.hashcode = bitmap_hash (labels);
1734 slot = htab_find_slot_with_hash (table, &ecl,
1735 ecl.hashcode, NO_INSERT);
1736 if (!slot)
1737 return 0;
1738 else
1739 return ((equiv_class_label_t) *slot)->equivalence_class;
1743 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1744 to TABLE. */
1746 static void
1747 equiv_class_add (htab_t table, unsigned int equivalence_class,
1748 bitmap labels)
1750 void **slot;
1751 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1753 ecl->labels = labels;
1754 ecl->equivalence_class = equivalence_class;
1755 ecl->hashcode = bitmap_hash (labels);
1757 slot = htab_find_slot_with_hash (table, ecl,
1758 ecl->hashcode, INSERT);
1759 gcc_assert (!*slot);
1760 *slot = (void *) ecl;
1763 /* Perform offline variable substitution.
1765 This is a worst case quadratic time way of identifying variables
1766 that must have equivalent points-to sets, including those caused by
1767 static cycles, and single entry subgraphs, in the constraint graph.
1769 The technique is described in "Exploiting Pointer and Location
1770 Equivalence to Optimize Pointer Analysis", in the 14th International
1771 Static Analysis Symposium (SAS), August 2007. It is known as the
1772 "HU" algorithm, and is equivalent to value numbering the collapsed
1773 constraint graph including evaluating unions.
1775 The general method of finding equivalence classes is as follows:
1776 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1777 Initialize all non-REF nodes to be direct nodes.
1778 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1779 variable}
1780 For each constraint containing the dereference, we also do the same
1781 thing.
1783 We then compute SCC's in the graph and unify nodes in the same SCC,
1784 including pts sets.
1786 For each non-collapsed node x:
1787 Visit all unvisited explicit incoming edges.
1788 Ignoring all non-pointers, set pts(x) = Union of pts(y) for all y
1789 where y->x.
1790 Lookup the equivalence class for pts(x).
1791 If we found one, equivalence_class(x) = found class.
1792 Otherwise, equivalence_class(x) = new class, and new_class is
1793 added to the lookup table.
1795 All direct nodes with the same equivalence class can be replaced
1796 with a single representative node.
1797 All unlabeled nodes (label == 0) are not pointers and all edges
1798 involving them can be eliminated.
1799 We perform these optimizations during rewrite_constraints
1801 In addition to pointer equivalence class finding, we also perform
1802 location equivalence class finding. This is the set of variables
1803 that always appear together in points-to sets. We use this to
1804 compress the size of the points-to sets. */
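/* As an illustrative sketch (hypothetical constraints, not output of this
   pass): if a = &x and b = &x are the only constraints mentioning a and b,
   both nodes end up with the points-to label set {x}, are assigned the same
   pointer equivalence class, and the two direct nodes can then be
   represented by a single node; a node that ends up with label 0 is not a
   pointer, so all edges involving it can be eliminated. */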
1806 /* Current maximum pointer equivalence class id. */
1807 static int pointer_equiv_class;
1809 /* Current maximum location equivalence class id. */
1810 static int location_equiv_class;
1812 /* Recursive routine to find strongly connected components in GRAPH,
1813 and label its nodes with DFS numbers. */
1815 static void
1816 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1818 unsigned int i;
1819 bitmap_iterator bi;
1820 unsigned int my_dfs;
1822 gcc_assert (si->node_mapping[n] == n);
1823 SET_BIT (si->visited, n);
1824 si->dfs[n] = si->current_index ++;
1825 my_dfs = si->dfs[n];
1827 /* Visit all the successors. */
1828 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1830 unsigned int w = si->node_mapping[i];
1832 if (TEST_BIT (si->deleted, w))
1833 continue;
1835 if (!TEST_BIT (si->visited, w))
1836 condense_visit (graph, si, w);
1838 unsigned int t = si->node_mapping[w];
1839 unsigned int nnode = si->node_mapping[n];
1840 gcc_assert (nnode == n);
1842 if (si->dfs[t] < si->dfs[nnode])
1843 si->dfs[n] = si->dfs[t];
1847 /* Visit all the implicit predecessors. */
1848 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
1850 unsigned int w = si->node_mapping[i];
1852 if (TEST_BIT (si->deleted, w))
1853 continue;
1855 if (!TEST_BIT (si->visited, w))
1856 condense_visit (graph, si, w);
1858 unsigned int t = si->node_mapping[w];
1859 unsigned int nnode = si->node_mapping[n];
1860 gcc_assert (nnode == n);
1862 if (si->dfs[t] < si->dfs[nnode])
1863 si->dfs[n] = si->dfs[t];
1867 /* See if any components have been identified. */
1868 if (si->dfs[n] == my_dfs)
1870 while (VEC_length (unsigned, si->scc_stack) != 0
1871 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1873 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1874 si->node_mapping[w] = n;
1876 if (!TEST_BIT (graph->direct_nodes, w))
1877 RESET_BIT (graph->direct_nodes, n);
1879 /* Unify our nodes. */
1880 if (graph->preds[w])
1882 if (!graph->preds[n])
1883 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1884 bitmap_ior_into (graph->preds[n], graph->preds[w]);
1886 if (graph->implicit_preds[w])
1888 if (!graph->implicit_preds[n])
1889 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1890 bitmap_ior_into (graph->implicit_preds[n],
1891 graph->implicit_preds[w]);
1893 if (graph->points_to[w])
1895 if (!graph->points_to[n])
1896 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
1897 bitmap_ior_into (graph->points_to[n],
1898 graph->points_to[w]);
1900 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1902 unsigned int rep = si->node_mapping[i];
1903 graph->number_incoming[rep]++;
1906 SET_BIT (si->deleted, n);
1908 else
1909 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1912 /* Label pointer equivalences. */
1914 static void
1915 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1917 unsigned int i;
1918 bitmap_iterator bi;
1919 SET_BIT (si->visited, n);
1921 if (!graph->points_to[n])
1922 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
1924 /* Label and union our incoming edges' points-to sets. */
1925 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1927 unsigned int w = si->node_mapping[i];
1928 if (!TEST_BIT (si->visited, w))
1929 label_visit (graph, si, w);
1931 /* Skip unused edges */
1932 if (w == n || graph->pointer_label[w] == 0)
1934 graph->number_incoming[w]--;
1935 continue;
1937 if (graph->points_to[w])
1938 bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
1940 /* If all incoming edges to w have been processed and
1941 graph->points_to[w] was not stored in the hash table, we can
1942 free it. */
1943 graph->number_incoming[w]--;
1944 if (!graph->number_incoming[w] && !TEST_BIT (graph->pt_used, w))
1946 BITMAP_FREE (graph->points_to[w]);
1949 /* Indirect nodes get fresh variables. */
1950 if (!TEST_BIT (graph->direct_nodes, n))
1951 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
1953 if (!bitmap_empty_p (graph->points_to[n]))
1955 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
1956 graph->points_to[n]);
1957 if (!label)
1959 SET_BIT (graph->pt_used, n);
1960 label = pointer_equiv_class++;
1961 equiv_class_add (pointer_equiv_class_table,
1962 label, graph->points_to[n]);
1964 graph->pointer_label[n] = label;
1968 /* Perform offline variable substitution, discovering equivalence
1969 classes, and eliminating non-pointer variables. */
1971 static struct scc_info *
1972 perform_var_substitution (constraint_graph_t graph)
1974 unsigned int i;
1975 unsigned int size = graph->size;
1976 struct scc_info *si = init_scc_info (size);
1978 bitmap_obstack_initialize (&iteration_obstack);
1979 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
1980 equiv_class_label_eq, free);
1981 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
1982 equiv_class_label_eq, free);
1983 pointer_equiv_class = 1;
1984 location_equiv_class = 1;
1986 /* Condense the nodes, which means to find SCC's, count incoming
1987 predecessors, and unite nodes in SCC's. */
1988 for (i = 0; i < FIRST_REF_NODE; i++)
1989 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1990 condense_visit (graph, si, si->node_mapping[i]);
1992 sbitmap_zero (si->visited);
1993 /* Actually label the nodes for pointer equivalences. */
1994 for (i = 0; i < FIRST_REF_NODE; i++)
1995 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1996 label_visit (graph, si, si->node_mapping[i]);
1998 /* Calculate location equivalence labels. */
1999 for (i = 0; i < FIRST_REF_NODE; i++)
2001 bitmap pointed_by;
2002 bitmap_iterator bi;
2003 unsigned int j;
2004 unsigned int label;
2006 if (!graph->pointed_by[i])
2007 continue;
2008 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2010 /* Translate the pointed-by mapping for pointer equivalence
2011 labels. */
2012 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2014 bitmap_set_bit (pointed_by,
2015 graph->pointer_label[si->node_mapping[j]]);
2017 /* The original pointed_by is now dead. */
2018 BITMAP_FREE (graph->pointed_by[i]);
2020 /* Look up the location equivalence label if one exists, or make
2021 one otherwise. */
2022 label = equiv_class_lookup (location_equiv_class_table,
2023 pointed_by);
2024 if (label == 0)
2026 label = location_equiv_class++;
2027 equiv_class_add (location_equiv_class_table,
2028 label, pointed_by);
2030 else
2032 if (dump_file && (dump_flags & TDF_DETAILS))
2033 fprintf (dump_file, "Found location equivalence for node %s\n",
2034 get_varinfo (i)->name);
2035 BITMAP_FREE (pointed_by);
2037 graph->loc_label[i] = label;
2041 if (dump_file && (dump_flags & TDF_DETAILS))
2042 for (i = 0; i < FIRST_REF_NODE; i++)
2044 bool direct_node = TEST_BIT (graph->direct_nodes, i);
2045 fprintf (dump_file,
2046 "Equivalence classes for %s node id %d:%s are pointer: %d"
2047 ", location:%d\n",
2048 direct_node ? "Direct node" : "Indirect node", i,
2049 get_varinfo (i)->name,
2050 graph->pointer_label[si->node_mapping[i]],
2051 graph->loc_label[si->node_mapping[i]]);
2054 /* Quickly eliminate our non-pointer variables. */
2056 for (i = 0; i < FIRST_REF_NODE; i++)
2058 unsigned int node = si->node_mapping[i];
2060 if (graph->pointer_label[node] == 0)
2062 if (dump_file && (dump_flags & TDF_DETAILS))
2063 fprintf (dump_file,
2064 "%s is a non-pointer variable, eliminating edges.\n",
2065 get_varinfo (node)->name);
2066 stats.nonpointer_vars++;
2067 clear_edges_for_node (graph, node);
2071 return si;
2074 /* Free information that was only necessary for variable
2075 substitution. */
2077 static void
2078 free_var_substitution_info (struct scc_info *si)
2080 free_scc_info (si);
2081 free (graph->pointer_label);
2082 free (graph->loc_label);
2083 free (graph->pointed_by);
2084 free (graph->points_to);
2085 free (graph->number_incoming);
2086 free (graph->eq_rep);
2087 sbitmap_free (graph->direct_nodes);
2088 sbitmap_free (graph->pt_used);
2089 htab_delete (pointer_equiv_class_table);
2090 htab_delete (location_equiv_class_table);
2091 bitmap_obstack_release (&iteration_obstack);
2094 /* Return an existing node that is equivalent to NODE, which has
2095 equivalence class LABEL, if one exists. Return NODE otherwise. */
2097 static unsigned int
2098 find_equivalent_node (constraint_graph_t graph,
2099 unsigned int node, unsigned int label)
2101 /* If the address version of this variable is unused, we can
2102 substitute it for anything else with the same label.
2103 Otherwise, we know the pointers are equivalent, but not the
2104 locations, and we can unite them later. */
2106 if (!bitmap_bit_p (graph->address_taken, node))
2108 gcc_assert (label < graph->size);
2110 if (graph->eq_rep[label] != -1)
2112 /* Unify the two variables since we know they are equivalent. */
2113 if (unite (graph->eq_rep[label], node))
2114 unify_nodes (graph, graph->eq_rep[label], node, false);
2115 return graph->eq_rep[label];
2117 else
2119 graph->eq_rep[label] = node;
2120 graph->pe_rep[label] = node;
2123 else
2125 gcc_assert (label < graph->size);
2126 graph->pe[node] = label;
2127 if (graph->pe_rep[label] == -1)
2128 graph->pe_rep[label] = node;
2131 return node;
2134 /* Unite pointer equivalent but not location equivalent nodes in
2135 GRAPH. This may only be performed once variable substitution is
2136 finished. */
2138 static void
2139 unite_pointer_equivalences (constraint_graph_t graph)
2141 unsigned int i;
2143 /* Go through the pointer equivalences and unite them to their
2144 representative, if they aren't already. */
2145 for (i = 0; i < FIRST_REF_NODE; i++)
2147 unsigned int label = graph->pe[i];
2148 if (label)
2150 int label_rep = graph->pe_rep[label];
2152 if (label_rep == -1)
2153 continue;
2155 label_rep = find (label_rep);
2156 if (label_rep >= 0 && unite (label_rep, find (i)))
2157 unify_nodes (graph, label_rep, i, false);
2162 /* Move complex constraints to the GRAPH nodes they belong to. */
2164 static void
2165 move_complex_constraints (constraint_graph_t graph)
2167 int i;
2168 constraint_t c;
2170 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2172 if (c)
2174 struct constraint_expr lhs = c->lhs;
2175 struct constraint_expr rhs = c->rhs;
2177 if (lhs.type == DEREF)
2179 insert_into_complex (graph, lhs.var, c);
2181 else if (rhs.type == DEREF)
2183 if (!(get_varinfo (lhs.var)->is_special_var))
2184 insert_into_complex (graph, rhs.var, c);
2186 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2187 && (lhs.offset != 0 || rhs.offset != 0))
2189 insert_into_complex (graph, rhs.var, c);
2196 /* Optimize and rewrite complex constraints while performing
2197 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2198 result of perform_var_substitution.  */
2200 static void
2201 rewrite_constraints (constraint_graph_t graph,
2202 struct scc_info *si)
2204 int i;
2205 unsigned int j;
2206 constraint_t c;
2208 for (j = 0; j < graph->size; j++)
2209 gcc_assert (find (j) == j);
2211 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2213 struct constraint_expr lhs = c->lhs;
2214 struct constraint_expr rhs = c->rhs;
2215 unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
2216 unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
2217 unsigned int lhsnode, rhsnode;
2218 unsigned int lhslabel, rhslabel;
2220 lhsnode = si->node_mapping[lhsvar];
2221 rhsnode = si->node_mapping[rhsvar];
2222 lhslabel = graph->pointer_label[lhsnode];
2223 rhslabel = graph->pointer_label[rhsnode];
2225 /* See if it is really a non-pointer variable, and if so, ignore
2226 the constraint. */
2227 if (lhslabel == 0)
2229 if (dump_file && (dump_flags & TDF_DETAILS))
2232 fprintf (dump_file, "%s is a non-pointer variable,"
2233 "ignoring constraint:",
2234 get_varinfo (lhs.var)->name);
2235 dump_constraint (dump_file, c);
2237 VEC_replace (constraint_t, constraints, i, NULL);
2238 continue;
2241 if (rhslabel == 0)
2243 if (dump_file && (dump_flags & TDF_DETAILS))
2246 fprintf (dump_file, "%s is a non-pointer variable,"
2247 "ignoring constraint:",
2248 get_varinfo (rhs.var)->name);
2249 dump_constraint (dump_file, c);
2251 VEC_replace (constraint_t, constraints, i, NULL);
2252 continue;
2255 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2256 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2257 c->lhs.var = lhsvar;
2258 c->rhs.var = rhsvar;
2263 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2264 part of an SCC, false otherwise. */
2266 static bool
2267 eliminate_indirect_cycles (unsigned int node)
2269 if (graph->indirect_cycles[node] != -1
2270 && !bitmap_empty_p (get_varinfo (node)->solution))
2272 unsigned int i;
2273 VEC(unsigned,heap) *queue = NULL;
2274 int queuepos;
2275 unsigned int to = find (graph->indirect_cycles[node]);
2276 bitmap_iterator bi;
2278 /* We can't touch the solution set and call unify_nodes
2279 at the same time, because unify_nodes is going to do
2280 bitmap unions into it. */
2282 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2284 if (find (i) == i && i != to)
2286 if (unite (to, i))
2287 VEC_safe_push (unsigned, heap, queue, i);
2291 for (queuepos = 0;
2292 VEC_iterate (unsigned, queue, queuepos, i);
2293 queuepos++)
2295 unify_nodes (graph, to, i, true);
2297 VEC_free (unsigned, heap, queue);
2298 return true;
2300 return false;
2303 /* Solve the constraint graph GRAPH using our worklist solver.
2304 This is based on the PW* family of solvers from the "Efficient Field
2305 Sensitive Pointer Analysis for C" paper.
2306 It works by iterating over all the graph nodes, processing the complex
2307 constraints and propagating the copy constraints, until everything stops
2308 changing.  This corresponds to steps 6-8 in the solving list given above. */
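/* Informally, each iteration: compute a topological order of the
   current graph; for every representative node whose solution grew
   since the last visit, run its complex constraints on the delta and
   propagate the delta along its copy edges, marking any successor
   whose solution changed; stop once nothing is marked changed.  */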
2310 static void
2311 solve_graph (constraint_graph_t graph)
2313 unsigned int size = graph->size;
2314 unsigned int i;
2315 bitmap pts;
2317 changed_count = 0;
2318 changed = sbitmap_alloc (size);
2319 sbitmap_zero (changed);
2321 /* Mark all initial non-collapsed nodes as changed. */
2322 for (i = 0; i < size; i++)
2324 varinfo_t ivi = get_varinfo (i);
2325 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2326 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2327 || VEC_length (constraint_t, graph->complex[i]) > 0))
2329 SET_BIT (changed, i);
2330 changed_count++;
2334 /* Allocate a bitmap to be used to store the changed bits. */
2335 pts = BITMAP_ALLOC (&pta_obstack);
2337 while (changed_count > 0)
2339 unsigned int i;
2340 struct topo_info *ti = init_topo_info ();
2341 stats.iterations++;
2343 bitmap_obstack_initialize (&iteration_obstack);
2345 compute_topo_order (graph, ti);
2347 while (VEC_length (unsigned, ti->topo_order) != 0)
2350 i = VEC_pop (unsigned, ti->topo_order);
2352 /* If this variable is not a representative, skip it. */
2353 if (find (i) != i)
2354 continue;
2356 /* In certain indirect cycle cases, we may merge this
2357 variable to another. */
2358 if (eliminate_indirect_cycles (i) && find (i) != i)
2359 continue;
2361 /* If the node has changed, we need to process the
2362 complex constraints and outgoing edges again. */
2363 if (TEST_BIT (changed, i))
2365 unsigned int j;
2366 constraint_t c;
2367 bitmap solution;
2368 VEC(constraint_t,heap) *complex = graph->complex[i];
2369 bool solution_empty;
2371 RESET_BIT (changed, i);
2372 changed_count--;
2374 /* Compute the changed set of solution bits. */
2375 bitmap_and_compl (pts, get_varinfo (i)->solution,
2376 get_varinfo (i)->oldsolution);
2378 if (bitmap_empty_p (pts))
2379 continue;
2381 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2383 solution = get_varinfo (i)->solution;
2384 solution_empty = bitmap_empty_p (solution);
2386 /* Process the complex constraints */
2387 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2389 /* XXX: This is going to unsort the constraints in
2390 some cases, which will occasionally add duplicate
2391 constraints during unification. This does not
2392 affect correctness. */
2393 c->lhs.var = find (c->lhs.var);
2394 c->rhs.var = find (c->rhs.var);
2396 /* The only complex constraint that can change our
2397 solution to non-empty, given an empty solution,
2398 is a constraint where the lhs side is receiving
2399 some set from elsewhere. */
2400 if (!solution_empty || c->lhs.type != DEREF)
2401 do_complex_constraint (graph, c, pts);
2404 solution_empty = bitmap_empty_p (solution);
2406 if (!solution_empty
2407 /* Do not propagate the ESCAPED/CALLUSED solutions. */
2408 && i != escaped_id
2409 && i != callused_id)
2411 bitmap_iterator bi;
2413 /* Propagate solution to all successors. */
2414 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2415 0, j, bi)
2417 bitmap tmp;
2418 bool flag;
2420 unsigned int to = find (j);
2421 tmp = get_varinfo (to)->solution;
2422 flag = false;
2424 /* Don't try to propagate to ourselves. */
2425 if (to == i)
2426 continue;
2428 flag = set_union_with_increment (tmp, pts, 0);
2430 if (flag)
2432 get_varinfo (to)->solution = tmp;
2433 if (!TEST_BIT (changed, to))
2435 SET_BIT (changed, to);
2436 changed_count++;
2443 free_topo_info (ti);
2444 bitmap_obstack_release (&iteration_obstack);
2447 BITMAP_FREE (pts);
2448 sbitmap_free (changed);
2449 bitmap_obstack_release (&oldpta_obstack);
2452 /* Map from trees to variable infos. */
2453 static struct pointer_map_t *vi_for_tree;
2456 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2458 static void
2459 insert_vi_for_tree (tree t, varinfo_t vi)
2461 void **slot = pointer_map_insert (vi_for_tree, t);
2462 gcc_assert (vi);
2463 gcc_assert (*slot == NULL);
2464 *slot = vi;
2467 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2468 exist in the map, return NULL, otherwise, return the varinfo we found. */
2470 static varinfo_t
2471 lookup_vi_for_tree (tree t)
2473 void **slot = pointer_map_contains (vi_for_tree, t);
2474 if (slot == NULL)
2475 return NULL;
2477 return (varinfo_t) *slot;
2480 /* Return a printable name for DECL */
2482 static const char *
2483 alias_get_name (tree decl)
2485 const char *res = get_name (decl);
2486 char *temp;
2487 int num_printed = 0;
2489 if (res != NULL)
2490 return res;
2492 res = "NULL";
2493 if (!dump_file)
2494 return res;
2496 if (TREE_CODE (decl) == SSA_NAME)
2498 num_printed = asprintf (&temp, "%s_%u",
2499 alias_get_name (SSA_NAME_VAR (decl)),
2500 SSA_NAME_VERSION (decl));
2502 else if (DECL_P (decl))
2504 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2506 if (num_printed > 0)
2508 res = ggc_strdup (temp);
2509 free (temp);
2511 return res;
2514 /* Find the variable id for tree T in the map.
2515 If T doesn't exist in the map, create an entry for it and return it. */
2517 static varinfo_t
2518 get_vi_for_tree (tree t)
2520 void **slot = pointer_map_contains (vi_for_tree, t);
2521 if (slot == NULL)
2522 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2524 return (varinfo_t) *slot;
2527 /* Get a constraint expression for a new temporary variable. */
2529 static struct constraint_expr
2530 get_constraint_exp_for_temp (tree t)
2532 struct constraint_expr cexpr;
2534 gcc_assert (SSA_VAR_P (t));
2536 cexpr.type = SCALAR;
2537 cexpr.var = get_vi_for_tree (t)->id;
2538 cexpr.offset = 0;
2540 return cexpr;
2543 /* Get a constraint expression vector from an SSA_VAR_P node.
2544 If address_p is true, the result will have its address taken.  */
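/* For instance, for a variable s of type struct { int *a; int *b; },
   asking for its constraints with address_p false pushes one SCALAR
   expression per sub-field (one for a, one for b), while with
   address_p true only the base variable is pushed.  The exact split
   depends on how s was broken into varinfos.  */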
2546 static void
2547 get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p)
2549 struct constraint_expr cexpr;
2550 varinfo_t vi;
2552 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2553 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2555 /* For parameters, get at the points-to set for the actual parm
2556 decl. */
2557 if (TREE_CODE (t) == SSA_NAME
2558 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2559 && SSA_NAME_IS_DEFAULT_DEF (t))
2561 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2562 return;
2565 vi = get_vi_for_tree (t);
2566 cexpr.var = vi->id;
2567 cexpr.type = SCALAR;
2568 cexpr.offset = 0;
2569 /* If we determine the result is "anything", and we know this is readonly,
2570 say it points to readonly memory instead. */
2571 if (cexpr.var == anything_id && TREE_READONLY (t))
2573 gcc_unreachable ();
2574 cexpr.type = ADDRESSOF;
2575 cexpr.var = readonly_id;
2578 /* If we are not taking the address of the constraint expr, add all
2579 sub-fields of the variable as well.  */
2580 if (!address_p)
2582 for (; vi; vi = vi->next)
2584 cexpr.var = vi->id;
2585 VEC_safe_push (ce_s, heap, *results, &cexpr);
2587 return;
2590 VEC_safe_push (ce_s, heap, *results, &cexpr);
2593 /* Process constraint T, performing various simplifications and then
2594 adding it to our list of overall constraints. */
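/* Informally, the rewrites performed here are:
     ANYTHING = ANYTHING                 dropped
     &ANYTHING = x                       becomes  x = &ANYTHING
     *x = *y   (y not ANYTHING)          becomes  tmp = *y; *x = tmp
     *x = &y                             becomes  tmp = &y; *x = tmp
   Everything else is pushed onto the constraints vector as-is.  */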
2596 static void
2597 process_constraint (constraint_t t)
2599 struct constraint_expr rhs = t->rhs;
2600 struct constraint_expr lhs = t->lhs;
2602 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2603 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2605 if (!use_field_sensitive)
2607 t->rhs.offset = 0;
2608 t->lhs.offset = 0;
2611 /* ANYTHING == ANYTHING is pointless. */
2612 if (lhs.var == anything_id && rhs.var == anything_id)
2613 return;
2615 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING.  */
2616 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
2618 rhs = t->lhs;
2619 t->lhs = t->rhs;
2620 t->rhs = rhs;
2621 process_constraint (t);
2623 /* This can happen in our IR with things like n->a = *p */
2624 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2626 /* Split into tmp = *rhs, *lhs = tmp */
2627 tree rhsdecl = get_varinfo (rhs.var)->decl;
2628 tree pointertype = TREE_TYPE (rhsdecl);
2629 tree pointedtotype = TREE_TYPE (pointertype);
2630 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2631 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
2633 process_constraint (new_constraint (tmplhs, rhs));
2634 process_constraint (new_constraint (lhs, tmplhs));
2636 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2638 /* Split into tmp = &rhs, *lhs = tmp */
2639 tree rhsdecl = get_varinfo (rhs.var)->decl;
2640 tree pointertype = TREE_TYPE (rhsdecl);
2641 tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp");
2642 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
2644 process_constraint (new_constraint (tmplhs, rhs));
2645 process_constraint (new_constraint (lhs, tmplhs));
2647 else
2649 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2650 VEC_safe_push (constraint_t, heap, constraints, t);
2654 /* Return true if T is a variable of a type that could contain
2655 pointers. */
2657 static bool
2658 could_have_pointers (tree t)
2660 tree type = TREE_TYPE (t);
2662 if (POINTER_TYPE_P (type)
2663 || AGGREGATE_TYPE_P (type)
2664 || TREE_CODE (type) == COMPLEX_TYPE)
2665 return true;
2667 return false;
2670 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2671 structure. */
2673 static unsigned HOST_WIDE_INT
2674 bitpos_of_field (const tree fdecl)
2677 if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
2678 || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
2679 return -1;
2681 return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
2682 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
2686 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
2687 If address_p is true the result will have its address taken.  */
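/* E.g. for an access s.f2 where f2 starts at bit 32 of s (an
   illustrative layout), we take the constraint for the base s and,
   when that result is a SCALAR, walk its field list for the first
   varinfo overlapping bit 32; for a DEREF base the bit offset is
   recorded in the constraint instead.  */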
2689 static void
2690 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
2691 bool address_p)
2693 tree orig_t = t;
2694 HOST_WIDE_INT bitsize = -1;
2695 HOST_WIDE_INT bitmaxsize = -1;
2696 HOST_WIDE_INT bitpos;
2697 tree forzero;
2698 struct constraint_expr *result;
2700 /* Some people like to do cute things like take the address of
2701 &0->a.b */
2702 forzero = t;
2703 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2704 forzero = TREE_OPERAND (forzero, 0);
2706 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2708 struct constraint_expr temp;
2710 temp.offset = 0;
2711 temp.var = integer_id;
2712 temp.type = SCALAR;
2713 VEC_safe_push (ce_s, heap, *results, &temp);
2714 return;
2717 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2719 /* Pretend to take the address of the base, we'll take care of
2720 adding the required subset of sub-fields below. */
2721 get_constraint_for_1 (t, results, true);
2722 result = VEC_last (ce_s, *results);
2724 gcc_assert (VEC_length (ce_s, *results) == 1);
2726 /* This can also happen due to weird offsetof type macros. */
2727 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2728 result->type = SCALAR;
2730 if (result->type == SCALAR)
2732 /* In languages like C, you can access one past the end of an
2733 array. You aren't allowed to dereference it, so we can
2734 ignore this constraint. When we handle pointer subtraction,
2735 we may have to do something cute here. */
2737 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result->var)->fullsize
2738 && bitmaxsize != 0)
2740 /* It's also not true that the constraint will actually start at the
2741 right offset; it may start in some padding.  We only care about
2742 setting the constraint to the first actual field it touches, so
2743 walk to find it. */
2744 struct constraint_expr cexpr = *result;
2745 varinfo_t curr;
2746 VEC_pop (ce_s, *results);
2747 cexpr.offset = 0;
2748 for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
2750 if (ranges_overlap_p (curr->offset, curr->size,
2751 bitpos, bitmaxsize))
2753 cexpr.var = curr->id;
2754 VEC_safe_push (ce_s, heap, *results, &cexpr);
2755 if (address_p)
2756 break;
2759 /* Assert that we found *some* field there. The user couldn't be
2760 accessing *only* padding. */
2761 /* Still the user could access one past the end of an array
2762 embedded in a struct resulting in accessing *only* padding. */
2763 gcc_assert (VEC_length (ce_s, *results) >= 1
2764 || ref_contains_array_ref (orig_t));
2766 else if (bitmaxsize == 0)
2768 if (dump_file && (dump_flags & TDF_DETAILS))
2769 fprintf (dump_file, "Access to zero-sized part of variable,"
2770 "ignoring\n");
2772 else
2773 if (dump_file && (dump_flags & TDF_DETAILS))
2774 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
2776 else if (bitmaxsize == -1)
2778 /* We can't handle DEREF constraints with unknown size, we'll
2779 get the wrong answer. Punt and return anything. */
2780 result->var = anything_id;
2781 result->offset = 0;
2783 else
2784 result->offset = bitpos;
2788 /* Dereference the constraint expression CONS, and return the result.
2789 DEREF (ADDRESSOF) = SCALAR
2790 DEREF (SCALAR) = DEREF
2791 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2792 This is needed so that we can handle dereferencing DEREF constraints. */
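/* Example: dereferencing the constraint for "p" (SCALAR p) yields
   DEREF p, i.e. *p; dereferencing "&x" yields back SCALAR x; and
   dereferencing something that is already a DEREF, as for **p,
   introduces a temporary: tmp = *p, then DEREF tmp.  */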
2794 static void
2795 do_deref (VEC (ce_s, heap) **constraints)
2797 struct constraint_expr *c;
2798 unsigned int i = 0;
2800 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
2802 if (c->type == SCALAR)
2803 c->type = DEREF;
2804 else if (c->type == ADDRESSOF)
2805 c->type = SCALAR;
2806 else if (c->type == DEREF)
2808 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
2809 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
2810 process_constraint (new_constraint (tmplhs, *c));
2811 c->var = tmplhs.var;
2813 else
2814 gcc_unreachable ();
2818 /* Given a tree T, return the constraint expression for it. */
2820 static void
2821 get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
2823 struct constraint_expr temp;
2825 /* x = integer is all glommed to a single variable, which doesn't
2826 point to anything by itself. That is, of course, unless it is an
2827 integer constant being treated as a pointer, in which case, we
2828 will return that this is really the addressof anything. This
2829 happens below, since it will fall into the default case. The only
2830 case we know something about an integer treated like a pointer is
2831 when it is the NULL pointer, and then we just say it points to
2832 NULL. */
2833 if (TREE_CODE (t) == INTEGER_CST
2834 && integer_zerop (t))
2836 temp.var = nothing_id;
2837 temp.type = ADDRESSOF;
2838 temp.offset = 0;
2839 VEC_safe_push (ce_s, heap, *results, &temp);
2840 return;
2843 /* String constants are read-only. */
2844 if (TREE_CODE (t) == STRING_CST)
2846 temp.var = readonly_id;
2847 temp.type = SCALAR;
2848 temp.offset = 0;
2849 VEC_safe_push (ce_s, heap, *results, &temp);
2850 return;
2853 switch (TREE_CODE_CLASS (TREE_CODE (t)))
2855 case tcc_expression:
2856 case tcc_vl_exp:
2858 switch (TREE_CODE (t))
2860 case ADDR_EXPR:
2862 struct constraint_expr *c;
2863 unsigned int i;
2864 tree exp = TREE_OPERAND (t, 0);
2866 get_constraint_for_1 (exp, results, true);
2868 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
2870 if (c->type == DEREF)
2871 c->type = SCALAR;
2872 else
2873 c->type = ADDRESSOF;
2875 return;
2877 break;
2878 case CALL_EXPR:
2879 /* XXX: In interprocedural mode, if we didn't have the
2880 body, we would need to do *each pointer argument =
2881 &ANYTHING added. */
2882 if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
2884 varinfo_t vi;
2885 tree heapvar = heapvar_lookup (t);
2887 if (heapvar == NULL)
2889 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
2890 DECL_EXTERNAL (heapvar) = 1;
2891 get_var_ann (heapvar)->is_heapvar = 1;
2892 if (gimple_referenced_vars (cfun))
2893 add_referenced_var (heapvar);
2894 heapvar_insert (t, heapvar);
2897 temp.var = create_variable_info_for (heapvar,
2898 alias_get_name (heapvar));
2900 vi = get_varinfo (temp.var);
2901 vi->is_artificial_var = 1;
2902 vi->is_heap_var = 1;
2903 temp.type = ADDRESSOF;
2904 temp.offset = 0;
2905 VEC_safe_push (ce_s, heap, *results, &temp);
2906 return;
2908 else
2910 temp.var = anything_id;
2911 temp.type = SCALAR;
2912 temp.offset = 0;
2913 VEC_safe_push (ce_s, heap, *results, &temp);
2914 return;
2916 break;
2917 default:
2919 temp.type = ADDRESSOF;
2920 temp.var = anything_id;
2921 temp.offset = 0;
2922 VEC_safe_push (ce_s, heap, *results, &temp);
2923 return;
2927 case tcc_reference:
2929 switch (TREE_CODE (t))
2931 case INDIRECT_REF:
2933 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
2934 do_deref (results);
2935 return;
2937 case ARRAY_REF:
2938 case ARRAY_RANGE_REF:
2939 case COMPONENT_REF:
2940 get_constraint_for_component_ref (t, results, address_p);
2941 return;
2942 default:
2944 temp.type = ADDRESSOF;
2945 temp.var = anything_id;
2946 temp.offset = 0;
2947 VEC_safe_push (ce_s, heap, *results, &temp);
2948 return;
2952 case tcc_unary:
2954 switch (TREE_CODE (t))
2956 CASE_CONVERT:
2958 tree op = TREE_OPERAND (t, 0);
2960 /* Casts from non-pointer to pointer are bad news for us.
2961 Anything else, we see through.  */
2962 if (!(POINTER_TYPE_P (TREE_TYPE (t))
2963 && ! POINTER_TYPE_P (TREE_TYPE (op))))
2965 get_constraint_for_1 (op, results, address_p);
2966 return;
2969 /* FALLTHRU */
2971 default:
2973 temp.type = ADDRESSOF;
2974 temp.var = anything_id;
2975 temp.offset = 0;
2976 VEC_safe_push (ce_s, heap, *results, &temp);
2977 return;
2981 case tcc_exceptional:
2983 switch (TREE_CODE (t))
2985 case PHI_NODE:
2987 get_constraint_for_1 (PHI_RESULT (t), results, address_p);
2988 return;
2990 break;
2991 case SSA_NAME:
2993 get_constraint_for_ssa_var (t, results, address_p);
2994 return;
2996 break;
2997 default:
2999 temp.type = ADDRESSOF;
3000 temp.var = anything_id;
3001 temp.offset = 0;
3002 VEC_safe_push (ce_s, heap, *results, &temp);
3003 return;
3007 case tcc_declaration:
3009 get_constraint_for_ssa_var (t, results, address_p);
3010 return;
3012 default:
3014 temp.type = ADDRESSOF;
3015 temp.var = anything_id;
3016 temp.offset = 0;
3017 VEC_safe_push (ce_s, heap, *results, &temp);
3018 return;
3023 /* Given a gimple tree T, return the constraint expression vector for it. */
3025 static void
3026 get_constraint_for (tree t, VEC (ce_s, heap) **results)
3028 gcc_assert (VEC_length (ce_s, *results) == 0);
3030 get_constraint_for_1 (t, results, false);
3033 /* Handle the structure copy case where we have a simple structure copy
3034 between LHS and RHS that is of SIZE (in bits)
3036 For each field of the lhs variable (lhsfield)
3037 For each field of the rhs variable at lhsfield.offset (rhsfield)
3038 add the constraint lhsfield = rhsfield
3040 If we fail due to some kind of type unsafety or other thing we
3041 can't handle, return false. We expect the caller to collapse the
3042 variable in that case. */
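/* For example, copying a struct { int *a; int *b; } from y to x
   ideally yields the constraints x.a = y.a and x.b = y.b; if no rhs
   field is found at a matching offset we return false and let the
   caller collapse the variables instead.  */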
3044 static bool
3045 do_simple_structure_copy (const struct constraint_expr lhs,
3046 const struct constraint_expr rhs,
3047 const unsigned HOST_WIDE_INT size)
3049 varinfo_t p = get_varinfo (lhs.var);
3050 unsigned HOST_WIDE_INT pstart, last;
3051 pstart = p->offset;
3052 last = p->offset + size;
3053 for (; p && p->offset < last; p = p->next)
3055 varinfo_t q;
3056 struct constraint_expr templhs = lhs;
3057 struct constraint_expr temprhs = rhs;
3058 unsigned HOST_WIDE_INT fieldoffset;
3060 templhs.var = p->id;
3061 q = get_varinfo (temprhs.var);
3062 fieldoffset = p->offset - pstart;
3063 q = first_vi_for_offset (q, q->offset + fieldoffset);
3064 if (!q)
3065 return false;
3066 temprhs.var = q->id;
3067 process_constraint (new_constraint (templhs, temprhs));
3069 return true;
3073 /* Handle the structure copy case where we have a structure copy between an
3074 aggregate on the LHS and a dereference of a pointer on the RHS
3075 that is of SIZE (in bits)
3077 For each field of the lhs variable (lhsfield)
3078 rhs.offset = lhsfield->offset
3079 add the constraint lhsfield = rhs
3082 static void
3083 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
3084 const struct constraint_expr rhs,
3085 const unsigned HOST_WIDE_INT size)
3087 varinfo_t p = get_varinfo (lhs.var);
3088 unsigned HOST_WIDE_INT pstart, last;
3089 pstart = p->offset;
3090 last = p->offset + size;
3092 for (; p && p->offset < last; p = p->next)
3094 varinfo_t q;
3095 struct constraint_expr templhs = lhs;
3096 struct constraint_expr temprhs = rhs;
3097 unsigned HOST_WIDE_INT fieldoffset;
3100 if (templhs.type == SCALAR)
3101 templhs.var = p->id;
3102 else
3103 templhs.offset = p->offset;
3105 q = get_varinfo (temprhs.var);
3106 fieldoffset = p->offset - pstart;
3107 temprhs.offset += fieldoffset;
3108 process_constraint (new_constraint (templhs, temprhs));
3112 /* Handle the structure copy case where we have a structure copy
3113 between an aggregate on the RHS and a dereference of a pointer on
3114 the LHS that is of SIZE (in bits)
3116 For each field of the rhs variable (rhsfield)
3117 lhs.offset = rhsfield->offset
3118 add the constraint lhs = rhsfield
3121 static void
3122 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
3123 const struct constraint_expr rhs,
3124 const unsigned HOST_WIDE_INT size)
3126 varinfo_t p = get_varinfo (rhs.var);
3127 unsigned HOST_WIDE_INT pstart, last;
3128 pstart = p->offset;
3129 last = p->offset + size;
3131 for (; p && p->offset < last; p = p->next)
3133 varinfo_t q;
3134 struct constraint_expr templhs = lhs;
3135 struct constraint_expr temprhs = rhs;
3136 unsigned HOST_WIDE_INT fieldoffset;
3139 if (temprhs.type == SCALAR)
3140 temprhs.var = p->id;
3141 else
3142 temprhs.offset = p->offset;
3144 q = get_varinfo (templhs.var);
3145 fieldoffset = p->offset - pstart;
3146 templhs.offset += fieldoffset;
3147 process_constraint (new_constraint (templhs, temprhs));
3151 /* Sometimes, frontends like to give us bad type information. This
3152 function will collapse all the fields from VAR to the end of VAR,
3153 into VAR, so that we treat those fields as a single variable.
3154 We return the variable they were collapsed into. */
3156 static unsigned int
3157 collapse_rest_of_var (unsigned int var)
3159 varinfo_t currvar = get_varinfo (var);
3160 varinfo_t field;
3162 for (field = currvar->next; field; field = field->next)
3164 if (dump_file)
3165 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
3166 field->name, currvar->name);
3168 gcc_assert (field->collapsed_to == 0);
3169 field->collapsed_to = currvar->id;
3172 currvar->next = NULL;
3173 currvar->size = currvar->fullsize - currvar->offset;
3175 return currvar->id;
3178 /* Handle aggregate copies by expanding into copies of the respective
3179 fields of the structures. */
3181 static void
3182 do_structure_copy (tree lhsop, tree rhsop)
3184 struct constraint_expr lhs, rhs, tmp;
3185 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3186 varinfo_t p;
3187 unsigned HOST_WIDE_INT lhssize;
3188 unsigned HOST_WIDE_INT rhssize;
3190 /* Pretend we are taking the address of the constraint exprs.
3191 We deal with walking the sub-fields ourselves. */
3192 get_constraint_for_1 (lhsop, &lhsc, true);
3193 get_constraint_for_1 (rhsop, &rhsc, true);
3194 gcc_assert (VEC_length (ce_s, lhsc) == 1);
3195 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3196 lhs = *(VEC_last (ce_s, lhsc));
3197 rhs = *(VEC_last (ce_s, rhsc));
3199 VEC_free (ce_s, heap, lhsc);
3200 VEC_free (ce_s, heap, rhsc);
3202 /* If we have special var = x, swap it around. */
3203 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
3205 tmp = lhs;
3206 lhs = rhs;
3207 rhs = tmp;
3210 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
3211 possible it's something we could handle. However, most cases falling
3212 into this are dealing with transparent unions, which are slightly
3213 weird. */
3214 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
3216 rhs.type = ADDRESSOF;
3217 rhs.var = anything_id;
3220 /* If the RHS is a special var, or an addressof, set all the LHS fields to
3221 that special var. */
3222 if (rhs.var <= integer_id)
3224 for (p = get_varinfo (lhs.var); p; p = p->next)
3226 struct constraint_expr templhs = lhs;
3227 struct constraint_expr temprhs = rhs;
3229 if (templhs.type == SCALAR)
3230 templhs.var = p->id;
3231 else
3232 templhs.offset += p->offset;
3233 process_constraint (new_constraint (templhs, temprhs));
3236 else
3238 tree rhstype = TREE_TYPE (rhsop);
3239 tree lhstype = TREE_TYPE (lhsop);
3240 tree rhstypesize;
3241 tree lhstypesize;
3243 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
3244 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
3246 /* If we have variably sized types on the rhs or lhs, and a deref
3247 constraint, add the constraint, lhsconstraint = &ANYTHING.
3248 This is conservatively correct because either the lhs is an unknown
3249 sized var (if the constraint is SCALAR), or the lhs is a DEREF
3250 constraint, and every variable it can point to must be unknown sized
3251 anyway, so we don't need to worry about fields at all. */
3252 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
3253 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
3255 rhs.var = anything_id;
3256 rhs.type = ADDRESSOF;
3257 rhs.offset = 0;
3258 process_constraint (new_constraint (lhs, rhs));
3259 return;
3262 /* The size only really matters insofar as we don't set more or less of
3263 the variable. If we hit an unknown size var, the size should be the
3264 whole darn thing. */
3265 if (get_varinfo (rhs.var)->is_unknown_size_var)
3266 rhssize = ~0;
3267 else
3268 rhssize = TREE_INT_CST_LOW (rhstypesize);
3270 if (get_varinfo (lhs.var)->is_unknown_size_var)
3271 lhssize = ~0;
3272 else
3273 lhssize = TREE_INT_CST_LOW (lhstypesize);
3276 if (rhs.type == SCALAR && lhs.type == SCALAR)
3278 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
3280 lhs.var = collapse_rest_of_var (lhs.var);
3281 rhs.var = collapse_rest_of_var (rhs.var);
3282 lhs.offset = 0;
3283 rhs.offset = 0;
3284 lhs.type = SCALAR;
3285 rhs.type = SCALAR;
3286 process_constraint (new_constraint (lhs, rhs));
3289 else if (lhs.type != DEREF && rhs.type == DEREF)
3290 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3291 else if (lhs.type == DEREF && rhs.type != DEREF)
3292 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3293 else
3295 tree pointedtotype = lhstype;
3296 tree tmpvar;
3298 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
3299 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
3300 do_structure_copy (tmpvar, rhsop);
3301 do_structure_copy (lhsop, tmpvar);
3307 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3308 Expressions of the type PTR + CST can be handled in two ways:
3310 1- If the constraint for PTR is ADDRESSOF for a non-structure
3311 variable, then we can use it directly because adding or
3312 subtracting a constant may not alter the original ADDRESSOF
3313 constraint (i.e., pointer arithmetic may not legally go outside
3314 an object's boundaries).
3316 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3317 then if CST is a compile-time constant that can be used as an
3318 offset, we can determine which sub-variable will be pointed-to
3319 by the expression.
3321 Return true if the expression is handled. For any other kind of
3322 expression, return false so that each operand can be added as a
3323 separate constraint by the caller. */
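/* E.g. for q = &s + 8, where s is a structure with a sub-variable
   starting at byte offset 8 (illustrative numbers), the ADDRESSOF
   constraint is redirected to that sub-variable (case 2 above);
   otherwise the constant simply becomes the offset of the RHS
   constraint expression.  */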
3325 static bool
3326 handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
3328 tree op0, op1;
3329 struct constraint_expr *c, *c2;
3330 unsigned int i = 0;
3331 unsigned int j = 0;
3332 VEC (ce_s, heap) *temp = NULL;
3333 unsigned HOST_WIDE_INT rhsunitoffset, rhsoffset;
3335 if (TREE_CODE (expr) != POINTER_PLUS_EXPR)
3336 return false;
3338 op0 = TREE_OPERAND (expr, 0);
3339 op1 = TREE_OPERAND (expr, 1);
3340 gcc_assert (POINTER_TYPE_P (TREE_TYPE (op0)));
3342 /* If the offset is not a non-negative integer constant that fits
3343 in a HOST_WIDE_INT, we cannot handle it here. */
3344 if (!host_integerp (op1, 1))
3345 return false;
3347 /* Make sure the bit-offset also fits. */
3348 rhsunitoffset = TREE_INT_CST_LOW (op1);
3349 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
3350 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3351 return false;
3353 get_constraint_for (op0, &temp);
3355 for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
3356 for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
3358 if (c2->type == ADDRESSOF && rhsoffset != 0)
3360 varinfo_t temp = get_varinfo (c2->var);
3362 /* An access one after the end of an array is valid,
3363 so simply punt on accesses we cannot resolve. */
3364 temp = first_vi_for_offset (temp, rhsoffset);
3365 if (temp == NULL)
3366 continue;
3367 c2->var = temp->id;
3368 c2->offset = 0;
3370 else
3371 c2->offset = rhsoffset;
3372 process_constraint (new_constraint (*c, *c2));
3375 VEC_free (ce_s, heap, temp);
3377 return true;
3380 /* Create a constraint ID = OP. */
3382 static void
3383 make_constraint_to (unsigned id, tree op)
3385 VEC(ce_s, heap) *rhsc = NULL;
3386 struct constraint_expr *c;
3387 struct constraint_expr includes;
3388 unsigned int j;
3390 includes.var = id;
3391 includes.offset = 0;
3392 includes.type = SCALAR;
3394 get_constraint_for (op, &rhsc);
3395 for (j = 0; VEC_iterate (ce_s, rhsc, j, c); j++)
3396 process_constraint (new_constraint (includes, *c));
3397 VEC_free (ce_s, heap, rhsc);
3400 /* Make constraints necessary to make OP escape. */
3402 static void
3403 make_escape_constraint (tree op)
3405 make_constraint_to (escaped_id, op);
3408 /* For non-IPA mode, generate constraints necessary for a call on the
3409 RHS. */
3411 static void
3412 handle_rhs_call (tree rhs)
3414 tree arg;
3415 call_expr_arg_iterator iter;
3417 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhs)
3418 /* Find those pointers being passed, and make sure they end up
3419 pointing to anything. */
3420 if (could_have_pointers (arg))
3421 make_escape_constraint (arg);
3423 /* The static chain escapes as well. */
3424 if (CALL_EXPR_STATIC_CHAIN (rhs))
3425 make_escape_constraint (CALL_EXPR_STATIC_CHAIN (rhs));
3428 /* For non-IPA mode, generate constraints necessary for a call
3429 that returns a pointer and assigns it to LHS. This simply makes
3430 the LHS point to global and escaped variables. */
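/* Concretely: for p = malloc (n) the LHS is made to point to a fresh
   artificial HEAP variable, while for other calls it is made to point
   to ESCAPED, which roughly stands for anything whose address has
   escaped.  */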
3432 static void
3433 handle_lhs_call (tree lhs, int flags)
3435 VEC(ce_s, heap) *lhsc = NULL;
3436 struct constraint_expr rhsc;
3437 unsigned int j;
3438 struct constraint_expr *lhsp;
3440 get_constraint_for (lhs, &lhsc);
3442 if (flags & ECF_MALLOC)
3444 tree heapvar = heapvar_lookup (lhs);
3445 varinfo_t vi;
3447 if (heapvar == NULL)
3449 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
3450 DECL_EXTERNAL (heapvar) = 1;
3451 get_var_ann (heapvar)->is_heapvar = 1;
3452 if (gimple_referenced_vars (cfun))
3453 add_referenced_var (heapvar);
3454 heapvar_insert (lhs, heapvar);
3457 rhsc.var = create_variable_info_for (heapvar,
3458 alias_get_name (heapvar));
3459 vi = get_varinfo (rhsc.var);
3460 vi->is_artificial_var = 1;
3461 vi->is_heap_var = 1;
3462 rhsc.type = ADDRESSOF;
3463 rhsc.offset = 0;
3465 else
3467 rhsc.var = escaped_id;
3468 rhsc.offset = 0;
3469 rhsc.type = ADDRESSOF;
3471 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3472 process_constraint (new_constraint (*lhsp, rhsc));
3473 VEC_free (ce_s, heap, lhsc);
3476 /* For non-IPA mode, generate constraints necessary for a call of a
3477 const function that returns a pointer in the statement STMT. */
3479 static void
3480 handle_const_call (tree stmt)
3482 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3483 tree call = get_call_expr_in (stmt);
3484 VEC(ce_s, heap) *lhsc = NULL;
3485 struct constraint_expr rhsc;
3486 unsigned int j;
3487 struct constraint_expr *lhsp;
3488 tree arg, tmpvar;
3489 call_expr_arg_iterator iter;
3490 struct constraint_expr tmpc;
3492 get_constraint_for (lhs, &lhsc);
3494 /* If this is a nested function then it can return anything. */
3495 if (CALL_EXPR_STATIC_CHAIN (call))
3497 rhsc.var = anything_id;
3498 rhsc.offset = 0;
3499 rhsc.type = ADDRESSOF;
3500 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3501 process_constraint (new_constraint (*lhsp, rhsc));
3502 VEC_free (ce_s, heap, lhsc);
3503 return;
3506 /* We always use a temporary here, otherwise we end up with a quadratic
3507 amount of constraints for
3508 large_struct = const_call (large_struct);
3509 in field-sensitive PTA. */
3510 tmpvar = create_tmp_var_raw (ptr_type_node, "consttmp");
3511 tmpc = get_constraint_exp_for_temp (tmpvar);
3513 /* May return addresses of globals. */
3514 rhsc.var = nonlocal_id;
3515 rhsc.offset = 0;
3516 rhsc.type = ADDRESSOF;
3517 process_constraint (new_constraint (tmpc, rhsc));
3519 /* May return arguments. */
3520 FOR_EACH_CALL_EXPR_ARG (arg, iter, call)
3521 if (could_have_pointers (arg))
3523 VEC(ce_s, heap) *argc = NULL;
3524 struct constraint_expr *argp;
3525 int i;
3527 get_constraint_for (arg, &argc);
3528 for (i = 0; VEC_iterate (ce_s, argc, i, argp); i++)
3529 process_constraint (new_constraint (tmpc, *argp));
3530 VEC_free (ce_s, heap, argc);
3533 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3534 process_constraint (new_constraint (*lhsp, tmpc));
3536 VEC_free (ce_s, heap, lhsc);
3539 /* For non-IPA mode, generate constraints necessary for a call to a
3540 pure function in statement STMT. */
3542 static void
3543 handle_pure_call (tree stmt)
3545 tree call = get_call_expr_in (stmt);
3546 tree arg;
3547 call_expr_arg_iterator iter;
3549 /* Memory reached from pointer arguments is call-used. */
3550 FOR_EACH_CALL_EXPR_ARG (arg, iter, call)
3551 if (could_have_pointers (arg))
3552 make_constraint_to (callused_id, arg);
3554 /* The static chain is used as well. */
3555 if (CALL_EXPR_STATIC_CHAIN (call))
3556 make_constraint_to (callused_id, CALL_EXPR_STATIC_CHAIN (call));
3558 /* If the call returns a pointer it may point to reachable memory
3559 from the arguments. Not so for malloc functions though. */
3560 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3561 && could_have_pointers (GIMPLE_STMT_OPERAND (stmt, 0))
3562 && !(call_expr_flags (call) & ECF_MALLOC))
3564 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
3565 VEC(ce_s, heap) *lhsc = NULL;
3566 struct constraint_expr rhsc;
3567 struct constraint_expr *lhsp;
3568 unsigned j;
3570 get_constraint_for (lhs, &lhsc);
3572 /* If this is a nested function then it can return anything. */
3573 if (CALL_EXPR_STATIC_CHAIN (call))
3575 rhsc.var = anything_id;
3576 rhsc.offset = 0;
3577 rhsc.type = ADDRESSOF;
3578 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3579 process_constraint (new_constraint (*lhsp, rhsc));
3580 VEC_free (ce_s, heap, lhsc);
3581 return;
3584 /* Else just add the call-used memory here. Escaped variables
3585 and globals will be dealt with in handle_lhs_call. */
3586 rhsc.var = callused_id;
3587 rhsc.offset = 0;
3588 rhsc.type = ADDRESSOF;
3589 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3590 process_constraint (new_constraint (*lhsp, rhsc));
3591 VEC_free (ce_s, heap, lhsc);
3595 /* Walk statement T setting up aliasing constraints according to the
3596 references found in T. This function is the main part of the
3597 constraint builder.  */
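/* The cases handled below, in order: PHI nodes, calls (const, pure
   and other calls in non-IPA mode, direct and indirect calls in IPA
   mode), plain GIMPLE_MODIFY_STMTs (structure copies or per-operand
   constraints), CHANGE_DYNAMIC_TYPE_EXPR, and finally the statement's
   escape classification (stores into globals, bad casts, asms).  */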
3600 static void
3601 find_func_aliases (tree origt)
3603 tree call, t = origt;
3604 VEC(ce_s, heap) *lhsc = NULL;
3605 VEC(ce_s, heap) *rhsc = NULL;
3606 struct constraint_expr *c;
3607 enum escape_type stmt_escape_type;
3609 if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
3610 t = TREE_OPERAND (t, 0);
3612 /* Now build constraints expressions. */
3613 if (TREE_CODE (t) == PHI_NODE)
3615 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))));
3617 /* Only care about pointers and structures containing
3618 pointers. */
3619 if (could_have_pointers (PHI_RESULT (t)))
3621 int i;
3622 unsigned int j;
3624 /* For a phi node, assign all the arguments to
3625 the result. */
3626 get_constraint_for (PHI_RESULT (t), &lhsc);
3627 for (i = 0; i < PHI_NUM_ARGS (t); i++)
3629 tree rhstype;
3630 tree strippedrhs = PHI_ARG_DEF (t, i);
3632 STRIP_NOPS (strippedrhs);
3633 rhstype = TREE_TYPE (strippedrhs);
3634 get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);
3636 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3638 struct constraint_expr *c2;
3639 while (VEC_length (ce_s, rhsc) > 0)
3641 c2 = VEC_last (ce_s, rhsc);
3642 process_constraint (new_constraint (*c, *c2));
3643 VEC_pop (ce_s, rhsc);
3649 /* In IPA mode, we need to generate constraints to pass call
3650 arguments through their calls. There are two cases, either a
3651 GIMPLE_MODIFY_STMT when we are returning a value, or just a plain
3652 CALL_EXPR when we are not.
3654 In non-ipa mode, we need to generate constraints for each
3655 pointer passed by address. */
3656 else if ((call = get_call_expr_in (t)) != NULL_TREE)
3658 int flags = call_expr_flags (call);
3659 if (!in_ipa_mode)
3661 /* Const functions can return their arguments and addresses
3662 of global memory but not of escaped memory. */
3663 if (flags & ECF_CONST)
3665 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT
3666 && could_have_pointers (GIMPLE_STMT_OPERAND (t, 1)))
3667 handle_const_call (t);
3669 else if (flags & ECF_PURE)
3671 handle_pure_call (t);
3672 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT
3673 && could_have_pointers (GIMPLE_STMT_OPERAND (t, 1)))
3674 handle_lhs_call (GIMPLE_STMT_OPERAND (t, 0), flags);
3676 /* Pure functions can return addresses in and of memory
3677 reachable from their arguments, but they are not an escape
3678 point for reachable memory of their arguments. But as we
3679 do not compute call-used memory separately we cannot do
3680 something special here. */
3681 else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3683 handle_rhs_call (GIMPLE_STMT_OPERAND (t, 1));
3684 if (could_have_pointers (GIMPLE_STMT_OPERAND (t, 1)))
3685 handle_lhs_call (GIMPLE_STMT_OPERAND (t, 0), flags);
3687 else
3688 handle_rhs_call (t);
3690 else
3692 tree lhsop;
3693 tree rhsop;
3694 tree arg;
3695 call_expr_arg_iterator iter;
3696 varinfo_t fi;
3697 int i = 1;
3698 tree decl;
3699 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3701 lhsop = GIMPLE_STMT_OPERAND (t, 0);
3702 rhsop = GIMPLE_STMT_OPERAND (t, 1);
3704 else
3706 lhsop = NULL;
3707 rhsop = t;
3709 decl = get_callee_fndecl (rhsop);
3711 /* If we can directly resolve the function being called, do so.
3712 Otherwise, it must be some sort of indirect expression that
3713 we should still be able to handle. */
3714 if (decl)
3716 fi = get_vi_for_tree (decl);
3718 else
3720 decl = CALL_EXPR_FN (rhsop);
3721 fi = get_vi_for_tree (decl);
3724 /* Assign all the passed arguments to the appropriate incoming
3725 parameters of the function. */
3727 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhsop)
3729 struct constraint_expr lhs ;
3730 struct constraint_expr *rhsp;
3732 get_constraint_for (arg, &rhsc);
3733 if (TREE_CODE (decl) != FUNCTION_DECL)
3735 lhs.type = DEREF;
3736 lhs.var = fi->id;
3737 lhs.offset = i;
3739 else
3741 lhs.type = SCALAR;
3742 lhs.var = first_vi_for_offset (fi, i)->id;
3743 lhs.offset = 0;
3745 while (VEC_length (ce_s, rhsc) != 0)
3747 rhsp = VEC_last (ce_s, rhsc);
3748 process_constraint (new_constraint (lhs, *rhsp));
3749 VEC_pop (ce_s, rhsc);
3751 i++;
3754 /* If we are returning a value, assign it to the result. */
3755 if (lhsop)
3757 struct constraint_expr rhs;
3758 struct constraint_expr *lhsp;
3759 unsigned int j = 0;
3761 get_constraint_for (lhsop, &lhsc);
3762 if (TREE_CODE (decl) != FUNCTION_DECL)
3764 rhs.type = DEREF;
3765 rhs.var = fi->id;
3766 rhs.offset = i;
3768 else
3770 rhs.type = SCALAR;
3771 rhs.var = first_vi_for_offset (fi, i)->id;
3772 rhs.offset = 0;
3774 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3775 process_constraint (new_constraint (*lhsp, rhs));
3779 /* Otherwise, just a regular assignment statement. */
3780 else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3782 tree lhsop = GIMPLE_STMT_OPERAND (t, 0);
3783 tree rhsop = GIMPLE_STMT_OPERAND (t, 1);
3784 int i;
3786 if (AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
3787 && AGGREGATE_TYPE_P (TREE_TYPE (rhsop)))
3789 do_structure_copy (lhsop, rhsop);
3791 else
3793 /* Only care about operations with pointers, structures
3794 containing pointers, dereferences, and call expressions. */
3795 if (could_have_pointers (lhsop)
3796 || TREE_CODE (rhsop) == CALL_EXPR)
3798 get_constraint_for (lhsop, &lhsc);
3799 switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
3801 /* RHS that consist of unary operations,
3802 exceptional types, or bare decls/constants, get
3803 handled directly by get_constraint_for. */
3804 case tcc_reference:
3805 case tcc_declaration:
3806 case tcc_constant:
3807 case tcc_exceptional:
3808 case tcc_expression:
3809 case tcc_vl_exp:
3810 case tcc_unary:
3812 unsigned int j;
3814 get_constraint_for (rhsop, &rhsc);
3815 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3817 struct constraint_expr *c2;
3818 unsigned int k;
3820 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3821 process_constraint (new_constraint (*c, *c2));
3825 break;
3827 case tcc_binary:
3829 /* For pointer arithmetic of the form
3830 PTR + CST, we can simply use PTR's
3831 constraint because pointer arithmetic is
3832 not allowed to go out of bounds. */
3833 if (handle_ptr_arith (lhsc, rhsop))
3834 break;
3836 /* FALLTHRU */
3838 /* Otherwise, walk each operand. Notice that we
3839 can't use the operand interface because we need
3840 to process expressions other than simple operands
3841 (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR). */
3842 default:
3843 for (i = 0; i < TREE_OPERAND_LENGTH (rhsop); i++)
3845 tree op = TREE_OPERAND (rhsop, i);
3846 unsigned int j;
3848 gcc_assert (VEC_length (ce_s, rhsc) == 0);
3849 get_constraint_for (op, &rhsc);
3850 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3852 struct constraint_expr *c2;
3853 while (VEC_length (ce_s, rhsc) > 0)
3855 c2 = VEC_last (ce_s, rhsc);
3856 process_constraint (new_constraint (*c, *c2));
3857 VEC_pop (ce_s, rhsc);
3865 else if (TREE_CODE (t) == CHANGE_DYNAMIC_TYPE_EXPR)
3867 unsigned int j;
3869 get_constraint_for (CHANGE_DYNAMIC_TYPE_LOCATION (t), &lhsc);
3870 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
3871 get_varinfo (c->var)->no_tbaa_pruning = true;
3874 stmt_escape_type = is_escape_site (t);
3875 if (stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
3877 tree rhs;
3878 gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
3879 rhs = GIMPLE_STMT_OPERAND (t, 1);
3880 if (TREE_CODE (rhs) == ADDR_EXPR)
3882 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3883 if (base
3884 && (!DECL_P (base)
3885 || !is_global_var (base)))
3886 make_escape_constraint (rhs);
3888 else if (TREE_CODE (rhs) == SSA_NAME
3889 && POINTER_TYPE_P (TREE_TYPE (rhs)))
3890 make_escape_constraint (rhs);
3891 else if (could_have_pointers (rhs))
3892 make_escape_constraint (rhs);
3894 else if (stmt_escape_type == ESCAPE_BAD_CAST)
3896 tree rhs;
3897 gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
3898 rhs = GIMPLE_STMT_OPERAND (t, 1);
3899 gcc_assert (CONVERT_EXPR_P (rhs)
3900 || TREE_CODE (rhs) == VIEW_CONVERT_EXPR);
3901 rhs = TREE_OPERAND (rhs, 0);
3902 make_escape_constraint (rhs);
3904 else if (stmt_escape_type == ESCAPE_TO_ASM)
3906 tree link;
3907 int i;
3908 for (i = 0, link = ASM_OUTPUTS (t); link; i++, link = TREE_CHAIN (link))
3910 tree op = TREE_VALUE (link);
3911 if (op && could_have_pointers (op))
3912 /* Strictly we'd only need the constraints from ESCAPED and
3913 NONLOCAL. */
3914 make_escape_constraint (op);
3916 for (i = 0, link = ASM_INPUTS (t); link; i++, link = TREE_CHAIN (link))
3918 tree op = TREE_VALUE (link);
3919 if (op && could_have_pointers (op))
3920 /* Strictly we'd only need the constraint to ESCAPED. */
3921 make_escape_constraint (op);
3925 /* After promoting variables and computing aliasing we will
3926 need to re-scan most statements. FIXME: Try to minimize the
3927 number of statements re-scanned. It's not really necessary to
3928 re-scan *all* statements. */
3929 if (!in_ipa_mode)
3930 mark_stmt_modified (origt);
3931 VEC_free (ce_s, heap, rhsc);
3932 VEC_free (ce_s, heap, lhsc);
3936 /* Find the first varinfo in the same variable as START that overlaps with
3937 OFFSET.
3938 Effectively, walk the chain of fields for the variable START to find the
3939 first field that overlaps with OFFSET.
3940 Return NULL if we can't find one. */
3942 static varinfo_t
3943 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3945 varinfo_t curr = start;
3946 while (curr)
3948 /* We may not find a variable in the field list with the actual
3949 offset when we have glommed a structure to a variable.
3950 In that case, however, offset should still be within the size
3951 of the variable. */
3952 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3953 return curr;
3954 curr = curr->next;
3956 return NULL;
3960 /* Insert the varinfo FIELD into the field list for BASE, at the front
3961 of the list. */
3963 static void
3964 insert_into_field_list (varinfo_t base, varinfo_t field)
3966 varinfo_t prev = base;
3967 varinfo_t curr = base->next;
3969 field->next = curr;
3970 prev->next = field;
3973 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3974 offset. */
3976 static void
3977 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3979 varinfo_t prev = base;
3980 varinfo_t curr = base->next;
3982 if (curr == NULL)
3984 prev->next = field;
3985 field->next = NULL;
3987 else
3989 while (curr)
3991 if (field->offset <= curr->offset)
3992 break;
3993 prev = curr;
3994 curr = curr->next;
3996 field->next = prev->next;
3997 prev->next = field;
4001 /* This structure is used during pushing fields onto the fieldstack
4002 to track the offset of the field, since bitpos_of_field gives it
4003 relative to its immediate containing type, and we want it relative
4004 to the ultimate containing object. */
4006 struct fieldoff
4008 /* Type of the field. */
4009 tree type;
4011 /* Size, in bits, of the field. */
4012 tree size;
4014 /* Field. */
4015 tree decl;
4017 /* Offset from the base of the base containing object to this field. */
4018 HOST_WIDE_INT offset;
4020 typedef struct fieldoff fieldoff_s;
4022 DEF_VEC_O(fieldoff_s);
4023 DEF_VEC_ALLOC_O(fieldoff_s,heap);
4025 /* qsort comparison function for two fieldoff's PA and PB */
4027 static int
4028 fieldoff_compare (const void *pa, const void *pb)
4030 const fieldoff_s *foa = (const fieldoff_s *)pa;
4031 const fieldoff_s *fob = (const fieldoff_s *)pb;
4032 unsigned HOST_WIDE_INT foasize, fobsize;
4034 if (foa->offset < fob->offset)
4035 return -1;
4036 else if (foa->offset > fob->offset)
4037 return 1;
4039 foasize = TREE_INT_CST_LOW (foa->size);
4040 fobsize = TREE_INT_CST_LOW (fob->size);
4041 if (foasize < fobsize)
4042 return -1;
4043 else if (foasize > fobsize)
4044 return 1;
4045 return 0;
4048 /* Sort a fieldstack according to the field offset and sizes. */
4049 static void
4050 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
4052 qsort (VEC_address (fieldoff_s, fieldstack),
4053 VEC_length (fieldoff_s, fieldstack),
4054 sizeof (fieldoff_s),
4055 fieldoff_compare);
4058 /* Return true if V is a tree that we can have subvars for.
4059 Normally, this is any aggregate type. Also complex
4060 types which are not gimple registers can have subvars. */
4062 static inline bool
4063 var_can_have_subvars (const_tree v)
4065 /* Volatile variables should never have subvars. */
4066 if (TREE_THIS_VOLATILE (v))
4067 return false;
4069 /* Non decls or memory tags can never have subvars. */
4070 if (!DECL_P (v) || MTAG_P (v))
4071 return false;
4073 /* Aggregates without overlapping fields can have subvars. */
4074 if (TREE_CODE (TREE_TYPE (v)) == RECORD_TYPE)
4075 return true;
4077 return false;
4080 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
4081 the fields of TYPE onto fieldstack, recording their offsets along
4082 the way.
4084 OFFSET is used to keep track of the offset in this entire
4085 structure, rather than just the immediately containing structure.
4086 Returns the number of fields pushed.
4088 HAS_UNION is set to true if we find a union type as a field of
4089 TYPE. */
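/* For instance, pushing struct { int *a; struct { int *b; int *c; } i; }
   yields three entries at bit offsets 0, 32 and 64 on a target with
   32-bit pointers, because nested RECORD_TYPE fields are recursed
   into rather than pushed as single fields.  Sizes here are
   illustrative and target-dependent.  */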
4091 static int
4092 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4093 HOST_WIDE_INT offset, bool *has_union)
4095 tree field;
4096 int count = 0;
4098 if (TREE_CODE (type) != RECORD_TYPE)
4099 return 0;
4101 /* If the vector of fields is growing too big, bail out early.
4102 Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
4103 sure this fails. */
4104 if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
4105 return 0;
4107 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4108 if (TREE_CODE (field) == FIELD_DECL)
4110 bool push = false;
4111 int pushed = 0;
4113 if (has_union
4114 && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4115 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
4116 *has_union = true;
4118 if (!var_can_have_subvars (field))
4119 push = true;
4120 else if (!(pushed = push_fields_onto_fieldstack
4121 (TREE_TYPE (field),
4122 fieldstack,
4123 offset + bitpos_of_field (field),
4124 has_union))
4125 && (DECL_SIZE (field)
4126 && !integer_zerop (DECL_SIZE (field))))
4127 /* Empty structures may have actual size, like in C++. So if
4128 we did not push any subfields and the size is nonzero, push
4129 the field itself onto the stack. */
4130 push = true;
4132 if (push)
4134 fieldoff_s *pair;
4136 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4137 pair->type = TREE_TYPE (field);
4138 pair->size = DECL_SIZE (field);
4139 pair->decl = field;
4140 pair->offset = offset + bitpos_of_field (field);
4141 count++;
4143 else
4144 count += pushed;
4147 return count;
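/* Editor-added illustrative sketch, not used by the pass:
   push_fields_onto_fieldstack accumulates OFFSET so nested fields get
   positions relative to the outermost object.  The same accumulation,
   shown in bytes with the standard offsetof macro (the pass itself
   works in bits via bitpos_of_field).  This assumes offsetof is
   visible here, and the struct names are hypothetical.  */

struct ex_inner { int a; int b; };
struct ex_outer { char pad; struct ex_inner in; };

static size_t
ex_offset_of_in_b (void)
{
  /* Absolute offset of ex_outer.in.b = offset of the containing field
     plus the offset of the nested field, exactly the
     OFFSET + bitpos_of_field (field) accumulation above.  */
  return offsetof (struct ex_outer, in) + offsetof (struct ex_inner, b);
}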
4150 /* Create a constraint ID = &FROM. */
4152 static void
4153 make_constraint_from (varinfo_t vi, int from)
4155 struct constraint_expr lhs, rhs;
4157 lhs.var = vi->id;
4158 lhs.offset = 0;
4159 lhs.type = SCALAR;
4161 rhs.var = from;
4162 rhs.offset = 0;
4163 rhs.type = ADDRESSOF;
4164 process_constraint (new_constraint (lhs, rhs));
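/* Editor-added illustrative sketch, not used by the callers in this
   file: the same pattern as make_constraint_from, but for a plain copy
   constraint VI = FROM, i.e. both sides SCALAR instead of
   SCALAR = ADDRESSOF.  The helper name ex_make_copy_constraint is
   hypothetical.  */

static void
ex_make_copy_constraint (varinfo_t vi, int from)
{
  struct constraint_expr lhs, rhs;

  lhs.var = vi->id;
  lhs.offset = 0;
  lhs.type = SCALAR;

  rhs.var = from;
  rhs.offset = 0;
  rhs.type = SCALAR;

  process_constraint (new_constraint (lhs, rhs));
}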
4167 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4168 if it is a varargs function. */
4170 static unsigned int
4171 count_num_arguments (tree decl, bool *is_varargs)
4173 unsigned int i = 0;
4174 tree t;
4176 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
4177 t;
4178 t = TREE_CHAIN (t))
4180 if (TREE_VALUE (t) == void_type_node)
4181 break;
4182 i++;
4185 if (!t)
4186 *is_varargs = true;
4187 return i;
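/* Editor-added worked example (a sketch of the convention, not
   compiler output): for

       int ex_fixed (int a, char *b);

   the argument-type list ends in void_type_node, so the loop breaks
   with T non-NULL and we return 2 with *IS_VARARGS left false.  For

       int ex_vararg (int a, ...);

   the list simply ends, T becomes NULL, and we return 1 with
   *IS_VARARGS set to true.  An unprototyped declaration has a NULL
   TYPE_ARG_TYPES, so it is also treated as varargs.  */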
4190 /* Create the variable info node for function DECL, using NAME, and
4191 return the index of the variable we've created for it. */
4193 static unsigned int
4194 create_function_info_for (tree decl, const char *name)
4196 unsigned int index = VEC_length (varinfo_t, varmap);
4197 varinfo_t vi;
4198 tree arg;
4199 unsigned int i;
4200 bool is_varargs = false;
4202 /* Create the variable info. */
4204 vi = new_var_info (decl, index, name);
4205 vi->decl = decl;
4206 vi->offset = 0;
4207 vi->has_union = 0;
4208 vi->size = 1;
4209 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
4210 insert_vi_for_tree (vi->decl, vi);
4211 VEC_safe_push (varinfo_t, heap, varmap, vi);
4213 stats.total_vars++;
4215 /* If it's varargs, we don't know how many arguments it has, so we
4216 can't do much. */
4218 if (is_varargs)
4220 vi->fullsize = ~0;
4221 vi->size = ~0;
4222 vi->is_unknown_size_var = true;
4223 return index;
4227 arg = DECL_ARGUMENTS (decl);
4229 /* Set up variables for each argument. */
4230 for (i = 1; i < vi->fullsize; i++)
4232 varinfo_t argvi;
4233 const char *newname;
4234 char *tempname;
4235 unsigned int newindex;
4236 tree argdecl = decl;
4238 if (arg)
4239 argdecl = arg;
4241 newindex = VEC_length (varinfo_t, varmap);
4242 asprintf (&tempname, "%s.arg%d", name, i-1);
4243 newname = ggc_strdup (tempname);
4244 free (tempname);
4246 argvi = new_var_info (argdecl, newindex, newname);
4247 argvi->decl = argdecl;
4248 VEC_safe_push (varinfo_t, heap, varmap, argvi);
4249 argvi->offset = i;
4250 argvi->size = 1;
4251 argvi->fullsize = vi->fullsize;
4252 argvi->has_union = false;
4253 insert_into_field_list_sorted (vi, argvi);
4254 stats.total_vars ++;
4255 if (arg)
4257 insert_vi_for_tree (arg, argvi);
4258 arg = TREE_CHAIN (arg);
4262 /* Create a variable for the return var. */
4263 if (DECL_RESULT (decl) != NULL
4264 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
4266 varinfo_t resultvi;
4267 const char *newname;
4268 char *tempname;
4269 unsigned int newindex;
4270 tree resultdecl = decl;
4272 vi->fullsize ++;
4274 if (DECL_RESULT (decl))
4275 resultdecl = DECL_RESULT (decl);
4277 newindex = VEC_length (varinfo_t, varmap);
4278 asprintf (&tempname, "%s.result", name);
4279 newname = ggc_strdup (tempname);
4280 free (tempname);
4282 resultvi = new_var_info (resultdecl, newindex, newname);
4283 resultvi->decl = resultdecl;
4284 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
4285 resultvi->offset = i;
4286 resultvi->size = 1;
4287 resultvi->fullsize = vi->fullsize;
4288 resultvi->has_union = false;
4289 insert_into_field_list_sorted (vi, resultvi);
4290 stats.total_vars ++;
4291 if (DECL_RESULT (decl))
4292 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
4294 return index;
4298 /* Return true if FIELDSTACK contains fields that overlap.
4299 FIELDSTACK is assumed to be sorted by offset. */
4301 static bool
4302 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
4304 fieldoff_s *fo = NULL;
4305 unsigned int i;
4306 HOST_WIDE_INT lastoffset = -1;
4308 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4310 if (fo->offset == lastoffset)
4311 return true;
4312 lastoffset = fo->offset;
4314 return false;
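/* Editor-added illustrative sketch, not used by the pass: the same
   adjacent-duplicate test as check_for_overlaps, on a plain sorted
   array of offsets.  The name is hypothetical.  */

static bool
ex_sorted_has_duplicate (const HOST_WIDE_INT *offsets, size_t n)
{
  size_t i;

  /* Because the input is sorted, equal offsets must be adjacent, so a
     single pass comparing neighbours is enough.  */
  for (i = 1; i < n; i++)
    if (offsets[i] == offsets[i - 1])
      return true;
  return false;
}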
4317 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
4318 This will also create any varinfo structures necessary for fields
4319 of DECL. */
4321 static unsigned int
4322 create_variable_info_for (tree decl, const char *name)
4324 unsigned int index = VEC_length (varinfo_t, varmap);
4325 varinfo_t vi;
4326 tree decltype = TREE_TYPE (decl);
4327 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
4328 bool notokay = false;
4329 bool hasunion;
4330 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
4331 VEC (fieldoff_s,heap) *fieldstack = NULL;
4333 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
4334 return create_function_info_for (decl, name);
4336 hasunion = TREE_CODE (decltype) == UNION_TYPE
4337 || TREE_CODE (decltype) == QUAL_UNION_TYPE;
4338 if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
4340 push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion);
4341 if (hasunion)
4343 VEC_free (fieldoff_s, heap, fieldstack);
4344 notokay = true;
4348 /* If the variable doesn't have subvars, we may end up needing to
4349 sort the field list and create fake variables for all the
4350 fields. */
4351 vi = new_var_info (decl, index, name);
4352 vi->decl = decl;
4353 vi->offset = 0;
4354 vi->has_union = hasunion;
4355 if (!declsize
4356 || TREE_CODE (declsize) != INTEGER_CST
4357 || TREE_CODE (decltype) == UNION_TYPE
4358 || TREE_CODE (decltype) == QUAL_UNION_TYPE)
4360 vi->is_unknown_size_var = true;
4361 vi->fullsize = ~0;
4362 vi->size = ~0;
4364 else
4366 vi->fullsize = TREE_INT_CST_LOW (declsize);
4367 vi->size = vi->fullsize;
4370 insert_vi_for_tree (vi->decl, vi);
4371 VEC_safe_push (varinfo_t, heap, varmap, vi);
4372 if (is_global && (!flag_whole_program || !in_ipa_mode)
4373 && could_have_pointers (decl))
4374 make_constraint_from (vi, escaped_id);
4376 stats.total_vars++;
4377 if (use_field_sensitive
4378 && !notokay
4379 && !vi->is_unknown_size_var
4380 && var_can_have_subvars (decl)
4381 && VEC_length (fieldoff_s, fieldstack) > 1
4382 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
4384 unsigned int newindex = VEC_length (varinfo_t, varmap);
4385 fieldoff_s *fo = NULL;
4386 unsigned int i;
4388 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4390 if (! fo->size
4391 || TREE_CODE (fo->size) != INTEGER_CST
4392 || fo->offset < 0)
4394 notokay = true;
4395 break;
4399 /* We can't sort them if we have a field with a variable sized type,
4400 which will make notokay = true. In that case, we are going to return
4401 without creating varinfos for the fields anyway, so sorting them is a
4402 waste to boot. */
4403 if (!notokay)
4405 sort_fieldstack (fieldstack);
4406 /* Due to some C++ FE issues, like PR 22488, we might end up
4407 with what appear to be overlapping fields even though they,
4408 in reality, do not overlap. Until the C++ FE is fixed,
4409 we will simply disable field-sensitivity for these cases. */
4410 notokay = check_for_overlaps (fieldstack);
4414 if (VEC_length (fieldoff_s, fieldstack) != 0)
4415 fo = VEC_index (fieldoff_s, fieldstack, 0);
4417 if (fo == NULL || notokay)
4419 vi->is_unknown_size_var = 1;
4420 vi->fullsize = ~0;
4421 vi->size = ~0;
4422 VEC_free (fieldoff_s, heap, fieldstack);
4423 return index;
4426 vi->size = TREE_INT_CST_LOW (fo->size);
4427 vi->offset = fo->offset;
4428 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
4429 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
4430 i--)
4432 varinfo_t newvi;
4433 const char *newname = "NULL";
4434 char *tempname;
4436 newindex = VEC_length (varinfo_t, varmap);
4437 if (dump_file)
4439 if (fo->decl)
4440 asprintf (&tempname, "%s.%s",
4441 vi->name, alias_get_name (fo->decl));
4442 else
4443 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC,
4444 vi->name, fo->offset);
4445 newname = ggc_strdup (tempname);
4446 free (tempname);
4448 newvi = new_var_info (decl, newindex, newname);
4449 newvi->offset = fo->offset;
4450 newvi->size = TREE_INT_CST_LOW (fo->size);
4451 newvi->fullsize = vi->fullsize;
4452 insert_into_field_list (vi, newvi);
4453 VEC_safe_push (varinfo_t, heap, varmap, newvi);
4454 if (is_global && (!flag_whole_program || !in_ipa_mode)
4455 && (!fo->decl || could_have_pointers (fo->decl)))
4456 make_constraint_from (newvi, escaped_id);
4458 stats.total_vars++;
4462 VEC_free (fieldoff_s, heap, fieldstack);
4464 return index;
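/* Editor-added worked example (a sketch under stated assumptions, not
   output copied from the compiler): for a global object of the type
   below, assuming 32-bit int and no padding, field-sensitive analysis
   produces one varinfo covering the first field plus one per further
   field, roughly:

       g        offset 0,  size 32, fullsize 64
       g.second offset 32, size 32, fullsize 64

   where the dotted names come from alias_get_name and are only built
   when dumping is enabled.  */

struct ex_pair { int first; int second; };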
4467 /* Print out the points-to solution for VAR to FILE. */
4469 void
4470 dump_solution_for_var (FILE *file, unsigned int var)
4472 varinfo_t vi = get_varinfo (var);
4473 unsigned int i;
4474 bitmap_iterator bi;
4476 if (find (var) != var)
4478 varinfo_t vipt = get_varinfo (find (var));
4479 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4481 else
4483 fprintf (file, "%s = { ", vi->name);
4484 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4486 fprintf (file, "%s ", get_varinfo (i)->name);
4488 fprintf (file, "}");
4489 if (vi->no_tbaa_pruning)
4490 fprintf (file, " no-tbaa-pruning");
4491 fprintf (file, "\n");
4495 /* Print the points-to solution for VAR to stdout. */
4497 void
4498 debug_solution_for_var (unsigned int var)
4500 dump_solution_for_var (stdout, var);
4503 /* Create varinfo structures for all of the variables in the
4504 function for intraprocedural mode. */
4506 static void
4507 intra_create_variable_infos (void)
4509 tree t;
4510 struct constraint_expr lhs, rhs;
4512 /* For each incoming pointer argument ARG, create the constraint ARG =
4513 NONLOCAL, or point ARG at a dummy variable if flag_argument_noalias is set. */
4514 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
4516 varinfo_t p;
4518 if (!could_have_pointers (t))
4519 continue;
4521 /* If flag_argument_noalias is set, then function pointer
4522 arguments are guaranteed not to point to each other. In that
4523 case, create an artificial variable PARM_NOALIAS and the
4524 constraint ARG = &PARM_NOALIAS. */
4525 if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
4527 varinfo_t vi;
4528 tree heapvar = heapvar_lookup (t);
4530 lhs.offset = 0;
4531 lhs.type = SCALAR;
4532 lhs.var = get_vi_for_tree (t)->id;
4534 if (heapvar == NULL_TREE)
4536 var_ann_t ann;
4537 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
4538 "PARM_NOALIAS");
4539 DECL_EXTERNAL (heapvar) = 1;
4540 if (gimple_referenced_vars (cfun))
4541 add_referenced_var (heapvar);
4543 heapvar_insert (t, heapvar);
4545 ann = get_var_ann (heapvar);
4546 if (flag_argument_noalias == 1)
4547 ann->noalias_state = NO_ALIAS;
4548 else if (flag_argument_noalias == 2)
4549 ann->noalias_state = NO_ALIAS_GLOBAL;
4550 else if (flag_argument_noalias == 3)
4551 ann->noalias_state = NO_ALIAS_ANYTHING;
4552 else
4553 gcc_unreachable ();
4556 vi = get_vi_for_tree (heapvar);
4557 vi->is_artificial_var = 1;
4558 vi->is_heap_var = 1;
4559 rhs.var = vi->id;
4560 rhs.type = ADDRESSOF;
4561 rhs.offset = 0;
4562 for (p = get_varinfo (lhs.var); p; p = p->next)
4564 struct constraint_expr temp = lhs;
4565 temp.var = p->id;
4566 process_constraint (new_constraint (temp, rhs));
4569 else
4571 varinfo_t arg_vi = get_vi_for_tree (t);
4573 for (p = arg_vi; p; p = p->next)
4574 make_constraint_from (p, nonlocal_id);
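/* Editor-added worked example (a sketch of the intent, not a dump
   captured from the compiler): for a function such as

       void ex_fn (int *p, int q);

   only P could have pointers, so Q is skipped.  By default P is
   constrained against NONLOCAL through make_constraint_from, and with
   flag_argument_noalias set it instead gets the constraint
   P = &PARM_NOALIAS against a freshly created artificial heap
   variable.  The exact textual form is whatever dump_constraints
   prints.  */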
4579 /* Structure used to put solution bitmaps in a hashtable so they can
4580 be shared among variables with the same points-to set. */
4582 typedef struct shared_bitmap_info
4584 bitmap pt_vars;
4585 hashval_t hashcode;
4586 } *shared_bitmap_info_t;
4587 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
4589 static htab_t shared_bitmap_table;
4591 /* Hash function for a shared_bitmap_info_t */
4593 static hashval_t
4594 shared_bitmap_hash (const void *p)
4596 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
4597 return bi->hashcode;
4600 /* Equality function for two shared_bitmap_info_t's. */
4602 static int
4603 shared_bitmap_eq (const void *p1, const void *p2)
4605 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
4606 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
4607 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
4610 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
4611 existing instance if there is one, NULL otherwise. */
4613 static bitmap
4614 shared_bitmap_lookup (bitmap pt_vars)
4616 void **slot;
4617 struct shared_bitmap_info sbi;
4619 sbi.pt_vars = pt_vars;
4620 sbi.hashcode = bitmap_hash (pt_vars);
4622 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
4623 sbi.hashcode, NO_INSERT);
4624 if (!slot)
4625 return NULL;
4626 else
4627 return ((shared_bitmap_info_t) *slot)->pt_vars;
4631 /* Add a bitmap to the shared bitmap hashtable. */
4633 static void
4634 shared_bitmap_add (bitmap pt_vars)
4636 void **slot;
4637 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
4639 sbi->pt_vars = pt_vars;
4640 sbi->hashcode = bitmap_hash (pt_vars);
4642 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
4643 sbi->hashcode, INSERT);
4644 gcc_assert (!*slot);
4645 *slot = (void *) sbi;
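/* Editor-added illustrative sketch, not used by the pass: the intended
   lookup-before-add protocol for the two helpers above, mirroring how
   find_what_p_points_to uses them later in this file (the real caller
   also clears the now-redundant bitmap when an equal one already
   exists).  The helper name ex_share_bitmap is hypothetical.  */

static bitmap
ex_share_bitmap (bitmap solution)
{
  bitmap existing = shared_bitmap_lookup (solution);

  if (existing)
    return existing;		/* Reuse an equal bitmap already in the table.  */

  shared_bitmap_add (solution);	/* First time we see this set: register it.  */
  return solution;
}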
4649 /* Set bits in INTO corresponding to the variable uids in solution set
4650 FROM, which came from variable PTR.
4651 For variables that are actually dereferenced, we also use type
4652 based alias analysis to prune the points-to sets.
4653 IS_DEREFED is true if PTR was directly dereferenced, which we use to
4654 help determine whether we are allowed to prune using TBAA.
4655 If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
4656 the FROM set. */
4658 static void
4659 set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
4660 bool no_tbaa_pruning)
4662 unsigned int i;
4663 bitmap_iterator bi;
4665 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
4667 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4669 varinfo_t vi = get_varinfo (i);
4671 /* The only artificial variables that are allowed in a may-alias
4672 set are heap variables. */
4673 if (vi->is_artificial_var && !vi->is_heap_var)
4674 continue;
4676 if (TREE_CODE (vi->decl) == VAR_DECL
4677 || TREE_CODE (vi->decl) == PARM_DECL
4678 || TREE_CODE (vi->decl) == RESULT_DECL)
4680 /* Just add VI->DECL to the alias set.
4681 Don't type prune artificial vars or points-to sets
4682 for pointers that have not been dereferenced or with
4683 type-based pruning disabled. */
4684 if (vi->is_artificial_var
4685 || !is_derefed
4686 || no_tbaa_pruning)
4687 bitmap_set_bit (into, DECL_UID (vi->decl));
4688 else
4690 alias_set_type var_alias_set, mem_alias_set;
4691 var_alias_set = get_alias_set (vi->decl);
4692 mem_alias_set = get_alias_set (TREE_TYPE (TREE_TYPE (ptr)));
4693 if (may_alias_p (SSA_NAME_VAR (ptr), mem_alias_set,
4694 vi->decl, var_alias_set, true))
4695 bitmap_set_bit (into, DECL_UID (vi->decl));
4702 static bool have_alias_info = false;
4704 /* The list of SMT's that are in use by our pointer variables. This
4705 is the set of SMT's for all pointers that can point to anything. */
4706 static bitmap used_smts;
4708 /* Because the points-to set calculation and the SMT calculation are
4709 somewhat co-dependent, we can't just calculate the used-SMT info
4710 whenever we want; it has to be calculated around the time that
4711 find_what_p_points_to is called. */
4713 /* Mark which SMT's are in use by points-to anything variables. */
4715 void
4716 set_used_smts (void)
4718 int i;
4719 varinfo_t vi;
4720 used_smts = BITMAP_ALLOC (&pta_obstack);
4722 for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); i++)
4724 tree var = vi->decl;
4725 varinfo_t withsolution = get_varinfo (find (i));
4726 tree smt;
4727 var_ann_t va;
4728 struct ptr_info_def *pi = NULL;
4730 /* For parm decls, the pointer info may be under the default
4731 def. */
4732 if (TREE_CODE (vi->decl) == PARM_DECL
4733 && gimple_default_def (cfun, var))
4734 pi = SSA_NAME_PTR_INFO (gimple_default_def (cfun, var));
4735 else if (TREE_CODE (var) == SSA_NAME)
4736 pi = SSA_NAME_PTR_INFO (var);
4738 /* Skip the special variables and those that can't be aliased. */
4739 if (vi->is_special_var
4740 || !SSA_VAR_P (var)
4741 || (pi && !pi->memory_tag_needed)
4742 || (TREE_CODE (var) == VAR_DECL && !may_be_aliased (var))
4743 || !POINTER_TYPE_P (TREE_TYPE (var)))
4744 continue;
4746 if (TREE_CODE (var) == SSA_NAME)
4747 var = SSA_NAME_VAR (var);
4749 va = var_ann (var);
4750 if (!va)
4751 continue;
4753 smt = va->symbol_mem_tag;
4754 if (smt && bitmap_bit_p (withsolution->solution, anything_id))
4755 bitmap_set_bit (used_smts, DECL_UID (smt));
4759 /* Given a pointer variable P, fill in its points-to set, or return
4760 false if we can't.
4761 Rather than return false for variables that point-to anything, we
4762 instead find the corresponding SMT, and merge in its aliases. In
4763 addition to these aliases, we also set the bits for the SMT's
4764 themselves and their subsets, as SMT's are still in use by
4765 non-SSA_NAME's, and pruning may eliminate every one of their
4766 aliases. In such a case, if we did not include the right set of
4767 SMT's in the points-to set of the variable, we'd end up with
4768 statements that do not conflict but should. */
4770 bool
4771 find_what_p_points_to (tree p)
4773 tree lookup_p = p;
4774 varinfo_t vi;
4776 if (!have_alias_info)
4777 return false;
4779 /* For parameters, get at the points-to set for the actual parm
4780 decl. */
4781 if (TREE_CODE (p) == SSA_NAME
4782 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4783 && SSA_NAME_IS_DEFAULT_DEF (p))
4784 lookup_p = SSA_NAME_VAR (p);
4786 vi = lookup_vi_for_tree (lookup_p);
4787 if (vi)
4789 if (vi->is_artificial_var)
4790 return false;
4792 /* See if this is a field or a structure. */
4793 if (vi->size != vi->fullsize)
4795 /* Nothing currently asks about structure fields directly,
4796 but when something does, we will need code here to hand back
4797 the points-to set. */
4798 return false;
4800 else
4802 struct ptr_info_def *pi = get_ptr_info (p);
4803 unsigned int i;
4804 bitmap_iterator bi;
4805 bool was_pt_anything = false;
4806 bitmap finished_solution;
4807 bitmap result;
4809 if (!pi->memory_tag_needed)
4810 return false;
4812 /* This variable may have been collapsed, let's get the real
4813 variable. */
4814 vi = get_varinfo (find (vi->id));
4816 /* Translate artificial variables into SSA_NAME_PTR_INFO
4817 attributes. */
4818 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4820 varinfo_t vi = get_varinfo (i);
4822 if (vi->is_artificial_var)
4824 /* FIXME. READONLY should be handled better so that
4825 flow insensitive aliasing can disregard writable
4826 aliases. */
4827 if (vi->id == nothing_id)
4828 pi->pt_null = 1;
4829 else if (vi->id == anything_id
4830 || vi->id == nonlocal_id
4831 || vi->id == escaped_id
4832 || vi->id == callused_id)
4833 was_pt_anything = 1;
4834 else if (vi->id == readonly_id)
4835 was_pt_anything = 1;
4836 else if (vi->id == integer_id)
4837 was_pt_anything = 1;
4838 else if (vi->is_heap_var)
4839 pi->pt_global_mem = 1;
4843 /* Instead of doing extra work, simply do not create
4844 points-to information for pt_anything pointers. This
4845 will cause the operand scanner to fall back to the
4846 type-based SMT and its aliases. Which is the best
4847 we could do here for the points-to set as well. */
4848 if (was_pt_anything)
4849 return false;
4851 /* Share the final set of variables when possible. */
4852 finished_solution = BITMAP_GGC_ALLOC ();
4853 stats.points_to_sets_created++;
4855 set_uids_in_ptset (p, finished_solution, vi->solution,
4856 pi->is_dereferenced,
4857 vi->no_tbaa_pruning);
4858 result = shared_bitmap_lookup (finished_solution);
4860 if (!result)
4862 shared_bitmap_add (finished_solution);
4863 pi->pt_vars = finished_solution;
4865 else
4867 pi->pt_vars = result;
4868 bitmap_clear (finished_solution);
4871 if (bitmap_empty_p (pi->pt_vars))
4872 pi->pt_vars = NULL;
4874 return true;
4878 return false;
4881 /* Mark the variables in the ESCAPED solution as call-clobbered.
4882 Returns false if pt_anything escaped, in which case all locals that
4883 have their address taken need to be marked call-clobbered as well. */
4885 bool
4886 clobber_what_escaped (void)
4888 varinfo_t vi;
4889 unsigned int i;
4890 bitmap_iterator bi;
4892 if (!have_alias_info)
4893 return false;
4895 /* This variable may have been collapsed, let's get the real
4896 variable for escaped_id. */
4897 vi = get_varinfo (find (escaped_id));
4899 /* If call-used memory escapes we need to include it in the
4900 set of escaped variables. This can happen if a pure
4901 function returns a pointer and this pointer escapes. */
4902 if (bitmap_bit_p (vi->solution, callused_id))
4904 varinfo_t cu_vi = get_varinfo (find (callused_id));
4905 bitmap_ior_into (vi->solution, cu_vi->solution);
4908 /* Mark variables in the solution call-clobbered. */
4909 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4911 varinfo_t vi = get_varinfo (i);
4913 if (vi->is_artificial_var)
4915 /* nothing_id and readonly_id do not cause any
4916 call clobber ops. For anything_id and integer_id
4917 we need to clobber all addressable vars. */
4918 if (vi->id == anything_id
4919 || vi->id == integer_id)
4920 return false;
4923 /* Only artificial heap-vars are further interesting. */
4924 if (vi->is_artificial_var && !vi->is_heap_var)
4925 continue;
4927 if ((TREE_CODE (vi->decl) == VAR_DECL
4928 || TREE_CODE (vi->decl) == PARM_DECL
4929 || TREE_CODE (vi->decl) == RESULT_DECL)
4930 && !unmodifiable_var_p (vi->decl))
4931 mark_call_clobbered (vi->decl, ESCAPE_TO_CALL);
4934 return true;
4937 /* Compute the call-used variables. */
4939 void
4940 compute_call_used_vars (void)
4942 varinfo_t vi;
4943 unsigned int i;
4944 bitmap_iterator bi;
4945 bool has_anything_id = false;
4947 if (!have_alias_info)
4948 return;
4950 /* This variable may have been collapsed, let's get the real
4951 variable for callused_id. */
4952 vi = get_varinfo (find (callused_id));
4954 /* Mark variables in the solution call-used. */
4955 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4957 varinfo_t vi = get_varinfo (i);
4959 if (vi->is_artificial_var)
4961 /* For anything_id and integer_id we need to make
4962 all local addressable vars call-used. */
4963 if (vi->id == anything_id
4964 || vi->id == integer_id)
4965 has_anything_id = true;
4968 /* Only artificial heap-vars are further interesting. */
4969 if (vi->is_artificial_var && !vi->is_heap_var)
4970 continue;
4972 if ((TREE_CODE (vi->decl) == VAR_DECL
4973 || TREE_CODE (vi->decl) == PARM_DECL
4974 || TREE_CODE (vi->decl) == RESULT_DECL)
4975 && !unmodifiable_var_p (vi->decl))
4976 bitmap_set_bit (gimple_call_used_vars (cfun), DECL_UID (vi->decl));
4979 /* If anything is call-used, add all addressable locals to the set. */
4980 if (has_anything_id)
4981 bitmap_ior_into (gimple_call_used_vars (cfun),
4982 gimple_addressable_vars (cfun));
4986 /* Dump points-to information to OUTFILE. */
4988 void
4989 dump_sa_points_to_info (FILE *outfile)
4991 unsigned int i;
4993 fprintf (outfile, "\nPoints-to sets\n\n");
4995 if (dump_flags & TDF_STATS)
4997 fprintf (outfile, "Stats:\n");
4998 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
4999 fprintf (outfile, "Non-pointer vars: %d\n",
5000 stats.nonpointer_vars);
5001 fprintf (outfile, "Statically unified vars: %d\n",
5002 stats.unified_vars_static);
5003 fprintf (outfile, "Dynamically unified vars: %d\n",
5004 stats.unified_vars_dynamic);
5005 fprintf (outfile, "Iterations: %d\n", stats.iterations);
5006 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
5007 fprintf (outfile, "Number of implicit edges: %d\n",
5008 stats.num_implicit_edges);
5011 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
5012 dump_solution_for_var (outfile, i);
5016 /* Debug points-to information to stderr. */
5018 void
5019 debug_sa_points_to_info (void)
5021 dump_sa_points_to_info (stderr);
5025 /* Initialize the always-existing constraint variables: NULL,
5026 ANYTHING, READONLY, ESCAPED, NONLOCAL, CALLUSED, and INTEGER. */
5028 static void
5029 init_base_vars (void)
5031 struct constraint_expr lhs, rhs;
5033 /* Create the NULL variable, used to represent that a variable points
5034 to NULL. */
5035 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
5036 var_nothing = new_var_info (nothing_tree, nothing_id, "NULL");
5037 insert_vi_for_tree (nothing_tree, var_nothing);
5038 var_nothing->is_artificial_var = 1;
5039 var_nothing->offset = 0;
5040 var_nothing->size = ~0;
5041 var_nothing->fullsize = ~0;
5042 var_nothing->is_special_var = 1;
5043 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
5045 /* Create the ANYTHING variable, used to represent that a variable
5046 points to some unknown piece of memory. */
5047 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
5048 var_anything = new_var_info (anything_tree, anything_id, "ANYTHING");
5049 insert_vi_for_tree (anything_tree, var_anything);
5050 var_anything->is_artificial_var = 1;
5051 var_anything->size = ~0;
5052 var_anything->offset = 0;
5053 var_anything->next = NULL;
5054 var_anything->fullsize = ~0;
5055 var_anything->is_special_var = 1;
5057 /* Anything points to anything. This makes deref constraints just
5058 work in the presence of linked list and other p = *p type loops,
5059 by saying that *ANYTHING = ANYTHING. */
5060 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
5061 lhs.type = SCALAR;
5062 lhs.var = anything_id;
5063 lhs.offset = 0;
5064 rhs.type = ADDRESSOF;
5065 rhs.var = anything_id;
5066 rhs.offset = 0;
5068 /* This specifically does not use process_constraint because
5069 process_constraint ignores all anything = anything constraints, since all
5070 but this one are redundant. */
5071 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
5073 /* Create the READONLY variable, used to represent that a variable
5074 points to readonly memory. */
5075 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
5076 var_readonly = new_var_info (readonly_tree, readonly_id, "READONLY");
5077 var_readonly->is_artificial_var = 1;
5078 var_readonly->offset = 0;
5079 var_readonly->size = ~0;
5080 var_readonly->fullsize = ~0;
5081 var_readonly->next = NULL;
5082 var_readonly->is_special_var = 1;
5083 insert_vi_for_tree (readonly_tree, var_readonly);
5084 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
5086 /* readonly memory points to anything, in order to make deref
5087 easier. In reality, it points to anything the particular
5088 readonly variable can point to, but we don't track this
5089 separately. */
5090 lhs.type = SCALAR;
5091 lhs.var = readonly_id;
5092 lhs.offset = 0;
5093 rhs.type = ADDRESSOF;
5094 rhs.var = readonly_id; /* FIXME */
5095 rhs.offset = 0;
5096 process_constraint (new_constraint (lhs, rhs));
5098 /* Create the ESCAPED variable, used to represent the set of escaped
5099 memory. */
5100 escaped_tree = create_tmp_var_raw (void_type_node, "ESCAPED");
5101 var_escaped = new_var_info (escaped_tree, escaped_id, "ESCAPED");
5102 insert_vi_for_tree (escaped_tree, var_escaped);
5103 var_escaped->is_artificial_var = 1;
5104 var_escaped->offset = 0;
5105 var_escaped->size = ~0;
5106 var_escaped->fullsize = ~0;
5107 var_escaped->is_special_var = 0;
5108 VEC_safe_push (varinfo_t, heap, varmap, var_escaped);
5109 gcc_assert (VEC_index (varinfo_t, varmap, 3) == var_escaped);
5111 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
5112 lhs.type = SCALAR;
5113 lhs.var = escaped_id;
5114 lhs.offset = 0;
5115 rhs.type = DEREF;
5116 rhs.var = escaped_id;
5117 rhs.offset = 0;
5118 process_constraint (new_constraint (lhs, rhs));
5120 /* Create the NONLOCAL variable, used to represent the set of nonlocal
5121 memory. */
5122 nonlocal_tree = create_tmp_var_raw (void_type_node, "NONLOCAL");
5123 var_nonlocal = new_var_info (nonlocal_tree, nonlocal_id, "NONLOCAL");
5124 insert_vi_for_tree (nonlocal_tree, var_nonlocal);
5125 var_nonlocal->is_artificial_var = 1;
5126 var_nonlocal->offset = 0;
5127 var_nonlocal->size = ~0;
5128 var_nonlocal->fullsize = ~0;
5129 var_nonlocal->is_special_var = 1;
5130 VEC_safe_push (varinfo_t, heap, varmap, var_nonlocal);
5132 /* Nonlocal memory points to escaped (which includes nonlocal),
5133 in order to make deref easier. */
5134 lhs.type = SCALAR;
5135 lhs.var = nonlocal_id;
5136 lhs.offset = 0;
5137 rhs.type = ADDRESSOF;
5138 rhs.var = escaped_id;
5139 rhs.offset = 0;
5140 process_constraint (new_constraint (lhs, rhs));
5142 /* Create the CALLUSED variable, used to represent the set of call-used
5143 memory. */
5144 callused_tree = create_tmp_var_raw (void_type_node, "CALLUSED");
5145 var_callused = new_var_info (callused_tree, callused_id, "CALLUSED");
5146 insert_vi_for_tree (callused_tree, var_callused);
5147 var_callused->is_artificial_var = 1;
5148 var_callused->offset = 0;
5149 var_callused->size = ~0;
5150 var_callused->fullsize = ~0;
5151 var_callused->is_special_var = 0;
5152 VEC_safe_push (varinfo_t, heap, varmap, var_callused);
5154 /* CALLUSED = *CALLUSED, because call-used is may-deref'd at calls, etc. */
5155 lhs.type = SCALAR;
5156 lhs.var = callused_id;
5157 lhs.offset = 0;
5158 rhs.type = DEREF;
5159 rhs.var = callused_id;
5160 rhs.offset = 0;
5161 process_constraint (new_constraint (lhs, rhs));
5163 /* Create the INTEGER variable, used to represent that a variable points
5164 to an INTEGER. */
5165 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
5166 var_integer = new_var_info (integer_tree, integer_id, "INTEGER");
5167 insert_vi_for_tree (integer_tree, var_integer);
5168 var_integer->is_artificial_var = 1;
5169 var_integer->size = ~0;
5170 var_integer->fullsize = ~0;
5171 var_integer->offset = 0;
5172 var_integer->next = NULL;
5173 var_integer->is_special_var = 1;
5174 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
5176 /* INTEGER = ANYTHING, because we don't know where a dereference of
5177 a random integer will point to. */
5178 lhs.type = SCALAR;
5179 lhs.var = integer_id;
5180 lhs.offset = 0;
5181 rhs.type = ADDRESSOF;
5182 rhs.var = anything_id;
5183 rhs.offset = 0;
5184 process_constraint (new_constraint (lhs, rhs));
5186 /* *ESCAPED = &ESCAPED. This is true because we have to assume
5187 everything pointed to by escaped can also point to escaped. */
5188 lhs.type = DEREF;
5189 lhs.var = escaped_id;
5190 lhs.offset = 0;
5191 rhs.type = ADDRESSOF;
5192 rhs.var = escaped_id;
5193 rhs.offset = 0;
5194 process_constraint (new_constraint (lhs, rhs));
5196 /* *ESCAPED = &NONLOCAL. This is true because we have to assume
5197 everything pointed to by escaped can also point to nonlocal. */
5198 lhs.type = DEREF;
5199 lhs.var = escaped_id;
5200 lhs.offset = 0;
5201 rhs.type = ADDRESSOF;
5202 rhs.var = nonlocal_id;
5203 rhs.offset = 0;
5204 process_constraint (new_constraint (lhs, rhs));
5207 /* Initialize things necessary to perform PTA */
5209 static void
5210 init_alias_vars (void)
5212 bitmap_obstack_initialize (&pta_obstack);
5213 bitmap_obstack_initialize (&oldpta_obstack);
5214 bitmap_obstack_initialize (&predbitmap_obstack);
5216 constraint_pool = create_alloc_pool ("Constraint pool",
5217 sizeof (struct constraint), 30);
5218 variable_info_pool = create_alloc_pool ("Variable info pool",
5219 sizeof (struct variable_info), 30);
5220 constraints = VEC_alloc (constraint_t, heap, 8);
5221 varmap = VEC_alloc (varinfo_t, heap, 8);
5222 vi_for_tree = pointer_map_create ();
5224 memset (&stats, 0, sizeof (stats));
5225 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
5226 shared_bitmap_eq, free);
5227 init_base_vars ();
5230 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
5231 predecessor edges. */
5233 static void
5234 remove_preds_and_fake_succs (constraint_graph_t graph)
5236 unsigned int i;
5238 /* Clear the implicit ref and address nodes from the successor
5239 lists. */
5240 for (i = 0; i < FIRST_REF_NODE; i++)
5242 if (graph->succs[i])
5243 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
5244 FIRST_REF_NODE * 2);
5247 /* Free the successor list for the non-ref nodes. */
5248 for (i = FIRST_REF_NODE; i < graph->size; i++)
5250 if (graph->succs[i])
5251 BITMAP_FREE (graph->succs[i]);
5254 /* Now shrink the successor list to the real number of variables,
5255 and blow away the predecessor bitmaps. */
5256 graph->size = VEC_length (varinfo_t, varmap);
5257 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
5259 free (graph->implicit_preds);
5260 graph->implicit_preds = NULL;
5261 free (graph->preds);
5262 graph->preds = NULL;
5263 bitmap_obstack_release (&predbitmap_obstack);
5266 /* Compute the set of variables we can't TBAA prune. */
5268 static void
5269 compute_tbaa_pruning (void)
5271 unsigned int size = VEC_length (varinfo_t, varmap);
5272 unsigned int i;
5273 bool any;
5275 changed_count = 0;
5276 changed = sbitmap_alloc (size);
5277 sbitmap_zero (changed);
5279 /* Mark all initial no_tbaa_pruning nodes as changed. */
5280 any = false;
5281 for (i = 0; i < size; ++i)
5283 varinfo_t ivi = get_varinfo (i);
5285 if (find (i) == i && ivi->no_tbaa_pruning)
5287 any = true;
5288 if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
5289 || VEC_length (constraint_t, graph->complex[i]) > 0)
5291 SET_BIT (changed, i);
5292 ++changed_count;
5297 while (changed_count > 0)
5299 struct topo_info *ti = init_topo_info ();
5300 ++stats.iterations;
5302 compute_topo_order (graph, ti);
5304 while (VEC_length (unsigned, ti->topo_order) != 0)
5306 bitmap_iterator bi;
5308 i = VEC_pop (unsigned, ti->topo_order);
5310 /* If this variable is not a representative, skip it. */
5311 if (find (i) != i)
5312 continue;
5314 /* If the node has changed, we need to process the complex
5315 constraints and outgoing edges again. */
5316 if (TEST_BIT (changed, i))
5318 unsigned int j;
5319 constraint_t c;
5320 VEC(constraint_t,heap) *complex = graph->complex[i];
5322 RESET_BIT (changed, i);
5323 --changed_count;
5325 /* Process the complex copy constraints. */
5326 for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
5328 if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
5330 varinfo_t lhsvi = get_varinfo (find (c->lhs.var));
5332 if (!lhsvi->no_tbaa_pruning)
5334 lhsvi->no_tbaa_pruning = true;
5335 if (!TEST_BIT (changed, lhsvi->id))
5337 SET_BIT (changed, lhsvi->id);
5338 ++changed_count;
5344 /* Propagate to all successors. */
5345 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
5347 unsigned int to = find (j);
5348 varinfo_t tovi = get_varinfo (to);
5350 /* Don't propagate to ourselves. */
5351 if (to == i)
5352 continue;
5354 if (!tovi->no_tbaa_pruning)
5356 tovi->no_tbaa_pruning = true;
5357 if (!TEST_BIT (changed, to))
5359 SET_BIT (changed, to);
5360 ++changed_count;
5367 free_topo_info (ti);
5370 sbitmap_free (changed);
5372 if (any)
5374 for (i = 0; i < size; ++i)
5376 varinfo_t ivi = get_varinfo (i);
5377 varinfo_t ivip = get_varinfo (find (i));
5379 if (ivip->no_tbaa_pruning)
5381 tree var = ivi->decl;
5383 if (TREE_CODE (var) == SSA_NAME)
5384 var = SSA_NAME_VAR (var);
5386 if (POINTER_TYPE_P (TREE_TYPE (var)))
5388 DECL_NO_TBAA_P (var) = 1;
5390 /* Tell the RTL layer that this pointer can alias
5391 anything. */
5392 DECL_POINTER_ALIAS_SET (var) = 0;
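/* Editor-added illustrative sketch, not used by the pass: the
   fixed-point propagation style of compute_tbaa_pruning, reduced to
   propagating a boolean "tainted" flag over a tiny adjacency-matrix
   graph by repeated sweeps.  The real function is smarter: it visits
   nodes in topological order and tracks a changed bitmap with a
   count.  All names here are hypothetical.  */

#define EX_N 4

static void
ex_propagate_taint (bool tainted[EX_N], const bool edge[EX_N][EX_N])
{
  bool changed = true;

  /* Keep sweeping until a whole pass taints no new node, i.e. we have
     reached the fixed point.  */
  while (changed)
    {
      int i, j;

      changed = false;
      for (i = 0; i < EX_N; i++)
	if (tainted[i])
	  for (j = 0; j < EX_N; j++)
	    if (edge[i][j] && !tainted[j])
	      {
		tainted[j] = true;
		changed = true;
	      }
    }
}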
5399 /* Create points-to sets for the current function. See the comments
5400 at the start of the file for an algorithmic overview. */
5402 void
5403 compute_points_to_sets (void)
5405 struct scc_info *si;
5406 basic_block bb;
5408 timevar_push (TV_TREE_PTA);
5410 init_alias_vars ();
5411 init_alias_heapvars ();
5413 intra_create_variable_infos ();
5415 /* Now walk all statements and derive aliases. */
5416 FOR_EACH_BB (bb)
5418 block_stmt_iterator bsi;
5419 tree phi;
5421 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5422 if (is_gimple_reg (PHI_RESULT (phi)))
5423 find_func_aliases (phi);
5425 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
5427 tree stmt = bsi_stmt (bsi);
5429 find_func_aliases (stmt);
5431 /* The information in CHANGE_DYNAMIC_TYPE_EXPR nodes has now
5432 been captured, and we can remove them. */
5433 if (TREE_CODE (stmt) == CHANGE_DYNAMIC_TYPE_EXPR)
5434 bsi_remove (&bsi, true);
5435 else
5436 bsi_next (&bsi);
5441 if (dump_file)
5443 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5444 dump_constraints (dump_file);
5447 if (dump_file)
5448 fprintf (dump_file,
5449 "\nCollapsing static cycles and doing variable "
5450 "substitution\n");
5452 init_graph (VEC_length (varinfo_t, varmap) * 2);
5454 if (dump_file)
5455 fprintf (dump_file, "Building predecessor graph\n");
5456 build_pred_graph ();
5458 if (dump_file)
5459 fprintf (dump_file, "Detecting pointer and location "
5460 "equivalences\n");
5461 si = perform_var_substitution (graph);
5463 if (dump_file)
5464 fprintf (dump_file, "Rewriting constraints and unifying "
5465 "variables\n");
5466 rewrite_constraints (graph, si);
5467 free_var_substitution_info (si);
5469 build_succ_graph ();
5470 move_complex_constraints (graph);
5472 if (dump_file)
5473 fprintf (dump_file, "Uniting pointer but not location equivalent "
5474 "variables\n");
5475 unite_pointer_equivalences (graph);
5477 if (dump_file)
5478 fprintf (dump_file, "Finding indirect cycles\n");
5479 find_indirect_cycles (graph);
5481 /* Implicit nodes and predecessors are no longer necessary at this
5482 point. */
5483 remove_preds_and_fake_succs (graph);
5485 if (dump_file)
5486 fprintf (dump_file, "Solving graph\n");
5488 solve_graph (graph);
5490 compute_tbaa_pruning ();
5492 if (dump_file)
5493 dump_sa_points_to_info (dump_file);
5495 have_alias_info = true;
5497 timevar_pop (TV_TREE_PTA);
5501 /* Delete created points-to sets. */
5503 void
5504 delete_points_to_sets (void)
5506 unsigned int i;
5508 htab_delete (shared_bitmap_table);
5509 if (dump_file && (dump_flags & TDF_STATS))
5510 fprintf (dump_file, "Points to sets created:%d\n",
5511 stats.points_to_sets_created);
5513 pointer_map_destroy (vi_for_tree);
5514 bitmap_obstack_release (&pta_obstack);
5515 VEC_free (constraint_t, heap, constraints);
5517 for (i = 0; i < graph->size; i++)
5518 VEC_free (constraint_t, heap, graph->complex[i]);
5519 free (graph->complex);
5521 free (graph->rep);
5522 free (graph->succs);
5523 free (graph->pe);
5524 free (graph->pe_rep);
5525 free (graph->indirect_cycles);
5526 free (graph);
5528 VEC_free (varinfo_t, heap, varmap);
5529 free_alloc_pool (variable_info_pool);
5530 free_alloc_pool (constraint_pool);
5531 have_alias_info = false;
5534 /* Return true if we should execute IPA PTA. */
5535 static bool
5536 gate_ipa_pta (void)
5538 return (flag_unit_at_a_time != 0
5539 && flag_ipa_pta
5540 /* Don't bother doing anything if the program has errors. */
5541 && !(errorcount || sorrycount));
5544 /* Execute the driver for IPA PTA. */
5545 static unsigned int
5546 ipa_pta_execute (void)
5548 struct cgraph_node *node;
5549 struct scc_info *si;
5551 in_ipa_mode = 1;
5552 init_alias_heapvars ();
5553 init_alias_vars ();
5555 for (node = cgraph_nodes; node; node = node->next)
5557 if (!node->analyzed || cgraph_is_master_clone (node))
5559 unsigned int varid;
5561 varid = create_function_info_for (node->decl,
5562 cgraph_node_name (node));
5563 if (node->local.externally_visible)
5565 varinfo_t fi = get_varinfo (varid);
5566 for (; fi; fi = fi->next)
5567 make_constraint_from (fi, anything_id);
5571 for (node = cgraph_nodes; node; node = node->next)
5573 if (node->analyzed && cgraph_is_master_clone (node))
5575 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
5576 basic_block bb;
5577 tree old_func_decl = current_function_decl;
5578 if (dump_file)
5579 fprintf (dump_file,
5580 "Generating constraints for %s\n",
5581 cgraph_node_name (node));
5582 push_cfun (func);
5583 current_function_decl = node->decl;
5585 FOR_EACH_BB_FN (bb, func)
5587 block_stmt_iterator bsi;
5588 tree phi;
5590 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5592 if (is_gimple_reg (PHI_RESULT (phi)))
5594 find_func_aliases (phi);
5598 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5600 tree stmt = bsi_stmt (bsi);
5601 find_func_aliases (stmt);
5604 current_function_decl = old_func_decl;
5605 pop_cfun ();
5607 else
5609 /* Make it point to anything. */
5613 if (dump_file)
5615 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5616 dump_constraints (dump_file);
5619 if (dump_file)
5620 fprintf (dump_file,
5621 "\nCollapsing static cycles and doing variable "
5622 "substitution:\n");
5624 init_graph (VEC_length (varinfo_t, varmap) * 2);
5625 build_pred_graph ();
5626 si = perform_var_substitution (graph);
5627 rewrite_constraints (graph, si);
5628 free_var_substitution_info (si);
5630 build_succ_graph ();
5631 move_complex_constraints (graph);
5632 unite_pointer_equivalences (graph);
5633 find_indirect_cycles (graph);
5635 /* Implicit nodes and predecessors are no longer necessary at this
5636 point. */
5637 remove_preds_and_fake_succs (graph);
5639 if (dump_file)
5640 fprintf (dump_file, "\nSolving graph\n");
5642 solve_graph (graph);
5644 if (dump_file)
5645 dump_sa_points_to_info (dump_file);
5647 in_ipa_mode = 0;
5648 delete_alias_heapvars ();
5649 delete_points_to_sets ();
5650 return 0;
5653 struct simple_ipa_opt_pass pass_ipa_pta =
5656 SIMPLE_IPA_PASS,
5657 "pta", /* name */
5658 gate_ipa_pta, /* gate */
5659 ipa_pta_execute, /* execute */
5660 NULL, /* sub */
5661 NULL, /* next */
5662 0, /* static_pass_number */
5663 TV_IPA_PTA, /* tv_id */
5664 0, /* properties_required */
5665 0, /* properties_provided */
5666 0, /* properties_destroyed */
5667 0, /* todo_flags_start */
5668 TODO_update_ssa /* todo_flags_finish */
5672 /* Initialize the heapvar-for-statement mapping. */
5673 void
5674 init_alias_heapvars (void)
5676 if (!heapvar_for_stmt)
5677 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
5678 NULL);
5681 void
5682 delete_alias_heapvars (void)
5684 htab_delete (heapvar_for_stmt);
5685 heapvar_for_stmt = NULL;
5689 #include "gt-tree-ssa-structalias.h"