EnumSet*.class: Regenerate
[official-gcc.git] / gcc / tree-ssa-structalias.c
bloba379e7e2a8dfa354631edf575bc455c3df9b5a6b
1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "flags.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "output.h"
34 #include "errors.h"
35 #include "diagnostic.h"
36 #include "tree.h"
37 #include "c-common.h"
38 #include "tree-flow.h"
39 #include "tree-inline.h"
40 #include "varray.h"
41 #include "c-tree.h"
42 #include "tree-gimple.h"
43 #include "hashtab.h"
44 #include "function.h"
45 #include "cgraph.h"
46 #include "tree-pass.h"
47 #include "timevar.h"
48 #include "alloc-pool.h"
49 #include "splay-tree.h"
50 #include "params.h"
51 #include "tree-ssa-structalias.h"
52 #include "cgraph.h"
53 #include "alias.h"
54 #include "pointer-set.h"
56 /* The idea behind this analyzer is to generate set constraints from the
57 program, then solve the resulting constraints in order to generate the
58 points-to sets.
60 Set constraints are a way of modeling program analysis problems that
61 involve sets. They consist of an inclusion constraint language,
62 describing the variables (each variable is a set) and operations that
63 are involved on the variables, and a set of rules that derive facts
64 from these operations. To solve a system of set constraints, you derive
65 all possible facts under the rules, which gives you the correct sets
66 as a consequence.
68 See "Efficient Field-sensitive pointer analysis for C" by "David
69 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
70 http://citeseer.ist.psu.edu/pearce04efficient.html
72 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
73 of C Code in a Second" by ""Nevin Heintze and Olivier Tardieu" at
74 http://citeseer.ist.psu.edu/heintze01ultrafast.html
76 There are three types of real constraint expressions, DEREF,
77 ADDRESSOF, and SCALAR. Each constraint expression consists
78 of a constraint type, a variable, and an offset.
80 SCALAR is a constraint expression type used to represent x, whether
81 it appears on the LHS or the RHS of a statement.
82 DEREF is a constraint expression type used to represent *x, whether
83 it appears on the LHS or the RHS of a statement.
84 ADDRESSOF is a constraint expression used to represent &x, whether
85 it appears on the LHS or the RHS of a statement.
87 Each pointer variable in the program is assigned an integer id, and
88 each field of a structure variable is assigned an integer id as well.
90 Structure variables are linked to their list of fields through a "next
91 field" in each variable that points to the next field in offset
92 order.
93 Each variable for a structure field has
95 1. "size", that tells the size in bits of that field.
96 2. "fullsize, that tells the size in bits of the entire structure.
97 3. "offset", that tells the offset in bits from the beginning of the
98 structure to this field.
100 Thus,
101 struct f
103 int a;
104 int b;
105 } foo;
106 int *bar;
108 looks like
110 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
111 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
112 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
115 In order to solve the system of set constraints, the following is
116 done:
118 1. Each constraint variable x has a solution set associated with it,
119 Sol(x).
121 2. Constraints are separated into direct, copy, and complex.
122 Direct constraints are ADDRESSOF constraints that require no extra
123 processing, such as P = &Q
124 Copy constraints are those of the form P = Q.
125 Complex constraints are all the constraints involving dereferences
126 and offsets (including offsetted copies).
128 3. All direct constraints of the form P = &Q are processed, such
129 that Q is added to Sol(P)
131 4. All complex constraints for a given constraint variable are stored in a
132 linked list attached to that variable's node.
134 5. A directed graph is built out of the copy constraints. Each
135 constraint variable is a node in the graph, and an edge from
136 Q to P is added for each copy constraint of the form P = Q
138 6. The graph is then walked, and solution sets are
139 propagated along the copy edges, such that an edge from Q to P
140 causes Sol(P) <- Sol(P) union Sol(Q).
142 7. As we visit each node, all complex constraints associated with
143 that node are processed by adding appropriate copy edges to the graph, or the
144 appropriate variables to the solution set.
146 8. The process of walking the graph is iterated until no solution
147 sets change.
149 Prior to walking the graph in steps 6 and 7, We perform static
150 cycle elimination on the constraint graph, as well
151 as off-line variable substitution.
153 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
154 on and turned into anything), but isn't. You can just see what offset
155 inside the pointed-to struct it's going to access.
157 TODO: Constant bounded arrays can be handled as if they were structs of the
158 same number of elements.
160 TODO: Modeling heap and incoming pointers becomes much better if we
161 add fields to them as we discover them, which we could do.
163 TODO: We could handle unions, but to be honest, it's probably not
164 worth the pain or slowdown. */
166 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
167 htab_t heapvar_for_stmt;
169 static bool use_field_sensitive = true;
170 static int in_ipa_mode = 0;
172 /* Used for predecessor bitmaps. */
173 static bitmap_obstack predbitmap_obstack;
175 /* Used for points-to sets. */
176 static bitmap_obstack pta_obstack;
178 /* Used for oldsolution members of variables. */
179 static bitmap_obstack oldpta_obstack;
181 /* Used for per-solver-iteration bitmaps. */
182 static bitmap_obstack iteration_obstack;
184 static unsigned int create_variable_info_for (tree, const char *);
185 typedef struct constraint_graph *constraint_graph_t;
186 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
188 DEF_VEC_P(constraint_t);
189 DEF_VEC_ALLOC_P(constraint_t,heap);
191 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
192 if (a) \
193 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
195 static struct constraint_stats
197 unsigned int total_vars;
198 unsigned int nonpointer_vars;
199 unsigned int unified_vars_static;
200 unsigned int unified_vars_dynamic;
201 unsigned int iterations;
202 unsigned int num_edges;
203 unsigned int num_implicit_edges;
204 unsigned int points_to_sets_created;
205 } stats;
207 struct variable_info
209 /* ID of this variable */
210 unsigned int id;
212 /* Name of this variable */
213 const char *name;
215 /* Tree that this variable is associated with. */
216 tree decl;
218 /* Offset of this variable, in bits, from the base variable */
219 unsigned HOST_WIDE_INT offset;
221 /* Size of the variable, in bits. */
222 unsigned HOST_WIDE_INT size;
224 /* Full size of the base variable, in bits. */
225 unsigned HOST_WIDE_INT fullsize;
227 /* A link to the variable for the next field in this structure. */
228 struct variable_info *next;
230 /* True if the variable is directly the target of a dereference.
231 This is used to track which variables are *actually* dereferenced
232 so we can prune their points to listed. */
233 unsigned int directly_dereferenced:1;
235 /* True if this is a variable created by the constraint analysis, such as
236 heap variables and constraints we had to break up. */
237 unsigned int is_artificial_var:1;
239 /* True if this is a special variable whose solution set should not be
240 changed. */
241 unsigned int is_special_var:1;
243 /* True for variables whose size is not known or variable. */
244 unsigned int is_unknown_size_var:1;
246 /* True for variables that have unions somewhere in them. */
247 unsigned int has_union:1;
249 /* True if this is a heap variable. */
250 unsigned int is_heap_var:1;
252 /* True if we may not use TBAA to prune references to this
253 variable. This is used for C++ placement new. */
254 unsigned int no_tbaa_pruning : 1;
256 /* Points-to set for this variable. */
257 bitmap solution;
259 /* Old points-to set for this variable. */
260 bitmap oldsolution;
262 /* Variable id this was collapsed to due to type unsafety. This
263 should be unused completely after build_succ_graph, or something
264 is broken. */
265 struct variable_info *collapsed_to;
267 typedef struct variable_info *varinfo_t;
269 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
271 /* Pool of variable info structures. */
272 static alloc_pool variable_info_pool;
274 DEF_VEC_P(varinfo_t);
276 DEF_VEC_ALLOC_P(varinfo_t, heap);
278 /* Table of variable info structures for constraint variables.
279 Indexed directly by variable info id. */
280 static VEC(varinfo_t,heap) *varmap;
282 /* Return the varmap element N */
284 static inline varinfo_t
285 get_varinfo (unsigned int n)
287 return VEC_index (varinfo_t, varmap, n);
290 /* Return the varmap element N, following the collapsed_to link. */
292 static inline varinfo_t
293 get_varinfo_fc (unsigned int n)
295 varinfo_t v = VEC_index (varinfo_t, varmap, n);
297 if (v->collapsed_to)
298 return v->collapsed_to;
299 return v;
302 /* Variable that represents the unknown pointer. */
303 static varinfo_t var_anything;
304 static tree anything_tree;
305 static unsigned int anything_id;
307 /* Variable that represents the NULL pointer. */
308 static varinfo_t var_nothing;
309 static tree nothing_tree;
310 static unsigned int nothing_id;
312 /* Variable that represents read only memory. */
313 static varinfo_t var_readonly;
314 static tree readonly_tree;
315 static unsigned int readonly_id;
317 /* Variable that represents integers. This is used for when people do things
318 like &0->a.b. */
319 static varinfo_t var_integer;
320 static tree integer_tree;
321 static unsigned int integer_id;
323 /* Lookup a heap var for FROM, and return it if we find one. */
325 static tree
326 heapvar_lookup (tree from)
328 struct tree_map *h, in;
329 in.base.from = from;
331 h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
332 htab_hash_pointer (from));
333 if (h)
334 return h->to;
335 return NULL_TREE;
338 /* Insert a mapping FROM->TO in the heap var for statement
339 hashtable. */
341 static void
342 heapvar_insert (tree from, tree to)
344 struct tree_map *h;
345 void **loc;
347 h = GGC_NEW (struct tree_map);
348 h->hash = htab_hash_pointer (from);
349 h->base.from = from;
350 h->to = to;
351 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
352 *(struct tree_map **) loc = h;
355 /* Return a new variable info structure consisting for a variable
356 named NAME, and using constraint graph node NODE. */
358 static varinfo_t
359 new_var_info (tree t, unsigned int id, const char *name)
361 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
362 tree var;
364 ret->id = id;
365 ret->name = name;
366 ret->decl = t;
367 ret->directly_dereferenced = false;
368 ret->is_artificial_var = false;
369 ret->is_heap_var = false;
370 ret->is_special_var = false;
371 ret->is_unknown_size_var = false;
372 ret->has_union = false;
373 var = t;
374 if (TREE_CODE (var) == SSA_NAME)
375 var = SSA_NAME_VAR (var);
376 ret->no_tbaa_pruning = (DECL_P (var)
377 && POINTER_TYPE_P (TREE_TYPE (var))
378 && DECL_NO_TBAA_P (var));
379 ret->solution = BITMAP_ALLOC (&pta_obstack);
380 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
381 ret->next = NULL;
382 ret->collapsed_to = NULL;
383 return ret;
386 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
388 /* An expression that appears in a constraint. */
390 struct constraint_expr
392 /* Constraint type. */
393 constraint_expr_type type;
395 /* Variable we are referring to in the constraint. */
396 unsigned int var;
398 /* Offset, in bits, of this constraint from the beginning of
399 variables it ends up referring to.
401 IOW, in a deref constraint, we would deref, get the result set,
402 then add OFFSET to each member. */
403 unsigned HOST_WIDE_INT offset;
406 typedef struct constraint_expr ce_s;
407 DEF_VEC_O(ce_s);
408 DEF_VEC_ALLOC_O(ce_s, heap);
409 static void get_constraint_for (tree, VEC(ce_s, heap) **);
410 static void do_deref (VEC (ce_s, heap) **);
412 /* Our set constraints are made up of two constraint expressions, one
413 LHS, and one RHS.
415 As described in the introduction, our set constraints each represent an
416 operation between set valued variables.
418 struct constraint
420 struct constraint_expr lhs;
421 struct constraint_expr rhs;
424 /* List of constraints that we use to build the constraint graph from. */
426 static VEC(constraint_t,heap) *constraints;
427 static alloc_pool constraint_pool;
430 DEF_VEC_I(int);
431 DEF_VEC_ALLOC_I(int, heap);
433 /* The constraint graph is represented as an array of bitmaps
434 containing successor nodes. */
436 struct constraint_graph
438 /* Size of this graph, which may be different than the number of
439 nodes in the variable map. */
440 unsigned int size;
442 /* Explicit successors of each node. */
443 bitmap *succs;
445 /* Implicit predecessors of each node (Used for variable
446 substitution). */
447 bitmap *implicit_preds;
449 /* Explicit predecessors of each node (Used for variable substitution). */
450 bitmap *preds;
452 /* Indirect cycle representatives, or -1 if the node has no indirect
453 cycles. */
454 int *indirect_cycles;
456 /* Representative node for a node. rep[a] == a unless the node has
457 been unified. */
458 unsigned int *rep;
460 /* Equivalence class representative for a label. This is used for
461 variable substitution. */
462 int *eq_rep;
464 /* Pointer equivalence node for a node. if pe[a] != a, then node a
465 can be united with node pe[a] after initial constraint building. */
466 unsigned int *pe;
468 /* Pointer equivalence representative for a label. This is used to
469 handle nodes that are pointer equivalent but not location
470 equivalent. We can unite these once the addressof constraints
471 are transformed into initial points-to sets. */
472 int *pe_rep;
474 /* Pointer equivalence label for each node, used during variable
475 substitution. */
476 unsigned int *pointer_label;
478 /* Location equivalence label for each node, used during location
479 equivalence finding. */
480 unsigned int *loc_label;
482 /* Pointed-by set for each node, used during location equivalence
483 finding. This is pointed-by rather than pointed-to, because it
484 is constructed using the predecessor graph. */
485 bitmap *pointed_by;
487 /* Points to sets for pointer equivalence. This is *not* the actual
488 points-to sets for nodes. */
489 bitmap *points_to;
491 /* Bitmap of nodes where the bit is set if the node is a direct
492 node. Used for variable substitution. */
493 sbitmap direct_nodes;
495 /* Bitmap of nodes where the bit is set if the node is address
496 taken. Used for variable substitution. */
497 bitmap address_taken;
499 /* True if points_to bitmap for this node is stored in the hash
500 table. */
501 sbitmap pt_used;
503 /* Number of incoming edges remaining to be processed by pointer
504 equivalence.
505 Used for variable substitution. */
506 unsigned int *number_incoming;
509 /* Vector of complex constraints for each graph node. Complex
510 constraints are those involving dereferences or offsets that are
511 not 0. */
512 VEC(constraint_t,heap) **complex;
515 static constraint_graph_t graph;
517 /* During variable substitution and the offline version of indirect
518 cycle finding, we create nodes to represent dereferences and
519 address taken constraints. These represent where these start and
520 end. */
521 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
522 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
524 /* Return the representative node for NODE, if NODE has been unioned
525 with another NODE.
526 This function performs path compression along the way to finding
527 the representative. */
529 static unsigned int
530 find (unsigned int node)
532 gcc_assert (node < graph->size);
533 if (graph->rep[node] != node)
534 return graph->rep[node] = find (graph->rep[node]);
535 return node;
538 /* Union the TO and FROM nodes to the TO nodes.
539 Note that at some point in the future, we may want to do
540 union-by-rank, in which case we are going to have to return the
541 node we unified to. */
543 static bool
544 unite (unsigned int to, unsigned int from)
546 gcc_assert (to < graph->size && from < graph->size);
547 if (to != from && graph->rep[from] != to)
549 graph->rep[from] = to;
550 return true;
552 return false;
555 /* Create a new constraint consisting of LHS and RHS expressions. */
557 static constraint_t
558 new_constraint (const struct constraint_expr lhs,
559 const struct constraint_expr rhs)
561 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
562 ret->lhs = lhs;
563 ret->rhs = rhs;
564 return ret;
567 /* Print out constraint C to FILE. */
569 void
570 dump_constraint (FILE *file, constraint_t c)
572 if (c->lhs.type == ADDRESSOF)
573 fprintf (file, "&");
574 else if (c->lhs.type == DEREF)
575 fprintf (file, "*");
576 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
577 if (c->lhs.offset != 0)
578 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
579 fprintf (file, " = ");
580 if (c->rhs.type == ADDRESSOF)
581 fprintf (file, "&");
582 else if (c->rhs.type == DEREF)
583 fprintf (file, "*");
584 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
585 if (c->rhs.offset != 0)
586 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
587 fprintf (file, "\n");
590 /* Print out constraint C to stderr. */
592 void
593 debug_constraint (constraint_t c)
595 dump_constraint (stderr, c);
598 /* Print out all constraints to FILE */
600 void
601 dump_constraints (FILE *file)
603 int i;
604 constraint_t c;
605 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
606 dump_constraint (file, c);
609 /* Print out all constraints to stderr. */
611 void
612 debug_constraints (void)
614 dump_constraints (stderr);
617 /* SOLVER FUNCTIONS
619 The solver is a simple worklist solver, that works on the following
620 algorithm:
622 sbitmap changed_nodes = all zeroes;
623 changed_count = 0;
624 For each node that is not already collapsed:
625 changed_count++;
626 set bit in changed nodes
628 while (changed_count > 0)
630 compute topological ordering for constraint graph
632 find and collapse cycles in the constraint graph (updating
633 changed if necessary)
635 for each node (n) in the graph in topological order:
636 changed_count--;
638 Process each complex constraint associated with the node,
639 updating changed if necessary.
641 For each outgoing edge from n, propagate the solution from n to
642 the destination of the edge, updating changed as necessary.
644 } */
646 /* Return true if two constraint expressions A and B are equal. */
648 static bool
649 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
651 return a.type == b.type && a.var == b.var && a.offset == b.offset;
654 /* Return true if constraint expression A is less than constraint expression
655 B. This is just arbitrary, but consistent, in order to give them an
656 ordering. */
658 static bool
659 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
661 if (a.type == b.type)
663 if (a.var == b.var)
664 return a.offset < b.offset;
665 else
666 return a.var < b.var;
668 else
669 return a.type < b.type;
672 /* Return true if constraint A is less than constraint B. This is just
673 arbitrary, but consistent, in order to give them an ordering. */
675 static bool
676 constraint_less (const constraint_t a, const constraint_t b)
678 if (constraint_expr_less (a->lhs, b->lhs))
679 return true;
680 else if (constraint_expr_less (b->lhs, a->lhs))
681 return false;
682 else
683 return constraint_expr_less (a->rhs, b->rhs);
686 /* Return true if two constraints A and B are equal. */
688 static bool
689 constraint_equal (struct constraint a, struct constraint b)
691 return constraint_expr_equal (a.lhs, b.lhs)
692 && constraint_expr_equal (a.rhs, b.rhs);
696 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
698 static constraint_t
699 constraint_vec_find (VEC(constraint_t,heap) *vec,
700 struct constraint lookfor)
702 unsigned int place;
703 constraint_t found;
705 if (vec == NULL)
706 return NULL;
708 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
709 if (place >= VEC_length (constraint_t, vec))
710 return NULL;
711 found = VEC_index (constraint_t, vec, place);
712 if (!constraint_equal (*found, lookfor))
713 return NULL;
714 return found;
717 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
719 static void
720 constraint_set_union (VEC(constraint_t,heap) **to,
721 VEC(constraint_t,heap) **from)
723 int i;
724 constraint_t c;
726 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
728 if (constraint_vec_find (*to, *c) == NULL)
730 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
731 constraint_less);
732 VEC_safe_insert (constraint_t, heap, *to, place, c);
737 /* Take a solution set SET, add OFFSET to each member of the set, and
738 overwrite SET with the result when done. */
740 static void
741 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
743 bitmap result = BITMAP_ALLOC (&iteration_obstack);
744 unsigned int i;
745 bitmap_iterator bi;
747 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
749 /* If this is a properly sized variable, only add offset if it's
750 less than end. Otherwise, it is globbed to a single
751 variable. */
753 if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
755 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
756 varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
757 if (!v)
758 continue;
759 bitmap_set_bit (result, v->id);
761 else if (get_varinfo (i)->is_artificial_var
762 || get_varinfo (i)->has_union
763 || get_varinfo (i)->is_unknown_size_var)
765 bitmap_set_bit (result, i);
769 bitmap_copy (set, result);
770 BITMAP_FREE (result);
773 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
774 process. */
776 static bool
777 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
779 if (inc == 0)
780 return bitmap_ior_into (to, from);
781 else
783 bitmap tmp;
784 bool res;
786 tmp = BITMAP_ALLOC (&iteration_obstack);
787 bitmap_copy (tmp, from);
788 solution_set_add (tmp, inc);
789 res = bitmap_ior_into (to, tmp);
790 BITMAP_FREE (tmp);
791 return res;
795 /* Insert constraint C into the list of complex constraints for graph
796 node VAR. */
798 static void
799 insert_into_complex (constraint_graph_t graph,
800 unsigned int var, constraint_t c)
802 VEC (constraint_t, heap) *complex = graph->complex[var];
803 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
804 constraint_less);
806 /* Only insert constraints that do not already exist. */
807 if (place >= VEC_length (constraint_t, complex)
808 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
809 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
813 /* Condense two variable nodes into a single variable node, by moving
814 all associated info from SRC to TO. */
816 static void
817 merge_node_constraints (constraint_graph_t graph, unsigned int to,
818 unsigned int from)
820 unsigned int i;
821 constraint_t c;
823 gcc_assert (find (from) == to);
825 /* Move all complex constraints from src node into to node */
826 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
828 /* In complex constraints for node src, we may have either
829 a = *src, and *src = a, or an offseted constraint which are
830 always added to the rhs node's constraints. */
832 if (c->rhs.type == DEREF)
833 c->rhs.var = to;
834 else if (c->lhs.type == DEREF)
835 c->lhs.var = to;
836 else
837 c->rhs.var = to;
839 constraint_set_union (&graph->complex[to], &graph->complex[from]);
840 VEC_free (constraint_t, heap, graph->complex[from]);
841 graph->complex[from] = NULL;
845 /* Remove edges involving NODE from GRAPH. */
847 static void
848 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
850 if (graph->succs[node])
851 BITMAP_FREE (graph->succs[node]);
854 /* Merge GRAPH nodes FROM and TO into node TO. */
856 static void
857 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
858 unsigned int from)
860 if (graph->indirect_cycles[from] != -1)
862 /* If we have indirect cycles with the from node, and we have
863 none on the to node, the to node has indirect cycles from the
864 from node now that they are unified.
865 If indirect cycles exist on both, unify the nodes that they
866 are in a cycle with, since we know they are in a cycle with
867 each other. */
868 if (graph->indirect_cycles[to] == -1)
869 graph->indirect_cycles[to] = graph->indirect_cycles[from];
872 /* Merge all the successor edges. */
873 if (graph->succs[from])
875 if (!graph->succs[to])
876 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
877 bitmap_ior_into (graph->succs[to],
878 graph->succs[from]);
881 clear_edges_for_node (graph, from);
885 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
886 it doesn't exist in the graph already. */
888 static void
889 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
890 unsigned int from)
892 if (to == from)
893 return;
895 if (!graph->implicit_preds[to])
896 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
898 if (!bitmap_bit_p (graph->implicit_preds[to], from))
900 stats.num_implicit_edges++;
901 bitmap_set_bit (graph->implicit_preds[to], from);
905 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
906 it doesn't exist in the graph already.
907 Return false if the edge already existed, true otherwise. */
909 static void
910 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
911 unsigned int from)
913 if (!graph->preds[to])
914 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
915 if (!bitmap_bit_p (graph->preds[to], from))
916 bitmap_set_bit (graph->preds[to], from);
919 /* Add a graph edge to GRAPH, going from FROM to TO if
920 it doesn't exist in the graph already.
921 Return false if the edge already existed, true otherwise. */
923 static bool
924 add_graph_edge (constraint_graph_t graph, unsigned int to,
925 unsigned int from)
927 if (to == from)
929 return false;
931 else
933 bool r = false;
935 if (!graph->succs[from])
936 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
937 if (!bitmap_bit_p (graph->succs[from], to))
939 r = true;
940 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
941 stats.num_edges++;
942 bitmap_set_bit (graph->succs[from], to);
944 return r;
949 /* Return true if {DEST.SRC} is an existing graph edge in GRAPH. */
951 static bool
952 valid_graph_edge (constraint_graph_t graph, unsigned int src,
953 unsigned int dest)
955 return (graph->succs[dest]
956 && bitmap_bit_p (graph->succs[dest], src));
959 /* Initialize the constraint graph structure to contain SIZE nodes. */
961 static void
962 init_graph (unsigned int size)
964 unsigned int j;
966 graph = XCNEW (struct constraint_graph);
967 graph->size = size;
968 graph->succs = XCNEWVEC (bitmap, graph->size);
969 graph->indirect_cycles = XNEWVEC (int, graph->size);
970 graph->rep = XNEWVEC (unsigned int, graph->size);
971 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
972 graph->pe = XNEWVEC (unsigned int, graph->size);
973 graph->pe_rep = XNEWVEC (int, graph->size);
975 for (j = 0; j < graph->size; j++)
977 graph->rep[j] = j;
978 graph->pe[j] = j;
979 graph->pe_rep[j] = -1;
980 graph->indirect_cycles[j] = -1;
984 /* Build the constraint graph, adding only predecessor edges right now. */
986 static void
987 build_pred_graph (void)
989 int i;
990 constraint_t c;
991 unsigned int j;
993 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
994 graph->preds = XCNEWVEC (bitmap, graph->size);
995 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
996 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
997 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
998 graph->points_to = XCNEWVEC (bitmap, graph->size);
999 graph->eq_rep = XNEWVEC (int, graph->size);
1000 graph->direct_nodes = sbitmap_alloc (graph->size);
1001 graph->pt_used = sbitmap_alloc (graph->size);
1002 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1003 graph->number_incoming = XCNEWVEC (unsigned int, graph->size);
1004 sbitmap_zero (graph->direct_nodes);
1005 sbitmap_zero (graph->pt_used);
1007 for (j = 0; j < FIRST_REF_NODE; j++)
1009 if (!get_varinfo (j)->is_special_var)
1010 SET_BIT (graph->direct_nodes, j);
1013 for (j = 0; j < graph->size; j++)
1014 graph->eq_rep[j] = -1;
1016 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1017 graph->indirect_cycles[j] = -1;
1019 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1021 struct constraint_expr lhs = c->lhs;
1022 struct constraint_expr rhs = c->rhs;
1023 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
1024 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
1026 if (lhs.type == DEREF)
1028 /* *x = y. */
1029 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1030 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1032 else if (rhs.type == DEREF)
1034 /* x = *y */
1035 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1036 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1037 else
1038 RESET_BIT (graph->direct_nodes, lhsvar);
1040 else if (rhs.type == ADDRESSOF)
1042 /* x = &y */
1043 if (graph->points_to[lhsvar] == NULL)
1044 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1045 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1047 if (graph->pointed_by[rhsvar] == NULL)
1048 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1049 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1051 /* Implicitly, *x = y */
1052 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1054 RESET_BIT (graph->direct_nodes, rhsvar);
1055 bitmap_set_bit (graph->address_taken, rhsvar);
1057 else if (lhsvar > anything_id
1058 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1060 /* x = y */
1061 add_pred_graph_edge (graph, lhsvar, rhsvar);
1062 /* Implicitly, *x = *y */
1063 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1064 FIRST_REF_NODE + rhsvar);
1066 else if (lhs.offset != 0 || rhs.offset != 0)
1068 if (rhs.offset != 0)
1069 RESET_BIT (graph->direct_nodes, lhs.var);
1070 else if (lhs.offset != 0)
1071 RESET_BIT (graph->direct_nodes, rhs.var);
1076 /* Build the constraint graph, adding successor edges. */
1078 static void
1079 build_succ_graph (void)
1081 int i;
1082 constraint_t c;
1084 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1086 struct constraint_expr lhs;
1087 struct constraint_expr rhs;
1088 unsigned int lhsvar;
1089 unsigned int rhsvar;
1091 if (!c)
1092 continue;
1094 lhs = c->lhs;
1095 rhs = c->rhs;
1096 lhsvar = find (get_varinfo_fc (lhs.var)->id);
1097 rhsvar = find (get_varinfo_fc (rhs.var)->id);
1099 if (lhs.type == DEREF)
1101 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1102 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1104 else if (rhs.type == DEREF)
1106 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1107 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1109 else if (rhs.type == ADDRESSOF)
1111 /* x = &y */
1112 gcc_assert (find (get_varinfo_fc (rhs.var)->id)
1113 == get_varinfo_fc (rhs.var)->id);
1114 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1116 else if (lhsvar > anything_id
1117 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1119 add_graph_edge (graph, lhsvar, rhsvar);
1125 /* Changed variables on the last iteration. */
1126 static unsigned int changed_count;
1127 static sbitmap changed;
1129 DEF_VEC_I(unsigned);
1130 DEF_VEC_ALLOC_I(unsigned,heap);
1133 /* Strongly Connected Component visitation info. */
1135 struct scc_info
1137 sbitmap visited;
1138 sbitmap deleted;
1139 unsigned int *dfs;
1140 unsigned int *node_mapping;
1141 int current_index;
1142 VEC(unsigned,heap) *scc_stack;
1146 /* Recursive routine to find strongly connected components in GRAPH.
1147 SI is the SCC info to store the information in, and N is the id of current
1148 graph node we are processing.
1150 This is Tarjan's strongly connected component finding algorithm, as
1151 modified by Nuutila to keep only non-root nodes on the stack.
1152 The algorithm can be found in "On finding the strongly connected
1153 connected components in a directed graph" by Esko Nuutila and Eljas
1154 Soisalon-Soininen, in Information Processing Letters volume 49,
1155 number 1, pages 9-14. */
1157 static void
1158 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1160 unsigned int i;
1161 bitmap_iterator bi;
1162 unsigned int my_dfs;
1164 SET_BIT (si->visited, n);
1165 si->dfs[n] = si->current_index ++;
1166 my_dfs = si->dfs[n];
1168 /* Visit all the successors. */
1169 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1171 unsigned int w;
1173 if (i > LAST_REF_NODE)
1174 break;
1176 w = find (i);
1177 if (TEST_BIT (si->deleted, w))
1178 continue;
1180 if (!TEST_BIT (si->visited, w))
1181 scc_visit (graph, si, w);
1183 unsigned int t = find (w);
1184 unsigned int nnode = find (n);
1185 gcc_assert (nnode == n);
1187 if (si->dfs[t] < si->dfs[nnode])
1188 si->dfs[n] = si->dfs[t];
1192 /* See if any components have been identified. */
1193 if (si->dfs[n] == my_dfs)
1195 if (VEC_length (unsigned, si->scc_stack) > 0
1196 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1198 bitmap scc = BITMAP_ALLOC (NULL);
1199 bool have_ref_node = n >= FIRST_REF_NODE;
1200 unsigned int lowest_node;
1201 bitmap_iterator bi;
1203 bitmap_set_bit (scc, n);
1205 while (VEC_length (unsigned, si->scc_stack) != 0
1206 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1208 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1210 bitmap_set_bit (scc, w);
1211 if (w >= FIRST_REF_NODE)
1212 have_ref_node = true;
1215 lowest_node = bitmap_first_set_bit (scc);
1216 gcc_assert (lowest_node < FIRST_REF_NODE);
1218 /* Collapse the SCC nodes into a single node, and mark the
1219 indirect cycles. */
1220 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1222 if (i < FIRST_REF_NODE)
1224 if (unite (lowest_node, i))
1225 unify_nodes (graph, lowest_node, i, false);
1227 else
1229 unite (lowest_node, i);
1230 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1234 SET_BIT (si->deleted, n);
1236 else
1237 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1240 /* Unify node FROM into node TO, updating the changed count if
1241 necessary when UPDATE_CHANGED is true. */
1243 static void
1244 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1245 bool update_changed)
1248 gcc_assert (to != from && find (to) == to);
1249 if (dump_file && (dump_flags & TDF_DETAILS))
1250 fprintf (dump_file, "Unifying %s to %s\n",
1251 get_varinfo (from)->name,
1252 get_varinfo (to)->name);
1254 if (update_changed)
1255 stats.unified_vars_dynamic++;
1256 else
1257 stats.unified_vars_static++;
1259 merge_graph_nodes (graph, to, from);
1260 merge_node_constraints (graph, to, from);
1262 if (get_varinfo (from)->no_tbaa_pruning)
1263 get_varinfo (to)->no_tbaa_pruning = true;
1265 /* Mark TO as changed if FROM was changed. If TO was already marked
1266 as changed, decrease the changed count. */
1268 if (update_changed && TEST_BIT (changed, from))
1270 RESET_BIT (changed, from);
1271 if (!TEST_BIT (changed, to))
1272 SET_BIT (changed, to);
1273 else
1275 gcc_assert (changed_count > 0);
1276 changed_count--;
1280 /* If the solution changes because of the merging, we need to mark
1281 the variable as changed. */
1282 if (bitmap_ior_into (get_varinfo (to)->solution,
1283 get_varinfo (from)->solution))
1285 if (update_changed && !TEST_BIT (changed, to))
1287 SET_BIT (changed, to);
1288 changed_count++;
1292 BITMAP_FREE (get_varinfo (from)->solution);
1293 BITMAP_FREE (get_varinfo (from)->oldsolution);
1295 if (stats.iterations > 0)
1297 BITMAP_FREE (get_varinfo (to)->oldsolution);
1298 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
1301 if (valid_graph_edge (graph, to, to))
1303 if (graph->succs[to])
1304 bitmap_clear_bit (graph->succs[to], to);
1308 /* Information needed to compute the topological ordering of a graph. */
1310 struct topo_info
1312 /* sbitmap of visited nodes. */
1313 sbitmap visited;
1314 /* Array that stores the topological order of the graph, *in
1315 reverse*. */
1316 VEC(unsigned,heap) *topo_order;
1320 /* Initialize and return a topological info structure. */
1322 static struct topo_info *
1323 init_topo_info (void)
1325 size_t size = graph->size;
1326 struct topo_info *ti = XNEW (struct topo_info);
1327 ti->visited = sbitmap_alloc (size);
1328 sbitmap_zero (ti->visited);
1329 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1330 return ti;
1334 /* Free the topological sort info pointed to by TI. */
1336 static void
1337 free_topo_info (struct topo_info *ti)
1339 sbitmap_free (ti->visited);
1340 VEC_free (unsigned, heap, ti->topo_order);
1341 free (ti);
1344 /* Visit the graph in topological order, and store the order in the
1345 topo_info structure. */
1347 static void
1348 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1349 unsigned int n)
1351 bitmap_iterator bi;
1352 unsigned int j;
1354 SET_BIT (ti->visited, n);
1356 if (graph->succs[n])
1357 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1359 if (!TEST_BIT (ti->visited, j))
1360 topo_visit (graph, ti, j);
1363 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1366 /* Return true if variable N + OFFSET is a legal field of N. */
1368 static bool
1369 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1371 varinfo_t ninfo = get_varinfo (n);
1373 /* For things we've globbed to single variables, any offset into the
1374 variable acts like the entire variable, so that it becomes offset
1375 0. */
1376 if (ninfo->is_special_var
1377 || ninfo->is_artificial_var
1378 || ninfo->is_unknown_size_var)
1380 *offset = 0;
1381 return true;
1383 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1386 /* Process a constraint C that represents x = *y, using DELTA as the
1387 starting solution. */
1389 static void
1390 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1391 bitmap delta)
1393 unsigned int lhs = c->lhs.var;
1394 bool flag = false;
1395 bitmap sol = get_varinfo (lhs)->solution;
1396 unsigned int j;
1397 bitmap_iterator bi;
1399 if (bitmap_bit_p (delta, anything_id))
1401 flag = !bitmap_bit_p (sol, anything_id);
1402 if (flag)
1403 bitmap_set_bit (sol, anything_id);
1404 goto done;
1406 /* For each variable j in delta (Sol(y)), add
1407 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1408 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1410 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
1411 if (type_safe (j, &roffset))
1413 varinfo_t v;
1414 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1415 unsigned int t;
1417 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1418 if (!v)
1419 continue;
1420 t = find (v->id);
1422 /* Adding edges from the special vars is pointless.
1423 They don't have sets that can change. */
1424 if (get_varinfo (t) ->is_special_var)
1425 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1426 else if (add_graph_edge (graph, lhs, t))
1427 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1429 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1430 fprintf (dump_file, "Untypesafe usage in do_sd_constraint\n");
1434 done:
1435 /* If the LHS solution changed, mark the var as changed. */
1436 if (flag)
1438 get_varinfo (lhs)->solution = sol;
1439 if (!TEST_BIT (changed, lhs))
1441 SET_BIT (changed, lhs);
1442 changed_count++;
1447 /* Process a constraint C that represents *x = y. */
1449 static void
1450 do_ds_constraint (constraint_t c, bitmap delta)
1452 unsigned int rhs = c->rhs.var;
1453 bitmap sol = get_varinfo (rhs)->solution;
1454 unsigned int j;
1455 bitmap_iterator bi;
1457 if (bitmap_bit_p (sol, anything_id))
1459 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1461 varinfo_t jvi = get_varinfo (j);
1462 unsigned int t;
1463 unsigned int loff = c->lhs.offset;
1464 unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
1465 varinfo_t v;
1467 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1468 if (!v)
1469 continue;
1470 t = find (v->id);
1472 if (!bitmap_bit_p (get_varinfo (t)->solution, anything_id))
1474 bitmap_set_bit (get_varinfo (t)->solution, anything_id);
1475 if (!TEST_BIT (changed, t))
1477 SET_BIT (changed, t);
1478 changed_count++;
1482 return;
1485 /* For each member j of delta (Sol(x)), add an edge from y to j and
1486 union Sol(y) into Sol(j) */
1487 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1489 unsigned HOST_WIDE_INT loff = c->lhs.offset;
1490 if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
1492 varinfo_t v;
1493 unsigned int t;
1494 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1495 bitmap tmp;
1497 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1498 if (!v)
1499 continue;
1500 t = find (v->id);
1501 tmp = get_varinfo (t)->solution;
1503 if (set_union_with_increment (tmp, sol, 0))
1505 get_varinfo (t)->solution = tmp;
1506 if (t == rhs)
1507 sol = get_varinfo (rhs)->solution;
1508 if (!TEST_BIT (changed, t))
1510 SET_BIT (changed, t);
1511 changed_count++;
1515 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1516 fprintf (dump_file, "Untypesafe usage in do_ds_constraint\n");
1520 /* Handle a non-simple (simple meaning requires no iteration),
1521 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1523 static void
1524 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1526 if (c->lhs.type == DEREF)
1528 if (c->rhs.type == ADDRESSOF)
1530 gcc_unreachable();
1532 else
1534 /* *x = y */
1535 do_ds_constraint (c, delta);
1538 else if (c->rhs.type == DEREF)
1540 /* x = *y */
1541 if (!(get_varinfo (c->lhs.var)->is_special_var))
1542 do_sd_constraint (graph, c, delta);
1544 else
1546 bitmap tmp;
1547 bitmap solution;
1548 bool flag = false;
1550 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1551 solution = get_varinfo (c->rhs.var)->solution;
1552 tmp = get_varinfo (c->lhs.var)->solution;
1554 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1556 if (flag)
1558 get_varinfo (c->lhs.var)->solution = tmp;
1559 if (!TEST_BIT (changed, c->lhs.var))
1561 SET_BIT (changed, c->lhs.var);
1562 changed_count++;
1568 /* Initialize and return a new SCC info structure. */
1570 static struct scc_info *
1571 init_scc_info (size_t size)
1573 struct scc_info *si = XNEW (struct scc_info);
1574 size_t i;
1576 si->current_index = 0;
1577 si->visited = sbitmap_alloc (size);
1578 sbitmap_zero (si->visited);
1579 si->deleted = sbitmap_alloc (size);
1580 sbitmap_zero (si->deleted);
1581 si->node_mapping = XNEWVEC (unsigned int, size);
1582 si->dfs = XCNEWVEC (unsigned int, size);
1584 for (i = 0; i < size; i++)
1585 si->node_mapping[i] = i;
1587 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1588 return si;
1591 /* Free an SCC info structure pointed to by SI */
1593 static void
1594 free_scc_info (struct scc_info *si)
1596 sbitmap_free (si->visited);
1597 sbitmap_free (si->deleted);
1598 free (si->node_mapping);
1599 free (si->dfs);
1600 VEC_free (unsigned, heap, si->scc_stack);
1601 free (si);
1605 /* Find indirect cycles in GRAPH that occur, using strongly connected
1606 components, and note them in the indirect cycles map.
1608 This technique comes from Ben Hardekopf and Calvin Lin,
1609 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1610 Lines of Code", submitted to PLDI 2007. */
1612 static void
1613 find_indirect_cycles (constraint_graph_t graph)
1615 unsigned int i;
1616 unsigned int size = graph->size;
1617 struct scc_info *si = init_scc_info (size);
1619 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1620 if (!TEST_BIT (si->visited, i) && find (i) == i)
1621 scc_visit (graph, si, i);
1623 free_scc_info (si);
1626 /* Compute a topological ordering for GRAPH, and store the result in the
1627 topo_info structure TI. */
1629 static void
1630 compute_topo_order (constraint_graph_t graph,
1631 struct topo_info *ti)
1633 unsigned int i;
1634 unsigned int size = graph->size;
1636 for (i = 0; i != size; ++i)
1637 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1638 topo_visit (graph, ti, i);
1641 /* Structure used to for hash value numbering of pointer equivalence
1642 classes. */
1644 typedef struct equiv_class_label
1646 unsigned int equivalence_class;
1647 bitmap labels;
1648 hashval_t hashcode;
1649 } *equiv_class_label_t;
1650 typedef const struct equiv_class_label *const_equiv_class_label_t;
1652 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1653 classes. */
1654 static htab_t pointer_equiv_class_table;
1656 /* A hashtable for mapping a bitmap of labels->location equivalence
1657 classes. */
1658 static htab_t location_equiv_class_table;
1660 /* Hash function for a equiv_class_label_t */
1662 static hashval_t
1663 equiv_class_label_hash (const void *p)
1665 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1666 return ecl->hashcode;
1669 /* Equality function for two equiv_class_label_t's. */
1671 static int
1672 equiv_class_label_eq (const void *p1, const void *p2)
1674 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1675 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1676 return bitmap_equal_p (eql1->labels, eql2->labels);
1679 /* Lookup a equivalence class in TABLE by the bitmap of LABELS it
1680 contains. */
1682 static unsigned int
1683 equiv_class_lookup (htab_t table, bitmap labels)
1685 void **slot;
1686 struct equiv_class_label ecl;
1688 ecl.labels = labels;
1689 ecl.hashcode = bitmap_hash (labels);
1691 slot = htab_find_slot_with_hash (table, &ecl,
1692 ecl.hashcode, NO_INSERT);
1693 if (!slot)
1694 return 0;
1695 else
1696 return ((equiv_class_label_t) *slot)->equivalence_class;
1700 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1701 to TABLE. */
1703 static void
1704 equiv_class_add (htab_t table, unsigned int equivalence_class,
1705 bitmap labels)
1707 void **slot;
1708 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1710 ecl->labels = labels;
1711 ecl->equivalence_class = equivalence_class;
1712 ecl->hashcode = bitmap_hash (labels);
1714 slot = htab_find_slot_with_hash (table, ecl,
1715 ecl->hashcode, INSERT);
1716 gcc_assert (!*slot);
1717 *slot = (void *) ecl;
1720 /* Perform offline variable substitution.
1722 This is a worst case quadratic time way of identifying variables
1723 that must have equivalent points-to sets, including those caused by
1724 static cycles, and single entry subgraphs, in the constraint graph.
1726 The technique is described in "Exploiting Pointer and Location
1727 Equivalence to Optimize Pointer Analysis. In the 14th International
1728 Static Analysis Symposium (SAS), August 2007." It is known as the
1729 "HU" algorithm, and is equivalent to value numbering the collapsed
1730 constraint graph including evaluating unions.
1732 The general method of finding equivalence classes is as follows:
1733 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1734 Initialize all non-REF nodes to be direct nodes.
1735 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1736 variable}
1737 For each constraint containing the dereference, we also do the same
1738 thing.
1740 We then compute SCC's in the graph and unify nodes in the same SCC,
1741 including pts sets.
1743 For each non-collapsed node x:
1744 Visit all unvisited explicit incoming edges.
1745 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1746 where y->x.
1747 Lookup the equivalence class for pts(x).
1748 If we found one, equivalence_class(x) = found class.
1749 Otherwise, equivalence_class(x) = new class, and new_class is
1750 added to the lookup table.
1752 All direct nodes with the same equivalence class can be replaced
1753 with a single representative node.
1754 All unlabeled nodes (label == 0) are not pointers and all edges
1755 involving them can be eliminated.
1756 We perform these optimizations during rewrite_constraints
1758 In addition to pointer equivalence class finding, we also perform
1759 location equivalence class finding. This is the set of variables
1760 that always appear together in points-to sets. We use this to
1761 compress the size of the points-to sets. */
1763 /* Current maximum pointer equivalence class id. */
1764 static int pointer_equiv_class;
1766 /* Current maximum location equivalence class id. */
1767 static int location_equiv_class;
1769 /* Recursive routine to find strongly connected components in GRAPH,
1770 and label it's nodes with DFS numbers. */
1772 static void
1773 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1775 unsigned int i;
1776 bitmap_iterator bi;
1777 unsigned int my_dfs;
1779 gcc_assert (si->node_mapping[n] == n);
1780 SET_BIT (si->visited, n);
1781 si->dfs[n] = si->current_index ++;
1782 my_dfs = si->dfs[n];
1784 /* Visit all the successors. */
1785 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1787 unsigned int w = si->node_mapping[i];
1789 if (TEST_BIT (si->deleted, w))
1790 continue;
1792 if (!TEST_BIT (si->visited, w))
1793 condense_visit (graph, si, w);
1795 unsigned int t = si->node_mapping[w];
1796 unsigned int nnode = si->node_mapping[n];
1797 gcc_assert (nnode == n);
1799 if (si->dfs[t] < si->dfs[nnode])
1800 si->dfs[n] = si->dfs[t];
1804 /* Visit all the implicit predecessors. */
1805 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
1807 unsigned int w = si->node_mapping[i];
1809 if (TEST_BIT (si->deleted, w))
1810 continue;
1812 if (!TEST_BIT (si->visited, w))
1813 condense_visit (graph, si, w);
1815 unsigned int t = si->node_mapping[w];
1816 unsigned int nnode = si->node_mapping[n];
1817 gcc_assert (nnode == n);
1819 if (si->dfs[t] < si->dfs[nnode])
1820 si->dfs[n] = si->dfs[t];
1824 /* See if any components have been identified. */
1825 if (si->dfs[n] == my_dfs)
1827 while (VEC_length (unsigned, si->scc_stack) != 0
1828 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1830 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1831 si->node_mapping[w] = n;
1833 if (!TEST_BIT (graph->direct_nodes, w))
1834 RESET_BIT (graph->direct_nodes, n);
1836 /* Unify our nodes. */
1837 if (graph->preds[w])
1839 if (!graph->preds[n])
1840 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1841 bitmap_ior_into (graph->preds[n], graph->preds[w]);
1843 if (graph->implicit_preds[w])
1845 if (!graph->implicit_preds[n])
1846 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1847 bitmap_ior_into (graph->implicit_preds[n],
1848 graph->implicit_preds[w]);
1850 if (graph->points_to[w])
1852 if (!graph->points_to[n])
1853 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
1854 bitmap_ior_into (graph->points_to[n],
1855 graph->points_to[w]);
1857 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1859 unsigned int rep = si->node_mapping[i];
1860 graph->number_incoming[rep]++;
1863 SET_BIT (si->deleted, n);
1865 else
1866 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1869 /* Label pointer equivalences. */
1871 static void
1872 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1874 unsigned int i;
1875 bitmap_iterator bi;
1876 SET_BIT (si->visited, n);
1878 if (!graph->points_to[n])
1879 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
1881 /* Label and union our incoming edges's points to sets. */
1882 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1884 unsigned int w = si->node_mapping[i];
1885 if (!TEST_BIT (si->visited, w))
1886 label_visit (graph, si, w);
1888 /* Skip unused edges */
1889 if (w == n || graph->pointer_label[w] == 0)
1891 graph->number_incoming[w]--;
1892 continue;
1894 if (graph->points_to[w])
1895 bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
1897 /* If all incoming edges to w have been processed and
1898 graph->points_to[w] was not stored in the hash table, we can
1899 free it. */
1900 graph->number_incoming[w]--;
1901 if (!graph->number_incoming[w] && !TEST_BIT (graph->pt_used, w))
1903 BITMAP_FREE (graph->points_to[w]);
1906 /* Indirect nodes get fresh variables. */
1907 if (!TEST_BIT (graph->direct_nodes, n))
1908 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
1910 if (!bitmap_empty_p (graph->points_to[n]))
1912 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
1913 graph->points_to[n]);
1914 if (!label)
1916 SET_BIT (graph->pt_used, n);
1917 label = pointer_equiv_class++;
1918 equiv_class_add (pointer_equiv_class_table,
1919 label, graph->points_to[n]);
1921 graph->pointer_label[n] = label;
1925 /* Perform offline variable substitution, discovering equivalence
1926 classes, and eliminating non-pointer variables. */
1928 static struct scc_info *
1929 perform_var_substitution (constraint_graph_t graph)
1931 unsigned int i;
1932 unsigned int size = graph->size;
1933 struct scc_info *si = init_scc_info (size);
1935 bitmap_obstack_initialize (&iteration_obstack);
1936 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
1937 equiv_class_label_eq, free);
1938 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
1939 equiv_class_label_eq, free);
1940 pointer_equiv_class = 1;
1941 location_equiv_class = 1;
1943 /* Condense the nodes, which means to find SCC's, count incoming
1944 predecessors, and unite nodes in SCC's. */
1945 for (i = 0; i < LAST_REF_NODE; i++)
1946 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1947 condense_visit (graph, si, si->node_mapping[i]);
1949 sbitmap_zero (si->visited);
1950 /* Actually the label the nodes for pointer equivalences */
1951 for (i = 0; i < LAST_REF_NODE; i++)
1952 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1953 label_visit (graph, si, si->node_mapping[i]);
1955 /* Calculate location equivalence labels. */
1956 for (i = 0; i < FIRST_REF_NODE; i++)
1958 bitmap pointed_by;
1959 bitmap_iterator bi;
1960 unsigned int j;
1961 unsigned int label;
1963 if (!graph->pointed_by[i])
1964 continue;
1965 pointed_by = BITMAP_ALLOC (&iteration_obstack);
1967 /* Translate the pointed-by mapping for pointer equivalence
1968 labels. */
1969 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
1971 bitmap_set_bit (pointed_by,
1972 graph->pointer_label[si->node_mapping[j]]);
1974 /* The original pointed_by is now dead. */
1975 BITMAP_FREE (graph->pointed_by[i]);
1977 /* Look up the location equivalence label if one exists, or make
1978 one otherwise. */
1979 label = equiv_class_lookup (location_equiv_class_table,
1980 pointed_by);
1981 if (label == 0)
1983 label = location_equiv_class++;
1984 equiv_class_add (location_equiv_class_table,
1985 label, pointed_by);
1987 else
1989 if (dump_file && (dump_flags & TDF_DETAILS))
1990 fprintf (dump_file, "Found location equivalence for node %s\n",
1991 get_varinfo (i)->name);
1992 BITMAP_FREE (pointed_by);
1994 graph->loc_label[i] = label;
1998 if (dump_file && (dump_flags & TDF_DETAILS))
1999 for (i = 0; i < FIRST_REF_NODE; i++)
2001 bool direct_node = TEST_BIT (graph->direct_nodes, i);
2002 fprintf (dump_file,
2003 "Equivalence classes for %s node id %d:%s are pointer: %d"
2004 ", location:%d\n",
2005 direct_node ? "Direct node" : "Indirect node", i,
2006 get_varinfo (i)->name,
2007 graph->pointer_label[si->node_mapping[i]],
2008 graph->loc_label[si->node_mapping[i]]);
2011 /* Quickly eliminate our non-pointer variables. */
2013 for (i = 0; i < FIRST_REF_NODE; i++)
2015 unsigned int node = si->node_mapping[i];
2017 if (graph->pointer_label[node] == 0
2018 && TEST_BIT (graph->direct_nodes, node))
2020 if (dump_file && (dump_flags & TDF_DETAILS))
2021 fprintf (dump_file,
2022 "%s is a non-pointer variable, eliminating edges.\n",
2023 get_varinfo (node)->name);
2024 stats.nonpointer_vars++;
2025 clear_edges_for_node (graph, node);
2029 return si;
2032 /* Free information that was only necessary for variable
2033 substitution. */
2035 static void
2036 free_var_substitution_info (struct scc_info *si)
2038 free_scc_info (si);
2039 free (graph->pointer_label);
2040 free (graph->loc_label);
2041 free (graph->pointed_by);
2042 free (graph->points_to);
2043 free (graph->number_incoming);
2044 free (graph->eq_rep);
2045 sbitmap_free (graph->direct_nodes);
2046 sbitmap_free (graph->pt_used);
2047 htab_delete (pointer_equiv_class_table);
2048 htab_delete (location_equiv_class_table);
2049 bitmap_obstack_release (&iteration_obstack);
2052 /* Return an existing node that is equivalent to NODE, which has
2053 equivalence class LABEL, if one exists. Return NODE otherwise. */
2055 static unsigned int
2056 find_equivalent_node (constraint_graph_t graph,
2057 unsigned int node, unsigned int label)
2059 /* If the address version of this variable is unused, we can
2060 substitute it for anything else with the same label.
2061 Otherwise, we know the pointers are equivalent, but not the
2062 locations, and we can unite them later. */
2064 if (!bitmap_bit_p (graph->address_taken, node))
2066 gcc_assert (label < graph->size);
2068 if (graph->eq_rep[label] != -1)
2070 /* Unify the two variables since we know they are equivalent. */
2071 if (unite (graph->eq_rep[label], node))
2072 unify_nodes (graph, graph->eq_rep[label], node, false);
2073 return graph->eq_rep[label];
2075 else
2077 graph->eq_rep[label] = node;
2078 graph->pe_rep[label] = node;
2081 else
2083 gcc_assert (label < graph->size);
2084 graph->pe[node] = label;
2085 if (graph->pe_rep[label] == -1)
2086 graph->pe_rep[label] = node;
2089 return node;
2092 /* Unite pointer equivalent but not location equivalent nodes in
2093 GRAPH. This may only be performed once variable substitution is
2094 finished. */
2096 static void
2097 unite_pointer_equivalences (constraint_graph_t graph)
2099 unsigned int i;
2101 /* Go through the pointer equivalences and unite them to their
2102 representative, if they aren't already. */
2103 for (i = 0; i < graph->size; i++)
2105 unsigned int label = graph->pe[i];
2106 int label_rep = graph->pe_rep[label];
2108 if (label != i && unite (label_rep, i))
2109 unify_nodes (graph, label_rep, i, false);
2113 /* Move complex constraints to the GRAPH nodes they belong to. */
2115 static void
2116 move_complex_constraints (constraint_graph_t graph)
2118 int i;
2119 constraint_t c;
2121 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2123 if (c)
2125 struct constraint_expr lhs = c->lhs;
2126 struct constraint_expr rhs = c->rhs;
2128 if (lhs.type == DEREF)
2130 insert_into_complex (graph, lhs.var, c);
2132 else if (rhs.type == DEREF)
2134 if (!(get_varinfo (lhs.var)->is_special_var))
2135 insert_into_complex (graph, rhs.var, c);
2137 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2138 && (lhs.offset != 0 || rhs.offset != 0))
2140 insert_into_complex (graph, rhs.var, c);
2147 /* Optimize and rewrite complex constraints while performing
2148 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2149 result of perform_variable_substitution. */
2151 static void
2152 rewrite_constraints (constraint_graph_t graph,
2153 struct scc_info *si)
2155 int i;
2156 unsigned int j;
2157 constraint_t c;
2159 for (j = 0; j < graph->size; j++)
2160 gcc_assert (find (j) == j);
2162 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2164 struct constraint_expr lhs = c->lhs;
2165 struct constraint_expr rhs = c->rhs;
2166 unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
2167 unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
2168 unsigned int lhsnode, rhsnode;
2169 unsigned int lhslabel, rhslabel;
2171 lhsnode = si->node_mapping[lhsvar];
2172 rhsnode = si->node_mapping[rhsvar];
2173 lhslabel = graph->pointer_label[lhsnode];
2174 rhslabel = graph->pointer_label[rhsnode];
2176 /* See if it is really a non-pointer variable, and if so, ignore
2177 the constraint. */
2178 if (lhslabel == 0)
2180 if (!TEST_BIT (graph->direct_nodes, lhsnode))
2181 lhslabel = graph->pointer_label[lhsnode] = pointer_equiv_class++;
2182 else
2184 if (dump_file && (dump_flags & TDF_DETAILS))
2187 fprintf (dump_file, "%s is a non-pointer variable,"
2188 "ignoring constraint:",
2189 get_varinfo (lhs.var)->name);
2190 dump_constraint (dump_file, c);
2192 VEC_replace (constraint_t, constraints, i, NULL);
2193 continue;
2197 if (rhslabel == 0)
2199 if (!TEST_BIT (graph->direct_nodes, rhsnode))
2200 rhslabel = graph->pointer_label[rhsnode] = pointer_equiv_class++;
2201 else
2203 if (dump_file && (dump_flags & TDF_DETAILS))
2206 fprintf (dump_file, "%s is a non-pointer variable,"
2207 "ignoring constraint:",
2208 get_varinfo (rhs.var)->name);
2209 dump_constraint (dump_file, c);
2211 VEC_replace (constraint_t, constraints, i, NULL);
2212 continue;
2216 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2217 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2218 c->lhs.var = lhsvar;
2219 c->rhs.var = rhsvar;
2224 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2225 part of an SCC, false otherwise. */
2227 static bool
2228 eliminate_indirect_cycles (unsigned int node)
2230 if (graph->indirect_cycles[node] != -1
2231 && !bitmap_empty_p (get_varinfo (node)->solution))
2233 unsigned int i;
2234 VEC(unsigned,heap) *queue = NULL;
2235 int queuepos;
2236 unsigned int to = find (graph->indirect_cycles[node]);
2237 bitmap_iterator bi;
2239 /* We can't touch the solution set and call unify_nodes
2240 at the same time, because unify_nodes is going to do
2241 bitmap unions into it. */
2243 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2245 if (find (i) == i && i != to)
2247 if (unite (to, i))
2248 VEC_safe_push (unsigned, heap, queue, i);
2252 for (queuepos = 0;
2253 VEC_iterate (unsigned, queue, queuepos, i);
2254 queuepos++)
2256 unify_nodes (graph, to, i, true);
2258 VEC_free (unsigned, heap, queue);
2259 return true;
2261 return false;
2264 /* Solve the constraint graph GRAPH using our worklist solver.
2265 This is based on the PW* family of solvers from the "Efficient Field
2266 Sensitive Pointer Analysis for C" paper.
2267 It works by iterating over all the graph nodes, processing the complex
2268 constraints and propagating the copy constraints, until everything stops
2269 changed. This corresponds to steps 6-8 in the solving list given above. */
2271 static void
2272 solve_graph (constraint_graph_t graph)
2274 unsigned int size = graph->size;
2275 unsigned int i;
2276 bitmap pts;
2278 changed_count = 0;
2279 changed = sbitmap_alloc (size);
2280 sbitmap_zero (changed);
2282 /* Mark all initial non-collapsed nodes as changed. */
2283 for (i = 0; i < size; i++)
2285 varinfo_t ivi = get_varinfo (i);
2286 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2287 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2288 || VEC_length (constraint_t, graph->complex[i]) > 0))
2290 SET_BIT (changed, i);
2291 changed_count++;
2295 /* Allocate a bitmap to be used to store the changed bits. */
2296 pts = BITMAP_ALLOC (&pta_obstack);
2298 while (changed_count > 0)
2300 unsigned int i;
2301 struct topo_info *ti = init_topo_info ();
2302 stats.iterations++;
2304 bitmap_obstack_initialize (&iteration_obstack);
2306 compute_topo_order (graph, ti);
2308 while (VEC_length (unsigned, ti->topo_order) != 0)
2311 i = VEC_pop (unsigned, ti->topo_order);
2313 /* If this variable is not a representative, skip it. */
2314 if (find (i) != i)
2315 continue;
2317 /* In certain indirect cycle cases, we may merge this
2318 variable to another. */
2319 if (eliminate_indirect_cycles (i) && find (i) != i)
2320 continue;
2322 /* If the node has changed, we need to process the
2323 complex constraints and outgoing edges again. */
2324 if (TEST_BIT (changed, i))
2326 unsigned int j;
2327 constraint_t c;
2328 bitmap solution;
2329 VEC(constraint_t,heap) *complex = graph->complex[i];
2330 bool solution_empty;
2332 RESET_BIT (changed, i);
2333 changed_count--;
2335 /* Compute the changed set of solution bits. */
2336 bitmap_and_compl (pts, get_varinfo (i)->solution,
2337 get_varinfo (i)->oldsolution);
2339 if (bitmap_empty_p (pts))
2340 continue;
2342 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2344 solution = get_varinfo (i)->solution;
2345 solution_empty = bitmap_empty_p (solution);
2347 /* Process the complex constraints */
2348 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2350 /* XXX: This is going to unsort the constraints in
2351 some cases, which will occasionally add duplicate
2352 constraints during unification. This does not
2353 affect correctness. */
2354 c->lhs.var = find (c->lhs.var);
2355 c->rhs.var = find (c->rhs.var);
2357 /* The only complex constraint that can change our
2358 solution to non-empty, given an empty solution,
2359 is a constraint where the lhs side is receiving
2360 some set from elsewhere. */
2361 if (!solution_empty || c->lhs.type != DEREF)
2362 do_complex_constraint (graph, c, pts);
2365 solution_empty = bitmap_empty_p (solution);
2367 if (!solution_empty)
2369 bitmap_iterator bi;
2371 /* Propagate solution to all successors. */
2372 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2373 0, j, bi)
2375 bitmap tmp;
2376 bool flag;
2378 unsigned int to = find (j);
2379 tmp = get_varinfo (to)->solution;
2380 flag = false;
2382 /* Don't try to propagate to ourselves. */
2383 if (to == i)
2384 continue;
2386 flag = set_union_with_increment (tmp, pts, 0);
2388 if (flag)
2390 get_varinfo (to)->solution = tmp;
2391 if (!TEST_BIT (changed, to))
2393 SET_BIT (changed, to);
2394 changed_count++;
2401 free_topo_info (ti);
2402 bitmap_obstack_release (&iteration_obstack);
2405 BITMAP_FREE (pts);
2406 sbitmap_free (changed);
2407 bitmap_obstack_release (&oldpta_obstack);
2410 /* Map from trees to variable infos. */
2411 static struct pointer_map_t *vi_for_tree;
2414 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2416 static void
2417 insert_vi_for_tree (tree t, varinfo_t vi)
2419 void **slot = pointer_map_insert (vi_for_tree, t);
2420 gcc_assert (vi);
2421 gcc_assert (*slot == NULL);
2422 *slot = vi;
2425 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2426 exist in the map, return NULL, otherwise, return the varinfo we found. */
2428 static varinfo_t
2429 lookup_vi_for_tree (tree t)
2431 void **slot = pointer_map_contains (vi_for_tree, t);
2432 if (slot == NULL)
2433 return NULL;
2435 return (varinfo_t) *slot;
2438 /* Return a printable name for DECL */
2440 static const char *
2441 alias_get_name (tree decl)
2443 const char *res = get_name (decl);
2444 char *temp;
2445 int num_printed = 0;
2447 if (res != NULL)
2448 return res;
2450 res = "NULL";
2451 if (!dump_file)
2452 return res;
2454 if (TREE_CODE (decl) == SSA_NAME)
2456 num_printed = asprintf (&temp, "%s_%u",
2457 alias_get_name (SSA_NAME_VAR (decl)),
2458 SSA_NAME_VERSION (decl));
2460 else if (DECL_P (decl))
2462 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2464 if (num_printed > 0)
2466 res = ggc_strdup (temp);
2467 free (temp);
2469 return res;
2472 /* Find the variable id for tree T in the map.
2473 If T doesn't exist in the map, create an entry for it and return it. */
2475 static varinfo_t
2476 get_vi_for_tree (tree t)
2478 void **slot = pointer_map_contains (vi_for_tree, t);
2479 if (slot == NULL)
2480 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2482 return (varinfo_t) *slot;
2485 /* Get a constraint expression from an SSA_VAR_P node. */
2487 static struct constraint_expr
2488 get_constraint_exp_from_ssa_var (tree t)
2490 struct constraint_expr cexpr;
2492 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2494 /* For parameters, get at the points-to set for the actual parm
2495 decl. */
2496 if (TREE_CODE (t) == SSA_NAME
2497 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2498 && SSA_NAME_IS_DEFAULT_DEF (t))
2499 return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));
2501 cexpr.type = SCALAR;
2503 cexpr.var = get_vi_for_tree (t)->id;
2504 /* If we determine the result is "anything", and we know this is readonly,
2505 say it points to readonly memory instead. */
2506 if (cexpr.var == anything_id && TREE_READONLY (t))
2508 cexpr.type = ADDRESSOF;
2509 cexpr.var = readonly_id;
2512 cexpr.offset = 0;
2513 return cexpr;
2516 /* Process a completed constraint T, and add it to the constraint
2517 list. FROM_CALL is true if this is a constraint coming from a
2518 call, which means any DEREFs we see are "may-deref's", not
2519 "must-deref"'s. */
2521 static void
2522 process_constraint_1 (constraint_t t, bool from_call)
2524 struct constraint_expr rhs = t->rhs;
2525 struct constraint_expr lhs = t->lhs;
2527 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2528 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2530 if (!from_call)
2532 if (lhs.type == DEREF)
2533 get_varinfo (lhs.var)->directly_dereferenced = true;
2534 if (rhs.type == DEREF)
2535 get_varinfo (rhs.var)->directly_dereferenced = true;
2538 if (!use_field_sensitive)
2540 t->rhs.offset = 0;
2541 t->lhs.offset = 0;
2544 /* ANYTHING == ANYTHING is pointless. */
2545 if (lhs.var == anything_id && rhs.var == anything_id)
2546 return;
2548 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING) */
2549 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
2551 rhs = t->lhs;
2552 t->lhs = t->rhs;
2553 t->rhs = rhs;
2554 process_constraint_1 (t, from_call);
2556 /* This can happen in our IR with things like n->a = *p */
2557 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2559 /* Split into tmp = *rhs, *lhs = tmp */
2560 tree rhsdecl = get_varinfo (rhs.var)->decl;
2561 tree pointertype = TREE_TYPE (rhsdecl);
2562 tree pointedtotype = TREE_TYPE (pointertype);
2563 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2564 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2566 /* If this is an aggregate of known size, we should have passed
2567 this off to do_structure_copy, and it should have broken it
2568 up. */
2569 gcc_assert (!AGGREGATE_TYPE_P (pointedtotype)
2570 || get_varinfo (rhs.var)->is_unknown_size_var);
2572 process_constraint_1 (new_constraint (tmplhs, rhs), from_call);
2573 process_constraint_1 (new_constraint (lhs, tmplhs), from_call);
2575 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2577 /* Split into tmp = &rhs, *lhs = tmp */
2578 tree rhsdecl = get_varinfo (rhs.var)->decl;
2579 tree pointertype = TREE_TYPE (rhsdecl);
2580 tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp");
2581 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2583 process_constraint_1 (new_constraint (tmplhs, rhs), from_call);
2584 process_constraint_1 (new_constraint (lhs, tmplhs), from_call);
2586 else
2588 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2589 VEC_safe_push (constraint_t, heap, constraints, t);
2594 /* Process constraint T, performing various simplifications and then
2595 adding it to our list of overall constraints. */
2597 static void
2598 process_constraint (constraint_t t)
2600 process_constraint_1 (t, false);
2603 /* Return true if T is a variable of a type that could contain
2604 pointers. */
2606 static bool
2607 could_have_pointers (tree t)
2609 tree type = TREE_TYPE (t);
2611 if (POINTER_TYPE_P (type)
2612 || AGGREGATE_TYPE_P (type)
2613 || TREE_CODE (type) == COMPLEX_TYPE)
2614 return true;
2616 return false;
2619 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2620 structure. */
2622 static unsigned HOST_WIDE_INT
2623 bitpos_of_field (const tree fdecl)
2626 if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
2627 || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
2628 return -1;
2630 return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
2631 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
2635 /* Return true if an access to [ACCESSPOS, ACCESSSIZE]
2636 overlaps with a field at [FIELDPOS, FIELDSIZE] */
2638 static bool
2639 offset_overlaps_with_access (const unsigned HOST_WIDE_INT fieldpos,
2640 const unsigned HOST_WIDE_INT fieldsize,
2641 const unsigned HOST_WIDE_INT accesspos,
2642 const unsigned HOST_WIDE_INT accesssize)
2644 if (fieldpos == accesspos && fieldsize == accesssize)
2645 return true;
2646 if (accesspos >= fieldpos && accesspos < (fieldpos + fieldsize))
2647 return true;
2648 if (accesspos < fieldpos && (accesspos + accesssize > fieldpos))
2649 return true;
2651 return false;
2654 /* Given a COMPONENT_REF T, return the constraint_expr for it. */
2656 static void
2657 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results)
2659 tree orig_t = t;
2660 HOST_WIDE_INT bitsize = -1;
2661 HOST_WIDE_INT bitmaxsize = -1;
2662 HOST_WIDE_INT bitpos;
2663 tree forzero;
2664 struct constraint_expr *result;
2665 unsigned int beforelength = VEC_length (ce_s, *results);
2667 /* Some people like to do cute things like take the address of
2668 &0->a.b */
2669 forzero = t;
2670 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2671 forzero = TREE_OPERAND (forzero, 0);
2673 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2675 struct constraint_expr temp;
2677 temp.offset = 0;
2678 temp.var = integer_id;
2679 temp.type = SCALAR;
2680 VEC_safe_push (ce_s, heap, *results, &temp);
2681 return;
2684 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2686 /* String constants are readonly, so there is nothing to really do
2687 here. */
2688 if (TREE_CODE (t) == STRING_CST)
2689 return;
2691 get_constraint_for (t, results);
2692 result = VEC_last (ce_s, *results);
2693 result->offset = bitpos;
2695 gcc_assert (beforelength + 1 == VEC_length (ce_s, *results));
2697 /* This can also happen due to weird offsetof type macros. */
2698 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2699 result->type = SCALAR;
2701 if (result->type == SCALAR)
2703 /* In languages like C, you can access one past the end of an
2704 array. You aren't allowed to dereference it, so we can
2705 ignore this constraint. When we handle pointer subtraction,
2706 we may have to do something cute here. */
2708 if (result->offset < get_varinfo (result->var)->fullsize
2709 && bitmaxsize != 0)
2711 /* It's also not true that the constraint will actually start at the
2712 right offset, it may start in some padding. We only care about
2713 setting the constraint to the first actual field it touches, so
2714 walk to find it. */
2715 varinfo_t curr;
2716 for (curr = get_varinfo (result->var); curr; curr = curr->next)
2718 if (offset_overlaps_with_access (curr->offset, curr->size,
2719 result->offset, bitmaxsize))
2721 result->var = curr->id;
2722 break;
2725 /* assert that we found *some* field there. The user couldn't be
2726 accessing *only* padding. */
2727 /* Still the user could access one past the end of an array
2728 embedded in a struct resulting in accessing *only* padding. */
2729 gcc_assert (curr || ref_contains_array_ref (orig_t));
2731 else if (bitmaxsize == 0)
2733 if (dump_file && (dump_flags & TDF_DETAILS))
2734 fprintf (dump_file, "Access to zero-sized part of variable,"
2735 "ignoring\n");
2737 else
2738 if (dump_file && (dump_flags & TDF_DETAILS))
2739 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
2741 result->offset = 0;
2743 else if (bitmaxsize == -1)
2745 /* We can't handle DEREF constraints with unknown size, we'll
2746 get the wrong answer. Punt and return anything. */
2747 result->var = anything_id;
2748 result->offset = 0;
2753 /* Dereference the constraint expression CONS, and return the result.
2754 DEREF (ADDRESSOF) = SCALAR
2755 DEREF (SCALAR) = DEREF
2756 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2757 This is needed so that we can handle dereferencing DEREF constraints. */
2759 static void
2760 do_deref (VEC (ce_s, heap) **constraints)
2762 struct constraint_expr *c;
2763 unsigned int i = 0;
2765 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
2767 if (c->type == SCALAR)
2768 c->type = DEREF;
2769 else if (c->type == ADDRESSOF)
2770 c->type = SCALAR;
2771 else if (c->type == DEREF)
2773 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
2774 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2775 process_constraint (new_constraint (tmplhs, *c));
2776 c->var = tmplhs.var;
2778 else
2779 gcc_unreachable ();
2783 /* Given a tree T, return the constraint expression for it. */
2785 static void
2786 get_constraint_for (tree t, VEC (ce_s, heap) **results)
2788 struct constraint_expr temp;
2790 /* x = integer is all glommed to a single variable, which doesn't
2791 point to anything by itself. That is, of course, unless it is an
2792 integer constant being treated as a pointer, in which case, we
2793 will return that this is really the addressof anything. This
2794 happens below, since it will fall into the default case. The only
2795 case we know something about an integer treated like a pointer is
2796 when it is the NULL pointer, and then we just say it points to
2797 NULL. */
2798 if (TREE_CODE (t) == INTEGER_CST
2799 && integer_zerop (t))
2801 temp.var = nothing_id;
2802 temp.type = ADDRESSOF;
2803 temp.offset = 0;
2804 VEC_safe_push (ce_s, heap, *results, &temp);
2805 return;
2808 switch (TREE_CODE_CLASS (TREE_CODE (t)))
2810 case tcc_expression:
2811 case tcc_vl_exp:
2813 switch (TREE_CODE (t))
2815 case ADDR_EXPR:
2817 struct constraint_expr *c;
2818 unsigned int i;
2819 tree exp = TREE_OPERAND (t, 0);
2820 tree pttype = TREE_TYPE (TREE_TYPE (t));
2822 get_constraint_for (exp, results);
2825 /* Complex types are special. Taking the address of one
2826 allows you to access either part of it through that
2827 pointer. */
2828 if (VEC_length (ce_s, *results) == 1 &&
2829 TREE_CODE (pttype) == COMPLEX_TYPE)
2831 struct constraint_expr *origrhs;
2832 varinfo_t origvar;
2833 struct constraint_expr tmp;
2835 gcc_assert (VEC_length (ce_s, *results) == 1);
2836 origrhs = VEC_last (ce_s, *results);
2837 tmp = *origrhs;
2838 VEC_pop (ce_s, *results);
2839 origvar = get_varinfo (origrhs->var);
2840 for (; origvar; origvar = origvar->next)
2842 tmp.var = origvar->id;
2843 VEC_safe_push (ce_s, heap, *results, &tmp);
2847 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
2849 if (c->type == DEREF)
2850 c->type = SCALAR;
2851 else
2852 c->type = ADDRESSOF;
2854 return;
2856 break;
2857 case CALL_EXPR:
2858 /* XXX: In interprocedural mode, if we didn't have the
2859 body, we would need to do *each pointer argument =
2860 &ANYTHING added. */
2861 if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
2863 varinfo_t vi;
2864 tree heapvar = heapvar_lookup (t);
2866 if (heapvar == NULL)
2868 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
2869 DECL_EXTERNAL (heapvar) = 1;
2870 get_var_ann (heapvar)->is_heapvar = 1;
2871 if (gimple_referenced_vars (cfun))
2872 add_referenced_var (heapvar);
2873 heapvar_insert (t, heapvar);
2876 temp.var = create_variable_info_for (heapvar,
2877 alias_get_name (heapvar));
2879 vi = get_varinfo (temp.var);
2880 vi->is_artificial_var = 1;
2881 vi->is_heap_var = 1;
2882 temp.type = ADDRESSOF;
2883 temp.offset = 0;
2884 VEC_safe_push (ce_s, heap, *results, &temp);
2885 return;
2887 else
2889 temp.var = anything_id;
2890 temp.type = SCALAR;
2891 temp.offset = 0;
2892 VEC_safe_push (ce_s, heap, *results, &temp);
2893 return;
2895 break;
2896 default:
2898 temp.type = ADDRESSOF;
2899 temp.var = anything_id;
2900 temp.offset = 0;
2901 VEC_safe_push (ce_s, heap, *results, &temp);
2902 return;
2906 case tcc_reference:
2908 switch (TREE_CODE (t))
2910 case INDIRECT_REF:
2912 get_constraint_for (TREE_OPERAND (t, 0), results);
2913 do_deref (results);
2914 return;
2916 case ARRAY_REF:
2917 case ARRAY_RANGE_REF:
2918 case COMPONENT_REF:
2919 get_constraint_for_component_ref (t, results);
2920 return;
2921 default:
2923 temp.type = ADDRESSOF;
2924 temp.var = anything_id;
2925 temp.offset = 0;
2926 VEC_safe_push (ce_s, heap, *results, &temp);
2927 return;
2931 case tcc_unary:
2933 switch (TREE_CODE (t))
2935 case NOP_EXPR:
2936 case CONVERT_EXPR:
2937 case NON_LVALUE_EXPR:
2939 tree op = TREE_OPERAND (t, 0);
2941 /* Cast from non-pointer to pointers are bad news for us.
2942 Anything else, we see through */
2943 if (!(POINTER_TYPE_P (TREE_TYPE (t))
2944 && ! POINTER_TYPE_P (TREE_TYPE (op))))
2946 get_constraint_for (op, results);
2947 return;
2950 /* FALLTHRU */
2952 default:
2954 temp.type = ADDRESSOF;
2955 temp.var = anything_id;
2956 temp.offset = 0;
2957 VEC_safe_push (ce_s, heap, *results, &temp);
2958 return;
2962 case tcc_exceptional:
2964 switch (TREE_CODE (t))
2966 case PHI_NODE:
2968 get_constraint_for (PHI_RESULT (t), results);
2969 return;
2971 break;
2972 case SSA_NAME:
2974 struct constraint_expr temp;
2975 temp = get_constraint_exp_from_ssa_var (t);
2976 VEC_safe_push (ce_s, heap, *results, &temp);
2977 return;
2979 break;
2980 default:
2982 temp.type = ADDRESSOF;
2983 temp.var = anything_id;
2984 temp.offset = 0;
2985 VEC_safe_push (ce_s, heap, *results, &temp);
2986 return;
2990 case tcc_declaration:
2992 struct constraint_expr temp;
2993 temp = get_constraint_exp_from_ssa_var (t);
2994 VEC_safe_push (ce_s, heap, *results, &temp);
2995 return;
2997 default:
2999 temp.type = ADDRESSOF;
3000 temp.var = anything_id;
3001 temp.offset = 0;
3002 VEC_safe_push (ce_s, heap, *results, &temp);
3003 return;
3009 /* Handle the structure copy case where we have a simple structure copy
3010 between LHS and RHS that is of SIZE (in bits)
3012 For each field of the lhs variable (lhsfield)
3013 For each field of the rhs variable at lhsfield.offset (rhsfield)
3014 add the constraint lhsfield = rhsfield
3016 If we fail due to some kind of type unsafety or other thing we
3017 can't handle, return false. We expect the caller to collapse the
3018 variable in that case. */
3020 static bool
3021 do_simple_structure_copy (const struct constraint_expr lhs,
3022 const struct constraint_expr rhs,
3023 const unsigned HOST_WIDE_INT size)
3025 varinfo_t p = get_varinfo (lhs.var);
3026 unsigned HOST_WIDE_INT pstart, last;
3027 pstart = p->offset;
3028 last = p->offset + size;
3029 for (; p && p->offset < last; p = p->next)
3031 varinfo_t q;
3032 struct constraint_expr templhs = lhs;
3033 struct constraint_expr temprhs = rhs;
3034 unsigned HOST_WIDE_INT fieldoffset;
3036 templhs.var = p->id;
3037 q = get_varinfo (temprhs.var);
3038 fieldoffset = p->offset - pstart;
3039 q = first_vi_for_offset (q, q->offset + fieldoffset);
3040 if (!q)
3041 return false;
3042 temprhs.var = q->id;
3043 process_constraint (new_constraint (templhs, temprhs));
3045 return true;
3049 /* Handle the structure copy case where we have a structure copy between a
3050 aggregate on the LHS and a dereference of a pointer on the RHS
3051 that is of SIZE (in bits)
3053 For each field of the lhs variable (lhsfield)
3054 rhs.offset = lhsfield->offset
3055 add the constraint lhsfield = rhs
3058 static void
3059 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
3060 const struct constraint_expr rhs,
3061 const unsigned HOST_WIDE_INT size)
3063 varinfo_t p = get_varinfo (lhs.var);
3064 unsigned HOST_WIDE_INT pstart,last;
3065 pstart = p->offset;
3066 last = p->offset + size;
3068 for (; p && p->offset < last; p = p->next)
3070 varinfo_t q;
3071 struct constraint_expr templhs = lhs;
3072 struct constraint_expr temprhs = rhs;
3073 unsigned HOST_WIDE_INT fieldoffset;
3076 if (templhs.type == SCALAR)
3077 templhs.var = p->id;
3078 else
3079 templhs.offset = p->offset;
3081 q = get_varinfo (temprhs.var);
3082 fieldoffset = p->offset - pstart;
3083 temprhs.offset += fieldoffset;
3084 process_constraint (new_constraint (templhs, temprhs));
3088 /* Handle the structure copy case where we have a structure copy
3089 between an aggregate on the RHS and a dereference of a pointer on
3090 the LHS that is of SIZE (in bits)
3092 For each field of the rhs variable (rhsfield)
3093 lhs.offset = rhsfield->offset
3094 add the constraint lhs = rhsfield
3097 static void
3098 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
3099 const struct constraint_expr rhs,
3100 const unsigned HOST_WIDE_INT size)
3102 varinfo_t p = get_varinfo (rhs.var);
3103 unsigned HOST_WIDE_INT pstart,last;
3104 pstart = p->offset;
3105 last = p->offset + size;
3107 for (; p && p->offset < last; p = p->next)
3109 varinfo_t q;
3110 struct constraint_expr templhs = lhs;
3111 struct constraint_expr temprhs = rhs;
3112 unsigned HOST_WIDE_INT fieldoffset;
3115 if (temprhs.type == SCALAR)
3116 temprhs.var = p->id;
3117 else
3118 temprhs.offset = p->offset;
3120 q = get_varinfo (templhs.var);
3121 fieldoffset = p->offset - pstart;
3122 templhs.offset += fieldoffset;
3123 process_constraint (new_constraint (templhs, temprhs));
3127 /* Sometimes, frontends like to give us bad type information. This
3128 function will collapse all the fields from VAR to the end of VAR,
3129 into VAR, so that we treat those fields as a single variable.
3130 We return the variable they were collapsed into. */
3132 static unsigned int
3133 collapse_rest_of_var (unsigned int var)
3135 varinfo_t currvar = get_varinfo (var);
3136 varinfo_t field;
3138 for (field = currvar->next; field; field = field->next)
3140 if (dump_file)
3141 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
3142 field->name, currvar->name);
3144 gcc_assert (!field->collapsed_to);
3145 field->collapsed_to = currvar;
3148 currvar->next = NULL;
3149 currvar->size = currvar->fullsize - currvar->offset;
3151 return currvar->id;
3154 /* Handle aggregate copies by expanding into copies of the respective
3155 fields of the structures. */
3157 static void
3158 do_structure_copy (tree lhsop, tree rhsop)
3160 struct constraint_expr lhs, rhs, tmp;
3161 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3162 varinfo_t p;
3163 unsigned HOST_WIDE_INT lhssize;
3164 unsigned HOST_WIDE_INT rhssize;
3166 get_constraint_for (lhsop, &lhsc);
3167 get_constraint_for (rhsop, &rhsc);
3168 gcc_assert (VEC_length (ce_s, lhsc) == 1);
3169 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3170 lhs = *(VEC_last (ce_s, lhsc));
3171 rhs = *(VEC_last (ce_s, rhsc));
3173 VEC_free (ce_s, heap, lhsc);
3174 VEC_free (ce_s, heap, rhsc);
3176 /* If we have special var = x, swap it around. */
3177 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
3179 tmp = lhs;
3180 lhs = rhs;
3181 rhs = tmp;
3184 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
3185 possible it's something we could handle. However, most cases falling
3186 into this are dealing with transparent unions, which are slightly
3187 weird. */
3188 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
3190 rhs.type = ADDRESSOF;
3191 rhs.var = anything_id;
3194 /* If the RHS is a special var, or an addressof, set all the LHS fields to
3195 that special var. */
3196 if (rhs.var <= integer_id)
3198 for (p = get_varinfo (lhs.var); p; p = p->next)
3200 struct constraint_expr templhs = lhs;
3201 struct constraint_expr temprhs = rhs;
3203 if (templhs.type == SCALAR )
3204 templhs.var = p->id;
3205 else
3206 templhs.offset += p->offset;
3207 process_constraint (new_constraint (templhs, temprhs));
3210 else
3212 tree rhstype = TREE_TYPE (rhsop);
3213 tree lhstype = TREE_TYPE (lhsop);
3214 tree rhstypesize;
3215 tree lhstypesize;
3217 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
3218 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
3220 /* If we have a variably sized types on the rhs or lhs, and a deref
3221 constraint, add the constraint, lhsconstraint = &ANYTHING.
3222 This is conservatively correct because either the lhs is an unknown
3223 sized var (if the constraint is SCALAR), or the lhs is a DEREF
3224 constraint, and every variable it can point to must be unknown sized
3225 anyway, so we don't need to worry about fields at all. */
3226 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
3227 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
3229 rhs.var = anything_id;
3230 rhs.type = ADDRESSOF;
3231 rhs.offset = 0;
3232 process_constraint (new_constraint (lhs, rhs));
3233 return;
3236 /* The size only really matters insofar as we don't set more or less of
3237 the variable. If we hit an unknown size var, the size should be the
3238 whole darn thing. */
3239 if (get_varinfo (rhs.var)->is_unknown_size_var)
3240 rhssize = ~0;
3241 else
3242 rhssize = TREE_INT_CST_LOW (rhstypesize);
3244 if (get_varinfo (lhs.var)->is_unknown_size_var)
3245 lhssize = ~0;
3246 else
3247 lhssize = TREE_INT_CST_LOW (lhstypesize);
3250 if (rhs.type == SCALAR && lhs.type == SCALAR)
3252 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
3254 lhs.var = collapse_rest_of_var (lhs.var);
3255 rhs.var = collapse_rest_of_var (rhs.var);
3256 lhs.offset = 0;
3257 rhs.offset = 0;
3258 lhs.type = SCALAR;
3259 rhs.type = SCALAR;
3260 process_constraint (new_constraint (lhs, rhs));
3263 else if (lhs.type != DEREF && rhs.type == DEREF)
3264 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3265 else if (lhs.type == DEREF && rhs.type != DEREF)
3266 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3267 else
3269 tree pointedtotype = lhstype;
3270 tree tmpvar;
3272 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
3273 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
3274 do_structure_copy (tmpvar, rhsop);
3275 do_structure_copy (lhsop, tmpvar);
3281 /* Update related alias information kept in AI. This is used when
3282 building name tags, alias sets and deciding grouping heuristics.
3283 STMT is the statement to process. This function also updates
3284 ADDRESSABLE_VARS. */
3286 static void
3287 update_alias_info (tree stmt, struct alias_info *ai)
3289 bitmap addr_taken;
3290 use_operand_p use_p;
3291 ssa_op_iter iter;
3292 bool stmt_dereferences_ptr_p;
3293 enum escape_type stmt_escape_type = is_escape_site (stmt);
3294 struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);
3296 stmt_dereferences_ptr_p = false;
3298 if (stmt_escape_type == ESCAPE_TO_CALL
3299 || stmt_escape_type == ESCAPE_TO_PURE_CONST)
3301 mem_ref_stats->num_call_sites++;
3302 if (stmt_escape_type == ESCAPE_TO_PURE_CONST)
3303 mem_ref_stats->num_pure_const_call_sites++;
3305 else if (stmt_escape_type == ESCAPE_TO_ASM)
3306 mem_ref_stats->num_asm_sites++;
3308 /* Mark all the variables whose address are taken by the statement. */
3309 addr_taken = addresses_taken (stmt);
3310 if (addr_taken)
3312 bitmap_ior_into (gimple_addressable_vars (cfun), addr_taken);
3314 /* If STMT is an escape point, all the addresses taken by it are
3315 call-clobbered. */
3316 if (stmt_escape_type != NO_ESCAPE)
3318 bitmap_iterator bi;
3319 unsigned i;
3321 EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
3323 tree rvar = referenced_var (i);
3324 if (!unmodifiable_var_p (rvar))
3325 mark_call_clobbered (rvar, stmt_escape_type);
3330 /* Process each operand use. For pointers, determine whether they
3331 are dereferenced by the statement, or whether their value
3332 escapes, etc. */
3333 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
3335 tree op, var;
3336 var_ann_t v_ann;
3337 struct ptr_info_def *pi;
3338 unsigned num_uses, num_loads, num_stores;
3340 op = USE_FROM_PTR (use_p);
3342 /* If STMT is a PHI node, OP may be an ADDR_EXPR. If so, add it
3343 to the set of addressable variables. */
3344 if (TREE_CODE (op) == ADDR_EXPR)
3346 bitmap addressable_vars = gimple_addressable_vars (cfun);
3348 gcc_assert (TREE_CODE (stmt) == PHI_NODE);
3349 gcc_assert (addressable_vars);
3351 /* PHI nodes don't have annotations for pinning the set
3352 of addresses taken, so we collect them here.
3354 FIXME, should we allow PHI nodes to have annotations
3355 so that they can be treated like regular statements?
3356 Currently, they are treated as second-class
3357 statements. */
3358 add_to_addressable_set (TREE_OPERAND (op, 0), &addressable_vars);
3359 continue;
3362 /* Ignore constants (they may occur in PHI node arguments). */
3363 if (TREE_CODE (op) != SSA_NAME)
3364 continue;
3366 var = SSA_NAME_VAR (op);
3367 v_ann = var_ann (var);
3369 /* The base variable of an SSA name must be a GIMPLE register, and thus
3370 it cannot be aliased. */
3371 gcc_assert (!may_be_aliased (var));
3373 /* We are only interested in pointers. */
3374 if (!POINTER_TYPE_P (TREE_TYPE (op)))
3375 continue;
3377 pi = get_ptr_info (op);
3379 /* Add OP to AI->PROCESSED_PTRS, if it's not there already. */
3380 if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
3382 SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
3383 VEC_safe_push (tree, heap, ai->processed_ptrs, op);
3386 /* If STMT is a PHI node, then it will not have pointer
3387 dereferences and it will not be an escape point. */
3388 if (TREE_CODE (stmt) == PHI_NODE)
3389 continue;
3391 /* Determine whether OP is a dereferenced pointer, and if STMT
3392 is an escape point, whether OP escapes. */
3393 count_uses_and_derefs (op, stmt, &num_uses, &num_loads, &num_stores);
3395 /* Handle a corner case involving address expressions of the
3396 form '&PTR->FLD'. The problem with these expressions is that
3397 they do not represent a dereference of PTR. However, if some
3398 other transformation propagates them into an INDIRECT_REF
3399 expression, we end up with '*(&PTR->FLD)' which is folded
3400 into 'PTR->FLD'.
3402 So, if the original code had no other dereferences of PTR,
3403 the aliaser will not create memory tags for it, and when
3404 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
3405 memory operations will receive no VDEF/VUSE operands.
3407 One solution would be to have count_uses_and_derefs consider
3408 &PTR->FLD a dereference of PTR. But that is wrong, since it
3409 is not really a dereference but an offset calculation.
3411 What we do here is to recognize these special ADDR_EXPR
3412 nodes. Since these expressions are never GIMPLE values (they
3413 are not GIMPLE invariants), they can only appear on the RHS
3414 of an assignment and their base address is always an
3415 INDIRECT_REF expression. */
3416 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3417 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR
3418 && !is_gimple_val (GIMPLE_STMT_OPERAND (stmt, 1)))
3420 /* If the RHS if of the form &PTR->FLD and PTR == OP, then
3421 this represents a potential dereference of PTR. */
3422 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
3423 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3424 if (TREE_CODE (base) == INDIRECT_REF
3425 && TREE_OPERAND (base, 0) == op)
3426 num_loads++;
3429 if (num_loads + num_stores > 0)
3431 /* Mark OP as dereferenced. In a subsequent pass,
3432 dereferenced pointers that point to a set of
3433 variables will be assigned a name tag to alias
3434 all the variables OP points to. */
3435 pi->is_dereferenced = 1;
3437 /* If this is a store operation, mark OP as being
3438 dereferenced to store, otherwise mark it as being
3439 dereferenced to load. */
3440 if (num_stores > 0)
3441 pointer_set_insert (ai->dereferenced_ptrs_store, var);
3442 else
3443 pointer_set_insert (ai->dereferenced_ptrs_load, var);
3445 /* Update the frequency estimate for all the dereferences of
3446 pointer OP. */
3447 update_mem_sym_stats_from_stmt (op, stmt, num_loads, num_stores);
3449 /* Indicate that STMT contains pointer dereferences. */
3450 stmt_dereferences_ptr_p = true;
3453 if (stmt_escape_type != NO_ESCAPE && num_loads + num_stores < num_uses)
3455 /* If STMT is an escape point and STMT contains at
3456 least one direct use of OP, then the value of OP
3457 escapes and so the pointed-to variables need to
3458 be marked call-clobbered. */
3459 pi->value_escapes_p = 1;
3460 pi->escape_mask |= stmt_escape_type;
3462 /* If the statement makes a function call, assume
3463 that pointer OP will be dereferenced in a store
3464 operation inside the called function. */
3465 if (get_call_expr_in (stmt)
3466 || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
3468 pointer_set_insert (ai->dereferenced_ptrs_store, var);
3469 pi->is_dereferenced = 1;
3474 if (TREE_CODE (stmt) == PHI_NODE)
3475 return;
3477 /* Mark stored variables in STMT as being written to and update the
3478 memory reference stats for all memory symbols referenced by STMT. */
3479 if (stmt_references_memory_p (stmt))
3481 unsigned i;
3482 bitmap_iterator bi;
3484 mem_ref_stats->num_mem_stmts++;
3486 /* Notice that we only update memory reference stats for symbols
3487 loaded and stored by the statement if the statement does not
3488 contain pointer dereferences and it is not a call/asm site.
3489 This is to avoid double accounting problems when creating
3490 memory partitions. After computing points-to information,
3491 pointer dereference statistics are used to update the
3492 reference stats of the pointed-to variables, so here we
3493 should only update direct references to symbols.
3495 Indirect references are not updated here for two reasons: (1)
3496 The first time we compute alias information, the sets
3497 LOADED/STORED are empty for pointer dereferences, (2) After
3498 partitioning, LOADED/STORED may have references to
3499 partitions, not the original pointed-to variables. So, if we
3500 always counted LOADED/STORED here and during partitioning, we
3501 would count many symbols more than once.
3503 This does cause some imprecision when a statement has a
3504 combination of direct symbol references and pointer
3505 dereferences (e.g., MEMORY_VAR = *PTR) or if a call site has
3506 memory symbols in its argument list, but these cases do not
3507 occur so frequently as to constitute a serious problem. */
3508 if (STORED_SYMS (stmt))
3509 EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
3511 tree sym = referenced_var (i);
3512 pointer_set_insert (ai->written_vars, sym);
3513 if (!stmt_dereferences_ptr_p
3514 && stmt_escape_type != ESCAPE_TO_CALL
3515 && stmt_escape_type != ESCAPE_TO_PURE_CONST
3516 && stmt_escape_type != ESCAPE_TO_ASM)
3517 update_mem_sym_stats_from_stmt (sym, stmt, 0, 1);
3520 if (!stmt_dereferences_ptr_p
3521 && LOADED_SYMS (stmt)
3522 && stmt_escape_type != ESCAPE_TO_CALL
3523 && stmt_escape_type != ESCAPE_TO_PURE_CONST
3524 && stmt_escape_type != ESCAPE_TO_ASM)
3525 EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt), 0, i, bi)
3526 update_mem_sym_stats_from_stmt (referenced_var (i), stmt, 1, 0);
3531 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3532 Expressions of the type PTR + CST can be handled in two ways:
3534 1- If the constraint for PTR is ADDRESSOF for a non-structure
3535 variable, then we can use it directly because adding or
3536 subtracting a constant may not alter the original ADDRESSOF
3537 constraint (i.e., pointer arithmetic may not legally go outside
3538 an object's boundaries).
3540 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3541 then if CST is a compile-time constant that can be used as an
3542 offset, we can determine which sub-variable will be pointed-to
3543 by the expression.
3545 Return true if the expression is handled. For any other kind of
3546 expression, return false so that each operand can be added as a
3547 separate constraint by the caller. */
3549 static bool
3550 handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
3552 tree op0, op1;
3553 struct constraint_expr *c, *c2;
3554 unsigned int i = 0;
3555 unsigned int j = 0;
3556 VEC (ce_s, heap) *temp = NULL;
3557 unsigned int rhsoffset = 0;
3558 bool unknown_addend = false;
3560 if (TREE_CODE (expr) != POINTER_PLUS_EXPR)
3561 return false;
3563 op0 = TREE_OPERAND (expr, 0);
3564 op1 = TREE_OPERAND (expr, 1);
3565 gcc_assert (POINTER_TYPE_P (TREE_TYPE (op0)));
3567 get_constraint_for (op0, &temp);
3569 /* Handle non-constants by making constraints from integer. */
3570 if (TREE_CODE (op1) == INTEGER_CST)
3571 rhsoffset = TREE_INT_CST_LOW (op1) * BITS_PER_UNIT;
3572 else
3573 unknown_addend = true;
3575 for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
3576 for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
3578 if (c2->type == ADDRESSOF && rhsoffset != 0)
3580 varinfo_t temp = get_varinfo (c2->var);
3582 /* An access one after the end of an array is valid,
3583 so simply punt on accesses we cannot resolve. */
3584 temp = first_vi_for_offset (temp, rhsoffset);
3585 if (temp == NULL)
3586 continue;
3587 c2->var = temp->id;
3588 c2->offset = 0;
3590 else if (unknown_addend)
3592 /* Can't handle *a + integer where integer is unknown. */
3593 if (c2->type != SCALAR)
3595 struct constraint_expr intc;
3596 intc.var = integer_id;
3597 intc.offset = 0;
3598 intc.type = SCALAR;
3599 process_constraint (new_constraint (*c, intc));
3601 else
3603 /* We known it lives somewhere within c2->var. */
3604 varinfo_t tmp = get_varinfo (c2->var);
3605 for (; tmp; tmp = tmp->next)
3607 struct constraint_expr tmpc = *c2;
3608 c2->var = tmp->id;
3609 c2->offset = 0;
3610 process_constraint (new_constraint (*c, tmpc));
3614 else
3615 c2->offset = rhsoffset;
3616 process_constraint (new_constraint (*c, *c2));
3619 VEC_free (ce_s, heap, temp);
3621 return true;
3624 /* For non-IPA mode, generate constraints necessary for a call on the
3625 RHS. */
3627 static void
3628 handle_rhs_call (tree rhs)
3630 tree arg;
3631 call_expr_arg_iterator iter;
3632 struct constraint_expr rhsc;
3634 rhsc.var = anything_id;
3635 rhsc.offset = 0;
3636 rhsc.type = ADDRESSOF;
3638 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhs)
3640 VEC(ce_s, heap) *lhsc = NULL;
3642 /* Find those pointers being passed, and make sure they end up
3643 pointing to anything. */
3644 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3646 unsigned int j;
3647 struct constraint_expr *lhsp;
3649 get_constraint_for (arg, &lhsc);
3650 do_deref (&lhsc);
3651 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3652 process_constraint_1 (new_constraint (*lhsp, rhsc), true);
3653 VEC_free (ce_s, heap, lhsc);
3658 /* For non-IPA mode, generate constraints necessary for a call
3659 that returns a pointer and assigns it to LHS. This simply makes
3660 the LHS point to anything. */
3662 static void
3663 handle_lhs_call (tree lhs)
3665 VEC(ce_s, heap) *lhsc = NULL;
3666 struct constraint_expr rhsc;
3667 unsigned int j;
3668 struct constraint_expr *lhsp;
3670 rhsc.var = anything_id;
3671 rhsc.offset = 0;
3672 rhsc.type = ADDRESSOF;
3673 get_constraint_for (lhs, &lhsc);
3674 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3675 process_constraint_1 (new_constraint (*lhsp, rhsc), true);
3676 VEC_free (ce_s, heap, lhsc);
3679 /* Walk statement T setting up aliasing constraints according to the
3680 references found in T. This function is the main part of the
3681 constraint builder. AI points to auxiliary alias information used
3682 when building alias sets and computing alias grouping heuristics. */
3684 static void
3685 find_func_aliases (tree origt)
3687 tree t = origt;
3688 VEC(ce_s, heap) *lhsc = NULL;
3689 VEC(ce_s, heap) *rhsc = NULL;
3690 struct constraint_expr *c;
3692 if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
3693 t = TREE_OPERAND (t, 0);
3695 /* Now build constraints expressions. */
3696 if (TREE_CODE (t) == PHI_NODE)
3698 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))));
3700 /* Only care about pointers and structures containing
3701 pointers. */
3702 if (could_have_pointers (PHI_RESULT (t)))
3704 int i;
3705 unsigned int j;
3707 /* For a phi node, assign all the arguments to
3708 the result. */
3709 get_constraint_for (PHI_RESULT (t), &lhsc);
3710 for (i = 0; i < PHI_NUM_ARGS (t); i++)
3712 tree rhstype;
3713 tree strippedrhs = PHI_ARG_DEF (t, i);
3715 STRIP_NOPS (strippedrhs);
3716 rhstype = TREE_TYPE (strippedrhs);
3717 get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);
3719 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3721 struct constraint_expr *c2;
3722 while (VEC_length (ce_s, rhsc) > 0)
3724 c2 = VEC_last (ce_s, rhsc);
3725 process_constraint (new_constraint (*c, *c2));
3726 VEC_pop (ce_s, rhsc);
3732 /* In IPA mode, we need to generate constraints to pass call
3733 arguments through their calls. There are two cases, either a
3734 GIMPLE_MODIFY_STMT when we are returning a value, or just a plain
3735 CALL_EXPR when we are not.
3737 In non-ipa mode, we need to generate constraints for each
3738 pointer passed by address. */
3739 else if (((TREE_CODE (t) == GIMPLE_MODIFY_STMT
3740 && TREE_CODE (GIMPLE_STMT_OPERAND (t, 1)) == CALL_EXPR
3741 && !(call_expr_flags (GIMPLE_STMT_OPERAND (t, 1))
3742 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
3743 || (TREE_CODE (t) == CALL_EXPR
3744 && !(call_expr_flags (t)
3745 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))))
3747 if (!in_ipa_mode)
3749 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3751 handle_rhs_call (GIMPLE_STMT_OPERAND (t, 1));
3752 if (POINTER_TYPE_P (TREE_TYPE (GIMPLE_STMT_OPERAND (t, 1))))
3753 handle_lhs_call (GIMPLE_STMT_OPERAND (t, 0));
3755 else
3756 handle_rhs_call (t);
3758 else
3760 tree lhsop;
3761 tree rhsop;
3762 tree arg;
3763 call_expr_arg_iterator iter;
3764 varinfo_t fi;
3765 int i = 1;
3766 tree decl;
3767 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3769 lhsop = GIMPLE_STMT_OPERAND (t, 0);
3770 rhsop = GIMPLE_STMT_OPERAND (t, 1);
3772 else
3774 lhsop = NULL;
3775 rhsop = t;
3777 decl = get_callee_fndecl (rhsop);
3779 /* If we can directly resolve the function being called, do so.
3780 Otherwise, it must be some sort of indirect expression that
3781 we should still be able to handle. */
3782 if (decl)
3784 fi = get_vi_for_tree (decl);
3786 else
3788 decl = CALL_EXPR_FN (rhsop);
3789 fi = get_vi_for_tree (decl);
3792 /* Assign all the passed arguments to the appropriate incoming
3793 parameters of the function. */
3795 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhsop)
3797 struct constraint_expr lhs ;
3798 struct constraint_expr *rhsp;
3800 get_constraint_for (arg, &rhsc);
3801 if (TREE_CODE (decl) != FUNCTION_DECL)
3803 lhs.type = DEREF;
3804 lhs.var = fi->id;
3805 lhs.offset = i;
3807 else
3809 lhs.type = SCALAR;
3810 lhs.var = first_vi_for_offset (fi, i)->id;
3811 lhs.offset = 0;
3813 while (VEC_length (ce_s, rhsc) != 0)
3815 rhsp = VEC_last (ce_s, rhsc);
3816 process_constraint (new_constraint (lhs, *rhsp));
3817 VEC_pop (ce_s, rhsc);
3819 i++;
3822 /* If we are returning a value, assign it to the result. */
3823 if (lhsop)
3825 struct constraint_expr rhs;
3826 struct constraint_expr *lhsp;
3827 unsigned int j = 0;
3829 get_constraint_for (lhsop, &lhsc);
3830 if (TREE_CODE (decl) != FUNCTION_DECL)
3832 rhs.type = DEREF;
3833 rhs.var = fi->id;
3834 rhs.offset = i;
3836 else
3838 rhs.type = SCALAR;
3839 rhs.var = first_vi_for_offset (fi, i)->id;
3840 rhs.offset = 0;
3842 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3843 process_constraint (new_constraint (*lhsp, rhs));
3847 /* Otherwise, just a regular assignment statement. */
3848 else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3850 tree lhsop = GIMPLE_STMT_OPERAND (t, 0);
3851 tree rhsop = GIMPLE_STMT_OPERAND (t, 1);
3852 int i;
3854 if ((AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
3855 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE)
3856 && (AGGREGATE_TYPE_P (TREE_TYPE (rhsop))
3857 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE))
3859 do_structure_copy (lhsop, rhsop);
3861 else
3863 /* Only care about operations with pointers, structures
3864 containing pointers, dereferences, and call expressions. */
3865 if (could_have_pointers (lhsop)
3866 || TREE_CODE (rhsop) == CALL_EXPR)
3868 get_constraint_for (lhsop, &lhsc);
3869 switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
3871 /* RHS that consist of unary operations,
3872 exceptional types, or bare decls/constants, get
3873 handled directly by get_constraint_for. */
3874 case tcc_reference:
3875 case tcc_declaration:
3876 case tcc_constant:
3877 case tcc_exceptional:
3878 case tcc_expression:
3879 case tcc_vl_exp:
3880 case tcc_unary:
3882 unsigned int j;
3884 get_constraint_for (rhsop, &rhsc);
3885 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3887 struct constraint_expr *c2;
3888 unsigned int k;
3890 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3891 process_constraint (new_constraint (*c, *c2));
3895 break;
3897 case tcc_binary:
3899 /* For pointer arithmetic of the form
3900 PTR + CST, we can simply use PTR's
3901 constraint because pointer arithmetic is
3902 not allowed to go out of bounds. */
3903 if (handle_ptr_arith (lhsc, rhsop))
3904 break;
3906 /* FALLTHRU */
3908 /* Otherwise, walk each operand. Notice that we
3909 can't use the operand interface because we need
3910 to process expressions other than simple operands
3911 (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR). */
3912 default:
3913 for (i = 0; i < TREE_OPERAND_LENGTH (rhsop); i++)
3915 tree op = TREE_OPERAND (rhsop, i);
3916 unsigned int j;
3918 gcc_assert (VEC_length (ce_s, rhsc) == 0);
3919 get_constraint_for (op, &rhsc);
3920 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3922 struct constraint_expr *c2;
3923 while (VEC_length (ce_s, rhsc) > 0)
3925 c2 = VEC_last (ce_s, rhsc);
3926 process_constraint (new_constraint (*c, *c2));
3927 VEC_pop (ce_s, rhsc);
3935 else if (TREE_CODE (t) == CHANGE_DYNAMIC_TYPE_EXPR)
3937 unsigned int j;
3939 get_constraint_for (CHANGE_DYNAMIC_TYPE_LOCATION (t), &lhsc);
3940 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
3941 get_varinfo (c->var)->no_tbaa_pruning = true;
3944 /* After promoting variables and computing aliasing we will
3945 need to re-scan most statements. FIXME: Try to minimize the
3946 number of statements re-scanned. It's not really necessary to
3947 re-scan *all* statements. */
3948 mark_stmt_modified (origt);
3949 VEC_free (ce_s, heap, rhsc);
3950 VEC_free (ce_s, heap, lhsc);
3954 /* Find the first varinfo in the same variable as START that overlaps with
3955 OFFSET.
3956 Effectively, walk the chain of fields for the variable START to find the
3957 first field that overlaps with OFFSET.
3958 Return NULL if we can't find one. */
3960 static varinfo_t
3961 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3963 varinfo_t curr = start;
3964 while (curr)
3966 /* We may not find a variable in the field list with the actual
3967 offset when when we have glommed a structure to a variable.
3968 In that case, however, offset should still be within the size
3969 of the variable. */
3970 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3971 return curr;
3972 curr = curr->next;
3974 return NULL;
3978 /* Insert the varinfo FIELD into the field list for BASE, at the front
3979 of the list. */
3981 static void
3982 insert_into_field_list (varinfo_t base, varinfo_t field)
3984 varinfo_t prev = base;
3985 varinfo_t curr = base->next;
3987 field->next = curr;
3988 prev->next = field;
3991 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3992 offset. */
3994 static void
3995 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3997 varinfo_t prev = base;
3998 varinfo_t curr = base->next;
4000 if (curr == NULL)
4002 prev->next = field;
4003 field->next = NULL;
4005 else
4007 while (curr)
4009 if (field->offset <= curr->offset)
4010 break;
4011 prev = curr;
4012 curr = curr->next;
4014 field->next = prev->next;
4015 prev->next = field;
4019 /* qsort comparison function for two fieldoff's PA and PB */
4021 static int
4022 fieldoff_compare (const void *pa, const void *pb)
4024 const fieldoff_s *foa = (const fieldoff_s *)pa;
4025 const fieldoff_s *fob = (const fieldoff_s *)pb;
4026 HOST_WIDE_INT foasize, fobsize;
4028 if (foa->offset != fob->offset)
4029 return foa->offset - fob->offset;
4031 foasize = TREE_INT_CST_LOW (foa->size);
4032 fobsize = TREE_INT_CST_LOW (fob->size);
4033 return foasize - fobsize;
4036 /* Sort a fieldstack according to the field offset and sizes. */
4037 void
4038 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
4040 qsort (VEC_address (fieldoff_s, fieldstack),
4041 VEC_length (fieldoff_s, fieldstack),
4042 sizeof (fieldoff_s),
4043 fieldoff_compare);
4046 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all the fields
4047 of TYPE onto fieldstack, recording their offsets along the way.
4048 OFFSET is used to keep track of the offset in this entire structure, rather
4049 than just the immediately containing structure. Returns the number
4050 of fields pushed.
4051 HAS_UNION is set to true if we find a union type as a field of
4052 TYPE. ADDRESSABLE_TYPE is the type of the outermost object that could have
4053 its address taken. */
4056 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4057 HOST_WIDE_INT offset, bool *has_union,
4058 tree addressable_type)
4060 tree field;
4061 int count = 0;
4063 if (TREE_CODE (type) == COMPLEX_TYPE)
4065 fieldoff_s *real_part, *img_part;
4066 real_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4067 real_part->type = TREE_TYPE (type);
4068 real_part->size = TYPE_SIZE (TREE_TYPE (type));
4069 real_part->offset = offset;
4070 real_part->decl = NULL_TREE;
4071 real_part->alias_set = -1;
4073 img_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4074 img_part->type = TREE_TYPE (type);
4075 img_part->size = TYPE_SIZE (TREE_TYPE (type));
4076 img_part->offset = offset + TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (type)));
4077 img_part->decl = NULL_TREE;
4078 img_part->alias_set = -1;
4080 return 2;
4083 if (TREE_CODE (type) == ARRAY_TYPE)
4085 tree sz = TYPE_SIZE (type);
4086 tree elsz = TYPE_SIZE (TREE_TYPE (type));
4087 HOST_WIDE_INT nr;
4088 int i;
4090 if (! sz
4091 || ! host_integerp (sz, 1)
4092 || TREE_INT_CST_LOW (sz) == 0
4093 || ! elsz
4094 || ! host_integerp (elsz, 1)
4095 || TREE_INT_CST_LOW (elsz) == 0)
4096 return 0;
4098 nr = TREE_INT_CST_LOW (sz) / TREE_INT_CST_LOW (elsz);
4099 if (nr > SALIAS_MAX_ARRAY_ELEMENTS)
4100 return 0;
4102 for (i = 0; i < nr; ++i)
4104 bool push = false;
4105 int pushed = 0;
4107 if (has_union
4108 && (TREE_CODE (TREE_TYPE (type)) == QUAL_UNION_TYPE
4109 || TREE_CODE (TREE_TYPE (type)) == UNION_TYPE))
4110 *has_union = true;
4112 if (!AGGREGATE_TYPE_P (TREE_TYPE (type))) /* var_can_have_subvars */
4113 push = true;
4114 else if (!(pushed = push_fields_onto_fieldstack
4115 (TREE_TYPE (type), fieldstack,
4116 offset + i * TREE_INT_CST_LOW (elsz), has_union,
4117 TREE_TYPE (type))))
4118 /* Empty structures may have actual size, like in C++. So
4119 see if we didn't push any subfields and the size is
4120 nonzero, push the field onto the stack */
4121 push = true;
4123 if (push)
4125 fieldoff_s *pair;
4127 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4128 pair->type = TREE_TYPE (type);
4129 pair->size = elsz;
4130 pair->decl = NULL_TREE;
4131 pair->offset = offset + i * TREE_INT_CST_LOW (elsz);
4132 pair->alias_set = -1;
4133 count++;
4135 else
4136 count += pushed;
4139 return count;
4142 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4143 if (TREE_CODE (field) == FIELD_DECL)
4145 bool push = false;
4146 int pushed = 0;
4148 if (has_union
4149 && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4150 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
4151 *has_union = true;
4153 if (!var_can_have_subvars (field))
4154 push = true;
4155 else if (!(pushed = push_fields_onto_fieldstack
4156 (TREE_TYPE (field), fieldstack,
4157 offset + bitpos_of_field (field), has_union,
4158 (DECL_NONADDRESSABLE_P (field)
4159 ? addressable_type
4160 : TREE_TYPE (field))))
4161 && DECL_SIZE (field)
4162 && !integer_zerop (DECL_SIZE (field)))
4163 /* Empty structures may have actual size, like in C++. So
4164 see if we didn't push any subfields and the size is
4165 nonzero, push the field onto the stack */
4166 push = true;
4168 if (push)
4170 fieldoff_s *pair;
4172 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4173 pair->type = TREE_TYPE (field);
4174 pair->size = DECL_SIZE (field);
4175 pair->decl = field;
4176 pair->offset = offset + bitpos_of_field (field);
4177 if (DECL_NONADDRESSABLE_P (field))
4178 pair->alias_set = get_alias_set (addressable_type);
4179 else
4180 pair->alias_set = -1;
4181 count++;
4183 else
4184 count += pushed;
4187 return count;
4190 /* Create a constraint from ANYTHING variable to VI. */
4191 static void
4192 make_constraint_from_anything (varinfo_t vi)
4194 struct constraint_expr lhs, rhs;
4196 lhs.var = vi->id;
4197 lhs.offset = 0;
4198 lhs.type = SCALAR;
4200 rhs.var = anything_id;
4201 rhs.offset = 0;
4202 rhs.type = ADDRESSOF;
4203 process_constraint (new_constraint (lhs, rhs));
4206 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4207 if it is a varargs function. */
4209 static unsigned int
4210 count_num_arguments (tree decl, bool *is_varargs)
4212 unsigned int i = 0;
4213 tree t;
4215 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
4217 t = TREE_CHAIN (t))
4219 if (TREE_VALUE (t) == void_type_node)
4220 break;
4221 i++;
4224 if (!t)
4225 *is_varargs = true;
4226 return i;
4229 /* Creation function node for DECL, using NAME, and return the index
4230 of the variable we've created for the function. */
4232 static unsigned int
4233 create_function_info_for (tree decl, const char *name)
4235 unsigned int index = VEC_length (varinfo_t, varmap);
4236 varinfo_t vi;
4237 tree arg;
4238 unsigned int i;
4239 bool is_varargs = false;
4241 /* Create the variable info. */
4243 vi = new_var_info (decl, index, name);
4244 vi->decl = decl;
4245 vi->offset = 0;
4246 vi->has_union = 0;
4247 vi->size = 1;
4248 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
4249 insert_vi_for_tree (vi->decl, vi);
4250 VEC_safe_push (varinfo_t, heap, varmap, vi);
4252 stats.total_vars++;
4254 /* If it's varargs, we don't know how many arguments it has, so we
4255 can't do much.
4257 if (is_varargs)
4259 vi->fullsize = ~0;
4260 vi->size = ~0;
4261 vi->is_unknown_size_var = true;
4262 return index;
4266 arg = DECL_ARGUMENTS (decl);
4268 /* Set up variables for each argument. */
4269 for (i = 1; i < vi->fullsize; i++)
4271 varinfo_t argvi;
4272 const char *newname;
4273 char *tempname;
4274 unsigned int newindex;
4275 tree argdecl = decl;
4277 if (arg)
4278 argdecl = arg;
4280 newindex = VEC_length (varinfo_t, varmap);
4281 asprintf (&tempname, "%s.arg%d", name, i-1);
4282 newname = ggc_strdup (tempname);
4283 free (tempname);
4285 argvi = new_var_info (argdecl, newindex, newname);
4286 argvi->decl = argdecl;
4287 VEC_safe_push (varinfo_t, heap, varmap, argvi);
4288 argvi->offset = i;
4289 argvi->size = 1;
4290 argvi->fullsize = vi->fullsize;
4291 argvi->has_union = false;
4292 insert_into_field_list_sorted (vi, argvi);
4293 stats.total_vars ++;
4294 if (arg)
4296 insert_vi_for_tree (arg, argvi);
4297 arg = TREE_CHAIN (arg);
4301 /* Create a variable for the return var. */
4302 if (DECL_RESULT (decl) != NULL
4303 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
4305 varinfo_t resultvi;
4306 const char *newname;
4307 char *tempname;
4308 unsigned int newindex;
4309 tree resultdecl = decl;
4311 vi->fullsize ++;
4313 if (DECL_RESULT (decl))
4314 resultdecl = DECL_RESULT (decl);
4316 newindex = VEC_length (varinfo_t, varmap);
4317 asprintf (&tempname, "%s.result", name);
4318 newname = ggc_strdup (tempname);
4319 free (tempname);
4321 resultvi = new_var_info (resultdecl, newindex, newname);
4322 resultvi->decl = resultdecl;
4323 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
4324 resultvi->offset = i;
4325 resultvi->size = 1;
4326 resultvi->fullsize = vi->fullsize;
4327 resultvi->has_union = false;
4328 insert_into_field_list_sorted (vi, resultvi);
4329 stats.total_vars ++;
4330 if (DECL_RESULT (decl))
4331 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
4333 return index;
4337 /* Return true if FIELDSTACK contains fields that overlap.
4338 FIELDSTACK is assumed to be sorted by offset. */
4340 static bool
4341 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
4343 fieldoff_s *fo = NULL;
4344 unsigned int i;
4345 HOST_WIDE_INT lastoffset = -1;
4347 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4349 if (fo->offset == lastoffset)
4350 return true;
4351 lastoffset = fo->offset;
4353 return false;
4356 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
4357 This will also create any varinfo structures necessary for fields
4358 of DECL. */
4360 static unsigned int
4361 create_variable_info_for (tree decl, const char *name)
4363 unsigned int index = VEC_length (varinfo_t, varmap);
4364 varinfo_t vi;
4365 tree decltype = TREE_TYPE (decl);
4366 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
4367 bool notokay = false;
4368 bool hasunion;
4369 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
4370 VEC (fieldoff_s,heap) *fieldstack = NULL;
4372 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
4373 return create_function_info_for (decl, name);
4375 hasunion = TREE_CODE (decltype) == UNION_TYPE
4376 || TREE_CODE (decltype) == QUAL_UNION_TYPE;
4377 if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
4379 push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion,
4380 decltype);
4381 if (hasunion)
4383 VEC_free (fieldoff_s, heap, fieldstack);
4384 notokay = true;
4389 /* If the variable doesn't have subvars, we may end up needing to
4390 sort the field list and create fake variables for all the
4391 fields. */
4392 vi = new_var_info (decl, index, name);
4393 vi->decl = decl;
4394 vi->offset = 0;
4395 vi->has_union = hasunion;
4396 if (!declsize
4397 || TREE_CODE (declsize) != INTEGER_CST
4398 || TREE_CODE (decltype) == UNION_TYPE
4399 || TREE_CODE (decltype) == QUAL_UNION_TYPE)
4401 vi->is_unknown_size_var = true;
4402 vi->fullsize = ~0;
4403 vi->size = ~0;
4405 else
4407 vi->fullsize = TREE_INT_CST_LOW (declsize);
4408 vi->size = vi->fullsize;
4411 insert_vi_for_tree (vi->decl, vi);
4412 VEC_safe_push (varinfo_t, heap, varmap, vi);
4413 if (is_global && (!flag_whole_program || !in_ipa_mode))
4414 make_constraint_from_anything (vi);
4416 stats.total_vars++;
4417 if (use_field_sensitive
4418 && !notokay
4419 && !vi->is_unknown_size_var
4420 && var_can_have_subvars (decl)
4421 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
4423 unsigned int newindex = VEC_length (varinfo_t, varmap);
4424 fieldoff_s *fo = NULL;
4425 unsigned int i;
4427 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4429 if (! fo->size
4430 || TREE_CODE (fo->size) != INTEGER_CST
4431 || fo->offset < 0)
4433 notokay = true;
4434 break;
4438 /* We can't sort them if we have a field with a variable sized type,
4439 which will make notokay = true. In that case, we are going to return
4440 without creating varinfos for the fields anyway, so sorting them is a
4441 waste to boot. */
4442 if (!notokay)
4444 sort_fieldstack (fieldstack);
4445 /* Due to some C++ FE issues, like PR 22488, we might end up
4446 what appear to be overlapping fields even though they,
4447 in reality, do not overlap. Until the C++ FE is fixed,
4448 we will simply disable field-sensitivity for these cases. */
4449 notokay = check_for_overlaps (fieldstack);
4453 if (VEC_length (fieldoff_s, fieldstack) != 0)
4454 fo = VEC_index (fieldoff_s, fieldstack, 0);
4456 if (fo == NULL || notokay)
4458 vi->is_unknown_size_var = 1;
4459 vi->fullsize = ~0;
4460 vi->size = ~0;
4461 VEC_free (fieldoff_s, heap, fieldstack);
4462 return index;
4465 vi->size = TREE_INT_CST_LOW (fo->size);
4466 vi->offset = fo->offset;
4467 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
4468 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
4469 i--)
4471 varinfo_t newvi;
4472 const char *newname = "NULL";
4473 char *tempname;
4475 newindex = VEC_length (varinfo_t, varmap);
4476 if (dump_file)
4478 if (fo->decl)
4479 asprintf (&tempname, "%s.%s",
4480 vi->name, alias_get_name (fo->decl));
4481 else
4482 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC,
4483 vi->name, fo->offset);
4484 newname = ggc_strdup (tempname);
4485 free (tempname);
4487 newvi = new_var_info (decl, newindex, newname);
4488 newvi->offset = fo->offset;
4489 newvi->size = TREE_INT_CST_LOW (fo->size);
4490 newvi->fullsize = vi->fullsize;
4491 insert_into_field_list (vi, newvi);
4492 VEC_safe_push (varinfo_t, heap, varmap, newvi);
4493 if (is_global && (!flag_whole_program || !in_ipa_mode))
4494 make_constraint_from_anything (newvi);
4496 stats.total_vars++;
4498 VEC_free (fieldoff_s, heap, fieldstack);
4500 return index;
4503 /* Print out the points-to solution for VAR to FILE. */
4505 void
4506 dump_solution_for_var (FILE *file, unsigned int var)
4508 varinfo_t vi = get_varinfo (var);
4509 unsigned int i;
4510 bitmap_iterator bi;
4512 if (find (var) != var)
4514 varinfo_t vipt = get_varinfo (find (var));
4515 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4517 else
4519 fprintf (file, "%s = { ", vi->name);
4520 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4522 fprintf (file, "%s ", get_varinfo (i)->name);
4524 fprintf (file, "}");
4525 if (vi->no_tbaa_pruning)
4526 fprintf (file, " no-tbaa-pruning");
4527 fprintf (file, "\n");
4531 /* Print the points-to solution for VAR to stdout. */
4533 void
4534 debug_solution_for_var (unsigned int var)
4536 dump_solution_for_var (stdout, var);
4539 /* Create varinfo structures for all of the variables in the
4540 function for intraprocedural mode. */
4542 static void
4543 intra_create_variable_infos (void)
4545 tree t;
4546 struct constraint_expr lhs, rhs;
4548 /* For each incoming pointer argument arg, create the constraint ARG
4549 = ANYTHING or a dummy variable if flag_argument_noalias is set. */
4550 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
4552 varinfo_t p;
4554 if (!could_have_pointers (t))
4555 continue;
4557 /* If flag_argument_noalias is set, then function pointer
4558 arguments are guaranteed not to point to each other. In that
4559 case, create an artificial variable PARM_NOALIAS and the
4560 constraint ARG = &PARM_NOALIAS. */
4561 if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
4563 varinfo_t vi;
4564 tree heapvar = heapvar_lookup (t);
4566 lhs.offset = 0;
4567 lhs.type = SCALAR;
4568 lhs.var = get_vi_for_tree (t)->id;
4570 if (heapvar == NULL_TREE)
4572 var_ann_t ann;
4573 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
4574 "PARM_NOALIAS");
4575 DECL_EXTERNAL (heapvar) = 1;
4576 if (gimple_referenced_vars (cfun))
4577 add_referenced_var (heapvar);
4579 heapvar_insert (t, heapvar);
4581 ann = get_var_ann (heapvar);
4582 if (flag_argument_noalias == 1)
4583 ann->noalias_state = NO_ALIAS;
4584 else if (flag_argument_noalias == 2)
4585 ann->noalias_state = NO_ALIAS_GLOBAL;
4586 else if (flag_argument_noalias == 3)
4587 ann->noalias_state = NO_ALIAS_ANYTHING;
4588 else
4589 gcc_unreachable ();
4592 vi = get_vi_for_tree (heapvar);
4593 vi->is_artificial_var = 1;
4594 vi->is_heap_var = 1;
4595 rhs.var = vi->id;
4596 rhs.type = ADDRESSOF;
4597 rhs.offset = 0;
4598 for (p = get_varinfo (lhs.var); p; p = p->next)
4600 struct constraint_expr temp = lhs;
4601 temp.var = p->id;
4602 process_constraint (new_constraint (temp, rhs));
4605 else
4607 varinfo_t arg_vi = get_vi_for_tree (t);
4609 for (p = arg_vi; p; p = p->next)
4610 make_constraint_from_anything (p);
4615 /* Structure used to put solution bitmaps in a hashtable so they can
4616 be shared among variables with the same points-to set. */
4618 typedef struct shared_bitmap_info
4620 bitmap pt_vars;
4621 hashval_t hashcode;
4622 } *shared_bitmap_info_t;
4623 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
4625 static htab_t shared_bitmap_table;
4627 /* Hash function for a shared_bitmap_info_t */
4629 static hashval_t
4630 shared_bitmap_hash (const void *p)
4632 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
4633 return bi->hashcode;
4636 /* Equality function for two shared_bitmap_info_t's. */
4638 static int
4639 shared_bitmap_eq (const void *p1, const void *p2)
4641 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
4642 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
4643 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
4646 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
4647 existing instance if there is one, NULL otherwise. */
4649 static bitmap
4650 shared_bitmap_lookup (bitmap pt_vars)
4652 void **slot;
4653 struct shared_bitmap_info sbi;
4655 sbi.pt_vars = pt_vars;
4656 sbi.hashcode = bitmap_hash (pt_vars);
4658 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
4659 sbi.hashcode, NO_INSERT);
4660 if (!slot)
4661 return NULL;
4662 else
4663 return ((shared_bitmap_info_t) *slot)->pt_vars;
4667 /* Add a bitmap to the shared bitmap hashtable. */
4669 static void
4670 shared_bitmap_add (bitmap pt_vars)
4672 void **slot;
4673 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
4675 sbi->pt_vars = pt_vars;
4676 sbi->hashcode = bitmap_hash (pt_vars);
4678 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
4679 sbi->hashcode, INSERT);
4680 gcc_assert (!*slot);
4681 *slot = (void *) sbi;
4685 /* Set bits in INTO corresponding to the variable uids in solution set
4686 FROM, which came from variable PTR.
4687 For variables that are actually dereferenced, we also use type
4688 based alias analysis to prune the points-to sets.
4689 IS_DEREFED is true if PTR was directly dereferenced, which we use to
4690 help determine whether we are we are allowed to prune using TBAA.
4691 If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
4692 the from set. */
4694 static void
4695 set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
4696 bool no_tbaa_pruning)
4698 unsigned int i;
4699 bitmap_iterator bi;
4700 subvar_t sv;
4701 alias_set_type ptr_alias_set = get_alias_set (TREE_TYPE (ptr));
4703 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4705 varinfo_t vi = get_varinfo (i);
4706 alias_set_type var_alias_set;
4708 /* The only artificial variables that are allowed in a may-alias
4709 set are heap variables. */
4710 if (vi->is_artificial_var && !vi->is_heap_var)
4711 continue;
4713 if (vi->has_union && get_subvars_for_var (vi->decl) != NULL)
4715 /* Variables containing unions may need to be converted to
4716 their SFT's, because SFT's can have unions and we cannot. */
4717 for (sv = get_subvars_for_var (vi->decl); sv; sv = sv->next)
4718 bitmap_set_bit (into, DECL_UID (sv->var));
4720 else if (TREE_CODE (vi->decl) == VAR_DECL
4721 || TREE_CODE (vi->decl) == PARM_DECL
4722 || TREE_CODE (vi->decl) == RESULT_DECL)
4724 if (var_can_have_subvars (vi->decl)
4725 && get_subvars_for_var (vi->decl))
4727 /* If VI->DECL is an aggregate for which we created
4728 SFTs, add the SFT corresponding to VI->OFFSET. */
4729 tree sft = get_subvar_at (vi->decl, vi->offset);
4730 gcc_assert (sft);
4731 if (sft)
4733 var_alias_set = get_alias_set (sft);
4734 if (no_tbaa_pruning
4735 || (!is_derefed && !vi->directly_dereferenced)
4736 || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
4737 bitmap_set_bit (into, DECL_UID (sft));
4740 else
4742 /* Otherwise, just add VI->DECL to the alias set.
4743 Don't type prune artificial vars. */
4744 if (vi->is_artificial_var)
4745 bitmap_set_bit (into, DECL_UID (vi->decl));
4746 else
4748 var_alias_set = get_alias_set (vi->decl);
4749 if (no_tbaa_pruning
4750 || (!is_derefed && !vi->directly_dereferenced)
4751 || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
4752 bitmap_set_bit (into, DECL_UID (vi->decl));
4760 static bool have_alias_info = false;
4762 /* The list of SMT's that are in use by our pointer variables. This
4763 is the set of SMT's for all pointers that can point to anything. */
4764 static bitmap used_smts;
4766 /* Due to the ordering of points-to set calculation and SMT
4767 calculation being a bit co-dependent, we can't just calculate SMT
4768 used info whenever we want, we have to calculate it around the time
4769 that find_what_p_points_to is called. */
4771 /* Mark which SMT's are in use by points-to anything variables. */
4773 void
4774 set_used_smts (void)
4776 int i;
4777 varinfo_t vi;
4778 used_smts = BITMAP_ALLOC (&pta_obstack);
4780 for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); i++)
4782 tree var = vi->decl;
4783 varinfo_t withsolution = get_varinfo (find (i));
4784 tree smt;
4785 var_ann_t va;
4786 struct ptr_info_def *pi = NULL;
4788 /* For parm decls, the pointer info may be under the default
4789 def. */
4790 if (TREE_CODE (vi->decl) == PARM_DECL
4791 && gimple_default_def (cfun, var))
4792 pi = SSA_NAME_PTR_INFO (gimple_default_def (cfun, var));
4793 else if (TREE_CODE (var) == SSA_NAME)
4794 pi = SSA_NAME_PTR_INFO (var);
4796 /* Skip the special variables and those that can't be aliased. */
4797 if (vi->is_special_var
4798 || !SSA_VAR_P (var)
4799 || (pi && !pi->is_dereferenced)
4800 || (TREE_CODE (var) == VAR_DECL && !may_be_aliased (var))
4801 || !POINTER_TYPE_P (TREE_TYPE (var)))
4802 continue;
4804 if (TREE_CODE (var) == SSA_NAME)
4805 var = SSA_NAME_VAR (var);
4807 va = var_ann (var);
4808 if (!va)
4809 continue;
4811 smt = va->symbol_mem_tag;
4812 if (smt && bitmap_bit_p (withsolution->solution, anything_id))
4813 bitmap_set_bit (used_smts, DECL_UID (smt));
4817 /* Merge the necessary SMT's into the bitmap INTO, which is
4818 P's varinfo. This involves merging all SMT's that are a subset of
4819 the SMT necessary for P. */
4821 static void
4822 merge_smts_into (tree p, bitmap solution)
4824 unsigned int i;
4825 bitmap_iterator bi;
4826 tree smt;
4827 bitmap aliases;
4828 tree var = p;
4830 if (TREE_CODE (p) == SSA_NAME)
4831 var = SSA_NAME_VAR (p);
4833 smt = var_ann (var)->symbol_mem_tag;
4834 if (smt)
4836 alias_set_type smtset = get_alias_set (TREE_TYPE (smt));
4838 /* Need to set the SMT subsets first before this
4839 will work properly. */
4840 bitmap_set_bit (solution, DECL_UID (smt));
4841 EXECUTE_IF_SET_IN_BITMAP (used_smts, 0, i, bi)
4843 tree newsmt = referenced_var (i);
4844 tree newsmttype = TREE_TYPE (newsmt);
4846 if (alias_set_subset_of (get_alias_set (newsmttype),
4847 smtset))
4848 bitmap_set_bit (solution, i);
4851 aliases = MTAG_ALIASES (smt);
4852 if (aliases)
4853 bitmap_ior_into (solution, aliases);
4857 /* Given a pointer variable P, fill in its points-to set, or return
4858 false if we can't.
4859 Rather than return false for variables that point-to anything, we
4860 instead find the corresponding SMT, and merge in its aliases. In
4861 addition to these aliases, we also set the bits for the SMT's
4862 themselves and their subsets, as SMT's are still in use by
4863 non-SSA_NAME's, and pruning may eliminate every one of their
4864 aliases. In such a case, if we did not include the right set of
4865 SMT's in the points-to set of the variable, we'd end up with
4866 statements that do not conflict but should. */
4868 bool
4869 find_what_p_points_to (tree p)
4871 tree lookup_p = p;
4872 varinfo_t vi;
4874 if (!have_alias_info)
4875 return false;
4877 /* For parameters, get at the points-to set for the actual parm
4878 decl. */
4879 if (TREE_CODE (p) == SSA_NAME
4880 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4881 && SSA_NAME_IS_DEFAULT_DEF (p))
4882 lookup_p = SSA_NAME_VAR (p);
4884 vi = lookup_vi_for_tree (lookup_p);
4885 if (vi)
4887 if (vi->is_artificial_var)
4888 return false;
4890 /* See if this is a field or a structure. */
4891 if (vi->size != vi->fullsize)
4893 /* Nothing currently asks about structure fields directly,
4894 but when they do, we need code here to hand back the
4895 points-to set. */
4896 if (!var_can_have_subvars (vi->decl)
4897 || get_subvars_for_var (vi->decl) == NULL)
4898 return false;
4900 else
4902 struct ptr_info_def *pi = get_ptr_info (p);
4903 unsigned int i;
4904 bitmap_iterator bi;
4905 bool was_pt_anything = false;
4906 bitmap finished_solution;
4907 bitmap result;
4909 if (!pi->is_dereferenced)
4910 return false;
4912 /* This variable may have been collapsed, let's get the real
4913 variable. */
4914 vi = get_varinfo (find (vi->id));
4916 /* Translate artificial variables into SSA_NAME_PTR_INFO
4917 attributes. */
4918 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4920 varinfo_t vi = get_varinfo (i);
4922 if (vi->is_artificial_var)
4924 /* FIXME. READONLY should be handled better so that
4925 flow insensitive aliasing can disregard writable
4926 aliases. */
4927 if (vi->id == nothing_id)
4928 pi->pt_null = 1;
4929 else if (vi->id == anything_id)
4930 was_pt_anything = 1;
4931 else if (vi->id == readonly_id)
4932 was_pt_anything = 1;
4933 else if (vi->id == integer_id)
4934 was_pt_anything = 1;
4935 else if (vi->is_heap_var)
4936 pi->pt_global_mem = 1;
4940 /* Share the final set of variables when possible. */
4942 finished_solution = BITMAP_GGC_ALLOC ();
4943 stats.points_to_sets_created++;
4945 /* Instead of using pt_anything, we merge in the SMT aliases
4946 for the underlying SMT. In addition, if they could have
4947 pointed to anything, they could point to global memory.
4948 But we cannot do that for ref-all pointers because these
4949 aliases have not been computed yet. */
4950 if (was_pt_anything)
4952 if (PTR_IS_REF_ALL (p))
4954 pi->pt_anything = 1;
4955 return false;
4958 merge_smts_into (p, finished_solution);
4959 pi->pt_global_mem = 1;
4962 set_uids_in_ptset (vi->decl, finished_solution, vi->solution,
4963 vi->directly_dereferenced,
4964 vi->no_tbaa_pruning);
4965 result = shared_bitmap_lookup (finished_solution);
4967 if (!result)
4969 shared_bitmap_add (finished_solution);
4970 pi->pt_vars = finished_solution;
4972 else
4974 pi->pt_vars = result;
4975 bitmap_clear (finished_solution);
4978 if (bitmap_empty_p (pi->pt_vars))
4979 pi->pt_vars = NULL;
4981 return true;
4985 return false;
4990 /* Dump points-to information to OUTFILE. */
4992 void
4993 dump_sa_points_to_info (FILE *outfile)
4995 unsigned int i;
4997 fprintf (outfile, "\nPoints-to sets\n\n");
4999 if (dump_flags & TDF_STATS)
5001 fprintf (outfile, "Stats:\n");
5002 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
5003 fprintf (outfile, "Non-pointer vars: %d\n",
5004 stats.nonpointer_vars);
5005 fprintf (outfile, "Statically unified vars: %d\n",
5006 stats.unified_vars_static);
5007 fprintf (outfile, "Dynamically unified vars: %d\n",
5008 stats.unified_vars_dynamic);
5009 fprintf (outfile, "Iterations: %d\n", stats.iterations);
5010 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
5011 fprintf (outfile, "Number of implicit edges: %d\n",
5012 stats.num_implicit_edges);
5015 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
5016 dump_solution_for_var (outfile, i);
5020 /* Debug points-to information to stderr. */
5022 void
5023 debug_sa_points_to_info (void)
5025 dump_sa_points_to_info (stderr);
5029 /* Initialize the always-existing constraint variables for NULL
5030 ANYTHING, READONLY, and INTEGER */
5032 static void
5033 init_base_vars (void)
5035 struct constraint_expr lhs, rhs;
5037 /* Create the NULL variable, used to represent that a variable points
5038 to NULL. */
5039 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
5040 var_nothing = new_var_info (nothing_tree, 0, "NULL");
5041 insert_vi_for_tree (nothing_tree, var_nothing);
5042 var_nothing->is_artificial_var = 1;
5043 var_nothing->offset = 0;
5044 var_nothing->size = ~0;
5045 var_nothing->fullsize = ~0;
5046 var_nothing->is_special_var = 1;
5047 nothing_id = 0;
5048 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
5050 /* Create the ANYTHING variable, used to represent that a variable
5051 points to some unknown piece of memory. */
5052 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
5053 var_anything = new_var_info (anything_tree, 1, "ANYTHING");
5054 insert_vi_for_tree (anything_tree, var_anything);
5055 var_anything->is_artificial_var = 1;
5056 var_anything->size = ~0;
5057 var_anything->offset = 0;
5058 var_anything->next = NULL;
5059 var_anything->fullsize = ~0;
5060 var_anything->is_special_var = 1;
5061 anything_id = 1;
5063 /* Anything points to anything. This makes deref constraints just
5064 work in the presence of linked list and other p = *p type loops,
5065 by saying that *ANYTHING = ANYTHING. */
5066 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
5067 lhs.type = SCALAR;
5068 lhs.var = anything_id;
5069 lhs.offset = 0;
5070 rhs.type = ADDRESSOF;
5071 rhs.var = anything_id;
5072 rhs.offset = 0;
5074 /* This specifically does not use process_constraint because
5075 process_constraint ignores all anything = anything constraints, since all
5076 but this one are redundant. */
5077 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
5079 /* Create the READONLY variable, used to represent that a variable
5080 points to readonly memory. */
5081 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
5082 var_readonly = new_var_info (readonly_tree, 2, "READONLY");
5083 var_readonly->is_artificial_var = 1;
5084 var_readonly->offset = 0;
5085 var_readonly->size = ~0;
5086 var_readonly->fullsize = ~0;
5087 var_readonly->next = NULL;
5088 var_readonly->is_special_var = 1;
5089 insert_vi_for_tree (readonly_tree, var_readonly);
5090 readonly_id = 2;
5091 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
5093 /* readonly memory points to anything, in order to make deref
5094 easier. In reality, it points to anything the particular
5095 readonly variable can point to, but we don't track this
5096 separately. */
5097 lhs.type = SCALAR;
5098 lhs.var = readonly_id;
5099 lhs.offset = 0;
5100 rhs.type = ADDRESSOF;
5101 rhs.var = anything_id;
5102 rhs.offset = 0;
5104 process_constraint (new_constraint (lhs, rhs));
5106 /* Create the INTEGER variable, used to represent that a variable points
5107 to an INTEGER. */
5108 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
5109 var_integer = new_var_info (integer_tree, 3, "INTEGER");
5110 insert_vi_for_tree (integer_tree, var_integer);
5111 var_integer->is_artificial_var = 1;
5112 var_integer->size = ~0;
5113 var_integer->fullsize = ~0;
5114 var_integer->offset = 0;
5115 var_integer->next = NULL;
5116 var_integer->is_special_var = 1;
5117 integer_id = 3;
5118 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
5120 /* INTEGER = ANYTHING, because we don't know where a dereference of
5121 a random integer will point to. */
5122 lhs.type = SCALAR;
5123 lhs.var = integer_id;
5124 lhs.offset = 0;
5125 rhs.type = ADDRESSOF;
5126 rhs.var = anything_id;
5127 rhs.offset = 0;
5128 process_constraint (new_constraint (lhs, rhs));
5131 /* Initialize things necessary to perform PTA */
5133 static void
5134 init_alias_vars (void)
5136 bitmap_obstack_initialize (&pta_obstack);
5137 bitmap_obstack_initialize (&oldpta_obstack);
5138 bitmap_obstack_initialize (&predbitmap_obstack);
5140 constraint_pool = create_alloc_pool ("Constraint pool",
5141 sizeof (struct constraint), 30);
5142 variable_info_pool = create_alloc_pool ("Variable info pool",
5143 sizeof (struct variable_info), 30);
5144 constraints = VEC_alloc (constraint_t, heap, 8);
5145 varmap = VEC_alloc (varinfo_t, heap, 8);
5146 vi_for_tree = pointer_map_create ();
5148 memset (&stats, 0, sizeof (stats));
5149 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
5150 shared_bitmap_eq, free);
5151 init_base_vars ();
5154 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
5155 predecessor edges. */
5157 static void
5158 remove_preds_and_fake_succs (constraint_graph_t graph)
5160 unsigned int i;
5162 /* Clear the implicit ref and address nodes from the successor
5163 lists. */
5164 for (i = 0; i < FIRST_REF_NODE; i++)
5166 if (graph->succs[i])
5167 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
5168 FIRST_REF_NODE * 2);
5171 /* Free the successor list for the non-ref nodes. */
5172 for (i = FIRST_REF_NODE; i < graph->size; i++)
5174 if (graph->succs[i])
5175 BITMAP_FREE (graph->succs[i]);
5178 /* Now reallocate the size of the successor list as, and blow away
5179 the predecessor bitmaps. */
5180 graph->size = VEC_length (varinfo_t, varmap);
5181 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
5183 free (graph->implicit_preds);
5184 graph->implicit_preds = NULL;
5185 free (graph->preds);
5186 graph->preds = NULL;
5187 bitmap_obstack_release (&predbitmap_obstack);
5190 /* Compute the set of variables we can't TBAA prune. */
5192 static void
5193 compute_tbaa_pruning (void)
5195 unsigned int size = VEC_length (varinfo_t, varmap);
5196 unsigned int i;
5197 bool any;
5199 changed_count = 0;
5200 changed = sbitmap_alloc (size);
5201 sbitmap_zero (changed);
5203 /* Mark all initial no_tbaa_pruning nodes as changed. */
5204 any = false;
5205 for (i = 0; i < size; ++i)
5207 varinfo_t ivi = get_varinfo (i);
5209 if (find (i) == i && ivi->no_tbaa_pruning)
5211 any = true;
5212 if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
5213 || VEC_length (constraint_t, graph->complex[i]) > 0)
5215 SET_BIT (changed, i);
5216 ++changed_count;
5221 while (changed_count > 0)
5223 struct topo_info *ti = init_topo_info ();
5224 ++stats.iterations;
5226 compute_topo_order (graph, ti);
5228 while (VEC_length (unsigned, ti->topo_order) != 0)
5230 bitmap_iterator bi;
5232 i = VEC_pop (unsigned, ti->topo_order);
5234 /* If this variable is not a representative, skip it. */
5235 if (find (i) != i)
5236 continue;
5238 /* If the node has changed, we need to process the complex
5239 constraints and outgoing edges again. */
5240 if (TEST_BIT (changed, i))
5242 unsigned int j;
5243 constraint_t c;
5244 VEC(constraint_t,heap) *complex = graph->complex[i];
5246 RESET_BIT (changed, i);
5247 --changed_count;
5249 /* Process the complex copy constraints. */
5250 for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
5252 if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
5254 varinfo_t lhsvi = get_varinfo (find (c->lhs.var));
5256 if (!lhsvi->no_tbaa_pruning)
5258 lhsvi->no_tbaa_pruning = true;
5259 if (!TEST_BIT (changed, lhsvi->id))
5261 SET_BIT (changed, lhsvi->id);
5262 ++changed_count;
5268 /* Propagate to all successors. */
5269 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
5271 unsigned int to = find (j);
5272 varinfo_t tovi = get_varinfo (to);
5274 /* Don't propagate to ourselves. */
5275 if (to == i)
5276 continue;
5278 if (!tovi->no_tbaa_pruning)
5280 tovi->no_tbaa_pruning = true;
5281 if (!TEST_BIT (changed, to))
5283 SET_BIT (changed, to);
5284 ++changed_count;
5291 free_topo_info (ti);
5294 sbitmap_free (changed);
5296 if (any)
5298 for (i = 0; i < size; ++i)
5300 varinfo_t ivi = get_varinfo (i);
5301 varinfo_t ivip = get_varinfo (find (i));
5303 if (ivip->no_tbaa_pruning)
5305 tree var = ivi->decl;
5307 if (TREE_CODE (var) == SSA_NAME)
5308 var = SSA_NAME_VAR (var);
5310 if (POINTER_TYPE_P (TREE_TYPE (var)))
5312 DECL_NO_TBAA_P (var) = 1;
5314 /* Tell the RTL layer that this pointer can alias
5315 anything. */
5316 DECL_POINTER_ALIAS_SET (var) = 0;
5323 /* Create points-to sets for the current function. See the comments
5324 at the start of the file for an algorithmic overview. */
5326 void
5327 compute_points_to_sets (struct alias_info *ai)
5329 struct scc_info *si;
5330 basic_block bb;
5332 timevar_push (TV_TREE_PTA);
5334 init_alias_vars ();
5335 init_alias_heapvars ();
5337 intra_create_variable_infos ();
5339 /* Now walk all statements and derive aliases. */
5340 FOR_EACH_BB (bb)
5342 block_stmt_iterator bsi;
5343 tree phi;
5345 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5347 if (is_gimple_reg (PHI_RESULT (phi)))
5349 find_func_aliases (phi);
5351 /* Update various related attributes like escaped
5352 addresses, pointer dereferences for loads and stores.
5353 This is used when creating name tags and alias
5354 sets. */
5355 update_alias_info (phi, ai);
5359 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
5361 tree stmt = bsi_stmt (bsi);
5363 find_func_aliases (stmt);
5365 /* Update various related attributes like escaped
5366 addresses, pointer dereferences for loads and stores.
5367 This is used when creating name tags and alias
5368 sets. */
5369 update_alias_info (stmt, ai);
5371 /* The information in CHANGE_DYNAMIC_TYPE_EXPR nodes has now
5372 been captured, and we can remove them. */
5373 if (TREE_CODE (stmt) == CHANGE_DYNAMIC_TYPE_EXPR)
5374 bsi_remove (&bsi, true);
5375 else
5376 bsi_next (&bsi);
5381 if (dump_file)
5383 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5384 dump_constraints (dump_file);
5387 if (dump_file)
5388 fprintf (dump_file,
5389 "\nCollapsing static cycles and doing variable "
5390 "substitution\n");
5392 init_graph (VEC_length (varinfo_t, varmap) * 2);
5394 if (dump_file)
5395 fprintf (dump_file, "Building predecessor graph\n");
5396 build_pred_graph ();
5398 if (dump_file)
5399 fprintf (dump_file, "Detecting pointer and location "
5400 "equivalences\n");
5401 si = perform_var_substitution (graph);
5403 if (dump_file)
5404 fprintf (dump_file, "Rewriting constraints and unifying "
5405 "variables\n");
5406 rewrite_constraints (graph, si);
5407 free_var_substitution_info (si);
5409 build_succ_graph ();
5410 move_complex_constraints (graph);
5412 if (dump_file)
5413 fprintf (dump_file, "Uniting pointer but not location equivalent "
5414 "variables\n");
5415 unite_pointer_equivalences (graph);
5417 if (dump_file)
5418 fprintf (dump_file, "Finding indirect cycles\n");
5419 find_indirect_cycles (graph);
5421 /* Implicit nodes and predecessors are no longer necessary at this
5422 point. */
5423 remove_preds_and_fake_succs (graph);
5425 if (dump_file)
5426 fprintf (dump_file, "Solving graph\n");
5428 solve_graph (graph);
5430 compute_tbaa_pruning ();
5432 if (dump_file)
5433 dump_sa_points_to_info (dump_file);
5435 have_alias_info = true;
5437 timevar_pop (TV_TREE_PTA);
5441 /* Delete created points-to sets. */
5443 void
5444 delete_points_to_sets (void)
5446 unsigned int i;
5448 htab_delete (shared_bitmap_table);
5449 if (dump_file && (dump_flags & TDF_STATS))
5450 fprintf (dump_file, "Points to sets created:%d\n",
5451 stats.points_to_sets_created);
5453 pointer_map_destroy (vi_for_tree);
5454 bitmap_obstack_release (&pta_obstack);
5455 VEC_free (constraint_t, heap, constraints);
5457 for (i = 0; i < graph->size; i++)
5458 VEC_free (constraint_t, heap, graph->complex[i]);
5459 free (graph->complex);
5461 free (graph->rep);
5462 free (graph->succs);
5463 free (graph->pe);
5464 free (graph->pe_rep);
5465 free (graph->indirect_cycles);
5466 free (graph);
5468 VEC_free (varinfo_t, heap, varmap);
5469 free_alloc_pool (variable_info_pool);
5470 free_alloc_pool (constraint_pool);
5471 have_alias_info = false;
5474 /* Return true if we should execute IPA PTA. */
5475 static bool
5476 gate_ipa_pta (void)
5478 return (flag_unit_at_a_time != 0
5479 && flag_ipa_pta
5480 /* Don't bother doing anything if the program has errors. */
5481 && !(errorcount || sorrycount));
5484 /* Execute the driver for IPA PTA. */
5485 static unsigned int
5486 ipa_pta_execute (void)
5488 struct cgraph_node *node;
5489 struct scc_info *si;
5491 in_ipa_mode = 1;
5492 init_alias_heapvars ();
5493 init_alias_vars ();
5495 for (node = cgraph_nodes; node; node = node->next)
5497 if (!node->analyzed || cgraph_is_master_clone (node))
5499 unsigned int varid;
5501 varid = create_function_info_for (node->decl,
5502 cgraph_node_name (node));
5503 if (node->local.externally_visible)
5505 varinfo_t fi = get_varinfo (varid);
5506 for (; fi; fi = fi->next)
5507 make_constraint_from_anything (fi);
5511 for (node = cgraph_nodes; node; node = node->next)
5513 if (node->analyzed && cgraph_is_master_clone (node))
5515 struct function *cfun = DECL_STRUCT_FUNCTION (node->decl);
5516 basic_block bb;
5517 tree old_func_decl = current_function_decl;
5518 if (dump_file)
5519 fprintf (dump_file,
5520 "Generating constraints for %s\n",
5521 cgraph_node_name (node));
5522 push_cfun (cfun);
5523 current_function_decl = node->decl;
5525 FOR_EACH_BB_FN (bb, cfun)
5527 block_stmt_iterator bsi;
5528 tree phi;
5530 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5532 if (is_gimple_reg (PHI_RESULT (phi)))
5534 find_func_aliases (phi);
5538 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5540 tree stmt = bsi_stmt (bsi);
5541 find_func_aliases (stmt);
5544 current_function_decl = old_func_decl;
5545 pop_cfun ();
5547 else
5549 /* Make point to anything. */
5553 if (dump_file)
5555 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5556 dump_constraints (dump_file);
5559 if (dump_file)
5560 fprintf (dump_file,
5561 "\nCollapsing static cycles and doing variable "
5562 "substitution:\n");
5564 init_graph (VEC_length (varinfo_t, varmap) * 2);
5565 build_pred_graph ();
5566 si = perform_var_substitution (graph);
5567 rewrite_constraints (graph, si);
5568 free_var_substitution_info (si);
5570 build_succ_graph ();
5571 move_complex_constraints (graph);
5572 unite_pointer_equivalences (graph);
5573 find_indirect_cycles (graph);
5575 /* Implicit nodes and predecessors are no longer necessary at this
5576 point. */
5577 remove_preds_and_fake_succs (graph);
5579 if (dump_file)
5580 fprintf (dump_file, "\nSolving graph\n");
5582 solve_graph (graph);
5584 if (dump_file)
5585 dump_sa_points_to_info (dump_file);
5587 in_ipa_mode = 0;
5588 delete_alias_heapvars ();
5589 delete_points_to_sets ();
5590 return 0;
5593 struct tree_opt_pass pass_ipa_pta =
5595 "pta", /* name */
5596 gate_ipa_pta, /* gate */
5597 ipa_pta_execute, /* execute */
5598 NULL, /* sub */
5599 NULL, /* next */
5600 0, /* static_pass_number */
5601 TV_IPA_PTA, /* tv_id */
5602 0, /* properties_required */
5603 0, /* properties_provided */
5604 0, /* properties_destroyed */
5605 0, /* todo_flags_start */
5606 TODO_update_ssa, /* todo_flags_finish */
5607 0 /* letter */
5610 /* Initialize the heapvar for statement mapping. */
5611 void
5612 init_alias_heapvars (void)
5614 if (!heapvar_for_stmt)
5615 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
5616 NULL);
5619 void
5620 delete_alias_heapvars (void)
5622 htab_delete (heapvar_for_stmt);
5623 heapvar_for_stmt = NULL;
5627 #include "gt-tree-ssa-structalias.h"