1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
 8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "flags.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "output.h"
34 #include "errors.h"
35 #include "diagnostic.h"
36 #include "tree.h"
37 #include "c-common.h"
38 #include "tree-flow.h"
39 #include "tree-inline.h"
40 #include "varray.h"
41 #include "c-tree.h"
42 #include "tree-gimple.h"
43 #include "hashtab.h"
44 #include "function.h"
45 #include "cgraph.h"
46 #include "tree-pass.h"
47 #include "timevar.h"
48 #include "alloc-pool.h"
49 #include "splay-tree.h"
50 #include "params.h"
51 #include "tree-ssa-structalias.h"
52 #include "cgraph.h"
53 #include "alias.h"
54 #include "pointer-set.h"
56 /* The idea behind this analyzer is to generate set constraints from the
57 program, then solve the resulting constraints in order to generate the
58 points-to sets.
60 Set constraints are a way of modeling program analysis problems that
61 involve sets. They consist of an inclusion constraint language,
62 describing the variables (each variable is a set) and operations that
63 are involved on the variables, and a set of rules that derive facts
64 from these operations. To solve a system of set constraints, you derive
65 all possible facts under the rules, which gives you the correct sets
66 as a consequence.
68 See "Efficient Field-sensitive pointer analysis for C" by "David
69 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
70 http://citeseer.ist.psu.edu/pearce04efficient.html
72 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
 73 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
74 http://citeseer.ist.psu.edu/heintze01ultrafast.html
76 There are three types of real constraint expressions, DEREF,
77 ADDRESSOF, and SCALAR. Each constraint expression consists
78 of a constraint type, a variable, and an offset.
80 SCALAR is a constraint expression type used to represent x, whether
81 it appears on the LHS or the RHS of a statement.
82 DEREF is a constraint expression type used to represent *x, whether
83 it appears on the LHS or the RHS of a statement.
84 ADDRESSOF is a constraint expression used to represent &x, whether
85 it appears on the LHS or the RHS of a statement.
87 Each pointer variable in the program is assigned an integer id, and
88 each field of a structure variable is assigned an integer id as well.
90 Structure variables are linked to their list of fields through a "next
91 field" in each variable that points to the next field in offset
92 order.
93 Each variable for a structure field has
95 1. "size", that tells the size in bits of that field.
96 2. "fullsize, that tells the size in bits of the entire structure.
97 3. "offset", that tells the offset in bits from the beginning of the
98 structure to this field.
100 Thus,
 101 struct f
 102 {
103 int a;
104 int b;
105 } foo;
106 int *bar;
108 looks like
110 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
111 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
112 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
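
   As a small illustration (the exact constraints emitted depend on the
   GIMPLE input), for pointer variables p and q of suitable type:

     p = &foo.b;   is modeled as   p = &foo.b   (ADDRESSOF on the RHS)
     q = p;        is modeled as   q = p        (SCALAR on both sides)
     q = *p;       is modeled as   q = *p       (DEREF on the RHS)
     *p = q;       is modeled as   *p = q       (DEREF on the LHS)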
115 In order to solve the system of set constraints, the following is
116 done:
118 1. Each constraint variable x has a solution set associated with it,
119 Sol(x).
121 2. Constraints are separated into direct, copy, and complex.
122 Direct constraints are ADDRESSOF constraints that require no extra
123 processing, such as P = &Q
124 Copy constraints are those of the form P = Q.
125 Complex constraints are all the constraints involving dereferences
126 and offsets (including offsetted copies).
128 3. All direct constraints of the form P = &Q are processed, such
129 that Q is added to Sol(P)
131 4. All complex constraints for a given constraint variable are stored in a
132 linked list attached to that variable's node.
134 5. A directed graph is built out of the copy constraints. Each
135 constraint variable is a node in the graph, and an edge from
136 Q to P is added for each copy constraint of the form P = Q
138 6. The graph is then walked, and solution sets are
139 propagated along the copy edges, such that an edge from Q to P
140 causes Sol(P) <- Sol(P) union Sol(Q).
142 7. As we visit each node, all complex constraints associated with
143 that node are processed by adding appropriate copy edges to the graph, or the
144 appropriate variables to the solution set.
146 8. The process of walking the graph is iterated until no solution
147 sets change.
 149 Prior to walking the graph in steps 6 and 7, we perform static
150 cycle elimination on the constraint graph, as well
151 as off-line variable substitution.
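
   As a short worked example of these steps: given the constraints
   p = &a and q = p, step 3 puts a into Sol(p), step 5 adds a copy edge
   from p to q, and the propagation in step 6 performs
   Sol(q) <- Sol(q) union Sol(p), so the solution stabilizes with
   Sol(p) = Sol(q) = { a }.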
153 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
154 on and turned into anything), but isn't. You can just see what offset
155 inside the pointed-to struct it's going to access.
157 TODO: Constant bounded arrays can be handled as if they were structs of the
158 same number of elements.
160 TODO: Modeling heap and incoming pointers becomes much better if we
161 add fields to them as we discover them, which we could do.
163 TODO: We could handle unions, but to be honest, it's probably not
164 worth the pain or slowdown. */
166 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
167 htab_t heapvar_for_stmt;
169 static bool use_field_sensitive = true;
170 static int in_ipa_mode = 0;
172 /* Used for predecessor bitmaps. */
173 static bitmap_obstack predbitmap_obstack;
175 /* Used for points-to sets. */
176 static bitmap_obstack pta_obstack;
178 /* Used for oldsolution members of variables. */
179 static bitmap_obstack oldpta_obstack;
181 /* Used for per-solver-iteration bitmaps. */
182 static bitmap_obstack iteration_obstack;
184 static unsigned int create_variable_info_for (tree, const char *);
185 typedef struct constraint_graph *constraint_graph_t;
186 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
188 DEF_VEC_P(constraint_t);
189 DEF_VEC_ALLOC_P(constraint_t,heap);
191 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
192 if (a) \
193 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
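
/* Usage sketch for the macro above (illustrative only): iterate over the
   successor set of a constraint graph node while tolerating a bitmap that
   was never allocated, as the solver loops further down in this file do:

     unsigned int i;
     bitmap_iterator bi;

     EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
       {
         ... visit successor i ...
       }
*/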
195 static struct constraint_stats
197 unsigned int total_vars;
198 unsigned int nonpointer_vars;
199 unsigned int unified_vars_static;
200 unsigned int unified_vars_dynamic;
201 unsigned int iterations;
202 unsigned int num_edges;
203 unsigned int num_implicit_edges;
204 unsigned int points_to_sets_created;
205 } stats;
207 struct variable_info
209 /* ID of this variable */
210 unsigned int id;
212 /* Name of this variable */
213 const char *name;
215 /* Tree that this variable is associated with. */
216 tree decl;
218 /* Offset of this variable, in bits, from the base variable */
219 unsigned HOST_WIDE_INT offset;
221 /* Size of the variable, in bits. */
222 unsigned HOST_WIDE_INT size;
224 /* Full size of the base variable, in bits. */
225 unsigned HOST_WIDE_INT fullsize;
227 /* A link to the variable for the next field in this structure. */
228 struct variable_info *next;
230 /* True if the variable is directly the target of a dereference.
231 This is used to track which variables are *actually* dereferenced
 232 so we can prune their points-to lists. */
233 unsigned int directly_dereferenced:1;
235 /* True if this is a variable created by the constraint analysis, such as
236 heap variables and constraints we had to break up. */
237 unsigned int is_artificial_var:1;
239 /* True if this is a special variable whose solution set should not be
240 changed. */
241 unsigned int is_special_var:1;
243 /* True for variables whose size is not known or variable. */
244 unsigned int is_unknown_size_var:1;
246 /* True for variables that have unions somewhere in them. */
247 unsigned int has_union:1;
249 /* True if this is a heap variable. */
250 unsigned int is_heap_var:1;
252 /* True if we may not use TBAA to prune references to this
253 variable. This is used for C++ placement new. */
254 unsigned int no_tbaa_pruning : 1;
256 /* Points-to set for this variable. */
257 bitmap solution;
259 /* Old points-to set for this variable. */
260 bitmap oldsolution;
262 /* Variable id this was collapsed to due to type unsafety. This
263 should be unused completely after build_succ_graph, or something
264 is broken. */
265 struct variable_info *collapsed_to;
267 typedef struct variable_info *varinfo_t;
269 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
271 /* Pool of variable info structures. */
272 static alloc_pool variable_info_pool;
274 DEF_VEC_P(varinfo_t);
276 DEF_VEC_ALLOC_P(varinfo_t, heap);
278 /* Table of variable info structures for constraint variables.
279 Indexed directly by variable info id. */
280 static VEC(varinfo_t,heap) *varmap;
282 /* Return the varmap element N */
284 static inline varinfo_t
285 get_varinfo (unsigned int n)
287 return VEC_index (varinfo_t, varmap, n);
290 /* Return the varmap element N, following the collapsed_to link. */
292 static inline varinfo_t
293 get_varinfo_fc (unsigned int n)
295 varinfo_t v = VEC_index (varinfo_t, varmap, n);
297 if (v->collapsed_to)
298 return v->collapsed_to;
299 return v;
302 /* Variable that represents the unknown pointer. */
303 static varinfo_t var_anything;
304 static tree anything_tree;
305 static unsigned int anything_id;
307 /* Variable that represents the NULL pointer. */
308 static varinfo_t var_nothing;
309 static tree nothing_tree;
310 static unsigned int nothing_id;
312 /* Variable that represents read only memory. */
313 static varinfo_t var_readonly;
314 static tree readonly_tree;
315 static unsigned int readonly_id;
317 /* Variable that represents integers. This is used for when people do things
318 like &0->a.b. */
319 static varinfo_t var_integer;
320 static tree integer_tree;
321 static unsigned int integer_id;
323 /* Lookup a heap var for FROM, and return it if we find one. */
325 static tree
326 heapvar_lookup (tree from)
328 struct tree_map *h, in;
329 in.base.from = from;
331 h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
332 htab_hash_pointer (from));
333 if (h)
334 return h->to;
335 return NULL_TREE;
338 /* Insert a mapping FROM->TO in the heap var for statement
339 hashtable. */
341 static void
342 heapvar_insert (tree from, tree to)
344 struct tree_map *h;
345 void **loc;
347 h = GGC_NEW (struct tree_map);
348 h->hash = htab_hash_pointer (from);
349 h->base.from = from;
350 h->to = to;
351 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
352 *(struct tree_map **) loc = h;
 355 /* Return a new variable info structure for a variable named NAME,
 356 associated with tree T and using ID as its variable id. */
358 static varinfo_t
359 new_var_info (tree t, unsigned int id, const char *name)
361 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
362 tree var;
364 ret->id = id;
365 ret->name = name;
366 ret->decl = t;
367 ret->directly_dereferenced = false;
368 ret->is_artificial_var = false;
369 ret->is_heap_var = false;
370 ret->is_special_var = false;
371 ret->is_unknown_size_var = false;
372 ret->has_union = false;
373 var = t;
374 if (TREE_CODE (var) == SSA_NAME)
375 var = SSA_NAME_VAR (var);
376 ret->no_tbaa_pruning = (DECL_P (var)
377 && POINTER_TYPE_P (TREE_TYPE (var))
378 && DECL_NO_TBAA_P (var));
379 ret->solution = BITMAP_ALLOC (&pta_obstack);
380 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
381 ret->next = NULL;
382 ret->collapsed_to = NULL;
383 return ret;
386 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
388 /* An expression that appears in a constraint. */
390 struct constraint_expr
392 /* Constraint type. */
393 constraint_expr_type type;
395 /* Variable we are referring to in the constraint. */
396 unsigned int var;
398 /* Offset, in bits, of this constraint from the beginning of
399 variables it ends up referring to.
401 IOW, in a deref constraint, we would deref, get the result set,
402 then add OFFSET to each member. */
403 unsigned HOST_WIDE_INT offset;
406 typedef struct constraint_expr ce_s;
407 DEF_VEC_O(ce_s);
408 DEF_VEC_ALLOC_O(ce_s, heap);
409 static void get_constraint_for (tree, VEC(ce_s, heap) **);
410 static void do_deref (VEC (ce_s, heap) **);
412 /* Our set constraints are made up of two constraint expressions, one
413 LHS, and one RHS.
415 As described in the introduction, our set constraints each represent an
 416 operation between set valued variables. */
418 struct constraint
420 struct constraint_expr lhs;
421 struct constraint_expr rhs;
424 /* List of constraints that we use to build the constraint graph from. */
426 static VEC(constraint_t,heap) *constraints;
427 static alloc_pool constraint_pool;
430 DEF_VEC_I(int);
431 DEF_VEC_ALLOC_I(int, heap);
433 /* The constraint graph is represented as an array of bitmaps
434 containing successor nodes. */
436 struct constraint_graph
438 /* Size of this graph, which may be different than the number of
439 nodes in the variable map. */
440 unsigned int size;
442 /* Explicit successors of each node. */
443 bitmap *succs;
445 /* Implicit predecessors of each node (Used for variable
446 substitution). */
447 bitmap *implicit_preds;
449 /* Explicit predecessors of each node (Used for variable substitution). */
450 bitmap *preds;
452 /* Indirect cycle representatives, or -1 if the node has no indirect
453 cycles. */
454 int *indirect_cycles;
456 /* Representative node for a node. rep[a] == a unless the node has
457 been unified. */
458 unsigned int *rep;
460 /* Equivalence class representative for a label. This is used for
461 variable substitution. */
462 int *eq_rep;
 464 /* Pointer equivalence node for a node. If pe[a] != a, then node a
465 can be united with node pe[a] after initial constraint building. */
466 unsigned int *pe;
468 /* Pointer equivalence representative for a label. This is used to
469 handle nodes that are pointer equivalent but not location
470 equivalent. We can unite these once the addressof constraints
471 are transformed into initial points-to sets. */
472 int *pe_rep;
474 /* Pointer equivalence label for each node, used during variable
475 substitution. */
476 unsigned int *pointer_label;
478 /* Location equivalence label for each node, used during location
479 equivalence finding. */
480 unsigned int *loc_label;
482 /* Pointed-by set for each node, used during location equivalence
483 finding. This is pointed-by rather than pointed-to, because it
484 is constructed using the predecessor graph. */
485 bitmap *pointed_by;
487 /* Points to sets for pointer equivalence. This is *not* the actual
488 points-to sets for nodes. */
489 bitmap *points_to;
491 /* Bitmap of nodes where the bit is set if the node is a direct
492 node. Used for variable substitution. */
493 sbitmap direct_nodes;
495 /* Bitmap of nodes where the bit is set if the node is address
496 taken. Used for variable substitution. */
497 bitmap address_taken;
499 /* True if points_to bitmap for this node is stored in the hash
500 table. */
501 sbitmap pt_used;
503 /* Number of incoming edges remaining to be processed by pointer
504 equivalence.
505 Used for variable substitution. */
506 unsigned int *number_incoming;
509 /* Vector of complex constraints for each graph node. Complex
510 constraints are those involving dereferences or offsets that are
511 not 0. */
512 VEC(constraint_t,heap) **complex;
515 static constraint_graph_t graph;
517 /* During variable substitution and the offline version of indirect
518 cycle finding, we create nodes to represent dereferences and
519 address taken constraints. These represent where these start and
520 end. */
521 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
522 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
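
/* Illustrative note: with V == VEC_length (varinfo_t, varmap), the ordinary
   variable nodes occupy ids 0 .. V-1, the reference node standing for "*x"
   of variable x is FIRST_REF_NODE + x, and LAST_REF_NODE is therefore
   2*V - 1.  */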
524 /* Return the representative node for NODE, if NODE has been unioned
525 with another NODE.
526 This function performs path compression along the way to finding
527 the representative. */
529 static unsigned int
530 find (unsigned int node)
532 gcc_assert (node < graph->size);
533 if (graph->rep[node] != node)
534 return graph->rep[node] = find (graph->rep[node]);
535 return node;
 538 /* Union the FROM node into the TO node.
539 Note that at some point in the future, we may want to do
540 union-by-rank, in which case we are going to have to return the
541 node we unified to. */
543 static bool
544 unite (unsigned int to, unsigned int from)
546 gcc_assert (to < graph->size && from < graph->size);
547 if (to != from && graph->rep[from] != to)
549 graph->rep[from] = to;
550 return true;
552 return false;
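
/* A small trace of the union-find pair above (illustrative only):
   unite (3, 5) sets rep[5] = 3, and a later unite (1, 3) sets rep[3] = 1.
   A subsequent find (5) then walks 5 -> 3 -> 1, returns 1, and
   path-compresses rep[5] down to 1 so later lookups are cheap.  */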
555 /* Create a new constraint consisting of LHS and RHS expressions. */
557 static constraint_t
558 new_constraint (const struct constraint_expr lhs,
559 const struct constraint_expr rhs)
561 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
562 ret->lhs = lhs;
563 ret->rhs = rhs;
564 return ret;
567 /* Print out constraint C to FILE. */
569 void
570 dump_constraint (FILE *file, constraint_t c)
572 if (c->lhs.type == ADDRESSOF)
573 fprintf (file, "&");
574 else if (c->lhs.type == DEREF)
575 fprintf (file, "*");
576 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
577 if (c->lhs.offset != 0)
578 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
579 fprintf (file, " = ");
580 if (c->rhs.type == ADDRESSOF)
581 fprintf (file, "&");
582 else if (c->rhs.type == DEREF)
583 fprintf (file, "*");
584 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
585 if (c->rhs.offset != 0)
586 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
587 fprintf (file, "\n");
590 /* Print out constraint C to stderr. */
592 void
593 debug_constraint (constraint_t c)
595 dump_constraint (stderr, c);
598 /* Print out all constraints to FILE */
600 void
601 dump_constraints (FILE *file)
603 int i;
604 constraint_t c;
605 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
606 dump_constraint (file, c);
609 /* Print out all constraints to stderr. */
611 void
612 debug_constraints (void)
614 dump_constraints (stderr);
617 /* SOLVER FUNCTIONS
619 The solver is a simple worklist solver, that works on the following
620 algorithm:
622 sbitmap changed_nodes = all zeroes;
623 changed_count = 0;
624 For each node that is not already collapsed:
625 changed_count++;
626 set bit in changed nodes
628 while (changed_count > 0)
630 compute topological ordering for constraint graph
632 find and collapse cycles in the constraint graph (updating
633 changed if necessary)
635 for each node (n) in the graph in topological order:
636 changed_count--;
638 Process each complex constraint associated with the node,
639 updating changed if necessary.
641 For each outgoing edge from n, propagate the solution from n to
642 the destination of the edge, updating changed as necessary.
644 } */
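
/* A minimal sketch of one propagation step of the algorithm above
   (illustrative only; the real worklist driver appears later in this file
   and additionally collapses cycles and visits nodes in topological order):

     unsigned int j;
     bitmap_iterator bi;

     EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, j, bi)
       {
         unsigned int t = find (j);

         if (bitmap_ior_into (get_varinfo (t)->solution,
                              get_varinfo (n)->solution)
             && !TEST_BIT (changed, t))
           {
             SET_BIT (changed, t);
             changed_count++;
           }
       }
*/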
646 /* Return true if two constraint expressions A and B are equal. */
648 static bool
649 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
651 return a.type == b.type && a.var == b.var && a.offset == b.offset;
654 /* Return true if constraint expression A is less than constraint expression
655 B. This is just arbitrary, but consistent, in order to give them an
656 ordering. */
658 static bool
659 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
661 if (a.type == b.type)
663 if (a.var == b.var)
664 return a.offset < b.offset;
665 else
666 return a.var < b.var;
668 else
669 return a.type < b.type;
672 /* Return true if constraint A is less than constraint B. This is just
673 arbitrary, but consistent, in order to give them an ordering. */
675 static bool
676 constraint_less (const constraint_t a, const constraint_t b)
678 if (constraint_expr_less (a->lhs, b->lhs))
679 return true;
680 else if (constraint_expr_less (b->lhs, a->lhs))
681 return false;
682 else
683 return constraint_expr_less (a->rhs, b->rhs);
686 /* Return true if two constraints A and B are equal. */
688 static bool
689 constraint_equal (struct constraint a, struct constraint b)
691 return constraint_expr_equal (a.lhs, b.lhs)
692 && constraint_expr_equal (a.rhs, b.rhs);
696 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
698 static constraint_t
699 constraint_vec_find (VEC(constraint_t,heap) *vec,
700 struct constraint lookfor)
702 unsigned int place;
703 constraint_t found;
705 if (vec == NULL)
706 return NULL;
708 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
709 if (place >= VEC_length (constraint_t, vec))
710 return NULL;
711 found = VEC_index (constraint_t, vec, place);
712 if (!constraint_equal (*found, lookfor))
713 return NULL;
714 return found;
717 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
719 static void
720 constraint_set_union (VEC(constraint_t,heap) **to,
721 VEC(constraint_t,heap) **from)
723 int i;
724 constraint_t c;
726 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
728 if (constraint_vec_find (*to, *c) == NULL)
730 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
731 constraint_less);
732 VEC_safe_insert (constraint_t, heap, *to, place, c);
737 /* Take a solution set SET, add OFFSET to each member of the set, and
738 overwrite SET with the result when done. */
740 static void
741 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
743 bitmap result = BITMAP_ALLOC (&iteration_obstack);
744 unsigned int i;
745 bitmap_iterator bi;
747 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
749 /* If this is a properly sized variable, only add offset if it's
750 less than end. Otherwise, it is globbed to a single
751 variable. */
753 if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
755 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
756 varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
757 if (!v)
758 continue;
759 bitmap_set_bit (result, v->id);
761 else if (get_varinfo (i)->is_artificial_var
762 || get_varinfo (i)->has_union
763 || get_varinfo (i)->is_unknown_size_var)
765 bitmap_set_bit (result, i);
769 bitmap_copy (set, result);
770 BITMAP_FREE (result);
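
/* Example of the offsetting above (a sketch using the struct f layout from
   the introductory comment): if SET contains foo.a (offset 0, fullsize 64)
   and OFFSET is 32, first_vi_for_offset finds foo.b at offset 32, so the
   result contains foo.b instead.  A properly sized variable whose
   offset + OFFSET reaches past fullsize contributes nothing to the
   result.  */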
773 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
774 process. */
776 static bool
777 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
779 if (inc == 0)
780 return bitmap_ior_into (to, from);
781 else
783 bitmap tmp;
784 bool res;
786 tmp = BITMAP_ALLOC (&iteration_obstack);
787 bitmap_copy (tmp, from);
788 solution_set_add (tmp, inc);
789 res = bitmap_ior_into (to, tmp);
790 BITMAP_FREE (tmp);
791 return res;
795 /* Insert constraint C into the list of complex constraints for graph
796 node VAR. */
798 static void
799 insert_into_complex (constraint_graph_t graph,
800 unsigned int var, constraint_t c)
802 VEC (constraint_t, heap) *complex = graph->complex[var];
803 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
804 constraint_less);
806 /* Only insert constraints that do not already exist. */
807 if (place >= VEC_length (constraint_t, complex)
808 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
809 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
813 /* Condense two variable nodes into a single variable node, by moving
 814 all associated info from FROM to TO. */
816 static void
817 merge_node_constraints (constraint_graph_t graph, unsigned int to,
818 unsigned int from)
820 unsigned int i;
821 constraint_t c;
823 gcc_assert (find (from) == to);
825 /* Move all complex constraints from src node into to node */
826 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
828 /* In complex constraints for node src, we may have either
 829 a = *src or *src = a, or an offsetted constraint, which are
830 always added to the rhs node's constraints. */
832 if (c->rhs.type == DEREF)
833 c->rhs.var = to;
834 else if (c->lhs.type == DEREF)
835 c->lhs.var = to;
836 else
837 c->rhs.var = to;
839 constraint_set_union (&graph->complex[to], &graph->complex[from]);
840 VEC_free (constraint_t, heap, graph->complex[from]);
841 graph->complex[from] = NULL;
845 /* Remove edges involving NODE from GRAPH. */
847 static void
848 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
850 if (graph->succs[node])
851 BITMAP_FREE (graph->succs[node]);
854 /* Merge GRAPH nodes FROM and TO into node TO. */
856 static void
857 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
858 unsigned int from)
860 if (graph->indirect_cycles[from] != -1)
862 /* If we have indirect cycles with the from node, and we have
863 none on the to node, the to node has indirect cycles from the
864 from node now that they are unified.
865 If indirect cycles exist on both, unify the nodes that they
866 are in a cycle with, since we know they are in a cycle with
867 each other. */
868 if (graph->indirect_cycles[to] == -1)
869 graph->indirect_cycles[to] = graph->indirect_cycles[from];
872 /* Merge all the successor edges. */
873 if (graph->succs[from])
875 if (!graph->succs[to])
876 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
877 bitmap_ior_into (graph->succs[to],
878 graph->succs[from]);
881 clear_edges_for_node (graph, from);
 885 /* Add an implicit predecessor edge to GRAPH, making FROM an implicit
 886 predecessor of TO, if such an edge doesn't exist already. */
888 static void
889 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
890 unsigned int from)
892 if (to == from)
893 return;
895 if (!graph->implicit_preds[to])
896 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
898 if (!bitmap_bit_p (graph->implicit_preds[to], from))
900 stats.num_implicit_edges++;
901 bitmap_set_bit (graph->implicit_preds[to], from);
 905 /* Add a predecessor graph edge to GRAPH, making FROM a predecessor
 906 of TO, if it doesn't exist in the graph already. */
909 static void
910 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
911 unsigned int from)
913 if (!graph->preds[to])
914 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
915 if (!bitmap_bit_p (graph->preds[to], from))
916 bitmap_set_bit (graph->preds[to], from);
919 /* Add a graph edge to GRAPH, going from FROM to TO if
920 it doesn't exist in the graph already.
921 Return false if the edge already existed, true otherwise. */
923 static bool
924 add_graph_edge (constraint_graph_t graph, unsigned int to,
925 unsigned int from)
927 if (to == from)
929 return false;
931 else
933 bool r = false;
935 if (!graph->succs[from])
936 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
937 if (!bitmap_bit_p (graph->succs[from], to))
939 r = true;
940 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
941 stats.num_edges++;
942 bitmap_set_bit (graph->succs[from], to);
944 return r;
 949 /* Return true if GRAPH contains the edge DEST -> SRC. */
951 static bool
952 valid_graph_edge (constraint_graph_t graph, unsigned int src,
953 unsigned int dest)
955 return (graph->succs[dest]
956 && bitmap_bit_p (graph->succs[dest], src));
959 /* Initialize the constraint graph structure to contain SIZE nodes. */
961 static void
962 init_graph (unsigned int size)
964 unsigned int j;
966 graph = XCNEW (struct constraint_graph);
967 graph->size = size;
968 graph->succs = XCNEWVEC (bitmap, graph->size);
969 graph->indirect_cycles = XNEWVEC (int, graph->size);
970 graph->rep = XNEWVEC (unsigned int, graph->size);
971 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
972 graph->pe = XNEWVEC (unsigned int, graph->size);
973 graph->pe_rep = XNEWVEC (int, graph->size);
975 for (j = 0; j < graph->size; j++)
977 graph->rep[j] = j;
978 graph->pe[j] = j;
979 graph->pe_rep[j] = -1;
980 graph->indirect_cycles[j] = -1;
984 /* Build the constraint graph, adding only predecessor edges right now. */
986 static void
987 build_pred_graph (void)
989 int i;
990 constraint_t c;
991 unsigned int j;
993 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
994 graph->preds = XCNEWVEC (bitmap, graph->size);
995 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
996 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
997 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
998 graph->points_to = XCNEWVEC (bitmap, graph->size);
999 graph->eq_rep = XNEWVEC (int, graph->size);
1000 graph->direct_nodes = sbitmap_alloc (graph->size);
1001 graph->pt_used = sbitmap_alloc (graph->size);
1002 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1003 graph->number_incoming = XCNEWVEC (unsigned int, graph->size);
1004 sbitmap_zero (graph->direct_nodes);
1005 sbitmap_zero (graph->pt_used);
1007 for (j = 0; j < FIRST_REF_NODE; j++)
1009 if (!get_varinfo (j)->is_special_var)
1010 SET_BIT (graph->direct_nodes, j);
1013 for (j = 0; j < graph->size; j++)
1014 graph->eq_rep[j] = -1;
1016 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1017 graph->indirect_cycles[j] = -1;
1019 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1021 struct constraint_expr lhs = c->lhs;
1022 struct constraint_expr rhs = c->rhs;
1023 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
1024 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
1026 if (lhs.type == DEREF)
1028 /* *x = y. */
1029 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1030 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1032 else if (rhs.type == DEREF)
1034 /* x = *y */
1035 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1036 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1037 else
1038 RESET_BIT (graph->direct_nodes, lhsvar);
1040 else if (rhs.type == ADDRESSOF)
1042 /* x = &y */
1043 if (graph->points_to[lhsvar] == NULL)
1044 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1045 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1047 if (graph->pointed_by[rhsvar] == NULL)
1048 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1049 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1051 /* Implicitly, *x = y */
1052 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1054 RESET_BIT (graph->direct_nodes, rhsvar);
1055 bitmap_set_bit (graph->address_taken, rhsvar);
1057 else if (lhsvar > anything_id
1058 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1060 /* x = y */
1061 add_pred_graph_edge (graph, lhsvar, rhsvar);
1062 /* Implicitly, *x = *y */
1063 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1064 FIRST_REF_NODE + rhsvar);
1066 else if (lhs.offset != 0 || rhs.offset != 0)
1068 if (rhs.offset != 0)
1069 RESET_BIT (graph->direct_nodes, lhs.var);
1070 else if (lhs.offset != 0)
1071 RESET_BIT (graph->direct_nodes, rhs.var);
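
/* Worked example for the predecessor graph built above (sketch): for the
   constraints p = q and r = *p, we add the explicit predecessor edge
   q -> p, the implicit edge *q -> *p (using the REF nodes
   FIRST_REF_NODE + q and FIRST_REF_NODE + p), and the explicit edge
   *p -> r.  A constraint p = &a additionally records a in
   graph->points_to[p], records p in graph->pointed_by[a], adds the
   implicit edge a -> *p, marks a as address taken and clears its
   direct_nodes bit.  */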
1076 /* Build the constraint graph, adding successor edges. */
1078 static void
1079 build_succ_graph (void)
1081 int i;
1082 constraint_t c;
1084 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1086 struct constraint_expr lhs;
1087 struct constraint_expr rhs;
1088 unsigned int lhsvar;
1089 unsigned int rhsvar;
1091 if (!c)
1092 continue;
1094 lhs = c->lhs;
1095 rhs = c->rhs;
1096 lhsvar = find (get_varinfo_fc (lhs.var)->id);
1097 rhsvar = find (get_varinfo_fc (rhs.var)->id);
1099 if (lhs.type == DEREF)
1101 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1102 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1104 else if (rhs.type == DEREF)
1106 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1107 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1109 else if (rhs.type == ADDRESSOF)
1111 /* x = &y */
1112 gcc_assert (find (get_varinfo_fc (rhs.var)->id)
1113 == get_varinfo_fc (rhs.var)->id);
1114 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1116 else if (lhsvar > anything_id
1117 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1119 add_graph_edge (graph, lhsvar, rhsvar);
1125 /* Changed variables on the last iteration. */
1126 static unsigned int changed_count;
1127 static sbitmap changed;
1129 DEF_VEC_I(unsigned);
1130 DEF_VEC_ALLOC_I(unsigned,heap);
1133 /* Strongly Connected Component visitation info. */
1135 struct scc_info
1137 sbitmap visited;
1138 sbitmap deleted;
1139 unsigned int *dfs;
1140 unsigned int *node_mapping;
1141 int current_index;
1142 VEC(unsigned,heap) *scc_stack;
1146 /* Recursive routine to find strongly connected components in GRAPH.
1147 SI is the SCC info to store the information in, and N is the id of current
1148 graph node we are processing.
1150 This is Tarjan's strongly connected component finding algorithm, as
1151 modified by Nuutila to keep only non-root nodes on the stack.
 1152 The algorithm can be found in "On finding the strongly connected
 1153 components in a directed graph" by Esko Nuutila and Eljas
1154 Soisalon-Soininen, in Information Processing Letters volume 49,
1155 number 1, pages 9-14. */
1157 static void
1158 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1160 unsigned int i;
1161 bitmap_iterator bi;
1162 unsigned int my_dfs;
1164 SET_BIT (si->visited, n);
1165 si->dfs[n] = si->current_index ++;
1166 my_dfs = si->dfs[n];
1168 /* Visit all the successors. */
1169 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1171 unsigned int w;
1173 if (i > LAST_REF_NODE)
1174 break;
1176 w = find (i);
1177 if (TEST_BIT (si->deleted, w))
1178 continue;
1180 if (!TEST_BIT (si->visited, w))
1181 scc_visit (graph, si, w);
1183 unsigned int t = find (w);
1184 unsigned int nnode = find (n);
1185 gcc_assert (nnode == n);
1187 if (si->dfs[t] < si->dfs[nnode])
1188 si->dfs[n] = si->dfs[t];
1192 /* See if any components have been identified. */
1193 if (si->dfs[n] == my_dfs)
1195 if (VEC_length (unsigned, si->scc_stack) > 0
1196 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1198 bitmap scc = BITMAP_ALLOC (NULL);
1199 bool have_ref_node = n >= FIRST_REF_NODE;
1200 unsigned int lowest_node;
1201 bitmap_iterator bi;
1203 bitmap_set_bit (scc, n);
1205 while (VEC_length (unsigned, si->scc_stack) != 0
1206 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1208 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1210 bitmap_set_bit (scc, w);
1211 if (w >= FIRST_REF_NODE)
1212 have_ref_node = true;
1215 lowest_node = bitmap_first_set_bit (scc);
1216 gcc_assert (lowest_node < FIRST_REF_NODE);
1218 /* Collapse the SCC nodes into a single node, and mark the
1219 indirect cycles. */
1220 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1222 if (i < FIRST_REF_NODE)
1224 if (unite (lowest_node, i))
1225 unify_nodes (graph, lowest_node, i, false);
1227 else
1229 unite (lowest_node, i);
1230 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1234 SET_BIT (si->deleted, n);
1236 else
1237 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1240 /* Unify node FROM into node TO, updating the changed count if
1241 necessary when UPDATE_CHANGED is true. */
1243 static void
1244 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1245 bool update_changed)
1248 gcc_assert (to != from && find (to) == to);
1249 if (dump_file && (dump_flags & TDF_DETAILS))
1250 fprintf (dump_file, "Unifying %s to %s\n",
1251 get_varinfo (from)->name,
1252 get_varinfo (to)->name);
1254 if (update_changed)
1255 stats.unified_vars_dynamic++;
1256 else
1257 stats.unified_vars_static++;
1259 merge_graph_nodes (graph, to, from);
1260 merge_node_constraints (graph, to, from);
1262 if (get_varinfo (from)->no_tbaa_pruning)
1263 get_varinfo (to)->no_tbaa_pruning = true;
1265 /* Mark TO as changed if FROM was changed. If TO was already marked
1266 as changed, decrease the changed count. */
1268 if (update_changed && TEST_BIT (changed, from))
1270 RESET_BIT (changed, from);
1271 if (!TEST_BIT (changed, to))
1272 SET_BIT (changed, to);
1273 else
1275 gcc_assert (changed_count > 0);
1276 changed_count--;
1280 /* If the solution changes because of the merging, we need to mark
1281 the variable as changed. */
1282 if (bitmap_ior_into (get_varinfo (to)->solution,
1283 get_varinfo (from)->solution))
1285 if (update_changed && !TEST_BIT (changed, to))
1287 SET_BIT (changed, to);
1288 changed_count++;
1292 BITMAP_FREE (get_varinfo (from)->solution);
1293 BITMAP_FREE (get_varinfo (from)->oldsolution);
1295 if (stats.iterations > 0)
1297 BITMAP_FREE (get_varinfo (to)->oldsolution);
1298 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
1301 if (valid_graph_edge (graph, to, to))
1303 if (graph->succs[to])
1304 bitmap_clear_bit (graph->succs[to], to);
1308 /* Information needed to compute the topological ordering of a graph. */
1310 struct topo_info
1312 /* sbitmap of visited nodes. */
1313 sbitmap visited;
1314 /* Array that stores the topological order of the graph, *in
1315 reverse*. */
1316 VEC(unsigned,heap) *topo_order;
1320 /* Initialize and return a topological info structure. */
1322 static struct topo_info *
1323 init_topo_info (void)
1325 size_t size = graph->size;
1326 struct topo_info *ti = XNEW (struct topo_info);
1327 ti->visited = sbitmap_alloc (size);
1328 sbitmap_zero (ti->visited);
1329 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1330 return ti;
1334 /* Free the topological sort info pointed to by TI. */
1336 static void
1337 free_topo_info (struct topo_info *ti)
1339 sbitmap_free (ti->visited);
1340 VEC_free (unsigned, heap, ti->topo_order);
1341 free (ti);
1344 /* Visit the graph in topological order, and store the order in the
1345 topo_info structure. */
1347 static void
1348 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1349 unsigned int n)
1351 bitmap_iterator bi;
1352 unsigned int j;
1354 SET_BIT (ti->visited, n);
1356 if (graph->succs[n])
1357 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1359 if (!TEST_BIT (ti->visited, j))
1360 topo_visit (graph, ti, j);
1363 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1366 /* Return true if variable N + OFFSET is a legal field of N. */
1368 static bool
1369 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1371 varinfo_t ninfo = get_varinfo (n);
1373 /* For things we've globbed to single variables, any offset into the
1374 variable acts like the entire variable, so that it becomes offset
1375 0. */
1376 if (ninfo->is_special_var
1377 || ninfo->is_artificial_var
1378 || ninfo->is_unknown_size_var)
1380 *offset = 0;
1381 return true;
1383 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1386 /* Process a constraint C that represents x = *y, using DELTA as the
1387 starting solution. */
1389 static void
1390 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1391 bitmap delta)
1393 unsigned int lhs = c->lhs.var;
1394 bool flag = false;
1395 bitmap sol = get_varinfo (lhs)->solution;
1396 unsigned int j;
1397 bitmap_iterator bi;
1399 if (bitmap_bit_p (delta, anything_id))
1401 flag = !bitmap_bit_p (sol, anything_id);
1402 if (flag)
1403 bitmap_set_bit (sol, anything_id);
1404 goto done;
1406 /* For each variable j in delta (Sol(y)), add
1407 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1408 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1410 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
1411 if (type_safe (j, &roffset))
1413 varinfo_t v;
1414 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1415 unsigned int t;
1417 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1418 if (!v)
1419 continue;
1420 t = find (v->id);
1422 /* Adding edges from the special vars is pointless.
1423 They don't have sets that can change. */
1424 if (get_varinfo (t) ->is_special_var)
1425 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1426 else if (add_graph_edge (graph, lhs, t))
1427 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1429 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1430 fprintf (dump_file, "Untypesafe usage in do_sd_constraint\n");
1434 done:
1435 /* If the LHS solution changed, mark the var as changed. */
1436 if (flag)
1438 get_varinfo (lhs)->solution = sol;
1439 if (!TEST_BIT (changed, lhs))
1441 SET_BIT (changed, lhs);
1442 changed_count++;
1447 /* Process a constraint C that represents *x = y. */
1449 static void
1450 do_ds_constraint (constraint_t c, bitmap delta)
1452 unsigned int rhs = c->rhs.var;
1453 bitmap sol = get_varinfo (rhs)->solution;
1454 unsigned int j;
1455 bitmap_iterator bi;
1457 if (bitmap_bit_p (sol, anything_id))
1459 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1461 varinfo_t jvi = get_varinfo (j);
1462 unsigned int t;
1463 unsigned int loff = c->lhs.offset;
1464 unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
1465 varinfo_t v;
1467 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1468 if (!v)
1469 continue;
1470 t = find (v->id);
1472 if (!bitmap_bit_p (get_varinfo (t)->solution, anything_id))
1474 bitmap_set_bit (get_varinfo (t)->solution, anything_id);
1475 if (!TEST_BIT (changed, t))
1477 SET_BIT (changed, t);
1478 changed_count++;
1482 return;
1485 /* For each member j of delta (Sol(x)), add an edge from y to j and
1486 union Sol(y) into Sol(j) */
1487 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1489 unsigned HOST_WIDE_INT loff = c->lhs.offset;
1490 if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
1492 varinfo_t v;
1493 unsigned int t;
1494 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1495 bitmap tmp;
1497 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1498 if (!v)
1499 continue;
1500 t = find (v->id);
1501 tmp = get_varinfo (t)->solution;
1503 if (set_union_with_increment (tmp, sol, 0))
1505 get_varinfo (t)->solution = tmp;
1506 if (t == rhs)
1507 sol = get_varinfo (rhs)->solution;
1508 if (!TEST_BIT (changed, t))
1510 SET_BIT (changed, t);
1511 changed_count++;
1515 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1516 fprintf (dump_file, "Untypesafe usage in do_ds_constraint\n");
 1520 /* Handle a non-simple constraint (simple meaning it requires no
 1521 iteration): IE *x = &y, x = *y, *x = y, and x = y with offsets involved. */
1523 static void
1524 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1526 if (c->lhs.type == DEREF)
1528 if (c->rhs.type == ADDRESSOF)
1530 gcc_unreachable();
1532 else
1534 /* *x = y */
1535 do_ds_constraint (c, delta);
1538 else if (c->rhs.type == DEREF)
1540 /* x = *y */
1541 if (!(get_varinfo (c->lhs.var)->is_special_var))
1542 do_sd_constraint (graph, c, delta);
1544 else
1546 bitmap tmp;
1547 bitmap solution;
1548 bool flag = false;
1550 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1551 solution = get_varinfo (c->rhs.var)->solution;
1552 tmp = get_varinfo (c->lhs.var)->solution;
1554 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1556 if (flag)
1558 get_varinfo (c->lhs.var)->solution = tmp;
1559 if (!TEST_BIT (changed, c->lhs.var))
1561 SET_BIT (changed, c->lhs.var);
1562 changed_count++;
1568 /* Initialize and return a new SCC info structure. */
1570 static struct scc_info *
1571 init_scc_info (size_t size)
1573 struct scc_info *si = XNEW (struct scc_info);
1574 size_t i;
1576 si->current_index = 0;
1577 si->visited = sbitmap_alloc (size);
1578 sbitmap_zero (si->visited);
1579 si->deleted = sbitmap_alloc (size);
1580 sbitmap_zero (si->deleted);
1581 si->node_mapping = XNEWVEC (unsigned int, size);
1582 si->dfs = XCNEWVEC (unsigned int, size);
1584 for (i = 0; i < size; i++)
1585 si->node_mapping[i] = i;
1587 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1588 return si;
1591 /* Free an SCC info structure pointed to by SI */
1593 static void
1594 free_scc_info (struct scc_info *si)
1596 sbitmap_free (si->visited);
1597 sbitmap_free (si->deleted);
1598 free (si->node_mapping);
1599 free (si->dfs);
1600 VEC_free (unsigned, heap, si->scc_stack);
1601 free (si);
1605 /* Find indirect cycles in GRAPH that occur, using strongly connected
1606 components, and note them in the indirect cycles map.
1608 This technique comes from Ben Hardekopf and Calvin Lin,
1609 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1610 Lines of Code", submitted to PLDI 2007. */
1612 static void
1613 find_indirect_cycles (constraint_graph_t graph)
1615 unsigned int i;
1616 unsigned int size = graph->size;
1617 struct scc_info *si = init_scc_info (size);
1619 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1620 if (!TEST_BIT (si->visited, i) && find (i) == i)
1621 scc_visit (graph, si, i);
1623 free_scc_info (si);
1626 /* Compute a topological ordering for GRAPH, and store the result in the
1627 topo_info structure TI. */
1629 static void
1630 compute_topo_order (constraint_graph_t graph,
1631 struct topo_info *ti)
1633 unsigned int i;
1634 unsigned int size = graph->size;
1636 for (i = 0; i != size; ++i)
1637 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1638 topo_visit (graph, ti, i);
 1641 /* Structure used for hash value numbering of pointer equivalence
1642 classes. */
1644 typedef struct equiv_class_label
1646 unsigned int equivalence_class;
1647 bitmap labels;
1648 hashval_t hashcode;
1649 } *equiv_class_label_t;
1650 typedef const struct equiv_class_label *const_equiv_class_label_t;
1652 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1653 classes. */
1654 static htab_t pointer_equiv_class_table;
1656 /* A hashtable for mapping a bitmap of labels->location equivalence
1657 classes. */
1658 static htab_t location_equiv_class_table;
 1660 /* Hash function for an equiv_class_label_t. */
1662 static hashval_t
1663 equiv_class_label_hash (const void *p)
1665 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1666 return ecl->hashcode;
1669 /* Equality function for two equiv_class_label_t's. */
1671 static int
1672 equiv_class_label_eq (const void *p1, const void *p2)
1674 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1675 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1676 return bitmap_equal_p (eql1->labels, eql2->labels);
 1679 /* Look up an equivalence class in TABLE by the bitmap of LABELS it
1680 contains. */
1682 static unsigned int
1683 equiv_class_lookup (htab_t table, bitmap labels)
1685 void **slot;
1686 struct equiv_class_label ecl;
1688 ecl.labels = labels;
1689 ecl.hashcode = bitmap_hash (labels);
1691 slot = htab_find_slot_with_hash (table, &ecl,
1692 ecl.hashcode, NO_INSERT);
1693 if (!slot)
1694 return 0;
1695 else
1696 return ((equiv_class_label_t) *slot)->equivalence_class;
1700 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1701 to TABLE. */
1703 static void
1704 equiv_class_add (htab_t table, unsigned int equivalence_class,
1705 bitmap labels)
1707 void **slot;
1708 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1710 ecl->labels = labels;
1711 ecl->equivalence_class = equivalence_class;
1712 ecl->hashcode = bitmap_hash (labels);
1714 slot = htab_find_slot_with_hash (table, ecl,
1715 ecl->hashcode, INSERT);
1716 gcc_assert (!*slot);
1717 *slot = (void *) ecl;
1720 /* Perform offline variable substitution.
1722 This is a worst case quadratic time way of identifying variables
1723 that must have equivalent points-to sets, including those caused by
1724 static cycles, and single entry subgraphs, in the constraint graph.
1726 The technique is described in "Exploiting Pointer and Location
 1727 Equivalence to Optimize Pointer Analysis", in the 14th International
 1728 Static Analysis Symposium (SAS), August 2007. It is known as the
1729 "HU" algorithm, and is equivalent to value numbering the collapsed
1730 constraint graph including evaluating unions.
1732 The general method of finding equivalence classes is as follows:
1733 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1734 Initialize all non-REF nodes to be direct nodes.
1735 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1736 variable}
1737 For each constraint containing the dereference, we also do the same
1738 thing.
1740 We then compute SCC's in the graph and unify nodes in the same SCC,
1741 including pts sets.
1743 For each non-collapsed node x:
1744 Visit all unvisited explicit incoming edges.
 1745 Ignoring all non-pointers, set pts(x) = Union of pts(y) for all y
 1746 such that y->x.
1747 Lookup the equivalence class for pts(x).
1748 If we found one, equivalence_class(x) = found class.
1749 Otherwise, equivalence_class(x) = new class, and new_class is
1750 added to the lookup table.
1752 All direct nodes with the same equivalence class can be replaced
1753 with a single representative node.
1754 All unlabeled nodes (label == 0) are not pointers and all edges
1755 involving them can be eliminated.
 1756 We perform these optimizations during rewrite_constraints.
1758 In addition to pointer equivalence class finding, we also perform
1759 location equivalence class finding. This is the set of variables
1760 that always appear together in points-to sets. We use this to
1761 compress the size of the points-to sets. */
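
/* A tiny illustration of pointer equivalence (sketch): given the
   constraints p = &x, q = &x, r = p and s = q, the labeling pass computes
   the points-to set { x } for all of p, q, r and s, so they receive the
   same pointer equivalence label and can be collapsed to a single
   representative node before the main solver runs.  */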
1763 /* Current maximum pointer equivalence class id. */
1764 static int pointer_equiv_class;
1766 /* Current maximum location equivalence class id. */
1767 static int location_equiv_class;
1769 /* Recursive routine to find strongly connected components in GRAPH,
 1770 and label its nodes with DFS numbers. */
1772 static void
1773 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1775 unsigned int i;
1776 bitmap_iterator bi;
1777 unsigned int my_dfs;
1779 gcc_assert (si->node_mapping[n] == n);
1780 SET_BIT (si->visited, n);
1781 si->dfs[n] = si->current_index ++;
1782 my_dfs = si->dfs[n];
1784 /* Visit all the successors. */
1785 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1787 unsigned int w = si->node_mapping[i];
1789 if (TEST_BIT (si->deleted, w))
1790 continue;
1792 if (!TEST_BIT (si->visited, w))
1793 condense_visit (graph, si, w);
1795 unsigned int t = si->node_mapping[w];
1796 unsigned int nnode = si->node_mapping[n];
1797 gcc_assert (nnode == n);
1799 if (si->dfs[t] < si->dfs[nnode])
1800 si->dfs[n] = si->dfs[t];
1804 /* Visit all the implicit predecessors. */
1805 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
1807 unsigned int w = si->node_mapping[i];
1809 if (TEST_BIT (si->deleted, w))
1810 continue;
1812 if (!TEST_BIT (si->visited, w))
1813 condense_visit (graph, si, w);
1815 unsigned int t = si->node_mapping[w];
1816 unsigned int nnode = si->node_mapping[n];
1817 gcc_assert (nnode == n);
1819 if (si->dfs[t] < si->dfs[nnode])
1820 si->dfs[n] = si->dfs[t];
1824 /* See if any components have been identified. */
1825 if (si->dfs[n] == my_dfs)
1827 while (VEC_length (unsigned, si->scc_stack) != 0
1828 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1830 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1831 si->node_mapping[w] = n;
1833 if (!TEST_BIT (graph->direct_nodes, w))
1834 RESET_BIT (graph->direct_nodes, n);
1836 /* Unify our nodes. */
1837 if (graph->preds[w])
1839 if (!graph->preds[n])
1840 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1841 bitmap_ior_into (graph->preds[n], graph->preds[w]);
1843 if (graph->implicit_preds[w])
1845 if (!graph->implicit_preds[n])
1846 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1847 bitmap_ior_into (graph->implicit_preds[n],
1848 graph->implicit_preds[w]);
1850 if (graph->points_to[w])
1852 if (!graph->points_to[n])
1853 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
1854 bitmap_ior_into (graph->points_to[n],
1855 graph->points_to[w]);
1857 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1859 unsigned int rep = si->node_mapping[i];
1860 graph->number_incoming[rep]++;
1863 SET_BIT (si->deleted, n);
1865 else
1866 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1869 /* Label pointer equivalences. */
1871 static void
1872 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1874 unsigned int i;
1875 bitmap_iterator bi;
1876 SET_BIT (si->visited, n);
1878 if (!graph->points_to[n])
1879 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
 1881 /* Label and union our incoming edges' points-to sets. */
1882 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1884 unsigned int w = si->node_mapping[i];
1885 if (!TEST_BIT (si->visited, w))
1886 label_visit (graph, si, w);
1888 /* Skip unused edges */
1889 if (w == n || graph->pointer_label[w] == 0)
1891 graph->number_incoming[w]--;
1892 continue;
1894 if (graph->points_to[w])
1895 bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
1897 /* If all incoming edges to w have been processed and
1898 graph->points_to[w] was not stored in the hash table, we can
1899 free it. */
1900 graph->number_incoming[w]--;
1901 if (!graph->number_incoming[w] && !TEST_BIT (graph->pt_used, w))
1903 BITMAP_FREE (graph->points_to[w]);
1906 /* Indirect nodes get fresh variables. */
1907 if (!TEST_BIT (graph->direct_nodes, n))
1908 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
1910 if (!bitmap_empty_p (graph->points_to[n]))
1912 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
1913 graph->points_to[n]);
1914 if (!label)
1916 SET_BIT (graph->pt_used, n);
1917 label = pointer_equiv_class++;
1918 equiv_class_add (pointer_equiv_class_table,
1919 label, graph->points_to[n]);
1921 graph->pointer_label[n] = label;
1925 /* Perform offline variable substitution, discovering equivalence
1926 classes, and eliminating non-pointer variables. */
1928 static struct scc_info *
1929 perform_var_substitution (constraint_graph_t graph)
1931 unsigned int i;
1932 unsigned int size = graph->size;
1933 struct scc_info *si = init_scc_info (size);
1935 bitmap_obstack_initialize (&iteration_obstack);
1936 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
1937 equiv_class_label_eq, free);
1938 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
1939 equiv_class_label_eq, free);
1940 pointer_equiv_class = 1;
1941 location_equiv_class = 1;
1943 /* Condense the nodes, which means to find SCC's, count incoming
1944 predecessors, and unite nodes in SCC's. */
1945 for (i = 0; i < LAST_REF_NODE; i++)
1946 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1947 condense_visit (graph, si, si->node_mapping[i]);
1949 sbitmap_zero (si->visited);
 1950 /* Actually label the nodes for pointer equivalences. */
1951 for (i = 0; i < LAST_REF_NODE; i++)
1952 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1953 label_visit (graph, si, si->node_mapping[i]);
1955 /* Calculate location equivalence labels. */
1956 for (i = 0; i < FIRST_REF_NODE; i++)
1958 bitmap pointed_by;
1959 bitmap_iterator bi;
1960 unsigned int j;
1961 unsigned int label;
1963 if (!graph->pointed_by[i])
1964 continue;
1965 pointed_by = BITMAP_ALLOC (&iteration_obstack);
1967 /* Translate the pointed-by mapping for pointer equivalence
1968 labels. */
1969 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
1971 bitmap_set_bit (pointed_by,
1972 graph->pointer_label[si->node_mapping[j]]);
1974 /* The original pointed_by is now dead. */
1975 BITMAP_FREE (graph->pointed_by[i]);
1977 /* Look up the location equivalence label if one exists, or make
1978 one otherwise. */
1979 label = equiv_class_lookup (location_equiv_class_table,
1980 pointed_by);
1981 if (label == 0)
1983 label = location_equiv_class++;
1984 equiv_class_add (location_equiv_class_table,
1985 label, pointed_by);
1987 else
1989 if (dump_file && (dump_flags & TDF_DETAILS))
1990 fprintf (dump_file, "Found location equivalence for node %s\n",
1991 get_varinfo (i)->name);
1992 BITMAP_FREE (pointed_by);
1994 graph->loc_label[i] = label;
1998 if (dump_file && (dump_flags & TDF_DETAILS))
1999 for (i = 0; i < FIRST_REF_NODE; i++)
2001 bool direct_node = TEST_BIT (graph->direct_nodes, i);
2002 fprintf (dump_file,
2003 "Equivalence classes for %s node id %d:%s are pointer: %d"
2004 ", location:%d\n",
2005 direct_node ? "Direct node" : "Indirect node", i,
2006 get_varinfo (i)->name,
2007 graph->pointer_label[si->node_mapping[i]],
2008 graph->loc_label[si->node_mapping[i]]);
2011 /* Quickly eliminate our non-pointer variables. */
2013 for (i = 0; i < FIRST_REF_NODE; i++)
2015 unsigned int node = si->node_mapping[i];
2017 if (graph->pointer_label[node] == 0
2018 && TEST_BIT (graph->direct_nodes, node))
2020 if (dump_file && (dump_flags & TDF_DETAILS))
2021 fprintf (dump_file,
2022 "%s is a non-pointer variable, eliminating edges.\n",
2023 get_varinfo (node)->name);
2024 stats.nonpointer_vars++;
2025 clear_edges_for_node (graph, node);
2029 return si;
2032 /* Free information that was only necessary for variable
2033 substitution. */
2035 static void
2036 free_var_substitution_info (struct scc_info *si)
2038 free_scc_info (si);
2039 free (graph->pointer_label);
2040 free (graph->loc_label);
2041 free (graph->pointed_by);
2042 free (graph->points_to);
2043 free (graph->number_incoming);
2044 free (graph->eq_rep);
2045 sbitmap_free (graph->direct_nodes);
2046 sbitmap_free (graph->pt_used);
2047 htab_delete (pointer_equiv_class_table);
2048 htab_delete (location_equiv_class_table);
2049 bitmap_obstack_release (&iteration_obstack);
2052 /* Return an existing node that is equivalent to NODE, which has
2053 equivalence class LABEL, if one exists. Return NODE otherwise. */
2055 static unsigned int
2056 find_equivalent_node (constraint_graph_t graph,
2057 unsigned int node, unsigned int label)
2059 /* If the address version of this variable is unused, we can
2060 substitute it for anything else with the same label.
2061 Otherwise, we know the pointers are equivalent, but not the
2062 locations, and we can unite them later. */
2064 if (!bitmap_bit_p (graph->address_taken, node))
2066 gcc_assert (label < graph->size);
2068 if (graph->eq_rep[label] != -1)
2070 /* Unify the two variables since we know they are equivalent. */
2071 if (unite (graph->eq_rep[label], node))
2072 unify_nodes (graph, graph->eq_rep[label], node, false);
2073 return graph->eq_rep[label];
2075 else
2077 graph->eq_rep[label] = node;
2078 graph->pe_rep[label] = node;
2081 else
2083 gcc_assert (label < graph->size);
2084 graph->pe[node] = label;
2085 if (graph->pe_rep[label] == -1)
2086 graph->pe_rep[label] = node;
2089 return node;
2092 /* Unite pointer equivalent but not location equivalent nodes in
2093 GRAPH. This may only be performed once variable substitution is
2094 finished. */
2096 static void
2097 unite_pointer_equivalences (constraint_graph_t graph)
2099 unsigned int i;
2101 /* Go through the pointer equivalences and unite them to their
2102 representative, if they aren't already. */
2103 for (i = 0; i < graph->size; i++)
2105 unsigned int label = graph->pe[i];
2106 int label_rep = graph->pe_rep[label];
2108 if (label != i && unite (label_rep, i))
2109 unify_nodes (graph, label_rep, i, false);
2113 /* Move complex constraints to the GRAPH nodes they belong to. */
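/* For example (illustrative): a store *p = q is attached to p's node,
   a load x = *p is attached to p's node as well, and an offsetted copy
   x = y + OFFSET is attached to y's node, so each complex constraint
   is re-evaluated whenever the solution of that node changes.  */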
2115 static void
2116 move_complex_constraints (constraint_graph_t graph)
2118 int i;
2119 constraint_t c;
2121 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2123 if (c)
2125 struct constraint_expr lhs = c->lhs;
2126 struct constraint_expr rhs = c->rhs;
2128 if (lhs.type == DEREF)
2130 insert_into_complex (graph, lhs.var, c);
2132 else if (rhs.type == DEREF)
2134 if (!(get_varinfo (lhs.var)->is_special_var))
2135 insert_into_complex (graph, rhs.var, c);
2137 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2138 && (lhs.offset != 0 || rhs.offset != 0))
2140 insert_into_complex (graph, rhs.var, c);
2147 /* Optimize and rewrite complex constraints while performing
2148 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2149 result of perform_variable_substitution. */
2151 static void
2152 rewrite_constraints (constraint_graph_t graph,
2153 struct scc_info *si)
2155 int i;
2156 unsigned int j;
2157 constraint_t c;
2159 for (j = 0; j < graph->size; j++)
2160 gcc_assert (find (j) == j);
2162 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2164 struct constraint_expr lhs = c->lhs;
2165 struct constraint_expr rhs = c->rhs;
2166 unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
2167 unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
2168 unsigned int lhsnode, rhsnode;
2169 unsigned int lhslabel, rhslabel;
2171 lhsnode = si->node_mapping[lhsvar];
2172 rhsnode = si->node_mapping[rhsvar];
2173 lhslabel = graph->pointer_label[lhsnode];
2174 rhslabel = graph->pointer_label[rhsnode];
2176 /* See if it is really a non-pointer variable, and if so, ignore
2177 the constraint. */
2178 if (lhslabel == 0)
2180 if (!TEST_BIT (graph->direct_nodes, lhsnode))
2181 lhslabel = graph->pointer_label[lhsnode] = pointer_equiv_class++;
2182 else
2184 if (dump_file && (dump_flags & TDF_DETAILS))
2187 fprintf (dump_file, "%s is a non-pointer variable,"
2188 "ignoring constraint:",
2189 get_varinfo (lhs.var)->name);
2190 dump_constraint (dump_file, c);
2192 VEC_replace (constraint_t, constraints, i, NULL);
2193 continue;
2197 if (rhslabel == 0)
2199 if (!TEST_BIT (graph->direct_nodes, rhsnode))
2200 rhslabel = graph->pointer_label[rhsnode] = pointer_equiv_class++;
2201 else
2203 if (dump_file && (dump_flags & TDF_DETAILS))
2206 fprintf (dump_file, "%s is a non-pointer variable,"
2207 "ignoring constraint:",
2208 get_varinfo (rhs.var)->name);
2209 dump_constraint (dump_file, c);
2211 VEC_replace (constraint_t, constraints, i, NULL);
2212 continue;
2216 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2217 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2218 c->lhs.var = lhsvar;
2219 c->rhs.var = rhsvar;
2224 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2225 part of an SCC, false otherwise. */
2227 static bool
2228 eliminate_indirect_cycles (unsigned int node)
2230 if (graph->indirect_cycles[node] != -1
2231 && !bitmap_empty_p (get_varinfo (node)->solution))
2233 unsigned int i;
2234 VEC(unsigned,heap) *queue = NULL;
2235 int queuepos;
2236 unsigned int to = find (graph->indirect_cycles[node]);
2237 bitmap_iterator bi;
2239 /* We can't touch the solution set and call unify_nodes
2240 at the same time, because unify_nodes is going to do
2241 bitmap unions into it. */
2243 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2245 if (find (i) == i && i != to)
2247 if (unite (to, i))
2248 VEC_safe_push (unsigned, heap, queue, i);
2252 for (queuepos = 0;
2253 VEC_iterate (unsigned, queue, queuepos, i);
2254 queuepos++)
2256 unify_nodes (graph, to, i, true);
2258 VEC_free (unsigned, heap, queue);
2259 return true;
2261 return false;
2264 /* Solve the constraint graph GRAPH using our worklist solver.
2265 This is based on the PW* family of solvers from the "Efficient Field
2266 Sensitive Pointer Analysis for C" paper.
2267 It works by iterating over all the graph nodes, processing the complex
2268 constraints and propagating the copy constraints, until everything stops
2269 changing. This corresponds to steps 6-8 in the solving list given above. */
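/* Summary of the loop below: on each visit of a changed node only the
   difference between its current solution and its old solution (the
   "delta" computed into PTS) is pushed through the node's complex
   constraints and copied to its successors, which keeps the worklist
   iteration from re-propagating whole points-to sets every time.  */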
2271 static void
2272 solve_graph (constraint_graph_t graph)
2274 unsigned int size = graph->size;
2275 unsigned int i;
2276 bitmap pts;
2278 changed_count = 0;
2279 changed = sbitmap_alloc (size);
2280 sbitmap_zero (changed);
2282 /* Mark all initial non-collapsed nodes as changed. */
2283 for (i = 0; i < size; i++)
2285 varinfo_t ivi = get_varinfo (i);
2286 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2287 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2288 || VEC_length (constraint_t, graph->complex[i]) > 0))
2290 SET_BIT (changed, i);
2291 changed_count++;
2295 /* Allocate a bitmap to be used to store the changed bits. */
2296 pts = BITMAP_ALLOC (&pta_obstack);
2298 while (changed_count > 0)
2300 unsigned int i;
2301 struct topo_info *ti = init_topo_info ();
2302 stats.iterations++;
2304 bitmap_obstack_initialize (&iteration_obstack);
2306 compute_topo_order (graph, ti);
2308 while (VEC_length (unsigned, ti->topo_order) != 0)
2311 i = VEC_pop (unsigned, ti->topo_order);
2313 /* If this variable is not a representative, skip it. */
2314 if (find (i) != i)
2315 continue;
2317 /* In certain indirect cycle cases, we may merge this
2318 variable to another. */
2319 if (eliminate_indirect_cycles (i) && find (i) != i)
2320 continue;
2322 /* If the node has changed, we need to process the
2323 complex constraints and outgoing edges again. */
2324 if (TEST_BIT (changed, i))
2326 unsigned int j;
2327 constraint_t c;
2328 bitmap solution;
2329 VEC(constraint_t,heap) *complex = graph->complex[i];
2330 bool solution_empty;
2332 RESET_BIT (changed, i);
2333 changed_count--;
2335 /* Compute the changed set of solution bits. */
2336 bitmap_and_compl (pts, get_varinfo (i)->solution,
2337 get_varinfo (i)->oldsolution);
2339 if (bitmap_empty_p (pts))
2340 continue;
2342 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2344 solution = get_varinfo (i)->solution;
2345 solution_empty = bitmap_empty_p (solution);
2347 /* Process the complex constraints */
2348 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2350 /* XXX: This is going to unsort the constraints in
2351 some cases, which will occasionally add duplicate
2352 constraints during unification. This does not
2353 affect correctness. */
2354 c->lhs.var = find (c->lhs.var);
2355 c->rhs.var = find (c->rhs.var);
2357 /* The only complex constraint that can change our
2358 solution to non-empty, given an empty solution,
2359 is a constraint where the lhs side is receiving
2360 some set from elsewhere. */
2361 if (!solution_empty || c->lhs.type != DEREF)
2362 do_complex_constraint (graph, c, pts);
2365 solution_empty = bitmap_empty_p (solution);
2367 if (!solution_empty)
2369 bitmap_iterator bi;
2371 /* Propagate solution to all successors. */
2372 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2373 0, j, bi)
2375 bitmap tmp;
2376 bool flag;
2378 unsigned int to = find (j);
2379 tmp = get_varinfo (to)->solution;
2380 flag = false;
2382 /* Don't try to propagate to ourselves. */
2383 if (to == i)
2384 continue;
2386 flag = set_union_with_increment (tmp, pts, 0);
2388 if (flag)
2390 get_varinfo (to)->solution = tmp;
2391 if (!TEST_BIT (changed, to))
2393 SET_BIT (changed, to);
2394 changed_count++;
2401 free_topo_info (ti);
2402 bitmap_obstack_release (&iteration_obstack);
2405 BITMAP_FREE (pts);
2406 sbitmap_free (changed);
2407 bitmap_obstack_release (&oldpta_obstack);
2410 /* Map from trees to variable infos. */
2411 static struct pointer_map_t *vi_for_tree;
2414 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2416 static void
2417 insert_vi_for_tree (tree t, varinfo_t vi)
2419 void **slot = pointer_map_insert (vi_for_tree, t);
2420 gcc_assert (vi);
2421 gcc_assert (*slot == NULL);
2422 *slot = vi;
2425 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2426 exist in the map, return NULL; otherwise return the varinfo we found. */
2428 static varinfo_t
2429 lookup_vi_for_tree (tree t)
2431 void **slot = pointer_map_contains (vi_for_tree, t);
2432 if (slot == NULL)
2433 return NULL;
2435 return (varinfo_t) *slot;
2438 /* Return a printable name for DECL */
2440 static const char *
2441 alias_get_name (tree decl)
2443 const char *res = get_name (decl);
2444 char *temp;
2445 int num_printed = 0;
2447 if (res != NULL)
2448 return res;
2450 res = "NULL";
2451 if (!dump_file)
2452 return res;
2454 if (TREE_CODE (decl) == SSA_NAME)
2456 num_printed = asprintf (&temp, "%s_%u",
2457 alias_get_name (SSA_NAME_VAR (decl)),
2458 SSA_NAME_VERSION (decl));
2460 else if (DECL_P (decl))
2462 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2464 if (num_printed > 0)
2466 res = ggc_strdup (temp);
2467 free (temp);
2469 return res;
2472 /* Find the variable id for tree T in the map.
2473 If T doesn't exist in the map, create an entry for it and return it. */
2475 static varinfo_t
2476 get_vi_for_tree (tree t)
2478 void **slot = pointer_map_contains (vi_for_tree, t);
2479 if (slot == NULL)
2480 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2482 return (varinfo_t) *slot;
2485 /* Get a constraint expression from an SSA_VAR_P node. */
2487 static struct constraint_expr
2488 get_constraint_exp_from_ssa_var (tree t)
2490 struct constraint_expr cexpr;
2492 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2494 /* For parameters, get at the points-to set for the actual parm
2495 decl. */
2496 if (TREE_CODE (t) == SSA_NAME
2497 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2498 && SSA_NAME_IS_DEFAULT_DEF (t))
2499 return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));
2501 cexpr.type = SCALAR;
2503 cexpr.var = get_vi_for_tree (t)->id;
2504 /* If we determine the result is "anything", and we know this is readonly,
2505 say it points to readonly memory instead. */
2506 if (cexpr.var == anything_id && TREE_READONLY (t))
2508 cexpr.type = ADDRESSOF;
2509 cexpr.var = readonly_id;
2512 cexpr.offset = 0;
2513 return cexpr;
2516 /* Process a completed constraint T, and add it to the constraint
2517 list. FROM_CALL is true if this is a constraint coming from a
2518 call, which means any DEREFs we see are "may-deref's", not
2519 "must-deref"'s. */
2521 static void
2522 process_constraint_1 (constraint_t t, bool from_call)
2524 struct constraint_expr rhs = t->rhs;
2525 struct constraint_expr lhs = t->lhs;
2527 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2528 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2530 if (!from_call)
2532 if (lhs.type == DEREF)
2533 get_varinfo (lhs.var)->directly_dereferenced = true;
2534 if (rhs.type == DEREF)
2535 get_varinfo (rhs.var)->directly_dereferenced = true;
2538 if (!use_field_sensitive)
2540 t->rhs.offset = 0;
2541 t->lhs.offset = 0;
2544 /* ANYTHING == ANYTHING is pointless. */
2545 if (lhs.var == anything_id && rhs.var == anything_id)
2546 return;
2548 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING. */
2549 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
2551 rhs = t->lhs;
2552 t->lhs = t->rhs;
2553 t->rhs = rhs;
2554 process_constraint_1 (t, from_call);
2556 /* This can happen in our IR with things like n->a = *p */
2557 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2559 /* Split into tmp = *rhs, *lhs = tmp */
2560 tree rhsdecl = get_varinfo (rhs.var)->decl;
2561 tree pointertype = TREE_TYPE (rhsdecl);
2562 tree pointedtotype = TREE_TYPE (pointertype);
2563 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2564 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2566 /* If this is an aggregate of known size, we should have passed
2567 this off to do_structure_copy, and it should have broken it
2568 up. */
2569 gcc_assert (!AGGREGATE_TYPE_P (pointedtotype)
2570 || get_varinfo (rhs.var)->is_unknown_size_var);
2572 process_constraint_1 (new_constraint (tmplhs, rhs), from_call);
2573 process_constraint_1 (new_constraint (lhs, tmplhs), from_call);
2575 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2577 /* Split into tmp = &rhs, *lhs = tmp */
2578 tree rhsdecl = get_varinfo (rhs.var)->decl;
2579 tree pointertype = TREE_TYPE (rhsdecl);
2580 tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp");
2581 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2583 process_constraint_1 (new_constraint (tmplhs, rhs), from_call);
2584 process_constraint_1 (new_constraint (lhs, tmplhs), from_call);
2586 else
2588 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2589 VEC_safe_push (constraint_t, heap, constraints, t);
2594 /* Process constraint T, performing various simplifications and then
2595 adding it to our list of overall constraints. */
2597 static void
2598 process_constraint (constraint_t t)
2600 process_constraint_1 (t, false);
2603 /* Return true if T is a variable of a type that could contain
2604 pointers. */
2606 static bool
2607 could_have_pointers (tree t)
2609 tree type = TREE_TYPE (t);
2611 if (POINTER_TYPE_P (type)
2612 || AGGREGATE_TYPE_P (type)
2613 || TREE_CODE (type) == COMPLEX_TYPE)
2614 return true;
2616 return false;
2619 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2620 structure. */
2622 static unsigned HOST_WIDE_INT
2623 bitpos_of_field (const tree fdecl)
2626 if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
2627 || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
2628 return -1;
2630 return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
2631 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
2635 /* Return true if an access to [ACCESSPOS, ACCESSSIZE]
2636 overlaps with a field at [FIELDPOS, FIELDSIZE] */
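/* For example, an access at [0, 64] overlaps a field at [32, 32]
   because the access starts before the field and ends after the field
   begins, while an access at [64, 32] does not overlap that field.  */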
2638 static bool
2639 offset_overlaps_with_access (const unsigned HOST_WIDE_INT fieldpos,
2640 const unsigned HOST_WIDE_INT fieldsize,
2641 const unsigned HOST_WIDE_INT accesspos,
2642 const unsigned HOST_WIDE_INT accesssize)
2644 if (fieldpos == accesspos && fieldsize == accesssize)
2645 return true;
2646 if (accesspos >= fieldpos && accesspos < (fieldpos + fieldsize))
2647 return true;
2648 if (accesspos < fieldpos && (accesspos + accesssize > fieldpos))
2649 return true;
2651 return false;
2654 /* Given a COMPONENT_REF T, return the constraint_expr for it. */
2656 static void
2657 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results)
2659 tree orig_t = t;
2660 HOST_WIDE_INT bitsize = -1;
2661 HOST_WIDE_INT bitmaxsize = -1;
2662 HOST_WIDE_INT bitpos;
2663 tree forzero;
2664 struct constraint_expr *result;
2665 unsigned int beforelength = VEC_length (ce_s, *results);
2667 /* Some people like to do cute things like take the address of
2668 &0->a.b */
2669 forzero = t;
2670 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2671 forzero = TREE_OPERAND (forzero, 0);
2673 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2675 struct constraint_expr temp;
2677 temp.offset = 0;
2678 temp.var = integer_id;
2679 temp.type = SCALAR;
2680 VEC_safe_push (ce_s, heap, *results, &temp);
2681 return;
2684 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2686 /* String constants are readonly, so there is nothing to really do
2687 here. */
2688 if (TREE_CODE (t) == STRING_CST)
2689 return;
2691 get_constraint_for (t, results);
2692 result = VEC_last (ce_s, *results);
2693 result->offset = bitpos;
2695 gcc_assert (beforelength + 1 == VEC_length (ce_s, *results));
2697 /* This can also happen due to weird offsetof type macros. */
2698 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2699 result->type = SCALAR;
2701 if (result->type == SCALAR)
2703 /* In languages like C, you can access one past the end of an
2704 array. You aren't allowed to dereference it, so we can
2705 ignore this constraint. When we handle pointer subtraction,
2706 we may have to do something cute here. */
2708 if (result->offset < get_varinfo (result->var)->fullsize
2709 && bitmaxsize != 0)
2711 /* The constraint will not necessarily start at the right offset;
2712 it may start in some padding. We only care about
2713 setting the constraint to the first actual field it touches, so
2714 walk to find it. */
2715 varinfo_t curr;
2716 for (curr = get_varinfo (result->var); curr; curr = curr->next)
2718 if (offset_overlaps_with_access (curr->offset, curr->size,
2719 result->offset, bitmaxsize))
2721 result->var = curr->id;
2722 break;
2725 /* Assert that we found *some* field there; in general the user
2726 shouldn't be accessing *only* padding. Still, the user could
2727 access one past the end of an array embedded in a struct,
2728 resulting in accessing *only* padding. */
2729 gcc_assert (curr || ref_contains_array_ref (orig_t));
2731 else if (bitmaxsize == 0)
2733 if (dump_file && (dump_flags & TDF_DETAILS))
2734 fprintf (dump_file, "Access to zero-sized part of variable,"
2735 "ignoring\n");
2737 else
2738 if (dump_file && (dump_flags & TDF_DETAILS))
2739 fprintf (dump_file, "Access past the end of variable, ignoring\n");
2741 result->offset = 0;
2743 else if (bitmaxsize == -1)
2745 /* We can't handle DEREF constraints with unknown size; we'll
2746 get the wrong answer. Punt and return anything. */
2747 result->var = anything_id;
2748 result->offset = 0;
2753 /* Dereference the constraint expression CONS, and return the result.
2754 DEREF (ADDRESSOF) = SCALAR
2755 DEREF (SCALAR) = DEREF
2756 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2757 This is needed so that we can handle dereferencing DEREF constraints. */
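/* For example (illustrative): starting from the SCALAR expression for
   p, one application of do_deref yields the DEREF expression for *p;
   if the input is already a DEREF, as when handling **p, a temporary
   is introduced so the result is tmp = *p followed by DEREF (tmp).  */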
2759 static void
2760 do_deref (VEC (ce_s, heap) **constraints)
2762 struct constraint_expr *c;
2763 unsigned int i = 0;
2765 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
2767 if (c->type == SCALAR)
2768 c->type = DEREF;
2769 else if (c->type == ADDRESSOF)
2770 c->type = SCALAR;
2771 else if (c->type == DEREF)
2773 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
2774 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2775 process_constraint (new_constraint (tmplhs, *c));
2776 c->var = tmplhs.var;
2778 else
2779 gcc_unreachable ();
2783 /* Given a tree T, return the constraint expression for it. */
2785 static void
2786 get_constraint_for (tree t, VEC (ce_s, heap) **results)
2788 struct constraint_expr temp;
2790 /* x = integer is all glommed to a single variable, which doesn't
2791 point to anything by itself. That is, of course, unless it is an
2792 integer constant being treated as a pointer, in which case, we
2793 will return that this is really the addressof anything. This
2794 happens below, since it will fall into the default case. The only
2795 case we know something about an integer treated like a pointer is
2796 when it is the NULL pointer, and then we just say it points to
2797 NULL. */
2798 if (TREE_CODE (t) == INTEGER_CST
2799 && integer_zerop (t))
2801 temp.var = nothing_id;
2802 temp.type = ADDRESSOF;
2803 temp.offset = 0;
2804 VEC_safe_push (ce_s, heap, *results, &temp);
2805 return;
2808 switch (TREE_CODE_CLASS (TREE_CODE (t)))
2810 case tcc_expression:
2811 case tcc_vl_exp:
2813 switch (TREE_CODE (t))
2815 case ADDR_EXPR:
2817 struct constraint_expr *c;
2818 unsigned int i;
2819 tree exp = TREE_OPERAND (t, 0);
2820 tree pttype = TREE_TYPE (TREE_TYPE (t));
2822 get_constraint_for (exp, results);
2825 /* Complex types are special. Taking the address of one
2826 allows you to access either part of it through that
2827 pointer. */
2828 if (VEC_length (ce_s, *results) == 1 &&
2829 TREE_CODE (pttype) == COMPLEX_TYPE)
2831 struct constraint_expr *origrhs;
2832 varinfo_t origvar;
2833 struct constraint_expr tmp;
2835 gcc_assert (VEC_length (ce_s, *results) == 1);
2836 origrhs = VEC_last (ce_s, *results);
2837 tmp = *origrhs;
2838 VEC_pop (ce_s, *results);
2839 origvar = get_varinfo (origrhs->var);
2840 for (; origvar; origvar = origvar->next)
2842 tmp.var = origvar->id;
2843 VEC_safe_push (ce_s, heap, *results, &tmp);
2847 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
2849 if (c->type == DEREF)
2850 c->type = SCALAR;
2851 else
2852 c->type = ADDRESSOF;
2854 return;
2856 break;
2857 case CALL_EXPR:
2858 /* XXX: In interprocedural mode, if we didn't have the
2859 body, we would need to add a *pointer-argument = &ANYTHING
2860 constraint for each pointer argument. */
2861 if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
2863 varinfo_t vi;
2864 tree heapvar = heapvar_lookup (t);
2866 if (heapvar == NULL)
2868 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
2869 DECL_EXTERNAL (heapvar) = 1;
2870 get_var_ann (heapvar)->is_heapvar = 1;
2871 if (gimple_referenced_vars (cfun))
2872 add_referenced_var (heapvar);
2873 heapvar_insert (t, heapvar);
2876 temp.var = create_variable_info_for (heapvar,
2877 alias_get_name (heapvar));
2879 vi = get_varinfo (temp.var);
2880 vi->is_artificial_var = 1;
2881 vi->is_heap_var = 1;
2882 temp.type = ADDRESSOF;
2883 temp.offset = 0;
2884 VEC_safe_push (ce_s, heap, *results, &temp);
2885 return;
2887 else
2889 temp.var = anything_id;
2890 temp.type = SCALAR;
2891 temp.offset = 0;
2892 VEC_safe_push (ce_s, heap, *results, &temp);
2893 return;
2895 break;
2896 default:
2898 temp.type = ADDRESSOF;
2899 temp.var = anything_id;
2900 temp.offset = 0;
2901 VEC_safe_push (ce_s, heap, *results, &temp);
2902 return;
2906 case tcc_reference:
2908 switch (TREE_CODE (t))
2910 case INDIRECT_REF:
2912 get_constraint_for (TREE_OPERAND (t, 0), results);
2913 do_deref (results);
2914 return;
2916 case ARRAY_REF:
2917 case ARRAY_RANGE_REF:
2918 case COMPONENT_REF:
2919 get_constraint_for_component_ref (t, results);
2920 return;
2921 default:
2923 temp.type = ADDRESSOF;
2924 temp.var = anything_id;
2925 temp.offset = 0;
2926 VEC_safe_push (ce_s, heap, *results, &temp);
2927 return;
2931 case tcc_unary:
2933 switch (TREE_CODE (t))
2935 case NOP_EXPR:
2936 case CONVERT_EXPR:
2937 case NON_LVALUE_EXPR:
2939 tree op = TREE_OPERAND (t, 0);
2941 /* Casts from non-pointer to pointer are bad news for us.
2942 Anything else we see through. */
2943 if (!(POINTER_TYPE_P (TREE_TYPE (t))
2944 && ! POINTER_TYPE_P (TREE_TYPE (op))))
2946 get_constraint_for (op, results);
2947 return;
2950 /* FALLTHRU */
2952 default:
2954 temp.type = ADDRESSOF;
2955 temp.var = anything_id;
2956 temp.offset = 0;
2957 VEC_safe_push (ce_s, heap, *results, &temp);
2958 return;
2962 case tcc_exceptional:
2964 switch (TREE_CODE (t))
2966 case PHI_NODE:
2968 get_constraint_for (PHI_RESULT (t), results);
2969 return;
2971 break;
2972 case SSA_NAME:
2974 struct constraint_expr temp;
2975 temp = get_constraint_exp_from_ssa_var (t);
2976 VEC_safe_push (ce_s, heap, *results, &temp);
2977 return;
2979 break;
2980 default:
2982 temp.type = ADDRESSOF;
2983 temp.var = anything_id;
2984 temp.offset = 0;
2985 VEC_safe_push (ce_s, heap, *results, &temp);
2986 return;
2990 case tcc_declaration:
2992 struct constraint_expr temp;
2993 temp = get_constraint_exp_from_ssa_var (t);
2994 VEC_safe_push (ce_s, heap, *results, &temp);
2995 return;
2997 default:
2999 temp.type = ADDRESSOF;
3000 temp.var = anything_id;
3001 temp.offset = 0;
3002 VEC_safe_push (ce_s, heap, *results, &temp);
3003 return;
3009 /* Handle the structure copy case where we have a simple structure copy
3010 between LHS and RHS that is of SIZE (in bits)
3012 For each field of the lhs variable (lhsfield)
3013 For each field of the rhs variable at lhsfield.offset (rhsfield)
3014 add the constraint lhsfield = rhsfield
3016 If we fail due to some kind of type unsafety or something else we
3017 can't handle, return false. We expect the caller to collapse the
3018 variable in that case. */
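/* For example (illustrative): copying struct { int *a; int *b; } as
   s1 = s2 produces the field-wise constraints s1.a = s2.a and
   s1.b = s2.b, pairing each lhs field with the rhs field found at the
   same relative offset.  */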
3020 static bool
3021 do_simple_structure_copy (const struct constraint_expr lhs,
3022 const struct constraint_expr rhs,
3023 const unsigned HOST_WIDE_INT size)
3025 varinfo_t p = get_varinfo (lhs.var);
3026 unsigned HOST_WIDE_INT pstart, last;
3027 pstart = p->offset;
3028 last = p->offset + size;
3029 for (; p && p->offset < last; p = p->next)
3031 varinfo_t q;
3032 struct constraint_expr templhs = lhs;
3033 struct constraint_expr temprhs = rhs;
3034 unsigned HOST_WIDE_INT fieldoffset;
3036 templhs.var = p->id;
3037 q = get_varinfo (temprhs.var);
3038 fieldoffset = p->offset - pstart;
3039 q = first_vi_for_offset (q, q->offset + fieldoffset);
3040 if (!q)
3041 return false;
3042 temprhs.var = q->id;
3043 process_constraint (new_constraint (templhs, temprhs));
3045 return true;
3049 /* Handle the structure copy case where we have a structure copy between an
3050 aggregate on the LHS and a dereference of a pointer on the RHS
3051 that is of SIZE (in bits)
3053 For each field of the lhs variable (lhsfield)
3054 rhs.offset = lhsfield->offset
3055 add the constraint lhsfield = rhs. */
3058 static void
3059 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
3060 const struct constraint_expr rhs,
3061 const unsigned HOST_WIDE_INT size)
3063 varinfo_t p = get_varinfo (lhs.var);
3064 unsigned HOST_WIDE_INT pstart,last;
3065 pstart = p->offset;
3066 last = p->offset + size;
3068 for (; p && p->offset < last; p = p->next)
3070 varinfo_t q;
3071 struct constraint_expr templhs = lhs;
3072 struct constraint_expr temprhs = rhs;
3073 unsigned HOST_WIDE_INT fieldoffset;
3076 if (templhs.type == SCALAR)
3077 templhs.var = p->id;
3078 else
3079 templhs.offset = p->offset;
3081 q = get_varinfo (temprhs.var);
3082 fieldoffset = p->offset - pstart;
3083 temprhs.offset += fieldoffset;
3084 process_constraint (new_constraint (templhs, temprhs));
3088 /* Handle the structure copy case where we have a structure copy
3089 between an aggregate on the RHS and a dereference of a pointer on
3090 the LHS that is of SIZE (in bits)
3092 For each field of the rhs variable (rhsfield)
3093 lhs.offset = rhsfield->offset
3094 add the constraint lhs = rhsfield. */
3097 static void
3098 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
3099 const struct constraint_expr rhs,
3100 const unsigned HOST_WIDE_INT size)
3102 varinfo_t p = get_varinfo (rhs.var);
3103 unsigned HOST_WIDE_INT pstart,last;
3104 pstart = p->offset;
3105 last = p->offset + size;
3107 for (; p && p->offset < last; p = p->next)
3109 varinfo_t q;
3110 struct constraint_expr templhs = lhs;
3111 struct constraint_expr temprhs = rhs;
3112 unsigned HOST_WIDE_INT fieldoffset;
3115 if (temprhs.type == SCALAR)
3116 temprhs.var = p->id;
3117 else
3118 temprhs.offset = p->offset;
3120 q = get_varinfo (templhs.var);
3121 fieldoffset = p->offset - pstart;
3122 templhs.offset += fieldoffset;
3123 process_constraint (new_constraint (templhs, temprhs));
3127 /* Sometimes, frontends like to give us bad type information. This
3128 function will collapse all the fields from VAR to the end of VAR,
3129 into VAR, so that we treat those fields as a single variable.
3130 We return the variable they were collapsed into. */
3132 static unsigned int
3133 collapse_rest_of_var (unsigned int var)
3135 varinfo_t currvar = get_varinfo (var);
3136 varinfo_t field;
3138 for (field = currvar->next; field; field = field->next)
3140 if (dump_file)
3141 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
3142 field->name, currvar->name);
3144 gcc_assert (!field->collapsed_to);
3145 field->collapsed_to = currvar;
3148 currvar->next = NULL;
3149 currvar->size = currvar->fullsize - currvar->offset;
3151 return currvar->id;
3154 /* Handle aggregate copies by expanding into copies of the respective
3155 fields of the structures. */
3157 static void
3158 do_structure_copy (tree lhsop, tree rhsop)
3160 struct constraint_expr lhs, rhs, tmp;
3161 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3162 varinfo_t p;
3163 unsigned HOST_WIDE_INT lhssize;
3164 unsigned HOST_WIDE_INT rhssize;
3166 get_constraint_for (lhsop, &lhsc);
3167 get_constraint_for (rhsop, &rhsc);
3168 gcc_assert (VEC_length (ce_s, lhsc) == 1);
3169 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3170 lhs = *(VEC_last (ce_s, lhsc));
3171 rhs = *(VEC_last (ce_s, rhsc));
3173 VEC_free (ce_s, heap, lhsc);
3174 VEC_free (ce_s, heap, rhsc);
3176 /* If we have special var = x, swap it around. */
3177 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
3179 tmp = lhs;
3180 lhs = rhs;
3181 rhs = tmp;
3184 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
3185 possible it's something we could handle. However, most cases falling
3186 into this are dealing with transparent unions, which are slightly
3187 weird. */
3188 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
3190 rhs.type = ADDRESSOF;
3191 rhs.var = anything_id;
3194 /* If the RHS is a special var, or an addressof, set all the LHS fields to
3195 that special var. */
3196 if (rhs.var <= integer_id)
3198 for (p = get_varinfo (lhs.var); p; p = p->next)
3200 struct constraint_expr templhs = lhs;
3201 struct constraint_expr temprhs = rhs;
3203 if (templhs.type == SCALAR )
3204 templhs.var = p->id;
3205 else
3206 templhs.offset += p->offset;
3207 process_constraint (new_constraint (templhs, temprhs));
3210 else
3212 tree rhstype = TREE_TYPE (rhsop);
3213 tree lhstype = TREE_TYPE (lhsop);
3214 tree rhstypesize;
3215 tree lhstypesize;
3217 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
3218 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
3220 /* If we have a variably sized types on the rhs or lhs, and a deref
3221 constraint, add the constraint, lhsconstraint = &ANYTHING.
3222 This is conservatively correct because either the lhs is an unknown
3223 sized var (if the constraint is SCALAR), or the lhs is a DEREF
3224 constraint, and every variable it can point to must be unknown sized
3225 anyway, so we don't need to worry about fields at all. */
3226 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
3227 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
3229 rhs.var = anything_id;
3230 rhs.type = ADDRESSOF;
3231 rhs.offset = 0;
3232 process_constraint (new_constraint (lhs, rhs));
3233 return;
3236 /* The size only really matters insofar as we don't set more or less of
3237 the variable. If we hit an unknown size var, the size should be the
3238 whole darn thing. */
3239 if (get_varinfo (rhs.var)->is_unknown_size_var)
3240 rhssize = ~0;
3241 else
3242 rhssize = TREE_INT_CST_LOW (rhstypesize);
3244 if (get_varinfo (lhs.var)->is_unknown_size_var)
3245 lhssize = ~0;
3246 else
3247 lhssize = TREE_INT_CST_LOW (lhstypesize);
3250 if (rhs.type == SCALAR && lhs.type == SCALAR)
3252 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
3254 lhs.var = collapse_rest_of_var (lhs.var);
3255 rhs.var = collapse_rest_of_var (rhs.var);
3256 lhs.offset = 0;
3257 rhs.offset = 0;
3258 lhs.type = SCALAR;
3259 rhs.type = SCALAR;
3260 process_constraint (new_constraint (lhs, rhs));
3263 else if (lhs.type != DEREF && rhs.type == DEREF)
3264 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3265 else if (lhs.type == DEREF && rhs.type != DEREF)
3266 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3267 else
3269 tree pointedtotype = lhstype;
3270 tree tmpvar;
3272 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
3273 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
3274 do_structure_copy (tmpvar, rhsop);
3275 do_structure_copy (lhsop, tmpvar);
3281 /* Update related alias information kept in AI. This is used when
3282 building name tags, alias sets and deciding grouping heuristics.
3283 STMT is the statement to process. This function also updates
3284 ADDRESSABLE_VARS. */
3286 static void
3287 update_alias_info (tree stmt, struct alias_info *ai)
3289 bitmap addr_taken;
3290 use_operand_p use_p;
3291 ssa_op_iter iter;
3292 bool stmt_dereferences_ptr_p;
3293 enum escape_type stmt_escape_type = is_escape_site (stmt);
3294 struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);
3296 stmt_dereferences_ptr_p = false;
3298 if (stmt_escape_type == ESCAPE_TO_CALL
3299 || stmt_escape_type == ESCAPE_TO_PURE_CONST)
3301 mem_ref_stats->num_call_sites++;
3302 if (stmt_escape_type == ESCAPE_TO_PURE_CONST)
3303 mem_ref_stats->num_pure_const_call_sites++;
3305 else if (stmt_escape_type == ESCAPE_TO_ASM)
3306 mem_ref_stats->num_asm_sites++;
3308 /* Mark all the variables whose address are taken by the statement. */
3309 addr_taken = addresses_taken (stmt);
3310 if (addr_taken)
3312 bitmap_ior_into (gimple_addressable_vars (cfun), addr_taken);
3314 /* If STMT is an escape point, all the addresses taken by it are
3315 call-clobbered. */
3316 if (stmt_escape_type != NO_ESCAPE)
3318 bitmap_iterator bi;
3319 unsigned i;
3321 EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
3323 tree rvar = referenced_var (i);
3324 if (!unmodifiable_var_p (rvar))
3325 mark_call_clobbered (rvar, stmt_escape_type);
3330 /* Process each operand use. For pointers, determine whether they
3331 are dereferenced by the statement, or whether their value
3332 escapes, etc. */
3333 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
3335 tree op, var;
3336 var_ann_t v_ann;
3337 struct ptr_info_def *pi;
3338 unsigned num_uses, num_loads, num_stores;
3340 op = USE_FROM_PTR (use_p);
3342 /* If STMT is a PHI node, OP may be an ADDR_EXPR. If so, add it
3343 to the set of addressable variables. */
3344 if (TREE_CODE (op) == ADDR_EXPR)
3346 bitmap addressable_vars = gimple_addressable_vars (cfun);
3348 gcc_assert (TREE_CODE (stmt) == PHI_NODE);
3349 gcc_assert (addressable_vars);
3351 /* PHI nodes don't have annotations for pinning the set
3352 of addresses taken, so we collect them here.
3354 FIXME, should we allow PHI nodes to have annotations
3355 so that they can be treated like regular statements?
3356 Currently, they are treated as second-class
3357 statements. */
3358 add_to_addressable_set (TREE_OPERAND (op, 0), &addressable_vars);
3359 continue;
3362 /* Ignore constants (they may occur in PHI node arguments). */
3363 if (TREE_CODE (op) != SSA_NAME)
3364 continue;
3366 var = SSA_NAME_VAR (op);
3367 v_ann = var_ann (var);
3369 /* The base variable of an SSA name must be a GIMPLE register, and thus
3370 it cannot be aliased. */
3371 gcc_assert (!may_be_aliased (var));
3373 /* We are only interested in pointers. */
3374 if (!POINTER_TYPE_P (TREE_TYPE (op)))
3375 continue;
3377 pi = get_ptr_info (op);
3379 /* Add OP to AI->PROCESSED_PTRS, if it's not there already. */
3380 if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
3382 SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
3383 VEC_safe_push (tree, heap, ai->processed_ptrs, op);
3386 /* If STMT is a PHI node, then it will not have pointer
3387 dereferences and it will not be an escape point. */
3388 if (TREE_CODE (stmt) == PHI_NODE)
3389 continue;
3391 /* Determine whether OP is a dereferenced pointer, and if STMT
3392 is an escape point, whether OP escapes. */
3393 count_uses_and_derefs (op, stmt, &num_uses, &num_loads, &num_stores);
3395 /* Handle a corner case involving address expressions of the
3396 form '&PTR->FLD'. The problem with these expressions is that
3397 they do not represent a dereference of PTR. However, if some
3398 other transformation propagates them into an INDIRECT_REF
3399 expression, we end up with '*(&PTR->FLD)' which is folded
3400 into 'PTR->FLD'.
3402 So, if the original code had no other dereferences of PTR,
3403 the aliaser will not create memory tags for it, and when
3404 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
3405 memory operations will receive no VDEF/VUSE operands.
3407 One solution would be to have count_uses_and_derefs consider
3408 &PTR->FLD a dereference of PTR. But that is wrong, since it
3409 is not really a dereference but an offset calculation.
3411 What we do here is to recognize these special ADDR_EXPR
3412 nodes. Since these expressions are never GIMPLE values (they
3413 are not GIMPLE invariants), they can only appear on the RHS
3414 of an assignment and their base address is always an
3415 INDIRECT_REF expression. */
3416 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3417 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR
3418 && !is_gimple_val (GIMPLE_STMT_OPERAND (stmt, 1)))
3420 /* If the RHS is of the form &PTR->FLD and PTR == OP, then
3421 this represents a potential dereference of PTR. */
3422 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
3423 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3424 if (TREE_CODE (base) == INDIRECT_REF
3425 && TREE_OPERAND (base, 0) == op)
3426 num_loads++;
3429 if (num_loads + num_stores > 0)
3431 /* Mark OP as dereferenced. In a subsequent pass,
3432 dereferenced pointers that point to a set of
3433 variables will be assigned a name tag to alias
3434 all the variables OP points to. */
3435 pi->is_dereferenced = 1;
3437 /* If this is a store operation, mark OP as being
3438 dereferenced to store, otherwise mark it as being
3439 dereferenced to load. */
3440 if (num_stores > 0)
3441 pointer_set_insert (ai->dereferenced_ptrs_store, var);
3442 else
3443 pointer_set_insert (ai->dereferenced_ptrs_load, var);
3445 /* Update the frequency estimate for all the dereferences of
3446 pointer OP. */
3447 update_mem_sym_stats_from_stmt (op, stmt, num_loads, num_stores);
3449 /* Indicate that STMT contains pointer dereferences. */
3450 stmt_dereferences_ptr_p = true;
3453 if (stmt_escape_type != NO_ESCAPE && num_loads + num_stores < num_uses)
3455 /* If STMT is an escape point and STMT contains at
3456 least one direct use of OP, then the value of OP
3457 escapes and so the pointed-to variables need to
3458 be marked call-clobbered. */
3459 pi->value_escapes_p = 1;
3460 pi->escape_mask |= stmt_escape_type;
3462 /* If the statement makes a function call, assume
3463 that pointer OP will be dereferenced in a store
3464 operation inside the called function. */
3465 if (get_call_expr_in (stmt)
3466 || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
3468 pointer_set_insert (ai->dereferenced_ptrs_store, var);
3469 pi->is_dereferenced = 1;
3474 if (TREE_CODE (stmt) == PHI_NODE)
3475 return;
3477 /* Mark stored variables in STMT as being written to and update the
3478 memory reference stats for all memory symbols referenced by STMT. */
3479 if (stmt_references_memory_p (stmt))
3481 unsigned i;
3482 bitmap_iterator bi;
3484 mem_ref_stats->num_mem_stmts++;
3486 /* Notice that we only update memory reference stats for symbols
3487 loaded and stored by the statement if the statement does not
3488 contain pointer dereferences and it is not a call/asm site.
3489 This is to avoid double accounting problems when creating
3490 memory partitions. After computing points-to information,
3491 pointer dereference statistics are used to update the
3492 reference stats of the pointed-to variables, so here we
3493 should only update direct references to symbols.
3495 Indirect references are not updated here for two reasons: (1)
3496 The first time we compute alias information, the sets
3497 LOADED/STORED are empty for pointer dereferences, (2) After
3498 partitioning, LOADED/STORED may have references to
3499 partitions, not the original pointed-to variables. So, if we
3500 always counted LOADED/STORED here and during partitioning, we
3501 would count many symbols more than once.
3503 This does cause some imprecision when a statement has a
3504 combination of direct symbol references and pointer
3505 dereferences (e.g., MEMORY_VAR = *PTR) or if a call site has
3506 memory symbols in its argument list, but these cases do not
3507 occur so frequently as to constitute a serious problem. */
3508 if (STORED_SYMS (stmt))
3509 EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
3511 tree sym = referenced_var (i);
3512 pointer_set_insert (ai->written_vars, sym);
3513 if (!stmt_dereferences_ptr_p
3514 && stmt_escape_type != ESCAPE_TO_CALL
3515 && stmt_escape_type != ESCAPE_TO_PURE_CONST
3516 && stmt_escape_type != ESCAPE_TO_ASM)
3517 update_mem_sym_stats_from_stmt (sym, stmt, 0, 1);
3520 if (!stmt_dereferences_ptr_p
3521 && LOADED_SYMS (stmt)
3522 && stmt_escape_type != ESCAPE_TO_CALL
3523 && stmt_escape_type != ESCAPE_TO_PURE_CONST
3524 && stmt_escape_type != ESCAPE_TO_ASM)
3525 EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt), 0, i, bi)
3526 update_mem_sym_stats_from_stmt (referenced_var (i), stmt, 1, 0);
3531 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3532 Expressions of the type PTR + CST can be handled in two ways:
3534 1- If the constraint for PTR is ADDRESSOF for a non-structure
3535 variable, then we can use it directly because adding or
3536 subtracting a constant may not alter the original ADDRESSOF
3537 constraint (i.e., pointer arithmetic may not legally go outside
3538 an object's boundaries).
3540 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3541 then if CST is a compile-time constant that can be used as an
3542 offset, we can determine which sub-variable will be pointed-to
3543 by the expression.
3545 Return true if the expression is handled. For any other kind of
3546 expression, return false so that each operand can be added as a
3547 separate constraint by the caller. */
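/* For example (illustrative): for q = p + 4, where the constraint for
   p is &S and S is a structure, the code below converts the byte
   offset 4 to bits and takes the address of the sub-variable of S
   covering that offset.  When the addend is not a compile-time
   constant, a conservative approximation is used instead (see the
   unknown_addend handling below).  */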
3549 static bool
3550 handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
3552 tree op0, op1;
3553 struct constraint_expr *c, *c2;
3554 unsigned int i = 0;
3555 unsigned int j = 0;
3556 VEC (ce_s, heap) *temp = NULL;
3557 unsigned int rhsoffset = 0;
3558 bool unknown_addend = false;
3560 if (TREE_CODE (expr) != POINTER_PLUS_EXPR)
3561 return false;
3563 op0 = TREE_OPERAND (expr, 0);
3564 op1 = TREE_OPERAND (expr, 1);
3565 gcc_assert (POINTER_TYPE_P (TREE_TYPE (op0)));
3567 get_constraint_for (op0, &temp);
3569 /* Handle non-constants by making constraints from integer. */
3570 if (TREE_CODE (op1) == INTEGER_CST)
3571 rhsoffset = TREE_INT_CST_LOW (op1) * BITS_PER_UNIT;
3572 else
3573 unknown_addend = true;
3575 for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
3576 for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
3578 if (c2->type == ADDRESSOF && rhsoffset != 0)
3580 varinfo_t temp = get_varinfo (c2->var);
3582 /* An access one after the end of an array is valid,
3583 so simply punt on accesses we cannot resolve. */
3584 temp = first_vi_for_offset (temp, rhsoffset);
3585 if (temp == NULL)
3586 continue;
3587 c2->var = temp->id;
3588 c2->offset = 0;
3590 else if (unknown_addend)
3592 /* Can't handle *a + integer where integer is unknown. */
3593 if (c2->type != SCALAR)
3595 struct constraint_expr intc;
3596 intc.var = integer_id;
3597 intc.offset = 0;
3598 intc.type = SCALAR;
3599 process_constraint (new_constraint (*c, intc));
3601 else
3603 /* We know it lives somewhere within c2->var. */
3604 varinfo_t tmp = get_varinfo (c2->var);
3605 for (; tmp; tmp = tmp->next)
3607 struct constraint_expr tmpc = *c2;
3608 c2->var = tmp->id;
3609 c2->offset = 0;
3610 process_constraint (new_constraint (*c, tmpc));
3614 else
3615 c2->offset = rhsoffset;
3616 process_constraint (new_constraint (*c, *c2));
3619 VEC_free (ce_s, heap, temp);
3621 return true;
3624 /* For non-IPA mode, generate constraints necessary for a call on the
3625 RHS. */
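/* For example (illustrative): for a call foo (p), where p is a
   pointer, the loop below emits *p = &ANYTHING, i.e. everything p
   points to may end up pointing to anything once the callee has run,
   since in non-IPA mode nothing is known about the callee.  */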
3627 static void
3628 handle_rhs_call (tree rhs)
3630 tree arg;
3631 call_expr_arg_iterator iter;
3632 struct constraint_expr rhsc;
3634 rhsc.var = anything_id;
3635 rhsc.offset = 0;
3636 rhsc.type = ADDRESSOF;
3638 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhs)
3640 VEC(ce_s, heap) *lhsc = NULL;
3642 /* Find those pointers being passed, and make sure they end up
3643 pointing to anything. */
3644 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3646 unsigned int j;
3647 struct constraint_expr *lhsp;
3649 get_constraint_for (arg, &lhsc);
3650 do_deref (&lhsc);
3651 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3652 process_constraint_1 (new_constraint (*lhsp, rhsc), true);
3653 VEC_free (ce_s, heap, lhsc);
3658 /* Walk statement T setting up aliasing constraints according to the
3659 references found in T. This function is the main part of the
3660 constraint builder. */
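/* For example (illustrative): for a statement x_1 = &y the code below
   records the constraint x_1 = &y; for x_1 = *p_2 it records
   x_1 = *p_2; and for a PHI node x_3 = PHI <x_1, x_2> it records both
   x_3 = x_1 and x_3 = x_2.  */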
3663 static void
3664 find_func_aliases (tree origt)
3666 tree t = origt;
3667 VEC(ce_s, heap) *lhsc = NULL;
3668 VEC(ce_s, heap) *rhsc = NULL;
3669 struct constraint_expr *c;
3671 if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
3672 t = TREE_OPERAND (t, 0);
3674 /* Now build constraints expressions. */
3675 if (TREE_CODE (t) == PHI_NODE)
3677 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))));
3679 /* Only care about pointers and structures containing
3680 pointers. */
3681 if (could_have_pointers (PHI_RESULT (t)))
3683 int i;
3684 unsigned int j;
3686 /* For a phi node, assign all the arguments to
3687 the result. */
3688 get_constraint_for (PHI_RESULT (t), &lhsc);
3689 for (i = 0; i < PHI_NUM_ARGS (t); i++)
3691 tree rhstype;
3692 tree strippedrhs = PHI_ARG_DEF (t, i);
3694 STRIP_NOPS (strippedrhs);
3695 rhstype = TREE_TYPE (strippedrhs);
3696 get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);
3698 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3700 struct constraint_expr *c2;
3701 while (VEC_length (ce_s, rhsc) > 0)
3703 c2 = VEC_last (ce_s, rhsc);
3704 process_constraint (new_constraint (*c, *c2));
3705 VEC_pop (ce_s, rhsc);
3711 /* In IPA mode, we need to generate constraints to pass call
3712 arguments through their calls. There are two cases, either a
3713 GIMPLE_MODIFY_STMT when we are returning a value, or just a plain
3714 CALL_EXPR when we are not.
3716 In non-ipa mode, we need to generate constraints for each
3717 pointer passed by address. */
3718 else if (((TREE_CODE (t) == GIMPLE_MODIFY_STMT
3719 && TREE_CODE (GIMPLE_STMT_OPERAND (t, 1)) == CALL_EXPR
3720 && !(call_expr_flags (GIMPLE_STMT_OPERAND (t, 1))
3721 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
3722 || (TREE_CODE (t) == CALL_EXPR
3723 && !(call_expr_flags (t)
3724 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))))
3726 if (!in_ipa_mode)
3728 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3729 handle_rhs_call (GIMPLE_STMT_OPERAND (t, 1));
3730 else
3731 handle_rhs_call (t);
3733 else
3735 tree lhsop;
3736 tree rhsop;
3737 tree arg;
3738 call_expr_arg_iterator iter;
3739 varinfo_t fi;
3740 int i = 1;
3741 tree decl;
3742 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3744 lhsop = GIMPLE_STMT_OPERAND (t, 0);
3745 rhsop = GIMPLE_STMT_OPERAND (t, 1);
3747 else
3749 lhsop = NULL;
3750 rhsop = t;
3752 decl = get_callee_fndecl (rhsop);
3754 /* If we can directly resolve the function being called, do so.
3755 Otherwise, it must be some sort of indirect expression that
3756 we should still be able to handle. */
3757 if (decl)
3759 fi = get_vi_for_tree (decl);
3761 else
3763 decl = CALL_EXPR_FN (rhsop);
3764 fi = get_vi_for_tree (decl);
3767 /* Assign all the passed arguments to the appropriate incoming
3768 parameters of the function. */
3770 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhsop)
3772 struct constraint_expr lhs ;
3773 struct constraint_expr *rhsp;
3775 get_constraint_for (arg, &rhsc);
3776 if (TREE_CODE (decl) != FUNCTION_DECL)
3778 lhs.type = DEREF;
3779 lhs.var = fi->id;
3780 lhs.offset = i;
3782 else
3784 lhs.type = SCALAR;
3785 lhs.var = first_vi_for_offset (fi, i)->id;
3786 lhs.offset = 0;
3788 while (VEC_length (ce_s, rhsc) != 0)
3790 rhsp = VEC_last (ce_s, rhsc);
3791 process_constraint (new_constraint (lhs, *rhsp));
3792 VEC_pop (ce_s, rhsc);
3794 i++;
3797 /* If we are returning a value, assign it to the result. */
3798 if (lhsop)
3800 struct constraint_expr rhs;
3801 struct constraint_expr *lhsp;
3802 unsigned int j = 0;
3804 get_constraint_for (lhsop, &lhsc);
3805 if (TREE_CODE (decl) != FUNCTION_DECL)
3807 rhs.type = DEREF;
3808 rhs.var = fi->id;
3809 rhs.offset = i;
3811 else
3813 rhs.type = SCALAR;
3814 rhs.var = first_vi_for_offset (fi, i)->id;
3815 rhs.offset = 0;
3817 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3818 process_constraint (new_constraint (*lhsp, rhs));
3822 /* Otherwise, just a regular assignment statement. */
3823 else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3825 tree lhsop = GIMPLE_STMT_OPERAND (t, 0);
3826 tree rhsop = GIMPLE_STMT_OPERAND (t, 1);
3827 int i;
3829 if ((AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
3830 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE)
3831 && (AGGREGATE_TYPE_P (TREE_TYPE (rhsop))
3832 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE))
3834 do_structure_copy (lhsop, rhsop);
3836 else
3838 /* Only care about operations with pointers, structures
3839 containing pointers, dereferences, and call expressions. */
3840 if (could_have_pointers (lhsop)
3841 || TREE_CODE (rhsop) == CALL_EXPR)
3843 get_constraint_for (lhsop, &lhsc);
3844 switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
3846 /* An RHS that consists of unary operations,
3847 exceptional types, or bare decls/constants gets
3848 handled directly by get_constraint_for. */
3849 case tcc_reference:
3850 case tcc_declaration:
3851 case tcc_constant:
3852 case tcc_exceptional:
3853 case tcc_expression:
3854 case tcc_vl_exp:
3855 case tcc_unary:
3857 unsigned int j;
3859 get_constraint_for (rhsop, &rhsc);
3860 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3862 struct constraint_expr *c2;
3863 unsigned int k;
3865 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3866 process_constraint (new_constraint (*c, *c2));
3870 break;
3872 case tcc_binary:
3874 /* For pointer arithmetic of the form
3875 PTR + CST, we can simply use PTR's
3876 constraint because pointer arithmetic is
3877 not allowed to go out of bounds. */
3878 if (handle_ptr_arith (lhsc, rhsop))
3879 break;
3881 /* FALLTHRU */
3883 /* Otherwise, walk each operand. Notice that we
3884 can't use the operand interface because we need
3885 to process expressions other than simple operands
3886 (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR). */
3887 default:
3888 for (i = 0; i < TREE_OPERAND_LENGTH (rhsop); i++)
3890 tree op = TREE_OPERAND (rhsop, i);
3891 unsigned int j;
3893 gcc_assert (VEC_length (ce_s, rhsc) == 0);
3894 get_constraint_for (op, &rhsc);
3895 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3897 struct constraint_expr *c2;
3898 while (VEC_length (ce_s, rhsc) > 0)
3900 c2 = VEC_last (ce_s, rhsc);
3901 process_constraint (new_constraint (*c, *c2));
3902 VEC_pop (ce_s, rhsc);
3910 else if (TREE_CODE (t) == CHANGE_DYNAMIC_TYPE_EXPR)
3912 unsigned int j;
3914 get_constraint_for (CHANGE_DYNAMIC_TYPE_LOCATION (t), &lhsc);
3915 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
3916 get_varinfo (c->var)->no_tbaa_pruning = true;
3919 /* After promoting variables and computing aliasing we will
3920 need to re-scan most statements. FIXME: Try to minimize the
3921 number of statements re-scanned. It's not really necessary to
3922 re-scan *all* statements. */
3923 mark_stmt_modified (origt);
3924 VEC_free (ce_s, heap, rhsc);
3925 VEC_free (ce_s, heap, lhsc);
3929 /* Find the first varinfo in the same variable as START that overlaps with
3930 OFFSET.
3931 Effectively, walk the chain of fields for the variable START to find the
3932 first field that overlaps with OFFSET.
3933 Return NULL if we can't find one. */
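/* For example: in a variable with fields at offsets 0 and 32, each 32
   bits wide, an OFFSET of 40 returns the field at offset 32, while an
   OFFSET of 64 or more returns NULL.  */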
3935 static varinfo_t
3936 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3938 varinfo_t curr = start;
3939 while (curr)
3941 /* We may not find a variable in the field list with the actual
3942 offset when we have glommed a structure to a variable.
3943 In that case, however, offset should still be within the size
3944 of the variable. */
3945 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3946 return curr;
3947 curr = curr->next;
3949 return NULL;
3953 /* Insert the varinfo FIELD into the field list for BASE, at the front
3954 of the list. */
3956 static void
3957 insert_into_field_list (varinfo_t base, varinfo_t field)
3959 varinfo_t prev = base;
3960 varinfo_t curr = base->next;
3962 field->next = curr;
3963 prev->next = field;
3966 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3967 offset. */
3969 static void
3970 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3972 varinfo_t prev = base;
3973 varinfo_t curr = base->next;
3975 if (curr == NULL)
3977 prev->next = field;
3978 field->next = NULL;
3980 else
3982 while (curr)
3984 if (field->offset <= curr->offset)
3985 break;
3986 prev = curr;
3987 curr = curr->next;
3989 field->next = prev->next;
3990 prev->next = field;
3994 /* qsort comparison function for two fieldoff's PA and PB */
3996 static int
3997 fieldoff_compare (const void *pa, const void *pb)
3999 const fieldoff_s *foa = (const fieldoff_s *)pa;
4000 const fieldoff_s *fob = (const fieldoff_s *)pb;
4001 HOST_WIDE_INT foasize, fobsize;
4003 if (foa->offset != fob->offset)
4004 return foa->offset - fob->offset;
4006 foasize = TREE_INT_CST_LOW (foa->size);
4007 fobsize = TREE_INT_CST_LOW (fob->size);
4008 return foasize - fobsize;
4011 /* Sort a fieldstack according to the field offset and sizes. */
4012 void
4013 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
4015 qsort (VEC_address (fieldoff_s, fieldstack),
4016 VEC_length (fieldoff_s, fieldstack),
4017 sizeof (fieldoff_s),
4018 fieldoff_compare);
4021 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all the fields
4022 of TYPE onto fieldstack, recording their offsets along the way.
4023 OFFSET is used to keep track of the offset in this entire structure, rather
4024 than just the immediately containing structure. Returns the number
4025 of fields pushed.
4026 HAS_UNION is set to true if we find a union type as a field of
4027 TYPE. ADDRESSABLE_TYPE is the type of the outermost object that could have
4028 its address taken. */
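/* For example (illustrative, assuming 32-bit int and float): for

     struct { int i; struct { float f; } inner; }

   two entries are pushed, one for i at bit offset 0 and one for
   inner.f at bit offset 32, since the recursion flattens the nested
   structure into its leaf fields.  */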
4031 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4032 HOST_WIDE_INT offset, bool *has_union,
4033 tree addressable_type)
4035 tree field;
4036 int count = 0;
4038 if (TREE_CODE (type) == COMPLEX_TYPE)
4040 fieldoff_s *real_part, *img_part;
4041 real_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4042 real_part->type = TREE_TYPE (type);
4043 real_part->size = TYPE_SIZE (TREE_TYPE (type));
4044 real_part->offset = offset;
4045 real_part->decl = NULL_TREE;
4046 real_part->alias_set = -1;
4048 img_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4049 img_part->type = TREE_TYPE (type);
4050 img_part->size = TYPE_SIZE (TREE_TYPE (type));
4051 img_part->offset = offset + TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (type)));
4052 img_part->decl = NULL_TREE;
4053 img_part->alias_set = -1;
4055 return 2;
4058 if (TREE_CODE (type) == ARRAY_TYPE)
4060 tree sz = TYPE_SIZE (type);
4061 tree elsz = TYPE_SIZE (TREE_TYPE (type));
4062 HOST_WIDE_INT nr;
4063 int i;
4065 if (! sz
4066 || ! host_integerp (sz, 1)
4067 || TREE_INT_CST_LOW (sz) == 0
4068 || ! elsz
4069 || ! host_integerp (elsz, 1)
4070 || TREE_INT_CST_LOW (elsz) == 0)
4071 return 0;
4073 nr = TREE_INT_CST_LOW (sz) / TREE_INT_CST_LOW (elsz);
4074 if (nr > SALIAS_MAX_ARRAY_ELEMENTS)
4075 return 0;
4077 for (i = 0; i < nr; ++i)
4079 bool push = false;
4080 int pushed = 0;
4082 if (has_union
4083 && (TREE_CODE (TREE_TYPE (type)) == QUAL_UNION_TYPE
4084 || TREE_CODE (TREE_TYPE (type)) == UNION_TYPE))
4085 *has_union = true;
4087 if (!AGGREGATE_TYPE_P (TREE_TYPE (type))) /* var_can_have_subvars */
4088 push = true;
4089 else if (!(pushed = push_fields_onto_fieldstack
4090 (TREE_TYPE (type), fieldstack,
4091 offset + i * TREE_INT_CST_LOW (elsz), has_union,
4092 TREE_TYPE (type))))
4093 /* Empty structures may have actual size, like in C++.  So if we
4094 didn't push any subfields and the size is nonzero, push the
4095 field itself onto the stack.  */
4096 push = true;
4098 if (push)
4100 fieldoff_s *pair;
4102 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4103 pair->type = TREE_TYPE (type);
4104 pair->size = elsz;
4105 pair->decl = NULL_TREE;
4106 pair->offset = offset + i * TREE_INT_CST_LOW (elsz);
4107 pair->alias_set = -1;
4108 count++;
4110 else
4111 count += pushed;
4114 return count;
4117 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4118 if (TREE_CODE (field) == FIELD_DECL)
4120 bool push = false;
4121 int pushed = 0;
4123 if (has_union
4124 && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4125 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
4126 *has_union = true;
4128 if (!var_can_have_subvars (field))
4129 push = true;
4130 else if (!(pushed = push_fields_onto_fieldstack
4131 (TREE_TYPE (field), fieldstack,
4132 offset + bitpos_of_field (field), has_union,
4133 (DECL_NONADDRESSABLE_P (field)
4134 ? addressable_type
4135 : TREE_TYPE (field))))
4136 && DECL_SIZE (field)
4137 && !integer_zerop (DECL_SIZE (field)))
4138 /* Empty structures may have actual size, like in C++.  So if we
4139 didn't push any subfields and the size is nonzero, push the
4140 field itself onto the stack.  */
4141 push = true;
4143 if (push)
4145 fieldoff_s *pair;
4147 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4148 pair->type = TREE_TYPE (field);
4149 pair->size = DECL_SIZE (field);
4150 pair->decl = field;
4151 pair->offset = offset + bitpos_of_field (field);
4152 if (DECL_NONADDRESSABLE_P (field))
4153 pair->alias_set = get_alias_set (addressable_type);
4154 else
4155 pair->alias_set = -1;
4156 count++;
4158 else
4159 count += pushed;
4162 return count;
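/* Illustrative decomposition (a sketch for a hypothetical type, not one
   appearing in this file):

       struct inner { int a; int b; };
       struct outer { struct inner in; long l; };

   On a typical ILP32 target, pushing the fields of struct outer yields
   fieldoff entries for in.a, in.b and l at bit offsets 0, 32 and 64 and a
   return value of 3: the recursion adds bitpos_of_field of each FIELD_DECL
   to the running OFFSET, and all offsets and sizes on the fieldstack are
   in bits, coming from DECL_SIZE/TYPE_SIZE.  */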
4165 /* Create a constraint from the ANYTHING variable to VI.  */
4166 static void
4167 make_constraint_from_anything (varinfo_t vi)
4169 struct constraint_expr lhs, rhs;
4171 lhs.var = vi->id;
4172 lhs.offset = 0;
4173 lhs.type = SCALAR;
4175 rhs.var = anything_id;
4176 rhs.offset = 0;
4177 rhs.type = ADDRESSOF;
4178 process_constraint (new_constraint (lhs, rhs));
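/* In the notation used by dump_constraints, the constraint generated above
   reads VI = &ANYTHING: a SCALAR left-hand side for VI and an ADDRESSOF
   right-hand side for the special ANYTHING variable, so VI's solution ends
   up containing every possible points-to target.  */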
4181 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4182 if it is a varargs function. */
4184 static unsigned int
4185 count_num_arguments (tree decl, bool *is_varargs)
4187 unsigned int i = 0;
4188 tree t;
4190 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
4192 t = TREE_CHAIN (t))
4194 if (TREE_VALUE (t) == void_type_node)
4195 break;
4196 i++;
4199 if (!t)
4200 *is_varargs = true;
4201 return i;
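/* Example with hypothetical declarations (not part of this file): for
   "int f (int, char *)" the TYPE_ARG_TYPES chain ends with void_type_node,
   so the loop stops there, *IS_VARARGS stays false and 2 is returned.
   For "int g (int, ...)" there is no terminating void_type_node, so T
   reaches NULL and *IS_VARARGS is set to true.  */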
4204 /* Create the variable info node for the function DECL, using NAME, and
4205 return the index of the variable we've created for it.  */
4207 static unsigned int
4208 create_function_info_for (tree decl, const char *name)
4210 unsigned int index = VEC_length (varinfo_t, varmap);
4211 varinfo_t vi;
4212 tree arg;
4213 unsigned int i;
4214 bool is_varargs = false;
4216 /* Create the variable info. */
4218 vi = new_var_info (decl, index, name);
4219 vi->decl = decl;
4220 vi->offset = 0;
4221 vi->has_union = 0;
4222 vi->size = 1;
4223 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
4224 insert_vi_for_tree (vi->decl, vi);
4225 VEC_safe_push (varinfo_t, heap, varmap, vi);
4227 stats.total_vars++;
4229 /* If it's varargs, we don't know how many arguments it has, so we
4230 can't do much.  */
4232 if (is_varargs)
4234 vi->fullsize = ~0;
4235 vi->size = ~0;
4236 vi->is_unknown_size_var = true;
4237 return index;
4241 arg = DECL_ARGUMENTS (decl);
4243 /* Set up variables for each argument. */
4244 for (i = 1; i < vi->fullsize; i++)
4246 varinfo_t argvi;
4247 const char *newname;
4248 char *tempname;
4249 unsigned int newindex;
4250 tree argdecl = decl;
4252 if (arg)
4253 argdecl = arg;
4255 newindex = VEC_length (varinfo_t, varmap);
4256 asprintf (&tempname, "%s.arg%d", name, i-1);
4257 newname = ggc_strdup (tempname);
4258 free (tempname);
4260 argvi = new_var_info (argdecl, newindex, newname);
4261 argvi->decl = argdecl;
4262 VEC_safe_push (varinfo_t, heap, varmap, argvi);
4263 argvi->offset = i;
4264 argvi->size = 1;
4265 argvi->fullsize = vi->fullsize;
4266 argvi->has_union = false;
4267 insert_into_field_list_sorted (vi, argvi);
4268 stats.total_vars ++;
4269 if (arg)
4271 insert_vi_for_tree (arg, argvi);
4272 arg = TREE_CHAIN (arg);
4276 /* Create a variable for the return var. */
4277 if (DECL_RESULT (decl) != NULL
4278 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
4280 varinfo_t resultvi;
4281 const char *newname;
4282 char *tempname;
4283 unsigned int newindex;
4284 tree resultdecl = decl;
4286 vi->fullsize ++;
4288 if (DECL_RESULT (decl))
4289 resultdecl = DECL_RESULT (decl);
4291 newindex = VEC_length (varinfo_t, varmap);
4292 asprintf (&tempname, "%s.result", name);
4293 newname = ggc_strdup (tempname);
4294 free (tempname);
4296 resultvi = new_var_info (resultdecl, newindex, newname);
4297 resultvi->decl = resultdecl;
4298 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
4299 resultvi->offset = i;
4300 resultvi->size = 1;
4301 resultvi->fullsize = vi->fullsize;
4302 resultvi->has_union = false;
4303 insert_into_field_list_sorted (vi, resultvi);
4304 stats.total_vars ++;
4305 if (DECL_RESULT (decl))
4306 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
4308 return index;
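/* Resulting layout (sketch): for a function F taking two arguments the
   varinfo chain built above behaves like a small structure indexed by
   offset:

       offset 0   F          (the function itself)
       offset 1   F.arg0
       offset 2   F.arg1
       offset 3   F.result   (created when there is a result to model)

   so later constraint generation can refer to a particular argument or the
   return slot by a constant offset from the function's varinfo.  */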
4312 /* Return true if FIELDSTACK contains fields that overlap.
4313 FIELDSTACK is assumed to be sorted by offset. */
4315 static bool
4316 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
4318 fieldoff_s *fo = NULL;
4319 unsigned int i;
4320 HOST_WIDE_INT lastoffset = -1;
4322 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4324 if (fo->offset == lastoffset)
4325 return true;
4326 lastoffset = fo->offset;
4328 return false;
4331 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
4332 This will also create any varinfo structures necessary for fields
4333 of DECL. */
4335 static unsigned int
4336 create_variable_info_for (tree decl, const char *name)
4338 unsigned int index = VEC_length (varinfo_t, varmap);
4339 varinfo_t vi;
4340 tree decltype = TREE_TYPE (decl);
4341 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
4342 bool notokay = false;
4343 bool hasunion;
4344 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
4345 VEC (fieldoff_s,heap) *fieldstack = NULL;
4347 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
4348 return create_function_info_for (decl, name);
4350 hasunion = TREE_CODE (decltype) == UNION_TYPE
4351 || TREE_CODE (decltype) == QUAL_UNION_TYPE;
4352 if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
4354 push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion,
4355 decltype);
4356 if (hasunion)
4358 VEC_free (fieldoff_s, heap, fieldstack);
4359 notokay = true;
4364 /* If the variable doesn't have subvars, we may end up needing to
4365 sort the field list and create fake variables for all the
4366 fields. */
4367 vi = new_var_info (decl, index, name);
4368 vi->decl = decl;
4369 vi->offset = 0;
4370 vi->has_union = hasunion;
4371 if (!declsize
4372 || TREE_CODE (declsize) != INTEGER_CST
4373 || TREE_CODE (decltype) == UNION_TYPE
4374 || TREE_CODE (decltype) == QUAL_UNION_TYPE)
4376 vi->is_unknown_size_var = true;
4377 vi->fullsize = ~0;
4378 vi->size = ~0;
4380 else
4382 vi->fullsize = TREE_INT_CST_LOW (declsize);
4383 vi->size = vi->fullsize;
4386 insert_vi_for_tree (vi->decl, vi);
4387 VEC_safe_push (varinfo_t, heap, varmap, vi);
4388 if (is_global && (!flag_whole_program || !in_ipa_mode))
4389 make_constraint_from_anything (vi);
4391 stats.total_vars++;
4392 if (use_field_sensitive
4393 && !notokay
4394 && !vi->is_unknown_size_var
4395 && var_can_have_subvars (decl)
4396 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
4398 unsigned int newindex = VEC_length (varinfo_t, varmap);
4399 fieldoff_s *fo = NULL;
4400 unsigned int i;
4402 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4404 if (! fo->size
4405 || TREE_CODE (fo->size) != INTEGER_CST
4406 || fo->offset < 0)
4408 notokay = true;
4409 break;
4413 /* We can't sort them if we have a field with a variable sized type,
4414 which will make notokay = true. In that case, we are going to return
4415 without creating varinfos for the fields anyway, so sorting them is a
4416 waste to boot. */
4417 if (!notokay)
4419 sort_fieldstack (fieldstack);
4420 /* Due to some C++ FE issues, like PR 22488, we might end up with
4421 what appear to be overlapping fields even though they, in
4422 reality, do not overlap.  Until the C++ FE is fixed, we will
4423 simply disable field-sensitivity for these cases.  */
4424 notokay = check_for_overlaps (fieldstack);
4428 if (VEC_length (fieldoff_s, fieldstack) != 0)
4429 fo = VEC_index (fieldoff_s, fieldstack, 0);
4431 if (fo == NULL || notokay)
4433 vi->is_unknown_size_var = 1;
4434 vi->fullsize = ~0;
4435 vi->size = ~0;
4436 VEC_free (fieldoff_s, heap, fieldstack);
4437 return index;
4440 vi->size = TREE_INT_CST_LOW (fo->size);
4441 vi->offset = fo->offset;
4442 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
4443 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
4444 i--)
4446 varinfo_t newvi;
4447 const char *newname = "NULL";
4448 char *tempname;
4450 newindex = VEC_length (varinfo_t, varmap);
4451 if (dump_file)
4453 if (fo->decl)
4454 asprintf (&tempname, "%s.%s",
4455 vi->name, alias_get_name (fo->decl));
4456 else
4457 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC,
4458 vi->name, fo->offset);
4459 newname = ggc_strdup (tempname);
4460 free (tempname);
4462 newvi = new_var_info (decl, newindex, newname);
4463 newvi->offset = fo->offset;
4464 newvi->size = TREE_INT_CST_LOW (fo->size);
4465 newvi->fullsize = vi->fullsize;
4466 insert_into_field_list (vi, newvi);
4467 VEC_safe_push (varinfo_t, heap, varmap, newvi);
4468 if (is_global && (!flag_whole_program || !in_ipa_mode))
4469 make_constraint_from_anything (newvi);
4471 stats.total_vars++;
4473 VEC_free (fieldoff_s, heap, fieldstack);
4475 return index;
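/* Naming sketch (hypothetical variable, shown only for illustration): a
   declaration such as

       struct { int x; int y; } s;

   produces a varinfo for "s" plus one varinfo per field; when dumping is
   enabled the field varinfos are named "s.x" and "s.y" (or
   "s.<bit-offset>" for a field without a DECL), and they are linked into
   s's field list so lookups by offset stay field-sensitive.  */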
4478 /* Print out the points-to solution for VAR to FILE. */
4480 void
4481 dump_solution_for_var (FILE *file, unsigned int var)
4483 varinfo_t vi = get_varinfo (var);
4484 unsigned int i;
4485 bitmap_iterator bi;
4487 if (find (var) != var)
4489 varinfo_t vipt = get_varinfo (find (var));
4490 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4492 else
4494 fprintf (file, "%s = { ", vi->name);
4495 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4497 fprintf (file, "%s ", get_varinfo (i)->name);
4499 fprintf (file, "}");
4500 if (vi->no_tbaa_pruning)
4501 fprintf (file, " no-tbaa-pruning");
4502 fprintf (file, "\n");
4506 /* Print the points-to solution for VAR to stdout. */
4508 void
4509 debug_solution_for_var (unsigned int var)
4511 dump_solution_for_var (stdout, var);
4514 /* Create varinfo structures for all of the variables in the
4515 function for intraprocedural mode. */
4517 static void
4518 intra_create_variable_infos (void)
4520 tree t;
4521 struct constraint_expr lhs, rhs;
4523 /* For each incoming pointer argument arg, create the constraint ARG
4524 = ANYTHING or a dummy variable if flag_argument_noalias is set. */
4525 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
4527 varinfo_t p;
4529 if (!could_have_pointers (t))
4530 continue;
4532 /* If flag_argument_noalias is set, then function pointer
4533 arguments are guaranteed not to point to each other. In that
4534 case, create an artificial variable PARM_NOALIAS and the
4535 constraint ARG = &PARM_NOALIAS. */
4536 if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
4538 varinfo_t vi;
4539 tree heapvar = heapvar_lookup (t);
4541 lhs.offset = 0;
4542 lhs.type = SCALAR;
4543 lhs.var = get_vi_for_tree (t)->id;
4545 if (heapvar == NULL_TREE)
4547 var_ann_t ann;
4548 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
4549 "PARM_NOALIAS");
4550 DECL_EXTERNAL (heapvar) = 1;
4551 if (gimple_referenced_vars (cfun))
4552 add_referenced_var (heapvar);
4554 heapvar_insert (t, heapvar);
4556 ann = get_var_ann (heapvar);
4557 if (flag_argument_noalias == 1)
4558 ann->noalias_state = NO_ALIAS;
4559 else if (flag_argument_noalias == 2)
4560 ann->noalias_state = NO_ALIAS_GLOBAL;
4561 else if (flag_argument_noalias == 3)
4562 ann->noalias_state = NO_ALIAS_ANYTHING;
4563 else
4564 gcc_unreachable ();
4567 vi = get_vi_for_tree (heapvar);
4568 vi->is_artificial_var = 1;
4569 vi->is_heap_var = 1;
4570 rhs.var = vi->id;
4571 rhs.type = ADDRESSOF;
4572 rhs.offset = 0;
4573 for (p = get_varinfo (lhs.var); p; p = p->next)
4575 struct constraint_expr temp = lhs;
4576 temp.var = p->id;
4577 process_constraint (new_constraint (temp, rhs));
4580 else
4582 varinfo_t arg_vi = get_vi_for_tree (t);
4584 for (p = arg_vi; p; p = p->next)
4585 make_constraint_from_anything (p);
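/* Constraint sketch for a hypothetical pointer parameter "int *p" of the
   current function, in the notation of the dumps:

       flag_argument_noalias > 0:   p = &PARM_NOALIAS
                                    (a fresh artificial heap variable)
       otherwise:                   p = &ANYTHING
                                    (via make_constraint_from_anything)

   so an incoming pointer either gets a private dummy target that encodes
   the noalias guarantee or is pessimistically assumed to point anywhere.  */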
4590 /* Structure used to put solution bitmaps in a hashtable so they can
4591 be shared among variables with the same points-to set. */
4593 typedef struct shared_bitmap_info
4595 bitmap pt_vars;
4596 hashval_t hashcode;
4597 } *shared_bitmap_info_t;
4598 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
4600 static htab_t shared_bitmap_table;
4602 /* Hash function for a shared_bitmap_info_t */
4604 static hashval_t
4605 shared_bitmap_hash (const void *p)
4607 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
4608 return bi->hashcode;
4611 /* Equality function for two shared_bitmap_info_t's. */
4613 static int
4614 shared_bitmap_eq (const void *p1, const void *p2)
4616 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
4617 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
4618 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
4621 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
4622 existing instance if there is one, NULL otherwise. */
4624 static bitmap
4625 shared_bitmap_lookup (bitmap pt_vars)
4627 void **slot;
4628 struct shared_bitmap_info sbi;
4630 sbi.pt_vars = pt_vars;
4631 sbi.hashcode = bitmap_hash (pt_vars);
4633 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
4634 sbi.hashcode, NO_INSERT);
4635 if (!slot)
4636 return NULL;
4637 else
4638 return ((shared_bitmap_info_t) *slot)->pt_vars;
4642 /* Add a bitmap to the shared bitmap hashtable. */
4644 static void
4645 shared_bitmap_add (bitmap pt_vars)
4647 void **slot;
4648 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
4650 sbi->pt_vars = pt_vars;
4651 sbi->hashcode = bitmap_hash (pt_vars);
4653 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
4654 sbi->hashcode, INSERT);
4655 gcc_assert (!*slot);
4656 *slot = (void *) sbi;
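/* Typical usage pattern for the shared bitmap table, as used below in
   find_what_p_points_to (sketch; FINISHED_SOLUTION is the per-pointer set
   being finalized):

       result = shared_bitmap_lookup (finished_solution);
       if (!result)
         shared_bitmap_add (finished_solution);
       else
         {
           pi->pt_vars = result;
           bitmap_clear (finished_solution);
         }

   so pointers with identical points-to sets share a single bitmap.  */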
4660 /* Set bits in INTO corresponding to the variable uids in solution set
4661 FROM, which came from variable PTR.
4662 For variables that are actually dereferenced, we also use type
4663 based alias analysis to prune the points-to sets.
4664 IS_DEREFED is true if PTR was directly dereferenced, which we use to
4665 help determine whether we are allowed to prune using TBAA.
4666 If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
4667 the from set. */
4669 static void
4670 set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
4671 bool no_tbaa_pruning)
4673 unsigned int i;
4674 bitmap_iterator bi;
4675 subvar_t sv;
4676 alias_set_type ptr_alias_set = get_alias_set (TREE_TYPE (ptr));
4678 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4680 varinfo_t vi = get_varinfo (i);
4681 alias_set_type var_alias_set;
4683 /* The only artificial variables that are allowed in a may-alias
4684 set are heap variables. */
4685 if (vi->is_artificial_var && !vi->is_heap_var)
4686 continue;
4688 if (vi->has_union && get_subvars_for_var (vi->decl) != NULL)
4690 /* Variables containing unions may need to be converted to
4691 their SFT's, because SFT's can have unions and we cannot. */
4692 for (sv = get_subvars_for_var (vi->decl); sv; sv = sv->next)
4693 bitmap_set_bit (into, DECL_UID (sv->var));
4695 else if (TREE_CODE (vi->decl) == VAR_DECL
4696 || TREE_CODE (vi->decl) == PARM_DECL
4697 || TREE_CODE (vi->decl) == RESULT_DECL)
4699 if (var_can_have_subvars (vi->decl)
4700 && get_subvars_for_var (vi->decl))
4702 /* If VI->DECL is an aggregate for which we created
4703 SFTs, add the SFT corresponding to VI->OFFSET. */
4704 tree sft = get_subvar_at (vi->decl, vi->offset);
4705 gcc_assert (sft);
4706 if (sft)
4708 var_alias_set = get_alias_set (sft);
4709 if (no_tbaa_pruning
4710 || (!is_derefed && !vi->directly_dereferenced)
4711 || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
4712 bitmap_set_bit (into, DECL_UID (sft));
4715 else
4717 /* Otherwise, just add VI->DECL to the alias set.
4718 Don't type prune artificial vars. */
4719 if (vi->is_artificial_var)
4720 bitmap_set_bit (into, DECL_UID (vi->decl));
4721 else
4723 var_alias_set = get_alias_set (vi->decl);
4724 if (no_tbaa_pruning
4725 || (!is_derefed && !vi->directly_dereferenced)
4726 || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
4727 bitmap_set_bit (into, DECL_UID (vi->decl));
4735 static bool have_alias_info = false;
4737 /* The list of SMT's that are in use by our pointer variables. This
4738 is the set of SMT's for all pointers that can point to anything. */
4739 static bitmap used_smts;
4741 /* Due to the ordering of points-to set calculation and SMT
4742 calculation being a bit co-dependent, we can't just calculate SMT
4743 used info whenever we want; we have to calculate it around the time
4744 that find_what_p_points_to is called.  */
4746 /* Mark which SMT's are in use by points-to anything variables. */
4748 void
4749 set_used_smts (void)
4751 int i;
4752 varinfo_t vi;
4753 used_smts = BITMAP_ALLOC (&pta_obstack);
4755 for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); i++)
4757 tree var = vi->decl;
4758 varinfo_t withsolution = get_varinfo (find (i));
4759 tree smt;
4760 var_ann_t va;
4761 struct ptr_info_def *pi = NULL;
4763 /* For parm decls, the pointer info may be under the default
4764 def. */
4765 if (TREE_CODE (vi->decl) == PARM_DECL
4766 && gimple_default_def (cfun, var))
4767 pi = SSA_NAME_PTR_INFO (gimple_default_def (cfun, var));
4768 else if (TREE_CODE (var) == SSA_NAME)
4769 pi = SSA_NAME_PTR_INFO (var);
4771 /* Skip the special variables and those that can't be aliased. */
4772 if (vi->is_special_var
4773 || !SSA_VAR_P (var)
4774 || (pi && !pi->is_dereferenced)
4775 || (TREE_CODE (var) == VAR_DECL && !may_be_aliased (var))
4776 || !POINTER_TYPE_P (TREE_TYPE (var)))
4777 continue;
4779 if (TREE_CODE (var) == SSA_NAME)
4780 var = SSA_NAME_VAR (var);
4782 va = var_ann (var);
4783 if (!va)
4784 continue;
4786 smt = va->symbol_mem_tag;
4787 if (smt && bitmap_bit_p (withsolution->solution, anything_id))
4788 bitmap_set_bit (used_smts, DECL_UID (smt));
4792 /* Merge the necessary SMT's into the bitmap INTO, which is
4793 P's varinfo. This involves merging all SMT's that are a subset of
4794 the SMT necessary for P. */
4796 static void
4797 merge_smts_into (tree p, bitmap solution)
4799 unsigned int i;
4800 bitmap_iterator bi;
4801 tree smt;
4802 bitmap aliases;
4803 tree var = p;
4805 if (TREE_CODE (p) == SSA_NAME)
4806 var = SSA_NAME_VAR (p);
4808 smt = var_ann (var)->symbol_mem_tag;
4809 if (smt)
4811 alias_set_type smtset = get_alias_set (TREE_TYPE (smt));
4813 /* Need to set the SMT subsets first before this
4814 will work properly. */
4815 bitmap_set_bit (solution, DECL_UID (smt));
4816 EXECUTE_IF_SET_IN_BITMAP (used_smts, 0, i, bi)
4818 tree newsmt = referenced_var (i);
4819 tree newsmttype = TREE_TYPE (newsmt);
4821 if (alias_set_subset_of (get_alias_set (newsmttype),
4822 smtset))
4823 bitmap_set_bit (solution, i);
4826 aliases = MTAG_ALIASES (smt);
4827 if (aliases)
4828 bitmap_ior_into (solution, aliases);
4832 /* Given a pointer variable P, fill in its points-to set, or return
4833 false if we can't.
4834 Rather than return false for variables that point-to anything, we
4835 instead find the corresponding SMT, and merge in its aliases. In
4836 addition to these aliases, we also set the bits for the SMT's
4837 themselves and their subsets, as SMT's are still in use by
4838 non-SSA_NAME's, and pruning may eliminate every one of their
4839 aliases. In such a case, if we did not include the right set of
4840 SMT's in the points-to set of the variable, we'd end up with
4841 statements that do not conflict but should. */
4843 bool
4844 find_what_p_points_to (tree p)
4846 tree lookup_p = p;
4847 varinfo_t vi;
4849 if (!have_alias_info)
4850 return false;
4852 /* For parameters, get at the points-to set for the actual parm
4853 decl. */
4854 if (TREE_CODE (p) == SSA_NAME
4855 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4856 && SSA_NAME_IS_DEFAULT_DEF (p))
4857 lookup_p = SSA_NAME_VAR (p);
4859 vi = lookup_vi_for_tree (lookup_p);
4860 if (vi)
4862 if (vi->is_artificial_var)
4863 return false;
4865 /* See if this is a field or a structure. */
4866 if (vi->size != vi->fullsize)
4868 /* Nothing currently asks about structure fields directly,
4869 but when something does, we need code here to hand back the
4870 points-to set. */
4871 if (!var_can_have_subvars (vi->decl)
4872 || get_subvars_for_var (vi->decl) == NULL)
4873 return false;
4875 else
4877 struct ptr_info_def *pi = get_ptr_info (p);
4878 unsigned int i;
4879 bitmap_iterator bi;
4880 bool was_pt_anything = false;
4881 bitmap finished_solution;
4882 bitmap result;
4884 if (!pi->is_dereferenced)
4885 return false;
4887 /* This variable may have been collapsed, let's get the real
4888 variable. */
4889 vi = get_varinfo (find (vi->id));
4891 /* Translate artificial variables into SSA_NAME_PTR_INFO
4892 attributes. */
4893 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4895 varinfo_t vi = get_varinfo (i);
4897 if (vi->is_artificial_var)
4899 /* FIXME. READONLY should be handled better so that
4900 flow insensitive aliasing can disregard writable
4901 aliases. */
4902 if (vi->id == nothing_id)
4903 pi->pt_null = 1;
4904 else if (vi->id == anything_id)
4905 was_pt_anything = 1;
4906 else if (vi->id == readonly_id)
4907 was_pt_anything = 1;
4908 else if (vi->id == integer_id)
4909 was_pt_anything = 1;
4910 else if (vi->is_heap_var)
4911 pi->pt_global_mem = 1;
4915 /* Share the final set of variables when possible. */
4917 finished_solution = BITMAP_GGC_ALLOC ();
4918 stats.points_to_sets_created++;
4920 /* Instead of using pt_anything, we merge in the SMT aliases
4921 for the underlying SMT. In addition, if they could have
4922 pointed to anything, they could point to global memory.
4923 But we cannot do that for ref-all pointers because these
4924 aliases have not been computed yet. */
4925 if (was_pt_anything)
4927 if (PTR_IS_REF_ALL (p))
4929 pi->pt_anything = 1;
4930 return false;
4933 merge_smts_into (p, finished_solution);
4934 pi->pt_global_mem = 1;
4937 set_uids_in_ptset (vi->decl, finished_solution, vi->solution,
4938 vi->directly_dereferenced,
4939 vi->no_tbaa_pruning);
4940 result = shared_bitmap_lookup (finished_solution);
4942 if (!result)
4944 shared_bitmap_add (finished_solution);
4945 pi->pt_vars = finished_solution;
4947 else
4949 pi->pt_vars = result;
4950 bitmap_clear (finished_solution);
4953 if (bitmap_empty_p (pi->pt_vars))
4954 pi->pt_vars = NULL;
4956 return true;
4960 return false;
4965 /* Dump points-to information to OUTFILE. */
4967 void
4968 dump_sa_points_to_info (FILE *outfile)
4970 unsigned int i;
4972 fprintf (outfile, "\nPoints-to sets\n\n");
4974 if (dump_flags & TDF_STATS)
4976 fprintf (outfile, "Stats:\n");
4977 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
4978 fprintf (outfile, "Non-pointer vars: %d\n",
4979 stats.nonpointer_vars);
4980 fprintf (outfile, "Statically unified vars: %d\n",
4981 stats.unified_vars_static);
4982 fprintf (outfile, "Dynamically unified vars: %d\n",
4983 stats.unified_vars_dynamic);
4984 fprintf (outfile, "Iterations: %d\n", stats.iterations);
4985 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
4986 fprintf (outfile, "Number of implicit edges: %d\n",
4987 stats.num_implicit_edges);
4990 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
4991 dump_solution_for_var (outfile, i);
4995 /* Debug points-to information to stderr. */
4997 void
4998 debug_sa_points_to_info (void)
5000 dump_sa_points_to_info (stderr);
5004 /* Initialize the always-existing constraint variables for NULL,
5005 ANYTHING, READONLY, and INTEGER.  */
5007 static void
5008 init_base_vars (void)
5010 struct constraint_expr lhs, rhs;
5012 /* Create the NULL variable, used to represent that a variable points
5013 to NULL. */
5014 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
5015 var_nothing = new_var_info (nothing_tree, 0, "NULL");
5016 insert_vi_for_tree (nothing_tree, var_nothing);
5017 var_nothing->is_artificial_var = 1;
5018 var_nothing->offset = 0;
5019 var_nothing->size = ~0;
5020 var_nothing->fullsize = ~0;
5021 var_nothing->is_special_var = 1;
5022 nothing_id = 0;
5023 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
5025 /* Create the ANYTHING variable, used to represent that a variable
5026 points to some unknown piece of memory. */
5027 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
5028 var_anything = new_var_info (anything_tree, 1, "ANYTHING");
5029 insert_vi_for_tree (anything_tree, var_anything);
5030 var_anything->is_artificial_var = 1;
5031 var_anything->size = ~0;
5032 var_anything->offset = 0;
5033 var_anything->next = NULL;
5034 var_anything->fullsize = ~0;
5035 var_anything->is_special_var = 1;
5036 anything_id = 1;
5038 /* Anything points to anything. This makes deref constraints just
5039 work in the presence of linked lists and other p = *p type loops,
5040 by saying that *ANYTHING = ANYTHING. */
5041 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
5042 lhs.type = SCALAR;
5043 lhs.var = anything_id;
5044 lhs.offset = 0;
5045 rhs.type = ADDRESSOF;
5046 rhs.var = anything_id;
5047 rhs.offset = 0;
5049 /* This specifically does not use process_constraint because
5050 process_constraint ignores all anything = anything constraints, since all
5051 but this one are redundant. */
5052 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
5054 /* Create the READONLY variable, used to represent that a variable
5055 points to readonly memory. */
5056 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
5057 var_readonly = new_var_info (readonly_tree, 2, "READONLY");
5058 var_readonly->is_artificial_var = 1;
5059 var_readonly->offset = 0;
5060 var_readonly->size = ~0;
5061 var_readonly->fullsize = ~0;
5062 var_readonly->next = NULL;
5063 var_readonly->is_special_var = 1;
5064 insert_vi_for_tree (readonly_tree, var_readonly);
5065 readonly_id = 2;
5066 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
5068 /* readonly memory points to anything, in order to make deref
5069 easier. In reality, it points to anything the particular
5070 readonly variable can point to, but we don't track this
5071 separately. */
5072 lhs.type = SCALAR;
5073 lhs.var = readonly_id;
5074 lhs.offset = 0;
5075 rhs.type = ADDRESSOF;
5076 rhs.var = anything_id;
5077 rhs.offset = 0;
5079 process_constraint (new_constraint (lhs, rhs));
5081 /* Create the INTEGER variable, used to represent that a variable points
5082 to an INTEGER. */
5083 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
5084 var_integer = new_var_info (integer_tree, 3, "INTEGER");
5085 insert_vi_for_tree (integer_tree, var_integer);
5086 var_integer->is_artificial_var = 1;
5087 var_integer->size = ~0;
5088 var_integer->fullsize = ~0;
5089 var_integer->offset = 0;
5090 var_integer->next = NULL;
5091 var_integer->is_special_var = 1;
5092 integer_id = 3;
5093 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
5095 /* INTEGER = ANYTHING, because we don't know where a dereference of
5096 a random integer will point to. */
5097 lhs.type = SCALAR;
5098 lhs.var = integer_id;
5099 lhs.offset = 0;
5100 rhs.type = ADDRESSOF;
5101 rhs.var = anything_id;
5102 rhs.offset = 0;
5103 process_constraint (new_constraint (lhs, rhs));
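/* Summary of the base constraints set up above, in the notation used by
   dump_constraints:

       ANYTHING = &ANYTHING   (pushed directly onto the constraint vector,
                               bypassing process_constraint)
       READONLY = &ANYTHING
       INTEGER  = &ANYTHING

   NULL gets no constraint of its own; it exists only so points-to sets can
   record that a pointer may be null.  */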
5106 /* Initialize things necessary to perform PTA */
5108 static void
5109 init_alias_vars (void)
5111 bitmap_obstack_initialize (&pta_obstack);
5112 bitmap_obstack_initialize (&oldpta_obstack);
5113 bitmap_obstack_initialize (&predbitmap_obstack);
5115 constraint_pool = create_alloc_pool ("Constraint pool",
5116 sizeof (struct constraint), 30);
5117 variable_info_pool = create_alloc_pool ("Variable info pool",
5118 sizeof (struct variable_info), 30);
5119 constraints = VEC_alloc (constraint_t, heap, 8);
5120 varmap = VEC_alloc (varinfo_t, heap, 8);
5121 vi_for_tree = pointer_map_create ();
5123 memset (&stats, 0, sizeof (stats));
5124 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
5125 shared_bitmap_eq, free);
5126 init_base_vars ();
5129 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
5130 predecessor edges. */
5132 static void
5133 remove_preds_and_fake_succs (constraint_graph_t graph)
5135 unsigned int i;
5137 /* Clear the implicit ref and address nodes from the successor
5138 lists. */
5139 for (i = 0; i < FIRST_REF_NODE; i++)
5141 if (graph->succs[i])
5142 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
5143 FIRST_REF_NODE * 2);
5146 /* Free the successor list for the non-ref nodes. */
5147 for (i = FIRST_REF_NODE; i < graph->size; i++)
5149 if (graph->succs[i])
5150 BITMAP_FREE (graph->succs[i]);
5153 /* Now resize the successor list to the current number of variables,
5154 and blow away the predecessor bitmaps.  */
5155 graph->size = VEC_length (varinfo_t, varmap);
5156 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
5158 free (graph->implicit_preds);
5159 graph->implicit_preds = NULL;
5160 free (graph->preds);
5161 graph->preds = NULL;
5162 bitmap_obstack_release (&predbitmap_obstack);
5165 /* Compute the set of variables we can't TBAA prune. */
5167 static void
5168 compute_tbaa_pruning (void)
5170 unsigned int size = VEC_length (varinfo_t, varmap);
5171 unsigned int i;
5172 bool any;
5174 changed_count = 0;
5175 changed = sbitmap_alloc (size);
5176 sbitmap_zero (changed);
5178 /* Mark all initial no_tbaa_pruning nodes as changed. */
5179 any = false;
5180 for (i = 0; i < size; ++i)
5182 varinfo_t ivi = get_varinfo (i);
5184 if (find (i) == i && ivi->no_tbaa_pruning)
5186 any = true;
5187 if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
5188 || VEC_length (constraint_t, graph->complex[i]) > 0)
5190 SET_BIT (changed, i);
5191 ++changed_count;
5196 while (changed_count > 0)
5198 struct topo_info *ti = init_topo_info ();
5199 ++stats.iterations;
5201 compute_topo_order (graph, ti);
5203 while (VEC_length (unsigned, ti->topo_order) != 0)
5205 bitmap_iterator bi;
5207 i = VEC_pop (unsigned, ti->topo_order);
5209 /* If this variable is not a representative, skip it. */
5210 if (find (i) != i)
5211 continue;
5213 /* If the node has changed, we need to process the complex
5214 constraints and outgoing edges again. */
5215 if (TEST_BIT (changed, i))
5217 unsigned int j;
5218 constraint_t c;
5219 VEC(constraint_t,heap) *complex = graph->complex[i];
5221 RESET_BIT (changed, i);
5222 --changed_count;
5224 /* Process the complex copy constraints. */
5225 for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
5227 if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
5229 varinfo_t lhsvi = get_varinfo (find (c->lhs.var));
5231 if (!lhsvi->no_tbaa_pruning)
5233 lhsvi->no_tbaa_pruning = true;
5234 if (!TEST_BIT (changed, lhsvi->id))
5236 SET_BIT (changed, lhsvi->id);
5237 ++changed_count;
5243 /* Propagate to all successors. */
5244 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
5246 unsigned int to = find (j);
5247 varinfo_t tovi = get_varinfo (to);
5249 /* Don't propagate to ourselves. */
5250 if (to == i)
5251 continue;
5253 if (!tovi->no_tbaa_pruning)
5255 tovi->no_tbaa_pruning = true;
5256 if (!TEST_BIT (changed, to))
5258 SET_BIT (changed, to);
5259 ++changed_count;
5266 free_topo_info (ti);
5269 sbitmap_free (changed);
5271 if (any)
5273 for (i = 0; i < size; ++i)
5275 varinfo_t ivi = get_varinfo (i);
5276 varinfo_t ivip = get_varinfo (find (i));
5278 if (ivip->no_tbaa_pruning)
5280 tree var = ivi->decl;
5282 if (TREE_CODE (var) == SSA_NAME)
5283 var = SSA_NAME_VAR (var);
5285 if (POINTER_TYPE_P (TREE_TYPE (var)))
5287 DECL_NO_TBAA_P (var) = 1;
5289 /* Tell the RTL layer that this pointer can alias
5290 anything. */
5291 DECL_POINTER_ALIAS_SET (var) = 0;
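/* Propagation sketch: when a variable marked no_tbaa_pruning is copied to
   another (a SCALAR = SCALAR complex constraint or a successor edge in the
   graph), the destination is marked as well and re-queued, so the property
   flows to everything reachable from the initially marked nodes; the fixed
   point is reached once CHANGED_COUNT drops to zero.  */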
5298 /* Create points-to sets for the current function. See the comments
5299 at the start of the file for an algorithmic overview. */
5301 void
5302 compute_points_to_sets (struct alias_info *ai)
5304 struct scc_info *si;
5305 basic_block bb;
5307 timevar_push (TV_TREE_PTA);
5309 init_alias_vars ();
5310 init_alias_heapvars ();
5312 intra_create_variable_infos ();
5314 /* Now walk all statements and derive aliases. */
5315 FOR_EACH_BB (bb)
5317 block_stmt_iterator bsi;
5318 tree phi;
5320 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5322 if (is_gimple_reg (PHI_RESULT (phi)))
5324 find_func_aliases (phi);
5326 /* Update various related attributes like escaped
5327 addresses, pointer dereferences for loads and stores.
5328 This is used when creating name tags and alias
5329 sets. */
5330 update_alias_info (phi, ai);
5334 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
5336 tree stmt = bsi_stmt (bsi);
5338 find_func_aliases (stmt);
5340 /* Update various related attributes like escaped
5341 addresses, pointer dereferences for loads and stores.
5342 This is used when creating name tags and alias
5343 sets. */
5344 update_alias_info (stmt, ai);
5346 /* The information in CHANGE_DYNAMIC_TYPE_EXPR nodes has now
5347 been captured, and we can remove them. */
5348 if (TREE_CODE (stmt) == CHANGE_DYNAMIC_TYPE_EXPR)
5349 bsi_remove (&bsi, true);
5350 else
5351 bsi_next (&bsi);
5356 if (dump_file)
5358 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5359 dump_constraints (dump_file);
5362 if (dump_file)
5363 fprintf (dump_file,
5364 "\nCollapsing static cycles and doing variable "
5365 "substitution\n");
5367 init_graph (VEC_length (varinfo_t, varmap) * 2);
5369 if (dump_file)
5370 fprintf (dump_file, "Building predecessor graph\n");
5371 build_pred_graph ();
5373 if (dump_file)
5374 fprintf (dump_file, "Detecting pointer and location "
5375 "equivalences\n");
5376 si = perform_var_substitution (graph);
5378 if (dump_file)
5379 fprintf (dump_file, "Rewriting constraints and unifying "
5380 "variables\n");
5381 rewrite_constraints (graph, si);
5382 free_var_substitution_info (si);
5384 build_succ_graph ();
5385 move_complex_constraints (graph);
5387 if (dump_file)
5388 fprintf (dump_file, "Uniting pointer but not location equivalent "
5389 "variables\n");
5390 unite_pointer_equivalences (graph);
5392 if (dump_file)
5393 fprintf (dump_file, "Finding indirect cycles\n");
5394 find_indirect_cycles (graph);
5396 /* Implicit nodes and predecessors are no longer necessary at this
5397 point. */
5398 remove_preds_and_fake_succs (graph);
5400 if (dump_file)
5401 fprintf (dump_file, "Solving graph\n");
5403 solve_graph (graph);
5405 compute_tbaa_pruning ();
5407 if (dump_file)
5408 dump_sa_points_to_info (dump_file);
5410 have_alias_info = true;
5412 timevar_pop (TV_TREE_PTA);
5416 /* Delete created points-to sets. */
5418 void
5419 delete_points_to_sets (void)
5421 unsigned int i;
5423 htab_delete (shared_bitmap_table);
5424 if (dump_file && (dump_flags & TDF_STATS))
5425 fprintf (dump_file, "Points to sets created:%d\n",
5426 stats.points_to_sets_created);
5428 pointer_map_destroy (vi_for_tree);
5429 bitmap_obstack_release (&pta_obstack);
5430 VEC_free (constraint_t, heap, constraints);
5432 for (i = 0; i < graph->size; i++)
5433 VEC_free (constraint_t, heap, graph->complex[i]);
5434 free (graph->complex);
5436 free (graph->rep);
5437 free (graph->succs);
5438 free (graph->pe);
5439 free (graph->pe_rep);
5440 free (graph->indirect_cycles);
5441 free (graph);
5443 VEC_free (varinfo_t, heap, varmap);
5444 free_alloc_pool (variable_info_pool);
5445 free_alloc_pool (constraint_pool);
5446 have_alias_info = false;
5449 /* Return true if we should execute IPA PTA. */
5450 static bool
5451 gate_ipa_pta (void)
5453 return (flag_unit_at_a_time != 0
5454 && flag_ipa_pta
5455 /* Don't bother doing anything if the program has errors. */
5456 && !(errorcount || sorrycount));
5459 /* Execute the driver for IPA PTA. */
5460 static unsigned int
5461 ipa_pta_execute (void)
5463 struct cgraph_node *node;
5464 struct scc_info *si;
5466 in_ipa_mode = 1;
5467 init_alias_heapvars ();
5468 init_alias_vars ();
5470 for (node = cgraph_nodes; node; node = node->next)
5472 if (!node->analyzed || cgraph_is_master_clone (node))
5474 unsigned int varid;
5476 varid = create_function_info_for (node->decl,
5477 cgraph_node_name (node));
5478 if (node->local.externally_visible)
5480 varinfo_t fi = get_varinfo (varid);
5481 for (; fi; fi = fi->next)
5482 make_constraint_from_anything (fi);
5486 for (node = cgraph_nodes; node; node = node->next)
5488 if (node->analyzed && cgraph_is_master_clone (node))
5490 struct function *cfun = DECL_STRUCT_FUNCTION (node->decl);
5491 basic_block bb;
5492 tree old_func_decl = current_function_decl;
5493 if (dump_file)
5494 fprintf (dump_file,
5495 "Generating constraints for %s\n",
5496 cgraph_node_name (node));
5497 push_cfun (cfun);
5498 current_function_decl = node->decl;
5500 FOR_EACH_BB_FN (bb, cfun)
5502 block_stmt_iterator bsi;
5503 tree phi;
5505 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5507 if (is_gimple_reg (PHI_RESULT (phi)))
5509 find_func_aliases (phi);
5513 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5515 tree stmt = bsi_stmt (bsi);
5516 find_func_aliases (stmt);
5519 current_function_decl = old_func_decl;
5520 pop_cfun ();
5522 else
5524 /* Make it point to anything.  */
5528 if (dump_file)
5530 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5531 dump_constraints (dump_file);
5534 if (dump_file)
5535 fprintf (dump_file,
5536 "\nCollapsing static cycles and doing variable "
5537 "substitution:\n");
5539 init_graph (VEC_length (varinfo_t, varmap) * 2);
5540 build_pred_graph ();
5541 si = perform_var_substitution (graph);
5542 rewrite_constraints (graph, si);
5543 free_var_substitution_info (si);
5545 build_succ_graph ();
5546 move_complex_constraints (graph);
5547 unite_pointer_equivalences (graph);
5548 find_indirect_cycles (graph);
5550 /* Implicit nodes and predecessors are no longer necessary at this
5551 point. */
5552 remove_preds_and_fake_succs (graph);
5554 if (dump_file)
5555 fprintf (dump_file, "\nSolving graph\n");
5557 solve_graph (graph);
5559 if (dump_file)
5560 dump_sa_points_to_info (dump_file);
5562 in_ipa_mode = 0;
5563 delete_alias_heapvars ();
5564 delete_points_to_sets ();
5565 return 0;
5568 struct tree_opt_pass pass_ipa_pta =
5570 "pta", /* name */
5571 gate_ipa_pta, /* gate */
5572 ipa_pta_execute, /* execute */
5573 NULL, /* sub */
5574 NULL, /* next */
5575 0, /* static_pass_number */
5576 TV_IPA_PTA, /* tv_id */
5577 0, /* properties_required */
5578 0, /* properties_provided */
5579 0, /* properties_destroyed */
5580 0, /* todo_flags_start */
5581 TODO_update_ssa, /* todo_flags_finish */
5582 0 /* letter */
5585 /* Initialize the heapvar for statement mapping. */
5586 void
5587 init_alias_heapvars (void)
5589 if (!heapvar_for_stmt)
5590 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
5591 NULL);
5594 void
5595 delete_alias_heapvars (void)
5597 htab_delete (heapvar_for_stmt);
5598 heapvar_for_stmt = NULL;
5602 #include "gt-tree-ssa-structalias.h"