1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "flags.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "output.h"
34 #include "errors.h"
35 #include "diagnostic.h"
36 #include "tree.h"
37 #include "c-common.h"
38 #include "tree-flow.h"
39 #include "tree-inline.h"
40 #include "varray.h"
41 #include "c-tree.h"
42 #include "tree-gimple.h"
43 #include "hashtab.h"
44 #include "function.h"
45 #include "cgraph.h"
46 #include "tree-pass.h"
47 #include "timevar.h"
48 #include "alloc-pool.h"
49 #include "splay-tree.h"
50 #include "params.h"
51 #include "tree-ssa-structalias.h"
52 #include "cgraph.h"
53 #include "alias.h"
54 #include "pointer-set.h"
56 /* The idea behind this analyzer is to generate set constraints from the
57 program, then solve the resulting constraints in order to generate the
58 points-to sets.
60 Set constraints are a way of modeling program analysis problems that
61 involve sets. They consist of an inclusion constraint language,
62 describing the variables (each variable is a set) and operations that
63 are involved on the variables, and a set of rules that derive facts
64 from these operations. To solve a system of set constraints, you derive
65 all possible facts under the rules, which gives you the correct sets
66 as a consequence.
68 See "Efficient Field-sensitive pointer analysis for C" by David
69 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
70 http://citeseer.ist.psu.edu/pearce04efficient.html
72 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
73 of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
74 http://citeseer.ist.psu.edu/heintze01ultrafast.html
76 There are three types of real constraint expressions, DEREF,
77 ADDRESSOF, and SCALAR. Each constraint expression consists
78 of a constraint type, a variable, and an offset.
80 SCALAR is a constraint expression type used to represent x, whether
81 it appears on the LHS or the RHS of a statement.
82 DEREF is a constraint expression type used to represent *x, whether
83 it appears on the LHS or the RHS of a statement.
84 ADDRESSOF is a constraint expression used to represent &x, whether
85 it appears on the LHS or the RHS of a statement.
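   For example (purely illustrative), the assignments below would give rise
   to the following constraint expressions:

     p = &a;     LHS: SCALAR p      RHS: ADDRESSOF a
     p = q;      LHS: SCALAR p      RHS: SCALAR q
     *p = q;     LHS: DEREF p       RHS: SCALAR q
     p = *q;     LHS: SCALAR p      RHS: DEREF q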
87 Each pointer variable in the program is assigned an integer id, and
88 each field of a structure variable is assigned an integer id as well.
90 Structure variables are linked to their list of fields through a "next
91 field" in each variable that points to the next field in offset
92 order.
93 Each variable for a structure field has
95 1. "size", that tells the size in bits of that field.
96 2. "fullsize", that tells the size in bits of the entire structure.
97 3. "offset", that tells the offset in bits from the beginning of the
98 structure to this field.
100 Thus,
101 struct f {
103 int a;
104 int b;
105 } foo;
106 int *bar;
108 looks like
110 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
111 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
112 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
115 In order to solve the system of set constraints, the following is
116 done:
118 1. Each constraint variable x has a solution set associated with it,
119 Sol(x).
121 2. Constraints are separated into direct, copy, and complex.
122 Direct constraints are ADDRESSOF constraints that require no extra
123 processing, such as P = &Q
124 Copy constraints are those of the form P = Q.
125 Complex constraints are all the constraints involving dereferences
126 and offsets (including offsetted copies).
128 3. All direct constraints of the form P = &Q are processed, such
129 that Q is added to Sol(P)
131 4. All complex constraints for a given constraint variable are stored in a
132 linked list attached to that variable's node.
134 5. A directed graph is built out of the copy constraints. Each
135 constraint variable is a node in the graph, and an edge from
136 Q to P is added for each copy constraint of the form P = Q
138 6. The graph is then walked, and solution sets are
139 propagated along the copy edges, such that an edge from Q to P
140 causes Sol(P) <- Sol(P) union Sol(Q).
142 7. As we visit each node, all complex constraints associated with
143 that node are processed by adding appropriate copy edges to the graph, or the
144 appropriate variables to the solution set.
146 8. The process of walking the graph is iterated until no solution
147 sets change.
149 Prior to walking the graph in steps 6 and 7, we perform static
150 cycle elimination on the constraint graph, as well
151 as off-line variable substitution.
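   As a small worked example (illustrative only), consider the constraints

     p = &a;    q = p;    r = *q;

   Step 3 processes p = &a directly, giving Sol(p) = { a }.  The copy
   constraint q = p adds an edge from p to q, so the propagation in step 6
   gives Sol(q) = { a }.  The complex constraint r = *q is attached to q's
   node; when q is visited in step 7, each member of Sol(q) (here just a)
   gets a copy edge to r, so Sol(r) ends up containing Sol(a).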
153 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
154 on and turned into anything), but isn't. You can just see what offset
155 inside the pointed-to struct it's going to access.
157 TODO: Constant bounded arrays can be handled as if they were structs of the
158 same number of elements.
160 TODO: Modeling heap and incoming pointers becomes much better if we
161 add fields to them as we discover them, which we could do.
163 TODO: We could handle unions, but to be honest, it's probably not
164 worth the pain or slowdown. */
166 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
167 htab_t heapvar_for_stmt;
169 static bool use_field_sensitive = true;
170 static int in_ipa_mode = 0;
172 /* Used for predecessor bitmaps. */
173 static bitmap_obstack predbitmap_obstack;
175 /* Used for points-to sets. */
176 static bitmap_obstack pta_obstack;
178 /* Used for oldsolution members of variables. */
179 static bitmap_obstack oldpta_obstack;
181 /* Used for per-solver-iteration bitmaps. */
182 static bitmap_obstack iteration_obstack;
184 static unsigned int create_variable_info_for (tree, const char *);
185 typedef struct constraint_graph *constraint_graph_t;
186 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
188 DEF_VEC_P(constraint_t);
189 DEF_VEC_ALLOC_P(constraint_t,heap);
191 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
192 if (a) \
193 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
195 static struct constraint_stats
197 unsigned int total_vars;
198 unsigned int nonpointer_vars;
199 unsigned int unified_vars_static;
200 unsigned int unified_vars_dynamic;
201 unsigned int iterations;
202 unsigned int num_edges;
203 unsigned int num_implicit_edges;
204 unsigned int points_to_sets_created;
205 } stats;
207 struct variable_info
209 /* ID of this variable */
210 unsigned int id;
212 /* Name of this variable */
213 const char *name;
215 /* Tree that this variable is associated with. */
216 tree decl;
218 /* Offset of this variable, in bits, from the base variable */
219 unsigned HOST_WIDE_INT offset;
221 /* Size of the variable, in bits. */
222 unsigned HOST_WIDE_INT size;
224 /* Full size of the base variable, in bits. */
225 unsigned HOST_WIDE_INT fullsize;
227 /* A link to the variable for the next field in this structure. */
228 struct variable_info *next;
230 /* True if the variable is directly the target of a dereference.
231 This is used to track which variables are *actually* dereferenced
232 so we can prune their points-to sets. */
233 unsigned int directly_dereferenced:1;
235 /* True if this is a variable created by the constraint analysis, such as
236 heap variables and constraints we had to break up. */
237 unsigned int is_artificial_var:1;
239 /* True if this is a special variable whose solution set should not be
240 changed. */
241 unsigned int is_special_var:1;
243 /* True for variables whose size is not known or variable. */
244 unsigned int is_unknown_size_var:1;
246 /* True for variables that have unions somewhere in them. */
247 unsigned int has_union:1;
249 /* True if this is a heap variable. */
250 unsigned int is_heap_var:1;
252 /* True if we may not use TBAA to prune references to this
253 variable. This is used for C++ placement new. */
254 unsigned int no_tbaa_pruning : 1;
256 /* Points-to set for this variable. */
257 bitmap solution;
259 /* Old points-to set for this variable. */
260 bitmap oldsolution;
262 /* Variable id this was collapsed to due to type unsafety. This
263 should be unused completely after build_succ_graph, or something
264 is broken. */
265 struct variable_info *collapsed_to;
267 typedef struct variable_info *varinfo_t;
269 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
271 /* Pool of variable info structures. */
272 static alloc_pool variable_info_pool;
274 DEF_VEC_P(varinfo_t);
276 DEF_VEC_ALLOC_P(varinfo_t, heap);
278 /* Table of variable info structures for constraint variables.
279 Indexed directly by variable info id. */
280 static VEC(varinfo_t,heap) *varmap;
282 /* Return the varmap element N */
284 static inline varinfo_t
285 get_varinfo (unsigned int n)
287 return VEC_index (varinfo_t, varmap, n);
290 /* Return the varmap element N, following the collapsed_to link. */
292 static inline varinfo_t
293 get_varinfo_fc (unsigned int n)
295 varinfo_t v = VEC_index (varinfo_t, varmap, n);
297 if (v->collapsed_to)
298 return v->collapsed_to;
299 return v;
302 /* Variable that represents the unknown pointer. */
303 static varinfo_t var_anything;
304 static tree anything_tree;
305 static unsigned int anything_id;
307 /* Variable that represents the NULL pointer. */
308 static varinfo_t var_nothing;
309 static tree nothing_tree;
310 static unsigned int nothing_id;
312 /* Variable that represents read only memory. */
313 static varinfo_t var_readonly;
314 static tree readonly_tree;
315 static unsigned int readonly_id;
317 /* Variable that represents integers. This is used for when people do things
318 like &0->a.b. */
319 static varinfo_t var_integer;
320 static tree integer_tree;
321 static unsigned int integer_id;
323 /* Lookup a heap var for FROM, and return it if we find one. */
325 static tree
326 heapvar_lookup (tree from)
328 struct tree_map *h, in;
329 in.base.from = from;
331 h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
332 htab_hash_pointer (from));
333 if (h)
334 return h->to;
335 return NULL_TREE;
338 /* Insert a mapping FROM->TO in the heap var for statement
339 hashtable. */
341 static void
342 heapvar_insert (tree from, tree to)
344 struct tree_map *h;
345 void **loc;
347 h = GGC_NEW (struct tree_map);
348 h->hash = htab_hash_pointer (from);
349 h->base.from = from;
350 h->to = to;
351 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
352 *(struct tree_map **) loc = h;
355 /* Return a new variable info structure for a variable named NAME,
356 associated with tree T and given id ID. */
358 static varinfo_t
359 new_var_info (tree t, unsigned int id, const char *name)
361 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
362 tree var;
364 ret->id = id;
365 ret->name = name;
366 ret->decl = t;
367 ret->directly_dereferenced = false;
368 ret->is_artificial_var = false;
369 ret->is_heap_var = false;
370 ret->is_special_var = false;
371 ret->is_unknown_size_var = false;
372 ret->has_union = false;
373 var = t;
374 if (TREE_CODE (var) == SSA_NAME)
375 var = SSA_NAME_VAR (var);
376 ret->no_tbaa_pruning = (DECL_P (var)
377 && POINTER_TYPE_P (TREE_TYPE (var))
378 && DECL_NO_TBAA_P (var));
379 ret->solution = BITMAP_ALLOC (&pta_obstack);
380 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
381 ret->next = NULL;
382 ret->collapsed_to = NULL;
383 return ret;
386 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
388 /* An expression that appears in a constraint. */
390 struct constraint_expr
392 /* Constraint type. */
393 constraint_expr_type type;
395 /* Variable we are referring to in the constraint. */
396 unsigned int var;
398 /* Offset, in bits, of this constraint from the beginning of
399 the variables it ends up referring to.
401 IOW, in a deref constraint, we would deref, get the result set,
402 then add OFFSET to each member. */
403 unsigned HOST_WIDE_INT offset;
406 typedef struct constraint_expr ce_s;
407 DEF_VEC_O(ce_s);
408 DEF_VEC_ALLOC_O(ce_s, heap);
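/* For example (illustrative only), with the struct foo layout from the
   introduction (foo.a at bit offset 0, foo.b at bit offset 32), a RHS
   DEREF of p with offset 32 first looks at what p points to and, for a
   member such as foo.a, lands on the field starting at 0 + 32 bits,
   i.e. foo.b.  */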
409 static void get_constraint_for (tree, VEC(ce_s, heap) **);
410 static void do_deref (VEC (ce_s, heap) **);
412 /* Our set constraints are made up of two constraint expressions, one
413 LHS, and one RHS.
415 As described in the introduction, our set constraints each represent an
416 operation between set valued variables. */
418 struct constraint
420 struct constraint_expr lhs;
421 struct constraint_expr rhs;
424 /* List of constraints that we use to build the constraint graph from. */
426 static VEC(constraint_t,heap) *constraints;
427 static alloc_pool constraint_pool;
430 DEF_VEC_I(int);
431 DEF_VEC_ALLOC_I(int, heap);
433 /* The constraint graph is represented as an array of bitmaps
434 containing successor nodes. */
436 struct constraint_graph
438 /* Size of this graph, which may be different than the number of
439 nodes in the variable map. */
440 unsigned int size;
442 /* Explicit successors of each node. */
443 bitmap *succs;
445 /* Implicit predecessors of each node (Used for variable
446 substitution). */
447 bitmap *implicit_preds;
449 /* Explicit predecessors of each node (Used for variable substitution). */
450 bitmap *preds;
452 /* Indirect cycle representatives, or -1 if the node has no indirect
453 cycles. */
454 int *indirect_cycles;
456 /* Representative node for a node. rep[a] == a unless the node has
457 been unified. */
458 unsigned int *rep;
460 /* Equivalence class representative for a label. This is used for
461 variable substitution. */
462 int *eq_rep;
464 /* Pointer equivalence label for a node. All nodes with the same
465 pointer equivalence label can be unified together at some point
466 (either during constraint optimization or after the constraint
467 graph is built). */
468 unsigned int *pe;
470 /* Pointer equivalence representative for a label. This is used to
471 handle nodes that are pointer equivalent but not location
472 equivalent. We can unite these once the addressof constraints
473 are transformed into initial points-to sets. */
474 int *pe_rep;
476 /* Pointer equivalence label for each node, used during variable
477 substitution. */
478 unsigned int *pointer_label;
480 /* Location equivalence label for each node, used during location
481 equivalence finding. */
482 unsigned int *loc_label;
484 /* Pointed-by set for each node, used during location equivalence
485 finding. This is pointed-by rather than pointed-to, because it
486 is constructed using the predecessor graph. */
487 bitmap *pointed_by;
489 /* Points-to sets used for pointer equivalence. These are *not* the
490 actual points-to sets of the nodes. */
491 bitmap *points_to;
493 /* Bitmap of nodes where the bit is set if the node is a direct
494 node. Used for variable substitution. */
495 sbitmap direct_nodes;
497 /* Bitmap of nodes where the bit is set if the node is address
498 taken. Used for variable substitution. */
499 bitmap address_taken;
501 /* Bitmap of nodes where the bit is set if the node's points_to
502 bitmap has been stored in the hash table. */
503 sbitmap pt_used;
505 /* Number of incoming edges remaining to be processed by pointer
506 equivalence.
507 Used for variable substitution. */
508 unsigned int *number_incoming;
511 /* Vector of complex constraints for each graph node. Complex
512 constraints are those involving dereferences or offsets that are
513 not 0. */
514 VEC(constraint_t,heap) **complex;
517 static constraint_graph_t graph;
519 /* During variable substitution and the offline version of indirect
520 cycle finding, we create nodes to represent dereferences and
521 address taken constraints. The macros below give the range where
522 these extra nodes start and end. */
523 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
524 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
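/* For instance, if varmap currently holds 4 variables, FIRST_REF_NODE is 4
   and LAST_REF_NODE is 7; graph node FIRST_REF_NODE + i stands for the
   dereference of the variable with id i.  */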
526 /* Return the representative node for NODE, if NODE has been unioned
527 with another NODE.
528 This function performs path compression along the way to finding
529 the representative. */
531 static unsigned int
532 find (unsigned int node)
534 gcc_assert (node < graph->size);
535 if (graph->rep[node] != node)
536 return graph->rep[node] = find (graph->rep[node]);
537 return node;
540 /* Union the TO and FROM nodes into the TO node.
541 Note that at some point in the future, we may want to do
542 union-by-rank, in which case we are going to have to return the
543 node we unified to. */
545 static bool
546 unite (unsigned int to, unsigned int from)
548 gcc_assert (to < graph->size && from < graph->size);
549 if (to != from && graph->rep[from] != to)
551 graph->rep[from] = to;
552 return true;
554 return false;
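/* A typical use of the two routines above (sketch only, mirroring what
   scc_visit and unify_nodes do below): collapse node B into node A and
   merge its associated data.

     if (unite (a, b))
       unify_nodes (graph, a, b, false);
     gcc_assert (find (b) == a);  */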
557 /* Create a new constraint consisting of LHS and RHS expressions. */
559 static constraint_t
560 new_constraint (const struct constraint_expr lhs,
561 const struct constraint_expr rhs)
563 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
564 ret->lhs = lhs;
565 ret->rhs = rhs;
566 return ret;
569 /* Print out constraint C to FILE. */
571 void
572 dump_constraint (FILE *file, constraint_t c)
574 if (c->lhs.type == ADDRESSOF)
575 fprintf (file, "&");
576 else if (c->lhs.type == DEREF)
577 fprintf (file, "*");
578 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
579 if (c->lhs.offset != 0)
580 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
581 fprintf (file, " = ");
582 if (c->rhs.type == ADDRESSOF)
583 fprintf (file, "&");
584 else if (c->rhs.type == DEREF)
585 fprintf (file, "*");
586 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
587 if (c->rhs.offset != 0)
588 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
589 fprintf (file, "\n");
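/* For example, a constraint whose RHS is an ADDRESSOF dumps as "x = &y",
   and one whose RHS is an offsetted DEREF dumps as "x = *y + 32" (offsets
   are printed in bits).  */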
592 /* Print out constraint C to stderr. */
594 void
595 debug_constraint (constraint_t c)
597 dump_constraint (stderr, c);
600 /* Print out all constraints to FILE */
602 void
603 dump_constraints (FILE *file)
605 int i;
606 constraint_t c;
607 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
608 dump_constraint (file, c);
611 /* Print out all constraints to stderr. */
613 void
614 debug_constraints (void)
616 dump_constraints (stderr);
619 /* SOLVER FUNCTIONS
621 The solver is a simple worklist solver, that works on the following
622 algorithm:
624 sbitmap changed_nodes = all zeroes;
625 changed_count = 0;
626 For each node that is not already collapsed:
627 changed_count++;
628 set bit in changed nodes
630 while (changed_count > 0)
632 compute topological ordering for constraint graph
634 find and collapse cycles in the constraint graph (updating
635 changed if necessary)
637 for each node (n) in the graph in topological order:
638 changed_count--;
640 Process each complex constraint associated with the node,
641 updating changed if necessary.
643 For each outgoing edge from n, propagate the solution from n to
644 the destination of the edge, updating changed as necessary.
646 } */
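/* As a concrete sketch of the propagation step above (illustrative only,
   not the actual solver in this file), pushing the solution of node N
   along its successor edges looks roughly like this:

     bitmap_iterator bi;
     unsigned int succ;

     EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, succ, bi)
       {
         unsigned int t = find (succ);
         if (bitmap_ior_into (get_varinfo (t)->solution,
                              get_varinfo (n)->solution)
             && !TEST_BIT (changed, t))
           {
             SET_BIT (changed, t);
             changed_count++;
           }
       }
*/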
648 /* Return true if two constraint expressions A and B are equal. */
650 static bool
651 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
653 return a.type == b.type && a.var == b.var && a.offset == b.offset;
656 /* Return true if constraint expression A is less than constraint expression
657 B. This is just arbitrary, but consistent, in order to give them an
658 ordering. */
660 static bool
661 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
663 if (a.type == b.type)
665 if (a.var == b.var)
666 return a.offset < b.offset;
667 else
668 return a.var < b.var;
670 else
671 return a.type < b.type;
674 /* Return true if constraint A is less than constraint B. This is just
675 arbitrary, but consistent, in order to give them an ordering. */
677 static bool
678 constraint_less (const constraint_t a, const constraint_t b)
680 if (constraint_expr_less (a->lhs, b->lhs))
681 return true;
682 else if (constraint_expr_less (b->lhs, a->lhs))
683 return false;
684 else
685 return constraint_expr_less (a->rhs, b->rhs);
688 /* Return true if two constraints A and B are equal. */
690 static bool
691 constraint_equal (struct constraint a, struct constraint b)
693 return constraint_expr_equal (a.lhs, b.lhs)
694 && constraint_expr_equal (a.rhs, b.rhs);
698 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
700 static constraint_t
701 constraint_vec_find (VEC(constraint_t,heap) *vec,
702 struct constraint lookfor)
704 unsigned int place;
705 constraint_t found;
707 if (vec == NULL)
708 return NULL;
710 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
711 if (place >= VEC_length (constraint_t, vec))
712 return NULL;
713 found = VEC_index (constraint_t, vec, place);
714 if (!constraint_equal (*found, lookfor))
715 return NULL;
716 return found;
719 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
721 static void
722 constraint_set_union (VEC(constraint_t,heap) **to,
723 VEC(constraint_t,heap) **from)
725 int i;
726 constraint_t c;
728 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
730 if (constraint_vec_find (*to, *c) == NULL)
732 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
733 constraint_less);
734 VEC_safe_insert (constraint_t, heap, *to, place, c);
739 /* Take a solution set SET, add OFFSET to each member of the set, and
740 overwrite SET with the result when done. */
742 static void
743 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
745 bitmap result = BITMAP_ALLOC (&iteration_obstack);
746 unsigned int i;
747 bitmap_iterator bi;
749 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
751 /* If this is a properly sized variable, only add the offset if the
752 result stays within the variable. Otherwise, it is globbed to a single
753 variable. */
755 if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
757 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
758 varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
759 if (!v)
760 continue;
761 bitmap_set_bit (result, v->id);
763 else if (get_varinfo (i)->is_artificial_var
764 || get_varinfo (i)->has_union
765 || get_varinfo (i)->is_unknown_size_var)
767 bitmap_set_bit (result, i);
771 bitmap_copy (set, result);
772 BITMAP_FREE (result);
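/* For example, with the struct foo layout from the introduction, calling
   solution_set_add on a set containing foo.a with OFFSET 32 replaces foo.a
   by foo.b, while members whose adjusted offset would fall past the end of
   their variable are dropped (unless the variable is artificial, contains
   a union, or has unknown size, in which case it is kept as is).  */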
775 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
776 process. */
778 static bool
779 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
781 if (inc == 0)
782 return bitmap_ior_into (to, from);
783 else
785 bitmap tmp;
786 bool res;
788 tmp = BITMAP_ALLOC (&iteration_obstack);
789 bitmap_copy (tmp, from);
790 solution_set_add (tmp, inc);
791 res = bitmap_ior_into (to, tmp);
792 BITMAP_FREE (tmp);
793 return res;
797 /* Insert constraint C into the list of complex constraints for graph
798 node VAR. */
800 static void
801 insert_into_complex (constraint_graph_t graph,
802 unsigned int var, constraint_t c)
804 VEC (constraint_t, heap) *complex = graph->complex[var];
805 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
806 constraint_less);
808 /* Only insert constraints that do not already exist. */
809 if (place >= VEC_length (constraint_t, complex)
810 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
811 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
815 /* Condense two variable nodes into a single variable node, by moving
816 all associated info from FROM to TO. */
818 static void
819 merge_node_constraints (constraint_graph_t graph, unsigned int to,
820 unsigned int from)
822 unsigned int i;
823 constraint_t c;
825 gcc_assert (find (from) == to);
827 /* Move all complex constraints from the FROM node into the TO node. */
828 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
830 /* In complex constraints for node FROM, we may have a = *FROM,
831 *FROM = a, or an offsetted constraint; the offsetted ones are
832 always added to the rhs node's constraints. */
834 if (c->rhs.type == DEREF)
835 c->rhs.var = to;
836 else if (c->lhs.type == DEREF)
837 c->lhs.var = to;
838 else
839 c->rhs.var = to;
841 constraint_set_union (&graph->complex[to], &graph->complex[from]);
842 VEC_free (constraint_t, heap, graph->complex[from]);
843 graph->complex[from] = NULL;
847 /* Remove edges involving NODE from GRAPH. */
849 static void
850 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
852 if (graph->succs[node])
853 BITMAP_FREE (graph->succs[node]);
856 /* Merge GRAPH nodes FROM and TO into node TO. */
858 static void
859 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
860 unsigned int from)
862 if (graph->indirect_cycles[from] != -1)
864 /* If we have indirect cycles with the from node, and we have
865 none on the to node, the to node has indirect cycles from the
866 from node now that they are unified.
867 If indirect cycles exist on both, unify the nodes that they
868 are in a cycle with, since we know they are in a cycle with
869 each other. */
870 if (graph->indirect_cycles[to] == -1)
871 graph->indirect_cycles[to] = graph->indirect_cycles[from];
874 /* Merge all the successor edges. */
875 if (graph->succs[from])
877 if (!graph->succs[to])
878 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
879 bitmap_ior_into (graph->succs[to],
880 graph->succs[from]);
883 clear_edges_for_node (graph, from);
887 /* Add an implicit predecessor edge to GRAPH, going from TO to FROM, if
888 it doesn't exist in the graph already. */
890 static void
891 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
892 unsigned int from)
894 if (to == from)
895 return;
897 if (!graph->implicit_preds[to])
898 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
900 if (!bitmap_bit_p (graph->implicit_preds[to], from))
902 stats.num_implicit_edges++;
903 bitmap_set_bit (graph->implicit_preds[to], from);
907 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
908 it doesn't exist in the graph already. */
911 static void
912 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
913 unsigned int from)
915 if (!graph->preds[to])
916 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
917 if (!bitmap_bit_p (graph->preds[to], from))
918 bitmap_set_bit (graph->preds[to], from);
921 /* Add a graph edge to GRAPH, going from FROM to TO if
922 it doesn't exist in the graph already.
923 Return false if the edge already existed, true otherwise. */
925 static bool
926 add_graph_edge (constraint_graph_t graph, unsigned int to,
927 unsigned int from)
929 if (to == from)
931 return false;
933 else
935 bool r = false;
937 if (!graph->succs[from])
938 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
939 if (!bitmap_bit_p (graph->succs[from], to))
941 r = true;
942 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
943 stats.num_edges++;
944 bitmap_set_bit (graph->succs[from], to);
946 return r;
951 /* Return true if {DEST, SRC} is an existing graph edge in GRAPH. */
953 static bool
954 valid_graph_edge (constraint_graph_t graph, unsigned int src,
955 unsigned int dest)
957 return (graph->succs[dest]
958 && bitmap_bit_p (graph->succs[dest], src));
961 /* Initialize the constraint graph structure to contain SIZE nodes. */
963 static void
964 init_graph (unsigned int size)
966 unsigned int j;
968 graph = XCNEW (struct constraint_graph);
969 graph->size = size;
970 graph->succs = XCNEWVEC (bitmap, graph->size);
971 graph->indirect_cycles = XNEWVEC (int, graph->size);
972 graph->rep = XNEWVEC (unsigned int, graph->size);
973 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
974 graph->pe = XCNEWVEC (unsigned int, graph->size);
975 graph->pe_rep = XNEWVEC (int, graph->size);
977 for (j = 0; j < graph->size; j++)
979 graph->rep[j] = j;
980 graph->pe_rep[j] = -1;
981 graph->indirect_cycles[j] = -1;
985 /* Build the constraint graph, adding only predecessor edges right now. */
987 static void
988 build_pred_graph (void)
990 int i;
991 constraint_t c;
992 unsigned int j;
994 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
995 graph->preds = XCNEWVEC (bitmap, graph->size);
996 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
997 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
998 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
999 graph->points_to = XCNEWVEC (bitmap, graph->size);
1000 graph->eq_rep = XNEWVEC (int, graph->size);
1001 graph->direct_nodes = sbitmap_alloc (graph->size);
1002 graph->pt_used = sbitmap_alloc (graph->size);
1003 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1004 graph->number_incoming = XCNEWVEC (unsigned int, graph->size);
1005 sbitmap_zero (graph->direct_nodes);
1006 sbitmap_zero (graph->pt_used);
1008 for (j = 0; j < FIRST_REF_NODE; j++)
1010 if (!get_varinfo (j)->is_special_var)
1011 SET_BIT (graph->direct_nodes, j);
1014 for (j = 0; j < graph->size; j++)
1015 graph->eq_rep[j] = -1;
1017 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1018 graph->indirect_cycles[j] = -1;
1020 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1022 struct constraint_expr lhs = c->lhs;
1023 struct constraint_expr rhs = c->rhs;
1024 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
1025 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
1027 if (lhs.type == DEREF)
1029 /* *x = y. */
1030 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1031 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1033 else if (rhs.type == DEREF)
1035 /* x = *y */
1036 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1037 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1038 else
1039 RESET_BIT (graph->direct_nodes, lhsvar);
1041 else if (rhs.type == ADDRESSOF)
1043 /* x = &y */
1044 if (graph->points_to[lhsvar] == NULL)
1045 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1046 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1048 if (graph->pointed_by[rhsvar] == NULL)
1049 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1050 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1052 /* Implicitly, *x = y */
1053 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1055 RESET_BIT (graph->direct_nodes, rhsvar);
1056 bitmap_set_bit (graph->address_taken, rhsvar);
1058 else if (lhsvar > anything_id
1059 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1061 /* x = y */
1062 add_pred_graph_edge (graph, lhsvar, rhsvar);
1063 /* Implicitly, *x = *y */
1064 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1065 FIRST_REF_NODE + rhsvar);
1067 else if (lhs.offset != 0 || rhs.offset != 0)
1069 if (rhs.offset != 0)
1070 RESET_BIT (graph->direct_nodes, lhs.var);
1071 else if (lhs.offset != 0)
1072 RESET_BIT (graph->direct_nodes, rhs.var);
1077 /* Build the constraint graph, adding successor edges. */
1079 static void
1080 build_succ_graph (void)
1082 int i;
1083 constraint_t c;
1085 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1087 struct constraint_expr lhs;
1088 struct constraint_expr rhs;
1089 unsigned int lhsvar;
1090 unsigned int rhsvar;
1092 if (!c)
1093 continue;
1095 lhs = c->lhs;
1096 rhs = c->rhs;
1097 lhsvar = find (get_varinfo_fc (lhs.var)->id);
1098 rhsvar = find (get_varinfo_fc (rhs.var)->id);
1100 if (lhs.type == DEREF)
1102 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1103 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1105 else if (rhs.type == DEREF)
1107 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1108 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1110 else if (rhs.type == ADDRESSOF)
1112 /* x = &y */
1113 gcc_assert (find (get_varinfo_fc (rhs.var)->id)
1114 == get_varinfo_fc (rhs.var)->id);
1115 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1117 else if (lhsvar > anything_id
1118 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1120 add_graph_edge (graph, lhsvar, rhsvar);
1126 /* Changed variables on the last iteration. */
1127 static unsigned int changed_count;
1128 static sbitmap changed;
1130 DEF_VEC_I(unsigned);
1131 DEF_VEC_ALLOC_I(unsigned,heap);
1134 /* Strongly Connected Component visitation info. */
1136 struct scc_info
1138 sbitmap visited;
1139 sbitmap deleted;
1140 unsigned int *dfs;
1141 unsigned int *node_mapping;
1142 int current_index;
1143 VEC(unsigned,heap) *scc_stack;
1147 /* Recursive routine to find strongly connected components in GRAPH.
1148 SI is the SCC info to store the information in, and N is the id of
1149 the current graph node we are processing.
1151 This is Tarjan's strongly connected component finding algorithm, as
1152 modified by Nuutila to keep only non-root nodes on the stack.
1153 The algorithm can be found in "On finding the strongly connected
1154 components in a directed graph" by Esko Nuutila and Eljas
1155 Soisalon-Soininen, in Information Processing Letters volume 49,
1156 number 1, pages 9-14. */
1158 static void
1159 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1161 unsigned int i;
1162 bitmap_iterator bi;
1163 unsigned int my_dfs;
1165 SET_BIT (si->visited, n);
1166 si->dfs[n] = si->current_index ++;
1167 my_dfs = si->dfs[n];
1169 /* Visit all the successors. */
1170 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1172 unsigned int w;
1174 if (i > LAST_REF_NODE)
1175 break;
1177 w = find (i);
1178 if (TEST_BIT (si->deleted, w))
1179 continue;
1181 if (!TEST_BIT (si->visited, w))
1182 scc_visit (graph, si, w);
1184 unsigned int t = find (w);
1185 unsigned int nnode = find (n);
1186 gcc_assert (nnode == n);
1188 if (si->dfs[t] < si->dfs[nnode])
1189 si->dfs[n] = si->dfs[t];
1193 /* See if any components have been identified. */
1194 if (si->dfs[n] == my_dfs)
1196 if (VEC_length (unsigned, si->scc_stack) > 0
1197 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1199 bitmap scc = BITMAP_ALLOC (NULL);
1200 bool have_ref_node = n >= FIRST_REF_NODE;
1201 unsigned int lowest_node;
1202 bitmap_iterator bi;
1204 bitmap_set_bit (scc, n);
1206 while (VEC_length (unsigned, si->scc_stack) != 0
1207 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1209 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1211 bitmap_set_bit (scc, w);
1212 if (w >= FIRST_REF_NODE)
1213 have_ref_node = true;
1216 lowest_node = bitmap_first_set_bit (scc);
1217 gcc_assert (lowest_node < FIRST_REF_NODE);
1219 /* Collapse the SCC nodes into a single node, and mark the
1220 indirect cycles. */
1221 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1223 if (i < FIRST_REF_NODE)
1225 if (unite (lowest_node, i))
1226 unify_nodes (graph, lowest_node, i, false);
1228 else
1230 unite (lowest_node, i);
1231 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1235 SET_BIT (si->deleted, n);
1237 else
1238 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1241 /* Unify node FROM into node TO, updating the changed count if
1242 necessary when UPDATE_CHANGED is true. */
1244 static void
1245 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1246 bool update_changed)
1249 gcc_assert (to != from && find (to) == to);
1250 if (dump_file && (dump_flags & TDF_DETAILS))
1251 fprintf (dump_file, "Unifying %s to %s\n",
1252 get_varinfo (from)->name,
1253 get_varinfo (to)->name);
1255 if (update_changed)
1256 stats.unified_vars_dynamic++;
1257 else
1258 stats.unified_vars_static++;
1260 merge_graph_nodes (graph, to, from);
1261 merge_node_constraints (graph, to, from);
1263 if (get_varinfo (from)->no_tbaa_pruning)
1264 get_varinfo (to)->no_tbaa_pruning = true;
1266 /* Mark TO as changed if FROM was changed. If TO was already marked
1267 as changed, decrease the changed count. */
1269 if (update_changed && TEST_BIT (changed, from))
1271 RESET_BIT (changed, from);
1272 if (!TEST_BIT (changed, to))
1273 SET_BIT (changed, to);
1274 else
1276 gcc_assert (changed_count > 0);
1277 changed_count--;
1280 if (get_varinfo (from)->solution)
1282 /* If the solution changes because of the merging, we need to mark
1283 the variable as changed. */
1284 if (bitmap_ior_into (get_varinfo (to)->solution,
1285 get_varinfo (from)->solution))
1287 if (update_changed && !TEST_BIT (changed, to))
1289 SET_BIT (changed, to);
1290 changed_count++;
1294 BITMAP_FREE (get_varinfo (from)->solution);
1295 BITMAP_FREE (get_varinfo (from)->oldsolution);
1297 if (stats.iterations > 0)
1299 BITMAP_FREE (get_varinfo (to)->oldsolution);
1300 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
1303 if (valid_graph_edge (graph, to, to))
1305 if (graph->succs[to])
1306 bitmap_clear_bit (graph->succs[to], to);
1310 /* Information needed to compute the topological ordering of a graph. */
1312 struct topo_info
1314 /* sbitmap of visited nodes. */
1315 sbitmap visited;
1316 /* Array that stores the topological order of the graph, *in
1317 reverse*. */
1318 VEC(unsigned,heap) *topo_order;
1322 /* Initialize and return a topological info structure. */
1324 static struct topo_info *
1325 init_topo_info (void)
1327 size_t size = graph->size;
1328 struct topo_info *ti = XNEW (struct topo_info);
1329 ti->visited = sbitmap_alloc (size);
1330 sbitmap_zero (ti->visited);
1331 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1332 return ti;
1336 /* Free the topological sort info pointed to by TI. */
1338 static void
1339 free_topo_info (struct topo_info *ti)
1341 sbitmap_free (ti->visited);
1342 VEC_free (unsigned, heap, ti->topo_order);
1343 free (ti);
1346 /* Visit the graph in topological order, and store the order in the
1347 topo_info structure. */
1349 static void
1350 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1351 unsigned int n)
1353 bitmap_iterator bi;
1354 unsigned int j;
1356 SET_BIT (ti->visited, n);
1358 if (graph->succs[n])
1359 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1361 if (!TEST_BIT (ti->visited, j))
1362 topo_visit (graph, ti, j);
1365 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1368 /* Return true if variable N + OFFSET is a legal field of N. */
1370 static bool
1371 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1373 varinfo_t ninfo = get_varinfo (n);
1375 /* For things we've globbed to single variables, any offset into the
1376 variable acts like the entire variable, so that it becomes offset
1377 0. */
1378 if (ninfo->is_special_var
1379 || ninfo->is_artificial_var
1380 || ninfo->is_unknown_size_var)
1382 *offset = 0;
1383 return true;
1385 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1388 /* Process a constraint C that represents x = *y, using DELTA as the
1389 starting solution. */
1391 static void
1392 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1393 bitmap delta)
1395 unsigned int lhs = c->lhs.var;
1396 bool flag = false;
1397 bitmap sol = get_varinfo (lhs)->solution;
1398 unsigned int j;
1399 bitmap_iterator bi;
1401 if (bitmap_bit_p (delta, anything_id))
1403 flag = !bitmap_bit_p (sol, anything_id);
1404 if (flag)
1405 bitmap_set_bit (sol, anything_id);
1406 goto done;
1408 /* For each variable j in delta (Sol(y)), add
1409 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1410 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1412 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
1413 if (type_safe (j, &roffset))
1415 varinfo_t v;
1416 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1417 unsigned int t;
1419 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1420 if (!v)
1421 continue;
1422 t = find (v->id);
1424 /* Adding edges from the special vars is pointless.
1425 They don't have sets that can change. */
1426 if (get_varinfo (t) ->is_special_var)
1427 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1428 else if (add_graph_edge (graph, lhs, t))
1429 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1433 done:
1434 /* If the LHS solution changed, mark the var as changed. */
1435 if (flag)
1437 get_varinfo (lhs)->solution = sol;
1438 if (!TEST_BIT (changed, lhs))
1440 SET_BIT (changed, lhs);
1441 changed_count++;
1446 /* Process a constraint C that represents *x = y. */
1448 static void
1449 do_ds_constraint (constraint_t c, bitmap delta)
1451 unsigned int rhs = c->rhs.var;
1452 bitmap sol = get_varinfo (rhs)->solution;
1453 unsigned int j;
1454 bitmap_iterator bi;
1456 if (bitmap_bit_p (sol, anything_id))
1458 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1460 varinfo_t jvi = get_varinfo (j);
1461 unsigned int t;
1462 unsigned int loff = c->lhs.offset;
1463 unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
1464 varinfo_t v;
1466 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1467 if (!v)
1468 continue;
1469 t = find (v->id);
1471 if (!bitmap_bit_p (get_varinfo (t)->solution, anything_id))
1473 bitmap_set_bit (get_varinfo (t)->solution, anything_id);
1474 if (!TEST_BIT (changed, t))
1476 SET_BIT (changed, t);
1477 changed_count++;
1481 return;
1484 /* For each member j of delta (Sol(x)), add an edge from y to j and
1485 union Sol(y) into Sol(j) */
1486 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1488 unsigned HOST_WIDE_INT loff = c->lhs.offset;
1489 if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
1491 varinfo_t v;
1492 unsigned int t;
1493 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1494 bitmap tmp;
1496 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1497 if (!v)
1498 continue;
1499 t = find (v->id);
1500 tmp = get_varinfo (t)->solution;
1502 if (set_union_with_increment (tmp, sol, 0))
1504 get_varinfo (t)->solution = tmp;
1505 if (t == rhs)
1506 sol = get_varinfo (rhs)->solution;
1507 if (!TEST_BIT (changed, t))
1509 SET_BIT (changed, t);
1510 changed_count++;
1517 /* Handle a non-simple (simple meaning requires no iteration)
1518 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1520 static void
1521 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1523 if (c->lhs.type == DEREF)
1525 if (c->rhs.type == ADDRESSOF)
1527 gcc_unreachable();
1529 else
1531 /* *x = y */
1532 do_ds_constraint (c, delta);
1535 else if (c->rhs.type == DEREF)
1537 /* x = *y */
1538 if (!(get_varinfo (c->lhs.var)->is_special_var))
1539 do_sd_constraint (graph, c, delta);
1541 else
1543 bitmap tmp;
1544 bitmap solution;
1545 bool flag = false;
1547 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1548 solution = get_varinfo (c->rhs.var)->solution;
1549 tmp = get_varinfo (c->lhs.var)->solution;
1551 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1553 if (flag)
1555 get_varinfo (c->lhs.var)->solution = tmp;
1556 if (!TEST_BIT (changed, c->lhs.var))
1558 SET_BIT (changed, c->lhs.var);
1559 changed_count++;
1565 /* Initialize and return a new SCC info structure. */
1567 static struct scc_info *
1568 init_scc_info (size_t size)
1570 struct scc_info *si = XNEW (struct scc_info);
1571 size_t i;
1573 si->current_index = 0;
1574 si->visited = sbitmap_alloc (size);
1575 sbitmap_zero (si->visited);
1576 si->deleted = sbitmap_alloc (size);
1577 sbitmap_zero (si->deleted);
1578 si->node_mapping = XNEWVEC (unsigned int, size);
1579 si->dfs = XCNEWVEC (unsigned int, size);
1581 for (i = 0; i < size; i++)
1582 si->node_mapping[i] = i;
1584 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1585 return si;
1588 /* Free an SCC info structure pointed to by SI */
1590 static void
1591 free_scc_info (struct scc_info *si)
1593 sbitmap_free (si->visited);
1594 sbitmap_free (si->deleted);
1595 free (si->node_mapping);
1596 free (si->dfs);
1597 VEC_free (unsigned, heap, si->scc_stack);
1598 free (si);
1602 /* Find indirect cycles in GRAPH, using strongly connected
1603 components, and note them in the indirect cycles map.
1605 This technique comes from Ben Hardekopf and Calvin Lin,
1606 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1607 Lines of Code", submitted to PLDI 2007. */
1609 static void
1610 find_indirect_cycles (constraint_graph_t graph)
1612 unsigned int i;
1613 unsigned int size = graph->size;
1614 struct scc_info *si = init_scc_info (size);
1616 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1617 if (!TEST_BIT (si->visited, i) && find (i) == i)
1618 scc_visit (graph, si, i);
1620 free_scc_info (si);
1623 /* Compute a topological ordering for GRAPH, and store the result in the
1624 topo_info structure TI. */
1626 static void
1627 compute_topo_order (constraint_graph_t graph,
1628 struct topo_info *ti)
1630 unsigned int i;
1631 unsigned int size = graph->size;
1633 for (i = 0; i != size; ++i)
1634 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1635 topo_visit (graph, ti, i);
1638 /* Structure used for hash value numbering of pointer equivalence
1639 classes. */
1641 typedef struct equiv_class_label
1643 unsigned int equivalence_class;
1644 bitmap labels;
1645 hashval_t hashcode;
1646 } *equiv_class_label_t;
1647 typedef const struct equiv_class_label *const_equiv_class_label_t;
1649 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1650 classes. */
1651 static htab_t pointer_equiv_class_table;
1653 /* A hashtable for mapping a bitmap of labels->location equivalence
1654 classes. */
1655 static htab_t location_equiv_class_table;
1657 /* Hash function for an equiv_class_label_t. */
1659 static hashval_t
1660 equiv_class_label_hash (const void *p)
1662 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1663 return ecl->hashcode;
1666 /* Equality function for two equiv_class_label_t's. */
1668 static int
1669 equiv_class_label_eq (const void *p1, const void *p2)
1671 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1672 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1673 return bitmap_equal_p (eql1->labels, eql2->labels);
1676 /* Look up an equivalence class in TABLE by the bitmap of LABELS it
1677 contains. */
1679 static unsigned int
1680 equiv_class_lookup (htab_t table, bitmap labels)
1682 void **slot;
1683 struct equiv_class_label ecl;
1685 ecl.labels = labels;
1686 ecl.hashcode = bitmap_hash (labels);
1688 slot = htab_find_slot_with_hash (table, &ecl,
1689 ecl.hashcode, NO_INSERT);
1690 if (!slot)
1691 return 0;
1692 else
1693 return ((equiv_class_label_t) *slot)->equivalence_class;
1697 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1698 to TABLE. */
1700 static void
1701 equiv_class_add (htab_t table, unsigned int equivalence_class,
1702 bitmap labels)
1704 void **slot;
1705 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1707 ecl->labels = labels;
1708 ecl->equivalence_class = equivalence_class;
1709 ecl->hashcode = bitmap_hash (labels);
1711 slot = htab_find_slot_with_hash (table, ecl,
1712 ecl->hashcode, INSERT);
1713 gcc_assert (!*slot);
1714 *slot = (void *) ecl;
1717 /* Perform offline variable substitution.
1719 This is a worst case quadratic time way of identifying variables
1720 that must have equivalent points-to sets, including those caused by
1721 static cycles, and single entry subgraphs, in the constraint graph.
1723 The technique is described in "Exploiting Pointer and Location
1724 Equivalence to Optimize Pointer Analysis", in the 14th International
1725 Static Analysis Symposium (SAS), August 2007. It is known as the
1726 "HU" algorithm, and is equivalent to value numbering the collapsed
1727 constraint graph including evaluating unions.
1729 The general method of finding equivalence classes is as follows:
1730 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1731 Initialize all non-REF nodes to be direct nodes.
1732 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1733 variable}
1734 For each constraint containing the dereference, we also do the same
1735 thing.
1737 We then compute SCC's in the graph and unify nodes in the same SCC,
1738 including pts sets.
1740 For each non-collapsed node x:
1741 Visit all unvisited explicit incoming edges.
1742 Ignoring all non-pointers, set pts(x) = Union of pts(y) for all y
1743 where y->x.
1744 Lookup the equivalence class for pts(x).
1745 If we found one, equivalence_class(x) = found class.
1746 Otherwise, equivalence_class(x) = new class, and new_class is
1747 added to the lookup table.
1749 All direct nodes with the same equivalence class can be replaced
1750 with a single representative node.
1751 All unlabeled nodes (label == 0) are not pointers and all edges
1752 involving them can be eliminated.
1753 We perform these optimizations during rewrite_constraints
1755 In addition to pointer equivalence class finding, we also perform
1756 location equivalence class finding. This is the set of variables
1757 that always appear together in points-to sets. We use this to
1758 compress the size of the points-to sets. */
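/* As a small illustration (not taken from the papers above), consider the
   constraints

     a = &x;   b = &x;   c = a;   d = b;

   Both a and b receive the points-to set { x }, which c and d inherit
   through their predecessor edges.  c and d therefore hash to the same
   equivalence class, receive the same pointer equivalence label, and,
   being direct nodes whose addresses are not taken, can be collapsed to a
   single representative before solving.  */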
1760 /* Current maximum pointer equivalence class id. */
1761 static int pointer_equiv_class;
1763 /* Current maximum location equivalence class id. */
1764 static int location_equiv_class;
1766 /* Recursive routine to find strongly connected components in GRAPH,
1767 and label its nodes with DFS numbers. */
1769 static void
1770 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1772 unsigned int i;
1773 bitmap_iterator bi;
1774 unsigned int my_dfs;
1776 gcc_assert (si->node_mapping[n] == n);
1777 SET_BIT (si->visited, n);
1778 si->dfs[n] = si->current_index ++;
1779 my_dfs = si->dfs[n];
1781 /* Visit all the explicit predecessors. */
1782 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1784 unsigned int w = si->node_mapping[i];
1786 if (TEST_BIT (si->deleted, w))
1787 continue;
1789 if (!TEST_BIT (si->visited, w))
1790 condense_visit (graph, si, w);
1792 unsigned int t = si->node_mapping[w];
1793 unsigned int nnode = si->node_mapping[n];
1794 gcc_assert (nnode == n);
1796 if (si->dfs[t] < si->dfs[nnode])
1797 si->dfs[n] = si->dfs[t];
1801 /* Visit all the implicit predecessors. */
1802 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
1804 unsigned int w = si->node_mapping[i];
1806 if (TEST_BIT (si->deleted, w))
1807 continue;
1809 if (!TEST_BIT (si->visited, w))
1810 condense_visit (graph, si, w);
1812 unsigned int t = si->node_mapping[w];
1813 unsigned int nnode = si->node_mapping[n];
1814 gcc_assert (nnode == n);
1816 if (si->dfs[t] < si->dfs[nnode])
1817 si->dfs[n] = si->dfs[t];
1821 /* See if any components have been identified. */
1822 if (si->dfs[n] == my_dfs)
1824 while (VEC_length (unsigned, si->scc_stack) != 0
1825 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1827 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1828 si->node_mapping[w] = n;
1830 if (!TEST_BIT (graph->direct_nodes, w))
1831 RESET_BIT (graph->direct_nodes, n);
1833 /* Unify our nodes. */
1834 if (graph->preds[w])
1836 if (!graph->preds[n])
1837 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1838 bitmap_ior_into (graph->preds[n], graph->preds[w]);
1840 if (graph->implicit_preds[w])
1842 if (!graph->implicit_preds[n])
1843 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1844 bitmap_ior_into (graph->implicit_preds[n],
1845 graph->implicit_preds[w]);
1847 if (graph->points_to[w])
1849 if (!graph->points_to[n])
1850 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
1851 bitmap_ior_into (graph->points_to[n],
1852 graph->points_to[w]);
1854 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1856 unsigned int rep = si->node_mapping[i];
1857 graph->number_incoming[rep]++;
1860 SET_BIT (si->deleted, n);
1862 else
1863 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1866 /* Label pointer equivalences. */
1868 static void
1869 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1871 unsigned int i;
1872 bitmap_iterator bi;
1873 SET_BIT (si->visited, n);
1875 if (!graph->points_to[n])
1876 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
1878 /* Label and union our incoming edges' points-to sets. */
1879 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1881 unsigned int w = si->node_mapping[i];
1882 if (!TEST_BIT (si->visited, w))
1883 label_visit (graph, si, w);
1885 /* Skip unused edges */
1886 if (w == n || graph->pointer_label[w] == 0)
1888 graph->number_incoming[w]--;
1889 continue;
1891 if (graph->points_to[w])
1892 bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
1894 /* If all incoming edges to w have been processed and
1895 graph->points_to[w] was not stored in the hash table, we can
1896 free it. */
1897 graph->number_incoming[w]--;
1898 if (!graph->number_incoming[w] && !TEST_BIT (graph->pt_used, w))
1900 BITMAP_FREE (graph->points_to[w]);
1903 /* Indirect nodes get fresh variables. */
1904 if (!TEST_BIT (graph->direct_nodes, n))
1905 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
1907 if (!bitmap_empty_p (graph->points_to[n]))
1909 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
1910 graph->points_to[n]);
1911 if (!label)
1913 SET_BIT (graph->pt_used, n);
1914 label = pointer_equiv_class++;
1915 equiv_class_add (pointer_equiv_class_table,
1916 label, graph->points_to[n]);
1918 graph->pointer_label[n] = label;
1922 /* Perform offline variable substitution, discovering equivalence
1923 classes, and eliminating non-pointer variables. */
1925 static struct scc_info *
1926 perform_var_substitution (constraint_graph_t graph)
1928 unsigned int i;
1929 unsigned int size = graph->size;
1930 struct scc_info *si = init_scc_info (size);
1932 bitmap_obstack_initialize (&iteration_obstack);
1933 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
1934 equiv_class_label_eq, free);
1935 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
1936 equiv_class_label_eq, free);
1937 pointer_equiv_class = 1;
1938 location_equiv_class = 1;
1940 /* Condense the nodes, which means to find SCC's, count incoming
1941 predecessors, and unite nodes in SCC's. */
1942 for (i = 0; i < FIRST_REF_NODE; i++)
1943 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1944 condense_visit (graph, si, si->node_mapping[i]);
1946 sbitmap_zero (si->visited);
1947 /* Actually label the nodes for pointer equivalences. */
1948 for (i = 0; i < FIRST_REF_NODE; i++)
1949 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1950 label_visit (graph, si, si->node_mapping[i]);
1952 /* Calculate location equivalence labels. */
1953 for (i = 0; i < FIRST_REF_NODE; i++)
1955 bitmap pointed_by;
1956 bitmap_iterator bi;
1957 unsigned int j;
1958 unsigned int label;
1960 if (!graph->pointed_by[i])
1961 continue;
1962 pointed_by = BITMAP_ALLOC (&iteration_obstack);
1964 /* Translate the pointed-by mapping for pointer equivalence
1965 labels. */
1966 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
1968 bitmap_set_bit (pointed_by,
1969 graph->pointer_label[si->node_mapping[j]]);
1971 /* The original pointed_by is now dead. */
1972 BITMAP_FREE (graph->pointed_by[i]);
1974 /* Look up the location equivalence label if one exists, or make
1975 one otherwise. */
1976 label = equiv_class_lookup (location_equiv_class_table,
1977 pointed_by);
1978 if (label == 0)
1980 label = location_equiv_class++;
1981 equiv_class_add (location_equiv_class_table,
1982 label, pointed_by);
1984 else
1986 if (dump_file && (dump_flags & TDF_DETAILS))
1987 fprintf (dump_file, "Found location equivalence for node %s\n",
1988 get_varinfo (i)->name);
1989 BITMAP_FREE (pointed_by);
1991 graph->loc_label[i] = label;
1995 if (dump_file && (dump_flags & TDF_DETAILS))
1996 for (i = 0; i < FIRST_REF_NODE; i++)
1998 bool direct_node = TEST_BIT (graph->direct_nodes, i);
1999 fprintf (dump_file,
2000 "Equivalence classes for %s node id %d:%s are pointer: %d"
2001 ", location:%d\n",
2002 direct_node ? "Direct node" : "Indirect node", i,
2003 get_varinfo (i)->name,
2004 graph->pointer_label[si->node_mapping[i]],
2005 graph->loc_label[si->node_mapping[i]]);
2008 /* Quickly eliminate our non-pointer variables. */
2010 for (i = 0; i < FIRST_REF_NODE; i++)
2012 unsigned int node = si->node_mapping[i];
2014 if (graph->pointer_label[node] == 0)
2016 if (dump_file && (dump_flags & TDF_DETAILS))
2017 fprintf (dump_file,
2018 "%s is a non-pointer variable, eliminating edges.\n",
2019 get_varinfo (node)->name);
2020 stats.nonpointer_vars++;
2021 clear_edges_for_node (graph, node);
2025 return si;
2028 /* Free information that was only necessary for variable
2029 substitution. */
2031 static void
2032 free_var_substitution_info (struct scc_info *si)
2034 free_scc_info (si);
2035 free (graph->pointer_label);
2036 free (graph->loc_label);
2037 free (graph->pointed_by);
2038 free (graph->points_to);
2039 free (graph->number_incoming);
2040 free (graph->eq_rep);
2041 sbitmap_free (graph->direct_nodes);
2042 sbitmap_free (graph->pt_used);
2043 htab_delete (pointer_equiv_class_table);
2044 htab_delete (location_equiv_class_table);
2045 bitmap_obstack_release (&iteration_obstack);
2048 /* Return an existing node that is equivalent to NODE, which has
2049 equivalence class LABEL, if one exists. Return NODE otherwise. */
2051 static unsigned int
2052 find_equivalent_node (constraint_graph_t graph,
2053 unsigned int node, unsigned int label)
2055 /* If the address version of this variable is unused, we can
2056 substitute it for anything else with the same label.
2057 Otherwise, we know the pointers are equivalent, but not the
2058 locations, and we can unite them later. */
2060 if (!bitmap_bit_p (graph->address_taken, node))
2062 gcc_assert (label < graph->size);
2064 if (graph->eq_rep[label] != -1)
2066 /* Unify the two variables since we know they are equivalent. */
2067 if (unite (graph->eq_rep[label], node))
2068 unify_nodes (graph, graph->eq_rep[label], node, false);
2069 return graph->eq_rep[label];
2071 else
2073 graph->eq_rep[label] = node;
2074 graph->pe_rep[label] = node;
2077 else
2079 gcc_assert (label < graph->size);
2080 graph->pe[node] = label;
2081 if (graph->pe_rep[label] == -1)
2082 graph->pe_rep[label] = node;
2085 return node;
2088 /* Unite pointer equivalent but not location equivalent nodes in
2089 GRAPH. This may only be performed once variable substitution is
2090 finished. */
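/* For instance (hypothetical nodes a and b): if both received pointer
   equivalence label 7 and pe_rep[7] is a, then b is united into a
   here, so the online solver propagates only one copy of their
   identical points-to solution. */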
2092 static void
2093 unite_pointer_equivalences (constraint_graph_t graph)
2095 unsigned int i;
2097 /* Go through the pointer equivalences and unite them to their
2098 representative, if they aren't already. */
2099 for (i = 0; i < FIRST_REF_NODE; i++)
2101 unsigned int label = graph->pe[i];
2102 if (label)
2104 int label_rep = graph->pe_rep[label];
2106 if (label_rep == -1)
2107 continue;
2109 label_rep = find (label_rep);
2110 if (label_rep >= 0 && unite (label_rep, find (i)))
2111 unify_nodes (graph, label_rep, i, false);
2116 /* Move complex constraints to the GRAPH nodes they belong to. */
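/* For example: a constraint *x = y is attached to x's node and
   a = *b is attached to b's node, so each is reprocessed whenever the
   solution of the dereferenced variable grows; copies with non-zero
   offsets are likewise kept as complex constraints on the rhs
   variable (x, y, a and b are illustrative names). */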
2118 static void
2119 move_complex_constraints (constraint_graph_t graph)
2121 int i;
2122 constraint_t c;
2124 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2126 if (c)
2128 struct constraint_expr lhs = c->lhs;
2129 struct constraint_expr rhs = c->rhs;
2131 if (lhs.type == DEREF)
2133 insert_into_complex (graph, lhs.var, c);
2135 else if (rhs.type == DEREF)
2137 if (!(get_varinfo (lhs.var)->is_special_var))
2138 insert_into_complex (graph, rhs.var, c);
2140 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2141 && (lhs.offset != 0 || rhs.offset != 0))
2143 insert_into_complex (graph, rhs.var, c);
2150 /* Optimize and rewrite complex constraints while performing
2151 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2152 result of perform_var_substitution. */
2154 static void
2155 rewrite_constraints (constraint_graph_t graph,
2156 struct scc_info *si)
2158 int i;
2159 unsigned int j;
2160 constraint_t c;
2162 for (j = 0; j < graph->size; j++)
2163 gcc_assert (find (j) == j);
2165 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2167 struct constraint_expr lhs = c->lhs;
2168 struct constraint_expr rhs = c->rhs;
2169 unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
2170 unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
2171 unsigned int lhsnode, rhsnode;
2172 unsigned int lhslabel, rhslabel;
2174 lhsnode = si->node_mapping[lhsvar];
2175 rhsnode = si->node_mapping[rhsvar];
2176 lhslabel = graph->pointer_label[lhsnode];
2177 rhslabel = graph->pointer_label[rhsnode];
2179 /* See if it is really a non-pointer variable, and if so, ignore
2180 the constraint. */
2181 if (lhslabel == 0)
2183 if (dump_file && (dump_flags & TDF_DETAILS))
2186 fprintf (dump_file, "%s is a non-pointer variable, "
2187 "ignoring constraint:",
2188 get_varinfo (lhs.var)->name);
2189 dump_constraint (dump_file, c);
2191 VEC_replace (constraint_t, constraints, i, NULL);
2192 continue;
2195 if (rhslabel == 0)
2197 if (dump_file && (dump_flags & TDF_DETAILS))
2200 fprintf (dump_file, "%s is a non-pointer variable, "
2201 "ignoring constraint:",
2202 get_varinfo (rhs.var)->name);
2203 dump_constraint (dump_file, c);
2205 VEC_replace (constraint_t, constraints, i, NULL);
2206 continue;
2209 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2210 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2211 c->lhs.var = lhsvar;
2212 c->rhs.var = rhsvar;
2217 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2218 part of an SCC, false otherwise. */
2220 static bool
2221 eliminate_indirect_cycles (unsigned int node)
2223 if (graph->indirect_cycles[node] != -1
2224 && !bitmap_empty_p (get_varinfo (node)->solution))
2226 unsigned int i;
2227 VEC(unsigned,heap) *queue = NULL;
2228 int queuepos;
2229 unsigned int to = find (graph->indirect_cycles[node]);
2230 bitmap_iterator bi;
2232 /* We can't touch the solution set and call unify_nodes
2233 at the same time, because unify_nodes is going to do
2234 bitmap unions into it. */
2236 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2238 if (find (i) == i && i != to)
2240 if (unite (to, i))
2241 VEC_safe_push (unsigned, heap, queue, i);
2245 for (queuepos = 0;
2246 VEC_iterate (unsigned, queue, queuepos, i);
2247 queuepos++)
2249 unify_nodes (graph, to, i, true);
2251 VEC_free (unsigned, heap, queue);
2252 return true;
2254 return false;
2257 /* Solve the constraint graph GRAPH using our worklist solver.
2258 This is based on the PW* family of solvers from the "Efficient Field
2259 Sensitive Pointer Analysis for C" paper.
2260 It works by iterating over all the graph nodes, processing the complex
2261 constraints and propagating the copy constraints, until nothing changes
2262 any more. This corresponds to steps 6-8 in the solving list given above. */
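/* In outline, a rough sketch of the loop below:
     while some node is marked changed:
       compute a topological order of the current graph;
       for each representative node i in that order:
         delta = solution (i) - oldsolution (i);
         run i's complex constraints against delta;
         propagate delta along i's copy edges, marking every successor
         whose solution grew as changed. */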
2264 static void
2265 solve_graph (constraint_graph_t graph)
2267 unsigned int size = graph->size;
2268 unsigned int i;
2269 bitmap pts;
2271 changed_count = 0;
2272 changed = sbitmap_alloc (size);
2273 sbitmap_zero (changed);
2275 /* Mark all initial non-collapsed nodes as changed. */
2276 for (i = 0; i < size; i++)
2278 varinfo_t ivi = get_varinfo (i);
2279 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2280 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2281 || VEC_length (constraint_t, graph->complex[i]) > 0))
2283 SET_BIT (changed, i);
2284 changed_count++;
2288 /* Allocate a bitmap to be used to store the changed bits. */
2289 pts = BITMAP_ALLOC (&pta_obstack);
2291 while (changed_count > 0)
2293 unsigned int i;
2294 struct topo_info *ti = init_topo_info ();
2295 stats.iterations++;
2297 bitmap_obstack_initialize (&iteration_obstack);
2299 compute_topo_order (graph, ti);
2301 while (VEC_length (unsigned, ti->topo_order) != 0)
2304 i = VEC_pop (unsigned, ti->topo_order);
2306 /* If this variable is not a representative, skip it. */
2307 if (find (i) != i)
2308 continue;
2310 /* In certain indirect cycle cases, we may merge this
2311 variable to another. */
2312 if (eliminate_indirect_cycles (i) && find (i) != i)
2313 continue;
2315 /* If the node has changed, we need to process the
2316 complex constraints and outgoing edges again. */
2317 if (TEST_BIT (changed, i))
2319 unsigned int j;
2320 constraint_t c;
2321 bitmap solution;
2322 VEC(constraint_t,heap) *complex = graph->complex[i];
2323 bool solution_empty;
2325 RESET_BIT (changed, i);
2326 changed_count--;
2328 /* Compute the changed set of solution bits. */
2329 bitmap_and_compl (pts, get_varinfo (i)->solution,
2330 get_varinfo (i)->oldsolution);
2332 if (bitmap_empty_p (pts))
2333 continue;
2335 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2337 solution = get_varinfo (i)->solution;
2338 solution_empty = bitmap_empty_p (solution);
2340 /* Process the complex constraints */
2341 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2343 /* XXX: This is going to unsort the constraints in
2344 some cases, which will occasionally add duplicate
2345 constraints during unification. This does not
2346 affect correctness. */
2347 c->lhs.var = find (c->lhs.var);
2348 c->rhs.var = find (c->rhs.var);
2350 /* The only complex constraint that can change our
2351 solution to non-empty, given an empty solution,
2352 is a constraint where the lhs side is receiving
2353 some set from elsewhere. */
2354 if (!solution_empty || c->lhs.type != DEREF)
2355 do_complex_constraint (graph, c, pts);
2358 solution_empty = bitmap_empty_p (solution);
2360 if (!solution_empty)
2362 bitmap_iterator bi;
2364 /* Propagate solution to all successors. */
2365 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2366 0, j, bi)
2368 bitmap tmp;
2369 bool flag;
2371 unsigned int to = find (j);
2372 tmp = get_varinfo (to)->solution;
2373 flag = false;
2375 /* Don't try to propagate to ourselves. */
2376 if (to == i)
2377 continue;
2379 flag = set_union_with_increment (tmp, pts, 0);
2381 if (flag)
2383 get_varinfo (to)->solution = tmp;
2384 if (!TEST_BIT (changed, to))
2386 SET_BIT (changed, to);
2387 changed_count++;
2394 free_topo_info (ti);
2395 bitmap_obstack_release (&iteration_obstack);
2398 BITMAP_FREE (pts);
2399 sbitmap_free (changed);
2400 bitmap_obstack_release (&oldpta_obstack);
2403 /* Map from trees to variable infos. */
2404 static struct pointer_map_t *vi_for_tree;
2407 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2409 static void
2410 insert_vi_for_tree (tree t, varinfo_t vi)
2412 void **slot = pointer_map_insert (vi_for_tree, t);
2413 gcc_assert (vi);
2414 gcc_assert (*slot == NULL);
2415 *slot = vi;
2418 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2419 exist in the map, return NULL; otherwise return the varinfo we found. */
2421 static varinfo_t
2422 lookup_vi_for_tree (tree t)
2424 void **slot = pointer_map_contains (vi_for_tree, t);
2425 if (slot == NULL)
2426 return NULL;
2428 return (varinfo_t) *slot;
2431 /* Return a printable name for DECL */
2433 static const char *
2434 alias_get_name (tree decl)
2436 const char *res = get_name (decl);
2437 char *temp;
2438 int num_printed = 0;
2440 if (res != NULL)
2441 return res;
2443 res = "NULL";
2444 if (!dump_file)
2445 return res;
2447 if (TREE_CODE (decl) == SSA_NAME)
2449 num_printed = asprintf (&temp, "%s_%u",
2450 alias_get_name (SSA_NAME_VAR (decl)),
2451 SSA_NAME_VERSION (decl));
2453 else if (DECL_P (decl))
2455 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2457 if (num_printed > 0)
2459 res = ggc_strdup (temp);
2460 free (temp);
2462 return res;
2465 /* Find the variable info for tree T in the map.
2466 If T doesn't exist in the map, create an entry for it and return it. */
2468 static varinfo_t
2469 get_vi_for_tree (tree t)
2471 void **slot = pointer_map_contains (vi_for_tree, t);
2472 if (slot == NULL)
2473 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2475 return (varinfo_t) *slot;
2478 /* Get a constraint expression from an SSA_VAR_P node. */
2480 static struct constraint_expr
2481 get_constraint_exp_from_ssa_var (tree t)
2483 struct constraint_expr cexpr;
2485 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2487 /* For parameters, get at the points-to set for the actual parm
2488 decl. */
2489 if (TREE_CODE (t) == SSA_NAME
2490 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2491 && SSA_NAME_IS_DEFAULT_DEF (t))
2492 return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));
2494 cexpr.type = SCALAR;
2496 cexpr.var = get_vi_for_tree (t)->id;
2497 /* If we determine the result is "anything", and we know this is readonly,
2498 say it points to readonly memory instead. */
2499 if (cexpr.var == anything_id && TREE_READONLY (t))
2501 cexpr.type = ADDRESSOF;
2502 cexpr.var = readonly_id;
2505 cexpr.offset = 0;
2506 return cexpr;
2509 /* Process a completed constraint T, and add it to the constraint
2510 list. FROM_CALL is true if this is a constraint coming from a
2511 call, which means any DEREFs we see are "may-derefs", not
2512 "must-derefs". */
2514 static void
2515 process_constraint_1 (constraint_t t, bool from_call)
2517 struct constraint_expr rhs = t->rhs;
2518 struct constraint_expr lhs = t->lhs;
2520 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2521 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2523 if (!from_call)
2525 if (lhs.type == DEREF)
2526 get_varinfo (lhs.var)->directly_dereferenced = true;
2527 if (rhs.type == DEREF)
2528 get_varinfo (rhs.var)->directly_dereferenced = true;
2531 if (!use_field_sensitive)
2533 t->rhs.offset = 0;
2534 t->lhs.offset = 0;
2537 /* ANYTHING == ANYTHING is pointless. */
2538 if (lhs.var == anything_id && rhs.var == anything_id)
2539 return;
2541 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING. */
2542 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
2544 rhs = t->lhs;
2545 t->lhs = t->rhs;
2546 t->rhs = rhs;
2547 process_constraint_1 (t, from_call);
2549 /* This can happen in our IR with things like n->a = *p */
2550 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2552 /* Split into tmp = *rhs, *lhs = tmp */
2553 tree rhsdecl = get_varinfo (rhs.var)->decl;
2554 tree pointertype = TREE_TYPE (rhsdecl);
2555 tree pointedtotype = TREE_TYPE (pointertype);
2556 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2557 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2559 /* If this is an aggregate of known size, we should have passed
2560 this off to do_structure_copy, and it should have broken it
2561 up. */
2562 gcc_assert (!AGGREGATE_TYPE_P (pointedtotype)
2563 || get_varinfo (rhs.var)->is_unknown_size_var);
2565 process_constraint_1 (new_constraint (tmplhs, rhs), from_call);
2566 process_constraint_1 (new_constraint (lhs, tmplhs), from_call);
2568 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2570 /* Split into tmp = &rhs, *lhs = tmp */
2571 tree rhsdecl = get_varinfo (rhs.var)->decl;
2572 tree pointertype = TREE_TYPE (rhsdecl);
2573 tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp");
2574 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2576 process_constraint_1 (new_constraint (tmplhs, rhs), from_call);
2577 process_constraint_1 (new_constraint (lhs, tmplhs), from_call);
2579 else
2581 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2582 VEC_safe_push (constraint_t, heap, constraints, t);
2587 /* Process constraint T, performing various simplifications and then
2588 adding it to our list of overall constraints. */
2590 static void
2591 process_constraint (constraint_t t)
2593 process_constraint_1 (t, false);
2596 /* Return true if T is a variable of a type that could contain
2597 pointers. */
2599 static bool
2600 could_have_pointers (tree t)
2602 tree type = TREE_TYPE (t);
2604 if (POINTER_TYPE_P (type)
2605 || AGGREGATE_TYPE_P (type)
2606 || TREE_CODE (type) == COMPLEX_TYPE)
2607 return true;
2609 return false;
2612 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2613 structure. */
2615 static unsigned HOST_WIDE_INT
2616 bitpos_of_field (const tree fdecl)
2619 if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
2620 || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
2621 return -1;
2623 return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
2624 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
2628 /* Given a COMPONENT_REF T, return the constraint_expr for it. */
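/* Illustrative example: for an access such as s.b, where s is a
   structure variable that has been split into per-field
   sub-variables, we compute the bit offset of b within s and walk
   s's field chain below to find the sub-variable overlapping that
   offset; that sub-variable becomes the constraint variable. */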
2630 static void
2631 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results)
2633 tree orig_t = t;
2634 HOST_WIDE_INT bitsize = -1;
2635 HOST_WIDE_INT bitmaxsize = -1;
2636 HOST_WIDE_INT bitpos;
2637 tree forzero;
2638 struct constraint_expr *result;
2639 unsigned int beforelength = VEC_length (ce_s, *results);
2641 /* Some people like to do cute things like take the address of
2642 &0->a.b */
2643 forzero = t;
2644 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2645 forzero = TREE_OPERAND (forzero, 0);
2647 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2649 struct constraint_expr temp;
2651 temp.offset = 0;
2652 temp.var = integer_id;
2653 temp.type = SCALAR;
2654 VEC_safe_push (ce_s, heap, *results, &temp);
2655 return;
2658 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2660 get_constraint_for (t, results);
2661 result = VEC_last (ce_s, *results);
2662 result->offset = bitpos;
2664 gcc_assert (beforelength + 1 == VEC_length (ce_s, *results));
2666 /* This can also happen due to weird offsetof type macros. */
2667 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2668 result->type = SCALAR;
2670 if (result->type == SCALAR)
2672 /* In languages like C, you can access one past the end of an
2673 array. You aren't allowed to dereference it, so we can
2674 ignore this constraint. When we handle pointer subtraction,
2675 we may have to do something cute here. */
2677 if (result->offset < get_varinfo (result->var)->fullsize
2678 && bitmaxsize != 0)
2680 /* It's also not true that the constraint will actually start at the
2681 right offset; it may start in some padding. We only care about
2682 setting the constraint to the first actual field it touches, so
2683 walk to find it. */
2684 varinfo_t curr;
2685 for (curr = get_varinfo (result->var); curr; curr = curr->next)
2687 if (ranges_overlap_p (curr->offset, curr->size,
2688 result->offset, bitmaxsize))
2690 result->var = curr->id;
2691 break;
2694 /* Assert that we found *some* field there. The user couldn't be
2695 accessing *only* padding. */
2696 /* Still, the user could access one past the end of an array
2697 embedded in a struct, resulting in accessing *only* padding. */
2698 gcc_assert (curr || ref_contains_array_ref (orig_t));
2700 else if (bitmaxsize == 0)
2702 if (dump_file && (dump_flags & TDF_DETAILS))
2703 fprintf (dump_file, "Access to zero-sized part of variable, "
2704 "ignoring\n");
2706 else
2707 if (dump_file && (dump_flags & TDF_DETAILS))
2708 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
2710 result->offset = 0;
2712 else if (bitmaxsize == -1)
2714 /* We can't handle DEREF constraints with unknown size, we'll
2715 get the wrong answer. Punt and return anything. */
2716 result->var = anything_id;
2717 result->offset = 0;
2722 /* Dereference the constraint expression CONS, and return the result.
2723 DEREF (ADDRESSOF) = SCALAR
2724 DEREF (SCALAR) = DEREF
2725 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2726 This is needed so that we can handle dereferencing DEREF constraints. */
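/* Concretely (with p and x illustrative): SCALAR (p) for the
   expression p becomes DEREF (p) for *p, ADDRESSOF (x) for &x
   collapses back to SCALAR (x), and a DEREF entry is first copied
   into a fresh temporary through an extra constraint so that we never
   build a double dereference. */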
2728 static void
2729 do_deref (VEC (ce_s, heap) **constraints)
2731 struct constraint_expr *c;
2732 unsigned int i = 0;
2734 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
2736 if (c->type == SCALAR)
2737 c->type = DEREF;
2738 else if (c->type == ADDRESSOF)
2739 c->type = SCALAR;
2740 else if (c->type == DEREF)
2742 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
2743 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2744 process_constraint (new_constraint (tmplhs, *c));
2745 c->var = tmplhs.var;
2747 else
2748 gcc_unreachable ();
2752 /* Given a tree T, return the constraint expression for it. */
2754 static void
2755 get_constraint_for (tree t, VEC (ce_s, heap) **results)
2757 struct constraint_expr temp;
2759 /* x = integer is all glommed to a single variable, which doesn't
2760 point to anything by itself. That is, of course, unless it is an
2761 integer constant being treated as a pointer, in which case, we
2762 will return that this is really the addressof anything. This
2763 happens below, since it will fall into the default case. The only
2764 case we know something about an integer treated like a pointer is
2765 when it is the NULL pointer, and then we just say it points to
2766 NULL. */
2767 if (TREE_CODE (t) == INTEGER_CST
2768 && integer_zerop (t))
2770 temp.var = nothing_id;
2771 temp.type = ADDRESSOF;
2772 temp.offset = 0;
2773 VEC_safe_push (ce_s, heap, *results, &temp);
2774 return;
2777 /* String constants are read-only. */
2778 if (TREE_CODE (t) == STRING_CST)
2780 temp.var = readonly_id;
2781 temp.type = SCALAR;
2782 temp.offset = 0;
2783 VEC_safe_push (ce_s, heap, *results, &temp);
2784 return;
2787 switch (TREE_CODE_CLASS (TREE_CODE (t)))
2789 case tcc_expression:
2790 case tcc_vl_exp:
2792 switch (TREE_CODE (t))
2794 case ADDR_EXPR:
2796 struct constraint_expr *c;
2797 unsigned int i;
2798 tree exp = TREE_OPERAND (t, 0);
2799 tree pttype = TREE_TYPE (TREE_TYPE (t));
2801 get_constraint_for (exp, results);
2804 /* Complex types are special. Taking the address of one
2805 allows you to access either part of it through that
2806 pointer. */
2807 if (VEC_length (ce_s, *results) == 1 &&
2808 TREE_CODE (pttype) == COMPLEX_TYPE)
2810 struct constraint_expr *origrhs;
2811 varinfo_t origvar;
2812 struct constraint_expr tmp;
2814 gcc_assert (VEC_length (ce_s, *results) == 1);
2815 origrhs = VEC_last (ce_s, *results);
2816 tmp = *origrhs;
2817 VEC_pop (ce_s, *results);
2818 origvar = get_varinfo (origrhs->var);
2819 for (; origvar; origvar = origvar->next)
2821 tmp.var = origvar->id;
2822 VEC_safe_push (ce_s, heap, *results, &tmp);
2826 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
2828 if (c->type == DEREF)
2829 c->type = SCALAR;
2830 else
2831 c->type = ADDRESSOF;
2833 return;
2835 break;
2836 case CALL_EXPR:
2837 /* XXX: In interprocedural mode, if we didn't have the
2838 body, we would need to do *each pointer argument =
2839 &ANYTHING added. */
2840 if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
2842 varinfo_t vi;
2843 tree heapvar = heapvar_lookup (t);
2845 if (heapvar == NULL)
2847 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
2848 DECL_EXTERNAL (heapvar) = 1;
2849 get_var_ann (heapvar)->is_heapvar = 1;
2850 if (gimple_referenced_vars (cfun))
2851 add_referenced_var (heapvar);
2852 heapvar_insert (t, heapvar);
2855 temp.var = create_variable_info_for (heapvar,
2856 alias_get_name (heapvar));
2858 vi = get_varinfo (temp.var);
2859 vi->is_artificial_var = 1;
2860 vi->is_heap_var = 1;
2861 temp.type = ADDRESSOF;
2862 temp.offset = 0;
2863 VEC_safe_push (ce_s, heap, *results, &temp);
2864 return;
2866 else
2868 temp.var = anything_id;
2869 temp.type = SCALAR;
2870 temp.offset = 0;
2871 VEC_safe_push (ce_s, heap, *results, &temp);
2872 return;
2874 break;
2875 default:
2877 temp.type = ADDRESSOF;
2878 temp.var = anything_id;
2879 temp.offset = 0;
2880 VEC_safe_push (ce_s, heap, *results, &temp);
2881 return;
2885 case tcc_reference:
2887 switch (TREE_CODE (t))
2889 case INDIRECT_REF:
2891 get_constraint_for (TREE_OPERAND (t, 0), results);
2892 do_deref (results);
2893 return;
2895 case ARRAY_REF:
2896 case ARRAY_RANGE_REF:
2897 case COMPONENT_REF:
2898 get_constraint_for_component_ref (t, results);
2899 return;
2900 default:
2902 temp.type = ADDRESSOF;
2903 temp.var = anything_id;
2904 temp.offset = 0;
2905 VEC_safe_push (ce_s, heap, *results, &temp);
2906 return;
2910 case tcc_unary:
2912 switch (TREE_CODE (t))
2914 CASE_CONVERT:
2916 tree op = TREE_OPERAND (t, 0);
2918 /* Casts from non-pointers to pointers are bad news for us.
2919 Anything else we see through. */
2920 if (!(POINTER_TYPE_P (TREE_TYPE (t))
2921 && ! POINTER_TYPE_P (TREE_TYPE (op))))
2923 get_constraint_for (op, results);
2924 return;
2927 /* FALLTHRU */
2929 default:
2931 temp.type = ADDRESSOF;
2932 temp.var = anything_id;
2933 temp.offset = 0;
2934 VEC_safe_push (ce_s, heap, *results, &temp);
2935 return;
2939 case tcc_exceptional:
2941 switch (TREE_CODE (t))
2943 case PHI_NODE:
2945 get_constraint_for (PHI_RESULT (t), results);
2946 return;
2948 break;
2949 case SSA_NAME:
2951 struct constraint_expr temp;
2952 temp = get_constraint_exp_from_ssa_var (t);
2953 VEC_safe_push (ce_s, heap, *results, &temp);
2954 return;
2956 break;
2957 default:
2959 temp.type = ADDRESSOF;
2960 temp.var = anything_id;
2961 temp.offset = 0;
2962 VEC_safe_push (ce_s, heap, *results, &temp);
2963 return;
2967 case tcc_declaration:
2969 struct constraint_expr temp;
2970 temp = get_constraint_exp_from_ssa_var (t);
2971 VEC_safe_push (ce_s, heap, *results, &temp);
2972 return;
2974 default:
2976 temp.type = ADDRESSOF;
2977 temp.var = anything_id;
2978 temp.offset = 0;
2979 VEC_safe_push (ce_s, heap, *results, &temp);
2980 return;
2986 /* Handle the structure copy case where we have a simple structure copy
2987 between LHS and RHS that is of SIZE (in bits)
2989 For each field of the lhs variable (lhsfield)
2990 For each field of the rhs variable at lhsfield.offset (rhsfield)
2991 add the constraint lhsfield = rhsfield
2993 If we fail due to some kind of type unsafety or other thing we
2994 can't handle, return false. We expect the caller to collapse the
2995 variable in that case. */
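/* Illustrative example (hypothetical two-field struct): for a copy
   a = b we emit the field-wise constraints a.f0 = b.f0 and
   a.f1 = b.f1, locating each rhs field with first_vi_for_offset at
   the same relative offset as the lhs field; if a matching rhs field
   cannot be found we return false so the caller collapses the
   variable. */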
2997 static bool
2998 do_simple_structure_copy (const struct constraint_expr lhs,
2999 const struct constraint_expr rhs,
3000 const unsigned HOST_WIDE_INT size)
3002 varinfo_t p = get_varinfo (lhs.var);
3003 unsigned HOST_WIDE_INT pstart, last;
3004 pstart = p->offset;
3005 last = p->offset + size;
3006 for (; p && p->offset < last; p = p->next)
3008 varinfo_t q;
3009 struct constraint_expr templhs = lhs;
3010 struct constraint_expr temprhs = rhs;
3011 unsigned HOST_WIDE_INT fieldoffset;
3013 templhs.var = p->id;
3014 q = get_varinfo (temprhs.var);
3015 fieldoffset = p->offset - pstart;
3016 q = first_vi_for_offset (q, q->offset + fieldoffset);
3017 if (!q)
3018 return false;
3019 temprhs.var = q->id;
3020 process_constraint (new_constraint (templhs, temprhs));
3022 return true;
3026 /* Handle the structure copy case where we have a structure copy between an
3027 aggregate on the LHS and a dereference of a pointer on the RHS
3028 that is of SIZE (in bits)
3030 For each field of the lhs variable (lhsfield)
3031 rhs.offset = lhsfield->offset
3032 add the constraint lhsfield = rhs
3035 static void
3036 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
3037 const struct constraint_expr rhs,
3038 const unsigned HOST_WIDE_INT size)
3040 varinfo_t p = get_varinfo (lhs.var);
3041 unsigned HOST_WIDE_INT pstart,last;
3042 pstart = p->offset;
3043 last = p->offset + size;
3045 for (; p && p->offset < last; p = p->next)
3047 varinfo_t q;
3048 struct constraint_expr templhs = lhs;
3049 struct constraint_expr temprhs = rhs;
3050 unsigned HOST_WIDE_INT fieldoffset;
3053 if (templhs.type == SCALAR)
3054 templhs.var = p->id;
3055 else
3056 templhs.offset = p->offset;
3058 q = get_varinfo (temprhs.var);
3059 fieldoffset = p->offset - pstart;
3060 temprhs.offset += fieldoffset;
3061 process_constraint (new_constraint (templhs, temprhs));
3065 /* Handle the structure copy case where we have a structure copy
3066 between an aggregate on the RHS and a dereference of a pointer on
3067 the LHS that is of SIZE (in bits)
3069 For each field of the rhs variable (rhsfield)
3070 lhs.offset = rhsfield->offset
3071 add the constraint lhs = rhsfield
3074 static void
3075 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
3076 const struct constraint_expr rhs,
3077 const unsigned HOST_WIDE_INT size)
3079 varinfo_t p = get_varinfo (rhs.var);
3080 unsigned HOST_WIDE_INT pstart,last;
3081 pstart = p->offset;
3082 last = p->offset + size;
3084 for (; p && p->offset < last; p = p->next)
3086 varinfo_t q;
3087 struct constraint_expr templhs = lhs;
3088 struct constraint_expr temprhs = rhs;
3089 unsigned HOST_WIDE_INT fieldoffset;
3092 if (temprhs.type == SCALAR)
3093 temprhs.var = p->id;
3094 else
3095 temprhs.offset = p->offset;
3097 q = get_varinfo (templhs.var);
3098 fieldoffset = p->offset - pstart;
3099 templhs.offset += fieldoffset;
3100 process_constraint (new_constraint (templhs, temprhs));
3104 /* Sometimes, frontends like to give us bad type information. This
3105 function will collapse all the fields from VAR to the end of VAR,
3106 into VAR, so that we treat those fields as a single variable.
3107 We return the variable they were collapsed into. */
3109 static unsigned int
3110 collapse_rest_of_var (unsigned int var)
3112 varinfo_t currvar = get_varinfo (var);
3113 varinfo_t field;
3115 for (field = currvar->next; field; field = field->next)
3117 if (dump_file)
3118 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
3119 field->name, currvar->name);
3121 gcc_assert (!field->collapsed_to);
3122 field->collapsed_to = currvar;
3125 currvar->next = NULL;
3126 currvar->size = currvar->fullsize - currvar->offset;
3128 return currvar->id;
3131 /* Handle aggregate copies by expanding into copies of the respective
3132 fields of the structures. */
3134 static void
3135 do_structure_copy (tree lhsop, tree rhsop)
3137 struct constraint_expr lhs, rhs, tmp;
3138 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3139 varinfo_t p;
3140 unsigned HOST_WIDE_INT lhssize;
3141 unsigned HOST_WIDE_INT rhssize;
3143 get_constraint_for (lhsop, &lhsc);
3144 get_constraint_for (rhsop, &rhsc);
3145 gcc_assert (VEC_length (ce_s, lhsc) == 1);
3146 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3147 lhs = *(VEC_last (ce_s, lhsc));
3148 rhs = *(VEC_last (ce_s, rhsc));
3150 VEC_free (ce_s, heap, lhsc);
3151 VEC_free (ce_s, heap, rhsc);
3153 /* If we have special var = x, swap it around. */
3154 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
3156 tmp = lhs;
3157 lhs = rhs;
3158 rhs = tmp;
3161 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
3162 possible it's something we could handle. However, most cases falling
3163 into this are dealing with transparent unions, which are slightly
3164 weird. */
3165 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
3167 rhs.type = ADDRESSOF;
3168 rhs.var = anything_id;
3171 /* If the RHS is a special var, or an addressof, set all the LHS fields to
3172 that special var. */
3173 if (rhs.var <= integer_id)
3175 for (p = get_varinfo (lhs.var); p; p = p->next)
3177 struct constraint_expr templhs = lhs;
3178 struct constraint_expr temprhs = rhs;
3180 if (templhs.type == SCALAR )
3181 templhs.var = p->id;
3182 else
3183 templhs.offset += p->offset;
3184 process_constraint (new_constraint (templhs, temprhs));
3187 else
3189 tree rhstype = TREE_TYPE (rhsop);
3190 tree lhstype = TREE_TYPE (lhsop);
3191 tree rhstypesize;
3192 tree lhstypesize;
3194 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
3195 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
3197 /* If we have variably sized types on the rhs or lhs, and a deref
3198 constraint, add the constraint lhsconstraint = &ANYTHING.
3199 This is conservatively correct because either the lhs is an unknown
3200 sized var (if the constraint is SCALAR), or the lhs is a DEREF
3201 constraint, and every variable it can point to must be unknown sized
3202 anyway, so we don't need to worry about fields at all. */
3203 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
3204 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
3206 rhs.var = anything_id;
3207 rhs.type = ADDRESSOF;
3208 rhs.offset = 0;
3209 process_constraint (new_constraint (lhs, rhs));
3210 return;
3213 /* The size only really matters insofar as we don't set more or less of
3214 the variable. If we hit an unknown size var, the size should be the
3215 whole darn thing. */
3216 if (get_varinfo (rhs.var)->is_unknown_size_var)
3217 rhssize = ~0;
3218 else
3219 rhssize = TREE_INT_CST_LOW (rhstypesize);
3221 if (get_varinfo (lhs.var)->is_unknown_size_var)
3222 lhssize = ~0;
3223 else
3224 lhssize = TREE_INT_CST_LOW (lhstypesize);
3227 if (rhs.type == SCALAR && lhs.type == SCALAR)
3229 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
3231 lhs.var = collapse_rest_of_var (lhs.var);
3232 rhs.var = collapse_rest_of_var (rhs.var);
3233 lhs.offset = 0;
3234 rhs.offset = 0;
3235 lhs.type = SCALAR;
3236 rhs.type = SCALAR;
3237 process_constraint (new_constraint (lhs, rhs));
3240 else if (lhs.type != DEREF && rhs.type == DEREF)
3241 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3242 else if (lhs.type == DEREF && rhs.type != DEREF)
3243 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3244 else
3246 tree pointedtotype = lhstype;
3247 tree tmpvar;
3249 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
3250 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
3251 do_structure_copy (tmpvar, rhsop);
3252 do_structure_copy (lhsop, tmpvar);
3258 /* Update related alias information kept in AI. This is used when
3259 building name tags, alias sets and deciding grouping heuristics.
3260 STMT is the statement to process. This function also updates
3261 ADDRESSABLE_VARS. */
3263 static void
3264 update_alias_info (tree stmt, struct alias_info *ai)
3266 bitmap addr_taken;
3267 use_operand_p use_p;
3268 ssa_op_iter iter;
3269 bool stmt_dereferences_ptr_p;
3270 enum escape_type stmt_escape_type = is_escape_site (stmt);
3271 struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);
3273 stmt_dereferences_ptr_p = false;
3275 if (stmt_escape_type == ESCAPE_TO_CALL
3276 || stmt_escape_type == ESCAPE_TO_PURE_CONST)
3278 mem_ref_stats->num_call_sites++;
3279 if (stmt_escape_type == ESCAPE_TO_PURE_CONST)
3280 mem_ref_stats->num_pure_const_call_sites++;
3282 else if (stmt_escape_type == ESCAPE_TO_ASM)
3283 mem_ref_stats->num_asm_sites++;
3285 /* Mark all the variables whose addresses are taken by the statement. */
3286 addr_taken = addresses_taken (stmt);
3287 if (addr_taken)
3289 bitmap_ior_into (gimple_addressable_vars (cfun), addr_taken);
3291 /* If STMT is an escape point, all the addresses taken by it are
3292 call-clobbered. */
3293 if (stmt_escape_type != NO_ESCAPE)
3295 bitmap_iterator bi;
3296 unsigned i;
3298 EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
3300 tree rvar = referenced_var (i);
3301 if (!unmodifiable_var_p (rvar))
3302 mark_call_clobbered (rvar, stmt_escape_type);
3307 /* Process each operand use. For pointers, determine whether they
3308 are dereferenced by the statement, or whether their value
3309 escapes, etc. */
3310 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
3312 tree op, var;
3313 var_ann_t v_ann;
3314 struct ptr_info_def *pi;
3315 unsigned num_uses, num_loads, num_stores;
3317 op = USE_FROM_PTR (use_p);
3319 /* If STMT is a PHI node, OP may be an ADDR_EXPR. If so, add it
3320 to the set of addressable variables. */
3321 if (TREE_CODE (op) == ADDR_EXPR)
3323 bitmap addressable_vars = gimple_addressable_vars (cfun);
3325 gcc_assert (TREE_CODE (stmt) == PHI_NODE);
3326 gcc_assert (addressable_vars);
3328 /* PHI nodes don't have annotations for pinning the set
3329 of addresses taken, so we collect them here.
3331 FIXME, should we allow PHI nodes to have annotations
3332 so that they can be treated like regular statements?
3333 Currently, they are treated as second-class
3334 statements. */
3335 add_to_addressable_set (TREE_OPERAND (op, 0), &addressable_vars);
3336 continue;
3339 /* Ignore constants (they may occur in PHI node arguments). */
3340 if (TREE_CODE (op) != SSA_NAME)
3341 continue;
3343 var = SSA_NAME_VAR (op);
3344 v_ann = var_ann (var);
3346 /* The base variable of an SSA name must be a GIMPLE register, and thus
3347 it cannot be aliased. */
3348 gcc_assert (!may_be_aliased (var));
3350 /* We are only interested in pointers. */
3351 if (!POINTER_TYPE_P (TREE_TYPE (op)))
3352 continue;
3354 pi = get_ptr_info (op);
3356 /* Add OP to AI->PROCESSED_PTRS, if it's not there already. */
3357 if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
3359 SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
3360 VEC_safe_push (tree, heap, ai->processed_ptrs, op);
3363 /* If STMT is a PHI node, then it will not have pointer
3364 dereferences and it will not be an escape point. */
3365 if (TREE_CODE (stmt) == PHI_NODE)
3366 continue;
3368 /* Determine whether OP is a dereferenced pointer, and if STMT
3369 is an escape point, whether OP escapes. */
3370 count_uses_and_derefs (op, stmt, &num_uses, &num_loads, &num_stores);
3372 /* Handle a corner case involving address expressions of the
3373 form '&PTR->FLD'. The problem with these expressions is that
3374 they do not represent a dereference of PTR. However, if some
3375 other transformation propagates them into an INDIRECT_REF
3376 expression, we end up with '*(&PTR->FLD)' which is folded
3377 into 'PTR->FLD'.
3379 So, if the original code had no other dereferences of PTR,
3380 the aliaser will not create memory tags for it, and when
3381 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
3382 memory operations will receive no VDEF/VUSE operands.
3384 One solution would be to have count_uses_and_derefs consider
3385 &PTR->FLD a dereference of PTR. But that is wrong, since it
3386 is not really a dereference but an offset calculation.
3388 What we do here is to recognize these special ADDR_EXPR
3389 nodes. Since these expressions are never GIMPLE values (they
3390 are not GIMPLE invariants), they can only appear on the RHS
3391 of an assignment and their base address is always an
3392 INDIRECT_REF expression. */
3393 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3394 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR
3395 && !is_gimple_val (GIMPLE_STMT_OPERAND (stmt, 1)))
3397 /* If the RHS is of the form &PTR->FLD and PTR == OP, then
3398 this represents a potential dereference of PTR. */
3399 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
3400 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3401 if (TREE_CODE (base) == INDIRECT_REF
3402 && TREE_OPERAND (base, 0) == op)
3403 num_loads++;
3406 if (num_loads + num_stores > 0)
3408 /* Mark OP as dereferenced. In a subsequent pass,
3409 dereferenced pointers that point to a set of
3410 variables will be assigned a name tag to alias
3411 all the variables OP points to. */
3412 pi->is_dereferenced = 1;
3414 /* If this is a store operation, mark OP as being
3415 dereferenced to store, otherwise mark it as being
3416 dereferenced to load. */
3417 if (num_stores > 0)
3418 pointer_set_insert (ai->dereferenced_ptrs_store, var);
3419 else
3420 pointer_set_insert (ai->dereferenced_ptrs_load, var);
3422 /* Update the frequency estimate for all the dereferences of
3423 pointer OP. */
3424 update_mem_sym_stats_from_stmt (op, stmt, num_loads, num_stores);
3426 /* Indicate that STMT contains pointer dereferences. */
3427 stmt_dereferences_ptr_p = true;
3430 if (stmt_escape_type != NO_ESCAPE && num_loads + num_stores < num_uses)
3432 /* If STMT is an escape point and STMT contains at
3433 least one direct use of OP, then the value of OP
3434 escapes and so the pointed-to variables need to
3435 be marked call-clobbered. */
3436 pi->value_escapes_p = 1;
3437 pi->escape_mask |= stmt_escape_type;
3439 /* If the statement makes a function call, assume
3440 that pointer OP will be dereferenced in a store
3441 operation inside the called function. */
3442 if (get_call_expr_in (stmt)
3443 || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
3445 pointer_set_insert (ai->dereferenced_ptrs_store, var);
3446 pi->is_dereferenced = 1;
3451 if (TREE_CODE (stmt) == PHI_NODE)
3452 return;
3454 /* Mark stored variables in STMT as being written to and update the
3455 memory reference stats for all memory symbols referenced by STMT. */
3456 if (stmt_references_memory_p (stmt))
3458 unsigned i;
3459 bitmap_iterator bi;
3461 mem_ref_stats->num_mem_stmts++;
3463 /* Notice that we only update memory reference stats for symbols
3464 loaded and stored by the statement if the statement does not
3465 contain pointer dereferences and it is not a call/asm site.
3466 This is to avoid double accounting problems when creating
3467 memory partitions. After computing points-to information,
3468 pointer dereference statistics are used to update the
3469 reference stats of the pointed-to variables, so here we
3470 should only update direct references to symbols.
3472 Indirect references are not updated here for two reasons: (1)
3473 The first time we compute alias information, the sets
3474 LOADED/STORED are empty for pointer dereferences, (2) After
3475 partitioning, LOADED/STORED may have references to
3476 partitions, not the original pointed-to variables. So, if we
3477 always counted LOADED/STORED here and during partitioning, we
3478 would count many symbols more than once.
3480 This does cause some imprecision when a statement has a
3481 combination of direct symbol references and pointer
3482 dereferences (e.g., MEMORY_VAR = *PTR) or if a call site has
3483 memory symbols in its argument list, but these cases do not
3484 occur so frequently as to constitute a serious problem. */
3485 if (STORED_SYMS (stmt))
3486 EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
3488 tree sym = referenced_var (i);
3489 pointer_set_insert (ai->written_vars, sym);
3490 if (!stmt_dereferences_ptr_p
3491 && stmt_escape_type != ESCAPE_TO_CALL
3492 && stmt_escape_type != ESCAPE_TO_PURE_CONST
3493 && stmt_escape_type != ESCAPE_TO_ASM)
3494 update_mem_sym_stats_from_stmt (sym, stmt, 0, 1);
3497 if (!stmt_dereferences_ptr_p
3498 && LOADED_SYMS (stmt)
3499 && stmt_escape_type != ESCAPE_TO_CALL
3500 && stmt_escape_type != ESCAPE_TO_PURE_CONST
3501 && stmt_escape_type != ESCAPE_TO_ASM)
3502 EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt), 0, i, bi)
3503 update_mem_sym_stats_from_stmt (referenced_var (i), stmt, 1, 0);
3508 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3509 Expressions of the type PTR + CST can be handled in two ways:
3511 1- If the constraint for PTR is ADDRESSOF for a non-structure
3512 variable, then we can use it directly because adding or
3513 subtracting a constant may not alter the original ADDRESSOF
3514 constraint (i.e., pointer arithmetic may not legally go outside
3515 an object's boundaries).
3517 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3518 then if CST is a compile-time constant that can be used as an
3519 offset, we can determine which sub-variable will be pointed-to
3520 by the expression.
3522 Return true if the expression is handled. For any other kind of
3523 expression, return false so that each operand can be added as a
3524 separate constraint by the caller. */
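/* Illustrative example: for p = &s + CST, with s a structure split
   into fields and CST a suitable compile-time constant, the offset is
   scaled to bits and first_vi_for_offset selects the sub-variable of
   s at that offset, so p is constrained to point to that field rather
   than to all of s; offsets we cannot resolve to a field are simply
   skipped, since one-past-the-end arithmetic is legal. */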
3526 static bool
3527 handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
3529 tree op0, op1;
3530 struct constraint_expr *c, *c2;
3531 unsigned int i = 0;
3532 unsigned int j = 0;
3533 VEC (ce_s, heap) *temp = NULL;
3534 unsigned HOST_WIDE_INT rhsunitoffset, rhsoffset;
3536 if (TREE_CODE (expr) != POINTER_PLUS_EXPR)
3537 return false;
3539 op0 = TREE_OPERAND (expr, 0);
3540 op1 = TREE_OPERAND (expr, 1);
3541 gcc_assert (POINTER_TYPE_P (TREE_TYPE (op0)));
3543 /* If the offset is not a non-negative integer constant that fits
3544 in a HOST_WIDE_INT, we cannot handle it here. */
3545 if (!host_integerp (op1, 1))
3546 return false;
3548 /* Make sure the bit-offset also fits. */
3549 rhsunitoffset = TREE_INT_CST_LOW (op1);
3550 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
3551 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3552 return false;
3554 get_constraint_for (op0, &temp);
3556 for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
3557 for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
3559 if (c2->type == ADDRESSOF && rhsoffset != 0)
3561 varinfo_t temp = get_varinfo (c2->var);
3563 /* An access one after the end of an array is valid,
3564 so simply punt on accesses we cannot resolve. */
3565 temp = first_vi_for_offset (temp, rhsoffset);
3566 if (temp == NULL)
3567 continue;
3568 c2->var = temp->id;
3569 c2->offset = 0;
3571 else
3572 c2->offset = rhsoffset;
3573 process_constraint (new_constraint (*c, *c2));
3576 VEC_free (ce_s, heap, temp);
3578 return true;
3581 /* For non-IPA mode, generate constraints necessary for a call on the
3582 RHS. */
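/* For example (illustrative): for a call foo (p), where p is a
   pointer argument, we emit *p = &ANYTHING below, i.e. whatever the
   callee can reach through p is conservatively assumed to end up
   pointing anywhere. */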
3584 static void
3585 handle_rhs_call (tree rhs)
3587 tree arg;
3588 call_expr_arg_iterator iter;
3589 struct constraint_expr rhsc;
3591 rhsc.var = anything_id;
3592 rhsc.offset = 0;
3593 rhsc.type = ADDRESSOF;
3595 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhs)
3597 VEC(ce_s, heap) *lhsc = NULL;
3599 /* Find those pointers being passed, and make sure they end up
3600 pointing to anything. */
3601 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3603 unsigned int j;
3604 struct constraint_expr *lhsp;
3606 get_constraint_for (arg, &lhsc);
3607 do_deref (&lhsc);
3608 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3609 process_constraint_1 (new_constraint (*lhsp, rhsc), true);
3610 VEC_free (ce_s, heap, lhsc);
3615 /* For non-IPA mode, generate constraints necessary for a call
3616 that returns a pointer and assigns it to LHS. This simply makes
3617 the LHS point to anything. */
3619 static void
3620 handle_lhs_call (tree lhs)
3622 VEC(ce_s, heap) *lhsc = NULL;
3623 struct constraint_expr rhsc;
3624 unsigned int j;
3625 struct constraint_expr *lhsp;
3627 rhsc.var = anything_id;
3628 rhsc.offset = 0;
3629 rhsc.type = ADDRESSOF;
3630 get_constraint_for (lhs, &lhsc);
3631 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3632 process_constraint_1 (new_constraint (*lhsp, rhsc), true);
3633 VEC_free (ce_s, heap, lhsc);
3636 /* Walk statement T setting up aliasing constraints according to the
3637 references found in T. This function is the main part of the
3638 constraint builder. AI points to auxiliary alias information used
3639 when building alias sets and computing alias grouping heuristics. */
3641 static void
3642 find_func_aliases (tree origt)
3644 tree t = origt;
3645 VEC(ce_s, heap) *lhsc = NULL;
3646 VEC(ce_s, heap) *rhsc = NULL;
3647 struct constraint_expr *c;
3649 if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
3650 t = TREE_OPERAND (t, 0);
3652 /* Now build constraint expressions. */
3653 if (TREE_CODE (t) == PHI_NODE)
3655 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))));
3657 /* Only care about pointers and structures containing
3658 pointers. */
3659 if (could_have_pointers (PHI_RESULT (t)))
3661 int i;
3662 unsigned int j;
3664 /* For a phi node, assign all the arguments to
3665 the result. */
3666 get_constraint_for (PHI_RESULT (t), &lhsc);
3667 for (i = 0; i < PHI_NUM_ARGS (t); i++)
3669 tree rhstype;
3670 tree strippedrhs = PHI_ARG_DEF (t, i);
3672 STRIP_NOPS (strippedrhs);
3673 rhstype = TREE_TYPE (strippedrhs);
3674 get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);
3676 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3678 struct constraint_expr *c2;
3679 while (VEC_length (ce_s, rhsc) > 0)
3681 c2 = VEC_last (ce_s, rhsc);
3682 process_constraint (new_constraint (*c, *c2));
3683 VEC_pop (ce_s, rhsc);
3689 /* In IPA mode, we need to generate constraints to pass call
3690 arguments through their calls. There are two cases, either a
3691 GIMPLE_MODIFY_STMT when we are returning a value, or just a plain
3692 CALL_EXPR when we are not.
3694 In non-ipa mode, we need to generate constraints for each
3695 pointer passed by address. */
3696 else if (((TREE_CODE (t) == GIMPLE_MODIFY_STMT
3697 && TREE_CODE (GIMPLE_STMT_OPERAND (t, 1)) == CALL_EXPR
3698 && !(call_expr_flags (GIMPLE_STMT_OPERAND (t, 1))
3699 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
3700 || (TREE_CODE (t) == CALL_EXPR
3701 && !(call_expr_flags (t)
3702 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))))
3704 if (!in_ipa_mode)
3706 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3708 handle_rhs_call (GIMPLE_STMT_OPERAND (t, 1));
3709 if (could_have_pointers (GIMPLE_STMT_OPERAND (t, 1)))
3710 handle_lhs_call (GIMPLE_STMT_OPERAND (t, 0));
3712 else
3713 handle_rhs_call (t);
3715 else
3717 tree lhsop;
3718 tree rhsop;
3719 tree arg;
3720 call_expr_arg_iterator iter;
3721 varinfo_t fi;
3722 int i = 1;
3723 tree decl;
3724 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3726 lhsop = GIMPLE_STMT_OPERAND (t, 0);
3727 rhsop = GIMPLE_STMT_OPERAND (t, 1);
3729 else
3731 lhsop = NULL;
3732 rhsop = t;
3734 decl = get_callee_fndecl (rhsop);
3736 /* If we can directly resolve the function being called, do so.
3737 Otherwise, it must be some sort of indirect expression that
3738 we should still be able to handle. */
3739 if (decl)
3741 fi = get_vi_for_tree (decl);
3743 else
3745 decl = CALL_EXPR_FN (rhsop);
3746 fi = get_vi_for_tree (decl);
3749 /* Assign all the passed arguments to the appropriate incoming
3750 parameters of the function. */
3752 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhsop)
3754 struct constraint_expr lhs ;
3755 struct constraint_expr *rhsp;
3757 get_constraint_for (arg, &rhsc);
3758 if (TREE_CODE (decl) != FUNCTION_DECL)
3760 lhs.type = DEREF;
3761 lhs.var = fi->id;
3762 lhs.offset = i;
3764 else
3766 lhs.type = SCALAR;
3767 lhs.var = first_vi_for_offset (fi, i)->id;
3768 lhs.offset = 0;
3770 while (VEC_length (ce_s, rhsc) != 0)
3772 rhsp = VEC_last (ce_s, rhsc);
3773 process_constraint (new_constraint (lhs, *rhsp));
3774 VEC_pop (ce_s, rhsc);
3776 i++;
3779 /* If we are returning a value, assign it to the result. */
3780 if (lhsop)
3782 struct constraint_expr rhs;
3783 struct constraint_expr *lhsp;
3784 unsigned int j = 0;
3786 get_constraint_for (lhsop, &lhsc);
3787 if (TREE_CODE (decl) != FUNCTION_DECL)
3789 rhs.type = DEREF;
3790 rhs.var = fi->id;
3791 rhs.offset = i;
3793 else
3795 rhs.type = SCALAR;
3796 rhs.var = first_vi_for_offset (fi, i)->id;
3797 rhs.offset = 0;
3799 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3800 process_constraint (new_constraint (*lhsp, rhs));
3804 /* Otherwise, just a regular assignment statement. */
3805 else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3807 tree lhsop = GIMPLE_STMT_OPERAND (t, 0);
3808 tree rhsop = GIMPLE_STMT_OPERAND (t, 1);
3809 int i;
3811 if ((AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
3812 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE)
3813 && (AGGREGATE_TYPE_P (TREE_TYPE (rhsop))
3814 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE))
3816 do_structure_copy (lhsop, rhsop);
3818 else
3820 /* Only care about operations with pointers, structures
3821 containing pointers, dereferences, and call expressions. */
3822 if (could_have_pointers (lhsop)
3823 || TREE_CODE (rhsop) == CALL_EXPR)
3825 get_constraint_for (lhsop, &lhsc);
3826 switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
3828 /* RHS that consist of unary operations,
3829 exceptional types, or bare decls/constants, get
3830 handled directly by get_constraint_for. */
3831 case tcc_reference:
3832 case tcc_declaration:
3833 case tcc_constant:
3834 case tcc_exceptional:
3835 case tcc_expression:
3836 case tcc_vl_exp:
3837 case tcc_unary:
3839 unsigned int j;
3841 get_constraint_for (rhsop, &rhsc);
3842 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3844 struct constraint_expr *c2;
3845 unsigned int k;
3847 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3848 process_constraint (new_constraint (*c, *c2));
3852 break;
3854 case tcc_binary:
3856 /* For pointer arithmetic of the form
3857 PTR + CST, we can simply use PTR's
3858 constraint because pointer arithmetic is
3859 not allowed to go out of bounds. */
3860 if (handle_ptr_arith (lhsc, rhsop))
3861 break;
3863 /* FALLTHRU */
3865 /* Otherwise, walk each operand. Notice that we
3866 can't use the operand interface because we need
3867 to process expressions other than simple operands
3868 (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR). */
3869 default:
3870 for (i = 0; i < TREE_OPERAND_LENGTH (rhsop); i++)
3872 tree op = TREE_OPERAND (rhsop, i);
3873 unsigned int j;
3875 gcc_assert (VEC_length (ce_s, rhsc) == 0);
3876 get_constraint_for (op, &rhsc);
3877 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3879 struct constraint_expr *c2;
3880 while (VEC_length (ce_s, rhsc) > 0)
3882 c2 = VEC_last (ce_s, rhsc);
3883 process_constraint (new_constraint (*c, *c2));
3884 VEC_pop (ce_s, rhsc);
3892 else if (TREE_CODE (t) == CHANGE_DYNAMIC_TYPE_EXPR)
3894 unsigned int j;
3896 get_constraint_for (CHANGE_DYNAMIC_TYPE_LOCATION (t), &lhsc);
3897 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
3898 get_varinfo (c->var)->no_tbaa_pruning = true;
3901 /* After promoting variables and computing aliasing we will
3902 need to re-scan most statements. FIXME: Try to minimize the
3903 number of statements re-scanned. It's not really necessary to
3904 re-scan *all* statements. */
3905 mark_stmt_modified (origt);
3906 VEC_free (ce_s, heap, rhsc);
3907 VEC_free (ce_s, heap, lhsc);
3911 /* Find the first varinfo in the same variable as START that overlaps with
3912 OFFSET.
3913 Effectively, walk the chain of fields for the variable START to find the
3914 first field that overlaps with OFFSET.
3915 Return NULL if we can't find one. */
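/* Illustrative example: for a variable with 32-bit fields at bit
   offsets 0, 32 and 64 (hypothetical layout), a query with OFFSET 40
   returns the field at offset 32, since 40 lies inside that field's
   [offset, offset + size) range; a query past the last field's extent
   returns NULL. */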
3917 static varinfo_t
3918 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3920 varinfo_t curr = start;
3921 while (curr)
3923 /* We may not find a variable in the field list with the actual
3924 offset when we have glommed a structure to a variable.
3925 In that case, however, offset should still be within the size
3926 of the variable. */
3927 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3928 return curr;
3929 curr = curr->next;
3931 return NULL;
3935 /* Insert the varinfo FIELD into the field list for BASE, at the front
3936 of the list. */
3938 static void
3939 insert_into_field_list (varinfo_t base, varinfo_t field)
3941 varinfo_t prev = base;
3942 varinfo_t curr = base->next;
3944 field->next = curr;
3945 prev->next = field;
3948 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3949 offset. */
3951 static void
3952 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3954 varinfo_t prev = base;
3955 varinfo_t curr = base->next;
3957 if (curr == NULL)
3959 prev->next = field;
3960 field->next = NULL;
3962 else
3964 while (curr)
3966 if (field->offset <= curr->offset)
3967 break;
3968 prev = curr;
3969 curr = curr->next;
3971 field->next = prev->next;
3972 prev->next = field;
3976 /* This structure is used during pushing fields onto the fieldstack
3977 to track the offset of the field, since bitpos_of_field gives it
3978 relative to its immediate containing type, and we want it relative
3979 to the ultimate containing object. */
3981 struct fieldoff
3983 /* Type of the field. */
3984 tree type;
3986 /* Size, in bits, of the field. */
3987 tree size;
3989 /* Field. */
3990 tree decl;
3992 /* Offset from the base of the base containing object to this field. */
3993 HOST_WIDE_INT offset;
3995 typedef struct fieldoff fieldoff_s;
3997 DEF_VEC_O(fieldoff_s);
3998 DEF_VEC_ALLOC_O(fieldoff_s,heap);
4000 /* qsort comparison function for two fieldoff's PA and PB */
4002 static int
4003 fieldoff_compare (const void *pa, const void *pb)
4005 const fieldoff_s *foa = (const fieldoff_s *)pa;
4006 const fieldoff_s *fob = (const fieldoff_s *)pb;
4007 HOST_WIDE_INT foasize, fobsize;
4009 if (foa->offset != fob->offset)
4010 return foa->offset - fob->offset;
4012 foasize = TREE_INT_CST_LOW (foa->size);
4013 fobsize = TREE_INT_CST_LOW (fob->size);
4014 return foasize - fobsize;
4017 /* Sort a fieldstack according to the field offset and sizes. */
4018 static void
4019 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
4021 qsort (VEC_address (fieldoff_s, fieldstack),
4022 VEC_length (fieldoff_s, fieldstack),
4023 sizeof (fieldoff_s),
4024 fieldoff_compare);
4027 /* Return true if V is a tree that we can have subvars for.
4028 Normally, this is any aggregate type. Also complex
4029 types which are not gimple registers can have subvars. */
4031 static inline bool
4032 var_can_have_subvars (const_tree v)
4034 /* Volatile variables should never have subvars. */
4035 if (TREE_THIS_VOLATILE (v))
4036 return false;
4038 /* Non decls or memory tags can never have subvars. */
4039 if (!DECL_P (v) || MTAG_P (v))
4040 return false;
4042 /* Aggregates without overlapping fields can have subvars. */
4043 if (TREE_CODE (TREE_TYPE (v)) == RECORD_TYPE)
4044 return true;
4046 return false;
4049 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
4050 the fields of TYPE onto fieldstack, recording their offsets along
4051 the way.
4053 OFFSET is used to keep track of the offset in this entire
4054 structure, rather than just the immediately containing structure.
4055 Returns the number of fields pushed.
4057 HAS_UNION is set to true if we find a union type as a field of
4058 TYPE. */
4060 static int
4061 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4062 HOST_WIDE_INT offset, bool *has_union)
4064 tree field;
4065 int count = 0;
4067 if (TREE_CODE (type) != RECORD_TYPE)
4068 return 0;
4070 /* If the vector of fields is growing too big, bail out early.
4071 Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, so make
4072 sure this check fails in that case. */
4073 if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
4074 return 0;
4076 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4077 if (TREE_CODE (field) == FIELD_DECL)
4079 bool push = false;
4080 int pushed = 0;
4082 if (has_union
4083 && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4084 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
4085 *has_union = true;
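/* A field whose type cannot itself have subvars (e.g. a scalar) is
   pushed as a leaf field.  */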
4087 if (!var_can_have_subvars (field))
4088 push = true;
4089 else if (!(pushed = push_fields_onto_fieldstack
4090 (TREE_TYPE (field),
4091 fieldstack,
4092 offset + bitpos_of_field (field),
4093 has_union))
4094 && (DECL_SIZE (field)
4095 && !integer_zerop (DECL_SIZE (field))))
4096 /* Empty structures may still have a nonzero size, as in C++.
4097 So if we did not push any subfields and the field's size is
4098 nonzero, push the field itself onto the stack. */
4099 push = true;
4101 if (push)
4103 fieldoff_s *pair;
4105 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4106 pair->type = TREE_TYPE (field);
4107 pair->size = DECL_SIZE (field);
4108 pair->decl = field;
4109 pair->offset = offset + bitpos_of_field (field);
4110 count++;
4112 else
4113 count += pushed;
4116 return count;
4119 /* Create a constraint from ANYTHING variable to VI. */
4120 static void
4121 make_constraint_from_anything (varinfo_t vi)
4123 struct constraint_expr lhs, rhs;
4125 lhs.var = vi->id;
4126 lhs.offset = 0;
4127 lhs.type = SCALAR;
4129 rhs.var = anything_id;
4130 rhs.offset = 0;
4131 rhs.type = ADDRESSOF;
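/* In constraint notation this is VI = &ANYTHING, i.e. VI may point
   to any object.  */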
4132 process_constraint (new_constraint (lhs, rhs));
4135 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4136 if it is a varargs function. */
4138 static unsigned int
4139 count_num_arguments (tree decl, bool *is_varargs)
4141 unsigned int i = 0;
4142 tree t;
4144 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
4146 t = TREE_CHAIN (t))
4148 if (TREE_VALUE (t) == void_type_node)
4149 break;
4150 i++;
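/* A prototyped, non-varargs parameter list is terminated by
   void_type_node.  If we walked off the end of the chain without
   seeing it, DECL takes a variable number of arguments (or is
   unprototyped).  */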
4153 if (!t)
4154 *is_varargs = true;
4155 return i;
4158 /* Create the variable info node for the function DECL, using NAME,
4159 and return the index of the variable we've created for it. */
4161 static unsigned int
4162 create_function_info_for (tree decl, const char *name)
4164 unsigned int index = VEC_length (varinfo_t, varmap);
4165 varinfo_t vi;
4166 tree arg;
4167 unsigned int i;
4168 bool is_varargs = false;
4170 /* Create the variable info. */
4172 vi = new_var_info (decl, index, name);
4173 vi->decl = decl;
4174 vi->offset = 0;
4175 vi->has_union = 0;
4176 vi->size = 1;
4177 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
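/* The variable for the function is laid out like a structure:
   offset 0 is the function itself, offsets 1..N are its arguments
   (named NAME.arg0 .. NAME.argN-1 below), and, if there is a return
   value, the field after the last argument is NAME.result.  */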
4178 insert_vi_for_tree (vi->decl, vi);
4179 VEC_safe_push (varinfo_t, heap, varmap, vi);
4181 stats.total_vars++;
4183 /* If it's varargs, we don't know how many arguments it has, so we
4184 can't do much.
4186 if (is_varargs)
4188 vi->fullsize = ~0;
4189 vi->size = ~0;
4190 vi->is_unknown_size_var = true;
4191 return index;
4195 arg = DECL_ARGUMENTS (decl);
4197 /* Set up variables for each argument. */
4198 for (i = 1; i < vi->fullsize; i++)
4200 varinfo_t argvi;
4201 const char *newname;
4202 char *tempname;
4203 unsigned int newindex;
4204 tree argdecl = decl;
4206 if (arg)
4207 argdecl = arg;
4209 newindex = VEC_length (varinfo_t, varmap);
4210 asprintf (&tempname, "%s.arg%d", name, i-1);
4211 newname = ggc_strdup (tempname);
4212 free (tempname);
4214 argvi = new_var_info (argdecl, newindex, newname);
4215 argvi->decl = argdecl;
4216 VEC_safe_push (varinfo_t, heap, varmap, argvi);
4217 argvi->offset = i;
4218 argvi->size = 1;
4219 argvi->fullsize = vi->fullsize;
4220 argvi->has_union = false;
4221 insert_into_field_list_sorted (vi, argvi);
4222 stats.total_vars ++;
4223 if (arg)
4225 insert_vi_for_tree (arg, argvi);
4226 arg = TREE_CHAIN (arg);
4230 /* Create a variable for the return var. */
4231 if (DECL_RESULT (decl) != NULL
4232 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
4234 varinfo_t resultvi;
4235 const char *newname;
4236 char *tempname;
4237 unsigned int newindex;
4238 tree resultdecl = decl;
4240 vi->fullsize ++;
4242 if (DECL_RESULT (decl))
4243 resultdecl = DECL_RESULT (decl);
4245 newindex = VEC_length (varinfo_t, varmap);
4246 asprintf (&tempname, "%s.result", name);
4247 newname = ggc_strdup (tempname);
4248 free (tempname);
4250 resultvi = new_var_info (resultdecl, newindex, newname);
4251 resultvi->decl = resultdecl;
4252 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
4253 resultvi->offset = i;
4254 resultvi->size = 1;
4255 resultvi->fullsize = vi->fullsize;
4256 resultvi->has_union = false;
4257 insert_into_field_list_sorted (vi, resultvi);
4258 stats.total_vars ++;
4259 if (DECL_RESULT (decl))
4260 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
4262 return index;
4266 /* Return true if FIELDSTACK contains fields that overlap.
4267 FIELDSTACK is assumed to be sorted by offset. */
4269 static bool
4270 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
4272 fieldoff_s *fo = NULL;
4273 unsigned int i;
4274 HOST_WIDE_INT lastoffset = -1;
4276 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4278 if (fo->offset == lastoffset)
4279 return true;
4280 lastoffset = fo->offset;
4282 return false;
4285 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
4286 This will also create any varinfo structures necessary for fields
4287 of DECL. */
4289 static unsigned int
4290 create_variable_info_for (tree decl, const char *name)
4292 unsigned int index = VEC_length (varinfo_t, varmap);
4293 varinfo_t vi;
4294 tree decltype = TREE_TYPE (decl);
4295 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
4296 bool notokay = false;
4297 bool hasunion;
4298 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
4299 VEC (fieldoff_s,heap) *fieldstack = NULL;
4301 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
4302 return create_function_info_for (decl, name);
4304 hasunion = TREE_CODE (decltype) == UNION_TYPE
4305 || TREE_CODE (decltype) == QUAL_UNION_TYPE;
4306 if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
4308 push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion);
4309 if (hasunion)
4311 VEC_free (fieldoff_s, heap, fieldstack);
4312 notokay = true;
4316 /* If the variable doesn't have subvars, we may end up needing to
4317 sort the field list and create fake variables for all the
4318 fields. */
4319 vi = new_var_info (decl, index, name);
4320 vi->decl = decl;
4321 vi->offset = 0;
4322 vi->has_union = hasunion;
4323 if (!declsize
4324 || TREE_CODE (declsize) != INTEGER_CST
4325 || TREE_CODE (decltype) == UNION_TYPE
4326 || TREE_CODE (decltype) == QUAL_UNION_TYPE)
4328 vi->is_unknown_size_var = true;
4329 vi->fullsize = ~0;
4330 vi->size = ~0;
4332 else
4334 vi->fullsize = TREE_INT_CST_LOW (declsize);
4335 vi->size = vi->fullsize;
4338 insert_vi_for_tree (vi->decl, vi);
4339 VEC_safe_push (varinfo_t, heap, varmap, vi);
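/* A global variable can be written by code outside our view, so
   unless we can see the whole program it must be assumed to point
   to anything.  */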
4340 if (is_global && (!flag_whole_program || !in_ipa_mode))
4341 make_constraint_from_anything (vi);
4343 stats.total_vars++;
4344 if (use_field_sensitive
4345 && !notokay
4346 && !vi->is_unknown_size_var
4347 && var_can_have_subvars (decl)
4348 && VEC_length (fieldoff_s, fieldstack) > 1
4349 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
4351 unsigned int newindex = VEC_length (varinfo_t, varmap);
4352 fieldoff_s *fo = NULL;
4353 unsigned int i;
4355 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4357 if (! fo->size
4358 || TREE_CODE (fo->size) != INTEGER_CST
4359 || fo->offset < 0)
4361 notokay = true;
4362 break;
4366 /* We can't sort them if we have a field with a variable sized type,
4367 which will make notokay = true. In that case, we are going to return
4368 without creating varinfos for the fields anyway, so sorting them is a
4369 waste to boot. */
4370 if (!notokay)
4372 sort_fieldstack (fieldstack);
4373 /* Due to some C++ FE issues, like PR 22488, we might end up with
4374 what appear to be overlapping fields even though they do not,
4375 in reality, overlap. Until the C++ FE is fixed,
4376 we will simply disable field-sensitivity for these cases. */
4377 notokay = check_for_overlaps (fieldstack);
4381 if (VEC_length (fieldoff_s, fieldstack) != 0)
4382 fo = VEC_index (fieldoff_s, fieldstack, 0);
4384 if (fo == NULL || notokay)
4386 vi->is_unknown_size_var = 1;
4387 vi->fullsize = ~0;
4388 vi->size = ~0;
4389 VEC_free (fieldoff_s, heap, fieldstack);
4390 return index;
4393 vi->size = TREE_INT_CST_LOW (fo->size);
4394 vi->offset = fo->offset;
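/* Walk the remaining fields from last to first; insert_into_field_list
   prepends directly after VI, so the resulting field list ends up
   ordered by offset.  */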
4395 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
4396 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
4397 i--)
4399 varinfo_t newvi;
4400 const char *newname = "NULL";
4401 char *tempname;
4403 newindex = VEC_length (varinfo_t, varmap);
4404 if (dump_file)
4406 if (fo->decl)
4407 asprintf (&tempname, "%s.%s",
4408 vi->name, alias_get_name (fo->decl));
4409 else
4410 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC,
4411 vi->name, fo->offset);
4412 newname = ggc_strdup (tempname);
4413 free (tempname);
4415 newvi = new_var_info (decl, newindex, newname);
4416 newvi->offset = fo->offset;
4417 newvi->size = TREE_INT_CST_LOW (fo->size);
4418 newvi->fullsize = vi->fullsize;
4419 insert_into_field_list (vi, newvi);
4420 VEC_safe_push (varinfo_t, heap, varmap, newvi);
4421 if (is_global && (!flag_whole_program || !in_ipa_mode))
4422 make_constraint_from_anything (newvi);
4424 stats.total_vars++;
4428 VEC_free (fieldoff_s, heap, fieldstack);
4430 return index;
4433 /* Print out the points-to solution for VAR to FILE. */
4435 void
4436 dump_solution_for_var (FILE *file, unsigned int var)
4438 varinfo_t vi = get_varinfo (var);
4439 unsigned int i;
4440 bitmap_iterator bi;
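/* If VAR was unified with another variable during solving, its
   points-to solution is that of its representative.  */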
4442 if (find (var) != var)
4444 varinfo_t vipt = get_varinfo (find (var));
4445 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4447 else
4449 fprintf (file, "%s = { ", vi->name);
4450 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4452 fprintf (file, "%s ", get_varinfo (i)->name);
4454 fprintf (file, "}");
4455 if (vi->no_tbaa_pruning)
4456 fprintf (file, " no-tbaa-pruning");
4457 fprintf (file, "\n");
4461 /* Print the points-to solution for VAR to stdout. */
4463 void
4464 debug_solution_for_var (unsigned int var)
4466 dump_solution_for_var (stdout, var);
4469 /* Create varinfo structures for all of the variables in the
4470 function for intraprocedural mode. */
4472 static void
4473 intra_create_variable_infos (void)
4475 tree t;
4476 struct constraint_expr lhs, rhs;
4478 /* For each incoming pointer argument arg, create the constraint ARG
4479 = ANYTHING or a dummy variable if flag_argument_noalias is set. */
4480 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
4482 varinfo_t p;
4484 if (!could_have_pointers (t))
4485 continue;
4487 /* If flag_argument_noalias is set, then the function's pointer
4488 arguments are guaranteed not to point to each other. In that
4489 case, create an artificial variable PARM_NOALIAS and the
4490 constraint ARG = &PARM_NOALIAS. */
4491 if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
4493 varinfo_t vi;
4494 tree heapvar = heapvar_lookup (t);
4496 lhs.offset = 0;
4497 lhs.type = SCALAR;
4498 lhs.var = get_vi_for_tree (t)->id;
4500 if (heapvar == NULL_TREE)
4502 var_ann_t ann;
4503 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
4504 "PARM_NOALIAS");
4505 DECL_EXTERNAL (heapvar) = 1;
4506 if (gimple_referenced_vars (cfun))
4507 add_referenced_var (heapvar);
4509 heapvar_insert (t, heapvar);
4511 ann = get_var_ann (heapvar);
4512 if (flag_argument_noalias == 1)
4513 ann->noalias_state = NO_ALIAS;
4514 else if (flag_argument_noalias == 2)
4515 ann->noalias_state = NO_ALIAS_GLOBAL;
4516 else if (flag_argument_noalias == 3)
4517 ann->noalias_state = NO_ALIAS_ANYTHING;
4518 else
4519 gcc_unreachable ();
4522 vi = get_vi_for_tree (heapvar);
4523 vi->is_artificial_var = 1;
4524 vi->is_heap_var = 1;
4525 rhs.var = vi->id;
4526 rhs.type = ADDRESSOF;
4527 rhs.offset = 0;
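/* The parameter may have been split into fields; emit the
   ARG = &PARM_NOALIAS constraint for each of them.  */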
4528 for (p = get_varinfo (lhs.var); p; p = p->next)
4530 struct constraint_expr temp = lhs;
4531 temp.var = p->id;
4532 process_constraint (new_constraint (temp, rhs));
4535 else
4537 varinfo_t arg_vi = get_vi_for_tree (t);
4539 for (p = arg_vi; p; p = p->next)
4540 make_constraint_from_anything (p);
4545 /* Structure used to put solution bitmaps in a hashtable so they can
4546 be shared among variables with the same points-to set. */
4548 typedef struct shared_bitmap_info
4550 bitmap pt_vars;
4551 hashval_t hashcode;
4552 } *shared_bitmap_info_t;
4553 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
4555 static htab_t shared_bitmap_table;
4557 /* Hash function for a shared_bitmap_info_t */
4559 static hashval_t
4560 shared_bitmap_hash (const void *p)
4562 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
4563 return bi->hashcode;
4566 /* Equality function for two shared_bitmap_info_t's. */
4568 static int
4569 shared_bitmap_eq (const void *p1, const void *p2)
4571 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
4572 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
4573 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
4576 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
4577 existing instance if there is one, NULL otherwise. */
4579 static bitmap
4580 shared_bitmap_lookup (bitmap pt_vars)
4582 void **slot;
4583 struct shared_bitmap_info sbi;
4585 sbi.pt_vars = pt_vars;
4586 sbi.hashcode = bitmap_hash (pt_vars);
4588 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
4589 sbi.hashcode, NO_INSERT);
4590 if (!slot)
4591 return NULL;
4592 else
4593 return ((shared_bitmap_info_t) *slot)->pt_vars;
4597 /* Add a bitmap to the shared bitmap hashtable. */
4599 static void
4600 shared_bitmap_add (bitmap pt_vars)
4602 void **slot;
4603 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
4605 sbi->pt_vars = pt_vars;
4606 sbi->hashcode = bitmap_hash (pt_vars);
4608 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
4609 sbi->hashcode, INSERT);
4610 gcc_assert (!*slot);
4611 *slot = (void *) sbi;
4615 /* Set bits in INTO corresponding to the variable uids in solution set
4616 FROM, which came from variable PTR.
4617 For variables that are actually dereferenced, we also use type
4618 based alias analysis to prune the points-to sets.
4619 IS_DEREFED is true if PTR was directly dereferenced, which we use to
4620 help determine whether we are allowed to prune using TBAA.
4621 If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
4622 the FROM set. */
4624 static void
4625 set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
4626 bool no_tbaa_pruning)
4628 unsigned int i;
4629 bitmap_iterator bi;
4631 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
4633 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4635 varinfo_t vi = get_varinfo (i);
4637 /* The only artificial variables that are allowed in a may-alias
4638 set are heap variables. */
4639 if (vi->is_artificial_var && !vi->is_heap_var)
4640 continue;
4642 if (TREE_CODE (vi->decl) == VAR_DECL
4643 || TREE_CODE (vi->decl) == PARM_DECL
4644 || TREE_CODE (vi->decl) == RESULT_DECL)
4646 /* Just add VI->DECL to the alias set.
4647 Don't type prune artificial vars or points-to sets
4648 for pointers that have not been dereferenced or with
4649 type-based pruning disabled. */
4650 if (vi->is_artificial_var
4651 || !is_derefed
4652 || no_tbaa_pruning)
4653 bitmap_set_bit (into, DECL_UID (vi->decl));
4654 else
4656 alias_set_type var_alias_set, ptr_alias_set;
4657 var_alias_set = get_alias_set (vi->decl);
4658 ptr_alias_set = get_alias_set (TREE_TYPE (TREE_TYPE (ptr)));
4659 if (alias_sets_conflict_p (ptr_alias_set, var_alias_set))
4660 bitmap_set_bit (into, DECL_UID (vi->decl));
4667 static bool have_alias_info = false;
4669 /* The list of SMT's that are in use by our pointer variables. This
4670 is the set of SMT's for all pointers that can point to anything. */
4671 static bitmap used_smts;
4673 /* Due to the ordering of points-to set calculation and SMT
4674 calculation being a bit co-dependent, we can't just calculate SMT
4675 used info whenever we want; we have to calculate it around the time
4676 that find_what_p_points_to is called. */
4678 /* Mark which SMT's are in use by points-to anything variables. */
4680 void
4681 set_used_smts (void)
4683 int i;
4684 varinfo_t vi;
4685 used_smts = BITMAP_ALLOC (&pta_obstack);
4687 for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); i++)
4689 tree var = vi->decl;
4690 varinfo_t withsolution = get_varinfo (find (i));
4691 tree smt;
4692 var_ann_t va;
4693 struct ptr_info_def *pi = NULL;
4695 /* For parm decls, the pointer info may be under the default
4696 def. */
4697 if (TREE_CODE (vi->decl) == PARM_DECL
4698 && gimple_default_def (cfun, var))
4699 pi = SSA_NAME_PTR_INFO (gimple_default_def (cfun, var));
4700 else if (TREE_CODE (var) == SSA_NAME)
4701 pi = SSA_NAME_PTR_INFO (var);
4703 /* Skip the special variables and those that can't be aliased. */
4704 if (vi->is_special_var
4705 || !SSA_VAR_P (var)
4706 || (pi && !pi->is_dereferenced)
4707 || (TREE_CODE (var) == VAR_DECL && !may_be_aliased (var))
4708 || !POINTER_TYPE_P (TREE_TYPE (var)))
4709 continue;
4711 if (TREE_CODE (var) == SSA_NAME)
4712 var = SSA_NAME_VAR (var);
4714 va = var_ann (var);
4715 if (!va)
4716 continue;
4718 smt = va->symbol_mem_tag;
4719 if (smt && bitmap_bit_p (withsolution->solution, anything_id))
4720 bitmap_set_bit (used_smts, DECL_UID (smt));
4724 /* Merge the necessary SMT's into the bitmap SOLUTION, which is
4725 P's points-to solution. This involves merging all SMT's that are a subset of
4726 the SMT necessary for P. */
4728 static void
4729 merge_smts_into (tree p, bitmap solution)
4731 tree smt;
4732 bitmap aliases;
4733 tree var = p;
4735 if (TREE_CODE (p) == SSA_NAME)
4736 var = SSA_NAME_VAR (p);
4738 smt = var_ann (var)->symbol_mem_tag;
4739 if (smt)
4741 /* The smt itself isn't included in its aliases. */
4742 bitmap_set_bit (solution, DECL_UID (smt));
4744 aliases = MTAG_ALIASES (smt);
4745 if (aliases)
4746 bitmap_ior_into (solution, aliases);
4750 /* Given a pointer variable P, fill in its points-to set, or return
4751 false if we can't.
4752 Rather than return false for variables that point to anything, we
4753 instead find the corresponding SMT, and merge in its aliases. In
4754 addition to these aliases, we also set the bits for the SMT's
4755 themselves and their subsets, as SMT's are still in use by
4756 non-SSA_NAME's, and pruning may eliminate every one of their
4757 aliases. In such a case, if we did not include the right set of
4758 SMT's in the points-to set of the variable, we'd end up with
4759 statements that do not conflict but should. */
4761 bool
4762 find_what_p_points_to (tree p)
4764 tree lookup_p = p;
4765 varinfo_t vi;
4767 if (!have_alias_info)
4768 return false;
4770 /* For parameters, get at the points-to set for the actual parm
4771 decl. */
4772 if (TREE_CODE (p) == SSA_NAME
4773 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4774 && SSA_NAME_IS_DEFAULT_DEF (p))
4775 lookup_p = SSA_NAME_VAR (p);
4777 vi = lookup_vi_for_tree (lookup_p);
4778 if (vi)
4780 if (vi->is_artificial_var)
4781 return false;
4783 /* See if this is a field or a structure. */
4784 if (vi->size != vi->fullsize)
4786 /* Nothing currently asks about structure fields directly,
4787 but when something does, we will need code here to hand back the
4788 points-to set. */
4789 return false;
4791 else
4793 struct ptr_info_def *pi = get_ptr_info (p);
4794 unsigned int i;
4795 bitmap_iterator bi;
4796 bool was_pt_anything = false;
4797 bitmap finished_solution;
4798 bitmap result;
4800 if (!pi->is_dereferenced)
4801 return false;
4803 /* This variable may have been collapsed; let's get the real
4804 variable. */
4805 vi = get_varinfo (find (vi->id));
4807 /* Translate artificial variables into SSA_NAME_PTR_INFO
4808 attributes. */
4809 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4811 varinfo_t vi = get_varinfo (i);
4813 if (vi->is_artificial_var)
4815 /* FIXME. READONLY should be handled better so that
4816 flow insensitive aliasing can disregard writable
4817 aliases. */
4818 if (vi->id == nothing_id)
4819 pi->pt_null = 1;
4820 else if (vi->id == anything_id)
4821 was_pt_anything = 1;
4822 else if (vi->id == readonly_id)
4823 was_pt_anything = 1;
4824 else if (vi->id == integer_id)
4825 was_pt_anything = 1;
4826 else if (vi->is_heap_var)
4827 pi->pt_global_mem = 1;
4831 /* Share the final set of variables when possible. */
4832 finished_solution = BITMAP_GGC_ALLOC ();
4833 stats.points_to_sets_created++;
4835 /* Instead of using pt_anything, we merge in the SMT aliases
4836 for the underlying SMT. In addition, if they could have
4837 pointed to anything, they could point to global memory. */
4838 if (was_pt_anything)
4840 merge_smts_into (p, finished_solution);
4841 pi->pt_global_mem = 1;
4844 set_uids_in_ptset (p, finished_solution, vi->solution,
4845 vi->directly_dereferenced,
4846 vi->no_tbaa_pruning);
4847 result = shared_bitmap_lookup (finished_solution);
4849 if (!result)
4851 shared_bitmap_add (finished_solution);
4852 pi->pt_vars = finished_solution;
4854 else
4856 pi->pt_vars = result;
4857 bitmap_clear (finished_solution);
4860 if (bitmap_empty_p (pi->pt_vars))
4861 pi->pt_vars = NULL;
4863 return true;
4867 return false;
4870 /* Mark everything that P points to as call clobbered. Returns true
4871 if everything is done and false if all addressable variables need to
4872 be clobbered because P points to anything. */
4874 bool
4875 clobber_what_p_points_to (tree p)
4877 tree lookup_p = p;
4878 varinfo_t vi;
4879 struct ptr_info_def *pi;
4880 unsigned int i;
4881 bitmap_iterator bi;
4883 if (!have_alias_info)
4884 return false;
4886 /* For parameters, get at the points-to set for the actual parm
4887 decl. */
4888 if (TREE_CODE (p) == SSA_NAME
4889 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4890 && SSA_NAME_IS_DEFAULT_DEF (p))
4891 lookup_p = SSA_NAME_VAR (p);
4893 vi = lookup_vi_for_tree (lookup_p);
4894 if (!vi)
4895 return false;
4897 /* We are asking for the points-to solution of pointers. */
4898 gcc_assert (!vi->is_artificial_var
4899 && vi->size == vi->fullsize);
4901 pi = get_ptr_info (p);
4903 /* This variable may have been collapsed; let's get the real
4904 variable. */
4905 vi = get_varinfo (find (vi->id));
4907 /* Mark variables in the solution call-clobbered. */
4908 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4910 varinfo_t vi = get_varinfo (i);
4912 if (vi->is_artificial_var)
4914 /* nothing_id and readonly_id do not cause any
4915 call clobber ops. For anything_id and integer_id
4916 we need to clobber all addressable vars. */
4917 if (vi->id == anything_id
4918 || vi->id == integer_id)
4919 return false;
4922 /* Only artificial heap-vars are of further interest. */
4923 if (vi->is_artificial_var && !vi->is_heap_var)
4924 continue;
4926 if ((TREE_CODE (vi->decl) == VAR_DECL
4927 || TREE_CODE (vi->decl) == PARM_DECL
4928 || TREE_CODE (vi->decl) == RESULT_DECL)
4929 && !unmodifiable_var_p (vi->decl))
4930 mark_call_clobbered (vi->decl, pi->escape_mask);
4933 return true;
4936 /* Dump points-to information to OUTFILE. */
4938 void
4939 dump_sa_points_to_info (FILE *outfile)
4941 unsigned int i;
4943 fprintf (outfile, "\nPoints-to sets\n\n");
4945 if (dump_flags & TDF_STATS)
4947 fprintf (outfile, "Stats:\n");
4948 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
4949 fprintf (outfile, "Non-pointer vars: %d\n",
4950 stats.nonpointer_vars);
4951 fprintf (outfile, "Statically unified vars: %d\n",
4952 stats.unified_vars_static);
4953 fprintf (outfile, "Dynamically unified vars: %d\n",
4954 stats.unified_vars_dynamic);
4955 fprintf (outfile, "Iterations: %d\n", stats.iterations);
4956 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
4957 fprintf (outfile, "Number of implicit edges: %d\n",
4958 stats.num_implicit_edges);
4961 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
4962 dump_solution_for_var (outfile, i);
4966 /* Debug points-to information to stderr. */
4968 void
4969 debug_sa_points_to_info (void)
4971 dump_sa_points_to_info (stderr);
4975 /* Initialize the always-existing constraint variables for NULL,
4976 ANYTHING, READONLY, and INTEGER. */
4978 static void
4979 init_base_vars (void)
4981 struct constraint_expr lhs, rhs;
4983 /* Create the NULL variable, used to represent that a variable points
4984 to NULL. */
4985 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
4986 var_nothing = new_var_info (nothing_tree, 0, "NULL");
4987 insert_vi_for_tree (nothing_tree, var_nothing);
4988 var_nothing->is_artificial_var = 1;
4989 var_nothing->offset = 0;
4990 var_nothing->size = ~0;
4991 var_nothing->fullsize = ~0;
4992 var_nothing->is_special_var = 1;
4993 nothing_id = 0;
4994 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
4996 /* Create the ANYTHING variable, used to represent that a variable
4997 points to some unknown piece of memory. */
4998 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
4999 var_anything = new_var_info (anything_tree, 1, "ANYTHING");
5000 insert_vi_for_tree (anything_tree, var_anything);
5001 var_anything->is_artificial_var = 1;
5002 var_anything->size = ~0;
5003 var_anything->offset = 0;
5004 var_anything->next = NULL;
5005 var_anything->fullsize = ~0;
5006 var_anything->is_special_var = 1;
5007 anything_id = 1;
5009 /* Anything points to anything. This makes deref constraints just
5010 work in the presence of linked lists and other p = *p type loops,
5011 by saying that *ANYTHING = ANYTHING. */
5012 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
5013 lhs.type = SCALAR;
5014 lhs.var = anything_id;
5015 lhs.offset = 0;
5016 rhs.type = ADDRESSOF;
5017 rhs.var = anything_id;
5018 rhs.offset = 0;
5020 /* This specifically does not use process_constraint because
5021 process_constraint ignores all anything = anything constraints, since all
5022 but this one are redundant. */
5023 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
5025 /* Create the READONLY variable, used to represent that a variable
5026 points to readonly memory. */
5027 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
5028 var_readonly = new_var_info (readonly_tree, 2, "READONLY");
5029 var_readonly->is_artificial_var = 1;
5030 var_readonly->offset = 0;
5031 var_readonly->size = ~0;
5032 var_readonly->fullsize = ~0;
5033 var_readonly->next = NULL;
5034 var_readonly->is_special_var = 1;
5035 insert_vi_for_tree (readonly_tree, var_readonly);
5036 readonly_id = 2;
5037 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
5039 /* readonly memory points to anything, in order to make deref
5040 easier. In reality, it points to anything the particular
5041 readonly variable can point to, but we don't track this
5042 separately. */
5043 lhs.type = SCALAR;
5044 lhs.var = readonly_id;
5045 lhs.offset = 0;
5046 rhs.type = ADDRESSOF;
5047 rhs.var = anything_id;
5048 rhs.offset = 0;
5050 process_constraint (new_constraint (lhs, rhs));
5052 /* Create the INTEGER variable, used to represent that a variable points
5053 to an INTEGER. */
5054 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
5055 var_integer = new_var_info (integer_tree, 3, "INTEGER");
5056 insert_vi_for_tree (integer_tree, var_integer);
5057 var_integer->is_artificial_var = 1;
5058 var_integer->size = ~0;
5059 var_integer->fullsize = ~0;
5060 var_integer->offset = 0;
5061 var_integer->next = NULL;
5062 var_integer->is_special_var = 1;
5063 integer_id = 3;
5064 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
5066 /* INTEGER = ANYTHING, because we don't know where a dereference of
5067 a random integer will point to. */
5068 lhs.type = SCALAR;
5069 lhs.var = integer_id;
5070 lhs.offset = 0;
5071 rhs.type = ADDRESSOF;
5072 rhs.var = anything_id;
5073 rhs.offset = 0;
5074 process_constraint (new_constraint (lhs, rhs));
5077 /* Initialize things necessary to perform PTA */
5079 static void
5080 init_alias_vars (void)
5082 bitmap_obstack_initialize (&pta_obstack);
5083 bitmap_obstack_initialize (&oldpta_obstack);
5084 bitmap_obstack_initialize (&predbitmap_obstack);
5086 constraint_pool = create_alloc_pool ("Constraint pool",
5087 sizeof (struct constraint), 30);
5088 variable_info_pool = create_alloc_pool ("Variable info pool",
5089 sizeof (struct variable_info), 30);
5090 constraints = VEC_alloc (constraint_t, heap, 8);
5091 varmap = VEC_alloc (varinfo_t, heap, 8);
5092 vi_for_tree = pointer_map_create ();
5094 memset (&stats, 0, sizeof (stats));
5095 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
5096 shared_bitmap_eq, free);
5097 init_base_vars ();
5100 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
5101 predecessor edges. */
5103 static void
5104 remove_preds_and_fake_succs (constraint_graph_t graph)
5106 unsigned int i;
5108 /* Clear the implicit ref and address nodes from the successor
5109 lists. */
5110 for (i = 0; i < FIRST_REF_NODE; i++)
5112 if (graph->succs[i])
5113 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
5114 FIRST_REF_NODE * 2);
5117 /* Free the successor list for the non-ref nodes. */
5118 for (i = FIRST_REF_NODE; i < graph->size; i++)
5120 if (graph->succs[i])
5121 BITMAP_FREE (graph->succs[i]);
5124 /* Now reallocate the successor list to its new size, and blow away
5125 the predecessor bitmaps. */
5126 graph->size = VEC_length (varinfo_t, varmap);
5127 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
5129 free (graph->implicit_preds);
5130 graph->implicit_preds = NULL;
5131 free (graph->preds);
5132 graph->preds = NULL;
5133 bitmap_obstack_release (&predbitmap_obstack);
5136 /* Compute the set of variables we can't TBAA prune. */
5138 static void
5139 compute_tbaa_pruning (void)
5141 unsigned int size = VEC_length (varinfo_t, varmap);
5142 unsigned int i;
5143 bool any;
5145 changed_count = 0;
5146 changed = sbitmap_alloc (size);
5147 sbitmap_zero (changed);
5149 /* Mark all initial no_tbaa_pruning nodes as changed. */
5150 any = false;
5151 for (i = 0; i < size; ++i)
5153 varinfo_t ivi = get_varinfo (i);
5155 if (find (i) == i && ivi->no_tbaa_pruning)
5157 any = true;
5158 if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
5159 || VEC_length (constraint_t, graph->complex[i]) > 0)
5161 SET_BIT (changed, i);
5162 ++changed_count;
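/* Propagate the no-TBAA-pruning property through copy constraints
   and successor edges until we reach a fixed point.  */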
5167 while (changed_count > 0)
5169 struct topo_info *ti = init_topo_info ();
5170 ++stats.iterations;
5172 compute_topo_order (graph, ti);
5174 while (VEC_length (unsigned, ti->topo_order) != 0)
5176 bitmap_iterator bi;
5178 i = VEC_pop (unsigned, ti->topo_order);
5180 /* If this variable is not a representative, skip it. */
5181 if (find (i) != i)
5182 continue;
5184 /* If the node has changed, we need to process the complex
5185 constraints and outgoing edges again. */
5186 if (TEST_BIT (changed, i))
5188 unsigned int j;
5189 constraint_t c;
5190 VEC(constraint_t,heap) *complex = graph->complex[i];
5192 RESET_BIT (changed, i);
5193 --changed_count;
5195 /* Process the complex copy constraints. */
5196 for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
5198 if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
5200 varinfo_t lhsvi = get_varinfo (find (c->lhs.var));
5202 if (!lhsvi->no_tbaa_pruning)
5204 lhsvi->no_tbaa_pruning = true;
5205 if (!TEST_BIT (changed, lhsvi->id))
5207 SET_BIT (changed, lhsvi->id);
5208 ++changed_count;
5214 /* Propagate to all successors. */
5215 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
5217 unsigned int to = find (j);
5218 varinfo_t tovi = get_varinfo (to);
5220 /* Don't propagate to ourselves. */
5221 if (to == i)
5222 continue;
5224 if (!tovi->no_tbaa_pruning)
5226 tovi->no_tbaa_pruning = true;
5227 if (!TEST_BIT (changed, to))
5229 SET_BIT (changed, to);
5230 ++changed_count;
5237 free_topo_info (ti);
5240 sbitmap_free (changed);
5242 if (any)
5244 for (i = 0; i < size; ++i)
5246 varinfo_t ivi = get_varinfo (i);
5247 varinfo_t ivip = get_varinfo (find (i));
5249 if (ivip->no_tbaa_pruning)
5251 tree var = ivi->decl;
5253 if (TREE_CODE (var) == SSA_NAME)
5254 var = SSA_NAME_VAR (var);
5256 if (POINTER_TYPE_P (TREE_TYPE (var)))
5258 DECL_NO_TBAA_P (var) = 1;
5260 /* Tell the RTL layer that this pointer can alias
5261 anything. */
5262 DECL_POINTER_ALIAS_SET (var) = 0;
5269 /* Create points-to sets for the current function. See the comments
5270 at the start of the file for an algorithmic overview. */
5272 void
5273 compute_points_to_sets (struct alias_info *ai)
5275 struct scc_info *si;
5276 basic_block bb;
5278 timevar_push (TV_TREE_PTA);
5280 init_alias_vars ();
5281 init_alias_heapvars ();
5283 intra_create_variable_infos ();
5285 /* Now walk all statements and derive aliases. */
5286 FOR_EACH_BB (bb)
5288 block_stmt_iterator bsi;
5289 tree phi;
5291 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5293 if (is_gimple_reg (PHI_RESULT (phi)))
5295 find_func_aliases (phi);
5297 /* Update various related attributes like escaped
5298 addresses, pointer dereferences for loads and stores.
5299 This is used when creating name tags and alias
5300 sets. */
5301 update_alias_info (phi, ai);
5305 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
5307 tree stmt = bsi_stmt (bsi);
5309 find_func_aliases (stmt);
5311 /* Update various related attributes like escaped
5312 addresses, pointer dereferences for loads and stores.
5313 This is used when creating name tags and alias
5314 sets. */
5315 update_alias_info (stmt, ai);
5317 /* The information in CHANGE_DYNAMIC_TYPE_EXPR nodes has now
5318 been captured, and we can remove them. */
5319 if (TREE_CODE (stmt) == CHANGE_DYNAMIC_TYPE_EXPR)
5320 bsi_remove (&bsi, true);
5321 else
5322 bsi_next (&bsi);
5327 if (dump_file)
5329 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5330 dump_constraints (dump_file);
5333 if (dump_file)
5334 fprintf (dump_file,
5335 "\nCollapsing static cycles and doing variable "
5336 "substitution\n");
5338 init_graph (VEC_length (varinfo_t, varmap) * 2);
5340 if (dump_file)
5341 fprintf (dump_file, "Building predecessor graph\n");
5342 build_pred_graph ();
5344 if (dump_file)
5345 fprintf (dump_file, "Detecting pointer and location "
5346 "equivalences\n");
5347 si = perform_var_substitution (graph);
5349 if (dump_file)
5350 fprintf (dump_file, "Rewriting constraints and unifying "
5351 "variables\n");
5352 rewrite_constraints (graph, si);
5353 free_var_substitution_info (si);
5355 build_succ_graph ();
5356 move_complex_constraints (graph);
5358 if (dump_file)
5359 fprintf (dump_file, "Uniting pointer but not location equivalent "
5360 "variables\n");
5361 unite_pointer_equivalences (graph);
5363 if (dump_file)
5364 fprintf (dump_file, "Finding indirect cycles\n");
5365 find_indirect_cycles (graph);
5367 /* Implicit nodes and predecessors are no longer necessary at this
5368 point. */
5369 remove_preds_and_fake_succs (graph);
5371 if (dump_file)
5372 fprintf (dump_file, "Solving graph\n");
5374 solve_graph (graph);
5376 compute_tbaa_pruning ();
5378 if (dump_file)
5379 dump_sa_points_to_info (dump_file);
5381 have_alias_info = true;
5383 timevar_pop (TV_TREE_PTA);
5387 /* Delete created points-to sets. */
5389 void
5390 delete_points_to_sets (void)
5392 unsigned int i;
5394 htab_delete (shared_bitmap_table);
5395 if (dump_file && (dump_flags & TDF_STATS))
5396 fprintf (dump_file, "Points to sets created:%d\n",
5397 stats.points_to_sets_created);
5399 pointer_map_destroy (vi_for_tree);
5400 bitmap_obstack_release (&pta_obstack);
5401 VEC_free (constraint_t, heap, constraints);
5403 for (i = 0; i < graph->size; i++)
5404 VEC_free (constraint_t, heap, graph->complex[i]);
5405 free (graph->complex);
5407 free (graph->rep);
5408 free (graph->succs);
5409 free (graph->pe);
5410 free (graph->pe_rep);
5411 free (graph->indirect_cycles);
5412 free (graph);
5414 VEC_free (varinfo_t, heap, varmap);
5415 free_alloc_pool (variable_info_pool);
5416 free_alloc_pool (constraint_pool);
5417 have_alias_info = false;
5420 /* Return true if we should execute IPA PTA. */
5421 static bool
5422 gate_ipa_pta (void)
5424 return (flag_unit_at_a_time != 0
5425 && flag_ipa_pta
5426 /* Don't bother doing anything if the program has errors. */
5427 && !(errorcount || sorrycount));
5430 /* Execute the driver for IPA PTA. */
5431 static unsigned int
5432 ipa_pta_execute (void)
5434 struct cgraph_node *node;
5435 struct scc_info *si;
5437 in_ipa_mode = 1;
5438 init_alias_heapvars ();
5439 init_alias_vars ();
5441 for (node = cgraph_nodes; node; node = node->next)
5443 if (!node->analyzed || cgraph_is_master_clone (node))
5445 unsigned int varid;
5447 varid = create_function_info_for (node->decl,
5448 cgraph_node_name (node));
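/* An externally visible function can be called from code we cannot
   see, so its arguments and return value may point to anything.  */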
5449 if (node->local.externally_visible)
5451 varinfo_t fi = get_varinfo (varid);
5452 for (; fi; fi = fi->next)
5453 make_constraint_from_anything (fi);
5457 for (node = cgraph_nodes; node; node = node->next)
5459 if (node->analyzed && cgraph_is_master_clone (node))
5461 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
5462 basic_block bb;
5463 tree old_func_decl = current_function_decl;
5464 if (dump_file)
5465 fprintf (dump_file,
5466 "Generating constraints for %s\n",
5467 cgraph_node_name (node));
5468 push_cfun (func);
5469 current_function_decl = node->decl;
5471 FOR_EACH_BB_FN (bb, func)
5473 block_stmt_iterator bsi;
5474 tree phi;
5476 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5478 if (is_gimple_reg (PHI_RESULT (phi)))
5480 find_func_aliases (phi);
5484 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5486 tree stmt = bsi_stmt (bsi);
5487 find_func_aliases (stmt);
5490 current_function_decl = old_func_decl;
5491 pop_cfun ();
5493 else
5495 /* Make it point to anything. */
5499 if (dump_file)
5501 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5502 dump_constraints (dump_file);
5505 if (dump_file)
5506 fprintf (dump_file,
5507 "\nCollapsing static cycles and doing variable "
5508 "substitution:\n");
5510 init_graph (VEC_length (varinfo_t, varmap) * 2);
5511 build_pred_graph ();
5512 si = perform_var_substitution (graph);
5513 rewrite_constraints (graph, si);
5514 free_var_substitution_info (si);
5516 build_succ_graph ();
5517 move_complex_constraints (graph);
5518 unite_pointer_equivalences (graph);
5519 find_indirect_cycles (graph);
5521 /* Implicit nodes and predecessors are no longer necessary at this
5522 point. */
5523 remove_preds_and_fake_succs (graph);
5525 if (dump_file)
5526 fprintf (dump_file, "\nSolving graph\n");
5528 solve_graph (graph);
5530 if (dump_file)
5531 dump_sa_points_to_info (dump_file);
5533 in_ipa_mode = 0;
5534 delete_alias_heapvars ();
5535 delete_points_to_sets ();
5536 return 0;
5539 struct simple_ipa_opt_pass pass_ipa_pta =
5542 SIMPLE_IPA_PASS,
5543 "pta", /* name */
5544 gate_ipa_pta, /* gate */
5545 ipa_pta_execute, /* execute */
5546 NULL, /* sub */
5547 NULL, /* next */
5548 0, /* static_pass_number */
5549 TV_IPA_PTA, /* tv_id */
5550 0, /* properties_required */
5551 0, /* properties_provided */
5552 0, /* properties_destroyed */
5553 0, /* todo_flags_start */
5554 TODO_update_ssa /* todo_flags_finish */
5558 /* Initialize the heapvar-for-statement mapping. */
5559 void
5560 init_alias_heapvars (void)
5562 if (!heapvar_for_stmt)
5563 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
5564 NULL);
5567 void
5568 delete_alias_heapvars (void)
5570 htab_delete (heapvar_for_stmt);
5571 heapvar_for_stmt = NULL;
5575 #include "gt-tree-ssa-structalias.h"