1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "flags.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "output.h"
34 #include "errors.h"
35 #include "diagnostic.h"
36 #include "tree.h"
37 #include "c-common.h"
38 #include "tree-flow.h"
39 #include "tree-inline.h"
40 #include "varray.h"
41 #include "c-tree.h"
42 #include "tree-gimple.h"
43 #include "hashtab.h"
44 #include "function.h"
45 #include "cgraph.h"
46 #include "tree-pass.h"
47 #include "timevar.h"
48 #include "alloc-pool.h"
49 #include "splay-tree.h"
50 #include "params.h"
51 #include "tree-ssa-structalias.h"
52 #include "cgraph.h"
53 #include "alias.h"
54 #include "pointer-set.h"
56 /* The idea behind this analyzer is to generate set constraints from the
57 program, then solve the resulting constraints in order to generate the
58 points-to sets.
60 Set constraints are a way of modeling program analysis problems that
61 involve sets. They consist of an inclusion constraint language,
62 describing the variables (each variable is a set) and operations that
63 are involved on the variables, and a set of rules that derive facts
64 from these operations. To solve a system of set constraints, you derive
65 all possible facts under the rules, which gives you the correct sets
66 as a consequence.
68 See "Efficient Field-sensitive pointer analysis for C" by "David
69 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
70 http://citeseer.ist.psu.edu/pearce04efficient.html
72 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
73 of C Code in a Second" by ""Nevin Heintze and Olivier Tardieu" at
74 http://citeseer.ist.psu.edu/heintze01ultrafast.html
76 There are three types of real constraint expressions, DEREF,
77 ADDRESSOF, and SCALAR. Each constraint expression consists
78 of a constraint type, a variable, and an offset.
80 SCALAR is a constraint expression type used to represent x, whether
81 it appears on the LHS or the RHS of a statement.
82 DEREF is a constraint expression type used to represent *x, whether
83 it appears on the LHS or the RHS of a statement.
84 ADDRESSOF is a constraint expression used to represent &x, whether
85 it appears on the LHS or the RHS of a statement.
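As a rough, hand-worked illustration (not compiler output): in "p = &a"
the RHS is an ADDRESSOF expression for a; in "p = q" both sides are
SCALAR expressions; in "p = *q" the RHS is a DEREF expression for q; and
in "*p = q" the LHS is a DEREF expression for p.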
87 Each pointer variable in the program is assigned an integer id, and
88 each field of a structure variable is assigned an integer id as well.
90 Structure variables are linked to their list of fields through a "next
91 field" in each variable that points to the next field in offset
92 order.
93 Each variable for a structure field has
95 1. "size", that tells the size in bits of that field.
96 2. "fullsize, that tells the size in bits of the entire structure.
97 3. "offset", that tells the offset in bits from the beginning of the
98 structure to this field.
100 Thus,
101 struct f
103 int a;
104 int b;
105 } foo;
106 int *bar;
108 looks like
110 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
111 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
112 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
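(A field at a given bit offset within a base variable is then found by
walking these "next field" links; see first_vi_for_offset, which is what
the solver uses to map an offsetted reference back to a field.)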
115 In order to solve the system of set constraints, the following is
116 done:
118 1. Each constraint variable x has a solution set associated with it,
119 Sol(x).
121 2. Constraints are separated into direct, copy, and complex.
122 Direct constraints are ADDRESSOF constraints that require no extra
123 processing, such as P = &Q
124 Copy constraints are those of the form P = Q.
125 Complex constraints are all the constraints involving dereferences
126 and offsets (including offsetted copies).
128 3. All direct constraints of the form P = &Q are processed, such
129 that Q is added to Sol(P)
131 4. All complex constraints for a given constraint variable are stored in a
132 linked list attached to that variable's node.
134 5. A directed graph is built out of the copy constraints. Each
135 constraint variable is a node in the graph, and an edge from
136 Q to P is added for each copy constraint of the form P = Q
138 6. The graph is then walked, and solution sets are
139 propagated along the copy edges, such that an edge from Q to P
140 causes Sol(P) <- Sol(P) union Sol(Q).
142 7. As we visit each node, all complex constraints associated with
143 that node are processed by adding appropriate copy edges to the graph, or the
144 appropriate variables to the solution set.
146 8. The process of walking the graph is iterated until no solution
147 sets change.
149 Prior to walking the graph in steps 6 and 7, we perform static
150 cycle elimination on the constraint graph, as well
151 as off-line variable substitution.
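As a small, hand-worked example of the process above (not compiler
output), consider the constraints

  p = &a
  q = p
  r = *q

Step 3 puts a into Sol(p).  Step 5 adds a copy edge from p to q, and
propagation in step 6 gives Sol(q) = {a}.  The complex constraint r = *q
is attached to q in step 4; when q is visited in step 7, each member of
Sol(q) (here just a) gets a copy edge to r, so Sol(r) eventually
contains everything in Sol(a).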
153 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
154 on and turned into anything), but isn't. You can just see what offset
155 inside the pointed-to struct it's going to access.
157 TODO: Constant bounded arrays can be handled as if they were structs of the
158 same number of elements.
160 TODO: Modeling heap and incoming pointers becomes much better if we
161 add fields to them as we discover them, which we could do.
163 TODO: We could handle unions, but to be honest, it's probably not
164 worth the pain or slowdown. */
166 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
167 htab_t heapvar_for_stmt;
169 static bool use_field_sensitive = true;
170 static int in_ipa_mode = 0;
172 /* Used for predecessor bitmaps. */
173 static bitmap_obstack predbitmap_obstack;
175 /* Used for points-to sets. */
176 static bitmap_obstack pta_obstack;
178 /* Used for oldsolution members of variables. */
179 static bitmap_obstack oldpta_obstack;
181 /* Used for per-solver-iteration bitmaps. */
182 static bitmap_obstack iteration_obstack;
184 static unsigned int create_variable_info_for (tree, const char *);
185 typedef struct constraint_graph *constraint_graph_t;
186 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
188 DEF_VEC_P(constraint_t);
189 DEF_VEC_ALLOC_P(constraint_t,heap);
191 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
192 if (a) \
193 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
195 static struct constraint_stats
197 unsigned int total_vars;
198 unsigned int nonpointer_vars;
199 unsigned int unified_vars_static;
200 unsigned int unified_vars_dynamic;
201 unsigned int iterations;
202 unsigned int num_edges;
203 unsigned int num_implicit_edges;
204 unsigned int points_to_sets_created;
205 } stats;
207 struct variable_info
209 /* ID of this variable */
210 unsigned int id;
212 /* Name of this variable */
213 const char *name;
215 /* Tree that this variable is associated with. */
216 tree decl;
218 /* Offset of this variable, in bits, from the base variable */
219 unsigned HOST_WIDE_INT offset;
221 /* Size of the variable, in bits. */
222 unsigned HOST_WIDE_INT size;
224 /* Full size of the base variable, in bits. */
225 unsigned HOST_WIDE_INT fullsize;
227 /* A link to the variable for the next field in this structure. */
228 struct variable_info *next;
230 /* True if the variable is directly the target of a dereference.
231 This is used to track which variables are *actually* dereferenced
232 so we can prune their points-to sets. */
233 unsigned int directly_dereferenced:1;
235 /* True if this is a variable created by the constraint analysis, such as
236 heap variables and constraints we had to break up. */
237 unsigned int is_artificial_var:1;
239 /* True if this is a special variable whose solution set should not be
240 changed. */
241 unsigned int is_special_var:1;
243 /* True for variables whose size is not known or variable. */
244 unsigned int is_unknown_size_var:1;
246 /* True for variables that have unions somewhere in them. */
247 unsigned int has_union:1;
249 /* True if this is a heap variable. */
250 unsigned int is_heap_var:1;
252 /* True if we may not use TBAA to prune references to this
253 variable. This is used for C++ placement new. */
254 unsigned int no_tbaa_pruning : 1;
256 /* Points-to set for this variable. */
257 bitmap solution;
259 /* Old points-to set for this variable. */
260 bitmap oldsolution;
262 /* Variable id this was collapsed to due to type unsafety. This
263 should be unused completely after build_succ_graph, or something
264 is broken. */
265 struct variable_info *collapsed_to;
267 typedef struct variable_info *varinfo_t;
269 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
271 /* Pool of variable info structures. */
272 static alloc_pool variable_info_pool;
274 DEF_VEC_P(varinfo_t);
276 DEF_VEC_ALLOC_P(varinfo_t, heap);
278 /* Table of variable info structures for constraint variables.
279 Indexed directly by variable info id. */
280 static VEC(varinfo_t,heap) *varmap;
282 /* Return the varmap element N */
284 static inline varinfo_t
285 get_varinfo (unsigned int n)
287 return VEC_index (varinfo_t, varmap, n);
290 /* Return the varmap element N, following the collapsed_to link. */
292 static inline varinfo_t
293 get_varinfo_fc (unsigned int n)
295 varinfo_t v = VEC_index (varinfo_t, varmap, n);
297 if (v->collapsed_to)
298 return v->collapsed_to;
299 return v;
302 /* Variable that represents the unknown pointer. */
303 static varinfo_t var_anything;
304 static tree anything_tree;
305 static unsigned int anything_id;
307 /* Variable that represents the NULL pointer. */
308 static varinfo_t var_nothing;
309 static tree nothing_tree;
310 static unsigned int nothing_id;
312 /* Variable that represents read only memory. */
313 static varinfo_t var_readonly;
314 static tree readonly_tree;
315 static unsigned int readonly_id;
317 /* Variable that represents integers. This is used for when people do things
318 like &0->a.b. */
319 static varinfo_t var_integer;
320 static tree integer_tree;
321 static unsigned int integer_id;
323 /* Lookup a heap var for FROM, and return it if we find one. */
325 static tree
326 heapvar_lookup (tree from)
328 struct tree_map *h, in;
329 in.base.from = from;
331 h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
332 htab_hash_pointer (from));
333 if (h)
334 return h->to;
335 return NULL_TREE;
338 /* Insert a mapping FROM->TO in the heap var for statement
339 hashtable. */
341 static void
342 heapvar_insert (tree from, tree to)
344 struct tree_map *h;
345 void **loc;
347 h = GGC_NEW (struct tree_map);
348 h->hash = htab_hash_pointer (from);
349 h->base.from = from;
350 h->to = to;
351 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
352 *(struct tree_map **) loc = h;
355 /* Return a new variable info structure for a variable named NAME,
356 associated with tree T and assigned id ID. */
358 static varinfo_t
359 new_var_info (tree t, unsigned int id, const char *name)
361 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
362 tree var;
364 ret->id = id;
365 ret->name = name;
366 ret->decl = t;
367 ret->directly_dereferenced = false;
368 ret->is_artificial_var = false;
369 ret->is_heap_var = false;
370 ret->is_special_var = false;
371 ret->is_unknown_size_var = false;
372 ret->has_union = false;
373 var = t;
374 if (TREE_CODE (var) == SSA_NAME)
375 var = SSA_NAME_VAR (var);
376 ret->no_tbaa_pruning = (DECL_P (var)
377 && POINTER_TYPE_P (TREE_TYPE (var))
378 && DECL_NO_TBAA_P (var));
379 ret->solution = BITMAP_ALLOC (&pta_obstack);
380 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
381 ret->next = NULL;
382 ret->collapsed_to = NULL;
383 return ret;
386 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
388 /* An expression that appears in a constraint. */
390 struct constraint_expr
392 /* Constraint type. */
393 constraint_expr_type type;
395 /* Variable we are referring to in the constraint. */
396 unsigned int var;
398 /* Offset, in bits, of this constraint from the beginning of the
399 variable it ends up referring to.
401 IOW, in a deref constraint, we would deref, get the result set,
402 then add OFFSET to each member. */
403 unsigned HOST_WIDE_INT offset;
406 typedef struct constraint_expr ce_s;
407 DEF_VEC_O(ce_s);
408 DEF_VEC_ALLOC_O(ce_s, heap);
409 static void get_constraint_for (tree, VEC(ce_s, heap) **);
410 static void do_deref (VEC (ce_s, heap) **);
412 /* Our set constraints are made up of two constraint expressions, one
413 LHS, and one RHS.
415 As described in the introduction, our set constraints each represent an
416 operation between set valued variables.
418 struct constraint
420 struct constraint_expr lhs;
421 struct constraint_expr rhs;
424 /* List of constraints that we use to build the constraint graph from. */
426 static VEC(constraint_t,heap) *constraints;
427 static alloc_pool constraint_pool;
430 DEF_VEC_I(int);
431 DEF_VEC_ALLOC_I(int, heap);
433 /* The constraint graph is represented as an array of bitmaps
434 containing successor nodes. */
436 struct constraint_graph
438 /* Size of this graph, which may be different than the number of
439 nodes in the variable map. */
440 unsigned int size;
442 /* Explicit successors of each node. */
443 bitmap *succs;
445 /* Implicit predecessors of each node (Used for variable
446 substitution). */
447 bitmap *implicit_preds;
449 /* Explicit predecessors of each node (Used for variable substitution). */
450 bitmap *preds;
452 /* Indirect cycle representatives, or -1 if the node has no indirect
453 cycles. */
454 int *indirect_cycles;
456 /* Representative node for a node. rep[a] == a unless the node has
457 been unified. */
458 unsigned int *rep;
460 /* Equivalence class representative for a label. This is used for
461 variable substitution. */
462 int *eq_rep;
464 /* Pointer equivalence label for a node. All nodes with the same
465 pointer equivalence label can be unified together at some point
466 (either during constraint optimization or after the constraint
467 graph is built). */
468 unsigned int *pe;
470 /* Pointer equivalence representative for a label. This is used to
471 handle nodes that are pointer equivalent but not location
472 equivalent. We can unite these once the addressof constraints
473 are transformed into initial points-to sets. */
474 int *pe_rep;
476 /* Pointer equivalence label for each node, used during variable
477 substitution. */
478 unsigned int *pointer_label;
480 /* Location equivalence label for each node, used during location
481 equivalence finding. */
482 unsigned int *loc_label;
484 /* Pointed-by set for each node, used during location equivalence
485 finding. This is pointed-by rather than pointed-to, because it
486 is constructed using the predecessor graph. */
487 bitmap *pointed_by;
489 /* Points to sets for pointer equivalence. This is *not* the actual
490 points-to sets for nodes. */
491 bitmap *points_to;
493 /* Bitmap of nodes where the bit is set if the node is a direct
494 node. Used for variable substitution. */
495 sbitmap direct_nodes;
497 /* Bitmap of nodes where the bit is set if the node is address
498 taken. Used for variable substitution. */
499 bitmap address_taken;
501 /* True if points_to bitmap for this node is stored in the hash
502 table. */
503 sbitmap pt_used;
505 /* Number of incoming edges remaining to be processed by pointer
506 equivalence.
507 Used for variable substitution. */
508 unsigned int *number_incoming;
511 /* Vector of complex constraints for each graph node. Complex
512 constraints are those involving dereferences or offsets that are
513 not 0. */
514 VEC(constraint_t,heap) **complex;
517 static constraint_graph_t graph;
519 /* During variable substitution and the offline version of indirect
520 cycle finding, we create nodes to represent dereferences and
521 address taken constraints. These represent where these start and
522 end. */
523 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
524 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
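/* Roughly, if varmap has N entries, node FIRST_REF_NODE + k stands for
   "*k", the dereference of the variable with id k; see the uses in
   build_pred_graph and build_succ_graph below.  */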
526 /* Return the representative node for NODE, if NODE has been unioned
527 with another NODE.
528 This function performs path compression along the way to finding
529 the representative. */
531 static unsigned int
532 find (unsigned int node)
534 gcc_assert (node < graph->size);
535 if (graph->rep[node] != node)
536 return graph->rep[node] = find (graph->rep[node]);
537 return node;
540 /* Union the TO and FROM nodes into the TO node.
541 Note that at some point in the future, we may want to do
542 union-by-rank, in which case we are going to have to return the
543 node we unified to. */
545 static bool
546 unite (unsigned int to, unsigned int from)
548 gcc_assert (to < graph->size && from < graph->size);
549 if (to != from && graph->rep[from] != to)
551 graph->rep[from] = to;
552 return true;
554 return false;
557 /* Create a new constraint consisting of LHS and RHS expressions. */
559 static constraint_t
560 new_constraint (const struct constraint_expr lhs,
561 const struct constraint_expr rhs)
563 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
564 ret->lhs = lhs;
565 ret->rhs = rhs;
566 return ret;
569 /* Print out constraint C to FILE. */
571 void
572 dump_constraint (FILE *file, constraint_t c)
574 if (c->lhs.type == ADDRESSOF)
575 fprintf (file, "&");
576 else if (c->lhs.type == DEREF)
577 fprintf (file, "*");
578 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
579 if (c->lhs.offset != 0)
580 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
581 fprintf (file, " = ");
582 if (c->rhs.type == ADDRESSOF)
583 fprintf (file, "&");
584 else if (c->rhs.type == DEREF)
585 fprintf (file, "*");
586 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
587 if (c->rhs.offset != 0)
588 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
589 fprintf (file, "\n");
592 /* Print out constraint C to stderr. */
594 void
595 debug_constraint (constraint_t c)
597 dump_constraint (stderr, c);
600 /* Print out all constraints to FILE */
602 void
603 dump_constraints (FILE *file)
605 int i;
606 constraint_t c;
607 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
608 dump_constraint (file, c);
611 /* Print out all constraints to stderr. */
613 void
614 debug_constraints (void)
616 dump_constraints (stderr);
619 /* SOLVER FUNCTIONS
621 The solver is a simple worklist solver that works on the following
622 algorithm:
624 sbitmap changed_nodes = all zeroes;
625 changed_count = 0;
626 For each node that is not already collapsed:
627 changed_count++;
628 set bit in changed nodes
630 while (changed_count > 0)
632 compute topological ordering for constraint graph
634 find and collapse cycles in the constraint graph (updating
635 changed if necessary)
637 for each node (n) in the graph in topological order:
638 changed_count--;
640 Process each complex constraint associated with the node,
641 updating changed if necessary.
643 For each outgoing edge from n, propagate the solution from n to
644 the destination of the edge, updating changed as necessary.
646 } */
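/* Concretely, propagating along a copy edge from Q to P computes
   Sol(P) = Sol(P) union Sol(Q); whenever that union actually adds new
   members, P's bit is set in the changed sbitmap and changed_count is
   incremented so that P gets visited again on a later iteration.  */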
648 /* Return true if two constraint expressions A and B are equal. */
650 static bool
651 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
653 return a.type == b.type && a.var == b.var && a.offset == b.offset;
656 /* Return true if constraint expression A is less than constraint expression
657 B. This is just arbitrary, but consistent, in order to give them an
658 ordering. */
660 static bool
661 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
663 if (a.type == b.type)
665 if (a.var == b.var)
666 return a.offset < b.offset;
667 else
668 return a.var < b.var;
670 else
671 return a.type < b.type;
674 /* Return true if constraint A is less than constraint B. This is just
675 arbitrary, but consistent, in order to give them an ordering. */
677 static bool
678 constraint_less (const constraint_t a, const constraint_t b)
680 if (constraint_expr_less (a->lhs, b->lhs))
681 return true;
682 else if (constraint_expr_less (b->lhs, a->lhs))
683 return false;
684 else
685 return constraint_expr_less (a->rhs, b->rhs);
688 /* Return true if two constraints A and B are equal. */
690 static bool
691 constraint_equal (struct constraint a, struct constraint b)
693 return constraint_expr_equal (a.lhs, b.lhs)
694 && constraint_expr_equal (a.rhs, b.rhs);
698 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
700 static constraint_t
701 constraint_vec_find (VEC(constraint_t,heap) *vec,
702 struct constraint lookfor)
704 unsigned int place;
705 constraint_t found;
707 if (vec == NULL)
708 return NULL;
710 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
711 if (place >= VEC_length (constraint_t, vec))
712 return NULL;
713 found = VEC_index (constraint_t, vec, place);
714 if (!constraint_equal (*found, lookfor))
715 return NULL;
716 return found;
719 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
721 static void
722 constraint_set_union (VEC(constraint_t,heap) **to,
723 VEC(constraint_t,heap) **from)
725 int i;
726 constraint_t c;
728 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
730 if (constraint_vec_find (*to, *c) == NULL)
732 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
733 constraint_less);
734 VEC_safe_insert (constraint_t, heap, *to, place, c);
739 /* Take a solution set SET, add OFFSET to each member of the set, and
740 overwrite SET with the result when done. */
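/* For example, with the struct f layout from the introduction, adding an
   OFFSET of 32 to the set {foo.a} yields {foo.b}.  Members whose field
   offset plus OFFSET would run past the base variable's full size are
   dropped, unless the variable is globbed to a single node (artificial,
   union-containing or unknown-size), in which case the member is kept
   unchanged.  */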
742 static void
743 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
745 bitmap result = BITMAP_ALLOC (&iteration_obstack);
746 unsigned int i;
747 bitmap_iterator bi;
749 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
751 /* If this is a properly sized variable, only add offset if it's
752 less than end. Otherwise, it is globbed to a single
753 variable. */
755 if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
757 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
758 varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
759 if (!v)
760 continue;
761 bitmap_set_bit (result, v->id);
763 else if (get_varinfo (i)->is_artificial_var
764 || get_varinfo (i)->has_union
765 || get_varinfo (i)->is_unknown_size_var)
767 bitmap_set_bit (result, i);
771 bitmap_copy (set, result);
772 BITMAP_FREE (result);
775 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
776 process. */
778 static bool
779 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
781 if (inc == 0)
782 return bitmap_ior_into (to, from);
783 else
785 bitmap tmp;
786 bool res;
788 tmp = BITMAP_ALLOC (&iteration_obstack);
789 bitmap_copy (tmp, from);
790 solution_set_add (tmp, inc);
791 res = bitmap_ior_into (to, tmp);
792 BITMAP_FREE (tmp);
793 return res;
797 /* Insert constraint C into the list of complex constraints for graph
798 node VAR. */
800 static void
801 insert_into_complex (constraint_graph_t graph,
802 unsigned int var, constraint_t c)
804 VEC (constraint_t, heap) *complex = graph->complex[var];
805 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
806 constraint_less);
808 /* Only insert constraints that do not already exist. */
809 if (place >= VEC_length (constraint_t, complex)
810 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
811 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
815 /* Condense two variable nodes into a single variable node, by moving
816 all associated info from FROM to TO. */
818 static void
819 merge_node_constraints (constraint_graph_t graph, unsigned int to,
820 unsigned int from)
822 unsigned int i;
823 constraint_t c;
825 gcc_assert (find (from) == to);
827 /* Move all complex constraints from node FROM into node TO. */
828 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
830 /* In complex constraints for node FROM, we may have either
831 a = *FROM or *FROM = a, or an offsetted constraint, which is
832 always added to the rhs node's constraints. */
834 if (c->rhs.type == DEREF)
835 c->rhs.var = to;
836 else if (c->lhs.type == DEREF)
837 c->lhs.var = to;
838 else
839 c->rhs.var = to;
841 constraint_set_union (&graph->complex[to], &graph->complex[from]);
842 VEC_free (constraint_t, heap, graph->complex[from]);
843 graph->complex[from] = NULL;
847 /* Remove edges involving NODE from GRAPH. */
849 static void
850 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
852 if (graph->succs[node])
853 BITMAP_FREE (graph->succs[node]);
856 /* Merge GRAPH nodes FROM and TO into node TO. */
858 static void
859 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
860 unsigned int from)
862 if (graph->indirect_cycles[from] != -1)
864 /* If we have indirect cycles with the from node, and we have
865 none on the to node, the to node has indirect cycles from the
866 from node now that they are unified.
867 If indirect cycles exist on both, unify the nodes that they
868 are in a cycle with, since we know they are in a cycle with
869 each other. */
870 if (graph->indirect_cycles[to] == -1)
871 graph->indirect_cycles[to] = graph->indirect_cycles[from];
874 /* Merge all the successor edges. */
875 if (graph->succs[from])
877 if (!graph->succs[to])
878 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
879 bitmap_ior_into (graph->succs[to],
880 graph->succs[from]);
883 clear_edges_for_node (graph, from);
887 /* Add an implicit predecessor graph edge to GRAPH, going from TO to FROM if
888 it doesn't exist in the graph already. */
890 static void
891 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
892 unsigned int from)
894 if (to == from)
895 return;
897 if (!graph->implicit_preds[to])
898 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
900 if (!bitmap_bit_p (graph->implicit_preds[to], from))
902 stats.num_implicit_edges++;
903 bitmap_set_bit (graph->implicit_preds[to], from);
907 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
908 it doesn't exist in the graph already. */
911 static void
912 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
913 unsigned int from)
915 if (!graph->preds[to])
916 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
917 if (!bitmap_bit_p (graph->preds[to], from))
918 bitmap_set_bit (graph->preds[to], from);
921 /* Add a graph edge to GRAPH, going from FROM to TO if
922 it doesn't exist in the graph already.
923 Return false if the edge already existed, true otherwise. */
925 static bool
926 add_graph_edge (constraint_graph_t graph, unsigned int to,
927 unsigned int from)
929 if (to == from)
931 return false;
933 else
935 bool r = false;
937 if (!graph->succs[from])
938 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
939 if (!bitmap_bit_p (graph->succs[from], to))
941 r = true;
942 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
943 stats.num_edges++;
944 bitmap_set_bit (graph->succs[from], to);
946 return r;
951 /* Return true if {DEST, SRC} is an existing graph edge in GRAPH. */
953 static bool
954 valid_graph_edge (constraint_graph_t graph, unsigned int src,
955 unsigned int dest)
957 return (graph->succs[dest]
958 && bitmap_bit_p (graph->succs[dest], src));
961 /* Initialize the constraint graph structure to contain SIZE nodes. */
963 static void
964 init_graph (unsigned int size)
966 unsigned int j;
968 graph = XCNEW (struct constraint_graph);
969 graph->size = size;
970 graph->succs = XCNEWVEC (bitmap, graph->size);
971 graph->indirect_cycles = XNEWVEC (int, graph->size);
972 graph->rep = XNEWVEC (unsigned int, graph->size);
973 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
974 graph->pe = XCNEWVEC (unsigned int, graph->size);
975 graph->pe_rep = XNEWVEC (int, graph->size);
977 for (j = 0; j < graph->size; j++)
979 graph->rep[j] = j;
980 graph->pe_rep[j] = -1;
981 graph->indirect_cycles[j] = -1;
985 /* Build the constraint graph, adding only predecessor edges right now. */
987 static void
988 build_pred_graph (void)
990 int i;
991 constraint_t c;
992 unsigned int j;
994 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
995 graph->preds = XCNEWVEC (bitmap, graph->size);
996 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
997 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
998 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
999 graph->points_to = XCNEWVEC (bitmap, graph->size);
1000 graph->eq_rep = XNEWVEC (int, graph->size);
1001 graph->direct_nodes = sbitmap_alloc (graph->size);
1002 graph->pt_used = sbitmap_alloc (graph->size);
1003 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1004 graph->number_incoming = XCNEWVEC (unsigned int, graph->size);
1005 sbitmap_zero (graph->direct_nodes);
1006 sbitmap_zero (graph->pt_used);
1008 for (j = 0; j < FIRST_REF_NODE; j++)
1010 if (!get_varinfo (j)->is_special_var)
1011 SET_BIT (graph->direct_nodes, j);
1014 for (j = 0; j < graph->size; j++)
1015 graph->eq_rep[j] = -1;
1017 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1018 graph->indirect_cycles[j] = -1;
1020 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1022 struct constraint_expr lhs = c->lhs;
1023 struct constraint_expr rhs = c->rhs;
1024 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
1025 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
1027 if (lhs.type == DEREF)
1029 /* *x = y. */
1030 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1031 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1033 else if (rhs.type == DEREF)
1035 /* x = *y */
1036 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1037 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1038 else
1039 RESET_BIT (graph->direct_nodes, lhsvar);
1041 else if (rhs.type == ADDRESSOF)
1043 /* x = &y */
1044 if (graph->points_to[lhsvar] == NULL)
1045 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1046 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1048 if (graph->pointed_by[rhsvar] == NULL)
1049 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1050 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1052 /* Implicitly, *x = y */
1053 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1055 RESET_BIT (graph->direct_nodes, rhsvar);
1056 bitmap_set_bit (graph->address_taken, rhsvar);
1058 else if (lhsvar > anything_id
1059 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1061 /* x = y */
1062 add_pred_graph_edge (graph, lhsvar, rhsvar);
1063 /* Implicitly, *x = *y */
1064 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1065 FIRST_REF_NODE + rhsvar);
1067 else if (lhs.offset != 0 || rhs.offset != 0)
1069 if (rhs.offset != 0)
1070 RESET_BIT (graph->direct_nodes, lhs.var);
1071 else if (lhs.offset != 0)
1072 RESET_BIT (graph->direct_nodes, rhs.var);
1077 /* Build the constraint graph, adding successor edges. */
1079 static void
1080 build_succ_graph (void)
1082 int i;
1083 constraint_t c;
1085 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1087 struct constraint_expr lhs;
1088 struct constraint_expr rhs;
1089 unsigned int lhsvar;
1090 unsigned int rhsvar;
1092 if (!c)
1093 continue;
1095 lhs = c->lhs;
1096 rhs = c->rhs;
1097 lhsvar = find (get_varinfo_fc (lhs.var)->id);
1098 rhsvar = find (get_varinfo_fc (rhs.var)->id);
1100 if (lhs.type == DEREF)
1102 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1103 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1105 else if (rhs.type == DEREF)
1107 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1108 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1110 else if (rhs.type == ADDRESSOF)
1112 /* x = &y */
1113 gcc_assert (find (get_varinfo_fc (rhs.var)->id)
1114 == get_varinfo_fc (rhs.var)->id);
1115 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1117 else if (lhsvar > anything_id
1118 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1120 add_graph_edge (graph, lhsvar, rhsvar);
1126 /* Changed variables on the last iteration. */
1127 static unsigned int changed_count;
1128 static sbitmap changed;
1130 DEF_VEC_I(unsigned);
1131 DEF_VEC_ALLOC_I(unsigned,heap);
1134 /* Strongly Connected Component visitation info. */
1136 struct scc_info
1138 sbitmap visited;
1139 sbitmap deleted;
1140 unsigned int *dfs;
1141 unsigned int *node_mapping;
1142 int current_index;
1143 VEC(unsigned,heap) *scc_stack;
1147 /* Recursive routine to find strongly connected components in GRAPH.
1148 SI is the SCC info to store the information in, and N is the id of the
1149 current graph node we are processing.
1151 This is Tarjan's strongly connected component finding algorithm, as
1152 modified by Nuutila to keep only non-root nodes on the stack.
1153 The algorithm can be found in "On finding the strongly connected
1154 components in a directed graph" by Esko Nuutila and Eljas
1155 Soisalon-Soininen, in Information Processing Letters volume 49,
1156 number 1, pages 9-14. */
1158 static void
1159 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1161 unsigned int i;
1162 bitmap_iterator bi;
1163 unsigned int my_dfs;
1165 SET_BIT (si->visited, n);
1166 si->dfs[n] = si->current_index ++;
1167 my_dfs = si->dfs[n];
1169 /* Visit all the successors. */
1170 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1172 unsigned int w;
1174 if (i > LAST_REF_NODE)
1175 break;
1177 w = find (i);
1178 if (TEST_BIT (si->deleted, w))
1179 continue;
1181 if (!TEST_BIT (si->visited, w))
1182 scc_visit (graph, si, w);
1184 unsigned int t = find (w);
1185 unsigned int nnode = find (n);
1186 gcc_assert (nnode == n);
1188 if (si->dfs[t] < si->dfs[nnode])
1189 si->dfs[n] = si->dfs[t];
1193 /* See if any components have been identified. */
1194 if (si->dfs[n] == my_dfs)
1196 if (VEC_length (unsigned, si->scc_stack) > 0
1197 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1199 bitmap scc = BITMAP_ALLOC (NULL);
1200 bool have_ref_node = n >= FIRST_REF_NODE;
1201 unsigned int lowest_node;
1202 bitmap_iterator bi;
1204 bitmap_set_bit (scc, n);
1206 while (VEC_length (unsigned, si->scc_stack) != 0
1207 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1209 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1211 bitmap_set_bit (scc, w);
1212 if (w >= FIRST_REF_NODE)
1213 have_ref_node = true;
1216 lowest_node = bitmap_first_set_bit (scc);
1217 gcc_assert (lowest_node < FIRST_REF_NODE);
1219 /* Collapse the SCC nodes into a single node, and mark the
1220 indirect cycles. */
1221 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1223 if (i < FIRST_REF_NODE)
1225 if (unite (lowest_node, i))
1226 unify_nodes (graph, lowest_node, i, false);
1228 else
1230 unite (lowest_node, i);
1231 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1235 SET_BIT (si->deleted, n);
1237 else
1238 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1241 /* Unify node FROM into node TO, updating the changed count if
1242 necessary when UPDATE_CHANGED is true. */
1244 static void
1245 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1246 bool update_changed)
1249 gcc_assert (to != from && find (to) == to);
1250 if (dump_file && (dump_flags & TDF_DETAILS))
1251 fprintf (dump_file, "Unifying %s to %s\n",
1252 get_varinfo (from)->name,
1253 get_varinfo (to)->name);
1255 if (update_changed)
1256 stats.unified_vars_dynamic++;
1257 else
1258 stats.unified_vars_static++;
1260 merge_graph_nodes (graph, to, from);
1261 merge_node_constraints (graph, to, from);
1263 if (get_varinfo (from)->no_tbaa_pruning)
1264 get_varinfo (to)->no_tbaa_pruning = true;
1266 /* Mark TO as changed if FROM was changed. If TO was already marked
1267 as changed, decrease the changed count. */
1269 if (update_changed && TEST_BIT (changed, from))
1271 RESET_BIT (changed, from);
1272 if (!TEST_BIT (changed, to))
1273 SET_BIT (changed, to);
1274 else
1276 gcc_assert (changed_count > 0);
1277 changed_count--;
1280 if (get_varinfo (from)->solution)
1282 /* If the solution changes because of the merging, we need to mark
1283 the variable as changed. */
1284 if (bitmap_ior_into (get_varinfo (to)->solution,
1285 get_varinfo (from)->solution))
1287 if (update_changed && !TEST_BIT (changed, to))
1289 SET_BIT (changed, to);
1290 changed_count++;
1294 BITMAP_FREE (get_varinfo (from)->solution);
1295 BITMAP_FREE (get_varinfo (from)->oldsolution);
1297 if (stats.iterations > 0)
1299 BITMAP_FREE (get_varinfo (to)->oldsolution);
1300 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
1303 if (valid_graph_edge (graph, to, to))
1305 if (graph->succs[to])
1306 bitmap_clear_bit (graph->succs[to], to);
1310 /* Information needed to compute the topological ordering of a graph. */
1312 struct topo_info
1314 /* sbitmap of visited nodes. */
1315 sbitmap visited;
1316 /* Array that stores the topological order of the graph, *in
1317 reverse*. */
1318 VEC(unsigned,heap) *topo_order;
1322 /* Initialize and return a topological info structure. */
1324 static struct topo_info *
1325 init_topo_info (void)
1327 size_t size = graph->size;
1328 struct topo_info *ti = XNEW (struct topo_info);
1329 ti->visited = sbitmap_alloc (size);
1330 sbitmap_zero (ti->visited);
1331 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1332 return ti;
1336 /* Free the topological sort info pointed to by TI. */
1338 static void
1339 free_topo_info (struct topo_info *ti)
1341 sbitmap_free (ti->visited);
1342 VEC_free (unsigned, heap, ti->topo_order);
1343 free (ti);
1346 /* Visit the graph in topological order, and store the order in the
1347 topo_info structure. */
1349 static void
1350 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1351 unsigned int n)
1353 bitmap_iterator bi;
1354 unsigned int j;
1356 SET_BIT (ti->visited, n);
1358 if (graph->succs[n])
1359 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1361 if (!TEST_BIT (ti->visited, j))
1362 topo_visit (graph, ti, j);
1365 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1368 /* Return true if variable N + OFFSET is a legal field of N. */
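/* For instance, with the struct f layout from the introduction, an offset
   of 32 from foo.a is type safe (0 + 32 < 64, landing on foo.b), while an
   offset of 32 from foo.b is not (32 + 32 is not less than the full size
   of 64).  */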
1370 static bool
1371 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1373 varinfo_t ninfo = get_varinfo (n);
1375 /* For things we've globbed to single variables, any offset into the
1376 variable acts like the entire variable, so that it becomes offset
1377 0. */
1378 if (ninfo->is_special_var
1379 || ninfo->is_artificial_var
1380 || ninfo->is_unknown_size_var)
1382 *offset = 0;
1383 return true;
1385 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1388 /* Process a constraint C that represents x = *y, using DELTA as the
1389 starting solution. */
1391 static void
1392 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1393 bitmap delta)
1395 unsigned int lhs = c->lhs.var;
1396 bool flag = false;
1397 bitmap sol = get_varinfo (lhs)->solution;
1398 unsigned int j;
1399 bitmap_iterator bi;
1401 if (bitmap_bit_p (delta, anything_id))
1403 flag = !bitmap_bit_p (sol, anything_id);
1404 if (flag)
1405 bitmap_set_bit (sol, anything_id);
1406 goto done;
1408 /* For each variable j in delta (Sol(y)), add
1409 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1410 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1412 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
1413 if (type_safe (j, &roffset))
1415 varinfo_t v;
1416 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1417 unsigned int t;
1419 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1420 if (!v)
1421 continue;
1422 t = find (v->id);
1424 /* Adding edges from the special vars is pointless.
1425 They don't have sets that can change. */
1426 if (get_varinfo (t) ->is_special_var)
1427 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1428 else if (add_graph_edge (graph, lhs, t))
1429 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1433 done:
1434 /* If the LHS solution changed, mark the var as changed. */
1435 if (flag)
1437 get_varinfo (lhs)->solution = sol;
1438 if (!TEST_BIT (changed, lhs))
1440 SET_BIT (changed, lhs);
1441 changed_count++;
1446 /* Process a constraint C that represents *x = y. */
1448 static void
1449 do_ds_constraint (constraint_t c, bitmap delta)
1451 unsigned int rhs = c->rhs.var;
1452 bitmap sol = get_varinfo (rhs)->solution;
1453 unsigned int j;
1454 bitmap_iterator bi;
1456 if (bitmap_bit_p (sol, anything_id))
1458 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1460 varinfo_t jvi = get_varinfo (j);
1461 unsigned int t;
1462 unsigned int loff = c->lhs.offset;
1463 unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
1464 varinfo_t v;
1466 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1467 if (!v)
1468 continue;
1469 t = find (v->id);
1471 if (!bitmap_bit_p (get_varinfo (t)->solution, anything_id))
1473 bitmap_set_bit (get_varinfo (t)->solution, anything_id);
1474 if (!TEST_BIT (changed, t))
1476 SET_BIT (changed, t);
1477 changed_count++;
1481 return;
1484 /* For each member j of delta (Sol(x)), add an edge from y to j and
1485 union Sol(y) into Sol(j) */
1486 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1488 unsigned HOST_WIDE_INT loff = c->lhs.offset;
1489 if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
1491 varinfo_t v;
1492 unsigned int t;
1493 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1494 bitmap tmp;
1496 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1497 if (!v)
1498 continue;
1499 t = find (v->id);
1500 tmp = get_varinfo (t)->solution;
1502 if (set_union_with_increment (tmp, sol, 0))
1504 get_varinfo (t)->solution = tmp;
1505 if (t == rhs)
1506 sol = get_varinfo (rhs)->solution;
1507 if (!TEST_BIT (changed, t))
1509 SET_BIT (changed, t);
1510 changed_count++;
1517 /* Handle a non-simple (simple meaning requires no iteration)
1518 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1520 static void
1521 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1523 if (c->lhs.type == DEREF)
1525 if (c->rhs.type == ADDRESSOF)
1527 gcc_unreachable();
1529 else
1531 /* *x = y */
1532 do_ds_constraint (c, delta);
1535 else if (c->rhs.type == DEREF)
1537 /* x = *y */
1538 if (!(get_varinfo (c->lhs.var)->is_special_var))
1539 do_sd_constraint (graph, c, delta);
1541 else
1543 bitmap tmp;
1544 bitmap solution;
1545 bool flag = false;
1547 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1548 solution = get_varinfo (c->rhs.var)->solution;
1549 tmp = get_varinfo (c->lhs.var)->solution;
1551 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1553 if (flag)
1555 get_varinfo (c->lhs.var)->solution = tmp;
1556 if (!TEST_BIT (changed, c->lhs.var))
1558 SET_BIT (changed, c->lhs.var);
1559 changed_count++;
1565 /* Initialize and return a new SCC info structure. */
1567 static struct scc_info *
1568 init_scc_info (size_t size)
1570 struct scc_info *si = XNEW (struct scc_info);
1571 size_t i;
1573 si->current_index = 0;
1574 si->visited = sbitmap_alloc (size);
1575 sbitmap_zero (si->visited);
1576 si->deleted = sbitmap_alloc (size);
1577 sbitmap_zero (si->deleted);
1578 si->node_mapping = XNEWVEC (unsigned int, size);
1579 si->dfs = XCNEWVEC (unsigned int, size);
1581 for (i = 0; i < size; i++)
1582 si->node_mapping[i] = i;
1584 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1585 return si;
1588 /* Free an SCC info structure pointed to by SI */
1590 static void
1591 free_scc_info (struct scc_info *si)
1593 sbitmap_free (si->visited);
1594 sbitmap_free (si->deleted);
1595 free (si->node_mapping);
1596 free (si->dfs);
1597 VEC_free (unsigned, heap, si->scc_stack);
1598 free (si);
1602 /* Find indirect cycles in GRAPH, using strongly connected
1603 components, and note them in the indirect cycles map.
1605 This technique comes from Ben Hardekopf and Calvin Lin,
1606 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1607 Lines of Code", submitted to PLDI 2007. */
1609 static void
1610 find_indirect_cycles (constraint_graph_t graph)
1612 unsigned int i;
1613 unsigned int size = graph->size;
1614 struct scc_info *si = init_scc_info (size);
1616 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1617 if (!TEST_BIT (si->visited, i) && find (i) == i)
1618 scc_visit (graph, si, i);
1620 free_scc_info (si);
1623 /* Compute a topological ordering for GRAPH, and store the result in the
1624 topo_info structure TI. */
1626 static void
1627 compute_topo_order (constraint_graph_t graph,
1628 struct topo_info *ti)
1630 unsigned int i;
1631 unsigned int size = graph->size;
1633 for (i = 0; i != size; ++i)
1634 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1635 topo_visit (graph, ti, i);
1638 /* Structure used for hash value numbering of pointer equivalence
1639 classes. */
1641 typedef struct equiv_class_label
1643 unsigned int equivalence_class;
1644 bitmap labels;
1645 hashval_t hashcode;
1646 } *equiv_class_label_t;
1647 typedef const struct equiv_class_label *const_equiv_class_label_t;
1649 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1650 classes. */
1651 static htab_t pointer_equiv_class_table;
1653 /* A hashtable for mapping a bitmap of labels->location equivalence
1654 classes. */
1655 static htab_t location_equiv_class_table;
1657 /* Hash function for an equiv_class_label_t. */
1659 static hashval_t
1660 equiv_class_label_hash (const void *p)
1662 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1663 return ecl->hashcode;
1666 /* Equality function for two equiv_class_label_t's. */
1668 static int
1669 equiv_class_label_eq (const void *p1, const void *p2)
1671 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1672 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1673 return bitmap_equal_p (eql1->labels, eql2->labels);
1676 /* Look up an equivalence class in TABLE by the bitmap of LABELS it
1677 contains. */
1679 static unsigned int
1680 equiv_class_lookup (htab_t table, bitmap labels)
1682 void **slot;
1683 struct equiv_class_label ecl;
1685 ecl.labels = labels;
1686 ecl.hashcode = bitmap_hash (labels);
1688 slot = htab_find_slot_with_hash (table, &ecl,
1689 ecl.hashcode, NO_INSERT);
1690 if (!slot)
1691 return 0;
1692 else
1693 return ((equiv_class_label_t) *slot)->equivalence_class;
1697 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1698 to TABLE. */
1700 static void
1701 equiv_class_add (htab_t table, unsigned int equivalence_class,
1702 bitmap labels)
1704 void **slot;
1705 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1707 ecl->labels = labels;
1708 ecl->equivalence_class = equivalence_class;
1709 ecl->hashcode = bitmap_hash (labels);
1711 slot = htab_find_slot_with_hash (table, ecl,
1712 ecl->hashcode, INSERT);
1713 gcc_assert (!*slot);
1714 *slot = (void *) ecl;
1717 /* Perform offline variable substitution.
1719 This is a worst case quadratic time way of identifying variables
1720 that must have equivalent points-to sets, including those caused by
1721 static cycles, and single entry subgraphs, in the constraint graph.
1723 The technique is described in "Exploiting Pointer and Location
1724 Equivalence to Optimize Pointer Analysis", in the 14th International
1725 Static Analysis Symposium (SAS), August 2007. It is known as the
1726 "HU" algorithm, and is equivalent to value numbering the collapsed
1727 constraint graph including evaluating unions.
1729 The general method of finding equivalence classes is as follows:
1730 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1731 Initialize all non-REF nodes to be direct nodes.
1732 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1733 variable}
1734 For each constraint containing the dereference, we also do the same
1735 thing.
1737 We then compute SCC's in the graph and unify nodes in the same SCC,
1738 including pts sets.
1740 For each non-collapsed node x:
1741 Visit all unvisited explicit incoming edges.
1742 Ignoring all non-pointers, set pts(x) = Union of pts(y) for all y
1743 where y->x.
1744 Lookup the equivalence class for pts(x).
1745 If we found one, equivalence_class(x) = found class.
1746 Otherwise, equivalence_class(x) = new class, and new_class is
1747 added to the lookup table.
1749 All direct nodes with the same equivalence class can be replaced
1750 with a single representative node.
1751 All unlabeled nodes (label == 0) are not pointers and all edges
1752 involving them can be eliminated.
1753 We perform these optimizations during rewrite_constraints
1755 In addition to pointer equivalence class finding, we also perform
1756 location equivalence class finding. This is the set of variables
1757 that always appear together in points-to sets. We use this to
1758 compress the size of the points-to sets. */
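/* As a small, hand-worked illustration (not compiler output): given only

     a = &x
     b = a
     c = a

   a, b and c all end up with the same points-to set {x} in this offline
   graph, so they receive the same pointer equivalence label and can be
   collapsed to a single representative node, while a variable whose label
   stays 0 is known not to be a pointer and all edges involving it can be
   eliminated.  */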
1760 /* Current maximum pointer equivalence class id. */
1761 static int pointer_equiv_class;
1763 /* Current maximum location equivalence class id. */
1764 static int location_equiv_class;
1766 /* Recursive routine to find strongly connected components in GRAPH,
1767 and label its nodes with DFS numbers. */
1769 static void
1770 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1772 unsigned int i;
1773 bitmap_iterator bi;
1774 unsigned int my_dfs;
1776 gcc_assert (si->node_mapping[n] == n);
1777 SET_BIT (si->visited, n);
1778 si->dfs[n] = si->current_index ++;
1779 my_dfs = si->dfs[n];
1781 /* Visit all the successors. */
1782 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1784 unsigned int w = si->node_mapping[i];
1786 if (TEST_BIT (si->deleted, w))
1787 continue;
1789 if (!TEST_BIT (si->visited, w))
1790 condense_visit (graph, si, w);
1792 unsigned int t = si->node_mapping[w];
1793 unsigned int nnode = si->node_mapping[n];
1794 gcc_assert (nnode == n);
1796 if (si->dfs[t] < si->dfs[nnode])
1797 si->dfs[n] = si->dfs[t];
1801 /* Visit all the implicit predecessors. */
1802 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
1804 unsigned int w = si->node_mapping[i];
1806 if (TEST_BIT (si->deleted, w))
1807 continue;
1809 if (!TEST_BIT (si->visited, w))
1810 condense_visit (graph, si, w);
1812 unsigned int t = si->node_mapping[w];
1813 unsigned int nnode = si->node_mapping[n];
1814 gcc_assert (nnode == n);
1816 if (si->dfs[t] < si->dfs[nnode])
1817 si->dfs[n] = si->dfs[t];
1821 /* See if any components have been identified. */
1822 if (si->dfs[n] == my_dfs)
1824 while (VEC_length (unsigned, si->scc_stack) != 0
1825 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1827 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1828 si->node_mapping[w] = n;
1830 if (!TEST_BIT (graph->direct_nodes, w))
1831 RESET_BIT (graph->direct_nodes, n);
1833 /* Unify our nodes. */
1834 if (graph->preds[w])
1836 if (!graph->preds[n])
1837 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1838 bitmap_ior_into (graph->preds[n], graph->preds[w]);
1840 if (graph->implicit_preds[w])
1842 if (!graph->implicit_preds[n])
1843 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1844 bitmap_ior_into (graph->implicit_preds[n],
1845 graph->implicit_preds[w]);
1847 if (graph->points_to[w])
1849 if (!graph->points_to[n])
1850 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
1851 bitmap_ior_into (graph->points_to[n],
1852 graph->points_to[w]);
1854 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1856 unsigned int rep = si->node_mapping[i];
1857 graph->number_incoming[rep]++;
1860 SET_BIT (si->deleted, n);
1862 else
1863 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1866 /* Label pointer equivalences. */
1868 static void
1869 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1871 unsigned int i;
1872 bitmap_iterator bi;
1873 SET_BIT (si->visited, n);
1875 if (!graph->points_to[n])
1876 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
1878 /* Label and union our incoming edges' points-to sets. */
1879 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1881 unsigned int w = si->node_mapping[i];
1882 if (!TEST_BIT (si->visited, w))
1883 label_visit (graph, si, w);
1885 /* Skip unused edges */
1886 if (w == n || graph->pointer_label[w] == 0)
1888 graph->number_incoming[w]--;
1889 continue;
1891 if (graph->points_to[w])
1892 bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
1894 /* If all incoming edges to w have been processed and
1895 graph->points_to[w] was not stored in the hash table, we can
1896 free it. */
1897 graph->number_incoming[w]--;
1898 if (!graph->number_incoming[w] && !TEST_BIT (graph->pt_used, w))
1900 BITMAP_FREE (graph->points_to[w]);
1903 /* Indirect nodes get fresh variables. */
1904 if (!TEST_BIT (graph->direct_nodes, n))
1905 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
1907 if (!bitmap_empty_p (graph->points_to[n]))
1909 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
1910 graph->points_to[n]);
1911 if (!label)
1913 SET_BIT (graph->pt_used, n);
1914 label = pointer_equiv_class++;
1915 equiv_class_add (pointer_equiv_class_table,
1916 label, graph->points_to[n]);
1918 graph->pointer_label[n] = label;
1922 /* Perform offline variable substitution, discovering equivalence
1923 classes, and eliminating non-pointer variables. */
1925 static struct scc_info *
1926 perform_var_substitution (constraint_graph_t graph)
1928 unsigned int i;
1929 unsigned int size = graph->size;
1930 struct scc_info *si = init_scc_info (size);
1932 bitmap_obstack_initialize (&iteration_obstack);
1933 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
1934 equiv_class_label_eq, free);
1935 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
1936 equiv_class_label_eq, free);
1937 pointer_equiv_class = 1;
1938 location_equiv_class = 1;
1940 /* Condense the nodes, which means to find SCC's, count incoming
1941 predecessors, and unite nodes in SCC's. */
1942 for (i = 0; i < FIRST_REF_NODE; i++)
1943 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1944 condense_visit (graph, si, si->node_mapping[i]);
1946 sbitmap_zero (si->visited);
1947 /* Actually label the nodes for pointer equivalences. */
1948 for (i = 0; i < FIRST_REF_NODE; i++)
1949 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1950 label_visit (graph, si, si->node_mapping[i]);
1952 /* Calculate location equivalence labels. */
1953 for (i = 0; i < FIRST_REF_NODE; i++)
1955 bitmap pointed_by;
1956 bitmap_iterator bi;
1957 unsigned int j;
1958 unsigned int label;
1960 if (!graph->pointed_by[i])
1961 continue;
1962 pointed_by = BITMAP_ALLOC (&iteration_obstack);
1964 /* Translate the pointed-by mapping for pointer equivalence
1965 labels. */
1966 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
1968 bitmap_set_bit (pointed_by,
1969 graph->pointer_label[si->node_mapping[j]]);
1971 /* The original pointed_by is now dead. */
1972 BITMAP_FREE (graph->pointed_by[i]);
1974 /* Look up the location equivalence label if one exists, or make
1975 one otherwise. */
1976 label = equiv_class_lookup (location_equiv_class_table,
1977 pointed_by);
1978 if (label == 0)
1980 label = location_equiv_class++;
1981 equiv_class_add (location_equiv_class_table,
1982 label, pointed_by);
1984 else
1986 if (dump_file && (dump_flags & TDF_DETAILS))
1987 fprintf (dump_file, "Found location equivalence for node %s\n",
1988 get_varinfo (i)->name);
1989 BITMAP_FREE (pointed_by);
1991 graph->loc_label[i] = label;
1995 if (dump_file && (dump_flags & TDF_DETAILS))
1996 for (i = 0; i < FIRST_REF_NODE; i++)
1998 bool direct_node = TEST_BIT (graph->direct_nodes, i);
1999 fprintf (dump_file,
2000 "Equivalence classes for %s node id %d:%s are pointer: %d"
2001 ", location:%d\n",
2002 direct_node ? "Direct node" : "Indirect node", i,
2003 get_varinfo (i)->name,
2004 graph->pointer_label[si->node_mapping[i]],
2005 graph->loc_label[si->node_mapping[i]]);
2008 /* Quickly eliminate our non-pointer variables. */
2010 for (i = 0; i < FIRST_REF_NODE; i++)
2012 unsigned int node = si->node_mapping[i];
2014 if (graph->pointer_label[node] == 0)
2016 if (dump_file && (dump_flags & TDF_DETAILS))
2017 fprintf (dump_file,
2018 "%s is a non-pointer variable, eliminating edges.\n",
2019 get_varinfo (node)->name);
2020 stats.nonpointer_vars++;
2021 clear_edges_for_node (graph, node);
2025 return si;
2028 /* Free information that was only necessary for variable
2029 substitution. */
2031 static void
2032 free_var_substitution_info (struct scc_info *si)
2034 free_scc_info (si);
2035 free (graph->pointer_label);
2036 free (graph->loc_label);
2037 free (graph->pointed_by);
2038 free (graph->points_to);
2039 free (graph->number_incoming);
2040 free (graph->eq_rep);
2041 sbitmap_free (graph->direct_nodes);
2042 sbitmap_free (graph->pt_used);
2043 htab_delete (pointer_equiv_class_table);
2044 htab_delete (location_equiv_class_table);
2045 bitmap_obstack_release (&iteration_obstack);
2048 /* Return an existing node that is equivalent to NODE, which has
2049 equivalence class LABEL, if one exists. Return NODE otherwise. */
2051 static unsigned int
2052 find_equivalent_node (constraint_graph_t graph,
2053 unsigned int node, unsigned int label)
2055 /* If the address version of this variable is unused, we can
2056 substitute it for anything else with the same label.
2057 Otherwise, we know the pointers are equivalent, but not the
2058 locations, and we can unite them later. */
2060 if (!bitmap_bit_p (graph->address_taken, node))
2062 gcc_assert (label < graph->size);
2064 if (graph->eq_rep[label] != -1)
2066 /* Unify the two variables since we know they are equivalent. */
2067 if (unite (graph->eq_rep[label], node))
2068 unify_nodes (graph, graph->eq_rep[label], node, false);
2069 return graph->eq_rep[label];
2071 else
2073 graph->eq_rep[label] = node;
2074 graph->pe_rep[label] = node;
2077 else
2079 gcc_assert (label < graph->size);
2080 graph->pe[node] = label;
2081 if (graph->pe_rep[label] == -1)
2082 graph->pe_rep[label] = node;
2085 return node;
2088 /* Unite pointer equivalent but not location equivalent nodes in
2089 GRAPH. This may only be performed once variable substitution is
2090 finished. */
2092 static void
2093 unite_pointer_equivalences (constraint_graph_t graph)
2095 unsigned int i;
2097 /* Go through the pointer equivalences and unite them to their
2098 representative, if they aren't already. */
2099 for (i = 0; i < FIRST_REF_NODE; i++)
2101 unsigned int label = graph->pe[i];
2102 if (label)
2104 int label_rep = graph->pe_rep[label];
2106 if (label_rep == -1)
2107 continue;
2109 label_rep = find (label_rep);
2110 if (label_rep >= 0 && unite (label_rep, find (i)))
2111 unify_nodes (graph, label_rep, i, false);
2116 /* Move complex constraints to the GRAPH nodes they belong to. */
2118 static void
2119 move_complex_constraints (constraint_graph_t graph)
2121 int i;
2122 constraint_t c;
2124 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2126 if (c)
2128 struct constraint_expr lhs = c->lhs;
2129 struct constraint_expr rhs = c->rhs;
2131 if (lhs.type == DEREF)
2133 insert_into_complex (graph, lhs.var, c);
2135 else if (rhs.type == DEREF)
2137 if (!(get_varinfo (lhs.var)->is_special_var))
2138 insert_into_complex (graph, rhs.var, c);
2140 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2141 && (lhs.offset != 0 || rhs.offset != 0))
2143 insert_into_complex (graph, rhs.var, c);
2150 /* Optimize and rewrite complex constraints while performing
2151 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2152 result of perform_variable_substitution. */
2154 static void
2155 rewrite_constraints (constraint_graph_t graph,
2156 struct scc_info *si)
2158 int i;
2159 unsigned int j;
2160 constraint_t c;
2162 for (j = 0; j < graph->size; j++)
2163 gcc_assert (find (j) == j);
2165 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2167 struct constraint_expr lhs = c->lhs;
2168 struct constraint_expr rhs = c->rhs;
2169 unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
2170 unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
2171 unsigned int lhsnode, rhsnode;
2172 unsigned int lhslabel, rhslabel;
2174 lhsnode = si->node_mapping[lhsvar];
2175 rhsnode = si->node_mapping[rhsvar];
2176 lhslabel = graph->pointer_label[lhsnode];
2177 rhslabel = graph->pointer_label[rhsnode];
2179 /* See if it is really a non-pointer variable, and if so, ignore
2180 the constraint. */
2181 if (lhslabel == 0)
2183 if (dump_file && (dump_flags & TDF_DETAILS))
2186 fprintf (dump_file, "%s is a non-pointer variable, "
2187 "ignoring constraint:",
2188 get_varinfo (lhs.var)->name);
2189 dump_constraint (dump_file, c);
2191 VEC_replace (constraint_t, constraints, i, NULL);
2192 continue;
2195 if (rhslabel == 0)
2197 if (dump_file && (dump_flags & TDF_DETAILS))
2200 fprintf (dump_file, "%s is a non-pointer variable, "
2201 "ignoring constraint:",
2202 get_varinfo (rhs.var)->name);
2203 dump_constraint (dump_file, c);
2205 VEC_replace (constraint_t, constraints, i, NULL);
2206 continue;
2209 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2210 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2211 c->lhs.var = lhsvar;
2212 c->rhs.var = rhsvar;
2217 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2218 part of an SCC, false otherwise. */
2220 static bool
2221 eliminate_indirect_cycles (unsigned int node)
2223 if (graph->indirect_cycles[node] != -1
2224 && !bitmap_empty_p (get_varinfo (node)->solution))
2226 unsigned int i;
2227 VEC(unsigned,heap) *queue = NULL;
2228 int queuepos;
2229 unsigned int to = find (graph->indirect_cycles[node]);
2230 bitmap_iterator bi;
2232 /* We can't touch the solution set and call unify_nodes
2233 at the same time, because unify_nodes is going to do
2234 bitmap unions into it. */
2236 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2238 if (find (i) == i && i != to)
2240 if (unite (to, i))
2241 VEC_safe_push (unsigned, heap, queue, i);
2245 for (queuepos = 0;
2246 VEC_iterate (unsigned, queue, queuepos, i);
2247 queuepos++)
2249 unify_nodes (graph, to, i, true);
2251 VEC_free (unsigned, heap, queue);
2252 return true;
2254 return false;
2257 /* Solve the constraint graph GRAPH using our worklist solver.
2258 This is based on the PW* family of solvers from the "Efficient Field
2259 Sensitive Pointer Analysis for C" paper.
2260 It works by iterating over all the graph nodes, processing the complex
2261 constraints and propagating the copy constraints, until everything stops
2262 changing. This corresponds to steps 6-8 in the solving list given above. */
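/* Informal illustration of one propagation round (added for exposition;
   the constraint spellings are schematic, not the internal encoding):

     a = &x;      ADDRESSOF: x enters Sol(a) before solving
     b = a;       copy edge a -> b: Sol(b) |= delta of Sol(a)
     *p = b;      complex: for each v in Sol(p), add a copy edge b -> v

   Each iteration visits changed nodes in topological order, re-runs their
   complex constraints against the delta of their solution, and unions that
   delta into every successor.  */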
2264 static void
2265 solve_graph (constraint_graph_t graph)
2267 unsigned int size = graph->size;
2268 unsigned int i;
2269 bitmap pts;
2271 changed_count = 0;
2272 changed = sbitmap_alloc (size);
2273 sbitmap_zero (changed);
2275 /* Mark all initial non-collapsed nodes as changed. */
2276 for (i = 0; i < size; i++)
2278 varinfo_t ivi = get_varinfo (i);
2279 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2280 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2281 || VEC_length (constraint_t, graph->complex[i]) > 0))
2283 SET_BIT (changed, i);
2284 changed_count++;
2288 /* Allocate a bitmap to be used to store the changed bits. */
2289 pts = BITMAP_ALLOC (&pta_obstack);
2291 while (changed_count > 0)
2293 unsigned int i;
2294 struct topo_info *ti = init_topo_info ();
2295 stats.iterations++;
2297 bitmap_obstack_initialize (&iteration_obstack);
2299 compute_topo_order (graph, ti);
2301 while (VEC_length (unsigned, ti->topo_order) != 0)
2304 i = VEC_pop (unsigned, ti->topo_order);
2306 /* If this variable is not a representative, skip it. */
2307 if (find (i) != i)
2308 continue;
2310 /* In certain indirect cycle cases, we may merge this
2311 variable to another. */
2312 if (eliminate_indirect_cycles (i) && find (i) != i)
2313 continue;
2315 /* If the node has changed, we need to process the
2316 complex constraints and outgoing edges again. */
2317 if (TEST_BIT (changed, i))
2319 unsigned int j;
2320 constraint_t c;
2321 bitmap solution;
2322 VEC(constraint_t,heap) *complex = graph->complex[i];
2323 bool solution_empty;
2325 RESET_BIT (changed, i);
2326 changed_count--;
2328 /* Compute the changed set of solution bits. */
2329 bitmap_and_compl (pts, get_varinfo (i)->solution,
2330 get_varinfo (i)->oldsolution);
2332 if (bitmap_empty_p (pts))
2333 continue;
2335 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2337 solution = get_varinfo (i)->solution;
2338 solution_empty = bitmap_empty_p (solution);
2340 /* Process the complex constraints */
2341 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2343 /* XXX: This is going to unsort the constraints in
2344 some cases, which will occasionally add duplicate
2345 constraints during unification. This does not
2346 affect correctness. */
2347 c->lhs.var = find (c->lhs.var);
2348 c->rhs.var = find (c->rhs.var);
2350 /* The only complex constraint that can change our
2351 solution to non-empty, given an empty solution,
2352 is a constraint where the lhs side is receiving
2353 some set from elsewhere. */
2354 if (!solution_empty || c->lhs.type != DEREF)
2355 do_complex_constraint (graph, c, pts);
2358 solution_empty = bitmap_empty_p (solution);
2360 if (!solution_empty)
2362 bitmap_iterator bi;
2364 /* Propagate solution to all successors. */
2365 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2366 0, j, bi)
2368 bitmap tmp;
2369 bool flag;
2371 unsigned int to = find (j);
2372 tmp = get_varinfo (to)->solution;
2373 flag = false;
2375 /* Don't try to propagate to ourselves. */
2376 if (to == i)
2377 continue;
2379 flag = set_union_with_increment (tmp, pts, 0);
2381 if (flag)
2383 get_varinfo (to)->solution = tmp;
2384 if (!TEST_BIT (changed, to))
2386 SET_BIT (changed, to);
2387 changed_count++;
2394 free_topo_info (ti);
2395 bitmap_obstack_release (&iteration_obstack);
2398 BITMAP_FREE (pts);
2399 sbitmap_free (changed);
2400 bitmap_obstack_release (&oldpta_obstack);
2403 /* Map from trees to variable infos. */
2404 static struct pointer_map_t *vi_for_tree;
2407 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2409 static void
2410 insert_vi_for_tree (tree t, varinfo_t vi)
2412 void **slot = pointer_map_insert (vi_for_tree, t);
2413 gcc_assert (vi);
2414 gcc_assert (*slot == NULL);
2415 *slot = vi;
2418 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2419 exist in the map, return NULL, otherwise, return the varinfo we found. */
2421 static varinfo_t
2422 lookup_vi_for_tree (tree t)
2424 void **slot = pointer_map_contains (vi_for_tree, t);
2425 if (slot == NULL)
2426 return NULL;
2428 return (varinfo_t) *slot;
2431 /* Return a printable name for DECL */
2433 static const char *
2434 alias_get_name (tree decl)
2436 const char *res = get_name (decl);
2437 char *temp;
2438 int num_printed = 0;
2440 if (res != NULL)
2441 return res;
2443 res = "NULL";
2444 if (!dump_file)
2445 return res;
2447 if (TREE_CODE (decl) == SSA_NAME)
2449 num_printed = asprintf (&temp, "%s_%u",
2450 alias_get_name (SSA_NAME_VAR (decl)),
2451 SSA_NAME_VERSION (decl));
2453 else if (DECL_P (decl))
2455 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2457 if (num_printed > 0)
2459 res = ggc_strdup (temp);
2460 free (temp);
2462 return res;
2465 /* Find the variable id for tree T in the map.
2466 If T doesn't exist in the map, create an entry for it and return it. */
2468 static varinfo_t
2469 get_vi_for_tree (tree t)
2471 void **slot = pointer_map_contains (vi_for_tree, t);
2472 if (slot == NULL)
2473 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2475 return (varinfo_t) *slot;
2478 /* Get a constraint expression from an SSA_VAR_P node. */
2480 static struct constraint_expr
2481 get_constraint_exp_from_ssa_var (tree t)
2483 struct constraint_expr cexpr;
2485 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2487 /* For parameters, get at the points-to set for the actual parm
2488 decl. */
2489 if (TREE_CODE (t) == SSA_NAME
2490 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2491 && SSA_NAME_IS_DEFAULT_DEF (t))
2492 return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));
2494 cexpr.type = SCALAR;
2496 cexpr.var = get_vi_for_tree (t)->id;
2497 /* If we determine the result is "anything", and we know this is readonly,
2498 say it points to readonly memory instead. */
2499 if (cexpr.var == anything_id && TREE_READONLY (t))
2501 cexpr.type = ADDRESSOF;
2502 cexpr.var = readonly_id;
2505 cexpr.offset = 0;
2506 return cexpr;
2509 /* Process a completed constraint T, and add it to the constraint
2510 list. FROM_CALL is true if this is a constraint coming from a
2511 call, which means any DEREFs we see are "may-derefs", not
2512 "must-derefs". */
2514 static void
2515 process_constraint_1 (constraint_t t, bool from_call)
2517 struct constraint_expr rhs = t->rhs;
2518 struct constraint_expr lhs = t->lhs;
2520 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2521 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2523 if (!from_call)
2525 if (lhs.type == DEREF)
2526 get_varinfo (lhs.var)->directly_dereferenced = true;
2527 if (rhs.type == DEREF)
2528 get_varinfo (rhs.var)->directly_dereferenced = true;
2531 if (!use_field_sensitive)
2533 t->rhs.offset = 0;
2534 t->lhs.offset = 0;
2537 /* ANYTHING == ANYTHING is pointless. */
2538 if (lhs.var == anything_id && rhs.var == anything_id)
2539 return;
2541 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING.  */
2542 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
2544 rhs = t->lhs;
2545 t->lhs = t->rhs;
2546 t->rhs = rhs;
2547 process_constraint_1 (t, from_call);
2549 /* This can happen in our IR with things like n->a = *p */
2550 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2552 /* Split into tmp = *rhs, *lhs = tmp */
2553 tree rhsdecl = get_varinfo (rhs.var)->decl;
2554 tree pointertype = TREE_TYPE (rhsdecl);
2555 tree pointedtotype = TREE_TYPE (pointertype);
2556 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2557 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2559 /* If this is an aggregate of known size, we should have passed
2560 this off to do_structure_copy, and it should have broken it
2561 up. */
2562 gcc_assert (!AGGREGATE_TYPE_P (pointedtotype)
2563 || get_varinfo (rhs.var)->is_unknown_size_var);
2565 process_constraint_1 (new_constraint (tmplhs, rhs), from_call);
2566 process_constraint_1 (new_constraint (lhs, tmplhs), from_call);
2568 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2570 /* Split into tmp = &rhs, *lhs = tmp */
2571 tree rhsdecl = get_varinfo (rhs.var)->decl;
2572 tree pointertype = TREE_TYPE (rhsdecl);
2573 tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp");
2574 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2576 process_constraint_1 (new_constraint (tmplhs, rhs), from_call);
2577 process_constraint_1 (new_constraint (lhs, tmplhs), from_call);
2579 else
2581 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2582 VEC_safe_push (constraint_t, heap, constraints, t);
2587 /* Process constraint T, performing various simplifications and then
2588 adding it to our list of overall constraints. */
2590 static void
2591 process_constraint (constraint_t t)
2593 process_constraint_1 (t, false);
2596 /* Return true if T is a variable of a type that could contain
2597 pointers. */
2599 static bool
2600 could_have_pointers (tree t)
2602 tree type = TREE_TYPE (t);
2604 if (POINTER_TYPE_P (type)
2605 || AGGREGATE_TYPE_P (type)
2606 || TREE_CODE (type) == COMPLEX_TYPE)
2607 return true;
2609 return false;
2612 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2613 structure. */
2615 static unsigned HOST_WIDE_INT
2616 bitpos_of_field (const tree fdecl)
2619 if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
2620 || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
2621 return -1;
2623 return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
2624 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
2628 /* Given a COMPONENT_REF T, return the constraint_expr for it. */
2630 static void
2631 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results)
2633 tree orig_t = t;
2634 HOST_WIDE_INT bitsize = -1;
2635 HOST_WIDE_INT bitmaxsize = -1;
2636 HOST_WIDE_INT bitpos;
2637 tree forzero;
2638 struct constraint_expr *result;
2639 unsigned int beforelength = VEC_length (ce_s, *results);
2641 /* Some people like to do cute things like take the address of
2642 &0->a.b */
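/* The classical offsetof idiom is one source of such trees (editor's
   illustration, not code from this file):

     #define my_offsetof(TYPE, FIELD) ((size_t) &((TYPE *) 0)->FIELD)

   The base of the COMPONENT_REF is the constant zero, so we take the
   INTEGER branch just below.  */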
2643 forzero = t;
2644 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2645 forzero = TREE_OPERAND (forzero, 0);
2647 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2649 struct constraint_expr temp;
2651 temp.offset = 0;
2652 temp.var = integer_id;
2653 temp.type = SCALAR;
2654 VEC_safe_push (ce_s, heap, *results, &temp);
2655 return;
2658 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2660 get_constraint_for (t, results);
2661 result = VEC_last (ce_s, *results);
2662 result->offset = bitpos;
2664 gcc_assert (beforelength + 1 == VEC_length (ce_s, *results));
2666 /* This can also happen due to weird offsetof type macros. */
2667 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2668 result->type = SCALAR;
2670 if (result->type == SCALAR)
2672 /* In languages like C, you can access one past the end of an
2673 array. You aren't allowed to dereference it, so we can
2674 ignore this constraint. When we handle pointer subtraction,
2675 we may have to do something cute here. */
2677 if (result->offset < get_varinfo (result->var)->fullsize
2678 && bitmaxsize != 0)
2680 /* It's also not true that the constraint will actually start at the
2681 right offset, it may start in some padding. We only care about
2682 setting the constraint to the first actual field it touches, so
2683 walk to find it. */
2684 varinfo_t curr;
2685 for (curr = get_varinfo (result->var); curr; curr = curr->next)
2687 if (ranges_overlap_p (curr->offset, curr->size,
2688 result->offset, bitmaxsize))
2690 result->var = curr->id;
2691 break;
2694 /* Assert that we found *some* field there; the user couldn't be
2695 accessing *only* padding. */
2696 /* Still the user could access one past the end of an array
2697 embedded in a struct resulting in accessing *only* padding. */
2698 gcc_assert (curr || ref_contains_array_ref (orig_t));
2700 else if (bitmaxsize == 0)
2702 if (dump_file && (dump_flags & TDF_DETAILS))
2703 fprintf (dump_file, "Access to zero-sized part of variable, "
2704 "ignoring\n");
2706 else
2707 if (dump_file && (dump_flags & TDF_DETAILS))
2708 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
2710 result->offset = 0;
2712 else if (bitmaxsize == -1)
2714 /* We can't handle DEREF constraints with unknown size, we'll
2715 get the wrong answer. Punt and return anything. */
2716 result->var = anything_id;
2717 result->offset = 0;
2722 /* Dereference the constraint expression CONS, and return the result.
2723 DEREF (ADDRESSOF) = SCALAR
2724 DEREF (SCALAR) = DEREF
2725 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2726 This is needed so that we can handle dereferencing DEREF constraints. */
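/* Sketch of the third rule on a hypothetical fragment:

     int **pp, *r;
     r = **pp;

   The inner dereference already yields a DEREF constraint for pp;
   dereferencing again materializes "dereftmp = *pp" and the outer access
   becomes a plain DEREF of dereftmp.  */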
2728 static void
2729 do_deref (VEC (ce_s, heap) **constraints)
2731 struct constraint_expr *c;
2732 unsigned int i = 0;
2734 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
2736 if (c->type == SCALAR)
2737 c->type = DEREF;
2738 else if (c->type == ADDRESSOF)
2739 c->type = SCALAR;
2740 else if (c->type == DEREF)
2742 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
2743 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2744 process_constraint (new_constraint (tmplhs, *c));
2745 c->var = tmplhs.var;
2747 else
2748 gcc_unreachable ();
2752 /* Given a tree T, return the constraint expression for it. */
2754 static void
2755 get_constraint_for (tree t, VEC (ce_s, heap) **results)
2757 struct constraint_expr temp;
2759 /* x = integer is all glommed to a single variable, which doesn't
2760 point to anything by itself. That is, of course, unless it is an
2761 integer constant being treated as a pointer, in which case, we
2762 will return that this is really the addressof anything. This
2763 happens below, since it will fall into the default case. The only
2764 case we know something about an integer treated like a pointer is
2765 when it is the NULL pointer, and then we just say it points to
2766 NULL. */
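/* For instance (editor's illustration):

     p = 0;                    p gets the ADDRESSOF of the special
                               "nothing" variable
     p = (int *) 0xdeadbeef;   falls through to the default case and
                               gets the ADDRESSOF of ANYTHING  */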
2767 if (TREE_CODE (t) == INTEGER_CST
2768 && integer_zerop (t))
2770 temp.var = nothing_id;
2771 temp.type = ADDRESSOF;
2772 temp.offset = 0;
2773 VEC_safe_push (ce_s, heap, *results, &temp);
2774 return;
2777 /* String constants are read-only. */
2778 if (TREE_CODE (t) == STRING_CST)
2780 temp.var = readonly_id;
2781 temp.type = SCALAR;
2782 temp.offset = 0;
2783 VEC_safe_push (ce_s, heap, *results, &temp);
2784 return;
2787 switch (TREE_CODE_CLASS (TREE_CODE (t)))
2789 case tcc_expression:
2790 case tcc_vl_exp:
2792 switch (TREE_CODE (t))
2794 case ADDR_EXPR:
2796 struct constraint_expr *c;
2797 unsigned int i;
2798 tree exp = TREE_OPERAND (t, 0);
2799 tree pttype = TREE_TYPE (TREE_TYPE (t));
2801 get_constraint_for (exp, results);
2804 /* Complex types are special. Taking the address of one
2805 allows you to access either part of it through that
2806 pointer. */
2807 if (VEC_length (ce_s, *results) == 1 &&
2808 TREE_CODE (pttype) == COMPLEX_TYPE)
2810 struct constraint_expr *origrhs;
2811 varinfo_t origvar;
2812 struct constraint_expr tmp;
2814 gcc_assert (VEC_length (ce_s, *results) == 1);
2815 origrhs = VEC_last (ce_s, *results);
2816 tmp = *origrhs;
2817 VEC_pop (ce_s, *results);
2818 origvar = get_varinfo (origrhs->var);
2819 for (; origvar; origvar = origvar->next)
2821 tmp.var = origvar->id;
2822 VEC_safe_push (ce_s, heap, *results, &tmp);
2826 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
2828 if (c->type == DEREF)
2829 c->type = SCALAR;
2830 else
2831 c->type = ADDRESSOF;
2833 return;
2835 break;
2836 case CALL_EXPR:
2837 /* XXX: In interprocedural mode, if we didn't have the
2838 body, we would need to do *each pointer argument =
2839 &ANYTHING added. */
2840 if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
2842 varinfo_t vi;
2843 tree heapvar = heapvar_lookup (t);
2845 if (heapvar == NULL)
2847 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
2848 DECL_EXTERNAL (heapvar) = 1;
2849 get_var_ann (heapvar)->is_heapvar = 1;
2850 if (gimple_referenced_vars (cfun))
2851 add_referenced_var (heapvar);
2852 heapvar_insert (t, heapvar);
2855 temp.var = create_variable_info_for (heapvar,
2856 alias_get_name (heapvar));
2858 vi = get_varinfo (temp.var);
2859 vi->is_artificial_var = 1;
2860 vi->is_heap_var = 1;
2861 temp.type = ADDRESSOF;
2862 temp.offset = 0;
2863 VEC_safe_push (ce_s, heap, *results, &temp);
2864 return;
2866 else
2868 temp.var = anything_id;
2869 temp.type = SCALAR;
2870 temp.offset = 0;
2871 VEC_safe_push (ce_s, heap, *results, &temp);
2872 return;
2874 break;
2875 default:
2877 temp.type = ADDRESSOF;
2878 temp.var = anything_id;
2879 temp.offset = 0;
2880 VEC_safe_push (ce_s, heap, *results, &temp);
2881 return;
2885 case tcc_reference:
2887 switch (TREE_CODE (t))
2889 case INDIRECT_REF:
2891 get_constraint_for (TREE_OPERAND (t, 0), results);
2892 do_deref (results);
2893 return;
2895 case ARRAY_REF:
2896 case ARRAY_RANGE_REF:
2897 case COMPONENT_REF:
2898 get_constraint_for_component_ref (t, results);
2899 return;
2900 default:
2902 temp.type = ADDRESSOF;
2903 temp.var = anything_id;
2904 temp.offset = 0;
2905 VEC_safe_push (ce_s, heap, *results, &temp);
2906 return;
2910 case tcc_unary:
2912 switch (TREE_CODE (t))
2914 case NOP_EXPR:
2915 case CONVERT_EXPR:
2917 tree op = TREE_OPERAND (t, 0);
2919 /* Casts from non-pointer to pointer are bad news for us.
2920 Anything else we see through.  */
2921 if (!(POINTER_TYPE_P (TREE_TYPE (t))
2922 && ! POINTER_TYPE_P (TREE_TYPE (op))))
2924 get_constraint_for (op, results);
2925 return;
2928 /* FALLTHRU */
2930 default:
2932 temp.type = ADDRESSOF;
2933 temp.var = anything_id;
2934 temp.offset = 0;
2935 VEC_safe_push (ce_s, heap, *results, &temp);
2936 return;
2940 case tcc_exceptional:
2942 switch (TREE_CODE (t))
2944 case PHI_NODE:
2946 get_constraint_for (PHI_RESULT (t), results);
2947 return;
2949 break;
2950 case SSA_NAME:
2952 struct constraint_expr temp;
2953 temp = get_constraint_exp_from_ssa_var (t);
2954 VEC_safe_push (ce_s, heap, *results, &temp);
2955 return;
2957 break;
2958 default:
2960 temp.type = ADDRESSOF;
2961 temp.var = anything_id;
2962 temp.offset = 0;
2963 VEC_safe_push (ce_s, heap, *results, &temp);
2964 return;
2968 case tcc_declaration:
2970 struct constraint_expr temp;
2971 temp = get_constraint_exp_from_ssa_var (t);
2972 VEC_safe_push (ce_s, heap, *results, &temp);
2973 return;
2975 default:
2977 temp.type = ADDRESSOF;
2978 temp.var = anything_id;
2979 temp.offset = 0;
2980 VEC_safe_push (ce_s, heap, *results, &temp);
2981 return;
2987 /* Handle the structure copy case where we have a simple structure copy
2988 between LHS and RHS that is of SIZE (in bits)
2990 For each field of the lhs variable (lhsfield)
2991 For each field of the rhs variable at lhsfield.offset (rhsfield)
2992 add the constraint lhsfield = rhsfield
2994 If we fail due to some kind of type unsafety or other thing we
2995 can't handle, return false. We expect the caller to collapse the
2996 variable in that case. */
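/* A small hedged example (offsets assume 32-bit pointers and are in bits):

     struct S { int *a; int *b; } x, y;
     x = y;

   expands into the field-wise constraints

     x.a = y.a;    (offset 0)
     x.b = y.b;    (offset 32)

   pairing each lhs field with the rhs field at the same relative offset
   via first_vi_for_offset.  */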
2998 static bool
2999 do_simple_structure_copy (const struct constraint_expr lhs,
3000 const struct constraint_expr rhs,
3001 const unsigned HOST_WIDE_INT size)
3003 varinfo_t p = get_varinfo (lhs.var);
3004 unsigned HOST_WIDE_INT pstart, last;
3005 pstart = p->offset;
3006 last = p->offset + size;
3007 for (; p && p->offset < last; p = p->next)
3009 varinfo_t q;
3010 struct constraint_expr templhs = lhs;
3011 struct constraint_expr temprhs = rhs;
3012 unsigned HOST_WIDE_INT fieldoffset;
3014 templhs.var = p->id;
3015 q = get_varinfo (temprhs.var);
3016 fieldoffset = p->offset - pstart;
3017 q = first_vi_for_offset (q, q->offset + fieldoffset);
3018 if (!q)
3019 return false;
3020 temprhs.var = q->id;
3021 process_constraint (new_constraint (templhs, temprhs));
3023 return true;
3027 /* Handle the structure copy case where we have a structure copy between an
3028 aggregate on the LHS and a dereference of a pointer on the RHS
3029 that is of SIZE (in bits)
3031 For each field of the lhs variable (lhsfield)
3032 rhs.offset = lhsfield->offset
3033 add the constraint lhsfield = rhs
3036 static void
3037 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
3038 const struct constraint_expr rhs,
3039 const unsigned HOST_WIDE_INT size)
3041 varinfo_t p = get_varinfo (lhs.var);
3042 unsigned HOST_WIDE_INT pstart,last;
3043 pstart = p->offset;
3044 last = p->offset + size;
3046 for (; p && p->offset < last; p = p->next)
3048 varinfo_t q;
3049 struct constraint_expr templhs = lhs;
3050 struct constraint_expr temprhs = rhs;
3051 unsigned HOST_WIDE_INT fieldoffset;
3054 if (templhs.type == SCALAR)
3055 templhs.var = p->id;
3056 else
3057 templhs.offset = p->offset;
3059 q = get_varinfo (temprhs.var);
3060 fieldoffset = p->offset - pstart;
3061 temprhs.offset += fieldoffset;
3062 process_constraint (new_constraint (templhs, temprhs));
3066 /* Handle the structure copy case where we have a structure copy
3067 between an aggregate on the RHS and a dereference of a pointer on
3068 the LHS that is of SIZE (in bits)
3070 For each field of the rhs variable (rhsfield)
3071 lhs.offset = rhsfield->offset
3072 add the constraint lhs = rhsfield
3075 static void
3076 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
3077 const struct constraint_expr rhs,
3078 const unsigned HOST_WIDE_INT size)
3080 varinfo_t p = get_varinfo (rhs.var);
3081 unsigned HOST_WIDE_INT pstart,last;
3082 pstart = p->offset;
3083 last = p->offset + size;
3085 for (; p && p->offset < last; p = p->next)
3087 varinfo_t q;
3088 struct constraint_expr templhs = lhs;
3089 struct constraint_expr temprhs = rhs;
3090 unsigned HOST_WIDE_INT fieldoffset;
3093 if (temprhs.type == SCALAR)
3094 temprhs.var = p->id;
3095 else
3096 temprhs.offset = p->offset;
3098 q = get_varinfo (templhs.var);
3099 fieldoffset = p->offset - pstart;
3100 templhs.offset += fieldoffset;
3101 process_constraint (new_constraint (templhs, temprhs));
3105 /* Sometimes, frontends like to give us bad type information. This
3106 function will collapse all the fields from VAR to the end of VAR,
3107 into VAR, so that we treat those fields as a single variable.
3108 We return the variable they were collapsed into. */
3110 static unsigned int
3111 collapse_rest_of_var (unsigned int var)
3113 varinfo_t currvar = get_varinfo (var);
3114 varinfo_t field;
3116 for (field = currvar->next; field; field = field->next)
3118 if (dump_file)
3119 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
3120 field->name, currvar->name);
3122 gcc_assert (!field->collapsed_to);
3123 field->collapsed_to = currvar;
3126 currvar->next = NULL;
3127 currvar->size = currvar->fullsize - currvar->offset;
3129 return currvar->id;
3132 /* Handle aggregate copies by expanding into copies of the respective
3133 fields of the structures. */
3135 static void
3136 do_structure_copy (tree lhsop, tree rhsop)
3138 struct constraint_expr lhs, rhs, tmp;
3139 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3140 varinfo_t p;
3141 unsigned HOST_WIDE_INT lhssize;
3142 unsigned HOST_WIDE_INT rhssize;
3144 get_constraint_for (lhsop, &lhsc);
3145 get_constraint_for (rhsop, &rhsc);
3146 gcc_assert (VEC_length (ce_s, lhsc) == 1);
3147 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3148 lhs = *(VEC_last (ce_s, lhsc));
3149 rhs = *(VEC_last (ce_s, rhsc));
3151 VEC_free (ce_s, heap, lhsc);
3152 VEC_free (ce_s, heap, rhsc);
3154 /* If we have special var = x, swap it around. */
3155 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
3157 tmp = lhs;
3158 lhs = rhs;
3159 rhs = tmp;
3162 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
3163 possible it's something we could handle. However, most cases falling
3164 into this are dealing with transparent unions, which are slightly
3165 weird. */
3166 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
3168 rhs.type = ADDRESSOF;
3169 rhs.var = anything_id;
3172 /* If the RHS is a special var, or an addressof, set all the LHS fields to
3173 that special var. */
3174 if (rhs.var <= integer_id)
3176 for (p = get_varinfo (lhs.var); p; p = p->next)
3178 struct constraint_expr templhs = lhs;
3179 struct constraint_expr temprhs = rhs;
3181 if (templhs.type == SCALAR )
3182 templhs.var = p->id;
3183 else
3184 templhs.offset += p->offset;
3185 process_constraint (new_constraint (templhs, temprhs));
3188 else
3190 tree rhstype = TREE_TYPE (rhsop);
3191 tree lhstype = TREE_TYPE (lhsop);
3192 tree rhstypesize;
3193 tree lhstypesize;
3195 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
3196 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
3198 /* If we have variably sized types on the rhs or lhs, and a deref
3199 constraint, add the constraint, lhsconstraint = &ANYTHING.
3200 This is conservatively correct because either the lhs is an unknown
3201 sized var (if the constraint is SCALAR), or the lhs is a DEREF
3202 constraint, and every variable it can point to must be unknown sized
3203 anyway, so we don't need to worry about fields at all. */
3204 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
3205 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
3207 rhs.var = anything_id;
3208 rhs.type = ADDRESSOF;
3209 rhs.offset = 0;
3210 process_constraint (new_constraint (lhs, rhs));
3211 return;
3214 /* The size only really matters insofar as we don't set more or less of
3215 the variable. If we hit an unknown size var, the size should be the
3216 whole darn thing. */
3217 if (get_varinfo (rhs.var)->is_unknown_size_var)
3218 rhssize = ~0;
3219 else
3220 rhssize = TREE_INT_CST_LOW (rhstypesize);
3222 if (get_varinfo (lhs.var)->is_unknown_size_var)
3223 lhssize = ~0;
3224 else
3225 lhssize = TREE_INT_CST_LOW (lhstypesize);
3228 if (rhs.type == SCALAR && lhs.type == SCALAR)
3230 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
3232 lhs.var = collapse_rest_of_var (lhs.var);
3233 rhs.var = collapse_rest_of_var (rhs.var);
3234 lhs.offset = 0;
3235 rhs.offset = 0;
3236 lhs.type = SCALAR;
3237 rhs.type = SCALAR;
3238 process_constraint (new_constraint (lhs, rhs));
3241 else if (lhs.type != DEREF && rhs.type == DEREF)
3242 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3243 else if (lhs.type == DEREF && rhs.type != DEREF)
3244 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3245 else
3247 tree pointedtotype = lhstype;
3248 tree tmpvar;
3250 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
3251 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
3252 do_structure_copy (tmpvar, rhsop);
3253 do_structure_copy (lhsop, tmpvar);
3259 /* Update related alias information kept in AI. This is used when
3260 building name tags, alias sets and deciding grouping heuristics.
3261 STMT is the statement to process. This function also updates
3262 ADDRESSABLE_VARS. */
3264 static void
3265 update_alias_info (tree stmt, struct alias_info *ai)
3267 bitmap addr_taken;
3268 use_operand_p use_p;
3269 ssa_op_iter iter;
3270 bool stmt_dereferences_ptr_p;
3271 enum escape_type stmt_escape_type = is_escape_site (stmt);
3272 struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);
3274 stmt_dereferences_ptr_p = false;
3276 if (stmt_escape_type == ESCAPE_TO_CALL
3277 || stmt_escape_type == ESCAPE_TO_PURE_CONST)
3279 mem_ref_stats->num_call_sites++;
3280 if (stmt_escape_type == ESCAPE_TO_PURE_CONST)
3281 mem_ref_stats->num_pure_const_call_sites++;
3283 else if (stmt_escape_type == ESCAPE_TO_ASM)
3284 mem_ref_stats->num_asm_sites++;
3286 /* Mark all the variables whose address are taken by the statement. */
3287 addr_taken = addresses_taken (stmt);
3288 if (addr_taken)
3290 bitmap_ior_into (gimple_addressable_vars (cfun), addr_taken);
3292 /* If STMT is an escape point, all the addresses taken by it are
3293 call-clobbered. */
3294 if (stmt_escape_type != NO_ESCAPE)
3296 bitmap_iterator bi;
3297 unsigned i;
3299 EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
3301 tree rvar = referenced_var (i);
3302 if (!unmodifiable_var_p (rvar))
3303 mark_call_clobbered (rvar, stmt_escape_type);
3308 /* Process each operand use. For pointers, determine whether they
3309 are dereferenced by the statement, or whether their value
3310 escapes, etc. */
3311 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
3313 tree op, var;
3314 var_ann_t v_ann;
3315 struct ptr_info_def *pi;
3316 unsigned num_uses, num_loads, num_stores;
3318 op = USE_FROM_PTR (use_p);
3320 /* If STMT is a PHI node, OP may be an ADDR_EXPR. If so, add it
3321 to the set of addressable variables. */
3322 if (TREE_CODE (op) == ADDR_EXPR)
3324 bitmap addressable_vars = gimple_addressable_vars (cfun);
3326 gcc_assert (TREE_CODE (stmt) == PHI_NODE);
3327 gcc_assert (addressable_vars);
3329 /* PHI nodes don't have annotations for pinning the set
3330 of addresses taken, so we collect them here.
3332 FIXME, should we allow PHI nodes to have annotations
3333 so that they can be treated like regular statements?
3334 Currently, they are treated as second-class
3335 statements. */
3336 add_to_addressable_set (TREE_OPERAND (op, 0), &addressable_vars);
3337 continue;
3340 /* Ignore constants (they may occur in PHI node arguments). */
3341 if (TREE_CODE (op) != SSA_NAME)
3342 continue;
3344 var = SSA_NAME_VAR (op);
3345 v_ann = var_ann (var);
3347 /* The base variable of an SSA name must be a GIMPLE register, and thus
3348 it cannot be aliased. */
3349 gcc_assert (!may_be_aliased (var));
3351 /* We are only interested in pointers. */
3352 if (!POINTER_TYPE_P (TREE_TYPE (op)))
3353 continue;
3355 pi = get_ptr_info (op);
3357 /* Add OP to AI->PROCESSED_PTRS, if it's not there already. */
3358 if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
3360 SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
3361 VEC_safe_push (tree, heap, ai->processed_ptrs, op);
3364 /* If STMT is a PHI node, then it will not have pointer
3365 dereferences and it will not be an escape point. */
3366 if (TREE_CODE (stmt) == PHI_NODE)
3367 continue;
3369 /* Determine whether OP is a dereferenced pointer, and if STMT
3370 is an escape point, whether OP escapes. */
3371 count_uses_and_derefs (op, stmt, &num_uses, &num_loads, &num_stores);
3373 /* Handle a corner case involving address expressions of the
3374 form '&PTR->FLD'. The problem with these expressions is that
3375 they do not represent a dereference of PTR. However, if some
3376 other transformation propagates them into an INDIRECT_REF
3377 expression, we end up with '*(&PTR->FLD)' which is folded
3378 into 'PTR->FLD'.
3380 So, if the original code had no other dereferences of PTR,
3381 the aliaser will not create memory tags for it, and when
3382 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
3383 memory operations will receive no VDEF/VUSE operands.
3385 One solution would be to have count_uses_and_derefs consider
3386 &PTR->FLD a dereference of PTR. But that is wrong, since it
3387 is not really a dereference but an offset calculation.
3389 What we do here is to recognize these special ADDR_EXPR
3390 nodes. Since these expressions are never GIMPLE values (they
3391 are not GIMPLE invariants), they can only appear on the RHS
3392 of an assignment and their base address is always an
3393 INDIRECT_REF expression. */
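/* Concretely (a made-up fragment for illustration):

     struct S { int fld; } *ptr;
     int *q = &ptr->fld;

   Nothing is loaded through ptr here, but if &ptr->fld is later folded
   through an INDIRECT_REF the access does dereference ptr, which is why
   a load is counted for it below.  */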
3394 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3395 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR
3396 && !is_gimple_val (GIMPLE_STMT_OPERAND (stmt, 1)))
3398 /* If the RHS if of the form &PTR->FLD and PTR == OP, then
3399 this represents a potential dereference of PTR. */
3400 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
3401 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3402 if (TREE_CODE (base) == INDIRECT_REF
3403 && TREE_OPERAND (base, 0) == op)
3404 num_loads++;
3407 if (num_loads + num_stores > 0)
3409 /* Mark OP as dereferenced. In a subsequent pass,
3410 dereferenced pointers that point to a set of
3411 variables will be assigned a name tag to alias
3412 all the variables OP points to. */
3413 pi->is_dereferenced = 1;
3415 /* If this is a store operation, mark OP as being
3416 dereferenced to store, otherwise mark it as being
3417 dereferenced to load. */
3418 if (num_stores > 0)
3419 pointer_set_insert (ai->dereferenced_ptrs_store, var);
3420 else
3421 pointer_set_insert (ai->dereferenced_ptrs_load, var);
3423 /* Update the frequency estimate for all the dereferences of
3424 pointer OP. */
3425 update_mem_sym_stats_from_stmt (op, stmt, num_loads, num_stores);
3427 /* Indicate that STMT contains pointer dereferences. */
3428 stmt_dereferences_ptr_p = true;
3431 if (stmt_escape_type != NO_ESCAPE && num_loads + num_stores < num_uses)
3433 /* If STMT is an escape point and STMT contains at
3434 least one direct use of OP, then the value of OP
3435 escapes and so the pointed-to variables need to
3436 be marked call-clobbered. */
3437 pi->value_escapes_p = 1;
3438 pi->escape_mask |= stmt_escape_type;
3440 /* If the statement makes a function call, assume
3441 that pointer OP will be dereferenced in a store
3442 operation inside the called function. */
3443 if (get_call_expr_in (stmt)
3444 || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
3446 pointer_set_insert (ai->dereferenced_ptrs_store, var);
3447 pi->is_dereferenced = 1;
3452 if (TREE_CODE (stmt) == PHI_NODE)
3453 return;
3455 /* Mark stored variables in STMT as being written to and update the
3456 memory reference stats for all memory symbols referenced by STMT. */
3457 if (stmt_references_memory_p (stmt))
3459 unsigned i;
3460 bitmap_iterator bi;
3462 mem_ref_stats->num_mem_stmts++;
3464 /* Notice that we only update memory reference stats for symbols
3465 loaded and stored by the statement if the statement does not
3466 contain pointer dereferences and it is not a call/asm site.
3467 This is to avoid double accounting problems when creating
3468 memory partitions. After computing points-to information,
3469 pointer dereference statistics are used to update the
3470 reference stats of the pointed-to variables, so here we
3471 should only update direct references to symbols.
3473 Indirect references are not updated here for two reasons: (1)
3474 The first time we compute alias information, the sets
3475 LOADED/STORED are empty for pointer dereferences, (2) After
3476 partitioning, LOADED/STORED may have references to
3477 partitions, not the original pointed-to variables. So, if we
3478 always counted LOADED/STORED here and during partitioning, we
3479 would count many symbols more than once.
3481 This does cause some imprecision when a statement has a
3482 combination of direct symbol references and pointer
3483 dereferences (e.g., MEMORY_VAR = *PTR) or if a call site has
3484 memory symbols in its argument list, but these cases do not
3485 occur so frequently as to constitute a serious problem. */
3486 if (STORED_SYMS (stmt))
3487 EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
3489 tree sym = referenced_var (i);
3490 pointer_set_insert (ai->written_vars, sym);
3491 if (!stmt_dereferences_ptr_p
3492 && stmt_escape_type != ESCAPE_TO_CALL
3493 && stmt_escape_type != ESCAPE_TO_PURE_CONST
3494 && stmt_escape_type != ESCAPE_TO_ASM)
3495 update_mem_sym_stats_from_stmt (sym, stmt, 0, 1);
3498 if (!stmt_dereferences_ptr_p
3499 && LOADED_SYMS (stmt)
3500 && stmt_escape_type != ESCAPE_TO_CALL
3501 && stmt_escape_type != ESCAPE_TO_PURE_CONST
3502 && stmt_escape_type != ESCAPE_TO_ASM)
3503 EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt), 0, i, bi)
3504 update_mem_sym_stats_from_stmt (referenced_var (i), stmt, 1, 0);
3509 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3510 Expressions of the type PTR + CST can be handled in two ways:
3512 1- If the constraint for PTR is ADDRESSOF for a non-structure
3513 variable, then we can use it directly because adding or
3514 subtracting a constant may not alter the original ADDRESSOF
3515 constraint (i.e., pointer arithmetic may not legally go outside
3516 an object's boundaries).
3518 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3519 then if CST is a compile-time constant that can be used as an
3520 offset, we can determine which sub-variable will be pointed-to
3521 by the expression.
3523 Return true if the expression is handled. For any other kind of
3524 expression, return false so that each operand can be added as a
3525 separate constraint by the caller. */
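/* A hedged illustration of case 2 (assuming 32-bit int, offsets in bits):

     struct S { int a; int b; } s;
     int *p = (int *) &s;
     int *q = p + 1;

   The constraint for p is ADDRESSOF s; the constant addend of 32 bits
   falls inside the sub-variable for s.b, so q's constraint is rewritten
   to ADDRESSOF s.b.  Non-constant addends are handled conservatively
   below, either via the INTEGER variable or with one constraint per
   field.  */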
3527 static bool
3528 handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
3530 tree op0, op1;
3531 struct constraint_expr *c, *c2;
3532 unsigned int i = 0;
3533 unsigned int j = 0;
3534 VEC (ce_s, heap) *temp = NULL;
3535 unsigned int rhsoffset = 0;
3536 bool unknown_addend = false;
3538 if (TREE_CODE (expr) != POINTER_PLUS_EXPR)
3539 return false;
3541 op0 = TREE_OPERAND (expr, 0);
3542 op1 = TREE_OPERAND (expr, 1);
3543 gcc_assert (POINTER_TYPE_P (TREE_TYPE (op0)));
3545 get_constraint_for (op0, &temp);
3547 /* Constant addends give a known bit offset; non-constant addends are
3548 flagged and handled conservatively below. */
3548 if (TREE_CODE (op1) == INTEGER_CST)
3549 rhsoffset = TREE_INT_CST_LOW (op1) * BITS_PER_UNIT;
3550 else
3551 unknown_addend = true;
3553 for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
3554 for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
3556 if (c2->type == ADDRESSOF && rhsoffset != 0)
3558 varinfo_t temp = get_varinfo (c2->var);
3560 /* An access one after the end of an array is valid,
3561 so simply punt on accesses we cannot resolve. */
3562 temp = first_vi_for_offset (temp, rhsoffset);
3563 if (temp == NULL)
3564 continue;
3565 c2->var = temp->id;
3566 c2->offset = 0;
3568 else if (unknown_addend)
3570 /* Can't handle *a + integer where integer is unknown. */
3571 if (c2->type != SCALAR)
3573 struct constraint_expr intc;
3574 intc.var = integer_id;
3575 intc.offset = 0;
3576 intc.type = SCALAR;
3577 process_constraint (new_constraint (*c, intc));
3579 else
3581 /* We know it lives somewhere within c2->var. */
3582 varinfo_t tmp = get_varinfo (c2->var);
3583 for (; tmp; tmp = tmp->next)
3585 struct constraint_expr tmpc = *c2;
3586 c2->var = tmp->id;
3587 c2->offset = 0;
3588 process_constraint (new_constraint (*c, tmpc));
3592 else
3593 c2->offset = rhsoffset;
3594 process_constraint (new_constraint (*c, *c2));
3597 VEC_free (ce_s, heap, temp);
3599 return true;
3602 /* For non-IPA mode, generate constraints necessary for a call on the
3603 RHS. */
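/* E.g. for a call whose body we do not analyze (editor's illustration):

     extern void foo (int **);
     foo (q);

   the pointer argument q receives the constraint *q = &ANYTHING, since
   the callee may store any pointer through it.  */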
3605 static void
3606 handle_rhs_call (tree rhs)
3608 tree arg;
3609 call_expr_arg_iterator iter;
3610 struct constraint_expr rhsc;
3612 rhsc.var = anything_id;
3613 rhsc.offset = 0;
3614 rhsc.type = ADDRESSOF;
3616 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhs)
3618 VEC(ce_s, heap) *lhsc = NULL;
3620 /* Find those pointers being passed, and make sure they end up
3621 pointing to anything. */
3622 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3624 unsigned int j;
3625 struct constraint_expr *lhsp;
3627 get_constraint_for (arg, &lhsc);
3628 do_deref (&lhsc);
3629 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3630 process_constraint_1 (new_constraint (*lhsp, rhsc), true);
3631 VEC_free (ce_s, heap, lhsc);
3636 /* For non-IPA mode, generate constraints necessary for a call
3637 that returns a pointer and assigns it to LHS. This simply makes
3638 the LHS point to anything. */
3640 static void
3641 handle_lhs_call (tree lhs)
3643 VEC(ce_s, heap) *lhsc = NULL;
3644 struct constraint_expr rhsc;
3645 unsigned int j;
3646 struct constraint_expr *lhsp;
3648 rhsc.var = anything_id;
3649 rhsc.offset = 0;
3650 rhsc.type = ADDRESSOF;
3651 get_constraint_for (lhs, &lhsc);
3652 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3653 process_constraint_1 (new_constraint (*lhsp, rhsc), true);
3654 VEC_free (ce_s, heap, lhsc);
3657 /* Walk statement T setting up aliasing constraints according to the
3658 references found in T. This function is the main part of the
3659 constraint builder. AI points to auxiliary alias information used
3660 when building alias sets and computing alias grouping heuristics. */
3662 static void
3663 find_func_aliases (tree origt)
3665 tree t = origt;
3666 VEC(ce_s, heap) *lhsc = NULL;
3667 VEC(ce_s, heap) *rhsc = NULL;
3668 struct constraint_expr *c;
3670 if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
3671 t = TREE_OPERAND (t, 0);
3673 /* Now build constraints expressions. */
3674 if (TREE_CODE (t) == PHI_NODE)
3676 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))));
3678 /* Only care about pointers and structures containing
3679 pointers. */
3680 if (could_have_pointers (PHI_RESULT (t)))
3682 int i;
3683 unsigned int j;
3685 /* For a phi node, assign all the arguments to
3686 the result. */
3687 get_constraint_for (PHI_RESULT (t), &lhsc);
3688 for (i = 0; i < PHI_NUM_ARGS (t); i++)
3690 tree rhstype;
3691 tree strippedrhs = PHI_ARG_DEF (t, i);
3693 STRIP_NOPS (strippedrhs);
3694 rhstype = TREE_TYPE (strippedrhs);
3695 get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);
3697 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3699 struct constraint_expr *c2;
3700 while (VEC_length (ce_s, rhsc) > 0)
3702 c2 = VEC_last (ce_s, rhsc);
3703 process_constraint (new_constraint (*c, *c2));
3704 VEC_pop (ce_s, rhsc);
3710 /* In IPA mode, we need to generate constraints to pass call
3711 arguments through their calls. There are two cases, either a
3712 GIMPLE_MODIFY_STMT when we are returning a value, or just a plain
3713 CALL_EXPR when we are not.
3715 In non-ipa mode, we need to generate constraints for each
3716 pointer passed by address. */
3717 else if (((TREE_CODE (t) == GIMPLE_MODIFY_STMT
3718 && TREE_CODE (GIMPLE_STMT_OPERAND (t, 1)) == CALL_EXPR
3719 && !(call_expr_flags (GIMPLE_STMT_OPERAND (t, 1))
3720 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
3721 || (TREE_CODE (t) == CALL_EXPR
3722 && !(call_expr_flags (t)
3723 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))))
3725 if (!in_ipa_mode)
3727 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3729 handle_rhs_call (GIMPLE_STMT_OPERAND (t, 1));
3730 if (POINTER_TYPE_P (TREE_TYPE (GIMPLE_STMT_OPERAND (t, 1))))
3731 handle_lhs_call (GIMPLE_STMT_OPERAND (t, 0));
3733 else
3734 handle_rhs_call (t);
3736 else
3738 tree lhsop;
3739 tree rhsop;
3740 tree arg;
3741 call_expr_arg_iterator iter;
3742 varinfo_t fi;
3743 int i = 1;
3744 tree decl;
3745 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3747 lhsop = GIMPLE_STMT_OPERAND (t, 0);
3748 rhsop = GIMPLE_STMT_OPERAND (t, 1);
3750 else
3752 lhsop = NULL;
3753 rhsop = t;
3755 decl = get_callee_fndecl (rhsop);
3757 /* If we can directly resolve the function being called, do so.
3758 Otherwise, it must be some sort of indirect expression that
3759 we should still be able to handle. */
3760 if (decl)
3762 fi = get_vi_for_tree (decl);
3764 else
3766 decl = CALL_EXPR_FN (rhsop);
3767 fi = get_vi_for_tree (decl);
3770 /* Assign all the passed arguments to the appropriate incoming
3771 parameters of the function. */
3773 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhsop)
3775 struct constraint_expr lhs ;
3776 struct constraint_expr *rhsp;
3778 get_constraint_for (arg, &rhsc);
3779 if (TREE_CODE (decl) != FUNCTION_DECL)
3781 lhs.type = DEREF;
3782 lhs.var = fi->id;
3783 lhs.offset = i;
3785 else
3787 lhs.type = SCALAR;
3788 lhs.var = first_vi_for_offset (fi, i)->id;
3789 lhs.offset = 0;
3791 while (VEC_length (ce_s, rhsc) != 0)
3793 rhsp = VEC_last (ce_s, rhsc);
3794 process_constraint (new_constraint (lhs, *rhsp));
3795 VEC_pop (ce_s, rhsc);
3797 i++;
3800 /* If we are returning a value, assign it to the result. */
3801 if (lhsop)
3803 struct constraint_expr rhs;
3804 struct constraint_expr *lhsp;
3805 unsigned int j = 0;
3807 get_constraint_for (lhsop, &lhsc);
3808 if (TREE_CODE (decl) != FUNCTION_DECL)
3810 rhs.type = DEREF;
3811 rhs.var = fi->id;
3812 rhs.offset = i;
3814 else
3816 rhs.type = SCALAR;
3817 rhs.var = first_vi_for_offset (fi, i)->id;
3818 rhs.offset = 0;
3820 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3821 process_constraint (new_constraint (*lhsp, rhs));
3825 /* Otherwise, just a regular assignment statement. */
3826 else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3828 tree lhsop = GIMPLE_STMT_OPERAND (t, 0);
3829 tree rhsop = GIMPLE_STMT_OPERAND (t, 1);
3830 int i;
3832 if ((AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
3833 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE)
3834 && (AGGREGATE_TYPE_P (TREE_TYPE (rhsop))
3835 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE))
3837 do_structure_copy (lhsop, rhsop);
3839 else
3841 /* Only care about operations with pointers, structures
3842 containing pointers, dereferences, and call expressions. */
3843 if (could_have_pointers (lhsop)
3844 || TREE_CODE (rhsop) == CALL_EXPR)
3846 get_constraint_for (lhsop, &lhsc);
3847 switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
3849 /* RHS that consist of unary operations,
3850 exceptional types, or bare decls/constants, get
3851 handled directly by get_constraint_for. */
3852 case tcc_reference:
3853 case tcc_declaration:
3854 case tcc_constant:
3855 case tcc_exceptional:
3856 case tcc_expression:
3857 case tcc_vl_exp:
3858 case tcc_unary:
3860 unsigned int j;
3862 get_constraint_for (rhsop, &rhsc);
3863 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3865 struct constraint_expr *c2;
3866 unsigned int k;
3868 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3869 process_constraint (new_constraint (*c, *c2));
3873 break;
3875 case tcc_binary:
3877 /* For pointer arithmetic of the form
3878 PTR + CST, we can simply use PTR's
3879 constraint because pointer arithmetic is
3880 not allowed to go out of bounds. */
3881 if (handle_ptr_arith (lhsc, rhsop))
3882 break;
3884 /* FALLTHRU */
3886 /* Otherwise, walk each operand. Notice that we
3887 can't use the operand interface because we need
3888 to process expressions other than simple operands
3889 (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR). */
3890 default:
3891 for (i = 0; i < TREE_OPERAND_LENGTH (rhsop); i++)
3893 tree op = TREE_OPERAND (rhsop, i);
3894 unsigned int j;
3896 gcc_assert (VEC_length (ce_s, rhsc) == 0);
3897 get_constraint_for (op, &rhsc);
3898 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3900 struct constraint_expr *c2;
3901 while (VEC_length (ce_s, rhsc) > 0)
3903 c2 = VEC_last (ce_s, rhsc);
3904 process_constraint (new_constraint (*c, *c2));
3905 VEC_pop (ce_s, rhsc);
3913 else if (TREE_CODE (t) == CHANGE_DYNAMIC_TYPE_EXPR)
3915 unsigned int j;
3917 get_constraint_for (CHANGE_DYNAMIC_TYPE_LOCATION (t), &lhsc);
3918 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
3919 get_varinfo (c->var)->no_tbaa_pruning = true;
3922 /* After promoting variables and computing aliasing we will
3923 need to re-scan most statements. FIXME: Try to minimize the
3924 number of statements re-scanned. It's not really necessary to
3925 re-scan *all* statements. */
3926 mark_stmt_modified (origt);
3927 VEC_free (ce_s, heap, rhsc);
3928 VEC_free (ce_s, heap, lhsc);
3932 /* Find the first varinfo in the same variable as START that overlaps with
3933 OFFSET.
3934 Effectively, walk the chain of fields for the variable START to find the
3935 first field that overlaps with OFFSET.
3936 Return NULL if we can't find one. */
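/* For example (hypothetical field layout, offsets and sizes in bits):

     s.a covers [0, 32),  s.b covers [32, 64),  s.c covers [64, 96)

   first_vi_for_offset (vi-of-s, 40) returns the varinfo for s.b, since
   40 lies within [32, 64).  */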
3938 static varinfo_t
3939 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3941 varinfo_t curr = start;
3942 while (curr)
3944 /* We may not find a variable in the field list with the actual
3945 offset when we have glommed a structure to a variable.
3946 In that case, however, offset should still be within the size
3947 of the variable. */
3948 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3949 return curr;
3950 curr = curr->next;
3952 return NULL;
3956 /* Insert the varinfo FIELD into the field list for BASE, at the front
3957 of the list. */
3959 static void
3960 insert_into_field_list (varinfo_t base, varinfo_t field)
3962 varinfo_t prev = base;
3963 varinfo_t curr = base->next;
3965 field->next = curr;
3966 prev->next = field;
3969 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3970 offset. */
3972 static void
3973 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3975 varinfo_t prev = base;
3976 varinfo_t curr = base->next;
3978 if (curr == NULL)
3980 prev->next = field;
3981 field->next = NULL;
3983 else
3985 while (curr)
3987 if (field->offset <= curr->offset)
3988 break;
3989 prev = curr;
3990 curr = curr->next;
3992 field->next = prev->next;
3993 prev->next = field;
3997 /* qsort comparison function for two fieldoff's PA and PB */
3999 static int
4000 fieldoff_compare (const void *pa, const void *pb)
4002 const fieldoff_s *foa = (const fieldoff_s *)pa;
4003 const fieldoff_s *fob = (const fieldoff_s *)pb;
4004 HOST_WIDE_INT foasize, fobsize;
4006 if (foa->offset != fob->offset)
4007 return foa->offset - fob->offset;
4009 foasize = TREE_INT_CST_LOW (foa->size);
4010 fobsize = TREE_INT_CST_LOW (fob->size);
4011 return foasize - fobsize;
4014 /* Sort a fieldstack according to the field offset and sizes. */
4015 void
4016 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
4018 qsort (VEC_address (fieldoff_s, fieldstack),
4019 VEC_length (fieldoff_s, fieldstack),
4020 sizeof (fieldoff_s),
4021 fieldoff_compare);
4024 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
4025 the fields of TYPE onto fieldstack, recording their offsets along
4026 the way.
4028 OFFSET is used to keep track of the offset in this entire
4029 structure, rather than just the immediately containing structure.
4030 Returns the number of fields pushed.
4032 HAS_UNION is set to true if we find a union type as a field of
4033 TYPE.
4035 ADDRESSABLE_TYPE is the type of the outermost object that could
4036 have its address taken. */
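/* Sketch of the result for a nested aggregate (editor's illustration,
   assuming 32-bit int; offsets are in bits):

     struct inner { int b; int c; };
     struct outer { int a; struct inner in; };

   pushes three fieldoff entries: a at offset 0, in.b at 32 and in.c at
   64, each with its DECL_SIZE; a union field additionally sets
   *HAS_UNION.  */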
4038 static int
4039 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4040 HOST_WIDE_INT offset, bool *has_union,
4041 tree addressable_type)
4043 tree field;
4044 int count = 0;
4045 unsigned int first_element = VEC_length (fieldoff_s, *fieldstack);
4047 /* If the vector of fields is growing too big, bail out early.
4048 Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, so
4049 make sure that check fails. */
4050 if (first_element > MAX_FIELDS_FOR_FIELD_SENSITIVE)
4051 return 0;
4053 if (TREE_CODE (type) == COMPLEX_TYPE)
4055 fieldoff_s *real_part, *img_part;
4056 real_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4057 real_part->type = TREE_TYPE (type);
4058 real_part->size = TYPE_SIZE (TREE_TYPE (type));
4059 real_part->offset = offset;
4060 real_part->decl = NULL_TREE;
4061 real_part->alias_set = -1;
4062 real_part->base_for_components = false;
4064 img_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4065 img_part->type = TREE_TYPE (type);
4066 img_part->size = TYPE_SIZE (TREE_TYPE (type));
4067 img_part->offset = offset + TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (type)));
4068 img_part->decl = NULL_TREE;
4069 img_part->alias_set = -1;
4070 img_part->base_for_components = false;
4072 count = 2;
4075 else if (TREE_CODE (type) == ARRAY_TYPE)
4077 tree sz = TYPE_SIZE (type);
4078 tree elsz = TYPE_SIZE (TREE_TYPE (type));
4079 HOST_WIDE_INT nr;
4080 int i;
4082 if (! sz
4083 || ! host_integerp (sz, 1)
4084 || TREE_INT_CST_LOW (sz) == 0
4085 || ! elsz
4086 || ! host_integerp (elsz, 1)
4087 || TREE_INT_CST_LOW (elsz) == 0)
4088 return 0;
4090 nr = TREE_INT_CST_LOW (sz) / TREE_INT_CST_LOW (elsz);
4091 if (nr > SALIAS_MAX_ARRAY_ELEMENTS)
4092 return 0;
4094 for (i = 0; i < nr; ++i)
4096 bool push = false;
4097 int pushed = 0;
4099 if (has_union
4100 && (TREE_CODE (TREE_TYPE (type)) == QUAL_UNION_TYPE
4101 || TREE_CODE (TREE_TYPE (type)) == UNION_TYPE))
4102 *has_union = true;
4104 if (!AGGREGATE_TYPE_P (TREE_TYPE (type))) /* var_can_have_subvars */
4105 push = true;
4106 else if (!(pushed = push_fields_onto_fieldstack
4107 (TREE_TYPE (type),
4108 fieldstack,
4109 offset + i * TREE_INT_CST_LOW (elsz),
4110 has_union,
4111 (TYPE_NONALIASED_COMPONENT (type)
4112 ? addressable_type
4113 : TREE_TYPE (type)))))
4114 /* Empty structures may have actual size, like in C++. So
4115 if we didn't push any subfields and the size is
4116 nonzero, push the field itself onto the stack. */
4117 push = true;
4119 if (push)
4121 fieldoff_s *pair;
4123 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4124 pair->type = TREE_TYPE (type);
4125 pair->size = elsz;
4126 pair->decl = NULL_TREE;
4127 pair->offset = offset + i * TREE_INT_CST_LOW (elsz);
4128 if (TYPE_NONALIASED_COMPONENT (type))
4129 pair->alias_set = get_alias_set (addressable_type);
4130 else
4131 pair->alias_set = -1;
4132 pair->base_for_components = false;
4133 count++;
4135 else
4136 count += pushed;
4140 else
4142 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4143 if (TREE_CODE (field) == FIELD_DECL)
4145 bool push = false;
4146 int pushed = 0;
4148 if (has_union
4149 && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4150 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
4151 *has_union = true;
4153 if (!var_can_have_subvars (field))
4154 push = true;
4155 else if (!(pushed = push_fields_onto_fieldstack
4156 (TREE_TYPE (field),
4157 fieldstack,
4158 offset + bitpos_of_field (field),
4159 has_union,
4160 (DECL_NONADDRESSABLE_P (field)
4161 ? addressable_type
4162 : TREE_TYPE (field))))
4163 && ((DECL_SIZE (field)
4164 && !integer_zerop (DECL_SIZE (field)))
4165 || (!DECL_SIZE (field)
4166 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)))
4167 /* Empty structures may have actual size, like in C++. So
4168 if we didn't push any subfields and the size is
4169 nonzero, push the field itself onto the stack. Trailing flexible
4170 array members also need a representative so that taking their
4171 address can be handled in PTA. */
4172 push = true;
4174 if (push)
4176 fieldoff_s *pair;
4178 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4179 pair->type = TREE_TYPE (field);
4180 pair->size = DECL_SIZE (field);
4181 pair->decl = field;
4182 pair->offset = offset + bitpos_of_field (field);
4183 if (DECL_NONADDRESSABLE_P (field))
4184 pair->alias_set = get_alias_set (addressable_type);
4185 else
4186 pair->alias_set = -1;
4187 pair->base_for_components = false;
4188 count++;
4190 else
4191 count += pushed;
4195 /* Make sure the first pushed field is marked as eligible for
4196 being a base for component references. */
4197 if (count > 0)
4198 VEC_index (fieldoff_s, *fieldstack, first_element)->base_for_components = true;
4200 return count;
4203 /* Create a constraint from the ANYTHING variable to VI. */
4204 static void
4205 make_constraint_from_anything (varinfo_t vi)
4207 struct constraint_expr lhs, rhs;
4209 lhs.var = vi->id;
4210 lhs.offset = 0;
4211 lhs.type = SCALAR;
4213 rhs.var = anything_id;
4214 rhs.offset = 0;
4215 rhs.type = ADDRESSOF;
4216 process_constraint (new_constraint (lhs, rhs));
4219 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4220 if it is a varargs function. */
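/* For illustration (hypothetical prototypes):

     int f (int a, char *b);   returns 2, *IS_VARARGS untouched
     int g (int a, ...);       returns 1, *IS_VARARGS set to true
     int h ();                 returns 0, *IS_VARARGS set to true

   The walk stops at the trailing void_type_node that terminates a
   prototyped, non-variadic argument list; if no such terminator is
   found, the function is treated as varargs.  */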
4222 static unsigned int
4223 count_num_arguments (tree decl, bool *is_varargs)
4225 unsigned int i = 0;
4226 tree t;
4228 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
4229 t;
4230 t = TREE_CHAIN (t))
4232 if (TREE_VALUE (t) == void_type_node)
4233 break;
4234 i++;
4237 if (!t)
4238 *is_varargs = true;
4239 return i;
4242 /* Create the variable info node for the function DECL, using NAME, and
4243 return the index of the variable we've created for it. */
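/* A sketch of the result (the function below is illustrative): for

     int *foo (int *p, float *q)

   in IPA mode this creates the varinfo "foo" at offset 0, the
   argument varinfos "foo.arg0" and "foo.arg1" at offsets 1 and 2,
   and "foo.result" at offset 3, so that a constraint with offset K
   can address the K-th slot of the function.  For a varargs function
   the size is unknown and no per-argument varinfos are created.  */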
4245 static unsigned int
4246 create_function_info_for (tree decl, const char *name)
4248 unsigned int index = VEC_length (varinfo_t, varmap);
4249 varinfo_t vi;
4250 tree arg;
4251 unsigned int i;
4252 bool is_varargs = false;
4254 /* Create the variable info. */
4256 vi = new_var_info (decl, index, name);
4257 vi->decl = decl;
4258 vi->offset = 0;
4259 vi->has_union = 0;
4260 vi->size = 1;
4261 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
4262 insert_vi_for_tree (vi->decl, vi);
4263 VEC_safe_push (varinfo_t, heap, varmap, vi);
4265 stats.total_vars++;
4267 /* If it's varargs, we don't know how many arguments it has, so we
4268 can't do much. */
4270 if (is_varargs)
4272 vi->fullsize = ~0;
4273 vi->size = ~0;
4274 vi->is_unknown_size_var = true;
4275 return index;
4279 arg = DECL_ARGUMENTS (decl);
4281 /* Set up variables for each argument. */
4282 for (i = 1; i < vi->fullsize; i++)
4284 varinfo_t argvi;
4285 const char *newname;
4286 char *tempname;
4287 unsigned int newindex;
4288 tree argdecl = decl;
4290 if (arg)
4291 argdecl = arg;
4293 newindex = VEC_length (varinfo_t, varmap);
4294 asprintf (&tempname, "%s.arg%d", name, i-1);
4295 newname = ggc_strdup (tempname);
4296 free (tempname);
4298 argvi = new_var_info (argdecl, newindex, newname);
4299 argvi->decl = argdecl;
4300 VEC_safe_push (varinfo_t, heap, varmap, argvi);
4301 argvi->offset = i;
4302 argvi->size = 1;
4303 argvi->fullsize = vi->fullsize;
4304 argvi->has_union = false;
4305 insert_into_field_list_sorted (vi, argvi);
4306 stats.total_vars ++;
4307 if (arg)
4309 insert_vi_for_tree (arg, argvi);
4310 arg = TREE_CHAIN (arg);
4314 /* Create a variable for the return var. */
4315 if (DECL_RESULT (decl) != NULL
4316 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
4318 varinfo_t resultvi;
4319 const char *newname;
4320 char *tempname;
4321 unsigned int newindex;
4322 tree resultdecl = decl;
4324 vi->fullsize ++;
4326 if (DECL_RESULT (decl))
4327 resultdecl = DECL_RESULT (decl);
4329 newindex = VEC_length (varinfo_t, varmap);
4330 asprintf (&tempname, "%s.result", name);
4331 newname = ggc_strdup (tempname);
4332 free (tempname);
4334 resultvi = new_var_info (resultdecl, newindex, newname);
4335 resultvi->decl = resultdecl;
4336 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
4337 resultvi->offset = i;
4338 resultvi->size = 1;
4339 resultvi->fullsize = vi->fullsize;
4340 resultvi->has_union = false;
4341 insert_into_field_list_sorted (vi, resultvi);
4342 stats.total_vars ++;
4343 if (DECL_RESULT (decl))
4344 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
4346 return index;
4350 /* Return true if FIELDSTACK contains fields that overlap.
4351 FIELDSTACK is assumed to be sorted by offset. */
4353 static bool
4354 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
4356 fieldoff_s *fo = NULL;
4357 unsigned int i;
4358 HOST_WIDE_INT lastoffset = -1;
4360 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4362 if (fo->offset == lastoffset)
4363 return true;
4364 lastoffset = fo->offset;
4366 return false;
4369 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
4370 This will also create any varinfo structures necessary for fields
4371 of DECL. */
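/* Roughly, for a global declared as (illustrative, assuming 32-bit
   int)

     struct pair { int a; int b; } s;

   this creates the varinfo "s", narrows it to the first field
   (offset 0, size 32), links a second varinfo for the field at
   offset 32 into its field list, and, because s is global and we are
   not in whole-program IPA mode, makes each of these varinfos point
   to ANYTHING.  Variables with unions, unknown sizes, or too many
   fields are handled field-insensitively instead.  */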
4373 static unsigned int
4374 create_variable_info_for (tree decl, const char *name)
4376 unsigned int index = VEC_length (varinfo_t, varmap);
4377 varinfo_t vi;
4378 tree decltype = TREE_TYPE (decl);
4379 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
4380 bool notokay = false;
4381 bool hasunion;
4382 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
4383 VEC (fieldoff_s,heap) *fieldstack = NULL;
4385 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
4386 return create_function_info_for (decl, name);
4388 hasunion = TREE_CODE (decltype) == UNION_TYPE
4389 || TREE_CODE (decltype) == QUAL_UNION_TYPE;
4390 if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
4392 push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion,
4393 decltype);
4394 if (hasunion)
4396 VEC_free (fieldoff_s, heap, fieldstack);
4397 notokay = true;
4402 /* If the variable doesn't have subvars, we may end up needing to
4403 sort the field list and create fake variables for all the
4404 fields. */
4405 vi = new_var_info (decl, index, name);
4406 vi->decl = decl;
4407 vi->offset = 0;
4408 vi->has_union = hasunion;
4409 if (!declsize
4410 || TREE_CODE (declsize) != INTEGER_CST
4411 || TREE_CODE (decltype) == UNION_TYPE
4412 || TREE_CODE (decltype) == QUAL_UNION_TYPE)
4414 vi->is_unknown_size_var = true;
4415 vi->fullsize = ~0;
4416 vi->size = ~0;
4418 else
4420 vi->fullsize = TREE_INT_CST_LOW (declsize);
4421 vi->size = vi->fullsize;
4424 insert_vi_for_tree (vi->decl, vi);
4425 VEC_safe_push (varinfo_t, heap, varmap, vi);
4426 if (is_global && (!flag_whole_program || !in_ipa_mode))
4427 make_constraint_from_anything (vi);
4429 stats.total_vars++;
4430 if (use_field_sensitive
4431 && !notokay
4432 && !vi->is_unknown_size_var
4433 && var_can_have_subvars (decl)
4434 && VEC_length (fieldoff_s, fieldstack) > 1
4435 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
4437 unsigned int newindex = VEC_length (varinfo_t, varmap);
4438 fieldoff_s *fo = NULL;
4439 unsigned int i;
4441 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4443 if (! fo->size
4444 || TREE_CODE (fo->size) != INTEGER_CST
4445 || fo->offset < 0)
4447 notokay = true;
4448 break;
4452 /* We can't sort them if we have a field with a variable sized type,
4453 which will make notokay = true. In that case, we are going to return
4454 without creating varinfos for the fields anyway, so sorting them is a
4455 waste to boot. */
4456 if (!notokay)
4458 sort_fieldstack (fieldstack);
4459 /* Due to some C++ FE issues, like PR 22488, we might end up with
4460 what appear to be overlapping fields even though they,
4461 in reality, do not overlap. Until the C++ FE is fixed,
4462 we will simply disable field-sensitivity for these cases. */
4463 notokay = check_for_overlaps (fieldstack);
4467 if (VEC_length (fieldoff_s, fieldstack) != 0)
4468 fo = VEC_index (fieldoff_s, fieldstack, 0);
4470 if (fo == NULL || notokay)
4472 vi->is_unknown_size_var = 1;
4473 vi->fullsize = ~0;
4474 vi->size = ~0;
4475 VEC_free (fieldoff_s, heap, fieldstack);
4476 return index;
4479 vi->size = TREE_INT_CST_LOW (fo->size);
4480 vi->offset = fo->offset;
4481 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
4482 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
4483 i--)
4485 varinfo_t newvi;
4486 const char *newname = "NULL";
4487 char *tempname;
4489 newindex = VEC_length (varinfo_t, varmap);
4490 if (dump_file)
4492 if (fo->decl)
4493 asprintf (&tempname, "%s.%s",
4494 vi->name, alias_get_name (fo->decl));
4495 else
4496 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC,
4497 vi->name, fo->offset);
4498 newname = ggc_strdup (tempname);
4499 free (tempname);
4501 newvi = new_var_info (decl, newindex, newname);
4502 newvi->offset = fo->offset;
4503 newvi->size = TREE_INT_CST_LOW (fo->size);
4504 newvi->fullsize = vi->fullsize;
4505 insert_into_field_list (vi, newvi);
4506 VEC_safe_push (varinfo_t, heap, varmap, newvi);
4507 if (is_global && (!flag_whole_program || !in_ipa_mode))
4508 make_constraint_from_anything (newvi);
4510 stats.total_vars++;
4514 VEC_free (fieldoff_s, heap, fieldstack);
4516 return index;
4519 /* Print out the points-to solution for VAR to FILE. */
4521 void
4522 dump_solution_for_var (FILE *file, unsigned int var)
4524 varinfo_t vi = get_varinfo (var);
4525 unsigned int i;
4526 bitmap_iterator bi;
4528 if (find (var) != var)
4530 varinfo_t vipt = get_varinfo (find (var));
4531 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4533 else
4535 fprintf (file, "%s = { ", vi->name);
4536 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4538 fprintf (file, "%s ", get_varinfo (i)->name);
4540 fprintf (file, "}");
4541 if (vi->no_tbaa_pruning)
4542 fprintf (file, " no-tbaa-pruning");
4543 fprintf (file, "\n");
4547 /* Print the points-to solution for VAR to stdout. */
4549 void
4550 debug_solution_for_var (unsigned int var)
4552 dump_solution_for_var (stdout, var);
4555 /* Create varinfo structures for all of the variables in the
4556 function for intraprocedural mode. */
4558 static void
4559 intra_create_variable_infos (void)
4561 tree t;
4562 struct constraint_expr lhs, rhs;
4564 /* For each incoming pointer argument ARG, create the constraint ARG
4565 = ANYTHING, or point ARG at a dummy PARM_NOALIAS variable if flag_argument_noalias is set. */
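/* Schematically, for a function taking "int *p" this emits the
   constraint p = &ANYTHING, so p may point anywhere on entry.  With
   -fargument-noalias in effect it instead creates an artificial
   PARM_NOALIAS heap variable and emits p = &PARM_NOALIAS, which
   keeps distinct pointer arguments from being treated as aliases of
   each other.  */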
4566 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
4568 varinfo_t p;
4570 if (!could_have_pointers (t))
4571 continue;
4573 /* If flag_argument_noalias is set, then function pointer
4574 arguments are guaranteed not to point to each other. In that
4575 case, create an artificial variable PARM_NOALIAS and the
4576 constraint ARG = &PARM_NOALIAS. */
4577 if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
4579 varinfo_t vi;
4580 tree heapvar = heapvar_lookup (t);
4582 lhs.offset = 0;
4583 lhs.type = SCALAR;
4584 lhs.var = get_vi_for_tree (t)->id;
4586 if (heapvar == NULL_TREE)
4588 var_ann_t ann;
4589 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
4590 "PARM_NOALIAS");
4591 DECL_EXTERNAL (heapvar) = 1;
4592 if (gimple_referenced_vars (cfun))
4593 add_referenced_var (heapvar);
4595 heapvar_insert (t, heapvar);
4597 ann = get_var_ann (heapvar);
4598 if (flag_argument_noalias == 1)
4599 ann->noalias_state = NO_ALIAS;
4600 else if (flag_argument_noalias == 2)
4601 ann->noalias_state = NO_ALIAS_GLOBAL;
4602 else if (flag_argument_noalias == 3)
4603 ann->noalias_state = NO_ALIAS_ANYTHING;
4604 else
4605 gcc_unreachable ();
4608 vi = get_vi_for_tree (heapvar);
4609 vi->is_artificial_var = 1;
4610 vi->is_heap_var = 1;
4611 rhs.var = vi->id;
4612 rhs.type = ADDRESSOF;
4613 rhs.offset = 0;
4614 for (p = get_varinfo (lhs.var); p; p = p->next)
4616 struct constraint_expr temp = lhs;
4617 temp.var = p->id;
4618 process_constraint (new_constraint (temp, rhs));
4621 else
4623 varinfo_t arg_vi = get_vi_for_tree (t);
4625 for (p = arg_vi; p; p = p->next)
4626 make_constraint_from_anything (p);
4631 /* Structure used to put solution bitmaps in a hashtable so they can
4632 be shared among variables with the same points-to set. */
4634 typedef struct shared_bitmap_info
4636 bitmap pt_vars;
4637 hashval_t hashcode;
4638 } *shared_bitmap_info_t;
4639 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
4641 static htab_t shared_bitmap_table;
4643 /* Hash function for a shared_bitmap_info_t */
4645 static hashval_t
4646 shared_bitmap_hash (const void *p)
4648 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
4649 return bi->hashcode;
4652 /* Equality function for two shared_bitmap_info_t's. */
4654 static int
4655 shared_bitmap_eq (const void *p1, const void *p2)
4657 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
4658 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
4659 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
4662 /* Look up a bitmap in the shared bitmap hashtable, and return an already
4663 existing instance if there is one, NULL otherwise. */
4665 static bitmap
4666 shared_bitmap_lookup (bitmap pt_vars)
4668 void **slot;
4669 struct shared_bitmap_info sbi;
4671 sbi.pt_vars = pt_vars;
4672 sbi.hashcode = bitmap_hash (pt_vars);
4674 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
4675 sbi.hashcode, NO_INSERT);
4676 if (!slot)
4677 return NULL;
4678 else
4679 return ((shared_bitmap_info_t) *slot)->pt_vars;
4683 /* Add a bitmap to the shared bitmap hashtable. */
4685 static void
4686 shared_bitmap_add (bitmap pt_vars)
4688 void **slot;
4689 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
4691 sbi->pt_vars = pt_vars;
4692 sbi->hashcode = bitmap_hash (pt_vars);
4694 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
4695 sbi->hashcode, INSERT);
4696 gcc_assert (!*slot);
4697 *slot = (void *) sbi;
4701 /* Set bits in INTO corresponding to the variable uids in solution set
4702 FROM, which came from variable PTR.
4703 For variables that are actually dereferenced, we also use type
4704 based alias analysis to prune the points-to sets.
4705 IS_DEREFED is true if PTR was directly dereferenced, which we use to
4706 help determine whether we are allowed to prune using TBAA.
4707 If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
4708 the FROM set. */
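/* A small example of the pruning (the types are illustrative): if
   PTR has type "int *" and was dereferenced, a variable of type
   "float" in FROM has an alias set that does not conflict with
   int's, so its bit is left out of INTO under strict aliasing.  The
   bit is kept when NO_TBAA_PRUNING is set, when neither PTR nor the
   variable was ever directly dereferenced, or when the alias sets do
   conflict.  */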
4710 static void
4711 set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
4712 bool no_tbaa_pruning)
4714 unsigned int i;
4715 bitmap_iterator bi;
4716 alias_set_type ptr_alias_set;
4718 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
4719 ptr_alias_set = get_alias_set (TREE_TYPE (TREE_TYPE (ptr)));
4721 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4723 varinfo_t vi = get_varinfo (i);
4724 alias_set_type var_alias_set;
4726 /* The only artificial variables that are allowed in a may-alias
4727 set are heap variables. */
4728 if (vi->is_artificial_var && !vi->is_heap_var)
4729 continue;
4731 if (vi->has_union && get_subvars_for_var (vi->decl) != NULL)
4733 unsigned int i;
4734 tree subvar;
4735 subvar_t sv = get_subvars_for_var (vi->decl);
4737 /* Variables containing unions may need to be converted to
4738 their SFT's, because SFT's can have unions and we cannot. */
4739 for (i = 0; VEC_iterate (tree, sv, i, subvar); ++i)
4740 bitmap_set_bit (into, DECL_UID (subvar));
4742 else if (TREE_CODE (vi->decl) == VAR_DECL
4743 || TREE_CODE (vi->decl) == PARM_DECL
4744 || TREE_CODE (vi->decl) == RESULT_DECL)
4746 subvar_t sv;
4747 if (var_can_have_subvars (vi->decl)
4748 && (sv = get_subvars_for_var (vi->decl)))
4750 /* If VI->DECL is an aggregate for which we created
4751 SFTs, add the SFT corresponding to VI->OFFSET.
4752 If we didn't do field-sensitive PTA we need to
4753 add all overlapping SFTs. */
4754 unsigned int j;
4755 tree sft = get_first_overlapping_subvar (sv, vi->offset,
4756 vi->size, &j);
4757 gcc_assert (sft);
4758 for (; VEC_iterate (tree, sv, j, sft); ++j)
4760 if (SFT_OFFSET (sft) > vi->offset
4761 && vi->size <= SFT_OFFSET (sft) - vi->offset)
4762 break;
4764 var_alias_set = get_alias_set (sft);
4765 if (no_tbaa_pruning
4766 || (!is_derefed && !vi->directly_dereferenced)
4767 || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
4769 bitmap_set_bit (into, DECL_UID (sft));
4771 /* Pointed-to SFTs are needed by the operand scanner
4772 to adjust offsets when adding operands to memory
4773 expressions that dereference PTR. This means
4774 that memory partitioning may not partition
4775 this SFT because the operand scanner will not
4776 be able to find the other SFTs next to this
4777 one. But we only need to do this if the pointed
4778 to type is aggregate. */
4779 if (SFT_BASE_FOR_COMPONENTS_P (sft))
4780 SFT_UNPARTITIONABLE_P (sft) = true;
4784 else
4786 /* Otherwise, just add VI->DECL to the alias set.
4787 Don't type prune artificial vars. */
4788 if (vi->is_artificial_var)
4789 bitmap_set_bit (into, DECL_UID (vi->decl));
4790 else
4792 var_alias_set = get_alias_set (vi->decl);
4793 if (no_tbaa_pruning
4794 || (!is_derefed && !vi->directly_dereferenced)
4795 || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
4796 bitmap_set_bit (into, DECL_UID (vi->decl));
4804 static bool have_alias_info = false;
4806 /* The list of SMT's that are in use by our pointer variables. This
4807 is the set of SMT's for all pointers that can point to anything. */
4808 static bitmap used_smts;
4810 /* Due to the ordering of points-to set calculation and SMT
4811 calculation being a bit co-dependent, we can't just calculate SMT
4812 used info whenever we want; we have to calculate it around the time
4813 that find_what_p_points_to is called. */
4815 /* Mark which SMT's are in use by points-to anything variables. */
4817 void
4818 set_used_smts (void)
4820 int i;
4821 varinfo_t vi;
4822 used_smts = BITMAP_ALLOC (&pta_obstack);
4824 for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); i++)
4826 tree var = vi->decl;
4827 varinfo_t withsolution = get_varinfo (find (i));
4828 tree smt;
4829 var_ann_t va;
4830 struct ptr_info_def *pi = NULL;
4832 /* For parm decls, the pointer info may be under the default
4833 def. */
4834 if (TREE_CODE (vi->decl) == PARM_DECL
4835 && gimple_default_def (cfun, var))
4836 pi = SSA_NAME_PTR_INFO (gimple_default_def (cfun, var));
4837 else if (TREE_CODE (var) == SSA_NAME)
4838 pi = SSA_NAME_PTR_INFO (var);
4840 /* Skip the special variables and those that can't be aliased. */
4841 if (vi->is_special_var
4842 || !SSA_VAR_P (var)
4843 || (pi && !pi->is_dereferenced)
4844 || (TREE_CODE (var) == VAR_DECL && !may_be_aliased (var))
4845 || !POINTER_TYPE_P (TREE_TYPE (var)))
4846 continue;
4848 if (TREE_CODE (var) == SSA_NAME)
4849 var = SSA_NAME_VAR (var);
4851 va = var_ann (var);
4852 if (!va)
4853 continue;
4855 smt = va->symbol_mem_tag;
4856 if (smt && bitmap_bit_p (withsolution->solution, anything_id))
4857 bitmap_set_bit (used_smts, DECL_UID (smt));
4861 /* Merge the necessary SMT's into the bitmap SOLUTION, which is
4862 P's points-to solution. This involves merging all SMT's that are a subset
4863 of the SMT necessary for P. */
4865 static void
4866 merge_smts_into (tree p, bitmap solution)
4868 tree smt;
4869 bitmap aliases;
4870 tree var = p;
4872 if (TREE_CODE (p) == SSA_NAME)
4873 var = SSA_NAME_VAR (p);
4875 smt = var_ann (var)->symbol_mem_tag;
4876 if (smt)
4878 /* The smt itself isn't included in its aliases. */
4879 bitmap_set_bit (solution, DECL_UID (smt));
4881 aliases = MTAG_ALIASES (smt);
4882 if (aliases)
4883 bitmap_ior_into (solution, aliases);
4887 /* Given a pointer variable P, fill in its points-to set, or return
4888 false if we can't.
4889 Rather than return false for variables that point-to anything, we
4890 instead find the corresponding SMT, and merge in its aliases. In
4891 addition to these aliases, we also set the bits for the SMT's
4892 themselves and their subsets, as SMT's are still in use by
4893 non-SSA_NAME's, and pruning may eliminate every one of their
4894 aliases. In such a case, if we did not include the right set of
4895 SMT's in the points-to set of the variable, we'd end up with
4896 statements that do not conflict but should. */
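/* Schematically (the names are illustrative): if the solution for a
   dereferenced pointer p is { ANYTHING, x }, we do not record
   pt_anything.  Instead we merge p's symbol memory tag and its
   aliases into the final bitmap, add x via set_uids_in_ptset, set
   pi->pt_global_mem, and then try to share the resulting bitmap with
   other pointers whose points-to sets came out equal.  */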
4898 bool
4899 find_what_p_points_to (tree p)
4901 tree lookup_p = p;
4902 varinfo_t vi;
4904 if (!have_alias_info)
4905 return false;
4907 /* For parameters, get at the points-to set for the actual parm
4908 decl. */
4909 if (TREE_CODE (p) == SSA_NAME
4910 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4911 && SSA_NAME_IS_DEFAULT_DEF (p))
4912 lookup_p = SSA_NAME_VAR (p);
4914 vi = lookup_vi_for_tree (lookup_p);
4915 if (vi)
4917 if (vi->is_artificial_var)
4918 return false;
4920 /* See if this is a field or a structure. */
4921 if (vi->size != vi->fullsize)
4923 /* Nothing currently asks about structure fields directly,
4924 but when something does, we need code here to hand back the
4925 points-to set. */
4926 if (!var_can_have_subvars (vi->decl)
4927 || get_subvars_for_var (vi->decl) == NULL)
4928 return false;
4930 else
4932 struct ptr_info_def *pi = get_ptr_info (p);
4933 unsigned int i;
4934 bitmap_iterator bi;
4935 bool was_pt_anything = false;
4936 bitmap finished_solution;
4937 bitmap result;
4939 if (!pi->is_dereferenced)
4940 return false;
4942 /* This variable may have been collapsed, let's get the real
4943 variable. */
4944 vi = get_varinfo (find (vi->id));
4946 /* Translate artificial variables into SSA_NAME_PTR_INFO
4947 attributes. */
4948 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4950 varinfo_t vi = get_varinfo (i);
4952 if (vi->is_artificial_var)
4954 /* FIXME. READONLY should be handled better so that
4955 flow insensitive aliasing can disregard writable
4956 aliases. */
4957 if (vi->id == nothing_id)
4958 pi->pt_null = 1;
4959 else if (vi->id == anything_id)
4960 was_pt_anything = 1;
4961 else if (vi->id == readonly_id)
4962 was_pt_anything = 1;
4963 else if (vi->id == integer_id)
4964 was_pt_anything = 1;
4965 else if (vi->is_heap_var)
4966 pi->pt_global_mem = 1;
4970 /* Share the final set of variables when possible. */
4971 finished_solution = BITMAP_GGC_ALLOC ();
4972 stats.points_to_sets_created++;
4974 /* Instead of using pt_anything, we merge in the SMT aliases
4975 for the underlying SMT. In addition, if they could have
4976 pointed to anything, they could point to global memory. */
4977 if (was_pt_anything)
4979 merge_smts_into (p, finished_solution);
4980 pi->pt_global_mem = 1;
4983 set_uids_in_ptset (p, finished_solution, vi->solution,
4984 vi->directly_dereferenced,
4985 vi->no_tbaa_pruning);
4986 result = shared_bitmap_lookup (finished_solution);
4988 if (!result)
4990 shared_bitmap_add (finished_solution);
4991 pi->pt_vars = finished_solution;
4993 else
4995 pi->pt_vars = result;
4996 bitmap_clear (finished_solution);
4999 if (bitmap_empty_p (pi->pt_vars))
5000 pi->pt_vars = NULL;
5002 return true;
5006 return false;
5011 /* Dump points-to information to OUTFILE. */
5013 void
5014 dump_sa_points_to_info (FILE *outfile)
5016 unsigned int i;
5018 fprintf (outfile, "\nPoints-to sets\n\n");
5020 if (dump_flags & TDF_STATS)
5022 fprintf (outfile, "Stats:\n");
5023 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
5024 fprintf (outfile, "Non-pointer vars: %d\n",
5025 stats.nonpointer_vars);
5026 fprintf (outfile, "Statically unified vars: %d\n",
5027 stats.unified_vars_static);
5028 fprintf (outfile, "Dynamically unified vars: %d\n",
5029 stats.unified_vars_dynamic);
5030 fprintf (outfile, "Iterations: %d\n", stats.iterations);
5031 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
5032 fprintf (outfile, "Number of implicit edges: %d\n",
5033 stats.num_implicit_edges);
5036 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
5037 dump_solution_for_var (outfile, i);
5041 /* Debug points-to information to stderr. */
5043 void
5044 debug_sa_points_to_info (void)
5046 dump_sa_points_to_info (stderr);
5050 /* Initialize the always-existing constraint variables for NULL,
5051 ANYTHING, READONLY, and INTEGER. */
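/* Written out, the constraints seeded below are

     ANYTHING = &ANYTHING
     READONLY = &ANYTHING
     INTEGER  = &ANYTHING

   (ADDRESSOF shown as &), while NULL gets no constraint, since it
   points to nothing.  */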
5053 static void
5054 init_base_vars (void)
5056 struct constraint_expr lhs, rhs;
5058 /* Create the NULL variable, used to represent that a variable points
5059 to NULL. */
5060 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
5061 var_nothing = new_var_info (nothing_tree, 0, "NULL");
5062 insert_vi_for_tree (nothing_tree, var_nothing);
5063 var_nothing->is_artificial_var = 1;
5064 var_nothing->offset = 0;
5065 var_nothing->size = ~0;
5066 var_nothing->fullsize = ~0;
5067 var_nothing->is_special_var = 1;
5068 nothing_id = 0;
5069 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
5071 /* Create the ANYTHING variable, used to represent that a variable
5072 points to some unknown piece of memory. */
5073 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
5074 var_anything = new_var_info (anything_tree, 1, "ANYTHING");
5075 insert_vi_for_tree (anything_tree, var_anything);
5076 var_anything->is_artificial_var = 1;
5077 var_anything->size = ~0;
5078 var_anything->offset = 0;
5079 var_anything->next = NULL;
5080 var_anything->fullsize = ~0;
5081 var_anything->is_special_var = 1;
5082 anything_id = 1;
5084 /* Anything points to anything. This makes deref constraints just
5085 work in the presence of linked lists and other p = *p type loops,
5086 by saying that *ANYTHING = ANYTHING. */
5087 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
5088 lhs.type = SCALAR;
5089 lhs.var = anything_id;
5090 lhs.offset = 0;
5091 rhs.type = ADDRESSOF;
5092 rhs.var = anything_id;
5093 rhs.offset = 0;
5095 /* This specifically does not use process_constraint because
5096 process_constraint ignores all anything = anything constraints, since all
5097 but this one are redundant. */
5098 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
5100 /* Create the READONLY variable, used to represent that a variable
5101 points to readonly memory. */
5102 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
5103 var_readonly = new_var_info (readonly_tree, 2, "READONLY");
5104 var_readonly->is_artificial_var = 1;
5105 var_readonly->offset = 0;
5106 var_readonly->size = ~0;
5107 var_readonly->fullsize = ~0;
5108 var_readonly->next = NULL;
5109 var_readonly->is_special_var = 1;
5110 insert_vi_for_tree (readonly_tree, var_readonly);
5111 readonly_id = 2;
5112 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
5114 /* Readonly memory points to anything, in order to make deref
5115 easier. In reality, it points to anything the particular
5116 readonly variable can point to, but we don't track this
5117 separately. */
5118 lhs.type = SCALAR;
5119 lhs.var = readonly_id;
5120 lhs.offset = 0;
5121 rhs.type = ADDRESSOF;
5122 rhs.var = anything_id;
5123 rhs.offset = 0;
5125 process_constraint (new_constraint (lhs, rhs));
5127 /* Create the INTEGER variable, used to represent that a variable points
5128 to an INTEGER. */
5129 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
5130 var_integer = new_var_info (integer_tree, 3, "INTEGER");
5131 insert_vi_for_tree (integer_tree, var_integer);
5132 var_integer->is_artificial_var = 1;
5133 var_integer->size = ~0;
5134 var_integer->fullsize = ~0;
5135 var_integer->offset = 0;
5136 var_integer->next = NULL;
5137 var_integer->is_special_var = 1;
5138 integer_id = 3;
5139 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
5141 /* INTEGER = ANYTHING, because we don't know where a dereference of
5142 a random integer will point to. */
5143 lhs.type = SCALAR;
5144 lhs.var = integer_id;
5145 lhs.offset = 0;
5146 rhs.type = ADDRESSOF;
5147 rhs.var = anything_id;
5148 rhs.offset = 0;
5149 process_constraint (new_constraint (lhs, rhs));
5152 /* Initialize things necessary to perform PTA. */
5154 static void
5155 init_alias_vars (void)
5157 bitmap_obstack_initialize (&pta_obstack);
5158 bitmap_obstack_initialize (&oldpta_obstack);
5159 bitmap_obstack_initialize (&predbitmap_obstack);
5161 constraint_pool = create_alloc_pool ("Constraint pool",
5162 sizeof (struct constraint), 30);
5163 variable_info_pool = create_alloc_pool ("Variable info pool",
5164 sizeof (struct variable_info), 30);
5165 constraints = VEC_alloc (constraint_t, heap, 8);
5166 varmap = VEC_alloc (varinfo_t, heap, 8);
5167 vi_for_tree = pointer_map_create ();
5169 memset (&stats, 0, sizeof (stats));
5170 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
5171 shared_bitmap_eq, free);
5172 init_base_vars ();
5175 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
5176 predecessor edges. */
5178 static void
5179 remove_preds_and_fake_succs (constraint_graph_t graph)
5181 unsigned int i;
5183 /* Clear the implicit ref and address nodes from the successor
5184 lists. */
5185 for (i = 0; i < FIRST_REF_NODE; i++)
5187 if (graph->succs[i])
5188 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
5189 FIRST_REF_NODE * 2);
5192 /* Free the successor list for the non-ref nodes. */
5193 for (i = FIRST_REF_NODE; i < graph->size; i++)
5195 if (graph->succs[i])
5196 BITMAP_FREE (graph->succs[i]);
5199 /* Now reallocate the successor list to its new size, and blow away
5200 the predecessor bitmaps. */
5201 graph->size = VEC_length (varinfo_t, varmap);
5202 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
5204 free (graph->implicit_preds);
5205 graph->implicit_preds = NULL;
5206 free (graph->preds);
5207 graph->preds = NULL;
5208 bitmap_obstack_release (&predbitmap_obstack);
5211 /* Compute the set of variables we can't TBAA prune. */
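/* Propagation sketch (the variables are illustrative): if y is
   marked no_tbaa_pruning and there is a copy constraint x = y or a
   successor edge from y's node to x's, then x becomes
   no_tbaa_pruning as well, and so on transitively until nothing
   changes.  Afterwards the underlying pointer DECLs are flagged
   DECL_NO_TBAA_P and given alias set 0 so the RTL level treats them
   conservatively.  */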
5213 static void
5214 compute_tbaa_pruning (void)
5216 unsigned int size = VEC_length (varinfo_t, varmap);
5217 unsigned int i;
5218 bool any;
5220 changed_count = 0;
5221 changed = sbitmap_alloc (size);
5222 sbitmap_zero (changed);
5224 /* Mark all initial no_tbaa_pruning nodes as changed. */
5225 any = false;
5226 for (i = 0; i < size; ++i)
5228 varinfo_t ivi = get_varinfo (i);
5230 if (find (i) == i && ivi->no_tbaa_pruning)
5232 any = true;
5233 if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
5234 || VEC_length (constraint_t, graph->complex[i]) > 0)
5236 SET_BIT (changed, i);
5237 ++changed_count;
5242 while (changed_count > 0)
5244 struct topo_info *ti = init_topo_info ();
5245 ++stats.iterations;
5247 compute_topo_order (graph, ti);
5249 while (VEC_length (unsigned, ti->topo_order) != 0)
5251 bitmap_iterator bi;
5253 i = VEC_pop (unsigned, ti->topo_order);
5255 /* If this variable is not a representative, skip it. */
5256 if (find (i) != i)
5257 continue;
5259 /* If the node has changed, we need to process the complex
5260 constraints and outgoing edges again. */
5261 if (TEST_BIT (changed, i))
5263 unsigned int j;
5264 constraint_t c;
5265 VEC(constraint_t,heap) *complex = graph->complex[i];
5267 RESET_BIT (changed, i);
5268 --changed_count;
5270 /* Process the complex copy constraints. */
5271 for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
5273 if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
5275 varinfo_t lhsvi = get_varinfo (find (c->lhs.var));
5277 if (!lhsvi->no_tbaa_pruning)
5279 lhsvi->no_tbaa_pruning = true;
5280 if (!TEST_BIT (changed, lhsvi->id))
5282 SET_BIT (changed, lhsvi->id);
5283 ++changed_count;
5289 /* Propagate to all successors. */
5290 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
5292 unsigned int to = find (j);
5293 varinfo_t tovi = get_varinfo (to);
5295 /* Don't propagate to ourselves. */
5296 if (to == i)
5297 continue;
5299 if (!tovi->no_tbaa_pruning)
5301 tovi->no_tbaa_pruning = true;
5302 if (!TEST_BIT (changed, to))
5304 SET_BIT (changed, to);
5305 ++changed_count;
5312 free_topo_info (ti);
5315 sbitmap_free (changed);
5317 if (any)
5319 for (i = 0; i < size; ++i)
5321 varinfo_t ivi = get_varinfo (i);
5322 varinfo_t ivip = get_varinfo (find (i));
5324 if (ivip->no_tbaa_pruning)
5326 tree var = ivi->decl;
5328 if (TREE_CODE (var) == SSA_NAME)
5329 var = SSA_NAME_VAR (var);
5331 if (POINTER_TYPE_P (TREE_TYPE (var)))
5333 DECL_NO_TBAA_P (var) = 1;
5335 /* Tell the RTL layer that this pointer can alias
5336 anything. */
5337 DECL_POINTER_ALIAS_SET (var) = 0;
5344 /* Create points-to sets for the current function. See the comments
5345 at the start of the file for an algorithmic overview. */
5347 void
5348 compute_points_to_sets (struct alias_info *ai)
5350 struct scc_info *si;
5351 basic_block bb;
5353 timevar_push (TV_TREE_PTA);
5355 init_alias_vars ();
5356 init_alias_heapvars ();
5358 intra_create_variable_infos ();
5360 /* Now walk all statements and derive aliases. */
5361 FOR_EACH_BB (bb)
5363 block_stmt_iterator bsi;
5364 tree phi;
5366 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5368 if (is_gimple_reg (PHI_RESULT (phi)))
5370 find_func_aliases (phi);
5372 /* Update various related attributes like escaped
5373 addresses, pointer dereferences for loads and stores.
5374 This is used when creating name tags and alias
5375 sets. */
5376 update_alias_info (phi, ai);
5380 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
5382 tree stmt = bsi_stmt (bsi);
5384 find_func_aliases (stmt);
5386 /* Update various related attributes like escaped
5387 addresses, pointer dereferences for loads and stores.
5388 This is used when creating name tags and alias
5389 sets. */
5390 update_alias_info (stmt, ai);
5392 /* The information in CHANGE_DYNAMIC_TYPE_EXPR nodes has now
5393 been captured, and we can remove them. */
5394 if (TREE_CODE (stmt) == CHANGE_DYNAMIC_TYPE_EXPR)
5395 bsi_remove (&bsi, true);
5396 else
5397 bsi_next (&bsi);
5402 if (dump_file)
5404 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5405 dump_constraints (dump_file);
5408 if (dump_file)
5409 fprintf (dump_file,
5410 "\nCollapsing static cycles and doing variable "
5411 "substitution\n");
5413 init_graph (VEC_length (varinfo_t, varmap) * 2);
5415 if (dump_file)
5416 fprintf (dump_file, "Building predecessor graph\n");
5417 build_pred_graph ();
5419 if (dump_file)
5420 fprintf (dump_file, "Detecting pointer and location "
5421 "equivalences\n");
5422 si = perform_var_substitution (graph);
5424 if (dump_file)
5425 fprintf (dump_file, "Rewriting constraints and unifying "
5426 "variables\n");
5427 rewrite_constraints (graph, si);
5428 free_var_substitution_info (si);
5430 build_succ_graph ();
5431 move_complex_constraints (graph);
5433 if (dump_file)
5434 fprintf (dump_file, "Uniting pointer but not location equivalent "
5435 "variables\n");
5436 unite_pointer_equivalences (graph);
5438 if (dump_file)
5439 fprintf (dump_file, "Finding indirect cycles\n");
5440 find_indirect_cycles (graph);
5442 /* Implicit nodes and predecessors are no longer necessary at this
5443 point. */
5444 remove_preds_and_fake_succs (graph);
5446 if (dump_file)
5447 fprintf (dump_file, "Solving graph\n");
5449 solve_graph (graph);
5451 compute_tbaa_pruning ();
5453 if (dump_file)
5454 dump_sa_points_to_info (dump_file);
5456 have_alias_info = true;
5458 timevar_pop (TV_TREE_PTA);
5462 /* Delete created points-to sets. */
5464 void
5465 delete_points_to_sets (void)
5467 unsigned int i;
5469 htab_delete (shared_bitmap_table);
5470 if (dump_file && (dump_flags & TDF_STATS))
5471 fprintf (dump_file, "Points to sets created:%d\n",
5472 stats.points_to_sets_created);
5474 pointer_map_destroy (vi_for_tree);
5475 bitmap_obstack_release (&pta_obstack);
5476 VEC_free (constraint_t, heap, constraints);
5478 for (i = 0; i < graph->size; i++)
5479 VEC_free (constraint_t, heap, graph->complex[i]);
5480 free (graph->complex);
5482 free (graph->rep);
5483 free (graph->succs);
5484 free (graph->pe);
5485 free (graph->pe_rep);
5486 free (graph->indirect_cycles);
5487 free (graph);
5489 VEC_free (varinfo_t, heap, varmap);
5490 free_alloc_pool (variable_info_pool);
5491 free_alloc_pool (constraint_pool);
5492 have_alias_info = false;
5495 /* Return true if we should execute IPA PTA. */
5496 static bool
5497 gate_ipa_pta (void)
5499 return (flag_unit_at_a_time != 0
5500 && flag_ipa_pta
5501 /* Don't bother doing anything if the program has errors. */
5502 && !(errorcount || sorrycount));
5505 /* Execute the driver for IPA PTA. */
5506 static unsigned int
5507 ipa_pta_execute (void)
5509 struct cgraph_node *node;
5510 struct scc_info *si;
5512 in_ipa_mode = 1;
5513 init_alias_heapvars ();
5514 init_alias_vars ();
5516 for (node = cgraph_nodes; node; node = node->next)
5518 if (!node->analyzed || cgraph_is_master_clone (node))
5520 unsigned int varid;
5522 varid = create_function_info_for (node->decl,
5523 cgraph_node_name (node));
5524 if (node->local.externally_visible)
5526 varinfo_t fi = get_varinfo (varid);
5527 for (; fi; fi = fi->next)
5528 make_constraint_from_anything (fi);
5532 for (node = cgraph_nodes; node; node = node->next)
5534 if (node->analyzed && cgraph_is_master_clone (node))
5536 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
5537 basic_block bb;
5538 tree old_func_decl = current_function_decl;
5539 if (dump_file)
5540 fprintf (dump_file,
5541 "Generating constraints for %s\n",
5542 cgraph_node_name (node));
5543 push_cfun (func);
5544 current_function_decl = node->decl;
5546 FOR_EACH_BB_FN (bb, func)
5548 block_stmt_iterator bsi;
5549 tree phi;
5551 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5553 if (is_gimple_reg (PHI_RESULT (phi)))
5555 find_func_aliases (phi);
5559 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5561 tree stmt = bsi_stmt (bsi);
5562 find_func_aliases (stmt);
5565 current_function_decl = old_func_decl;
5566 pop_cfun ();
5568 else
5570 /* Make it point to anything. */
5574 if (dump_file)
5576 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5577 dump_constraints (dump_file);
5580 if (dump_file)
5581 fprintf (dump_file,
5582 "\nCollapsing static cycles and doing variable "
5583 "substitution:\n");
5585 init_graph (VEC_length (varinfo_t, varmap) * 2);
5586 build_pred_graph ();
5587 si = perform_var_substitution (graph);
5588 rewrite_constraints (graph, si);
5589 free_var_substitution_info (si);
5591 build_succ_graph ();
5592 move_complex_constraints (graph);
5593 unite_pointer_equivalences (graph);
5594 find_indirect_cycles (graph);
5596 /* Implicit nodes and predecessors are no longer necessary at this
5597 point. */
5598 remove_preds_and_fake_succs (graph);
5600 if (dump_file)
5601 fprintf (dump_file, "\nSolving graph\n");
5603 solve_graph (graph);
5605 if (dump_file)
5606 dump_sa_points_to_info (dump_file);
5608 in_ipa_mode = 0;
5609 delete_alias_heapvars ();
5610 delete_points_to_sets ();
5611 return 0;
5614 struct simple_ipa_opt_pass pass_ipa_pta =
5617 SIMPLE_IPA_PASS,
5618 "pta", /* name */
5619 gate_ipa_pta, /* gate */
5620 ipa_pta_execute, /* execute */
5621 NULL, /* sub */
5622 NULL, /* next */
5623 0, /* static_pass_number */
5624 TV_IPA_PTA, /* tv_id */
5625 0, /* properties_required */
5626 0, /* properties_provided */
5627 0, /* properties_destroyed */
5628 0, /* todo_flags_start */
5629 TODO_update_ssa /* todo_flags_finish */
5633 /* Initialize the heapvar-for-statement mapping. */
5634 void
5635 init_alias_heapvars (void)
5637 if (!heapvar_for_stmt)
5638 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
5639 NULL);
5642 void
5643 delete_alias_heapvars (void)
5645 htab_delete (heapvar_for_stmt);
5646 heapvar_for_stmt = NULL;
5650 #include "gt-tree-ssa-structalias.h"