gcc/tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "flags.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "output.h"
34 #include "errors.h"
35 #include "diagnostic.h"
36 #include "tree.h"
37 #include "c-common.h"
38 #include "tree-flow.h"
39 #include "tree-inline.h"
40 #include "varray.h"
41 #include "c-tree.h"
42 #include "tree-gimple.h"
43 #include "hashtab.h"
44 #include "function.h"
45 #include "cgraph.h"
46 #include "tree-pass.h"
47 #include "timevar.h"
48 #include "alloc-pool.h"
49 #include "splay-tree.h"
50 #include "params.h"
51 #include "tree-ssa-structalias.h"
52 #include "cgraph.h"
53 #include "alias.h"
54 #include "pointer-set.h"
56 /* The idea behind this analyzer is to generate set constraints from the
57 program, then solve the resulting constraints in order to generate the
58 points-to sets.
60 Set constraints are a way of modeling program analysis problems that
61 involve sets. They consist of an inclusion constraint language,
62 describing the variables (each variable is a set) and operations that
63 are involved on the variables, and a set of rules that derive facts
64 from these operations. To solve a system of set constraints, you derive
65 all possible facts under the rules, which gives you the correct sets
66 as a consequence.
 68 See "Efficient Field-sensitive pointer analysis for C" by David
 69 J. Pearce, Paul H. J. Kelly and Chris Hankin, at
 70 http://citeseer.ist.psu.edu/pearce04efficient.html
 72 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
 73 of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
74 http://citeseer.ist.psu.edu/heintze01ultrafast.html
76 There are three types of real constraint expressions, DEREF,
77 ADDRESSOF, and SCALAR. Each constraint expression consists
78 of a constraint type, a variable, and an offset.
80 SCALAR is a constraint expression type used to represent x, whether
81 it appears on the LHS or the RHS of a statement.
82 DEREF is a constraint expression type used to represent *x, whether
83 it appears on the LHS or the RHS of a statement.
84 ADDRESSOF is a constraint expression used to represent &x, whether
85 it appears on the LHS or the RHS of a statement.
87 Each pointer variable in the program is assigned an integer id, and
88 each field of a structure variable is assigned an integer id as well.
90 Structure variables are linked to their list of fields through a "next
91 field" in each variable that points to the next field in offset
92 order.
93 Each variable for a structure field has
95 1. "size", that tells the size in bits of that field.
 96 2. "fullsize", that tells the size in bits of the entire structure.
97 3. "offset", that tells the offset in bits from the beginning of the
98 structure to this field.
100 Thus,
101 struct f
103 int a;
104 int b;
105 } foo;
106 int *bar;
108 looks like
110 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
111 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
112 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
115 In order to solve the system of set constraints, the following is
116 done:
118 1. Each constraint variable x has a solution set associated with it,
119 Sol(x).
121 2. Constraints are separated into direct, copy, and complex.
122 Direct constraints are ADDRESSOF constraints that require no extra
123 processing, such as P = &Q
124 Copy constraints are those of the form P = Q.
125 Complex constraints are all the constraints involving dereferences
126 and offsets (including offsetted copies).
128 3. All direct constraints of the form P = &Q are processed, such
129 that Q is added to Sol(P)
131 4. All complex constraints for a given constraint variable are stored in a
132 linked list attached to that variable's node.
134 5. A directed graph is built out of the copy constraints. Each
135 constraint variable is a node in the graph, and an edge from
136 Q to P is added for each copy constraint of the form P = Q
138 6. The graph is then walked, and solution sets are
139 propagated along the copy edges, such that an edge from Q to P
140 causes Sol(P) <- Sol(P) union Sol(Q).
142 7. As we visit each node, all complex constraints associated with
143 that node are processed by adding appropriate copy edges to the graph, or the
144 appropriate variables to the solution set.
146 8. The process of walking the graph is iterated until no solution
147 sets change.
 149 Prior to walking the graph in steps 6 and 7, we perform static
150 cycle elimination on the constraint graph, as well
151 as off-line variable substitution.
153 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
154 on and turned into anything), but isn't. You can just see what offset
155 inside the pointed-to struct it's going to access.
157 TODO: Constant bounded arrays can be handled as if they were structs of the
158 same number of elements.
160 TODO: Modeling heap and incoming pointers becomes much better if we
161 add fields to them as we discover them, which we could do.
163 TODO: We could handle unions, but to be honest, it's probably not
164 worth the pain or slowdown. */
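/* As a rough illustrative sketch of the constraint generation described
   above (the variable names are invented for the example), the fragment

       p = &a;
       q = p;
       *q = r;
       s = *p;

   produces the constraints

       p = &a      direct (ADDRESSOF on the RHS)
       q = p       copy
       *q = r      complex (DEREF on the LHS)
       s = *p      complex (DEREF on the RHS)

   Solving seeds Sol(p) = {a}, the copy edge from p to q propagates {a}
   into Sol(q), and processing the complex constraints against those sets
   adds Sol(a) >= Sol(r) and Sol(s) >= Sol(a) until nothing changes.  */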
166 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
167 htab_t heapvar_for_stmt;
169 static bool use_field_sensitive = true;
170 static int in_ipa_mode = 0;
172 /* Used for predecessor bitmaps. */
173 static bitmap_obstack predbitmap_obstack;
175 /* Used for points-to sets. */
176 static bitmap_obstack pta_obstack;
178 /* Used for oldsolution members of variables. */
179 static bitmap_obstack oldpta_obstack;
181 /* Used for per-solver-iteration bitmaps. */
182 static bitmap_obstack iteration_obstack;
184 static unsigned int create_variable_info_for (tree, const char *);
185 typedef struct constraint_graph *constraint_graph_t;
186 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
188 DEF_VEC_P(constraint_t);
189 DEF_VEC_ALLOC_P(constraint_t,heap);
191 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
192 if (a) \
193 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
195 static struct constraint_stats
197 unsigned int total_vars;
198 unsigned int nonpointer_vars;
199 unsigned int unified_vars_static;
200 unsigned int unified_vars_dynamic;
201 unsigned int iterations;
202 unsigned int num_edges;
203 unsigned int num_implicit_edges;
204 unsigned int points_to_sets_created;
205 } stats;
207 struct variable_info
209 /* ID of this variable */
210 unsigned int id;
212 /* Name of this variable */
213 const char *name;
215 /* Tree that this variable is associated with. */
216 tree decl;
218 /* Offset of this variable, in bits, from the base variable */
219 unsigned HOST_WIDE_INT offset;
221 /* Size of the variable, in bits. */
222 unsigned HOST_WIDE_INT size;
224 /* Full size of the base variable, in bits. */
225 unsigned HOST_WIDE_INT fullsize;
227 /* A link to the variable for the next field in this structure. */
228 struct variable_info *next;
230 /* True if the variable is directly the target of a dereference.
231 This is used to track which variables are *actually* dereferenced
 232 so we can prune their points-to sets. */
233 unsigned int directly_dereferenced:1;
235 /* True if this is a variable created by the constraint analysis, such as
236 heap variables and constraints we had to break up. */
237 unsigned int is_artificial_var:1;
239 /* True if this is a special variable whose solution set should not be
240 changed. */
241 unsigned int is_special_var:1;
243 /* True for variables whose size is not known or variable. */
244 unsigned int is_unknown_size_var:1;
246 /* True for variables that have unions somewhere in them. */
247 unsigned int has_union:1;
249 /* True if this is a heap variable. */
250 unsigned int is_heap_var:1;
252 /* True if we may not use TBAA to prune references to this
253 variable. This is used for C++ placement new. */
254 unsigned int no_tbaa_pruning : 1;
256 /* Points-to set for this variable. */
257 bitmap solution;
259 /* Old points-to set for this variable. */
260 bitmap oldsolution;
262 /* Variable id this was collapsed to due to type unsafety. This
263 should be unused completely after build_succ_graph, or something
264 is broken. */
265 struct variable_info *collapsed_to;
267 typedef struct variable_info *varinfo_t;
269 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
271 /* Pool of variable info structures. */
272 static alloc_pool variable_info_pool;
274 DEF_VEC_P(varinfo_t);
276 DEF_VEC_ALLOC_P(varinfo_t, heap);
278 /* Table of variable info structures for constraint variables.
279 Indexed directly by variable info id. */
280 static VEC(varinfo_t,heap) *varmap;
282 /* Return the varmap element N */
284 static inline varinfo_t
285 get_varinfo (unsigned int n)
287 return VEC_index (varinfo_t, varmap, n);
290 /* Return the varmap element N, following the collapsed_to link. */
292 static inline varinfo_t
293 get_varinfo_fc (unsigned int n)
295 varinfo_t v = VEC_index (varinfo_t, varmap, n);
297 if (v->collapsed_to)
298 return v->collapsed_to;
299 return v;
302 /* Variable that represents the unknown pointer. */
303 static varinfo_t var_anything;
304 static tree anything_tree;
305 static unsigned int anything_id;
307 /* Variable that represents the NULL pointer. */
308 static varinfo_t var_nothing;
309 static tree nothing_tree;
310 static unsigned int nothing_id;
312 /* Variable that represents read only memory. */
313 static varinfo_t var_readonly;
314 static tree readonly_tree;
315 static unsigned int readonly_id;
317 /* Variable that represents integers. This is used for when people do things
318 like &0->a.b. */
319 static varinfo_t var_integer;
320 static tree integer_tree;
321 static unsigned int integer_id;
323 /* Lookup a heap var for FROM, and return it if we find one. */
325 static tree
326 heapvar_lookup (tree from)
328 struct tree_map *h, in;
329 in.base.from = from;
331 h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
332 htab_hash_pointer (from));
333 if (h)
334 return h->to;
335 return NULL_TREE;
338 /* Insert a mapping FROM->TO in the heap var for statement
339 hashtable. */
341 static void
342 heapvar_insert (tree from, tree to)
344 struct tree_map *h;
345 void **loc;
347 h = GGC_NEW (struct tree_map);
348 h->hash = htab_hash_pointer (from);
349 h->base.from = from;
350 h->to = to;
351 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
352 *(struct tree_map **) loc = h;
 355 /* Return a new variable info structure for the variable T, with
 356 id ID and name NAME. */
358 static varinfo_t
359 new_var_info (tree t, unsigned int id, const char *name)
361 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
362 tree var;
364 ret->id = id;
365 ret->name = name;
366 ret->decl = t;
367 ret->directly_dereferenced = false;
368 ret->is_artificial_var = false;
369 ret->is_heap_var = false;
370 ret->is_special_var = false;
371 ret->is_unknown_size_var = false;
372 ret->has_union = false;
373 var = t;
374 if (TREE_CODE (var) == SSA_NAME)
375 var = SSA_NAME_VAR (var);
376 ret->no_tbaa_pruning = (DECL_P (var)
377 && POINTER_TYPE_P (TREE_TYPE (var))
378 && DECL_NO_TBAA_P (var));
379 ret->solution = BITMAP_ALLOC (&pta_obstack);
380 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
381 ret->next = NULL;
382 ret->collapsed_to = NULL;
383 return ret;
386 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
388 /* An expression that appears in a constraint. */
390 struct constraint_expr
392 /* Constraint type. */
393 constraint_expr_type type;
395 /* Variable we are referring to in the constraint. */
396 unsigned int var;
398 /* Offset, in bits, of this constraint from the beginning of
399 variables it ends up referring to.
401 IOW, in a deref constraint, we would deref, get the result set,
402 then add OFFSET to each member. */
403 unsigned HOST_WIDE_INT offset;
406 typedef struct constraint_expr ce_s;
407 DEF_VEC_O(ce_s);
408 DEF_VEC_ALLOC_O(ce_s, heap);
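/* A rough example of how OFFSET is used (reusing the struct foo layout
   from the overview comment, and assuming p is a "struct f *"): the
   access p->b is represented as the constraint expression
   { type = DEREF, var = p, offset = 32 }, i.e. dereference p, take the
   resulting points-to set, and add 32 bits to each member, which selects
   the "b" field of whatever p points to.  */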
409 static void get_constraint_for (tree, VEC(ce_s, heap) **);
410 static void do_deref (VEC (ce_s, heap) **);
412 /* Our set constraints are made up of two constraint expressions, one
413 LHS, and one RHS.
415 As described in the introduction, our set constraints each represent an
416 operation between set valued variables.
418 struct constraint
420 struct constraint_expr lhs;
421 struct constraint_expr rhs;
424 /* List of constraints that we use to build the constraint graph from. */
426 static VEC(constraint_t,heap) *constraints;
427 static alloc_pool constraint_pool;
430 DEF_VEC_I(int);
431 DEF_VEC_ALLOC_I(int, heap);
433 /* The constraint graph is represented as an array of bitmaps
434 containing successor nodes. */
436 struct constraint_graph
438 /* Size of this graph, which may be different than the number of
439 nodes in the variable map. */
440 unsigned int size;
442 /* Explicit successors of each node. */
443 bitmap *succs;
445 /* Implicit predecessors of each node (Used for variable
446 substitution). */
447 bitmap *implicit_preds;
449 /* Explicit predecessors of each node (Used for variable substitution). */
450 bitmap *preds;
452 /* Indirect cycle representatives, or -1 if the node has no indirect
453 cycles. */
454 int *indirect_cycles;
456 /* Representative node for a node. rep[a] == a unless the node has
457 been unified. */
458 unsigned int *rep;
460 /* Equivalence class representative for a label. This is used for
461 variable substitution. */
462 int *eq_rep;
464 /* Pointer equivalence label for a node. All nodes with the same
465 pointer equivalence label can be unified together at some point
466 (either during constraint optimization or after the constraint
467 graph is built). */
468 unsigned int *pe;
470 /* Pointer equivalence representative for a label. This is used to
471 handle nodes that are pointer equivalent but not location
472 equivalent. We can unite these once the addressof constraints
473 are transformed into initial points-to sets. */
474 int *pe_rep;
476 /* Pointer equivalence label for each node, used during variable
477 substitution. */
478 unsigned int *pointer_label;
480 /* Location equivalence label for each node, used during location
481 equivalence finding. */
482 unsigned int *loc_label;
484 /* Pointed-by set for each node, used during location equivalence
485 finding. This is pointed-by rather than pointed-to, because it
486 is constructed using the predecessor graph. */
487 bitmap *pointed_by;
489 /* Points to sets for pointer equivalence. This is *not* the actual
490 points-to sets for nodes. */
491 bitmap *points_to;
493 /* Bitmap of nodes where the bit is set if the node is a direct
494 node. Used for variable substitution. */
495 sbitmap direct_nodes;
497 /* Bitmap of nodes where the bit is set if the node is address
498 taken. Used for variable substitution. */
499 bitmap address_taken;
501 /* True if points_to bitmap for this node is stored in the hash
502 table. */
503 sbitmap pt_used;
505 /* Number of incoming edges remaining to be processed by pointer
506 equivalence.
507 Used for variable substitution. */
508 unsigned int *number_incoming;
511 /* Vector of complex constraints for each graph node. Complex
512 constraints are those involving dereferences or offsets that are
513 not 0. */
514 VEC(constraint_t,heap) **complex;
517 static constraint_graph_t graph;
519 /* During variable substitution and the offline version of indirect
520 cycle finding, we create nodes to represent dereferences and
521 address taken constraints. These represent where these start and
522 end. */
523 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
524 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
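/* For example, if varmap currently holds N = 4 variables with ids 0..3,
   then FIRST_REF_NODE is 4, the node representing "*x" for the variable
   with id i is FIRST_REF_NODE + i (nodes 4..7), and LAST_REF_NODE is 7,
   so the graph uses 2 * N nodes in total.  */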
526 /* Return the representative node for NODE, if NODE has been unioned
527 with another NODE.
528 This function performs path compression along the way to finding
529 the representative. */
531 static unsigned int
532 find (unsigned int node)
534 gcc_assert (node < graph->size);
535 if (graph->rep[node] != node)
536 return graph->rep[node] = find (graph->rep[node]);
537 return node;
 540 /* Union the TO and FROM nodes into the TO node.
541 Note that at some point in the future, we may want to do
542 union-by-rank, in which case we are going to have to return the
543 node we unified to. */
545 static bool
546 unite (unsigned int to, unsigned int from)
548 gcc_assert (to < graph->size && from < graph->size);
549 if (to != from && graph->rep[from] != to)
551 graph->rep[from] = to;
552 return true;
554 return false;
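/* A small worked example of the union-find helpers above (the node
   numbers are made up): with rep = {0, 0, 1, 3}, find (2) walks
   2 -> 1 -> 0 and path-compresses so that rep[2] == rep[1] == 0
   afterwards.  A later unite (0, 3) sets rep[3] = 0 and returns true;
   calling unite (0, 3) again returns false because the two nodes already
   share representative 0.  */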
557 /* Create a new constraint consisting of LHS and RHS expressions. */
559 static constraint_t
560 new_constraint (const struct constraint_expr lhs,
561 const struct constraint_expr rhs)
563 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
564 ret->lhs = lhs;
565 ret->rhs = rhs;
566 return ret;
569 /* Print out constraint C to FILE. */
571 void
572 dump_constraint (FILE *file, constraint_t c)
574 if (c->lhs.type == ADDRESSOF)
575 fprintf (file, "&");
576 else if (c->lhs.type == DEREF)
577 fprintf (file, "*");
578 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
579 if (c->lhs.offset != 0)
580 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
581 fprintf (file, " = ");
582 if (c->rhs.type == ADDRESSOF)
583 fprintf (file, "&");
584 else if (c->rhs.type == DEREF)
585 fprintf (file, "*");
586 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
587 if (c->rhs.offset != 0)
588 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
589 fprintf (file, "\n");
592 /* Print out constraint C to stderr. */
594 void
595 debug_constraint (constraint_t c)
597 dump_constraint (stderr, c);
600 /* Print out all constraints to FILE */
602 void
603 dump_constraints (FILE *file)
605 int i;
606 constraint_t c;
607 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
608 dump_constraint (file, c);
611 /* Print out all constraints to stderr. */
613 void
614 debug_constraints (void)
616 dump_constraints (stderr);
619 /* SOLVER FUNCTIONS
 621 The solver is a simple worklist solver that works on the following
622 algorithm:
624 sbitmap changed_nodes = all zeroes;
625 changed_count = 0;
626 For each node that is not already collapsed:
627 changed_count++;
628 set bit in changed nodes
630 while (changed_count > 0)
632 compute topological ordering for constraint graph
634 find and collapse cycles in the constraint graph (updating
635 changed if necessary)
637 for each node (n) in the graph in topological order:
638 changed_count--;
640 Process each complex constraint associated with the node,
641 updating changed if necessary.
643 For each outgoing edge from n, propagate the solution from n to
644 the destination of the edge, updating changed as necessary.
646 } */
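/* As an illustrative trace of the worklist above (names invented for the
   example): with the constraints p = &a, q = p and s = *p, Sol(p) is
   seeded with {a} and a copy edge from p to q is added.  On the first
   pass that edge propagates Sol(q) = {a}; the complex constraint s = *p
   is processed against Sol(p), which adds a copy edge a -> s via
   add_graph_edge and unions Sol(a) into Sol(s).  Each node whose
   solution grows is re-marked in the changed bitmap, and the loop stops
   once changed_count reaches zero.  */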
648 /* Return true if two constraint expressions A and B are equal. */
650 static bool
651 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
653 return a.type == b.type && a.var == b.var && a.offset == b.offset;
656 /* Return true if constraint expression A is less than constraint expression
657 B. This is just arbitrary, but consistent, in order to give them an
658 ordering. */
660 static bool
661 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
663 if (a.type == b.type)
665 if (a.var == b.var)
666 return a.offset < b.offset;
667 else
668 return a.var < b.var;
670 else
671 return a.type < b.type;
674 /* Return true if constraint A is less than constraint B. This is just
675 arbitrary, but consistent, in order to give them an ordering. */
677 static bool
678 constraint_less (const constraint_t a, const constraint_t b)
680 if (constraint_expr_less (a->lhs, b->lhs))
681 return true;
682 else if (constraint_expr_less (b->lhs, a->lhs))
683 return false;
684 else
685 return constraint_expr_less (a->rhs, b->rhs);
688 /* Return true if two constraints A and B are equal. */
690 static bool
691 constraint_equal (struct constraint a, struct constraint b)
693 return constraint_expr_equal (a.lhs, b.lhs)
694 && constraint_expr_equal (a.rhs, b.rhs);
698 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
700 static constraint_t
701 constraint_vec_find (VEC(constraint_t,heap) *vec,
702 struct constraint lookfor)
704 unsigned int place;
705 constraint_t found;
707 if (vec == NULL)
708 return NULL;
710 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
711 if (place >= VEC_length (constraint_t, vec))
712 return NULL;
713 found = VEC_index (constraint_t, vec, place);
714 if (!constraint_equal (*found, lookfor))
715 return NULL;
716 return found;
719 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
721 static void
722 constraint_set_union (VEC(constraint_t,heap) **to,
723 VEC(constraint_t,heap) **from)
725 int i;
726 constraint_t c;
728 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
730 if (constraint_vec_find (*to, *c) == NULL)
732 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
733 constraint_less);
734 VEC_safe_insert (constraint_t, heap, *to, place, c);
739 /* Take a solution set SET, add OFFSET to each member of the set, and
740 overwrite SET with the result when done. */
742 static void
743 solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
745 bitmap result = BITMAP_ALLOC (&iteration_obstack);
746 unsigned int i;
747 bitmap_iterator bi;
749 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
751 /* If this is a properly sized variable, only add offset if it's
752 less than end. Otherwise, it is globbed to a single
753 variable. */
755 if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
757 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
758 varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
759 if (!v)
760 continue;
761 bitmap_set_bit (result, v->id);
763 else if (get_varinfo (i)->is_artificial_var
764 || get_varinfo (i)->has_union
765 || get_varinfo (i)->is_unknown_size_var)
767 bitmap_set_bit (result, i);
771 bitmap_copy (set, result);
772 BITMAP_FREE (result);
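/* Worked example (using the struct foo layout from the overview
   comment): if SET = { foo.a } and OFFSET = 32, then foo.a's offset plus
   32 is 32, which is still below the fullsize of 64, so
   first_vi_for_offset maps it to foo.b and the result is { foo.b }.  Had
   the sum reached or exceeded fullsize, the member would simply be
   dropped, unless it is an artificial, union-containing or unknown-size
   variable, in which case it is kept unchanged.  */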
775 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
776 process. */
778 static bool
779 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
781 if (inc == 0)
782 return bitmap_ior_into (to, from);
783 else
785 bitmap tmp;
786 bool res;
788 tmp = BITMAP_ALLOC (&iteration_obstack);
789 bitmap_copy (tmp, from);
790 solution_set_add (tmp, inc);
791 res = bitmap_ior_into (to, tmp);
792 BITMAP_FREE (tmp);
793 return res;
797 /* Insert constraint C into the list of complex constraints for graph
798 node VAR. */
800 static void
801 insert_into_complex (constraint_graph_t graph,
802 unsigned int var, constraint_t c)
804 VEC (constraint_t, heap) *complex = graph->complex[var];
805 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
806 constraint_less);
808 /* Only insert constraints that do not already exist. */
809 if (place >= VEC_length (constraint_t, complex)
810 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
811 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
815 /* Condense two variable nodes into a single variable node, by moving
 816 all associated info from FROM to TO. */
818 static void
819 merge_node_constraints (constraint_graph_t graph, unsigned int to,
820 unsigned int from)
822 unsigned int i;
823 constraint_t c;
825 gcc_assert (find (from) == to);
827 /* Move all complex constraints from src node into to node */
828 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
 830 /* In complex constraints for node FROM, we may have either
 831 a = *FROM or *FROM = a, or an offsetted constraint; these are
 832 always added to the rhs node's constraints. */
834 if (c->rhs.type == DEREF)
835 c->rhs.var = to;
836 else if (c->lhs.type == DEREF)
837 c->lhs.var = to;
838 else
839 c->rhs.var = to;
841 constraint_set_union (&graph->complex[to], &graph->complex[from]);
842 VEC_free (constraint_t, heap, graph->complex[from]);
843 graph->complex[from] = NULL;
847 /* Remove edges involving NODE from GRAPH. */
849 static void
850 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
852 if (graph->succs[node])
853 BITMAP_FREE (graph->succs[node]);
856 /* Merge GRAPH nodes FROM and TO into node TO. */
858 static void
859 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
860 unsigned int from)
862 if (graph->indirect_cycles[from] != -1)
864 /* If we have indirect cycles with the from node, and we have
865 none on the to node, the to node has indirect cycles from the
866 from node now that they are unified.
867 If indirect cycles exist on both, unify the nodes that they
868 are in a cycle with, since we know they are in a cycle with
869 each other. */
870 if (graph->indirect_cycles[to] == -1)
871 graph->indirect_cycles[to] = graph->indirect_cycles[from];
874 /* Merge all the successor edges. */
875 if (graph->succs[from])
877 if (!graph->succs[to])
878 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
879 bitmap_ior_into (graph->succs[to],
880 graph->succs[from]);
883 clear_edges_for_node (graph, from);
887 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
888 it doesn't exist in the graph already. */
890 static void
891 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
892 unsigned int from)
894 if (to == from)
895 return;
897 if (!graph->implicit_preds[to])
898 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
900 if (!bitmap_bit_p (graph->implicit_preds[to], from))
902 stats.num_implicit_edges++;
903 bitmap_set_bit (graph->implicit_preds[to], from);
 907 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
 908 it doesn't exist in the graph already. */
911 static void
912 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
913 unsigned int from)
915 if (!graph->preds[to])
916 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
917 if (!bitmap_bit_p (graph->preds[to], from))
918 bitmap_set_bit (graph->preds[to], from);
921 /* Add a graph edge to GRAPH, going from FROM to TO if
922 it doesn't exist in the graph already.
923 Return false if the edge already existed, true otherwise. */
925 static bool
926 add_graph_edge (constraint_graph_t graph, unsigned int to,
927 unsigned int from)
929 if (to == from)
931 return false;
933 else
935 bool r = false;
937 if (!graph->succs[from])
938 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
939 if (!bitmap_bit_p (graph->succs[from], to))
941 r = true;
942 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
943 stats.num_edges++;
944 bitmap_set_bit (graph->succs[from], to);
946 return r;
 951 /* Return true if {DEST, SRC} is an existing graph edge in GRAPH. */
953 static bool
954 valid_graph_edge (constraint_graph_t graph, unsigned int src,
955 unsigned int dest)
957 return (graph->succs[dest]
958 && bitmap_bit_p (graph->succs[dest], src));
961 /* Initialize the constraint graph structure to contain SIZE nodes. */
963 static void
964 init_graph (unsigned int size)
966 unsigned int j;
968 graph = XCNEW (struct constraint_graph);
969 graph->size = size;
970 graph->succs = XCNEWVEC (bitmap, graph->size);
971 graph->indirect_cycles = XNEWVEC (int, graph->size);
972 graph->rep = XNEWVEC (unsigned int, graph->size);
973 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
974 graph->pe = XCNEWVEC (unsigned int, graph->size);
975 graph->pe_rep = XNEWVEC (int, graph->size);
977 for (j = 0; j < graph->size; j++)
979 graph->rep[j] = j;
980 graph->pe_rep[j] = -1;
981 graph->indirect_cycles[j] = -1;
985 /* Build the constraint graph, adding only predecessor edges right now. */
987 static void
988 build_pred_graph (void)
990 int i;
991 constraint_t c;
992 unsigned int j;
994 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
995 graph->preds = XCNEWVEC (bitmap, graph->size);
996 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
997 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
998 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
999 graph->points_to = XCNEWVEC (bitmap, graph->size);
1000 graph->eq_rep = XNEWVEC (int, graph->size);
1001 graph->direct_nodes = sbitmap_alloc (graph->size);
1002 graph->pt_used = sbitmap_alloc (graph->size);
1003 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1004 graph->number_incoming = XCNEWVEC (unsigned int, graph->size);
1005 sbitmap_zero (graph->direct_nodes);
1006 sbitmap_zero (graph->pt_used);
1008 for (j = 0; j < FIRST_REF_NODE; j++)
1010 if (!get_varinfo (j)->is_special_var)
1011 SET_BIT (graph->direct_nodes, j);
1014 for (j = 0; j < graph->size; j++)
1015 graph->eq_rep[j] = -1;
1017 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1018 graph->indirect_cycles[j] = -1;
1020 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1022 struct constraint_expr lhs = c->lhs;
1023 struct constraint_expr rhs = c->rhs;
1024 unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
1025 unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;
1027 if (lhs.type == DEREF)
1029 /* *x = y. */
1030 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1031 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1033 else if (rhs.type == DEREF)
1035 /* x = *y */
1036 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1037 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1038 else
1039 RESET_BIT (graph->direct_nodes, lhsvar);
1041 else if (rhs.type == ADDRESSOF)
1043 /* x = &y */
1044 if (graph->points_to[lhsvar] == NULL)
1045 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1046 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1048 if (graph->pointed_by[rhsvar] == NULL)
1049 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1050 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1052 /* Implicitly, *x = y */
1053 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1055 RESET_BIT (graph->direct_nodes, rhsvar);
1056 bitmap_set_bit (graph->address_taken, rhsvar);
1058 else if (lhsvar > anything_id
1059 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1061 /* x = y */
1062 add_pred_graph_edge (graph, lhsvar, rhsvar);
1063 /* Implicitly, *x = *y */
1064 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1065 FIRST_REF_NODE + rhsvar);
1067 else if (lhs.offset != 0 || rhs.offset != 0)
1069 if (rhs.offset != 0)
1070 RESET_BIT (graph->direct_nodes, lhs.var);
1071 else if (lhs.offset != 0)
1072 RESET_BIT (graph->direct_nodes, rhs.var);
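/* To illustrate the edges added above (zero offsets assumed, with
   REF (v) denoting FIRST_REF_NODE + v): for x = y the predecessor edge
   y -> x is recorded in preds[x] and the implicit edge
   REF (y) -> REF (x) in implicit_preds; for x = &y, y goes into
   points_to[x], x into pointed_by[y], the implicit *x = y edge is added,
   and y loses its direct-node status and is marked address taken; for
   *x = y an edge y -> REF (x) is added; for x = *y an edge
   REF (y) -> x is added.  */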
1077 /* Build the constraint graph, adding successor edges. */
1079 static void
1080 build_succ_graph (void)
1082 int i;
1083 constraint_t c;
1085 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1087 struct constraint_expr lhs;
1088 struct constraint_expr rhs;
1089 unsigned int lhsvar;
1090 unsigned int rhsvar;
1092 if (!c)
1093 continue;
1095 lhs = c->lhs;
1096 rhs = c->rhs;
1097 lhsvar = find (get_varinfo_fc (lhs.var)->id);
1098 rhsvar = find (get_varinfo_fc (rhs.var)->id);
1100 if (lhs.type == DEREF)
1102 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1103 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1105 else if (rhs.type == DEREF)
1107 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1108 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1110 else if (rhs.type == ADDRESSOF)
1112 /* x = &y */
1113 gcc_assert (find (get_varinfo_fc (rhs.var)->id)
1114 == get_varinfo_fc (rhs.var)->id);
1115 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1117 else if (lhsvar > anything_id
1118 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1120 add_graph_edge (graph, lhsvar, rhsvar);
1126 /* Changed variables on the last iteration. */
1127 static unsigned int changed_count;
1128 static sbitmap changed;
1130 DEF_VEC_I(unsigned);
1131 DEF_VEC_ALLOC_I(unsigned,heap);
1134 /* Strongly Connected Component visitation info. */
1136 struct scc_info
1138 sbitmap visited;
1139 sbitmap deleted;
1140 unsigned int *dfs;
1141 unsigned int *node_mapping;
1142 int current_index;
1143 VEC(unsigned,heap) *scc_stack;
1147 /* Recursive routine to find strongly connected components in GRAPH.
1148 SI is the SCC info to store the information in, and N is the id of current
1149 graph node we are processing.
1151 This is Tarjan's strongly connected component finding algorithm, as
1152 modified by Nuutila to keep only non-root nodes on the stack.
 1153 The algorithm can be found in "On finding the strongly
 1154 connected components in a directed graph" by Esko Nuutila and Eljas
1155 Soisalon-Soininen, in Information Processing Letters volume 49,
1156 number 1, pages 9-14. */
1158 static void
1159 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1161 unsigned int i;
1162 bitmap_iterator bi;
1163 unsigned int my_dfs;
1165 SET_BIT (si->visited, n);
1166 si->dfs[n] = si->current_index ++;
1167 my_dfs = si->dfs[n];
1169 /* Visit all the successors. */
1170 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1172 unsigned int w;
1174 if (i > LAST_REF_NODE)
1175 break;
1177 w = find (i);
1178 if (TEST_BIT (si->deleted, w))
1179 continue;
1181 if (!TEST_BIT (si->visited, w))
1182 scc_visit (graph, si, w);
1184 unsigned int t = find (w);
1185 unsigned int nnode = find (n);
1186 gcc_assert (nnode == n);
1188 if (si->dfs[t] < si->dfs[nnode])
1189 si->dfs[n] = si->dfs[t];
1193 /* See if any components have been identified. */
1194 if (si->dfs[n] == my_dfs)
1196 if (VEC_length (unsigned, si->scc_stack) > 0
1197 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1199 bitmap scc = BITMAP_ALLOC (NULL);
1200 bool have_ref_node = n >= FIRST_REF_NODE;
1201 unsigned int lowest_node;
1202 bitmap_iterator bi;
1204 bitmap_set_bit (scc, n);
1206 while (VEC_length (unsigned, si->scc_stack) != 0
1207 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1209 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1211 bitmap_set_bit (scc, w);
1212 if (w >= FIRST_REF_NODE)
1213 have_ref_node = true;
1216 lowest_node = bitmap_first_set_bit (scc);
1217 gcc_assert (lowest_node < FIRST_REF_NODE);
1219 /* Collapse the SCC nodes into a single node, and mark the
1220 indirect cycles. */
1221 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1223 if (i < FIRST_REF_NODE)
1225 if (unite (lowest_node, i))
1226 unify_nodes (graph, lowest_node, i, false);
1228 else
1230 unite (lowest_node, i);
1231 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1235 SET_BIT (si->deleted, n);
1237 else
1238 VEC_safe_push (unsigned, heap, si->scc_stack, n);
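/* A minimal example of the collapse performed above: the copy
   constraints a = b and b = a yield successor edges b -> a and a -> b,
   so {a, b} forms a cycle.  scc_visit pops both nodes off the stack and
   unites them into the lowest-numbered member, with unify_nodes merging
   their solutions; a REF node in the cycle is united as well and its
   indirect_cycles entry is set to the cycle representative.  */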
1241 /* Unify node FROM into node TO, updating the changed count if
1242 necessary when UPDATE_CHANGED is true. */
1244 static void
1245 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1246 bool update_changed)
1249 gcc_assert (to != from && find (to) == to);
1250 if (dump_file && (dump_flags & TDF_DETAILS))
1251 fprintf (dump_file, "Unifying %s to %s\n",
1252 get_varinfo (from)->name,
1253 get_varinfo (to)->name);
1255 if (update_changed)
1256 stats.unified_vars_dynamic++;
1257 else
1258 stats.unified_vars_static++;
1260 merge_graph_nodes (graph, to, from);
1261 merge_node_constraints (graph, to, from);
1263 if (get_varinfo (from)->no_tbaa_pruning)
1264 get_varinfo (to)->no_tbaa_pruning = true;
1266 /* Mark TO as changed if FROM was changed. If TO was already marked
1267 as changed, decrease the changed count. */
1269 if (update_changed && TEST_BIT (changed, from))
1271 RESET_BIT (changed, from);
1272 if (!TEST_BIT (changed, to))
1273 SET_BIT (changed, to);
1274 else
1276 gcc_assert (changed_count > 0);
1277 changed_count--;
1280 if (get_varinfo (from)->solution)
1282 /* If the solution changes because of the merging, we need to mark
1283 the variable as changed. */
1284 if (bitmap_ior_into (get_varinfo (to)->solution,
1285 get_varinfo (from)->solution))
1287 if (update_changed && !TEST_BIT (changed, to))
1289 SET_BIT (changed, to);
1290 changed_count++;
1294 BITMAP_FREE (get_varinfo (from)->solution);
1295 BITMAP_FREE (get_varinfo (from)->oldsolution);
1297 if (stats.iterations > 0)
1299 BITMAP_FREE (get_varinfo (to)->oldsolution);
1300 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
1303 if (valid_graph_edge (graph, to, to))
1305 if (graph->succs[to])
1306 bitmap_clear_bit (graph->succs[to], to);
1310 /* Information needed to compute the topological ordering of a graph. */
1312 struct topo_info
1314 /* sbitmap of visited nodes. */
1315 sbitmap visited;
1316 /* Array that stores the topological order of the graph, *in
1317 reverse*. */
1318 VEC(unsigned,heap) *topo_order;
1322 /* Initialize and return a topological info structure. */
1324 static struct topo_info *
1325 init_topo_info (void)
1327 size_t size = graph->size;
1328 struct topo_info *ti = XNEW (struct topo_info);
1329 ti->visited = sbitmap_alloc (size);
1330 sbitmap_zero (ti->visited);
1331 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1332 return ti;
1336 /* Free the topological sort info pointed to by TI. */
1338 static void
1339 free_topo_info (struct topo_info *ti)
1341 sbitmap_free (ti->visited);
1342 VEC_free (unsigned, heap, ti->topo_order);
1343 free (ti);
1346 /* Visit the graph in topological order, and store the order in the
1347 topo_info structure. */
1349 static void
1350 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1351 unsigned int n)
1353 bitmap_iterator bi;
1354 unsigned int j;
1356 SET_BIT (ti->visited, n);
1358 if (graph->succs[n])
1359 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1361 if (!TEST_BIT (ti->visited, j))
1362 topo_visit (graph, ti, j);
1365 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1368 /* Return true if variable N + OFFSET is a legal field of N. */
1370 static bool
1371 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1373 varinfo_t ninfo = get_varinfo (n);
1375 /* For things we've globbed to single variables, any offset into the
1376 variable acts like the entire variable, so that it becomes offset
1377 0. */
1378 if (ninfo->is_special_var
1379 || ninfo->is_artificial_var
1380 || ninfo->is_unknown_size_var)
1382 *offset = 0;
1383 return true;
1385 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
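/* For example (using the layout from the overview comment): for the
   32-bit variable bar, type_safe (bar, &off) with off == 32 returns
   false, since 0 + 32 is not below its fullsize of 32.  For a special
   variable such as anything, the offset is rewritten to 0 and the
   function returns true, because such variables are globbed to a single
   variable.  */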
1388 /* Process a constraint C that represents x = *y, using DELTA as the
1389 starting solution. */
1391 static void
1392 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1393 bitmap delta)
1395 unsigned int lhs = c->lhs.var;
1396 bool flag = false;
1397 bitmap sol = get_varinfo (lhs)->solution;
1398 unsigned int j;
1399 bitmap_iterator bi;
1401 if (bitmap_bit_p (delta, anything_id))
1403 flag = !bitmap_bit_p (sol, anything_id);
1404 if (flag)
1405 bitmap_set_bit (sol, anything_id);
1406 goto done;
1408 /* For each variable j in delta (Sol(y)), add
1409 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1410 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1412 unsigned HOST_WIDE_INT roffset = c->rhs.offset;
1413 if (type_safe (j, &roffset))
1415 varinfo_t v;
1416 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
1417 unsigned int t;
1419 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1420 if (!v)
1421 continue;
1422 t = find (v->id);
1424 /* Adding edges from the special vars is pointless.
1425 They don't have sets that can change. */
1426 if (get_varinfo (t) ->is_special_var)
1427 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1428 else if (add_graph_edge (graph, lhs, t))
1429 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1431 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1432 fprintf (dump_file, "Untypesafe usage in do_sd_constraint\n");
1436 done:
1437 /* If the LHS solution changed, mark the var as changed. */
1438 if (flag)
1440 get_varinfo (lhs)->solution = sol;
1441 if (!TEST_BIT (changed, lhs))
1443 SET_BIT (changed, lhs);
1444 changed_count++;
1449 /* Process a constraint C that represents *x = y. */
1451 static void
1452 do_ds_constraint (constraint_t c, bitmap delta)
1454 unsigned int rhs = c->rhs.var;
1455 bitmap sol = get_varinfo (rhs)->solution;
1456 unsigned int j;
1457 bitmap_iterator bi;
1459 if (bitmap_bit_p (sol, anything_id))
1461 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1463 varinfo_t jvi = get_varinfo (j);
1464 unsigned int t;
1465 unsigned int loff = c->lhs.offset;
1466 unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
1467 varinfo_t v;
1469 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1470 if (!v)
1471 continue;
1472 t = find (v->id);
1474 if (!bitmap_bit_p (get_varinfo (t)->solution, anything_id))
1476 bitmap_set_bit (get_varinfo (t)->solution, anything_id);
1477 if (!TEST_BIT (changed, t))
1479 SET_BIT (changed, t);
1480 changed_count++;
1484 return;
1487 /* For each member j of delta (Sol(x)), add an edge from y to j and
1488 union Sol(y) into Sol(j) */
1489 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1491 unsigned HOST_WIDE_INT loff = c->lhs.offset;
1492 if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
1494 varinfo_t v;
1495 unsigned int t;
1496 unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
1497 bitmap tmp;
1499 v = first_vi_for_offset (get_varinfo (j), fieldoffset);
1500 if (!v)
1501 continue;
1502 t = find (v->id);
1503 tmp = get_varinfo (t)->solution;
1505 if (set_union_with_increment (tmp, sol, 0))
1507 get_varinfo (t)->solution = tmp;
1508 if (t == rhs)
1509 sol = get_varinfo (rhs)->solution;
1510 if (!TEST_BIT (changed, t))
1512 SET_BIT (changed, t);
1513 changed_count++;
1517 else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
1518 fprintf (dump_file, "Untypesafe usage in do_ds_constraint\n");
 1522 /* Handle a non-simple constraint (simple meaning it requires no iteration),
 1523 IE *x = &y, x = *y, *x = y, and x = y with offsets involved. */
1525 static void
1526 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1528 if (c->lhs.type == DEREF)
1530 if (c->rhs.type == ADDRESSOF)
1532 gcc_unreachable();
1534 else
1536 /* *x = y */
1537 do_ds_constraint (c, delta);
1540 else if (c->rhs.type == DEREF)
1542 /* x = *y */
1543 if (!(get_varinfo (c->lhs.var)->is_special_var))
1544 do_sd_constraint (graph, c, delta);
1546 else
1548 bitmap tmp;
1549 bitmap solution;
1550 bool flag = false;
1552 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1553 solution = get_varinfo (c->rhs.var)->solution;
1554 tmp = get_varinfo (c->lhs.var)->solution;
1556 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1558 if (flag)
1560 get_varinfo (c->lhs.var)->solution = tmp;
1561 if (!TEST_BIT (changed, c->lhs.var))
1563 SET_BIT (changed, c->lhs.var);
1564 changed_count++;
1570 /* Initialize and return a new SCC info structure. */
1572 static struct scc_info *
1573 init_scc_info (size_t size)
1575 struct scc_info *si = XNEW (struct scc_info);
1576 size_t i;
1578 si->current_index = 0;
1579 si->visited = sbitmap_alloc (size);
1580 sbitmap_zero (si->visited);
1581 si->deleted = sbitmap_alloc (size);
1582 sbitmap_zero (si->deleted);
1583 si->node_mapping = XNEWVEC (unsigned int, size);
1584 si->dfs = XCNEWVEC (unsigned int, size);
1586 for (i = 0; i < size; i++)
1587 si->node_mapping[i] = i;
1589 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1590 return si;
1593 /* Free an SCC info structure pointed to by SI */
1595 static void
1596 free_scc_info (struct scc_info *si)
1598 sbitmap_free (si->visited);
1599 sbitmap_free (si->deleted);
1600 free (si->node_mapping);
1601 free (si->dfs);
1602 VEC_free (unsigned, heap, si->scc_stack);
1603 free (si);
1607 /* Find indirect cycles in GRAPH that occur, using strongly connected
1608 components, and note them in the indirect cycles map.
1610 This technique comes from Ben Hardekopf and Calvin Lin,
1611 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1612 Lines of Code", submitted to PLDI 2007. */
1614 static void
1615 find_indirect_cycles (constraint_graph_t graph)
1617 unsigned int i;
1618 unsigned int size = graph->size;
1619 struct scc_info *si = init_scc_info (size);
1621 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1622 if (!TEST_BIT (si->visited, i) && find (i) == i)
1623 scc_visit (graph, si, i);
1625 free_scc_info (si);
1628 /* Compute a topological ordering for GRAPH, and store the result in the
1629 topo_info structure TI. */
1631 static void
1632 compute_topo_order (constraint_graph_t graph,
1633 struct topo_info *ti)
1635 unsigned int i;
1636 unsigned int size = graph->size;
1638 for (i = 0; i != size; ++i)
1639 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1640 topo_visit (graph, ti, i);
 1643 /* Structure used for hash value numbering of pointer equivalence
1644 classes. */
1646 typedef struct equiv_class_label
1648 unsigned int equivalence_class;
1649 bitmap labels;
1650 hashval_t hashcode;
1651 } *equiv_class_label_t;
1652 typedef const struct equiv_class_label *const_equiv_class_label_t;
1654 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1655 classes. */
1656 static htab_t pointer_equiv_class_table;
1658 /* A hashtable for mapping a bitmap of labels->location equivalence
1659 classes. */
1660 static htab_t location_equiv_class_table;
 1662 /* Hash function for an equiv_class_label_t. */
1664 static hashval_t
1665 equiv_class_label_hash (const void *p)
1667 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1668 return ecl->hashcode;
1671 /* Equality function for two equiv_class_label_t's. */
1673 static int
1674 equiv_class_label_eq (const void *p1, const void *p2)
1676 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1677 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1678 return bitmap_equal_p (eql1->labels, eql2->labels);
 1681 /* Look up an equivalence class in TABLE by the bitmap of LABELS it
1682 contains. */
1684 static unsigned int
1685 equiv_class_lookup (htab_t table, bitmap labels)
1687 void **slot;
1688 struct equiv_class_label ecl;
1690 ecl.labels = labels;
1691 ecl.hashcode = bitmap_hash (labels);
1693 slot = htab_find_slot_with_hash (table, &ecl,
1694 ecl.hashcode, NO_INSERT);
1695 if (!slot)
1696 return 0;
1697 else
1698 return ((equiv_class_label_t) *slot)->equivalence_class;
1702 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1703 to TABLE. */
1705 static void
1706 equiv_class_add (htab_t table, unsigned int equivalence_class,
1707 bitmap labels)
1709 void **slot;
1710 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1712 ecl->labels = labels;
1713 ecl->equivalence_class = equivalence_class;
1714 ecl->hashcode = bitmap_hash (labels);
1716 slot = htab_find_slot_with_hash (table, ecl,
1717 ecl->hashcode, INSERT);
1718 gcc_assert (!*slot);
1719 *slot = (void *) ecl;
1722 /* Perform offline variable substitution.
1724 This is a worst case quadratic time way of identifying variables
1725 that must have equivalent points-to sets, including those caused by
1726 static cycles, and single entry subgraphs, in the constraint graph.
 1728 The technique is described in "Exploiting Pointer and Location
 1729 Equivalence to Optimize Pointer Analysis", in the 14th International
 1730 Static Analysis Symposium (SAS), August 2007. It is known as the
1731 "HU" algorithm, and is equivalent to value numbering the collapsed
1732 constraint graph including evaluating unions.
1734 The general method of finding equivalence classes is as follows:
1735 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1736 Initialize all non-REF nodes to be direct nodes.
1737 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1738 variable}
1739 For each constraint containing the dereference, we also do the same
1740 thing.
1742 We then compute SCC's in the graph and unify nodes in the same SCC,
1743 including pts sets.
1745 For each non-collapsed node x:
1746 Visit all unvisited explicit incoming edges.
 1747 Ignoring all non-pointers, set pts(x) = Union of pts(y) for all y
 1748 where y->x.
1749 Lookup the equivalence class for pts(x).
1750 If we found one, equivalence_class(x) = found class.
1751 Otherwise, equivalence_class(x) = new class, and new_class is
1752 added to the lookup table.
1754 All direct nodes with the same equivalence class can be replaced
1755 with a single representative node.
1756 All unlabeled nodes (label == 0) are not pointers and all edges
1757 involving them can be eliminated.
 1758 We perform these optimizations during rewrite_constraints.
1760 In addition to pointer equivalence class finding, we also perform
1761 location equivalence class finding. This is the set of variables
1762 that always appear together in points-to sets. We use this to
1763 compress the size of the points-to sets. */
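/* As a small illustration of the labeling (names invented for the
   example): if two nodes x and y end up with identical points_to
   bitmaps after condensing, say both only receive &a, then
   equiv_class_lookup hands them the same pointer equivalence label and
   they may later be collapsed to one representative; a node whose label
   remains 0 is known to contain no pointers, and all of its edges are
   removed.  Location equivalence works the same way on the pointed_by
   bitmaps, grouping variables that always occur together in points-to
   sets.  */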
1765 /* Current maximum pointer equivalence class id. */
1766 static int pointer_equiv_class;
1768 /* Current maximum location equivalence class id. */
1769 static int location_equiv_class;
1771 /* Recursive routine to find strongly connected components in GRAPH,
 1772 and label its nodes with DFS numbers. */
1774 static void
1775 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1777 unsigned int i;
1778 bitmap_iterator bi;
1779 unsigned int my_dfs;
1781 gcc_assert (si->node_mapping[n] == n);
1782 SET_BIT (si->visited, n);
1783 si->dfs[n] = si->current_index ++;
1784 my_dfs = si->dfs[n];
1786 /* Visit all the successors. */
1787 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1789 unsigned int w = si->node_mapping[i];
1791 if (TEST_BIT (si->deleted, w))
1792 continue;
1794 if (!TEST_BIT (si->visited, w))
1795 condense_visit (graph, si, w);
1797 unsigned int t = si->node_mapping[w];
1798 unsigned int nnode = si->node_mapping[n];
1799 gcc_assert (nnode == n);
1801 if (si->dfs[t] < si->dfs[nnode])
1802 si->dfs[n] = si->dfs[t];
1806 /* Visit all the implicit predecessors. */
1807 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
1809 unsigned int w = si->node_mapping[i];
1811 if (TEST_BIT (si->deleted, w))
1812 continue;
1814 if (!TEST_BIT (si->visited, w))
1815 condense_visit (graph, si, w);
1817 unsigned int t = si->node_mapping[w];
1818 unsigned int nnode = si->node_mapping[n];
1819 gcc_assert (nnode == n);
1821 if (si->dfs[t] < si->dfs[nnode])
1822 si->dfs[n] = si->dfs[t];
1826 /* See if any components have been identified. */
1827 if (si->dfs[n] == my_dfs)
1829 while (VEC_length (unsigned, si->scc_stack) != 0
1830 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1832 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1833 si->node_mapping[w] = n;
1835 if (!TEST_BIT (graph->direct_nodes, w))
1836 RESET_BIT (graph->direct_nodes, n);
1838 /* Unify our nodes. */
1839 if (graph->preds[w])
1841 if (!graph->preds[n])
1842 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1843 bitmap_ior_into (graph->preds[n], graph->preds[w]);
1845 if (graph->implicit_preds[w])
1847 if (!graph->implicit_preds[n])
1848 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
1849 bitmap_ior_into (graph->implicit_preds[n],
1850 graph->implicit_preds[w]);
1852 if (graph->points_to[w])
1854 if (!graph->points_to[n])
1855 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
1856 bitmap_ior_into (graph->points_to[n],
1857 graph->points_to[w]);
1859 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1861 unsigned int rep = si->node_mapping[i];
1862 graph->number_incoming[rep]++;
1865 SET_BIT (si->deleted, n);
1867 else
1868 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1871 /* Label pointer equivalences. */
1873 static void
1874 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1876 unsigned int i;
1877 bitmap_iterator bi;
1878 SET_BIT (si->visited, n);
1880 if (!graph->points_to[n])
1881 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
 1883 /* Label and union our incoming edges' points-to sets. */
1884 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1886 unsigned int w = si->node_mapping[i];
1887 if (!TEST_BIT (si->visited, w))
1888 label_visit (graph, si, w);
1890 /* Skip unused edges */
1891 if (w == n || graph->pointer_label[w] == 0)
1893 graph->number_incoming[w]--;
1894 continue;
1896 if (graph->points_to[w])
1897 bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
1899 /* If all incoming edges to w have been processed and
1900 graph->points_to[w] was not stored in the hash table, we can
1901 free it. */
1902 graph->number_incoming[w]--;
1903 if (!graph->number_incoming[w] && !TEST_BIT (graph->pt_used, w))
1905 BITMAP_FREE (graph->points_to[w]);
1908 /* Indirect nodes get fresh variables. */
1909 if (!TEST_BIT (graph->direct_nodes, n))
1910 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
1912 if (!bitmap_empty_p (graph->points_to[n]))
1914 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
1915 graph->points_to[n]);
1916 if (!label)
1918 SET_BIT (graph->pt_used, n);
1919 label = pointer_equiv_class++;
1920 equiv_class_add (pointer_equiv_class_table,
1921 label, graph->points_to[n]);
1923 graph->pointer_label[n] = label;
1927 /* Perform offline variable substitution, discovering equivalence
1928 classes, and eliminating non-pointer variables. */
1930 static struct scc_info *
1931 perform_var_substitution (constraint_graph_t graph)
1933 unsigned int i;
1934 unsigned int size = graph->size;
1935 struct scc_info *si = init_scc_info (size);
1937 bitmap_obstack_initialize (&iteration_obstack);
1938 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
1939 equiv_class_label_eq, free);
1940 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
1941 equiv_class_label_eq, free);
1942 pointer_equiv_class = 1;
1943 location_equiv_class = 1;
1945 /* Condense the nodes, which means to find SCC's, count incoming
1946 predecessors, and unite nodes in SCC's. */
1947 for (i = 0; i < FIRST_REF_NODE; i++)
1948 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1949 condense_visit (graph, si, si->node_mapping[i]);
1951 sbitmap_zero (si->visited);
 1952 /* Actually label the nodes for pointer equivalences */
1953 for (i = 0; i < FIRST_REF_NODE; i++)
1954 if (!TEST_BIT (si->visited, si->node_mapping[i]))
1955 label_visit (graph, si, si->node_mapping[i]);
1957 /* Calculate location equivalence labels. */
1958 for (i = 0; i < FIRST_REF_NODE; i++)
1960 bitmap pointed_by;
1961 bitmap_iterator bi;
1962 unsigned int j;
1963 unsigned int label;
1965 if (!graph->pointed_by[i])
1966 continue;
1967 pointed_by = BITMAP_ALLOC (&iteration_obstack);
1969 /* Translate the pointed-by mapping for pointer equivalence
1970 labels. */
1971 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
1973 bitmap_set_bit (pointed_by,
1974 graph->pointer_label[si->node_mapping[j]]);
1976 /* The original pointed_by is now dead. */
1977 BITMAP_FREE (graph->pointed_by[i]);
1979 /* Look up the location equivalence label if one exists, or make
1980 one otherwise. */
1981 label = equiv_class_lookup (location_equiv_class_table,
1982 pointed_by);
1983 if (label == 0)
1985 label = location_equiv_class++;
1986 equiv_class_add (location_equiv_class_table,
1987 label, pointed_by);
1989 else
1991 if (dump_file && (dump_flags & TDF_DETAILS))
1992 fprintf (dump_file, "Found location equivalence for node %s\n",
1993 get_varinfo (i)->name);
1994 BITMAP_FREE (pointed_by);
1996 graph->loc_label[i] = label;
2000 if (dump_file && (dump_flags & TDF_DETAILS))
2001 for (i = 0; i < FIRST_REF_NODE; i++)
2003 bool direct_node = TEST_BIT (graph->direct_nodes, i);
2004 fprintf (dump_file,
2005 "Equivalence classes for %s node id %d:%s are pointer: %d"
2006 ", location:%d\n",
2007 direct_node ? "Direct node" : "Indirect node", i,
2008 get_varinfo (i)->name,
2009 graph->pointer_label[si->node_mapping[i]],
2010 graph->loc_label[si->node_mapping[i]]);
2013 /* Quickly eliminate our non-pointer variables. */
2015 for (i = 0; i < FIRST_REF_NODE; i++)
2017 unsigned int node = si->node_mapping[i];
2019 if (graph->pointer_label[node] == 0)
2021 if (dump_file && (dump_flags & TDF_DETAILS))
2022 fprintf (dump_file,
2023 "%s is a non-pointer variable, eliminating edges.\n",
2024 get_varinfo (node)->name);
2025 stats.nonpointer_vars++;
2026 clear_edges_for_node (graph, node);
2030 return si;
2033 /* Free information that was only necessary for variable
2034 substitution. */
2036 static void
2037 free_var_substitution_info (struct scc_info *si)
2039 free_scc_info (si);
2040 free (graph->pointer_label);
2041 free (graph->loc_label);
2042 free (graph->pointed_by);
2043 free (graph->points_to);
2044 free (graph->number_incoming);
2045 free (graph->eq_rep);
2046 sbitmap_free (graph->direct_nodes);
2047 sbitmap_free (graph->pt_used);
2048 htab_delete (pointer_equiv_class_table);
2049 htab_delete (location_equiv_class_table);
2050 bitmap_obstack_release (&iteration_obstack);
2053 /* Return an existing node that is equivalent to NODE, which has
2054 equivalence class LABEL, if one exists. Return NODE otherwise. */
2056 static unsigned int
2057 find_equivalent_node (constraint_graph_t graph,
2058 unsigned int node, unsigned int label)
2060 /* If the address version of this variable is unused, we can
2061 substitute it for anything else with the same label.
2062 Otherwise, we know the pointers are equivalent, but not the
2063 locations, and we can unite them later. */
2065 if (!bitmap_bit_p (graph->address_taken, node))
2067 gcc_assert (label < graph->size);
2069 if (graph->eq_rep[label] != -1)
2071 /* Unify the two variables since we know they are equivalent. */
2072 if (unite (graph->eq_rep[label], node))
2073 unify_nodes (graph, graph->eq_rep[label], node, false);
2074 return graph->eq_rep[label];
2076 else
2078 graph->eq_rep[label] = node;
2079 graph->pe_rep[label] = node;
2082 else
2084 gcc_assert (label < graph->size);
2085 graph->pe[node] = label;
2086 if (graph->pe_rep[label] == -1)
2087 graph->pe_rep[label] = node;
2090 return node;
2093 /* Unite pointer equivalent but not location equivalent nodes in
2094 GRAPH. This may only be performed once variable substitution is
2095 finished. */
2097 static void
2098 unite_pointer_equivalences (constraint_graph_t graph)
2100 unsigned int i;
2102 /* Go through the pointer equivalences and unite them to their
2103 representative, if they aren't already. */
2104 for (i = 0; i < FIRST_REF_NODE; i++)
2106 unsigned int label = graph->pe[i];
2107 if (label)
2109 int label_rep = graph->pe_rep[label];
2111 if (label_rep == -1)
2112 continue;
2114 label_rep = find (label_rep);
2115 if (label_rep >= 0 && unite (label_rep, find (i)))
2116 unify_nodes (graph, label_rep, i, false);
2121 /* Move complex constraints to the GRAPH nodes they belong to. */
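/* For illustration (hedged, not a specification): a constraint like
   *x = y is attached to x's node, x = *y is attached to y's node, and
   an offsetted copy such as x = y + OFFSET is also attached to y's
   node.  Plain copies are handled through ordinary graph edges instead.  */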
2123 static void
2124 move_complex_constraints (constraint_graph_t graph)
2126 int i;
2127 constraint_t c;
2129 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2131 if (c)
2133 struct constraint_expr lhs = c->lhs;
2134 struct constraint_expr rhs = c->rhs;
2136 if (lhs.type == DEREF)
2138 insert_into_complex (graph, lhs.var, c);
2140 else if (rhs.type == DEREF)
2142 if (!(get_varinfo (lhs.var)->is_special_var))
2143 insert_into_complex (graph, rhs.var, c);
2145 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2146 && (lhs.offset != 0 || rhs.offset != 0))
2148 insert_into_complex (graph, rhs.var, c);
2155 /* Optimize and rewrite complex constraints while performing
2156 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2157 result of perform_var_substitution. */
2159 static void
2160 rewrite_constraints (constraint_graph_t graph,
2161 struct scc_info *si)
2163 int i;
2164 unsigned int j;
2165 constraint_t c;
2167 for (j = 0; j < graph->size; j++)
2168 gcc_assert (find (j) == j);
2170 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2172 struct constraint_expr lhs = c->lhs;
2173 struct constraint_expr rhs = c->rhs;
2174 unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
2175 unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
2176 unsigned int lhsnode, rhsnode;
2177 unsigned int lhslabel, rhslabel;
2179 lhsnode = si->node_mapping[lhsvar];
2180 rhsnode = si->node_mapping[rhsvar];
2181 lhslabel = graph->pointer_label[lhsnode];
2182 rhslabel = graph->pointer_label[rhsnode];
2184 /* See if it is really a non-pointer variable, and if so, ignore
2185 the constraint. */
2186 if (lhslabel == 0)
2188 if (dump_file && (dump_flags & TDF_DETAILS))
2191 fprintf (dump_file, "%s is a non-pointer variable, "
2192 "ignoring constraint:",
2193 get_varinfo (lhs.var)->name);
2194 dump_constraint (dump_file, c);
2196 VEC_replace (constraint_t, constraints, i, NULL);
2197 continue;
2200 if (rhslabel == 0)
2202 if (dump_file && (dump_flags & TDF_DETAILS))
2205 fprintf (dump_file, "%s is a non-pointer variable, "
2206 "ignoring constraint:",
2207 get_varinfo (rhs.var)->name);
2208 dump_constraint (dump_file, c);
2210 VEC_replace (constraint_t, constraints, i, NULL);
2211 continue;
2214 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2215 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2216 c->lhs.var = lhsvar;
2217 c->rhs.var = rhsvar;
2222 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2223 part of an SCC, false otherwise. */
2225 static bool
2226 eliminate_indirect_cycles (unsigned int node)
2228 if (graph->indirect_cycles[node] != -1
2229 && !bitmap_empty_p (get_varinfo (node)->solution))
2231 unsigned int i;
2232 VEC(unsigned,heap) *queue = NULL;
2233 int queuepos;
2234 unsigned int to = find (graph->indirect_cycles[node]);
2235 bitmap_iterator bi;
2237 /* We can't touch the solution set and call unify_nodes
2238 at the same time, because unify_nodes is going to do
2239 bitmap unions into it. */
2241 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2243 if (find (i) == i && i != to)
2245 if (unite (to, i))
2246 VEC_safe_push (unsigned, heap, queue, i);
2250 for (queuepos = 0;
2251 VEC_iterate (unsigned, queue, queuepos, i);
2252 queuepos++)
2254 unify_nodes (graph, to, i, true);
2256 VEC_free (unsigned, heap, queue);
2257 return true;
2259 return false;
2262 /* Solve the constraint graph GRAPH using our worklist solver.
2263 This is based on the PW* family of solvers from the "Efficient Field
2264 Sensitive Pointer Analysis for C" paper.
2265 It works by iterating over all the graph nodes, processing the complex
2266 constraints and propagating the copy constraints, until everything stops
2267 changing. This corresponds to steps 6-8 in the solving list given above. */
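/* Roughly, as a simplified sketch of the loop below: while any node is
   marked changed, visit the nodes in topological order; for a changed
   node N, run its complex constraints against the bits newly added to
   Sol(N), then propagate those bits along N's copy edges, marking any
   successor whose solution grows as changed again.  */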
2269 static void
2270 solve_graph (constraint_graph_t graph)
2272 unsigned int size = graph->size;
2273 unsigned int i;
2274 bitmap pts;
2276 changed_count = 0;
2277 changed = sbitmap_alloc (size);
2278 sbitmap_zero (changed);
2280 /* Mark all initial non-collapsed nodes as changed. */
2281 for (i = 0; i < size; i++)
2283 varinfo_t ivi = get_varinfo (i);
2284 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2285 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2286 || VEC_length (constraint_t, graph->complex[i]) > 0))
2288 SET_BIT (changed, i);
2289 changed_count++;
2293 /* Allocate a bitmap to be used to store the changed bits. */
2294 pts = BITMAP_ALLOC (&pta_obstack);
2296 while (changed_count > 0)
2298 unsigned int i;
2299 struct topo_info *ti = init_topo_info ();
2300 stats.iterations++;
2302 bitmap_obstack_initialize (&iteration_obstack);
2304 compute_topo_order (graph, ti);
2306 while (VEC_length (unsigned, ti->topo_order) != 0)
2309 i = VEC_pop (unsigned, ti->topo_order);
2311 /* If this variable is not a representative, skip it. */
2312 if (find (i) != i)
2313 continue;
2315 /* In certain indirect cycle cases, we may merge this
2316 variable into another. */
2317 if (eliminate_indirect_cycles (i) && find (i) != i)
2318 continue;
2320 /* If the node has changed, we need to process the
2321 complex constraints and outgoing edges again. */
2322 if (TEST_BIT (changed, i))
2324 unsigned int j;
2325 constraint_t c;
2326 bitmap solution;
2327 VEC(constraint_t,heap) *complex = graph->complex[i];
2328 bool solution_empty;
2330 RESET_BIT (changed, i);
2331 changed_count--;
2333 /* Compute the changed set of solution bits. */
2334 bitmap_and_compl (pts, get_varinfo (i)->solution,
2335 get_varinfo (i)->oldsolution);
2337 if (bitmap_empty_p (pts))
2338 continue;
2340 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2342 solution = get_varinfo (i)->solution;
2343 solution_empty = bitmap_empty_p (solution);
2345 /* Process the complex constraints */
2346 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2348 /* XXX: This is going to unsort the constraints in
2349 some cases, which will occasionally add duplicate
2350 constraints during unification. This does not
2351 affect correctness. */
2352 c->lhs.var = find (c->lhs.var);
2353 c->rhs.var = find (c->rhs.var);
2355 /* The only complex constraint that can change our
2356 solution to non-empty, given an empty solution,
2357 is a constraint where the lhs side is receiving
2358 some set from elsewhere. */
2359 if (!solution_empty || c->lhs.type != DEREF)
2360 do_complex_constraint (graph, c, pts);
2363 solution_empty = bitmap_empty_p (solution);
2365 if (!solution_empty)
2367 bitmap_iterator bi;
2369 /* Propagate solution to all successors. */
2370 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2371 0, j, bi)
2373 bitmap tmp;
2374 bool flag;
2376 unsigned int to = find (j);
2377 tmp = get_varinfo (to)->solution;
2378 flag = false;
2380 /* Don't try to propagate to ourselves. */
2381 if (to == i)
2382 continue;
2384 flag = set_union_with_increment (tmp, pts, 0);
2386 if (flag)
2388 get_varinfo (to)->solution = tmp;
2389 if (!TEST_BIT (changed, to))
2391 SET_BIT (changed, to);
2392 changed_count++;
2399 free_topo_info (ti);
2400 bitmap_obstack_release (&iteration_obstack);
2403 BITMAP_FREE (pts);
2404 sbitmap_free (changed);
2405 bitmap_obstack_release (&oldpta_obstack);
2408 /* Map from trees to variable infos. */
2409 static struct pointer_map_t *vi_for_tree;
2412 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2414 static void
2415 insert_vi_for_tree (tree t, varinfo_t vi)
2417 void **slot = pointer_map_insert (vi_for_tree, t);
2418 gcc_assert (vi);
2419 gcc_assert (*slot == NULL);
2420 *slot = vi;
2423 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2424 exist in the map, return NULL; otherwise return the varinfo we found. */
2426 static varinfo_t
2427 lookup_vi_for_tree (tree t)
2429 void **slot = pointer_map_contains (vi_for_tree, t);
2430 if (slot == NULL)
2431 return NULL;
2433 return (varinfo_t) *slot;
2436 /* Return a printable name for DECL */
2438 static const char *
2439 alias_get_name (tree decl)
2441 const char *res = get_name (decl);
2442 char *temp;
2443 int num_printed = 0;
2445 if (res != NULL)
2446 return res;
2448 res = "NULL";
2449 if (!dump_file)
2450 return res;
2452 if (TREE_CODE (decl) == SSA_NAME)
2454 num_printed = asprintf (&temp, "%s_%u",
2455 alias_get_name (SSA_NAME_VAR (decl)),
2456 SSA_NAME_VERSION (decl));
2458 else if (DECL_P (decl))
2460 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2462 if (num_printed > 0)
2464 res = ggc_strdup (temp);
2465 free (temp);
2467 return res;
2470 /* Find the variable info for tree T in the map.
2471 If T doesn't exist in the map, create an entry for it and return it. */
2473 static varinfo_t
2474 get_vi_for_tree (tree t)
2476 void **slot = pointer_map_contains (vi_for_tree, t);
2477 if (slot == NULL)
2478 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2480 return (varinfo_t) *slot;
2483 /* Get a constraint expression from an SSA_VAR_P node. */
2485 static struct constraint_expr
2486 get_constraint_exp_from_ssa_var (tree t)
2488 struct constraint_expr cexpr;
2490 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2492 /* For parameters, get at the points-to set for the actual parm
2493 decl. */
2494 if (TREE_CODE (t) == SSA_NAME
2495 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2496 && SSA_NAME_IS_DEFAULT_DEF (t))
2497 return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));
2499 cexpr.type = SCALAR;
2501 cexpr.var = get_vi_for_tree (t)->id;
2502 /* If we determine the result is "anything", and we know this is readonly,
2503 say it points to readonly memory instead. */
2504 if (cexpr.var == anything_id && TREE_READONLY (t))
2506 cexpr.type = ADDRESSOF;
2507 cexpr.var = readonly_id;
2510 cexpr.offset = 0;
2511 return cexpr;
2514 /* Process a completed constraint T, and add it to the constraint
2515 list. FROM_CALL is true if this is a constraint coming from a
2516 call, which means any DEREFs we see are "may-derefs", not
2517 "must-derefs". */
2519 static void
2520 process_constraint_1 (constraint_t t, bool from_call)
2522 struct constraint_expr rhs = t->rhs;
2523 struct constraint_expr lhs = t->lhs;
2525 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2526 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2528 if (!from_call)
2530 if (lhs.type == DEREF)
2531 get_varinfo (lhs.var)->directly_dereferenced = true;
2532 if (rhs.type == DEREF)
2533 get_varinfo (rhs.var)->directly_dereferenced = true;
2536 if (!use_field_sensitive)
2538 t->rhs.offset = 0;
2539 t->lhs.offset = 0;
2542 /* ANYTHING == ANYTHING is pointless. */
2543 if (lhs.var == anything_id && rhs.var == anything_id)
2544 return;
2546 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING. */
2547 else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
2549 rhs = t->lhs;
2550 t->lhs = t->rhs;
2551 t->rhs = rhs;
2552 process_constraint_1 (t, from_call);
2554 /* This can happen in our IR with things like n->a = *p */
2555 else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2557 /* Split into tmp = *rhs, *lhs = tmp */
2558 tree rhsdecl = get_varinfo (rhs.var)->decl;
2559 tree pointertype = TREE_TYPE (rhsdecl);
2560 tree pointedtotype = TREE_TYPE (pointertype);
2561 tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
2562 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2564 /* If this is an aggregate of known size, we should have passed
2565 this off to do_structure_copy, and it should have broken it
2566 up. */
2567 gcc_assert (!AGGREGATE_TYPE_P (pointedtotype)
2568 || get_varinfo (rhs.var)->is_unknown_size_var);
2570 process_constraint_1 (new_constraint (tmplhs, rhs), from_call);
2571 process_constraint_1 (new_constraint (lhs, tmplhs), from_call);
2573 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2575 /* Split into tmp = &rhs, *lhs = tmp */
2576 tree rhsdecl = get_varinfo (rhs.var)->decl;
2577 tree pointertype = TREE_TYPE (rhsdecl);
2578 tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp");
2579 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2581 process_constraint_1 (new_constraint (tmplhs, rhs), from_call);
2582 process_constraint_1 (new_constraint (lhs, tmplhs), from_call);
2584 else
2586 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2587 VEC_safe_push (constraint_t, heap, constraints, t);
2592 /* Process constraint T, performing various simplifications and then
2593 adding it to our list of overall constraints. */
2595 static void
2596 process_constraint (constraint_t t)
2598 process_constraint_1 (t, false);
2601 /* Return true if T is a variable of a type that could contain
2602 pointers. */
2604 static bool
2605 could_have_pointers (tree t)
2607 tree type = TREE_TYPE (t);
2609 if (POINTER_TYPE_P (type)
2610 || AGGREGATE_TYPE_P (type)
2611 || TREE_CODE (type) == COMPLEX_TYPE)
2612 return true;
2614 return false;
2617 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2618 structure. */
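/* Worked example of the formula below (illustration only): a field with
   a DECL_FIELD_OFFSET of 4 bytes and a DECL_FIELD_BIT_OFFSET of 16 sits
   at bit 4 * 8 + 16 = 48 from the start of its structure.  */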
2620 static unsigned HOST_WIDE_INT
2621 bitpos_of_field (const tree fdecl)
2624 if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
2625 || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
2626 return -1;
2628 return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
2629 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
2633 /* Return true if an access to [ACCESSPOS, ACCESSSIZE]
2634 overlaps with a field at [FIELDPOS, FIELDSIZE] */
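/* Worked example (illustrative only): a field at bit 32 of size 32
   covers [32, 64); an access starting at bit 48 overlaps it, an access
   starting at bit 64 does not, and an access starting at bit 16 of
   size 32 does, because it runs past bit 32.  */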
2636 static bool
2637 offset_overlaps_with_access (const unsigned HOST_WIDE_INT fieldpos,
2638 const unsigned HOST_WIDE_INT fieldsize,
2639 const unsigned HOST_WIDE_INT accesspos,
2640 const unsigned HOST_WIDE_INT accesssize)
2642 if (fieldpos == accesspos && fieldsize == accesssize)
2643 return true;
2644 if (accesspos >= fieldpos && accesspos < (fieldpos + fieldsize))
2645 return true;
2646 if (accesspos < fieldpos && (accesspos + accesssize > fieldpos))
2647 return true;
2649 return false;
2652 /* Given a COMPONENT_REF T, return the constraint_expr for it. */
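/* As a hedged example: for x.b, where x has been split into
   sub-variables, the constraint for the base x is computed first and
   its variable is then replaced by the sub-variable whose bit range
   overlaps the offset of b, per the SCALAR handling below.  */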
2654 static void
2655 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results)
2657 tree orig_t = t;
2658 HOST_WIDE_INT bitsize = -1;
2659 HOST_WIDE_INT bitmaxsize = -1;
2660 HOST_WIDE_INT bitpos;
2661 tree forzero;
2662 struct constraint_expr *result;
2663 unsigned int beforelength = VEC_length (ce_s, *results);
2665 /* Some people like to do cute things like take the address of
2666 &0->a.b */
2667 forzero = t;
2668 while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
2669 forzero = TREE_OPERAND (forzero, 0);
2671 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
2673 struct constraint_expr temp;
2675 temp.offset = 0;
2676 temp.var = integer_id;
2677 temp.type = SCALAR;
2678 VEC_safe_push (ce_s, heap, *results, &temp);
2679 return;
2682 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
2684 /* String constants are readonly, so there is nothing to really do
2685 here. */
2686 if (TREE_CODE (t) == STRING_CST)
2687 return;
2689 get_constraint_for (t, results);
2690 result = VEC_last (ce_s, *results);
2691 result->offset = bitpos;
2693 gcc_assert (beforelength + 1 == VEC_length (ce_s, *results));
2695 /* This can also happen due to weird offsetof type macros. */
2696 if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
2697 result->type = SCALAR;
2699 if (result->type == SCALAR)
2701 /* In languages like C, you can access one past the end of an
2702 array. You aren't allowed to dereference it, so we can
2703 ignore this constraint. When we handle pointer subtraction,
2704 we may have to do something cute here. */
2706 if (result->offset < get_varinfo (result->var)->fullsize
2707 && bitmaxsize != 0)
2709 /* It's also not true that the constraint will actually start at the
2710 right offset; it may start in some padding. We only care about
2711 setting the constraint to the first actual field it touches, so
2712 walk to find it. */
2713 varinfo_t curr;
2714 for (curr = get_varinfo (result->var); curr; curr = curr->next)
2716 if (offset_overlaps_with_access (curr->offset, curr->size,
2717 result->offset, bitmaxsize))
2719 result->var = curr->id;
2720 break;
2723 /* Assert that we found *some* field there; the user shouldn't be
2724 accessing *only* padding. Still, the user could access one past
2725 the end of an array embedded in a struct, resulting in accessing
2726 *only* padding, hence the ref_contains_array_ref check below. */
2727 gcc_assert (curr || ref_contains_array_ref (orig_t));
2729 else if (bitmaxsize == 0)
2731 if (dump_file && (dump_flags & TDF_DETAILS))
2732 fprintf (dump_file, "Access to zero-sized part of variable, "
2733 "ignoring\n");
2735 else
2736 if (dump_file && (dump_flags & TDF_DETAILS))
2737 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
2739 result->offset = 0;
2741 else if (bitmaxsize == -1)
2743 /* We can't handle DEREF constraints with unknown size; we'll
2744 get the wrong answer. Punt and return anything. */
2745 result->var = anything_id;
2746 result->offset = 0;
2751 /* Dereference the constraint expression CONS, and return the result.
2752 DEREF (ADDRESSOF) = SCALAR
2753 DEREF (SCALAR) = DEREF
2754 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2755 This is needed so that we can handle dereferencing DEREF constraints. */
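/* For instance (illustrative): dereferencing a SCALAR x yields DEREF x,
   dereferencing ADDRESSOF x yields SCALAR x, and dereferencing a DEREF,
   as happens for something like **p, first copies through a "dereftmp"
   temporary so that only single-level DEREFs remain.  */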
2757 static void
2758 do_deref (VEC (ce_s, heap) **constraints)
2760 struct constraint_expr *c;
2761 unsigned int i = 0;
2763 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
2765 if (c->type == SCALAR)
2766 c->type = DEREF;
2767 else if (c->type == ADDRESSOF)
2768 c->type = SCALAR;
2769 else if (c->type == DEREF)
2771 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
2772 struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
2773 process_constraint (new_constraint (tmplhs, *c));
2774 c->var = tmplhs.var;
2776 else
2777 gcc_unreachable ();
2781 /* Given a tree T, return the constraint expression for it. */
2783 static void
2784 get_constraint_for (tree t, VEC (ce_s, heap) **results)
2786 struct constraint_expr temp;
2788 /* x = integer is all glommed to a single variable, which doesn't
2789 point to anything by itself. That is, of course, unless it is an
2790 integer constant being treated as a pointer, in which case, we
2791 will return that this is really the addressof anything. This
2792 happens below, since it will fall into the default case. The only
2793 case where we know something about an integer treated like a pointer is
2794 when it is the NULL pointer, and then we just say it points to
2795 NULL. */
2796 if (TREE_CODE (t) == INTEGER_CST
2797 && integer_zerop (t))
2799 temp.var = nothing_id;
2800 temp.type = ADDRESSOF;
2801 temp.offset = 0;
2802 VEC_safe_push (ce_s, heap, *results, &temp);
2803 return;
2806 switch (TREE_CODE_CLASS (TREE_CODE (t)))
2808 case tcc_expression:
2809 case tcc_vl_exp:
2811 switch (TREE_CODE (t))
2813 case ADDR_EXPR:
2815 struct constraint_expr *c;
2816 unsigned int i;
2817 tree exp = TREE_OPERAND (t, 0);
2818 tree pttype = TREE_TYPE (TREE_TYPE (t));
2820 get_constraint_for (exp, results);
2823 /* Complex types are special. Taking the address of one
2824 allows you to access either part of it through that
2825 pointer. */
2826 if (VEC_length (ce_s, *results) == 1 &&
2827 TREE_CODE (pttype) == COMPLEX_TYPE)
2829 struct constraint_expr *origrhs;
2830 varinfo_t origvar;
2831 struct constraint_expr tmp;
2833 gcc_assert (VEC_length (ce_s, *results) == 1);
2834 origrhs = VEC_last (ce_s, *results);
2835 tmp = *origrhs;
2836 VEC_pop (ce_s, *results);
2837 origvar = get_varinfo (origrhs->var);
2838 for (; origvar; origvar = origvar->next)
2840 tmp.var = origvar->id;
2841 VEC_safe_push (ce_s, heap, *results, &tmp);
2845 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
2847 if (c->type == DEREF)
2848 c->type = SCALAR;
2849 else
2850 c->type = ADDRESSOF;
2852 return;
2854 break;
2855 case CALL_EXPR:
2856 /* XXX: In interprocedural mode, if we didn't have the
2857 body, we would need to add *each pointer argument =
2858 &ANYTHING. */
2859 if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
2861 varinfo_t vi;
2862 tree heapvar = heapvar_lookup (t);
2864 if (heapvar == NULL)
2866 heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
2867 DECL_EXTERNAL (heapvar) = 1;
2868 get_var_ann (heapvar)->is_heapvar = 1;
2869 if (gimple_referenced_vars (cfun))
2870 add_referenced_var (heapvar);
2871 heapvar_insert (t, heapvar);
2874 temp.var = create_variable_info_for (heapvar,
2875 alias_get_name (heapvar));
2877 vi = get_varinfo (temp.var);
2878 vi->is_artificial_var = 1;
2879 vi->is_heap_var = 1;
2880 temp.type = ADDRESSOF;
2881 temp.offset = 0;
2882 VEC_safe_push (ce_s, heap, *results, &temp);
2883 return;
2885 else
2887 temp.var = anything_id;
2888 temp.type = SCALAR;
2889 temp.offset = 0;
2890 VEC_safe_push (ce_s, heap, *results, &temp);
2891 return;
2893 break;
2894 default:
2896 temp.type = ADDRESSOF;
2897 temp.var = anything_id;
2898 temp.offset = 0;
2899 VEC_safe_push (ce_s, heap, *results, &temp);
2900 return;
2904 case tcc_reference:
2906 switch (TREE_CODE (t))
2908 case INDIRECT_REF:
2910 get_constraint_for (TREE_OPERAND (t, 0), results);
2911 do_deref (results);
2912 return;
2914 case ARRAY_REF:
2915 case ARRAY_RANGE_REF:
2916 case COMPONENT_REF:
2917 get_constraint_for_component_ref (t, results);
2918 return;
2919 default:
2921 temp.type = ADDRESSOF;
2922 temp.var = anything_id;
2923 temp.offset = 0;
2924 VEC_safe_push (ce_s, heap, *results, &temp);
2925 return;
2929 case tcc_unary:
2931 switch (TREE_CODE (t))
2933 case NOP_EXPR:
2934 case CONVERT_EXPR:
2935 case NON_LVALUE_EXPR:
2937 tree op = TREE_OPERAND (t, 0);
2939 /* Casts from non-pointer to pointer are bad news for us.
2940 Anything else, we see through. */
2941 if (!(POINTER_TYPE_P (TREE_TYPE (t))
2942 && ! POINTER_TYPE_P (TREE_TYPE (op))))
2944 get_constraint_for (op, results);
2945 return;
2948 /* FALLTHRU */
2950 default:
2952 temp.type = ADDRESSOF;
2953 temp.var = anything_id;
2954 temp.offset = 0;
2955 VEC_safe_push (ce_s, heap, *results, &temp);
2956 return;
2960 case tcc_exceptional:
2962 switch (TREE_CODE (t))
2964 case PHI_NODE:
2966 get_constraint_for (PHI_RESULT (t), results);
2967 return;
2969 break;
2970 case SSA_NAME:
2972 struct constraint_expr temp;
2973 temp = get_constraint_exp_from_ssa_var (t);
2974 VEC_safe_push (ce_s, heap, *results, &temp);
2975 return;
2977 break;
2978 default:
2980 temp.type = ADDRESSOF;
2981 temp.var = anything_id;
2982 temp.offset = 0;
2983 VEC_safe_push (ce_s, heap, *results, &temp);
2984 return;
2988 case tcc_declaration:
2990 struct constraint_expr temp;
2991 temp = get_constraint_exp_from_ssa_var (t);
2992 VEC_safe_push (ce_s, heap, *results, &temp);
2993 return;
2995 default:
2997 temp.type = ADDRESSOF;
2998 temp.var = anything_id;
2999 temp.offset = 0;
3000 VEC_safe_push (ce_s, heap, *results, &temp);
3001 return;
3007 /* Handle the structure copy case where we have a simple structure copy
3008 between LHS and RHS that is of SIZE (in bits)
3010 For each field of the lhs variable (lhsfield)
3011 For each field of the rhs variable at lhsfield.offset (rhsfield)
3012 add the constraint lhsfield = rhsfield
3014 If we fail due to some kind of type unsafety or other thing we
3015 can't handle, return false. We expect the caller to collapse the
3016 variable in that case. */
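/* Hedged example (assumed user code, not from this file):

     struct S { int *p; int *q; } a, b;
     ...
     a = b;

   would, provided both variables were split into fields at matching
   offsets, produce the field constraints a.p = b.p and a.q = b.q.
   If a matching RHS field cannot be found, we return false.  */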
3018 static bool
3019 do_simple_structure_copy (const struct constraint_expr lhs,
3020 const struct constraint_expr rhs,
3021 const unsigned HOST_WIDE_INT size)
3023 varinfo_t p = get_varinfo (lhs.var);
3024 unsigned HOST_WIDE_INT pstart, last;
3025 pstart = p->offset;
3026 last = p->offset + size;
3027 for (; p && p->offset < last; p = p->next)
3029 varinfo_t q;
3030 struct constraint_expr templhs = lhs;
3031 struct constraint_expr temprhs = rhs;
3032 unsigned HOST_WIDE_INT fieldoffset;
3034 templhs.var = p->id;
3035 q = get_varinfo (temprhs.var);
3036 fieldoffset = p->offset - pstart;
3037 q = first_vi_for_offset (q, q->offset + fieldoffset);
3038 if (!q)
3039 return false;
3040 temprhs.var = q->id;
3041 process_constraint (new_constraint (templhs, temprhs));
3043 return true;
3047 /* Handle the structure copy case where we have a structure copy between an
3048 aggregate on the LHS and a dereference of a pointer on the RHS
3049 that is of SIZE (in bits)
3051 For each field of the lhs variable (lhsfield)
3052 rhs.offset = lhsfield->offset
3053 add the constraint lhsfield = rhs
3056 static void
3057 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
3058 const struct constraint_expr rhs,
3059 const unsigned HOST_WIDE_INT size)
3061 varinfo_t p = get_varinfo (lhs.var);
3062 unsigned HOST_WIDE_INT pstart,last;
3063 pstart = p->offset;
3064 last = p->offset + size;
3066 for (; p && p->offset < last; p = p->next)
3068 varinfo_t q;
3069 struct constraint_expr templhs = lhs;
3070 struct constraint_expr temprhs = rhs;
3071 unsigned HOST_WIDE_INT fieldoffset;
3074 if (templhs.type == SCALAR)
3075 templhs.var = p->id;
3076 else
3077 templhs.offset = p->offset;
3079 q = get_varinfo (temprhs.var);
3080 fieldoffset = p->offset - pstart;
3081 temprhs.offset += fieldoffset;
3082 process_constraint (new_constraint (templhs, temprhs));
3086 /* Handle the structure copy case where we have a structure copy
3087 between an aggregate on the RHS and a dereference of a pointer on
3088 the LHS that is of SIZE (in bits)
3090 For each field of the rhs variable (rhsfield)
3091 lhs.offset = rhsfield->offset
3092 add the constraint lhs = rhsfield
3095 static void
3096 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
3097 const struct constraint_expr rhs,
3098 const unsigned HOST_WIDE_INT size)
3100 varinfo_t p = get_varinfo (rhs.var);
3101 unsigned HOST_WIDE_INT pstart,last;
3102 pstart = p->offset;
3103 last = p->offset + size;
3105 for (; p && p->offset < last; p = p->next)
3107 varinfo_t q;
3108 struct constraint_expr templhs = lhs;
3109 struct constraint_expr temprhs = rhs;
3110 unsigned HOST_WIDE_INT fieldoffset;
3113 if (temprhs.type == SCALAR)
3114 temprhs.var = p->id;
3115 else
3116 temprhs.offset = p->offset;
3118 q = get_varinfo (templhs.var);
3119 fieldoffset = p->offset - pstart;
3120 templhs.offset += fieldoffset;
3121 process_constraint (new_constraint (templhs, temprhs));
3125 /* Sometimes, frontends like to give us bad type information. This
3126 function will collapse all the fields that follow VAR, up to the end of
3127 the whole variable, into VAR, so that we treat those fields as a single variable.
3128 We return the variable they were collapsed into. */
3130 static unsigned int
3131 collapse_rest_of_var (unsigned int var)
3133 varinfo_t currvar = get_varinfo (var);
3134 varinfo_t field;
3136 for (field = currvar->next; field; field = field->next)
3138 if (dump_file)
3139 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
3140 field->name, currvar->name);
3142 gcc_assert (!field->collapsed_to);
3143 field->collapsed_to = currvar;
3146 currvar->next = NULL;
3147 currvar->size = currvar->fullsize - currvar->offset;
3149 return currvar->id;
3152 /* Handle aggregate copies by expanding into copies of the respective
3153 fields of the structures. */
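/* A rough picture of the dispatch below (illustrative): when both sides
   are SCALAR we try do_simple_structure_copy and collapse the variables
   on failure; a DEREF on exactly one side goes to
   do_rhs_deref_structure_copy or do_lhs_deref_structure_copy; a DEREF
   on both sides is split through a "structcopydereftmp" temporary.  */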
3155 static void
3156 do_structure_copy (tree lhsop, tree rhsop)
3158 struct constraint_expr lhs, rhs, tmp;
3159 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3160 varinfo_t p;
3161 unsigned HOST_WIDE_INT lhssize;
3162 unsigned HOST_WIDE_INT rhssize;
3164 get_constraint_for (lhsop, &lhsc);
3165 get_constraint_for (rhsop, &rhsc);
3166 gcc_assert (VEC_length (ce_s, lhsc) == 1);
3167 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3168 lhs = *(VEC_last (ce_s, lhsc));
3169 rhs = *(VEC_last (ce_s, rhsc));
3171 VEC_free (ce_s, heap, lhsc);
3172 VEC_free (ce_s, heap, rhsc);
3174 /* If we have special var = x, swap it around. */
3175 if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
3177 tmp = lhs;
3178 lhs = rhs;
3179 rhs = tmp;
3182 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
3183 possible it's something we could handle. However, most cases falling
3184 into this are dealing with transparent unions, which are slightly
3185 weird. */
3186 if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
3188 rhs.type = ADDRESSOF;
3189 rhs.var = anything_id;
3192 /* If the RHS is a special var, or an addressof, set all the LHS fields to
3193 that special var. */
3194 if (rhs.var <= integer_id)
3196 for (p = get_varinfo (lhs.var); p; p = p->next)
3198 struct constraint_expr templhs = lhs;
3199 struct constraint_expr temprhs = rhs;
3201 if (templhs.type == SCALAR )
3202 templhs.var = p->id;
3203 else
3204 templhs.offset += p->offset;
3205 process_constraint (new_constraint (templhs, temprhs));
3208 else
3210 tree rhstype = TREE_TYPE (rhsop);
3211 tree lhstype = TREE_TYPE (lhsop);
3212 tree rhstypesize;
3213 tree lhstypesize;
3215 lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
3216 rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);
3218 /* If we have a variably sized type on the rhs or lhs, and a deref
3219 constraint, add the constraint lhsconstraint = &ANYTHING.
3220 This is conservatively correct because either the lhs is an unknown
3221 sized var (if the constraint is SCALAR), or the lhs is a DEREF
3222 constraint, and every variable it can point to must be unknown sized
3223 anyway, so we don't need to worry about fields at all. */
3224 if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
3225 || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
3227 rhs.var = anything_id;
3228 rhs.type = ADDRESSOF;
3229 rhs.offset = 0;
3230 process_constraint (new_constraint (lhs, rhs));
3231 return;
3234 /* The size only really matters insofar as we don't set more or less of
3235 the variable. If we hit an unknown size var, the size should be the
3236 whole darn thing. */
3237 if (get_varinfo (rhs.var)->is_unknown_size_var)
3238 rhssize = ~0;
3239 else
3240 rhssize = TREE_INT_CST_LOW (rhstypesize);
3242 if (get_varinfo (lhs.var)->is_unknown_size_var)
3243 lhssize = ~0;
3244 else
3245 lhssize = TREE_INT_CST_LOW (lhstypesize);
3248 if (rhs.type == SCALAR && lhs.type == SCALAR)
3250 if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
3252 lhs.var = collapse_rest_of_var (lhs.var);
3253 rhs.var = collapse_rest_of_var (rhs.var);
3254 lhs.offset = 0;
3255 rhs.offset = 0;
3256 lhs.type = SCALAR;
3257 rhs.type = SCALAR;
3258 process_constraint (new_constraint (lhs, rhs));
3261 else if (lhs.type != DEREF && rhs.type == DEREF)
3262 do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3263 else if (lhs.type == DEREF && rhs.type != DEREF)
3264 do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
3265 else
3267 tree pointedtotype = lhstype;
3268 tree tmpvar;
3270 gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
3271 tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
3272 do_structure_copy (tmpvar, rhsop);
3273 do_structure_copy (lhsop, tmpvar);
3279 /* Update related alias information kept in AI. This is used when
3280 building name tags, alias sets and deciding grouping heuristics.
3281 STMT is the statement to process. This function also updates
3282 ADDRESSABLE_VARS. */
3284 static void
3285 update_alias_info (tree stmt, struct alias_info *ai)
3287 bitmap addr_taken;
3288 use_operand_p use_p;
3289 ssa_op_iter iter;
3290 bool stmt_dereferences_ptr_p;
3291 enum escape_type stmt_escape_type = is_escape_site (stmt);
3292 struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);
3294 stmt_dereferences_ptr_p = false;
3296 if (stmt_escape_type == ESCAPE_TO_CALL
3297 || stmt_escape_type == ESCAPE_TO_PURE_CONST)
3299 mem_ref_stats->num_call_sites++;
3300 if (stmt_escape_type == ESCAPE_TO_PURE_CONST)
3301 mem_ref_stats->num_pure_const_call_sites++;
3303 else if (stmt_escape_type == ESCAPE_TO_ASM)
3304 mem_ref_stats->num_asm_sites++;
3306 /* Mark all the variables whose addresses are taken by the statement. */
3307 addr_taken = addresses_taken (stmt);
3308 if (addr_taken)
3310 bitmap_ior_into (gimple_addressable_vars (cfun), addr_taken);
3312 /* If STMT is an escape point, all the addresses taken by it are
3313 call-clobbered. */
3314 if (stmt_escape_type != NO_ESCAPE)
3316 bitmap_iterator bi;
3317 unsigned i;
3319 EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
3321 tree rvar = referenced_var (i);
3322 if (!unmodifiable_var_p (rvar))
3323 mark_call_clobbered (rvar, stmt_escape_type);
3328 /* Process each operand use. For pointers, determine whether they
3329 are dereferenced by the statement, or whether their value
3330 escapes, etc. */
3331 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
3333 tree op, var;
3334 var_ann_t v_ann;
3335 struct ptr_info_def *pi;
3336 unsigned num_uses, num_loads, num_stores;
3338 op = USE_FROM_PTR (use_p);
3340 /* If STMT is a PHI node, OP may be an ADDR_EXPR. If so, add it
3341 to the set of addressable variables. */
3342 if (TREE_CODE (op) == ADDR_EXPR)
3344 bitmap addressable_vars = gimple_addressable_vars (cfun);
3346 gcc_assert (TREE_CODE (stmt) == PHI_NODE);
3347 gcc_assert (addressable_vars);
3349 /* PHI nodes don't have annotations for pinning the set
3350 of addresses taken, so we collect them here.
3352 FIXME, should we allow PHI nodes to have annotations
3353 so that they can be treated like regular statements?
3354 Currently, they are treated as second-class
3355 statements. */
3356 add_to_addressable_set (TREE_OPERAND (op, 0), &addressable_vars);
3357 continue;
3360 /* Ignore constants (they may occur in PHI node arguments). */
3361 if (TREE_CODE (op) != SSA_NAME)
3362 continue;
3364 var = SSA_NAME_VAR (op);
3365 v_ann = var_ann (var);
3367 /* The base variable of an SSA name must be a GIMPLE register, and thus
3368 it cannot be aliased. */
3369 gcc_assert (!may_be_aliased (var));
3371 /* We are only interested in pointers. */
3372 if (!POINTER_TYPE_P (TREE_TYPE (op)))
3373 continue;
3375 pi = get_ptr_info (op);
3377 /* Add OP to AI->PROCESSED_PTRS, if it's not there already. */
3378 if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
3380 SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
3381 VEC_safe_push (tree, heap, ai->processed_ptrs, op);
3384 /* If STMT is a PHI node, then it will not have pointer
3385 dereferences and it will not be an escape point. */
3386 if (TREE_CODE (stmt) == PHI_NODE)
3387 continue;
3389 /* Determine whether OP is a dereferenced pointer, and if STMT
3390 is an escape point, whether OP escapes. */
3391 count_uses_and_derefs (op, stmt, &num_uses, &num_loads, &num_stores);
3393 /* Handle a corner case involving address expressions of the
3394 form '&PTR->FLD'. The problem with these expressions is that
3395 they do not represent a dereference of PTR. However, if some
3396 other transformation propagates them into an INDIRECT_REF
3397 expression, we end up with '*(&PTR->FLD)' which is folded
3398 into 'PTR->FLD'.
3400 So, if the original code had no other dereferences of PTR,
3401 the aliaser will not create memory tags for it, and when
3402 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
3403 memory operations will receive no VDEF/VUSE operands.
3405 One solution would be to have count_uses_and_derefs consider
3406 &PTR->FLD a dereference of PTR. But that is wrong, since it
3407 is not really a dereference but an offset calculation.
3409 What we do here is to recognize these special ADDR_EXPR
3410 nodes. Since these expressions are never GIMPLE values (they
3411 are not GIMPLE invariants), they can only appear on the RHS
3412 of an assignment and their base address is always an
3413 INDIRECT_REF expression. */
3414 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3415 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR
3416 && !is_gimple_val (GIMPLE_STMT_OPERAND (stmt, 1)))
3418 /* If the RHS is of the form &PTR->FLD and PTR == OP, then
3419 this represents a potential dereference of PTR. */
3420 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
3421 tree base = get_base_address (TREE_OPERAND (rhs, 0));
3422 if (TREE_CODE (base) == INDIRECT_REF
3423 && TREE_OPERAND (base, 0) == op)
3424 num_loads++;
3427 if (num_loads + num_stores > 0)
3429 /* Mark OP as dereferenced. In a subsequent pass,
3430 dereferenced pointers that point to a set of
3431 variables will be assigned a name tag to alias
3432 all the variables OP points to. */
3433 pi->is_dereferenced = 1;
3435 /* If this is a store operation, mark OP as being
3436 dereferenced to store, otherwise mark it as being
3437 dereferenced to load. */
3438 if (num_stores > 0)
3439 pointer_set_insert (ai->dereferenced_ptrs_store, var);
3440 else
3441 pointer_set_insert (ai->dereferenced_ptrs_load, var);
3443 /* Update the frequency estimate for all the dereferences of
3444 pointer OP. */
3445 update_mem_sym_stats_from_stmt (op, stmt, num_loads, num_stores);
3447 /* Indicate that STMT contains pointer dereferences. */
3448 stmt_dereferences_ptr_p = true;
3451 if (stmt_escape_type != NO_ESCAPE && num_loads + num_stores < num_uses)
3453 /* If STMT is an escape point and STMT contains at
3454 least one direct use of OP, then the value of OP
3455 escapes and so the pointed-to variables need to
3456 be marked call-clobbered. */
3457 pi->value_escapes_p = 1;
3458 pi->escape_mask |= stmt_escape_type;
3460 /* If the statement makes a function call, assume
3461 that pointer OP will be dereferenced in a store
3462 operation inside the called function. */
3463 if (get_call_expr_in (stmt)
3464 || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
3466 pointer_set_insert (ai->dereferenced_ptrs_store, var);
3467 pi->is_dereferenced = 1;
3472 if (TREE_CODE (stmt) == PHI_NODE)
3473 return;
3475 /* Mark stored variables in STMT as being written to and update the
3476 memory reference stats for all memory symbols referenced by STMT. */
3477 if (stmt_references_memory_p (stmt))
3479 unsigned i;
3480 bitmap_iterator bi;
3482 mem_ref_stats->num_mem_stmts++;
3484 /* Notice that we only update memory reference stats for symbols
3485 loaded and stored by the statement if the statement does not
3486 contain pointer dereferences and it is not a call/asm site.
3487 This is to avoid double accounting problems when creating
3488 memory partitions. After computing points-to information,
3489 pointer dereference statistics are used to update the
3490 reference stats of the pointed-to variables, so here we
3491 should only update direct references to symbols.
3493 Indirect references are not updated here for two reasons: (1)
3494 The first time we compute alias information, the sets
3495 LOADED/STORED are empty for pointer dereferences, (2) After
3496 partitioning, LOADED/STORED may have references to
3497 partitions, not the original pointed-to variables. So, if we
3498 always counted LOADED/STORED here and during partitioning, we
3499 would count many symbols more than once.
3501 This does cause some imprecision when a statement has a
3502 combination of direct symbol references and pointer
3503 dereferences (e.g., MEMORY_VAR = *PTR) or if a call site has
3504 memory symbols in its argument list, but these cases do not
3505 occur so frequently as to constitute a serious problem. */
3506 if (STORED_SYMS (stmt))
3507 EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
3509 tree sym = referenced_var (i);
3510 pointer_set_insert (ai->written_vars, sym);
3511 if (!stmt_dereferences_ptr_p
3512 && stmt_escape_type != ESCAPE_TO_CALL
3513 && stmt_escape_type != ESCAPE_TO_PURE_CONST
3514 && stmt_escape_type != ESCAPE_TO_ASM)
3515 update_mem_sym_stats_from_stmt (sym, stmt, 0, 1);
3518 if (!stmt_dereferences_ptr_p
3519 && LOADED_SYMS (stmt)
3520 && stmt_escape_type != ESCAPE_TO_CALL
3521 && stmt_escape_type != ESCAPE_TO_PURE_CONST
3522 && stmt_escape_type != ESCAPE_TO_ASM)
3523 EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt), 0, i, bi)
3524 update_mem_sym_stats_from_stmt (referenced_var (i), stmt, 1, 0);
3529 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3530 Expressions of the type PTR + CST can be handled in two ways:
3532 1- If the constraint for PTR is ADDRESSOF for a non-structure
3533 variable, then we can use it directly because adding or
3534 subtracting a constant may not alter the original ADDRESSOF
3535 constraint (i.e., pointer arithmetic may not legally go outside
3536 an object's boundaries).
3538 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3539 then if CST is a compile-time constant that can be used as an
3540 offset, we can determine which sub-variable will be pointed-to
3541 by the expression.
3543 Return true if the expression is handled. For any other kind of
3544 expression, return false so that each operand can be added as a
3545 separate constraint by the caller. */
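/* Hedged example, assuming 8-bit bytes: for q = p + 4, where the
   constraint computed for p is &s and s has a field starting at bit 32,
   case 2 below rewrites the RHS so that q points directly to that
   sub-field.  If the addend is not a compile-time constant, the
   conservative fallback below is used instead.  */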
3547 static bool
3548 handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
3550 tree op0, op1;
3551 struct constraint_expr *c, *c2;
3552 unsigned int i = 0;
3553 unsigned int j = 0;
3554 VEC (ce_s, heap) *temp = NULL;
3555 unsigned int rhsoffset = 0;
3556 bool unknown_addend = false;
3558 if (TREE_CODE (expr) != POINTER_PLUS_EXPR)
3559 return false;
3561 op0 = TREE_OPERAND (expr, 0);
3562 op1 = TREE_OPERAND (expr, 1);
3563 gcc_assert (POINTER_TYPE_P (TREE_TYPE (op0)));
3565 get_constraint_for (op0, &temp);
3567 /* Fold constant addends into an offset; handle non-constant addends conservatively below. */
3568 if (TREE_CODE (op1) == INTEGER_CST)
3569 rhsoffset = TREE_INT_CST_LOW (op1) * BITS_PER_UNIT;
3570 else
3571 unknown_addend = true;
3573 for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
3574 for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
3576 if (c2->type == ADDRESSOF && rhsoffset != 0)
3578 varinfo_t temp = get_varinfo (c2->var);
3580 /* An access one after the end of an array is valid,
3581 so simply punt on accesses we cannot resolve. */
3582 temp = first_vi_for_offset (temp, rhsoffset);
3583 if (temp == NULL)
3584 continue;
3585 c2->var = temp->id;
3586 c2->offset = 0;
3588 else if (unknown_addend)
3590 /* Can't handle *a + integer where integer is unknown. */
3591 if (c2->type != SCALAR)
3593 struct constraint_expr intc;
3594 intc.var = integer_id;
3595 intc.offset = 0;
3596 intc.type = SCALAR;
3597 process_constraint (new_constraint (*c, intc));
3599 else
3601 /* We know it lives somewhere within c2->var. */
3602 varinfo_t tmp = get_varinfo (c2->var);
3603 for (; tmp; tmp = tmp->next)
3605 struct constraint_expr tmpc = *c2;
3606 c2->var = tmp->id;
3607 c2->offset = 0;
3608 process_constraint (new_constraint (*c, tmpc));
3612 else
3613 c2->offset = rhsoffset;
3614 process_constraint (new_constraint (*c, *c2));
3617 VEC_free (ce_s, heap, temp);
3619 return true;
3622 /* For non-IPA mode, generate constraints necessary for a call on the
3623 RHS. */
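/* For example (non-IPA mode, illustrative only): for a call foo (p),
   where p is a pointer argument, the loop below generates *p = &ANYTHING,
   so that whatever p points to may itself point to anything after the
   call.  */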
3625 static void
3626 handle_rhs_call (tree rhs)
3628 tree arg;
3629 call_expr_arg_iterator iter;
3630 struct constraint_expr rhsc;
3632 rhsc.var = anything_id;
3633 rhsc.offset = 0;
3634 rhsc.type = ADDRESSOF;
3636 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhs)
3638 VEC(ce_s, heap) *lhsc = NULL;
3640 /* Find those pointers being passed, and make sure they end up
3641 pointing to anything. */
3642 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3644 unsigned int j;
3645 struct constraint_expr *lhsp;
3647 get_constraint_for (arg, &lhsc);
3648 do_deref (&lhsc);
3649 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3650 process_constraint_1 (new_constraint (*lhsp, rhsc), true);
3651 VEC_free (ce_s, heap, lhsc);
3656 /* For non-IPA mode, generate constraints necessary for a call
3657 that returns a pointer and assigns it to LHS. This simply makes
3658 the LHS point to anything. */
3660 static void
3661 handle_lhs_call (tree lhs)
3663 VEC(ce_s, heap) *lhsc = NULL;
3664 struct constraint_expr rhsc;
3665 unsigned int j;
3666 struct constraint_expr *lhsp;
3668 rhsc.var = anything_id;
3669 rhsc.offset = 0;
3670 rhsc.type = ADDRESSOF;
3671 get_constraint_for (lhs, &lhsc);
3672 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3673 process_constraint_1 (new_constraint (*lhsp, rhsc), true);
3674 VEC_free (ce_s, heap, lhsc);
3677 /* Walk statement T setting up aliasing constraints according to the
3678 references found in T. This function is the main part of the
3679 constraint builder. AI points to auxiliary alias information used
3680 when building alias sets and computing alias grouping heuristics. */
3682 static void
3683 find_func_aliases (tree origt)
3685 tree t = origt;
3686 VEC(ce_s, heap) *lhsc = NULL;
3687 VEC(ce_s, heap) *rhsc = NULL;
3688 struct constraint_expr *c;
3690 if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
3691 t = TREE_OPERAND (t, 0);
3693 /* Now build constraints expressions. */
3694 if (TREE_CODE (t) == PHI_NODE)
3696 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))));
3698 /* Only care about pointers and structures containing
3699 pointers. */
3700 if (could_have_pointers (PHI_RESULT (t)))
3702 int i;
3703 unsigned int j;
3705 /* For a phi node, assign all the arguments to
3706 the result. */
3707 get_constraint_for (PHI_RESULT (t), &lhsc);
3708 for (i = 0; i < PHI_NUM_ARGS (t); i++)
3710 tree rhstype;
3711 tree strippedrhs = PHI_ARG_DEF (t, i);
3713 STRIP_NOPS (strippedrhs);
3714 rhstype = TREE_TYPE (strippedrhs);
3715 get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);
3717 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3719 struct constraint_expr *c2;
3720 while (VEC_length (ce_s, rhsc) > 0)
3722 c2 = VEC_last (ce_s, rhsc);
3723 process_constraint (new_constraint (*c, *c2));
3724 VEC_pop (ce_s, rhsc);
3730 /* In IPA mode, we need to generate constraints to pass call
3731 arguments through their calls. There are two cases: either a
3732 GIMPLE_MODIFY_STMT when we are returning a value, or just a plain
3733 CALL_EXPR when we are not.
3735 In non-IPA mode, we need to generate constraints for each
3736 pointer passed by address. */
3737 else if (((TREE_CODE (t) == GIMPLE_MODIFY_STMT
3738 && TREE_CODE (GIMPLE_STMT_OPERAND (t, 1)) == CALL_EXPR
3739 && !(call_expr_flags (GIMPLE_STMT_OPERAND (t, 1))
3740 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
3741 || (TREE_CODE (t) == CALL_EXPR
3742 && !(call_expr_flags (t)
3743 & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))))
3745 if (!in_ipa_mode)
3747 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3749 handle_rhs_call (GIMPLE_STMT_OPERAND (t, 1));
3750 if (POINTER_TYPE_P (TREE_TYPE (GIMPLE_STMT_OPERAND (t, 1))))
3751 handle_lhs_call (GIMPLE_STMT_OPERAND (t, 0));
3753 else
3754 handle_rhs_call (t);
3756 else
3758 tree lhsop;
3759 tree rhsop;
3760 tree arg;
3761 call_expr_arg_iterator iter;
3762 varinfo_t fi;
3763 int i = 1;
3764 tree decl;
3765 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3767 lhsop = GIMPLE_STMT_OPERAND (t, 0);
3768 rhsop = GIMPLE_STMT_OPERAND (t, 1);
3770 else
3772 lhsop = NULL;
3773 rhsop = t;
3775 decl = get_callee_fndecl (rhsop);
3777 /* If we can directly resolve the function being called, do so.
3778 Otherwise, it must be some sort of indirect expression that
3779 we should still be able to handle. */
3780 if (decl)
3782 fi = get_vi_for_tree (decl);
3784 else
3786 decl = CALL_EXPR_FN (rhsop);
3787 fi = get_vi_for_tree (decl);
3790 /* Assign all the passed arguments to the appropriate incoming
3791 parameters of the function. */
3793 FOR_EACH_CALL_EXPR_ARG (arg, iter, rhsop)
3795 struct constraint_expr lhs ;
3796 struct constraint_expr *rhsp;
3798 get_constraint_for (arg, &rhsc);
3799 if (TREE_CODE (decl) != FUNCTION_DECL)
3801 lhs.type = DEREF;
3802 lhs.var = fi->id;
3803 lhs.offset = i;
3805 else
3807 lhs.type = SCALAR;
3808 lhs.var = first_vi_for_offset (fi, i)->id;
3809 lhs.offset = 0;
3811 while (VEC_length (ce_s, rhsc) != 0)
3813 rhsp = VEC_last (ce_s, rhsc);
3814 process_constraint (new_constraint (lhs, *rhsp));
3815 VEC_pop (ce_s, rhsc);
3817 i++;
3820 /* If we are returning a value, assign it to the result. */
3821 if (lhsop)
3823 struct constraint_expr rhs;
3824 struct constraint_expr *lhsp;
3825 unsigned int j = 0;
3827 get_constraint_for (lhsop, &lhsc);
3828 if (TREE_CODE (decl) != FUNCTION_DECL)
3830 rhs.type = DEREF;
3831 rhs.var = fi->id;
3832 rhs.offset = i;
3834 else
3836 rhs.type = SCALAR;
3837 rhs.var = first_vi_for_offset (fi, i)->id;
3838 rhs.offset = 0;
3840 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3841 process_constraint (new_constraint (*lhsp, rhs));
3845 /* Otherwise, just a regular assignment statement. */
3846 else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
3848 tree lhsop = GIMPLE_STMT_OPERAND (t, 0);
3849 tree rhsop = GIMPLE_STMT_OPERAND (t, 1);
3850 int i;
3852 if ((AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
3853 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE)
3854 && (AGGREGATE_TYPE_P (TREE_TYPE (rhsop))
3855 || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE))
3857 do_structure_copy (lhsop, rhsop);
3859 else
3861 /* Only care about operations with pointers, structures
3862 containing pointers, dereferences, and call expressions. */
3863 if (could_have_pointers (lhsop)
3864 || TREE_CODE (rhsop) == CALL_EXPR)
3866 get_constraint_for (lhsop, &lhsc);
3867 switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
3869 /* An RHS that consists of unary operations,
3870 exceptional types, or bare decls/constants gets
3871 handled directly by get_constraint_for. */
3872 case tcc_reference:
3873 case tcc_declaration:
3874 case tcc_constant:
3875 case tcc_exceptional:
3876 case tcc_expression:
3877 case tcc_vl_exp:
3878 case tcc_unary:
3880 unsigned int j;
3882 get_constraint_for (rhsop, &rhsc);
3883 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3885 struct constraint_expr *c2;
3886 unsigned int k;
3888 for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
3889 process_constraint (new_constraint (*c, *c2));
3893 break;
3895 case tcc_binary:
3897 /* For pointer arithmetic of the form
3898 PTR + CST, we can simply use PTR's
3899 constraint because pointer arithmetic is
3900 not allowed to go out of bounds. */
3901 if (handle_ptr_arith (lhsc, rhsop))
3902 break;
3904 /* FALLTHRU */
3906 /* Otherwise, walk each operand. Notice that we
3907 can't use the operand interface because we need
3908 to process expressions other than simple operands
3909 (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR). */
3910 default:
3911 for (i = 0; i < TREE_OPERAND_LENGTH (rhsop); i++)
3913 tree op = TREE_OPERAND (rhsop, i);
3914 unsigned int j;
3916 gcc_assert (VEC_length (ce_s, rhsc) == 0);
3917 get_constraint_for (op, &rhsc);
3918 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3920 struct constraint_expr *c2;
3921 while (VEC_length (ce_s, rhsc) > 0)
3923 c2 = VEC_last (ce_s, rhsc);
3924 process_constraint (new_constraint (*c, *c2));
3925 VEC_pop (ce_s, rhsc);
3933 else if (TREE_CODE (t) == CHANGE_DYNAMIC_TYPE_EXPR)
3935 unsigned int j;
3937 get_constraint_for (CHANGE_DYNAMIC_TYPE_LOCATION (t), &lhsc);
3938 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
3939 get_varinfo (c->var)->no_tbaa_pruning = true;
3942 /* After promoting variables and computing aliasing we will
3943 need to re-scan most statements. FIXME: Try to minimize the
3944 number of statements re-scanned. It's not really necessary to
3945 re-scan *all* statements. */
3946 mark_stmt_modified (origt);
3947 VEC_free (ce_s, heap, rhsc);
3948 VEC_free (ce_s, heap, lhsc);
3952 /* Find the first varinfo in the same variable as START that overlaps with
3953 OFFSET.
3954 Effectively, walk the chain of fields for the variable START to find the
3955 first field that overlaps with OFFSET.
3956 Return NULL if we can't find one. */
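/* Small example (illustrative): with fields at offset 0 of size 32 and
   offset 32 of size 64, an OFFSET of 40 returns the second field, while
   an OFFSET of 96 falls past the end of the chain and returns NULL.  */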
3958 static varinfo_t
3959 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3961 varinfo_t curr = start;
3962 while (curr)
3964 /* We may not find a variable in the field list with the actual
3965 offset when we have glommed a structure to a variable.
3966 In that case, however, offset should still be within the size
3967 of the variable. */
3968 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3969 return curr;
3970 curr = curr->next;
3972 return NULL;
3976 /* Insert the varinfo FIELD into the field list for BASE, at the front
3977 of the list. */
3979 static void
3980 insert_into_field_list (varinfo_t base, varinfo_t field)
3982 varinfo_t prev = base;
3983 varinfo_t curr = base->next;
3985 field->next = curr;
3986 prev->next = field;
3989 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3990 offset. */
3992 static void
3993 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3995 varinfo_t prev = base;
3996 varinfo_t curr = base->next;
3998 if (curr == NULL)
4000 prev->next = field;
4001 field->next = NULL;
4003 else
4005 while (curr)
4007 if (field->offset <= curr->offset)
4008 break;
4009 prev = curr;
4010 curr = curr->next;
4012 field->next = prev->next;
4013 prev->next = field;
4017 /* qsort comparison function for two fieldoff's PA and PB */
4019 static int
4020 fieldoff_compare (const void *pa, const void *pb)
4022 const fieldoff_s *foa = (const fieldoff_s *)pa;
4023 const fieldoff_s *fob = (const fieldoff_s *)pb;
4024 HOST_WIDE_INT foasize, fobsize;
4026 if (foa->offset != fob->offset)
4027 return foa->offset - fob->offset;
4029 foasize = TREE_INT_CST_LOW (foa->size);
4030 fobsize = TREE_INT_CST_LOW (fob->size);
4031 return foasize - fobsize;
4034 /* Sort a fieldstack according to the field offset and sizes. */
4035 void
4036 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
4038 qsort (VEC_address (fieldoff_s, fieldstack),
4039 VEC_length (fieldoff_s, fieldstack),
4040 sizeof (fieldoff_s),
4041 fieldoff_compare);
4044 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all the fields
4045 of TYPE onto fieldstack, recording their offsets along the way.
4046 OFFSET is used to keep track of the offset in this entire structure, rather
4047 than just the immediately containing structure. Returns the number
4048 of fields pushed.
4049 HAS_UNION is set to true if we find a union type as a field of
4050 TYPE. ADDRESSABLE_TYPE is the type of the outermost object that could have
4051 its address taken. */
4053 int
4054 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4055 HOST_WIDE_INT offset, bool *has_union,
4056 tree addressable_type)
4058 tree field;
4059 int count = 0;
4061 if (TREE_CODE (type) == COMPLEX_TYPE)
4063 fieldoff_s *real_part, *img_part;
4064 real_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4065 real_part->type = TREE_TYPE (type);
4066 real_part->size = TYPE_SIZE (TREE_TYPE (type));
4067 real_part->offset = offset;
4068 real_part->decl = NULL_TREE;
4069 real_part->alias_set = -1;
4071 img_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4072 img_part->type = TREE_TYPE (type);
4073 img_part->size = TYPE_SIZE (TREE_TYPE (type));
4074 img_part->offset = offset + TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (type)));
4075 img_part->decl = NULL_TREE;
4076 img_part->alias_set = -1;
4078 return 2;
4081 if (TREE_CODE (type) == ARRAY_TYPE)
4083 tree sz = TYPE_SIZE (type);
4084 tree elsz = TYPE_SIZE (TREE_TYPE (type));
4085 HOST_WIDE_INT nr;
4086 int i;
4088 if (! sz
4089 || ! host_integerp (sz, 1)
4090 || TREE_INT_CST_LOW (sz) == 0
4091 || ! elsz
4092 || ! host_integerp (elsz, 1)
4093 || TREE_INT_CST_LOW (elsz) == 0)
4094 return 0;
4096 nr = TREE_INT_CST_LOW (sz) / TREE_INT_CST_LOW (elsz);
4097 if (nr > SALIAS_MAX_ARRAY_ELEMENTS)
4098 return 0;
4100 for (i = 0; i < nr; ++i)
4102 bool push = false;
4103 int pushed = 0;
4105 if (has_union
4106 && (TREE_CODE (TREE_TYPE (type)) == QUAL_UNION_TYPE
4107 || TREE_CODE (TREE_TYPE (type)) == UNION_TYPE))
4108 *has_union = true;
4110 if (!AGGREGATE_TYPE_P (TREE_TYPE (type))) /* var_can_have_subvars */
4111 push = true;
4112 else if (!(pushed = push_fields_onto_fieldstack
4113 (TREE_TYPE (type), fieldstack,
4114 offset + i * TREE_INT_CST_LOW (elsz), has_union,
4115 (TYPE_NONALIASED_COMPONENT (type)
4116 ? addressable_type
4117 : TREE_TYPE (type)))))
4118 /* Empty structures may have actual size, like in C++.  So,
4119 if we did not push any subfields and the size is
4120 nonzero, push the field itself onto the stack.  */
4121 push = true;
4123 if (push)
4125 fieldoff_s *pair;
4127 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4128 pair->type = TREE_TYPE (type);
4129 pair->size = elsz;
4130 pair->decl = NULL_TREE;
4131 pair->offset = offset + i * TREE_INT_CST_LOW (elsz);
4132 if (TYPE_NONALIASED_COMPONENT (type))
4133 pair->alias_set = get_alias_set (addressable_type);
4134 else
4135 pair->alias_set = -1;
4136 count++;
4138 else
4139 count += pushed;
4142 return count;
4145 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4146 if (TREE_CODE (field) == FIELD_DECL)
4148 bool push = false;
4149 int pushed = 0;
4151 if (has_union
4152 && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4153 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
4154 *has_union = true;
4156 if (!var_can_have_subvars (field))
4157 push = true;
4158 else if (!(pushed = push_fields_onto_fieldstack
4159 (TREE_TYPE (field), fieldstack,
4160 offset + bitpos_of_field (field), has_union,
4161 (DECL_NONADDRESSABLE_P (field)
4162 ? addressable_type
4163 : TREE_TYPE (field))))
4164 && DECL_SIZE (field)
4165 && !integer_zerop (DECL_SIZE (field)))
4166 /* Empty structures may have actual size, like in C++.  So,
4167 if we did not push any subfields and the size is
4168 nonzero, push the field itself onto the stack.  */
4169 push = true;
4171 if (push)
4173 fieldoff_s *pair;
4175 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4176 pair->type = TREE_TYPE (field);
4177 pair->size = DECL_SIZE (field);
4178 pair->decl = field;
4179 pair->offset = offset + bitpos_of_field (field);
4180 if (DECL_NONADDRESSABLE_P (field))
4181 pair->alias_set = get_alias_set (addressable_type);
4182 else
4183 pair->alias_set = -1;
4184 count++;
4186 else
4187 count += pushed;
4190 return count;
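/* [Editorial sketch -- not part of the original file.]  A stand-alone
   program showing the kind of flattened (bit offset) list that
   push_fields_onto_fieldstack produces for a nested aggregate.  The
   sample structs and the output format are made up; the printed bit
   offsets assume 8-bit chars and typical padding.  */

#include <stdio.h>
#include <stddef.h>

struct inner { char b; char c; };
struct outer { int a; struct inner in; int d[2]; };

int
main (void)
{
  /* Bit offsets, analogous to the fieldoff_s::offset values pushed
     for struct outer (fields of the nested struct and of the small
     array are flattened into the same list).  */
  printf ("a    at bit %zu\n", offsetof (struct outer, a) * 8);
  printf ("in.b at bit %zu\n", (offsetof (struct outer, in)
                                + offsetof (struct inner, b)) * 8);
  printf ("in.c at bit %zu\n", (offsetof (struct outer, in)
                                + offsetof (struct inner, c)) * 8);
  printf ("d[0] at bit %zu\n", offsetof (struct outer, d) * 8);
  printf ("d[1] at bit %zu\n",
          offsetof (struct outer, d) * 8 + sizeof (int) * 8);
  return 0;
}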
4193 /* Create a constraint from ANYTHING variable to VI. */
4194 static void
4195 make_constraint_from_anything (varinfo_t vi)
4197 struct constraint_expr lhs, rhs;
4199 lhs.var = vi->id;
4200 lhs.offset = 0;
4201 lhs.type = SCALAR;
4203 rhs.var = anything_id;
4204 rhs.offset = 0;
4205 rhs.type = ADDRESSOF;
4206 process_constraint (new_constraint (lhs, rhs));
4209 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4210 if it is a varargs function. */
4212 static unsigned int
4213 count_num_arguments (tree decl, bool *is_varargs)
4215 unsigned int i = 0;
4216 tree t;
4218 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
4219 t;
4220 t = TREE_CHAIN (t))
4222 if (TREE_VALUE (t) == void_type_node)
4223 break;
4224 i++;
4227 if (!t)
4228 *is_varargs = true;
4229 return i;
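/* [Editorial sketch -- not part of the original file.]  How an
   argument-type chain distinguishes a varargs prototype: a prototyped
   function's chain ends with a void marker (void_type_node in GCC),
   while a varargs chain simply ends.  The type "arg_type" and the NULL
   name standing in for void_type_node are made up for this example.  */

#include <stdbool.h>
#include <stddef.h>

struct arg_type { const char *name; struct arg_type *next; };

static unsigned int
count_args (struct arg_type *args, bool *is_varargs)
{
  unsigned int n = 0;
  struct arg_type *t;

  for (t = args; t; t = t->next)
    {
      if (t->name == NULL)      /* stand-in for void_type_node */
        break;
      n++;
    }

  if (!t)                       /* chain ended without the void marker */
    *is_varargs = true;
  return n;
}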
4232 /* Create a function info node for DECL, using NAME, and return the index
4233 of the variable we've created for the function. */
4235 static unsigned int
4236 create_function_info_for (tree decl, const char *name)
4238 unsigned int index = VEC_length (varinfo_t, varmap);
4239 varinfo_t vi;
4240 tree arg;
4241 unsigned int i;
4242 bool is_varargs = false;
4244 /* Create the variable info. */
4246 vi = new_var_info (decl, index, name);
4247 vi->decl = decl;
4248 vi->offset = 0;
4249 vi->has_union = 0;
4250 vi->size = 1;
4251 vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
4252 insert_vi_for_tree (vi->decl, vi);
4253 VEC_safe_push (varinfo_t, heap, varmap, vi);
4255 stats.total_vars++;
4257 /* If it's varargs, we don't know how many arguments it has, so we
4258 can't do much.  */
4260 if (is_varargs)
4262 vi->fullsize = ~0;
4263 vi->size = ~0;
4264 vi->is_unknown_size_var = true;
4265 return index;
4269 arg = DECL_ARGUMENTS (decl);
4271 /* Set up variables for each argument. */
4272 for (i = 1; i < vi->fullsize; i++)
4274 varinfo_t argvi;
4275 const char *newname;
4276 char *tempname;
4277 unsigned int newindex;
4278 tree argdecl = decl;
4280 if (arg)
4281 argdecl = arg;
4283 newindex = VEC_length (varinfo_t, varmap);
4284 asprintf (&tempname, "%s.arg%d", name, i-1);
4285 newname = ggc_strdup (tempname);
4286 free (tempname);
4288 argvi = new_var_info (argdecl, newindex, newname);
4289 argvi->decl = argdecl;
4290 VEC_safe_push (varinfo_t, heap, varmap, argvi);
4291 argvi->offset = i;
4292 argvi->size = 1;
4293 argvi->fullsize = vi->fullsize;
4294 argvi->has_union = false;
4295 insert_into_field_list_sorted (vi, argvi);
4296 stats.total_vars ++;
4297 if (arg)
4299 insert_vi_for_tree (arg, argvi);
4300 arg = TREE_CHAIN (arg);
4304 /* Create a variable for the return var. */
4305 if (DECL_RESULT (decl) != NULL
4306 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
4308 varinfo_t resultvi;
4309 const char *newname;
4310 char *tempname;
4311 unsigned int newindex;
4312 tree resultdecl = decl;
4314 vi->fullsize ++;
4316 if (DECL_RESULT (decl))
4317 resultdecl = DECL_RESULT (decl);
4319 newindex = VEC_length (varinfo_t, varmap);
4320 asprintf (&tempname, "%s.result", name);
4321 newname = ggc_strdup (tempname);
4322 free (tempname);
4324 resultvi = new_var_info (resultdecl, newindex, newname);
4325 resultvi->decl = resultdecl;
4326 VEC_safe_push (varinfo_t, heap, varmap, resultvi);
4327 resultvi->offset = i;
4328 resultvi->size = 1;
4329 resultvi->fullsize = vi->fullsize;
4330 resultvi->has_union = false;
4331 insert_into_field_list_sorted (vi, resultvi);
4332 stats.total_vars ++;
4333 if (DECL_RESULT (decl))
4334 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
4336 return index;
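/* [Editorial sketch -- not part of the original file.]  The field
   offsets that create_function_info_for assigns inside a function's
   varinfo, for a non-varargs function that returns a value: the
   function decl itself is field 0, parameter K (0-based) is field
   K + 1, and the return value is the last field.  The short program
   below just prints that layout for a hypothetical 3-argument f.  */

#include <stdio.h>

int
main (void)
{
  unsigned int nargs = 3;        /* e.g. int f (int, int, int) */
  unsigned int k;

  printf ("f          -> offset 0\n");
  for (k = 0; k < nargs; k++)
    printf ("f.arg%u    -> offset %u\n", k, k + 1);
  printf ("f.result   -> offset %u\n", nargs + 1);
  return 0;
}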
4340 /* Return true if FIELDSTACK contains fields that overlap.
4341 FIELDSTACK is assumed to be sorted by offset. */
4343 static bool
4344 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
4346 fieldoff_s *fo = NULL;
4347 unsigned int i;
4348 HOST_WIDE_INT lastoffset = -1;
4350 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4352 if (fo->offset == lastoffset)
4353 return true;
4354 lastoffset = fo->offset;
4356 return false;
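/* [Editorial sketch -- not part of the original file.]  The test above
   relies on the field stack already being sorted by offset, so the
   overlapping entries produced by the C++ FE issue mentioned below show
   up as equal adjacent offsets.  A reduced, array-based version of the
   same duplicate check:  */

#include <stdbool.h>
#include <stddef.h>

static bool
has_duplicate_offset (const long *offsets, size_t n)
{
  size_t i;

  /* OFFSETS must be sorted; any duplicate is then adjacent.  */
  for (i = 1; i < n; i++)
    if (offsets[i] == offsets[i - 1])
      return true;
  return false;
}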
4359 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
4360 This will also create any varinfo structures necessary for fields
4361 of DECL. */
4363 static unsigned int
4364 create_variable_info_for (tree decl, const char *name)
4366 unsigned int index = VEC_length (varinfo_t, varmap);
4367 varinfo_t vi;
4368 tree decltype = TREE_TYPE (decl);
4369 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
4370 bool notokay = false;
4371 bool hasunion;
4372 bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
4373 VEC (fieldoff_s,heap) *fieldstack = NULL;
4375 if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
4376 return create_function_info_for (decl, name);
4378 hasunion = TREE_CODE (decltype) == UNION_TYPE
4379 || TREE_CODE (decltype) == QUAL_UNION_TYPE;
4380 if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
4382 push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion,
4383 decltype);
4384 if (hasunion)
4386 VEC_free (fieldoff_s, heap, fieldstack);
4387 notokay = true;
4392 /* If the variable doesn't have subvars, we may end up needing to
4393 sort the field list and create fake variables for all the
4394 fields. */
4395 vi = new_var_info (decl, index, name);
4396 vi->decl = decl;
4397 vi->offset = 0;
4398 vi->has_union = hasunion;
4399 if (!declsize
4400 || TREE_CODE (declsize) != INTEGER_CST
4401 || TREE_CODE (decltype) == UNION_TYPE
4402 || TREE_CODE (decltype) == QUAL_UNION_TYPE)
4404 vi->is_unknown_size_var = true;
4405 vi->fullsize = ~0;
4406 vi->size = ~0;
4408 else
4410 vi->fullsize = TREE_INT_CST_LOW (declsize);
4411 vi->size = vi->fullsize;
4414 insert_vi_for_tree (vi->decl, vi);
4415 VEC_safe_push (varinfo_t, heap, varmap, vi);
4416 if (is_global && (!flag_whole_program || !in_ipa_mode))
4417 make_constraint_from_anything (vi);
4419 stats.total_vars++;
4420 if (use_field_sensitive
4421 && !notokay
4422 && !vi->is_unknown_size_var
4423 && var_can_have_subvars (decl)
4424 && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
4426 unsigned int newindex = VEC_length (varinfo_t, varmap);
4427 fieldoff_s *fo = NULL;
4428 unsigned int i;
4430 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4432 if (! fo->size
4433 || TREE_CODE (fo->size) != INTEGER_CST
4434 || fo->offset < 0)
4436 notokay = true;
4437 break;
4441 /* We can't sort them if we have a field with a variable sized type,
4442 which will make notokay = true. In that case, we are going to return
4443 without creating varinfos for the fields anyway, so sorting them is a
4444 waste to boot. */
4445 if (!notokay)
4447 sort_fieldstack (fieldstack);
4448 /* Due to some C++ FE issues, like PR 22488, we might end up with
4449 what appear to be overlapping fields even though they,
4450 in reality, do not overlap. Until the C++ FE is fixed,
4451 we will simply disable field-sensitivity for these cases. */
4452 notokay = check_for_overlaps (fieldstack);
4456 if (VEC_length (fieldoff_s, fieldstack) != 0)
4457 fo = VEC_index (fieldoff_s, fieldstack, 0);
4459 if (fo == NULL || notokay)
4461 vi->is_unknown_size_var = 1;
4462 vi->fullsize = ~0;
4463 vi->size = ~0;
4464 VEC_free (fieldoff_s, heap, fieldstack);
4465 return index;
4468 vi->size = TREE_INT_CST_LOW (fo->size);
4469 vi->offset = fo->offset;
4470 for (i = VEC_length (fieldoff_s, fieldstack) - 1;
4471 i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
4472 i--)
4474 varinfo_t newvi;
4475 const char *newname = "NULL";
4476 char *tempname;
4478 newindex = VEC_length (varinfo_t, varmap);
4479 if (dump_file)
4481 if (fo->decl)
4482 asprintf (&tempname, "%s.%s",
4483 vi->name, alias_get_name (fo->decl));
4484 else
4485 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC,
4486 vi->name, fo->offset);
4487 newname = ggc_strdup (tempname);
4488 free (tempname);
4490 newvi = new_var_info (decl, newindex, newname);
4491 newvi->offset = fo->offset;
4492 newvi->size = TREE_INT_CST_LOW (fo->size);
4493 newvi->fullsize = vi->fullsize;
4494 insert_into_field_list (vi, newvi);
4495 VEC_safe_push (varinfo_t, heap, varmap, newvi);
4496 if (is_global && (!flag_whole_program || !in_ipa_mode))
4497 make_constraint_from_anything (newvi);
4499 stats.total_vars++;
4503 VEC_free (fieldoff_s, heap, fieldstack);
4505 return index;
4508 /* Print out the points-to solution for VAR to FILE. */
4510 void
4511 dump_solution_for_var (FILE *file, unsigned int var)
4513 varinfo_t vi = get_varinfo (var);
4514 unsigned int i;
4515 bitmap_iterator bi;
4517 if (find (var) != var)
4519 varinfo_t vipt = get_varinfo (find (var));
4520 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4522 else
4524 fprintf (file, "%s = { ", vi->name);
4525 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4527 fprintf (file, "%s ", get_varinfo (i)->name);
4529 fprintf (file, "}");
4530 if (vi->no_tbaa_pruning)
4531 fprintf (file, " no-tbaa-pruning");
4532 fprintf (file, "\n");
4536 /* Print the points-to solution for VAR to stdout. */
4538 void
4539 debug_solution_for_var (unsigned int var)
4541 dump_solution_for_var (stdout, var);
4544 /* Create varinfo structures for all of the variables in the
4545 function for intraprocedural mode. */
4547 static void
4548 intra_create_variable_infos (void)
4550 tree t;
4551 struct constraint_expr lhs, rhs;
4553 /* For each incoming pointer argument arg, create the constraint ARG
4554 = ANYTHING or a dummy variable if flag_argument_noalias is set. */
4555 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
4557 varinfo_t p;
4559 if (!could_have_pointers (t))
4560 continue;
4562 /* If flag_argument_noalias is set, then function pointer
4563 arguments are guaranteed not to point to each other. In that
4564 case, create an artificial variable PARM_NOALIAS and the
4565 constraint ARG = &PARM_NOALIAS. */
4566 if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
4568 varinfo_t vi;
4569 tree heapvar = heapvar_lookup (t);
4571 lhs.offset = 0;
4572 lhs.type = SCALAR;
4573 lhs.var = get_vi_for_tree (t)->id;
4575 if (heapvar == NULL_TREE)
4577 var_ann_t ann;
4578 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
4579 "PARM_NOALIAS");
4580 DECL_EXTERNAL (heapvar) = 1;
4581 if (gimple_referenced_vars (cfun))
4582 add_referenced_var (heapvar);
4584 heapvar_insert (t, heapvar);
4586 ann = get_var_ann (heapvar);
4587 if (flag_argument_noalias == 1)
4588 ann->noalias_state = NO_ALIAS;
4589 else if (flag_argument_noalias == 2)
4590 ann->noalias_state = NO_ALIAS_GLOBAL;
4591 else if (flag_argument_noalias == 3)
4592 ann->noalias_state = NO_ALIAS_ANYTHING;
4593 else
4594 gcc_unreachable ();
4597 vi = get_vi_for_tree (heapvar);
4598 vi->is_artificial_var = 1;
4599 vi->is_heap_var = 1;
4600 rhs.var = vi->id;
4601 rhs.type = ADDRESSOF;
4602 rhs.offset = 0;
4603 for (p = get_varinfo (lhs.var); p; p = p->next)
4605 struct constraint_expr temp = lhs;
4606 temp.var = p->id;
4607 process_constraint (new_constraint (temp, rhs));
4610 else
4612 varinfo_t arg_vi = get_vi_for_tree (t);
4614 for (p = arg_vi; p; p = p->next)
4615 make_constraint_from_anything (p);
4620 /* Structure used to put solution bitmaps in a hashtable so they can
4621 be shared among variables with the same points-to set. */
4623 typedef struct shared_bitmap_info
4625 bitmap pt_vars;
4626 hashval_t hashcode;
4627 } *shared_bitmap_info_t;
4628 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
4630 static htab_t shared_bitmap_table;
4632 /* Hash function for a shared_bitmap_info_t */
4634 static hashval_t
4635 shared_bitmap_hash (const void *p)
4637 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
4638 return bi->hashcode;
4641 /* Equality function for two shared_bitmap_info_t's. */
4643 static int
4644 shared_bitmap_eq (const void *p1, const void *p2)
4646 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
4647 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
4648 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
4651 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
4652 existing instance if there is one, NULL otherwise. */
4654 static bitmap
4655 shared_bitmap_lookup (bitmap pt_vars)
4657 void **slot;
4658 struct shared_bitmap_info sbi;
4660 sbi.pt_vars = pt_vars;
4661 sbi.hashcode = bitmap_hash (pt_vars);
4663 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
4664 sbi.hashcode, NO_INSERT);
4665 if (!slot)
4666 return NULL;
4667 else
4668 return ((shared_bitmap_info_t) *slot)->pt_vars;
4672 /* Add a bitmap to the shared bitmap hashtable. */
4674 static void
4675 shared_bitmap_add (bitmap pt_vars)
4677 void **slot;
4678 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
4680 sbi->pt_vars = pt_vars;
4681 sbi->hashcode = bitmap_hash (pt_vars);
4683 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
4684 sbi->hashcode, INSERT);
4685 gcc_assert (!*slot);
4686 *slot = (void *) sbi;
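/* [Editorial sketch -- not part of the original file.]  The two
   functions above hash-cons points-to sets: identical bitmaps are
   stored once and shared by every variable with the same solution.
   The stand-alone sketch below shows the same idea with a 64-bit mask
   standing in for a GCC bitmap and a linear table standing in for the
   real hash table; all names here are made up.  */

#include <stdint.h>
#include <stddef.h>

#define MAX_SHARED 1024

static uint64_t shared_sets[MAX_SHARED];
static size_t n_shared;

/* Return a pointer to a previously stored set equal to SET, or store
   SET and return the new entry.  Callers keep the shared pointer
   instead of a private duplicate of the set.  */
static const uint64_t *
share_set (uint64_t set)
{
  size_t i;

  for (i = 0; i < n_shared; i++)
    if (shared_sets[i] == set)
      return &shared_sets[i];

  if (n_shared == MAX_SHARED)
    return NULL;  /* table full; caller keeps its own copy */

  shared_sets[n_shared] = set;
  return &shared_sets[n_shared++];
}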
4690 /* Set bits in INTO corresponding to the variable uids in solution set
4691 FROM, which came from variable PTR.
4692 For variables that are actually dereferenced, we also use type
4693 based alias analysis to prune the points-to sets.
4694 IS_DEREFED is true if PTR was directly dereferenced, which we use to
4695 help determine whether we are allowed to prune using TBAA.
4696 If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
4697 the FROM set. */
4699 static void
4700 set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
4701 bool no_tbaa_pruning)
4703 unsigned int i;
4704 bitmap_iterator bi;
4705 subvar_t sv;
4706 alias_set_type ptr_alias_set = get_alias_set (TREE_TYPE (ptr));
4708 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
4710 varinfo_t vi = get_varinfo (i);
4711 alias_set_type var_alias_set;
4713 /* The only artificial variables that are allowed in a may-alias
4714 set are heap variables. */
4715 if (vi->is_artificial_var && !vi->is_heap_var)
4716 continue;
4718 if (vi->has_union && get_subvars_for_var (vi->decl) != NULL)
4720 /* Variables containing unions may need to be converted to
4721 their SFT's, because SFT's can contain unions and our varinfos cannot. */
4722 for (sv = get_subvars_for_var (vi->decl); sv; sv = sv->next)
4723 bitmap_set_bit (into, DECL_UID (sv->var));
4725 else if (TREE_CODE (vi->decl) == VAR_DECL
4726 || TREE_CODE (vi->decl) == PARM_DECL
4727 || TREE_CODE (vi->decl) == RESULT_DECL)
4729 if (var_can_have_subvars (vi->decl)
4730 && get_subvars_for_var (vi->decl))
4732 /* If VI->DECL is an aggregate for which we created
4733 SFTs, add the SFT corresponding to VI->OFFSET. */
4734 tree sft = get_subvar_at (vi->decl, vi->offset);
4735 gcc_assert (sft);
4736 if (sft)
4738 var_alias_set = get_alias_set (sft);
4739 if (no_tbaa_pruning
4740 || (!is_derefed && !vi->directly_dereferenced)
4741 || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
4742 bitmap_set_bit (into, DECL_UID (sft));
4745 else
4747 /* Otherwise, just add VI->DECL to the alias set.
4748 Don't type prune artificial vars. */
4749 if (vi->is_artificial_var)
4750 bitmap_set_bit (into, DECL_UID (vi->decl));
4751 else
4753 var_alias_set = get_alias_set (vi->decl);
4754 if (no_tbaa_pruning
4755 || (!is_derefed && !vi->directly_dereferenced)
4756 || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
4757 bitmap_set_bit (into, DECL_UID (vi->decl));
4765 static bool have_alias_info = false;
4767 /* The list of SMT's that are in use by our pointer variables. This
4768 is the set of SMT's for all pointers that can point to anything. */
4769 static bitmap used_smts;
4771 /* Because the points-to set calculation and the SMT calculation are
4772 somewhat co-dependent, we can't just calculate the SMT used info
4773 whenever we want; we have to calculate it around the time that
4774 find_what_p_points_to is called. */
4776 /* Mark which SMT's are in use by points-to anything variables. */
4778 void
4779 set_used_smts (void)
4781 int i;
4782 varinfo_t vi;
4783 used_smts = BITMAP_ALLOC (&pta_obstack);
4785 for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); i++)
4787 tree var = vi->decl;
4788 varinfo_t withsolution = get_varinfo (find (i));
4789 tree smt;
4790 var_ann_t va;
4791 struct ptr_info_def *pi = NULL;
4793 /* For parm decls, the pointer info may be under the default
4794 def. */
4795 if (TREE_CODE (vi->decl) == PARM_DECL
4796 && gimple_default_def (cfun, var))
4797 pi = SSA_NAME_PTR_INFO (gimple_default_def (cfun, var));
4798 else if (TREE_CODE (var) == SSA_NAME)
4799 pi = SSA_NAME_PTR_INFO (var);
4801 /* Skip the special variables and those that can't be aliased. */
4802 if (vi->is_special_var
4803 || !SSA_VAR_P (var)
4804 || (pi && !pi->is_dereferenced)
4805 || (TREE_CODE (var) == VAR_DECL && !may_be_aliased (var))
4806 || !POINTER_TYPE_P (TREE_TYPE (var)))
4807 continue;
4809 if (TREE_CODE (var) == SSA_NAME)
4810 var = SSA_NAME_VAR (var);
4812 va = var_ann (var);
4813 if (!va)
4814 continue;
4816 smt = va->symbol_mem_tag;
4817 if (smt && bitmap_bit_p (withsolution->solution, anything_id))
4818 bitmap_set_bit (used_smts, DECL_UID (smt));
4822 /* Merge the necessary SMT's into the solution set SOLUTION for
4823 pointer P. This involves merging all SMT's that are a subset of
4824 the SMT necessary for P. */
4826 static void
4827 merge_smts_into (tree p, bitmap solution)
4829 unsigned int i;
4830 bitmap_iterator bi;
4831 tree smt;
4832 bitmap aliases;
4833 tree var = p;
4835 if (TREE_CODE (p) == SSA_NAME)
4836 var = SSA_NAME_VAR (p);
4838 smt = var_ann (var)->symbol_mem_tag;
4839 if (smt)
4841 alias_set_type smtset = get_alias_set (TREE_TYPE (smt));
4843 /* Need to set the SMT subsets first before this
4844 will work properly. */
4845 bitmap_set_bit (solution, DECL_UID (smt));
4846 EXECUTE_IF_SET_IN_BITMAP (used_smts, 0, i, bi)
4848 tree newsmt = referenced_var (i);
4849 tree newsmttype = TREE_TYPE (newsmt);
4851 if (alias_set_subset_of (get_alias_set (newsmttype),
4852 smtset))
4853 bitmap_set_bit (solution, i);
4856 aliases = MTAG_ALIASES (smt);
4857 if (aliases)
4858 bitmap_ior_into (solution, aliases);
4862 /* Given a pointer variable P, fill in its points-to set, or return
4863 false if we can't.
4864 Rather than return false for variables that point to anything, we
4865 instead find the corresponding SMT, and merge in its aliases. In
4866 addition to these aliases, we also set the bits for the SMT's
4867 themselves and their subsets, as SMT's are still in use by
4868 non-SSA_NAME's, and pruning may eliminate every one of their
4869 aliases. In such a case, if we did not include the right set of
4870 SMT's in the points-to set of the variable, we'd end up with
4871 statements that do not conflict but should. */
4873 bool
4874 find_what_p_points_to (tree p)
4876 tree lookup_p = p;
4877 varinfo_t vi;
4879 if (!have_alias_info)
4880 return false;
4882 /* For parameters, get at the points-to set for the actual parm
4883 decl. */
4884 if (TREE_CODE (p) == SSA_NAME
4885 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
4886 && SSA_NAME_IS_DEFAULT_DEF (p))
4887 lookup_p = SSA_NAME_VAR (p);
4889 vi = lookup_vi_for_tree (lookup_p);
4890 if (vi)
4892 if (vi->is_artificial_var)
4893 return false;
4895 /* See if this is a field or a structure. */
4896 if (vi->size != vi->fullsize)
4898 /* Nothing currently asks about structure fields directly,
4899 but when something does, we will need code here to hand back the
4900 points-to set. */
4901 if (!var_can_have_subvars (vi->decl)
4902 || get_subvars_for_var (vi->decl) == NULL)
4903 return false;
4905 else
4907 struct ptr_info_def *pi = get_ptr_info (p);
4908 unsigned int i;
4909 bitmap_iterator bi;
4910 bool was_pt_anything = false;
4911 bitmap finished_solution;
4912 bitmap result;
4914 if (!pi->is_dereferenced)
4915 return false;
4917 /* This variable may have been collapsed; let's get the real
4918 variable. */
4919 vi = get_varinfo (find (vi->id));
4921 /* Translate artificial variables into SSA_NAME_PTR_INFO
4922 attributes. */
4923 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4925 varinfo_t vi = get_varinfo (i);
4927 if (vi->is_artificial_var)
4929 /* FIXME. READONLY should be handled better so that
4930 flow insensitive aliasing can disregard writable
4931 aliases. */
4932 if (vi->id == nothing_id)
4933 pi->pt_null = 1;
4934 else if (vi->id == anything_id)
4935 was_pt_anything = 1;
4936 else if (vi->id == readonly_id)
4937 was_pt_anything = 1;
4938 else if (vi->id == integer_id)
4939 was_pt_anything = 1;
4940 else if (vi->is_heap_var)
4941 pi->pt_global_mem = 1;
4945 /* Share the final set of variables when possible. */
4947 finished_solution = BITMAP_GGC_ALLOC ();
4948 stats.points_to_sets_created++;
4950 /* Instead of using pt_anything, we merge in the SMT aliases
4951 for the underlying SMT. In addition, if they could have
4952 pointed to anything, they could point to global memory.
4953 But we cannot do that for ref-all pointers because these
4954 aliases have not been computed yet. */
4955 if (was_pt_anything)
4957 if (PTR_IS_REF_ALL (p))
4959 pi->pt_anything = 1;
4960 return false;
4963 merge_smts_into (p, finished_solution);
4964 pi->pt_global_mem = 1;
4967 set_uids_in_ptset (vi->decl, finished_solution, vi->solution,
4968 vi->directly_dereferenced,
4969 vi->no_tbaa_pruning);
4970 result = shared_bitmap_lookup (finished_solution);
4972 if (!result)
4974 shared_bitmap_add (finished_solution);
4975 pi->pt_vars = finished_solution;
4977 else
4979 pi->pt_vars = result;
4980 bitmap_clear (finished_solution);
4983 if (bitmap_empty_p (pi->pt_vars))
4984 pi->pt_vars = NULL;
4986 return true;
4990 return false;
4995 /* Dump points-to information to OUTFILE. */
4997 void
4998 dump_sa_points_to_info (FILE *outfile)
5000 unsigned int i;
5002 fprintf (outfile, "\nPoints-to sets\n\n");
5004 if (dump_flags & TDF_STATS)
5006 fprintf (outfile, "Stats:\n");
5007 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
5008 fprintf (outfile, "Non-pointer vars: %d\n",
5009 stats.nonpointer_vars);
5010 fprintf (outfile, "Statically unified vars: %d\n",
5011 stats.unified_vars_static);
5012 fprintf (outfile, "Dynamically unified vars: %d\n",
5013 stats.unified_vars_dynamic);
5014 fprintf (outfile, "Iterations: %d\n", stats.iterations);
5015 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
5016 fprintf (outfile, "Number of implicit edges: %d\n",
5017 stats.num_implicit_edges);
5020 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
5021 dump_solution_for_var (outfile, i);
5025 /* Debug points-to information to stderr. */
5027 void
5028 debug_sa_points_to_info (void)
5030 dump_sa_points_to_info (stderr);
5034 /* Initialize the always-existing constraint variables for NULL,
5035 ANYTHING, READONLY, and INTEGER.  */
5037 static void
5038 init_base_vars (void)
5040 struct constraint_expr lhs, rhs;
5042 /* Create the NULL variable, used to represent that a variable points
5043 to NULL. */
5044 nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
5045 var_nothing = new_var_info (nothing_tree, 0, "NULL");
5046 insert_vi_for_tree (nothing_tree, var_nothing);
5047 var_nothing->is_artificial_var = 1;
5048 var_nothing->offset = 0;
5049 var_nothing->size = ~0;
5050 var_nothing->fullsize = ~0;
5051 var_nothing->is_special_var = 1;
5052 nothing_id = 0;
5053 VEC_safe_push (varinfo_t, heap, varmap, var_nothing);
5055 /* Create the ANYTHING variable, used to represent that a variable
5056 points to some unknown piece of memory. */
5057 anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
5058 var_anything = new_var_info (anything_tree, 1, "ANYTHING");
5059 insert_vi_for_tree (anything_tree, var_anything);
5060 var_anything->is_artificial_var = 1;
5061 var_anything->size = ~0;
5062 var_anything->offset = 0;
5063 var_anything->next = NULL;
5064 var_anything->fullsize = ~0;
5065 var_anything->is_special_var = 1;
5066 anything_id = 1;
5068 /* Anything points to anything. This makes deref constraints just
5069 work in the presence of linked list and other p = *p type loops,
5070 by saying that *ANYTHING = ANYTHING. */
5071 VEC_safe_push (varinfo_t, heap, varmap, var_anything);
5072 lhs.type = SCALAR;
5073 lhs.var = anything_id;
5074 lhs.offset = 0;
5075 rhs.type = ADDRESSOF;
5076 rhs.var = anything_id;
5077 rhs.offset = 0;
5079 /* This specifically does not use process_constraint because
5080 process_constraint ignores all anything = anything constraints, since all
5081 but this one are redundant. */
5082 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
5084 /* Create the READONLY variable, used to represent that a variable
5085 points to readonly memory. */
5086 readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
5087 var_readonly = new_var_info (readonly_tree, 2, "READONLY");
5088 var_readonly->is_artificial_var = 1;
5089 var_readonly->offset = 0;
5090 var_readonly->size = ~0;
5091 var_readonly->fullsize = ~0;
5092 var_readonly->next = NULL;
5093 var_readonly->is_special_var = 1;
5094 insert_vi_for_tree (readonly_tree, var_readonly);
5095 readonly_id = 2;
5096 VEC_safe_push (varinfo_t, heap, varmap, var_readonly);
5098 /* readonly memory points to anything, in order to make deref
5099 easier. In reality, it points to anything the particular
5100 readonly variable can point to, but we don't track this
5101 separately. */
5102 lhs.type = SCALAR;
5103 lhs.var = readonly_id;
5104 lhs.offset = 0;
5105 rhs.type = ADDRESSOF;
5106 rhs.var = anything_id;
5107 rhs.offset = 0;
5109 process_constraint (new_constraint (lhs, rhs));
5111 /* Create the INTEGER variable, used to represent that a variable points
5112 to an INTEGER. */
5113 integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
5114 var_integer = new_var_info (integer_tree, 3, "INTEGER");
5115 insert_vi_for_tree (integer_tree, var_integer);
5116 var_integer->is_artificial_var = 1;
5117 var_integer->size = ~0;
5118 var_integer->fullsize = ~0;
5119 var_integer->offset = 0;
5120 var_integer->next = NULL;
5121 var_integer->is_special_var = 1;
5122 integer_id = 3;
5123 VEC_safe_push (varinfo_t, heap, varmap, var_integer);
5125 /* INTEGER = ANYTHING, because we don't know where a dereference of
5126 a random integer will point to. */
5127 lhs.type = SCALAR;
5128 lhs.var = integer_id;
5129 lhs.offset = 0;
5130 rhs.type = ADDRESSOF;
5131 rhs.var = anything_id;
5132 rhs.offset = 0;
5133 process_constraint (new_constraint (lhs, rhs));
5136 /* Initialize things necessary to perform PTA */
5138 static void
5139 init_alias_vars (void)
5141 bitmap_obstack_initialize (&pta_obstack);
5142 bitmap_obstack_initialize (&oldpta_obstack);
5143 bitmap_obstack_initialize (&predbitmap_obstack);
5145 constraint_pool = create_alloc_pool ("Constraint pool",
5146 sizeof (struct constraint), 30);
5147 variable_info_pool = create_alloc_pool ("Variable info pool",
5148 sizeof (struct variable_info), 30);
5149 constraints = VEC_alloc (constraint_t, heap, 8);
5150 varmap = VEC_alloc (varinfo_t, heap, 8);
5151 vi_for_tree = pointer_map_create ();
5153 memset (&stats, 0, sizeof (stats));
5154 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
5155 shared_bitmap_eq, free);
5156 init_base_vars ();
5159 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
5160 predecessor edges. */
5162 static void
5163 remove_preds_and_fake_succs (constraint_graph_t graph)
5165 unsigned int i;
5167 /* Clear the implicit ref and address nodes from the successor
5168 lists. */
5169 for (i = 0; i < FIRST_REF_NODE; i++)
5171 if (graph->succs[i])
5172 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
5173 FIRST_REF_NODE * 2);
5176 /* Free the successor list for the non-ref nodes. */
5177 for (i = FIRST_REF_NODE; i < graph->size; i++)
5179 if (graph->succs[i])
5180 BITMAP_FREE (graph->succs[i]);
5183 /* Now resize the successor list to the current number of variables,
5184 and blow away the predecessor bitmaps. */
5185 graph->size = VEC_length (varinfo_t, varmap);
5186 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
5188 free (graph->implicit_preds);
5189 graph->implicit_preds = NULL;
5190 free (graph->preds);
5191 graph->preds = NULL;
5192 bitmap_obstack_release (&predbitmap_obstack);
5195 /* Compute the set of variables we can't TBAA prune. */
5197 static void
5198 compute_tbaa_pruning (void)
5200 unsigned int size = VEC_length (varinfo_t, varmap);
5201 unsigned int i;
5202 bool any;
5204 changed_count = 0;
5205 changed = sbitmap_alloc (size);
5206 sbitmap_zero (changed);
5208 /* Mark all initial no_tbaa_pruning nodes as changed. */
5209 any = false;
5210 for (i = 0; i < size; ++i)
5212 varinfo_t ivi = get_varinfo (i);
5214 if (find (i) == i && ivi->no_tbaa_pruning)
5216 any = true;
5217 if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
5218 || VEC_length (constraint_t, graph->complex[i]) > 0)
5220 SET_BIT (changed, i);
5221 ++changed_count;
5226 while (changed_count > 0)
5228 struct topo_info *ti = init_topo_info ();
5229 ++stats.iterations;
5231 compute_topo_order (graph, ti);
5233 while (VEC_length (unsigned, ti->topo_order) != 0)
5235 bitmap_iterator bi;
5237 i = VEC_pop (unsigned, ti->topo_order);
5239 /* If this variable is not a representative, skip it. */
5240 if (find (i) != i)
5241 continue;
5243 /* If the node has changed, we need to process the complex
5244 constraints and outgoing edges again. */
5245 if (TEST_BIT (changed, i))
5247 unsigned int j;
5248 constraint_t c;
5249 VEC(constraint_t,heap) *complex = graph->complex[i];
5251 RESET_BIT (changed, i);
5252 --changed_count;
5254 /* Process the complex copy constraints. */
5255 for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
5257 if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
5259 varinfo_t lhsvi = get_varinfo (find (c->lhs.var));
5261 if (!lhsvi->no_tbaa_pruning)
5263 lhsvi->no_tbaa_pruning = true;
5264 if (!TEST_BIT (changed, lhsvi->id))
5266 SET_BIT (changed, lhsvi->id);
5267 ++changed_count;
5273 /* Propagate to all successors. */
5274 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
5276 unsigned int to = find (j);
5277 varinfo_t tovi = get_varinfo (to);
5279 /* Don't propagate to ourselves. */
5280 if (to == i)
5281 continue;
5283 if (!tovi->no_tbaa_pruning)
5285 tovi->no_tbaa_pruning = true;
5286 if (!TEST_BIT (changed, to))
5288 SET_BIT (changed, to);
5289 ++changed_count;
5296 free_topo_info (ti);
5299 sbitmap_free (changed);
5301 if (any)
5303 for (i = 0; i < size; ++i)
5305 varinfo_t ivi = get_varinfo (i);
5306 varinfo_t ivip = get_varinfo (find (i));
5308 if (ivip->no_tbaa_pruning)
5310 tree var = ivi->decl;
5312 if (TREE_CODE (var) == SSA_NAME)
5313 var = SSA_NAME_VAR (var);
5315 if (POINTER_TYPE_P (TREE_TYPE (var)))
5317 DECL_NO_TBAA_P (var) = 1;
5319 /* Tell the RTL layer that this pointer can alias
5320 anything. */
5321 DECL_POINTER_ALIAS_SET (var) = 0;
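/* [Editorial sketch -- not part of the original file.]  The pass above
   is a fixed-point propagation: once a node is marked no_tbaa_pruning,
   the mark spreads to everything reachable through copy constraints and
   successor edges.  The reduced sketch below shows the same "propagate
   marks until nothing changes" pattern over a made-up adjacency matrix;
   the real pass additionally visits nodes in topological order and
   tracks a changed-count to cut down the number of sweeps.  */

#include <stdbool.h>
#include <stddef.h>

#define N_NODES 8

/* succ[i][j] is true when there is an edge i -> j.  */
static bool succ[N_NODES][N_NODES];
static bool marked[N_NODES];

static void
propagate_marks (void)
{
  bool changed = true;

  while (changed)
    {
      size_t i, j;

      changed = false;
      for (i = 0; i < N_NODES; i++)
        if (marked[i])
          for (j = 0; j < N_NODES; j++)
            if (succ[i][j] && !marked[j])
              {
                marked[j] = true;   /* mark reaches a new successor */
                changed = true;
              }
    }
}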
5328 /* Create points-to sets for the current function. See the comments
5329 at the start of the file for an algorithmic overview. */
5331 void
5332 compute_points_to_sets (struct alias_info *ai)
5334 struct scc_info *si;
5335 basic_block bb;
5337 timevar_push (TV_TREE_PTA);
5339 init_alias_vars ();
5340 init_alias_heapvars ();
5342 intra_create_variable_infos ();
5344 /* Now walk all statements and derive aliases. */
5345 FOR_EACH_BB (bb)
5347 block_stmt_iterator bsi;
5348 tree phi;
5350 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5352 if (is_gimple_reg (PHI_RESULT (phi)))
5354 find_func_aliases (phi);
5356 /* Update various related attributes like escaped
5357 addresses, pointer dereferences for loads and stores.
5358 This is used when creating name tags and alias
5359 sets. */
5360 update_alias_info (phi, ai);
5364 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
5366 tree stmt = bsi_stmt (bsi);
5368 find_func_aliases (stmt);
5370 /* Update various related attributes like escaped
5371 addresses, pointer dereferences for loads and stores.
5372 This is used when creating name tags and alias
5373 sets. */
5374 update_alias_info (stmt, ai);
5376 /* The information in CHANGE_DYNAMIC_TYPE_EXPR nodes has now
5377 been captured, and we can remove them. */
5378 if (TREE_CODE (stmt) == CHANGE_DYNAMIC_TYPE_EXPR)
5379 bsi_remove (&bsi, true);
5380 else
5381 bsi_next (&bsi);
5386 if (dump_file)
5388 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5389 dump_constraints (dump_file);
5392 if (dump_file)
5393 fprintf (dump_file,
5394 "\nCollapsing static cycles and doing variable "
5395 "substitution\n");
5397 init_graph (VEC_length (varinfo_t, varmap) * 2);
5399 if (dump_file)
5400 fprintf (dump_file, "Building predecessor graph\n");
5401 build_pred_graph ();
5403 if (dump_file)
5404 fprintf (dump_file, "Detecting pointer and location "
5405 "equivalences\n");
5406 si = perform_var_substitution (graph);
5408 if (dump_file)
5409 fprintf (dump_file, "Rewriting constraints and unifying "
5410 "variables\n");
5411 rewrite_constraints (graph, si);
5412 free_var_substitution_info (si);
5414 build_succ_graph ();
5415 move_complex_constraints (graph);
5417 if (dump_file)
5418 fprintf (dump_file, "Uniting pointer but not location equivalent "
5419 "variables\n");
5420 unite_pointer_equivalences (graph);
5422 if (dump_file)
5423 fprintf (dump_file, "Finding indirect cycles\n");
5424 find_indirect_cycles (graph);
5426 /* Implicit nodes and predecessors are no longer necessary at this
5427 point. */
5428 remove_preds_and_fake_succs (graph);
5430 if (dump_file)
5431 fprintf (dump_file, "Solving graph\n");
5433 solve_graph (graph);
5435 compute_tbaa_pruning ();
5437 if (dump_file)
5438 dump_sa_points_to_info (dump_file);
5440 have_alias_info = true;
5442 timevar_pop (TV_TREE_PTA);
5446 /* Delete created points-to sets. */
5448 void
5449 delete_points_to_sets (void)
5451 unsigned int i;
5453 htab_delete (shared_bitmap_table);
5454 if (dump_file && (dump_flags & TDF_STATS))
5455 fprintf (dump_file, "Points to sets created:%d\n",
5456 stats.points_to_sets_created);
5458 pointer_map_destroy (vi_for_tree);
5459 bitmap_obstack_release (&pta_obstack);
5460 VEC_free (constraint_t, heap, constraints);
5462 for (i = 0; i < graph->size; i++)
5463 VEC_free (constraint_t, heap, graph->complex[i]);
5464 free (graph->complex);
5466 free (graph->rep);
5467 free (graph->succs);
5468 free (graph->pe);
5469 free (graph->pe_rep);
5470 free (graph->indirect_cycles);
5471 free (graph);
5473 VEC_free (varinfo_t, heap, varmap);
5474 free_alloc_pool (variable_info_pool);
5475 free_alloc_pool (constraint_pool);
5476 have_alias_info = false;
5479 /* Return true if we should execute IPA PTA. */
5480 static bool
5481 gate_ipa_pta (void)
5483 return (flag_unit_at_a_time != 0
5484 && flag_ipa_pta
5485 /* Don't bother doing anything if the program has errors. */
5486 && !(errorcount || sorrycount));
5489 /* Execute the driver for IPA PTA. */
5490 static unsigned int
5491 ipa_pta_execute (void)
5493 struct cgraph_node *node;
5494 struct scc_info *si;
5496 in_ipa_mode = 1;
5497 init_alias_heapvars ();
5498 init_alias_vars ();
5500 for (node = cgraph_nodes; node; node = node->next)
5502 if (!node->analyzed || cgraph_is_master_clone (node))
5504 unsigned int varid;
5506 varid = create_function_info_for (node->decl,
5507 cgraph_node_name (node));
5508 if (node->local.externally_visible)
5510 varinfo_t fi = get_varinfo (varid);
5511 for (; fi; fi = fi->next)
5512 make_constraint_from_anything (fi);
5516 for (node = cgraph_nodes; node; node = node->next)
5518 if (node->analyzed && cgraph_is_master_clone (node))
5520 struct function *cfun = DECL_STRUCT_FUNCTION (node->decl);
5521 basic_block bb;
5522 tree old_func_decl = current_function_decl;
5523 if (dump_file)
5524 fprintf (dump_file,
5525 "Generating constraints for %s\n",
5526 cgraph_node_name (node));
5527 push_cfun (cfun);
5528 current_function_decl = node->decl;
5530 FOR_EACH_BB_FN (bb, cfun)
5532 block_stmt_iterator bsi;
5533 tree phi;
5535 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5537 if (is_gimple_reg (PHI_RESULT (phi)))
5539 find_func_aliases (phi);
5543 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5545 tree stmt = bsi_stmt (bsi);
5546 find_func_aliases (stmt);
5549 current_function_decl = old_func_decl;
5550 pop_cfun ();
5552 else
5554 /* Make it point to anything. */
5558 if (dump_file)
5560 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
5561 dump_constraints (dump_file);
5564 if (dump_file)
5565 fprintf (dump_file,
5566 "\nCollapsing static cycles and doing variable "
5567 "substitution:\n");
5569 init_graph (VEC_length (varinfo_t, varmap) * 2);
5570 build_pred_graph ();
5571 si = perform_var_substitution (graph);
5572 rewrite_constraints (graph, si);
5573 free_var_substitution_info (si);
5575 build_succ_graph ();
5576 move_complex_constraints (graph);
5577 unite_pointer_equivalences (graph);
5578 find_indirect_cycles (graph);
5580 /* Implicit nodes and predecessors are no longer necessary at this
5581 point. */
5582 remove_preds_and_fake_succs (graph);
5584 if (dump_file)
5585 fprintf (dump_file, "\nSolving graph\n");
5587 solve_graph (graph);
5589 if (dump_file)
5590 dump_sa_points_to_info (dump_file);
5592 in_ipa_mode = 0;
5593 delete_alias_heapvars ();
5594 delete_points_to_sets ();
5595 return 0;
5598 struct tree_opt_pass pass_ipa_pta =
5600 "pta", /* name */
5601 gate_ipa_pta, /* gate */
5602 ipa_pta_execute, /* execute */
5603 NULL, /* sub */
5604 NULL, /* next */
5605 0, /* static_pass_number */
5606 TV_IPA_PTA, /* tv_id */
5607 0, /* properties_required */
5608 0, /* properties_provided */
5609 0, /* properties_destroyed */
5610 0, /* todo_flags_start */
5611 TODO_update_ssa, /* todo_flags_finish */
5612 0 /* letter */
5615 /* Initialize the heapvar for statement mapping. */
5616 void
5617 init_alias_heapvars (void)
5619 if (!heapvar_for_stmt)
5620 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
5621 NULL);
5624 void
5625 delete_alias_heapvars (void)
5627 htab_delete (heapvar_for_stmt);
5628 heapvar_for_stmt = NULL;
5632 #include "gt-tree-ssa-structalias.h"