/* Tree based points-to analysis
   Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dberlin@dberlin.org>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-gimple.h"
#include "tree-pass.h"
#include "alloc-pool.h"
#include "splay-tree.h"
#include "tree-ssa-structalias.h"
#include "pointer-set.h"
/* The idea behind this analyzer is to generate set constraints from the
   program, then solve the resulting constraints in order to generate the
   points-to sets.

   Set constraints are a way of modeling program analysis problems that
   involve sets.  They consist of an inclusion constraint language,
   describing the variables (each variable is a set) and operations that
   are involved on the variables, and a set of rules that derive facts
   from these operations.  To solve a system of set constraints, you derive
   all possible facts under the rules, which gives you the correct sets.

   See "Efficient Field-sensitive pointer analysis for C" by David
   J. Pearce and Paul H. J. Kelly and Chris Hankin, at
   http://citeseer.ist.psu.edu/pearce04efficient.html

   Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
   of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
   http://citeseer.ist.psu.edu/heintze01ultrafast.html

   There are three types of real constraint expressions, DEREF,
   ADDRESSOF, and SCALAR.  Each constraint expression consists
   of a constraint type, a variable, and an offset.

   SCALAR is a constraint expression type used to represent x, whether
   it appears on the LHS or the RHS of a statement.
   DEREF is a constraint expression type used to represent *x, whether
   it appears on the LHS or the RHS of a statement.
   ADDRESSOF is a constraint expression used to represent &x, whether
   it appears on the LHS or the RHS of a statement.

   Each pointer variable in the program is assigned an integer id, and
   each field of a structure variable is assigned an integer id as well.

   Structure variables are linked to their list of fields through a "next
   field" in each variable that points to the next field in offset order.
   Each variable for a structure field has

   1. "size", that tells the size in bits of that field.
   2. "fullsize", that tells the size in bits of the entire structure.
   3. "offset", that tells the offset in bits from the beginning of the
   structure to this field.

   For example, given a structure with two 32-bit int fields a and b
   declared as foo, and a separate 32-bit int bar, this gives us:

   foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
   foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
   bar -> id 3, size 32, offset 0, fullsize 32, next NULL

   In order to solve the system of set constraints, the following is
   done:

   1. Each constraint variable x has a solution set associated with it,
   Sol(x).

   2. Constraints are separated into direct, copy, and complex.
   Direct constraints are ADDRESSOF constraints that require no extra
   processing, such as P = &Q.
   Copy constraints are those of the form P = Q.
   Complex constraints are all the constraints involving dereferences
   and offsets (including offsetted copies).

   3. All direct constraints of the form P = &Q are processed, such
   that Q is added to Sol(P).

   4. All complex constraints for a given constraint variable are stored in a
   linked list attached to that variable's node.

   5. A directed graph is built out of the copy constraints.  Each
   constraint variable is a node in the graph, and an edge from
   Q to P is added for each copy constraint of the form P = Q.

   6. The graph is then walked, and solution sets are
   propagated along the copy edges, such that an edge from Q to P
   causes Sol(P) <- Sol(P) union Sol(Q).

   7. As we visit each node, all complex constraints associated with
   that node are processed by adding appropriate copy edges to the graph, or the
   appropriate variables to the solution set.

   8. The process of walking the graph is iterated until no solution
   sets change.

   Prior to walking the graph in steps 6 and 7, we perform static
   cycle elimination on the constraint graph, as well
   as off-line variable substitution.

   TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
   on and turned into anything), but isn't.  You can just see what offset
   inside the pointed-to struct it's going to access.

   TODO: Constant bounded arrays can be handled as if they were structs of the
   same number of elements.

   TODO: Modeling heap and incoming pointers becomes much better if we
   add fields to them as we discover them, which we could do.

   TODO: We could handle unions, but to be honest, it's probably not
   worth the pain or slowdown.  */
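/* Editorial example (not part of GCC): a minimal, self-contained sketch of
   how the three constraint expression types above encode simple statements
   and how they fall into the direct/copy/complex split.  The enum, struct,
   and variable names here are made up for illustration only.  */
#if 0
#include <stdio.h>

typedef enum { EX_SCALAR, EX_DEREF, EX_ADDRESSOF } ex_type;

struct ex { ex_type type; const char *var; unsigned int offset; };
struct constr { struct ex lhs, rhs; };

int
main (void)
{
  /* p = &a;  ->  SCALAR (p) = ADDRESSOF (a)   (direct)
     q = p;   ->  SCALAR (q) = SCALAR (p)      (copy)
     *q = p;  ->  DEREF (q)  = SCALAR (p)      (complex)
     r = *q;  ->  SCALAR (r) = DEREF (q)       (complex)  */
  struct constr cs[] = {
    { { EX_SCALAR, "p", 0 }, { EX_ADDRESSOF, "a", 0 } },
    { { EX_SCALAR, "q", 0 }, { EX_SCALAR,    "p", 0 } },
    { { EX_DEREF,  "q", 0 }, { EX_SCALAR,    "p", 0 } },
    { { EX_SCALAR, "r", 0 }, { EX_DEREF,     "q", 0 } },
  };
  const char *pre[] = { "", "*", "&" };   /* indexed by ex_type */

  for (unsigned int i = 0; i < sizeof cs / sizeof cs[0]; i++)
    printf ("%s%s = %s%s\n",
            pre[cs[i].lhs.type], cs[i].lhs.var,
            pre[cs[i].rhs.type], cs[i].rhs.var);
  return 0;
}
#endif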
static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
htab_t heapvar_for_stmt;

static bool use_field_sensitive = true;
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps. */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets. */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables. */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps. */
static bitmap_obstack iteration_obstack;

static unsigned int create_variable_info_for (tree, const char *);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

struct constraint;
typedef struct constraint *constraint_t;

DEF_VEC_P(constraint_t);
DEF_VEC_ALLOC_P(constraint_t,heap);

#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)

static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;
struct variable_info
{
  /* ID of this variable.  */
  unsigned int id;

  /* Name of this variable.  */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Offset of this variable, in bits, from the base variable.  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* A link to the variable for the next field in this structure.  */
  struct variable_info *next;

  /* True if the variable is directly the target of a dereference.
     This is used to track which variables are *actually* dereferenced
     so we can prune their points-to sets.  */
  unsigned int directly_dereferenced:1;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var:1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var:1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var:1;

  /* True for variables that have unions somewhere in them.  */
  unsigned int has_union:1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var:1;

  /* True if we may not use TBAA to prune references to this
     variable.  This is used for C++ placement new.  */
  unsigned int no_tbaa_pruning : 1;

  /* True if this variable is inside a structure nested in the
     structure for the base variable.  For instance, in
     struct X { int a; struct Y { int b; int c; } }, the variables for
     fields 'b' and 'c' are inside a nested structure.  We are not
     interested in tracking how many levels of nesting, just whether
     there is nesting at all.  This is later used to adjust offsets
     for pointers pointing into sub-structures.  */
  unsigned int in_nested_struct : 1;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  */
  bitmap oldsolution;

  /* Variable id this was collapsed to due to type unsafety.  This
     should be unused completely after build_succ_graph, or something
     is broken.  */
  struct variable_info *collapsed_to;
};
typedef struct variable_info *varinfo_t;

static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);

/* Pool of variable info structures.  */
static alloc_pool variable_info_pool;

DEF_VEC_P(varinfo_t);
DEF_VEC_ALLOC_P(varinfo_t, heap);
/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id.  */
static VEC(varinfo_t,heap) *varmap;

/* Return the varmap element N.  */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return VEC_index (varinfo_t, varmap, n);
}
/* Return the varmap element N, following the collapsed_to link.  */

static inline varinfo_t
get_varinfo_fc (unsigned int n)
{
  varinfo_t v = VEC_index (varinfo_t, varmap, n);

  if (v->collapsed_to)
    return v->collapsed_to;
  return v;
}
/* Variable that represents the unknown pointer.  */
static varinfo_t var_anything;
static tree anything_tree;
static unsigned int anything_id;

/* Variable that represents the NULL pointer.  */
static varinfo_t var_nothing;
static tree nothing_tree;
static unsigned int nothing_id;

/* Variable that represents read only memory.  */
static varinfo_t var_readonly;
static tree readonly_tree;
static unsigned int readonly_id;

/* Variable that represents integers.  This is used for when people do things
   like &0->a.b.  */
static varinfo_t var_integer;
static tree integer_tree;
static unsigned int integer_id;
/* Lookup a heap var for FROM, and return it if we find one.  */

static tree
heapvar_lookup (tree from)
{
  struct tree_map *h, in;

  h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
					       htab_hash_pointer (from));
  return h ? h->to : NULL_TREE;
}

/* Insert a mapping FROM->TO in the heap var for statement
   hashtable.  */

static void
heapvar_insert (tree from, tree to)
{
  struct tree_map *h;
  void **loc;

  h = GGC_NEW (struct tree_map);
  h->hash = htab_hash_pointer (from);
  h->to = to;
  loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
  *(struct tree_map **) loc = h;
}
/* Return a new variable info structure for a variable named NAME,
   associated with tree T and using id ID.  */

static varinfo_t
new_var_info (tree t, unsigned int id, const char *name)
{
  varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
  tree var = t;

  ret->id = id;
  ret->name = name;
  ret->decl = t;
  ret->directly_dereferenced = false;
  ret->is_artificial_var = false;
  ret->is_heap_var = false;
  ret->is_special_var = false;
  ret->is_unknown_size_var = false;
  ret->has_union = false;
  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);
  ret->no_tbaa_pruning = (DECL_P (var)
			  && POINTER_TYPE_P (TREE_TYPE (var))
			  && DECL_NO_TBAA_P (var));
  ret->solution = BITMAP_ALLOC (&pta_obstack);
  ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
  ret->next = NULL;
  ret->collapsed_to = NULL;
  return ret;
}

typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.  */
  unsigned HOST_WIDE_INT offset;
};

typedef struct constraint_expr ce_s;
DEF_VEC_O(ce_s);
DEF_VEC_ALLOC_O(ce_s, heap);
static void get_constraint_for (tree, VEC(ce_s, heap) **);
static void do_deref (VEC (ce_s, heap) **);

/* Our set constraints are made up of two constraint expressions, one
   LHS and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.  */

struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from.  */

static VEC(constraint_t,heap) *constraints;
static alloc_pool constraint_pool;

DEF_VEC_I(int);
DEF_VEC_ALLOC_I(int, heap);
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map.  */
  unsigned int size;

  /* Explicit successors of each node.  */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution).  */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution).  */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified.  */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution.  */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     solving).  */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets.  */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution.  */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding.  */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph.  */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes.  */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution.  */
  bitmap address_taken;

  /* True if points_to bitmap for this node is stored in the hash
     table.  */
  sbitmap pt_used;

  /* Number of incoming edges remaining to be processed by pointer
     equivalence.
     Used for variable substitution.  */
  unsigned int *number_incoming;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  */
  VEC(constraint_t,heap) **complex;
};

static constraint_graph_t graph;
/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  */
#define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))

/* Return the representative node for NODE, if NODE has been unioned
   with another node.
   This function performs path compression along the way to finding
   the representative.  */

static unsigned int
find (unsigned int node)
{
  gcc_assert (node < graph->size);
  if (graph->rep[node] != node)
    return graph->rep[node] = find (graph->rep[node]);
  return node;
}

/* Union the TO and FROM nodes to the TO nodes.
   Note that at some point in the future, we may want to do
   union-by-rank, in which case we are going to have to return the
   node we unified to.  */

static bool
unite (unsigned int to, unsigned int from)
{
  gcc_assert (to < graph->size && from < graph->size);
  if (to != from && graph->rep[from] != to)
    {
      graph->rep[from] = to;
      return true;
    }
  return false;
}
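/* Editorial example (not part of GCC): a standalone sketch of the find/unite
   scheme above.  "rep" plays the role of graph->rep, find compresses paths,
   and unite redirects the representative of FROM to TO.  Names and sizes are
   illustrative only.  */
#if 0
#include <assert.h>
#include <stdbool.h>
#include <stdio.h>

#define N 8
static unsigned int rep[N];

static unsigned int
uf_find (unsigned int node)
{
  assert (node < N);
  if (rep[node] != node)
    return rep[node] = uf_find (rep[node]);   /* path compression */
  return node;
}

static bool
uf_unite (unsigned int to, unsigned int from)
{
  if (to != from && rep[from] != to)
    {
      rep[from] = to;
      return true;
    }
  return false;
}

int
main (void)
{
  unsigned int i;
  for (i = 0; i < N; i++)
    rep[i] = i;
  uf_unite (1, 2);
  uf_unite (2, 3);                            /* 3 -> 2 -> 1 */
  /* Prints 1; rep[3] now points directly at 1 after compression.  */
  printf ("rep of 3 is %u\n", uf_find (3));
  return 0;
}
#endif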
/* Create a new constraint consisting of LHS and RHS expressions.  */

static constraint_t
new_constraint (const struct constraint_expr lhs,
		const struct constraint_expr rhs)
{
  constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
  ret->lhs = lhs;
  ret->rhs = rhs;
  return ret;
}

/* Print out constraint C to FILE.  */

static void
dump_constraint (FILE *file, constraint_t c)
{
  if (c->lhs.type == ADDRESSOF)
    fprintf (file, "&");
  else if (c->lhs.type == DEREF)
    fprintf (file, "*");
  fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
  if (c->lhs.offset != 0)
    fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
  fprintf (file, " = ");
  if (c->rhs.type == ADDRESSOF)
    fprintf (file, "&");
  else if (c->rhs.type == DEREF)
    fprintf (file, "*");
  fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
  if (c->rhs.offset != 0)
    fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
  fprintf (file, "\n");
}

/* Print out constraint C to stderr.  */

void
debug_constraint (constraint_t c)
{
  dump_constraint (stderr, c);
}

/* Print out all constraints to FILE.  */

static void
dump_constraints (FILE *file)
{
  int i;
  constraint_t c;

  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    dump_constraint (file, c);
}

/* Print out all constraints to stderr.  */

void
debug_constraints (void)
{
  dump_constraints (stderr);
}
/* The solver is a simple worklist solver, that works on the following
   algorithm:

   sbitmap changed_nodes = all zeroes;
   changed_count = 0;
   For each node that is not already collapsed:
       changed_count++;
       set bit in changed nodes

   while (changed_count > 0)
   {
     compute topological ordering for constraint graph

     find and collapse cycles in the constraint graph (updating
     changed if necessary)

     for each node (n) in the graph in topological order:
       changed_count--;

       Process each complex constraint associated with the node,
       updating changed if necessary.

       For each outgoing edge from n, propagate the solution from n to
       the destination of the edge, updating changed as necessary.
   }  */
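/* Editorial example (not part of GCC): a minimal, self-contained sketch of
   the worklist propagation described above.  Points-to sets are plain
   bitmasks, copy edges are a fixed successor matrix, and "changed" drives the
   iteration; the real solver adds topological ordering, cycle collapsing, and
   complex constraints on top of this skeleton.  All names and numbers are
   illustrative only.  */
#if 0
#include <stdbool.h>
#include <stdio.h>

#define NODES 4

int
main (void)
{
  /* succ[i][j] != 0 means an edge i -> j, i.e. Sol(j) >= Sol(i).  */
  static const int succ[NODES][NODES] = {
    { 0, 1, 0, 0 },   /* 0 -> 1 */
    { 0, 0, 1, 0 },   /* 1 -> 2 */
    { 0, 0, 0, 1 },   /* 2 -> 3 */
    { 0, 0, 0, 0 },
  };
  unsigned int sol[NODES] = { 0x1, 0x2, 0x0, 0x0 };  /* ADDRESSOF seeds */
  bool changed[NODES] = { true, true, false, false };
  int pending = 2;

  while (pending > 0)
    for (int n = 0; n < NODES; n++)
      {
        if (!changed[n])
          continue;
        changed[n] = false;
        pending--;
        for (int m = 0; m < NODES; m++)
          if (succ[n][m] && (sol[m] | sol[n]) != sol[m])
            {
              sol[m] |= sol[n];              /* Sol(m) <- Sol(m) U Sol(n) */
              if (!changed[m])
                {
                  changed[m] = true;
                  pending++;
                }
            }
      }

  for (int n = 0; n < NODES; n++)
    printf ("Sol(%d) = 0x%x\n", n, sol[n]);
  return 0;
}
#endif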
/* Return true if two constraint expressions A and B are equal.  */

static bool
constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
{
  return a.type == b.type && a.var == b.var && a.offset == b.offset;
}

/* Return true if constraint expression A is less than constraint expression
   B.  This is just arbitrary, but consistent, in order to give them an
   ordering.  */

static bool
constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
{
  if (a.type == b.type)
    {
      if (a.var == b.var)
	return a.offset < b.offset;
      else
	return a.var < b.var;
    }
  else
    return a.type < b.type;
}

/* Return true if constraint A is less than constraint B.  This is just
   arbitrary, but consistent, in order to give them an ordering.  */

static bool
constraint_less (const constraint_t a, const constraint_t b)
{
  if (constraint_expr_less (a->lhs, b->lhs))
    return true;
  else if (constraint_expr_less (b->lhs, a->lhs))
    return false;
  else
    return constraint_expr_less (a->rhs, b->rhs);
}

/* Return true if two constraints A and B are equal.  */

static bool
constraint_equal (struct constraint a, struct constraint b)
{
  return constraint_expr_equal (a.lhs, b.lhs)
    && constraint_expr_equal (a.rhs, b.rhs);
}

/* Find a constraint LOOKFOR in the sorted constraint vector VEC.  */

static constraint_t
constraint_vec_find (VEC(constraint_t,heap) *vec,
		     struct constraint lookfor)
{
  unsigned int place;
  constraint_t found;

  if (vec == NULL)
    return NULL;

  place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
  if (place >= VEC_length (constraint_t, vec))
    return NULL;
  found = VEC_index (constraint_t, vec, place);
  if (!constraint_equal (*found, lookfor))
    return NULL;
  return found;
}
/* Union two constraint vectors, TO and FROM.  Put the result in TO.  */

static void
constraint_set_union (VEC(constraint_t,heap) **to,
		      VEC(constraint_t,heap) **from)
{
  int i;
  constraint_t c;

  for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
    {
      if (constraint_vec_find (*to, *c) == NULL)
	{
	  unsigned int place = VEC_lower_bound (constraint_t, *to, c,
						constraint_less);
	  VEC_safe_insert (constraint_t, heap, *to, place, c);
	}
    }
}
/* Take a solution set SET, add OFFSET to each member of the set, and
   overwrite SET with the result when done.  */

static void
solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
{
  bitmap result = BITMAP_ALLOC (&iteration_obstack);
  unsigned int i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
    {
      /* If this is a properly sized variable, only add offset if it's
	 less than end.  Otherwise, it is globbed to a single
	 variable.  */
      if ((get_varinfo (i)->offset + offset) < get_varinfo (i)->fullsize)
	{
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (i)->offset + offset;
	  varinfo_t v = first_vi_for_offset (get_varinfo (i), fieldoffset);
	  if (!v)
	    continue;
	  bitmap_set_bit (result, v->id);
	}
      else if (get_varinfo (i)->is_artificial_var
	       || get_varinfo (i)->has_union
	       || get_varinfo (i)->is_unknown_size_var)
	{
	  bitmap_set_bit (result, i);
	}
    }

  bitmap_copy (set, result);
  BITMAP_FREE (result);
}
/* Union solution sets TO and FROM, and add INC to each member of FROM in the
   process.  */

static bool
set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
{
  if (inc == 0)
    return bitmap_ior_into (to, from);
  else
    {
      bitmap tmp;
      bool res;

      tmp = BITMAP_ALLOC (&iteration_obstack);
      bitmap_copy (tmp, from);
      solution_set_add (tmp, inc);
      res = bitmap_ior_into (to, tmp);
      BITMAP_FREE (tmp);
      return res;
    }
}
/* Insert constraint C into the list of complex constraints for graph
   node VAR.  */

static void
insert_into_complex (constraint_graph_t graph,
		     unsigned int var, constraint_t c)
{
  VEC (constraint_t, heap) *complex = graph->complex[var];
  unsigned int place = VEC_lower_bound (constraint_t, complex, c,
					constraint_less);

  /* Only insert constraints that do not already exist.  */
  if (place >= VEC_length (constraint_t, complex)
      || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
    VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
}
/* Condense two variable nodes into a single variable node, by moving
   all associated info from SRC to TO.  */

static void
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
{
  int i;
  constraint_t c;

  gcc_assert (find (from) == to);

  /* Move all complex constraints from src node into to node.  */
  for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
    {
      /* In complex constraints for node src, we may have either
	 a = *src, and *src = a, or an offseted constraint which are
	 always added to the rhs node's constraints.  */
      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else if (c->lhs.type == DEREF)
	c->lhs.var = to;
      else
	c->rhs.var = to;
    }
  constraint_set_union (&graph->complex[to], &graph->complex[from]);
  VEC_free (constraint_t, heap, graph->complex[from]);
  graph->complex[from] = NULL;
}
/* Remove edges involving NODE from GRAPH.  */

static void
clear_edges_for_node (constraint_graph_t graph, unsigned int node)
{
  if (graph->succs[node])
    BITMAP_FREE (graph->succs[node]);
}
/* Merge GRAPH nodes FROM and TO into node TO.  */

static void
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
		   unsigned int from)
{
  if (graph->indirect_cycles[from] != -1)
    {
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other.  */
      if (graph->indirect_cycles[to] == -1)
	graph->indirect_cycles[to] = graph->indirect_cycles[from];
    }

  /* Merge all the successor edges.  */
  if (graph->succs[from])
    {
      if (!graph->succs[to])
	graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
      bitmap_ior_into (graph->succs[to],
		       graph->succs[from]);
    }

  clear_edges_for_node (graph, from);
}
/* Add an indirect graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  */

static void
add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
			 unsigned int from)
{
  if (!graph->implicit_preds[to])
    graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);

  if (!bitmap_bit_p (graph->implicit_preds[to], from))
    {
      stats.num_implicit_edges++;
      bitmap_set_bit (graph->implicit_preds[to], from);
    }
}

/* Add a predecessor graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.
   Return false if the edge already existed, true otherwise.  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  if (!bitmap_bit_p (graph->preds[to], from))
    bitmap_set_bit (graph->preds[to], from);
}
/* Add a graph edge to GRAPH, going from FROM to TO if
   it doesn't exist in the graph already.
   Return false if the edge already existed, true otherwise.  */

static bool
add_graph_edge (constraint_graph_t graph, unsigned int to,
		unsigned int from)
{
  if (to == from)
    return false;
  else
    {
      bool r = false;

      if (!graph->succs[from])
	graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
      if (!bitmap_bit_p (graph->succs[from], to))
	{
	  r = true;
	  if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
	    stats.num_edges++;
	  bitmap_set_bit (graph->succs[from], to);
	}
      return r;
    }
}

/* Return true if {DEST.SRC} is an existing graph edge in GRAPH.  */

static bool
valid_graph_edge (constraint_graph_t graph, unsigned int src,
		  unsigned int dest)
{
  return (graph->succs[dest]
	  && bitmap_bit_p (graph->succs[dest], src));
}
/* Initialize the constraint graph structure to contain SIZE nodes.  */

static void
init_graph (unsigned int size)
{
  unsigned int j;

  graph = XCNEW (struct constraint_graph);
  graph->size = size;
  graph->succs = XCNEWVEC (bitmap, graph->size);
  graph->indirect_cycles = XNEWVEC (int, graph->size);
  graph->rep = XNEWVEC (unsigned int, graph->size);
  graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
  graph->pe = XCNEWVEC (unsigned int, graph->size);
  graph->pe_rep = XNEWVEC (int, graph->size);

  for (j = 0; j < graph->size; j++)
    {
      graph->rep[j] = j;
      graph->pe_rep[j] = -1;
      graph->indirect_cycles[j] = -1;
    }
}
/* Build the constraint graph, adding only predecessor edges right now.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->pt_used = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  graph->number_incoming = XCNEWVEC (unsigned int, graph->size);
  sbitmap_zero (graph->direct_nodes);
  sbitmap_zero (graph->pt_used);

  for (j = 0; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	SET_BIT (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
    graph->indirect_cycles[j] = -1;

  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
      unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    RESET_BIT (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y.  */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  RESET_BIT (graph->direct_nodes, rhsvar);
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y.  */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  if (rhs.offset != 0)
	    RESET_BIT (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    RESET_BIT (graph->direct_nodes, rhs.var);
	}
    }
}
/* Build the constraint graph, adding successor edges.  */

static void
build_succ_graph (void)
{
  int i;
  constraint_t c;

  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      lhsvar = find (get_varinfo_fc (lhs.var)->id);
      rhsvar = find (get_varinfo_fc (rhs.var)->id);

      if (lhs.type == DEREF)
	{
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y.  */
	  gcc_assert (find (get_varinfo_fc (rhs.var)->id)
		      == get_varinfo_fc (rhs.var)->id);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }
}
/* Changed variables on the last iteration.  */
static unsigned int changed_count;
static sbitmap changed;

DEF_VEC_I(unsigned);
DEF_VEC_ALLOC_I(unsigned,heap);


/* Strongly Connected Component visitation info.  */

struct scc_info
{
  sbitmap visited;
  sbitmap deleted;
  unsigned int *dfs;
  unsigned int *node_mapping;
  int current_index;
  VEC(unsigned,heap) *scc_stack;
};
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of the
   current graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */
static void
scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  SET_BIT (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      if (TEST_BIT (si->deleted, w))
	continue;

      if (!TEST_BIT (si->visited, w))
	scc_visit (graph, si, w);
      {
	unsigned int t = find (w);
	unsigned int nnode = find (n);
	gcc_assert (nnode == n);

	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      if (VEC_length (unsigned, si->scc_stack) > 0
	  && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  bool have_ref_node = n >= FIRST_REF_NODE;
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  while (VEC_length (unsigned, si->scc_stack) != 0
		 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
	    {
	      unsigned int w = VEC_pop (unsigned, si->scc_stack);

	      bitmap_set_bit (scc, w);
	      if (w >= FIRST_REF_NODE)
		have_ref_node = true;
	    }

	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      SET_BIT (si->deleted, n);
    }
  else
    VEC_safe_push (unsigned, heap, si->scc_stack, n);
}
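/* Editorial example (not part of GCC): a compact, standalone illustration of
   the Tarjan/Nuutila scheme implemented by scc_visit above -- dfs numbers, a
   stack of non-root nodes, and collapsing when a node's dfs number is still
   its own after visiting successors.  Graph size and edges are illustrative
   only.  */
#if 0
#include <stdbool.h>
#include <stdio.h>

#define N 5
static const int succ[N][N] = {
  { 0, 1, 0, 0, 0 },
  { 0, 0, 1, 0, 0 },
  { 1, 0, 0, 1, 0 },   /* 0 -> 1 -> 2 -> 0 forms a cycle */
  { 0, 0, 0, 0, 1 },
  { 0, 0, 0, 0, 0 },
};

static unsigned int dfs[N], current_index, scc_of[N];
static bool visited[N], deleted[N];
static unsigned int stack[N], stack_len;

static void
scc_visit_sketch (unsigned int n)
{
  unsigned int my_dfs;

  visited[n] = true;
  dfs[n] = current_index++;
  my_dfs = dfs[n];

  for (unsigned int w = 0; w < N; w++)
    if (succ[n][w] && !deleted[w])
      {
        if (!visited[w])
          scc_visit_sketch (w);
        if (dfs[w] < dfs[n])
          dfs[n] = dfs[w];
      }

  if (dfs[n] == my_dfs)
    {
      /* n is the root of an SCC; pop every stacked node belonging to it.  */
      scc_of[n] = n;
      while (stack_len > 0 && dfs[stack[stack_len - 1]] >= my_dfs)
        scc_of[stack[--stack_len]] = n;
      deleted[n] = true;
    }
  else
    stack[stack_len++] = n;   /* non-root nodes stay on the stack (Nuutila) */
}

int
main (void)
{
  for (unsigned int i = 0; i < N; i++)
    if (!visited[i])
      scc_visit_sketch (i);
  for (unsigned int i = 0; i < N; i++)
    printf ("node %u is in SCC rooted at %u\n", i, scc_of[i]);
  return 0;
}
#endif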
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  gcc_assert (to != from && find (to) == to);
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  merge_graph_nodes (graph, to, from);
  merge_node_constraints (graph, to, from);

  if (get_varinfo (from)->no_tbaa_pruning)
    get_varinfo (to)->no_tbaa_pruning = true;

  /* Mark TO as changed if FROM was changed.  If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed && TEST_BIT (changed, from))
    {
      RESET_BIT (changed, from);
      if (!TEST_BIT (changed, to))
	SET_BIT (changed, to);
      else
	{
	  gcc_assert (changed_count > 0);
	  changed_count--;
	}
    }
  if (get_varinfo (from)->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      if (bitmap_ior_into (get_varinfo (to)->solution,
			   get_varinfo (from)->solution))
	{
	  if (update_changed && !TEST_BIT (changed, to))
	    {
	      SET_BIT (changed, to);
	      changed_count++;
	    }
	}

      BITMAP_FREE (get_varinfo (from)->solution);
      BITMAP_FREE (get_varinfo (from)->oldsolution);

      if (stats.iterations > 0)
	{
	  BITMAP_FREE (get_varinfo (to)->oldsolution);
	  get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
	}
    }
  if (valid_graph_edge (graph, to, to))
    {
      if (graph->succs[to])
	bitmap_clear_bit (graph->succs[to], to);
    }
}
/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse topological order*.  */
  VEC(unsigned,heap) *topo_order;
};


/* Initialize and return a topological info structure.  */

static struct topo_info *
init_topo_info (void)
{
  size_t size = graph->size;
  struct topo_info *ti = XNEW (struct topo_info);
  ti->visited = sbitmap_alloc (size);
  sbitmap_zero (ti->visited);
  ti->topo_order = VEC_alloc (unsigned, heap, 1);
  return ti;
}
/* Free the topological sort info pointed to by TI.  */

static void
free_topo_info (struct topo_info *ti)
{
  sbitmap_free (ti->visited);
  VEC_free (unsigned, heap, ti->topo_order);
  free (ti);
}

/* Visit the graph in topological order, and store the order in the
   topo_info structure.  */

static void
topo_visit (constraint_graph_t graph, struct topo_info *ti,
	    unsigned int n)
{
  bitmap_iterator bi;
  unsigned int j;

  SET_BIT (ti->visited, n);

  if (graph->succs[n])
    EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
      {
	if (!TEST_BIT (ti->visited, j))
	  topo_visit (graph, ti, j);
      }

  VEC_safe_push (unsigned, heap, ti->topo_order, n);
}
/* Return true if variable N + OFFSET is a legal field of N.  */

static bool
type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
{
  varinfo_t ninfo = get_varinfo (n);

  /* For things we've globbed to single variables, any offset into the
     variable acts like the entire variable, so that it becomes offset
     0.  */
  if (ninfo->is_special_var
      || ninfo->is_artificial_var
      || ninfo->is_unknown_size_var)
    {
      *offset = 0;
      return true;
    }
  return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
}
/* Process a constraint C that represents x = *y, using DELTA as the
   starting solution.  */
static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;

  /* For x = *ANYTHING, the answer is ANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag = !bitmap_bit_p (sol, anything_id);
      if (flag)
	bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      unsigned HOST_WIDE_INT roffset = c->rhs.offset;
      if (type_safe (j, &roffset))
	{
	  varinfo_t v;
	  unsigned int t;
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;

	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  if (!v)
	    continue;
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  else if (add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	}
      else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
	fprintf (dump_file, "Untypesafe usage in do_sd_constraint\n");
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      if (!TEST_BIT (changed, lhs))
	{
	  SET_BIT (changed, lhs);
	  changed_count++;
	}
    }
}
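/* Editorial example (not part of GCC): the rule do_sd_constraint implements
   above for x = *y with offset 0 -- for every j currently in Sol(y) (DELTA),
   Sol(x) gains Sol(j).  Sets are plain bitmasks and variable ids are
   illustrative only.  */
#if 0
#include <stdio.h>

#define NVARS 5

int
main (void)
{
  /* Variable ids: 0 = x, 1 = y, 2 = a, 3 = b, 4 = c.
     y points to { a, b }, and a points to { c }.  */
  unsigned int sol[NVARS] = { 0, (1u << 2) | (1u << 3), 1u << 4, 0, 0 };
  unsigned int delta = sol[1];        /* Sol(y) = { a, b } */
  unsigned int x = 0;

  for (unsigned int j = 0; j < NVARS; j++)
    if (delta & (1u << j))
      sol[x] |= sol[j];               /* Sol(x) <- Sol(x) U Sol(j) */

  printf ("Sol(x) = 0x%x\n", sol[x]); /* contains c, because a points to c */
  return 0;
}
#endif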
/* Process a constraint C that represents *x = y.  */

static void
do_ds_constraint (constraint_t c, bitmap delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;

  if (bitmap_bit_p (sol, anything_id))
    {
      EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
	{
	  varinfo_t jvi = get_varinfo (j);
	  varinfo_t v;
	  unsigned int t;
	  unsigned int loff = c->lhs.offset;
	  unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;

	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  if (!v)
	    continue;
	  t = find (v->id);

	  if (!bitmap_bit_p (get_varinfo (t)->solution, anything_id))
	    {
	      bitmap_set_bit (get_varinfo (t)->solution, anything_id);
	      if (!TEST_BIT (changed, t))
		{
		  SET_BIT (changed, t);
		  changed_count++;
		}
	    }
	}
      return;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      unsigned HOST_WIDE_INT loff = c->lhs.offset;
      if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
	{
	  varinfo_t v;
	  unsigned int t;
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
	  bitmap tmp;

	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  if (!v)
	    continue;
	  t = find (v->id);
	  tmp = get_varinfo (t)->solution;

	  if (set_union_with_increment (tmp, sol, 0))
	    {
	      get_varinfo (t)->solution = tmp;
	      if (t == rhs)
		sol = get_varinfo (rhs)->solution;
	      if (!TEST_BIT (changed, t))
		{
		  SET_BIT (changed, t);
		  changed_count++;
		}
	    }
	}
      else if (0 && dump_file && !(get_varinfo (j)->is_special_var))
	fprintf (dump_file, "Untypesafe usage in do_ds_constraint\n");
    }
}
/* Handle a non-simple (simple meaning requires no iteration)
   constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved).  */

static void
do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
{
  if (c->lhs.type == DEREF)
    {
      if (c->rhs.type == ADDRESSOF)
	{
	  gcc_unreachable ();
	}
      else
	{
	  /* *x = y.  */
	  do_ds_constraint (c, delta);
	}
    }
  else if (c->rhs.type == DEREF)
    {
      /* x = *y.  */
      if (!(get_varinfo (c->lhs.var)->is_special_var))
	do_sd_constraint (graph, c, delta);
    }
  else
    {
      bitmap tmp;
      bitmap solution;
      bool flag = false;

      gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
      solution = get_varinfo (c->rhs.var)->solution;
      tmp = get_varinfo (c->lhs.var)->solution;

      flag = set_union_with_increment (tmp, solution, c->rhs.offset);

      if (flag)
	{
	  get_varinfo (c->lhs.var)->solution = tmp;
	  if (!TEST_BIT (changed, c->lhs.var))
	    {
	      SET_BIT (changed, c->lhs.var);
	      changed_count++;
	    }
	}
    }
}
/* Initialize and return a new SCC info structure.  */

static struct scc_info *
init_scc_info (size_t size)
{
  struct scc_info *si = XNEW (struct scc_info);
  size_t i;

  si->current_index = 0;
  si->visited = sbitmap_alloc (size);
  sbitmap_zero (si->visited);
  si->deleted = sbitmap_alloc (size);
  sbitmap_zero (si->deleted);
  si->node_mapping = XNEWVEC (unsigned int, size);
  si->dfs = XCNEWVEC (unsigned int, size);

  for (i = 0; i < size; i++)
    si->node_mapping[i] = i;

  si->scc_stack = VEC_alloc (unsigned, heap, 1);
  return si;
}

/* Free an SCC info structure pointed to by SI.  */

static void
free_scc_info (struct scc_info *si)
{
  sbitmap_free (si->visited);
  sbitmap_free (si->deleted);
  free (si->node_mapping);
  free (si->dfs);
  VEC_free (unsigned, heap, si->scc_stack);
  free (si);
}
/* Find indirect cycles in GRAPH that occur, using strongly connected
   components, and note them in the indirect cycles map.

   This technique comes from Ben Hardekopf and Calvin Lin,
   "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
   Lines of Code", submitted to PLDI 2007.  */

static void
find_indirect_cycles (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  struct scc_info *si = init_scc_info (size);

  for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
    if (!TEST_BIT (si->visited, i) && find (i) == i)
      scc_visit (graph, si, i);

  free_scc_info (si);
}

/* Compute a topological ordering for GRAPH, and store the result in the
   topo_info structure TI.  */

static void
compute_topo_order (constraint_graph_t graph,
		    struct topo_info *ti)
{
  unsigned int i;
  unsigned int size = graph->size;

  for (i = 0; i != size; ++i)
    if (!TEST_BIT (ti->visited, i) && find (i) == i)
      topo_visit (graph, ti, i);
}
/* Structure used for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
{
  unsigned int equivalence_class;
  bitmap labels;
  hashval_t hashcode;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;

/* A hashtable for mapping a bitmap of labels->pointer equivalence
   class.  */
static htab_t pointer_equiv_class_table;

/* A hashtable for mapping a bitmap of labels->location equivalence
   class.  */
static htab_t location_equiv_class_table;

/* Hash function for an equiv_class_label_t.  */

static hashval_t
equiv_class_label_hash (const void *p)
{
  const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
  return ecl->hashcode;
}

/* Equality function for two equiv_class_label_t's.  */

static int
equiv_class_label_eq (const void *p1, const void *p2)
{
  const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
  const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
  return bitmap_equal_p (eql1->labels, eql2->labels);
}

/* Lookup an equivalence class in TABLE by the bitmap of LABELS it
   contains.  */

static unsigned int
equiv_class_lookup (htab_t table, bitmap labels)
{
  void **slot;
  struct equiv_class_label ecl;

  ecl.labels = labels;
  ecl.hashcode = bitmap_hash (labels);

  slot = htab_find_slot_with_hash (table, &ecl,
				   ecl.hashcode, NO_INSERT);
  if (!slot)
    return 0;
  else
    return ((equiv_class_label_t) *slot)->equivalence_class;
}


/* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
   to TABLE.  */

static void
equiv_class_add (htab_t table, unsigned int equivalence_class,
		 bitmap labels)
{
  void **slot;
  equiv_class_label_t ecl = XNEW (struct equiv_class_label);

  ecl->labels = labels;
  ecl->equivalence_class = equivalence_class;
  ecl->hashcode = bitmap_hash (labels);

  slot = htab_find_slot_with_hash (table, ecl,
				   ecl->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = (void *) ecl;
}
/* Perform offline variable substitution.

   This is a worst case quadratic time way of identifying variables
   that must have equivalent points-to sets, including those caused by
   static cycles, and single entry subgraphs, in the constraint graph.

   The technique is described in "Exploiting Pointer and Location
   Equivalence to Optimize Pointer Analysis.  In the 14th International
   Static Analysis Symposium (SAS), August 2007."  It is known as the
   "HU" algorithm, and is equivalent to value numbering the collapsed
   constraint graph including evaluating unions.

   The general method of finding equivalence classes is as follows:
   Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
   Initialize all non-REF nodes to be direct nodes.
   For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
   variable}.
   For each constraint containing the dereference, we also do the same
   thing.

   We then compute SCC's in the graph and unify nodes in the same SCC,
   including pts sets.

   For each non-collapsed node x:
    Visit all unvisited explicit incoming edges.
    Ignoring all non-pointers, set pts(x) = Union of pts(y) for all y
    such that y -> x.
    Lookup the equivalence class for pts(x).
     If we found one, equivalence_class(x) = found class.
     Otherwise, equivalence_class(x) = new class, and new_class is
      added to the lookup table.

   All direct nodes with the same equivalence class can be replaced
   with a single representative node.
   All unlabeled nodes (label == 0) are not pointers and all edges
   involving them can be eliminated.
   We perform these optimizations during rewrite_constraints.

   In addition to pointer equivalence class finding, we also perform
   location equivalence class finding.  This is the set of variables
   that always appear together in points-to sets.  We use this to
   compress the size of the points-to sets.  */
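/* Editorial example (not part of GCC): a tiny, self-contained sketch of the
   pointer-equivalence idea described above -- give every ADDRESSOF a fresh
   token, union token sets along copy edges, and give equal labels to nodes
   whose sets are equal.  Nodes whose set stays empty are non-pointers
   (label 0).  Node numbering, edges, and the assumed topological order are
   illustrative only.  */
#if 0
#include <stdio.h>

#define N 4

int
main (void)
{
  /* p = &a; q = p; r = q; i = 42;  ->  tokens flow p -> q -> r, i gets none.  */
  unsigned int pts[N] = { 1u << 0, 0, 0, 0 };   /* fresh token for &a at p */
  const int copy_from[N] = { -1, 0, 1, -1 };    /* q copies p, r copies q */
  unsigned int classes[N], nclasses = 0;

  /* Propagate token sets along copy edges, in topological order.  */
  for (int n = 0; n < N; n++)
    if (copy_from[n] >= 0)
      pts[n] |= pts[copy_from[n]];

  /* Hash-cons the sets into equivalence class labels.  */
  for (int n = 0; n < N; n++)
    {
      unsigned int label = 0;
      if (pts[n] != 0)
        {
          unsigned int c;
          for (c = 0; c < nclasses; c++)
            if (classes[c] == pts[n])
              break;
          if (c == nclasses)
            classes[nclasses++] = pts[n];
          label = c + 1;
        }
      /* p, q, r end up with the same nonzero label; i gets label 0.  */
      printf ("node %d: label %u\n", n, label);
    }
  return 0;
}
#endif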
/* Current maximum pointer equivalence class id.  */
static int pointer_equiv_class;

/* Current maximum location equivalence class id.  */
static int location_equiv_class;
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  */

static void
condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  gcc_assert (si->node_mapping[n] == n);
  SET_BIT (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (TEST_BIT (si->deleted, w))
	continue;

      if (!TEST_BIT (si->visited, w))
	condense_visit (graph, si, w);
      {
	unsigned int t = si->node_mapping[w];
	unsigned int nnode = si->node_mapping[n];
	gcc_assert (nnode == n);

	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (TEST_BIT (si->deleted, w))
	continue;

      if (!TEST_BIT (si->visited, w))
	condense_visit (graph, si, w);
      {
	unsigned int t = si->node_mapping[w];
	unsigned int nnode = si->node_mapping[n];
	gcc_assert (nnode == n);

	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      while (VEC_length (unsigned, si->scc_stack) != 0
	     && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
	{
	  unsigned int w = VEC_pop (unsigned, si->scc_stack);
	  si->node_mapping[w] = n;

	  if (!TEST_BIT (graph->direct_nodes, w))
	    RESET_BIT (graph->direct_nodes, n);

	  /* Unify our nodes.  */
	  if (graph->preds[w])
	    {
	      if (!graph->preds[n])
		graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->preds[n], graph->preds[w]);
	    }
	  if (graph->implicit_preds[w])
	    {
	      if (!graph->implicit_preds[n])
		graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->implicit_preds[n],
			       graph->implicit_preds[w]);
	    }
	  if (graph->points_to[w])
	    {
	      if (!graph->points_to[n])
		graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->points_to[n],
			       graph->points_to[w]);
	    }
	}
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
	{
	  unsigned int rep = si->node_mapping[i];
	  graph->number_incoming[rep]++;
	}
      SET_BIT (si->deleted, n);
    }
  else
    VEC_safe_push (unsigned, heap, si->scc_stack, n);
}
/* Label pointer equivalences.  */

static void
label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;

  SET_BIT (si->visited, n);

  if (!graph->points_to[n])
    graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);

  /* Label and union our incoming edges's points to sets.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      if (!TEST_BIT (si->visited, w))
	label_visit (graph, si, w);

      /* Skip unused edges.  */
      if (w == n || graph->pointer_label[w] == 0)
	{
	  graph->number_incoming[w]--;
	  continue;
	}
      if (graph->points_to[w])
	bitmap_ior_into (graph->points_to[n], graph->points_to[w]);

      /* If all incoming edges to w have been processed and
	 graph->points_to[w] was not stored in the hash table, we can
	 free it.  */
      graph->number_incoming[w]--;
      if (!graph->number_incoming[w] && !TEST_BIT (graph->pt_used, w))
	{
	  BITMAP_FREE (graph->points_to[w]);
	}
    }
  /* Indirect nodes get fresh variables.  */
  if (!TEST_BIT (graph->direct_nodes, n))
    bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);

  if (!bitmap_empty_p (graph->points_to[n]))
    {
      unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
					       graph->points_to[n]);
      if (!label)
	{
	  SET_BIT (graph->pt_used, n);
	  label = pointer_equiv_class++;
	  equiv_class_add (pointer_equiv_class_table,
			   label, graph->points_to[n]);
	}
      graph->pointer_label[n] = label;
    }
}
/* Perform offline variable substitution, discovering equivalence
   classes, and eliminating non-pointer variables.  */

static struct scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  struct scc_info *si = init_scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
					   equiv_class_label_eq, free);
  location_equiv_class_table = htab_create (511, equiv_class_label_hash,
					    equiv_class_label_eq, free);
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    if (!TEST_BIT (si->visited, si->node_mapping[i]))
      condense_visit (graph, si, si->node_mapping[i]);

  sbitmap_zero (si->visited);
  /* Actually label the nodes for pointer equivalences.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    if (!TEST_BIT (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      bitmap pointed_by;
      bitmap_iterator bi;
      unsigned int j;
      unsigned int label;

      if (!graph->pointed_by[i])
	continue;
      pointed_by = BITMAP_ALLOC (&iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	{
	  bitmap_set_bit (pointed_by,
			  graph->pointer_label[si->node_mapping[j]]);
	}
      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  */
      label = equiv_class_lookup (location_equiv_class_table,
				  pointed_by);
      if (label == 0)
	{
	  label = location_equiv_class++;
	  equiv_class_add (location_equiv_class_table,
			   label, pointed_by);
	}
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Found location equivalence for node %s\n",
		     get_varinfo (i)->name);
	  BITMAP_FREE (pointed_by);
	}
      graph->loc_label[i] = label;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 0; i < FIRST_REF_NODE; i++)
      {
	bool direct_node = TEST_BIT (graph->direct_nodes, i);
	fprintf (dump_file,
		 "Equivalence classes for %s node id %d:%s are pointer: %d"
		 ", location:%d\n",
		 direct_node ? "Direct node" : "Indirect node", i,
		 get_varinfo (i)->name,
		 graph->pointer_label[si->node_mapping[i]],
		 graph->loc_label[si->node_mapping[i]]);
      }

  /* Quickly eliminate our non-pointer variables.  */

  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      if (graph->pointer_label[node] == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }

  return si;
}
/* Free information that was only necessary for variable
   substitution.  */

static void
free_var_substitution_info (struct scc_info *si)
{
  free_scc_info (si);
  free (graph->pointer_label);
  free (graph->loc_label);
  free (graph->pointed_by);
  free (graph->points_to);
  free (graph->number_incoming);
  free (graph->eq_rep);
  sbitmap_free (graph->direct_nodes);
  sbitmap_free (graph->pt_used);
  htab_delete (pointer_equiv_class_table);
  htab_delete (location_equiv_class_table);
  bitmap_obstack_release (&iteration_obstack);
}
/* Return an existing node that is equivalent to NODE, which has
   equivalence class LABEL, if one exists.  Return NODE otherwise.  */

static unsigned int
find_equivalent_node (constraint_graph_t graph,
		      unsigned int node, unsigned int label)
{
  /* If the address version of this variable is unused, we can
     substitute it for anything else with the same label.
     Otherwise, we know the pointers are equivalent, but not the
     locations, and we can unite them later.  */

  if (!bitmap_bit_p (graph->address_taken, node))
    {
      gcc_assert (label < graph->size);

      if (graph->eq_rep[label] != -1)
	{
	  /* Unify the two variables since we know they are equivalent.  */
	  if (unite (graph->eq_rep[label], node))
	    unify_nodes (graph, graph->eq_rep[label], node, false);
	  return graph->eq_rep[label];
	}
      else
	{
	  graph->eq_rep[label] = node;
	  graph->pe_rep[label] = node;
	}
    }
  else
    {
      gcc_assert (label < graph->size);
      graph->pe[node] = label;
      if (graph->pe_rep[label] == -1)
	graph->pe_rep[label] = node;
    }

  return node;
}
/* Unite pointer equivalent but not location equivalent nodes in
   GRAPH.  This may only be performed once variable substitution is
   finished.  */

static void
unite_pointer_equivalences (constraint_graph_t graph)
{
  unsigned int i;

  /* Go through the pointer equivalences and unite them to their
     representative, if they aren't already.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      unsigned int label = graph->pe[i];
      if (label)
	{
	  int label_rep = graph->pe_rep[label];

	  if (label_rep == -1)
	    continue;

	  label_rep = find (label_rep);
	  if (label_rep >= 0 && unite (label_rep, find (i)))
	    unify_nodes (graph, label_rep, i, false);
	}
    }
}
/* Move complex constraints to the GRAPH nodes they belong to.  */

static void
move_complex_constraints (constraint_graph_t graph)
{
  int i;
  constraint_t c;

  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    {
      if (c)
	{
	  struct constraint_expr lhs = c->lhs;
	  struct constraint_expr rhs = c->rhs;

	  if (lhs.type == DEREF)
	    {
	      insert_into_complex (graph, lhs.var, c);
	    }
	  else if (rhs.type == DEREF)
	    {
	      if (!(get_varinfo (lhs.var)->is_special_var))
		insert_into_complex (graph, rhs.var, c);
	    }
	  else if (rhs.type != ADDRESSOF && lhs.var > anything_id
		   && (lhs.offset != 0 || rhs.offset != 0))
	    {
	      insert_into_complex (graph, rhs.var, c);
	    }
	}
    }
}
/* Optimize and rewrite complex constraints while performing
   collapsing of equivalent nodes.  SI is the SCC_INFO that is the
   result of perform_variable_substitution.  */

static void
rewrite_constraints (constraint_graph_t graph,
		     struct scc_info *si)
{
  int i;
  unsigned int j;
  constraint_t c;

  for (j = 0; j < graph->size; j++)
    gcc_assert (find (j) == j);

  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
      unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
      unsigned int lhsnode, rhsnode;
      unsigned int lhslabel, rhslabel;

      lhsnode = si->node_mapping[lhsvar];
      rhsnode = si->node_mapping[rhsvar];
      lhslabel = graph->pointer_label[lhsnode];
      rhslabel = graph->pointer_label[rhsnode];

      /* See if it is really a non-pointer variable, and if so, ignore
	 the constraint.  */
      if (lhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "%s is a non-pointer variable, "
		       "ignoring constraint: ",
		       get_varinfo (lhs.var)->name);
	      dump_constraint (dump_file, c);
	    }
	  VEC_replace (constraint_t, constraints, i, NULL);
	  continue;
	}

      if (rhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "%s is a non-pointer variable, "
		       "ignoring constraint: ",
		       get_varinfo (rhs.var)->name);
	      dump_constraint (dump_file, c);
	    }
	  VEC_replace (constraint_t, constraints, i, NULL);
	  continue;
	}

      lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
      rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
      c->lhs.var = lhsvar;
      c->rhs.var = rhsvar;
    }
}
/* Eliminate indirect cycles involving NODE.  Return true if NODE was
   part of an SCC, false otherwise.  */

static bool
eliminate_indirect_cycles (unsigned int node)
{
  if (graph->indirect_cycles[node] != -1
      && !bitmap_empty_p (get_varinfo (node)->solution))
    {
      unsigned int i;
      VEC(unsigned,heap) *queue = NULL;
      int queuepos;
      unsigned int to = find (graph->indirect_cycles[node]);
      bitmap_iterator bi;

      /* We can't touch the solution set and call unify_nodes
         at the same time, because unify_nodes is going to do
         bitmap unions into it. */

      EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
        {
          if (find (i) == i && i != to)
            {
              if (unite (to, i))
                VEC_safe_push (unsigned, heap, queue, i);
            }
        }

      for (queuepos = 0;
           VEC_iterate (unsigned, queue, queuepos, i);
           queuepos++)
        {
          unify_nodes (graph, to, i, true);
        }
      VEC_free (unsigned, heap, queue);
      return true;
    }
  return false;
}
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changing.  This corresponds to steps 6-8 in the solving list given above.  */

static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = graph->size;
  unsigned int i;
  bitmap pts;

  changed_count = 0;
  changed = sbitmap_alloc (size);
  sbitmap_zero (changed);

  /* Mark all initial non-collapsed nodes as changed.  */
  for (i = 0; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
          && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
              || VEC_length (constraint_t, graph->complex[i]) > 0))
        {
          SET_BIT (changed, i);
          changed_count++;
        }
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  while (changed_count > 0)
    {
      unsigned int j;
      constraint_t c;
      struct topo_info *ti = init_topo_info ();

      bitmap_obstack_initialize (&iteration_obstack);

      compute_topo_order (graph, ti);

      while (VEC_length (unsigned, ti->topo_order) != 0)
        {
          i = VEC_pop (unsigned, ti->topo_order);

          /* If this variable is not a representative, skip it.  */
          if (find (i) != i)
            continue;

          /* In certain indirect cycle cases, we may merge this
             variable to another.  */
          if (eliminate_indirect_cycles (i) && find (i) != i)
            continue;

          /* If the node has changed, we need to process the
             complex constraints and outgoing edges again.  */
          if (TEST_BIT (changed, i))
            {
              bitmap solution;
              VEC(constraint_t,heap) *complex = graph->complex[i];
              bool solution_empty;

              RESET_BIT (changed, i);
              changed_count--;

              /* Compute the changed set of solution bits.  */
              bitmap_and_compl (pts, get_varinfo (i)->solution,
                                get_varinfo (i)->oldsolution);

              if (bitmap_empty_p (pts))
                continue;

              bitmap_ior_into (get_varinfo (i)->oldsolution, pts);

              solution = get_varinfo (i)->solution;
              solution_empty = bitmap_empty_p (solution);

              /* Process the complex constraints */
              for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
                {
                  /* XXX: This is going to unsort the constraints in
                     some cases, which will occasionally add duplicate
                     constraints during unification.  This does not
                     affect correctness.  */
                  c->lhs.var = find (c->lhs.var);
                  c->rhs.var = find (c->rhs.var);

                  /* The only complex constraint that can change our
                     solution to non-empty, given an empty solution,
                     is a constraint where the lhs side is receiving
                     some set from elsewhere.  */
                  if (!solution_empty || c->lhs.type != DEREF)
                    do_complex_constraint (graph, c, pts);
                }

              solution_empty = bitmap_empty_p (solution);

              if (!solution_empty)
                {
                  bitmap_iterator bi;

                  /* Propagate solution to all successors.  */
                  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
                                                0, j, bi)
                    {
                      bitmap tmp;
                      bool flag;
                      unsigned int to = find (j);

                      tmp = get_varinfo (to)->solution;
                      flag = false;

                      /* Don't try to propagate to ourselves.  */
                      if (to == i)
                        continue;

                      flag = set_union_with_increment (tmp, pts, 0);

                      if (flag)
                        {
                          get_varinfo (to)->solution = tmp;
                          if (!TEST_BIT (changed, to))
                            {
                              SET_BIT (changed, to);
                              changed_count++;
                            }
                        }
                    }
                }
            }
        }
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  sbitmap_free (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
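/* Rough illustration of one propagation step (not from the original
   sources): if the solution of a node p gains a new member &x, and the
   graph has a copy edge from p to q (for a constraint q = p), then x is
   added to the solution of q and q is marked changed again, so that its
   own complex constraints and successors are reconsidered on a later
   iteration.  */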
/* Map from trees to variable infos.  */
static struct pointer_map_t *vi_for_tree;

/* Insert ID as the variable id for tree T in the vi_for_tree map.  */

static void
insert_vi_for_tree (tree t, varinfo_t vi)
{
  void **slot = pointer_map_insert (vi_for_tree, t);
  gcc_assert (*slot == NULL);
  *slot = vi;
}

/* Find the variable info for tree T in VI_FOR_TREE.  If T does not
   exist in the map, return NULL, otherwise, return the varinfo we found.  */

static varinfo_t
lookup_vi_for_tree (tree t)
{
  void **slot = pointer_map_contains (vi_for_tree, t);
  if (slot == NULL)
    return NULL;

  return (varinfo_t) *slot;
}
/* Return a printable name for DECL  */

static const char *
alias_get_name (tree decl)
{
  const char *res = get_name (decl);
  char *temp;
  int num_printed = 0;

  if (res != NULL)
    return res;

  res = "NULL";
  if (!dump_file)
    return res;

  if (TREE_CODE (decl) == SSA_NAME)
    {
      num_printed = asprintf (&temp, "%s_%u",
                              alias_get_name (SSA_NAME_VAR (decl)),
                              SSA_NAME_VERSION (decl));
    }
  else if (DECL_P (decl))
    {
      num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
    }
  if (num_printed > 0)
    {
      res = ggc_strdup (temp);
      free (temp);
    }
  return res;
}
/* Find the variable id for tree T in the map.
   If T doesn't exist in the map, create an entry for it and return it.  */

static varinfo_t
get_vi_for_tree (tree t)
{
  void **slot = pointer_map_contains (vi_for_tree, t);
  if (slot == NULL)
    return get_varinfo (create_variable_info_for (t, alias_get_name (t)));

  return (varinfo_t) *slot;
}
/* Get a constraint expression from an SSA_VAR_P node.  */

static struct constraint_expr
get_constraint_exp_from_ssa_var (tree t)
{
  struct constraint_expr cexpr;

  gcc_assert (SSA_VAR_P (t) || DECL_P (t));

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (t) == SSA_NAME
      && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
      && SSA_NAME_IS_DEFAULT_DEF (t))
    return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));

  cexpr.type = SCALAR;

  cexpr.var = get_vi_for_tree (t)->id;
  /* If we determine the result is "anything", and we know this is readonly,
     say it points to readonly memory instead.  */
  if (cexpr.var == anything_id && TREE_READONLY (t))
    {
      cexpr.type = ADDRESSOF;
      cexpr.var = readonly_id;
    }

  cexpr.offset = 0;
  return cexpr;
}
/* Process a completed constraint T, and add it to the constraint
   list.  FROM_CALL is true if this is a constraint coming from a
   call, which means any DEREFs we see are "may-deref's", not
   "must-deref"'s.  */

static void
process_constraint_1 (constraint_t t, bool from_call)
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
  gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));

  if (lhs.type == DEREF)
    get_varinfo (lhs.var)->directly_dereferenced = true;
  if (rhs.type == DEREF)
    get_varinfo (rhs.var)->directly_dereferenced = true;

  if (!use_field_sensitive)
    {
      t->rhs.offset = 0;
      t->lhs.offset = 0;
    }

  /* ANYTHING == ANYTHING is pointless.  */
  if (lhs.var == anything_id && rhs.var == anything_id)
    return;

  /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING) */
  else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
    {
      rhs = t->lhs;
      t->lhs = t->rhs;
      t->rhs = rhs;
      process_constraint_1 (t, from_call);
    }
  /* This can happen in our IR with things like n->a = *p */
  else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
    {
      /* Split into tmp = *rhs, *lhs = tmp */
      tree rhsdecl = get_varinfo (rhs.var)->decl;
      tree pointertype = TREE_TYPE (rhsdecl);
      tree pointedtotype = TREE_TYPE (pointertype);
      tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
      struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);

      /* If this is an aggregate of known size, we should have passed
         this off to do_structure_copy, and it should have broken it
         up.  */
      gcc_assert (!AGGREGATE_TYPE_P (pointedtotype)
                  || get_varinfo (rhs.var)->is_unknown_size_var);

      process_constraint_1 (new_constraint (tmplhs, rhs), from_call);
      process_constraint_1 (new_constraint (lhs, tmplhs), from_call);
    }
  else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
    {
      /* Split into tmp = &rhs, *lhs = tmp */
      tree rhsdecl = get_varinfo (rhs.var)->decl;
      tree pointertype = TREE_TYPE (rhsdecl);
      tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp");
      struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);

      process_constraint_1 (new_constraint (tmplhs, rhs), from_call);
      process_constraint_1 (new_constraint (lhs, tmplhs), from_call);
    }
  else
    {
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
      VEC_safe_push (constraint_t, heap, constraints, t);
    }
}

/* Process constraint T, performing various simplifications and then
   adding it to our list of overall constraints.  */

static void
process_constraint (constraint_t t)
{
  process_constraint_1 (t, false);
}
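/* Rough illustration (hypothetical variables): a statement such as
   "*x = &y" cannot be represented by a single constraint expression on
   each side, so the code above splits it into roughly
   "derefaddrtmp = &y" followed by "*x = derefaddrtmp", both of which
   are representable.  */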
/* Return true if T is a variable of a type that could contain
   pointers.  */

static bool
could_have_pointers (tree t)
{
  tree type = TREE_TYPE (t);

  if (POINTER_TYPE_P (type)
      || AGGREGATE_TYPE_P (type)
      || TREE_CODE (type) == COMPLEX_TYPE)
    return true;

  return false;
}
/* Return the position, in bits, of FIELD_DECL from the beginning of its
   structure.  */

static unsigned HOST_WIDE_INT
bitpos_of_field (const tree fdecl)
{
  if (TREE_CODE (DECL_FIELD_OFFSET (fdecl)) != INTEGER_CST
      || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl)) != INTEGER_CST)
    return -1;

  return (tree_low_cst (DECL_FIELD_OFFSET (fdecl), 1) * 8)
         + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl), 1);
}
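/* Rough example with a hypothetical "struct s { char c; int i; }": on a
   target that places "i" at byte 4, DECL_FIELD_OFFSET (i) is 4 and
   DECL_FIELD_BIT_OFFSET (i) is 0, so this function would return
   4 * 8 + 0 == 32 bits from the start of the structure.  */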
/* Return true if an access to [ACCESSPOS, ACCESSSIZE]
   overlaps with a field at [FIELDPOS, FIELDSIZE] */

static bool
offset_overlaps_with_access (const unsigned HOST_WIDE_INT fieldpos,
                             const unsigned HOST_WIDE_INT fieldsize,
                             const unsigned HOST_WIDE_INT accesspos,
                             const unsigned HOST_WIDE_INT accesssize)
{
  if (fieldpos == accesspos && fieldsize == accesssize)
    return true;
  if (accesspos >= fieldpos && accesspos < (fieldpos + fieldsize))
    return true;
  if (accesspos < fieldpos && (accesspos + accesssize > fieldpos))
    return true;

  return false;
}
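/* Rough example: a field at bit offset 32 with size 32 overlaps an
   access at position 48 of size 16, since 48 >= 32 and 48 < 32 + 32;
   an access at position 0 of size 64 also overlaps it, via the last
   test above.  */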
/* Given a COMPONENT_REF T, return the constraint_expr for it.  */

static void
get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results)
{
  tree orig_t = t;
  HOST_WIDE_INT bitsize = -1;
  HOST_WIDE_INT bitmaxsize = -1;
  HOST_WIDE_INT bitpos;
  tree forzero;
  struct constraint_expr *result;
  unsigned int beforelength = VEC_length (ce_s, *results);

  /* Some people like to do cute things like take the address of
     &0->a.b  */
  forzero = t;
  while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
    forzero = TREE_OPERAND (forzero, 0);

  if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
    {
      struct constraint_expr temp;

      temp.offset = 0;
      temp.var = integer_id;
      temp.type = SCALAR;
      VEC_safe_push (ce_s, heap, *results, &temp);
      return;
    }

  t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);

  /* String constants are readonly, so there is nothing to really do
     here.  */
  if (TREE_CODE (t) == STRING_CST)
    return;

  get_constraint_for (t, results);
  result = VEC_last (ce_s, *results);
  result->offset = bitpos;

  gcc_assert (beforelength + 1 == VEC_length (ce_s, *results));

  /* This can also happen due to weird offsetof type macros.  */
  if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
    result->type = SCALAR;

  if (result->type == SCALAR)
    {
      /* In languages like C, you can access one past the end of an
         array.  You aren't allowed to dereference it, so we can
         ignore this constraint.  When we handle pointer subtraction,
         we may have to do something cute here.  */

      if (result->offset < get_varinfo (result->var)->fullsize
          && bitmaxsize != 0)
        {
          /* It's also not true that the constraint will actually start at the
             right offset, it may start in some padding.  We only care about
             setting the constraint to the first actual field it touches, so
             walk to find it.  */
          varinfo_t curr;

          for (curr = get_varinfo (result->var); curr; curr = curr->next)
            {
              if (offset_overlaps_with_access (curr->offset, curr->size,
                                               result->offset, bitmaxsize))
                {
                  result->var = curr->id;
                  break;
                }
            }
          /* assert that we found *some* field there.  The user couldn't be
             accessing *only* padding.  */
          /* Still the user could access one past the end of an array
             embedded in a struct resulting in accessing *only* padding.  */
          gcc_assert (curr || ref_contains_array_ref (orig_t));

          result->offset = 0;
        }
      else if (bitmaxsize == 0)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Access to zero-sized part of variable, "
                     "ignoring\n");
        }
      else
        if (dump_file && (dump_flags & TDF_DETAILS))
          fprintf (dump_file, "Access to past the end of variable, ignoring\n");
    }
  else if (bitmaxsize == -1)
    {
      /* We can't handle DEREF constraints with unknown size, we'll
         get the wrong answer.  Punt and return anything.  */
      result->var = anything_id;
      result->offset = 0;
    }
}
/* Dereference the constraint expression CONS, and return the result.
   DEREF (ADDRESSOF) = SCALAR
   DEREF (SCALAR) = DEREF
   DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
   This is needed so that we can handle dereferencing DEREF constraints.  */

static void
do_deref (VEC (ce_s, heap) **constraints)
{
  struct constraint_expr *c;
  unsigned int i = 0;

  for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
    {
      if (c->type == SCALAR)
        c->type = DEREF;
      else if (c->type == ADDRESSOF)
        c->type = SCALAR;
      else if (c->type == DEREF)
        {
          tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
          struct constraint_expr tmplhs = get_constraint_exp_from_ssa_var (tmpvar);
          process_constraint (new_constraint (tmplhs, *c));
          c->var = tmplhs.var;
        }
      else
        gcc_unreachable ();
    }
}
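/* Rough illustration (hypothetical variable): dereferencing a
   constraint that is already a DEREF, as happens for "**p", is
   modelled above by introducing roughly "dereftmp = *p" and then
   using "*dereftmp", since a single constraint expression can only
   encode one level of dereference.  */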
/* Given a tree T, return the constraint expression for it.  */

static void
get_constraint_for (tree t, VEC (ce_s, heap) **results)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case.  The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.  */
  if (TREE_CODE (t) == INTEGER_CST
      && integer_zerop (t))
    {
      temp.var = nothing_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      VEC_safe_push (ce_s, heap, *results, &temp);
      return;
    }

  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
    case tcc_vl_exp:
      {
        switch (TREE_CODE (t))
          {
          case ADDR_EXPR:
            {
              struct constraint_expr *c;
              unsigned int i = 0;
              tree exp = TREE_OPERAND (t, 0);
              tree pttype = TREE_TYPE (TREE_TYPE (t));

              get_constraint_for (exp, results);

              /* Complex types are special.  Taking the address of one
                 allows you to access either part of it through that
                 pointer.  */
              if (VEC_length (ce_s, *results) == 1 &&
                  TREE_CODE (pttype) == COMPLEX_TYPE)
                {
                  struct constraint_expr *origrhs;
                  varinfo_t origvar;
                  struct constraint_expr tmp;

                  gcc_assert (VEC_length (ce_s, *results) == 1);
                  origrhs = VEC_last (ce_s, *results);
                  tmp = *origrhs;
                  VEC_pop (ce_s, *results);
                  origvar = get_varinfo (origrhs->var);
                  for (; origvar; origvar = origvar->next)
                    {
                      tmp.var = origvar->id;
                      VEC_safe_push (ce_s, heap, *results, &tmp);
                    }
                }

              for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
                {
                  if (c->type == DEREF)
                    c->type = SCALAR;
                  else
                    c->type = ADDRESSOF;
                }
              return;
            }
          case CALL_EXPR:
            /* XXX: In interprocedural mode, if we didn't have the
               body, we would need to do *each pointer argument =
               &ANYTHING added.  */
            if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
              {
                varinfo_t vi;
                tree heapvar = heapvar_lookup (t);

                if (heapvar == NULL)
                  {
                    heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
                    DECL_EXTERNAL (heapvar) = 1;
                    get_var_ann (heapvar)->is_heapvar = 1;
                    if (gimple_referenced_vars (cfun))
                      add_referenced_var (heapvar);
                    heapvar_insert (t, heapvar);
                  }

                temp.var = create_variable_info_for (heapvar,
                                                     alias_get_name (heapvar));

                vi = get_varinfo (temp.var);
                vi->is_artificial_var = 1;
                vi->is_heap_var = 1;
                temp.type = ADDRESSOF;
                temp.offset = 0;
                VEC_safe_push (ce_s, heap, *results, &temp);
                return;
              }
            else
              {
                temp.var = anything_id;
                temp.type = SCALAR;
                temp.offset = 0;
                VEC_safe_push (ce_s, heap, *results, &temp);
                return;
              }
          default:
            {
              temp.type = ADDRESSOF;
              temp.var = anything_id;
              temp.offset = 0;
              VEC_safe_push (ce_s, heap, *results, &temp);
              return;
            }
          }
      }
    case tcc_reference:
      {
        switch (TREE_CODE (t))
          {
          case INDIRECT_REF:
            {
              get_constraint_for (TREE_OPERAND (t, 0), results);
              do_deref (results);
              return;
            }
          case ARRAY_REF:
          case ARRAY_RANGE_REF:
          case COMPONENT_REF:
            get_constraint_for_component_ref (t, results);
            return;
          default:
            {
              temp.type = ADDRESSOF;
              temp.var = anything_id;
              temp.offset = 0;
              VEC_safe_push (ce_s, heap, *results, &temp);
              return;
            }
          }
      }
    case tcc_unary:
      {
        switch (TREE_CODE (t))
          {
          case NOP_EXPR:
          case CONVERT_EXPR:
          case NON_LVALUE_EXPR:
            {
              tree op = TREE_OPERAND (t, 0);

              /* Cast from non-pointer to pointers are bad news for us.
                 Anything else, we see through */
              if (!(POINTER_TYPE_P (TREE_TYPE (t))
                    && ! POINTER_TYPE_P (TREE_TYPE (op))))
                {
                  get_constraint_for (op, results);
                  return;
                }

              /* FALLTHRU  */
            }
          default:
            {
              temp.type = ADDRESSOF;
              temp.var = anything_id;
              temp.offset = 0;
              VEC_safe_push (ce_s, heap, *results, &temp);
              return;
            }
          }
      }
    case tcc_exceptional:
      {
        switch (TREE_CODE (t))
          {
          case PHI_NODE:
            {
              get_constraint_for (PHI_RESULT (t), results);
              return;
            }
          case SSA_NAME:
            {
              struct constraint_expr temp;
              temp = get_constraint_exp_from_ssa_var (t);
              VEC_safe_push (ce_s, heap, *results, &temp);
              return;
            }
          default:
            {
              temp.type = ADDRESSOF;
              temp.var = anything_id;
              temp.offset = 0;
              VEC_safe_push (ce_s, heap, *results, &temp);
              return;
            }
          }
      }
    case tcc_declaration:
      {
        struct constraint_expr temp;
        temp = get_constraint_exp_from_ssa_var (t);
        VEC_safe_push (ce_s, heap, *results, &temp);
        return;
      }
    default:
      {
        temp.type = ADDRESSOF;
        temp.var = anything_id;
        temp.offset = 0;
        VEC_safe_push (ce_s, heap, *results, &temp);
        return;
      }
    }
}
/* Handle the structure copy case where we have a simple structure copy
   between LHS and RHS that is of SIZE (in bits)

   For each field of the lhs variable (lhsfield)
     For each field of the rhs variable at lhsfield.offset (rhsfield)
       add the constraint lhsfield = rhsfield

   If we fail due to some kind of type unsafety or other thing we
   can't handle, return false.  We expect the caller to collapse the
   variable in that case.  */

static bool
do_simple_structure_copy (const struct constraint_expr lhs,
                          const struct constraint_expr rhs,
                          const unsigned HOST_WIDE_INT size)
{
  varinfo_t p = get_varinfo (lhs.var);
  unsigned HOST_WIDE_INT pstart, last;

  pstart = p->offset;
  last = p->offset + size;
  for (; p && p->offset < last; p = p->next)
    {
      varinfo_t q;
      struct constraint_expr templhs = lhs;
      struct constraint_expr temprhs = rhs;
      unsigned HOST_WIDE_INT fieldoffset;

      templhs.var = p->id;
      q = get_varinfo (temprhs.var);
      fieldoffset = p->offset - pstart;
      q = first_vi_for_offset (q, q->offset + fieldoffset);
      if (!q)
        return false;
      temprhs.var = q->id;
      process_constraint (new_constraint (templhs, temprhs));
    }
  return true;
}
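/* Rough illustration (hypothetical declarations): for
   "struct s { int *a; int *b; } x, y;" and a copy "x = y", the loop
   above generates roughly the constraints x.a = y.a and x.b = y.b,
   pairing fields by their offset from the start of the structure.  */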
/* Handle the structure copy case where we have a structure copy between an
   aggregate on the LHS and a dereference of a pointer on the RHS
   that is of SIZE (in bits)

   For each field of the lhs variable (lhsfield)
     rhs.offset = lhsfield->offset
     add the constraint lhsfield = rhs
*/

static void
do_rhs_deref_structure_copy (const struct constraint_expr lhs,
                             const struct constraint_expr rhs,
                             const unsigned HOST_WIDE_INT size)
{
  varinfo_t p = get_varinfo (lhs.var);
  unsigned HOST_WIDE_INT pstart,last;

  pstart = p->offset;
  last = p->offset + size;

  for (; p && p->offset < last; p = p->next)
    {
      varinfo_t q;
      struct constraint_expr templhs = lhs;
      struct constraint_expr temprhs = rhs;
      unsigned HOST_WIDE_INT fieldoffset;

      if (templhs.type == SCALAR)
        templhs.var = p->id;
      else
        templhs.offset = p->offset;

      q = get_varinfo (temprhs.var);
      fieldoffset = p->offset - pstart;
      temprhs.offset += fieldoffset;
      process_constraint (new_constraint (templhs, temprhs));
    }
}
/* Handle the structure copy case where we have a structure copy
   between an aggregate on the RHS and a dereference of a pointer on
   the LHS that is of SIZE (in bits)

   For each field of the rhs variable (rhsfield)
     lhs.offset = rhsfield->offset
     add the constraint lhs = rhsfield
*/

static void
do_lhs_deref_structure_copy (const struct constraint_expr lhs,
                             const struct constraint_expr rhs,
                             const unsigned HOST_WIDE_INT size)
{
  varinfo_t p = get_varinfo (rhs.var);
  unsigned HOST_WIDE_INT pstart,last;

  pstart = p->offset;
  last = p->offset + size;

  for (; p && p->offset < last; p = p->next)
    {
      varinfo_t q;
      struct constraint_expr templhs = lhs;
      struct constraint_expr temprhs = rhs;
      unsigned HOST_WIDE_INT fieldoffset;

      if (temprhs.type == SCALAR)
        temprhs.var = p->id;
      else
        temprhs.offset = p->offset;

      q = get_varinfo (templhs.var);
      fieldoffset = p->offset - pstart;
      templhs.offset += fieldoffset;
      process_constraint (new_constraint (templhs, temprhs));
    }
}
/* Sometimes, frontends like to give us bad type information.  This
   function will collapse all the fields from VAR to the end of VAR,
   into VAR, so that we treat those fields as a single variable.
   We return the variable they were collapsed into.  */

static unsigned int
collapse_rest_of_var (unsigned int var)
{
  varinfo_t currvar = get_varinfo (var);
  varinfo_t field;

  for (field = currvar->next; field; field = field->next)
    {
      if (dump_file)
        fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
                 field->name, currvar->name);

      gcc_assert (!field->collapsed_to);
      field->collapsed_to = currvar;
    }

  currvar->next = NULL;
  currvar->size = currvar->fullsize - currvar->offset;

  return currvar->id;
}
/* Handle aggregate copies by expanding into copies of the respective
   fields of the structures.  */

static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr lhs, rhs, tmp;
  VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
  varinfo_t p;
  unsigned HOST_WIDE_INT lhssize;
  unsigned HOST_WIDE_INT rhssize;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for (rhsop, &rhsc);
  gcc_assert (VEC_length (ce_s, lhsc) == 1);
  gcc_assert (VEC_length (ce_s, rhsc) == 1);
  lhs = *(VEC_last (ce_s, lhsc));
  rhs = *(VEC_last (ce_s, rhsc));

  VEC_free (ce_s, heap, lhsc);
  VEC_free (ce_s, heap, rhsc);

  /* If we have special var = x, swap it around.  */
  if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
    {
      tmp = lhs;
      lhs = rhs;
      rhs = tmp;
    }

  /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
     possible it's something we could handle.  However, most cases falling
     into this are dealing with transparent unions, which are slightly
     weird.  */
  if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
    {
      rhs.type = ADDRESSOF;
      rhs.var = anything_id;
    }

  /* If the RHS is a special var, or an addressof, set all the LHS fields to
     that special var.  */
  if (rhs.var <= integer_id)
    {
      for (p = get_varinfo (lhs.var); p; p = p->next)
        {
          struct constraint_expr templhs = lhs;
          struct constraint_expr temprhs = rhs;

          if (templhs.type == SCALAR)
            templhs.var = p->id;
          else
            templhs.offset += p->offset;
          process_constraint (new_constraint (templhs, temprhs));
        }
    }
  else
    {
      tree rhstype = TREE_TYPE (rhsop);
      tree lhstype = TREE_TYPE (lhsop);
      tree rhstypesize;
      tree lhstypesize;

      lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
      rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);

      /* If we have a variably sized types on the rhs or lhs, and a deref
         constraint, add the constraint, lhsconstraint = &ANYTHING.
         This is conservatively correct because either the lhs is an unknown
         sized var (if the constraint is SCALAR), or the lhs is a DEREF
         constraint, and every variable it can point to must be unknown sized
         anyway, so we don't need to worry about fields at all.  */
      if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
          || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
        {
          rhs.var = anything_id;
          rhs.type = ADDRESSOF;
          rhs.offset = 0;
          process_constraint (new_constraint (lhs, rhs));
          return;
        }

      /* The size only really matters insofar as we don't set more or less of
         the variable.  If we hit an unknown size var, the size should be the
         whole darn thing.  */
      if (get_varinfo (rhs.var)->is_unknown_size_var)
        rhssize = ~0;
      else
        rhssize = TREE_INT_CST_LOW (rhstypesize);

      if (get_varinfo (lhs.var)->is_unknown_size_var)
        lhssize = ~0;
      else
        lhssize = TREE_INT_CST_LOW (lhstypesize);

      if (rhs.type == SCALAR && lhs.type == SCALAR)
        {
          if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
            {
              lhs.var = collapse_rest_of_var (lhs.var);
              rhs.var = collapse_rest_of_var (rhs.var);
              lhs.offset = 0;
              rhs.offset = 0;
              lhs.type = SCALAR;
              rhs.type = SCALAR;
              process_constraint (new_constraint (lhs, rhs));
            }
        }
      else if (lhs.type != DEREF && rhs.type == DEREF)
        do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
      else if (lhs.type == DEREF && rhs.type != DEREF)
        do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
      else
        {
          tree pointedtotype = lhstype;
          tree tmpvar;

          gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
          tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
          do_structure_copy (tmpvar, rhsop);
          do_structure_copy (lhsop, tmpvar);
        }
    }
}
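/* Summary of the dispatch above: SCALAR = SCALAR copies are expanded
   field-wise by do_simple_structure_copy (falling back to collapsing
   both variables when that fails), aggregate = *p copies go through
   do_rhs_deref_structure_copy, *p = aggregate copies through
   do_lhs_deref_structure_copy, and *p = *q copies are split through a
   temporary.  */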
/* Update related alias information kept in AI.  This is used when
   building name tags, alias sets and deciding grouping heuristics.
   STMT is the statement to process.  This function also updates
   ADDRESSABLE_VARS.  */

static void
update_alias_info (tree stmt, struct alias_info *ai)
{
  bitmap addr_taken;
  use_operand_p use_p;
  ssa_op_iter iter;
  bool stmt_dereferences_ptr_p;
  enum escape_type stmt_escape_type = is_escape_site (stmt);
  struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);

  stmt_dereferences_ptr_p = false;

  if (stmt_escape_type == ESCAPE_TO_CALL
      || stmt_escape_type == ESCAPE_TO_PURE_CONST)
    {
      mem_ref_stats->num_call_sites++;
      if (stmt_escape_type == ESCAPE_TO_PURE_CONST)
        mem_ref_stats->num_pure_const_call_sites++;
    }
  else if (stmt_escape_type == ESCAPE_TO_ASM)
    mem_ref_stats->num_asm_sites++;

  /* Mark all the variables whose address are taken by the statement.  */
  addr_taken = addresses_taken (stmt);
  if (addr_taken)
    {
      bitmap_ior_into (gimple_addressable_vars (cfun), addr_taken);

      /* If STMT is an escape point, all the addresses taken by it are
         call-clobbered.  */
      if (stmt_escape_type != NO_ESCAPE)
        {
          unsigned i;
          bitmap_iterator bi;

          EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
            {
              tree rvar = referenced_var (i);
              if (!unmodifiable_var_p (rvar))
                mark_call_clobbered (rvar, stmt_escape_type);
            }
        }
    }

  /* Process each operand use.  For pointers, determine whether they
     are dereferenced by the statement, or whether their value
     escapes, etc.  */
  FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
    {
      tree op, var;
      var_ann_t v_ann;
      struct ptr_info_def *pi;
      unsigned num_uses, num_loads, num_stores;

      op = USE_FROM_PTR (use_p);

      /* If STMT is a PHI node, OP may be an ADDR_EXPR.  If so, add it
         to the set of addressable variables.  */
      if (TREE_CODE (op) == ADDR_EXPR)
        {
          bitmap addressable_vars = gimple_addressable_vars (cfun);

          gcc_assert (TREE_CODE (stmt) == PHI_NODE);
          gcc_assert (addressable_vars);

          /* PHI nodes don't have annotations for pinning the set
             of addresses taken, so we collect them here.

             FIXME, should we allow PHI nodes to have annotations
             so that they can be treated like regular statements?
             Currently, they are treated as second-class
             statements.  */
          add_to_addressable_set (TREE_OPERAND (op, 0), &addressable_vars);
          continue;
        }

      /* Ignore constants (they may occur in PHI node arguments).  */
      if (TREE_CODE (op) != SSA_NAME)
        continue;

      var = SSA_NAME_VAR (op);
      v_ann = var_ann (var);

      /* The base variable of an SSA name must be a GIMPLE register, and thus
         it cannot be aliased.  */
      gcc_assert (!may_be_aliased (var));

      /* We are only interested in pointers.  */
      if (!POINTER_TYPE_P (TREE_TYPE (op)))
        continue;

      pi = get_ptr_info (op);

      /* Add OP to AI->PROCESSED_PTRS, if it's not there already.  */
      if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
        {
          SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
          VEC_safe_push (tree, heap, ai->processed_ptrs, op);
        }

      /* If STMT is a PHI node, then it will not have pointer
         dereferences and it will not be an escape point.  */
      if (TREE_CODE (stmt) == PHI_NODE)
        continue;

      /* Determine whether OP is a dereferenced pointer, and if STMT
         is an escape point, whether OP escapes.  */
      count_uses_and_derefs (op, stmt, &num_uses, &num_loads, &num_stores);

      /* Handle a corner case involving address expressions of the
         form '&PTR->FLD'.  The problem with these expressions is that
         they do not represent a dereference of PTR.  However, if some
         other transformation propagates them into an INDIRECT_REF
         expression, we end up with '*(&PTR->FLD)' which is folded
         into 'PTR->FLD'.

         So, if the original code had no other dereferences of PTR,
         the aliaser will not create memory tags for it, and when
         &PTR->FLD gets propagated to INDIRECT_REF expressions, the
         memory operations will receive no VDEF/VUSE operands.

         One solution would be to have count_uses_and_derefs consider
         &PTR->FLD a dereference of PTR.  But that is wrong, since it
         is not really a dereference but an offset calculation.

         What we do here is to recognize these special ADDR_EXPR
         nodes.  Since these expressions are never GIMPLE values (they
         are not GIMPLE invariants), they can only appear on the RHS
         of an assignment and their base address is always an
         INDIRECT_REF expression.  */
      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
          && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR
          && !is_gimple_val (GIMPLE_STMT_OPERAND (stmt, 1)))
        {
          /* If the RHS if of the form &PTR->FLD and PTR == OP, then
             this represents a potential dereference of PTR.  */
          tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
          tree base = get_base_address (TREE_OPERAND (rhs, 0));
          if (TREE_CODE (base) == INDIRECT_REF
              && TREE_OPERAND (base, 0) == op)
            num_loads++;
        }

      if (num_loads + num_stores > 0)
        {
          /* Mark OP as dereferenced.  In a subsequent pass,
             dereferenced pointers that point to a set of
             variables will be assigned a name tag to alias
             all the variables OP points to.  */
          pi->is_dereferenced = 1;

          /* If this is a store operation, mark OP as being
             dereferenced to store, otherwise mark it as being
             dereferenced to load.  */
          if (num_stores > 0)
            pointer_set_insert (ai->dereferenced_ptrs_store, var);
          else
            pointer_set_insert (ai->dereferenced_ptrs_load, var);

          /* Update the frequency estimate for all the dereferences of
             pointer OP.  */
          update_mem_sym_stats_from_stmt (op, stmt, num_loads, num_stores);

          /* Indicate that STMT contains pointer dereferences.  */
          stmt_dereferences_ptr_p = true;
        }

      if (stmt_escape_type != NO_ESCAPE && num_loads + num_stores < num_uses)
        {
          /* If STMT is an escape point and STMT contains at
             least one direct use of OP, then the value of OP
             escapes and so the pointed-to variables need to
             be marked call-clobbered.  */
          pi->value_escapes_p = 1;
          pi->escape_mask |= stmt_escape_type;

          /* If the statement makes a function call, assume
             that pointer OP will be dereferenced in a store
             operation inside the called function.  */
          if (get_call_expr_in (stmt)
              || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
            {
              pointer_set_insert (ai->dereferenced_ptrs_store, var);
              pi->is_dereferenced = 1;
            }
        }
    }

  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  /* Mark stored variables in STMT as being written to and update the
     memory reference stats for all memory symbols referenced by STMT.  */
  if (stmt_references_memory_p (stmt))
    {
      unsigned i;
      bitmap_iterator bi;

      mem_ref_stats->num_mem_stmts++;

      /* Notice that we only update memory reference stats for symbols
         loaded and stored by the statement if the statement does not
         contain pointer dereferences and it is not a call/asm site.
         This is to avoid double accounting problems when creating
         memory partitions.  After computing points-to information,
         pointer dereference statistics are used to update the
         reference stats of the pointed-to variables, so here we
         should only update direct references to symbols.

         Indirect references are not updated here for two reasons: (1)
         The first time we compute alias information, the sets
         LOADED/STORED are empty for pointer dereferences, (2) After
         partitioning, LOADED/STORED may have references to
         partitions, not the original pointed-to variables.  So, if we
         always counted LOADED/STORED here and during partitioning, we
         would count many symbols more than once.

         This does cause some imprecision when a statement has a
         combination of direct symbol references and pointer
         dereferences (e.g., MEMORY_VAR = *PTR) or if a call site has
         memory symbols in its argument list, but these cases do not
         occur so frequently as to constitute a serious problem.  */
      if (STORED_SYMS (stmt))
        EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
          {
            tree sym = referenced_var (i);
            pointer_set_insert (ai->written_vars, sym);
            if (!stmt_dereferences_ptr_p
                && stmt_escape_type != ESCAPE_TO_CALL
                && stmt_escape_type != ESCAPE_TO_PURE_CONST
                && stmt_escape_type != ESCAPE_TO_ASM)
              update_mem_sym_stats_from_stmt (sym, stmt, 0, 1);
          }

      if (!stmt_dereferences_ptr_p
          && LOADED_SYMS (stmt)
          && stmt_escape_type != ESCAPE_TO_CALL
          && stmt_escape_type != ESCAPE_TO_PURE_CONST
          && stmt_escape_type != ESCAPE_TO_ASM)
        EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt), 0, i, bi)
          update_mem_sym_stats_from_stmt (referenced_var (i), stmt, 1, 0);
    }
}
/* Handle pointer arithmetic EXPR when creating aliasing constraints.
   Expressions of the type PTR + CST can be handled in two ways:

   1- If the constraint for PTR is ADDRESSOF for a non-structure
      variable, then we can use it directly because adding or
      subtracting a constant may not alter the original ADDRESSOF
      constraint (i.e., pointer arithmetic may not legally go outside
      an object's boundaries).

   2- If the constraint for PTR is ADDRESSOF for a structure variable,
      then if CST is a compile-time constant that can be used as an
      offset, we can determine which sub-variable will be pointed-to
      by the expression.

   Return true if the expression is handled.  For any other kind of
   expression, return false so that each operand can be added as a
   separate constraint by the caller.  */

static bool
handle_ptr_arith (VEC (ce_s, heap) *lhsc, tree expr)
{
  tree op0, op1;
  struct constraint_expr *c, *c2;
  unsigned int i = 0;
  unsigned int j = 0;
  VEC (ce_s, heap) *temp = NULL;
  unsigned int rhsoffset = 0;
  bool unknown_addend = false;

  if (TREE_CODE (expr) != POINTER_PLUS_EXPR)
    return false;

  op0 = TREE_OPERAND (expr, 0);
  op1 = TREE_OPERAND (expr, 1);
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (op0)));

  get_constraint_for (op0, &temp);

  /* Handle non-constants by making constraints from integer.  */
  if (TREE_CODE (op1) == INTEGER_CST)
    rhsoffset = TREE_INT_CST_LOW (op1) * BITS_PER_UNIT;
  else
    unknown_addend = true;

  for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
    for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
      {
        if (c2->type == ADDRESSOF && rhsoffset != 0)
          {
            varinfo_t temp = get_varinfo (c2->var);

            /* An access one after the end of an array is valid,
               so simply punt on accesses we cannot resolve.  */
            temp = first_vi_for_offset (temp, rhsoffset);
            if (temp == NULL)
              continue;
            c2->var = temp->id;
            c2->offset = 0;
          }
        else if (unknown_addend)
          {
            /* Can't handle *a + integer where integer is unknown.  */
            if (c2->type != SCALAR)
              {
                struct constraint_expr intc;
                intc.var = integer_id;
                intc.offset = 0;
                intc.type = SCALAR;
                process_constraint (new_constraint (*c, intc));
              }
            else
              {
                /* We known it lives somewhere within c2->var.  */
                varinfo_t tmp = get_varinfo (c2->var);
                for (; tmp; tmp = tmp->next)
                  {
                    struct constraint_expr tmpc = *c2;
                    tmpc.var = tmp->id;
                    tmpc.offset = 0;
                    process_constraint (new_constraint (*c, tmpc));
                  }
              }
          }
        else
          c2->offset = rhsoffset;
        process_constraint (new_constraint (*c, *c2));
      }

  VEC_free (ce_s, heap, temp);

  return true;
}
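/* Rough example (hypothetical code): for "q = p + 4" where the
   constraint for p is &s and s is a structure with fields at byte
   offsets 0 and 4, rhsoffset becomes 4 * BITS_PER_UNIT and the
   ADDRESSOF constraint is redirected to the field of s at bit offset
   32, so q ends up pointing to that field rather than to all of s.  */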
/* For non-IPA mode, generate constraints necessary for a call on the
   RHS.  */

static void
handle_rhs_call (tree rhs)
{
  tree arg;
  call_expr_arg_iterator iter;
  struct constraint_expr rhsc;

  rhsc.var = anything_id;
  rhsc.offset = 0;
  rhsc.type = ADDRESSOF;

  FOR_EACH_CALL_EXPR_ARG (arg, iter, rhs)
    {
      VEC(ce_s, heap) *lhsc = NULL;

      /* Find those pointers being passed, and make sure they end up
         pointing to anything.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg)))
        {
          unsigned int j;
          struct constraint_expr *lhsp;

          get_constraint_for (arg, &lhsc);

          for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
            process_constraint_1 (new_constraint (*lhsp, rhsc), true);
          VEC_free (ce_s, heap, lhsc);
        }
    }
}
/* For non-IPA mode, generate constraints necessary for a call
   that returns a pointer and assigns it to LHS.  This simply makes
   the LHS point to anything.  */

static void
handle_lhs_call (tree lhs)
{
  VEC(ce_s, heap) *lhsc = NULL;
  struct constraint_expr rhsc;
  unsigned int j;
  struct constraint_expr *lhsp;

  rhsc.var = anything_id;
  rhsc.offset = 0;
  rhsc.type = ADDRESSOF;
  get_constraint_for (lhs, &lhsc);
  for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
    process_constraint_1 (new_constraint (*lhsp, rhsc), true);
  VEC_free (ce_s, heap, lhsc);
}
/* Walk statement T setting up aliasing constraints according to the
   references found in T.  This function is the main part of the
   constraint builder.  AI points to auxiliary alias information used
   when building alias sets and computing alias grouping heuristics.  */

static void
find_func_aliases (tree origt)
{
  tree t = origt;
  VEC(ce_s, heap) *lhsc = NULL;
  VEC(ce_s, heap) *rhsc = NULL;
  struct constraint_expr *c;

  if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
    t = TREE_OPERAND (t, 0);

  /* Now build constraints expressions.  */
  if (TREE_CODE (t) == PHI_NODE)
    {
      gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))));

      /* Only care about pointers and structures containing
         pointers.  */
      if (could_have_pointers (PHI_RESULT (t)))
        {
          int i;
          unsigned int j;

          /* For a phi node, assign all the arguments to
             the result.  */
          get_constraint_for (PHI_RESULT (t), &lhsc);
          for (i = 0; i < PHI_NUM_ARGS (t); i++)
            {
              tree rhstype;
              tree strippedrhs = PHI_ARG_DEF (t, i);

              STRIP_NOPS (strippedrhs);
              rhstype = TREE_TYPE (strippedrhs);
              get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);

              for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
                {
                  struct constraint_expr *c2;
                  while (VEC_length (ce_s, rhsc) > 0)
                    {
                      c2 = VEC_last (ce_s, rhsc);
                      process_constraint (new_constraint (*c, *c2));
                      VEC_pop (ce_s, rhsc);
                    }
                }
            }
        }
    }
  /* In IPA mode, we need to generate constraints to pass call
     arguments through their calls.   There are two cases, either a
     GIMPLE_MODIFY_STMT when we are returning a value, or just a plain
     CALL_EXPR when we are not.

     In non-ipa mode, we need to generate constraints for each
     pointer passed by address.  */
  else if (((TREE_CODE (t) == GIMPLE_MODIFY_STMT
             && TREE_CODE (GIMPLE_STMT_OPERAND (t, 1)) == CALL_EXPR
             && !(call_expr_flags (GIMPLE_STMT_OPERAND (t, 1))
                  & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))
            || (TREE_CODE (t) == CALL_EXPR
                && !(call_expr_flags (t)
                     & (ECF_MALLOC | ECF_MAY_BE_ALLOCA)))))
    {
      if (!in_ipa_mode)
        {
          if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
            {
              handle_rhs_call (GIMPLE_STMT_OPERAND (t, 1));
              if (POINTER_TYPE_P (TREE_TYPE (GIMPLE_STMT_OPERAND (t, 1))))
                handle_lhs_call (GIMPLE_STMT_OPERAND (t, 0));
            }
          else
            handle_rhs_call (t);
        }
      else
        {
          tree lhsop;
          tree rhsop;
          tree arg;
          call_expr_arg_iterator iter;
          varinfo_t fi;
          int i = 1;
          tree decl;

          if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
            {
              lhsop = GIMPLE_STMT_OPERAND (t, 0);
              rhsop = GIMPLE_STMT_OPERAND (t, 1);
            }
          else
            {
              lhsop = NULL;
              rhsop = t;
            }
          decl = get_callee_fndecl (rhsop);

          /* If we can directly resolve the function being called, do so.
             Otherwise, it must be some sort of indirect expression that
             we should still be able to handle.  */
          if (decl)
            {
              fi = get_vi_for_tree (decl);
            }
          else
            {
              decl = CALL_EXPR_FN (rhsop);
              fi = get_vi_for_tree (decl);
            }

          /* Assign all the passed arguments to the appropriate incoming
             parameters of the function.  */
          FOR_EACH_CALL_EXPR_ARG (arg, iter, rhsop)
            {
              struct constraint_expr lhs;
              struct constraint_expr *rhsp;

              get_constraint_for (arg, &rhsc);
              if (TREE_CODE (decl) != FUNCTION_DECL)
                {
                  lhs.type = DEREF;
                  lhs.var = fi->id;
                  lhs.offset = i;
                }
              else
                {
                  lhs.type = SCALAR;
                  lhs.var = first_vi_for_offset (fi, i)->id;
                  lhs.offset = 0;
                }
              while (VEC_length (ce_s, rhsc) != 0)
                {
                  rhsp = VEC_last (ce_s, rhsc);
                  process_constraint (new_constraint (lhs, *rhsp));
                  VEC_pop (ce_s, rhsc);
                }
              i++;
            }

          /* If we are returning a value, assign it to the result.  */
          if (lhsop)
            {
              struct constraint_expr rhs;
              struct constraint_expr *lhsp;
              unsigned int j = 0;

              get_constraint_for (lhsop, &lhsc);
              if (TREE_CODE (decl) != FUNCTION_DECL)
                {
                  rhs.var = fi->id;
                  rhs.type = DEREF;
                  rhs.offset = i;
                }
              else
                {
                  rhs.var = first_vi_for_offset (fi, i)->id;
                  rhs.type = SCALAR;
                  rhs.offset = 0;
                }
              for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
                process_constraint (new_constraint (*lhsp, rhs));
            }
        }
    }
  /* Otherwise, just a regular assignment statement.  */
  else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
    {
      tree lhsop = GIMPLE_STMT_OPERAND (t, 0);
      tree rhsop = GIMPLE_STMT_OPERAND (t, 1);
      int i;

      if ((AGGREGATE_TYPE_P (TREE_TYPE (lhsop))
           || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE)
          && (AGGREGATE_TYPE_P (TREE_TYPE (rhsop))
              || TREE_CODE (TREE_TYPE (lhsop)) == COMPLEX_TYPE))
        {
          do_structure_copy (lhsop, rhsop);
        }
      else
        {
          /* Only care about operations with pointers, structures
             containing pointers, dereferences, and call expressions.  */
          if (could_have_pointers (lhsop)
              || TREE_CODE (rhsop) == CALL_EXPR)
            {
              get_constraint_for (lhsop, &lhsc);
              switch (TREE_CODE_CLASS (TREE_CODE (rhsop)))
                {
                  /* RHS that consist of unary operations,
                     exceptional types, or bare decls/constants, get
                     handled directly by get_constraint_for.  */
                  case tcc_reference:
                  case tcc_declaration:
                  case tcc_constant:
                  case tcc_exceptional:
                  case tcc_expression:
                  case tcc_vl_exp:
                  case tcc_unary:
                    {
                      unsigned int j;

                      get_constraint_for (rhsop, &rhsc);
                      for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
                        {
                          struct constraint_expr *c2;
                          unsigned int k;

                          for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
                            process_constraint (new_constraint (*c, *c2));
                        }
                    }
                    break;

                  case tcc_binary:
                    {
                      /* For pointer arithmetic of the form
                         PTR + CST, we can simply use PTR's
                         constraint because pointer arithmetic is
                         not allowed to go out of bounds.  */
                      if (handle_ptr_arith (lhsc, rhsop))
                        break;
                    }
                    /* FALLTHRU  */

                  /* Otherwise, walk each operand.  Notice that we
                     can't use the operand interface because we need
                     to process expressions other than simple operands
                     (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR).  */
                  default:
                    for (i = 0; i < TREE_OPERAND_LENGTH (rhsop); i++)
                      {
                        tree op = TREE_OPERAND (rhsop, i);
                        unsigned int j;

                        gcc_assert (VEC_length (ce_s, rhsc) == 0);
                        get_constraint_for (op, &rhsc);
                        for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
                          {
                            struct constraint_expr *c2;
                            while (VEC_length (ce_s, rhsc) > 0)
                              {
                                c2 = VEC_last (ce_s, rhsc);
                                process_constraint (new_constraint (*c, *c2));
                                VEC_pop (ce_s, rhsc);
                              }
                          }
                      }
                }
            }
        }
    }
  else if (TREE_CODE (t) == CHANGE_DYNAMIC_TYPE_EXPR)
    {
      unsigned int j;

      get_constraint_for (CHANGE_DYNAMIC_TYPE_LOCATION (t), &lhsc);
      for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
        get_varinfo (c->var)->no_tbaa_pruning = true;
    }

  /* After promoting variables and computing aliasing we will
     need to re-scan most statements.  FIXME: Try to minimize the
     number of statements re-scanned.  It's not really necessary to
     re-scan *all* statements.  */
  mark_stmt_modified (origt);
  VEC_free (ce_s, heap, rhsc);
  VEC_free (ce_s, heap, lhsc);
}
/* Find the first varinfo in the same variable as START that overlaps with
   OFFSET.
   Effectively, walk the chain of fields for the variable START to find the
   first field that overlaps with OFFSET.
   Return NULL if we can't find one.  */

static varinfo_t
first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
{
  varinfo_t curr = start;
  while (curr)
    {
      /* We may not find a variable in the field list with the actual
         offset when we have glommed a structure to a variable.
         In that case, however, offset should still be within the size
         of the variable.  */
      if (offset >= curr->offset && offset < (curr->offset + curr->size))
        return curr;
      curr = curr->next;
    }
  return NULL;
}
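/* Rough example: for a variable whose field chain has entries at
   offset 0 with size 32 and offset 32 with size 32 (bits), a query
   with OFFSET == 40 returns the second entry, since 40 falls inside
   [32, 64).  */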
/* Insert the varinfo FIELD into the field list for BASE, at the front
   of the list.  */

static void
insert_into_field_list (varinfo_t base, varinfo_t field)
{
  varinfo_t prev = base;
  varinfo_t curr = base->next;

  field->next = curr;
  prev->next = field;
}

/* Insert the varinfo FIELD into the field list for BASE, ordered by
   offset.  */

static void
insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
{
  varinfo_t prev = base;
  varinfo_t curr = base->next;

  if (curr == NULL)
    {
      prev->next = field;
      field->next = NULL;
    }
  else
    {
      while (curr)
        {
          if (field->offset <= curr->offset)
            break;
          prev = curr;
          curr = curr->next;
        }
      field->next = prev->next;
      prev->next = field;
    }
}
/* qsort comparison function for two fieldoff's PA and PB */

static int
fieldoff_compare (const void *pa, const void *pb)
{
  const fieldoff_s *foa = (const fieldoff_s *)pa;
  const fieldoff_s *fob = (const fieldoff_s *)pb;
  HOST_WIDE_INT foasize, fobsize;

  if (foa->offset != fob->offset)
    return foa->offset - fob->offset;

  foasize = TREE_INT_CST_LOW (foa->size);
  fobsize = TREE_INT_CST_LOW (fob->size);
  return foasize - fobsize;
}

/* Sort a fieldstack according to the field offset and sizes.  */
static void
sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
{
  qsort (VEC_address (fieldoff_s, fieldstack),
         VEC_length (fieldoff_s, fieldstack),
         sizeof (fieldoff_s),
         fieldoff_compare);
}
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all the fields
   of TYPE onto fieldstack, recording their offsets along the way.
   OFFSET is used to keep track of the offset in this entire structure, rather
   than just the immediately containing structure.  Returns the number
   of fields pushed.
   HAS_UNION is set to true if we find a union type as a field of
   TYPE.  ADDRESSABLE_TYPE is the type of the outermost object that could have
   its address taken.  */

static int
push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
                             HOST_WIDE_INT offset, bool *has_union,
                             tree addressable_type)
{
  tree field;
  int count = 0;

  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      fieldoff_s *real_part, *img_part;
      real_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
      real_part->type = TREE_TYPE (type);
      real_part->size = TYPE_SIZE (TREE_TYPE (type));
      real_part->offset = offset;
      real_part->decl = NULL_TREE;
      real_part->alias_set = -1;

      img_part = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
      img_part->type = TREE_TYPE (type);
      img_part->size = TYPE_SIZE (TREE_TYPE (type));
      img_part->offset = offset + TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (type)));
      img_part->decl = NULL_TREE;
      img_part->alias_set = -1;

      return 2;
    }

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree sz = TYPE_SIZE (type);
      tree elsz = TYPE_SIZE (TREE_TYPE (type));
      HOST_WIDE_INT nr;
      int i;

      if (! sz
          || ! host_integerp (sz, 1)
          || TREE_INT_CST_LOW (sz) == 0
          || ! elsz
          || ! host_integerp (elsz, 1)
          || TREE_INT_CST_LOW (elsz) == 0)
        return 0;

      nr = TREE_INT_CST_LOW (sz) / TREE_INT_CST_LOW (elsz);
      if (nr > SALIAS_MAX_ARRAY_ELEMENTS)
        return 0;

      for (i = 0; i < nr; ++i)
        {
          bool push = false;
          int pushed = 0;

          if (has_union
              && (TREE_CODE (TREE_TYPE (type)) == QUAL_UNION_TYPE
                  || TREE_CODE (TREE_TYPE (type)) == UNION_TYPE))
            *has_union = true;

          if (!AGGREGATE_TYPE_P (TREE_TYPE (type))) /* var_can_have_subvars */
            push = true;
          else if (!(pushed = push_fields_onto_fieldstack
                     (TREE_TYPE (type), fieldstack,
                      offset + i * TREE_INT_CST_LOW (elsz), has_union,
                      (TYPE_NONALIASED_COMPONENT (type)
                       ? addressable_type
                       : TREE_TYPE (type)))))
            /* Empty structures may have actual size, like in C++.  So
               see if we didn't push any subfields and the size is
               nonzero, push the field onto the stack */
            push = true;

          if (push)
            {
              fieldoff_s *pair;

              pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
              pair->type = TREE_TYPE (type);
              pair->size = elsz;
              pair->decl = NULL_TREE;
              pair->offset = offset + i * TREE_INT_CST_LOW (elsz);
              if (TYPE_NONALIASED_COMPONENT (type))
                pair->alias_set = get_alias_set (addressable_type);
              else
                pair->alias_set = -1;
              /* If the base offset is positive, this field belongs to
                 a structure nested inside the base structure. */
              if (offset > 0)
                pair->in_nested_struct = true;
              count++;
            }
          else
            count += pushed;
        }
    }
  else
    {
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
        if (TREE_CODE (field) == FIELD_DECL)
          {
            bool push = false;
            int pushed = 0;

            if (has_union
                && (TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
                    || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE))
              *has_union = true;

            if (!var_can_have_subvars (field))
              push = true;
            else if (!(pushed = push_fields_onto_fieldstack
                       (TREE_TYPE (field), fieldstack,
                        offset + bitpos_of_field (field), has_union,
                        (DECL_NONADDRESSABLE_P (field)
                         ? addressable_type
                         : TREE_TYPE (field))))
                     && DECL_SIZE (field)
                     && !integer_zerop (DECL_SIZE (field)))
              /* Empty structures may have actual size, like in C++.  So
                 see if we didn't push any subfields and the size is
                 nonzero, push the field onto the stack */
              push = true;

            if (push)
              {
                fieldoff_s *pair;

                pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
                pair->type = TREE_TYPE (field);
                pair->size = DECL_SIZE (field);
                pair->decl = field;
                pair->offset = offset + bitpos_of_field (field);
                if (DECL_NONADDRESSABLE_P (field))
                  pair->alias_set = get_alias_set (addressable_type);
                else
                  pair->alias_set = -1;
                /* If the base offset is positive, this field belongs to
                   a structure nested inside the base structure. */
                if (offset > 0)
                  pair->in_nested_struct = true;
                count++;
              }
            else
              count += pushed;
          }
    }
  return count;
}
/* Create a constraint from ANYTHING variable to VI.  */
static void
make_constraint_from_anything (varinfo_t vi)
{
  struct constraint_expr lhs, rhs;

  lhs.var = vi->id;
  lhs.offset = 0;
  lhs.type = SCALAR;

  rhs.var = anything_id;
  rhs.offset = 0;
  rhs.type = ADDRESSOF;
  process_constraint (new_constraint (lhs, rhs));
}
/* Count the number of arguments DECL has, and set IS_VARARGS to true
   if it is a varargs function.  */

static unsigned int
count_num_arguments (tree decl, bool *is_varargs)
{
  unsigned int i = 0;
  tree t;

  for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
       t;
       t = TREE_CHAIN (t))
    {
      if (TREE_VALUE (t) == void_type_node)
        break;
      i++;
    }

  if (!t)
    *is_varargs = true;
  return i;
}
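/* Rough examples (hypothetical declarations): for
   "int f (int *, double)" this returns 2 and leaves *IS_VARARGS
   untouched, because the argument type list ends in void_type_node;
   for "int g (int, ...)" the list does not end in void_type_node, so
   *IS_VARARGS is set to true.  */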
/* Create the function info node for DECL, using NAME, and return the index
   of the variable we've created for the function.  */

static unsigned int
create_function_info_for (tree decl, const char *name)
{
  unsigned int index = VEC_length (varinfo_t, varmap);
  varinfo_t vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;

  /* Create the variable info.  */

  vi = new_var_info (decl, index, name);
  vi->decl = decl;
  vi->offset = 0;
  vi->has_union = 0;
  vi->size = 1;
  vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
  insert_vi_for_tree (vi->decl, vi);
  VEC_safe_push (varinfo_t, heap, varmap, vi);

  stats.total_vars++;

  /* If it's varargs, we don't know how many arguments it has, so we
     can't do much.  */
  if (is_varargs)
    {
      vi->fullsize = ~0;
      vi->size = ~0;
      vi->is_unknown_size_var = true;
      return index;
    }

  arg = DECL_ARGUMENTS (decl);

  /* Set up variables for each argument.  */
  for (i = 1; i < vi->fullsize; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      unsigned int newindex;
      tree argdecl = decl;

      if (arg)
        argdecl = arg;

      newindex = VEC_length (varinfo_t, varmap);
      asprintf (&tempname, "%s.arg%d", name, i-1);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newindex, newname);
      argvi->decl = argdecl;
      VEC_safe_push (varinfo_t, heap, varmap, argvi);
      argvi->offset = i;
      argvi->size = 1;
      argvi->fullsize = vi->fullsize;
      argvi->has_union = false;
      insert_into_field_list_sorted (vi, argvi);
      stats.total_vars ++;
      if (arg)
        {
          insert_vi_for_tree (arg, argvi);
          arg = TREE_CHAIN (arg);
        }
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      unsigned int newindex;
      tree resultdecl = decl;

      vi->fullsize ++;

      if (DECL_RESULT (decl))
        resultdecl = DECL_RESULT (decl);

      newindex = VEC_length (varinfo_t, varmap);
      asprintf (&tempname, "%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newindex, newname);
      resultvi->decl = resultdecl;
      VEC_safe_push (varinfo_t, heap, varmap, resultvi);
      resultvi->offset = i;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->has_union = false;
      insert_into_field_list_sorted (vi, resultvi);
      stats.total_vars ++;
      if (DECL_RESULT (decl))
        insert_vi_for_tree (DECL_RESULT (decl), resultvi);
    }
  return index;
}
/* Return true if FIELDSTACK contains fields that overlap.
   FIELDSTACK is assumed to be sorted by offset.  */

static bool
check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
{
  fieldoff_s *fo = NULL;
  unsigned int i;
  HOST_WIDE_INT lastoffset = -1;

  for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
    {
      if (fo->offset == lastoffset)
        return true;
      lastoffset = fo->offset;
    }
  return false;
}
/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
   This will also create any varinfo structures necessary for fields
   of DECL.  */

static unsigned int
create_variable_info_for (tree decl, const char *name)
{
  unsigned int index = VEC_length (varinfo_t, varmap);
  varinfo_t vi;
  tree decltype = TREE_TYPE (decl);
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decltype);
  bool notokay = false;
  bool hasunion;
  bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
  VEC (fieldoff_s,heap) *fieldstack = NULL;

  if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
    return create_function_info_for (decl, name);

  hasunion = TREE_CODE (decltype) == UNION_TYPE
	     || TREE_CODE (decltype) == QUAL_UNION_TYPE;
  if (var_can_have_subvars (decl) && use_field_sensitive && !hasunion)
    {
      push_fields_onto_fieldstack (decltype, &fieldstack, 0, &hasunion,
				   decltype);
      if (hasunion)
	{
	  VEC_free (fieldoff_s, heap, fieldstack);
	  notokay = true;
	}
    }

  /* If the variable doesn't have subvars, we may end up needing to
     sort the field list and create fake variables for all the
     fields.  */
  vi = new_var_info (decl, index, name);
  vi->decl = decl;
  vi->offset = 0;
  vi->has_union = hasunion;
  if (!declsize
      || TREE_CODE (declsize) != INTEGER_CST
      || TREE_CODE (decltype) == UNION_TYPE
      || TREE_CODE (decltype) == QUAL_UNION_TYPE)
    {
      vi->is_unknown_size_var = true;
      vi->fullsize = ~0;
      vi->size = ~0;
    }
  else
    {
      vi->fullsize = TREE_INT_CST_LOW (declsize);
      vi->size = vi->fullsize;
    }

  insert_vi_for_tree (vi->decl, vi);
  VEC_safe_push (varinfo_t, heap, varmap, vi);
  if (is_global && (!flag_whole_program || !in_ipa_mode))
    make_constraint_from_anything (vi);

  stats.total_vars++;
  if (use_field_sensitive
      && !notokay
      && !vi->is_unknown_size_var
      && var_can_have_subvars (decl)
      && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
    {
      unsigned int newindex = VEC_length (varinfo_t, varmap);
      fieldoff_s *fo = NULL;
      unsigned int i;

      for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
	{
	  if (!fo->size
	      || TREE_CODE (fo->size) != INTEGER_CST
	      || fo->offset < 0)
	    {
	      notokay = true;
	      break;
	    }
	}

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up with
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}

      if (VEC_length (fieldoff_s, fieldstack) != 0)
	fo = VEC_index (fieldoff_s, fieldstack, 0);

      if (fo == NULL || notokay)
	{
	  vi->is_unknown_size_var = 1;
	  vi->fullsize = ~0;
	  vi->size = ~0;
	  VEC_free (fieldoff_s, heap, fieldstack);
	  return index;
	}

      vi->size = TREE_INT_CST_LOW (fo->size);
      vi->offset = fo->offset;
      for (i = VEC_length (fieldoff_s, fieldstack) - 1;
	   i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
	   i--)
	{
	  varinfo_t newvi;
	  const char *newname = "NULL";
	  char *tempname;

	  newindex = VEC_length (varinfo_t, varmap);
	  if (dump_file)
	    {
	      if (fo->decl)
		asprintf (&tempname, "%s.%s",
			  vi->name, alias_get_name (fo->decl));
	      else
		asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC,
			  vi->name, fo->offset);
	      newname = ggc_strdup (tempname);
	      free (tempname);
	    }
	  newvi = new_var_info (decl, newindex, newname);
	  newvi->offset = fo->offset;
	  newvi->size = TREE_INT_CST_LOW (fo->size);
	  newvi->fullsize = vi->fullsize;
	  newvi->in_nested_struct = fo->in_nested_struct;
	  insert_into_field_list (vi, newvi);
	  VEC_safe_push (varinfo_t, heap, varmap, newvi);
	  if (is_global && (!flag_whole_program || !in_ipa_mode))
	    make_constraint_from_anything (newvi);

	  stats.total_vars++;
	}
      VEC_free (fieldoff_s, heap, fieldstack);
    }

  return index;
}
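/* Illustrative note (example assumed, not generated by the code above):
   when dumping is enabled, the per-field varinfos created by the loop
   above are given names derived from the parent variable, e.g. "s.x"
   via the "%s.%s" format when the field decl is available, and a name
   like "s.32" via the "%s.OFFSET" format otherwise; each field varinfo
   carries its own offset and size but shares the parent's fullsize.  */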
/* Print out the points-to solution for VAR to FILE.  */

void
dump_solution_for_var (FILE *file, unsigned int var)
{
  varinfo_t vi = get_varinfo (var);
  unsigned int i;
  bitmap_iterator bi;

  if (find (var) != var)
    {
      varinfo_t vipt = get_varinfo (find (var));
      fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
    }
  else
    {
      fprintf (file, "%s = { ", vi->name);
      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
	{
	  fprintf (file, "%s ", get_varinfo (i)->name);
	}
      fprintf (file, "}");
      if (vi->no_tbaa_pruning)
	fprintf (file, " no-tbaa-pruning");
      fprintf (file, "\n");
    }
}
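/* For reference, the dump produced by this function looks roughly like
   (example assumed):

     p.0_1 = { ANYTHING }
     q_2 = same as p.0_1
     x = { y z } no-tbaa-pruning

   i.e. one line per constraint variable, either naming its representative
   or listing the names of the variables in its solution set.  */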
/* Print the points-to solution for VAR to stdout.  */

void
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stdout, var);
}
/* Create varinfo structures for all of the variables in the
   function for intraprocedural mode.  */

static void
intra_create_variable_infos (void)
{
  tree t;
  struct constraint_expr lhs, rhs;

  /* For each incoming pointer argument arg, create the constraint ARG
     = ANYTHING or a dummy variable if flag_argument_noalias is set.  */
  for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
    {
      varinfo_t p;

      if (!could_have_pointers (t))
	continue;

      /* If flag_argument_noalias is set, then function pointer
	 arguments are guaranteed not to point to each other.  In that
	 case, create an artificial variable PARM_NOALIAS and the
	 constraint ARG = &PARM_NOALIAS.  */
      if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
	{
	  varinfo_t vi;
	  var_ann_t ann;
	  tree heapvar = heapvar_lookup (t);

	  lhs.offset = 0;
	  lhs.type = SCALAR;
	  lhs.var = get_vi_for_tree (t)->id;

	  if (heapvar == NULL_TREE)
	    {
	      heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
					    "PARM_NOALIAS");
	      DECL_EXTERNAL (heapvar) = 1;
	      if (gimple_referenced_vars (cfun))
		add_referenced_var (heapvar);
	      heapvar_insert (t, heapvar);
	    }

	  ann = get_var_ann (heapvar);
	  if (flag_argument_noalias == 1)
	    ann->noalias_state = NO_ALIAS;
	  else if (flag_argument_noalias == 2)
	    ann->noalias_state = NO_ALIAS_GLOBAL;
	  else if (flag_argument_noalias == 3)
	    ann->noalias_state = NO_ALIAS_ANYTHING;

	  vi = get_vi_for_tree (heapvar);
	  vi->is_artificial_var = 1;
	  vi->is_heap_var = 1;
	  rhs.var = vi->id;
	  rhs.type = ADDRESSOF;
	  rhs.offset = 0;
	  for (p = get_varinfo (lhs.var); p; p = p->next)
	    {
	      struct constraint_expr temp = lhs;
	      temp.var = p->id;
	      process_constraint (new_constraint (temp, rhs));
	    }
	}
      else
	{
	  varinfo_t arg_vi = get_vi_for_tree (t);

	  for (p = arg_vi; p; p = p->next)
	    make_constraint_from_anything (p);
	}
    }
}
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  bitmap pt_vars;
  hashval_t hashcode;
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;

static htab_t shared_bitmap_table;
/* Hash function for a shared_bitmap_info_t.  */

static hashval_t
shared_bitmap_hash (const void *p)
{
  const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
  return bi->hashcode;
}
/* Equality function for two shared_bitmap_info_t's.  */

static int
shared_bitmap_eq (const void *p1, const void *p2)
{
  const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
  const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
  return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
}
/* Lookup a bitmap in the shared bitmap hashtable, and return an already
   existing instance if there is one, NULL otherwise.  */

static bitmap
shared_bitmap_lookup (bitmap pt_vars)
{
  void **slot;
  struct shared_bitmap_info sbi;

  sbi.pt_vars = pt_vars;
  sbi.hashcode = bitmap_hash (pt_vars);

  slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
				   sbi.hashcode, NO_INSERT);
  if (!slot)
    return NULL;
  else
    return ((shared_bitmap_info_t) *slot)->pt_vars;
}
/* Add a bitmap to the shared bitmap hashtable.  */

static void
shared_bitmap_add (bitmap pt_vars)
{
  void **slot;
  shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);

  sbi->pt_vars = pt_vars;
  sbi->hashcode = bitmap_hash (pt_vars);

  slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
				   sbi->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = (void *) sbi;
}
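/* A minimal usage sketch of the two helpers above (this mirrors what
   find_what_p_points_to does further down; the variable names here are
   only illustrative):

     bitmap result = shared_bitmap_lookup (finished_solution);
     if (!result)
       {
	 shared_bitmap_add (finished_solution);
	 pi->pt_vars = finished_solution;
       }
     else
       pi->pt_vars = result;

   i.e. a freshly computed points-to bitmap is either replaced by an
   identical bitmap already in the table, or registered so that later
   identical sets can share it.  */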
/* Set bits in INTO corresponding to the variable uids in solution set
   FROM, which came from variable PTR.
   For variables that are actually dereferenced, we also use type
   based alias analysis to prune the points-to sets.
   IS_DEREFED is true if PTR was directly dereferenced, which we use to
   help determine whether we are allowed to prune using TBAA.
   If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
   the from set.  */

static void
set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
		   bool no_tbaa_pruning)
{
  unsigned int i;
  bitmap_iterator bi;
  alias_set_type ptr_alias_set = get_alias_set (TREE_TYPE (ptr));

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);
      alias_set_type var_alias_set;

      /* The only artificial variables that are allowed in a may-alias
	 set are heap variables.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;

      if (vi->has_union && get_subvars_for_var (vi->decl) != NULL)
	{
	  unsigned int i;
	  tree subvar;
	  subvar_t sv = get_subvars_for_var (vi->decl);

	  /* Variables containing unions may need to be converted to
	     their SFT's, because SFT's can have unions and we cannot.  */
	  for (i = 0; VEC_iterate (tree, sv, i, subvar); ++i)
	    bitmap_set_bit (into, DECL_UID (subvar));
	}
      else if (TREE_CODE (vi->decl) == VAR_DECL
	       || TREE_CODE (vi->decl) == PARM_DECL
	       || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  if (var_can_have_subvars (vi->decl)
	      && get_subvars_for_var (vi->decl))
	    {
	      /* If VI->DECL is an aggregate for which we created
		 SFTs, add the SFT corresponding to VI->OFFSET.  */
	      tree sft = get_subvar_at (vi->decl, vi->offset);
	      if (sft)
		{
		  var_alias_set = get_alias_set (sft);
		  if (no_tbaa_pruning
		      || (!is_derefed && !vi->directly_dereferenced)
		      || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
		    {
		      bitmap_set_bit (into, DECL_UID (sft));
		      SFT_IN_NESTED_STRUCT (sft) = vi->in_nested_struct;
		    }
		}
	    }
	  else
	    {
	      /* Otherwise, just add VI->DECL to the alias set.
		 Don't type prune artificial vars.  */
	      if (vi->is_artificial_var)
		bitmap_set_bit (into, DECL_UID (vi->decl));
	      else
		{
		  var_alias_set = get_alias_set (vi->decl);
		  if (no_tbaa_pruning
		      || (!is_derefed && !vi->directly_dereferenced)
		      || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
		    bitmap_set_bit (into, DECL_UID (vi->decl));
		}
	    }
	}
    }
}
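/* Hedged example of the TBAA pruning above (assumed types, not taken from
   the sources): if PTR has type "int *" and a candidate variable in FROM
   has type "double", their alias sets do not conflict, so when PTR is
   known to be dereferenced and NO_TBAA_PRUNING is false the variable's
   uid is left out of INTO; with NO_TBAA_PRUNING set, it is added
   regardless.  */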
static bool have_alias_info = false;

/* The list of SMT's that are in use by our pointer variables.  This
   is the set of SMT's for all pointers that can point to anything.  */
static bitmap used_smts;
/* Due to the ordering of points-to set calculation and SMT
   calculation being a bit co-dependent, we can't just calculate SMT
   used info whenever we want, we have to calculate it around the time
   that find_what_p_points_to is called.  */

/* Mark which SMT's are in use by points-to anything variables.  */

void
set_used_smts (void)
{
  unsigned int i;
  varinfo_t vi;

  used_smts = BITMAP_ALLOC (&pta_obstack);

  for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); i++)
    {
      tree var = vi->decl;
      varinfo_t withsolution = get_varinfo (find (i));
      tree smt;
      var_ann_t va;
      struct ptr_info_def *pi = NULL;

      /* For parm decls, the pointer info may be under the default
	 def.  */
      if (TREE_CODE (vi->decl) == PARM_DECL
	  && gimple_default_def (cfun, var))
	pi = SSA_NAME_PTR_INFO (gimple_default_def (cfun, var));
      else if (TREE_CODE (var) == SSA_NAME)
	pi = SSA_NAME_PTR_INFO (var);

      /* Skip the special variables and those that can't be aliased.  */
      if (vi->is_special_var
	  || !SSA_VAR_P (var)
	  || (pi && !pi->is_dereferenced)
	  || (TREE_CODE (var) == VAR_DECL && !may_be_aliased (var))
	  || !POINTER_TYPE_P (TREE_TYPE (var)))
	continue;

      if (TREE_CODE (var) == SSA_NAME)
	var = SSA_NAME_VAR (var);

      va = var_ann (var);
      if (!va)
	continue;

      smt = va->symbol_mem_tag;
      if (smt && bitmap_bit_p (withsolution->solution, anything_id))
	bitmap_set_bit (used_smts, DECL_UID (smt));
    }
}
/* Merge the necessary SMT's into the bitmap SOLUTION, which is
   P's varinfo.  This involves merging all SMT's that are a subset of
   the SMT necessary for P.  */

static void
merge_smts_into (tree p, bitmap solution)
{
  unsigned int i;
  bitmap_iterator bi;
  tree smt;
  bitmap aliases;
  tree var = p;

  if (TREE_CODE (p) == SSA_NAME)
    var = SSA_NAME_VAR (p);

  smt = var_ann (var)->symbol_mem_tag;
  if (smt)
    {
      alias_set_type smtset = get_alias_set (TREE_TYPE (smt));

      /* Need to set the SMT subsets first before this
	 will work properly.  */
      bitmap_set_bit (solution, DECL_UID (smt));
      EXECUTE_IF_SET_IN_BITMAP (used_smts, 0, i, bi)
	{
	  tree newsmt = referenced_var (i);
	  tree newsmttype = TREE_TYPE (newsmt);

	  if (alias_set_subset_of (get_alias_set (newsmttype),
				   smtset))
	    bitmap_set_bit (solution, i);
	}

      aliases = MTAG_ALIASES (smt);
      if (aliases)
	bitmap_ior_into (solution, aliases);
    }
}
/* Given a pointer variable P, fill in its points-to set, or return
   false if we can't.
   Rather than return false for variables that point-to anything, we
   instead find the corresponding SMT, and merge in its aliases.  In
   addition to these aliases, we also set the bits for the SMT's
   themselves and their subsets, as SMT's are still in use by
   non-SSA_NAME's, and pruning may eliminate every one of their
   aliases.  In such a case, if we did not include the right set of
   SMT's in the points-to set of the variable, we'd end up with
   statements that do not conflict but should.  */

bool
find_what_p_points_to (tree p)
{
  tree lookup_p = p;
  varinfo_t vi;

  if (!have_alias_info)
    return false;

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (p) == SSA_NAME
      && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
      && SSA_NAME_IS_DEFAULT_DEF (p))
    lookup_p = SSA_NAME_VAR (p);

  vi = lookup_vi_for_tree (lookup_p);
  if (vi)
    {
      if (vi->is_artificial_var)
	return false;

      /* See if this is a field or a structure.  */
      if (vi->size != vi->fullsize)
	{
	  /* Nothing currently asks about structure fields directly,
	     but when they do, we need code here to hand back the
	     points-to set.  */
	  if (!var_can_have_subvars (vi->decl)
	      || get_subvars_for_var (vi->decl) == NULL)
	    return false;
	}
      else
	{
	  struct ptr_info_def *pi = get_ptr_info (p);
	  unsigned int i;
	  bitmap_iterator bi;
	  bool was_pt_anything = false;
	  bitmap finished_solution;
	  bitmap result;

	  if (!pi->is_dereferenced)
	    return false;

	  /* This variable may have been collapsed, let's get the real
	     variable.  */
	  vi = get_varinfo (find (vi->id));

	  /* Translate artificial variables into SSA_NAME_PTR_INFO
	     attributes.  */
	  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
	    {
	      varinfo_t vi = get_varinfo (i);

	      if (vi->is_artificial_var)
		{
		  /* FIXME.  READONLY should be handled better so that
		     flow insensitive aliasing can disregard writable
		     aliases.  */
		  if (vi->id == nothing_id)
		    pi->pt_null = 1;
		  else if (vi->id == anything_id)
		    was_pt_anything = 1;
		  else if (vi->id == readonly_id)
		    was_pt_anything = 1;
		  else if (vi->id == integer_id)
		    was_pt_anything = 1;
		  else if (vi->is_heap_var)
		    pi->pt_global_mem = 1;
		}
	    }

	  /* Share the final set of variables when possible.  */
	  finished_solution = BITMAP_GGC_ALLOC ();
	  stats.points_to_sets_created++;

	  /* Instead of using pt_anything, we merge in the SMT aliases
	     for the underlying SMT.  In addition, if they could have
	     pointed to anything, they could point to global memory.
	     But we cannot do that for ref-all pointers because these
	     aliases have not been computed yet.  */
	  if (was_pt_anything)
	    {
	      if (PTR_IS_REF_ALL (p))
		{
		  pi->pt_anything = 1;
		  return false;
		}

	      merge_smts_into (p, finished_solution);
	      pi->pt_global_mem = 1;
	    }

	  set_uids_in_ptset (vi->decl, finished_solution, vi->solution,
			     vi->directly_dereferenced,
			     vi->no_tbaa_pruning);
	  result = shared_bitmap_lookup (finished_solution);

	  if (!result)
	    {
	      shared_bitmap_add (finished_solution);
	      pi->pt_vars = finished_solution;
	    }
	  else
	    {
	      pi->pt_vars = result;
	      bitmap_clear (finished_solution);
	    }

	  if (bitmap_empty_p (pi->pt_vars))
	    pi->pt_vars = NULL;

	  return true;
	}
    }

  return false;
}
/* Dump points-to information to OUTFILE.  */

void
dump_sa_points_to_info (FILE *outfile)
{
  unsigned int i;

  fprintf (outfile, "\nPoints-to sets\n\n");

  if (dump_flags & TDF_STATS)
    {
      fprintf (outfile, "Stats:\n");
      fprintf (outfile, "Total vars: %d\n", stats.total_vars);
      fprintf (outfile, "Non-pointer vars: %d\n",
	       stats.nonpointer_vars);
      fprintf (outfile, "Statically unified vars: %d\n",
	       stats.unified_vars_static);
      fprintf (outfile, "Dynamically unified vars: %d\n",
	       stats.unified_vars_dynamic);
      fprintf (outfile, "Iterations: %d\n", stats.iterations);
      fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
      fprintf (outfile, "Number of implicit edges: %d\n",
	       stats.num_implicit_edges);
    }

  for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
    dump_solution_for_var (outfile, i);
}
/* Debug points-to information to stderr.  */

void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
/* Initialize the always-existing constraint variables for NULL,
   ANYTHING, READONLY, and INTEGER.  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
  var_nothing = new_var_info (nothing_tree, 0, "NULL");
  insert_vi_for_tree (nothing_tree, var_nothing);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;

  VEC_safe_push (varinfo_t, heap, varmap, var_nothing);

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
  var_anything = new_var_info (anything_tree, 1, "ANYTHING");
  insert_vi_for_tree (anything_tree, var_anything);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->next = NULL;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING.  */
  VEC_safe_push (varinfo_t, heap, varmap, var_anything);
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));

  /* Create the READONLY variable, used to represent that a variable
     points to readonly memory.  */
  readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
  var_readonly = new_var_info (readonly_tree, 2, "READONLY");
  var_readonly->is_artificial_var = 1;
  var_readonly->offset = 0;
  var_readonly->size = ~0;
  var_readonly->fullsize = ~0;
  var_readonly->next = NULL;
  var_readonly->is_special_var = 1;
  insert_vi_for_tree (readonly_tree, var_readonly);

  VEC_safe_push (varinfo_t, heap, varmap, var_readonly);

  /* readonly memory points to anything, in order to make deref
     easier.  In reality, it points to anything the particular
     readonly variable can point to, but we don't track this
     separately.  */
  lhs.type = SCALAR;
  lhs.var = readonly_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  process_constraint (new_constraint (lhs, rhs));

  /* Create the INTEGER variable, used to represent that a variable points
     to an INTEGER.  */
  integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
  var_integer = new_var_info (integer_tree, 3, "INTEGER");
  insert_vi_for_tree (integer_tree, var_integer);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->next = NULL;
  var_integer->is_special_var = 1;

  VEC_safe_push (varinfo_t, heap, varmap, var_integer);

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  process_constraint (new_constraint (lhs, rhs));
}
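/* To summarize (informal restatement of the code above, not additional
   constraints): after init_base_vars the system always contains

     ANYTHING = &ANYTHING
     READONLY = &ANYTHING
     INTEGER  = &ANYTHING

   so dereferencing a pointer whose solution contains ANYTHING, READONLY
   or INTEGER conservatively yields ANYTHING again.  */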
/* Initialize things necessary to perform PTA.  */

static void
init_alias_vars (void)
{
  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraint_pool = create_alloc_pool ("Constraint pool",
				       sizeof (struct constraint), 30);
  variable_info_pool = create_alloc_pool ("Variable info pool",
					  sizeof (struct variable_info), 30);
  constraints = VEC_alloc (constraint_t, heap, 8);
  varmap = VEC_alloc (varinfo_t, heap, 8);
  vi_for_tree = pointer_map_create ();

  memset (&stats, 0, sizeof (stats));
  shared_bitmap_table = htab_create (511, shared_bitmap_hash,
				     shared_bitmap_eq, free);
  init_base_vars ();
}
/* Remove the REF and ADDRESS edges from GRAPH, as well as all the
   predecessor edges.  */

static void
remove_preds_and_fake_succs (constraint_graph_t graph)
{
  unsigned int i;

  /* Clear the implicit ref and address nodes from the successor
     lists.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      if (graph->succs[i])
	bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
			    FIRST_REF_NODE * 2);
    }

  /* Free the successor list for the non-ref nodes.  */
  for (i = FIRST_REF_NODE; i < graph->size; i++)
    {
      if (graph->succs[i])
	BITMAP_FREE (graph->succs[i]);
    }

  /* Now reallocate the successor list to its new size, and blow away
     the predecessor bitmaps.  */
  graph->size = VEC_length (varinfo_t, varmap);
  graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);

  free (graph->implicit_preds);
  graph->implicit_preds = NULL;
  free (graph->preds);
  graph->preds = NULL;
  bitmap_obstack_release (&predbitmap_obstack);
}
/* Compute the set of variables we can't TBAA prune.  */

static void
compute_tbaa_pruning (void)
{
  unsigned int size = VEC_length (varinfo_t, varmap);
  unsigned int i;
  bool any;

  changed_count = 0;
  changed = sbitmap_alloc (size);
  sbitmap_zero (changed);

  /* Mark all initial no_tbaa_pruning nodes as changed.  */
  any = false;
  for (i = 0; i < size; ++i)
    {
      varinfo_t ivi = get_varinfo (i);

      if (find (i) == i && ivi->no_tbaa_pruning)
	{
	  any = true;
	  if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || VEC_length (constraint_t, graph->complex[i]) > 0)
	    {
	      SET_BIT (changed, i);
	      ++changed_count;
	    }
	}
    }

  while (changed_count > 0)
    {
      struct topo_info *ti = init_topo_info ();

      compute_topo_order (graph, ti);

      while (VEC_length (unsigned, ti->topo_order) != 0)
	{
	  i = VEC_pop (unsigned, ti->topo_order);

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the complex
	     constraints and outgoing edges again.  */
	  if (TEST_BIT (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap_iterator bi;
	      VEC(constraint_t,heap) *complex = graph->complex[i];

	      RESET_BIT (changed, i);
	      --changed_count;

	      /* Process the complex copy constraints.  */
	      for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
		{
		  if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
		    {
		      varinfo_t lhsvi = get_varinfo (find (c->lhs.var));

		      if (!lhsvi->no_tbaa_pruning)
			{
			  lhsvi->no_tbaa_pruning = true;
			  if (!TEST_BIT (changed, lhsvi->id))
			    {
			      SET_BIT (changed, lhsvi->id);
			      ++changed_count;
			    }
			}
		    }
		}

	      /* Propagate to all successors.  */
	      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
		{
		  unsigned int to = find (j);
		  varinfo_t tovi = get_varinfo (to);

		  /* Don't propagate to ourselves.  */
		  if (to == i)
		    continue;

		  if (!tovi->no_tbaa_pruning)
		    {
		      tovi->no_tbaa_pruning = true;
		      if (!TEST_BIT (changed, to))
			{
			  SET_BIT (changed, to);
			  ++changed_count;
			}
		    }
		}
	    }
	}

      free_topo_info (ti);
    }

  sbitmap_free (changed);

  if (any)
    for (i = 0; i < size; ++i)
      {
	varinfo_t ivi = get_varinfo (i);
	varinfo_t ivip = get_varinfo (find (i));

	if (ivip->no_tbaa_pruning)
	  {
	    tree var = ivi->decl;

	    if (TREE_CODE (var) == SSA_NAME)
	      var = SSA_NAME_VAR (var);

	    if (POINTER_TYPE_P (TREE_TYPE (var)))
	      {
		DECL_NO_TBAA_P (var) = 1;

		/* Tell the RTL layer that this pointer can alias
		   anything.  */
		DECL_POINTER_ALIAS_SET (var) = 0;
	      }
	  }
      }
}
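/* In short: if a representative node starts out with no_tbaa_pruning set
   (e.g. because the pointer was created by type-punning elsewhere during
   constraint generation), the fixpoint loop above propagates the flag
   through SCALAR = SCALAR complex constraints and successor edges, and
   finally clears DECL_POINTER_ALIAS_SET for the affected pointer decls so
   the RTL level treats them as aliasing anything.  */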
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  */

void
compute_points_to_sets (struct alias_info *ai)
{
  struct scc_info *si;
  basic_block bb;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();
  init_alias_heapvars ();

  intra_create_variable_infos ();

  /* Now walk all statements and derive aliases.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;
      tree phi;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  if (is_gimple_reg (PHI_RESULT (phi)))
	    {
	      find_func_aliases (phi);

	      /* Update various related attributes like escaped
		 addresses, pointer dereferences for loads and stores.
		 This is used when creating name tags and alias
		 sets.  */
	      update_alias_info (phi, ai);
	    }
	}

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
	{
	  tree stmt = bsi_stmt (bsi);

	  find_func_aliases (stmt);

	  /* Update various related attributes like escaped
	     addresses, pointer dereferences for loads and stores.
	     This is used when creating name tags and alias
	     sets.  */
	  update_alias_info (stmt, ai);

	  /* The information in CHANGE_DYNAMIC_TYPE_EXPR nodes has now
	     been captured, and we can remove them.  */
	  if (TREE_CODE (stmt) == CHANGE_DYNAMIC_TYPE_EXPR)
	    bsi_remove (&bsi, true);
	  else
	    bsi_next (&bsi);
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file);
    }

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  init_graph (VEC_length (varinfo_t, varmap) * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);
  free_var_substitution_info (si);

  build_succ_graph ();
  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  solve_graph (graph);

  compute_tbaa_pruning ();

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  have_alias_info = true;

  timevar_pop (TV_TREE_PTA);
}
/* Delete created points-to sets.  */

void
delete_points_to_sets (void)
{
  unsigned int i;

  htab_delete (shared_bitmap_table);
  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "Points to sets created:%d\n",
	     stats.points_to_sets_created);

  pointer_map_destroy (vi_for_tree);
  bitmap_obstack_release (&pta_obstack);
  VEC_free (constraint_t, heap, constraints);

  for (i = 0; i < graph->size; i++)
    VEC_free (constraint_t, heap, graph->complex[i]);
  free (graph->complex);

  free (graph->rep);
  free (graph->succs);
  free (graph->pe);
  free (graph->pe_rep);
  free (graph->indirect_cycles);
  free (graph);

  VEC_free (varinfo_t, heap, varmap);
  free_alloc_pool (variable_info_pool);
  free_alloc_pool (constraint_pool);
  have_alias_info = false;
}
/* Return true if we should execute IPA PTA.  */

static bool
gate_ipa_pta (void)
{
  return (flag_unit_at_a_time != 0
	  && flag_ipa_pta
	  /* Don't bother doing anything if the program has errors.  */
	  && !(errorcount || sorrycount));
}
/* Execute the driver for IPA PTA.  */

static unsigned int
ipa_pta_execute (void)
{
  struct cgraph_node *node;
  struct scc_info *si;

  in_ipa_mode = 1;
  init_alias_heapvars ();
  init_alias_vars ();

  for (node = cgraph_nodes; node; node = node->next)
    {
      if (!node->analyzed || cgraph_is_master_clone (node))
	{
	  unsigned int varid;

	  varid = create_function_info_for (node->decl,
					    cgraph_node_name (node));
	  if (node->local.externally_visible)
	    {
	      varinfo_t fi = get_varinfo (varid);
	      for (; fi; fi = fi->next)
		make_constraint_from_anything (fi);
	    }
	}
    }

  for (node = cgraph_nodes; node; node = node->next)
    {
      if (node->analyzed && cgraph_is_master_clone (node))
	{
	  struct function *cfun = DECL_STRUCT_FUNCTION (node->decl);
	  basic_block bb;
	  tree old_func_decl = current_function_decl;

	  if (dump_file)
	    fprintf (dump_file,
		     "Generating constraints for %s\n",
		     cgraph_node_name (node));

	  current_function_decl = node->decl;

	  FOR_EACH_BB_FN (bb, cfun)
	    {
	      block_stmt_iterator bsi;
	      tree phi;

	      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
		{
		  if (is_gimple_reg (PHI_RESULT (phi)))
		    find_func_aliases (phi);
		}

	      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
		{
		  tree stmt = bsi_stmt (bsi);
		  find_func_aliases (stmt);
		}
	    }

	  current_function_decl = old_func_decl;
	}
      else
	{
	  /* Make point to anything.  */
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file);
    }

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  init_graph (VEC_length (varinfo_t, varmap) * 2);
  build_pred_graph ();
  si = perform_var_substitution (graph);
  rewrite_constraints (graph, si);
  free_var_substitution_info (si);

  build_succ_graph ();
  move_complex_constraints (graph);
  unite_pointer_equivalences (graph);
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file)
    fprintf (dump_file, "\nSolving graph\n");

  solve_graph (graph);

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  in_ipa_mode = 0;
  delete_alias_heapvars ();
  delete_points_to_sets ();
  return 0;
}
struct tree_opt_pass pass_ipa_pta =
{
  "ipa_pta",				/* name */
  gate_ipa_pta,				/* gate */
  ipa_pta_execute,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PTA,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_update_ssa,			/* todo_flags_finish */
  0					/* letter */
};
/* Initialize the heapvar for statement mapping.  */

void
init_alias_heapvars (void)
{
  if (!heapvar_for_stmt)
    heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
					NULL);
}
/* Delete the heapvar for statement mapping.  */

void
delete_alias_heapvars (void)
{
  htab_delete (heapvar_for_stmt);
  heapvar_for_stmt = NULL;
}
5657 #include "gt-tree-ssa-structalias.h"