/* Tree based points-to analysis
   Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dberlin@dberlin.org>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-gimple.h"
#include "tree-pass.h"
#include "alloc-pool.h"
#include "splay-tree.h"
#include "tree-ssa-structalias.h"
#include "pointer-set.h"
/* The idea behind this analyzer is to generate set constraints from the
   program, then solve the resulting constraints in order to generate the
   points-to sets.

   Set constraints are a way of modeling program analysis problems that
   involve sets.  They consist of an inclusion constraint language,
   describing the variables (each variable is a set) and operations that
   are involved on the variables, and a set of rules that derive facts
   from these operations.  To solve a system of set constraints, you derive
   all possible facts under the rules, which gives you the correct sets
   as a consequence.

   See "Efficient Field-sensitive pointer analysis for C" by David
   J. Pearce, Paul H. J. Kelly and Chris Hankin, at
   http://citeseer.ist.psu.edu/pearce04efficient.html

   Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
   of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
   http://citeseer.ist.psu.edu/heintze01ultrafast.html

   There are three types of real constraint expressions, DEREF,
   ADDRESSOF, and SCALAR.  Each constraint expression consists
   of a constraint type, a variable, and an offset.

   SCALAR is a constraint expression type used to represent x, whether
   it appears on the LHS or the RHS of a statement.
   DEREF is a constraint expression type used to represent *x, whether
   it appears on the LHS or the RHS of a statement.
   ADDRESSOF is a constraint expression used to represent &x, whether
   it appears on the LHS or the RHS of a statement.
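
   As a small illustrative example (with hypothetical variables), the
   statements

     p = &a;      q = p;      *q = p;      r = *q;

   would give rise to the constraints

     p = &a       (SCALAR = ADDRESSOF)
     q = p        (SCALAR = SCALAR)
     *q = p       (DEREF = SCALAR)
     r = *q       (SCALAR = DEREF)

   one per statement, each made of an LHS and an RHS constraint
   expression of the parenthesized types, all with offset 0.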

   Each pointer variable in the program is assigned an integer id, and
   each field of a structure variable is assigned an integer id as well.

   Structure variables are linked to their list of fields through a "next
   field" in each variable that points to the next field in offset
   order.
   Each variable for a structure field has

   1. "size", that tells the size in bits of that field.
   2. "fullsize", that tells the size in bits of the entire structure.
   3. "offset", that tells the offset in bits from the beginning of the
   structure to this field.

   For example, for a structure foo with two 32-bit fields a and b and
   a 32-bit variable bar, we would create the variables:

   foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
   foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
   bar -> id 3, size 32, offset 0, fullsize 32, next NULL

   In order to solve the system of set constraints, the following is
   done:

   1. Each constraint variable x has a solution set associated with it,
   Sol(x).

   2. Constraints are separated into direct, copy, and complex.
   Direct constraints are ADDRESSOF constraints that require no extra
   processing, such as P = &Q.
   Copy constraints are those of the form P = Q.
   Complex constraints are all the constraints involving dereferences
   and offsets (including offsetted copies).

   3. All direct constraints of the form P = &Q are processed, such
   that Q is added to Sol(P).

   4. All complex constraints for a given constraint variable are stored in a
   linked list attached to that variable's node.

   5. A directed graph is built out of the copy constraints.  Each
   constraint variable is a node in the graph, and an edge from
   Q to P is added for each copy constraint of the form P = Q.

   6. The graph is then walked, and solution sets are
   propagated along the copy edges, such that an edge from Q to P
   causes Sol(P) <- Sol(P) union Sol(Q).

   7. As we visit each node, all complex constraints associated with
   that node are processed by adding appropriate copy edges to the graph, or the
   appropriate variables to the solution set.

   8. The process of walking the graph is iterated until no solution
   sets change.

   Prior to walking the graph in steps 6 and 7, we perform static
   cycle elimination on the constraint graph, as well
   as off-line variable substitution.
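
   As a small illustration (hypothetical constraint variables), given
   the constraints

     a = &b      b = &c      d = a      e = *d

   step 3 gives Sol(a) = {b} and Sol(b) = {c}; step 5 adds a copy edge
   from a to d for d = a, so propagation in step 6 makes Sol(d) = {b};
   the complex constraint e = *d is stored with d's node, and when d is
   visited in step 7 a copy edge from b to e is added, after which
   propagation gives Sol(e) = {c}.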

   TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
   on and turned into anything), but isn't.  You can just see what offset
   inside the pointed-to struct it's going to access.

   TODO: Constant bounded arrays can be handled as if they were structs of the
   same number of elements.

   TODO: Modeling heap and incoming pointers becomes much better if we
   add fields to them as we discover them, which we could do.

   TODO: We could handle unions, but to be honest, it's probably not
   worth the pain or slowdown.  */

static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
htab_t heapvar_for_stmt;

static bool use_field_sensitive = true;
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps.  */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets.  */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables.  */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps.  */
static bitmap_obstack iteration_obstack;

static unsigned int create_variable_info_for (tree, const char *);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

DEF_VEC_P(constraint_t);
DEF_VEC_ALLOC_P(constraint_t,heap);

#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)

static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;

struct variable_info
{
  /* ID of this variable.  */
  unsigned int id;

  /* Name of this variable.  */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Offset of this variable, in bits, from the base variable.  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* A link to the variable for the next field in this structure.  */
  struct variable_info *next;

  /* True if the variable is directly the target of a dereference.
     This is used to track which variables are *actually* dereferenced
     so we can prune their points-to sets.  */
  unsigned int directly_dereferenced:1;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var:1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var:1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var:1;

  /* True for variables that have unions somewhere in them.  */
  unsigned int has_union:1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var:1;

  /* True if we may not use TBAA to prune references to this
     variable.  This is used for C++ placement new.  */
  unsigned int no_tbaa_pruning : 1;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  */
  bitmap oldsolution;

  /* Variable id this was collapsed to due to type unsafety.  This
     should be unused completely after build_succ_graph, or something
     is broken.  */
  struct variable_info *collapsed_to;
};
typedef struct variable_info *varinfo_t;

static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);

/* Pool of variable info structures.  */
static alloc_pool variable_info_pool;

DEF_VEC_P(varinfo_t);
DEF_VEC_ALLOC_P(varinfo_t, heap);

/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id.  */
static VEC(varinfo_t,heap) *varmap;

/* Return the varmap element N.  */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return VEC_index (varinfo_t, varmap, n);
}

/* Return the varmap element N, following the collapsed_to link.  */

static inline varinfo_t
get_varinfo_fc (unsigned int n)
{
  varinfo_t v = VEC_index (varinfo_t, varmap, n);

  if (v->collapsed_to)
    return v->collapsed_to;
  return v;
}

/* Variable that represents the unknown pointer.  */
static varinfo_t var_anything;
static tree anything_tree;
static unsigned int anything_id;

/* Variable that represents the NULL pointer.  */
static varinfo_t var_nothing;
static tree nothing_tree;
static unsigned int nothing_id;

/* Variable that represents read only memory.  */
static varinfo_t var_readonly;
static tree readonly_tree;
static unsigned int readonly_id;

/* Variable that represents integers.  This is used for when people do things
   like &0->a.b.  */
static varinfo_t var_integer;
static tree integer_tree;
static unsigned int integer_id;

/* Lookup a heap var for FROM, and return it if we find one.  */

heapvar_lookup (tree from)
{
  struct tree_map *h, in;

  h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
					       htab_hash_pointer (from));

/* Insert a mapping FROM->TO in the heap var for statement
   hashtable.  */

heapvar_insert (tree from, tree to)
{
  h = GGC_NEW (struct tree_map);
  h->hash = htab_hash_pointer (from);

  loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
  *(struct tree_map **) loc = h;

/* Return a new variable info structure for a variable named NAME,
   associated with tree T and id ID.  */

static varinfo_t
new_var_info (tree t, unsigned int id, const char *name)
{
  varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
  tree var = t;

  ret->id = id;
  ret->name = name;
  ret->decl = t;
  ret->directly_dereferenced = false;
  ret->is_artificial_var = false;
  ret->is_heap_var = false;
  ret->is_special_var = false;
  ret->is_unknown_size_var = false;
  ret->has_union = false;
  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);
  ret->no_tbaa_pruning = (DECL_P (var)
			  && POINTER_TYPE_P (TREE_TYPE (var))
			  && DECL_NO_TBAA_P (var));
  ret->solution = BITMAP_ALLOC (&pta_obstack);
  ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
  ret->next = NULL;
  ret->collapsed_to = NULL;

  return ret;
}

typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.  */
  unsigned HOST_WIDE_INT offset;
};

typedef struct constraint_expr ce_s;
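
/* As an illustration of the offset semantics (hypothetical pointer q):
   if Sol(q) = { foo.a }, i.e. q points to the start of the structure
   foo used in the introductory comment, then a DEREF expression for q
   with offset 32 refers to foo.b: the dereference yields { foo.a },
   and adding 32 bits to that member selects the field at offset 32
   within foo.  */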

DEF_VEC_ALLOC_O(ce_s, heap);
static void get_constraint_for (tree, VEC(ce_s, heap) **);
static void do_deref (VEC (ce_s, heap) **);

/* Our set constraints are made up of two constraint expressions, one
   LHS and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/
struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from.  */

static VEC(constraint_t,heap) *constraints;
static alloc_pool constraint_pool;

DEF_VEC_ALLOC_I(int, heap);
433 /* The constraint graph is represented as an array of bitmaps
434 containing successor nodes. */
struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map.  */
  unsigned int size;
  /* Explicit successors of each node.  */
  bitmap *succs;
  /* Implicit predecessors of each node (Used for variable
     substitution).  */
  bitmap *implicit_preds;
  /* Explicit predecessors of each node (Used for variable substitution).  */
  bitmap *preds;
  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;
  /* Representative node for a node.  rep[a] == a unless the node has
     been unified.  */
  unsigned int *rep;
  /* Equivalence class representative for a label.  This is used for
     variable substitution.  */
  int *eq_rep;
  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built).  */
  unsigned int *pe;
  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets.  */
  int *pe_rep;
  /* Pointer equivalence label for each node, used during variable
     substitution.  */
  unsigned int *pointer_label;
  /* Location equivalence label for each node, used during location
     equivalence finding.  */
  unsigned int *loc_label;
  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph.  */
  bitmap *pointed_by;
  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes.  */
  bitmap *points_to;
  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution.  */
  bitmap address_taken;
  /* True if points_to bitmap for this node is stored in the hash
     table.  */
  sbitmap pt_used;

  /* Number of incoming edges remaining to be processed by pointer
     equivalence.
     Used for variable substitution.  */
  unsigned int *number_incoming;
  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  */
  VEC(constraint_t,heap) **complex;
};

static constraint_graph_t graph;

/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  */
#define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
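
/* For illustration: with N variables in varmap, node i (i < N) stands
   for variable i itself, while node FIRST_REF_NODE + i stands for *i,
   i.e. what variable i points to.  */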

/* Return the representative node for NODE, if NODE has been unioned
   with another NODE.

   This function performs path compression along the way to finding
   the representative.  */

static unsigned int
find (unsigned int node)
{
  gcc_assert (node < graph->size);
  if (graph->rep[node] != node)
    return graph->rep[node] = find (graph->rep[node]);
  return node;
}

/* Union the TO and FROM nodes to the TO node.
541 Note that at some point in the future, we may want to do
542 union-by-rank, in which case we are going to have to return the
543 node we unified to. */

static bool
unite (unsigned int to, unsigned int from)
{
  gcc_assert (to < graph->size && from < graph->size);
  if (to != from && graph->rep[from] != to)
    {
      graph->rep[from] = to;
      return true;
    }
  return false;
}
557 /* Create a new constraint consisting of LHS and RHS expressions. */
560 new_constraint (const struct constraint_expr lhs
,
561 const struct constraint_expr rhs
)
563 constraint_t ret
= (constraint_t
) pool_alloc (constraint_pool
);
569 /* Print out constraint C to FILE. */
572 dump_constraint (FILE *file
, constraint_t c
)
574 if (c
->lhs
.type
== ADDRESSOF
)
576 else if (c
->lhs
.type
== DEREF
)
578 fprintf (file
, "%s", get_varinfo_fc (c
->lhs
.var
)->name
);
579 if (c
->lhs
.offset
!= 0)
580 fprintf (file
, " + " HOST_WIDE_INT_PRINT_DEC
, c
->lhs
.offset
);
581 fprintf (file
, " = ");
582 if (c
->rhs
.type
== ADDRESSOF
)
584 else if (c
->rhs
.type
== DEREF
)
586 fprintf (file
, "%s", get_varinfo_fc (c
->rhs
.var
)->name
);
587 if (c
->rhs
.offset
!= 0)
588 fprintf (file
, " + " HOST_WIDE_INT_PRINT_DEC
, c
->rhs
.offset
);
589 fprintf (file
, "\n");
592 /* Print out constraint C to stderr. */
595 debug_constraint (constraint_t c
)
597 dump_constraint (stderr
, c
);
600 /* Print out all constraints to FILE */
603 dump_constraints (FILE *file
)
607 for (i
= 0; VEC_iterate (constraint_t
, constraints
, i
, c
); i
++)
608 dump_constraint (file
, c
);
611 /* Print out all constraints to stderr. */
614 debug_constraints (void)
616 dump_constraints (stderr
);
   The solver is a simple worklist solver that works on the following
   algorithm:
624 sbitmap changed_nodes = all zeroes;
626 For each node that is not already collapsed:
628 set bit in changed nodes
630 while (changed_count > 0)
632 compute topological ordering for constraint graph
634 find and collapse cycles in the constraint graph (updating
635 changed if necessary)
637 for each node (n) in the graph in topological order:
640 Process each complex constraint associated with the node,
641 updating changed if necessary.
643 For each outgoing edge from n, propagate the solution from n to
644 the destination of the edge, updating changed as necessary.
648 /* Return true if two constraint expressions A and B are equal. */
651 constraint_expr_equal (struct constraint_expr a
, struct constraint_expr b
)
653 return a
.type
== b
.type
&& a
.var
== b
.var
&& a
.offset
== b
.offset
;
/* Return true if constraint expression A is less than constraint expression
   B.  This is just arbitrary, but consistent, in order to give them an
   ordering.  */
661 constraint_expr_less (struct constraint_expr a
, struct constraint_expr b
)
663 if (a
.type
== b
.type
)
666 return a
.offset
< b
.offset
;
668 return a
.var
< b
.var
;
671 return a
.type
< b
.type
;
674 /* Return true if constraint A is less than constraint B. This is just
675 arbitrary, but consistent, in order to give them an ordering. */
678 constraint_less (const constraint_t a
, const constraint_t b
)
680 if (constraint_expr_less (a
->lhs
, b
->lhs
))
682 else if (constraint_expr_less (b
->lhs
, a
->lhs
))
685 return constraint_expr_less (a
->rhs
, b
->rhs
);
688 /* Return true if two constraints A and B are equal. */
691 constraint_equal (struct constraint a
, struct constraint b
)
693 return constraint_expr_equal (a
.lhs
, b
.lhs
)
694 && constraint_expr_equal (a
.rhs
, b
.rhs
);
698 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
701 constraint_vec_find (VEC(constraint_t
,heap
) *vec
,
702 struct constraint lookfor
)
710 place
= VEC_lower_bound (constraint_t
, vec
, &lookfor
, constraint_less
);
711 if (place
>= VEC_length (constraint_t
, vec
))
713 found
= VEC_index (constraint_t
, vec
, place
);
714 if (!constraint_equal (*found
, lookfor
))
719 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
722 constraint_set_union (VEC(constraint_t
,heap
) **to
,
723 VEC(constraint_t
,heap
) **from
)
728 for (i
= 0; VEC_iterate (constraint_t
, *from
, i
, c
); i
++)
730 if (constraint_vec_find (*to
, *c
) == NULL
)
732 unsigned int place
= VEC_lower_bound (constraint_t
, *to
, c
,
734 VEC_safe_insert (constraint_t
, heap
, *to
, place
, c
);
739 /* Take a solution set SET, add OFFSET to each member of the set, and
740 overwrite SET with the result when done. */
743 solution_set_add (bitmap set
, unsigned HOST_WIDE_INT offset
)
745 bitmap result
= BITMAP_ALLOC (&iteration_obstack
);
749 EXECUTE_IF_SET_IN_BITMAP (set
, 0, i
, bi
)
      /* If this is a properly sized variable, only add offset if it's
	 less than end.  Otherwise, it is globbed to a single
	 variable.  */
755 if ((get_varinfo (i
)->offset
+ offset
) < get_varinfo (i
)->fullsize
)
757 unsigned HOST_WIDE_INT fieldoffset
= get_varinfo (i
)->offset
+ offset
;
758 varinfo_t v
= first_vi_for_offset (get_varinfo (i
), fieldoffset
);
761 bitmap_set_bit (result
, v
->id
);
763 else if (get_varinfo (i
)->is_artificial_var
764 || get_varinfo (i
)->has_union
765 || get_varinfo (i
)->is_unknown_size_var
)
767 bitmap_set_bit (result
, i
);
771 bitmap_copy (set
, result
);
772 BITMAP_FREE (result
);
/* Union solution sets TO and FROM, and add INC to each member of FROM in the
   process.  */
779 set_union_with_increment (bitmap to
, bitmap from
, unsigned HOST_WIDE_INT inc
)
782 return bitmap_ior_into (to
, from
);
788 tmp
= BITMAP_ALLOC (&iteration_obstack
);
789 bitmap_copy (tmp
, from
);
790 solution_set_add (tmp
, inc
);
791 res
= bitmap_ior_into (to
, tmp
);
/* Insert constraint C into the list of complex constraints for graph
   node VAR.  */
801 insert_into_complex (constraint_graph_t graph
,
802 unsigned int var
, constraint_t c
)
804 VEC (constraint_t
, heap
) *complex = graph
->complex[var
];
805 unsigned int place
= VEC_lower_bound (constraint_t
, complex, c
,
808 /* Only insert constraints that do not already exist. */
809 if (place
>= VEC_length (constraint_t
, complex)
810 || !constraint_equal (*c
, *VEC_index (constraint_t
, complex, place
)))
811 VEC_safe_insert (constraint_t
, heap
, graph
->complex[var
], place
, c
);
815 /* Condense two variable nodes into a single variable node, by moving
816 all associated info from SRC to TO. */
819 merge_node_constraints (constraint_graph_t graph
, unsigned int to
,
825 gcc_assert (find (from
) == to
);
827 /* Move all complex constraints from src node into to node */
828 for (i
= 0; VEC_iterate (constraint_t
, graph
->complex[from
], i
, c
); i
++)
      /* In complex constraints for node src, we may have either
	 a = *src or *src = a, or an offsetted constraint, which are
	 always added to the rhs node's constraints.  */
834 if (c
->rhs
.type
== DEREF
)
836 else if (c
->lhs
.type
== DEREF
)
841 constraint_set_union (&graph
->complex[to
], &graph
->complex[from
]);
842 VEC_free (constraint_t
, heap
, graph
->complex[from
]);
843 graph
->complex[from
] = NULL
;
847 /* Remove edges involving NODE from GRAPH. */
850 clear_edges_for_node (constraint_graph_t graph
, unsigned int node
)
852 if (graph
->succs
[node
])
853 BITMAP_FREE (graph
->succs
[node
]);
856 /* Merge GRAPH nodes FROM and TO into node TO. */
859 merge_graph_nodes (constraint_graph_t graph
, unsigned int to
,
862 if (graph
->indirect_cycles
[from
] != -1)
864 /* If we have indirect cycles with the from node, and we have
865 none on the to node, the to node has indirect cycles from the
866 from node now that they are unified.
867 If indirect cycles exist on both, unify the nodes that they
868 are in a cycle with, since we know they are in a cycle with
870 if (graph
->indirect_cycles
[to
] == -1)
871 graph
->indirect_cycles
[to
] = graph
->indirect_cycles
[from
];
874 /* Merge all the successor edges. */
875 if (graph
->succs
[from
])
877 if (!graph
->succs
[to
])
878 graph
->succs
[to
] = BITMAP_ALLOC (&pta_obstack
);
879 bitmap_ior_into (graph
->succs
[to
],
883 clear_edges_for_node (graph
, from
);
887 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
888 it doesn't exist in the graph already. */
891 add_implicit_graph_edge (constraint_graph_t graph
, unsigned int to
,
897 if (!graph
->implicit_preds
[to
])
898 graph
->implicit_preds
[to
] = BITMAP_ALLOC (&predbitmap_obstack
);
900 if (!bitmap_bit_p (graph
->implicit_preds
[to
], from
))
902 stats
.num_implicit_edges
++;
903 bitmap_set_bit (graph
->implicit_preds
[to
], from
);
907 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
908 it doesn't exist in the graph already.
909 Return false if the edge already existed, true otherwise. */
912 add_pred_graph_edge (constraint_graph_t graph
, unsigned int to
,
915 if (!graph
->preds
[to
])
916 graph
->preds
[to
] = BITMAP_ALLOC (&predbitmap_obstack
);
917 if (!bitmap_bit_p (graph
->preds
[to
], from
))
918 bitmap_set_bit (graph
->preds
[to
], from
);
921 /* Add a graph edge to GRAPH, going from FROM to TO if
922 it doesn't exist in the graph already.
923 Return false if the edge already existed, true otherwise. */
926 add_graph_edge (constraint_graph_t graph
, unsigned int to
,
937 if (!graph
->succs
[from
])
938 graph
->succs
[from
] = BITMAP_ALLOC (&pta_obstack
);
939 if (!bitmap_bit_p (graph
->succs
[from
], to
))
942 if (to
< FIRST_REF_NODE
&& from
< FIRST_REF_NODE
)
944 bitmap_set_bit (graph
->succs
[from
], to
);
/* Return true if {DEST, SRC} is an existing graph edge in GRAPH.  */
954 valid_graph_edge (constraint_graph_t graph
, unsigned int src
,
957 return (graph
->succs
[dest
]
958 && bitmap_bit_p (graph
->succs
[dest
], src
));
961 /* Initialize the constraint graph structure to contain SIZE nodes. */
964 init_graph (unsigned int size
)
968 graph
= XCNEW (struct constraint_graph
);
970 graph
->succs
= XCNEWVEC (bitmap
, graph
->size
);
971 graph
->indirect_cycles
= XNEWVEC (int, graph
->size
);
972 graph
->rep
= XNEWVEC (unsigned int, graph
->size
);
973 graph
->complex = XCNEWVEC (VEC(constraint_t
, heap
) *, size
);
974 graph
->pe
= XCNEWVEC (unsigned int, graph
->size
);
975 graph
->pe_rep
= XNEWVEC (int, graph
->size
);
977 for (j
= 0; j
< graph
->size
; j
++)
980 graph
->pe_rep
[j
] = -1;
981 graph
->indirect_cycles
[j
] = -1;
985 /* Build the constraint graph, adding only predecessor edges right now. */
988 build_pred_graph (void)
994 graph
->implicit_preds
= XCNEWVEC (bitmap
, graph
->size
);
995 graph
->preds
= XCNEWVEC (bitmap
, graph
->size
);
996 graph
->pointer_label
= XCNEWVEC (unsigned int, graph
->size
);
997 graph
->loc_label
= XCNEWVEC (unsigned int, graph
->size
);
998 graph
->pointed_by
= XCNEWVEC (bitmap
, graph
->size
);
999 graph
->points_to
= XCNEWVEC (bitmap
, graph
->size
);
1000 graph
->eq_rep
= XNEWVEC (int, graph
->size
);
1001 graph
->direct_nodes
= sbitmap_alloc (graph
->size
);
1002 graph
->pt_used
= sbitmap_alloc (graph
->size
);
1003 graph
->address_taken
= BITMAP_ALLOC (&predbitmap_obstack
);
1004 graph
->number_incoming
= XCNEWVEC (unsigned int, graph
->size
);
1005 sbitmap_zero (graph
->direct_nodes
);
1006 sbitmap_zero (graph
->pt_used
);
1008 for (j
= 0; j
< FIRST_REF_NODE
; j
++)
1010 if (!get_varinfo (j
)->is_special_var
)
1011 SET_BIT (graph
->direct_nodes
, j
);
1014 for (j
= 0; j
< graph
->size
; j
++)
1015 graph
->eq_rep
[j
] = -1;
1017 for (j
= 0; j
< VEC_length (varinfo_t
, varmap
); j
++)
1018 graph
->indirect_cycles
[j
] = -1;
1020 for (i
= 0; VEC_iterate (constraint_t
, constraints
, i
, c
); i
++)
1022 struct constraint_expr lhs
= c
->lhs
;
1023 struct constraint_expr rhs
= c
->rhs
;
1024 unsigned int lhsvar
= get_varinfo_fc (lhs
.var
)->id
;
1025 unsigned int rhsvar
= get_varinfo_fc (rhs
.var
)->id
;
1027 if (lhs
.type
== DEREF
)
1030 if (rhs
.offset
== 0 && lhs
.offset
== 0 && rhs
.type
== SCALAR
)
1031 add_pred_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1033 else if (rhs
.type
== DEREF
)
1036 if (rhs
.offset
== 0 && lhs
.offset
== 0 && lhs
.type
== SCALAR
)
1037 add_pred_graph_edge (graph
, lhsvar
, FIRST_REF_NODE
+ rhsvar
);
1039 RESET_BIT (graph
->direct_nodes
, lhsvar
);
1041 else if (rhs
.type
== ADDRESSOF
)
1044 if (graph
->points_to
[lhsvar
] == NULL
)
1045 graph
->points_to
[lhsvar
] = BITMAP_ALLOC (&predbitmap_obstack
);
1046 bitmap_set_bit (graph
->points_to
[lhsvar
], rhsvar
);
1048 if (graph
->pointed_by
[rhsvar
] == NULL
)
1049 graph
->pointed_by
[rhsvar
] = BITMAP_ALLOC (&predbitmap_obstack
);
1050 bitmap_set_bit (graph
->pointed_by
[rhsvar
], lhsvar
);
1052 /* Implicitly, *x = y */
1053 add_implicit_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1055 RESET_BIT (graph
->direct_nodes
, rhsvar
);
1056 bitmap_set_bit (graph
->address_taken
, rhsvar
);
1058 else if (lhsvar
> anything_id
1059 && lhsvar
!= rhsvar
&& lhs
.offset
== 0 && rhs
.offset
== 0)
1062 add_pred_graph_edge (graph
, lhsvar
, rhsvar
);
1063 /* Implicitly, *x = *y */
1064 add_implicit_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
,
1065 FIRST_REF_NODE
+ rhsvar
);
1067 else if (lhs
.offset
!= 0 || rhs
.offset
!= 0)
1069 if (rhs
.offset
!= 0)
1070 RESET_BIT (graph
->direct_nodes
, lhs
.var
);
1071 else if (lhs
.offset
!= 0)
1072 RESET_BIT (graph
->direct_nodes
, rhs
.var
);
1077 /* Build the constraint graph, adding successor edges. */
1080 build_succ_graph (void)
1085 for (i
= 0; VEC_iterate (constraint_t
, constraints
, i
, c
); i
++)
1087 struct constraint_expr lhs
;
1088 struct constraint_expr rhs
;
1089 unsigned int lhsvar
;
1090 unsigned int rhsvar
;
1097 lhsvar
= find (get_varinfo_fc (lhs
.var
)->id
);
1098 rhsvar
= find (get_varinfo_fc (rhs
.var
)->id
);
1100 if (lhs
.type
== DEREF
)
1102 if (rhs
.offset
== 0 && lhs
.offset
== 0 && rhs
.type
== SCALAR
)
1103 add_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1105 else if (rhs
.type
== DEREF
)
1107 if (rhs
.offset
== 0 && lhs
.offset
== 0 && lhs
.type
== SCALAR
)
1108 add_graph_edge (graph
, lhsvar
, FIRST_REF_NODE
+ rhsvar
);
1110 else if (rhs
.type
== ADDRESSOF
)
1113 gcc_assert (find (get_varinfo_fc (rhs
.var
)->id
)
1114 == get_varinfo_fc (rhs
.var
)->id
);
1115 bitmap_set_bit (get_varinfo (lhsvar
)->solution
, rhsvar
);
1117 else if (lhsvar
> anything_id
1118 && lhsvar
!= rhsvar
&& lhs
.offset
== 0 && rhs
.offset
== 0)
1120 add_graph_edge (graph
, lhsvar
, rhsvar
);
1126 /* Changed variables on the last iteration. */
1127 static unsigned int changed_count
;
1128 static sbitmap changed
;
1130 DEF_VEC_I(unsigned);
1131 DEF_VEC_ALLOC_I(unsigned,heap
);
1134 /* Strongly Connected Component visitation info. */
1141 unsigned int *node_mapping
;
1143 VEC(unsigned,heap
) *scc_stack
;
1147 /* Recursive routine to find strongly connected components in GRAPH.
1148 SI is the SCC info to store the information in, and N is the id of current
1149 graph node we are processing.
1151 This is Tarjan's strongly connected component finding algorithm, as
1152 modified by Nuutila to keep only non-root nodes on the stack.
1153 The algorithm can be found in "On finding the strongly connected
   components in a directed graph" by Esko Nuutila and Eljas
1155 Soisalon-Soininen, in Information Processing Letters volume 49,
1156 number 1, pages 9-14. */
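
/* As a small illustration (hypothetical variables): the copy
   constraints a = b, b = c and c = a produce the edges b -> a, c -> b
   and a -> c, which form a cycle.  scc_visit detects the resulting
   strongly connected component and unites its nodes, so a single
   solution set is propagated for all of them.  */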
1159 scc_visit (constraint_graph_t graph
, struct scc_info
*si
, unsigned int n
)
1163 unsigned int my_dfs
;
1165 SET_BIT (si
->visited
, n
);
1166 si
->dfs
[n
] = si
->current_index
++;
1167 my_dfs
= si
->dfs
[n
];
1169 /* Visit all the successors. */
1170 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[n
], 0, i
, bi
)
1174 if (i
> LAST_REF_NODE
)
1178 if (TEST_BIT (si
->deleted
, w
))
1181 if (!TEST_BIT (si
->visited
, w
))
1182 scc_visit (graph
, si
, w
);
1184 unsigned int t
= find (w
);
1185 unsigned int nnode
= find (n
);
1186 gcc_assert (nnode
== n
);
1188 if (si
->dfs
[t
] < si
->dfs
[nnode
])
1189 si
->dfs
[n
] = si
->dfs
[t
];
1193 /* See if any components have been identified. */
1194 if (si
->dfs
[n
] == my_dfs
)
1196 if (VEC_length (unsigned, si
->scc_stack
) > 0
1197 && si
->dfs
[VEC_last (unsigned, si
->scc_stack
)] >= my_dfs
)
1199 bitmap scc
= BITMAP_ALLOC (NULL
);
1200 bool have_ref_node
= n
>= FIRST_REF_NODE
;
1201 unsigned int lowest_node
;
1204 bitmap_set_bit (scc
, n
);
1206 while (VEC_length (unsigned, si
->scc_stack
) != 0
1207 && si
->dfs
[VEC_last (unsigned, si
->scc_stack
)] >= my_dfs
)
1209 unsigned int w
= VEC_pop (unsigned, si
->scc_stack
);
1211 bitmap_set_bit (scc
, w
);
1212 if (w
>= FIRST_REF_NODE
)
1213 have_ref_node
= true;
1216 lowest_node
= bitmap_first_set_bit (scc
);
1217 gcc_assert (lowest_node
< FIRST_REF_NODE
);
1219 /* Collapse the SCC nodes into a single node, and mark the
1221 EXECUTE_IF_SET_IN_BITMAP (scc
, 0, i
, bi
)
1223 if (i
< FIRST_REF_NODE
)
1225 if (unite (lowest_node
, i
))
1226 unify_nodes (graph
, lowest_node
, i
, false);
1230 unite (lowest_node
, i
);
1231 graph
->indirect_cycles
[i
- FIRST_REF_NODE
] = lowest_node
;
1235 SET_BIT (si
->deleted
, n
);
1238 VEC_safe_push (unsigned, heap
, si
->scc_stack
, n
);
1241 /* Unify node FROM into node TO, updating the changed count if
1242 necessary when UPDATE_CHANGED is true. */
1245 unify_nodes (constraint_graph_t graph
, unsigned int to
, unsigned int from
,
1246 bool update_changed
)
1249 gcc_assert (to
!= from
&& find (to
) == to
);
1250 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1251 fprintf (dump_file
, "Unifying %s to %s\n",
1252 get_varinfo (from
)->name
,
1253 get_varinfo (to
)->name
);
1256 stats
.unified_vars_dynamic
++;
1258 stats
.unified_vars_static
++;
1260 merge_graph_nodes (graph
, to
, from
);
1261 merge_node_constraints (graph
, to
, from
);
1263 if (get_varinfo (from
)->no_tbaa_pruning
)
1264 get_varinfo (to
)->no_tbaa_pruning
= true;
1266 /* Mark TO as changed if FROM was changed. If TO was already marked
1267 as changed, decrease the changed count. */
1269 if (update_changed
&& TEST_BIT (changed
, from
))
1271 RESET_BIT (changed
, from
);
1272 if (!TEST_BIT (changed
, to
))
1273 SET_BIT (changed
, to
);
1276 gcc_assert (changed_count
> 0);
1280 if (get_varinfo (from
)->solution
)
1282 /* If the solution changes because of the merging, we need to mark
1283 the variable as changed. */
1284 if (bitmap_ior_into (get_varinfo (to
)->solution
,
1285 get_varinfo (from
)->solution
))
1287 if (update_changed
&& !TEST_BIT (changed
, to
))
1289 SET_BIT (changed
, to
);
1294 BITMAP_FREE (get_varinfo (from
)->solution
);
1295 BITMAP_FREE (get_varinfo (from
)->oldsolution
);
1297 if (stats
.iterations
> 0)
1299 BITMAP_FREE (get_varinfo (to
)->oldsolution
);
1300 get_varinfo (to
)->oldsolution
= BITMAP_ALLOC (&oldpta_obstack
);
1303 if (valid_graph_edge (graph
, to
, to
))
1305 if (graph
->succs
[to
])
1306 bitmap_clear_bit (graph
->succs
[to
], to
);
1310 /* Information needed to compute the topological ordering of a graph. */
1314 /* sbitmap of visited nodes. */
  /* Array that stores the topological order of the graph, *in
     reverse*.  */
  VEC(unsigned,heap) *topo_order;
1322 /* Initialize and return a topological info structure. */
1324 static struct topo_info
*
1325 init_topo_info (void)
1327 size_t size
= graph
->size
;
1328 struct topo_info
*ti
= XNEW (struct topo_info
);
1329 ti
->visited
= sbitmap_alloc (size
);
1330 sbitmap_zero (ti
->visited
);
1331 ti
->topo_order
= VEC_alloc (unsigned, heap
, 1);
1336 /* Free the topological sort info pointed to by TI. */
1339 free_topo_info (struct topo_info
*ti
)
1341 sbitmap_free (ti
->visited
);
1342 VEC_free (unsigned, heap
, ti
->topo_order
);
1346 /* Visit the graph in topological order, and store the order in the
1347 topo_info structure. */
1350 topo_visit (constraint_graph_t graph
, struct topo_info
*ti
,
1356 SET_BIT (ti
->visited
, n
);
1358 if (graph
->succs
[n
])
1359 EXECUTE_IF_SET_IN_BITMAP (graph
->succs
[n
], 0, j
, bi
)
1361 if (!TEST_BIT (ti
->visited
, j
))
1362 topo_visit (graph
, ti
, j
);
1365 VEC_safe_push (unsigned, heap
, ti
->topo_order
, n
);
1368 /* Return true if variable N + OFFSET is a legal field of N. */
1371 type_safe (unsigned int n
, unsigned HOST_WIDE_INT
*offset
)
1373 varinfo_t ninfo
= get_varinfo (n
);
  /* For things we've globbed to single variables, any offset into the
     variable acts like the entire variable, so that it becomes offset
     0.  */
1378 if (ninfo
->is_special_var
1379 || ninfo
->is_artificial_var
1380 || ninfo
->is_unknown_size_var
)
1385 return (get_varinfo (n
)->offset
+ *offset
) < get_varinfo (n
)->fullsize
;
1388 /* Process a constraint C that represents x = *y, using DELTA as the
1389 starting solution. */
1392 do_sd_constraint (constraint_graph_t graph
, constraint_t c
,
1395 unsigned int lhs
= c
->lhs
.var
;
1397 bitmap sol
= get_varinfo (lhs
)->solution
;
1401 if (bitmap_bit_p (delta
, anything_id
))
1403 flag
= !bitmap_bit_p (sol
, anything_id
);
1405 bitmap_set_bit (sol
, anything_id
);
1408 /* For each variable j in delta (Sol(y)), add
1409 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1410 EXECUTE_IF_SET_IN_BITMAP (delta
, 0, j
, bi
)
1412 unsigned HOST_WIDE_INT roffset
= c
->rhs
.offset
;
1413 if (type_safe (j
, &roffset
))
1416 unsigned HOST_WIDE_INT fieldoffset
= get_varinfo (j
)->offset
+ roffset
;
1419 v
= first_vi_for_offset (get_varinfo (j
), fieldoffset
);
1424 /* Adding edges from the special vars is pointless.
1425 They don't have sets that can change. */
1426 if (get_varinfo (t
) ->is_special_var
)
1427 flag
|= bitmap_ior_into (sol
, get_varinfo (t
)->solution
);
1428 else if (add_graph_edge (graph
, lhs
, t
))
1429 flag
|= bitmap_ior_into (sol
, get_varinfo (t
)->solution
);
1434 /* If the LHS solution changed, mark the var as changed. */
1437 get_varinfo (lhs
)->solution
= sol
;
1438 if (!TEST_BIT (changed
, lhs
))
1440 SET_BIT (changed
, lhs
);
1446 /* Process a constraint C that represents *x = y. */
1449 do_ds_constraint (constraint_t c
, bitmap delta
)
1451 unsigned int rhs
= c
->rhs
.var
;
1452 bitmap sol
= get_varinfo (rhs
)->solution
;
1456 if (bitmap_bit_p (sol
, anything_id
))
1458 EXECUTE_IF_SET_IN_BITMAP (delta
, 0, j
, bi
)
1460 varinfo_t jvi
= get_varinfo (j
);
1462 unsigned int loff
= c
->lhs
.offset
;
1463 unsigned HOST_WIDE_INT fieldoffset
= jvi
->offset
+ loff
;
1466 v
= first_vi_for_offset (get_varinfo (j
), fieldoffset
);
1471 if (!bitmap_bit_p (get_varinfo (t
)->solution
, anything_id
))
1473 bitmap_set_bit (get_varinfo (t
)->solution
, anything_id
);
1474 if (!TEST_BIT (changed
, t
))
1476 SET_BIT (changed
, t
);
1484 /* For each member j of delta (Sol(x)), add an edge from y to j and
1485 union Sol(y) into Sol(j) */
1486 EXECUTE_IF_SET_IN_BITMAP (delta
, 0, j
, bi
)
1488 unsigned HOST_WIDE_INT loff
= c
->lhs
.offset
;
1489 if (type_safe (j
, &loff
) && !(get_varinfo (j
)->is_special_var
))
1493 unsigned HOST_WIDE_INT fieldoffset
= get_varinfo (j
)->offset
+ loff
;
1496 v
= first_vi_for_offset (get_varinfo (j
), fieldoffset
);
1500 tmp
= get_varinfo (t
)->solution
;
1502 if (set_union_with_increment (tmp
, sol
, 0))
1504 get_varinfo (t
)->solution
= tmp
;
1506 sol
= get_varinfo (rhs
)->solution
;
1507 if (!TEST_BIT (changed
, t
))
1509 SET_BIT (changed
, t
);
/* Handle a non-simple (simple meaning requires no iteration)
   constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets
   involved).  */
1521 do_complex_constraint (constraint_graph_t graph
, constraint_t c
, bitmap delta
)
1523 if (c
->lhs
.type
== DEREF
)
1525 if (c
->rhs
.type
== ADDRESSOF
)
1532 do_ds_constraint (c
, delta
);
1535 else if (c
->rhs
.type
== DEREF
)
1538 if (!(get_varinfo (c
->lhs
.var
)->is_special_var
))
1539 do_sd_constraint (graph
, c
, delta
);
1547 gcc_assert (c
->rhs
.type
== SCALAR
&& c
->lhs
.type
== SCALAR
);
1548 solution
= get_varinfo (c
->rhs
.var
)->solution
;
1549 tmp
= get_varinfo (c
->lhs
.var
)->solution
;
1551 flag
= set_union_with_increment (tmp
, solution
, c
->rhs
.offset
);
1555 get_varinfo (c
->lhs
.var
)->solution
= tmp
;
1556 if (!TEST_BIT (changed
, c
->lhs
.var
))
1558 SET_BIT (changed
, c
->lhs
.var
);
1565 /* Initialize and return a new SCC info structure. */
1567 static struct scc_info
*
1568 init_scc_info (size_t size
)
1570 struct scc_info
*si
= XNEW (struct scc_info
);
1573 si
->current_index
= 0;
1574 si
->visited
= sbitmap_alloc (size
);
1575 sbitmap_zero (si
->visited
);
1576 si
->deleted
= sbitmap_alloc (size
);
1577 sbitmap_zero (si
->deleted
);
1578 si
->node_mapping
= XNEWVEC (unsigned int, size
);
1579 si
->dfs
= XCNEWVEC (unsigned int, size
);
1581 for (i
= 0; i
< size
; i
++)
1582 si
->node_mapping
[i
] = i
;
1584 si
->scc_stack
= VEC_alloc (unsigned, heap
, 1);
1588 /* Free an SCC info structure pointed to by SI */
1591 free_scc_info (struct scc_info
*si
)
1593 sbitmap_free (si
->visited
);
1594 sbitmap_free (si
->deleted
);
1595 free (si
->node_mapping
);
1597 VEC_free (unsigned, heap
, si
->scc_stack
);
1602 /* Find indirect cycles in GRAPH that occur, using strongly connected
1603 components, and note them in the indirect cycles map.
1605 This technique comes from Ben Hardekopf and Calvin Lin,
1606 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1607 Lines of Code", submitted to PLDI 2007. */
1610 find_indirect_cycles (constraint_graph_t graph
)
1613 unsigned int size
= graph
->size
;
1614 struct scc_info
*si
= init_scc_info (size
);
1616 for (i
= 0; i
< MIN (LAST_REF_NODE
, size
); i
++ )
1617 if (!TEST_BIT (si
->visited
, i
) && find (i
) == i
)
1618 scc_visit (graph
, si
, i
);
1623 /* Compute a topological ordering for GRAPH, and store the result in the
1624 topo_info structure TI. */
1627 compute_topo_order (constraint_graph_t graph
,
1628 struct topo_info
*ti
)
1631 unsigned int size
= graph
->size
;
1633 for (i
= 0; i
!= size
; ++i
)
1634 if (!TEST_BIT (ti
->visited
, i
) && find (i
) == i
)
1635 topo_visit (graph
, ti
, i
);
/* Structure used for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
1643 unsigned int equivalence_class
;
1646 } *equiv_class_label_t
;
1647 typedef const struct equiv_class_label
*const_equiv_class_label_t
;
/* A hashtable for mapping a bitmap of labels->pointer equivalence
   class.  */
static htab_t pointer_equiv_class_table;

/* A hashtable for mapping a bitmap of labels->location equivalence
   class.  */
static htab_t location_equiv_class_table;
/* Hash function for an equiv_class_label_t.  */
1660 equiv_class_label_hash (const void *p
)
1662 const_equiv_class_label_t
const ecl
= (const_equiv_class_label_t
) p
;
1663 return ecl
->hashcode
;
1666 /* Equality function for two equiv_class_label_t's. */
1669 equiv_class_label_eq (const void *p1
, const void *p2
)
1671 const_equiv_class_label_t
const eql1
= (const_equiv_class_label_t
) p1
;
1672 const_equiv_class_label_t
const eql2
= (const_equiv_class_label_t
) p2
;
1673 return bitmap_equal_p (eql1
->labels
, eql2
->labels
);
/* Lookup an equivalence class in TABLE by the bitmap of LABELS it
   consists of.  */

static unsigned int
equiv_class_lookup (htab_t table, bitmap labels)
1683 struct equiv_class_label ecl
;
1685 ecl
.labels
= labels
;
1686 ecl
.hashcode
= bitmap_hash (labels
);
1688 slot
= htab_find_slot_with_hash (table
, &ecl
,
1689 ecl
.hashcode
, NO_INSERT
);
1693 return ((equiv_class_label_t
) *slot
)->equivalence_class
;
/* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
   to TABLE.  */
1701 equiv_class_add (htab_t table
, unsigned int equivalence_class
,
1705 equiv_class_label_t ecl
= XNEW (struct equiv_class_label
);
1707 ecl
->labels
= labels
;
1708 ecl
->equivalence_class
= equivalence_class
;
1709 ecl
->hashcode
= bitmap_hash (labels
);
1711 slot
= htab_find_slot_with_hash (table
, ecl
,
1712 ecl
->hashcode
, INSERT
);
1713 gcc_assert (!*slot
);
1714 *slot
= (void *) ecl
;
/* Perform offline variable substitution.

   This is a worst case quadratic time way of identifying variables
   that must have equivalent points-to sets, including those caused by
   static cycles, and single entry subgraphs, in the constraint graph.

   The technique is described in "Exploiting Pointer and Location
   Equivalence to Optimize Pointer Analysis", in the 14th International
   Static Analysis Symposium (SAS), August 2007.  It is known as the
   "HU" algorithm, and is equivalent to value numbering the collapsed
   constraint graph including evaluating unions.

   The general method of finding equivalence classes is as follows:
   Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
   Initialize all non-REF nodes to be direct nodes.
   For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
   variable}.
   For each constraint containing the dereference, we also do the same
   thing.

   We then compute SCC's in the graph and unify nodes in the same SCC,
   including pts sets.

   For each non-collapsed node x:
    Visit all unvisited explicit incoming edges.
    Ignoring all non-pointers, set pts(x) = Union of pts(y) for y
    where y -> x.
    Lookup the equivalence class for pts(x).
     If we found one, equivalence_class(x) = found class.
     Otherwise, equivalence_class(x) = new class, and new_class is
    added to the lookup table.

   All direct nodes with the same equivalence class can be replaced
   with a single representative node.
   All unlabeled nodes (label == 0) are not pointers and all edges
   involving them can be eliminated.
   We perform these optimizations during rewrite_constraints.

   In addition to pointer equivalence class finding, we also perform
   location equivalence class finding.  This is the set of variables
   that always appear together in points-to sets.  We use this to
   compress the size of the points-to sets.  */
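
/* A small hypothetical example of the labeling: for the constraints
   b = &x, c = b and d = c, the offline phase puts a fresh variable in
   pts(b), and since c and d only receive b's set, pts(c) and pts(d)
   equal pts(b).  All three nodes therefore obtain the same pointer
   equivalence label and can be represented by a single node, while a
   node whose pts stays empty is labeled 0 and treated as a
   non-pointer.  */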
1760 /* Current maximum pointer equivalence class id. */
1761 static int pointer_equiv_class
;
1763 /* Current maximum location equivalence class id. */
1764 static int location_equiv_class
;
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  */
1770 condense_visit (constraint_graph_t graph
, struct scc_info
*si
, unsigned int n
)
1774 unsigned int my_dfs
;
1776 gcc_assert (si
->node_mapping
[n
] == n
);
1777 SET_BIT (si
->visited
, n
);
1778 si
->dfs
[n
] = si
->current_index
++;
1779 my_dfs
= si
->dfs
[n
];
1781 /* Visit all the successors. */
1782 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[n
], 0, i
, bi
)
1784 unsigned int w
= si
->node_mapping
[i
];
1786 if (TEST_BIT (si
->deleted
, w
))
1789 if (!TEST_BIT (si
->visited
, w
))
1790 condense_visit (graph
, si
, w
);
1792 unsigned int t
= si
->node_mapping
[w
];
1793 unsigned int nnode
= si
->node_mapping
[n
];
1794 gcc_assert (nnode
== n
);
1796 if (si
->dfs
[t
] < si
->dfs
[nnode
])
1797 si
->dfs
[n
] = si
->dfs
[t
];
1801 /* Visit all the implicit predecessors. */
1802 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->implicit_preds
[n
], 0, i
, bi
)
1804 unsigned int w
= si
->node_mapping
[i
];
1806 if (TEST_BIT (si
->deleted
, w
))
1809 if (!TEST_BIT (si
->visited
, w
))
1810 condense_visit (graph
, si
, w
);
1812 unsigned int t
= si
->node_mapping
[w
];
1813 unsigned int nnode
= si
->node_mapping
[n
];
1814 gcc_assert (nnode
== n
);
1816 if (si
->dfs
[t
] < si
->dfs
[nnode
])
1817 si
->dfs
[n
] = si
->dfs
[t
];
1821 /* See if any components have been identified. */
1822 if (si
->dfs
[n
] == my_dfs
)
1824 while (VEC_length (unsigned, si
->scc_stack
) != 0
1825 && si
->dfs
[VEC_last (unsigned, si
->scc_stack
)] >= my_dfs
)
1827 unsigned int w
= VEC_pop (unsigned, si
->scc_stack
);
1828 si
->node_mapping
[w
] = n
;
1830 if (!TEST_BIT (graph
->direct_nodes
, w
))
1831 RESET_BIT (graph
->direct_nodes
, n
);
1833 /* Unify our nodes. */
1834 if (graph
->preds
[w
])
1836 if (!graph
->preds
[n
])
1837 graph
->preds
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
1838 bitmap_ior_into (graph
->preds
[n
], graph
->preds
[w
]);
1840 if (graph
->implicit_preds
[w
])
1842 if (!graph
->implicit_preds
[n
])
1843 graph
->implicit_preds
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
1844 bitmap_ior_into (graph
->implicit_preds
[n
],
1845 graph
->implicit_preds
[w
]);
1847 if (graph
->points_to
[w
])
1849 if (!graph
->points_to
[n
])
1850 graph
->points_to
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
1851 bitmap_ior_into (graph
->points_to
[n
],
1852 graph
->points_to
[w
]);
1854 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[n
], 0, i
, bi
)
1856 unsigned int rep
= si
->node_mapping
[i
];
1857 graph
->number_incoming
[rep
]++;
1860 SET_BIT (si
->deleted
, n
);
1863 VEC_safe_push (unsigned, heap
, si
->scc_stack
, n
);
1866 /* Label pointer equivalences. */
1869 label_visit (constraint_graph_t graph
, struct scc_info
*si
, unsigned int n
)
1873 SET_BIT (si
->visited
, n
);
1875 if (!graph
->points_to
[n
])
1876 graph
->points_to
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
  /* Label and union our incoming edges' points-to sets.  */
1879 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[n
], 0, i
, bi
)
1881 unsigned int w
= si
->node_mapping
[i
];
1882 if (!TEST_BIT (si
->visited
, w
))
1883 label_visit (graph
, si
, w
);
1885 /* Skip unused edges */
1886 if (w
== n
|| graph
->pointer_label
[w
] == 0)
1888 graph
->number_incoming
[w
]--;
1891 if (graph
->points_to
[w
])
1892 bitmap_ior_into(graph
->points_to
[n
], graph
->points_to
[w
]);
      /* If all incoming edges to w have been processed and
	 graph->points_to[w] was not stored in the hash table, we can
	 free it.  */
1897 graph
->number_incoming
[w
]--;
1898 if (!graph
->number_incoming
[w
] && !TEST_BIT (graph
->pt_used
, w
))
1900 BITMAP_FREE (graph
->points_to
[w
]);
1903 /* Indirect nodes get fresh variables. */
1904 if (!TEST_BIT (graph
->direct_nodes
, n
))
1905 bitmap_set_bit (graph
->points_to
[n
], FIRST_REF_NODE
+ n
);
1907 if (!bitmap_empty_p (graph
->points_to
[n
]))
1909 unsigned int label
= equiv_class_lookup (pointer_equiv_class_table
,
1910 graph
->points_to
[n
]);
1913 SET_BIT (graph
->pt_used
, n
);
1914 label
= pointer_equiv_class
++;
1915 equiv_class_add (pointer_equiv_class_table
,
1916 label
, graph
->points_to
[n
]);
1918 graph
->pointer_label
[n
] = label
;
1922 /* Perform offline variable substitution, discovering equivalence
1923 classes, and eliminating non-pointer variables. */
1925 static struct scc_info
*
1926 perform_var_substitution (constraint_graph_t graph
)
1929 unsigned int size
= graph
->size
;
1930 struct scc_info
*si
= init_scc_info (size
);
1932 bitmap_obstack_initialize (&iteration_obstack
);
1933 pointer_equiv_class_table
= htab_create (511, equiv_class_label_hash
,
1934 equiv_class_label_eq
, free
);
1935 location_equiv_class_table
= htab_create (511, equiv_class_label_hash
,
1936 equiv_class_label_eq
, free
);
1937 pointer_equiv_class
= 1;
1938 location_equiv_class
= 1;
1940 /* Condense the nodes, which means to find SCC's, count incoming
1941 predecessors, and unite nodes in SCC's. */
1942 for (i
= 0; i
< FIRST_REF_NODE
; i
++)
1943 if (!TEST_BIT (si
->visited
, si
->node_mapping
[i
]))
1944 condense_visit (graph
, si
, si
->node_mapping
[i
]);
1946 sbitmap_zero (si
->visited
);
  /* Actually label the nodes for pointer equivalences.  */
1948 for (i
= 0; i
< FIRST_REF_NODE
; i
++)
1949 if (!TEST_BIT (si
->visited
, si
->node_mapping
[i
]))
1950 label_visit (graph
, si
, si
->node_mapping
[i
]);
1952 /* Calculate location equivalence labels. */
1953 for (i
= 0; i
< FIRST_REF_NODE
; i
++)
1960 if (!graph
->pointed_by
[i
])
1962 pointed_by
= BITMAP_ALLOC (&iteration_obstack
);
      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
1966 EXECUTE_IF_SET_IN_BITMAP (graph
->pointed_by
[i
], 0, j
, bi
)
1968 bitmap_set_bit (pointed_by
,
1969 graph
->pointer_label
[si
->node_mapping
[j
]]);
1971 /* The original pointed_by is now dead. */
1972 BITMAP_FREE (graph
->pointed_by
[i
]);
      /* Look up the location equivalence label if one exists, or make
	 a new one otherwise.  */
1976 label
= equiv_class_lookup (location_equiv_class_table
,
1980 label
= location_equiv_class
++;
1981 equiv_class_add (location_equiv_class_table
,
1986 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1987 fprintf (dump_file
, "Found location equivalence for node %s\n",
1988 get_varinfo (i
)->name
);
1989 BITMAP_FREE (pointed_by
);
1991 graph
->loc_label
[i
] = label
;
1995 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1996 for (i
= 0; i
< FIRST_REF_NODE
; i
++)
1998 bool direct_node
= TEST_BIT (graph
->direct_nodes
, i
);
2000 "Equivalence classes for %s node id %d:%s are pointer: %d"
2002 direct_node
? "Direct node" : "Indirect node", i
,
2003 get_varinfo (i
)->name
,
2004 graph
->pointer_label
[si
->node_mapping
[i
]],
2005 graph
->loc_label
[si
->node_mapping
[i
]]);
2008 /* Quickly eliminate our non-pointer variables. */
2010 for (i
= 0; i
< FIRST_REF_NODE
; i
++)
2012 unsigned int node
= si
->node_mapping
[i
];
2014 if (graph
->pointer_label
[node
] == 0)
2016 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2018 "%s is a non-pointer variable, eliminating edges.\n",
2019 get_varinfo (node
)->name
);
2020 stats
.nonpointer_vars
++;
2021 clear_edges_for_node (graph
, node
);
/* Free information that was only necessary for variable
   substitution.  */
2032 free_var_substitution_info (struct scc_info
*si
)
2035 free (graph
->pointer_label
);
2036 free (graph
->loc_label
);
2037 free (graph
->pointed_by
);
2038 free (graph
->points_to
);
2039 free (graph
->number_incoming
);
2040 free (graph
->eq_rep
);
2041 sbitmap_free (graph
->direct_nodes
);
2042 sbitmap_free (graph
->pt_used
);
2043 htab_delete (pointer_equiv_class_table
);
2044 htab_delete (location_equiv_class_table
);
2045 bitmap_obstack_release (&iteration_obstack
);
2048 /* Return an existing node that is equivalent to NODE, which has
2049 equivalence class LABEL, if one exists. Return NODE otherwise. */
2052 find_equivalent_node (constraint_graph_t graph
,
2053 unsigned int node
, unsigned int label
)
2055 /* If the address version of this variable is unused, we can
2056 substitute it for anything else with the same label.
2057 Otherwise, we know the pointers are equivalent, but not the
2058 locations, and we can unite them later. */
2060 if (!bitmap_bit_p (graph
->address_taken
, node
))
2062 gcc_assert (label
< graph
->size
);
2064 if (graph
->eq_rep
[label
] != -1)
2066 /* Unify the two variables since we know they are equivalent. */
2067 if (unite (graph
->eq_rep
[label
], node
))
2068 unify_nodes (graph
, graph
->eq_rep
[label
], node
, false);
2069 return graph
->eq_rep
[label
];
2073 graph
->eq_rep
[label
] = node
;
2074 graph
->pe_rep
[label
] = node
;
2079 gcc_assert (label
< graph
->size
);
2080 graph
->pe
[node
] = label
;
2081 if (graph
->pe_rep
[label
] == -1)
2082 graph
->pe_rep
[label
] = node
;
/* Unite pointer equivalent but not location equivalent nodes in
   GRAPH.  This may only be performed once variable substitution is
   finished.  */
2093 unite_pointer_equivalences (constraint_graph_t graph
)
2097 /* Go through the pointer equivalences and unite them to their
2098 representative, if they aren't already. */
2099 for (i
= 0; i
< FIRST_REF_NODE
; i
++)
2101 unsigned int label
= graph
->pe
[i
];
2104 int label_rep
= graph
->pe_rep
[label
];
2106 if (label_rep
== -1)
2109 label_rep
= find (label_rep
);
2110 if (label_rep
>= 0 && unite (label_rep
, find (i
)))
2111 unify_nodes (graph
, label_rep
, i
, false);
2116 /* Move complex constraints to the GRAPH nodes they belong to. */
2119 move_complex_constraints (constraint_graph_t graph
)
2124 for (i
= 0; VEC_iterate (constraint_t
, constraints
, i
, c
); i
++)
2128 struct constraint_expr lhs
= c
->lhs
;
2129 struct constraint_expr rhs
= c
->rhs
;
2131 if (lhs
.type
== DEREF
)
2133 insert_into_complex (graph
, lhs
.var
, c
);
2135 else if (rhs
.type
== DEREF
)
2137 if (!(get_varinfo (lhs
.var
)->is_special_var
))
2138 insert_into_complex (graph
, rhs
.var
, c
);
2140 else if (rhs
.type
!= ADDRESSOF
&& lhs
.var
> anything_id
2141 && (lhs
.offset
!= 0 || rhs
.offset
!= 0))
2143 insert_into_complex (graph
, rhs
.var
, c
);
2150 /* Optimize and rewrite complex constraints while performing
2151 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2152 result of perform_variable_substitution. */
2155 rewrite_constraints (constraint_graph_t graph
,
2156 struct scc_info
*si
)
2162 for (j
= 0; j
< graph
->size
; j
++)
2163 gcc_assert (find (j
) == j
);
2165 for (i
= 0; VEC_iterate (constraint_t
, constraints
, i
, c
); i
++)
2167 struct constraint_expr lhs
= c
->lhs
;
2168 struct constraint_expr rhs
= c
->rhs
;
2169 unsigned int lhsvar
= find (get_varinfo_fc (lhs
.var
)->id
);
2170 unsigned int rhsvar
= find (get_varinfo_fc (rhs
.var
)->id
);
2171 unsigned int lhsnode
, rhsnode
;
2172 unsigned int lhslabel
, rhslabel
;
2174 lhsnode
= si
->node_mapping
[lhsvar
];
2175 rhsnode
= si
->node_mapping
[rhsvar
];
2176 lhslabel
= graph
->pointer_label
[lhsnode
];
2177 rhslabel
= graph
->pointer_label
[rhsnode
];
      /* See if it is really a non-pointer variable, and if so, ignore
	 this constraint.  */
2183 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2186 fprintf (dump_file
, "%s is a non-pointer variable,"
		       " ignoring constraint:",
2188 get_varinfo (lhs
.var
)->name
);
2189 dump_constraint (dump_file
, c
);
2191 VEC_replace (constraint_t
, constraints
, i
, NULL
);
2197 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2200 fprintf (dump_file
, "%s is a non-pointer variable,"
		       " ignoring constraint:",
2202 get_varinfo (rhs
.var
)->name
);
2203 dump_constraint (dump_file
, c
);
2205 VEC_replace (constraint_t
, constraints
, i
, NULL
);
2209 lhsvar
= find_equivalent_node (graph
, lhsvar
, lhslabel
);
2210 rhsvar
= find_equivalent_node (graph
, rhsvar
, rhslabel
);
2211 c
->lhs
.var
= lhsvar
;
2212 c
->rhs
.var
= rhsvar
;
2217 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2218 part of an SCC, false otherwise. */
2221 eliminate_indirect_cycles (unsigned int node
)
2223 if (graph
->indirect_cycles
[node
] != -1
2224 && !bitmap_empty_p (get_varinfo (node
)->solution
))
2227 VEC(unsigned,heap
) *queue
= NULL
;
2229 unsigned int to
= find (graph
->indirect_cycles
[node
]);
2232 /* We can't touch the solution set and call unify_nodes
2233 at the same time, because unify_nodes is going to do
2234 bitmap unions into it. */
2236 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node
)->solution
, 0, i
, bi
)
2238 if (find (i
) == i
&& i
!= to
)
2241 VEC_safe_push (unsigned, heap
, queue
, i
);
2246 VEC_iterate (unsigned, queue
, queuepos
, i
);
2249 unify_nodes (graph
, to
, i
, true);
2251 VEC_free (unsigned, heap
, queue
);
2257 /* Solve the constraint graph GRAPH using our worklist solver.
2258 This is based on the PW* family of solvers from the "Efficient Field
2259 Sensitive Pointer Analysis for C" paper.
2260 It works by iterating over all the graph nodes, processing the complex
2261 constraints and propagating the copy constraints, until everything stops
2262 changed. This corresponds to steps 6-8 in the solving list given above. */
2265 solve_graph (constraint_graph_t graph
)
2267 unsigned int size
= graph
->size
;
2272 changed
= sbitmap_alloc (size
);
2273 sbitmap_zero (changed
);
2275 /* Mark all initial non-collapsed nodes as changed. */
2276 for (i
= 0; i
< size
; i
++)
2278 varinfo_t ivi
= get_varinfo (i
);
2279 if (find (i
) == i
&& !bitmap_empty_p (ivi
->solution
)
2280 && ((graph
->succs
[i
] && !bitmap_empty_p (graph
->succs
[i
]))
2281 || VEC_length (constraint_t
, graph
->complex[i
]) > 0))
2283 SET_BIT (changed
, i
);
2288 /* Allocate a bitmap to be used to store the changed bits. */
2289 pts
= BITMAP_ALLOC (&pta_obstack
);
2291 while (changed_count
> 0)
2294 struct topo_info
*ti
= init_topo_info ();
2297 bitmap_obstack_initialize (&iteration_obstack
);
2299 compute_topo_order (graph
, ti
);
2301 while (VEC_length (unsigned, ti
->topo_order
) != 0)
2304 i
= VEC_pop (unsigned, ti
->topo_order
);
2306 /* If this variable is not a representative, skip it. */
2310 /* In certain indirect cycle cases, we may merge this
2311 variable to another. */
2312 if (eliminate_indirect_cycles (i
) && find (i
) != i
)
2315 /* If the node has changed, we need to process the
2316 complex constraints and outgoing edges again. */
2317 if (TEST_BIT (changed
, i
))
2322 VEC(constraint_t
,heap
) *complex = graph
->complex[i
];
2323 bool solution_empty
;
2325 RESET_BIT (changed
, i
);
2328 /* Compute the changed set of solution bits. */
2329 bitmap_and_compl (pts
, get_varinfo (i
)->solution
,
2330 get_varinfo (i
)->oldsolution
);
2332 if (bitmap_empty_p (pts
))
2335 bitmap_ior_into (get_varinfo (i
)->oldsolution
, pts
);
2337 solution
= get_varinfo (i
)->solution
;
2338 solution_empty
= bitmap_empty_p (solution
);
2340 /* Process the complex constraints */
2341 for (j
= 0; VEC_iterate (constraint_t
, complex, j
, c
); j
++)
2343 /* XXX: This is going to unsort the constraints in
2344 some cases, which will occasionally add duplicate
2345 constraints during unification. This does not
2346 affect correctness. */
2347 c
->lhs
.var
= find (c
->lhs
.var
);
2348 c
->rhs
.var
= find (c
->rhs
.var
);
2350 /* The only complex constraint that can change our
2351 solution to non-empty, given an empty solution,
2352 is a constraint where the lhs side is receiving
2353 some set from elsewhere. */
2354 if (!solution_empty
|| c
->lhs
.type
!= DEREF
)
2355 do_complex_constraint (graph
, c
, pts
);
2358 solution_empty
= bitmap_empty_p (solution
);
2360 if (!solution_empty
)
2364 /* Propagate solution to all successors. */
2365 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[i
],
2371 unsigned int to
= find (j
);
2372 tmp
= get_varinfo (to
)->solution
;
2375 /* Don't try to propagate to ourselves. */
2379 flag
= set_union_with_increment (tmp
, pts
, 0);
2383 get_varinfo (to
)->solution
= tmp
;
2384 if (!TEST_BIT (changed
, to
))
2386 SET_BIT (changed
, to
);
2394 free_topo_info (ti
);
2395 bitmap_obstack_release (&iteration_obstack
);
2399 sbitmap_free (changed
);
2400 bitmap_obstack_release (&oldpta_obstack
);
2403 /* Map from trees to variable infos. */
2404 static struct pointer_map_t
*vi_for_tree
;
2407 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2410 insert_vi_for_tree (tree t
, varinfo_t vi
)
2412 void **slot
= pointer_map_insert (vi_for_tree
, t
);
2414 gcc_assert (*slot
== NULL
);
2418 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2419 exist in the map, return NULL, otherwise, return the varinfo we found. */
2422 lookup_vi_for_tree (tree t
)
2424 void **slot
= pointer_map_contains (vi_for_tree
, t
);
2428 return (varinfo_t
) *slot
;
2431 /* Return a printable name for DECL */
2434 alias_get_name (tree decl
)
2436 const char *res
= get_name (decl
);
2438 int num_printed
= 0;
2447 if (TREE_CODE (decl
) == SSA_NAME
)
2449 num_printed
= asprintf (&temp
, "%s_%u",
2450 alias_get_name (SSA_NAME_VAR (decl
)),
2451 SSA_NAME_VERSION (decl
));
2453 else if (DECL_P (decl
))
2455 num_printed
= asprintf (&temp
, "D.%u", DECL_UID (decl
));
2457 if (num_printed
> 0)
2459 res
= ggc_strdup (temp
);
2465 /* Find the variable id for tree T in the map.
2466 If T doesn't exist in the map, create an entry for it and return it. */
2469 get_vi_for_tree (tree t
)
2471 void **slot
= pointer_map_contains (vi_for_tree
, t
);
2473 return get_varinfo (create_variable_info_for (t
, alias_get_name (t
)));
2475 return (varinfo_t
) *slot
;
2478 /* Get a constraint expression from an SSA_VAR_P node. */
2480 static struct constraint_expr
2481 get_constraint_exp_from_ssa_var (tree t
)
2483 struct constraint_expr cexpr
;
2485 gcc_assert (SSA_VAR_P (t
) || DECL_P (t
));
2487 /* For parameters, get at the points-to set for the actual parm
2489 if (TREE_CODE (t
) == SSA_NAME
2490 && TREE_CODE (SSA_NAME_VAR (t
)) == PARM_DECL
2491 && SSA_NAME_IS_DEFAULT_DEF (t
))
2492 return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t
));
2494 cexpr
.type
= SCALAR
;
2496 cexpr
.var
= get_vi_for_tree (t
)->id
;
2497 /* If we determine the result is "anything", and we know this is readonly,
2498 say it points to readonly memory instead. */
2499 if (cexpr
.var
== anything_id
&& TREE_READONLY (t
))
2501 cexpr
.type
= ADDRESSOF
;
2502 cexpr
.var
= readonly_id
;
2509 /* Process a completed constraint T, and add it to the constraint
2510 list. FROM_CALL is true if this is a constraint coming from a
2511 call, which means any DEREFs we see are "may-deref's", not
2515 process_constraint_1 (constraint_t t
, bool from_call
)
2517 struct constraint_expr rhs
= t
->rhs
;
2518 struct constraint_expr lhs
= t
->lhs
;
2520 gcc_assert (rhs
.var
< VEC_length (varinfo_t
, varmap
));
2521 gcc_assert (lhs
.var
< VEC_length (varinfo_t
, varmap
));
2525 if (lhs
.type
== DEREF
)
2526 get_varinfo (lhs
.var
)->directly_dereferenced
= true;
2527 if (rhs
.type
== DEREF
)
2528 get_varinfo (rhs
.var
)->directly_dereferenced
= true;
2531 if (!use_field_sensitive
)
2537 /* ANYTHING == ANYTHING is pointless. */
2538 if (lhs
.var
== anything_id
&& rhs
.var
== anything_id
)
2541 /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING) */
2542 else if (lhs
.var
== anything_id
&& lhs
.type
== ADDRESSOF
)
2547 process_constraint_1 (t
, from_call
);
2549 /* This can happen in our IR with things like n->a = *p */
2550 else if (rhs
.type
== DEREF
&& lhs
.type
== DEREF
&& rhs
.var
!= anything_id
)
2552 /* Split into tmp = *rhs, *lhs = tmp */
2553 tree rhsdecl
= get_varinfo (rhs
.var
)->decl
;
2554 tree pointertype
= TREE_TYPE (rhsdecl
);
2555 tree pointedtotype
= TREE_TYPE (pointertype
);
2556 tree tmpvar
= create_tmp_var_raw (pointedtotype
, "doubledereftmp");
2557 struct constraint_expr tmplhs
= get_constraint_exp_from_ssa_var (tmpvar
);
2559 /* If this is an aggregate of known size, we should have passed
2560 this off to do_structure_copy, and it should have broken it
2562 gcc_assert (!AGGREGATE_TYPE_P (pointedtotype
)
2563 || get_varinfo (rhs
.var
)->is_unknown_size_var
);
2565 process_constraint_1 (new_constraint (tmplhs
, rhs
), from_call
);
2566 process_constraint_1 (new_constraint (lhs
, tmplhs
), from_call
);
2568 else if (rhs
.type
== ADDRESSOF
&& lhs
.type
== DEREF
)
2570 /* Split into tmp = &rhs, *lhs = tmp */
2571 tree rhsdecl
= get_varinfo (rhs
.var
)->decl
;
2572 tree pointertype
= TREE_TYPE (rhsdecl
);
2573 tree tmpvar
= create_tmp_var_raw (pointertype
, "derefaddrtmp");
2574 struct constraint_expr tmplhs
= get_constraint_exp_from_ssa_var (tmpvar
);
2576 process_constraint_1 (new_constraint (tmplhs
, rhs
), from_call
);
2577 process_constraint_1 (new_constraint (lhs
, tmplhs
), from_call
);
2581 gcc_assert (rhs
.type
!= ADDRESSOF
|| rhs
.offset
== 0);
2582 VEC_safe_push (constraint_t
, heap
, constraints
, t
);
2587 /* Process constraint T, performing various simplifications and then
2588 adding it to our list of overall constraints. */
2591 process_constraint (constraint_t t
)
2593 process_constraint_1 (t
, false);
2596 /* Return true if T is a variable of a type that could contain
2600 could_have_pointers (tree t
)
2602 tree type
= TREE_TYPE (t
);
2604 if (POINTER_TYPE_P (type
)
2605 || AGGREGATE_TYPE_P (type
)
2606 || TREE_CODE (type
) == COMPLEX_TYPE
)
2612 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2615 static unsigned HOST_WIDE_INT
2616 bitpos_of_field (const tree fdecl
)
2619 if (TREE_CODE (DECL_FIELD_OFFSET (fdecl
)) != INTEGER_CST
2620 || TREE_CODE (DECL_FIELD_BIT_OFFSET (fdecl
)) != INTEGER_CST
)
2623 return (tree_low_cst (DECL_FIELD_OFFSET (fdecl
), 1) * 8)
2624 + tree_low_cst (DECL_FIELD_BIT_OFFSET (fdecl
), 1);
2628 /* Given a COMPONENT_REF T, return the constraint_expr for it. */
2631 get_constraint_for_component_ref (tree t
, VEC(ce_s
, heap
) **results
)
2634 HOST_WIDE_INT bitsize
= -1;
2635 HOST_WIDE_INT bitmaxsize
= -1;
2636 HOST_WIDE_INT bitpos
;
2638 struct constraint_expr
*result
;
2639 unsigned int beforelength
= VEC_length (ce_s
, *results
);
2641 /* Some people like to do cute things like take the address of
2644 while (!SSA_VAR_P (forzero
) && !CONSTANT_CLASS_P (forzero
))
2645 forzero
= TREE_OPERAND (forzero
, 0);
2647 if (CONSTANT_CLASS_P (forzero
) && integer_zerop (forzero
))
2649 struct constraint_expr temp
;
2652 temp
.var
= integer_id
;
2654 VEC_safe_push (ce_s
, heap
, *results
, &temp
);
2658 t
= get_ref_base_and_extent (t
, &bitpos
, &bitsize
, &bitmaxsize
);
2660 get_constraint_for (t
, results
);
2661 result
= VEC_last (ce_s
, *results
);
2662 result
->offset
= bitpos
;
2664 gcc_assert (beforelength
+ 1 == VEC_length (ce_s
, *results
));
2666 /* This can also happen due to weird offsetof type macros. */
2667 if (TREE_CODE (t
) != ADDR_EXPR
&& result
->type
== ADDRESSOF
)
2668 result
->type
= SCALAR
;
2670 if (result
->type
== SCALAR
)
2672 /* In languages like C, you can access one past the end of an
2673 array. You aren't allowed to dereference it, so we can
2674 ignore this constraint. When we handle pointer subtraction,
2675 we may have to do something cute here. */
2677 if (result
->offset
< get_varinfo (result
->var
)->fullsize
2680 /* It's also not true that the constraint will actually start at the
2681 right offset, it may start in some padding. We only care about
2682 setting the constraint to the first actual field it touches, so
2685 for (curr
= get_varinfo (result
->var
); curr
; curr
= curr
->next
)
2687 if (ranges_overlap_p (curr
->offset
, curr
->size
,
2688 result
->offset
, bitmaxsize
))
2690 result
->var
= curr
->id
;
2694 /* assert that we found *some* field there. The user couldn't be
2695 accessing *only* padding. */
2696 /* Still the user could access one past the end of an array
2697 embedded in a struct resulting in accessing *only* padding. */
2698 gcc_assert (curr
|| ref_contains_array_ref (orig_t
));
2700 else if (bitmaxsize
== 0)
2702 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2703 fprintf (dump_file
, "Access to zero-sized part of variable,"
2707 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2708 fprintf (dump_file
, "Access to past the end of variable, ignoring\n");
2712 else if (bitmaxsize
== -1)
2714 /* We can't handle DEREF constraints with unknown size, we'll
2715 get the wrong answer. Punt and return anything. */
2716 result
->var
= anything_id
;
2722 /* Dereference the constraint expression CONS, and return the result.
2723 DEREF (ADDRESSOF) = SCALAR
2724 DEREF (SCALAR) = DEREF
2725 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2726 This is needed so that we can handle dereferencing DEREF constraints. */
2729 do_deref (VEC (ce_s
, heap
) **constraints
)
2731 struct constraint_expr
*c
;
2734 for (i
= 0; VEC_iterate (ce_s
, *constraints
, i
, c
); i
++)
2736 if (c
->type
== SCALAR
)
2738 else if (c
->type
== ADDRESSOF
)
2740 else if (c
->type
== DEREF
)
2742 tree tmpvar
= create_tmp_var_raw (ptr_type_node
, "dereftmp");
2743 struct constraint_expr tmplhs
= get_constraint_exp_from_ssa_var (tmpvar
);
2744 process_constraint (new_constraint (tmplhs
, *c
));
2745 c
->var
= tmplhs
.var
;
2752 /* Given a tree T, return the constraint expression for it. */
2755 get_constraint_for (tree t
, VEC (ce_s
, heap
) **results
)
2757 struct constraint_expr temp
;
2759 /* x = integer is all glommed to a single variable, which doesn't
2760 point to anything by itself. That is, of course, unless it is an
2761 integer constant being treated as a pointer, in which case, we
2762 will return that this is really the addressof anything. This
2763 happens below, since it will fall into the default case. The only
2764 case we know something about an integer treated like a pointer is
2765 when it is the NULL pointer, and then we just say it points to
2767 if (TREE_CODE (t
) == INTEGER_CST
2768 && integer_zerop (t
))
2770 temp
.var
= nothing_id
;
2771 temp
.type
= ADDRESSOF
;
2773 VEC_safe_push (ce_s
, heap
, *results
, &temp
);
2777 /* String constants are read-only. */
2778 if (TREE_CODE (t
) == STRING_CST
)
2780 temp
.var
= readonly_id
;
2783 VEC_safe_push (ce_s
, heap
, *results
, &temp
);
2787 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
2789 case tcc_expression
:
2792 switch (TREE_CODE (t
))
2796 struct constraint_expr
*c
;
2798 tree exp
= TREE_OPERAND (t
, 0);
2799 tree pttype
= TREE_TYPE (TREE_TYPE (t
));
2801 get_constraint_for (exp
, results
);
2804 /* Complex types are special. Taking the address of one
2805 allows you to access either part of it through that
2807 if (VEC_length (ce_s
, *results
) == 1 &&
2808 TREE_CODE (pttype
) == COMPLEX_TYPE
)
2810 struct constraint_expr
*origrhs
;
2812 struct constraint_expr tmp
;
2814 gcc_assert (VEC_length (ce_s
, *results
) == 1);
2815 origrhs
= VEC_last (ce_s
, *results
);
2817 VEC_pop (ce_s
, *results
);
2818 origvar
= get_varinfo (origrhs
->var
);
2819 for (; origvar
; origvar
= origvar
->next
)
2821 tmp
.var
= origvar
->id
;
2822 VEC_safe_push (ce_s
, heap
, *results
, &tmp
);
2826 for (i
= 0; VEC_iterate (ce_s
, *results
, i
, c
); i
++)
2828 if (c
->type
== DEREF
)
2831 c
->type
= ADDRESSOF
;
2837 /* XXX: In interprocedural mode, if we didn't have the
2838 body, we would need to do *each pointer argument =
2840 if (call_expr_flags (t
) & (ECF_MALLOC
| ECF_MAY_BE_ALLOCA
))
2843 tree heapvar
= heapvar_lookup (t
);
2845 if (heapvar
== NULL
)
2847 heapvar
= create_tmp_var_raw (ptr_type_node
, "HEAP");
2848 DECL_EXTERNAL (heapvar
) = 1;
2849 get_var_ann (heapvar
)->is_heapvar
= 1;
2850 if (gimple_referenced_vars (cfun
))
2851 add_referenced_var (heapvar
);
2852 heapvar_insert (t
, heapvar
);
2855 temp
.var
= create_variable_info_for (heapvar
,
2856 alias_get_name (heapvar
));
2858 vi
= get_varinfo (temp
.var
);
2859 vi
->is_artificial_var
= 1;
2860 vi
->is_heap_var
= 1;
2861 temp
.type
= ADDRESSOF
;
2863 VEC_safe_push (ce_s
, heap
, *results
, &temp
);
2868 temp
.var
= anything_id
;
2871 VEC_safe_push (ce_s
, heap
, *results
, &temp
);
2877 temp
.type
= ADDRESSOF
;
2878 temp
.var
= anything_id
;
2880 VEC_safe_push (ce_s
, heap
, *results
, &temp
);
2887 switch (TREE_CODE (t
))
2891 get_constraint_for (TREE_OPERAND (t
, 0), results
);
2896 case ARRAY_RANGE_REF
:
2898 get_constraint_for_component_ref (t
, results
);
2902 temp
.type
= ADDRESSOF
;
2903 temp
.var
= anything_id
;
2905 VEC_safe_push (ce_s
, heap
, *results
, &temp
);
2912 switch (TREE_CODE (t
))
2916 tree op
= TREE_OPERAND (t
, 0);
2918 /* Cast from non-pointer to pointers are bad news for us.
2919 Anything else, we see through */
2920 if (!(POINTER_TYPE_P (TREE_TYPE (t
))
2921 && ! POINTER_TYPE_P (TREE_TYPE (op
))))
2923 get_constraint_for (op
, results
);
2931 temp
.type
= ADDRESSOF
;
2932 temp
.var
= anything_id
;
2934 VEC_safe_push (ce_s
, heap
, *results
, &temp
);
2939 case tcc_exceptional
:
2941 switch (TREE_CODE (t
))
2945 get_constraint_for (PHI_RESULT (t
), results
);
2951 struct constraint_expr temp
;
2952 temp
= get_constraint_exp_from_ssa_var (t
);
2953 VEC_safe_push (ce_s
, heap
, *results
, &temp
);
2959 temp
.type
= ADDRESSOF
;
2960 temp
.var
= anything_id
;
2962 VEC_safe_push (ce_s
, heap
, *results
, &temp
);
2967 case tcc_declaration
:
2969 struct constraint_expr temp
;
2970 temp
= get_constraint_exp_from_ssa_var (t
);
2971 VEC_safe_push (ce_s
, heap
, *results
, &temp
);
2976 temp
.type
= ADDRESSOF
;
2977 temp
.var
= anything_id
;
2979 VEC_safe_push (ce_s
, heap
, *results
, &temp
);
2986 /* Handle the structure copy case where we have a simple structure copy
2987 between LHS and RHS that is of SIZE (in bits)
2989 For each field of the lhs variable (lhsfield)
2990 For each field of the rhs variable at lhsfield.offset (rhsfield)
2991 add the constraint lhsfield = rhsfield
2993 If we fail due to some kind of type unsafety or other thing we
2994 can't handle, return false. We expect the caller to collapse the
2995 variable in that case. */
2998 do_simple_structure_copy (const struct constraint_expr lhs
,
2999 const struct constraint_expr rhs
,
3000 const unsigned HOST_WIDE_INT size
)
3002 varinfo_t p
= get_varinfo (lhs
.var
);
3003 unsigned HOST_WIDE_INT pstart
, last
;
3005 last
= p
->offset
+ size
;
3006 for (; p
&& p
->offset
< last
; p
= p
->next
)
3009 struct constraint_expr templhs
= lhs
;
3010 struct constraint_expr temprhs
= rhs
;
3011 unsigned HOST_WIDE_INT fieldoffset
;
3013 templhs
.var
= p
->id
;
3014 q
= get_varinfo (temprhs
.var
);
3015 fieldoffset
= p
->offset
- pstart
;
3016 q
= first_vi_for_offset (q
, q
->offset
+ fieldoffset
);
3019 temprhs
.var
= q
->id
;
3020 process_constraint (new_constraint (templhs
, temprhs
));
3026 /* Handle the structure copy case where we have a structure copy between a
3027 aggregate on the LHS and a dereference of a pointer on the RHS
3028 that is of SIZE (in bits)
3030 For each field of the lhs variable (lhsfield)
3031 rhs.offset = lhsfield->offset
3032 add the constraint lhsfield = rhs
3036 do_rhs_deref_structure_copy (const struct constraint_expr lhs
,
3037 const struct constraint_expr rhs
,
3038 const unsigned HOST_WIDE_INT size
)
3040 varinfo_t p
= get_varinfo (lhs
.var
);
3041 unsigned HOST_WIDE_INT pstart
,last
;
3043 last
= p
->offset
+ size
;
3045 for (; p
&& p
->offset
< last
; p
= p
->next
)
3048 struct constraint_expr templhs
= lhs
;
3049 struct constraint_expr temprhs
= rhs
;
3050 unsigned HOST_WIDE_INT fieldoffset
;
3053 if (templhs
.type
== SCALAR
)
3054 templhs
.var
= p
->id
;
3056 templhs
.offset
= p
->offset
;
3058 q
= get_varinfo (temprhs
.var
);
3059 fieldoffset
= p
->offset
- pstart
;
3060 temprhs
.offset
+= fieldoffset
;
3061 process_constraint (new_constraint (templhs
, temprhs
));
3065 /* Handle the structure copy case where we have a structure copy
3066 between an aggregate on the RHS and a dereference of a pointer on
3067 the LHS that is of SIZE (in bits)
3069 For each field of the rhs variable (rhsfield)
3070 lhs.offset = rhsfield->offset
3071 add the constraint lhs = rhsfield
3075 do_lhs_deref_structure_copy (const struct constraint_expr lhs
,
3076 const struct constraint_expr rhs
,
3077 const unsigned HOST_WIDE_INT size
)
3079 varinfo_t p
= get_varinfo (rhs
.var
);
3080 unsigned HOST_WIDE_INT pstart
,last
;
3082 last
= p
->offset
+ size
;
3084 for (; p
&& p
->offset
< last
; p
= p
->next
)
3087 struct constraint_expr templhs
= lhs
;
3088 struct constraint_expr temprhs
= rhs
;
3089 unsigned HOST_WIDE_INT fieldoffset
;
3092 if (temprhs
.type
== SCALAR
)
3093 temprhs
.var
= p
->id
;
3095 temprhs
.offset
= p
->offset
;
3097 q
= get_varinfo (templhs
.var
);
3098 fieldoffset
= p
->offset
- pstart
;
3099 templhs
.offset
+= fieldoffset
;
3100 process_constraint (new_constraint (templhs
, temprhs
));
3104 /* Sometimes, frontends like to give us bad type information. This
3105 function will collapse all the fields from VAR to the end of VAR,
3106 into VAR, so that we treat those fields as a single variable.
3107 We return the variable they were collapsed into. */
3110 collapse_rest_of_var (unsigned int var
)
3112 varinfo_t currvar
= get_varinfo (var
);
3115 for (field
= currvar
->next
; field
; field
= field
->next
)
3118 fprintf (dump_file
, "Type safety: Collapsing var %s into %s\n",
3119 field
->name
, currvar
->name
);
3121 gcc_assert (!field
->collapsed_to
);
3122 field
->collapsed_to
= currvar
;
3125 currvar
->next
= NULL
;
3126 currvar
->size
= currvar
->fullsize
- currvar
->offset
;
3131 /* Handle aggregate copies by expanding into copies of the respective
3132 fields of the structures. */
3135 do_structure_copy (tree lhsop
, tree rhsop
)
3137 struct constraint_expr lhs
, rhs
, tmp
;
3138 VEC (ce_s
, heap
) *lhsc
= NULL
, *rhsc
= NULL
;
3140 unsigned HOST_WIDE_INT lhssize
;
3141 unsigned HOST_WIDE_INT rhssize
;
3143 get_constraint_for (lhsop
, &lhsc
);
3144 get_constraint_for (rhsop
, &rhsc
);
3145 gcc_assert (VEC_length (ce_s
, lhsc
) == 1);
3146 gcc_assert (VEC_length (ce_s
, rhsc
) == 1);
3147 lhs
= *(VEC_last (ce_s
, lhsc
));
3148 rhs
= *(VEC_last (ce_s
, rhsc
));
3150 VEC_free (ce_s
, heap
, lhsc
);
3151 VEC_free (ce_s
, heap
, rhsc
);
3153 /* If we have special var = x, swap it around. */
3154 if (lhs
.var
<= integer_id
&& !(get_varinfo (rhs
.var
)->is_special_var
))
3161 /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
3162 possible it's something we could handle. However, most cases falling
3163 into this are dealing with transparent unions, which are slightly
3165 if (rhs
.type
== ADDRESSOF
&& !(get_varinfo (rhs
.var
)->is_special_var
))
3167 rhs
.type
= ADDRESSOF
;
3168 rhs
.var
= anything_id
;
3171 /* If the RHS is a special var, or an addressof, set all the LHS fields to
3172 that special var. */
3173 if (rhs
.var
<= integer_id
)
3175 for (p
= get_varinfo (lhs
.var
); p
; p
= p
->next
)
3177 struct constraint_expr templhs
= lhs
;
3178 struct constraint_expr temprhs
= rhs
;
3180 if (templhs
.type
== SCALAR
)
3181 templhs
.var
= p
->id
;
3183 templhs
.offset
+= p
->offset
;
3184 process_constraint (new_constraint (templhs
, temprhs
));
3189 tree rhstype
= TREE_TYPE (rhsop
);
3190 tree lhstype
= TREE_TYPE (lhsop
);
3194 lhstypesize
= DECL_P (lhsop
) ? DECL_SIZE (lhsop
) : TYPE_SIZE (lhstype
);
3195 rhstypesize
= DECL_P (rhsop
) ? DECL_SIZE (rhsop
) : TYPE_SIZE (rhstype
);
3197 /* If we have a variably sized types on the rhs or lhs, and a deref
3198 constraint, add the constraint, lhsconstraint = &ANYTHING.
3199 This is conservatively correct because either the lhs is an unknown
3200 sized var (if the constraint is SCALAR), or the lhs is a DEREF
3201 constraint, and every variable it can point to must be unknown sized
3202 anyway, so we don't need to worry about fields at all. */
3203 if ((rhs
.type
== DEREF
&& TREE_CODE (rhstypesize
) != INTEGER_CST
)
3204 || (lhs
.type
== DEREF
&& TREE_CODE (lhstypesize
) != INTEGER_CST
))
3206 rhs
.var
= anything_id
;
3207 rhs
.type
= ADDRESSOF
;
3209 process_constraint (new_constraint (lhs
, rhs
));
3213 /* The size only really matters insofar as we don't set more or less of
3214 the variable. If we hit an unknown size var, the size should be the
3215 whole darn thing. */
3216 if (get_varinfo (rhs
.var
)->is_unknown_size_var
)
3219 rhssize
= TREE_INT_CST_LOW (rhstypesize
);
3221 if (get_varinfo (lhs
.var
)->is_unknown_size_var
)
3224 lhssize
= TREE_INT_CST_LOW (lhstypesize
);
3227 if (rhs
.type
== SCALAR
&& lhs
.type
== SCALAR
)
3229 if (!do_simple_structure_copy (lhs
, rhs
, MIN (lhssize
, rhssize
)))
3231 lhs
.var
= collapse_rest_of_var (lhs
.var
);
3232 rhs
.var
= collapse_rest_of_var (rhs
.var
);
3237 process_constraint (new_constraint (lhs
, rhs
));
3240 else if (lhs
.type
!= DEREF
&& rhs
.type
== DEREF
)
3241 do_rhs_deref_structure_copy (lhs
, rhs
, MIN (lhssize
, rhssize
));
3242 else if (lhs
.type
== DEREF
&& rhs
.type
!= DEREF
)
3243 do_lhs_deref_structure_copy (lhs
, rhs
, MIN (lhssize
, rhssize
));
3246 tree pointedtotype
= lhstype
;
3249 gcc_assert (rhs
.type
== DEREF
&& lhs
.type
== DEREF
);
3250 tmpvar
= create_tmp_var_raw (pointedtotype
, "structcopydereftmp");
3251 do_structure_copy (tmpvar
, rhsop
);
3252 do_structure_copy (lhsop
, tmpvar
);
3258 /* Update related alias information kept in AI. This is used when
3259 building name tags, alias sets and deciding grouping heuristics.
3260 STMT is the statement to process. This function also updates
3261 ADDRESSABLE_VARS. */
3264 update_alias_info (tree stmt
, struct alias_info
*ai
)
3267 use_operand_p use_p
;
3269 bool stmt_dereferences_ptr_p
;
3270 enum escape_type stmt_escape_type
= is_escape_site (stmt
);
3271 struct mem_ref_stats_d
*mem_ref_stats
= gimple_mem_ref_stats (cfun
);
3273 stmt_dereferences_ptr_p
= false;
3275 if (stmt_escape_type
== ESCAPE_TO_CALL
3276 || stmt_escape_type
== ESCAPE_TO_PURE_CONST
)
3278 mem_ref_stats
->num_call_sites
++;
3279 if (stmt_escape_type
== ESCAPE_TO_PURE_CONST
)
3280 mem_ref_stats
->num_pure_const_call_sites
++;
3282 else if (stmt_escape_type
== ESCAPE_TO_ASM
)
3283 mem_ref_stats
->num_asm_sites
++;
3285 /* Mark all the variables whose address are taken by the statement. */
3286 addr_taken
= addresses_taken (stmt
);
3289 bitmap_ior_into (gimple_addressable_vars (cfun
), addr_taken
);
3291 /* If STMT is an escape point, all the addresses taken by it are
3293 if (stmt_escape_type
!= NO_ESCAPE
)
3298 EXECUTE_IF_SET_IN_BITMAP (addr_taken
, 0, i
, bi
)
3300 tree rvar
= referenced_var (i
);
3301 if (!unmodifiable_var_p (rvar
))
3302 mark_call_clobbered (rvar
, stmt_escape_type
);
3307 /* Process each operand use. For pointers, determine whether they
3308 are dereferenced by the statement, or whether their value
3310 FOR_EACH_PHI_OR_STMT_USE (use_p
, stmt
, iter
, SSA_OP_USE
)
3314 struct ptr_info_def
*pi
;
3315 unsigned num_uses
, num_loads
, num_stores
;
3317 op
= USE_FROM_PTR (use_p
);
3319 /* If STMT is a PHI node, OP may be an ADDR_EXPR. If so, add it
3320 to the set of addressable variables. */
3321 if (TREE_CODE (op
) == ADDR_EXPR
)
3323 bitmap addressable_vars
= gimple_addressable_vars (cfun
);
3325 gcc_assert (TREE_CODE (stmt
) == PHI_NODE
);
3326 gcc_assert (addressable_vars
);
3328 /* PHI nodes don't have annotations for pinning the set
3329 of addresses taken, so we collect them here.
3331 FIXME, should we allow PHI nodes to have annotations
3332 so that they can be treated like regular statements?
3333 Currently, they are treated as second-class
3335 add_to_addressable_set (TREE_OPERAND (op
, 0), &addressable_vars
);
3339 /* Ignore constants (they may occur in PHI node arguments). */
3340 if (TREE_CODE (op
) != SSA_NAME
)
3343 var
= SSA_NAME_VAR (op
);
3344 v_ann
= var_ann (var
);
3346 /* The base variable of an SSA name must be a GIMPLE register, and thus
3347 it cannot be aliased. */
3348 gcc_assert (!may_be_aliased (var
));
3350 /* We are only interested in pointers. */
3351 if (!POINTER_TYPE_P (TREE_TYPE (op
)))
3354 pi
= get_ptr_info (op
);
3356 /* Add OP to AI->PROCESSED_PTRS, if it's not there already. */
3357 if (!TEST_BIT (ai
->ssa_names_visited
, SSA_NAME_VERSION (op
)))
3359 SET_BIT (ai
->ssa_names_visited
, SSA_NAME_VERSION (op
));
3360 VEC_safe_push (tree
, heap
, ai
->processed_ptrs
, op
);
3363 /* If STMT is a PHI node, then it will not have pointer
3364 dereferences and it will not be an escape point. */
3365 if (TREE_CODE (stmt
) == PHI_NODE
)
3368 /* Determine whether OP is a dereferenced pointer, and if STMT
3369 is an escape point, whether OP escapes. */
3370 count_uses_and_derefs (op
, stmt
, &num_uses
, &num_loads
, &num_stores
);
3372 /* Handle a corner case involving address expressions of the
3373 form '&PTR->FLD'. The problem with these expressions is that
3374 they do not represent a dereference of PTR. However, if some
3375 other transformation propagates them into an INDIRECT_REF
3376 expression, we end up with '*(&PTR->FLD)' which is folded
3379 So, if the original code had no other dereferences of PTR,
3380 the aliaser will not create memory tags for it, and when
3381 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
3382 memory operations will receive no VDEF/VUSE operands.
3384 One solution would be to have count_uses_and_derefs consider
3385 &PTR->FLD a dereference of PTR. But that is wrong, since it
3386 is not really a dereference but an offset calculation.
3388 What we do here is to recognize these special ADDR_EXPR
3389 nodes. Since these expressions are never GIMPLE values (they
3390 are not GIMPLE invariants), they can only appear on the RHS
3391 of an assignment and their base address is always an
3392 INDIRECT_REF expression. */
3393 if (TREE_CODE (stmt
) == GIMPLE_MODIFY_STMT
3394 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt
, 1)) == ADDR_EXPR
3395 && !is_gimple_val (GIMPLE_STMT_OPERAND (stmt
, 1)))
3397 /* If the RHS if of the form &PTR->FLD and PTR == OP, then
3398 this represents a potential dereference of PTR. */
3399 tree rhs
= GIMPLE_STMT_OPERAND (stmt
, 1);
3400 tree base
= get_base_address (TREE_OPERAND (rhs
, 0));
3401 if (TREE_CODE (base
) == INDIRECT_REF
3402 && TREE_OPERAND (base
, 0) == op
)
3406 if (num_loads
+ num_stores
> 0)
3408 /* Mark OP as dereferenced. In a subsequent pass,
3409 dereferenced pointers that point to a set of
3410 variables will be assigned a name tag to alias
3411 all the variables OP points to. */
3412 pi
->is_dereferenced
= 1;
3414 /* If this is a store operation, mark OP as being
3415 dereferenced to store, otherwise mark it as being
3416 dereferenced to load. */
3418 pointer_set_insert (ai
->dereferenced_ptrs_store
, var
);
3420 pointer_set_insert (ai
->dereferenced_ptrs_load
, var
);
3422 /* Update the frequency estimate for all the dereferences of
3424 update_mem_sym_stats_from_stmt (op
, stmt
, num_loads
, num_stores
);
3426 /* Indicate that STMT contains pointer dereferences. */
3427 stmt_dereferences_ptr_p
= true;
3430 if (stmt_escape_type
!= NO_ESCAPE
&& num_loads
+ num_stores
< num_uses
)
3432 /* If STMT is an escape point and STMT contains at
3433 least one direct use of OP, then the value of OP
3434 escapes and so the pointed-to variables need to
3435 be marked call-clobbered. */
3436 pi
->value_escapes_p
= 1;
3437 pi
->escape_mask
|= stmt_escape_type
;
3439 /* If the statement makes a function call, assume
3440 that pointer OP will be dereferenced in a store
3441 operation inside the called function. */
3442 if (get_call_expr_in (stmt
)
3443 || stmt_escape_type
== ESCAPE_STORED_IN_GLOBAL
)
3445 pointer_set_insert (ai
->dereferenced_ptrs_store
, var
);
3446 pi
->is_dereferenced
= 1;
3451 if (TREE_CODE (stmt
) == PHI_NODE
)
3454 /* Mark stored variables in STMT as being written to and update the
3455 memory reference stats for all memory symbols referenced by STMT. */
3456 if (stmt_references_memory_p (stmt
))
3461 mem_ref_stats
->num_mem_stmts
++;
3463 /* Notice that we only update memory reference stats for symbols
3464 loaded and stored by the statement if the statement does not
3465 contain pointer dereferences and it is not a call/asm site.
3466 This is to avoid double accounting problems when creating
3467 memory partitions. After computing points-to information,
3468 pointer dereference statistics are used to update the
3469 reference stats of the pointed-to variables, so here we
3470 should only update direct references to symbols.
3472 Indirect references are not updated here for two reasons: (1)
3473 The first time we compute alias information, the sets
3474 LOADED/STORED are empty for pointer dereferences, (2) After
3475 partitioning, LOADED/STORED may have references to
3476 partitions, not the original pointed-to variables. So, if we
3477 always counted LOADED/STORED here and during partitioning, we
3478 would count many symbols more than once.
3480 This does cause some imprecision when a statement has a
3481 combination of direct symbol references and pointer
3482 dereferences (e.g., MEMORY_VAR = *PTR) or if a call site has
3483 memory symbols in its argument list, but these cases do not
3484 occur so frequently as to constitute a serious problem. */
3485 if (STORED_SYMS (stmt
))
3486 EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt
), 0, i
, bi
)
3488 tree sym
= referenced_var (i
);
3489 pointer_set_insert (ai
->written_vars
, sym
);
3490 if (!stmt_dereferences_ptr_p
3491 && stmt_escape_type
!= ESCAPE_TO_CALL
3492 && stmt_escape_type
!= ESCAPE_TO_PURE_CONST
3493 && stmt_escape_type
!= ESCAPE_TO_ASM
)
3494 update_mem_sym_stats_from_stmt (sym
, stmt
, 0, 1);
3497 if (!stmt_dereferences_ptr_p
3498 && LOADED_SYMS (stmt
)
3499 && stmt_escape_type
!= ESCAPE_TO_CALL
3500 && stmt_escape_type
!= ESCAPE_TO_PURE_CONST
3501 && stmt_escape_type
!= ESCAPE_TO_ASM
)
3502 EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt
), 0, i
, bi
)
3503 update_mem_sym_stats_from_stmt (referenced_var (i
), stmt
, 1, 0);
3508 /* Handle pointer arithmetic EXPR when creating aliasing constraints.
3509 Expressions of the type PTR + CST can be handled in two ways:
3511 1- If the constraint for PTR is ADDRESSOF for a non-structure
3512 variable, then we can use it directly because adding or
3513 subtracting a constant may not alter the original ADDRESSOF
3514 constraint (i.e., pointer arithmetic may not legally go outside
3515 an object's boundaries).
3517 2- If the constraint for PTR is ADDRESSOF for a structure variable,
3518 then if CST is a compile-time constant that can be used as an
3519 offset, we can determine which sub-variable will be pointed-to
3522 Return true if the expression is handled. For any other kind of
3523 expression, return false so that each operand can be added as a
3524 separate constraint by the caller. */
3527 handle_ptr_arith (VEC (ce_s
, heap
) *lhsc
, tree expr
)
3530 struct constraint_expr
*c
, *c2
;
3533 VEC (ce_s
, heap
) *temp
= NULL
;
3534 unsigned int rhsoffset
= 0;
3535 bool unknown_addend
= false;
3537 if (TREE_CODE (expr
) != POINTER_PLUS_EXPR
)
3540 op0
= TREE_OPERAND (expr
, 0);
3541 op1
= TREE_OPERAND (expr
, 1);
3542 gcc_assert (POINTER_TYPE_P (TREE_TYPE (op0
)));
3544 get_constraint_for (op0
, &temp
);
3546 /* Handle non-constants by making constraints from integer. */
3547 if (TREE_CODE (op1
) == INTEGER_CST
)
3548 rhsoffset
= TREE_INT_CST_LOW (op1
) * BITS_PER_UNIT
;
3550 unknown_addend
= true;
3552 for (i
= 0; VEC_iterate (ce_s
, lhsc
, i
, c
); i
++)
3553 for (j
= 0; VEC_iterate (ce_s
, temp
, j
, c2
); j
++)
3555 if (c2
->type
== ADDRESSOF
&& rhsoffset
!= 0)
3557 varinfo_t temp
= get_varinfo (c2
->var
);
3559 /* An access one after the end of an array is valid,
3560 so simply punt on accesses we cannot resolve. */
3561 temp
= first_vi_for_offset (temp
, rhsoffset
);
3567 else if (unknown_addend
)
3569 /* Can't handle *a + integer where integer is unknown. */
3570 if (c2
->type
!= SCALAR
)
3572 struct constraint_expr intc
;
3573 intc
.var
= integer_id
;
3576 process_constraint (new_constraint (*c
, intc
));
3580 /* We known it lives somewhere within c2->var. */
3581 varinfo_t tmp
= get_varinfo (c2
->var
);
3582 for (; tmp
; tmp
= tmp
->next
)
3584 struct constraint_expr tmpc
= *c2
;
3587 process_constraint (new_constraint (*c
, tmpc
));
3592 c2
->offset
= rhsoffset
;
3593 process_constraint (new_constraint (*c
, *c2
));
3596 VEC_free (ce_s
, heap
, temp
);
3601 /* For non-IPA mode, generate constraints necessary for a call on the
3605 handle_rhs_call (tree rhs
)
3608 call_expr_arg_iterator iter
;
3609 struct constraint_expr rhsc
;
3611 rhsc
.var
= anything_id
;
3613 rhsc
.type
= ADDRESSOF
;
3615 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, rhs
)
3617 VEC(ce_s
, heap
) *lhsc
= NULL
;
3619 /* Find those pointers being passed, and make sure they end up
3620 pointing to anything. */
3621 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
3624 struct constraint_expr
*lhsp
;
3626 get_constraint_for (arg
, &lhsc
);
3628 for (j
= 0; VEC_iterate (ce_s
, lhsc
, j
, lhsp
); j
++)
3629 process_constraint_1 (new_constraint (*lhsp
, rhsc
), true);
3630 VEC_free (ce_s
, heap
, lhsc
);
3635 /* For non-IPA mode, generate constraints necessary for a call
3636 that returns a pointer and assigns it to LHS. This simply makes
3637 the LHS point to anything. */
3640 handle_lhs_call (tree lhs
)
3642 VEC(ce_s
, heap
) *lhsc
= NULL
;
3643 struct constraint_expr rhsc
;
3645 struct constraint_expr
*lhsp
;
3647 rhsc
.var
= anything_id
;
3649 rhsc
.type
= ADDRESSOF
;
3650 get_constraint_for (lhs
, &lhsc
);
3651 for (j
= 0; VEC_iterate (ce_s
, lhsc
, j
, lhsp
); j
++)
3652 process_constraint_1 (new_constraint (*lhsp
, rhsc
), true);
3653 VEC_free (ce_s
, heap
, lhsc
);
3656 /* Walk statement T setting up aliasing constraints according to the
3657 references found in T. This function is the main part of the
3658 constraint builder. AI points to auxiliary alias information used
3659 when building alias sets and computing alias grouping heuristics. */
3662 find_func_aliases (tree origt
)
3665 VEC(ce_s
, heap
) *lhsc
= NULL
;
3666 VEC(ce_s
, heap
) *rhsc
= NULL
;
3667 struct constraint_expr
*c
;
3669 if (TREE_CODE (t
) == RETURN_EXPR
&& TREE_OPERAND (t
, 0))
3670 t
= TREE_OPERAND (t
, 0);
3672 /* Now build constraints expressions. */
3673 if (TREE_CODE (t
) == PHI_NODE
)
3675 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t
))));
3677 /* Only care about pointers and structures containing
3679 if (could_have_pointers (PHI_RESULT (t
)))
3684 /* For a phi node, assign all the arguments to
3686 get_constraint_for (PHI_RESULT (t
), &lhsc
);
3687 for (i
= 0; i
< PHI_NUM_ARGS (t
); i
++)
3690 tree strippedrhs
= PHI_ARG_DEF (t
, i
);
3692 STRIP_NOPS (strippedrhs
);
3693 rhstype
= TREE_TYPE (strippedrhs
);
3694 get_constraint_for (PHI_ARG_DEF (t
, i
), &rhsc
);
3696 for (j
= 0; VEC_iterate (ce_s
, lhsc
, j
, c
); j
++)
3698 struct constraint_expr
*c2
;
3699 while (VEC_length (ce_s
, rhsc
) > 0)
3701 c2
= VEC_last (ce_s
, rhsc
);
3702 process_constraint (new_constraint (*c
, *c2
));
3703 VEC_pop (ce_s
, rhsc
);
3709 /* In IPA mode, we need to generate constraints to pass call
3710 arguments through their calls. There are two cases, either a
3711 GIMPLE_MODIFY_STMT when we are returning a value, or just a plain
3712 CALL_EXPR when we are not.
3714 In non-ipa mode, we need to generate constraints for each
3715 pointer passed by address. */
3716 else if (((TREE_CODE (t
) == GIMPLE_MODIFY_STMT
3717 && TREE_CODE (GIMPLE_STMT_OPERAND (t
, 1)) == CALL_EXPR
3718 && !(call_expr_flags (GIMPLE_STMT_OPERAND (t
, 1))
3719 & (ECF_MALLOC
| ECF_MAY_BE_ALLOCA
)))
3720 || (TREE_CODE (t
) == CALL_EXPR
3721 && !(call_expr_flags (t
)
3722 & (ECF_MALLOC
| ECF_MAY_BE_ALLOCA
)))))
3726 if (TREE_CODE (t
) == GIMPLE_MODIFY_STMT
)
3728 handle_rhs_call (GIMPLE_STMT_OPERAND (t
, 1));
3729 if (POINTER_TYPE_P (TREE_TYPE (GIMPLE_STMT_OPERAND (t
, 1))))
3730 handle_lhs_call (GIMPLE_STMT_OPERAND (t
, 0));
3733 handle_rhs_call (t
);
3740 call_expr_arg_iterator iter
;
3744 if (TREE_CODE (t
) == GIMPLE_MODIFY_STMT
)
3746 lhsop
= GIMPLE_STMT_OPERAND (t
, 0);
3747 rhsop
= GIMPLE_STMT_OPERAND (t
, 1);
3754 decl
= get_callee_fndecl (rhsop
);
3756 /* If we can directly resolve the function being called, do so.
3757 Otherwise, it must be some sort of indirect expression that
3758 we should still be able to handle. */
3761 fi
= get_vi_for_tree (decl
);
3765 decl
= CALL_EXPR_FN (rhsop
);
3766 fi
= get_vi_for_tree (decl
);
3769 /* Assign all the passed arguments to the appropriate incoming
3770 parameters of the function. */
3772 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, rhsop
)
3774 struct constraint_expr lhs
;
3775 struct constraint_expr
*rhsp
;
3777 get_constraint_for (arg
, &rhsc
);
3778 if (TREE_CODE (decl
) != FUNCTION_DECL
)
3787 lhs
.var
= first_vi_for_offset (fi
, i
)->id
;
3790 while (VEC_length (ce_s
, rhsc
) != 0)
3792 rhsp
= VEC_last (ce_s
, rhsc
);
3793 process_constraint (new_constraint (lhs
, *rhsp
));
3794 VEC_pop (ce_s
, rhsc
);
3799 /* If we are returning a value, assign it to the result. */
3802 struct constraint_expr rhs
;
3803 struct constraint_expr
*lhsp
;
3806 get_constraint_for (lhsop
, &lhsc
);
3807 if (TREE_CODE (decl
) != FUNCTION_DECL
)
3816 rhs
.var
= first_vi_for_offset (fi
, i
)->id
;
3819 for (j
= 0; VEC_iterate (ce_s
, lhsc
, j
, lhsp
); j
++)
3820 process_constraint (new_constraint (*lhsp
, rhs
));
3824 /* Otherwise, just a regular assignment statement. */
3825 else if (TREE_CODE (t
) == GIMPLE_MODIFY_STMT
)
3827 tree lhsop
= GIMPLE_STMT_OPERAND (t
, 0);
3828 tree rhsop
= GIMPLE_STMT_OPERAND (t
, 1);
3831 if ((AGGREGATE_TYPE_P (TREE_TYPE (lhsop
))
3832 || TREE_CODE (TREE_TYPE (lhsop
)) == COMPLEX_TYPE
)
3833 && (AGGREGATE_TYPE_P (TREE_TYPE (rhsop
))
3834 || TREE_CODE (TREE_TYPE (lhsop
)) == COMPLEX_TYPE
))
3836 do_structure_copy (lhsop
, rhsop
);
3840 /* Only care about operations with pointers, structures
3841 containing pointers, dereferences, and call expressions. */
3842 if (could_have_pointers (lhsop
)
3843 || TREE_CODE (rhsop
) == CALL_EXPR
)
3845 get_constraint_for (lhsop
, &lhsc
);
3846 switch (TREE_CODE_CLASS (TREE_CODE (rhsop
)))
3848 /* RHS that consist of unary operations,
3849 exceptional types, or bare decls/constants, get
3850 handled directly by get_constraint_for. */
3852 case tcc_declaration
:
3854 case tcc_exceptional
:
3855 case tcc_expression
:
3861 get_constraint_for (rhsop
, &rhsc
);
3862 for (j
= 0; VEC_iterate (ce_s
, lhsc
, j
, c
); j
++)
3864 struct constraint_expr
*c2
;
3867 for (k
= 0; VEC_iterate (ce_s
, rhsc
, k
, c2
); k
++)
3868 process_constraint (new_constraint (*c
, *c2
));
3876 /* For pointer arithmetic of the form
3877 PTR + CST, we can simply use PTR's
3878 constraint because pointer arithmetic is
3879 not allowed to go out of bounds. */
3880 if (handle_ptr_arith (lhsc
, rhsop
))
3885 /* Otherwise, walk each operand. Notice that we
3886 can't use the operand interface because we need
3887 to process expressions other than simple operands
3888 (e.g. INDIRECT_REF, ADDR_EXPR, CALL_EXPR). */
3890 for (i
= 0; i
< TREE_OPERAND_LENGTH (rhsop
); i
++)
3892 tree op
= TREE_OPERAND (rhsop
, i
);
3895 gcc_assert (VEC_length (ce_s
, rhsc
) == 0);
3896 get_constraint_for (op
, &rhsc
);
3897 for (j
= 0; VEC_iterate (ce_s
, lhsc
, j
, c
); j
++)
3899 struct constraint_expr
*c2
;
3900 while (VEC_length (ce_s
, rhsc
) > 0)
3902 c2
= VEC_last (ce_s
, rhsc
);
3903 process_constraint (new_constraint (*c
, *c2
));
3904 VEC_pop (ce_s
, rhsc
);
3912 else if (TREE_CODE (t
) == CHANGE_DYNAMIC_TYPE_EXPR
)
3916 get_constraint_for (CHANGE_DYNAMIC_TYPE_LOCATION (t
), &lhsc
);
3917 for (j
= 0; VEC_iterate (ce_s
, lhsc
, j
, c
); ++j
)
3918 get_varinfo (c
->var
)->no_tbaa_pruning
= true;
3921 /* After promoting variables and computing aliasing we will
3922 need to re-scan most statements. FIXME: Try to minimize the
3923 number of statements re-scanned. It's not really necessary to
3924 re-scan *all* statements. */
3925 mark_stmt_modified (origt
);
3926 VEC_free (ce_s
, heap
, rhsc
);
3927 VEC_free (ce_s
, heap
, lhsc
);
3931 /* Find the first varinfo in the same variable as START that overlaps with
3933 Effectively, walk the chain of fields for the variable START to find the
3934 first field that overlaps with OFFSET.
3935 Return NULL if we can't find one. */
3938 first_vi_for_offset (varinfo_t start
, unsigned HOST_WIDE_INT offset
)
3940 varinfo_t curr
= start
;
3943 /* We may not find a variable in the field list with the actual
3944 offset when when we have glommed a structure to a variable.
3945 In that case, however, offset should still be within the size
3947 if (offset
>= curr
->offset
&& offset
< (curr
->offset
+ curr
->size
))
3955 /* Insert the varinfo FIELD into the field list for BASE, at the front
3959 insert_into_field_list (varinfo_t base
, varinfo_t field
)
3961 varinfo_t prev
= base
;
3962 varinfo_t curr
= base
->next
;
3968 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3972 insert_into_field_list_sorted (varinfo_t base
, varinfo_t field
)
3974 varinfo_t prev
= base
;
3975 varinfo_t curr
= base
->next
;
3986 if (field
->offset
<= curr
->offset
)
3991 field
->next
= prev
->next
;
3996 /* This structure is used during pushing fields onto the fieldstack
3997 to track the offset of the field, since bitpos_of_field gives it
3998 relative to its immediate containing type, and we want it relative
3999 to the ultimate containing object. */
4003 /* Type of the field. */
4006 /* Size, in bits, of the field. */
4012 /* Offset from the base of the base containing object to this field. */
4013 HOST_WIDE_INT offset
;
4015 typedef struct fieldoff fieldoff_s
;
4017 DEF_VEC_O(fieldoff_s
);
4018 DEF_VEC_ALLOC_O(fieldoff_s
,heap
);
4020 /* qsort comparison function for two fieldoff's PA and PB */
4023 fieldoff_compare (const void *pa
, const void *pb
)
4025 const fieldoff_s
*foa
= (const fieldoff_s
*)pa
;
4026 const fieldoff_s
*fob
= (const fieldoff_s
*)pb
;
4027 HOST_WIDE_INT foasize
, fobsize
;
4029 if (foa
->offset
!= fob
->offset
)
4030 return foa
->offset
- fob
->offset
;
4032 foasize
= TREE_INT_CST_LOW (foa
->size
);
4033 fobsize
= TREE_INT_CST_LOW (fob
->size
);
4034 return foasize
- fobsize
;
4037 /* Sort a fieldstack according to the field offset and sizes. */
4039 sort_fieldstack (VEC(fieldoff_s
,heap
) *fieldstack
)
4041 qsort (VEC_address (fieldoff_s
, fieldstack
),
4042 VEC_length (fieldoff_s
, fieldstack
),
4043 sizeof (fieldoff_s
),
4047 /* Return true if V is a tree that we can have subvars for.
4048 Normally, this is any aggregate type. Also complex
4049 types which are not gimple registers can have subvars. */
4052 var_can_have_subvars (const_tree v
)
4054 /* Volatile variables should never have subvars. */
4055 if (TREE_THIS_VOLATILE (v
))
4058 /* Non decls or memory tags can never have subvars. */
4059 if (!DECL_P (v
) || MTAG_P (v
))
4062 /* Aggregates without overlapping fields can have subvars. */
4063 if (TREE_CODE (TREE_TYPE (v
)) == RECORD_TYPE
)
4069 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
4070 the fields of TYPE onto fieldstack, recording their offsets along
4073 OFFSET is used to keep track of the offset in this entire
4074 structure, rather than just the immediately containing structure.
4075 Returns the number of fields pushed.
4077 HAS_UNION is set to true if we find a union type as a field of
4081 push_fields_onto_fieldstack (tree type
, VEC(fieldoff_s
,heap
) **fieldstack
,
4082 HOST_WIDE_INT offset
, bool *has_union
)
4087 if (TREE_CODE (type
) != RECORD_TYPE
)
4090 /* If the vector of fields is growing too big, bail out early.
4091 Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
4093 if (VEC_length (fieldoff_s
, *fieldstack
) > MAX_FIELDS_FOR_FIELD_SENSITIVE
)
4096 for (field
= TYPE_FIELDS (type
); field
; field
= TREE_CHAIN (field
))
4097 if (TREE_CODE (field
) == FIELD_DECL
)
4103 && (TREE_CODE (TREE_TYPE (field
)) == QUAL_UNION_TYPE
4104 || TREE_CODE (TREE_TYPE (field
)) == UNION_TYPE
))
4107 if (!var_can_have_subvars (field
))
4109 else if (!(pushed
= push_fields_onto_fieldstack
4112 offset
+ bitpos_of_field (field
),
4114 && (DECL_SIZE (field
)
4115 && !integer_zerop (DECL_SIZE (field
))))
4116 /* Empty structures may have actual size, like in C++. So
4117 see if we didn't push any subfields and the size is
4118 nonzero, push the field onto the stack. */
4125 pair
= VEC_safe_push (fieldoff_s
, heap
, *fieldstack
, NULL
);
4126 pair
->type
= TREE_TYPE (field
);
4127 pair
->size
= DECL_SIZE (field
);
4129 pair
->offset
= offset
+ bitpos_of_field (field
);
4139 /* Create a constraint from ANYTHING variable to VI. */
4141 make_constraint_from_anything (varinfo_t vi
)
4143 struct constraint_expr lhs
, rhs
;
4149 rhs
.var
= anything_id
;
4151 rhs
.type
= ADDRESSOF
;
4152 process_constraint (new_constraint (lhs
, rhs
));
4155 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4156 if it is a varargs function. */
4159 count_num_arguments (tree decl
, bool *is_varargs
)
4164 for (t
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
4168 if (TREE_VALUE (t
) == void_type_node
)
4178 /* Creation function node for DECL, using NAME, and return the index
4179 of the variable we've created for the function. */
4182 create_function_info_for (tree decl
, const char *name
)
4184 unsigned int index
= VEC_length (varinfo_t
, varmap
);
4188 bool is_varargs
= false;
4190 /* Create the variable info. */
4192 vi
= new_var_info (decl
, index
, name
);
4197 vi
->fullsize
= count_num_arguments (decl
, &is_varargs
) + 1;
4198 insert_vi_for_tree (vi
->decl
, vi
);
4199 VEC_safe_push (varinfo_t
, heap
, varmap
, vi
);
4203 /* If it's varargs, we don't know how many arguments it has, so we
4210 vi
->is_unknown_size_var
= true;
4215 arg
= DECL_ARGUMENTS (decl
);
4217 /* Set up variables for each argument. */
4218 for (i
= 1; i
< vi
->fullsize
; i
++)
4221 const char *newname
;
4223 unsigned int newindex
;
4224 tree argdecl
= decl
;
4229 newindex
= VEC_length (varinfo_t
, varmap
);
4230 asprintf (&tempname
, "%s.arg%d", name
, i
-1);
4231 newname
= ggc_strdup (tempname
);
4234 argvi
= new_var_info (argdecl
, newindex
, newname
);
4235 argvi
->decl
= argdecl
;
4236 VEC_safe_push (varinfo_t
, heap
, varmap
, argvi
);
4239 argvi
->fullsize
= vi
->fullsize
;
4240 argvi
->has_union
= false;
4241 insert_into_field_list_sorted (vi
, argvi
);
4242 stats
.total_vars
++;
4245 insert_vi_for_tree (arg
, argvi
);
4246 arg
= TREE_CHAIN (arg
);
4250 /* Create a variable for the return var. */
4251 if (DECL_RESULT (decl
) != NULL
4252 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
4255 const char *newname
;
4257 unsigned int newindex
;
4258 tree resultdecl
= decl
;
4262 if (DECL_RESULT (decl
))
4263 resultdecl
= DECL_RESULT (decl
);
4265 newindex
= VEC_length (varinfo_t
, varmap
);
4266 asprintf (&tempname
, "%s.result", name
);
4267 newname
= ggc_strdup (tempname
);
4270 resultvi
= new_var_info (resultdecl
, newindex
, newname
);
4271 resultvi
->decl
= resultdecl
;
4272 VEC_safe_push (varinfo_t
, heap
, varmap
, resultvi
);
4273 resultvi
->offset
= i
;
4275 resultvi
->fullsize
= vi
->fullsize
;
4276 resultvi
->has_union
= false;
4277 insert_into_field_list_sorted (vi
, resultvi
);
4278 stats
.total_vars
++;
4279 if (DECL_RESULT (decl
))
4280 insert_vi_for_tree (DECL_RESULT (decl
), resultvi
);
4286 /* Return true if FIELDSTACK contains fields that overlap.
4287 FIELDSTACK is assumed to be sorted by offset. */
4290 check_for_overlaps (VEC (fieldoff_s
,heap
) *fieldstack
)
4292 fieldoff_s
*fo
= NULL
;
4294 HOST_WIDE_INT lastoffset
= -1;
4296 for (i
= 0; VEC_iterate (fieldoff_s
, fieldstack
, i
, fo
); i
++)
4298 if (fo
->offset
== lastoffset
)
4300 lastoffset
= fo
->offset
;
4305 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
4306 This will also create any varinfo structures necessary for fields
4310 create_variable_info_for (tree decl
, const char *name
)
4312 unsigned int index
= VEC_length (varinfo_t
, varmap
);
4314 tree
decltype = TREE_TYPE (decl
);
4315 tree declsize
= DECL_P (decl
) ? DECL_SIZE (decl
) : TYPE_SIZE (decltype);
4316 bool notokay
= false;
4318 bool is_global
= DECL_P (decl
) ? is_global_var (decl
) : false;
4319 VEC (fieldoff_s
,heap
) *fieldstack
= NULL
;
4321 if (TREE_CODE (decl
) == FUNCTION_DECL
&& in_ipa_mode
)
4322 return create_function_info_for (decl
, name
);
4324 hasunion
= TREE_CODE (decltype) == UNION_TYPE
4325 || TREE_CODE (decltype) == QUAL_UNION_TYPE
;
4326 if (var_can_have_subvars (decl
) && use_field_sensitive
&& !hasunion
)
4328 push_fields_onto_fieldstack (decltype, &fieldstack
, 0, &hasunion
);
4331 VEC_free (fieldoff_s
, heap
, fieldstack
);
4336 /* If the variable doesn't have subvars, we may end up needing to
4337 sort the field list and create fake variables for all the
4339 vi
= new_var_info (decl
, index
, name
);
4342 vi
->has_union
= hasunion
;
4344 || TREE_CODE (declsize
) != INTEGER_CST
4345 || TREE_CODE (decltype) == UNION_TYPE
4346 || TREE_CODE (decltype) == QUAL_UNION_TYPE
)
4348 vi
->is_unknown_size_var
= true;
4354 vi
->fullsize
= TREE_INT_CST_LOW (declsize
);
4355 vi
->size
= vi
->fullsize
;
4358 insert_vi_for_tree (vi
->decl
, vi
);
4359 VEC_safe_push (varinfo_t
, heap
, varmap
, vi
);
4360 if (is_global
&& (!flag_whole_program
|| !in_ipa_mode
))
4361 make_constraint_from_anything (vi
);
4364 if (use_field_sensitive
4366 && !vi
->is_unknown_size_var
4367 && var_can_have_subvars (decl
)
4368 && VEC_length (fieldoff_s
, fieldstack
) > 1
4369 && VEC_length (fieldoff_s
, fieldstack
) <= MAX_FIELDS_FOR_FIELD_SENSITIVE
)
4371 unsigned int newindex
= VEC_length (varinfo_t
, varmap
);
4372 fieldoff_s
*fo
= NULL
;
4375 for (i
= 0; !notokay
&& VEC_iterate (fieldoff_s
, fieldstack
, i
, fo
); i
++)
4378 || TREE_CODE (fo
->size
) != INTEGER_CST
4386 /* We can't sort them if we have a field with a variable sized type,
4387 which will make notokay = true. In that case, we are going to return
4388 without creating varinfos for the fields anyway, so sorting them is a
4392 sort_fieldstack (fieldstack
);
4393 /* Due to some C++ FE issues, like PR 22488, we might end up
4394 what appear to be overlapping fields even though they,
4395 in reality, do not overlap. Until the C++ FE is fixed,
4396 we will simply disable field-sensitivity for these cases. */
4397 notokay
= check_for_overlaps (fieldstack
);
4401 if (VEC_length (fieldoff_s
, fieldstack
) != 0)
4402 fo
= VEC_index (fieldoff_s
, fieldstack
, 0);
4404 if (fo
== NULL
|| notokay
)
4406 vi
->is_unknown_size_var
= 1;
4409 VEC_free (fieldoff_s
, heap
, fieldstack
);
4413 vi
->size
= TREE_INT_CST_LOW (fo
->size
);
4414 vi
->offset
= fo
->offset
;
4415 for (i
= VEC_length (fieldoff_s
, fieldstack
) - 1;
4416 i
>= 1 && VEC_iterate (fieldoff_s
, fieldstack
, i
, fo
);
4420 const char *newname
= "NULL";
4423 newindex
= VEC_length (varinfo_t
, varmap
);
4427 asprintf (&tempname
, "%s.%s",
4428 vi
->name
, alias_get_name (fo
->decl
));
4430 asprintf (&tempname
, "%s." HOST_WIDE_INT_PRINT_DEC
,
4431 vi
->name
, fo
->offset
);
4432 newname
= ggc_strdup (tempname
);
4435 newvi
= new_var_info (decl
, newindex
, newname
);
4436 newvi
->offset
= fo
->offset
;
4437 newvi
->size
= TREE_INT_CST_LOW (fo
->size
);
4438 newvi
->fullsize
= vi
->fullsize
;
4439 insert_into_field_list (vi
, newvi
);
4440 VEC_safe_push (varinfo_t
, heap
, varmap
, newvi
);
4441 if (is_global
&& (!flag_whole_program
|| !in_ipa_mode
))
4442 make_constraint_from_anything (newvi
);
4448 VEC_free (fieldoff_s
, heap
, fieldstack
);
4453 /* Print out the points-to solution for VAR to FILE. */
4456 dump_solution_for_var (FILE *file
, unsigned int var
)
4458 varinfo_t vi
= get_varinfo (var
);
4462 if (find (var
) != var
)
4464 varinfo_t vipt
= get_varinfo (find (var
));
4465 fprintf (file
, "%s = same as %s\n", vi
->name
, vipt
->name
);
4469 fprintf (file
, "%s = { ", vi
->name
);
4470 EXECUTE_IF_SET_IN_BITMAP (vi
->solution
, 0, i
, bi
)
4472 fprintf (file
, "%s ", get_varinfo (i
)->name
);
4474 fprintf (file
, "}");
4475 if (vi
->no_tbaa_pruning
)
4476 fprintf (file
, " no-tbaa-pruning");
4477 fprintf (file
, "\n");
4481 /* Print the points-to solution for VAR to stdout. */
4484 debug_solution_for_var (unsigned int var
)
4486 dump_solution_for_var (stdout
, var
);
/* Create varinfo structures for all of the variables in the
   function for intraprocedural mode.  */

static void
intra_create_variable_infos (void)
{
  tree t;
  struct constraint_expr lhs, rhs;

  /* For each incoming pointer argument arg, create the constraint ARG
     = ANYTHING or a dummy variable if flag_argument_noalias is set.  */
  for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
    {
      if (!could_have_pointers (t))
        continue;

      /* If flag_argument_noalias is set, then pointer arguments of the
         function are guaranteed not to point to each other.  In that
         case, create an artificial variable PARM_NOALIAS and the
         constraint ARG = &PARM_NOALIAS.  */
      if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
        {
          varinfo_t vi, p;
          var_ann_t ann;
          tree heapvar = heapvar_lookup (t);

          lhs.var = get_vi_for_tree (t)->id;

          if (heapvar == NULL_TREE)
            {
              heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
                                            "PARM_NOALIAS");
              DECL_EXTERNAL (heapvar) = 1;
              if (gimple_referenced_vars (cfun))
                add_referenced_var (heapvar);
              heapvar_insert (t, heapvar);
            }

          ann = get_var_ann (heapvar);
          if (flag_argument_noalias == 1)
            ann->noalias_state = NO_ALIAS;
          else if (flag_argument_noalias == 2)
            ann->noalias_state = NO_ALIAS_GLOBAL;
          else if (flag_argument_noalias == 3)
            ann->noalias_state = NO_ALIAS_ANYTHING;

          vi = get_vi_for_tree (heapvar);
          vi->is_artificial_var = 1;
          vi->is_heap_var = 1;
          rhs.type = ADDRESSOF;
          rhs.var = vi->id;

          for (p = get_varinfo (lhs.var); p; p = p->next)
            {
              struct constraint_expr temp = lhs;

              process_constraint (new_constraint (temp, rhs));
            }
        }
      else
        {
          varinfo_t p, arg_vi = get_vi_for_tree (t);

          for (p = arg_vi; p; p = p->next)
            make_constraint_from_anything (p);
        }
    }
}
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  bitmap pt_vars;
  hashval_t hashcode;
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;

static htab_t shared_bitmap_table;
/* Hash function for a shared_bitmap_info_t.  */

static hashval_t
shared_bitmap_hash (const void *p)
{
  const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
  return bi->hashcode;
}

/* Equality function for two shared_bitmap_info_t's.  */

static int
shared_bitmap_eq (const void *p1, const void *p2)
{
  const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
  const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
  return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
}
/* Lookup a bitmap in the shared bitmap hashtable, and return an already
   existing instance if there is one, NULL otherwise.  */

static bitmap
shared_bitmap_lookup (bitmap pt_vars)
{
  void **slot;
  struct shared_bitmap_info sbi;

  sbi.pt_vars = pt_vars;
  sbi.hashcode = bitmap_hash (pt_vars);

  slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
                                   sbi.hashcode, NO_INSERT);
  if (!slot)
    return NULL;

  return ((shared_bitmap_info_t) *slot)->pt_vars;
}

/* Add a bitmap to the shared bitmap hashtable.  */

static void
shared_bitmap_add (bitmap pt_vars)
{
  void **slot;
  shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);

  sbi->pt_vars = pt_vars;
  sbi->hashcode = bitmap_hash (pt_vars);

  slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
                                   sbi->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = (void *) sbi;
}
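
/* A minimal usage sketch (not part of the original file; the helper name
   below is hypothetical).  Callers such as find_what_p_points_to first
   probe the table and only register a bitmap when no equal one is already
   shared, so identical points-to sets end up using a single allocation.  */

static bitmap
shared_bitmap_lookup_or_add (bitmap pt_vars)
{
  bitmap result = shared_bitmap_lookup (pt_vars);

  if (result)
    return result;              /* Reuse the existing, equal bitmap.  */

  shared_bitmap_add (pt_vars);  /* Otherwise share this one from now on.  */
  return pt_vars;
}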
/* Set bits in INTO corresponding to the variable uids in solution set
   FROM, which came from variable PTR.
   For variables that are actually dereferenced, we also use type
   based alias analysis to prune the points-to sets.
   IS_DEREFED is true if PTR was directly dereferenced, which we use to
   help determine whether we are allowed to prune using TBAA.
   If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
   the points-to sets.  */

static void
set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
                   bool no_tbaa_pruning)
{
  unsigned int i;
  bitmap_iterator bi;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* The only artificial variables that are allowed in a may-alias
         set are heap variables.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
        continue;

      if (TREE_CODE (vi->decl) == VAR_DECL
          || TREE_CODE (vi->decl) == PARM_DECL
          || TREE_CODE (vi->decl) == RESULT_DECL)
        {
          /* Just add VI->DECL to the alias set.
             Don't type prune artificial vars.  */
          if (vi->is_artificial_var)
            bitmap_set_bit (into, DECL_UID (vi->decl));
          else
            {
              alias_set_type var_alias_set, ptr_alias_set;

              var_alias_set = get_alias_set (vi->decl);
              ptr_alias_set = get_alias_set (TREE_TYPE (TREE_TYPE (ptr)));

              if (no_tbaa_pruning
                  || (!is_derefed && !vi->directly_dereferenced)
                  || alias_sets_conflict_p (ptr_alias_set, var_alias_set))
                bitmap_set_bit (into, DECL_UID (vi->decl));
            }
        }
    }
}
static bool have_alias_info = false;

/* The list of SMT's that are in use by our pointer variables.  This
   is the set of SMT's for all pointers that can point to anything.  */
static bitmap used_smts;

/* Due to the ordering of points-to set calculation and SMT
   calculation being a bit co-dependent, we can't just calculate SMT
   used info whenever we want; we have to calculate it around the time
   that find_what_p_points_to is called.  */

/* Mark which SMT's are in use by points-to anything variables.  */
static void
set_used_smts (void)
{
  unsigned int i;
  varinfo_t vi;

  used_smts = BITMAP_ALLOC (&pta_obstack);

  for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); i++)
    {
      tree var = vi->decl;
      tree smt;
      var_ann_t va;
      varinfo_t withsolution = get_varinfo (find (i));
      struct ptr_info_def *pi = NULL;

      /* For parm decls, the pointer info may be under the default
         def.  */
      if (TREE_CODE (vi->decl) == PARM_DECL
          && gimple_default_def (cfun, var))
        pi = SSA_NAME_PTR_INFO (gimple_default_def (cfun, var));
      else if (TREE_CODE (var) == SSA_NAME)
        pi = SSA_NAME_PTR_INFO (var);

      /* Skip the special variables and those that can't be aliased.  */
      if (vi->is_special_var
          || (pi && !pi->is_dereferenced)
          || (TREE_CODE (var) == VAR_DECL && !may_be_aliased (var))
          || !POINTER_TYPE_P (TREE_TYPE (var)))
        continue;

      if (TREE_CODE (var) == SSA_NAME)
        var = SSA_NAME_VAR (var);

      va = var_ann (var);
      smt = va->symbol_mem_tag;
      if (smt && bitmap_bit_p (withsolution->solution, anything_id))
        bitmap_set_bit (used_smts, DECL_UID (smt));
    }
}
/* Merge the necessary SMT's into the solution set SOLUTION, which is
   P's varinfo.  This involves merging all SMT's that are a subset of
   the SMT necessary for P.  */

static void
merge_smts_into (tree p, bitmap solution)
{
  tree var = p;
  tree smt;
  bitmap aliases;

  if (TREE_CODE (p) == SSA_NAME)
    var = SSA_NAME_VAR (p);

  smt = var_ann (var)->symbol_mem_tag;

  /* The smt itself isn't included in its aliases.  */
  bitmap_set_bit (solution, DECL_UID (smt));

  aliases = MTAG_ALIASES (smt);
  bitmap_ior_into (solution, aliases);
}
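
/* Illustrative sketch (not part of the original file): if p's solution
   collapsed to ANYTHING and p's symbol memory tag is SMT.3 with
   MTAG_ALIASES (SMT.3) = { a, b }, the merge above adds SMT.3 itself
   plus a and b to SOLUTION, which is far more useful than the bare
   "points to anything" answer.  The names SMT.3, a and b are
   hypothetical.  */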
/* Given a pointer variable P, fill in its points-to set, or return
   false if we can't.
   Rather than return false for variables that point-to anything, we
   instead find the corresponding SMT, and merge in its aliases.  In
   addition to these aliases, we also set the bits for the SMT's
   themselves and their subsets, as SMT's are still in use by
   non-SSA_NAME's, and pruning may eliminate every one of their
   aliases.  In such a case, if we did not include the right set of
   SMT's in the points-to set of the variable, we'd end up with
   statements that do not conflict but should.  */

static bool
find_what_p_points_to (tree p)
{
  tree lookup_p = p;
  varinfo_t vi;

  if (!have_alias_info)
    return false;

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (p) == SSA_NAME
      && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
      && SSA_NAME_IS_DEFAULT_DEF (p))
    lookup_p = SSA_NAME_VAR (p);

  vi = lookup_vi_for_tree (lookup_p);
  if (vi->is_artificial_var)
    return false;

  /* See if this is a field or a structure.  */
  if (vi->size != vi->fullsize)
    {
      /* Nothing currently asks about structure fields directly,
         but when they do, we need code here to hand back the
         points-to set for the field.  */
      return false;
    }
  else
    {
      struct ptr_info_def *pi = get_ptr_info (p);
      unsigned int i;
      bitmap_iterator bi;
      bool was_pt_anything = false;
      bitmap finished_solution;
      bitmap result;

      if (!pi->is_dereferenced)
        return false;

      /* This variable may have been collapsed, let's get the real
         variable.  */
      vi = get_varinfo (find (vi->id));

      /* Translate artificial variables into SSA_NAME_PTR_INFO
         attributes.  */
      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
        {
          varinfo_t vi = get_varinfo (i);

          if (vi->is_artificial_var)
            {
              /* FIXME.  READONLY should be handled better so that
                 flow insensitive aliasing can disregard writable
                 aliases.  */
              if (vi->id == nothing_id)
                pi->pt_null = 1;
              else if (vi->id == anything_id)
                was_pt_anything = 1;
              else if (vi->id == readonly_id)
                was_pt_anything = 1;
              else if (vi->id == integer_id)
                was_pt_anything = 1;
              else if (vi->is_heap_var)
                pi->pt_global_mem = 1;
            }
        }

      /* Share the final set of variables when possible.  */
      finished_solution = BITMAP_GGC_ALLOC ();
      stats.points_to_sets_created++;

      /* Instead of using pt_anything, we merge in the SMT aliases
         for the underlying SMT.  In addition, if they could have
         pointed to anything, they could point to global memory.  */
      if (was_pt_anything)
        {
          merge_smts_into (p, finished_solution);
          pi->pt_global_mem = 1;
        }

      set_uids_in_ptset (p, finished_solution, vi->solution,
                         vi->directly_dereferenced,
                         vi->no_tbaa_pruning);
      result = shared_bitmap_lookup (finished_solution);

      if (!result)
        {
          shared_bitmap_add (finished_solution);
          pi->pt_vars = finished_solution;
        }
      else
        {
          pi->pt_vars = result;
          bitmap_clear (finished_solution);
        }

      if (bitmap_empty_p (pi->pt_vars))
        pi->pt_vars = NULL;

      return true;
    }
}
/* Dump points-to information to OUTFILE.  */

void
dump_sa_points_to_info (FILE *outfile)
{
  unsigned int i;

  fprintf (outfile, "\nPoints-to sets\n\n");

  if (dump_flags & TDF_STATS)
    {
      fprintf (outfile, "Stats:\n");
      fprintf (outfile, "Total vars: %d\n", stats.total_vars);
      fprintf (outfile, "Non-pointer vars: %d\n",
               stats.nonpointer_vars);
      fprintf (outfile, "Statically unified vars: %d\n",
               stats.unified_vars_static);
      fprintf (outfile, "Dynamically unified vars: %d\n",
               stats.unified_vars_dynamic);
      fprintf (outfile, "Iterations: %d\n", stats.iterations);
      fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
      fprintf (outfile, "Number of implicit edges: %d\n",
               stats.num_implicit_edges);
    }

  for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
    dump_solution_for_var (outfile, i);
}

/* Debug points-to information to stderr.  */

void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
/* Initialize the always-existing constraint variables for NULL,
   ANYTHING, READONLY, and INTEGER.  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
  var_nothing = new_var_info (nothing_tree, 0, "NULL");
  insert_vi_for_tree (nothing_tree, var_nothing);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  VEC_safe_push (varinfo_t, heap, varmap, var_nothing);

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
  var_anything = new_var_info (anything_tree, 1, "ANYTHING");
  insert_vi_for_tree (anything_tree, var_anything);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->next = NULL;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING.  */
  VEC_safe_push (varinfo_t, heap, varmap, var_anything);
  lhs.var = anything_id;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));

  /* Create the READONLY variable, used to represent that a variable
     points to readonly memory.  */
  readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
  var_readonly = new_var_info (readonly_tree, 2, "READONLY");
  var_readonly->is_artificial_var = 1;
  var_readonly->offset = 0;
  var_readonly->size = ~0;
  var_readonly->fullsize = ~0;
  var_readonly->next = NULL;
  var_readonly->is_special_var = 1;
  insert_vi_for_tree (readonly_tree, var_readonly);
  VEC_safe_push (varinfo_t, heap, varmap, var_readonly);

  /* Readonly memory points to anything, in order to make deref
     easier.  In reality, it points to anything the particular
     readonly variable can point to, but we don't track this
     separately.  */
  lhs.var = readonly_id;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;

  process_constraint (new_constraint (lhs, rhs));

  /* Create the INTEGER variable, used to represent that a variable points
     to an integer.  */
  integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
  var_integer = new_var_info (integer_tree, 3, "INTEGER");
  insert_vi_for_tree (integer_tree, var_integer);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->next = NULL;
  var_integer->is_special_var = 1;
  VEC_safe_push (varinfo_t, heap, varmap, var_integer);

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.var = integer_id;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;

  process_constraint (new_constraint (lhs, rhs));
}
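
/* Illustrative summary (not part of the original file): after
   init_base_vars the constraint system always contains

     ANYTHING = &ANYTHING
     READONLY = &ANYTHING
     INTEGER  = &ANYTHING

   i.e. every special variable except NULL conservatively points to the
   unknown-memory variable ANYTHING.  */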
/* Initialize things necessary to perform PTA.  */

static void
init_alias_vars (void)
{
  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraint_pool = create_alloc_pool ("Constraint pool",
                                       sizeof (struct constraint), 30);
  variable_info_pool = create_alloc_pool ("Variable info pool",
                                          sizeof (struct variable_info), 30);
  constraints = VEC_alloc (constraint_t, heap, 8);
  varmap = VEC_alloc (varinfo_t, heap, 8);
  vi_for_tree = pointer_map_create ();

  memset (&stats, 0, sizeof (stats));
  shared_bitmap_table = htab_create (511, shared_bitmap_hash,
                                     shared_bitmap_eq, free);
}
/* Remove the REF and ADDRESS edges from GRAPH, as well as all the
   predecessor edges.  */

static void
remove_preds_and_fake_succs (constraint_graph_t graph)
{
  unsigned int i;

  /* Clear the implicit ref and address nodes from the successor
     lists.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      if (graph->succs[i])
        bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
                            FIRST_REF_NODE * 2);
    }

  /* Free the successor list for the non-ref nodes.  */
  for (i = FIRST_REF_NODE; i < graph->size; i++)
    {
      if (graph->succs[i])
        BITMAP_FREE (graph->succs[i]);
    }

  /* Now reallocate the successor list to its new size, and blow away
     the predecessor bitmaps.  */
  graph->size = VEC_length (varinfo_t, varmap);
  graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);

  free (graph->implicit_preds);
  graph->implicit_preds = NULL;
  free (graph->preds);
  graph->preds = NULL;
  bitmap_obstack_release (&predbitmap_obstack);
}
/* Compute the set of variables we can't TBAA prune.  */

static void
compute_tbaa_pruning (void)
{
  unsigned int size = VEC_length (varinfo_t, varmap);
  unsigned int i;
  unsigned int changed_count = 0;
  sbitmap changed;

  changed = sbitmap_alloc (size);
  sbitmap_zero (changed);

  /* Mark all initial no_tbaa_pruning nodes as changed.  */
  for (i = 0; i < size; ++i)
    {
      varinfo_t ivi = get_varinfo (i);

      if (find (i) == i && ivi->no_tbaa_pruning)
        {
          if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
              || VEC_length (constraint_t, graph->complex[i]) > 0)
            {
              SET_BIT (changed, i);
              ++changed_count;
            }
        }
    }

  while (changed_count > 0)
    {
      struct topo_info *ti = init_topo_info ();

      compute_topo_order (graph, ti);

      while (VEC_length (unsigned, ti->topo_order) != 0)
        {
          bitmap_iterator bi;
          constraint_t c;
          unsigned int j;

          i = VEC_pop (unsigned, ti->topo_order);

          /* If this variable is not a representative, skip it.  */
          if (find (i) != i)
            continue;

          /* If the node has changed, we need to process the complex
             constraints and outgoing edges again.  */
          if (TEST_BIT (changed, i))
            {
              VEC(constraint_t,heap) *complex = graph->complex[i];

              RESET_BIT (changed, i);
              --changed_count;

              /* Process the complex copy constraints.  */
              for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
                {
                  if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
                    {
                      varinfo_t lhsvi = get_varinfo (find (c->lhs.var));

                      if (!lhsvi->no_tbaa_pruning)
                        {
                          lhsvi->no_tbaa_pruning = true;
                          if (!TEST_BIT (changed, lhsvi->id))
                            {
                              SET_BIT (changed, lhsvi->id);
                              ++changed_count;
                            }
                        }
                    }
                }

              /* Propagate to all successors.  */
              EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
                {
                  unsigned int to = find (j);
                  varinfo_t tovi = get_varinfo (to);

                  /* Don't propagate to ourselves.  */
                  if (to == i)
                    continue;

                  if (!tovi->no_tbaa_pruning)
                    {
                      tovi->no_tbaa_pruning = true;
                      if (!TEST_BIT (changed, to))
                        {
                          SET_BIT (changed, to);
                          ++changed_count;
                        }
                    }
                }
            }
        }

      free_topo_info (ti);
    }

  sbitmap_free (changed);

  for (i = 0; i < size; ++i)
    {
      varinfo_t ivi = get_varinfo (i);
      varinfo_t ivip = get_varinfo (find (i));

      if (ivip->no_tbaa_pruning)
        {
          tree var = ivi->decl;

          if (TREE_CODE (var) == SSA_NAME)
            var = SSA_NAME_VAR (var);

          if (POINTER_TYPE_P (TREE_TYPE (var)))
            {
              DECL_NO_TBAA_P (var) = 1;

              /* Tell the RTL layer that this pointer can alias
                 anything.  */
              DECL_POINTER_ALIAS_SET (var) = 0;
            }
        }
    }
}
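
/* Illustrative sketch (not part of the original file): the worklist above
   spreads no_tbaa_pruning forward.  If variable p starts out marked
   no_tbaa_pruning and the graph has a copy edge from p to q (or a complex
   SCALAR = SCALAR constraint copying p into q), then q is marked as well,
   and q's own successors are revisited until no marks change.  The names
   p and q are hypothetical.  */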
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  */

static void
compute_points_to_sets (struct alias_info *ai)
{
  basic_block bb;
  struct scc_info *si;

  timevar_push (TV_TREE_PTA);

  init_alias_heapvars ();

  intra_create_variable_infos ();

  /* Now walk all statements and derive aliases.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;
      tree phi;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          if (is_gimple_reg (PHI_RESULT (phi)))
            {
              find_func_aliases (phi);

              /* Update various related attributes like escaped
                 addresses, pointer dereferences for loads and stores.
                 This is used when creating name tags and alias
                 sets.  */
              update_alias_info (phi, ai);
            }
        }

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
        {
          tree stmt = bsi_stmt (bsi);

          find_func_aliases (stmt);

          /* Update various related attributes like escaped
             addresses, pointer dereferences for loads and stores.
             This is used when creating name tags and alias
             sets.  */
          update_alias_info (stmt, ai);

          /* The information in CHANGE_DYNAMIC_TYPE_EXPR nodes has now
             been captured, and we can remove them.  */
          if (TREE_CODE (stmt) == CHANGE_DYNAMIC_TYPE_EXPR)
            bsi_remove (&bsi, true);
          else
            bsi_next (&bsi);
        }
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file);
    }

  if (dump_file)
    fprintf (dump_file,
             "\nCollapsing static cycles and doing variable "
             "substitution\n");

  init_graph (VEC_length (varinfo_t, varmap) * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
             "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
             "variables\n");
  rewrite_constraints (graph, si);
  free_var_substitution_info (si);

  build_succ_graph ();
  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
             "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  solve_graph (graph);

  compute_tbaa_pruning ();

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  have_alias_info = true;

  timevar_pop (TV_TREE_PTA);
}
/* Delete created points-to sets.  */

static void
delete_points_to_sets (void)
{
  unsigned int i;

  htab_delete (shared_bitmap_table);
  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "Points to sets created:%d\n",
             stats.points_to_sets_created);

  pointer_map_destroy (vi_for_tree);
  bitmap_obstack_release (&pta_obstack);
  VEC_free (constraint_t, heap, constraints);

  for (i = 0; i < graph->size; i++)
    VEC_free (constraint_t, heap, graph->complex[i]);
  free (graph->complex);

  free (graph->succs);
  free (graph->pe_rep);
  free (graph->indirect_cycles);

  VEC_free (varinfo_t, heap, varmap);
  free_alloc_pool (variable_info_pool);
  free_alloc_pool (constraint_pool);
  have_alias_info = false;
}
/* Return true if we should execute IPA PTA.  */

static bool
gate_ipa_pta (void)
{
  return (flag_unit_at_a_time != 0
          /* Don't bother doing anything if the program has errors.  */
          && !(errorcount || sorrycount));
}
/* Execute the driver for IPA PTA.  */

static unsigned int
ipa_pta_execute (void)
{
  struct cgraph_node *node;
  struct scc_info *si;

  init_alias_heapvars ();

  for (node = cgraph_nodes; node; node = node->next)
    {
      if (!node->analyzed || cgraph_is_master_clone (node))
        {
          unsigned int varid;

          varid = create_function_info_for (node->decl,
                                            cgraph_node_name (node));
          if (node->local.externally_visible)
            {
              varinfo_t fi = get_varinfo (varid);
              for (; fi; fi = fi->next)
                make_constraint_from_anything (fi);
            }
        }
    }

  for (node = cgraph_nodes; node; node = node->next)
    {
      if (node->analyzed && cgraph_is_master_clone (node))
        {
          struct function *func = DECL_STRUCT_FUNCTION (node->decl);
          basic_block bb;
          tree old_func_decl = current_function_decl;

          if (dump_file)
            fprintf (dump_file,
                     "Generating constraints for %s\n",
                     cgraph_node_name (node));

          current_function_decl = node->decl;

          FOR_EACH_BB_FN (bb, func)
            {
              block_stmt_iterator bsi;
              tree phi;

              for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
                {
                  if (is_gimple_reg (PHI_RESULT (phi)))
                    find_func_aliases (phi);
                }

              for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
                {
                  tree stmt = bsi_stmt (bsi);
                  find_func_aliases (stmt);
                }
            }

          current_function_decl = old_func_decl;
        }
      else
        {
          /* Make point to anything.  */
        }
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file);
    }

  if (dump_file)
    fprintf (dump_file,
             "\nCollapsing static cycles and doing variable "
             "substitution\n");

  init_graph (VEC_length (varinfo_t, varmap) * 2);
  build_pred_graph ();
  si = perform_var_substitution (graph);
  rewrite_constraints (graph, si);
  free_var_substitution_info (si);

  build_succ_graph ();
  move_complex_constraints (graph);
  unite_pointer_equivalences (graph);
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file)
    fprintf (dump_file, "\nSolving graph\n");

  solve_graph (graph);

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  delete_alias_heapvars ();
  delete_points_to_sets ();

  return 0;
}
struct simple_ipa_opt_pass pass_ipa_pta =
{
  gate_ipa_pta,                 /* gate */
  ipa_pta_execute,              /* execute */
  0,                            /* static_pass_number */
  TV_IPA_PTA,                   /* tv_id */
  0,                            /* properties_required */
  0,                            /* properties_provided */
  0,                            /* properties_destroyed */
  0,                            /* todo_flags_start */
  TODO_update_ssa               /* todo_flags_finish */
};
/* Initialize the heapvar for statement mapping.  */

void
init_alias_heapvars (void)
{
  if (!heapvar_for_stmt)
    heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
                                        NULL);
}

void
delete_alias_heapvars (void)
{
  htab_delete (heapvar_for_stmt);
  heapvar_for_stmt = NULL;
}

#include "gt-tree-ssa-structalias.h"