/* Tree based points-to analysis
   Copyright (C) 2005-2013 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dberlin@dberlin.org>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-inline.h"
#include "diagnostic-core.h"
#include "hash-table.h"
#include "tree-pass.h"
#include "alloc-pool.h"
#include "splay-tree.h"
#include "pointer-set.h"
/* The idea behind this analyzer is to generate set constraints from the
   program, then solve the resulting constraints in order to generate the
   points-to sets.

   Set constraints are a way of modeling program analysis problems that
   involve sets.  They consist of an inclusion constraint language,
   describing the variables (each variable is a set) and operations that
   are involved on the variables, and a set of rules that derive facts
   from these operations.  To solve a system of set constraints, you derive
   all possible facts under the rules, which gives you the correct sets
   of possible values.

   See "Efficient Field-sensitive pointer analysis for C" by David
   J. Pearce, Paul H. J. Kelly and Chris Hankin, at
   http://citeseer.ist.psu.edu/pearce04efficient.html

   Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
   of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
   http://citeseer.ist.psu.edu/heintze01ultrafast.html

   There are three types of real constraint expressions, DEREF,
   ADDRESSOF, and SCALAR.  Each constraint expression consists
   of a constraint type, a variable, and an offset.

   SCALAR is a constraint expression type used to represent x, whether
   it appears on the LHS or the RHS of a statement.
   DEREF is a constraint expression type used to represent *x, whether
   it appears on the LHS or the RHS of a statement.
   ADDRESSOF is a constraint expression used to represent &x, whether
   it appears on the LHS or the RHS of a statement.
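
   As an illustrative sketch (hand-written, not generated output), the
   statements below would be modeled roughly by the constraints shown,
   each pairing one of the expression types above on each side:

     p = &a;    constraint: p (SCALAR) = a (ADDRESSOF)
     q = p;     constraint: q (SCALAR) = p (SCALAR)
     *q = r;    constraint: q (DEREF)  = r (SCALAR)
     s = *p;    constraint: s (SCALAR) = p (DEREF)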
   Each pointer variable in the program is assigned an integer id, and
   each field of a structure variable is assigned an integer id as well.

   Structure variables are linked to their list of fields through a "next
   field" in each variable that points to the next field in offset
   order.
   Each variable for a structure field has

     1. "size", that tells the size in bits of that field.
     2. "fullsize", that tells the size in bits of the entire structure.
     3. "offset", that tells the offset in bits from the beginning of the
        structure to this field.

   For example, given a structure "foo" with two 32-bit fields "a" and
   "b", and a separate 32-bit variable "bar", the variables created are:

   foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
   foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
   bar -> id 3, size 32, offset 0, fullsize 32, next NULL
   In order to solve the system of set constraints, the following is
   done:

   1. Each constraint variable x has a solution set associated with it,
   Sol(x).

   2. Constraints are separated into direct, copy, and complex.
   Direct constraints are ADDRESSOF constraints that require no extra
   processing, such as P = &Q.
   Copy constraints are those of the form P = Q.
   Complex constraints are all the constraints involving dereferences
   and offsets (including offsetted copies).

   3. All direct constraints of the form P = &Q are processed, such
   that Q is added to Sol(P).

   4. All complex constraints for a given constraint variable are stored in a
   linked list attached to that variable's node.

   5. A directed graph is built out of the copy constraints.  Each
   constraint variable is a node in the graph, and an edge from
   Q to P is added for each copy constraint of the form P = Q.

   6. The graph is then walked, and solution sets are
   propagated along the copy edges, such that an edge from Q to P
   causes Sol(P) <- Sol(P) union Sol(Q).

   7. As we visit each node, all complex constraints associated with
   that node are processed by adding appropriate copy edges to the graph, or the
   appropriate variables to the solution set.

   8. The process of walking the graph is iterated until no solution
   sets change.

   Prior to walking the graph in steps 6 and 7, we perform static
   cycle elimination on the constraint graph, as well
   as off-line variable substitution.
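
   As a small hand-worked illustration of steps 1-8 (not generated
   output), consider the constraints produced for

     p = &a;   q = p;   r = *q;

   Step 3 processes the direct constraint and gives Sol(p) = {a}.  Step 5
   adds a copy edge p -> q, so walking the graph in step 6 propagates
   Sol(q) = {a}.  The complex constraint r = *q is handled in step 7:
   for each member v of Sol(q) a copy edge v -> r is added, here a -> r,
   so Sol(r) picks up Sol(a).  Iteration (step 8) stops once no solution
   set changes.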
   TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
   on and turned into anything), but isn't.  You can just see what offset
   inside the pointed-to struct it's going to access.

   TODO: Constant bounded arrays can be handled as if they were structs of the
   same number of elements.

   TODO: Modeling heap and incoming pointers becomes much better if we
   add fields to them as we discover them, which we could do.

   TODO: We could handle unions, but to be honest, it's probably not
   worth the pain or slowdown.  */
/* IPA-PTA optimizations possible.

   When the indirect function called is ANYTHING we can add disambiguation
   based on the function signatures (or simply the parameter count which
   is the varinfo size).  We also do not need to consider functions that
   do not have their address taken.

   The is_global_var bit which marks escape points is overly conservative
   in IPA mode.  Split it to is_escape_point and is_global_var - only
   externally visible globals are escape points in IPA mode.  This is
   also needed to fix the pt_solution_includes_global predicate
   (and thus ptr_deref_may_alias_global_p).

   The way we introduce DECL_PT_UID to avoid fixing up all points-to
   sets in the translation unit when we copy a DECL during inlining
   pessimizes precision.  The advantage is that the DECL_PT_UID keeps
   compile-time and memory usage overhead low - the points-to sets
   do not grow or get unshared as they would during a fixup phase.
   An alternative solution is to delay IPA PTA until after all
   inlining transformations have been applied.

   The way we propagate clobber/use information isn't optimized.
   It should use a new complex constraint that properly filters
   out local variables of the callee (though that would make
   the sets invalid after inlining).  OTOH we might as well
   admit defeat to WHOPR and simply do all the clobber/use analysis
   and propagation after PTA finished but before we threw away
   points-to information for memory variables.  WHOPR and PTA
   do not play along well anyway - the whole constraint solving
   would need to be done in WPA phase and it will be very interesting
   to apply the results to local SSA names during LTRANS phase.

   We probably should compute a per-function unit-ESCAPE solution
   propagating it simply like the clobber / uses solutions.  The
   solution can go alongside the non-IPA escaped solution and be
   used to query which vars escape the unit through a function.

   We never put function decls in points-to sets so we do not
   keep the set of called functions for indirect calls.

   And probably more.  */
static bool use_field_sensitive = true;
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps. */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets. */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables. */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps. */
static bitmap_obstack iteration_obstack;
static unsigned int create_variable_info_for (tree, const char *);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

typedef struct constraint *constraint_t;

#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;
struct variable_info
{
  /* ID of this variable  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable.  */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var : 1;

  /* True if this field may contain pointers.  */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers.  */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a global variable.  */
  unsigned int is_global_var : 1;

  /* True if this represents an IPA function info.  */
  unsigned int is_fn_info : 1;

  /* The ID of the variable for the next field in this structure
     or zero for the last field in this structure.  */
  unsigned next;

  /* The ID of the variable for the first field in this structure.  */
  unsigned head;

  /* Offset of this variable, in bits, from the base variable  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* Name of this variable */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;
static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
						   unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
static inline bool type_can_have_subvars (const_tree);

/* Pool of variable info structures.  */
static alloc_pool variable_info_pool;

/* Map varinfo to final pt_solution.  */
static pointer_map_t *final_solutions;
struct obstack final_solutions_obstack;

/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id.  */
static vec<varinfo_t> varmap;
/* Return the varmap element N.  */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return varmap[n];
}

/* Return the next variable in the list of sub-variables of VI
   or NULL if VI is the last sub-variable.  */

static inline varinfo_t
vi_next (varinfo_t vi)
{
  return get_varinfo (vi->next);
}
/* Static IDs for the special variables.  Variable ID zero is unused
   and used as terminator for the sub-variable chain.  */
enum { nothing_id = 1, anything_id = 2, readonly_id = 3,
       escaped_id = 4, nonlocal_id = 5,
       storedanything_id = 6, integer_id = 7 };
/* Return a new variable info structure for a variable named NAME,
   associated with tree T.  Append it to the vector of variable info
   structures.  */

static varinfo_t
new_var_info (tree t, const char *name)
{
  unsigned index = varmap.length ();
  varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);

  ret->id = index;
  ret->name = name;
  ret->decl = t;
  /* Vars without decl are artificial and do not have sub-variables.  */
  ret->is_artificial_var = (t == NULL_TREE);
  ret->is_special_var = false;
  ret->is_unknown_size_var = false;
  ret->is_full_var = (t == NULL_TREE);
  ret->is_heap_var = false;
  ret->may_have_pointers = true;
  ret->only_restrict_pointers = false;
  ret->is_global_var = (t == NULL_TREE);
  ret->is_fn_info = false;
  if (t && DECL_P (t))
    ret->is_global_var = (is_global_var (t)
			  /* We have to treat even local register variables
			     as escape points.  */
			  || (TREE_CODE (t) == VAR_DECL
			      && DECL_HARD_REGISTER (t)));
  ret->solution = BITMAP_ALLOC (&pta_obstack);
  ret->oldsolution = NULL;

  varmap.safe_push (ret);

  return ret;
}
/* A map mapping call statements to per-stmt variables for uses
   and clobbers specific to the call.  */
static struct pointer_map_t *call_stmt_vars;
390 /* Lookup or create the variable for the call statement CALL. */
393 get_call_vi (gimple call
)
398 slot_p
= pointer_map_insert (call_stmt_vars
, call
);
400 return (varinfo_t
) *slot_p
;
402 vi
= new_var_info (NULL_TREE
, "CALLUSED");
406 vi
->is_full_var
= true;
408 vi2
= new_var_info (NULL_TREE
, "CALLCLOBBERED");
412 vi2
->is_full_var
= true;
416 *slot_p
= (void *) vi
;
420 /* Lookup the variable for the call statement CALL representing
421 the uses. Returns NULL if there is nothing special about this call. */
424 lookup_call_use_vi (gimple call
)
428 slot_p
= pointer_map_contains (call_stmt_vars
, call
);
430 return (varinfo_t
) *slot_p
;
435 /* Lookup the variable for the call statement CALL representing
436 the clobbers. Returns NULL if there is nothing special about this call. */
439 lookup_call_clobber_vi (gimple call
)
441 varinfo_t uses
= lookup_call_use_vi (call
);
445 return vi_next (uses
);
448 /* Lookup or create the variable for the call statement CALL representing
452 get_call_use_vi (gimple call
)
454 return get_call_vi (call
);
457 /* Lookup or create the variable for the call statement CALL representing
460 static varinfo_t ATTRIBUTE_UNUSED
461 get_call_clobber_vi (gimple call
)
463 return vi_next (get_call_vi (call
));
typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.   */
  HOST_WIDE_INT offset;
};

/* Use 0x8000... as special unknown offset.  */
#define UNKNOWN_OFFSET HOST_WIDE_INT_MIN

typedef struct constraint_expr ce_s;
static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
static void get_constraint_for (tree, vec<ce_s> *);
static void get_constraint_for_rhs (tree, vec<ce_s> *);
static void do_deref (vec<ce_s> *);
/* Our set constraints are made up of two constraint expressions, one
   LHS and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/

struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from.  */

static vec<constraint_t> constraints;
static alloc_pool constraint_pool;
513 /* The constraint graph is represented as an array of bitmaps
514 containing successor nodes. */
516 struct constraint_graph
/* Size of this graph, which may be different from the number of
   nodes in the variable map.  */
522 /* Explicit successors of each node. */
525 /* Implicit predecessors of each node (Used for variable
527 bitmap
*implicit_preds
;
529 /* Explicit predecessors of each node (Used for variable substitution). */
532 /* Indirect cycle representatives, or -1 if the node has no indirect
534 int *indirect_cycles
;
536 /* Representative node for a node. rep[a] == a unless the node has
540 /* Equivalence class representative for a label. This is used for
541 variable substitution. */
544 /* Pointer equivalence label for a node. All nodes with the same
545 pointer equivalence label can be unified together at some point
546 (either during constraint optimization or after the constraint
550 /* Pointer equivalence representative for a label. This is used to
551 handle nodes that are pointer equivalent but not location
552 equivalent. We can unite these once the addressof constraints
553 are transformed into initial points-to sets. */
556 /* Pointer equivalence label for each node, used during variable
558 unsigned int *pointer_label
;
560 /* Location equivalence label for each node, used during location
561 equivalence finding. */
562 unsigned int *loc_label
;
564 /* Pointed-by set for each node, used during location equivalence
565 finding. This is pointed-by rather than pointed-to, because it
566 is constructed using the predecessor graph. */
569 /* Points to sets for pointer equivalence. This is *not* the actual
570 points-to sets for nodes. */
573 /* Bitmap of nodes where the bit is set if the node is a direct
574 node. Used for variable substitution. */
575 sbitmap direct_nodes
;
577 /* Bitmap of nodes where the bit is set if the node is address
578 taken. Used for variable substitution. */
579 bitmap address_taken
;
581 /* Vector of complex constraints for each graph node. Complex
582 constraints are those involving dereferences or offsets that are
584 vec
<constraint_t
> *complex;
587 static constraint_graph_t graph
;
/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  */
#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
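
/* As an illustrative example (numbers chosen arbitrarily): if varmap
   currently holds 100 variables with ids 0..99, then FIRST_REF_NODE is
   100 and node ids 100..199 (up to LAST_REF_NODE == 199) stand for the
   dereferenced forms "*v" of those variables, i.e. node FIRST_REF_NODE + i
   is the REF node for variable i.  */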
/* Return the representative node for NODE, if NODE has been unioned
   with another NODE.
   This function performs path compression along the way to finding
   the representative.  */

static unsigned int
find (unsigned int node)
{
  gcc_checking_assert (node < graph->size);
  if (graph->rep[node] != node)
    return graph->rep[node] = find (graph->rep[node]);
  return node;
}
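
/* A minimal, self-contained sketch (hypothetical, not part of GCC) of the
   union-find scheme that find above and unite below implement, using the
   same rep[] idea and path compression; NUM_NODES and the initialization
   rep[i] = i are assumptions of the sketch, and the real routines
   additionally assert against graph->size:

     #define NUM_NODES 16		   // assumed toy size

     static unsigned rep[NUM_NODES];	   // rep[i] == i means i is a root

     static unsigned
     uf_find (unsigned node)
     {
       if (rep[node] != node)
	 rep[node] = uf_find (rep[node]);  // compress the path to the root
       return rep[node];
     }

     static bool
     uf_unite (unsigned to, unsigned from)
     {
       from = uf_find (from);
       to = uf_find (to);
       if (to == from)
	 return false;
       rep[from] = to;			   // FROM's tree now hangs off TO
       return true;
     }
*/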
/* Union the TO and FROM nodes into the TO node.
   Note that at some point in the future, we may want to do
   union-by-rank, in which case we are going to have to return the
   node we unified to.  */

static bool
unite (unsigned int to, unsigned int from)
{
  gcc_checking_assert (to < graph->size && from < graph->size);
  if (to != from && graph->rep[from] != to)
    {
      graph->rep[from] = to;
      return true;
    }
  return false;
}
627 /* Create a new constraint consisting of LHS and RHS expressions. */
630 new_constraint (const struct constraint_expr lhs
,
631 const struct constraint_expr rhs
)
633 constraint_t ret
= (constraint_t
) pool_alloc (constraint_pool
);
639 /* Print out constraint C to FILE. */
642 dump_constraint (FILE *file
, constraint_t c
)
644 if (c
->lhs
.type
== ADDRESSOF
)
646 else if (c
->lhs
.type
== DEREF
)
648 fprintf (file
, "%s", get_varinfo (c
->lhs
.var
)->name
);
649 if (c
->lhs
.offset
== UNKNOWN_OFFSET
)
650 fprintf (file
, " + UNKNOWN");
651 else if (c
->lhs
.offset
!= 0)
652 fprintf (file
, " + " HOST_WIDE_INT_PRINT_DEC
, c
->lhs
.offset
);
653 fprintf (file
, " = ");
654 if (c
->rhs
.type
== ADDRESSOF
)
656 else if (c
->rhs
.type
== DEREF
)
658 fprintf (file
, "%s", get_varinfo (c
->rhs
.var
)->name
);
659 if (c
->rhs
.offset
== UNKNOWN_OFFSET
)
660 fprintf (file
, " + UNKNOWN");
661 else if (c
->rhs
.offset
!= 0)
662 fprintf (file
, " + " HOST_WIDE_INT_PRINT_DEC
, c
->rhs
.offset
);
666 void debug_constraint (constraint_t
);
667 void debug_constraints (void);
668 void debug_constraint_graph (void);
669 void debug_solution_for_var (unsigned int);
670 void debug_sa_points_to_info (void);
672 /* Print out constraint C to stderr. */
675 debug_constraint (constraint_t c
)
677 dump_constraint (stderr
, c
);
678 fprintf (stderr
, "\n");
681 /* Print out all constraints to FILE */
684 dump_constraints (FILE *file
, int from
)
688 for (i
= from
; constraints
.iterate (i
, &c
); i
++)
691 dump_constraint (file
, c
);
692 fprintf (file
, "\n");
696 /* Print out all constraints to stderr. */
699 debug_constraints (void)
701 dump_constraints (stderr
, 0);
704 /* Print the constraint graph in dot format. */
707 dump_constraint_graph (FILE *file
)
711 /* Only print the graph if it has already been initialized: */
715 /* Prints the header of the dot file: */
716 fprintf (file
, "strict digraph {\n");
717 fprintf (file
, " node [\n shape = box\n ]\n");
718 fprintf (file
, " edge [\n fontsize = \"12\"\n ]\n");
719 fprintf (file
, "\n // List of nodes and complex constraints in "
720 "the constraint graph:\n");
722 /* The next lines print the nodes in the graph together with the
723 complex constraints attached to them. */
724 for (i
= 1; i
< graph
->size
; i
++)
726 if (i
== FIRST_REF_NODE
)
730 if (i
< FIRST_REF_NODE
)
731 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
733 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
734 if (graph
->complex[i
].exists ())
738 fprintf (file
, " [label=\"\\N\\n");
739 for (j
= 0; graph
->complex[i
].iterate (j
, &c
); ++j
)
741 dump_constraint (file
, c
);
742 fprintf (file
, "\\l");
744 fprintf (file
, "\"]");
746 fprintf (file
, ";\n");
749 /* Go over the edges. */
750 fprintf (file
, "\n // Edges in the constraint graph:\n");
751 for (i
= 1; i
< graph
->size
; i
++)
757 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[i
], 0, j
, bi
)
759 unsigned to
= find (j
);
762 if (i
< FIRST_REF_NODE
)
763 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
765 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
766 fprintf (file
, " -> ");
767 if (to
< FIRST_REF_NODE
)
768 fprintf (file
, "\"%s\"", get_varinfo (to
)->name
);
770 fprintf (file
, "\"*%s\"", get_varinfo (to
- FIRST_REF_NODE
)->name
);
771 fprintf (file
, ";\n");
775 /* Prints the tail of the dot file. */
776 fprintf (file
, "}\n");
779 /* Print out the constraint graph to stderr. */
782 debug_constraint_graph (void)
784 dump_constraint_graph (stderr
);
/* The solver is a simple worklist solver that works on the following
   algorithm:

     sbitmap changed_nodes = all zeroes;
     For each node that is not already collapsed:
	 set bit in changed nodes

     while (changed_count > 0)
       {
	 compute topological ordering for constraint graph

	 find and collapse cycles in the constraint graph (updating
	 changed if necessary)

	 for each node (n) in the graph in topological order:

	   Process each complex constraint associated with the node,
	   updating changed if necessary.

	   For each outgoing edge from n, propagate the solution from n to
	   the destination of the edge, updating changed as necessary.
       }  */
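
/* A compact stand-alone sketch of the propagation step above (illustrative
   only; the node count, the dense edge matrix and plain bit masks are
   assumptions of the sketch, while the real solver below uses bitmaps, a
   topological order and cycle collapsing instead):

     #include <stdbool.h>

     #define N_NODES 8			    // assumed toy graph size
     static unsigned sol[N_NODES];	    // points-to sets as bit masks
     static bool succ[N_NODES][N_NODES];    // copy edges q -> p
     static bool changed[N_NODES];	    // worklist membership

     static void
     propagate (void)
     {
       bool any = true;
       while (any)			    // iterate to a fixed point
	 {
	   any = false;
	   for (unsigned q = 0; q < N_NODES; q++)
	     {
	       if (!changed[q])
		 continue;
	       changed[q] = false;
	       for (unsigned p = 0; p < N_NODES; p++)
		 if (succ[q][p] && (sol[p] | sol[q]) != sol[p])
		   {
		     sol[p] |= sol[q];	    // Sol(P) <- Sol(P) U Sol(Q)
		     changed[p] = true;
		     any = true;
		   }
	     }
	 }
     }
*/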
816 /* Return true if two constraint expressions A and B are equal. */
819 constraint_expr_equal (struct constraint_expr a
, struct constraint_expr b
)
821 return a
.type
== b
.type
&& a
.var
== b
.var
&& a
.offset
== b
.offset
;
/* Return true if constraint expression A is less than constraint expression
   B.  This is just arbitrary, but consistent, in order to give them an
   ordering.  */
829 constraint_expr_less (struct constraint_expr a
, struct constraint_expr b
)
831 if (a
.type
== b
.type
)
834 return a
.offset
< b
.offset
;
836 return a
.var
< b
.var
;
839 return a
.type
< b
.type
;
842 /* Return true if constraint A is less than constraint B. This is just
843 arbitrary, but consistent, in order to give them an ordering. */
846 constraint_less (const constraint_t
&a
, const constraint_t
&b
)
848 if (constraint_expr_less (a
->lhs
, b
->lhs
))
850 else if (constraint_expr_less (b
->lhs
, a
->lhs
))
853 return constraint_expr_less (a
->rhs
, b
->rhs
);
856 /* Return true if two constraints A and B are equal. */
859 constraint_equal (struct constraint a
, struct constraint b
)
861 return constraint_expr_equal (a
.lhs
, b
.lhs
)
862 && constraint_expr_equal (a
.rhs
, b
.rhs
);
866 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
869 constraint_vec_find (vec
<constraint_t
> vec
,
870 struct constraint lookfor
)
878 place
= vec
.lower_bound (&lookfor
, constraint_less
);
879 if (place
>= vec
.length ())
882 if (!constraint_equal (*found
, lookfor
))
887 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
890 constraint_set_union (vec
<constraint_t
> *to
,
891 vec
<constraint_t
> *from
)
896 FOR_EACH_VEC_ELT (*from
, i
, c
)
898 if (constraint_vec_find (*to
, *c
) == NULL
)
900 unsigned int place
= to
->lower_bound (c
, constraint_less
);
901 to
->safe_insert (place
, c
);
906 /* Expands the solution in SET to all sub-fields of variables included. */
909 solution_set_expand (bitmap set
)
914 /* In a first pass expand to the head of the variables we need to
   add all sub-fields of.  This avoids quadratic behavior.  */
916 EXECUTE_IF_SET_IN_BITMAP (set
, 0, j
, bi
)
918 varinfo_t v
= get_varinfo (j
);
919 if (v
->is_artificial_var
922 bitmap_set_bit (set
, v
->head
);
925 /* In the second pass now expand all head variables with subfields. */
926 EXECUTE_IF_SET_IN_BITMAP (set
, 0, j
, bi
)
928 varinfo_t v
= get_varinfo (j
);
929 if (v
->is_artificial_var
933 for (v
= vi_next (v
); v
!= NULL
; v
= vi_next (v
))
934 bitmap_set_bit (set
, v
->id
);
/* Union solution sets TO and FROM, and add INC to each member of FROM in the
   process.  */
942 set_union_with_increment (bitmap to
, bitmap from
, HOST_WIDE_INT inc
)
944 bool changed
= false;
948 /* If the solution of FROM contains anything it is good enough to transfer
950 if (bitmap_bit_p (from
, anything_id
))
951 return bitmap_set_bit (to
, anything_id
);
953 /* For zero offset simply union the solution into the destination. */
955 return bitmap_ior_into (to
, from
);
957 /* If the offset is unknown we have to expand the solution to
959 if (inc
== UNKNOWN_OFFSET
)
961 bitmap tmp
= BITMAP_ALLOC (&iteration_obstack
);
962 bitmap_copy (tmp
, from
);
963 solution_set_expand (tmp
);
964 changed
|= bitmap_ior_into (to
, tmp
);
969 /* For non-zero offset union the offsetted solution into the destination. */
970 EXECUTE_IF_SET_IN_BITMAP (from
, 0, i
, bi
)
972 varinfo_t vi
= get_varinfo (i
);
      /* If this is a variable with just one field just set its bit
	 in the solution.  */
976 if (vi
->is_artificial_var
977 || vi
->is_unknown_size_var
979 changed
|= bitmap_set_bit (to
, i
);
982 unsigned HOST_WIDE_INT fieldoffset
= vi
->offset
+ inc
;
984 /* If the offset makes the pointer point to before the
985 variable use offset zero for the field lookup. */
987 && fieldoffset
> vi
->offset
)
990 vi
= first_or_preceding_vi_for_offset (vi
, fieldoffset
);
992 changed
|= bitmap_set_bit (to
, vi
->id
);
993 /* If the result is not exactly at fieldoffset include the next
994 field as well. See get_constraint_for_ptr_offset for more
996 if (vi
->offset
!= fieldoffset
998 changed
|= bitmap_set_bit (to
, vi
->next
);
/* Insert constraint C into the list of complex constraints for graph
   node VAR.  */
1009 insert_into_complex (constraint_graph_t graph
,
1010 unsigned int var
, constraint_t c
)
1012 vec
<constraint_t
> complex = graph
->complex[var
];
1013 unsigned int place
= complex.lower_bound (c
, constraint_less
);
1015 /* Only insert constraints that do not already exist. */
1016 if (place
>= complex.length ()
1017 || !constraint_equal (*c
, *complex[place
]))
1018 graph
->complex[var
].safe_insert (place
, c
);
1022 /* Condense two variable nodes into a single variable node, by moving
1023 all associated info from SRC to TO. */
1026 merge_node_constraints (constraint_graph_t graph
, unsigned int to
,
1032 gcc_checking_assert (find (from
) == to
);
1034 /* Move all complex constraints from src node into to node */
1035 FOR_EACH_VEC_ELT (graph
->complex[from
], i
, c
)
      /* In complex constraints for node src, we may have either
	 a = *src or *src = a, or an offsetted constraint which is
	 always added to the rhs node's constraints.  */
1041 if (c
->rhs
.type
== DEREF
)
1043 else if (c
->lhs
.type
== DEREF
)
1048 constraint_set_union (&graph
->complex[to
], &graph
->complex[from
]);
1049 graph
->complex[from
].release ();
1053 /* Remove edges involving NODE from GRAPH. */
1056 clear_edges_for_node (constraint_graph_t graph
, unsigned int node
)
1058 if (graph
->succs
[node
])
1059 BITMAP_FREE (graph
->succs
[node
]);
1062 /* Merge GRAPH nodes FROM and TO into node TO. */
1065 merge_graph_nodes (constraint_graph_t graph
, unsigned int to
,
1068 if (graph
->indirect_cycles
[from
] != -1)
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other.  */
1076 if (graph
->indirect_cycles
[to
] == -1)
1077 graph
->indirect_cycles
[to
] = graph
->indirect_cycles
[from
];
1080 /* Merge all the successor edges. */
1081 if (graph
->succs
[from
])
1083 if (!graph
->succs
[to
])
1084 graph
->succs
[to
] = BITMAP_ALLOC (&pta_obstack
);
1085 bitmap_ior_into (graph
->succs
[to
],
1086 graph
->succs
[from
]);
1089 clear_edges_for_node (graph
, from
);
1093 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1094 it doesn't exist in the graph already. */
1097 add_implicit_graph_edge (constraint_graph_t graph
, unsigned int to
,
1103 if (!graph
->implicit_preds
[to
])
1104 graph
->implicit_preds
[to
] = BITMAP_ALLOC (&predbitmap_obstack
);
1106 if (bitmap_set_bit (graph
->implicit_preds
[to
], from
))
1107 stats
.num_implicit_edges
++;
1110 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1111 it doesn't exist in the graph already.
1112 Return false if the edge already existed, true otherwise. */
1115 add_pred_graph_edge (constraint_graph_t graph
, unsigned int to
,
1118 if (!graph
->preds
[to
])
1119 graph
->preds
[to
] = BITMAP_ALLOC (&predbitmap_obstack
);
1120 bitmap_set_bit (graph
->preds
[to
], from
);
1123 /* Add a graph edge to GRAPH, going from FROM to TO if
1124 it doesn't exist in the graph already.
1125 Return false if the edge already existed, true otherwise. */
1128 add_graph_edge (constraint_graph_t graph
, unsigned int to
,
1139 if (!graph
->succs
[from
])
1140 graph
->succs
[from
] = BITMAP_ALLOC (&pta_obstack
);
1141 if (bitmap_set_bit (graph
->succs
[from
], to
))
1144 if (to
< FIRST_REF_NODE
&& from
< FIRST_REF_NODE
)
1152 /* Initialize the constraint graph structure to contain SIZE nodes. */
1155 init_graph (unsigned int size
)
1159 graph
= XCNEW (struct constraint_graph
);
1161 graph
->succs
= XCNEWVEC (bitmap
, graph
->size
);
1162 graph
->indirect_cycles
= XNEWVEC (int, graph
->size
);
1163 graph
->rep
= XNEWVEC (unsigned int, graph
->size
);
1164 /* ??? Macros do not support template types with multiple arguments,
1165 so we use a typedef to work around it. */
1166 typedef vec
<constraint_t
> vec_constraint_t_heap
;
1167 graph
->complex = XCNEWVEC (vec_constraint_t_heap
, size
);
1168 graph
->pe
= XCNEWVEC (unsigned int, graph
->size
);
1169 graph
->pe_rep
= XNEWVEC (int, graph
->size
);
1171 for (j
= 0; j
< graph
->size
; j
++)
1174 graph
->pe_rep
[j
] = -1;
1175 graph
->indirect_cycles
[j
] = -1;
1179 /* Build the constraint graph, adding only predecessor edges right now. */
1182 build_pred_graph (void)
1188 graph
->implicit_preds
= XCNEWVEC (bitmap
, graph
->size
);
1189 graph
->preds
= XCNEWVEC (bitmap
, graph
->size
);
1190 graph
->pointer_label
= XCNEWVEC (unsigned int, graph
->size
);
1191 graph
->loc_label
= XCNEWVEC (unsigned int, graph
->size
);
1192 graph
->pointed_by
= XCNEWVEC (bitmap
, graph
->size
);
1193 graph
->points_to
= XCNEWVEC (bitmap
, graph
->size
);
1194 graph
->eq_rep
= XNEWVEC (int, graph
->size
);
1195 graph
->direct_nodes
= sbitmap_alloc (graph
->size
);
1196 graph
->address_taken
= BITMAP_ALLOC (&predbitmap_obstack
);
1197 bitmap_clear (graph
->direct_nodes
);
1199 for (j
= 1; j
< FIRST_REF_NODE
; j
++)
1201 if (!get_varinfo (j
)->is_special_var
)
1202 bitmap_set_bit (graph
->direct_nodes
, j
);
1205 for (j
= 0; j
< graph
->size
; j
++)
1206 graph
->eq_rep
[j
] = -1;
1208 for (j
= 0; j
< varmap
.length (); j
++)
1209 graph
->indirect_cycles
[j
] = -1;
1211 FOR_EACH_VEC_ELT (constraints
, i
, c
)
1213 struct constraint_expr lhs
= c
->lhs
;
1214 struct constraint_expr rhs
= c
->rhs
;
1215 unsigned int lhsvar
= lhs
.var
;
1216 unsigned int rhsvar
= rhs
.var
;
1218 if (lhs
.type
== DEREF
)
1221 if (rhs
.offset
== 0 && lhs
.offset
== 0 && rhs
.type
== SCALAR
)
1222 add_pred_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1224 else if (rhs
.type
== DEREF
)
1227 if (rhs
.offset
== 0 && lhs
.offset
== 0 && lhs
.type
== SCALAR
)
1228 add_pred_graph_edge (graph
, lhsvar
, FIRST_REF_NODE
+ rhsvar
);
1230 bitmap_clear_bit (graph
->direct_nodes
, lhsvar
);
1232 else if (rhs
.type
== ADDRESSOF
)
1237 if (graph
->points_to
[lhsvar
] == NULL
)
1238 graph
->points_to
[lhsvar
] = BITMAP_ALLOC (&predbitmap_obstack
);
1239 bitmap_set_bit (graph
->points_to
[lhsvar
], rhsvar
);
1241 if (graph
->pointed_by
[rhsvar
] == NULL
)
1242 graph
->pointed_by
[rhsvar
] = BITMAP_ALLOC (&predbitmap_obstack
);
1243 bitmap_set_bit (graph
->pointed_by
[rhsvar
], lhsvar
);
1245 /* Implicitly, *x = y */
1246 add_implicit_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1248 /* All related variables are no longer direct nodes. */
1249 bitmap_clear_bit (graph
->direct_nodes
, rhsvar
);
1250 v
= get_varinfo (rhsvar
);
1251 if (!v
->is_full_var
)
1253 v
= get_varinfo (v
->head
);
1256 bitmap_clear_bit (graph
->direct_nodes
, v
->id
);
1261 bitmap_set_bit (graph
->address_taken
, rhsvar
);
1263 else if (lhsvar
> anything_id
1264 && lhsvar
!= rhsvar
&& lhs
.offset
== 0 && rhs
.offset
== 0)
1267 add_pred_graph_edge (graph
, lhsvar
, rhsvar
);
1268 /* Implicitly, *x = *y */
1269 add_implicit_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
,
1270 FIRST_REF_NODE
+ rhsvar
);
1272 else if (lhs
.offset
!= 0 || rhs
.offset
!= 0)
1274 if (rhs
.offset
!= 0)
1275 bitmap_clear_bit (graph
->direct_nodes
, lhs
.var
);
1276 else if (lhs
.offset
!= 0)
1277 bitmap_clear_bit (graph
->direct_nodes
, rhs
.var
);
1282 /* Build the constraint graph, adding successor edges. */
1285 build_succ_graph (void)
1290 FOR_EACH_VEC_ELT (constraints
, i
, c
)
1292 struct constraint_expr lhs
;
1293 struct constraint_expr rhs
;
1294 unsigned int lhsvar
;
1295 unsigned int rhsvar
;
1302 lhsvar
= find (lhs
.var
);
1303 rhsvar
= find (rhs
.var
);
1305 if (lhs
.type
== DEREF
)
1307 if (rhs
.offset
== 0 && lhs
.offset
== 0 && rhs
.type
== SCALAR
)
1308 add_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1310 else if (rhs
.type
== DEREF
)
1312 if (rhs
.offset
== 0 && lhs
.offset
== 0 && lhs
.type
== SCALAR
)
1313 add_graph_edge (graph
, lhsvar
, FIRST_REF_NODE
+ rhsvar
);
1315 else if (rhs
.type
== ADDRESSOF
)
1318 gcc_checking_assert (find (rhs
.var
) == rhs
.var
);
1319 bitmap_set_bit (get_varinfo (lhsvar
)->solution
, rhsvar
);
1321 else if (lhsvar
> anything_id
1322 && lhsvar
!= rhsvar
&& lhs
.offset
== 0 && rhs
.offset
== 0)
1324 add_graph_edge (graph
, lhsvar
, rhsvar
);
1328 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1329 receive pointers. */
1330 t
= find (storedanything_id
);
1331 for (i
= integer_id
+ 1; i
< FIRST_REF_NODE
; ++i
)
1333 if (!bitmap_bit_p (graph
->direct_nodes
, i
)
1334 && get_varinfo (i
)->may_have_pointers
)
1335 add_graph_edge (graph
, find (i
), t
);
1338 /* Everything stored to ANYTHING also potentially escapes. */
1339 add_graph_edge (graph
, find (escaped_id
), t
);
1343 /* Changed variables on the last iteration. */
1344 static bitmap changed
;
1346 /* Strongly Connected Component visitation info. */
1353 unsigned int *node_mapping
;
1355 vec
<unsigned> scc_stack
;
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of the
   current graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */
1371 scc_visit (constraint_graph_t graph
, struct scc_info
*si
, unsigned int n
)
1375 unsigned int my_dfs
;
1377 bitmap_set_bit (si
->visited
, n
);
1378 si
->dfs
[n
] = si
->current_index
++;
1379 my_dfs
= si
->dfs
[n
];
1381 /* Visit all the successors. */
1382 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[n
], 0, i
, bi
)
1386 if (i
> LAST_REF_NODE
)
1390 if (bitmap_bit_p (si
->deleted
, w
))
1393 if (!bitmap_bit_p (si
->visited
, w
))
1394 scc_visit (graph
, si
, w
);
1396 unsigned int t
= find (w
);
1397 gcc_checking_assert (find (n
) == n
);
1398 if (si
->dfs
[t
] < si
->dfs
[n
])
1399 si
->dfs
[n
] = si
->dfs
[t
];
1402 /* See if any components have been identified. */
1403 if (si
->dfs
[n
] == my_dfs
)
1405 if (si
->scc_stack
.length () > 0
1406 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
1408 bitmap scc
= BITMAP_ALLOC (NULL
);
1409 unsigned int lowest_node
;
1412 bitmap_set_bit (scc
, n
);
1414 while (si
->scc_stack
.length () != 0
1415 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
1417 unsigned int w
= si
->scc_stack
.pop ();
1419 bitmap_set_bit (scc
, w
);
1422 lowest_node
= bitmap_first_set_bit (scc
);
1423 gcc_assert (lowest_node
< FIRST_REF_NODE
);
1425 /* Collapse the SCC nodes into a single node, and mark the
1427 EXECUTE_IF_SET_IN_BITMAP (scc
, 0, i
, bi
)
1429 if (i
< FIRST_REF_NODE
)
1431 if (unite (lowest_node
, i
))
1432 unify_nodes (graph
, lowest_node
, i
, false);
1436 unite (lowest_node
, i
);
1437 graph
->indirect_cycles
[i
- FIRST_REF_NODE
] = lowest_node
;
1441 bitmap_set_bit (si
->deleted
, n
);
1444 si
->scc_stack
.safe_push (n
);
1447 /* Unify node FROM into node TO, updating the changed count if
1448 necessary when UPDATE_CHANGED is true. */
1451 unify_nodes (constraint_graph_t graph
, unsigned int to
, unsigned int from
,
1452 bool update_changed
)
1454 gcc_checking_assert (to
!= from
&& find (to
) == to
);
1456 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1457 fprintf (dump_file
, "Unifying %s to %s\n",
1458 get_varinfo (from
)->name
,
1459 get_varinfo (to
)->name
);
1462 stats
.unified_vars_dynamic
++;
1464 stats
.unified_vars_static
++;
1466 merge_graph_nodes (graph
, to
, from
);
1467 merge_node_constraints (graph
, to
, from
);
1469 /* Mark TO as changed if FROM was changed. If TO was already marked
1470 as changed, decrease the changed count. */
1473 && bitmap_clear_bit (changed
, from
))
1474 bitmap_set_bit (changed
, to
);
1475 varinfo_t fromvi
= get_varinfo (from
);
1476 if (fromvi
->solution
)
1478 /* If the solution changes because of the merging, we need to mark
1479 the variable as changed. */
1480 varinfo_t tovi
= get_varinfo (to
);
1481 if (bitmap_ior_into (tovi
->solution
, fromvi
->solution
))
1484 bitmap_set_bit (changed
, to
);
1487 BITMAP_FREE (fromvi
->solution
);
1488 if (fromvi
->oldsolution
)
1489 BITMAP_FREE (fromvi
->oldsolution
);
1491 if (stats
.iterations
> 0
1492 && tovi
->oldsolution
)
1493 BITMAP_FREE (tovi
->oldsolution
);
1495 if (graph
->succs
[to
])
1496 bitmap_clear_bit (graph
->succs
[to
], to
);
1499 /* Information needed to compute the topological ordering of a graph. */
1503 /* sbitmap of visited nodes. */
  /* Array that stores the topological order of the graph, *in
     reverse*.  */
  vec<unsigned> topo_order;
1511 /* Initialize and return a topological info structure. */
1513 static struct topo_info
*
1514 init_topo_info (void)
1516 size_t size
= graph
->size
;
1517 struct topo_info
*ti
= XNEW (struct topo_info
);
1518 ti
->visited
= sbitmap_alloc (size
);
1519 bitmap_clear (ti
->visited
);
1520 ti
->topo_order
.create (1);
1525 /* Free the topological sort info pointed to by TI. */
1528 free_topo_info (struct topo_info
*ti
)
1530 sbitmap_free (ti
->visited
);
1531 ti
->topo_order
.release ();
1535 /* Visit the graph in topological order, and store the order in the
1536 topo_info structure. */
1539 topo_visit (constraint_graph_t graph
, struct topo_info
*ti
,
1545 bitmap_set_bit (ti
->visited
, n
);
1547 if (graph
->succs
[n
])
1548 EXECUTE_IF_SET_IN_BITMAP (graph
->succs
[n
], 0, j
, bi
)
1550 if (!bitmap_bit_p (ti
->visited
, j
))
1551 topo_visit (graph
, ti
, j
);
1554 ti
->topo_order
.safe_push (n
);
1557 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1558 starting solution for y. */
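
/* For illustration (hand-worked, not generated output): given x = *y with
   Sol(y) = {a, b} and offset 0, the loop below adds copy edges a -> x and
   b -> x and unions Sol(a) and Sol(b) into Sol(x).  With a constant offset
   each member of Sol(y) is first mapped to the sub-field at that offset
   via first_vi_for_offset, and with UNKNOWN_OFFSET the whole reachability
   set of Sol(y) is used instead.  */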
1561 do_sd_constraint (constraint_graph_t graph
, constraint_t c
,
1564 unsigned int lhs
= c
->lhs
.var
;
1566 bitmap sol
= get_varinfo (lhs
)->solution
;
1569 HOST_WIDE_INT roffset
= c
->rhs
.offset
;
1571 /* Our IL does not allow this. */
1572 gcc_checking_assert (c
->lhs
.offset
== 0);
1574 /* If the solution of Y contains anything it is good enough to transfer
1576 if (bitmap_bit_p (delta
, anything_id
))
1578 flag
|= bitmap_set_bit (sol
, anything_id
);
  /* If we do not know at which offset the rhs is dereferenced, compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
1585 if (roffset
== UNKNOWN_OFFSET
)
1587 solution_set_expand (delta
);
1588 /* No further offset processing is necessary. */
1592 /* For each variable j in delta (Sol(y)), add
1593 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1594 EXECUTE_IF_SET_IN_BITMAP (delta
, 0, j
, bi
)
1596 varinfo_t v
= get_varinfo (j
);
1597 HOST_WIDE_INT fieldoffset
= v
->offset
+ roffset
;
1601 fieldoffset
= v
->offset
;
1602 else if (roffset
!= 0)
1603 v
= first_vi_for_offset (v
, fieldoffset
);
1604 /* If the access is outside of the variable we can ignore it. */
1612 /* Adding edges from the special vars is pointless.
1613 They don't have sets that can change. */
1614 if (get_varinfo (t
)->is_special_var
)
1615 flag
|= bitmap_ior_into (sol
, get_varinfo (t
)->solution
);
1616 /* Merging the solution from ESCAPED needlessly increases
1617 the set. Use ESCAPED as representative instead. */
1618 else if (v
->id
== escaped_id
)
1619 flag
|= bitmap_set_bit (sol
, escaped_id
);
1620 else if (v
->may_have_pointers
1621 && add_graph_edge (graph
, lhs
, t
))
1622 flag
|= bitmap_ior_into (sol
, get_varinfo (t
)->solution
);
1624 /* If the variable is not exactly at the requested offset
1625 we have to include the next one. */
1626 if (v
->offset
== (unsigned HOST_WIDE_INT
)fieldoffset
1631 fieldoffset
= v
->offset
;
1637 /* If the LHS solution changed, mark the var as changed. */
1640 get_varinfo (lhs
)->solution
= sol
;
1641 bitmap_set_bit (changed
, lhs
);
1645 /* Process a constraint C that represents *(x + off) = y using DELTA
1646 as the starting solution for x. */
1649 do_ds_constraint (constraint_t c
, bitmap delta
)
1651 unsigned int rhs
= c
->rhs
.var
;
1652 bitmap sol
= get_varinfo (rhs
)->solution
;
1655 HOST_WIDE_INT loff
= c
->lhs
.offset
;
1656 bool escaped_p
= false;
1658 /* Our IL does not allow this. */
1659 gcc_checking_assert (c
->rhs
.offset
== 0);
1661 /* If the solution of y contains ANYTHING simply use the ANYTHING
1662 solution. This avoids needlessly increasing the points-to sets. */
1663 if (bitmap_bit_p (sol
, anything_id
))
1664 sol
= get_varinfo (find (anything_id
))->solution
;
  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     the artificial variable STOREDANYTHING.  */
1669 if (bitmap_bit_p (delta
, anything_id
))
1671 unsigned t
= find (storedanything_id
);
1672 if (add_graph_edge (graph
, t
, rhs
))
1674 if (bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1675 bitmap_set_bit (changed
, t
);
  /* If we do not know at which offset the lhs is dereferenced, compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
1683 if (loff
== UNKNOWN_OFFSET
)
1685 solution_set_expand (delta
);
1689 /* For each member j of delta (Sol(x)), add an edge from y to j and
1690 union Sol(y) into Sol(j) */
1691 EXECUTE_IF_SET_IN_BITMAP (delta
, 0, j
, bi
)
1693 varinfo_t v
= get_varinfo (j
);
1695 HOST_WIDE_INT fieldoffset
= v
->offset
+ loff
;
1698 fieldoffset
= v
->offset
;
1700 v
= first_vi_for_offset (v
, fieldoffset
);
1701 /* If the access is outside of the variable we can ignore it. */
1707 if (v
->may_have_pointers
)
1709 /* If v is a global variable then this is an escape point. */
1710 if (v
->is_global_var
1713 t
= find (escaped_id
);
1714 if (add_graph_edge (graph
, t
, rhs
)
1715 && bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1716 bitmap_set_bit (changed
, t
);
1717 /* Enough to let rhs escape once. */
1721 if (v
->is_special_var
)
1725 if (add_graph_edge (graph
, t
, rhs
)
1726 && bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1727 bitmap_set_bit (changed
, t
);
1730 /* If the variable is not exactly at the requested offset
1731 we have to include the next one. */
1732 if (v
->offset
== (unsigned HOST_WIDE_INT
)fieldoffset
1737 fieldoffset
= v
->offset
;
/* Handle a non-simple (simple meaning requires no iteration)
   constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved).  */
1747 do_complex_constraint (constraint_graph_t graph
, constraint_t c
, bitmap delta
)
1749 if (c
->lhs
.type
== DEREF
)
1751 if (c
->rhs
.type
== ADDRESSOF
)
1758 do_ds_constraint (c
, delta
);
1761 else if (c
->rhs
.type
== DEREF
)
1764 if (!(get_varinfo (c
->lhs
.var
)->is_special_var
))
1765 do_sd_constraint (graph
, c
, delta
);
1773 gcc_checking_assert (c
->rhs
.type
== SCALAR
&& c
->lhs
.type
== SCALAR
);
1774 solution
= get_varinfo (c
->rhs
.var
)->solution
;
1775 tmp
= get_varinfo (c
->lhs
.var
)->solution
;
1777 flag
= set_union_with_increment (tmp
, solution
, c
->rhs
.offset
);
1780 bitmap_set_bit (changed
, c
->lhs
.var
);
1784 /* Initialize and return a new SCC info structure. */
1786 static struct scc_info
*
1787 init_scc_info (size_t size
)
1789 struct scc_info
*si
= XNEW (struct scc_info
);
1792 si
->current_index
= 0;
1793 si
->visited
= sbitmap_alloc (size
);
1794 bitmap_clear (si
->visited
);
1795 si
->deleted
= sbitmap_alloc (size
);
1796 bitmap_clear (si
->deleted
);
1797 si
->node_mapping
= XNEWVEC (unsigned int, size
);
1798 si
->dfs
= XCNEWVEC (unsigned int, size
);
1800 for (i
= 0; i
< size
; i
++)
1801 si
->node_mapping
[i
] = i
;
1803 si
->scc_stack
.create (1);
1807 /* Free an SCC info structure pointed to by SI */
1810 free_scc_info (struct scc_info
*si
)
1812 sbitmap_free (si
->visited
);
1813 sbitmap_free (si
->deleted
);
1814 free (si
->node_mapping
);
1816 si
->scc_stack
.release ();
1821 /* Find indirect cycles in GRAPH that occur, using strongly connected
1822 components, and note them in the indirect cycles map.
1824 This technique comes from Ben Hardekopf and Calvin Lin,
1825 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1826 Lines of Code", submitted to PLDI 2007. */
1829 find_indirect_cycles (constraint_graph_t graph
)
1832 unsigned int size
= graph
->size
;
1833 struct scc_info
*si
= init_scc_info (size
);
1835 for (i
= 0; i
< MIN (LAST_REF_NODE
, size
); i
++ )
1836 if (!bitmap_bit_p (si
->visited
, i
) && find (i
) == i
)
1837 scc_visit (graph
, si
, i
);
1842 /* Compute a topological ordering for GRAPH, and store the result in the
1843 topo_info structure TI. */
1846 compute_topo_order (constraint_graph_t graph
,
1847 struct topo_info
*ti
)
1850 unsigned int size
= graph
->size
;
1852 for (i
= 0; i
!= size
; ++i
)
1853 if (!bitmap_bit_p (ti
->visited
, i
) && find (i
) == i
)
1854 topo_visit (graph
, ti
, i
);
/* Structure used for hash value numbering of pointer equivalence
   classes.  */
1860 typedef struct equiv_class_label
1863 unsigned int equivalence_class
;
1865 } *equiv_class_label_t
;
1866 typedef const struct equiv_class_label
*const_equiv_class_label_t
;
1868 /* Equiv_class_label hashtable helpers. */
1870 struct equiv_class_hasher
: typed_free_remove
<equiv_class_label
>
1872 typedef equiv_class_label value_type
;
1873 typedef equiv_class_label compare_type
;
1874 static inline hashval_t
hash (const value_type
*);
1875 static inline bool equal (const value_type
*, const compare_type
*);
/* Hash function for an equiv_class_label_t.  */
1881 equiv_class_hasher::hash (const value_type
*ecl
)
1883 return ecl
->hashcode
;
1886 /* Equality function for two equiv_class_label_t's. */
1889 equiv_class_hasher::equal (const value_type
*eql1
, const compare_type
*eql2
)
1891 return (eql1
->hashcode
== eql2
->hashcode
1892 && bitmap_equal_p (eql1
->labels
, eql2
->labels
));
/* A hashtable for mapping a bitmap of labels->pointer equivalence
   class.  */
1897 static hash_table
<equiv_class_hasher
> pointer_equiv_class_table
;
/* A hashtable for mapping a bitmap of labels->location equivalence
   class.  */
1901 static hash_table
<equiv_class_hasher
> location_equiv_class_table
;
/* Lookup an equivalence class in TABLE by the bitmap of LABELS with
   the hash it contains.  Sets *REF_LABELS to the bitmap LABELS
   is equivalent to.  */
1907 static equiv_class_label
*
1908 equiv_class_lookup_or_add (hash_table
<equiv_class_hasher
> table
, bitmap labels
)
1910 equiv_class_label
**slot
;
1911 equiv_class_label ecl
;
1913 ecl
.labels
= labels
;
1914 ecl
.hashcode
= bitmap_hash (labels
);
1915 slot
= table
.find_slot_with_hash (&ecl
, ecl
.hashcode
, INSERT
);
1918 *slot
= XNEW (struct equiv_class_label
);
1919 (*slot
)->labels
= labels
;
1920 (*slot
)->hashcode
= ecl
.hashcode
;
1921 (*slot
)->equivalence_class
= 0;
/* Perform offline variable substitution.

   This is a worst case quadratic time way of identifying variables
   that must have equivalent points-to sets, including those caused by
   static cycles, and single entry subgraphs, in the constraint graph.

   The technique is described in "Exploiting Pointer and Location
   Equivalence to Optimize Pointer Analysis" (in the 14th International
   Static Analysis Symposium (SAS), August 2007).  It is known as the
   "HU" algorithm, and is equivalent to value numbering the collapsed
   constraint graph including evaluating unions.

   The general method of finding equivalence classes is as follows:
   Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
   Initialize all non-REF nodes to be direct nodes.
   For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
   variable}.
   For each constraint containing the dereference, we also do the same
   thing.

   We then compute SCC's in the graph and unify nodes in the same SCC,
   including their points-to sets.

   For each non-collapsed node x:
     Visit all unvisited explicit incoming edges.
     Ignoring all non-pointers, set pts(x) = Union of pts(y) for each
     incoming edge from y.
     Lookup the equivalence class for pts(x).
     If we found one, equivalence_class(x) = found class.
     Otherwise, equivalence_class(x) = new class, and new_class is
     added to the lookup table.

   All direct nodes with the same equivalence class can be replaced
   with a single representative node.
   All unlabeled nodes (label == 0) are not pointers and all edges
   involving them can be eliminated.
   We perform these optimizations during rewrite_constraints.

   In addition to pointer equivalence class finding, we also perform
   location equivalence class finding.  This is the set of variables
   that always appear together in points-to sets.  We use this to
   compress the size of the points-to sets.  */
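
/* A small hand-worked illustration of the labeling (hypothetical values,
   not generated output): with constraints a = &x, b = &x and c = a, the
   pass assigns pts(a) = pts(b) = {x}, so a and b receive the same pointer
   equivalence label, and pts(c) = pts(a) gives c that label as well; a
   node that never receives a label (label == 0) is known not to be a
   pointer and its edges can be dropped before solving.  */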
1970 /* Current maximum pointer equivalence class id. */
1971 static int pointer_equiv_class
;
1973 /* Current maximum location equivalence class id. */
1974 static int location_equiv_class
;
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  */
1980 condense_visit (constraint_graph_t graph
, struct scc_info
*si
, unsigned int n
)
1984 unsigned int my_dfs
;
1986 gcc_checking_assert (si
->node_mapping
[n
] == n
);
1987 bitmap_set_bit (si
->visited
, n
);
1988 si
->dfs
[n
] = si
->current_index
++;
1989 my_dfs
= si
->dfs
[n
];
1991 /* Visit all the successors. */
1992 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[n
], 0, i
, bi
)
1994 unsigned int w
= si
->node_mapping
[i
];
1996 if (bitmap_bit_p (si
->deleted
, w
))
1999 if (!bitmap_bit_p (si
->visited
, w
))
2000 condense_visit (graph
, si
, w
);
2002 unsigned int t
= si
->node_mapping
[w
];
2003 gcc_checking_assert (si
->node_mapping
[n
] == n
);
2004 if (si
->dfs
[t
] < si
->dfs
[n
])
2005 si
->dfs
[n
] = si
->dfs
[t
];
2008 /* Visit all the implicit predecessors. */
2009 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->implicit_preds
[n
], 0, i
, bi
)
2011 unsigned int w
= si
->node_mapping
[i
];
2013 if (bitmap_bit_p (si
->deleted
, w
))
2016 if (!bitmap_bit_p (si
->visited
, w
))
2017 condense_visit (graph
, si
, w
);
2019 unsigned int t
= si
->node_mapping
[w
];
2020 gcc_assert (si
->node_mapping
[n
] == n
);
2021 if (si
->dfs
[t
] < si
->dfs
[n
])
2022 si
->dfs
[n
] = si
->dfs
[t
];
2025 /* See if any components have been identified. */
2026 if (si
->dfs
[n
] == my_dfs
)
2028 while (si
->scc_stack
.length () != 0
2029 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
2031 unsigned int w
= si
->scc_stack
.pop ();
2032 si
->node_mapping
[w
] = n
;
2034 if (!bitmap_bit_p (graph
->direct_nodes
, w
))
2035 bitmap_clear_bit (graph
->direct_nodes
, n
);
2037 /* Unify our nodes. */
2038 if (graph
->preds
[w
])
2040 if (!graph
->preds
[n
])
2041 graph
->preds
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2042 bitmap_ior_into (graph
->preds
[n
], graph
->preds
[w
]);
2044 if (graph
->implicit_preds
[w
])
2046 if (!graph
->implicit_preds
[n
])
2047 graph
->implicit_preds
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2048 bitmap_ior_into (graph
->implicit_preds
[n
],
2049 graph
->implicit_preds
[w
]);
2051 if (graph
->points_to
[w
])
2053 if (!graph
->points_to
[n
])
2054 graph
->points_to
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2055 bitmap_ior_into (graph
->points_to
[n
],
2056 graph
->points_to
[w
]);
2059 bitmap_set_bit (si
->deleted
, n
);
2062 si
->scc_stack
.safe_push (n
);
2065 /* Label pointer equivalences. */
2068 label_visit (constraint_graph_t graph
, struct scc_info
*si
, unsigned int n
)
2070 unsigned int i
, first_pred
;
2073 bitmap_set_bit (si
->visited
, n
);
  /* Label and union our incoming edges' points-to sets.  */
2077 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[n
], 0, i
, bi
)
2079 unsigned int w
= si
->node_mapping
[i
];
2080 if (!bitmap_bit_p (si
->visited
, w
))
2081 label_visit (graph
, si
, w
);
2083 /* Skip unused edges */
2084 if (w
== n
|| graph
->pointer_label
[w
] == 0)
2087 if (graph
->points_to
[w
])
2089 if (!graph
->points_to
[n
])
2091 if (first_pred
== -1U)
2095 graph
->points_to
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2096 bitmap_ior (graph
->points_to
[n
],
2097 graph
->points_to
[first_pred
],
2098 graph
->points_to
[w
]);
2102 bitmap_ior_into (graph
->points_to
[n
], graph
->points_to
[w
]);
2106 /* Indirect nodes get fresh variables and a new pointer equiv class. */
2107 if (!bitmap_bit_p (graph
->direct_nodes
, n
))
2109 if (!graph
->points_to
[n
])
2111 graph
->points_to
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2112 if (first_pred
!= -1U)
2113 bitmap_copy (graph
->points_to
[n
], graph
->points_to
[first_pred
]);
2115 bitmap_set_bit (graph
->points_to
[n
], FIRST_REF_NODE
+ n
);
2116 graph
->pointer_label
[n
] = pointer_equiv_class
++;
2117 equiv_class_label_t ecl
;
2118 ecl
= equiv_class_lookup_or_add (pointer_equiv_class_table
,
2119 graph
->points_to
[n
]);
2120 ecl
->equivalence_class
= graph
->pointer_label
[n
];
2124 /* If there was only a single non-empty predecessor the pointer equiv
2125 class is the same. */
2126 if (!graph
->points_to
[n
])
2128 if (first_pred
!= -1U)
2130 graph
->pointer_label
[n
] = graph
->pointer_label
[first_pred
];
2131 graph
->points_to
[n
] = graph
->points_to
[first_pred
];
2136 if (!bitmap_empty_p (graph
->points_to
[n
]))
2138 equiv_class_label_t ecl
;
2139 ecl
= equiv_class_lookup_or_add (pointer_equiv_class_table
,
2140 graph
->points_to
[n
]);
2141 if (ecl
->equivalence_class
== 0)
2142 ecl
->equivalence_class
= pointer_equiv_class
++;
2145 BITMAP_FREE (graph
->points_to
[n
]);
2146 graph
->points_to
[n
] = ecl
->labels
;
2148 graph
->pointer_label
[n
] = ecl
->equivalence_class
;
/* Print the pred graph in dot format.  */

dump_pred_graph (struct scc_info *si, FILE *file)
  /* Only print the graph if it has already been initialized:  */

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, " node [\n shape = box\n ]\n");
  fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
  fprintf (file, "\n // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 1; i < graph->size; i++)
      if (i == FIRST_REF_NODE)
      if (si->node_mapping[i] != i)
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->points_to[i]
	  && !bitmap_empty_p (graph->points_to[i]))
	  fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
	  EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
	    fprintf (file, " %d", j);
	  fprintf (file, " }\"]");
      fprintf (file, ";\n");

  /* Go over the edges.  */
  fprintf (file, "\n // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
      if (si->node_mapping[i] != i)
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
	  unsigned from = si->node_mapping[j];
	  if (from < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (from)->name);
	    fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
/* Perform offline variable substitution, discovering equivalence
   classes, and eliminating non-pointer variables.  */

static struct scc_info *
perform_var_substitution (constraint_graph_t graph)
  unsigned int size = graph->size;
  struct scc_info *si = init_scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  pointer_equiv_class_table.create (511);
  location_equiv_class_table.create (511);
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      condense_visit (graph, si, si->node_mapping[i]);

  if (dump_file && (dump_flags & TDF_GRAPH))
      fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
	       "in dot format:\n");
      dump_pred_graph (si, dump_file);
      fprintf (dump_file, "\n\n");

  bitmap_clear (si->visited);
  /* Actually label the nodes for pointer equivalences.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
      if (!graph->pointed_by[i])
      pointed_by = BITMAP_ALLOC (&iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	bitmap_set_bit (pointed_by,
			graph->pointer_label[si->node_mapping[j]]);

      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = location_equiv_class++;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Found location equivalence for node %s\n",
		 get_varinfo (i)->name);
      BITMAP_FREE (pointed_by);

      graph->loc_label[i] = ecl->equivalence_class;

  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 1; i < FIRST_REF_NODE; i++)
	unsigned j = si->node_mapping[i];

	fprintf (dump_file, "%s node id %d ",
		 bitmap_bit_p (graph->direct_nodes, i)
		 ? "Direct" : "Indirect", i);
	if (i < FIRST_REF_NODE)
	  fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	  fprintf (dump_file, "\"*%s\"",
		   get_varinfo (i - FIRST_REF_NODE)->name);
	fprintf (dump_file, " mapped to SCC leader node id %d ", j);
	if (j < FIRST_REF_NODE)
	  fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
	  fprintf (dump_file, "\"*%s\"\n",
		   get_varinfo (j - FIRST_REF_NODE)->name);

	fprintf (dump_file,
		 "Equivalence classes for %s node id %d ",
		 bitmap_bit_p (graph->direct_nodes, i)
		 ? "direct" : "indirect", i);
	if (i < FIRST_REF_NODE)
	  fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	  fprintf (dump_file, "\"*%s\"",
		   get_varinfo (i - FIRST_REF_NODE)->name);
	fprintf (dump_file,
		 ": pointer %d, location %d\n",
		 graph->pointer_label[i], graph->loc_label[i]);

  /* Quickly eliminate our non-pointer variables.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
      unsigned int node = si->node_mapping[i];

      if (graph->pointer_label[node] == 0)
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
/* Free information that was only necessary for variable
   substitution.  */

free_var_substitution_info (struct scc_info *si)
  free (graph->pointer_label);
  free (graph->loc_label);
  free (graph->pointed_by);
  free (graph->points_to);
  free (graph->eq_rep);
  sbitmap_free (graph->direct_nodes);
  pointer_equiv_class_table.dispose ();
  location_equiv_class_table.dispose ();
  bitmap_obstack_release (&iteration_obstack);
/* Return an existing node that is equivalent to NODE, which has
   equivalence class LABEL, if one exists.  Return NODE otherwise.  */

find_equivalent_node (constraint_graph_t graph,
		      unsigned int node, unsigned int label)
  /* If the address version of this variable is unused, we can
     substitute it for anything else with the same label.
     Otherwise, we know the pointers are equivalent, but not the
     locations, and we can unite them later.  */

  if (!bitmap_bit_p (graph->address_taken, node))
      gcc_checking_assert (label < graph->size);

      if (graph->eq_rep[label] != -1)
	  /* Unify the two variables since we know they are equivalent.  */
	  if (unite (graph->eq_rep[label], node))
	    unify_nodes (graph, graph->eq_rep[label], node, false);
	  return graph->eq_rep[label];
      graph->eq_rep[label] = node;
      graph->pe_rep[label] = node;

      gcc_checking_assert (label < graph->size);
      graph->pe[node] = label;
      if (graph->pe_rep[label] == -1)
	graph->pe_rep[label] = node;
/* Unite pointer equivalent but not location equivalent nodes in
   GRAPH.  This may only be performed once variable substitution is
   finished.  */

unite_pointer_equivalences (constraint_graph_t graph)
  /* Go through the pointer equivalences and unite them to their
     representative, if they aren't already.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
      unsigned int label = graph->pe[i];
	  int label_rep = graph->pe_rep[label];

	  if (label_rep == -1)
	  label_rep = find (label_rep);
	  if (label_rep >= 0 && unite (label_rep, find (i)))
	    unify_nodes (graph, label_rep, i, false);
/* Move complex constraints to the GRAPH nodes they belong to.  */

move_complex_constraints (constraint_graph_t graph)
  FOR_EACH_VEC_ELT (constraints, i, c)
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;

      if (lhs.type == DEREF)
	insert_into_complex (graph, lhs.var, c);
      else if (rhs.type == DEREF)
	  if (!(get_varinfo (lhs.var)->is_special_var))
	    insert_into_complex (graph, rhs.var, c);
      else if (rhs.type != ADDRESSOF && lhs.var > anything_id
	       && (lhs.offset != 0 || rhs.offset != 0))
	insert_into_complex (graph, rhs.var, c);
/* Optimize and rewrite complex constraints while performing
   collapsing of equivalent nodes.  SI is the SCC_INFO that is the
   result of perform_variable_substitution.  */

rewrite_constraints (constraint_graph_t graph,
		     struct scc_info *si)
#ifdef ENABLE_CHECKING
  for (unsigned int j = 0; j < graph->size; j++)
    gcc_assert (find (j) == j);
#endif

  FOR_EACH_VEC_ELT (constraints, i, c)
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = find (lhs.var);
      unsigned int rhsvar = find (rhs.var);
      unsigned int lhsnode, rhsnode;
      unsigned int lhslabel, rhslabel;

      lhsnode = si->node_mapping[lhsvar];
      rhsnode = si->node_mapping[rhsvar];
      lhslabel = graph->pointer_label[lhsnode];
      rhslabel = graph->pointer_label[rhsnode];

      /* See if it is really a non-pointer variable, and if so, ignore
	 the constraint.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "%s is a non-pointer variable, "
		       "ignoring constraint:",
		       get_varinfo (lhs.var)->name);
	      dump_constraint (dump_file, c);
	      fprintf (dump_file, "\n");
	  constraints[i] = NULL;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "%s is a non-pointer variable, "
		       "ignoring constraint:",
		       get_varinfo (rhs.var)->name);
	      dump_constraint (dump_file, c);
	      fprintf (dump_file, "\n");
	  constraints[i] = NULL;

      lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
      rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
      c->lhs.var = lhsvar;
      c->rhs.var = rhsvar;
/* Eliminate indirect cycles involving NODE.  Return true if NODE was
   part of an SCC, false otherwise.  */

eliminate_indirect_cycles (unsigned int node)
  if (graph->indirect_cycles[node] != -1
      && !bitmap_empty_p (get_varinfo (node)->solution))
      vec<unsigned> queue = vNULL;
      unsigned int to = find (graph->indirect_cycles[node]);

      /* We can't touch the solution set and call unify_nodes
	 at the same time, because unify_nodes is going to do
	 bitmap unions into it.  */

      EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
	  if (find (i) == i && i != to)
	    queue.safe_push (i);

      queue.iterate (queuepos, &i);
      unify_nodes (graph, to, i, true);
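
/* In rough terms: when the offline phase discovered that the reference
   node *NODE participates in a cycle, indirect_cycles[NODE] names the
   representative that cycle was collapsed to.  The code above then unites
   the representatives currently in NODE's solution with that node, so the
   cycle is collapsed once instead of being propagated around repeatedly
   by the solver.  */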
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changing.  This corresponds to steps 6-8 in the solving list given above.  */

solve_graph (constraint_graph_t graph)
  unsigned int size = graph->size;

  changed = BITMAP_ALLOC (NULL);

  /* Mark all initial non-collapsed nodes as changed.  */
  for (i = 1; i < size; i++)
      varinfo_t ivi = get_varinfo (i);
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || graph->complex[i].length () > 0))
	bitmap_set_bit (changed, i);

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  while (!bitmap_empty_p (changed))
      struct topo_info *ti = init_topo_info ();

      bitmap_obstack_initialize (&iteration_obstack);

      compute_topo_order (graph, ti);

      while (ti->topo_order.length () != 0)
	  i = ti->topo_order.pop ();

	  /* If this variable is not a representative, skip it.  */

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (i) && find (i) != i)

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (bitmap_clear_bit (changed, i))
	      vec<constraint_t> complex = graph->complex[i];
	      varinfo_t vi = get_varinfo (i);
	      bool solution_empty;

	      /* Compute the changed set of solution bits.  If anything
		 is in the solution just propagate that.
		 ??? But we shouldn't have ended up with "changed" set ...  */
	      if (bitmap_bit_p (vi->solution, anything_id))
		      && bitmap_bit_p (vi->oldsolution, anything_id))
		  bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
	      else if (vi->oldsolution)
		bitmap_and_compl (pts, vi->solution, vi->oldsolution);
		bitmap_copy (pts, vi->solution);

	      if (bitmap_empty_p (pts))

	      if (vi->oldsolution)
		bitmap_ior_into (vi->oldsolution, pts);
		  vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
		  bitmap_copy (vi->oldsolution, pts);

	      solution = vi->solution;
	      solution_empty = bitmap_empty_p (solution);

	      /* Process the complex constraints  */
	      FOR_EACH_VEC_ELT (complex, j, c)
		  /* XXX: This is going to unsort the constraints in
		     some cases, which will occasionally add duplicate
		     constraints during unification.  This does not
		     affect correctness.  */
		  c->lhs.var = find (c->lhs.var);
		  c->rhs.var = find (c->rhs.var);

		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, pts);

	      solution_empty = bitmap_empty_p (solution);

	      if (!solution_empty)
		  unsigned eff_escaped_id = find (escaped_id);

		  /* Propagate solution to all successors.  */
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
		      unsigned int to = find (j);
		      tmp = get_varinfo (to)->solution;

		      /* Don't try to propagate to ourselves.  */

		      /* If we propagate from ESCAPED use ESCAPED as
			 representative.  */
		      if (i == eff_escaped_id)
			flag = bitmap_set_bit (tmp, escaped_id);
			flag = bitmap_ior_into (tmp, pts);

			bitmap_set_bit (changed, to);

      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);

  BITMAP_FREE (changed);
  bitmap_obstack_release (&oldpta_obstack);
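
/* One round of the loop above, in informal terms: assume node b has the
   successor edge b -> a (from a copy constraint a = b) and b's solution
   just grew by { x }.  Then pts = { x } is the delta, bitmap_ior_into adds
   x to a's solution, a is put back on the changed worklist, and the complex
   constraints attached to a are re-evaluated on its next visit.  The
   iteration stops once no solution grows any more.  */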
/* Map from trees to variable infos.  */
static struct pointer_map_t *vi_for_tree;


/* Insert ID as the variable id for tree T in the vi_for_tree map.  */

insert_vi_for_tree (tree t, varinfo_t vi)
  void **slot = pointer_map_insert (vi_for_tree, t);
  gcc_assert (*slot == NULL);

/* Find the variable info for tree T in VI_FOR_TREE.  If T does not
   exist in the map, return NULL, otherwise, return the varinfo we found.  */

lookup_vi_for_tree (tree t)
  void **slot = pointer_map_contains (vi_for_tree, t);

  return (varinfo_t) *slot;
/* Return a printable name for DECL  */

alias_get_name (tree decl)
  const char *res = NULL;
  int num_printed = 0;

  if (TREE_CODE (decl) == SSA_NAME)
      res = get_name (decl);
	num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
	num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
      if (num_printed > 0)
	  res = ggc_strdup (temp);
  else if (DECL_P (decl))
      if (DECL_ASSEMBLER_NAME_SET_P (decl))
	res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
	  res = get_name (decl);
	  num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
	  if (num_printed > 0)
	      res = ggc_strdup (temp);
/* Find the variable id for tree T in the map.
   If T doesn't exist in the map, create an entry for it and return it.  */

get_vi_for_tree (tree t)
  void **slot = pointer_map_contains (vi_for_tree, t);
    return get_varinfo (create_variable_info_for (t, alias_get_name (t)));

  return (varinfo_t) *slot;
/* Get a scalar constraint expression for a new temporary variable.  */

static struct constraint_expr
new_scalar_tmp_constraint_exp (const char *name)
  struct constraint_expr tmp;

  vi = new_var_info (NULL_TREE, name);
  vi->is_full_var = 1;
/* Get a constraint expression vector from an SSA_VAR_P node.
   If address_p is true, the result will be taken its address of.  */

get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
  struct constraint_expr cexpr;

  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
  gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (t) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (t)
      && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
      get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);

  /* For global variables resort to the alias target.  */
  if (TREE_CODE (t) == VAR_DECL
      && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
      struct varpool_node *node = varpool_get_node (t);
      if (node && node->symbol.alias && node->symbol.analyzed)
	  node = varpool_variable_node (node, NULL);
	  t = node->symbol.decl;

  vi = get_vi_for_tree (t);
  cexpr.type = SCALAR;

  /* If we determine the result is "anything", and we know this is readonly,
     say it points to readonly memory instead.  */
  if (cexpr.var == anything_id && TREE_READONLY (t))
      cexpr.type = ADDRESSOF;
      cexpr.var = readonly_id;

  /* If we are not taking the address of the constraint expr, add all
     sub-fields of the variable as well.  */
      && !vi->is_full_var)
      for (; vi; vi = vi_next (vi))
	  results->safe_push (cexpr);

  results->safe_push (cexpr);
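
/* For instance, for a use of a global structure variable such as

     struct S { int *f; int *g; } s;
     ... = s;

   the loop above pushes one SCALAR expression per field (s.f and s.g),
   whereas taking &s only needs the single expression for the base
   variable.  */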
/* Process constraint T, performing various simplifications and then
   adding it to our list of overall constraints.  */

process_constraint (constraint_t t)
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  gcc_assert (rhs.var < varmap.length ());
  gcc_assert (lhs.var < varmap.length ());

  /* If we didn't get any useful constraint from the lhs we get
     &ANYTHING as fallback from get_constraint_for.  Deal with
     it here by turning it into *ANYTHING.  */
  if (lhs.type == ADDRESSOF
      && lhs.var == anything_id)

  /* ADDRESSOF on the lhs is invalid.  */
  gcc_assert (lhs.type != ADDRESSOF);

  /* We shouldn't add constraints from things that cannot have pointers.
     It's not completely trivial to avoid in the callers, so do it here.  */
  if (rhs.type != ADDRESSOF
      && !get_varinfo (rhs.var)->may_have_pointers)

  /* Likewise adding to the solution of a non-pointer var isn't useful.  */
  if (!get_varinfo (lhs.var)->may_have_pointers)

  /* This can happen in our IR with things like n->a = *p  */
  if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
      /* Split into tmp = *rhs, *lhs = tmp  */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
  else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
      /* Split into tmp = &rhs, *lhs = tmp  */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));

  gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
  constraints.safe_push (t);
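
/* As a small example, the GIMPLE statements

     p = &a;     yield   p = &a          (ADDRESSOF on the rhs)
     q = p;      yield   q = p           (a simple copy)
     *q = r;     yield   *q = r          (DEREF on the lhs)

   while something like *q = &a is split here into
   derefaddrtmp = &a; *q = derefaddrtmp using the temporary above.  */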
/* Return the position, in bits, of FIELD_DECL from the beginning of its
   structure.  */

static HOST_WIDE_INT
bitpos_of_field (const tree fdecl)
  if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
      || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))

  return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
	  + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
/* Get constraint expressions for offsetting PTR by OFFSET.  Stores the
   resulting constraint expressions in *RESULTS.  */

get_constraint_for_ptr_offset (tree ptr, tree offset,
  struct constraint_expr c;
  HOST_WIDE_INT rhsoffset;

  /* If we do not do field-sensitive PTA adding offsets to pointers
     does not change the points-to solution.  */
  if (!use_field_sensitive)
      get_constraint_for_rhs (ptr, results);

  /* If the offset is not a non-negative integer constant that fits
     in a HOST_WIDE_INT, we have to fall back to a conservative
     solution which includes all sub-fields of all pointed-to
     variables of ptr.  */
  if (offset == NULL_TREE
      || TREE_CODE (offset) != INTEGER_CST)
    rhsoffset = UNKNOWN_OFFSET;
      /* Sign-extend the offset.  */
      double_int soffset = tree_to_double_int (offset)
			   .sext (TYPE_PRECISION (TREE_TYPE (offset)));
      if (!soffset.fits_shwi ())
	rhsoffset = UNKNOWN_OFFSET;
	  /* Make sure the bit-offset also fits.  */
	  HOST_WIDE_INT rhsunitoffset = soffset.low;
	  rhsoffset = rhsunitoffset * BITS_PER_UNIT;
	  if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
	    rhsoffset = UNKNOWN_OFFSET;

  get_constraint_for_rhs (ptr, results);

  /* As we are eventually appending to the solution do not use
     vec::iterate here.  */
  n = results->length ();
  for (j = 0; j < n; j++)
      curr = get_varinfo (c.var);

      if (c.type == ADDRESSOF
	  /* If this varinfo represents a full variable just use it.  */
	  && curr->is_full_var)
      else if (c.type == ADDRESSOF
	       /* If we do not know the offset add all subfields.  */
	       && rhsoffset == UNKNOWN_OFFSET)
	  varinfo_t temp = get_varinfo (curr->head);
	      struct constraint_expr c2;
	      c2.type = ADDRESSOF;
	      if (c2.var != c.var)
		results->safe_push (c2);
	      temp = vi_next (temp);
      else if (c.type == ADDRESSOF)
	  unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;

	  /* Search the sub-field which overlaps with the
	     pointed-to offset.  If the result is outside of the variable
	     we have to provide a conservative result, as the variable is
	     still reachable from the resulting pointer (even though it
	     technically cannot point to anything).  The last and first
	     sub-fields are such conservative results.
	     ??? If we always had a sub-field for &object + 1 then
	     we could represent this in a more precise way.  */
	      && curr->offset < offset)
	    temp = first_or_preceding_vi_for_offset (curr, offset);

	  /* If the found variable is not exactly at the pointed to
	     result, we have to include the next variable in the
	     solution as well.  Otherwise two increments by offset / 2
	     do not result in the same or a conservative superset
	     solution.  */
	  if (temp->offset != offset
	      struct constraint_expr c2;
	      c2.var = temp->next;
	      c2.type = ADDRESSOF;
	      results->safe_push (c2);
	  c.offset = rhsoffset;
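
/* For example, with field-sensitive analysis enabled, &s.f + 8 (a constant
   byte offset) shifts an ADDRESSOF result to the sub-field at the
   corresponding bit offset, while p + n with a variable n falls back to
   UNKNOWN_OFFSET and the address of every sub-field of the pointed-to
   variables is added instead.  */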
/* Given a COMPONENT_REF T, return the constraint_expr vector for it.
   If address_p is true the result will be taken its address of.
   If lhs_p is true then the constraint expression is assumed to be used
   as the lhs.  */

get_constraint_for_component_ref (tree t, vec<ce_s> *results,
				  bool address_p, bool lhs_p)
  HOST_WIDE_INT bitsize = -1;
  HOST_WIDE_INT bitmaxsize = -1;
  HOST_WIDE_INT bitpos;

  /* Some people like to do cute things like take the address of
     &0->a.b  */
  while (handled_component_p (forzero)
	 || INDIRECT_REF_P (forzero)
	 || TREE_CODE (forzero) == MEM_REF)
    forzero = TREE_OPERAND (forzero, 0);

  if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
      struct constraint_expr temp;
      temp.var = integer_id;
      results->safe_push (temp);

  /* Handle type-punning through unions.  If we are extracting a pointer
     from a union via a possibly type-punning access that pointer
     points to anything, similar to a conversion of an integer to
     a pointer.  */
       TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
       u = TREE_OPERAND (u, 0))
    if (TREE_CODE (u) == COMPONENT_REF
	&& TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
	struct constraint_expr temp;
	temp.var = anything_id;
	temp.type = ADDRESSOF;
	results->safe_push (temp);

  t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);

  /* Pretend to take the address of the base, we'll take care of
     adding the required subset of sub-fields below.  */
  get_constraint_for_1 (t, results, true, lhs_p);
  gcc_assert (results->length () == 1);
  struct constraint_expr &result = results->last ();

  if (result.type == SCALAR
      && get_varinfo (result.var)->is_full_var)
    /* For single-field vars do not bother about the offset.  */
  else if (result.type == SCALAR)
      /* In languages like C, you can access one past the end of an
	 array.  You aren't allowed to dereference it, so we can
	 ignore this constraint.  When we handle pointer subtraction,
	 we may have to do something cute here.  */

      if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
	  /* It's also not true that the constraint will actually start at the
	     right offset, it may start in some padding.  We only care about
	     setting the constraint to the first actual field it touches, so
	     walk to find it.  */
	  struct constraint_expr cexpr = result;

	  for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
	      if (ranges_overlap_p (curr->offset, curr->size,
				    bitpos, bitmaxsize))
		  cexpr.var = curr->id;
		  results->safe_push (cexpr);

	  /* If we are going to take the address of this field then
	     to be able to compute reachability correctly add at least
	     the last field of the variable.  */
	  if (address_p && results->length () == 0)
	      curr = get_varinfo (cexpr.var);
	      while (curr->next != 0)
		curr = vi_next (curr);
	      cexpr.var = curr->id;
	      results->safe_push (cexpr);
	  else if (results->length () == 0)
	    /* Assert that we found *some* field there.  The user couldn't be
	       accessing *only* padding.  */
	    /* Still the user could access one past the end of an array
	       embedded in a struct resulting in accessing *only* padding.  */
	    /* Or accessing only padding via type-punning to a type
	       that has a field just in padding space.  */
	      cexpr.type = SCALAR;
	      cexpr.var = anything_id;
	      results->safe_push (cexpr);
      else if (bitmaxsize == 0)
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Access to zero-sized part of variable,"
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Access to past the end of variable, ignoring\n");
  else if (result.type == DEREF)
      /* If we do not know exactly where the access goes say so.  Note
	 that only for non-structure accesses we know that we access
	 at most one subfield of any variable.  */
	  || bitsize != bitmaxsize
	  || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
	  || result.offset == UNKNOWN_OFFSET)
	result.offset = UNKNOWN_OFFSET;
	result.offset += bitpos;
  else if (result.type == ADDRESSOF)
      /* We can end up here for component references on a
	 VIEW_CONVERT_EXPR <>(&foobar).  */
      result.type = SCALAR;
      result.var = anything_id;
/* Dereference the constraint expression CONS, and return the result.
   DEREF (ADDRESSOF) = SCALAR
   DEREF (SCALAR) = DEREF
   DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
   This is needed so that we can handle dereferencing DEREF constraints.  */

do_deref (vec<ce_s> *constraints)
  struct constraint_expr *c;

  FOR_EACH_VEC_ELT (*constraints, i, c)
      if (c->type == SCALAR)
      else if (c->type == ADDRESSOF)
      else if (c->type == DEREF)
	  struct constraint_expr tmplhs;
	  tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
	  process_constraint (new_constraint (tmplhs, *c));
	  c->var = tmplhs.var;
/* Given a tree T, return the constraint expression for taking the
   address of it.  */

get_constraint_for_address_of (tree t, vec<ce_s> *results)
  struct constraint_expr *c;

  get_constraint_for_1 (t, results, true, true);

  FOR_EACH_VEC_ELT (*results, i, c)
      if (c->type == DEREF)
	c->type = ADDRESSOF;
/* Given a tree T, return the constraint expression for it.  */

get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case.  The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.

     Do not do that if -fno-delete-null-pointer-checks though, because
     in that case *NULL does not fail, so it _should_ alias *anything.
     It is not worth adding a new option or renaming the existing one,
     since this case is relatively obscure.  */
  if ((TREE_CODE (t) == INTEGER_CST
       && integer_zerop (t))
      /* The only valid CONSTRUCTORs in gimple with pointer typed
	 elements are zero-initializer.  But in IPA mode we also
	 process global initializers, so verify at least.  */
      || (TREE_CODE (t) == CONSTRUCTOR
	  && CONSTRUCTOR_NELTS (t) == 0))
      if (flag_delete_null_pointer_checks)
	temp.var = nothing_id;
	temp.var = nonlocal_id;
      temp.type = ADDRESSOF;
      results->safe_push (temp);

  /* String constants are read-only.  */
  if (TREE_CODE (t) == STRING_CST)
      temp.var = readonly_id;
      results->safe_push (temp);

  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    case tcc_expression:
	switch (TREE_CODE (t))
	      get_constraint_for_address_of (TREE_OPERAND (t, 0), results);

	switch (TREE_CODE (t))
	      struct constraint_expr cs;
	      get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
					     TREE_OPERAND (t, 1), results);

	      /* If we are not taking the address then make sure to process
		 all subvariables we might access.  */
	      cs = results->last ();
	      if (cs.type == DEREF
		  && type_can_have_subvars (TREE_TYPE (t)))
		  /* For dereferences this means we have to defer it
		     to solving time.  */
		  results->last ().offset = UNKNOWN_OFFSET;

	      if (cs.type != SCALAR)

	      vi = get_varinfo (cs.var);
	      curr = vi_next (vi);
	      if (!vi->is_full_var
		  unsigned HOST_WIDE_INT size;
		  if (host_integerp (TYPE_SIZE (TREE_TYPE (t)), 1))
		    size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t)));
		  for (; curr; curr = vi_next (curr))
		      if (curr->offset - vi->offset < size)
			  results->safe_push (cs);
	  case ARRAY_RANGE_REF:
	    get_constraint_for_component_ref (t, results, address_p, lhs_p);
	  case VIEW_CONVERT_EXPR:
	    get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
	  /* We are missing handling for TARGET_MEM_REF here.  */
    case tcc_exceptional:
	switch (TREE_CODE (t))
	      get_constraint_for_ssa_var (t, results, address_p);

	      vec<ce_s> tmp = vNULL;
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
		  struct constraint_expr *rhsp;
		  get_constraint_for_1 (val, &tmp, address_p, lhs_p);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    results->safe_push (*rhsp);
	      /* We do not know whether the constructor was complete,
		 so technically we have to add &NOTHING or &ANYTHING
		 like we do for an empty constructor as well.  */
    case tcc_declaration:
	get_constraint_for_ssa_var (t, results, address_p);

      /* We cannot refer to automatic variables through constants.  */
      temp.type = ADDRESSOF;
      temp.var = nonlocal_id;
      results->safe_push (temp);

      /* The default fallback is a constraint from anything.  */
      temp.type = ADDRESSOF;
      temp.var = anything_id;
      results->safe_push (temp);
/* Given a gimple tree T, return the constraint expression vector for it.  */

get_constraint_for (tree t, vec<ce_s> *results)
  gcc_assert (results->length () == 0);

  get_constraint_for_1 (t, results, false, true);

/* Given a gimple tree T, return the constraint expression vector for it
   to be used as the rhs of a constraint.  */

get_constraint_for_rhs (tree t, vec<ce_s> *results)
  gcc_assert (results->length () == 0);

  get_constraint_for_1 (t, results, false, false);
/* Efficiently generates constraints from all entries in *RHSC to all
   entries in *LHSC.  */

process_all_all_constraints (vec<ce_s> lhsc,
  struct constraint_expr *lhsp, *rhsp;

  if (lhsc.length () <= 1 || rhsc.length () <= 1)
      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	  process_constraint (new_constraint (*lhsp, *rhsp));

      struct constraint_expr tmp;
      tmp = new_scalar_tmp_constraint_exp ("allalltmp");
      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	process_constraint (new_constraint (tmp, *rhsp));
      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	process_constraint (new_constraint (*lhsp, tmp));
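
/* The temporary matters for quadratic cases: copying three lhs expressions
   from three rhs expressions directly would emit 3 * 3 = 9 constraints,
   while routing them through "allalltmp" emits only 3 + 3 = 6, at the cost
   of one extra variable.  */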
/* Handle aggregate copies by expanding into copies of the respective
   fields of the structures.  */

do_structure_copy (tree lhsop, tree rhsop)
  struct constraint_expr *lhsp, *rhsp;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for_rhs (rhsop, &rhsc);

  if (lhsp->type == DEREF
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
      || rhsp->type == DEREF)
      if (lhsp->type == DEREF)
	  gcc_assert (lhsc.length () == 1);
	  lhsp->offset = UNKNOWN_OFFSET;
      if (rhsp->type == DEREF)
	  gcc_assert (rhsc.length () == 1);
	  rhsp->offset = UNKNOWN_OFFSET;
      process_all_all_constraints (lhsc, rhsc);
  else if (lhsp->type == SCALAR
	   && (rhsp->type == SCALAR
	       || rhsp->type == ADDRESSOF))
      HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
      HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;

      get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
      get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
      for (j = 0; lhsc.iterate (j, &lhsp);)
	  varinfo_t lhsv, rhsv;

	  lhsv = get_varinfo (lhsp->var);
	  rhsv = get_varinfo (rhsp->var);
	  if (lhsv->may_have_pointers
	      && (lhsv->is_full_var
		  || rhsv->is_full_var
		  || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
				       rhsv->offset + lhsoffset, rhsv->size)))
	    process_constraint (new_constraint (*lhsp, *rhsp));
	  if (!rhsv->is_full_var
	      && (lhsv->is_full_var
		  || (lhsv->offset + rhsoffset + lhsv->size
		      > rhsv->offset + lhsoffset + rhsv->size)))
	      if (k >= rhsc.length ())
3626 make_constraints_to (unsigned id
, vec
<ce_s
> rhsc
)
3628 struct constraint_expr
*c
;
3629 struct constraint_expr includes
;
3633 includes
.offset
= 0;
3634 includes
.type
= SCALAR
;
3636 FOR_EACH_VEC_ELT (rhsc
, j
, c
)
3637 process_constraint (new_constraint (includes
, *c
));
3640 /* Create a constraint ID = OP. */
3643 make_constraint_to (unsigned id
, tree op
)
3645 vec
<ce_s
> rhsc
= vNULL
;
3646 get_constraint_for_rhs (op
, &rhsc
);
3647 make_constraints_to (id
, rhsc
);
/* Create a constraint ID = &FROM.  */

make_constraint_from (varinfo_t vi, int from)
  struct constraint_expr lhs, rhs;

  rhs.type = ADDRESSOF;
  process_constraint (new_constraint (lhs, rhs));

/* Create a constraint ID = FROM.  */

make_copy_constraint (varinfo_t vi, int from)
  struct constraint_expr lhs, rhs;

  process_constraint (new_constraint (lhs, rhs));

/* Make constraints necessary to make OP escape.  */

make_escape_constraint (tree op)
  make_constraint_to (escaped_id, op);

/* Add constraints so that the solution of VI is transitively closed.  */

make_transitive_closure_constraints (varinfo_t vi)
  struct constraint_expr lhs, rhs;

  process_constraint (new_constraint (lhs, rhs));

  /* VAR = VAR + UNKNOWN;  */
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (new_constraint (lhs, rhs));
/* Temporary storage for fake var decls.  */
struct obstack fake_var_decl_obstack;

/* Build a fake VAR_DECL acting as referrer to a DECL_UID.  */

build_fake_var_decl (tree type)
  tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
  memset (decl, 0, sizeof (struct tree_var_decl));
  TREE_SET_CODE (decl, VAR_DECL);
  TREE_TYPE (decl) = type;
  DECL_UID (decl) = allocate_decl_uid ();
  SET_DECL_PT_UID (decl, -1);
  layout_decl (decl, 0);
/* Create a new artificial heap variable with NAME.
   Return the created variable.  */

make_heapvar (const char *name)
  heapvar = build_fake_var_decl (ptr_type_node);
  DECL_EXTERNAL (heapvar) = 1;

  vi = new_var_info (heapvar, name);
  vi->is_artificial_var = true;
  vi->is_heap_var = true;
  vi->is_unknown_size_var = true;
  vi->is_full_var = true;
  insert_vi_for_tree (heapvar, vi);
/* Create a new artificial heap variable with NAME and make a
   constraint from it to LHS.  Set flags according to a tag used
   for tracking restrict pointers.  */

make_constraint_from_restrict (varinfo_t lhs, const char *name)
  varinfo_t vi = make_heapvar (name);
  vi->is_global_var = 1;
  vi->may_have_pointers = 1;
  make_constraint_from (lhs, vi->id);

/* Create a new artificial heap variable with NAME and make a
   constraint from it to LHS.  Set flags according to a tag used
   for tracking restrict pointers and make the artificial heap
   point to global memory.  */

make_constraint_from_global_restrict (varinfo_t lhs, const char *name)
  varinfo_t vi = make_constraint_from_restrict (lhs, name);
  make_copy_constraint (vi, nonlocal_id);
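
/* A restrict-qualified pointer parameter, for example, is handled by giving
   it such a fresh heap variable as its only initial pointee; because the
   tag also receives NONLOCAL, memory reached through the restrict pointer
   is still treated as possibly global, yet it cannot be confused with the
   pointees of other restrict tags.  */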
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  */

enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };

/* Get a constraint for the requested part of a function designator FI
   when operating in IPA mode.  */

static struct constraint_expr
get_function_part_constraint (varinfo_t fi, unsigned part)
  struct constraint_expr c;

  gcc_assert (in_ipa_mode);

  if (fi->id == anything_id)
      /* ??? We probably should have a ANYFN special variable.  */
      c.var = anything_id;
  else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
      varinfo_t ai = first_vi_for_offset (fi, part);
	c.var = anything_id;
3835 /* For non-IPA mode, generate constraints necessary for a call on the
3839 handle_rhs_call (gimple stmt
, vec
<ce_s
> *results
)
3841 struct constraint_expr rhsc
;
3843 bool returns_uses
= false;
3845 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3847 tree arg
= gimple_call_arg (stmt
, i
);
3848 int flags
= gimple_call_arg_flags (stmt
, i
);
3850 /* If the argument is not used we can ignore it. */
3851 if (flags
& EAF_UNUSED
)
3854 /* As we compute ESCAPED context-insensitive we do not gain
3855 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3856 set. The argument would still get clobbered through the
3858 if ((flags
& EAF_NOCLOBBER
)
3859 && (flags
& EAF_NOESCAPE
))
3861 varinfo_t uses
= get_call_use_vi (stmt
);
3862 if (!(flags
& EAF_DIRECT
))
3864 varinfo_t tem
= new_var_info (NULL_TREE
, "callarg");
3865 make_constraint_to (tem
->id
, arg
);
3866 make_transitive_closure_constraints (tem
);
3867 make_copy_constraint (uses
, tem
->id
);
3870 make_constraint_to (uses
->id
, arg
);
3871 returns_uses
= true;
3873 else if (flags
& EAF_NOESCAPE
)
3875 struct constraint_expr lhs
, rhs
;
3876 varinfo_t uses
= get_call_use_vi (stmt
);
3877 varinfo_t clobbers
= get_call_clobber_vi (stmt
);
3878 varinfo_t tem
= new_var_info (NULL_TREE
, "callarg");
3879 make_constraint_to (tem
->id
, arg
);
3880 if (!(flags
& EAF_DIRECT
))
3881 make_transitive_closure_constraints (tem
);
3882 make_copy_constraint (uses
, tem
->id
);
3883 make_copy_constraint (clobbers
, tem
->id
);
3884 /* Add *tem = nonlocal, do not add *tem = callused as
3885 EAF_NOESCAPE parameters do not escape to other parameters
3886 and all other uses appear in NONLOCAL as well. */
3891 rhs
.var
= nonlocal_id
;
3893 process_constraint (new_constraint (lhs
, rhs
));
3894 returns_uses
= true;
3897 make_escape_constraint (arg
);
3900 /* If we added to the calls uses solution make sure we account for
3901 pointers to it to be returned. */
3904 rhsc
.var
= get_call_use_vi (stmt
)->id
;
3907 results
->safe_push (rhsc
);
3910 /* The static chain escapes as well. */
3911 if (gimple_call_chain (stmt
))
3912 make_escape_constraint (gimple_call_chain (stmt
));
3914 /* And if we applied NRV the address of the return slot escapes as well. */
3915 if (gimple_call_return_slot_opt_p (stmt
)
3916 && gimple_call_lhs (stmt
) != NULL_TREE
3917 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt
))))
3919 vec
<ce_s
> tmpc
= vNULL
;
3920 struct constraint_expr lhsc
, *c
;
3921 get_constraint_for_address_of (gimple_call_lhs (stmt
), &tmpc
);
3922 lhsc
.var
= escaped_id
;
3925 FOR_EACH_VEC_ELT (tmpc
, i
, c
)
3926 process_constraint (new_constraint (lhsc
, *c
));
3930 /* Regular functions return nonlocal memory. */
3931 rhsc
.var
= nonlocal_id
;
3934 results
->safe_push (rhsc
);
3937 /* For non-IPA mode, generate constraints necessary for a call
3938 that returns a pointer and assigns it to LHS. This simply makes
3939 the LHS point to global and escaped variables. */
3942 handle_lhs_call (gimple stmt
, tree lhs
, int flags
, vec
<ce_s
> rhsc
,
3945 vec
<ce_s
> lhsc
= vNULL
;
3947 get_constraint_for (lhs
, &lhsc
);
3948 /* If the store is to a global decl make sure to
3949 add proper escape constraints. */
3950 lhs
= get_base_address (lhs
);
3953 && is_global_var (lhs
))
3955 struct constraint_expr tmpc
;
3956 tmpc
.var
= escaped_id
;
3959 lhsc
.safe_push (tmpc
);
3962 /* If the call returns an argument unmodified override the rhs
3964 flags
= gimple_call_return_flags (stmt
);
3965 if (flags
& ERF_RETURNS_ARG
3966 && (flags
& ERF_RETURN_ARG_MASK
) < gimple_call_num_args (stmt
))
3970 arg
= gimple_call_arg (stmt
, flags
& ERF_RETURN_ARG_MASK
);
3971 get_constraint_for (arg
, &rhsc
);
3972 process_all_all_constraints (lhsc
, rhsc
);
3975 else if (flags
& ERF_NOALIAS
)
3978 struct constraint_expr tmpc
;
3980 vi
= make_heapvar ("HEAP");
3981 /* We delay marking allocated storage global until we know if
3983 DECL_EXTERNAL (vi
->decl
) = 0;
3984 vi
->is_global_var
= 0;
3985 /* If this is not a real malloc call assume the memory was
3986 initialized and thus may point to global memory. All
3987 builtin functions with the malloc attribute behave in a sane way. */
3989 || DECL_BUILT_IN_CLASS (fndecl
) != BUILT_IN_NORMAL
)
3990 make_constraint_from (vi
, nonlocal_id
);
3993 tmpc
.type
= ADDRESSOF
;
3994 rhsc
.safe_push (tmpc
);
3995 process_all_all_constraints (lhsc
, rhsc
);
3999 process_all_all_constraints (lhsc
, rhsc
);
4004 /* For non-IPA mode, generate constraints necessary for a call of a
4005 const function that returns a pointer in the statement STMT. */
4008 handle_const_call (gimple stmt
, vec
<ce_s
> *results
)
4010 struct constraint_expr rhsc
;
4013 /* Treat nested const functions the same as pure functions as far
4014 as the static chain is concerned. */
4015 if (gimple_call_chain (stmt
))
4017 varinfo_t uses
= get_call_use_vi (stmt
);
4018 make_transitive_closure_constraints (uses
);
4019 make_constraint_to (uses
->id
, gimple_call_chain (stmt
));
4020 rhsc
.var
= uses
->id
;
4023 results
->safe_push (rhsc
);
4026 /* May return arguments. */
4027 for (k
= 0; k
< gimple_call_num_args (stmt
); ++k
)
4029 tree arg
= gimple_call_arg (stmt
, k
);
4030 vec
<ce_s
> argc
= vNULL
;
4032 struct constraint_expr
*argp
;
4033 get_constraint_for_rhs (arg
, &argc
);
4034 FOR_EACH_VEC_ELT (argc
, i
, argp
)
4035 results
->safe_push (*argp
);
4039 /* May return addresses of globals. */
4040 rhsc
.var
= nonlocal_id
;
4042 rhsc
.type
= ADDRESSOF
;
4043 results
->safe_push (rhsc
);
4046 /* For non-IPA mode, generate constraints necessary for a call to a
4047 pure function in statement STMT. */
4050 handle_pure_call (gimple stmt
, vec
<ce_s
> *results
)
4052 struct constraint_expr rhsc
;
4054 varinfo_t uses
= NULL
;
4056 /* Memory reached from pointer arguments is call-used. */
4057 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
4059 tree arg
= gimple_call_arg (stmt
, i
);
4062 uses
= get_call_use_vi (stmt
);
4063 make_transitive_closure_constraints (uses
);
4065 make_constraint_to (uses
->id
, arg
);
4068 /* The static chain is used as well. */
4069 if (gimple_call_chain (stmt
))
4073 uses
= get_call_use_vi (stmt
);
4074 make_transitive_closure_constraints (uses
);
4076 make_constraint_to (uses
->id
, gimple_call_chain (stmt
));
4079 /* Pure functions may return call-used and nonlocal memory. */
4082 rhsc
.var
= uses
->id
;
4085 results
->safe_push (rhsc
);
4087 rhsc
.var
= nonlocal_id
;
4090 results
->safe_push (rhsc
);
4094 /* Return the varinfo for the callee of CALL. */
4097 get_fi_for_callee (gimple call
)
4099 tree decl
, fn
= gimple_call_fn (call
);
4101 if (fn
&& TREE_CODE (fn
) == OBJ_TYPE_REF
)
4102 fn
= OBJ_TYPE_REF_EXPR (fn
);
4104 /* If we can directly resolve the function being called, do so.
4105 Otherwise, it must be some sort of indirect expression that
4106 we should still be able to handle. */
4107 decl
= gimple_call_addr_fndecl (fn
);
4109 return get_vi_for_tree (decl
);
4111 /* If the function is anything other than a SSA name pointer we have no
4112 clue and should be getting ANYFN (well, ANYTHING for now). */
4113 if (!fn
|| TREE_CODE (fn
) != SSA_NAME
)
4114 return get_varinfo (anything_id
);
4116 if (SSA_NAME_IS_DEFAULT_DEF (fn
)
4117 && (TREE_CODE (SSA_NAME_VAR (fn
)) == PARM_DECL
4118 || TREE_CODE (SSA_NAME_VAR (fn
)) == RESULT_DECL
))
4119 fn
= SSA_NAME_VAR (fn
);
4121 return get_vi_for_tree (fn
);
4124 /* Create constraints for the builtin call T. Return true if the call
4125 was handled, otherwise false. */
4128 find_func_aliases_for_builtin_call (gimple t
)
4130 tree fndecl
= gimple_call_fndecl (t
);
4131 vec
<ce_s
> lhsc
= vNULL
;
4132 vec
<ce_s
> rhsc
= vNULL
;
4135 if (gimple_call_builtin_p (t
, BUILT_IN_NORMAL
))
4136 /* ??? All builtins that are handled here need to be handled
4137 in the alias-oracle query functions explicitly! */
4138 switch (DECL_FUNCTION_CODE (fndecl
))
4140 /* All the following functions return a pointer to the same object
4141 as their first argument points to. The functions do not add
4142 to the ESCAPED solution. The functions make the first argument
4143 pointed to memory point to what the second argument pointed to
4144 memory points to. */
4145 case BUILT_IN_STRCPY
:
4146 case BUILT_IN_STRNCPY
:
4147 case BUILT_IN_BCOPY
:
4148 case BUILT_IN_MEMCPY
:
4149 case BUILT_IN_MEMMOVE
:
4150 case BUILT_IN_MEMPCPY
:
4151 case BUILT_IN_STPCPY
:
4152 case BUILT_IN_STPNCPY
:
4153 case BUILT_IN_STRCAT
:
4154 case BUILT_IN_STRNCAT
:
4155 case BUILT_IN_STRCPY_CHK
:
4156 case BUILT_IN_STRNCPY_CHK
:
4157 case BUILT_IN_MEMCPY_CHK
:
4158 case BUILT_IN_MEMMOVE_CHK
:
4159 case BUILT_IN_MEMPCPY_CHK
:
4160 case BUILT_IN_STPCPY_CHK
:
4161 case BUILT_IN_STPNCPY_CHK
:
4162 case BUILT_IN_STRCAT_CHK
:
4163 case BUILT_IN_STRNCAT_CHK
:
4164 case BUILT_IN_TM_MEMCPY
:
4165 case BUILT_IN_TM_MEMMOVE
:
4167 tree res
= gimple_call_lhs (t
);
4168 tree dest
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (fndecl
)
4169 == BUILT_IN_BCOPY
? 1 : 0));
4170 tree src
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (fndecl
)
4171 == BUILT_IN_BCOPY
? 0 : 1));
4172 if (res
!= NULL_TREE
)
4174 get_constraint_for (res
, &lhsc
);
4175 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_MEMPCPY
4176 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPCPY
4177 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPNCPY
4178 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_MEMPCPY_CHK
4179 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPCPY_CHK
4180 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPNCPY_CHK
)
4181 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &rhsc
);
4183 get_constraint_for (dest
, &rhsc
);
4184 process_all_all_constraints (lhsc
, rhsc
);
4188 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4189 get_constraint_for_ptr_offset (src
, NULL_TREE
, &rhsc
);
4192 process_all_all_constraints (lhsc
, rhsc
);
4197 case BUILT_IN_MEMSET
:
4198 case BUILT_IN_MEMSET_CHK
:
4199 case BUILT_IN_TM_MEMSET
:
4201 tree res
= gimple_call_lhs (t
);
4202 tree dest
= gimple_call_arg (t
, 0);
4205 struct constraint_expr ac
;
4206 if (res
!= NULL_TREE
)
4208 get_constraint_for (res
, &lhsc
);
4209 get_constraint_for (dest
, &rhsc
);
4210 process_all_all_constraints (lhsc
, rhsc
);
4214 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4216 if (flag_delete_null_pointer_checks
4217 && integer_zerop (gimple_call_arg (t
, 1)))
4219 ac
.type
= ADDRESSOF
;
4220 ac
.var
= nothing_id
;
4225 ac
.var
= integer_id
;
4228 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4229 process_constraint (new_constraint (*lhsp
, ac
));
4233 case BUILT_IN_ASSUME_ALIGNED
:
4235 tree res
= gimple_call_lhs (t
);
4236 tree dest
= gimple_call_arg (t
, 0);
4237 if (res
!= NULL_TREE
)
4239 get_constraint_for (res
, &lhsc
);
4240 get_constraint_for (dest
, &rhsc
);
4241 process_all_all_constraints (lhsc
, rhsc
);
4247 /* All the following functions do not return pointers, do not
4248 modify the points-to sets of memory reachable from their
4249 arguments and do not add to the ESCAPED solution. */
4250 case BUILT_IN_SINCOS
:
4251 case BUILT_IN_SINCOSF
:
4252 case BUILT_IN_SINCOSL
:
4253 case BUILT_IN_FREXP
:
4254 case BUILT_IN_FREXPF
:
4255 case BUILT_IN_FREXPL
:
4256 case BUILT_IN_GAMMA_R
:
4257 case BUILT_IN_GAMMAF_R
:
4258 case BUILT_IN_GAMMAL_R
:
4259 case BUILT_IN_LGAMMA_R
:
4260 case BUILT_IN_LGAMMAF_R
:
4261 case BUILT_IN_LGAMMAL_R
:
4263 case BUILT_IN_MODFF
:
4264 case BUILT_IN_MODFL
:
4265 case BUILT_IN_REMQUO
:
4266 case BUILT_IN_REMQUOF
:
4267 case BUILT_IN_REMQUOL
:
4270 case BUILT_IN_STRDUP
:
4271 case BUILT_IN_STRNDUP
:
4272 if (gimple_call_lhs (t
))
4274 handle_lhs_call (t
, gimple_call_lhs (t
), gimple_call_flags (t
),
4276 get_constraint_for_ptr_offset (gimple_call_lhs (t
),
4278 get_constraint_for_ptr_offset (gimple_call_arg (t
, 0),
4282 process_all_all_constraints (lhsc
, rhsc
);
4288 /* String / character search functions return a pointer into the
4289 source string or NULL. */
4290 case BUILT_IN_INDEX
:
4291 case BUILT_IN_STRCHR
:
4292 case BUILT_IN_STRRCHR
:
4293 case BUILT_IN_MEMCHR
:
4294 case BUILT_IN_STRSTR
:
4295 case BUILT_IN_STRPBRK
:
4296 if (gimple_call_lhs (t
))
4298 tree src
= gimple_call_arg (t
, 0);
4299 get_constraint_for_ptr_offset (src
, NULL_TREE
, &rhsc
);
4300 constraint_expr nul
;
4301 nul
.var
= nothing_id
;
4303 nul
.type
= ADDRESSOF
;
4304 rhsc
.safe_push (nul
);
4305 get_constraint_for (gimple_call_lhs (t
), &lhsc
);
4306 process_all_all_constraints (lhsc
, rhsc
);
4311 /* Trampolines are special - they set up passing the static
4313 case BUILT_IN_INIT_TRAMPOLINE
:
4315 tree tramp
= gimple_call_arg (t
, 0);
4316 tree nfunc
= gimple_call_arg (t
, 1);
4317 tree frame
= gimple_call_arg (t
, 2);
4319 struct constraint_expr lhs
, *rhsp
;
4322 varinfo_t nfi
= NULL
;
4323 gcc_assert (TREE_CODE (nfunc
) == ADDR_EXPR
);
4324 nfi
= lookup_vi_for_tree (TREE_OPERAND (nfunc
, 0));
4327 lhs
= get_function_part_constraint (nfi
, fi_static_chain
);
4328 get_constraint_for (frame
, &rhsc
);
4329 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4330 process_constraint (new_constraint (lhs
, *rhsp
));
4333 /* Make the frame point to the function for
4334 the trampoline adjustment call. */
4335 get_constraint_for (tramp
, &lhsc
);
4337 get_constraint_for (nfunc
, &rhsc
);
4338 process_all_all_constraints (lhsc
, rhsc
);
4345 /* Else fallthru to generic handling which will let
4346 the frame escape. */
4349 case BUILT_IN_ADJUST_TRAMPOLINE
:
4351 tree tramp
= gimple_call_arg (t
, 0);
4352 tree res
= gimple_call_lhs (t
);
4353 if (in_ipa_mode
&& res
)
4355 get_constraint_for (res
, &lhsc
);
4356 get_constraint_for (tramp
, &rhsc
);
4358 process_all_all_constraints (lhsc
, rhsc
);
4364 CASE_BUILT_IN_TM_STORE (1):
4365 CASE_BUILT_IN_TM_STORE (2):
4366 CASE_BUILT_IN_TM_STORE (4):
4367 CASE_BUILT_IN_TM_STORE (8):
4368 CASE_BUILT_IN_TM_STORE (FLOAT
):
4369 CASE_BUILT_IN_TM_STORE (DOUBLE
):
4370 CASE_BUILT_IN_TM_STORE (LDOUBLE
):
4371 CASE_BUILT_IN_TM_STORE (M64
):
4372 CASE_BUILT_IN_TM_STORE (M128
):
4373 CASE_BUILT_IN_TM_STORE (M256
):
4375 tree addr
= gimple_call_arg (t
, 0);
4376 tree src
= gimple_call_arg (t
, 1);
4378 get_constraint_for (addr
, &lhsc
);
4380 get_constraint_for (src
, &rhsc
);
4381 process_all_all_constraints (lhsc
, rhsc
);
4386 CASE_BUILT_IN_TM_LOAD (1):
4387 CASE_BUILT_IN_TM_LOAD (2):
4388 CASE_BUILT_IN_TM_LOAD (4):
4389 CASE_BUILT_IN_TM_LOAD (8):
4390 CASE_BUILT_IN_TM_LOAD (FLOAT
):
4391 CASE_BUILT_IN_TM_LOAD (DOUBLE
):
4392 CASE_BUILT_IN_TM_LOAD (LDOUBLE
):
4393 CASE_BUILT_IN_TM_LOAD (M64
):
4394 CASE_BUILT_IN_TM_LOAD (M128
):
4395 CASE_BUILT_IN_TM_LOAD (M256
):
4397 tree dest
= gimple_call_lhs (t
);
4398 tree addr
= gimple_call_arg (t
, 0);
4400 get_constraint_for (dest
, &lhsc
);
4401 get_constraint_for (addr
, &rhsc
);
4403 process_all_all_constraints (lhsc
, rhsc
);
4408 /* Variadic argument handling needs to be handled in IPA
4410 case BUILT_IN_VA_START
:
4412 tree valist
= gimple_call_arg (t
, 0);
4413 struct constraint_expr rhs
, *lhsp
;
4415 get_constraint_for (valist
, &lhsc
);
4417 /* The va_list gets access to pointers in variadic
4418 arguments. Which we know in the case of IPA analysis
4419 and otherwise are just all nonlocal variables. */
4422 fi
= lookup_vi_for_tree (cfun
->decl
);
4423 rhs
= get_function_part_constraint (fi
, ~0);
4424 rhs
.type
= ADDRESSOF
;
4428 rhs
.var
= nonlocal_id
;
4429 rhs
.type
= ADDRESSOF
;
4432 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4433 process_constraint (new_constraint (*lhsp
, rhs
));
4435 /* va_list is clobbered. */
4436 make_constraint_to (get_call_clobber_vi (t
)->id
, valist
);
4439 /* va_end doesn't have any effect that matters. */
4440 case BUILT_IN_VA_END
:
4442 /* Alternate return. Simply give up for now. */
4443 case BUILT_IN_RETURN
:
4447 || !(fi
= get_vi_for_tree (cfun
->decl
)))
4448 make_constraint_from (get_varinfo (escaped_id
), anything_id
);
4449 else if (in_ipa_mode
4452 struct constraint_expr lhs
, rhs
;
4453 lhs
= get_function_part_constraint (fi
, fi_result
);
4454 rhs
.var
= anything_id
;
4457 process_constraint (new_constraint (lhs
, rhs
));
4461 /* printf-style functions may have hooks to set pointers to
4462 point to somewhere into the generated string. Leave them
4463 for a later exercise... */
4465 /* Fallthru to general call handling. */;
4471 /* Create constraints for the call T. */
4474 find_func_aliases_for_call (gimple t
)
4476 tree fndecl
= gimple_call_fndecl (t
);
4477 vec
<ce_s
> lhsc
= vNULL
;
4478 vec
<ce_s
> rhsc
= vNULL
;
4481 if (fndecl
!= NULL_TREE
4482 && DECL_BUILT_IN (fndecl
)
4483 && find_func_aliases_for_builtin_call (t
))
4486 fi
= get_fi_for_callee (t
);
4488 || (fndecl
&& !fi
->is_fn_info
))
4490 vec
<ce_s
> rhsc
= vNULL
;
4491 int flags
= gimple_call_flags (t
);
4493 /* Const functions can return their arguments and addresses
4494 of global memory but not of escaped memory. */
4495 if (flags
& (ECF_CONST
|ECF_NOVOPS
))
4497 if (gimple_call_lhs (t
))
4498 handle_const_call (t
, &rhsc
);
4500 /* Pure functions can return addresses in and of memory
4501 reachable from their arguments, but they are not an escape
4502 point for reachable memory of their arguments. */
4503 else if (flags
& (ECF_PURE
|ECF_LOOPING_CONST_OR_PURE
))
4504 handle_pure_call (t
, &rhsc
);
4506 handle_rhs_call (t
, &rhsc
);
4507 if (gimple_call_lhs (t
))
4508 handle_lhs_call (t
, gimple_call_lhs (t
), flags
, rhsc
, fndecl
);
4516 /* Assign all the passed arguments to the appropriate incoming
4517 parameters of the function. */
4518 for (j
= 0; j
< gimple_call_num_args (t
); j
++)
4520 struct constraint_expr lhs
;
4521 struct constraint_expr
*rhsp
;
4522 tree arg
= gimple_call_arg (t
, j
);
4524 get_constraint_for_rhs (arg
, &rhsc
);
4525 lhs
= get_function_part_constraint (fi
, fi_parm_base
+ j
);
4526 while (rhsc
.length () != 0)
4528 rhsp
= &rhsc
.last ();
4529 process_constraint (new_constraint (lhs
, *rhsp
));
4534 /* If we are returning a value, assign it to the result. */
4535 lhsop
= gimple_call_lhs (t
);
4538 struct constraint_expr rhs
;
4539 struct constraint_expr
*lhsp
;
4541 get_constraint_for (lhsop
, &lhsc
);
4542 rhs
= get_function_part_constraint (fi
, fi_result
);
4544 && DECL_RESULT (fndecl
)
4545 && DECL_BY_REFERENCE (DECL_RESULT (fndecl
)))
4547 vec
<ce_s
> tem
= vNULL
;
4548 tem
.safe_push (rhs
);
4553 FOR_EACH_VEC_ELT (lhsc
, j
, lhsp
)
4554 process_constraint (new_constraint (*lhsp
, rhs
));
4557 /* If we pass the result decl by reference, honor that. */
4560 && DECL_RESULT (fndecl
)
4561 && DECL_BY_REFERENCE (DECL_RESULT (fndecl
)))
4563 struct constraint_expr lhs
;
4564 struct constraint_expr
*rhsp
;
4566 get_constraint_for_address_of (lhsop
, &rhsc
);
4567 lhs
= get_function_part_constraint (fi
, fi_result
);
4568 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
4569 process_constraint (new_constraint (lhs
, *rhsp
));
4573 /* If we use a static chain, pass it along. */
4574 if (gimple_call_chain (t
))
4576 struct constraint_expr lhs
;
4577 struct constraint_expr
*rhsp
;
4579 get_constraint_for (gimple_call_chain (t
), &rhsc
);
4580 lhs
= get_function_part_constraint (fi
, fi_static_chain
);
4581 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
4582 process_constraint (new_constraint (lhs
, *rhsp
));
/* Walk statement T setting up aliasing constraints according to the
   references found in T.  This function is the main part of the
   constraint builder.  */
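/* For example, a PHI node "x_3 = PHI <a_1, b_2>" simply gives rise to
   the copy constraints x_3 = a_1 and x_3 = b_2, built by the PHI
   handling below.  */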
4593 find_func_aliases (gimple origt
)
4596 vec
<ce_s
> lhsc
= vNULL
;
4597 vec
<ce_s
> rhsc
= vNULL
;
4598 struct constraint_expr
*c
;
4601 /* Now build constraints expressions. */
4602 if (gimple_code (t
) == GIMPLE_PHI
)
/* For a phi node, assign all the arguments to the result.  */
4609 get_constraint_for (gimple_phi_result (t
), &lhsc
);
4610 for (i
= 0; i
< gimple_phi_num_args (t
); i
++)
4612 tree strippedrhs
= PHI_ARG_DEF (t
, i
);
4614 STRIP_NOPS (strippedrhs
);
4615 get_constraint_for_rhs (gimple_phi_arg_def (t
, i
), &rhsc
);
4617 FOR_EACH_VEC_ELT (lhsc
, j
, c
)
4619 struct constraint_expr
*c2
;
4620 while (rhsc
.length () > 0)
4623 process_constraint (new_constraint (*c
, *c2
));
/* In IPA mode, we need to generate constraints to pass call
   arguments through their calls.  There are two cases,
   either a GIMPLE_CALL returning a value, or just a plain
   GIMPLE_CALL when it does not.

   In non-IPA mode, we need to generate constraints for each
   pointer passed by address.  */
4636 else if (is_gimple_call (t
))
4637 find_func_aliases_for_call (t
);
4639 /* Otherwise, just a regular assignment statement. Only care about
4640 operations with pointer result, others are dealt with as escape
4641 points if they have pointer operands. */
4642 else if (is_gimple_assign (t
))
4644 /* Otherwise, just a regular assignment statement. */
4645 tree lhsop
= gimple_assign_lhs (t
);
4646 tree rhsop
= (gimple_num_ops (t
) == 2) ? gimple_assign_rhs1 (t
) : NULL
;
4648 if (rhsop
&& TREE_CLOBBER_P (rhsop
))
/* Ignore clobbers, they don't actually store anything into the LHS.  */
4652 else if (rhsop
&& AGGREGATE_TYPE_P (TREE_TYPE (lhsop
)))
4653 do_structure_copy (lhsop
, rhsop
);
4656 enum tree_code code
= gimple_assign_rhs_code (t
);
4658 get_constraint_for (lhsop
, &lhsc
);
4660 if (FLOAT_TYPE_P (TREE_TYPE (lhsop
)))
4661 /* If the operation produces a floating point result then
4662 assume the value is not produced to transfer a pointer. */
4664 else if (code
== POINTER_PLUS_EXPR
)
4665 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t
),
4666 gimple_assign_rhs2 (t
), &rhsc
);
4667 else if (code
== BIT_AND_EXPR
4668 && TREE_CODE (gimple_assign_rhs2 (t
)) == INTEGER_CST
)
4670 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4671 the pointer. Handle it by offsetting it by UNKNOWN. */
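/* E.g. for "q = p & ~(uintptr_t) 15" we let q point to whatever p
   points to, at an unknown offset, much like a POINTER_PLUS_EXPR
   with a non-constant offset.  */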
4672 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t
),
4675 else if ((CONVERT_EXPR_CODE_P (code
)
4676 && !(POINTER_TYPE_P (gimple_expr_type (t
))
4677 && !POINTER_TYPE_P (TREE_TYPE (rhsop
))))
4678 || gimple_assign_single_p (t
))
4679 get_constraint_for_rhs (rhsop
, &rhsc
);
4680 else if (code
== COND_EXPR
)
4682 /* The result is a merge of both COND_EXPR arms. */
4683 vec
<ce_s
> tmp
= vNULL
;
4684 struct constraint_expr
*rhsp
;
4686 get_constraint_for_rhs (gimple_assign_rhs2 (t
), &rhsc
);
4687 get_constraint_for_rhs (gimple_assign_rhs3 (t
), &tmp
);
4688 FOR_EACH_VEC_ELT (tmp
, i
, rhsp
)
4689 rhsc
.safe_push (*rhsp
);
4692 else if (truth_value_p (code
))
/* Truth value results are not pointer (parts), or at best a very
   unreasonable obfuscation of a part.  */
4698 /* All other operations are merges. */
4699 vec
<ce_s
> tmp
= vNULL
;
4700 struct constraint_expr
*rhsp
;
4702 get_constraint_for_rhs (gimple_assign_rhs1 (t
), &rhsc
);
4703 for (i
= 2; i
< gimple_num_ops (t
); ++i
)
4705 get_constraint_for_rhs (gimple_op (t
, i
), &tmp
);
4706 FOR_EACH_VEC_ELT (tmp
, j
, rhsp
)
4707 rhsc
.safe_push (*rhsp
);
4712 process_all_all_constraints (lhsc
, rhsc
);
4714 /* If there is a store to a global variable the rhs escapes. */
4715 if ((lhsop
= get_base_address (lhsop
)) != NULL_TREE
4717 && is_global_var (lhsop
)
4719 || DECL_EXTERNAL (lhsop
) || TREE_PUBLIC (lhsop
)))
4720 make_escape_constraint (rhsop
);
4722 /* Handle escapes through return. */
4723 else if (gimple_code (t
) == GIMPLE_RETURN
4724 && gimple_return_retval (t
) != NULL_TREE
)
4728 || !(fi
= get_vi_for_tree (cfun
->decl
)))
4729 make_escape_constraint (gimple_return_retval (t
));
4730 else if (in_ipa_mode
4733 struct constraint_expr lhs
;
4734 struct constraint_expr
*rhsp
;
4737 lhs
= get_function_part_constraint (fi
, fi_result
);
4738 get_constraint_for_rhs (gimple_return_retval (t
), &rhsc
);
4739 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4740 process_constraint (new_constraint (lhs
, *rhsp
));
4743 /* Handle asms conservatively by adding escape constraints to everything. */
4744 else if (gimple_code (t
) == GIMPLE_ASM
)
4746 unsigned i
, noutputs
;
4747 const char **oconstraints
;
4748 const char *constraint
;
4749 bool allows_mem
, allows_reg
, is_inout
;
4751 noutputs
= gimple_asm_noutputs (t
);
4752 oconstraints
= XALLOCAVEC (const char *, noutputs
);
4754 for (i
= 0; i
< noutputs
; ++i
)
4756 tree link
= gimple_asm_output_op (t
, i
);
4757 tree op
= TREE_VALUE (link
);
4759 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
4760 oconstraints
[i
] = constraint
;
4761 parse_output_constraint (&constraint
, i
, 0, 0, &allows_mem
,
4762 &allows_reg
, &is_inout
);
4764 /* A memory constraint makes the address of the operand escape. */
4765 if (!allows_reg
&& allows_mem
)
4766 make_escape_constraint (build_fold_addr_expr (op
));
4768 /* The asm may read global memory, so outputs may point to
4769 any global memory. */
4772 vec
<ce_s
> lhsc
= vNULL
;
4773 struct constraint_expr rhsc
, *lhsp
;
4775 get_constraint_for (op
, &lhsc
);
4776 rhsc
.var
= nonlocal_id
;
4779 FOR_EACH_VEC_ELT (lhsc
, j
, lhsp
)
4780 process_constraint (new_constraint (*lhsp
, rhsc
));
4784 for (i
= 0; i
< gimple_asm_ninputs (t
); ++i
)
4786 tree link
= gimple_asm_input_op (t
, i
);
4787 tree op
= TREE_VALUE (link
);
4789 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
4791 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0, oconstraints
,
4792 &allows_mem
, &allows_reg
);
4794 /* A memory constraint makes the address of the operand escape. */
4795 if (!allows_reg
&& allows_mem
)
4796 make_escape_constraint (build_fold_addr_expr (op
));
4797 /* Strictly we'd only need the constraint to ESCAPED if
4798 the asm clobbers memory, otherwise using something
4799 along the lines of per-call clobbers/uses would be enough. */
4801 make_escape_constraint (op
);
4810 /* Create a constraint adding to the clobber set of FI the memory
4811 pointed to by PTR. */
4814 process_ipa_clobber (varinfo_t fi
, tree ptr
)
4816 vec
<ce_s
> ptrc
= vNULL
;
4817 struct constraint_expr
*c
, lhs
;
4819 get_constraint_for_rhs (ptr
, &ptrc
);
4820 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
4821 FOR_EACH_VEC_ELT (ptrc
, i
, c
)
4822 process_constraint (new_constraint (lhs
, *c
));
4826 /* Walk statement T setting up clobber and use constraints according to the
4827 references found in T. This function is a main part of the
4828 IPA constraint builder. */
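/* A rough sketch: inside a function FN, a store "*p = x" whose base is
   not a local automatic adds the stored-to location to FN's fi_clobbers
   part, and a read "y = *q" likewise adds the read location to FN's
   fi_uses part, so that callers of FN can see what FN may clobber or
   use.  */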
4831 find_func_clobbers (gimple origt
)
4834 vec
<ce_s
> lhsc
= vNULL
;
4835 vec
<ce_s
> rhsc
= vNULL
;
4838 /* Add constraints for clobbered/used in IPA mode.
4839 We are not interested in what automatic variables are clobbered
4840 or used as we only use the information in the caller to which
4841 they do not escape. */
4842 gcc_assert (in_ipa_mode
);
/* If the stmt refers to memory in any way it had better have a VUSE.  */
4845 if (gimple_vuse (t
) == NULL_TREE
)
4848 /* We'd better have function information for the current function. */
4849 fi
= lookup_vi_for_tree (cfun
->decl
);
4850 gcc_assert (fi
!= NULL
);
4852 /* Account for stores in assignments and calls. */
4853 if (gimple_vdef (t
) != NULL_TREE
4854 && gimple_has_lhs (t
))
4856 tree lhs
= gimple_get_lhs (t
);
4858 while (handled_component_p (tem
))
4859 tem
= TREE_OPERAND (tem
, 0);
4861 && !auto_var_in_fn_p (tem
, cfun
->decl
))
4862 || INDIRECT_REF_P (tem
)
4863 || (TREE_CODE (tem
) == MEM_REF
4864 && !(TREE_CODE (TREE_OPERAND (tem
, 0)) == ADDR_EXPR
4866 (TREE_OPERAND (TREE_OPERAND (tem
, 0), 0), cfun
->decl
))))
4868 struct constraint_expr lhsc
, *rhsp
;
4870 lhsc
= get_function_part_constraint (fi
, fi_clobbers
);
4871 get_constraint_for_address_of (lhs
, &rhsc
);
4872 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4873 process_constraint (new_constraint (lhsc
, *rhsp
));
/* Account for uses in assignments and returns.  */
4879 if (gimple_assign_single_p (t
)
4880 || (gimple_code (t
) == GIMPLE_RETURN
4881 && gimple_return_retval (t
) != NULL_TREE
))
4883 tree rhs
= (gimple_assign_single_p (t
)
4884 ? gimple_assign_rhs1 (t
) : gimple_return_retval (t
));
4886 while (handled_component_p (tem
))
4887 tem
= TREE_OPERAND (tem
, 0);
4889 && !auto_var_in_fn_p (tem
, cfun
->decl
))
4890 || INDIRECT_REF_P (tem
)
4891 || (TREE_CODE (tem
) == MEM_REF
4892 && !(TREE_CODE (TREE_OPERAND (tem
, 0)) == ADDR_EXPR
4894 (TREE_OPERAND (TREE_OPERAND (tem
, 0), 0), cfun
->decl
))))
4896 struct constraint_expr lhs
, *rhsp
;
4898 lhs
= get_function_part_constraint (fi
, fi_uses
);
4899 get_constraint_for_address_of (rhs
, &rhsc
);
4900 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4901 process_constraint (new_constraint (lhs
, *rhsp
));
4906 if (is_gimple_call (t
))
4908 varinfo_t cfi
= NULL
;
4909 tree decl
= gimple_call_fndecl (t
);
4910 struct constraint_expr lhs
, rhs
;
/* For builtins we do not have separate function info.  For those
   that we do not generate escapes for, we have to generate
   clobbers/uses.  */
4915 if (gimple_call_builtin_p (t
, BUILT_IN_NORMAL
))
4916 switch (DECL_FUNCTION_CODE (decl
))
4918 /* The following functions use and clobber memory pointed to
4919 by their arguments. */
case BUILT_IN_STRCPY:
case BUILT_IN_STRNCPY:
case BUILT_IN_BCOPY:
case BUILT_IN_MEMCPY:
case BUILT_IN_MEMMOVE:
case BUILT_IN_MEMPCPY:
case BUILT_IN_STPCPY:
case BUILT_IN_STPNCPY:
case BUILT_IN_STRCAT:
case BUILT_IN_STRNCAT:
case BUILT_IN_STRCPY_CHK:
case BUILT_IN_STRNCPY_CHK:
case BUILT_IN_MEMCPY_CHK:
case BUILT_IN_MEMMOVE_CHK:
case BUILT_IN_MEMPCPY_CHK:
case BUILT_IN_STPCPY_CHK:
case BUILT_IN_STPNCPY_CHK:
case BUILT_IN_STRCAT_CHK:
case BUILT_IN_STRNCAT_CHK:
4940 tree dest
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (decl
)
4941 == BUILT_IN_BCOPY
? 1 : 0));
4942 tree src
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (decl
)
4943 == BUILT_IN_BCOPY
? 0 : 1));
4945 struct constraint_expr
*rhsp
, *lhsp
;
4946 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4947 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
4948 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4949 process_constraint (new_constraint (lhs
, *lhsp
));
4951 get_constraint_for_ptr_offset (src
, NULL_TREE
, &rhsc
);
4952 lhs
= get_function_part_constraint (fi
, fi_uses
);
4953 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4954 process_constraint (new_constraint (lhs
, *rhsp
));
/* The following function clobbers memory pointed to by its argument.  */
4960 case BUILT_IN_MEMSET
:
4961 case BUILT_IN_MEMSET_CHK
:
4963 tree dest
= gimple_call_arg (t
, 0);
4966 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4967 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
4968 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4969 process_constraint (new_constraint (lhs
, *lhsp
));
/* The following functions clobber their second and third arguments.  */
case BUILT_IN_SINCOS:
case BUILT_IN_SINCOSF:
case BUILT_IN_SINCOSL:
4979 process_ipa_clobber (fi
, gimple_call_arg (t
, 1));
4980 process_ipa_clobber (fi
, gimple_call_arg (t
, 2));
4983 /* The following functions clobber their second argument. */
case BUILT_IN_FREXP:
case BUILT_IN_FREXPF:
case BUILT_IN_FREXPL:
case BUILT_IN_LGAMMA_R:
case BUILT_IN_LGAMMAF_R:
case BUILT_IN_LGAMMAL_R:
case BUILT_IN_GAMMA_R:
case BUILT_IN_GAMMAF_R:
case BUILT_IN_GAMMAL_R:
case BUILT_IN_MODF:
case BUILT_IN_MODFF:
case BUILT_IN_MODFL:
4997 process_ipa_clobber (fi
, gimple_call_arg (t
, 1));
5000 /* The following functions clobber their third argument. */
case BUILT_IN_REMQUO:
case BUILT_IN_REMQUOF:
case BUILT_IN_REMQUOL:
5005 process_ipa_clobber (fi
, gimple_call_arg (t
, 2));
5008 /* The following functions neither read nor clobber memory. */
5009 case BUILT_IN_ASSUME_ALIGNED
:
5012 /* Trampolines are of no interest to us. */
5013 case BUILT_IN_INIT_TRAMPOLINE
:
5014 case BUILT_IN_ADJUST_TRAMPOLINE
:
5016 case BUILT_IN_VA_START
:
5017 case BUILT_IN_VA_END
:
5019 /* printf-style functions may have hooks to set pointers to
5020 point to somewhere into the generated string. Leave them
5021 for a later exercise... */
5023 /* Fallthru to general call handling. */;
5026 /* Parameters passed by value are used. */
5027 lhs
= get_function_part_constraint (fi
, fi_uses
);
5028 for (i
= 0; i
< gimple_call_num_args (t
); i
++)
5030 struct constraint_expr
*rhsp
;
5031 tree arg
= gimple_call_arg (t
, i
);
5033 if (TREE_CODE (arg
) == SSA_NAME
5034 || is_gimple_min_invariant (arg
))
5037 get_constraint_for_address_of (arg
, &rhsc
);
5038 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
5039 process_constraint (new_constraint (lhs
, *rhsp
));
/* Build constraints for propagating clobbers/uses along the callgraph edges.  */
5045 cfi
= get_fi_for_callee (t
);
5046 if (cfi
->id
== anything_id
)
5048 if (gimple_vdef (t
))
5049 make_constraint_from (first_vi_for_offset (fi
, fi_clobbers
),
5051 make_constraint_from (first_vi_for_offset (fi
, fi_uses
),
5056 /* For callees without function info (that's external functions),
5057 ESCAPED is clobbered and used. */
5058 if (gimple_call_fndecl (t
)
5059 && !cfi
->is_fn_info
)
5063 if (gimple_vdef (t
))
5064 make_copy_constraint (first_vi_for_offset (fi
, fi_clobbers
),
5066 make_copy_constraint (first_vi_for_offset (fi
, fi_uses
), escaped_id
);
5068 /* Also honor the call statement use/clobber info. */
5069 if ((vi
= lookup_call_clobber_vi (t
)) != NULL
)
5070 make_copy_constraint (first_vi_for_offset (fi
, fi_clobbers
),
5072 if ((vi
= lookup_call_use_vi (t
)) != NULL
)
5073 make_copy_constraint (first_vi_for_offset (fi
, fi_uses
),
5078 /* Otherwise the caller clobbers and uses what the callee does.
5079 ??? This should use a new complex constraint that filters
5080 local variables of the callee. */
5081 if (gimple_vdef (t
))
5083 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
5084 rhs
= get_function_part_constraint (cfi
, fi_clobbers
);
5085 process_constraint (new_constraint (lhs
, rhs
));
5087 lhs
= get_function_part_constraint (fi
, fi_uses
);
5088 rhs
= get_function_part_constraint (cfi
, fi_uses
);
5089 process_constraint (new_constraint (lhs
, rhs
));
5091 else if (gimple_code (t
) == GIMPLE_ASM
)
5093 /* ??? Ick. We can do better. */
5094 if (gimple_vdef (t
))
5095 make_constraint_from (first_vi_for_offset (fi
, fi_clobbers
),
5097 make_constraint_from (first_vi_for_offset (fi
, fi_uses
),
5105 /* Find the first varinfo in the same variable as START that overlaps with
5106 OFFSET. Return NULL if we can't find one. */
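/* For instance, for a variable with two fields at bit offsets 0 and 32,
   each 32 bits wide, an OFFSET of 40 yields the field at offset 32,
   while an OFFSET of 128, which lies outside the variable, yields
   NULL.  */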
5109 first_vi_for_offset (varinfo_t start
, unsigned HOST_WIDE_INT offset
)
5111 /* If the offset is outside of the variable, bail out. */
5112 if (offset
>= start
->fullsize
)
5115 /* If we cannot reach offset from start, lookup the first field
5116 and start from there. */
5117 if (start
->offset
> offset
)
5118 start
= get_varinfo (start
->head
);
/* We may not find a variable in the field list with the actual
   offset when we have glommed a structure to a variable.
   In that case, however, offset should still be within the size
   of the variable.  */
5126 if (offset
>= start
->offset
5127 && (offset
- start
->offset
) < start
->size
)
5130 start
= vi_next (start
);
5136 /* Find the first varinfo in the same variable as START that overlaps with
5137 OFFSET. If there is no such varinfo the varinfo directly preceding
5138 OFFSET is returned. */
5141 first_or_preceding_vi_for_offset (varinfo_t start
,
5142 unsigned HOST_WIDE_INT offset
)
5144 /* If we cannot reach offset from start, lookup the first field
5145 and start from there. */
5146 if (start
->offset
> offset
)
5147 start
= get_varinfo (start
->head
);
/* We may not find a variable in the field list with the actual
   offset when we have glommed a structure to a variable.
   In that case, however, offset should still be within the size
   of the variable.
   If we got beyond the offset we look for, return the field
   directly preceding offset which may be the last field.  */
5156 && offset
>= start
->offset
5157 && !((offset
- start
->offset
) < start
->size
))
5158 start
= vi_next (start
);
5164 /* This structure is used during pushing fields onto the fieldstack
5165 to track the offset of the field, since bitpos_of_field gives it
5166 relative to its immediate containing type, and we want it relative
5167 to the ultimate containing object. */
5171 /* Offset from the base of the base containing object to this field. */
5172 HOST_WIDE_INT offset
;
5174 /* Size, in bits, of the field. */
5175 unsigned HOST_WIDE_INT size
;
5177 unsigned has_unknown_size
: 1;
5179 unsigned must_have_pointers
: 1;
5181 unsigned may_have_pointers
: 1;
5183 unsigned only_restrict_pointers
: 1;
5185 typedef struct fieldoff fieldoff_s
;
5188 /* qsort comparison function for two fieldoff's PA and PB */
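/* E.g. fields with (offset, size) pairs (8, 32), (0, 16) and (0, 32)
   sort to (0, 16), (0, 32), (8, 32): primarily by ascending offset,
   with ties broken by ascending size.  */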
5191 fieldoff_compare (const void *pa
, const void *pb
)
5193 const fieldoff_s
*foa
= (const fieldoff_s
*)pa
;
5194 const fieldoff_s
*fob
= (const fieldoff_s
*)pb
;
5195 unsigned HOST_WIDE_INT foasize
, fobsize
;
5197 if (foa
->offset
< fob
->offset
)
5199 else if (foa
->offset
> fob
->offset
)
5202 foasize
= foa
->size
;
5203 fobsize
= fob
->size
;
5204 if (foasize
< fobsize
)
5206 else if (foasize
> fobsize
)
5211 /* Sort a fieldstack according to the field offset and sizes. */
5213 sort_fieldstack (vec
<fieldoff_s
> fieldstack
)
5215 fieldstack
.qsort (fieldoff_compare
);
5218 /* Return true if T is a type that can have subvars. */
5221 type_can_have_subvars (const_tree t
)
5223 /* Aggregates without overlapping fields can have subvars. */
5224 return TREE_CODE (t
) == RECORD_TYPE
;
5227 /* Return true if V is a tree that we can have subvars for.
5228 Normally, this is any aggregate type. Also complex
5229 types which are not gimple registers can have subvars. */
5232 var_can_have_subvars (const_tree v
)
5234 /* Volatile variables should never have subvars. */
5235 if (TREE_THIS_VOLATILE (v
))
5238 /* Non decls or memory tags can never have subvars. */
5242 return type_can_have_subvars (TREE_TYPE (v
));
5245 /* Return true if T is a type that does contain pointers. */
5248 type_must_have_pointers (tree type
)
5250 if (POINTER_TYPE_P (type
))
5253 if (TREE_CODE (type
) == ARRAY_TYPE
)
5254 return type_must_have_pointers (TREE_TYPE (type
));
5256 /* A function or method can have pointers as arguments, so track
5257 those separately. */
5258 if (TREE_CODE (type
) == FUNCTION_TYPE
5259 || TREE_CODE (type
) == METHOD_TYPE
)
5266 field_must_have_pointers (tree t
)
5268 return type_must_have_pointers (TREE_TYPE (t
));
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
   the fields of TYPE onto fieldstack, recording their offsets along
   the way.

   OFFSET is used to keep track of the offset in this entire
   structure, rather than just the immediately containing structure.
   Returns false if the caller is supposed to handle the field we
   recursed for.  */
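/* As an illustration, for

     struct S { int a; int b; int *p; };

   the two adjacent pointer-free fields a and b are typically merged
   into a single fieldoff entry (64 bits on common targets) while p
   gets its own entry, because adjacent fields that cannot contain
   pointers are merged below.  */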
5281 push_fields_onto_fieldstack (tree type
, vec
<fieldoff_s
> *fieldstack
,
5282 HOST_WIDE_INT offset
)
5285 bool empty_p
= true;
5287 if (TREE_CODE (type
) != RECORD_TYPE
)
/* If the vector of fields is growing too big, bail out early.
   Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
   sure this fails.  */
5293 if (fieldstack
->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE
)
5296 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
5297 if (TREE_CODE (field
) == FIELD_DECL
)
5300 HOST_WIDE_INT foff
= bitpos_of_field (field
);
5302 if (!var_can_have_subvars (field
)
5303 || TREE_CODE (TREE_TYPE (field
)) == QUAL_UNION_TYPE
5304 || TREE_CODE (TREE_TYPE (field
)) == UNION_TYPE
)
5306 else if (!push_fields_onto_fieldstack
5307 (TREE_TYPE (field
), fieldstack
, offset
+ foff
)
5308 && (DECL_SIZE (field
)
5309 && !integer_zerop (DECL_SIZE (field
))))
/* Empty structures may have actual size, like in C++.  So if we
   didn't push any subfields and the size is nonzero, push the
   field onto the stack.  */
5317 fieldoff_s
*pair
= NULL
;
5318 bool has_unknown_size
= false;
5319 bool must_have_pointers_p
;
5321 if (!fieldstack
->is_empty ())
5322 pair
= &fieldstack
->last ();
/* If there isn't anything at offset zero, create something.  */
5326 && offset
+ foff
!= 0)
5328 fieldoff_s e
= {0, offset
+ foff
, false, false, false, false};
5329 pair
= fieldstack
->safe_push (e
);
5332 if (!DECL_SIZE (field
)
5333 || !host_integerp (DECL_SIZE (field
), 1))
5334 has_unknown_size
= true;
5336 /* If adjacent fields do not contain pointers merge them. */
5337 must_have_pointers_p
= field_must_have_pointers (field
);
5339 && !has_unknown_size
5340 && !must_have_pointers_p
5341 && !pair
->must_have_pointers
5342 && !pair
->has_unknown_size
5343 && pair
->offset
+ (HOST_WIDE_INT
)pair
->size
== offset
+ foff
)
5345 pair
->size
+= TREE_INT_CST_LOW (DECL_SIZE (field
));
5350 e
.offset
= offset
+ foff
;
5351 e
.has_unknown_size
= has_unknown_size
;
5352 if (!has_unknown_size
)
5353 e
.size
= TREE_INT_CST_LOW (DECL_SIZE (field
));
5356 e
.must_have_pointers
= must_have_pointers_p
;
5357 e
.may_have_pointers
= true;
5358 e
.only_restrict_pointers
5359 = (!has_unknown_size
5360 && POINTER_TYPE_P (TREE_TYPE (field
))
5361 && TYPE_RESTRICT (TREE_TYPE (field
)));
5362 fieldstack
->safe_push (e
);
5372 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5373 if it is a varargs function. */
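/* E.g. for "int f (int a, char *b, ...)" this returns 2 and sets
   *IS_VARARGS to true; for an unprototyped K&R definition the named
   arguments are counted from DECL_ARGUMENTS instead.  */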
5376 count_num_arguments (tree decl
, bool *is_varargs
)
5378 unsigned int num
= 0;
5381 /* Capture named arguments for K&R functions. They do not
5382 have a prototype and thus no TYPE_ARG_TYPES. */
5383 for (t
= DECL_ARGUMENTS (decl
); t
; t
= DECL_CHAIN (t
))
5386 /* Check if the function has variadic arguments. */
5387 for (t
= TYPE_ARG_TYPES (TREE_TYPE (decl
)); t
; t
= TREE_CHAIN (t
))
5388 if (TREE_VALUE (t
) == void_type_node
)
/* Create function info for DECL, using NAME, and return the index
   of the variable we've created for the function.  */
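/* The resulting varinfo is laid out as a chain of sub-variables, one
   per function part, roughly:

     fi_clobbers       <name>.clobber
     fi_uses           <name>.use
     fi_static_chain   <name>.chain    (if a static chain exists)
     fi_result         <name>.result
     fi_parm_base + i  <name>.argI     (plus <name>.varargs for ...)

   so that get_function_part_constraint can address each part by its
   offset.  */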
5400 create_function_info_for (tree decl
, const char *name
)
5402 struct function
*fn
= DECL_STRUCT_FUNCTION (decl
);
5403 varinfo_t vi
, prev_vi
;
5406 bool is_varargs
= false;
5407 unsigned int num_args
= count_num_arguments (decl
, &is_varargs
);
5409 /* Create the variable info. */
5411 vi
= new_var_info (decl
, name
);
5414 vi
->fullsize
= fi_parm_base
+ num_args
;
5416 vi
->may_have_pointers
= false;
5419 insert_vi_for_tree (vi
->decl
, vi
);
5423 /* Create a variable for things the function clobbers and one for
5424 things the function uses. */
5426 varinfo_t clobbervi
, usevi
;
5427 const char *newname
;
5430 asprintf (&tempname
, "%s.clobber", name
);
5431 newname
= ggc_strdup (tempname
);
5434 clobbervi
= new_var_info (NULL
, newname
);
5435 clobbervi
->offset
= fi_clobbers
;
5436 clobbervi
->size
= 1;
5437 clobbervi
->fullsize
= vi
->fullsize
;
5438 clobbervi
->is_full_var
= true;
5439 clobbervi
->is_global_var
= false;
5440 gcc_assert (prev_vi
->offset
< clobbervi
->offset
);
5441 prev_vi
->next
= clobbervi
->id
;
5442 prev_vi
= clobbervi
;
5444 asprintf (&tempname
, "%s.use", name
);
5445 newname
= ggc_strdup (tempname
);
5448 usevi
= new_var_info (NULL
, newname
);
5449 usevi
->offset
= fi_uses
;
5451 usevi
->fullsize
= vi
->fullsize
;
5452 usevi
->is_full_var
= true;
5453 usevi
->is_global_var
= false;
5454 gcc_assert (prev_vi
->offset
< usevi
->offset
);
5455 prev_vi
->next
= usevi
->id
;
5459 /* And one for the static chain. */
5460 if (fn
->static_chain_decl
!= NULL_TREE
)
5463 const char *newname
;
5466 asprintf (&tempname
, "%s.chain", name
);
5467 newname
= ggc_strdup (tempname
);
5470 chainvi
= new_var_info (fn
->static_chain_decl
, newname
);
5471 chainvi
->offset
= fi_static_chain
;
5473 chainvi
->fullsize
= vi
->fullsize
;
5474 chainvi
->is_full_var
= true;
5475 chainvi
->is_global_var
= false;
5476 gcc_assert (prev_vi
->offset
< chainvi
->offset
);
5477 prev_vi
->next
= chainvi
->id
;
5479 insert_vi_for_tree (fn
->static_chain_decl
, chainvi
);
5482 /* Create a variable for the return var. */
5483 if (DECL_RESULT (decl
) != NULL
5484 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
5487 const char *newname
;
5489 tree resultdecl
= decl
;
5491 if (DECL_RESULT (decl
))
5492 resultdecl
= DECL_RESULT (decl
);
5494 asprintf (&tempname
, "%s.result", name
);
5495 newname
= ggc_strdup (tempname
);
5498 resultvi
= new_var_info (resultdecl
, newname
);
5499 resultvi
->offset
= fi_result
;
5501 resultvi
->fullsize
= vi
->fullsize
;
5502 resultvi
->is_full_var
= true;
5503 if (DECL_RESULT (decl
))
5504 resultvi
->may_have_pointers
= true;
5505 gcc_assert (prev_vi
->offset
< resultvi
->offset
);
5506 prev_vi
->next
= resultvi
->id
;
5508 if (DECL_RESULT (decl
))
5509 insert_vi_for_tree (DECL_RESULT (decl
), resultvi
);
5512 /* Set up variables for each argument. */
5513 arg
= DECL_ARGUMENTS (decl
);
5514 for (i
= 0; i
< num_args
; i
++)
5517 const char *newname
;
5519 tree argdecl
= decl
;
5524 asprintf (&tempname
, "%s.arg%d", name
, i
);
5525 newname
= ggc_strdup (tempname
);
5528 argvi
= new_var_info (argdecl
, newname
);
5529 argvi
->offset
= fi_parm_base
+ i
;
5531 argvi
->is_full_var
= true;
5532 argvi
->fullsize
= vi
->fullsize
;
5534 argvi
->may_have_pointers
= true;
5535 gcc_assert (prev_vi
->offset
< argvi
->offset
);
5536 prev_vi
->next
= argvi
->id
;
5540 insert_vi_for_tree (arg
, argvi
);
5541 arg
= DECL_CHAIN (arg
);
5545 /* Add one representative for all further args. */
5549 const char *newname
;
5553 asprintf (&tempname
, "%s.varargs", name
);
5554 newname
= ggc_strdup (tempname
);
/* We need something that can be pointed to for va_start.  */
5558 decl
= build_fake_var_decl (ptr_type_node
);
5560 argvi
= new_var_info (decl
, newname
);
5561 argvi
->offset
= fi_parm_base
+ num_args
;
5563 argvi
->is_full_var
= true;
5564 argvi
->is_heap_var
= true;
5565 argvi
->fullsize
= vi
->fullsize
;
5566 gcc_assert (prev_vi
->offset
< argvi
->offset
);
5567 prev_vi
->next
= argvi
->id
;
5575 /* Return true if FIELDSTACK contains fields that overlap.
5576 FIELDSTACK is assumed to be sorted by offset. */
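/* E.g. a fieldstack containing two entries at offset 0, as can happen
   with certain C++ layouts (see PR 22488), is reported as
   overlapping.  */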
5579 check_for_overlaps (vec
<fieldoff_s
> fieldstack
)
5581 fieldoff_s
*fo
= NULL
;
5583 HOST_WIDE_INT lastoffset
= -1;
5585 FOR_EACH_VEC_ELT (fieldstack
, i
, fo
)
5587 if (fo
->offset
== lastoffset
)
5589 lastoffset
= fo
->offset
;
/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
   This will also create any varinfo structures necessary for fields
   of DECL.  */
5599 create_variable_info_for_1 (tree decl
, const char *name
)
5601 varinfo_t vi
, newvi
;
5602 tree decl_type
= TREE_TYPE (decl
);
5603 tree declsize
= DECL_P (decl
) ? DECL_SIZE (decl
) : TYPE_SIZE (decl_type
);
5604 vec
<fieldoff_s
> fieldstack
= vNULL
;
5609 || !host_integerp (declsize
, 1))
5611 vi
= new_var_info (decl
, name
);
5615 vi
->is_unknown_size_var
= true;
5616 vi
->is_full_var
= true;
5617 vi
->may_have_pointers
= true;
5621 /* Collect field information. */
5622 if (use_field_sensitive
5623 && var_can_have_subvars (decl
)
5624 /* ??? Force us to not use subfields for global initializers
5625 in IPA mode. Else we'd have to parse arbitrary initializers. */
5627 && is_global_var (decl
)
5628 && DECL_INITIAL (decl
)))
5630 fieldoff_s
*fo
= NULL
;
5631 bool notokay
= false;
5634 push_fields_onto_fieldstack (decl_type
, &fieldstack
, 0);
5636 for (i
= 0; !notokay
&& fieldstack
.iterate (i
, &fo
); i
++)
5637 if (fo
->has_unknown_size
/* We can't sort them if we have a field with a variable sized type,
   which will make notokay = true.  In that case, we are going to return
   without creating varinfos for the fields anyway, so sorting them is a
   waste to boot.  */
5650 sort_fieldstack (fieldstack
);
/* Due to some C++ FE issues, like PR 22488, we might end up with
   what appear to be overlapping fields even though they,
   in reality, do not overlap.  Until the C++ FE is fixed,
   we will simply disable field-sensitivity for these cases.  */
5655 notokay
= check_for_overlaps (fieldstack
);
5659 fieldstack
.release ();
5662 /* If we didn't end up collecting sub-variables create a full
5663 variable for the decl. */
5664 if (fieldstack
.length () <= 1
5665 || fieldstack
.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE
)
5667 vi
= new_var_info (decl
, name
);
5669 vi
->may_have_pointers
= true;
5670 vi
->fullsize
= TREE_INT_CST_LOW (declsize
);
5671 vi
->size
= vi
->fullsize
;
5672 vi
->is_full_var
= true;
5673 fieldstack
.release ();
5677 vi
= new_var_info (decl
, name
);
5678 vi
->fullsize
= TREE_INT_CST_LOW (declsize
);
5679 for (i
= 0, newvi
= vi
;
5680 fieldstack
.iterate (i
, &fo
);
5681 ++i
, newvi
= vi_next (newvi
))
5683 const char *newname
= "NULL";
5688 asprintf (&tempname
, "%s." HOST_WIDE_INT_PRINT_DEC
5689 "+" HOST_WIDE_INT_PRINT_DEC
, name
, fo
->offset
, fo
->size
);
5690 newname
= ggc_strdup (tempname
);
5693 newvi
->name
= newname
;
5694 newvi
->offset
= fo
->offset
;
5695 newvi
->size
= fo
->size
;
5696 newvi
->fullsize
= vi
->fullsize
;
5697 newvi
->may_have_pointers
= fo
->may_have_pointers
;
5698 newvi
->only_restrict_pointers
= fo
->only_restrict_pointers
;
5699 if (i
+ 1 < fieldstack
.length ())
5701 varinfo_t tem
= new_var_info (decl
, name
);
5702 newvi
->next
= tem
->id
;
5707 fieldstack
.release ();
5713 create_variable_info_for (tree decl
, const char *name
)
5715 varinfo_t vi
= create_variable_info_for_1 (decl
, name
);
5716 unsigned int id
= vi
->id
;
5718 insert_vi_for_tree (decl
, vi
);
5720 if (TREE_CODE (decl
) != VAR_DECL
)
5723 /* Create initial constraints for globals. */
5724 for (; vi
; vi
= vi_next (vi
))
5726 if (!vi
->may_have_pointers
5727 || !vi
->is_global_var
)
5730 /* Mark global restrict qualified pointers. */
5731 if ((POINTER_TYPE_P (TREE_TYPE (decl
))
5732 && TYPE_RESTRICT (TREE_TYPE (decl
)))
5733 || vi
->only_restrict_pointers
)
5735 make_constraint_from_global_restrict (vi
, "GLOBAL_RESTRICT");
5739 /* In non-IPA mode the initializer from nonlocal is all we need. */
5741 || DECL_HARD_REGISTER (decl
))
5742 make_copy_constraint (vi
, nonlocal_id
);
/* In IPA mode parse the initializer and generate proper constraints for it.  */
5748 struct varpool_node
*vnode
= varpool_get_node (decl
);
5750 /* For escaped variables initialize them from nonlocal. */
5751 if (!varpool_all_refs_explicit_p (vnode
))
5752 make_copy_constraint (vi
, nonlocal_id
);
5754 /* If this is a global variable with an initializer and we are in
5755 IPA mode generate constraints for it. */
5756 if (DECL_INITIAL (decl
)
5757 && vnode
->symbol
.definition
)
5759 vec
<ce_s
> rhsc
= vNULL
;
5760 struct constraint_expr lhs
, *rhsp
;
5762 get_constraint_for_rhs (DECL_INITIAL (decl
), &rhsc
);
5766 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
5767 process_constraint (new_constraint (lhs
, *rhsp
));
5768 /* If this is a variable that escapes from the unit
5769 the initializer escapes as well. */
5770 if (!varpool_all_refs_explicit_p (vnode
))
5772 lhs
.var
= escaped_id
;
5775 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
5776 process_constraint (new_constraint (lhs
, *rhsp
));
5786 /* Print out the points-to solution for VAR to FILE. */
5789 dump_solution_for_var (FILE *file
, unsigned int var
)
5791 varinfo_t vi
= get_varinfo (var
);
5795 /* Dump the solution for unified vars anyway, this avoids difficulties
5796 in scanning dumps in the testsuite. */
5797 fprintf (file
, "%s = { ", vi
->name
);
5798 vi
= get_varinfo (find (var
));
5799 EXECUTE_IF_SET_IN_BITMAP (vi
->solution
, 0, i
, bi
)
5800 fprintf (file
, "%s ", get_varinfo (i
)->name
);
5801 fprintf (file
, "}");
5803 /* But note when the variable was unified. */
5805 fprintf (file
, " same as %s", vi
->name
);
5807 fprintf (file
, "\n");
5810 /* Print the points-to solution for VAR to stdout. */
5813 debug_solution_for_var (unsigned int var
)
5815 dump_solution_for_var (stdout
, var
);
5818 /* Create varinfo structures for all of the variables in the
5819 function for intraprocedural mode. */
5822 intra_create_variable_infos (void)
5826 /* For each incoming pointer argument arg, create the constraint ARG
5827 = NONLOCAL or a dummy variable if it is a restrict qualified
5828 passed-by-reference argument. */
5829 for (t
= DECL_ARGUMENTS (current_function_decl
); t
; t
= DECL_CHAIN (t
))
5831 varinfo_t p
= get_vi_for_tree (t
);
5833 /* For restrict qualified pointers to objects passed by
5834 reference build a real representative for the pointed-to object.
5835 Treat restrict qualified references the same. */
5836 if (TYPE_RESTRICT (TREE_TYPE (t
))
5837 && ((DECL_BY_REFERENCE (t
) && POINTER_TYPE_P (TREE_TYPE (t
)))
5838 || TREE_CODE (TREE_TYPE (t
)) == REFERENCE_TYPE
)
5839 && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t
))))
5841 struct constraint_expr lhsc
, rhsc
;
5843 tree heapvar
= build_fake_var_decl (TREE_TYPE (TREE_TYPE (t
)));
5844 DECL_EXTERNAL (heapvar
) = 1;
5845 vi
= create_variable_info_for_1 (heapvar
, "PARM_NOALIAS");
5846 insert_vi_for_tree (heapvar
, vi
);
5851 rhsc
.type
= ADDRESSOF
;
5853 process_constraint (new_constraint (lhsc
, rhsc
));
5854 for (; vi
; vi
= vi_next (vi
))
5855 if (vi
->may_have_pointers
)
5857 if (vi
->only_restrict_pointers
)
5858 make_constraint_from_global_restrict (vi
, "GLOBAL_RESTRICT");
5860 make_copy_constraint (vi
, nonlocal_id
);
5865 if (POINTER_TYPE_P (TREE_TYPE (t
))
5866 && TYPE_RESTRICT (TREE_TYPE (t
)))
5867 make_constraint_from_global_restrict (p
, "PARM_RESTRICT");
5870 for (; p
; p
= vi_next (p
))
5872 if (p
->only_restrict_pointers
)
5873 make_constraint_from_global_restrict (p
, "PARM_RESTRICT");
5874 else if (p
->may_have_pointers
)
5875 make_constraint_from (p
, nonlocal_id
);
5880 /* Add a constraint for a result decl that is passed by reference. */
5881 if (DECL_RESULT (cfun
->decl
)
5882 && DECL_BY_REFERENCE (DECL_RESULT (cfun
->decl
)))
5884 varinfo_t p
, result_vi
= get_vi_for_tree (DECL_RESULT (cfun
->decl
));
5886 for (p
= result_vi
; p
; p
= vi_next (p
))
5887 make_constraint_from (p
, nonlocal_id
);
5890 /* Add a constraint for the incoming static chain parameter. */
5891 if (cfun
->static_chain_decl
!= NULL_TREE
)
5893 varinfo_t p
, chain_vi
= get_vi_for_tree (cfun
->static_chain_decl
);
5895 for (p
= chain_vi
; p
; p
= vi_next (p
))
5896 make_constraint_from (p
, nonlocal_id
);
5900 /* Structure used to put solution bitmaps in a hashtable so they can
5901 be shared among variables with the same points-to set. */
5903 typedef struct shared_bitmap_info
5907 } *shared_bitmap_info_t
;
5908 typedef const struct shared_bitmap_info
*const_shared_bitmap_info_t
;
5910 /* Shared_bitmap hashtable helpers. */
5912 struct shared_bitmap_hasher
: typed_free_remove
<shared_bitmap_info
>
5914 typedef shared_bitmap_info value_type
;
5915 typedef shared_bitmap_info compare_type
;
5916 static inline hashval_t
hash (const value_type
*);
5917 static inline bool equal (const value_type
*, const compare_type
*);
5920 /* Hash function for a shared_bitmap_info_t */
5923 shared_bitmap_hasher::hash (const value_type
*bi
)
5925 return bi
->hashcode
;
5928 /* Equality function for two shared_bitmap_info_t's. */
5931 shared_bitmap_hasher::equal (const value_type
*sbi1
, const compare_type
*sbi2
)
5933 return bitmap_equal_p (sbi1
->pt_vars
, sbi2
->pt_vars
);
5936 /* Shared_bitmap hashtable. */
5938 static hash_table
<shared_bitmap_hasher
> shared_bitmap_table
;
5940 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5941 existing instance if there is one, NULL otherwise. */
5944 shared_bitmap_lookup (bitmap pt_vars
)
5946 shared_bitmap_info
**slot
;
5947 struct shared_bitmap_info sbi
;
5949 sbi
.pt_vars
= pt_vars
;
5950 sbi
.hashcode
= bitmap_hash (pt_vars
);
5952 slot
= shared_bitmap_table
.find_slot_with_hash (&sbi
, sbi
.hashcode
,
5957 return (*slot
)->pt_vars
;
5961 /* Add a bitmap to the shared bitmap hashtable. */
5964 shared_bitmap_add (bitmap pt_vars
)
5966 shared_bitmap_info
**slot
;
5967 shared_bitmap_info_t sbi
= XNEW (struct shared_bitmap_info
);
5969 sbi
->pt_vars
= pt_vars
;
5970 sbi
->hashcode
= bitmap_hash (pt_vars
);
5972 slot
= shared_bitmap_table
.find_slot_with_hash (sbi
, sbi
->hashcode
, INSERT
);
5973 gcc_assert (!*slot
);
5978 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
5981 set_uids_in_ptset (bitmap into
, bitmap from
, struct pt_solution
*pt
)
5986 EXECUTE_IF_SET_IN_BITMAP (from
, 0, i
, bi
)
5988 varinfo_t vi
= get_varinfo (i
);
5990 /* The only artificial variables that are allowed in a may-alias
5991 set are heap variables. */
5992 if (vi
->is_artificial_var
&& !vi
->is_heap_var
)
5995 if (TREE_CODE (vi
->decl
) == VAR_DECL
5996 || TREE_CODE (vi
->decl
) == PARM_DECL
5997 || TREE_CODE (vi
->decl
) == RESULT_DECL
)
5999 /* If we are in IPA mode we will not recompute points-to
6000 sets after inlining so make sure they stay valid. */
6002 && !DECL_PT_UID_SET_P (vi
->decl
))
6003 SET_DECL_PT_UID (vi
->decl
, DECL_UID (vi
->decl
));
6005 /* Add the decl to the points-to set. Note that the points-to
6006 set contains global variables. */
6007 bitmap_set_bit (into
, DECL_PT_UID (vi
->decl
));
6008 if (vi
->is_global_var
)
6009 pt
->vars_contains_global
= true;
6015 /* Compute the points-to solution *PT for the variable VI. */
6017 static struct pt_solution
6018 find_what_var_points_to (varinfo_t orig_vi
)
6022 bitmap finished_solution
;
6026 struct pt_solution
*pt
;
/* This variable may have been collapsed, let's get the real variable.  */
6030 vi
= get_varinfo (find (orig_vi
->id
));
6032 /* See if we have already computed the solution and return it. */
6033 slot
= pointer_map_insert (final_solutions
, vi
);
6035 return *(struct pt_solution
*)*slot
;
6037 *slot
= pt
= XOBNEW (&final_solutions_obstack
, struct pt_solution
);
6038 memset (pt
, 0, sizeof (struct pt_solution
));
/* Translate artificial variables into SSA_NAME_PTR_INFO attributes.  */
6042 EXECUTE_IF_SET_IN_BITMAP (vi
->solution
, 0, i
, bi
)
6044 varinfo_t vi
= get_varinfo (i
);
6046 if (vi
->is_artificial_var
)
6048 if (vi
->id
== nothing_id
)
6050 else if (vi
->id
== escaped_id
)
6053 pt
->ipa_escaped
= 1;
6057 else if (vi
->id
== nonlocal_id
)
6059 else if (vi
->is_heap_var
)
6060 /* We represent heapvars in the points-to set properly. */
6062 else if (vi
->id
== readonly_id
)
6065 else if (vi
->id
== anything_id
6066 || vi
->id
== integer_id
)
6071 /* Instead of doing extra work, simply do not create
6072 elaborate points-to information for pt_anything pointers. */
6076 /* Share the final set of variables when possible. */
6077 finished_solution
= BITMAP_GGC_ALLOC ();
6078 stats
.points_to_sets_created
++;
6080 set_uids_in_ptset (finished_solution
, vi
->solution
, pt
);
6081 result
= shared_bitmap_lookup (finished_solution
);
6084 shared_bitmap_add (finished_solution
);
6085 pt
->vars
= finished_solution
;
6090 bitmap_clear (finished_solution
);
6096 /* Given a pointer variable P, fill in its points-to set. */
6099 find_what_p_points_to (tree p
)
6101 struct ptr_info_def
*pi
;
/* For parameters, get at the points-to set for the actual parm decl.  */
6107 if (TREE_CODE (p
) == SSA_NAME
6108 && SSA_NAME_IS_DEFAULT_DEF (p
)
6109 && (TREE_CODE (SSA_NAME_VAR (p
)) == PARM_DECL
6110 || TREE_CODE (SSA_NAME_VAR (p
)) == RESULT_DECL
))
6111 lookup_p
= SSA_NAME_VAR (p
);
6113 vi
= lookup_vi_for_tree (lookup_p
);
6117 pi
= get_ptr_info (p
);
6118 pi
->pt
= find_what_var_points_to (vi
);
6122 /* Query statistics for points-to solutions. */
6125 unsigned HOST_WIDE_INT pt_solution_includes_may_alias
;
6126 unsigned HOST_WIDE_INT pt_solution_includes_no_alias
;
6127 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias
;
6128 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias
;
6132 dump_pta_stats (FILE *s
)
6134 fprintf (s
, "\nPTA query stats:\n");
6135 fprintf (s
, " pt_solution_includes: "
6136 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
6137 HOST_WIDE_INT_PRINT_DEC
" queries\n",
6138 pta_stats
.pt_solution_includes_no_alias
,
6139 pta_stats
.pt_solution_includes_no_alias
6140 + pta_stats
.pt_solution_includes_may_alias
);
6141 fprintf (s
, " pt_solutions_intersect: "
6142 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
6143 HOST_WIDE_INT_PRINT_DEC
" queries\n",
6144 pta_stats
.pt_solutions_intersect_no_alias
,
6145 pta_stats
.pt_solutions_intersect_no_alias
6146 + pta_stats
.pt_solutions_intersect_may_alias
);
6150 /* Reset the points-to solution *PT to a conservative default
6151 (point to anything). */
6154 pt_solution_reset (struct pt_solution
*pt
)
6156 memset (pt
, 0, sizeof (struct pt_solution
));
6157 pt
->anything
= true;
/* Set the points-to solution *PT to point only to the variables
   in VARS.  VARS_CONTAINS_GLOBAL specifies whether that contains
   global variables.  */
6166 pt_solution_set (struct pt_solution
*pt
, bitmap vars
, bool vars_contains_global
)
6168 memset (pt
, 0, sizeof (struct pt_solution
));
6170 pt
->vars_contains_global
= vars_contains_global
;
6173 /* Set the points-to solution *PT to point only to the variable VAR. */
6176 pt_solution_set_var (struct pt_solution
*pt
, tree var
)
6178 memset (pt
, 0, sizeof (struct pt_solution
));
6179 pt
->vars
= BITMAP_GGC_ALLOC ();
6180 bitmap_set_bit (pt
->vars
, DECL_PT_UID (var
));
6181 pt
->vars_contains_global
= is_global_var (var
);
6184 /* Computes the union of the points-to solutions *DEST and *SRC and
6185 stores the result in *DEST. This changes the points-to bitmap
6186 of *DEST and thus may not be used if that might be shared.
6187 The points-to bitmap of *SRC and *DEST will not be shared after
6188 this function if they were not before. */
6191 pt_solution_ior_into (struct pt_solution
*dest
, struct pt_solution
*src
)
6193 dest
->anything
|= src
->anything
;
6196 pt_solution_reset (dest
);
6200 dest
->nonlocal
|= src
->nonlocal
;
6201 dest
->escaped
|= src
->escaped
;
6202 dest
->ipa_escaped
|= src
->ipa_escaped
;
6203 dest
->null
|= src
->null
;
6204 dest
->vars_contains_global
|= src
->vars_contains_global
;
6209 dest
->vars
= BITMAP_GGC_ALLOC ();
6210 bitmap_ior_into (dest
->vars
, src
->vars
);
6213 /* Return true if the points-to solution *PT is empty. */
6216 pt_solution_empty_p (struct pt_solution
*pt
)
6223 && !bitmap_empty_p (pt
->vars
))
6226 /* If the solution includes ESCAPED, check if that is empty. */
6228 && !pt_solution_empty_p (&cfun
->gimple_df
->escaped
))
/* If the solution includes the IPA ESCAPED solution, check if that is empty.  */
6233 && !pt_solution_empty_p (&ipa_escaped_pt
))
/* Return true if the points-to solution *PT points to only a single var, and
   return the var uid in *UID.  */
6243 pt_solution_singleton_p (struct pt_solution
*pt
, unsigned *uid
)
6245 if (pt
->anything
|| pt
->nonlocal
|| pt
->escaped
|| pt
->ipa_escaped
6246 || pt
->null
|| pt
->vars
== NULL
6247 || !bitmap_single_bit_set_p (pt
->vars
))
6250 *uid
= bitmap_first_set_bit (pt
->vars
);
6254 /* Return true if the points-to solution *PT includes global memory. */
6257 pt_solution_includes_global (struct pt_solution
*pt
)
6261 || pt
->vars_contains_global
)
6265 return pt_solution_includes_global (&cfun
->gimple_df
->escaped
);
6267 if (pt
->ipa_escaped
)
6268 return pt_solution_includes_global (&ipa_escaped_pt
);
6270 /* ??? This predicate is not correct for the IPA-PTA solution
6271 as we do not properly distinguish between unit escape points
6272 and global variables. */
6273 if (cfun
->gimple_df
->ipa_pta
)
6279 /* Return true if the points-to solution *PT includes the variable
6280 declaration DECL. */
6283 pt_solution_includes_1 (struct pt_solution
*pt
, const_tree decl
)
6289 && is_global_var (decl
))
6293 && bitmap_bit_p (pt
->vars
, DECL_PT_UID (decl
)))
6296 /* If the solution includes ESCAPED, check it. */
6298 && pt_solution_includes_1 (&cfun
->gimple_df
->escaped
, decl
))
/* If the solution includes the IPA ESCAPED solution, check it.  */
6303 && pt_solution_includes_1 (&ipa_escaped_pt
, decl
))
6310 pt_solution_includes (struct pt_solution
*pt
, const_tree decl
)
6312 bool res
= pt_solution_includes_1 (pt
, decl
);
6314 ++pta_stats
.pt_solution_includes_may_alias
;
6316 ++pta_stats
.pt_solution_includes_no_alias
;
6320 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
6324 pt_solutions_intersect_1 (struct pt_solution
*pt1
, struct pt_solution
*pt2
)
6326 if (pt1
->anything
|| pt2
->anything
)
6329 /* If either points to unknown global memory and the other points to
6330 any global memory they alias. */
6333 || pt2
->vars_contains_global
))
6335 && pt1
->vars_contains_global
))
6338 /* Check the escaped solution if required. */
6339 if ((pt1
->escaped
|| pt2
->escaped
)
6340 && !pt_solution_empty_p (&cfun
->gimple_df
->escaped
))
6342 /* If both point to escaped memory and that solution
6343 is not empty they alias. */
6344 if (pt1
->escaped
&& pt2
->escaped
)
6347 /* If either points to escaped memory see if the escaped solution
6348 intersects with the other. */
6350 && pt_solutions_intersect_1 (&cfun
->gimple_df
->escaped
, pt2
))
6352 && pt_solutions_intersect_1 (&cfun
->gimple_df
->escaped
, pt1
)))
6356 /* Check the escaped solution if required.
6357 ??? Do we need to check the local against the IPA escaped sets? */
6358 if ((pt1
->ipa_escaped
|| pt2
->ipa_escaped
)
6359 && !pt_solution_empty_p (&ipa_escaped_pt
))
6361 /* If both point to escaped memory and that solution
6362 is not empty they alias. */
6363 if (pt1
->ipa_escaped
&& pt2
->ipa_escaped
)
6366 /* If either points to escaped memory see if the escaped solution
6367 intersects with the other. */
6368 if ((pt1
->ipa_escaped
6369 && pt_solutions_intersect_1 (&ipa_escaped_pt
, pt2
))
6370 || (pt2
->ipa_escaped
6371 && pt_solutions_intersect_1 (&ipa_escaped_pt
, pt1
)))
6375 /* Now both pointers alias if their points-to solution intersects. */
6378 && bitmap_intersect_p (pt1
->vars
, pt2
->vars
));
6382 pt_solutions_intersect (struct pt_solution
*pt1
, struct pt_solution
*pt2
)
6384 bool res
= pt_solutions_intersect_1 (pt1
, pt2
);
6386 ++pta_stats
.pt_solutions_intersect_may_alias
;
6388 ++pta_stats
.pt_solutions_intersect_no_alias
;
6393 /* Dump points-to information to OUTFILE. */
6396 dump_sa_points_to_info (FILE *outfile
)
6400 fprintf (outfile
, "\nPoints-to sets\n\n");
6402 if (dump_flags
& TDF_STATS
)
6404 fprintf (outfile
, "Stats:\n");
6405 fprintf (outfile
, "Total vars: %d\n", stats
.total_vars
);
6406 fprintf (outfile
, "Non-pointer vars: %d\n",
6407 stats
.nonpointer_vars
);
6408 fprintf (outfile
, "Statically unified vars: %d\n",
6409 stats
.unified_vars_static
);
6410 fprintf (outfile
, "Dynamically unified vars: %d\n",
6411 stats
.unified_vars_dynamic
);
6412 fprintf (outfile
, "Iterations: %d\n", stats
.iterations
);
6413 fprintf (outfile
, "Number of edges: %d\n", stats
.num_edges
);
6414 fprintf (outfile
, "Number of implicit edges: %d\n",
6415 stats
.num_implicit_edges
);
6418 for (i
= 1; i
< varmap
.length (); i
++)
6420 varinfo_t vi
= get_varinfo (i
);
6421 if (!vi
->may_have_pointers
)
6423 dump_solution_for_var (outfile
, i
);
6428 /* Debug points-to information to stderr. */
6431 debug_sa_points_to_info (void)
6433 dump_sa_points_to_info (stderr
);
/* Initialize the always-existing constraint variables for NULL,
   ANYTHING, READONLY, and INTEGER.  */
6441 init_base_vars (void)
6443 struct constraint_expr lhs
, rhs
;
6444 varinfo_t var_anything
;
6445 varinfo_t var_nothing
;
6446 varinfo_t var_readonly
;
6447 varinfo_t var_escaped
;
6448 varinfo_t var_nonlocal
;
6449 varinfo_t var_storedanything
;
6450 varinfo_t var_integer
;
6452 /* Variable ID zero is reserved and should be NULL. */
6453 varmap
.safe_push (NULL
);
/* Create the NULL variable, used to represent that a variable points to NULL.  */
6457 var_nothing
= new_var_info (NULL_TREE
, "NULL");
6458 gcc_assert (var_nothing
->id
== nothing_id
);
6459 var_nothing
->is_artificial_var
= 1;
6460 var_nothing
->offset
= 0;
6461 var_nothing
->size
= ~0;
6462 var_nothing
->fullsize
= ~0;
6463 var_nothing
->is_special_var
= 1;
6464 var_nothing
->may_have_pointers
= 0;
6465 var_nothing
->is_global_var
= 0;
6467 /* Create the ANYTHING variable, used to represent that a variable
6468 points to some unknown piece of memory. */
6469 var_anything
= new_var_info (NULL_TREE
, "ANYTHING");
6470 gcc_assert (var_anything
->id
== anything_id
);
6471 var_anything
->is_artificial_var
= 1;
6472 var_anything
->size
= ~0;
6473 var_anything
->offset
= 0;
6474 var_anything
->fullsize
= ~0;
6475 var_anything
->is_special_var
= 1;
6477 /* Anything points to anything. This makes deref constraints just
6478 work in the presence of linked list and other p = *p type loops,
6479 by saying that *ANYTHING = ANYTHING. */
6481 lhs
.var
= anything_id
;
6483 rhs
.type
= ADDRESSOF
;
6484 rhs
.var
= anything_id
;
6487 /* This specifically does not use process_constraint because
6488 process_constraint ignores all anything = anything constraints, since all
6489 but this one are redundant. */
6490 constraints
.safe_push (new_constraint (lhs
, rhs
));
6492 /* Create the READONLY variable, used to represent that a variable
6493 points to readonly memory. */
6494 var_readonly
= new_var_info (NULL_TREE
, "READONLY");
6495 gcc_assert (var_readonly
->id
== readonly_id
);
6496 var_readonly
->is_artificial_var
= 1;
6497 var_readonly
->offset
= 0;
6498 var_readonly
->size
= ~0;
6499 var_readonly
->fullsize
= ~0;
6500 var_readonly
->is_special_var
= 1;
/* readonly memory points to anything, in order to make deref
   easier.  In reality, it points to anything the particular
   readonly variable can point to, but we don't track this
   separately.  */
6507 lhs
.var
= readonly_id
;
6509 rhs
.type
= ADDRESSOF
;
6510 rhs
.var
= readonly_id
; /* FIXME */
6512 process_constraint (new_constraint (lhs
, rhs
));
/* Create the ESCAPED variable, used to represent the set of escaped memory.  */
6516 var_escaped
= new_var_info (NULL_TREE
, "ESCAPED");
6517 gcc_assert (var_escaped
->id
== escaped_id
);
6518 var_escaped
->is_artificial_var
= 1;
6519 var_escaped
->offset
= 0;
6520 var_escaped
->size
= ~0;
6521 var_escaped
->fullsize
= ~0;
6522 var_escaped
->is_special_var
= 0;
/* Create the NONLOCAL variable, used to represent the set of nonlocal memory.  */
6526 var_nonlocal
= new_var_info (NULL_TREE
, "NONLOCAL");
6527 gcc_assert (var_nonlocal
->id
== nonlocal_id
);
6528 var_nonlocal
->is_artificial_var
= 1;
6529 var_nonlocal
->offset
= 0;
6530 var_nonlocal
->size
= ~0;
6531 var_nonlocal
->fullsize
= ~0;
6532 var_nonlocal
->is_special_var
= 1;
6534 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6536 lhs
.var
= escaped_id
;
6539 rhs
.var
= escaped_id
;
6541 process_constraint (new_constraint (lhs
, rhs
));
6543 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6544 whole variable escapes. */
6546 lhs
.var
= escaped_id
;
6549 rhs
.var
= escaped_id
;
6550 rhs
.offset
= UNKNOWN_OFFSET
;
6551 process_constraint (new_constraint (lhs
, rhs
));
/* *ESCAPED = NONLOCAL.  This is true because we have to assume
   everything pointed to by escaped points to what global memory can
   point to.  */
6557 lhs
.var
= escaped_id
;
6560 rhs
.var
= nonlocal_id
;
6562 process_constraint (new_constraint (lhs
, rhs
));
6564 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6565 global memory may point to global memory and escaped memory. */
6567 lhs
.var
= nonlocal_id
;
6569 rhs
.type
= ADDRESSOF
;
6570 rhs
.var
= nonlocal_id
;
6572 process_constraint (new_constraint (lhs
, rhs
));
6573 rhs
.type
= ADDRESSOF
;
6574 rhs
.var
= escaped_id
;
6576 process_constraint (new_constraint (lhs
, rhs
));
6578 /* Create the STOREDANYTHING variable, used to represent the set of
6579 variables stored to *ANYTHING. */
6580 var_storedanything
= new_var_info (NULL_TREE
, "STOREDANYTHING");
6581 gcc_assert (var_storedanything
->id
== storedanything_id
);
6582 var_storedanything
->is_artificial_var
= 1;
6583 var_storedanything
->offset
= 0;
6584 var_storedanything
->size
= ~0;
6585 var_storedanything
->fullsize
= ~0;
6586 var_storedanything
->is_special_var
= 0;
6588 /* Create the INTEGER variable, used to represent that a variable points
6589 to what an INTEGER "points to". */
6590 var_integer
= new_var_info (NULL_TREE
, "INTEGER");
6591 gcc_assert (var_integer
->id
== integer_id
);
6592 var_integer
->is_artificial_var
= 1;
6593 var_integer
->size
= ~0;
6594 var_integer
->fullsize
= ~0;
6595 var_integer
->offset
= 0;
6596 var_integer
->is_special_var
= 1;
6598 /* INTEGER = ANYTHING, because we don't know where a dereference of
6599 a random integer will point to. */
6601 lhs
.var
= integer_id
;
6603 rhs
.type
= ADDRESSOF
;
6604 rhs
.var
= anything_id
;
6606 process_constraint (new_constraint (lhs
, rhs
));
6609 /* Initialize things necessary to perform PTA */
6612 init_alias_vars (void)
6614 use_field_sensitive
= (MAX_FIELDS_FOR_FIELD_SENSITIVE
> 1);
6616 bitmap_obstack_initialize (&pta_obstack
);
6617 bitmap_obstack_initialize (&oldpta_obstack
);
6618 bitmap_obstack_initialize (&predbitmap_obstack
);
6620 constraint_pool
= create_alloc_pool ("Constraint pool",
6621 sizeof (struct constraint
), 30);
6622 variable_info_pool
= create_alloc_pool ("Variable info pool",
6623 sizeof (struct variable_info
), 30);
6624 constraints
.create (8);
6626 vi_for_tree
= pointer_map_create ();
6627 call_stmt_vars
= pointer_map_create ();
6629 memset (&stats
, 0, sizeof (stats
));
6630 shared_bitmap_table
.create (511);
6633 gcc_obstack_init (&fake_var_decl_obstack
);
6635 final_solutions
= pointer_map_create ();
6636 gcc_obstack_init (&final_solutions_obstack
);
6639 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6640 predecessor edges. */
6643 remove_preds_and_fake_succs (constraint_graph_t graph
)
/* Clear the implicit ref and address nodes from the successor lists.  */
6649 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
6651 if (graph
->succs
[i
])
6652 bitmap_clear_range (graph
->succs
[i
], FIRST_REF_NODE
,
6653 FIRST_REF_NODE
* 2);
6656 /* Free the successor list for the non-ref nodes. */
6657 for (i
= FIRST_REF_NODE
+ 1; i
< graph
->size
; i
++)
6659 if (graph
->succs
[i
])
6660 BITMAP_FREE (graph
->succs
[i
]);
/* Now resize the successor list to the current number of variables,
   and blow away the predecessor bitmaps.  */
6665 graph
->size
= varmap
.length ();
6666 graph
->succs
= XRESIZEVEC (bitmap
, graph
->succs
, graph
->size
);
6668 free (graph
->implicit_preds
);
6669 graph
->implicit_preds
= NULL
;
6670 free (graph
->preds
);
6671 graph
->preds
= NULL
;
6672 bitmap_obstack_release (&predbitmap_obstack
);
6675 /* Solve the constraint set. */
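/* The phases below mirror the dump messages: build the predecessor
   graph, perform variable substitution, rewrite the constraints and
   build the successor graph, find indirect cycles, and finally
   propagate the solution with solve_graph.  */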
6678 solve_constraints (void)
6680 struct scc_info
*si
;
6684 "\nCollapsing static cycles and doing variable "
6687 init_graph (varmap
.length () * 2);
6690 fprintf (dump_file
, "Building predecessor graph\n");
6691 build_pred_graph ();
6694 fprintf (dump_file
, "Detecting pointer and location "
6696 si
= perform_var_substitution (graph
);
6699 fprintf (dump_file
, "Rewriting constraints and unifying "
6701 rewrite_constraints (graph
, si
);
6703 build_succ_graph ();
6705 free_var_substitution_info (si
);
6707 /* Attach complex constraints to graph nodes. */
6708 move_complex_constraints (graph
);
6711 fprintf (dump_file
, "Uniting pointer but not location equivalent "
6713 unite_pointer_equivalences (graph
);
6716 fprintf (dump_file
, "Finding indirect cycles\n");
6717 find_indirect_cycles (graph
);
6719 /* Implicit nodes and predecessors are no longer necessary at this
6721 remove_preds_and_fake_succs (graph
);
6723 if (dump_file
&& (dump_flags
& TDF_GRAPH
))
6725 fprintf (dump_file
, "\n\n// The constraint graph before solve-graph "
6726 "in dot format:\n");
6727 dump_constraint_graph (dump_file
);
6728 fprintf (dump_file
, "\n\n");
6732 fprintf (dump_file
, "Solving graph\n");
6734 solve_graph (graph
);
6736 if (dump_file
&& (dump_flags
& TDF_GRAPH
))
6738 fprintf (dump_file
, "\n\n// The constraint graph after solve-graph "
6739 "in dot format:\n");
6740 dump_constraint_graph (dump_file
);
6741 fprintf (dump_file
, "\n\n");
6745 dump_sa_points_to_info (dump_file
);
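/* Editor's summary (informal; it merely restates the calls above in order):
   the solver pipeline is

     build_pred_graph           offline constraint graph
     perform_var_substitution   detect pointer/location equivalences
     rewrite_constraints        collapse equivalent variables
     build_succ_graph           online graph used for propagation
     find_indirect_cycles       prepare online cycle elimination
     solve_graph                worklist propagation to a fixed point

   For example, for constraints generated from 'a = b; c = a;' the
   substitution phase may already unify a and c with b's equivalence class
   before any propagation happens, which is what the "unifying variables"
   dump lines above refer to.  */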
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  */

static void
compute_points_to_sets (void)
{
  basic_block bb;
  unsigned i;
  varinfo_t vi;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();

  intra_create_variable_infos ();

  /* Now walk all statements and build the constraint set.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);

	  if (! virtual_operand_p (gimple_phi_result (phi)))
	    find_func_aliases (phi);
	}

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  find_func_aliases (stmt);
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file, 0);
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the points-to set for ESCAPED used for call-clobber analysis.  */
  cfun->gimple_df->escaped = find_what_var_points_to (get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  cfun->gimple_df->escaped.escaped = 0;

  /* Mark escaped HEAP variables as global.  */
  FOR_EACH_VEC_ELT (varmap, i, vi)
    if (vi->is_heap_var
	&& !vi->is_global_var)
      DECL_EXTERNAL (vi->decl) = vi->is_global_var
	= pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);

  /* Compute the points-to sets for pointer SSA_NAMEs.  */
  for (i = 0; i < num_ssa_names; ++i)
    {
      tree ptr = ssa_name (i);
      if (ptr
	  && POINTER_TYPE_P (TREE_TYPE (ptr)))
	find_what_p_points_to (ptr);
    }

  /* Compute the call-used/clobbered sets.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  struct pt_solution *pt;

	  if (!is_gimple_call (stmt))
	    continue;

	  pt = gimple_call_use_set (stmt);
	  if (gimple_call_flags (stmt) & ECF_CONST)
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly used by calls.  */
	      /* ???  ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }

	  pt = gimple_call_clobber_set (stmt);
	  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly clobbered by calls.  */
	      /* ???  ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }
	}
    }

  timevar_pop (TV_TREE_PTA);
}
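/* Editor's illustration (hypothetical example, not from the original
   sources): after compute_points_to_sets the per-call sets can be read
   from the -fdump-tree-alias style dumps.  For a function like

     extern void sink (int *);

     int *
     f (void)
     {
       int local;
       static int persistent;
       local = 0;
       sink (&persistent);
       return &persistent;
     }

   one would expect 'persistent' to end up in cfun->gimple_df->escaped
   (its address is passed to an external call and returned), while
   'local', whose address is never taken, stays out of the call-use and
   call-clobber sets of the 'sink' call.  The exact sets also depend on
   the ECF_CONST/ECF_PURE/ECF_NOVOPS flags checked above.  */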
/* Delete created points-to sets.  */

static void
delete_points_to_sets (void)
{
  unsigned int i;

  shared_bitmap_table.dispose ();
  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "Points to sets created:%d\n",
	     stats.points_to_sets_created);

  pointer_map_destroy (vi_for_tree);
  pointer_map_destroy (call_stmt_vars);
  bitmap_obstack_release (&pta_obstack);
  constraints.release ();

  for (i = 0; i < graph->size; i++)
    graph->complex[i].release ();
  free (graph->complex);

  free (graph->succs);
  free (graph->pe_rep);
  free (graph->indirect_cycles);
  free (graph);

  varmap.release ();
  free_alloc_pool (variable_info_pool);
  free_alloc_pool (constraint_pool);

  obstack_free (&fake_var_decl_obstack, NULL);

  pointer_map_destroy (final_solutions);
  obstack_free (&final_solutions_obstack, NULL);
}
/* Compute points-to information for every SSA_NAME pointer in the
   current function and compute the transitive closure of escaped
   variables to re-initialize the call-clobber states of local variables.  */

unsigned int
compute_may_aliases (void)
{
  if (cfun->gimple_df->ipa_pta)
    {
      if (dump_file)
	{
	  fprintf (dump_file, "\nNot re-computing points-to information "
		   "because IPA points-to information is available.\n\n");

	  /* But still dump what we have remaining.  */
	  dump_alias_info (dump_file);
	}

      return 0;
    }

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  Compute the reachability set of escaped and call-used
     variables.  */
  compute_points_to_sets ();

  /* Debugging dumps.  */
  if (dump_file)
    dump_alias_info (dump_file);

  /* Deallocate memory used by aliasing data structures and the internal
     points-to solution.  */
  delete_points_to_sets ();

  gcc_assert (!need_ssa_update_p (cfun));

  return 0;
}

static bool
gate_tree_pta (void)
{
  return flag_tree_pta;
}
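/* Editor's note: flag_tree_pta is the internal switch behind the
   -ftree-pta command-line option (typically on once optimizing), so the
   gate above simply keys the intraprocedural points-to pass off that
   flag.  */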
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  */

namespace {

const pass_data pass_data_build_alias =
{
  GIMPLE_PASS, /* type */
  "alias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  false, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_alias : public gimple_opt_pass
{
public:
  pass_build_alias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_alias, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_tree_pta (); }

}; // class pass_build_alias

} // anon namespace

gimple_opt_pass *
make_pass_build_alias (gcc::context *ctxt)
{
  return new pass_build_alias (ctxt);
}
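/* Editor's note (registration details assumed from the common pass
   machinery rather than from this file): make_pass_build_alias is the
   factory the pass manager calls for the pass listed in passes.def; the
   pass itself has no execute step and merely requests TODO_rebuild_alias,
   which is what triggers compute_may_aliases above.  The "ealias" variant
   below follows the same pattern.  */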
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  */

namespace {

const pass_data pass_data_build_ealias =
{
  GIMPLE_PASS, /* type */
  "ealias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  false, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_ealias : public gimple_opt_pass
{
public:
  pass_build_ealias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_ealias, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_tree_pta (); }

}; // class pass_build_ealias

} // anon namespace

gimple_opt_pass *
make_pass_build_ealias (gcc::context *ctxt)
{
  return new pass_build_ealias (ctxt);
}
/* Return true if we should execute IPA PTA.  */

static bool
gate_ipa_pta (void)
{
  return (optimize
	  && flag_ipa_pta
	  /* Don't bother doing anything if the program has errors.  */
	  && !seen_error ());
}

/* IPA PTA solutions for ESCAPED.  */
struct pt_solution ipa_escaped_pt
  = { true, false, false, false, false, false, NULL };
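/* Editor's note (field order assumed from struct pt_solution as defined in
   this file's era): the aggregate initializer above sets the 'anything'
   flag and leaves the remaining flags and the 'vars' bitmap cleared, i.e.
   before ipa_pta_execute has run, the unit-wide ESCAPED solution
   conservatively points to anything.  At run time

     pt_solution_reset (&ipa_escaped_pt);

   would produce the same conservative "points to anything" solution.  */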
/* Associate node with varinfo DATA.  Worker for
   cgraph_for_node_and_aliases.  */
static bool
associate_varinfo_to_alias (struct cgraph_node *node, void *data)
{
  if ((node->symbol.alias || node->thunk.thunk_p)
      && node->symbol.analyzed)
    insert_vi_for_tree (node->symbol.decl, (varinfo_t)data);
  return false;
}
/* Execute the driver for IPA PTA.  */
static unsigned int
ipa_pta_execute (void)
{
  struct cgraph_node *node;
  struct varpool_node *var;
  int from;

  in_ipa_mode = 1;

  init_alias_vars ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_symtab (dump_file);
      fprintf (dump_file, "\n");
    }

  /* Build the constraints.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      varinfo_t vi;
      /* Nodes without a body are not interesting.  Especially do not
	 visit clones at this point for now - we get duplicate decls
	 there for inline clones at least.  */
      if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
	continue;
      cgraph_get_body (node);

      gcc_assert (!node->clone_of);

      vi = create_function_info_for (node->symbol.decl,
				     alias_get_name (node->symbol.decl));
      cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
    }

  /* Create constraints for global variables and their initializers.  */
  FOR_EACH_VARIABLE (var)
    {
      if (var->symbol.alias && var->symbol.analyzed)
	continue;

      get_vi_for_tree (var->symbol.decl);
    }

  if (dump_file)
    {
      fprintf (dump_file,
	       "Generating constraints for global initializers\n\n");
      dump_constraints (dump_file, 0);
      fprintf (dump_file, "\n");
    }
  from = constraints.length ();

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      struct function *func;
      basic_block bb;

      /* Nodes without a body are not interesting.  */
      if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file,
		   "Generating constraints for %s", cgraph_node_name (node));
	  if (DECL_ASSEMBLER_NAME_SET_P (node->symbol.decl))
	    fprintf (dump_file, " (%s)",
		     IDENTIFIER_POINTER
		       (DECL_ASSEMBLER_NAME (node->symbol.decl)));
	  fprintf (dump_file, "\n");
	}

      func = DECL_STRUCT_FUNCTION (node->symbol.decl);

      /* For externally visible or attribute used annotated functions use
	 local constraints for their arguments.
	 For local functions we see all callers and thus do not need initial
	 constraints for parameters.  */
      if (node->symbol.used_from_other_partition
	  || node->symbol.externally_visible
	  || node->symbol.force_output)
	{
	  intra_create_variable_infos ();

	  /* We also need to make function return values escape.  Nothing
	     escapes by returning from main though.  */
	  if (!MAIN_NAME_P (DECL_NAME (node->symbol.decl)))
	    {
	      varinfo_t fi, rvi;
	      fi = lookup_vi_for_tree (node->symbol.decl);
	      rvi = first_vi_for_offset (fi, fi_result);
	      if (rvi && rvi->offset == fi_result)
		{
		  struct constraint_expr includes;
		  struct constraint_expr var;
		  includes.var = escaped_id;
		  includes.offset = 0;
		  includes.type = SCALAR;
		  var.var = rvi->id;
		  var.offset = 0;
		  var.type = SCALAR;
		  process_constraint (new_constraint (includes, var));
		}
	    }
	}

      /* Build constraints for the function body.  */
      FOR_EACH_BB_FN (bb, func)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple phi = gsi_stmt (gsi);

	      if (! virtual_operand_p (gimple_phi_result (phi)))
		find_func_aliases (phi);
	    }

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);

	      find_func_aliases (stmt);
	      find_func_clobbers (stmt);
	    }
	}

      if (dump_file)
	{
	  fprintf (dump_file, "\n");
	  dump_constraints (dump_file, from);
	  fprintf (dump_file, "\n");
	}
      from = constraints.length ();
    }
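  /* Editor's illustration (hypothetical unit, not from the original
     sources): with -fipa-pta the loop above creates one function-info
     variable per defined function and constraints for each body, so for

       static int x;
       static void set (int **p) { *p = &x; }
       void entry (int **p) { set (p); }

     only 'entry' (externally visible) receives the conservative parameter
     constraints from intra_create_variable_infos, while the static 'set'
     is modeled purely through its fi_* sub-variables and the call from
     'entry'; the combined system is then resolved by solve_constraints
     below.  */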
  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the global points-to sets for ESCAPED.
     ???  Note that the computed escape set is not correct
     for the whole unit as we fail to consider graph edges to
     externally visible functions.  */
  ipa_escaped_pt = find_what_var_points_to (get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  ipa_escaped_pt.ipa_escaped = 0;

  /* Assign the points-to sets to the SSA names in the unit.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      tree ptr;
      struct function *fn;
      unsigned i;
      varinfo_t fi;
      basic_block bb;
      struct pt_solution uses, clobbers;
      struct cgraph_edge *e;

      /* Nodes without a body are not interesting.  */
      if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
	continue;

      fn = DECL_STRUCT_FUNCTION (node->symbol.decl);

      /* Compute the points-to sets for pointer SSA_NAMEs.  */
      FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
	{
	  if (ptr
	      && POINTER_TYPE_P (TREE_TYPE (ptr)))
	    find_what_p_points_to (ptr);
	}

      /* Compute the call-use and call-clobber sets for all direct calls.  */
      fi = lookup_vi_for_tree (node->symbol.decl);
      gcc_assert (fi->is_fn_info);
      clobbers
	= find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers));
      uses = find_what_var_points_to (first_vi_for_offset (fi, fi_uses));
      for (e = node->callers; e; e = e->next_caller)
	{
	  *gimple_call_clobber_set (e->call_stmt) = clobbers;
	  *gimple_call_use_set (e->call_stmt) = uses;
	}

      /* Compute the call-use and call-clobber sets for indirect calls
	 and calls to external functions.  */
      FOR_EACH_BB_FN (bb, fn)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gimple stmt = gsi_stmt (gsi);
	      struct pt_solution *pt;
	      varinfo_t vi, fi;
	      tree decl;

	      if (!is_gimple_call (stmt))
		continue;

	      /* Handle direct calls to external functions.  */
	      decl = gimple_call_fndecl (stmt);
	      if (decl
		  && (!(fi = lookup_vi_for_tree (decl))
		      || !fi->is_fn_info))
		{
		  pt = gimple_call_use_set (stmt);
		  if (gimple_call_flags (stmt) & ECF_CONST)
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly used by calls.  */
		      /* ???  ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }

		  pt = gimple_call_clobber_set (stmt);
		  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly clobbered by calls.  */
		      /* ???  ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }
		}
	      /* Handle indirect calls.  */
	      else if (!decl
		       && (fi = get_fi_for_callee (stmt)))
		{
		  /* We need to accumulate all clobbers/uses of all possible
		     callees.  */
		  fi = get_varinfo (find (fi->id));
		  /* If we cannot constrain the set of functions we'll end up
		     calling we end up using/clobbering everything.  */
		  if (bitmap_bit_p (fi->solution, anything_id)
		      || bitmap_bit_p (fi->solution, nonlocal_id)
		      || bitmap_bit_p (fi->solution, escaped_id))
		    {
		      pt_solution_reset (gimple_call_clobber_set (stmt));
		      pt_solution_reset (gimple_call_use_set (stmt));
		    }
		  else
		    {
		      bitmap_iterator bi;
		      unsigned i;
		      struct pt_solution *uses, *clobbers;

		      uses = gimple_call_use_set (stmt);
		      clobbers = gimple_call_clobber_set (stmt);
		      memset (uses, 0, sizeof (struct pt_solution));
		      memset (clobbers, 0, sizeof (struct pt_solution));
		      EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
			{
			  struct pt_solution sol;

			  vi = get_varinfo (i);
			  if (!vi->is_fn_info)
			    {
			      /* ???  We could be more precise here?  */
			      uses->nonlocal = 1;
			      uses->ipa_escaped = 1;
			      clobbers->nonlocal = 1;
			      clobbers->ipa_escaped = 1;
			      continue;
			    }

			  if (!uses->anything)
			    {
			      sol = find_what_var_points_to
				      (first_vi_for_offset (vi, fi_uses));
			      pt_solution_ior_into (uses, &sol);
			    }
			  if (!clobbers->anything)
			    {
			      sol = find_what_var_points_to
				      (first_vi_for_offset (vi, fi_clobbers));
			      pt_solution_ior_into (clobbers, &sol);
			    }
			}
		    }
		}
	    }
	}

      fn->gimple_df->ipa_pta = true;
    }

  delete_points_to_sets ();

  in_ipa_mode = 0;

  return 0;
}
namespace {

const pass_data pass_data_ipa_pta =
{
  SIMPLE_IPA_PASS, /* type */
  "pta", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_PTA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_ipa_pta : public simple_ipa_opt_pass
{
public:
  pass_ipa_pta (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_ipa_pta (); }
  unsigned int execute () { return ipa_pta_execute (); }

}; // class pass_ipa_pta

} // anon namespace

simple_ipa_opt_pass *
make_pass_ipa_pta (gcc::context *ctxt)
{
  return new pass_ipa_pta (ctxt);
}
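/* Editor's note: the IPA pass above only runs when -fipa-pta is given (see
   gate_ipa_pta), and its TODO_update_ssa finish flag keeps the SSA form
   consistent after points-to information has been attached to the unit.  A
   typical invocation would be

     gcc -O2 -fipa-pta -fdump-ipa-pta file.c

   where the dump option name is assumed to follow the pass name given in
   pass_data_ipa_pta above.  */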