1 /* Tree based points-to analysis
2 Copyright (C) 2005-2019 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "alloc-pool.h"
29 #include "tree-pass.h"
32 #include "tree-pretty-print.h"
33 #include "diagnostic-core.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
37 #include "gimple-iterator.h"
38 #include "tree-into-ssa.h"
41 #include "gimple-walk.h"
43 #include "stringpool.h"
48 /* The idea behind this analyzer is to generate set constraints from the
49 program, then solve the resulting constraints in order to generate the
points-to sets.
52 Set constraints are a way of modeling program analysis problems that
53 involve sets. They consist of an inclusion constraint language,
54 describing the variables (each variable is a set) and operations that
55 are involved on the variables, and a set of rules that derive facts
56 from these operations. To solve a system of set constraints, you derive
57 all possible facts under the rules, which gives you the correct sets
60 See "Efficient Field-sensitive pointer analysis for C" by "David
61 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
62 http://citeseer.ist.psu.edu/pearce04efficient.html
64 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
65 of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
66 http://citeseer.ist.psu.edu/heintze01ultrafast.html
68 There are three types of real constraint expressions, DEREF,
69 ADDRESSOF, and SCALAR. Each constraint expression consists
70 of a constraint type, a variable, and an offset.
72 SCALAR is a constraint expression type used to represent x, whether
73 it appears on the LHS or the RHS of a statement.
74 DEREF is a constraint expression type used to represent *x, whether
75 it appears on the LHS or the RHS of a statement.
76 ADDRESSOF is a constraint expression used to represent &x, whether
77 it appears on the LHS or the RHS of a statement.
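As a small illustration (a sketch of the constraint forms, not verbatim
dumper output), typical statements give rise to:

p = &a;   ->   p = &a      (SCALAR = ADDRESSOF)
p = q;    ->   p = q       (SCALAR = SCALAR)
p = *q;   ->   p = *q      (SCALAR = DEREF)
*p = q;   ->   *p = q      (DEREF  = SCALAR)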
79 Each pointer variable in the program is assigned an integer id, and
80 each field of a structure variable is assigned an integer id as well.
82 Structure variables are linked to their list of fields through a "next
83 field" in each variable that points to the next field in offset
85 Each variable for a structure field has
87 1. "size", that tells the size in bits of that field.
88 2. "fullsize, that tells the size in bits of the entire structure.
89 3. "offset", that tells the offset in bits from the beginning of the
90 structure to this field.
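For example (assuming a 32-bit int), given

struct f
{
int a;
int b;
} foo;
int bar;

the variables created look like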
102 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
103 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
104 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
107 In order to solve the system of set constraints, the following is
done:

110 1. Each constraint variable x has a solution set associated with it,
Sol(x).
113 2. Constraints are separated into direct, copy, and complex.
114 Direct constraints are ADDRESSOF constraints that require no extra
115 processing, such as P = &Q
116 Copy constraints are those of the form P = Q.
117 Complex constraints are all the constraints involving dereferences
118 and offsets (including offsetted copies).
120 3. All direct constraints of the form P = &Q are processed, such
121 that Q is added to Sol(P)
123 4. All complex constraints for a given constraint variable are stored in a
124 linked list attached to that variable's node.
126 5. A directed graph is built out of the copy constraints. Each
127 constraint variable is a node in the graph, and an edge from
128 Q to P is added for each copy constraint of the form P = Q
130 6. The graph is then walked, and solution sets are
131 propagated along the copy edges, such that an edge from Q to P
132 causes Sol(P) <- Sol(P) union Sol(Q).
134 7. As we visit each node, all complex constraints associated with
135 that node are processed by adding appropriate copy edges to the graph, or the
136 appropriate variables to the solution set.
138 8. The process of walking the graph is iterated until no solution
sets change.
141 Prior to walking the graph in steps 6 and 7, we perform static
142 cycle elimination on the constraint graph, as well
143 as off-line variable substitution.
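As a small worked example (a sketch, not actual solver output), consider
the constraints

p = &a
q = p
r = *q

Step 3 puts a into Sol(p); step 5 adds a copy edge from p to q, so the
graph walk propagates a into Sol(q); the complex constraint r = *q is then
processed by adding a copy edge from a to r, so Sol(r) ends up as Sol(a).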
145 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
146 on and turned into anything), but isn't. You can just see what offset
147 inside the pointed-to struct it's going to access.
149 TODO: Constant bounded arrays can be handled as if they were structs of the
150 same number of elements.
152 TODO: Modeling heap and incoming pointers becomes much better if we
153 add fields to them as we discover them, which we could do.
155 TODO: We could handle unions, but to be honest, it's probably not
156 worth the pain or slowdown. */
158 /* IPA-PTA optimizations possible.
160 When the indirect function called is ANYTHING we can add disambiguation
161 based on the function signatures (or simply the parameter count which
162 is the varinfo size). We also do not need to consider functions that
163 do not have their address taken.
165 The is_global_var bit which marks escape points is overly conservative
166 in IPA mode. Split it to is_escape_point and is_global_var - only
167 externally visible globals are escape points in IPA mode.
168 There is now is_ipa_escape_point but this is only used in a few
171 The way we introduce DECL_PT_UID to avoid fixing up all points-to
172 sets in the translation unit when we copy a DECL during inlining
173 pessimizes precision. The advantage is that the DECL_PT_UID keeps
174 compile-time and memory usage overhead low - the points-to sets
175 do not grow or get unshared as they would during a fixup phase.
176 An alternative solution is to delay IPA PTA until after all
177 inlining transformations have been applied.
179 The way we propagate clobber/use information isn't optimized.
180 It should use a new complex constraint that properly filters
181 out local variables of the callee (though that would make
182 the sets invalid after inlining). OTOH we might as well
183 admit defeat to WHOPR and simply do all the clobber/use analysis
184 and propagation after PTA finished but before we threw away
185 points-to information for memory variables. WHOPR and PTA
186 do not play along well anyway - the whole constraint solving
187 would need to be done in WPA phase and it will be very interesting
188 to apply the results to local SSA names during LTRANS phase.
190 We probably should compute a per-function unit-ESCAPE solution
191 propagating it simply like the clobber / uses solutions. The
192 solution can go alongside the non-IPA escaped solution and be
193 used to query which vars escape the unit through a function.
194 This is also required to make the escaped-HEAP trick work in IPA mode.
196 We never put function decls in points-to sets so we do not
197 keep the set of called functions for indirect calls.
199 And probably more. */
201 static bool use_field_sensitive = true;
202 static int in_ipa_mode = 0;

204 /* Used for predecessor bitmaps. */
205 static bitmap_obstack predbitmap_obstack;

207 /* Used for points-to sets. */
208 static bitmap_obstack pta_obstack;

210 /* Used for oldsolution members of variables. */
211 static bitmap_obstack oldpta_obstack;

213 /* Used for per-solver-iteration bitmaps. */
214 static bitmap_obstack iteration_obstack;
216 static unsigned int create_variable_info_for (tree, const char *, bool);
217 typedef struct constraint_graph *constraint_graph_t;
218 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

221 typedef struct constraint *constraint_t;
224 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
226 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
228 static struct constraint_stats
230 unsigned int total_vars
;
231 unsigned int nonpointer_vars
;
232 unsigned int unified_vars_static
;
233 unsigned int unified_vars_dynamic
;
234 unsigned int iterations
;
235 unsigned int num_edges
;
236 unsigned int num_implicit_edges
;
237 unsigned int points_to_sets_created
;
242 /* ID of this variable */
245 /* True if this is a variable created by the constraint analysis, such as
246 heap variables and constraints we had to break up. */
247 unsigned int is_artificial_var
: 1;
249 /* True if this is a special variable whose solution set should not be
251 unsigned int is_special_var
: 1;
253 /* True for variables whose size is not known or variable. */
254 unsigned int is_unknown_size_var
: 1;
256 /* True for (sub-)fields that represent a whole variable. */
257 unsigned int is_full_var
: 1;
259 /* True if this is a heap variable. */
260 unsigned int is_heap_var
: 1;
262 /* True if this is a register variable. */
263 unsigned int is_reg_var
: 1;
265 /* True if this field may contain pointers. */
266 unsigned int may_have_pointers
: 1;
268 /* True if this field has only restrict qualified pointers. */
269 unsigned int only_restrict_pointers
: 1;
271 /* True if this represents a heap var created for a restrict qualified
273 unsigned int is_restrict_var
: 1;
275 /* True if this represents a global variable. */
276 unsigned int is_global_var
: 1;
278 /* True if this represents a module escape point for IPA analysis. */
279 unsigned int is_ipa_escape_point
: 1;
281 /* True if this represents an IPA function info. */
282 unsigned int is_fn_info
: 1;
284 /* ??? Store somewhere better. */
287 /* The ID of the variable for the next field in this structure
288 or zero for the last field in this structure. */
291 /* The ID of the variable for the first field in this structure. */
294 /* Offset of this variable, in bits, from the base variable */
295 unsigned HOST_WIDE_INT offset
;
297 /* Size of the variable, in bits. */
298 unsigned HOST_WIDE_INT size
;
300 /* Full size of the base variable, in bits. */
301 unsigned HOST_WIDE_INT fullsize
;
303 /* In IPA mode the shadow UID in case the variable needs to be duplicated in
304 the final points-to solution because it reaches its containing
305 function recursively. Zero if none is needed. */
306 unsigned int shadow_var_uid
;
308 /* Name of this variable */
311 /* Tree that this variable is associated with. */
314 /* Points-to set for this variable. */
317 /* Old points-to set for this variable. */
320 typedef struct variable_info
*varinfo_t
;
322 static varinfo_t
first_vi_for_offset (varinfo_t
, unsigned HOST_WIDE_INT
);
323 static varinfo_t
first_or_preceding_vi_for_offset (varinfo_t
,
324 unsigned HOST_WIDE_INT
);
325 static varinfo_t
lookup_vi_for_tree (tree
);
326 static inline bool type_can_have_subvars (const_tree
);
327 static void make_param_constraints (varinfo_t
);
329 /* Pool of variable info structures. */
330 static object_allocator
<variable_info
> variable_info_pool
331 ("Variable info pool");
333 /* Map varinfo to final pt_solution. */
334 static hash_map
<varinfo_t
, pt_solution
*> *final_solutions
;
335 struct obstack final_solutions_obstack
;
337 /* Table of variable info structures for constraint variables.
338 Indexed directly by variable info id. */
339 static vec
<varinfo_t
> varmap
;
341 /* Return the varmap element N */
343 static inline varinfo_t
344 get_varinfo (unsigned int n
)
349 /* Return the next variable in the list of sub-variables of VI
350 or NULL if VI is the last sub-variable. */
352 static inline varinfo_t
353 vi_next (varinfo_t vi
)
355 return get_varinfo (vi
->next
);
358 /* Static IDs for the special variables. Variable ID zero is unused
359 and used as terminator for the sub-variable chain. */
360 enum { nothing_id
= 1, anything_id
= 2, string_id
= 3,
361 escaped_id
= 4, nonlocal_id
= 5,
362 storedanything_id
= 6, integer_id
= 7 };
364 /* Return a new variable info structure for a variable
365 named NAME, associated with tree T. Append it
366 to the vector of variable info structures. */
369 new_var_info (tree t
, const char *name
, bool add_id
)
371 unsigned index
= varmap
.length ();
372 varinfo_t ret
= variable_info_pool
.allocate ();
374 if (dump_file
&& add_id
)
376 char *tempname
= xasprintf ("%s(%d)", name
, index
);
377 name
= ggc_strdup (tempname
);
384 /* Vars without decl are artificial and do not have sub-variables. */
385 ret
->is_artificial_var
= (t
== NULL_TREE
);
386 ret
->is_special_var
= false;
387 ret
->is_unknown_size_var
= false;
388 ret
->is_full_var
= (t
== NULL_TREE
);
389 ret
->is_heap_var
= false;
390 ret
->may_have_pointers
= true;
391 ret
->only_restrict_pointers
= false;
392 ret
->is_restrict_var
= false;
394 ret
->is_global_var
= (t
== NULL_TREE
);
395 ret
->is_ipa_escape_point
= false;
396 ret
->is_fn_info
= false;
398 ret
->is_global_var
= (is_global_var (t
)
399 /* We have to treat even local register variables
as escape points. */
401 || (VAR_P (t
) && DECL_HARD_REGISTER (t
)));
402 ret
->is_reg_var
= (t
&& TREE_CODE (t
) == SSA_NAME
);
403 ret
->solution
= BITMAP_ALLOC (&pta_obstack
);
404 ret
->oldsolution
= NULL
;
406 ret
->shadow_var_uid
= 0;
411 varmap
.safe_push (ret
);
416 /* A map mapping call statements to per-stmt variables for uses
417 and clobbers specific to the call. */
418 static hash_map
<gimple
*, varinfo_t
> *call_stmt_vars
;
420 /* Lookup or create the variable for the call statement CALL. */
423 get_call_vi (gcall
*call
)
428 varinfo_t
*slot_p
= &call_stmt_vars
->get_or_insert (call
, &existed
);
432 vi
= new_var_info (NULL_TREE
, "CALLUSED", true);
436 vi
->is_full_var
= true;
437 vi
->is_reg_var
= true;
439 vi2
= new_var_info (NULL_TREE
, "CALLCLOBBERED", true);
443 vi2
->is_full_var
= true;
444 vi2
->is_reg_var
= true;
452 /* Lookup the variable for the call statement CALL representing
453 the uses. Returns NULL if there is nothing special about this call. */
456 lookup_call_use_vi (gcall
*call
)
458 varinfo_t
*slot_p
= call_stmt_vars
->get (call
);
465 /* Lookup the variable for the call statement CALL representing
466 the clobbers. Returns NULL if there is nothing special about this call. */
469 lookup_call_clobber_vi (gcall
*call
)
471 varinfo_t uses
= lookup_call_use_vi (call
);
475 return vi_next (uses
);
478 /* Lookup or create the variable for the call statement CALL representing
482 get_call_use_vi (gcall
*call
)
484 return get_call_vi (call
);
487 /* Lookup or create the variable for the call statement CALL representing
490 static varinfo_t ATTRIBUTE_UNUSED
491 get_call_clobber_vi (gcall
*call
)
493 return vi_next (get_call_vi (call
));
497 enum constraint_expr_type
{SCALAR
, DEREF
, ADDRESSOF
};
499 /* An expression that appears in a constraint. */
501 struct constraint_expr
503 /* Constraint type. */
504 constraint_expr_type type
;
506 /* Variable we are referring to in the constraint. */
509 /* Offset, in bits, of this constraint from the beginning of
510 variables it ends up referring to.
512 IOW, in a deref constraint, we would deref, get the result set,
513 then add OFFSET to each member. */
514 HOST_WIDE_INT offset
;
517 /* Use 0x8000... as special unknown offset. */
518 #define UNKNOWN_OFFSET HOST_WIDE_INT_MIN
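/* As an illustration of how constraint offsets are used (a sketch, not
   dumper output): a load such as x = *(p + 4), where 4 is a byte offset on
   a target with 8-bit units, yields a RHS DEREF expression with variable p
   and offset 32; the solver dereferences Sol(p) and adds 32 bits to each
   member to find the fields actually read.  When the offset cannot be
   determined it is represented as UNKNOWN_OFFSET and the solution is
   expanded to cover all sub-fields.  */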
520 typedef struct constraint_expr ce_s
;
521 static void get_constraint_for_1 (tree
, vec
<ce_s
> *, bool, bool);
522 static void get_constraint_for (tree
, vec
<ce_s
> *);
523 static void get_constraint_for_rhs (tree
, vec
<ce_s
> *);
524 static void do_deref (vec
<ce_s
> *);
526 /* Our set constraints are made up of two constraint expressions, one
529 As described in the introduction, our set constraints each represent an
530 operation between set valued variables.
534 struct constraint_expr lhs
;
535 struct constraint_expr rhs
;
538 /* List of constraints that we use to build the constraint graph from. */
540 static vec
<constraint_t
> constraints
;
541 static object_allocator
<constraint
> constraint_pool ("Constraint pool");
543 /* The constraint graph is represented as an array of bitmaps
544 containing successor nodes. */
546 struct constraint_graph
548 /* Size of this graph, which may be different than the number of
549 nodes in the variable map. */
552 /* Explicit successors of each node. */
555 /* Implicit predecessors of each node (Used for variable
557 bitmap
*implicit_preds
;
559 /* Explicit predecessors of each node (Used for variable substitution). */
562 /* Indirect cycle representatives, or -1 if the node has no indirect
564 int *indirect_cycles
;
566 /* Representative node for a node. rep[a] == a unless the node has
570 /* Equivalence class representative for a label. This is used for
571 variable substitution. */
574 /* Pointer equivalence label for a node. All nodes with the same
575 pointer equivalence label can be unified together at some point
576 (either during constraint optimization or after the constraint
580 /* Pointer equivalence representative for a label. This is used to
581 handle nodes that are pointer equivalent but not location
582 equivalent. We can unite these once the addressof constraints
583 are transformed into initial points-to sets. */
586 /* Pointer equivalence label for each node, used during variable
588 unsigned int *pointer_label
;
590 /* Location equivalence label for each node, used during location
591 equivalence finding. */
592 unsigned int *loc_label
;
594 /* Pointed-by set for each node, used during location equivalence
595 finding. This is pointed-by rather than pointed-to, because it
596 is constructed using the predecessor graph. */
599 /* Points to sets for pointer equivalence. This is *not* the actual
600 points-to sets for nodes. */
603 /* Bitmap of nodes where the bit is set if the node is a direct
604 node. Used for variable substitution. */
605 sbitmap direct_nodes
;
607 /* Bitmap of nodes where the bit is set if the node is address
608 taken. Used for variable substitution. */
609 bitmap address_taken
;
611 /* Vector of complex constraints for each graph node. Complex
612 constraints are those involving dereferences or offsets that are
614 vec
<constraint_t
> *complex;
617 static constraint_graph_t graph
;
619 /* During variable substitution and the offline version of indirect
620 cycle finding, we create nodes to represent dereferences and
621 address taken constraints. These represent where these start and
623 #define FIRST_REF_NODE (varmap).length ()
624 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
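/* For example (a sketch of the numbering only): with varmap.length () of
   100, the dereference node *v for the variable with id 5 gets the id
   FIRST_REF_NODE + 5 == 105, and LAST_REF_NODE is 199.  */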
626 /* Return the representative node for NODE, if NODE has been unioned
628 This function performs path compression along the way to finding
629 the representative. */
632 find (unsigned int node
)
634 gcc_checking_assert (node
< graph
->size
);
635 if (graph
->rep
[node
] != node
)
636 return graph
->rep
[node
] = find (graph
->rep
[node
]);
640 /* Union the TO and FROM nodes into the TO node.
641 Note that at some point in the future, we may want to do
642 union-by-rank, in which case we are going to have to return the
643 node we unified to. */
646 unite (unsigned int to
, unsigned int from
)
648 gcc_checking_assert (to
< graph
->size
&& from
< graph
->size
);
649 if (to
!= from
&& graph
->rep
[from
] != to
)
651 graph
->rep
[from
] = to
;
657 /* Create a new constraint consisting of LHS and RHS expressions. */
660 new_constraint (const struct constraint_expr lhs
,
661 const struct constraint_expr rhs
)
663 constraint_t ret
= constraint_pool
.allocate ();
669 /* Print out constraint C to FILE. */
672 dump_constraint (FILE *file
, constraint_t c
)
674 if (c
->lhs
.type
== ADDRESSOF
)
676 else if (c
->lhs
.type
== DEREF
)
678 fprintf (file
, "%s", get_varinfo (c
->lhs
.var
)->name
);
679 if (c
->lhs
.offset
== UNKNOWN_OFFSET
)
680 fprintf (file
, " + UNKNOWN");
681 else if (c
->lhs
.offset
!= 0)
682 fprintf (file
, " + " HOST_WIDE_INT_PRINT_DEC
, c
->lhs
.offset
);
683 fprintf (file
, " = ");
684 if (c
->rhs
.type
== ADDRESSOF
)
686 else if (c
->rhs
.type
== DEREF
)
688 fprintf (file
, "%s", get_varinfo (c
->rhs
.var
)->name
);
689 if (c
->rhs
.offset
== UNKNOWN_OFFSET
)
690 fprintf (file
, " + UNKNOWN");
691 else if (c
->rhs
.offset
!= 0)
692 fprintf (file
, " + " HOST_WIDE_INT_PRINT_DEC
, c
->rhs
.offset
);
696 void debug_constraint (constraint_t
);
697 void debug_constraints (void);
698 void debug_constraint_graph (void);
699 void debug_solution_for_var (unsigned int);
700 void debug_sa_points_to_info (void);
701 void debug_varinfo (varinfo_t
);
702 void debug_varmap (void);
704 /* Print out constraint C to stderr. */
707 debug_constraint (constraint_t c
)
709 dump_constraint (stderr
, c
);
710 fprintf (stderr
, "\n");
713 /* Print out all constraints to FILE */
716 dump_constraints (FILE *file
, int from
)
720 for (i
= from
; constraints
.iterate (i
, &c
); i
++)
723 dump_constraint (file
, c
);
724 fprintf (file
, "\n");
728 /* Print out all constraints to stderr. */
731 debug_constraints (void)
733 dump_constraints (stderr
, 0);
736 /* Print the constraint graph in dot format. */
739 dump_constraint_graph (FILE *file
)
743 /* Only print the graph if it has already been initialized: */
747 /* Prints the header of the dot file: */
748 fprintf (file
, "strict digraph {\n");
749 fprintf (file
, " node [\n shape = box\n ]\n");
750 fprintf (file
, " edge [\n fontsize = \"12\"\n ]\n");
751 fprintf (file
, "\n // List of nodes and complex constraints in "
752 "the constraint graph:\n");
754 /* The next lines print the nodes in the graph together with the
755 complex constraints attached to them. */
756 for (i
= 1; i
< graph
->size
; i
++)
758 if (i
== FIRST_REF_NODE
)
762 if (i
< FIRST_REF_NODE
)
763 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
765 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
766 if (graph
->complex[i
].exists ())
770 fprintf (file
, " [label=\"\\N\\n");
771 for (j
= 0; graph
->complex[i
].iterate (j
, &c
); ++j
)
773 dump_constraint (file
, c
);
774 fprintf (file
, "\\l");
776 fprintf (file
, "\"]");
778 fprintf (file
, ";\n");
781 /* Go over the edges. */
782 fprintf (file
, "\n // Edges in the constraint graph:\n");
783 for (i
= 1; i
< graph
->size
; i
++)
789 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[i
], 0, j
, bi
)
791 unsigned to
= find (j
);
794 if (i
< FIRST_REF_NODE
)
795 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
797 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
798 fprintf (file
, " -> ");
799 if (to
< FIRST_REF_NODE
)
800 fprintf (file
, "\"%s\"", get_varinfo (to
)->name
);
802 fprintf (file
, "\"*%s\"", get_varinfo (to
- FIRST_REF_NODE
)->name
);
803 fprintf (file
, ";\n");
807 /* Prints the tail of the dot file. */
808 fprintf (file
, "}\n");
811 /* Print out the constraint graph to stderr. */
814 debug_constraint_graph (void)
816 dump_constraint_graph (stderr
);
821 The solver is a simple worklist solver that works on the following
algorithm:
824 sbitmap changed_nodes = all zeroes;
826 For each node that is not already collapsed:
828 set bit in changed nodes
830 while (changed_count > 0)
832 compute topological ordering for constraint graph
834 find and collapse cycles in the constraint graph (updating
835 changed if necessary)
837 for each node (n) in the graph in topological order:
840 Process each complex constraint associated with the node,
841 updating changed if necessary.
843 For each outgoing edge from n, propagate the solution from n to
844 the destination of the edge, updating changed as necessary.
848 /* Return true if two constraint expressions A and B are equal. */
851 constraint_expr_equal (struct constraint_expr a
, struct constraint_expr b
)
853 return a
.type
== b
.type
&& a
.var
== b
.var
&& a
.offset
== b
.offset
;
856 /* Return true if constraint expression A is less than constraint expression
857 B. This is just arbitrary, but consistent, in order to give them an
861 constraint_expr_less (struct constraint_expr a
, struct constraint_expr b
)
863 if (a
.type
== b
.type
)
866 return a
.offset
< b
.offset
;
868 return a
.var
< b
.var
;
871 return a
.type
< b
.type
;
874 /* Return true if constraint A is less than constraint B. This is just
875 arbitrary, but consistent, in order to give them an ordering. */
878 constraint_less (const constraint_t
&a
, const constraint_t
&b
)
880 if (constraint_expr_less (a
->lhs
, b
->lhs
))
882 else if (constraint_expr_less (b
->lhs
, a
->lhs
))
885 return constraint_expr_less (a
->rhs
, b
->rhs
);
888 /* Return true if two constraints A and B are equal. */
891 constraint_equal (struct constraint a
, struct constraint b
)
893 return constraint_expr_equal (a
.lhs
, b
.lhs
)
894 && constraint_expr_equal (a
.rhs
, b
.rhs
);
898 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
901 constraint_vec_find (vec
<constraint_t
> vec
,
902 struct constraint lookfor
)
910 place
= vec
.lower_bound (&lookfor
, constraint_less
);
911 if (place
>= vec
.length ())
914 if (!constraint_equal (*found
, lookfor
))
919 /* Union two constraint vectors, TO and FROM. Put the result in TO.
920 Returns true if the TO set is changed. */
923 constraint_set_union (vec
<constraint_t
> *to
,
924 vec
<constraint_t
> *from
)
928 bool any_change
= false;
930 FOR_EACH_VEC_ELT (*from
, i
, c
)
932 if (constraint_vec_find (*to
, *c
) == NULL
)
934 unsigned int place
= to
->lower_bound (c
, constraint_less
);
935 to
->safe_insert (place
, c
);
942 /* Expands the solution in SET to all sub-fields of variables included. */
945 solution_set_expand (bitmap set
, bitmap
*expanded
)
953 *expanded
= BITMAP_ALLOC (&iteration_obstack
);
955 /* In a first pass expand to the head of the variables we need to
956 add all sub-fields of. This avoids quadratic behavior. */
957 EXECUTE_IF_SET_IN_BITMAP (set
, 0, j
, bi
)
959 varinfo_t v
= get_varinfo (j
);
960 if (v
->is_artificial_var
963 bitmap_set_bit (*expanded
, v
->head
);
966 /* In the second pass now expand all head variables with subfields. */
967 EXECUTE_IF_SET_IN_BITMAP (*expanded
, 0, j
, bi
)
969 varinfo_t v
= get_varinfo (j
);
972 for (v
= vi_next (v
); v
!= NULL
; v
= vi_next (v
))
973 bitmap_set_bit (*expanded
, v
->id
);
976 /* And finally set the rest of the bits from SET. */
977 bitmap_ior_into (*expanded
, set
);
982 /* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
process. */
986 set_union_with_increment (bitmap to
, bitmap delta
, HOST_WIDE_INT inc
,
987 bitmap
*expanded_delta
)
989 bool changed
= false;
993 /* If the solution of DELTA contains anything it is good enough to transfer
995 if (bitmap_bit_p (delta
, anything_id
))
996 return bitmap_set_bit (to
, anything_id
);
998 /* If the offset is unknown we have to expand the solution to
1000 if (inc
== UNKNOWN_OFFSET
)
1002 delta
= solution_set_expand (delta
, expanded_delta
);
1003 changed
|= bitmap_ior_into (to
, delta
);
1007 /* For non-zero offset union the offsetted solution into the destination. */
1008 EXECUTE_IF_SET_IN_BITMAP (delta
, 0, i
, bi
)
1010 varinfo_t vi
= get_varinfo (i
);
1012 /* If this is a variable with just one field just set its bit
1014 if (vi
->is_artificial_var
1015 || vi
->is_unknown_size_var
1017 changed
|= bitmap_set_bit (to
, i
);
1020 HOST_WIDE_INT fieldoffset
= vi
->offset
+ inc
;
1021 unsigned HOST_WIDE_INT size
= vi
->size
;
1023 /* If the offset makes the pointer point to before the
1024 variable use offset zero for the field lookup. */
1025 if (fieldoffset
< 0)
1026 vi
= get_varinfo (vi
->head
);
1028 vi
= first_or_preceding_vi_for_offset (vi
, fieldoffset
);
1032 changed
|= bitmap_set_bit (to
, vi
->id
);
1037 /* We have to include all fields that overlap the current field
1041 while (vi
->offset
< fieldoffset
+ size
);
1048 /* Insert constraint C into the list of complex constraints for graph
1052 insert_into_complex (constraint_graph_t graph
,
1053 unsigned int var
, constraint_t c
)
1055 vec
<constraint_t
> complex = graph
->complex[var
];
1056 unsigned int place
= complex.lower_bound (c
, constraint_less
);
1058 /* Only insert constraints that do not already exist. */
1059 if (place
>= complex.length ()
1060 || !constraint_equal (*c
, *complex[place
]))
1061 graph
->complex[var
].safe_insert (place
, c
);
1065 /* Condense two variable nodes into a single variable node, by moving
1066 all associated info from FROM to TO. Returns true if TO node's
1067 constraint set changes after the merge. */
1070 merge_node_constraints (constraint_graph_t graph
, unsigned int to
,
1075 bool any_change
= false;
1077 gcc_checking_assert (find (from
) == to
);
1079 /* Move all complex constraints from the FROM node into the TO node. */
1080 FOR_EACH_VEC_ELT (graph
->complex[from
], i
, c
)
1082 /* In complex constraints for node FROM, we may have either
1083 a = *FROM or *FROM = a, or an offsetted constraint, all of which are
1084 always added to the rhs node's constraints. */
1086 if (c
->rhs
.type
== DEREF
)
1088 else if (c
->lhs
.type
== DEREF
)
1094 any_change
= constraint_set_union (&graph
->complex[to
],
1095 &graph
->complex[from
]);
1096 graph
->complex[from
].release ();
1101 /* Remove edges involving NODE from GRAPH. */
1104 clear_edges_for_node (constraint_graph_t graph
, unsigned int node
)
1106 if (graph
->succs
[node
])
1107 BITMAP_FREE (graph
->succs
[node
]);
1110 /* Merge GRAPH nodes FROM and TO into node TO. */
1113 merge_graph_nodes (constraint_graph_t graph
, unsigned int to
,
1116 if (graph
->indirect_cycles
[from
] != -1)
1118 /* If we have indirect cycles with the from node, and we have
1119 none on the to node, the to node has indirect cycles from the
1120 from node now that they are unified.
1121 If indirect cycles exist on both, unify the nodes that they
1122 are in a cycle with, since we know they are in a cycle with
1124 if (graph
->indirect_cycles
[to
] == -1)
1125 graph
->indirect_cycles
[to
] = graph
->indirect_cycles
[from
];
1128 /* Merge all the successor edges. */
1129 if (graph
->succs
[from
])
1131 if (!graph
->succs
[to
])
1132 graph
->succs
[to
] = BITMAP_ALLOC (&pta_obstack
);
1133 bitmap_ior_into (graph
->succs
[to
],
1134 graph
->succs
[from
]);
1137 clear_edges_for_node (graph
, from
);
1141 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1142 it doesn't exist in the graph already. */
1145 add_implicit_graph_edge (constraint_graph_t graph
, unsigned int to
,
1151 if (!graph
->implicit_preds
[to
])
1152 graph
->implicit_preds
[to
] = BITMAP_ALLOC (&predbitmap_obstack
);
1154 if (bitmap_set_bit (graph
->implicit_preds
[to
], from
))
1155 stats
.num_implicit_edges
++;
1158 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1159 it doesn't exist in the graph already.
1160 Return false if the edge already existed, true otherwise. */
1163 add_pred_graph_edge (constraint_graph_t graph
, unsigned int to
,
1166 if (!graph
->preds
[to
])
1167 graph
->preds
[to
] = BITMAP_ALLOC (&predbitmap_obstack
);
1168 bitmap_set_bit (graph
->preds
[to
], from
);
1171 /* Add a graph edge to GRAPH, going from FROM to TO if
1172 it doesn't exist in the graph already.
1173 Return false if the edge already existed, true otherwise. */
1176 add_graph_edge (constraint_graph_t graph
, unsigned int to
,
1187 if (!graph
->succs
[from
])
1188 graph
->succs
[from
] = BITMAP_ALLOC (&pta_obstack
);
1189 if (bitmap_set_bit (graph
->succs
[from
], to
))
1192 if (to
< FIRST_REF_NODE
&& from
< FIRST_REF_NODE
)
1200 /* Initialize the constraint graph structure to contain SIZE nodes. */
1203 init_graph (unsigned int size
)
1207 graph
= XCNEW (struct constraint_graph
);
1209 graph
->succs
= XCNEWVEC (bitmap
, graph
->size
);
1210 graph
->indirect_cycles
= XNEWVEC (int, graph
->size
);
1211 graph
->rep
= XNEWVEC (unsigned int, graph
->size
);
1212 /* ??? Macros do not support template types with multiple arguments,
1213 so we use a typedef to work around it. */
1214 typedef vec
<constraint_t
> vec_constraint_t_heap
;
1215 graph
->complex = XCNEWVEC (vec_constraint_t_heap
, size
);
1216 graph
->pe
= XCNEWVEC (unsigned int, graph
->size
);
1217 graph
->pe_rep
= XNEWVEC (int, graph
->size
);
1219 for (j
= 0; j
< graph
->size
; j
++)
1222 graph
->pe_rep
[j
] = -1;
1223 graph
->indirect_cycles
[j
] = -1;
1227 /* Build the constraint graph, adding only predecessor edges right now. */
1230 build_pred_graph (void)
1236 graph
->implicit_preds
= XCNEWVEC (bitmap
, graph
->size
);
1237 graph
->preds
= XCNEWVEC (bitmap
, graph
->size
);
1238 graph
->pointer_label
= XCNEWVEC (unsigned int, graph
->size
);
1239 graph
->loc_label
= XCNEWVEC (unsigned int, graph
->size
);
1240 graph
->pointed_by
= XCNEWVEC (bitmap
, graph
->size
);
1241 graph
->points_to
= XCNEWVEC (bitmap
, graph
->size
);
1242 graph
->eq_rep
= XNEWVEC (int, graph
->size
);
1243 graph
->direct_nodes
= sbitmap_alloc (graph
->size
);
1244 graph
->address_taken
= BITMAP_ALLOC (&predbitmap_obstack
);
1245 bitmap_clear (graph
->direct_nodes
);
1247 for (j
= 1; j
< FIRST_REF_NODE
; j
++)
1249 if (!get_varinfo (j
)->is_special_var
)
1250 bitmap_set_bit (graph
->direct_nodes
, j
);
1253 for (j
= 0; j
< graph
->size
; j
++)
1254 graph
->eq_rep
[j
] = -1;
1256 for (j
= 0; j
< varmap
.length (); j
++)
1257 graph
->indirect_cycles
[j
] = -1;
1259 FOR_EACH_VEC_ELT (constraints
, i
, c
)
1261 struct constraint_expr lhs
= c
->lhs
;
1262 struct constraint_expr rhs
= c
->rhs
;
1263 unsigned int lhsvar
= lhs
.var
;
1264 unsigned int rhsvar
= rhs
.var
;
1266 if (lhs
.type
== DEREF
)
1269 if (rhs
.offset
== 0 && lhs
.offset
== 0 && rhs
.type
== SCALAR
)
1270 add_pred_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1272 else if (rhs
.type
== DEREF
)
1275 if (rhs
.offset
== 0 && lhs
.offset
== 0 && lhs
.type
== SCALAR
)
1276 add_pred_graph_edge (graph
, lhsvar
, FIRST_REF_NODE
+ rhsvar
);
1278 bitmap_clear_bit (graph
->direct_nodes
, lhsvar
);
1280 else if (rhs
.type
== ADDRESSOF
)
1285 if (graph
->points_to
[lhsvar
] == NULL
)
1286 graph
->points_to
[lhsvar
] = BITMAP_ALLOC (&predbitmap_obstack
);
1287 bitmap_set_bit (graph
->points_to
[lhsvar
], rhsvar
);
1289 if (graph
->pointed_by
[rhsvar
] == NULL
)
1290 graph
->pointed_by
[rhsvar
] = BITMAP_ALLOC (&predbitmap_obstack
);
1291 bitmap_set_bit (graph
->pointed_by
[rhsvar
], lhsvar
);
1293 /* Implicitly, *x = y */
1294 add_implicit_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1296 /* All related variables are no longer direct nodes. */
1297 bitmap_clear_bit (graph
->direct_nodes
, rhsvar
);
1298 v
= get_varinfo (rhsvar
);
1299 if (!v
->is_full_var
)
1301 v
= get_varinfo (v
->head
);
1304 bitmap_clear_bit (graph
->direct_nodes
, v
->id
);
1309 bitmap_set_bit (graph
->address_taken
, rhsvar
);
1311 else if (lhsvar
> anything_id
1312 && lhsvar
!= rhsvar
&& lhs
.offset
== 0 && rhs
.offset
== 0)
1315 add_pred_graph_edge (graph
, lhsvar
, rhsvar
);
1316 /* Implicitly, *x = *y */
1317 add_implicit_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
,
1318 FIRST_REF_NODE
+ rhsvar
);
1320 else if (lhs
.offset
!= 0 || rhs
.offset
!= 0)
1322 if (rhs
.offset
!= 0)
1323 bitmap_clear_bit (graph
->direct_nodes
, lhs
.var
);
1324 else if (lhs
.offset
!= 0)
1325 bitmap_clear_bit (graph
->direct_nodes
, rhs
.var
);
1330 /* Build the constraint graph, adding successor edges. */
1333 build_succ_graph (void)
1338 FOR_EACH_VEC_ELT (constraints
, i
, c
)
1340 struct constraint_expr lhs
;
1341 struct constraint_expr rhs
;
1342 unsigned int lhsvar
;
1343 unsigned int rhsvar
;
1350 lhsvar
= find (lhs
.var
);
1351 rhsvar
= find (rhs
.var
);
1353 if (lhs
.type
== DEREF
)
1355 if (rhs
.offset
== 0 && lhs
.offset
== 0 && rhs
.type
== SCALAR
)
1356 add_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1358 else if (rhs
.type
== DEREF
)
1360 if (rhs
.offset
== 0 && lhs
.offset
== 0 && lhs
.type
== SCALAR
)
1361 add_graph_edge (graph
, lhsvar
, FIRST_REF_NODE
+ rhsvar
);
1363 else if (rhs
.type
== ADDRESSOF
)
1366 gcc_checking_assert (find (rhs
.var
) == rhs
.var
);
1367 bitmap_set_bit (get_varinfo (lhsvar
)->solution
, rhsvar
);
1369 else if (lhsvar
> anything_id
1370 && lhsvar
!= rhsvar
&& lhs
.offset
== 0 && rhs
.offset
== 0)
1372 add_graph_edge (graph
, lhsvar
, rhsvar
);
1376 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1377 receive pointers. */
1378 t
= find (storedanything_id
);
1379 for (i
= integer_id
+ 1; i
< FIRST_REF_NODE
; ++i
)
1381 if (!bitmap_bit_p (graph
->direct_nodes
, i
)
1382 && get_varinfo (i
)->may_have_pointers
)
1383 add_graph_edge (graph
, find (i
), t
);
1386 /* Everything stored to ANYTHING also potentially escapes. */
1387 add_graph_edge (graph
, find (escaped_id
), t
);
1391 /* Changed variables on the last iteration. */
1392 static bitmap changed
;
1394 /* Strongly Connected Component visitation info. */
1399 scc_info (size_t size
);
1402 auto_sbitmap visited
;
1403 auto_sbitmap deleted
;
1405 unsigned int *node_mapping
;
1407 auto_vec
<unsigned> scc_stack
;
1411 /* Recursive routine to find strongly connected components in GRAPH.
1412 SI is the SCC info to store the information in, and N is the id of current
1413 graph node we are processing.
1415 This is Tarjan's strongly connected component finding algorithm, as
1416 modified by Nuutila to keep only non-root nodes on the stack.
1417 The algorithm can be found in "On finding the strongly connected
1418 components in a directed graph" by Esko Nuutila and Eljas
1419 Soisalon-Soininen, in Information Processing Letters volume 49,
1420 number 1, pages 9-14. */
1423 scc_visit (constraint_graph_t graph
, class scc_info
*si
, unsigned int n
)
1427 unsigned int my_dfs
;
1429 bitmap_set_bit (si
->visited
, n
);
1430 si
->dfs
[n
] = si
->current_index
++;
1431 my_dfs
= si
->dfs
[n
];
1433 /* Visit all the successors. */
1434 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[n
], 0, i
, bi
)
1438 if (i
> LAST_REF_NODE
)
1442 if (bitmap_bit_p (si
->deleted
, w
))
1445 if (!bitmap_bit_p (si
->visited
, w
))
1446 scc_visit (graph
, si
, w
);
1448 unsigned int t
= find (w
);
1449 gcc_checking_assert (find (n
) == n
);
1450 if (si
->dfs
[t
] < si
->dfs
[n
])
1451 si
->dfs
[n
] = si
->dfs
[t
];
1454 /* See if any components have been identified. */
1455 if (si
->dfs
[n
] == my_dfs
)
1457 if (si
->scc_stack
.length () > 0
1458 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
1460 bitmap scc
= BITMAP_ALLOC (NULL
);
1461 unsigned int lowest_node
;
1464 bitmap_set_bit (scc
, n
);
1466 while (si
->scc_stack
.length () != 0
1467 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
1469 unsigned int w
= si
->scc_stack
.pop ();
1471 bitmap_set_bit (scc
, w
);
1474 lowest_node
= bitmap_first_set_bit (scc
);
1475 gcc_assert (lowest_node
< FIRST_REF_NODE
);
1477 /* Collapse the SCC nodes into a single node, and mark the
1479 EXECUTE_IF_SET_IN_BITMAP (scc
, 0, i
, bi
)
1481 if (i
< FIRST_REF_NODE
)
1483 if (unite (lowest_node
, i
))
1484 unify_nodes (graph
, lowest_node
, i
, false);
1488 unite (lowest_node
, i
);
1489 graph
->indirect_cycles
[i
- FIRST_REF_NODE
] = lowest_node
;
1493 bitmap_set_bit (si
->deleted
, n
);
1496 si
->scc_stack
.safe_push (n
);
1499 /* Unify node FROM into node TO, updating the changed count if
1500 necessary when UPDATE_CHANGED is true. */
1503 unify_nodes (constraint_graph_t graph
, unsigned int to
, unsigned int from
,
1504 bool update_changed
)
1506 gcc_checking_assert (to
!= from
&& find (to
) == to
);
1508 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1509 fprintf (dump_file
, "Unifying %s to %s\n",
1510 get_varinfo (from
)->name
,
1511 get_varinfo (to
)->name
);
1514 stats
.unified_vars_dynamic
++;
1516 stats
.unified_vars_static
++;
1518 merge_graph_nodes (graph
, to
, from
);
1519 if (merge_node_constraints (graph
, to
, from
))
1522 bitmap_set_bit (changed
, to
);
1525 /* Mark TO as changed if FROM was changed. If TO was already marked
1526 as changed, decrease the changed count. */
1529 && bitmap_clear_bit (changed
, from
))
1530 bitmap_set_bit (changed
, to
);
1531 varinfo_t fromvi
= get_varinfo (from
);
1532 if (fromvi
->solution
)
1534 /* If the solution changes because of the merging, we need to mark
1535 the variable as changed. */
1536 varinfo_t tovi
= get_varinfo (to
);
1537 if (bitmap_ior_into (tovi
->solution
, fromvi
->solution
))
1540 bitmap_set_bit (changed
, to
);
1543 BITMAP_FREE (fromvi
->solution
);
1544 if (fromvi
->oldsolution
)
1545 BITMAP_FREE (fromvi
->oldsolution
);
1547 if (stats
.iterations
> 0
1548 && tovi
->oldsolution
)
1549 BITMAP_FREE (tovi
->oldsolution
);
1551 if (graph
->succs
[to
])
1552 bitmap_clear_bit (graph
->succs
[to
], to
);
1555 /* Information needed to compute the topological ordering of a graph. */
1559 /* sbitmap of visited nodes. */
1561 /* Array that stores the topological order of the graph, *in
1563 vec
<unsigned> topo_order
;
1567 /* Initialize and return a topological info structure. */
1569 static struct topo_info
*
1570 init_topo_info (void)
1572 size_t size
= graph
->size
;
1573 struct topo_info
*ti
= XNEW (struct topo_info
);
1574 ti
->visited
= sbitmap_alloc (size
);
1575 bitmap_clear (ti
->visited
);
1576 ti
->topo_order
.create (1);
1581 /* Free the topological sort info pointed to by TI. */
1584 free_topo_info (struct topo_info
*ti
)
1586 sbitmap_free (ti
->visited
);
1587 ti
->topo_order
.release ();
1591 /* Visit the graph in topological order, and store the order in the
1592 topo_info structure. */
1595 topo_visit (constraint_graph_t graph
, struct topo_info
*ti
,
1601 bitmap_set_bit (ti
->visited
, n
);
1603 if (graph
->succs
[n
])
1604 EXECUTE_IF_SET_IN_BITMAP (graph
->succs
[n
], 0, j
, bi
)
1606 if (!bitmap_bit_p (ti
->visited
, j
))
1607 topo_visit (graph
, ti
, j
);
1610 ti
->topo_order
.safe_push (n
);
1613 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1614 starting solution for y. */
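/* For instance (a sketch): for x = *y with Sol(y) = { a, b } this adds the
   graph edges a -> x and b -> x and unions Sol(a) and Sol(b) into Sol(x);
   with a non-zero offset the fields of a and b at that offset are used
   instead.  */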
1617 do_sd_constraint (constraint_graph_t graph
, constraint_t c
,
1618 bitmap delta
, bitmap
*expanded_delta
)
1620 unsigned int lhs
= c
->lhs
.var
;
1622 bitmap sol
= get_varinfo (lhs
)->solution
;
1625 HOST_WIDE_INT roffset
= c
->rhs
.offset
;
1627 /* Our IL does not allow this. */
1628 gcc_checking_assert (c
->lhs
.offset
== 0);
1630 /* If the solution of Y contains anything it is good enough to transfer
1632 if (bitmap_bit_p (delta
, anything_id
))
1634 flag
|= bitmap_set_bit (sol
, anything_id
);
1638 /* If we do not know at which offset the rhs is dereferenced compute
1639 the reachability set of DELTA, conservatively assuming it is
1640 dereferenced at all valid offsets. */
1641 if (roffset
== UNKNOWN_OFFSET
)
1643 delta
= solution_set_expand (delta
, expanded_delta
);
1644 /* No further offset processing is necessary. */
1648 /* For each variable j in delta (Sol(y)), add
1649 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1650 EXECUTE_IF_SET_IN_BITMAP (delta
, 0, j
, bi
)
1652 varinfo_t v
= get_varinfo (j
);
1653 HOST_WIDE_INT fieldoffset
= v
->offset
+ roffset
;
1654 unsigned HOST_WIDE_INT size
= v
->size
;
1659 else if (roffset
!= 0)
1661 if (fieldoffset
< 0)
1662 v
= get_varinfo (v
->head
);
1664 v
= first_or_preceding_vi_for_offset (v
, fieldoffset
);
1667 /* We have to include all fields that overlap the current field
1668 shifted by roffset. */
1673 /* Adding edges from the special vars is pointless.
1674 They don't have sets that can change. */
1675 if (get_varinfo (t
)->is_special_var
)
1676 flag
|= bitmap_ior_into (sol
, get_varinfo (t
)->solution
);
1677 /* Merging the solution from ESCAPED needlessly increases
1678 the set. Use ESCAPED as representative instead. */
1679 else if (v
->id
== escaped_id
)
1680 flag
|= bitmap_set_bit (sol
, escaped_id
);
1681 else if (v
->may_have_pointers
1682 && add_graph_edge (graph
, lhs
, t
))
1683 flag
|= bitmap_ior_into (sol
, get_varinfo (t
)->solution
);
1691 while (v
->offset
< fieldoffset
+ size
);
1695 /* If the LHS solution changed, mark the var as changed. */
1698 get_varinfo (lhs
)->solution
= sol
;
1699 bitmap_set_bit (changed
, lhs
);
1703 /* Process a constraint C that represents *(x + off) = y using DELTA
1704 as the starting solution for x. */
1707 do_ds_constraint (constraint_t c
, bitmap delta
, bitmap
*expanded_delta
)
1709 unsigned int rhs
= c
->rhs
.var
;
1710 bitmap sol
= get_varinfo (rhs
)->solution
;
1713 HOST_WIDE_INT loff
= c
->lhs
.offset
;
1714 bool escaped_p
= false;
1716 /* Our IL does not allow this. */
1717 gcc_checking_assert (c
->rhs
.offset
== 0);
1719 /* If the solution of y contains ANYTHING simply use the ANYTHING
1720 solution. This avoids needlessly increasing the points-to sets. */
1721 if (bitmap_bit_p (sol
, anything_id
))
1722 sol
= get_varinfo (find (anything_id
))->solution
;
1724 /* If the solution for x contains ANYTHING we have to merge the
1725 solution of y into all pointer variables which we do via
1727 if (bitmap_bit_p (delta
, anything_id
))
1729 unsigned t
= find (storedanything_id
);
1730 if (add_graph_edge (graph
, t
, rhs
))
1732 if (bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1733 bitmap_set_bit (changed
, t
);
1738 /* If we do not know at which offset the lhs is dereferenced compute
1739 the reachability set of DELTA, conservatively assuming it is
1740 dereferenced at all valid offsets. */
1741 if (loff
== UNKNOWN_OFFSET
)
1743 delta
= solution_set_expand (delta
, expanded_delta
);
1747 /* For each member j of delta (Sol(x)), add an edge from y to j and
1748 union Sol(y) into Sol(j) */
1749 EXECUTE_IF_SET_IN_BITMAP (delta
, 0, j
, bi
)
1751 varinfo_t v
= get_varinfo (j
);
1753 HOST_WIDE_INT fieldoffset
= v
->offset
+ loff
;
1754 unsigned HOST_WIDE_INT size
= v
->size
;
1760 if (fieldoffset
< 0)
1761 v
= get_varinfo (v
->head
);
1763 v
= first_or_preceding_vi_for_offset (v
, fieldoffset
);
1766 /* We have to include all fields that overlap the current field
1770 if (v
->may_have_pointers
)
1772 /* If v is a global variable then this is an escape point. */
1773 if (v
->is_global_var
1776 t
= find (escaped_id
);
1777 if (add_graph_edge (graph
, t
, rhs
)
1778 && bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1779 bitmap_set_bit (changed
, t
);
1780 /* Enough to let rhs escape once. */
1784 if (v
->is_special_var
)
1788 if (add_graph_edge (graph
, t
, rhs
)
1789 && bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1790 bitmap_set_bit (changed
, t
);
1799 while (v
->offset
< fieldoffset
+ size
);
1803 /* Handle a non-simple (simple meaning requires no iteration)
1804 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1807 do_complex_constraint (constraint_graph_t graph
, constraint_t c
, bitmap delta
,
1808 bitmap
*expanded_delta
)
1810 if (c
->lhs
.type
== DEREF
)
1812 if (c
->rhs
.type
== ADDRESSOF
)
1819 do_ds_constraint (c
, delta
, expanded_delta
);
1822 else if (c
->rhs
.type
== DEREF
)
1825 if (!(get_varinfo (c
->lhs
.var
)->is_special_var
))
1826 do_sd_constraint (graph
, c
, delta
, expanded_delta
);
1833 gcc_checking_assert (c
->rhs
.type
== SCALAR
&& c
->lhs
.type
== SCALAR
1834 && c
->rhs
.offset
!= 0 && c
->lhs
.offset
== 0);
1835 tmp
= get_varinfo (c
->lhs
.var
)->solution
;
1837 flag
= set_union_with_increment (tmp
, delta
, c
->rhs
.offset
,
1841 bitmap_set_bit (changed
, c
->lhs
.var
);
1845 /* Initialize and return a new SCC info structure. */
1847 scc_info::scc_info (size_t size
) :
1848 visited (size
), deleted (size
), current_index (0), scc_stack (1)
1850 bitmap_clear (visited
);
1851 bitmap_clear (deleted
);
1852 node_mapping
= XNEWVEC (unsigned int, size
);
1853 dfs
= XCNEWVEC (unsigned int, size
);
1855 for (size_t i
= 0; i
< size
; i
++)
1856 node_mapping
[i
] = i
;
1859 /* Free an SCC info structure pointed to by SI */
1861 scc_info::~scc_info ()
1863 free (node_mapping
);
1868 /* Find indirect cycles in GRAPH that occur, using strongly connected
1869 components, and note them in the indirect cycles map.
1871 This technique comes from Ben Hardekopf and Calvin Lin,
1872 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1873 Lines of Code", submitted to PLDI 2007. */
1876 find_indirect_cycles (constraint_graph_t graph
)
1879 unsigned int size
= graph
->size
;
1882 for (i
= 0; i
< MIN (LAST_REF_NODE
, size
); i
++ )
1883 if (!bitmap_bit_p (si
.visited
, i
) && find (i
) == i
)
1884 scc_visit (graph
, &si
, i
);
1887 /* Compute a topological ordering for GRAPH, and store the result in the
1888 topo_info structure TI. */
1891 compute_topo_order (constraint_graph_t graph
,
1892 struct topo_info
*ti
)
1895 unsigned int size
= graph
->size
;
1897 for (i
= 0; i
!= size
; ++i
)
1898 if (!bitmap_bit_p (ti
->visited
, i
) && find (i
) == i
)
1899 topo_visit (graph
, ti
, i
);
1902 /* Structure used for hash value numbering of pointer equivalence
1905 typedef struct equiv_class_label
1908 unsigned int equivalence_class
;
1910 } *equiv_class_label_t
;
1911 typedef const struct equiv_class_label
*const_equiv_class_label_t
;
1913 /* Equiv_class_label hashtable helpers. */
1915 struct equiv_class_hasher
: free_ptr_hash
<equiv_class_label
>
1917 static inline hashval_t
hash (const equiv_class_label
*);
1918 static inline bool equal (const equiv_class_label
*,
1919 const equiv_class_label
*);
1922 /* Hash function for an equiv_class_label_t. */
1925 equiv_class_hasher::hash (const equiv_class_label
*ecl
)
1927 return ecl
->hashcode
;
1930 /* Equality function for two equiv_class_label_t's. */
1933 equiv_class_hasher::equal (const equiv_class_label
*eql1
,
1934 const equiv_class_label
*eql2
)
1936 return (eql1
->hashcode
== eql2
->hashcode
1937 && bitmap_equal_p (eql1
->labels
, eql2
->labels
));
1940 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1942 static hash_table
<equiv_class_hasher
> *pointer_equiv_class_table
;
1944 /* A hashtable for mapping a bitmap of labels->location equivalence
1946 static hash_table
<equiv_class_hasher
> *location_equiv_class_table
;
1948 /* Lookup an equivalence class in TABLE by the bitmap of LABELS with
1949 hash HAS it contains. Sets *REF_LABELS to the bitmap LABELS
1950 is equivalent to. */
1952 static equiv_class_label
*
1953 equiv_class_lookup_or_add (hash_table
<equiv_class_hasher
> *table
,
1956 equiv_class_label
**slot
;
1957 equiv_class_label ecl
;
1959 ecl
.labels
= labels
;
1960 ecl
.hashcode
= bitmap_hash (labels
);
1961 slot
= table
->find_slot (&ecl
, INSERT
);
1964 *slot
= XNEW (struct equiv_class_label
);
1965 (*slot
)->labels
= labels
;
1966 (*slot
)->hashcode
= ecl
.hashcode
;
1967 (*slot
)->equivalence_class
= 0;
1973 /* Perform offline variable substitution.
1975 This is a worst case quadratic time way of identifying variables
1976 that must have equivalent points-to sets, including those caused by
1977 static cycles, and single entry subgraphs, in the constraint graph.
1979 The technique is described in "Exploiting Pointer and Location
1980 Equivalence to Optimize Pointer Analysis. In the 14th International
1981 Static Analysis Symposium (SAS), August 2007." It is known as the
1982 "HU" algorithm, and is equivalent to value numbering the collapsed
1983 constraint graph including evaluating unions.
1985 The general method of finding equivalence classes is as follows:
1986 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1987 Initialize all non-REF nodes to be direct nodes.
1988 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1990 For each constraint containing the dereference, we also do the same
1993 We then compute SCC's in the graph and unify nodes in the same SCC,
1996 For each non-collapsed node x:
1997 Visit all unvisited explicit incoming edges.
1998 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
2000 Lookup the equivalence class for pts(x).
2001 If we found one, equivalence_class(x) = found class.
2002 Otherwise, equivalence_class(x) = new class, and new_class is
2003 added to the lookup table.
2005 All direct nodes with the same equivalence class can be replaced
2006 with a single representative node.
2007 All unlabeled nodes (label == 0) are not pointers and all edges
2008 involving them can be eliminated.
2009 We perform these optimizations during rewrite_constraints
2011 In addition to pointer equivalence class finding, we also perform
2012 location equivalence class finding. This is the set of variables
2013 that always appear together in points-to sets. We use this to
2014 compress the size of the points-to sets. */
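/* As a sketch of the idea (not labels actually produced by the pass): with
   only the constraints x = y and z = y, value numbering builds the points-to
   hashes of x and of z from exactly the same inputs, so both receive the same
   pointer equivalence label and one node can stand in for the other during
   solving.  */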
2016 /* Current maximum pointer equivalence class id. */
2017 static int pointer_equiv_class
;
2019 /* Current maximum location equivalence class id. */
2020 static int location_equiv_class
;
2022 /* Recursive routine to find strongly connected components in GRAPH,
2023 and label its nodes with DFS numbers. */
2026 condense_visit (constraint_graph_t graph
, class scc_info
*si
, unsigned int n
)
2030 unsigned int my_dfs
;
2032 gcc_checking_assert (si
->node_mapping
[n
] == n
);
2033 bitmap_set_bit (si
->visited
, n
);
2034 si
->dfs
[n
] = si
->current_index
++;
2035 my_dfs
= si
->dfs
[n
];
2037 /* Visit all the successors. */
2038 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[n
], 0, i
, bi
)
2040 unsigned int w
= si
->node_mapping
[i
];
2042 if (bitmap_bit_p (si
->deleted
, w
))
2045 if (!bitmap_bit_p (si
->visited
, w
))
2046 condense_visit (graph
, si
, w
);
2048 unsigned int t
= si
->node_mapping
[w
];
2049 gcc_checking_assert (si
->node_mapping
[n
] == n
);
2050 if (si
->dfs
[t
] < si
->dfs
[n
])
2051 si
->dfs
[n
] = si
->dfs
[t
];
2054 /* Visit all the implicit predecessors. */
2055 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->implicit_preds
[n
], 0, i
, bi
)
2057 unsigned int w
= si
->node_mapping
[i
];
2059 if (bitmap_bit_p (si
->deleted
, w
))
2062 if (!bitmap_bit_p (si
->visited
, w
))
2063 condense_visit (graph
, si
, w
);
2065 unsigned int t
= si
->node_mapping
[w
];
2066 gcc_assert (si
->node_mapping
[n
] == n
);
2067 if (si
->dfs
[t
] < si
->dfs
[n
])
2068 si
->dfs
[n
] = si
->dfs
[t
];
2071 /* See if any components have been identified. */
2072 if (si
->dfs
[n
] == my_dfs
)
2074 if (si
->scc_stack
.length () != 0
2075 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
2077 /* Find the first node of the SCC and do non-bitmap work. */
2078 bool direct_p
= true;
2079 unsigned first
= si
->scc_stack
.length ();
2083 unsigned int w
= si
->scc_stack
[first
];
2084 si
->node_mapping
[w
] = n
;
2085 if (!bitmap_bit_p (graph
->direct_nodes
, w
))
2089 && si
->dfs
[si
->scc_stack
[first
- 1]] >= my_dfs
);
2091 bitmap_clear_bit (graph
->direct_nodes
, n
);
2093 /* Want to reduce to node n, push that first. */
2094 si
->scc_stack
.reserve (1);
2095 si
->scc_stack
.quick_push (si
->scc_stack
[first
]);
2096 si
->scc_stack
[first
] = n
;
2098 unsigned scc_size
= si
->scc_stack
.length () - first
;
2099 unsigned split
= scc_size
/ 2;
2100 unsigned carry
= scc_size
- split
* 2;
2103 for (unsigned i
= 0; i
< split
; ++i
)
2105 unsigned a
= si
->scc_stack
[first
+ i
];
2106 unsigned b
= si
->scc_stack
[first
+ split
+ carry
+ i
];
2108 /* Unify our nodes. */
2109 if (graph
->preds
[b
])
2111 if (!graph
->preds
[a
])
2112 std::swap (graph
->preds
[a
], graph
->preds
[b
]);
2114 bitmap_ior_into_and_free (graph
->preds
[a
],
2117 if (graph
->implicit_preds
[b
])
2119 if (!graph
->implicit_preds
[a
])
2120 std::swap (graph
->implicit_preds
[a
],
2121 graph
->implicit_preds
[b
]);
2123 bitmap_ior_into_and_free (graph
->implicit_preds
[a
],
2124 &graph
->implicit_preds
[b
]);
2126 if (graph
->points_to
[b
])
2128 if (!graph
->points_to
[a
])
2129 std::swap (graph
->points_to
[a
], graph
->points_to
[b
]);
2131 bitmap_ior_into_and_free (graph
->points_to
[a
],
2132 &graph
->points_to
[b
]);
2135 unsigned remain
= split
+ carry
;
2137 carry
= remain
- split
* 2;
2139 /* Actually pop the SCC. */
2140 si
->scc_stack
.truncate (first
);
2142 bitmap_set_bit (si
->deleted
, n
);
2145 si
->scc_stack
.safe_push (n
);
2148 /* Label pointer equivalences.
2150 This performs a value numbering of the constraint graph to
2151 discover which variables will always have the same points-to sets
2152 under the current set of constraints.
2154 The way it value numbers is to store the set of points-to bits
2155 generated by the constraints and graph edges. This is just used as a
2156 hash and equality comparison. The *actual set of points-to bits* is
2157 completely irrelevant, in that we don't care about being able to
2160 The equality values (currently bitmaps) just have to satisfy a few
2161 constraints, the main ones being:
2162 1. The combining operation must be order independent.
2163 2. The end result of a given set of operations must be unique iff the
2164 combination of input values is unique
2168 label_visit (constraint_graph_t graph
, class scc_info
*si
, unsigned int n
)
2170 unsigned int i
, first_pred
;
2173 bitmap_set_bit (si
->visited
, n
);
/* Label and union our incoming edges' points-to sets.  */
2177 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[n
], 0, i
, bi
)
2179 unsigned int w
= si
->node_mapping
[i
];
2180 if (!bitmap_bit_p (si
->visited
, w
))
2181 label_visit (graph
, si
, w
);
2183 /* Skip unused edges */
2184 if (w
== n
|| graph
->pointer_label
[w
] == 0)
2187 if (graph
->points_to
[w
])
2189 if (!graph
->points_to
[n
])
2191 if (first_pred
== -1U)
2195 graph
->points_to
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2196 bitmap_ior (graph
->points_to
[n
],
2197 graph
->points_to
[first_pred
],
2198 graph
->points_to
[w
]);
2202 bitmap_ior_into (graph
->points_to
[n
], graph
->points_to
[w
]);
2206 /* Indirect nodes get fresh variables and a new pointer equiv class. */
2207 if (!bitmap_bit_p (graph
->direct_nodes
, n
))
2209 if (!graph
->points_to
[n
])
2211 graph
->points_to
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2212 if (first_pred
!= -1U)
2213 bitmap_copy (graph
->points_to
[n
], graph
->points_to
[first_pred
]);
2215 bitmap_set_bit (graph
->points_to
[n
], FIRST_REF_NODE
+ n
);
2216 graph
->pointer_label
[n
] = pointer_equiv_class
++;
2217 equiv_class_label_t ecl
;
2218 ecl
= equiv_class_lookup_or_add (pointer_equiv_class_table
,
2219 graph
->points_to
[n
]);
2220 ecl
->equivalence_class
= graph
->pointer_label
[n
];
2224 /* If there was only a single non-empty predecessor the pointer equiv
2225 class is the same. */
2226 if (!graph
->points_to
[n
])
2228 if (first_pred
!= -1U)
2230 graph
->pointer_label
[n
] = graph
->pointer_label
[first_pred
];
2231 graph
->points_to
[n
] = graph
->points_to
[first_pred
];
2236 if (!bitmap_empty_p (graph
->points_to
[n
]))
2238 equiv_class_label_t ecl
;
2239 ecl
= equiv_class_lookup_or_add (pointer_equiv_class_table
,
2240 graph
->points_to
[n
]);
2241 if (ecl
->equivalence_class
== 0)
2242 ecl
->equivalence_class
= pointer_equiv_class
++;
2245 BITMAP_FREE (graph
->points_to
[n
]);
2246 graph
->points_to
[n
] = ecl
->labels
;
2248 graph
->pointer_label
[n
] = ecl
->equivalence_class
;
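/* Illustrative sketch (not part of the analyzer): the labeling above only
   uses the unioned predecessor points-to sets as hash/equality keys.  The
   stand-alone toy below does the same with plain bitmasks: two nodes whose
   incoming sets union to the same bits receive the same label.  All names
   are made up for the example.  */
#if 0
#include <stdio.h>

#define MAX_CLASSES 32

static unsigned seen_set[MAX_CLASSES];
static int n_classes;

/* Return the equivalence class for SET, creating a new one if needed.  */
static int
class_for_set (unsigned set)
{
  for (int i = 0; i < n_classes; i++)
    if (seen_set[i] == set)
      return i + 1;
  seen_set[n_classes++] = set;
  return n_classes;
}

int
main (void)
{
  /* Three nodes; nodes 0 and 1 are reached by identical sets and so get
     the same label, node 2 gets a fresh one.  */
  unsigned incoming[3] = { 0x5, 0x5, 0x6 };
  for (int n = 0; n < 3; n++)
    printf ("node %d -> label %d\n", n, class_for_set (incoming[n]));
  return 0;
}
#endif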
2252 /* Print the pred graph in dot format. */
2255 dump_pred_graph (class scc_info
*si
, FILE *file
)
2259 /* Only print the graph if it has already been initialized: */
2263 /* Prints the header of the dot file: */
2264 fprintf (file
, "strict digraph {\n");
2265 fprintf (file
, " node [\n shape = box\n ]\n");
2266 fprintf (file
, " edge [\n fontsize = \"12\"\n ]\n");
2267 fprintf (file
, "\n // List of nodes and complex constraints in "
2268 "the constraint graph:\n");
2270 /* The next lines print the nodes in the graph together with the
2271 complex constraints attached to them. */
2272 for (i
= 1; i
< graph
->size
; i
++)
2274 if (i
== FIRST_REF_NODE
)
2276 if (si
->node_mapping
[i
] != i
)
2278 if (i
< FIRST_REF_NODE
)
2279 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
2281 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
2282 if (graph
->points_to
[i
]
2283 && !bitmap_empty_p (graph
->points_to
[i
]))
2285 if (i
< FIRST_REF_NODE
)
2286 fprintf (file
, "[label=\"%s = {", get_varinfo (i
)->name
);
2288 fprintf (file
, "[label=\"*%s = {",
2289 get_varinfo (i
- FIRST_REF_NODE
)->name
);
2292 EXECUTE_IF_SET_IN_BITMAP (graph
->points_to
[i
], 0, j
, bi
)
2293 fprintf (file
, " %d", j
);
2294 fprintf (file
, " }\"]");
2296 fprintf (file
, ";\n");
2299 /* Go over the edges. */
2300 fprintf (file
, "\n // Edges in the constraint graph:\n");
2301 for (i
= 1; i
< graph
->size
; i
++)
2305 if (si
->node_mapping
[i
] != i
)
2307 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[i
], 0, j
, bi
)
2309 unsigned from
= si
->node_mapping
[j
];
2310 if (from
< FIRST_REF_NODE
)
2311 fprintf (file
, "\"%s\"", get_varinfo (from
)->name
);
2313 fprintf (file
, "\"*%s\"", get_varinfo (from
- FIRST_REF_NODE
)->name
);
2314 fprintf (file
, " -> ");
2315 if (i
< FIRST_REF_NODE
)
2316 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
2318 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
2319 fprintf (file
, ";\n");
2323 /* Prints the tail of the dot file. */
2324 fprintf (file
, "}\n");
2327 /* Perform offline variable substitution, discovering equivalence
2328 classes, and eliminating non-pointer variables. */
2330 static class scc_info
*
2331 perform_var_substitution (constraint_graph_t graph
)
2334 unsigned int size
= graph
->size
;
2335 scc_info
*si
= new scc_info (size
);
2337 bitmap_obstack_initialize (&iteration_obstack
);
2338 pointer_equiv_class_table
= new hash_table
<equiv_class_hasher
> (511);
2339 location_equiv_class_table
2340 = new hash_table
<equiv_class_hasher
> (511);
2341 pointer_equiv_class
= 1;
2342 location_equiv_class
= 1;
2344 /* Condense the nodes, which means to find SCC's, count incoming
2345 predecessors, and unite nodes in SCC's. */
2346 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2347 if (!bitmap_bit_p (si
->visited
, si
->node_mapping
[i
]))
2348 condense_visit (graph
, si
, si
->node_mapping
[i
]);
2350 if (dump_file
&& (dump_flags
& TDF_GRAPH
))
2352 fprintf (dump_file
, "\n\n// The constraint graph before var-substitution "
2353 "in dot format:\n");
2354 dump_pred_graph (si
, dump_file
);
2355 fprintf (dump_file
, "\n\n");
2358 bitmap_clear (si
->visited
);
/* Actually label the nodes for pointer equivalences.  */
2360 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2361 if (!bitmap_bit_p (si
->visited
, si
->node_mapping
[i
]))
2362 label_visit (graph
, si
, si
->node_mapping
[i
]);
2364 /* Calculate location equivalence labels. */
2365 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2371 if (!graph
->pointed_by
[i
])
2373 pointed_by
= BITMAP_ALLOC (&iteration_obstack
);
/* Translate the pointed-by mapping for pointer equivalence
   labels.  */
2377 EXECUTE_IF_SET_IN_BITMAP (graph
->pointed_by
[i
], 0, j
, bi
)
2379 bitmap_set_bit (pointed_by
,
2380 graph
->pointer_label
[si
->node_mapping
[j
]]);
2382 /* The original pointed_by is now dead. */
2383 BITMAP_FREE (graph
->pointed_by
[i
]);
/* Look up the location equivalence label if one exists, or make
   a new one otherwise.  */
2387 equiv_class_label_t ecl
;
2388 ecl
= equiv_class_lookup_or_add (location_equiv_class_table
, pointed_by
);
2389 if (ecl
->equivalence_class
== 0)
2390 ecl
->equivalence_class
= location_equiv_class
++;
2393 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2394 fprintf (dump_file
, "Found location equivalence for node %s\n",
2395 get_varinfo (i
)->name
);
2396 BITMAP_FREE (pointed_by
);
2398 graph
->loc_label
[i
] = ecl
->equivalence_class
;
2402 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2403 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2405 unsigned j
= si
->node_mapping
[i
];
2408 fprintf (dump_file
, "%s node id %d ",
2409 bitmap_bit_p (graph
->direct_nodes
, i
)
2410 ? "Direct" : "Indirect", i
);
2411 if (i
< FIRST_REF_NODE
)
2412 fprintf (dump_file
, "\"%s\"", get_varinfo (i
)->name
);
2414 fprintf (dump_file
, "\"*%s\"",
2415 get_varinfo (i
- FIRST_REF_NODE
)->name
);
2416 fprintf (dump_file
, " mapped to SCC leader node id %d ", j
);
2417 if (j
< FIRST_REF_NODE
)
2418 fprintf (dump_file
, "\"%s\"\n", get_varinfo (j
)->name
);
2420 fprintf (dump_file
, "\"*%s\"\n",
2421 get_varinfo (j
- FIRST_REF_NODE
)->name
);
2426 "Equivalence classes for %s node id %d ",
2427 bitmap_bit_p (graph
->direct_nodes
, i
)
2428 ? "direct" : "indirect", i
);
2429 if (i
< FIRST_REF_NODE
)
2430 fprintf (dump_file
, "\"%s\"", get_varinfo (i
)->name
);
2432 fprintf (dump_file
, "\"*%s\"",
2433 get_varinfo (i
- FIRST_REF_NODE
)->name
);
2435 ": pointer %d, location %d\n",
2436 graph
->pointer_label
[i
], graph
->loc_label
[i
]);
2440 /* Quickly eliminate our non-pointer variables. */
2442 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2444 unsigned int node
= si
->node_mapping
[i
];
2446 if (graph
->pointer_label
[node
] == 0)
2448 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2450 "%s is a non-pointer variable, eliminating edges.\n",
2451 get_varinfo (node
)->name
);
2452 stats
.nonpointer_vars
++;
2453 clear_edges_for_node (graph
, node
);
/* Free information that was only necessary for variable
   substitution.  */

static void
free_var_substitution_info (class scc_info *si)
{
  delete si;
  free (graph->pointer_label);
  free (graph->loc_label);
  free (graph->pointed_by);
  free (graph->points_to);
  free (graph->eq_rep);
  sbitmap_free (graph->direct_nodes);
  delete pointer_equiv_class_table;
  pointer_equiv_class_table = NULL;
  delete location_equiv_class_table;
  location_equiv_class_table = NULL;
  bitmap_obstack_release (&iteration_obstack);
}
2480 /* Return an existing node that is equivalent to NODE, which has
2481 equivalence class LABEL, if one exists. Return NODE otherwise. */
2484 find_equivalent_node (constraint_graph_t graph
,
2485 unsigned int node
, unsigned int label
)
2487 /* If the address version of this variable is unused, we can
2488 substitute it for anything else with the same label.
2489 Otherwise, we know the pointers are equivalent, but not the
2490 locations, and we can unite them later. */
2492 if (!bitmap_bit_p (graph
->address_taken
, node
))
2494 gcc_checking_assert (label
< graph
->size
);
2496 if (graph
->eq_rep
[label
] != -1)
2498 /* Unify the two variables since we know they are equivalent. */
2499 if (unite (graph
->eq_rep
[label
], node
))
2500 unify_nodes (graph
, graph
->eq_rep
[label
], node
, false);
2501 return graph
->eq_rep
[label
];
2505 graph
->eq_rep
[label
] = node
;
2506 graph
->pe_rep
[label
] = node
;
2511 gcc_checking_assert (label
< graph
->size
);
2512 graph
->pe
[node
] = label
;
2513 if (graph
->pe_rep
[label
] == -1)
2514 graph
->pe_rep
[label
] = node
;
2520 /* Unite pointer equivalent but not location equivalent nodes in
2521 GRAPH. This may only be performed once variable substitution is
2525 unite_pointer_equivalences (constraint_graph_t graph
)
2529 /* Go through the pointer equivalences and unite them to their
2530 representative, if they aren't already. */
2531 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2533 unsigned int label
= graph
->pe
[i
];
2536 int label_rep
= graph
->pe_rep
[label
];
2538 if (label_rep
== -1)
2541 label_rep
= find (label_rep
);
2542 if (label_rep
>= 0 && unite (label_rep
, find (i
)))
2543 unify_nodes (graph
, label_rep
, i
, false);
2548 /* Move complex constraints to the GRAPH nodes they belong to. */
2551 move_complex_constraints (constraint_graph_t graph
)
2556 FOR_EACH_VEC_ELT (constraints
, i
, c
)
2560 struct constraint_expr lhs
= c
->lhs
;
2561 struct constraint_expr rhs
= c
->rhs
;
2563 if (lhs
.type
== DEREF
)
2565 insert_into_complex (graph
, lhs
.var
, c
);
2567 else if (rhs
.type
== DEREF
)
2569 if (!(get_varinfo (lhs
.var
)->is_special_var
))
2570 insert_into_complex (graph
, rhs
.var
, c
);
2572 else if (rhs
.type
!= ADDRESSOF
&& lhs
.var
> anything_id
2573 && (lhs
.offset
!= 0 || rhs
.offset
!= 0))
2575 insert_into_complex (graph
, rhs
.var
, c
);
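/* For example, given the constraints

     *p = q       (DEREF on the lhs)
     r = *s       (DEREF on the rhs)
     t = u + 8    (copy with a non-zero offset)

   the first is queued on p's complex list, the second on s's and the
   third on u's, while a plain copy like v = w needs no complex handling
   and stays an ordinary graph edge.  */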
/* Optimize and rewrite complex constraints while performing
   collapsing of equivalent nodes.  SI is the SCC_INFO that is the
   result of perform_var_substitution.  */
2587 rewrite_constraints (constraint_graph_t graph
,
2595 for (unsigned int j
= 0; j
< graph
->size
; j
++)
2596 gcc_assert (find (j
) == j
);
2599 FOR_EACH_VEC_ELT (constraints
, i
, c
)
2601 struct constraint_expr lhs
= c
->lhs
;
2602 struct constraint_expr rhs
= c
->rhs
;
2603 unsigned int lhsvar
= find (lhs
.var
);
2604 unsigned int rhsvar
= find (rhs
.var
);
2605 unsigned int lhsnode
, rhsnode
;
2606 unsigned int lhslabel
, rhslabel
;
2608 lhsnode
= si
->node_mapping
[lhsvar
];
2609 rhsnode
= si
->node_mapping
[rhsvar
];
2610 lhslabel
= graph
->pointer_label
[lhsnode
];
2611 rhslabel
= graph
->pointer_label
[rhsnode
];
/* See if it is really a non-pointer variable, and if so, ignore
   the constraint.  */
2617 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2620 fprintf (dump_file
, "%s is a non-pointer variable, "
2621 "ignoring constraint:",
2622 get_varinfo (lhs
.var
)->name
);
2623 dump_constraint (dump_file
, c
);
2624 fprintf (dump_file
, "\n");
2626 constraints
[i
] = NULL
;
2632 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2635 fprintf (dump_file
, "%s is a non-pointer variable, "
2636 "ignoring constraint:",
2637 get_varinfo (rhs
.var
)->name
);
2638 dump_constraint (dump_file
, c
);
2639 fprintf (dump_file
, "\n");
2641 constraints
[i
] = NULL
;
2645 lhsvar
= find_equivalent_node (graph
, lhsvar
, lhslabel
);
2646 rhsvar
= find_equivalent_node (graph
, rhsvar
, rhslabel
);
2647 c
->lhs
.var
= lhsvar
;
2648 c
->rhs
.var
= rhsvar
;
2652 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2653 part of an SCC, false otherwise. */
2656 eliminate_indirect_cycles (unsigned int node
)
2658 if (graph
->indirect_cycles
[node
] != -1
2659 && !bitmap_empty_p (get_varinfo (node
)->solution
))
2662 auto_vec
<unsigned> queue
;
2664 unsigned int to
= find (graph
->indirect_cycles
[node
]);
2667 /* We can't touch the solution set and call unify_nodes
2668 at the same time, because unify_nodes is going to do
2669 bitmap unions into it. */
2671 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node
)->solution
, 0, i
, bi
)
2673 if (find (i
) == i
&& i
!= to
)
2676 queue
.safe_push (i
);
2681 queue
.iterate (queuepos
, &i
);
2684 unify_nodes (graph
, to
, i
, true);
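/* Illustrative sketch (not part of the analyzer): the solver below keeps a
   bitmap of "changed" nodes and repeatedly folds a changed node's points-to
   set into each copy successor, re-queueing successors that grew, until the
   sets stop changing.  The stand-alone toy reaches the same fixpoint on
   bitmask solutions by naive re-iteration; all names are made up.  */
#if 0
#include <stdio.h>

#define N 3

/* Copy edges 0 -> 1 -> 2 and initial points-to bits per node.  */
static const int copy_edge[N][N] = { {0,1,0}, {0,0,1}, {0,0,0} };
static unsigned sol[N] = { 0x1, 0x0, 0x4 };

int
main (void)
{
  int changed = 1;
  while (changed)
    {
      changed = 0;
      for (int i = 0; i < N; i++)
	for (int j = 0; j < N; j++)
	  if (copy_edge[i][j] && (sol[j] | sol[i]) != sol[j])
	    {
	      sol[j] |= sol[i];	/* propagate along the copy edge i -> j */
	      changed = 1;
	    }
    }

  /* Prints 0x1, 0x1 and 0x5: node 2 accumulates what nodes 0 and 1 hold.  */
  for (int i = 0; i < N; i++)
    printf ("sol[%d] = 0x%x\n", i, sol[i]);
  return 0;
}
#endif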
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changing.  This corresponds to steps 6-8 in the solving list given above.  */
2699 solve_graph (constraint_graph_t graph
)
2701 unsigned int size
= graph
->size
;
2705 changed
= BITMAP_ALLOC (NULL
);
2707 /* Mark all initial non-collapsed nodes as changed. */
2708 for (i
= 1; i
< size
; i
++)
2710 varinfo_t ivi
= get_varinfo (i
);
2711 if (find (i
) == i
&& !bitmap_empty_p (ivi
->solution
)
2712 && ((graph
->succs
[i
] && !bitmap_empty_p (graph
->succs
[i
]))
2713 || graph
->complex[i
].length () > 0))
2714 bitmap_set_bit (changed
, i
);
2717 /* Allocate a bitmap to be used to store the changed bits. */
2718 pts
= BITMAP_ALLOC (&pta_obstack
);
2720 while (!bitmap_empty_p (changed
))
2723 struct topo_info
*ti
= init_topo_info ();
2726 bitmap_obstack_initialize (&iteration_obstack
);
2728 compute_topo_order (graph
, ti
);
2730 while (ti
->topo_order
.length () != 0)
2733 i
= ti
->topo_order
.pop ();
2735 /* If this variable is not a representative, skip it. */
2739 /* In certain indirect cycle cases, we may merge this
2740 variable to another. */
2741 if (eliminate_indirect_cycles (i
) && find (i
) != i
)
2744 /* If the node has changed, we need to process the
2745 complex constraints and outgoing edges again. */
2746 if (bitmap_clear_bit (changed
, i
))
2751 vec
<constraint_t
> complex = graph
->complex[i
];
2752 varinfo_t vi
= get_varinfo (i
);
2753 bool solution_empty
;
2755 /* Compute the changed set of solution bits. If anything
2756 is in the solution just propagate that. */
2757 if (bitmap_bit_p (vi
->solution
, anything_id
))
/* If anything is also in the old solution there is
   nothing to do.
   ??? But we shouldn't have ended up with "changed" set ...  */
2763 && bitmap_bit_p (vi
->oldsolution
, anything_id
))
2765 bitmap_copy (pts
, get_varinfo (find (anything_id
))->solution
);
2767 else if (vi
->oldsolution
)
2768 bitmap_and_compl (pts
, vi
->solution
, vi
->oldsolution
);
2770 bitmap_copy (pts
, vi
->solution
);
2772 if (bitmap_empty_p (pts
))
2775 if (vi
->oldsolution
)
2776 bitmap_ior_into (vi
->oldsolution
, pts
);
2779 vi
->oldsolution
= BITMAP_ALLOC (&oldpta_obstack
);
2780 bitmap_copy (vi
->oldsolution
, pts
);
2783 solution
= vi
->solution
;
2784 solution_empty
= bitmap_empty_p (solution
);
2786 /* Process the complex constraints */
2787 bitmap expanded_pts
= NULL
;
2788 FOR_EACH_VEC_ELT (complex, j
, c
)
2790 /* XXX: This is going to unsort the constraints in
2791 some cases, which will occasionally add duplicate
2792 constraints during unification. This does not
2793 affect correctness. */
2794 c
->lhs
.var
= find (c
->lhs
.var
);
2795 c
->rhs
.var
= find (c
->rhs
.var
);
2797 /* The only complex constraint that can change our
2798 solution to non-empty, given an empty solution,
2799 is a constraint where the lhs side is receiving
2800 some set from elsewhere. */
2801 if (!solution_empty
|| c
->lhs
.type
!= DEREF
)
2802 do_complex_constraint (graph
, c
, pts
, &expanded_pts
);
2804 BITMAP_FREE (expanded_pts
);
2806 solution_empty
= bitmap_empty_p (solution
);
2808 if (!solution_empty
)
2811 unsigned eff_escaped_id
= find (escaped_id
);
2813 /* Propagate solution to all successors. */
2814 unsigned to_remove
= ~0U;
2815 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[i
],
2818 if (to_remove
!= ~0U)
2820 bitmap_clear_bit (graph
->succs
[i
], to_remove
);
2823 unsigned int to
= find (j
);
/* Update the succ graph, avoiding duplicate work.  */
2829 if (! bitmap_set_bit (graph
->succs
[i
], to
))
2831 /* We eventually end up processing 'to' twice
2832 as it is undefined whether bitmap iteration
2833 iterates over bits set during iteration.
2834 Play safe instead of doing tricks. */
2836 /* Don't try to propagate to ourselves. */
2840 bitmap tmp
= get_varinfo (to
)->solution
;
/* If we propagate from ESCAPED use ESCAPED as placeholder.  */
2845 if (i
== eff_escaped_id
)
2846 flag
= bitmap_set_bit (tmp
, escaped_id
);
2848 flag
= bitmap_ior_into (tmp
, pts
);
2851 bitmap_set_bit (changed
, to
);
2853 if (to_remove
!= ~0U)
2854 bitmap_clear_bit (graph
->succs
[i
], to_remove
);
2858 free_topo_info (ti
);
2859 bitmap_obstack_release (&iteration_obstack
);
2863 BITMAP_FREE (changed
);
2864 bitmap_obstack_release (&oldpta_obstack
);
/* Map from trees to variable infos.  */
static hash_map<tree, varinfo_t> *vi_for_tree;
/* Insert VI as the variable info for tree T in the vi_for_tree map.  */

static void
insert_vi_for_tree (tree t, varinfo_t vi)
{
  gcc_assert (vi);
  gcc_assert (!vi_for_tree->put (t, vi));
}

/* Find the variable info for tree T in VI_FOR_TREE.  If T does not
   exist in the map, return NULL, otherwise, return the varinfo we found.  */

static varinfo_t
lookup_vi_for_tree (tree t)
{
  varinfo_t *slot = vi_for_tree->get (t);
  if (slot == NULL)
    return NULL;

  return *slot;
}
2893 /* Return a printable name for DECL */
2896 alias_get_name (tree decl
)
2898 const char *res
= "NULL";
2902 if (TREE_CODE (decl
) == SSA_NAME
)
2904 res
= get_name (decl
);
2905 temp
= xasprintf ("%s_%u", res
? res
: "", SSA_NAME_VERSION (decl
));
2907 else if (HAS_DECL_ASSEMBLER_NAME_P (decl
)
2908 && DECL_ASSEMBLER_NAME_SET_P (decl
))
2909 res
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl
));
2910 else if (DECL_P (decl
))
2912 res
= get_name (decl
);
2914 temp
= xasprintf ("D.%u", DECL_UID (decl
));
2919 res
= ggc_strdup (temp
);
2927 /* Find the variable id for tree T in the map.
2928 If T doesn't exist in the map, create an entry for it and return it. */
2931 get_vi_for_tree (tree t
)
2933 varinfo_t
*slot
= vi_for_tree
->get (t
);
2936 unsigned int id
= create_variable_info_for (t
, alias_get_name (t
), false);
2937 return get_varinfo (id
);
2943 /* Get a scalar constraint expression for a new temporary variable. */
2945 static struct constraint_expr
2946 new_scalar_tmp_constraint_exp (const char *name
, bool add_id
)
2948 struct constraint_expr tmp
;
2951 vi
= new_var_info (NULL_TREE
, name
, add_id
);
2955 vi
->is_full_var
= 1;
/* Get a constraint expression vector from an SSA_VAR_P node.
   If address_p is true, the result will have its address taken.  */
2969 get_constraint_for_ssa_var (tree t
, vec
<ce_s
> *results
, bool address_p
)
2971 struct constraint_expr cexpr
;
2974 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2975 gcc_assert (TREE_CODE (t
) == SSA_NAME
|| DECL_P (t
));
2977 if (TREE_CODE (t
) == SSA_NAME
2978 && SSA_NAME_IS_DEFAULT_DEF (t
))
/* For parameters, get at the points-to set for the actual parm decl.  */
2982 if (TREE_CODE (SSA_NAME_VAR (t
)) == PARM_DECL
2983 || TREE_CODE (SSA_NAME_VAR (t
)) == RESULT_DECL
)
2985 get_constraint_for_ssa_var (SSA_NAME_VAR (t
), results
, address_p
);
2988 /* For undefined SSA names return nothing. */
2989 else if (!ssa_defined_default_def_p (t
))
2991 cexpr
.var
= nothing_id
;
2992 cexpr
.type
= SCALAR
;
2994 results
->safe_push (cexpr
);
2999 /* For global variables resort to the alias target. */
3000 if (VAR_P (t
) && (TREE_STATIC (t
) || DECL_EXTERNAL (t
)))
3002 varpool_node
*node
= varpool_node::get (t
);
3003 if (node
&& node
->alias
&& node
->analyzed
)
3005 node
= node
->ultimate_alias_target ();
3006 /* Canonicalize the PT uid of all aliases to the ultimate target.
3007 ??? Hopefully the set of aliases can't change in a way that
3008 changes the ultimate alias target. */
3009 gcc_assert ((! DECL_PT_UID_SET_P (node
->decl
)
3010 || DECL_PT_UID (node
->decl
) == DECL_UID (node
->decl
))
3011 && (! DECL_PT_UID_SET_P (t
)
3012 || DECL_PT_UID (t
) == DECL_UID (node
->decl
)));
3013 DECL_PT_UID (t
) = DECL_UID (node
->decl
);
/* If this decl may bind to NULL, note that.  */
3019 && (! node
|| ! node
->nonzero_address ()))
3021 cexpr
.var
= nothing_id
;
3022 cexpr
.type
= SCALAR
;
3024 results
->safe_push (cexpr
);
3028 vi
= get_vi_for_tree (t
);
3030 cexpr
.type
= SCALAR
;
/* If we are not taking the address of the constraint expr, add all
   sub-fields of the variable as well.  */
3036 && !vi
->is_full_var
)
3038 for (; vi
; vi
= vi_next (vi
))
3041 results
->safe_push (cexpr
);
3046 results
->safe_push (cexpr
);
3049 /* Process constraint T, performing various simplifications and then
3050 adding it to our list of overall constraints. */
3053 process_constraint (constraint_t t
)
3055 struct constraint_expr rhs
= t
->rhs
;
3056 struct constraint_expr lhs
= t
->lhs
;
3058 gcc_assert (rhs
.var
< varmap
.length ());
3059 gcc_assert (lhs
.var
< varmap
.length ());
3061 /* If we didn't get any useful constraint from the lhs we get
3062 &ANYTHING as fallback from get_constraint_for. Deal with
3063 it here by turning it into *ANYTHING. */
3064 if (lhs
.type
== ADDRESSOF
3065 && lhs
.var
== anything_id
)
3068 /* ADDRESSOF on the lhs is invalid. */
3069 gcc_assert (lhs
.type
!= ADDRESSOF
);
3071 /* We shouldn't add constraints from things that cannot have pointers.
3072 It's not completely trivial to avoid in the callers, so do it here. */
3073 if (rhs
.type
!= ADDRESSOF
3074 && !get_varinfo (rhs
.var
)->may_have_pointers
)
3077 /* Likewise adding to the solution of a non-pointer var isn't useful. */
3078 if (!get_varinfo (lhs
.var
)->may_have_pointers
)
3081 /* This can happen in our IR with things like n->a = *p */
3082 if (rhs
.type
== DEREF
&& lhs
.type
== DEREF
&& rhs
.var
!= anything_id
)
3084 /* Split into tmp = *rhs, *lhs = tmp */
3085 struct constraint_expr tmplhs
;
3086 tmplhs
= new_scalar_tmp_constraint_exp ("doubledereftmp", true);
3087 process_constraint (new_constraint (tmplhs
, rhs
));
3088 process_constraint (new_constraint (lhs
, tmplhs
));
3090 else if ((rhs
.type
!= SCALAR
|| rhs
.offset
!= 0) && lhs
.type
== DEREF
)
3092 /* Split into tmp = &rhs, *lhs = tmp */
3093 struct constraint_expr tmplhs
;
3094 tmplhs
= new_scalar_tmp_constraint_exp ("derefaddrtmp", true);
3095 process_constraint (new_constraint (tmplhs
, rhs
));
3096 process_constraint (new_constraint (lhs
, tmplhs
));
3100 gcc_assert (rhs
.type
!= ADDRESSOF
|| rhs
.offset
== 0);
3101 constraints
.safe_push (t
);
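/* For example, the statement  *p = *q  arrives here as a DEREF = DEREF
   constraint and is split into

     doubledereftmp = *q
     *p = doubledereftmp

   and a store of an address,  *p = &x,  is likewise routed through a
   "derefaddrtmp" temporary, so every recorded constraint contains at most
   one dereference.  */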
/* Return the position, in bits, of FIELD_DECL from the beginning of its
   structure.  */

static HOST_WIDE_INT
bitpos_of_field (const tree fdecl)
{
  if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
      || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
    return -1;

  return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
	  + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
}
3121 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
3122 resulting constraint expressions in *RESULTS. */
3125 get_constraint_for_ptr_offset (tree ptr
, tree offset
,
3128 struct constraint_expr c
;
3130 HOST_WIDE_INT rhsoffset
;
3132 /* If we do not do field-sensitive PTA adding offsets to pointers
3133 does not change the points-to solution. */
3134 if (!use_field_sensitive
)
3136 get_constraint_for_rhs (ptr
, results
);
3140 /* If the offset is not a non-negative integer constant that fits
3141 in a HOST_WIDE_INT, we have to fall back to a conservative
3142 solution which includes all sub-fields of all pointed-to
3143 variables of ptr. */
3144 if (offset
== NULL_TREE
3145 || TREE_CODE (offset
) != INTEGER_CST
)
3146 rhsoffset
= UNKNOWN_OFFSET
;
3149 /* Sign-extend the offset. */
3150 offset_int soffset
= offset_int::from (wi::to_wide (offset
), SIGNED
);
3151 if (!wi::fits_shwi_p (soffset
))
3152 rhsoffset
= UNKNOWN_OFFSET
;
3155 /* Make sure the bit-offset also fits. */
3156 HOST_WIDE_INT rhsunitoffset
= soffset
.to_shwi ();
3157 rhsoffset
= rhsunitoffset
* (unsigned HOST_WIDE_INT
) BITS_PER_UNIT
;
3158 if (rhsunitoffset
!= rhsoffset
/ BITS_PER_UNIT
)
3159 rhsoffset
= UNKNOWN_OFFSET
;
3163 get_constraint_for_rhs (ptr
, results
);
3167 /* As we are eventually appending to the solution do not use
3168 vec::iterate here. */
3169 n
= results
->length ();
3170 for (j
= 0; j
< n
; j
++)
3174 curr
= get_varinfo (c
.var
);
3176 if (c
.type
== ADDRESSOF
3177 /* If this varinfo represents a full variable just use it. */
3178 && curr
->is_full_var
)
3180 else if (c
.type
== ADDRESSOF
3181 /* If we do not know the offset add all subfields. */
3182 && rhsoffset
== UNKNOWN_OFFSET
)
3184 varinfo_t temp
= get_varinfo (curr
->head
);
3187 struct constraint_expr c2
;
3189 c2
.type
= ADDRESSOF
;
3191 if (c2
.var
!= c
.var
)
3192 results
->safe_push (c2
);
3193 temp
= vi_next (temp
);
3197 else if (c
.type
== ADDRESSOF
)
3200 unsigned HOST_WIDE_INT offset
= curr
->offset
+ rhsoffset
;
3202 /* If curr->offset + rhsoffset is less than zero adjust it. */
3204 && curr
->offset
< offset
)
3207 /* We have to include all fields that overlap the current
3208 field shifted by rhsoffset. And we include at least
3209 the last or the first field of the variable to represent
3210 reachability of off-bound addresses, in particular &object + 1,
3211 conservatively correct. */
3212 temp
= first_or_preceding_vi_for_offset (curr
, offset
);
3215 temp
= vi_next (temp
);
3217 && temp
->offset
< offset
+ curr
->size
)
3219 struct constraint_expr c2
;
3221 c2
.type
= ADDRESSOF
;
3223 results
->safe_push (c2
);
3224 temp
= vi_next (temp
);
3227 else if (c
.type
== SCALAR
)
3229 gcc_assert (c
.offset
== 0);
3230 c
.offset
= rhsoffset
;
3233 /* We shouldn't get any DEREFs here. */
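/* For example, for  q = p + i  with a non-constant i the code above falls
   back to UNKNOWN_OFFSET, i.e. q may end up pointing into any sub-field of
   whatever p points to, while a constant byte offset is scaled to bits and
   applied to the individual constraint expressions collected for p.  */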
/* Given a COMPONENT_REF T, return the constraint_expr vector for it.
   If address_p is true the result will have its address taken.
   If lhs_p is true then the constraint expression is assumed to be used
   as the lhs.  */
3247 get_constraint_for_component_ref (tree t
, vec
<ce_s
> *results
,
3248 bool address_p
, bool lhs_p
)
3251 poly_int64 bitsize
= -1;
3252 poly_int64 bitmaxsize
= -1;
/* Some people like to do cute things like take the address of &0->a.b.  */
3260 while (handled_component_p (forzero
)
3261 || INDIRECT_REF_P (forzero
)
3262 || TREE_CODE (forzero
) == MEM_REF
)
3263 forzero
= TREE_OPERAND (forzero
, 0);
3265 if (CONSTANT_CLASS_P (forzero
) && integer_zerop (forzero
))
3267 struct constraint_expr temp
;
3270 temp
.var
= integer_id
;
3272 results
->safe_push (temp
);
3276 t
= get_ref_base_and_extent (t
, &bitpos
, &bitsize
, &bitmaxsize
, &reverse
);
3278 /* We can end up here for component references on a
3279 VIEW_CONVERT_EXPR <>(&foobar) or things like a
3280 BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>. So for
3281 symbolic constants simply give up. */
3282 if (TREE_CODE (t
) == ADDR_EXPR
)
3284 constraint_expr result
;
3285 result
.type
= SCALAR
;
3286 result
.var
= anything_id
;
3288 results
->safe_push (result
);
/* Avoid creating pointer-offset constraints, so handle MEM_REF
   offsets directly.  Pretend to take the address of the base;
   we'll take care of adding the required subset of sub-fields below.  */
3295 if (TREE_CODE (t
) == MEM_REF
3296 && !integer_zerop (TREE_OPERAND (t
, 0)))
3298 poly_offset_int off
= mem_ref_offset (t
);
3299 off
<<= LOG2_BITS_PER_UNIT
;
3302 if (off
.to_shwi (&off_hwi
))
3309 get_constraint_for_1 (TREE_OPERAND (t
, 0), results
, false, lhs_p
);
3313 get_constraint_for_1 (t
, results
, true, lhs_p
);
3315 /* Strip off nothing_id. */
3316 if (results
->length () == 2)
3318 gcc_assert ((*results
)[0].var
== nothing_id
);
3319 results
->unordered_remove (0);
3321 gcc_assert (results
->length () == 1);
3322 struct constraint_expr
&result
= results
->last ();
3324 if (result
.type
== SCALAR
3325 && get_varinfo (result
.var
)->is_full_var
)
3326 /* For single-field vars do not bother about the offset. */
3328 else if (result
.type
== SCALAR
)
3330 /* In languages like C, you can access one past the end of an
3331 array. You aren't allowed to dereference it, so we can
3332 ignore this constraint. When we handle pointer subtraction,
3333 we may have to do something cute here. */
3335 if (maybe_lt (poly_uint64 (bitpos
), get_varinfo (result
.var
)->fullsize
)
3336 && maybe_ne (bitmaxsize
, 0))
/* It's also not true that the constraint will actually start at the
   right offset, it may start in some padding.  We only care about
   setting the constraint to the first actual field it touches, so
   walk to find that.  */
3342 struct constraint_expr cexpr
= result
;
3346 for (curr
= get_varinfo (cexpr
.var
); curr
; curr
= vi_next (curr
))
3348 if (ranges_maybe_overlap_p (poly_int64 (curr
->offset
),
3349 curr
->size
, bitpos
, bitmaxsize
))
3351 cexpr
.var
= curr
->id
;
3352 results
->safe_push (cexpr
);
3357 /* If we are going to take the address of this field then
3358 to be able to compute reachability correctly add at least
3359 the last field of the variable. */
3360 if (address_p
&& results
->length () == 0)
3362 curr
= get_varinfo (cexpr
.var
);
3363 while (curr
->next
!= 0)
3364 curr
= vi_next (curr
);
3365 cexpr
.var
= curr
->id
;
3366 results
->safe_push (cexpr
);
3368 else if (results
->length () == 0)
3369 /* Assert that we found *some* field there. The user couldn't be
3370 accessing *only* padding. */
3371 /* Still the user could access one past the end of an array
3372 embedded in a struct resulting in accessing *only* padding. */
/* Or accessing only padding via type-punning to a type
   that has a field just in padding space.  */
3376 cexpr
.type
= SCALAR
;
3377 cexpr
.var
= anything_id
;
3379 results
->safe_push (cexpr
);
3382 else if (known_eq (bitmaxsize
, 0))
3384 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3385 fprintf (dump_file
, "Access to zero-sized part of variable, "
3389 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3390 fprintf (dump_file
, "Access to past the end of variable, ignoring\n");
3392 else if (result
.type
== DEREF
)
/* If we do not know exactly where the access goes say so.  Note
   that only for non-structure accesses do we know that we access
   at most one subfield of any variable.  */
3397 HOST_WIDE_INT const_bitpos
;
3398 if (!bitpos
.is_constant (&const_bitpos
)
3399 || const_bitpos
== -1
3400 || maybe_ne (bitsize
, bitmaxsize
)
3401 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t
))
3402 || result
.offset
== UNKNOWN_OFFSET
)
3403 result
.offset
= UNKNOWN_OFFSET
;
3405 result
.offset
+= const_bitpos
;
3407 else if (result
.type
== ADDRESSOF
)
3409 /* We can end up here for component references on constants like
3410 VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i]. */
3411 result
.type
= SCALAR
;
3412 result
.var
= anything_id
;
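/* For example, for a (hypothetical)  struct S { int *p; int *q; } s;  an
   access to  s.q  resolves to the single field varinfo whose bit range
   overlaps the access (bit offset 64 on an LP64 target), whereas an
   unknown or out-of-range offset falls back to the conservative cases
   handled above.  */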
/* Dereference the constraint expression CONS, and return the result.
   DEREF (ADDRESSOF) = SCALAR
   DEREF (SCALAR) = DEREF
   DEREF (DEREF) = (temp = DEREF1; result = DEREF (temp))
   This is needed so that we can handle dereferencing DEREF constraints.  */

static void
do_deref (vec<ce_s> *constraints)
{
  struct constraint_expr *c;
  unsigned int i = 0;

  FOR_EACH_VEC_ELT (*constraints, i, c)
    {
      if (c->type == SCALAR)
	c->type = DEREF;
      else if (c->type == ADDRESSOF)
	c->type = SCALAR;
      else if (c->type == DEREF)
	{
	  struct constraint_expr tmplhs;
	  tmplhs = new_scalar_tmp_constraint_exp ("dereftmp", true);
	  process_constraint (new_constraint (tmplhs, *c));
	  c->var = tmplhs.var;
	}
      else
	gcc_unreachable ();
    }
}
3450 /* Given a tree T, return the constraint expression for taking the
3454 get_constraint_for_address_of (tree t
, vec
<ce_s
> *results
)
3456 struct constraint_expr
*c
;
3459 get_constraint_for_1 (t
, results
, true, true);
3461 FOR_EACH_VEC_ELT (*results
, i
, c
)
3463 if (c
->type
== DEREF
)
3466 c
->type
= ADDRESSOF
;
3470 /* Given a tree T, return the constraint expression for it. */
3473 get_constraint_for_1 (tree t
, vec
<ce_s
> *results
, bool address_p
,
3476 struct constraint_expr temp
;
/* x = integer is all glommed to a single variable, which doesn't
   point to anything by itself.  That is, of course, unless it is an
   integer constant being treated as a pointer, in which case, we
   will return that this is really the addressof anything.  This
   happens below, since it will fall into the default case.  The only
   case we know something about an integer treated like a pointer is
   when it is the NULL pointer, and then we just say it points to
   NOTHING.

   Do not do that if -fno-delete-null-pointer-checks though, because
   in that case *NULL does not fail, so it _should_ alias *anything.
   It is not worth adding a new option or renaming the existing one,
   since this case is relatively obscure.  */
3491 if ((TREE_CODE (t
) == INTEGER_CST
3492 && integer_zerop (t
))
3493 /* The only valid CONSTRUCTORs in gimple with pointer typed
3494 elements are zero-initializer. But in IPA mode we also
3495 process global initializers, so verify at least. */
3496 || (TREE_CODE (t
) == CONSTRUCTOR
3497 && CONSTRUCTOR_NELTS (t
) == 0))
3499 if (flag_delete_null_pointer_checks
)
3500 temp
.var
= nothing_id
;
3502 temp
.var
= nonlocal_id
;
3503 temp
.type
= ADDRESSOF
;
3505 results
->safe_push (temp
);
3509 /* String constants are read-only, ideally we'd have a CONST_DECL
3511 if (TREE_CODE (t
) == STRING_CST
)
3513 temp
.var
= string_id
;
3516 results
->safe_push (temp
);
3520 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
3522 case tcc_expression
:
3524 switch (TREE_CODE (t
))
3527 get_constraint_for_address_of (TREE_OPERAND (t
, 0), results
);
3535 switch (TREE_CODE (t
))
3539 struct constraint_expr cs
;
3541 get_constraint_for_ptr_offset (TREE_OPERAND (t
, 0),
3542 TREE_OPERAND (t
, 1), results
);
3545 /* If we are not taking the address then make sure to process
3546 all subvariables we might access. */
3550 cs
= results
->last ();
3551 if (cs
.type
== DEREF
3552 && type_can_have_subvars (TREE_TYPE (t
)))
/* For dereferences this means we have to defer it
   to solving time.  */
3556 results
->last ().offset
= UNKNOWN_OFFSET
;
3559 if (cs
.type
!= SCALAR
)
3562 vi
= get_varinfo (cs
.var
);
3563 curr
= vi_next (vi
);
3564 if (!vi
->is_full_var
3567 unsigned HOST_WIDE_INT size
;
3568 if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t
))))
3569 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t
)));
3572 for (; curr
; curr
= vi_next (curr
))
3574 if (curr
->offset
- vi
->offset
< size
)
3577 results
->safe_push (cs
);
3586 case ARRAY_RANGE_REF
:
3591 get_constraint_for_component_ref (t
, results
, address_p
, lhs_p
);
3593 case VIEW_CONVERT_EXPR
:
3594 get_constraint_for_1 (TREE_OPERAND (t
, 0), results
, address_p
,
3597 /* We are missing handling for TARGET_MEM_REF here. */
3602 case tcc_exceptional
:
3604 switch (TREE_CODE (t
))
3608 get_constraint_for_ssa_var (t
, results
, address_p
);
3616 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), i
, val
)
3618 struct constraint_expr
*rhsp
;
3620 get_constraint_for_1 (val
, &tmp
, address_p
, lhs_p
);
3621 FOR_EACH_VEC_ELT (tmp
, j
, rhsp
)
3622 results
->safe_push (*rhsp
);
3625 /* We do not know whether the constructor was complete,
3626 so technically we have to add &NOTHING or &ANYTHING
3627 like we do for an empty constructor as well. */
3634 case tcc_declaration
:
3636 get_constraint_for_ssa_var (t
, results
, address_p
);
3641 /* We cannot refer to automatic variables through constants. */
3642 temp
.type
= ADDRESSOF
;
3643 temp
.var
= nonlocal_id
;
3645 results
->safe_push (temp
);
3651 /* The default fallback is a constraint from anything. */
3652 temp
.type
= ADDRESSOF
;
3653 temp
.var
= anything_id
;
3655 results
->safe_push (temp
);
/* Given a gimple tree T, return the constraint expression vector for it.  */

static void
get_constraint_for (tree t, vec<ce_s> *results)
{
  gcc_assert (results->length () == 0);

  get_constraint_for_1 (t, results, false, true);
}

/* Given a gimple tree T, return the constraint expression vector for it
   to be used as the rhs of a constraint.  */

static void
get_constraint_for_rhs (tree t, vec<ce_s> *results)
{
  gcc_assert (results->length () == 0);

  get_constraint_for_1 (t, results, false, false);
}
3680 /* Efficiently generates constraints from all entries in *RHSC to all
3681 entries in *LHSC. */
3684 process_all_all_constraints (vec
<ce_s
> lhsc
,
3687 struct constraint_expr
*lhsp
, *rhsp
;
3690 if (lhsc
.length () <= 1 || rhsc
.length () <= 1)
3692 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
3693 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
3694 process_constraint (new_constraint (*lhsp
, *rhsp
));
3698 struct constraint_expr tmp
;
3699 tmp
= new_scalar_tmp_constraint_exp ("allalltmp", true);
3700 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
3701 process_constraint (new_constraint (tmp
, *rhsp
));
3702 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
3703 process_constraint (new_constraint (*lhsp
, tmp
));
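/* For example, copying between aggregates that expand to three constraint
   expressions on each side would take 3 * 3 = 9 direct constraints; going
   through the "allalltmp" temporary needs only 3 + 3 = 6.  */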
3707 /* Handle aggregate copies by expanding into copies of the respective
3708 fields of the structures. */
3711 do_structure_copy (tree lhsop
, tree rhsop
)
3713 struct constraint_expr
*lhsp
, *rhsp
;
3714 auto_vec
<ce_s
> lhsc
;
3715 auto_vec
<ce_s
> rhsc
;
3718 get_constraint_for (lhsop
, &lhsc
);
3719 get_constraint_for_rhs (rhsop
, &rhsc
);
3722 if (lhsp
->type
== DEREF
3723 || (lhsp
->type
== ADDRESSOF
&& lhsp
->var
== anything_id
)
3724 || rhsp
->type
== DEREF
)
3726 if (lhsp
->type
== DEREF
)
3728 gcc_assert (lhsc
.length () == 1);
3729 lhsp
->offset
= UNKNOWN_OFFSET
;
3731 if (rhsp
->type
== DEREF
)
3733 gcc_assert (rhsc
.length () == 1);
3734 rhsp
->offset
= UNKNOWN_OFFSET
;
3736 process_all_all_constraints (lhsc
, rhsc
);
3738 else if (lhsp
->type
== SCALAR
3739 && (rhsp
->type
== SCALAR
3740 || rhsp
->type
== ADDRESSOF
))
3742 HOST_WIDE_INT lhssize
, lhsoffset
;
3743 HOST_WIDE_INT rhssize
, rhsoffset
;
3746 if (!get_ref_base_and_extent_hwi (lhsop
, &lhsoffset
, &lhssize
, &reverse
)
3747 || !get_ref_base_and_extent_hwi (rhsop
, &rhsoffset
, &rhssize
,
3750 process_all_all_constraints (lhsc
, rhsc
);
3753 for (j
= 0; lhsc
.iterate (j
, &lhsp
);)
3755 varinfo_t lhsv
, rhsv
;
3757 lhsv
= get_varinfo (lhsp
->var
);
3758 rhsv
= get_varinfo (rhsp
->var
);
3759 if (lhsv
->may_have_pointers
3760 && (lhsv
->is_full_var
3761 || rhsv
->is_full_var
3762 || ranges_overlap_p (lhsv
->offset
+ rhsoffset
, lhsv
->size
,
3763 rhsv
->offset
+ lhsoffset
, rhsv
->size
)))
3764 process_constraint (new_constraint (*lhsp
, *rhsp
));
3765 if (!rhsv
->is_full_var
3766 && (lhsv
->is_full_var
3767 || (lhsv
->offset
+ rhsoffset
+ lhsv
->size
3768 > rhsv
->offset
+ lhsoffset
+ rhsv
->size
)))
3771 if (k
>= rhsc
.length ())
3782 /* Create constraints ID = { rhsc }. */
3785 make_constraints_to (unsigned id
, vec
<ce_s
> rhsc
)
3787 struct constraint_expr
*c
;
3788 struct constraint_expr includes
;
3792 includes
.offset
= 0;
3793 includes
.type
= SCALAR
;
3795 FOR_EACH_VEC_ELT (rhsc
, j
, c
)
3796 process_constraint (new_constraint (includes
, *c
));
3799 /* Create a constraint ID = OP. */
3802 make_constraint_to (unsigned id
, tree op
)
3804 auto_vec
<ce_s
> rhsc
;
3805 get_constraint_for_rhs (op
, &rhsc
);
3806 make_constraints_to (id
, rhsc
);
3809 /* Create a constraint ID = &FROM. */
3812 make_constraint_from (varinfo_t vi
, int from
)
3814 struct constraint_expr lhs
, rhs
;
3822 rhs
.type
= ADDRESSOF
;
3823 process_constraint (new_constraint (lhs
, rhs
));
3826 /* Create a constraint ID = FROM. */
3829 make_copy_constraint (varinfo_t vi
, int from
)
3831 struct constraint_expr lhs
, rhs
;
3840 process_constraint (new_constraint (lhs
, rhs
));
3843 /* Make constraints necessary to make OP escape. */
3846 make_escape_constraint (tree op
)
3848 make_constraint_to (escaped_id
, op
);
/* Add constraints such that the solution of VI is transitively closed.  */
3854 make_transitive_closure_constraints (varinfo_t vi
)
3856 struct constraint_expr lhs
, rhs
;
3858 /* VAR = *(VAR + UNKNOWN); */
3864 rhs
.offset
= UNKNOWN_OFFSET
;
3865 process_constraint (new_constraint (lhs
, rhs
));
/* Add constraints such that the solution of VI has all subvariables added.  */
3871 make_any_offset_constraints (varinfo_t vi
)
3873 struct constraint_expr lhs
, rhs
;
3875 /* VAR = VAR + UNKNOWN; */
3881 rhs
.offset
= UNKNOWN_OFFSET
;
3882 process_constraint (new_constraint (lhs
, rhs
));
3885 /* Temporary storage for fake var decls. */
3886 struct obstack fake_var_decl_obstack
;
3888 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3891 build_fake_var_decl (tree type
)
3893 tree decl
= (tree
) XOBNEW (&fake_var_decl_obstack
, struct tree_var_decl
);
3894 memset (decl
, 0, sizeof (struct tree_var_decl
));
3895 TREE_SET_CODE (decl
, VAR_DECL
);
3896 TREE_TYPE (decl
) = type
;
3897 DECL_UID (decl
) = allocate_decl_uid ();
3898 SET_DECL_PT_UID (decl
, -1);
3899 layout_decl (decl
, 0);
3903 /* Create a new artificial heap variable with NAME.
3904 Return the created variable. */
3907 make_heapvar (const char *name
, bool add_id
)
3912 heapvar
= build_fake_var_decl (ptr_type_node
);
3913 DECL_EXTERNAL (heapvar
) = 1;
3915 vi
= new_var_info (heapvar
, name
, add_id
);
3916 vi
->is_heap_var
= true;
3917 vi
->is_unknown_size_var
= true;
3921 vi
->is_full_var
= true;
3922 insert_vi_for_tree (heapvar
, vi
);
3927 /* Create a new artificial heap variable with NAME and make a
3928 constraint from it to LHS. Set flags according to a tag used
3929 for tracking restrict pointers. */
3932 make_constraint_from_restrict (varinfo_t lhs
, const char *name
, bool add_id
)
3934 varinfo_t vi
= make_heapvar (name
, add_id
);
3935 vi
->is_restrict_var
= 1;
3936 vi
->is_global_var
= 1;
3937 vi
->may_have_pointers
= 1;
3938 make_constraint_from (lhs
, vi
->id
);
3942 /* Create a new artificial heap variable with NAME and make a
3943 constraint from it to LHS. Set flags according to a tag used
3944 for tracking restrict pointers and make the artificial heap
3945 point to global memory. */
3948 make_constraint_from_global_restrict (varinfo_t lhs
, const char *name
,
3951 varinfo_t vi
= make_constraint_from_restrict (lhs
, name
, add_id
);
3952 make_copy_constraint (vi
, nonlocal_id
);
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  */
3962 enum { fi_clobbers
= 1, fi_uses
= 2,
3963 fi_static_chain
= 3, fi_result
= 4, fi_parm_base
= 5 };
3965 /* Get a constraint for the requested part of a function designator FI
3966 when operating in IPA mode. */
3968 static struct constraint_expr
3969 get_function_part_constraint (varinfo_t fi
, unsigned part
)
3971 struct constraint_expr c
;
3973 gcc_assert (in_ipa_mode
);
3975 if (fi
->id
== anything_id
)
/* ??? We probably should have an ANYFN special variable.  */
3978 c
.var
= anything_id
;
3982 else if (fi
->decl
&& TREE_CODE (fi
->decl
) == FUNCTION_DECL
)
3984 varinfo_t ai
= first_vi_for_offset (fi
, part
);
3988 c
.var
= anything_id
;
/* For non-IPA mode, generate constraints necessary for a call on the
   rhs.  */
4006 handle_rhs_call (gcall
*stmt
, vec
<ce_s
> *results
)
4008 struct constraint_expr rhsc
;
4010 bool returns_uses
= false;
4012 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
4014 tree arg
= gimple_call_arg (stmt
, i
);
4015 int flags
= gimple_call_arg_flags (stmt
, i
);
4017 /* If the argument is not used we can ignore it. */
4018 if (flags
& EAF_UNUSED
)
/* As we compute ESCAPED context-insensitive we do not gain
   any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
   set.  The argument would still get clobbered through the
   escape solution.  */
4025 if ((flags
& EAF_NOCLOBBER
)
4026 && (flags
& EAF_NOESCAPE
))
4028 varinfo_t uses
= get_call_use_vi (stmt
);
4029 varinfo_t tem
= new_var_info (NULL_TREE
, "callarg", true);
4030 tem
->is_reg_var
= true;
4031 make_constraint_to (tem
->id
, arg
);
4032 make_any_offset_constraints (tem
);
4033 if (!(flags
& EAF_DIRECT
))
4034 make_transitive_closure_constraints (tem
);
4035 make_copy_constraint (uses
, tem
->id
);
4036 returns_uses
= true;
4038 else if (flags
& EAF_NOESCAPE
)
4040 struct constraint_expr lhs
, rhs
;
4041 varinfo_t uses
= get_call_use_vi (stmt
);
4042 varinfo_t clobbers
= get_call_clobber_vi (stmt
);
4043 varinfo_t tem
= new_var_info (NULL_TREE
, "callarg", true);
4044 tem
->is_reg_var
= true;
4045 make_constraint_to (tem
->id
, arg
);
4046 make_any_offset_constraints (tem
);
4047 if (!(flags
& EAF_DIRECT
))
4048 make_transitive_closure_constraints (tem
);
4049 make_copy_constraint (uses
, tem
->id
);
4050 make_copy_constraint (clobbers
, tem
->id
);
4051 /* Add *tem = nonlocal, do not add *tem = callused as
4052 EAF_NOESCAPE parameters do not escape to other parameters
4053 and all other uses appear in NONLOCAL as well. */
4058 rhs
.var
= nonlocal_id
;
4060 process_constraint (new_constraint (lhs
, rhs
));
4061 returns_uses
= true;
4064 make_escape_constraint (arg
);
4067 /* If we added to the calls uses solution make sure we account for
4068 pointers to it to be returned. */
4071 rhsc
.var
= get_call_use_vi (stmt
)->id
;
4072 rhsc
.offset
= UNKNOWN_OFFSET
;
4074 results
->safe_push (rhsc
);
4077 /* The static chain escapes as well. */
4078 if (gimple_call_chain (stmt
))
4079 make_escape_constraint (gimple_call_chain (stmt
));
4081 /* And if we applied NRV the address of the return slot escapes as well. */
4082 if (gimple_call_return_slot_opt_p (stmt
)
4083 && gimple_call_lhs (stmt
) != NULL_TREE
4084 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt
))))
4086 auto_vec
<ce_s
> tmpc
;
4087 struct constraint_expr lhsc
, *c
;
4088 get_constraint_for_address_of (gimple_call_lhs (stmt
), &tmpc
);
4089 lhsc
.var
= escaped_id
;
4092 FOR_EACH_VEC_ELT (tmpc
, i
, c
)
4093 process_constraint (new_constraint (lhsc
, *c
));
4096 /* Regular functions return nonlocal memory. */
4097 rhsc
.var
= nonlocal_id
;
4100 results
->safe_push (rhsc
);
4103 /* For non-IPA mode, generate constraints necessary for a call
4104 that returns a pointer and assigns it to LHS. This simply makes
4105 the LHS point to global and escaped variables. */
4108 handle_lhs_call (gcall
*stmt
, tree lhs
, int flags
, vec
<ce_s
> rhsc
,
4111 auto_vec
<ce_s
> lhsc
;
4113 get_constraint_for (lhs
, &lhsc
);
4114 /* If the store is to a global decl make sure to
4115 add proper escape constraints. */
4116 lhs
= get_base_address (lhs
);
4119 && is_global_var (lhs
))
4121 struct constraint_expr tmpc
;
4122 tmpc
.var
= escaped_id
;
4125 lhsc
.safe_push (tmpc
);
/* If the call returns an argument unmodified, override the rhs
   constraints.  */
4130 if (flags
& ERF_RETURNS_ARG
4131 && (flags
& ERF_RETURN_ARG_MASK
) < gimple_call_num_args (stmt
))
4135 arg
= gimple_call_arg (stmt
, flags
& ERF_RETURN_ARG_MASK
);
4136 get_constraint_for (arg
, &rhsc
);
4137 process_all_all_constraints (lhsc
, rhsc
);
4140 else if (flags
& ERF_NOALIAS
)
4143 struct constraint_expr tmpc
;
4145 vi
= make_heapvar ("HEAP", true);
4146 /* We are marking allocated storage local, we deal with it becoming
4147 global by escaping and setting of vars_contains_escaped_heap. */
4148 DECL_EXTERNAL (vi
->decl
) = 0;
4149 vi
->is_global_var
= 0;
4150 /* If this is not a real malloc call assume the memory was
4151 initialized and thus may point to global memory. All
4152 builtin functions with the malloc attribute behave in a sane way. */
4154 || !fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
4155 make_constraint_from (vi
, nonlocal_id
);
4158 tmpc
.type
= ADDRESSOF
;
4159 rhsc
.safe_push (tmpc
);
4160 process_all_all_constraints (lhsc
, rhsc
);
4164 process_all_all_constraints (lhsc
, rhsc
);
4167 /* For non-IPA mode, generate constraints necessary for a call of a
4168 const function that returns a pointer in the statement STMT. */
4171 handle_const_call (gcall
*stmt
, vec
<ce_s
> *results
)
4173 struct constraint_expr rhsc
;
4175 bool need_uses
= false;
4177 /* Treat nested const functions the same as pure functions as far
4178 as the static chain is concerned. */
4179 if (gimple_call_chain (stmt
))
4181 varinfo_t uses
= get_call_use_vi (stmt
);
4182 make_constraint_to (uses
->id
, gimple_call_chain (stmt
));
4186 /* And if we applied NRV the address of the return slot escapes as well. */
4187 if (gimple_call_return_slot_opt_p (stmt
)
4188 && gimple_call_lhs (stmt
) != NULL_TREE
4189 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt
))))
4191 varinfo_t uses
= get_call_use_vi (stmt
);
4192 auto_vec
<ce_s
> tmpc
;
4193 get_constraint_for_address_of (gimple_call_lhs (stmt
), &tmpc
);
4194 make_constraints_to (uses
->id
, tmpc
);
4200 varinfo_t uses
= get_call_use_vi (stmt
);
4201 make_any_offset_constraints (uses
);
4202 make_transitive_closure_constraints (uses
);
4203 rhsc
.var
= uses
->id
;
4206 results
->safe_push (rhsc
);
4209 /* May return offsetted arguments. */
4210 varinfo_t tem
= NULL
;
4211 if (gimple_call_num_args (stmt
) != 0)
4213 tem
= new_var_info (NULL_TREE
, "callarg", true);
4214 tem
->is_reg_var
= true;
4216 for (k
= 0; k
< gimple_call_num_args (stmt
); ++k
)
4218 tree arg
= gimple_call_arg (stmt
, k
);
4219 auto_vec
<ce_s
> argc
;
4220 get_constraint_for_rhs (arg
, &argc
);
4221 make_constraints_to (tem
->id
, argc
);
4228 ce
.offset
= UNKNOWN_OFFSET
;
4229 results
->safe_push (ce
);
4232 /* May return addresses of globals. */
4233 rhsc
.var
= nonlocal_id
;
4235 rhsc
.type
= ADDRESSOF
;
4236 results
->safe_push (rhsc
);
4239 /* For non-IPA mode, generate constraints necessary for a call to a
4240 pure function in statement STMT. */
4243 handle_pure_call (gcall
*stmt
, vec
<ce_s
> *results
)
4245 struct constraint_expr rhsc
;
4247 varinfo_t uses
= NULL
;
4249 /* Memory reached from pointer arguments is call-used. */
4250 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
4252 tree arg
= gimple_call_arg (stmt
, i
);
4255 uses
= get_call_use_vi (stmt
);
4256 make_any_offset_constraints (uses
);
4257 make_transitive_closure_constraints (uses
);
4259 make_constraint_to (uses
->id
, arg
);
4262 /* The static chain is used as well. */
4263 if (gimple_call_chain (stmt
))
4267 uses
= get_call_use_vi (stmt
);
4268 make_any_offset_constraints (uses
);
4269 make_transitive_closure_constraints (uses
);
4271 make_constraint_to (uses
->id
, gimple_call_chain (stmt
));
4274 /* And if we applied NRV the address of the return slot. */
4275 if (gimple_call_return_slot_opt_p (stmt
)
4276 && gimple_call_lhs (stmt
) != NULL_TREE
4277 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt
))))
4281 uses
= get_call_use_vi (stmt
);
4282 make_any_offset_constraints (uses
);
4283 make_transitive_closure_constraints (uses
);
4285 auto_vec
<ce_s
> tmpc
;
4286 get_constraint_for_address_of (gimple_call_lhs (stmt
), &tmpc
);
4287 make_constraints_to (uses
->id
, tmpc
);
4290 /* Pure functions may return call-used and nonlocal memory. */
4293 rhsc
.var
= uses
->id
;
4296 results
->safe_push (rhsc
);
4298 rhsc
.var
= nonlocal_id
;
4301 results
->safe_push (rhsc
);
4305 /* Return the varinfo for the callee of CALL. */
4308 get_fi_for_callee (gcall
*call
)
4310 tree decl
, fn
= gimple_call_fn (call
);
4312 if (fn
&& TREE_CODE (fn
) == OBJ_TYPE_REF
)
4313 fn
= OBJ_TYPE_REF_EXPR (fn
);
4315 /* If we can directly resolve the function being called, do so.
4316 Otherwise, it must be some sort of indirect expression that
4317 we should still be able to handle. */
4318 decl
= gimple_call_addr_fndecl (fn
);
4320 return get_vi_for_tree (decl
);
4322 /* If the function is anything other than a SSA name pointer we have no
4323 clue and should be getting ANYFN (well, ANYTHING for now). */
4324 if (!fn
|| TREE_CODE (fn
) != SSA_NAME
)
4325 return get_varinfo (anything_id
);
4327 if (SSA_NAME_IS_DEFAULT_DEF (fn
)
4328 && (TREE_CODE (SSA_NAME_VAR (fn
)) == PARM_DECL
4329 || TREE_CODE (SSA_NAME_VAR (fn
)) == RESULT_DECL
))
4330 fn
= SSA_NAME_VAR (fn
);
4332 return get_vi_for_tree (fn
);
4335 /* Create constraints for assigning call argument ARG to the incoming parameter
4336 INDEX of function FI. */
4339 find_func_aliases_for_call_arg (varinfo_t fi
, unsigned index
, tree arg
)
4341 struct constraint_expr lhs
;
4342 lhs
= get_function_part_constraint (fi
, fi_parm_base
+ index
);
4344 auto_vec
<ce_s
, 2> rhsc
;
4345 get_constraint_for_rhs (arg
, &rhsc
);
4348 struct constraint_expr
*rhsp
;
4349 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
4350 process_constraint (new_constraint (lhs
, *rhsp
));
/* Return true if FNDECL may be part of another lto partition.  */

static bool
fndecl_maybe_in_other_partition (tree fndecl)
{
  cgraph_node *fn_node = cgraph_node::get (fndecl);
  if (fn_node == NULL)
    return true;

  return fn_node->in_other_partition;
}
4365 /* Create constraints for the builtin call T. Return true if the call
4366 was handled, otherwise false. */
4369 find_func_aliases_for_builtin_call (struct function
*fn
, gcall
*t
)
4371 tree fndecl
= gimple_call_fndecl (t
);
4372 auto_vec
<ce_s
, 2> lhsc
;
4373 auto_vec
<ce_s
, 4> rhsc
;
4376 if (gimple_call_builtin_p (t
, BUILT_IN_NORMAL
))
4377 /* ??? All builtins that are handled here need to be handled
4378 in the alias-oracle query functions explicitly! */
4379 switch (DECL_FUNCTION_CODE (fndecl
))
4381 /* All the following functions return a pointer to the same object
4382 as their first argument points to. The functions do not add
4383 to the ESCAPED solution. The functions make the first argument
4384 pointed to memory point to what the second argument pointed to
4385 memory points to. */
4386 case BUILT_IN_STRCPY
:
4387 case BUILT_IN_STRNCPY
:
4388 case BUILT_IN_BCOPY
:
4389 case BUILT_IN_MEMCPY
:
4390 case BUILT_IN_MEMMOVE
:
4391 case BUILT_IN_MEMPCPY
:
4392 case BUILT_IN_STPCPY
:
4393 case BUILT_IN_STPNCPY
:
4394 case BUILT_IN_STRCAT
:
4395 case BUILT_IN_STRNCAT
:
4396 case BUILT_IN_STRCPY_CHK
:
4397 case BUILT_IN_STRNCPY_CHK
:
4398 case BUILT_IN_MEMCPY_CHK
:
4399 case BUILT_IN_MEMMOVE_CHK
:
4400 case BUILT_IN_MEMPCPY_CHK
:
4401 case BUILT_IN_STPCPY_CHK
:
4402 case BUILT_IN_STPNCPY_CHK
:
4403 case BUILT_IN_STRCAT_CHK
:
4404 case BUILT_IN_STRNCAT_CHK
:
4405 case BUILT_IN_TM_MEMCPY
:
4406 case BUILT_IN_TM_MEMMOVE
:
4408 tree res
= gimple_call_lhs (t
);
4409 tree dest
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (fndecl
)
4410 == BUILT_IN_BCOPY
? 1 : 0));
4411 tree src
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (fndecl
)
4412 == BUILT_IN_BCOPY
? 0 : 1));
4413 if (res
!= NULL_TREE
)
4415 get_constraint_for (res
, &lhsc
);
4416 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_MEMPCPY
4417 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPCPY
4418 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPNCPY
4419 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_MEMPCPY_CHK
4420 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPCPY_CHK
4421 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPNCPY_CHK
)
4422 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &rhsc
);
4424 get_constraint_for (dest
, &rhsc
);
4425 process_all_all_constraints (lhsc
, rhsc
);
4429 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4430 get_constraint_for_ptr_offset (src
, NULL_TREE
, &rhsc
);
4433 process_all_all_constraints (lhsc
, rhsc
);
4436 case BUILT_IN_MEMSET
:
4437 case BUILT_IN_MEMSET_CHK
:
4438 case BUILT_IN_TM_MEMSET
:
4440 tree res
= gimple_call_lhs (t
);
4441 tree dest
= gimple_call_arg (t
, 0);
4444 struct constraint_expr ac
;
4445 if (res
!= NULL_TREE
)
4447 get_constraint_for (res
, &lhsc
);
4448 get_constraint_for (dest
, &rhsc
);
4449 process_all_all_constraints (lhsc
, rhsc
);
4452 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4454 if (flag_delete_null_pointer_checks
4455 && integer_zerop (gimple_call_arg (t
, 1)))
4457 ac
.type
= ADDRESSOF
;
4458 ac
.var
= nothing_id
;
4463 ac
.var
= integer_id
;
4466 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4467 process_constraint (new_constraint (*lhsp
, ac
));
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Nothing interesting happens.  */
        return true;
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX:
        {
          tree ptr = gimple_call_lhs (t);
          if (ptr == NULL_TREE)
            return true;
          get_constraint_for (ptr, &lhsc);
          varinfo_t vi = make_heapvar ("HEAP", true);
          /* Alloca storage is never global.  To exempt it from escaped
             handling make it a non-heap var.  */
          DECL_EXTERNAL (vi->decl) = 0;
          vi->is_global_var = 0;
          vi->is_heap_var = 0;
          struct constraint_expr tmpc;
          tmpc.var = vi->id;
          tmpc.offset = 0;
          tmpc.type = ADDRESSOF;
          rhsc.safe_push (tmpc);
          process_all_all_constraints (lhsc, rhsc);
          return true;
        }
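      /* Illustrative note (added commentary): for  p = __builtin_alloca (n)
         a fresh heap variable is created and the constraint is roughly
           p = &HEAP
         with the HEAP variable marked neither global nor a real heap var,
         so the alloca'd storage does not escape by itself.  */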
4496 case BUILT_IN_POSIX_MEMALIGN
:
4498 tree ptrptr
= gimple_call_arg (t
, 0);
4499 get_constraint_for (ptrptr
, &lhsc
);
4501 varinfo_t vi
= make_heapvar ("HEAP", true);
4502 /* We are marking allocated storage local, we deal with it becoming
4503 global by escaping and setting of vars_contains_escaped_heap. */
4504 DECL_EXTERNAL (vi
->decl
) = 0;
4505 vi
->is_global_var
= 0;
4506 struct constraint_expr tmpc
;
4509 tmpc
.type
= ADDRESSOF
;
4510 rhsc
.safe_push (tmpc
);
4511 process_all_all_constraints (lhsc
, rhsc
);
4514 case BUILT_IN_ASSUME_ALIGNED
:
4516 tree res
= gimple_call_lhs (t
);
4517 tree dest
= gimple_call_arg (t
, 0);
4518 if (res
!= NULL_TREE
)
4520 get_constraint_for (res
, &lhsc
);
4521 get_constraint_for (dest
, &rhsc
);
4522 process_all_all_constraints (lhsc
, rhsc
);
4526 /* All the following functions do not return pointers, do not
4527 modify the points-to sets of memory reachable from their
4528 arguments and do not add to the ESCAPED solution. */
4529 case BUILT_IN_SINCOS
:
4530 case BUILT_IN_SINCOSF
:
4531 case BUILT_IN_SINCOSL
:
4532 case BUILT_IN_FREXP
:
4533 case BUILT_IN_FREXPF
:
4534 case BUILT_IN_FREXPL
:
4535 case BUILT_IN_GAMMA_R
:
4536 case BUILT_IN_GAMMAF_R
:
4537 case BUILT_IN_GAMMAL_R
:
4538 case BUILT_IN_LGAMMA_R
:
4539 case BUILT_IN_LGAMMAF_R
:
4540 case BUILT_IN_LGAMMAL_R
:
4542 case BUILT_IN_MODFF
:
4543 case BUILT_IN_MODFL
:
4544 case BUILT_IN_REMQUO
:
4545 case BUILT_IN_REMQUOF
:
4546 case BUILT_IN_REMQUOL
:
4549 case BUILT_IN_STRDUP
:
4550 case BUILT_IN_STRNDUP
:
4551 case BUILT_IN_REALLOC
:
4552 if (gimple_call_lhs (t
))
4554 handle_lhs_call (t
, gimple_call_lhs (t
),
4555 gimple_call_return_flags (t
) | ERF_NOALIAS
,
4557 get_constraint_for_ptr_offset (gimple_call_lhs (t
),
4559 get_constraint_for_ptr_offset (gimple_call_arg (t
, 0),
4563 process_all_all_constraints (lhsc
, rhsc
);
4566 /* For realloc the resulting pointer can be equal to the
4567 argument as well. But only doing this wouldn't be
4568 correct because with ptr == 0 realloc behaves like malloc. */
4569 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_REALLOC
)
4571 get_constraint_for (gimple_call_lhs (t
), &lhsc
);
4572 get_constraint_for (gimple_call_arg (t
, 0), &rhsc
);
4573 process_all_all_constraints (lhsc
, rhsc
);
4578 /* String / character search functions return a pointer into the
4579 source string or NULL. */
4580 case BUILT_IN_INDEX
:
4581 case BUILT_IN_STRCHR
:
4582 case BUILT_IN_STRRCHR
:
4583 case BUILT_IN_MEMCHR
:
4584 case BUILT_IN_STRSTR
:
4585 case BUILT_IN_STRPBRK
:
4586 if (gimple_call_lhs (t
))
4588 tree src
= gimple_call_arg (t
, 0);
4589 get_constraint_for_ptr_offset (src
, NULL_TREE
, &rhsc
);
4590 constraint_expr nul
;
4591 nul
.var
= nothing_id
;
4593 nul
.type
= ADDRESSOF
;
4594 rhsc
.safe_push (nul
);
4595 get_constraint_for (gimple_call_lhs (t
), &lhsc
);
4596 process_all_all_constraints (lhsc
, rhsc
);
4599 /* Pure functions that return something not based on any object and
4600 that use the memory pointed to by their arguments (but not
4602 case BUILT_IN_STRCMP
:
4603 case BUILT_IN_STRCMP_EQ
:
4604 case BUILT_IN_STRNCMP
:
4605 case BUILT_IN_STRNCMP_EQ
:
4606 case BUILT_IN_STRCASECMP
:
4607 case BUILT_IN_STRNCASECMP
:
4608 case BUILT_IN_MEMCMP
:
4610 case BUILT_IN_STRSPN
:
4611 case BUILT_IN_STRCSPN
:
4613 varinfo_t uses
= get_call_use_vi (t
);
4614 make_any_offset_constraints (uses
);
4615 make_constraint_to (uses
->id
, gimple_call_arg (t
, 0));
4616 make_constraint_to (uses
->id
, gimple_call_arg (t
, 1));
4617 /* No constraints are necessary for the return value. */
4620 case BUILT_IN_STRLEN
:
4622 varinfo_t uses
= get_call_use_vi (t
);
4623 make_any_offset_constraints (uses
);
4624 make_constraint_to (uses
->id
, gimple_call_arg (t
, 0));
4625 /* No constraints are necessary for the return value. */
4628 case BUILT_IN_OBJECT_SIZE
:
4629 case BUILT_IN_CONSTANT_P
:
4631 /* No constraints are necessary for the return value or the
4635 /* Trampolines are special - they set up passing the static
4637 case BUILT_IN_INIT_TRAMPOLINE
:
4639 tree tramp
= gimple_call_arg (t
, 0);
4640 tree nfunc
= gimple_call_arg (t
, 1);
4641 tree frame
= gimple_call_arg (t
, 2);
4643 struct constraint_expr lhs
, *rhsp
;
4646 varinfo_t nfi
= NULL
;
4647 gcc_assert (TREE_CODE (nfunc
) == ADDR_EXPR
);
4648 nfi
= lookup_vi_for_tree (TREE_OPERAND (nfunc
, 0));
4651 lhs
= get_function_part_constraint (nfi
, fi_static_chain
);
4652 get_constraint_for (frame
, &rhsc
);
4653 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4654 process_constraint (new_constraint (lhs
, *rhsp
));
4657 /* Make the frame point to the function for
4658 the trampoline adjustment call. */
4659 get_constraint_for (tramp
, &lhsc
);
4661 get_constraint_for (nfunc
, &rhsc
);
4662 process_all_all_constraints (lhsc
, rhsc
);
4667 /* Else fallthru to generic handling which will let
4668 the frame escape. */
4671 case BUILT_IN_ADJUST_TRAMPOLINE
:
4673 tree tramp
= gimple_call_arg (t
, 0);
4674 tree res
= gimple_call_lhs (t
);
4675 if (in_ipa_mode
&& res
)
4677 get_constraint_for (res
, &lhsc
);
4678 get_constraint_for (tramp
, &rhsc
);
4680 process_all_all_constraints (lhsc
, rhsc
);
4684 CASE_BUILT_IN_TM_STORE (1):
4685 CASE_BUILT_IN_TM_STORE (2):
4686 CASE_BUILT_IN_TM_STORE (4):
4687 CASE_BUILT_IN_TM_STORE (8):
4688 CASE_BUILT_IN_TM_STORE (FLOAT
):
4689 CASE_BUILT_IN_TM_STORE (DOUBLE
):
4690 CASE_BUILT_IN_TM_STORE (LDOUBLE
):
4691 CASE_BUILT_IN_TM_STORE (M64
):
4692 CASE_BUILT_IN_TM_STORE (M128
):
4693 CASE_BUILT_IN_TM_STORE (M256
):
4695 tree addr
= gimple_call_arg (t
, 0);
4696 tree src
= gimple_call_arg (t
, 1);
4698 get_constraint_for (addr
, &lhsc
);
4700 get_constraint_for (src
, &rhsc
);
4701 process_all_all_constraints (lhsc
, rhsc
);
4704 CASE_BUILT_IN_TM_LOAD (1):
4705 CASE_BUILT_IN_TM_LOAD (2):
4706 CASE_BUILT_IN_TM_LOAD (4):
4707 CASE_BUILT_IN_TM_LOAD (8):
4708 CASE_BUILT_IN_TM_LOAD (FLOAT
):
4709 CASE_BUILT_IN_TM_LOAD (DOUBLE
):
4710 CASE_BUILT_IN_TM_LOAD (LDOUBLE
):
4711 CASE_BUILT_IN_TM_LOAD (M64
):
4712 CASE_BUILT_IN_TM_LOAD (M128
):
4713 CASE_BUILT_IN_TM_LOAD (M256
):
4715 tree dest
= gimple_call_lhs (t
);
4716 tree addr
= gimple_call_arg (t
, 0);
4718 get_constraint_for (dest
, &lhsc
);
4719 get_constraint_for (addr
, &rhsc
);
4721 process_all_all_constraints (lhsc
, rhsc
);
4724 /* Variadic argument handling needs to be handled in IPA
4726 case BUILT_IN_VA_START
:
4728 tree valist
= gimple_call_arg (t
, 0);
4729 struct constraint_expr rhs
, *lhsp
;
4731 get_constraint_for_ptr_offset (valist
, NULL_TREE
, &lhsc
);
4733 /* The va_list gets access to pointers in variadic
4734 arguments. Which we know in the case of IPA analysis
4735 and otherwise are just all nonlocal variables. */
4738 fi
= lookup_vi_for_tree (fn
->decl
);
4739 rhs
= get_function_part_constraint (fi
, ~0);
4740 rhs
.type
= ADDRESSOF
;
4744 rhs
.var
= nonlocal_id
;
4745 rhs
.type
= ADDRESSOF
;
4748 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4749 process_constraint (new_constraint (*lhsp
, rhs
));
4750 /* va_list is clobbered. */
4751 make_constraint_to (get_call_clobber_vi (t
)->id
, valist
);
4754 /* va_end doesn't have any effect that matters. */
4755 case BUILT_IN_VA_END
:
4757 /* Alternate return. Simply give up for now. */
4758 case BUILT_IN_RETURN
:
4762 || !(fi
= get_vi_for_tree (fn
->decl
)))
4763 make_constraint_from (get_varinfo (escaped_id
), anything_id
);
4764 else if (in_ipa_mode
4767 struct constraint_expr lhs
, rhs
;
4768 lhs
= get_function_part_constraint (fi
, fi_result
);
4769 rhs
.var
= anything_id
;
4772 process_constraint (new_constraint (lhs
, rhs
));
4776 case BUILT_IN_GOMP_PARALLEL
:
4777 case BUILT_IN_GOACC_PARALLEL
:
4781 unsigned int fnpos
, argpos
;
4782 switch (DECL_FUNCTION_CODE (fndecl
))
4784 case BUILT_IN_GOMP_PARALLEL
:
4785 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
4789 case BUILT_IN_GOACC_PARALLEL
:
4790 /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
4791 sizes, kinds, ...). */
4799 tree fnarg
= gimple_call_arg (t
, fnpos
);
4800 gcc_assert (TREE_CODE (fnarg
) == ADDR_EXPR
);
4801 tree fndecl
= TREE_OPERAND (fnarg
, 0);
4802 if (fndecl_maybe_in_other_partition (fndecl
))
4803 /* Fallthru to general call handling. */
4806 tree arg
= gimple_call_arg (t
, argpos
);
4808 varinfo_t fi
= get_vi_for_tree (fndecl
);
4809 find_func_aliases_for_call_arg (fi
, 0, arg
);
4812 /* Else fallthru to generic call handling. */
4815 /* printf-style functions may have hooks to set pointers to
4816 point to somewhere into the generated string. Leave them
4817 for a later exercise... */
4819 /* Fallthru to general call handling. */;
/* Create constraints for the call T.  */

static void
find_func_aliases_for_call (struct function *fn, gcall *t)
{
  tree fndecl = gimple_call_fndecl (t);
  varinfo_t fi;
4833 if (fndecl
!= NULL_TREE
4834 && fndecl_built_in_p (fndecl
)
4835 && find_func_aliases_for_builtin_call (fn
, t
))
4838 fi
= get_fi_for_callee (t
);
4840 || (fi
->decl
&& fndecl
&& !fi
->is_fn_info
))
4842 auto_vec
<ce_s
, 16> rhsc
;
4843 int flags
= gimple_call_flags (t
);
4845 /* Const functions can return their arguments and addresses
4846 of global memory but not of escaped memory. */
4847 if (flags
& (ECF_CONST
|ECF_NOVOPS
))
4849 if (gimple_call_lhs (t
))
4850 handle_const_call (t
, &rhsc
);
4852 /* Pure functions can return addresses in and of memory
4853 reachable from their arguments, but they are not an escape
4854 point for reachable memory of their arguments. */
4855 else if (flags
& (ECF_PURE
|ECF_LOOPING_CONST_OR_PURE
))
4856 handle_pure_call (t
, &rhsc
);
4858 handle_rhs_call (t
, &rhsc
);
4859 if (gimple_call_lhs (t
))
4860 handle_lhs_call (t
, gimple_call_lhs (t
),
4861 gimple_call_return_flags (t
), rhsc
, fndecl
);
4865 auto_vec
<ce_s
, 2> rhsc
;
4869 /* Assign all the passed arguments to the appropriate incoming
4870 parameters of the function. */
4871 for (j
= 0; j
< gimple_call_num_args (t
); j
++)
4873 tree arg
= gimple_call_arg (t
, j
);
4874 find_func_aliases_for_call_arg (fi
, j
, arg
);
4877 /* If we are returning a value, assign it to the result. */
4878 lhsop
= gimple_call_lhs (t
);
4881 auto_vec
<ce_s
, 2> lhsc
;
4882 struct constraint_expr rhs
;
4883 struct constraint_expr
*lhsp
;
4884 bool aggr_p
= aggregate_value_p (lhsop
, gimple_call_fntype (t
));
4886 get_constraint_for (lhsop
, &lhsc
);
4887 rhs
= get_function_part_constraint (fi
, fi_result
);
4890 auto_vec
<ce_s
, 2> tem
;
4891 tem
.quick_push (rhs
);
4893 gcc_checking_assert (tem
.length () == 1);
4896 FOR_EACH_VEC_ELT (lhsc
, j
, lhsp
)
4897 process_constraint (new_constraint (*lhsp
, rhs
));
4899 /* If we pass the result decl by reference, honor that. */
4902 struct constraint_expr lhs
;
4903 struct constraint_expr
*rhsp
;
4905 get_constraint_for_address_of (lhsop
, &rhsc
);
4906 lhs
= get_function_part_constraint (fi
, fi_result
);
4907 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
4908 process_constraint (new_constraint (lhs
, *rhsp
));
4913 /* If we use a static chain, pass it along. */
4914 if (gimple_call_chain (t
))
4916 struct constraint_expr lhs
;
4917 struct constraint_expr
*rhsp
;
4919 get_constraint_for (gimple_call_chain (t
), &rhsc
);
4920 lhs
= get_function_part_constraint (fi
, fi_static_chain
);
4921 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
4922 process_constraint (new_constraint (lhs
, *rhsp
));
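/* Illustrative note (added commentary): in IPA mode a direct call
     x = foo (a, b)
   with function info for foo generates, roughly,
     foo.arg0 = a
     foo.arg1 = b
     x = foo.result
   and, if foo uses a static chain,  foo.chain = <chain value>.  Without
   usable function info the call falls back to the const/pure/general
   handling above.  */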
/* Walk statement T setting up aliasing constraints according to the
   references found in T.  This function is the main part of the
   constraint builder.  AI points to auxiliary alias information used
   when building alias sets and computing alias grouping heuristics.  */

static void
find_func_aliases (struct function *fn, gimple *origt)
{
  gimple *t = origt;
  auto_vec<ce_s, 16> lhsc;
  auto_vec<ce_s, 16> rhsc;
  varinfo_t fi;
4940 /* Now build constraints expressions. */
4941 if (gimple_code (t
) == GIMPLE_PHI
)
4943 /* For a phi node, assign all the arguments to
4945 get_constraint_for (gimple_phi_result (t
), &lhsc
);
4946 for (unsigned i
= 0; i
< gimple_phi_num_args (t
); i
++)
4948 get_constraint_for_rhs (gimple_phi_arg_def (t
, i
), &rhsc
);
4949 process_all_all_constraints (lhsc
, rhsc
);
4953 /* In IPA mode, we need to generate constraints to pass call
4954 arguments through their calls. There are two cases,
4955 either a GIMPLE_CALL returning a value, or just a plain
4956 GIMPLE_CALL when we are not.
4958 In non-ipa mode, we need to generate constraints for each
4959 pointer passed by address. */
4960 else if (is_gimple_call (t
))
4961 find_func_aliases_for_call (fn
, as_a
<gcall
*> (t
));
4963 /* Otherwise, just a regular assignment statement. Only care about
4964 operations with pointer result, others are dealt with as escape
4965 points if they have pointer operands. */
4966 else if (is_gimple_assign (t
))
4968 /* Otherwise, just a regular assignment statement. */
4969 tree lhsop
= gimple_assign_lhs (t
);
4970 tree rhsop
= (gimple_num_ops (t
) == 2) ? gimple_assign_rhs1 (t
) : NULL
;
4972 if (rhsop
&& TREE_CLOBBER_P (rhsop
))
4973 /* Ignore clobbers, they don't actually store anything into
4976 else if (rhsop
&& AGGREGATE_TYPE_P (TREE_TYPE (lhsop
)))
4977 do_structure_copy (lhsop
, rhsop
);
4980 enum tree_code code
= gimple_assign_rhs_code (t
);
4982 get_constraint_for (lhsop
, &lhsc
);
4984 if (code
== POINTER_PLUS_EXPR
)
4985 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t
),
4986 gimple_assign_rhs2 (t
), &rhsc
);
4987 else if (code
== POINTER_DIFF_EXPR
)
4988 /* The result is not a pointer (part). */
4990 else if (code
== BIT_AND_EXPR
4991 && TREE_CODE (gimple_assign_rhs2 (t
)) == INTEGER_CST
)
4993 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4994 the pointer. Handle it by offsetting it by UNKNOWN. */
4995 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t
),
4998 else if (code
== TRUNC_DIV_EXPR
4999 || code
== CEIL_DIV_EXPR
5000 || code
== FLOOR_DIV_EXPR
5001 || code
== ROUND_DIV_EXPR
5002 || code
== EXACT_DIV_EXPR
5003 || code
== TRUNC_MOD_EXPR
5004 || code
== CEIL_MOD_EXPR
5005 || code
== FLOOR_MOD_EXPR
5006 || code
== ROUND_MOD_EXPR
)
5007 /* Division and modulo transfer the pointer from the LHS. */
5008 get_constraint_for_rhs (gimple_assign_rhs1 (t
), &rhsc
);
5009 else if ((CONVERT_EXPR_CODE_P (code
)
5010 && !(POINTER_TYPE_P (gimple_expr_type (t
))
5011 && !POINTER_TYPE_P (TREE_TYPE (rhsop
))))
5012 || gimple_assign_single_p (t
))
5013 get_constraint_for_rhs (rhsop
, &rhsc
);
5014 else if (code
== COND_EXPR
)
5016 /* The result is a merge of both COND_EXPR arms. */
5017 auto_vec
<ce_s
, 2> tmp
;
5018 struct constraint_expr
*rhsp
;
5020 get_constraint_for_rhs (gimple_assign_rhs2 (t
), &rhsc
);
5021 get_constraint_for_rhs (gimple_assign_rhs3 (t
), &tmp
);
5022 FOR_EACH_VEC_ELT (tmp
, i
, rhsp
)
5023 rhsc
.safe_push (*rhsp
);
5025 else if (truth_value_p (code
))
5026 /* Truth value results are not pointer (parts). Or at least
5027 very unreasonable obfuscation of a part. */
5031 /* All other operations are merges. */
5032 auto_vec
<ce_s
, 4> tmp
;
5033 struct constraint_expr
*rhsp
;
5035 get_constraint_for_rhs (gimple_assign_rhs1 (t
), &rhsc
);
5036 for (i
= 2; i
< gimple_num_ops (t
); ++i
)
5038 get_constraint_for_rhs (gimple_op (t
, i
), &tmp
);
5039 FOR_EACH_VEC_ELT (tmp
, j
, rhsp
)
5040 rhsc
.safe_push (*rhsp
);
5044 process_all_all_constraints (lhsc
, rhsc
);
5046 /* If there is a store to a global variable the rhs escapes. */
5047 if ((lhsop
= get_base_address (lhsop
)) != NULL_TREE
5050 varinfo_t vi
= get_vi_for_tree (lhsop
);
5051 if ((! in_ipa_mode
&& vi
->is_global_var
)
5052 || vi
->is_ipa_escape_point
)
5053 make_escape_constraint (rhsop
);
5056 /* Handle escapes through return. */
5057 else if (gimple_code (t
) == GIMPLE_RETURN
5058 && gimple_return_retval (as_a
<greturn
*> (t
)) != NULL_TREE
)
5060 greturn
*return_stmt
= as_a
<greturn
*> (t
);
5063 && SSA_VAR_P (gimple_return_retval (return_stmt
)))
5065 /* We handle simple returns by post-processing the solutions. */
5068 if (!(fi
= get_vi_for_tree (fn
->decl
)))
5069 make_escape_constraint (gimple_return_retval (return_stmt
));
5070 else if (in_ipa_mode
)
5072 struct constraint_expr lhs
;
5073 struct constraint_expr
*rhsp
;
5076 lhs
= get_function_part_constraint (fi
, fi_result
);
5077 get_constraint_for_rhs (gimple_return_retval (return_stmt
), &rhsc
);
5078 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
5079 process_constraint (new_constraint (lhs
, *rhsp
));
5082 /* Handle asms conservatively by adding escape constraints to everything. */
5083 else if (gasm
*asm_stmt
= dyn_cast
<gasm
*> (t
))
5085 unsigned i
, noutputs
;
5086 const char **oconstraints
;
5087 const char *constraint
;
5088 bool allows_mem
, allows_reg
, is_inout
;
5090 noutputs
= gimple_asm_noutputs (asm_stmt
);
5091 oconstraints
= XALLOCAVEC (const char *, noutputs
);
5093 for (i
= 0; i
< noutputs
; ++i
)
5095 tree link
= gimple_asm_output_op (asm_stmt
, i
);
5096 tree op
= TREE_VALUE (link
);
5098 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
5099 oconstraints
[i
] = constraint
;
5100 parse_output_constraint (&constraint
, i
, 0, 0, &allows_mem
,
5101 &allows_reg
, &is_inout
);
5103 /* A memory constraint makes the address of the operand escape. */
5104 if (!allows_reg
&& allows_mem
)
5105 make_escape_constraint (build_fold_addr_expr (op
));
5107 /* The asm may read global memory, so outputs may point to
5108 any global memory. */
5111 auto_vec
<ce_s
, 2> lhsc
;
5112 struct constraint_expr rhsc
, *lhsp
;
5114 get_constraint_for (op
, &lhsc
);
5115 rhsc
.var
= nonlocal_id
;
5118 FOR_EACH_VEC_ELT (lhsc
, j
, lhsp
)
5119 process_constraint (new_constraint (*lhsp
, rhsc
));
5122 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
5124 tree link
= gimple_asm_input_op (asm_stmt
, i
);
5125 tree op
= TREE_VALUE (link
);
5127 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
5129 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0, oconstraints
,
5130 &allows_mem
, &allows_reg
);
5132 /* A memory constraint makes the address of the operand escape. */
5133 if (!allows_reg
&& allows_mem
)
5134 make_escape_constraint (build_fold_addr_expr (op
));
5135 /* Strictly we'd only need the constraint to ESCAPED if
5136 the asm clobbers memory, otherwise using something
5137 along the lines of per-call clobbers/uses would be enough. */
5139 make_escape_constraint (op
);
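/* Illustrative note (added commentary): for a simple pointer assignment
     p = q + CST
   the POINTER_PLUS_EXPR handling above derives p's solution from q
   (with a field offset where that is representable), and for a PHI node
     p = PHI <q, r>
   each argument is copied into the result, i.e.  p = q  and  p = r.
   Stores to global variables make the stored value escape, and asms are
   handled conservatively by escaping their memory operands.  */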
/* Create a constraint adding to the clobber set of FI the memory
   pointed to by PTR.  */

static void
process_ipa_clobber (varinfo_t fi, tree ptr)
{
  vec<ce_s> ptrc = vNULL;
  struct constraint_expr *c, lhs;
  unsigned i;
  get_constraint_for_rhs (ptr, &ptrc);
  lhs = get_function_part_constraint (fi, fi_clobbers);
  FOR_EACH_VEC_ELT (ptrc, i, c)
    process_constraint (new_constraint (lhs, *c));
  ptrc.release ();
}
/* Walk statement T setting up clobber and use constraints according to the
   references found in T.  This function is a main part of the
   IPA constraint builder.  */

static void
find_func_clobbers (struct function *fn, gimple *origt)
{
  gimple *t = origt;
  auto_vec<ce_s, 16> lhsc;
  auto_vec<ce_s, 16> rhsc;
  varinfo_t fi;

  /* Add constraints for clobbered/used in IPA mode.
     We are not interested in what automatic variables are clobbered
     or used as we only use the information in the caller to which
     they do not escape.  */
  gcc_assert (in_ipa_mode);
5179 /* If the stmt refers to memory in any way it better had a VUSE. */
5180 if (gimple_vuse (t
) == NULL_TREE
)
5183 /* We'd better have function information for the current function. */
5184 fi
= lookup_vi_for_tree (fn
->decl
);
5185 gcc_assert (fi
!= NULL
);
5187 /* Account for stores in assignments and calls. */
5188 if (gimple_vdef (t
) != NULL_TREE
5189 && gimple_has_lhs (t
))
5191 tree lhs
= gimple_get_lhs (t
);
5193 while (handled_component_p (tem
))
5194 tem
= TREE_OPERAND (tem
, 0);
5196 && !auto_var_in_fn_p (tem
, fn
->decl
))
5197 || INDIRECT_REF_P (tem
)
5198 || (TREE_CODE (tem
) == MEM_REF
5199 && !(TREE_CODE (TREE_OPERAND (tem
, 0)) == ADDR_EXPR
5201 (TREE_OPERAND (TREE_OPERAND (tem
, 0), 0), fn
->decl
))))
5203 struct constraint_expr lhsc
, *rhsp
;
5205 lhsc
= get_function_part_constraint (fi
, fi_clobbers
);
5206 get_constraint_for_address_of (lhs
, &rhsc
);
5207 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
5208 process_constraint (new_constraint (lhsc
, *rhsp
));
5213 /* Account for uses in assigments and returns. */
5214 if (gimple_assign_single_p (t
)
5215 || (gimple_code (t
) == GIMPLE_RETURN
5216 && gimple_return_retval (as_a
<greturn
*> (t
)) != NULL_TREE
))
5218 tree rhs
= (gimple_assign_single_p (t
)
5219 ? gimple_assign_rhs1 (t
)
5220 : gimple_return_retval (as_a
<greturn
*> (t
)));
5222 while (handled_component_p (tem
))
5223 tem
= TREE_OPERAND (tem
, 0);
5225 && !auto_var_in_fn_p (tem
, fn
->decl
))
5226 || INDIRECT_REF_P (tem
)
5227 || (TREE_CODE (tem
) == MEM_REF
5228 && !(TREE_CODE (TREE_OPERAND (tem
, 0)) == ADDR_EXPR
5230 (TREE_OPERAND (TREE_OPERAND (tem
, 0), 0), fn
->decl
))))
5232 struct constraint_expr lhs
, *rhsp
;
5234 lhs
= get_function_part_constraint (fi
, fi_uses
);
5235 get_constraint_for_address_of (rhs
, &rhsc
);
5236 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
5237 process_constraint (new_constraint (lhs
, *rhsp
));
5242 if (gcall
*call_stmt
= dyn_cast
<gcall
*> (t
))
5244 varinfo_t cfi
= NULL
;
5245 tree decl
= gimple_call_fndecl (t
);
5246 struct constraint_expr lhs
, rhs
;
5249 /* For builtins we do not have separate function info. For those
5250 we do not generate escapes for we have to generate clobbers/uses. */
5251 if (gimple_call_builtin_p (t
, BUILT_IN_NORMAL
))
5252 switch (DECL_FUNCTION_CODE (decl
))
5254 /* The following functions use and clobber memory pointed to
5255 by their arguments. */
5256 case BUILT_IN_STRCPY
:
5257 case BUILT_IN_STRNCPY
:
5258 case BUILT_IN_BCOPY
:
5259 case BUILT_IN_MEMCPY
:
5260 case BUILT_IN_MEMMOVE
:
5261 case BUILT_IN_MEMPCPY
:
5262 case BUILT_IN_STPCPY
:
5263 case BUILT_IN_STPNCPY
:
5264 case BUILT_IN_STRCAT
:
5265 case BUILT_IN_STRNCAT
:
5266 case BUILT_IN_STRCPY_CHK
:
5267 case BUILT_IN_STRNCPY_CHK
:
5268 case BUILT_IN_MEMCPY_CHK
:
5269 case BUILT_IN_MEMMOVE_CHK
:
5270 case BUILT_IN_MEMPCPY_CHK
:
5271 case BUILT_IN_STPCPY_CHK
:
5272 case BUILT_IN_STPNCPY_CHK
:
5273 case BUILT_IN_STRCAT_CHK
:
5274 case BUILT_IN_STRNCAT_CHK
:
5276 tree dest
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (decl
)
5277 == BUILT_IN_BCOPY
? 1 : 0));
5278 tree src
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (decl
)
5279 == BUILT_IN_BCOPY
? 0 : 1));
5281 struct constraint_expr
*rhsp
, *lhsp
;
5282 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
5283 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
5284 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
5285 process_constraint (new_constraint (lhs
, *lhsp
));
5286 get_constraint_for_ptr_offset (src
, NULL_TREE
, &rhsc
);
5287 lhs
= get_function_part_constraint (fi
, fi_uses
);
5288 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
5289 process_constraint (new_constraint (lhs
, *rhsp
));
5292 /* The following function clobbers memory pointed to by
5294 case BUILT_IN_MEMSET
:
5295 case BUILT_IN_MEMSET_CHK
:
5296 case BUILT_IN_POSIX_MEMALIGN
:
5298 tree dest
= gimple_call_arg (t
, 0);
5301 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
5302 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
5303 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
5304 process_constraint (new_constraint (lhs
, *lhsp
));
5307 /* The following functions clobber their second and third
5309 case BUILT_IN_SINCOS
:
5310 case BUILT_IN_SINCOSF
:
5311 case BUILT_IN_SINCOSL
:
5313 process_ipa_clobber (fi
, gimple_call_arg (t
, 1));
5314 process_ipa_clobber (fi
, gimple_call_arg (t
, 2));
5317 /* The following functions clobber their second argument. */
5318 case BUILT_IN_FREXP
:
5319 case BUILT_IN_FREXPF
:
5320 case BUILT_IN_FREXPL
:
5321 case BUILT_IN_LGAMMA_R
:
5322 case BUILT_IN_LGAMMAF_R
:
5323 case BUILT_IN_LGAMMAL_R
:
5324 case BUILT_IN_GAMMA_R
:
5325 case BUILT_IN_GAMMAF_R
:
5326 case BUILT_IN_GAMMAL_R
:
5328 case BUILT_IN_MODFF
:
5329 case BUILT_IN_MODFL
:
5331 process_ipa_clobber (fi
, gimple_call_arg (t
, 1));
5334 /* The following functions clobber their third argument. */
5335 case BUILT_IN_REMQUO
:
5336 case BUILT_IN_REMQUOF
:
5337 case BUILT_IN_REMQUOL
:
5339 process_ipa_clobber (fi
, gimple_call_arg (t
, 2));
5342 /* The following functions neither read nor clobber memory. */
5343 case BUILT_IN_ASSUME_ALIGNED
:
5346 /* Trampolines are of no interest to us. */
5347 case BUILT_IN_INIT_TRAMPOLINE
:
5348 case BUILT_IN_ADJUST_TRAMPOLINE
:
5350 case BUILT_IN_VA_START
:
5351 case BUILT_IN_VA_END
:
5353 case BUILT_IN_GOMP_PARALLEL
:
5354 case BUILT_IN_GOACC_PARALLEL
:
5356 unsigned int fnpos
, argpos
;
5357 unsigned int implicit_use_args
[2];
5358 unsigned int num_implicit_use_args
= 0;
5359 switch (DECL_FUNCTION_CODE (decl
))
5361 case BUILT_IN_GOMP_PARALLEL
:
5362 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
5366 case BUILT_IN_GOACC_PARALLEL
:
5367 /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
5368 sizes, kinds, ...). */
5371 implicit_use_args
[num_implicit_use_args
++] = 4;
5372 implicit_use_args
[num_implicit_use_args
++] = 5;
5378 tree fnarg
= gimple_call_arg (t
, fnpos
);
5379 gcc_assert (TREE_CODE (fnarg
) == ADDR_EXPR
);
5380 tree fndecl
= TREE_OPERAND (fnarg
, 0);
5381 if (fndecl_maybe_in_other_partition (fndecl
))
5382 /* Fallthru to general call handling. */
5385 varinfo_t cfi
= get_vi_for_tree (fndecl
);
5387 tree arg
= gimple_call_arg (t
, argpos
);
5389 /* Parameter passed by value is used. */
5390 lhs
= get_function_part_constraint (fi
, fi_uses
);
5391 struct constraint_expr
*rhsp
;
5392 get_constraint_for (arg
, &rhsc
);
5393 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
5394 process_constraint (new_constraint (lhs
, *rhsp
));
5397 /* Handle parameters used by the call, but not used in cfi, as
5398 implicitly used by cfi. */
5399 lhs
= get_function_part_constraint (cfi
, fi_uses
);
5400 for (unsigned i
= 0; i
< num_implicit_use_args
; ++i
)
5402 tree arg
= gimple_call_arg (t
, implicit_use_args
[i
]);
5403 get_constraint_for (arg
, &rhsc
);
5404 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
5405 process_constraint (new_constraint (lhs
, *rhsp
));
5409 /* The caller clobbers what the callee does. */
5410 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
5411 rhs
= get_function_part_constraint (cfi
, fi_clobbers
);
5412 process_constraint (new_constraint (lhs
, rhs
));
5414 /* The caller uses what the callee does. */
5415 lhs
= get_function_part_constraint (fi
, fi_uses
);
5416 rhs
= get_function_part_constraint (cfi
, fi_uses
);
5417 process_constraint (new_constraint (lhs
, rhs
));
5421 /* printf-style functions may have hooks to set pointers to
5422 point to somewhere into the generated string. Leave them
5423 for a later exercise... */
5425 /* Fallthru to general call handling. */;
5428 /* Parameters passed by value are used. */
5429 lhs
= get_function_part_constraint (fi
, fi_uses
);
5430 for (i
= 0; i
< gimple_call_num_args (t
); i
++)
5432 struct constraint_expr
*rhsp
;
5433 tree arg
= gimple_call_arg (t
, i
);
5435 if (TREE_CODE (arg
) == SSA_NAME
5436 || is_gimple_min_invariant (arg
))
5439 get_constraint_for_address_of (arg
, &rhsc
);
5440 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
5441 process_constraint (new_constraint (lhs
, *rhsp
));
5445 /* Build constraints for propagating clobbers/uses along the
5447 cfi
= get_fi_for_callee (call_stmt
);
5448 if (cfi
->id
== anything_id
)
5450 if (gimple_vdef (t
))
5451 make_constraint_from (first_vi_for_offset (fi
, fi_clobbers
),
5453 make_constraint_from (first_vi_for_offset (fi
, fi_uses
),
5458 /* For callees without function info (that's external functions),
5459 ESCAPED is clobbered and used. */
5461 && TREE_CODE (cfi
->decl
) == FUNCTION_DECL
5462 && !cfi
->is_fn_info
)
5466 if (gimple_vdef (t
))
5467 make_copy_constraint (first_vi_for_offset (fi
, fi_clobbers
),
5469 make_copy_constraint (first_vi_for_offset (fi
, fi_uses
), escaped_id
);
5471 /* Also honor the call statement use/clobber info. */
5472 if ((vi
= lookup_call_clobber_vi (call_stmt
)) != NULL
)
5473 make_copy_constraint (first_vi_for_offset (fi
, fi_clobbers
),
5475 if ((vi
= lookup_call_use_vi (call_stmt
)) != NULL
)
5476 make_copy_constraint (first_vi_for_offset (fi
, fi_uses
),
5481 /* Otherwise the caller clobbers and uses what the callee does.
5482 ??? This should use a new complex constraint that filters
5483 local variables of the callee. */
5484 if (gimple_vdef (t
))
5486 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
5487 rhs
= get_function_part_constraint (cfi
, fi_clobbers
);
5488 process_constraint (new_constraint (lhs
, rhs
));
5490 lhs
= get_function_part_constraint (fi
, fi_uses
);
5491 rhs
= get_function_part_constraint (cfi
, fi_uses
);
5492 process_constraint (new_constraint (lhs
, rhs
));
5494 else if (gimple_code (t
) == GIMPLE_ASM
)
5496 /* ??? Ick. We can do better. */
5497 if (gimple_vdef (t
))
5498 make_constraint_from (first_vi_for_offset (fi
, fi_clobbers
),
5500 make_constraint_from (first_vi_for_offset (fi
, fi_uses
),
/* Find the first varinfo in the same variable as START that overlaps with
   OFFSET.  Return NULL if we can't find one.  */

static varinfo_t
first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
{
  /* If the offset is outside of the variable, bail out.  */
  if (offset >= start->fullsize)
    return NULL;

  /* If we cannot reach offset from start, lookup the first field
     and start from there.  */
  if (start->offset > offset)
    start = get_varinfo (start->head);

  while (start)
    {
      /* We may not find a variable in the field list with the actual
         offset when we have glommed a structure to a variable.
         In that case, however, offset should still be within the size
         of the variable.  */
      if (offset >= start->offset
          && (offset - start->offset) < start->size)
        return start;

      start = vi_next (start);
    }

  return NULL;
}
/* Find the first varinfo in the same variable as START that overlaps with
   OFFSET.  If there is no such varinfo the varinfo directly preceding
   OFFSET is returned.  */

static varinfo_t
first_or_preceding_vi_for_offset (varinfo_t start,
                                  unsigned HOST_WIDE_INT offset)
{
  /* If we cannot reach offset from start, lookup the first field
     and start from there.  */
  if (start->offset > offset)
    start = get_varinfo (start->head);

  /* We may not find a variable in the field list with the actual
     offset when we have glommed a structure to a variable.
     In that case, however, offset should still be within the size
     of the variable.
     If we got beyond the offset we look for return the field
     directly preceding offset which may be the last field.  */
  while (start->next
         && offset >= start->offset
         && !((offset - start->offset) < start->size))
    start = vi_next (start);

  return start;
}
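/* Illustrative note (added commentary): given a variable with subfields
   starting at bit offsets 0, 32 and 64, first_vi_for_offset (start, 40)
   walks the field chain and returns the subfield covering bits [32, 64),
   while first_or_preceding_vi_for_offset never returns NULL and instead
   falls back to the field directly preceding the requested offset.  */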
/* This structure is used during pushing fields onto the fieldstack
   to track the offset of the field, since bitpos_of_field gives it
   relative to its immediate containing type, and we want it relative
   to the ultimate containing object.  */

struct fieldoff
{
  /* Offset from the base of the base containing object to this field.  */
  HOST_WIDE_INT offset;

  /* Size, in bits, of the field.  */
  unsigned HOST_WIDE_INT size;

  unsigned has_unknown_size : 1;

  unsigned must_have_pointers : 1;

  unsigned may_have_pointers : 1;

  unsigned only_restrict_pointers : 1;

  tree restrict_pointed_type;
};
typedef struct fieldoff fieldoff_s;


/* qsort comparison function for two fieldoff's PA and PB */

static int
fieldoff_compare (const void *pa, const void *pb)
{
  const fieldoff_s *foa = (const fieldoff_s *)pa;
  const fieldoff_s *fob = (const fieldoff_s *)pb;
  unsigned HOST_WIDE_INT foasize, fobsize;

  if (foa->offset < fob->offset)
    return -1;
  else if (foa->offset > fob->offset)
    return 1;

  foasize = foa->size;
  fobsize = fob->size;
  if (foasize < fobsize)
    return -1;
  else if (foasize > fobsize)
    return 1;
  return 0;
}

/* Sort a fieldstack according to the field offset and sizes.  */
static void
sort_fieldstack (vec<fieldoff_s> fieldstack)
{
  fieldstack.qsort (fieldoff_compare);
}
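/* Illustrative note (added commentary): after sort_fieldstack the
   entries are ordered first by ascending offset and then by ascending
   size, e.g. (offset 0, size 8), (offset 0, size 32), (offset 32, ...),
   which is the ordering check_for_overlaps below relies on.  */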
/* Return true if T is a type that can have subvars.  */

static inline bool
type_can_have_subvars (const_tree t)
{
  /* Aggregates without overlapping fields can have subvars.  */
  return TREE_CODE (t) == RECORD_TYPE;
}

/* Return true if V is a tree that we can have subvars for.
   Normally, this is any aggregate type.  Also complex
   types which are not gimple registers can have subvars.  */

static inline bool
var_can_have_subvars (const_tree v)
{
  /* Volatile variables should never have subvars.  */
  if (TREE_THIS_VOLATILE (v))
    return false;

  /* Non decls or memory tags can never have subvars.  */
  if (!DECL_P (v))
    return false;

  return type_can_have_subvars (TREE_TYPE (v));
}

/* Return true if T is a type that does contain pointers.  */

static bool
type_must_have_pointers (tree type)
{
  if (POINTER_TYPE_P (type))
    return true;

  if (TREE_CODE (type) == ARRAY_TYPE)
    return type_must_have_pointers (TREE_TYPE (type));

  /* A function or method can have pointers as arguments, so track
     those separately.  */
  if (TREE_CODE (type) == FUNCTION_TYPE
      || TREE_CODE (type) == METHOD_TYPE)
    return true;

  return false;
}

static bool
field_must_have_pointers (tree t)
{
  return type_must_have_pointers (TREE_TYPE (t));
}
5674 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5675 the fields of TYPE onto fieldstack, recording their offsets along
5678 OFFSET is used to keep track of the offset in this entire
5679 structure, rather than just the immediately containing structure.
5680 Returns false if the caller is supposed to handle the field we
5684 push_fields_onto_fieldstack (tree type
, vec
<fieldoff_s
> *fieldstack
,
5685 HOST_WIDE_INT offset
)
5688 bool empty_p
= true;
5690 if (TREE_CODE (type
) != RECORD_TYPE
)
5693 /* If the vector of fields is growing too big, bail out early.
5694 Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
5696 if (fieldstack
->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE
)
5699 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
5700 if (TREE_CODE (field
) == FIELD_DECL
)
5703 HOST_WIDE_INT foff
= bitpos_of_field (field
);
5704 tree field_type
= TREE_TYPE (field
);
5706 if (!var_can_have_subvars (field
)
5707 || TREE_CODE (field_type
) == QUAL_UNION_TYPE
5708 || TREE_CODE (field_type
) == UNION_TYPE
)
5710 else if (!push_fields_onto_fieldstack
5711 (field_type
, fieldstack
, offset
+ foff
)
5712 && (DECL_SIZE (field
)
5713 && !integer_zerop (DECL_SIZE (field
))))
5714 /* Empty structures may have actual size, like in C++. So
5715 see if we didn't push any subfields and the size is
5716 nonzero, push the field onto the stack. */
5721 fieldoff_s
*pair
= NULL
;
5722 bool has_unknown_size
= false;
5723 bool must_have_pointers_p
;
5725 if (!fieldstack
->is_empty ())
5726 pair
= &fieldstack
->last ();
5728 /* If there isn't anything at offset zero, create sth. */
5730 && offset
+ foff
!= 0)
5733 = {0, offset
+ foff
, false, false, true, false, NULL_TREE
};
5734 pair
= fieldstack
->safe_push (e
);
5737 if (!DECL_SIZE (field
)
5738 || !tree_fits_uhwi_p (DECL_SIZE (field
)))
5739 has_unknown_size
= true;
5741 /* If adjacent fields do not contain pointers merge them. */
5742 must_have_pointers_p
= field_must_have_pointers (field
);
5744 && !has_unknown_size
5745 && !must_have_pointers_p
5746 && !pair
->must_have_pointers
5747 && !pair
->has_unknown_size
5748 && pair
->offset
+ (HOST_WIDE_INT
)pair
->size
== offset
+ foff
)
5750 pair
->size
+= tree_to_uhwi (DECL_SIZE (field
));
5755 e
.offset
= offset
+ foff
;
5756 e
.has_unknown_size
= has_unknown_size
;
5757 if (!has_unknown_size
)
5758 e
.size
= tree_to_uhwi (DECL_SIZE (field
));
5761 e
.must_have_pointers
= must_have_pointers_p
;
5762 e
.may_have_pointers
= true;
5763 e
.only_restrict_pointers
5764 = (!has_unknown_size
5765 && POINTER_TYPE_P (field_type
)
5766 && TYPE_RESTRICT (field_type
));
5767 if (e
.only_restrict_pointers
)
5768 e
.restrict_pointed_type
= TREE_TYPE (field_type
);
5769 fieldstack
->safe_push (e
);
/* Count the number of arguments DECL has, and set IS_VARARGS to true
   if it is a varargs function.  */

static unsigned int
count_num_arguments (tree decl, bool *is_varargs)
{
  unsigned int num = 0;
  tree t;

  /* Capture named arguments for K&R functions.  They do not
     have a prototype and thus no TYPE_ARG_TYPES.  */
  for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
    ++num;

  /* Check if the function has variadic arguments.  */
  for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
    if (TREE_VALUE (t) == void_type_node)
      break;
  if (!t)
    *is_varargs = true;

  return num;
}
5803 /* Creation function node for DECL, using NAME, and return the index
5804 of the variable we've created for the function. If NONLOCAL_p, create
5805 initial constraints. */
5808 create_function_info_for (tree decl
, const char *name
, bool add_id
,
5811 struct function
*fn
= DECL_STRUCT_FUNCTION (decl
);
5812 varinfo_t vi
, prev_vi
;
5815 bool is_varargs
= false;
5816 unsigned int num_args
= count_num_arguments (decl
, &is_varargs
);
5818 /* Create the variable info. */
5820 vi
= new_var_info (decl
, name
, add_id
);
5823 vi
->fullsize
= fi_parm_base
+ num_args
;
5825 vi
->may_have_pointers
= false;
5828 insert_vi_for_tree (vi
->decl
, vi
);
5832 /* Create a variable for things the function clobbers and one for
5833 things the function uses. */
5835 varinfo_t clobbervi
, usevi
;
5836 const char *newname
;
5839 tempname
= xasprintf ("%s.clobber", name
);
5840 newname
= ggc_strdup (tempname
);
5843 clobbervi
= new_var_info (NULL
, newname
, false);
5844 clobbervi
->offset
= fi_clobbers
;
5845 clobbervi
->size
= 1;
5846 clobbervi
->fullsize
= vi
->fullsize
;
5847 clobbervi
->is_full_var
= true;
5848 clobbervi
->is_global_var
= false;
5849 clobbervi
->is_reg_var
= true;
5851 gcc_assert (prev_vi
->offset
< clobbervi
->offset
);
5852 prev_vi
->next
= clobbervi
->id
;
5853 prev_vi
= clobbervi
;
5855 tempname
= xasprintf ("%s.use", name
);
5856 newname
= ggc_strdup (tempname
);
5859 usevi
= new_var_info (NULL
, newname
, false);
5860 usevi
->offset
= fi_uses
;
5862 usevi
->fullsize
= vi
->fullsize
;
5863 usevi
->is_full_var
= true;
5864 usevi
->is_global_var
= false;
5865 usevi
->is_reg_var
= true;
5867 gcc_assert (prev_vi
->offset
< usevi
->offset
);
5868 prev_vi
->next
= usevi
->id
;
5872 /* And one for the static chain. */
5873 if (fn
->static_chain_decl
!= NULL_TREE
)
5876 const char *newname
;
5879 tempname
= xasprintf ("%s.chain", name
);
5880 newname
= ggc_strdup (tempname
);
5883 chainvi
= new_var_info (fn
->static_chain_decl
, newname
, false);
5884 chainvi
->offset
= fi_static_chain
;
5886 chainvi
->fullsize
= vi
->fullsize
;
5887 chainvi
->is_full_var
= true;
5888 chainvi
->is_global_var
= false;
5890 insert_vi_for_tree (fn
->static_chain_decl
, chainvi
);
5893 && chainvi
->may_have_pointers
)
5894 make_constraint_from (chainvi
, nonlocal_id
);
5896 gcc_assert (prev_vi
->offset
< chainvi
->offset
);
5897 prev_vi
->next
= chainvi
->id
;
5901 /* Create a variable for the return var. */
5902 if (DECL_RESULT (decl
) != NULL
5903 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
5906 const char *newname
;
5908 tree resultdecl
= decl
;
5910 if (DECL_RESULT (decl
))
5911 resultdecl
= DECL_RESULT (decl
);
5913 tempname
= xasprintf ("%s.result", name
);
5914 newname
= ggc_strdup (tempname
);
5917 resultvi
= new_var_info (resultdecl
, newname
, false);
5918 resultvi
->offset
= fi_result
;
5920 resultvi
->fullsize
= vi
->fullsize
;
5921 resultvi
->is_full_var
= true;
5922 if (DECL_RESULT (decl
))
5923 resultvi
->may_have_pointers
= true;
5925 if (DECL_RESULT (decl
))
5926 insert_vi_for_tree (DECL_RESULT (decl
), resultvi
);
5929 && DECL_RESULT (decl
)
5930 && DECL_BY_REFERENCE (DECL_RESULT (decl
)))
5931 make_constraint_from (resultvi
, nonlocal_id
);
5933 gcc_assert (prev_vi
->offset
< resultvi
->offset
);
5934 prev_vi
->next
= resultvi
->id
;
5938 /* We also need to make function return values escape. Nothing
5939 escapes by returning from main though. */
5941 && !MAIN_NAME_P (DECL_NAME (decl
)))
5944 fi
= lookup_vi_for_tree (decl
);
5945 rvi
= first_vi_for_offset (fi
, fi_result
);
5946 if (rvi
&& rvi
->offset
== fi_result
)
5947 make_copy_constraint (get_varinfo (escaped_id
), rvi
->id
);
5950 /* Set up variables for each argument. */
5951 arg
= DECL_ARGUMENTS (decl
);
5952 for (i
= 0; i
< num_args
; i
++)
5955 const char *newname
;
5957 tree argdecl
= decl
;
5962 tempname
= xasprintf ("%s.arg%d", name
, i
);
5963 newname
= ggc_strdup (tempname
);
5966 argvi
= new_var_info (argdecl
, newname
, false);
5967 argvi
->offset
= fi_parm_base
+ i
;
5969 argvi
->is_full_var
= true;
5970 argvi
->fullsize
= vi
->fullsize
;
5972 argvi
->may_have_pointers
= true;
5975 insert_vi_for_tree (arg
, argvi
);
5978 && argvi
->may_have_pointers
)
5979 make_constraint_from (argvi
, nonlocal_id
);
5981 gcc_assert (prev_vi
->offset
< argvi
->offset
);
5982 prev_vi
->next
= argvi
->id
;
5985 arg
= DECL_CHAIN (arg
);
5988 /* Add one representative for all further args. */
5992 const char *newname
;
5996 tempname
= xasprintf ("%s.varargs", name
);
5997 newname
= ggc_strdup (tempname
);
6000 /* We need sth that can be pointed to for va_start. */
6001 decl
= build_fake_var_decl (ptr_type_node
);
6003 argvi
= new_var_info (decl
, newname
, false);
6004 argvi
->offset
= fi_parm_base
+ num_args
;
6006 argvi
->is_full_var
= true;
6007 argvi
->is_heap_var
= true;
6008 argvi
->fullsize
= vi
->fullsize
;
6011 && argvi
->may_have_pointers
)
6012 make_constraint_from (argvi
, nonlocal_id
);
6014 gcc_assert (prev_vi
->offset
< argvi
->offset
);
6015 prev_vi
->next
= argvi
->id
;
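/* Illustrative note (added commentary): for a function
     int *foo (int *a, int *b)
   create_function_info_for lays the pieces out as consecutive subfields
   of one variable, roughly
     foo.clobber  (fi_clobbers)
     foo.use      (fi_uses)
     foo.chain    (fi_static_chain, only with a static chain)
     foo.result   (fi_result)
     foo.arg0, foo.arg1  (fi_parm_base + i)
   plus a trailing foo.varargs representative for variadic functions.  */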
/* Return true if FIELDSTACK contains fields that overlap.
   FIELDSTACK is assumed to be sorted by offset.  */

static bool
check_for_overlaps (vec<fieldoff_s> fieldstack)
{
  fieldoff_s *fo = NULL;
  unsigned int i;
  HOST_WIDE_INT lastoffset = -1;

  FOR_EACH_VEC_ELT (fieldstack, i, fo)
    {
      if (fo->offset == lastoffset)
        return true;
      lastoffset = fo->offset;
    }
  return false;
}
6041 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
6042 This will also create any varinfo structures necessary for fields
6043 of DECL. DECL is a function parameter if HANDLE_PARAM is set.
6044 HANDLED_STRUCT_TYPE is used to register struct types reached by following
6045 restrict pointers. This is needed to prevent infinite recursion.
6046 If ADD_RESTRICT, pretend that the pointer NAME is restrict even if DECL
6047 does not advertise it. */
6050 create_variable_info_for_1 (tree decl
, const char *name
, bool add_id
,
6051 bool handle_param
, bitmap handled_struct_type
,
6052 bool add_restrict
= false)
6054 varinfo_t vi
, newvi
;
6055 tree decl_type
= TREE_TYPE (decl
);
6056 tree declsize
= DECL_P (decl
) ? DECL_SIZE (decl
) : TYPE_SIZE (decl_type
);
6057 auto_vec
<fieldoff_s
> fieldstack
;
6062 || !tree_fits_uhwi_p (declsize
))
6064 vi
= new_var_info (decl
, name
, add_id
);
6068 vi
->is_unknown_size_var
= true;
6069 vi
->is_full_var
= true;
6070 vi
->may_have_pointers
= true;
6074 /* Collect field information. */
6075 if (use_field_sensitive
6076 && var_can_have_subvars (decl
)
6077 /* ??? Force us to not use subfields for globals in IPA mode.
6078 Else we'd have to parse arbitrary initializers. */
6080 && is_global_var (decl
)))
6082 fieldoff_s
*fo
= NULL
;
6083 bool notokay
= false;
6086 push_fields_onto_fieldstack (decl_type
, &fieldstack
, 0);
6088 for (i
= 0; !notokay
&& fieldstack
.iterate (i
, &fo
); i
++)
6089 if (fo
->has_unknown_size
6096 /* We can't sort them if we have a field with a variable sized type,
6097 which will make notokay = true. In that case, we are going to return
6098 without creating varinfos for the fields anyway, so sorting them is a
6102 sort_fieldstack (fieldstack
);
6103 /* Due to some C++ FE issues, like PR 22488, we might end up
6104 what appear to be overlapping fields even though they,
6105 in reality, do not overlap. Until the C++ FE is fixed,
6106 we will simply disable field-sensitivity for these cases. */
6107 notokay
= check_for_overlaps (fieldstack
);
6111 fieldstack
.release ();
6114 /* If we didn't end up collecting sub-variables create a full
6115 variable for the decl. */
6116 if (fieldstack
.length () == 0
6117 || fieldstack
.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE
)
6119 vi
= new_var_info (decl
, name
, add_id
);
6121 vi
->may_have_pointers
= true;
6122 vi
->fullsize
= tree_to_uhwi (declsize
);
6123 vi
->size
= vi
->fullsize
;
6124 vi
->is_full_var
= true;
6125 if (POINTER_TYPE_P (decl_type
)
6126 && (TYPE_RESTRICT (decl_type
) || add_restrict
))
6127 vi
->only_restrict_pointers
= 1;
6128 if (vi
->only_restrict_pointers
6129 && !type_contains_placeholder_p (TREE_TYPE (decl_type
))
6131 && !bitmap_bit_p (handled_struct_type
,
6132 TYPE_UID (TREE_TYPE (decl_type
))))
6135 tree heapvar
= build_fake_var_decl (TREE_TYPE (decl_type
));
6136 DECL_EXTERNAL (heapvar
) = 1;
6137 if (var_can_have_subvars (heapvar
))
6138 bitmap_set_bit (handled_struct_type
,
6139 TYPE_UID (TREE_TYPE (decl_type
)));
6140 rvi
= create_variable_info_for_1 (heapvar
, "PARM_NOALIAS", true,
6141 true, handled_struct_type
);
6142 if (var_can_have_subvars (heapvar
))
6143 bitmap_clear_bit (handled_struct_type
,
6144 TYPE_UID (TREE_TYPE (decl_type
)));
6145 rvi
->is_restrict_var
= 1;
6146 insert_vi_for_tree (heapvar
, rvi
);
6147 make_constraint_from (vi
, rvi
->id
);
6148 make_param_constraints (rvi
);
6150 fieldstack
.release ();
6154 vi
= new_var_info (decl
, name
, add_id
);
6155 vi
->fullsize
= tree_to_uhwi (declsize
);
6156 if (fieldstack
.length () == 1)
6157 vi
->is_full_var
= true;
6158 for (i
= 0, newvi
= vi
;
6159 fieldstack
.iterate (i
, &fo
);
6160 ++i
, newvi
= vi_next (newvi
))
6162 const char *newname
= NULL
;
6167 if (fieldstack
.length () != 1)
6170 = xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
6171 "+" HOST_WIDE_INT_PRINT_DEC
, name
,
6172 fo
->offset
, fo
->size
);
6173 newname
= ggc_strdup (tempname
);
6181 newvi
->name
= newname
;
6182 newvi
->offset
= fo
->offset
;
6183 newvi
->size
= fo
->size
;
6184 newvi
->fullsize
= vi
->fullsize
;
6185 newvi
->may_have_pointers
= fo
->may_have_pointers
;
6186 newvi
->only_restrict_pointers
= fo
->only_restrict_pointers
;
6188 && newvi
->only_restrict_pointers
6189 && !type_contains_placeholder_p (fo
->restrict_pointed_type
)
6190 && !bitmap_bit_p (handled_struct_type
,
6191 TYPE_UID (fo
->restrict_pointed_type
)))
6194 tree heapvar
= build_fake_var_decl (fo
->restrict_pointed_type
);
6195 DECL_EXTERNAL (heapvar
) = 1;
6196 if (var_can_have_subvars (heapvar
))
6197 bitmap_set_bit (handled_struct_type
,
6198 TYPE_UID (fo
->restrict_pointed_type
));
6199 rvi
= create_variable_info_for_1 (heapvar
, "PARM_NOALIAS", true,
6200 true, handled_struct_type
);
6201 if (var_can_have_subvars (heapvar
))
6202 bitmap_clear_bit (handled_struct_type
,
6203 TYPE_UID (fo
->restrict_pointed_type
));
6204 rvi
->is_restrict_var
= 1;
6205 insert_vi_for_tree (heapvar
, rvi
);
6206 make_constraint_from (newvi
, rvi
->id
);
6207 make_param_constraints (rvi
);
6209 if (i
+ 1 < fieldstack
.length ())
6211 varinfo_t tem
= new_var_info (decl
, name
, false);
6212 newvi
->next
= tem
->id
;
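/* Illustrative note (added commentary): when field-sensitivity applies,
   a variable such as  struct { int *x; int *y; } s;  gets one varinfo
   per field, named after its bit range, e.g. "s.0+64" and "s.64+64" on
   an LP64 target, all sharing the same fullsize.  */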
6221 create_variable_info_for (tree decl
, const char *name
, bool add_id
)
6223 /* First see if we are dealing with an ifunc resolver call and
6224 assiociate that with a call to the resolver function result. */
6227 && TREE_CODE (decl
) == FUNCTION_DECL
6228 && (node
= cgraph_node::get (decl
))
6229 && node
->ifunc_resolver
)
6231 varinfo_t fi
= get_vi_for_tree (node
->get_alias_target ()->decl
);
6233 = get_function_part_constraint (fi
, fi_result
);
6234 fi
= new_var_info (NULL_TREE
, "ifuncres", true);
6235 fi
->is_reg_var
= true;
6236 constraint_expr lhs
;
6240 process_constraint (new_constraint (lhs
, rhs
));
6241 insert_vi_for_tree (decl
, fi
);
6245 varinfo_t vi
= create_variable_info_for_1 (decl
, name
, add_id
, false, NULL
);
6246 unsigned int id
= vi
->id
;
6248 insert_vi_for_tree (decl
, vi
);
6253 /* Create initial constraints for globals. */
6254 for (; vi
; vi
= vi_next (vi
))
6256 if (!vi
->may_have_pointers
6257 || !vi
->is_global_var
)
6260 /* Mark global restrict qualified pointers. */
6261 if ((POINTER_TYPE_P (TREE_TYPE (decl
))
6262 && TYPE_RESTRICT (TREE_TYPE (decl
)))
6263 || vi
->only_restrict_pointers
)
6266 = make_constraint_from_global_restrict (vi
, "GLOBAL_RESTRICT",
6268 /* ??? For now exclude reads from globals as restrict sources
6269 if those are not (indirectly) from incoming parameters. */
6270 rvi
->is_restrict_var
= false;
6274 /* In non-IPA mode the initializer from nonlocal is all we need. */
6276 || DECL_HARD_REGISTER (decl
))
6277 make_copy_constraint (vi
, nonlocal_id
);
6279 /* In IPA mode parse the initializer and generate proper constraints
6283 varpool_node
*vnode
= varpool_node::get (decl
);
6285 /* For escaped variables initialize them from nonlocal. */
6286 if (!vnode
->all_refs_explicit_p ())
6287 make_copy_constraint (vi
, nonlocal_id
);
6289 /* If this is a global variable with an initializer and we are in
6290 IPA mode generate constraints for it. */
6292 for (unsigned idx
= 0; vnode
->iterate_reference (idx
, ref
); ++idx
)
6294 auto_vec
<ce_s
> rhsc
;
6295 struct constraint_expr lhs
, *rhsp
;
6297 get_constraint_for_address_of (ref
->referred
->decl
, &rhsc
);
6301 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
6302 process_constraint (new_constraint (lhs
, *rhsp
));
6303 /* If this is a variable that escapes from the unit
6304 the initializer escapes as well. */
6305 if (!vnode
->all_refs_explicit_p ())
6307 lhs
.var
= escaped_id
;
6310 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
6311 process_constraint (new_constraint (lhs
, *rhsp
));
/* Print out the points-to solution for VAR to FILE.  */

static void
dump_solution_for_var (FILE *file, unsigned int var)
{
  varinfo_t vi = get_varinfo (var);
  unsigned int i;
  bitmap_iterator bi;

  /* Dump the solution for unified vars anyway, this avoids difficulties
     in scanning dumps in the testsuite.  */
  fprintf (file, "%s = { ", vi->name);
  vi = get_varinfo (find (var));
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    fprintf (file, "%s ", get_varinfo (i)->name);
  fprintf (file, "}");

  /* But note when the variable was unified.  */
  if (vi->id != var)
    fprintf (file, " same as %s", vi->name);

  fprintf (file, "\n");
}

/* Print the points-to solution for VAR to stderr.  */

DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stderr, var);
}
/* Register the constraints for function parameter related VI.  */

static void
make_param_constraints (varinfo_t vi)
{
  for (; vi; vi = vi_next (vi))
    {
      if (vi->only_restrict_pointers)
        ;
      else if (vi->may_have_pointers)
        make_constraint_from (vi, nonlocal_id);

      if (vi->is_full_var)
        break;
    }
}
/* Create varinfo structures for all of the variables in the
   function for intraprocedural mode.  */

static void
intra_create_variable_infos (struct function *fn)
{
  tree t;
  bitmap handled_struct_type = NULL;
  bool this_parm_in_ctor = DECL_CXX_CONSTRUCTOR_P (fn->decl);

  /* For each incoming pointer argument arg, create the constraint ARG
     = NONLOCAL or a dummy variable if it is a restrict qualified
     passed-by-reference argument.  */
  for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
    {
      if (handled_struct_type == NULL)
        handled_struct_type = BITMAP_ALLOC (NULL);

      varinfo_t p
        = create_variable_info_for_1 (t, alias_get_name (t), false, true,
                                      handled_struct_type, this_parm_in_ctor);
      insert_vi_for_tree (t, p);

      make_param_constraints (p);

      this_parm_in_ctor = false;
    }

  if (handled_struct_type != NULL)
    BITMAP_FREE (handled_struct_type);

  /* Add a constraint for a result decl that is passed by reference.  */
  if (DECL_RESULT (fn->decl)
      && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
    {
      varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));

      for (p = result_vi; p; p = vi_next (p))
        make_constraint_from (p, nonlocal_id);
    }

  /* Add a constraint for the incoming static chain parameter.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);

      for (p = chain_vi; p; p = vi_next (p))
        make_constraint_from (p, nonlocal_id);
    }
}
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  bitmap pt_vars;
  hashval_t hashcode;
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;

/* Shared_bitmap hashtable helpers.  */

struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
{
  static inline hashval_t hash (const shared_bitmap_info *);
  static inline bool equal (const shared_bitmap_info *,
                            const shared_bitmap_info *);
};

/* Hash function for a shared_bitmap_info_t */

inline hashval_t
shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
{
  return bi->hashcode;
}

/* Equality function for two shared_bitmap_info_t's.  */

inline bool
shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
                             const shared_bitmap_info *sbi2)
{
  return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
}

/* Shared_bitmap hashtable.  */

static hash_table<shared_bitmap_hasher> *shared_bitmap_table;

/* Lookup a bitmap in the shared bitmap hashtable, and return an already
   existing instance if there is one, NULL otherwise.  */

static bitmap
shared_bitmap_lookup (bitmap pt_vars)
{
  shared_bitmap_info **slot;
  struct shared_bitmap_info sbi;

  sbi.pt_vars = pt_vars;
  sbi.hashcode = bitmap_hash (pt_vars);

  slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
  if (!slot)
    return NULL;
  else
    return (*slot)->pt_vars;
}

/* Add a bitmap to the shared bitmap hashtable.  */

static void
shared_bitmap_add (bitmap pt_vars)
{
  shared_bitmap_info **slot;
  shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);

  sbi->pt_vars = pt_vars;
  sbi->hashcode = bitmap_hash (pt_vars);

  slot = shared_bitmap_table->find_slot (sbi, INSERT);
  gcc_assert (!*slot);
  *slot = sbi;
}
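/* Illustrative note (added commentary): the intended use is
     bitmap shared = shared_bitmap_lookup (finished_solution);
     if (!shared)
       shared_bitmap_add (finished_solution);
     else
       ... reuse 'shared' instead of the freshly built bitmap ...
   so identical points-to sets are stored only once (see
   find_what_var_points_to below).  */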
/* Set bits in INTO corresponding to the variable uids in solution set FROM.  */

static void
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
		   tree fndecl)
{
  unsigned int i;
  bitmap_iterator bi;
  varinfo_t escaped_vi = get_varinfo (find (escaped_id));
  bool everything_escaped
    = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	continue;

      if (everything_escaped
	  || (escaped_vi->solution
	      && bitmap_bit_p (escaped_vi->solution, i)))
	{
	  pt->vars_contains_escaped = true;
	  pt->vars_contains_escaped_heap |= vi->is_heap_var;
	}

      if (vi->is_restrict_var)
	pt->vars_contains_restrict = true;

      if (VAR_P (vi->decl)
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* If we are in IPA mode we will not recompute points-to
	     sets after inlining so make sure they stay valid.  */
	  if (in_ipa_mode
	      && !DECL_PT_UID_SET_P (vi->decl))
	    SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));

	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables.  */
	  bitmap_set_bit (into, DECL_PT_UID (vi->decl));
	  if (vi->is_global_var
	      /* In IPA mode the escaped_heap trick doesn't work as
		 ESCAPED is escaped from the unit but
		 pt_solution_includes_global needs to answer true for
		 all variables not automatic within a function.
		 For the same reason is_global_var is not the
		 correct flag to track - local variables from other
		 functions also need to be considered global.
		 Conveniently all HEAP vars are not put in function
		 scope.  */
	      || (in_ipa_mode
		  && fndecl
		  && ! auto_var_in_fn_p (vi->decl, fndecl)))
	    pt->vars_contains_nonlocal = true;

	  /* If we have a variable that is interposable record that fact
	     for pointer comparison simplification.  */
	  if (VAR_P (vi->decl)
	      && (TREE_STATIC (vi->decl) || DECL_EXTERNAL (vi->decl))
	      && ! decl_binds_to_current_def_p (vi->decl))
	    pt->vars_contains_interposable = true;

	  /* If this is a local variable whose lifetime can overlap across
	     different function invocations through recursion, duplicate
	     it with its shadow variable.  */
	  if (in_ipa_mode
	      && vi->shadow_var_uid != 0)
	    {
	      bitmap_set_bit (into, vi->shadow_var_uid);
	      pt->vars_contains_nonlocal = true;
	    }
	}
      else if (TREE_CODE (vi->decl) == FUNCTION_DECL
	       || TREE_CODE (vi->decl) == LABEL_DECL)
	{
	  /* Nothing should read/write from/to code so we can
	     save bits by not including them in the points-to bitmaps.
	     Still mark the points-to set as containing global memory
	     to make code-patching possible - see PR70128.  */
	  pt->vars_contains_nonlocal = true;
	}
    }
}
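
/* For illustration: set_uids_in_ptset translates a solution over internal
   constraint-variable ids into a bitmap over DECL_PT_UIDs.  E.g. if FROM
   contains the ids of two locals a and b, INTO ends up containing
   DECL_PT_UID (a) and DECL_PT_UID (b), and PT's vars_contains_* flags
   summarize whether any member is nonlocal, escaped or a restrict tag.  */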
/* Compute the points-to solution *PT for the variable VI.  */

static struct pt_solution
find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;
  struct pt_solution *pt;

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (find (orig_vi->id));

  /* See if we have already computed the solution and return it.  */
  pt_solution **slot = &final_solutions->get_or_insert (vi);
  if (*slot != NULL)
    return **slot;

  *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
  memset (pt, 0, sizeof (struct pt_solution));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	      /* Expand some special vars of ESCAPED in-place here.  */
	      varinfo_t evi = get_varinfo (find (escaped_id));
	      if (bitmap_bit_p (evi->solution, nonlocal_id))
		pt->nonlocal = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->id == string_id)
	    /* Nobody cares - STRING_CSTs are read-only entities.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything)
    return *pt;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (finished_solution, vi->solution, pt, fndecl);
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      pt->vars = result;
      bitmap_clear (finished_solution);
    }

  return *pt;
}
/* Given a pointer variable P, fill in its points-to set.  */

static void
find_what_p_points_to (tree fndecl, tree p)
{
  struct ptr_info_def *pi;
  tree lookup_p = p;
  varinfo_t vi;
  bool nonnull = get_ptr_nonnull (p);

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (p) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (p)
      && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
    lookup_p = SSA_NAME_VAR (p);

  vi = lookup_vi_for_tree (lookup_p);
  if (!vi)
    return;

  pi = get_ptr_info (p);
  pi->pt = find_what_var_points_to (fndecl, vi);
  /* Conservatively set to NULL from PTA (to true).  */
  pi->pt.null = 1;
  /* Preserve pointer nonnull computed by VRP.  See get_ptr_nonnull
     in gcc/tree-ssanames.c for more information.  */
  if (nonnull)
    set_ptr_nonnull (p);
}
/* Query statistics for points-to solutions.  */

static struct {
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;

void
dump_pta_stats (FILE *s)
{
  fprintf (s, "\nPTA query stats:\n");
  fprintf (s, "  pt_solution_includes: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   pta_stats.pt_solution_includes_no_alias,
	   pta_stats.pt_solution_includes_no_alias
	   + pta_stats.pt_solution_includes_may_alias);
  fprintf (s, "  pt_solutions_intersect: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   pta_stats.pt_solutions_intersect_no_alias,
	   pta_stats.pt_solutions_intersect_no_alias
	   + pta_stats.pt_solutions_intersect_may_alias);
}
/* Reset the points-to solution *PT to a conservative default
   (point to anything).  */

void
pt_solution_reset (struct pt_solution *pt)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->anything = true;
}
/* Set the points-to solution *PT to point only to the variables
   in VARS.  VARS_CONTAINS_NONLOCAL specifies whether VARS contains
   nonlocal (global) variables.  */

void
pt_solution_set (struct pt_solution *pt, bitmap vars,
		 bool vars_contains_nonlocal)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = vars;
  pt->vars_contains_nonlocal = vars_contains_nonlocal;
  pt->vars_contains_escaped
    = (cfun->gimple_df->escaped.anything
       || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
}
/* Set the points-to solution *PT to point only to the variable VAR.  */

void
pt_solution_set_var (struct pt_solution *pt, tree var)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = BITMAP_GGC_ALLOC ();
  bitmap_set_bit (pt->vars, DECL_PT_UID (var));
  pt->vars_contains_nonlocal = is_global_var (var);
  pt->vars_contains_escaped
    = (cfun->gimple_df->escaped.anything
       || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
}
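
/* A hypothetical usage sketch: a pass that knows a pointer can only refer
   to one declaration could do

     struct pt_solution pt;
     pt_solution_set_var (&pt, my_decl);   -- my_decl is a placeholder name

   after which pt.vars holds only DECL_PT_UID (my_decl) and the escaped /
   nonlocal flags reflect whether my_decl is global or already escaped.  */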
/* Computes the union of the points-to solutions *DEST and *SRC and
   stores the result in *DEST.  This changes the points-to bitmap
   of *DEST and thus may not be used if that might be shared.
   The points-to bitmap of *SRC and *DEST will not be shared after
   this function if they were not before.  */

static void
pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
{
  dest->anything |= src->anything;
  if (dest->anything)
    {
      pt_solution_reset (dest);
      return;
    }

  dest->nonlocal |= src->nonlocal;
  dest->escaped |= src->escaped;
  dest->ipa_escaped |= src->ipa_escaped;
  dest->null |= src->null;
  dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
  dest->vars_contains_escaped |= src->vars_contains_escaped;
  dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
  if (!src->vars)
    return;

  if (!dest->vars)
    dest->vars = BITMAP_GGC_ALLOC ();
  bitmap_ior_into (dest->vars, src->vars);
}
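
/* For example, unioning a solution that points to { a } with one that
   points to { b } yields a solution pointing to { a, b } with the
   anything / nonlocal / escaped flags or'ed together; if either input had
   "anything" set, the result is simply reset to the conservative
   point-to-anything state.  */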
/* Return true if the points-to solution *PT is empty.  */

bool
pt_solution_empty_p (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal)
    return false;

  if (pt->vars
      && !bitmap_empty_p (pt->vars))
    return false;

  /* If the solution includes ESCAPED, check if that is empty.  */
  if (pt->escaped
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    return false;

  /* If the solution includes the IPA ESCAPED solution, check if that
     is empty.  */
  if (pt->ipa_escaped
      && !pt_solution_empty_p (&ipa_escaped_pt))
    return false;

  return true;
}
/* Return true if the points-to solution *PT points to only a single var,
   and return that var's uid in *UID.  */

bool
pt_solution_singleton_or_null_p (struct pt_solution *pt, unsigned *uid)
{
  if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
      || pt->vars == NULL
      || !bitmap_single_bit_set_p (pt->vars))
    return false;

  *uid = bitmap_first_set_bit (pt->vars);
  return true;
}
/* Return true if the points-to solution *PT includes global memory.  */

bool
pt_solution_includes_global (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal
      || pt->vars_contains_nonlocal
      /* The following is a hack to make the malloc escape hack work.
	 In reality we'd need different sets for escaped-through-return
	 and escaped-to-callees and passes would need to be updated.  */
      || pt->vars_contains_escaped_heap)
    return true;

  /* 'escaped' is also a placeholder so we have to look into it.  */
  if (pt->escaped)
    return pt_solution_includes_global (&cfun->gimple_df->escaped);

  if (pt->ipa_escaped)
    return pt_solution_includes_global (&ipa_escaped_pt);

  return false;
}
/* Return true if the points-to solution *PT includes the variable
   declaration DECL.  */

static bool
pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
{
  if (pt->anything)
    return true;

  if (pt->nonlocal
      && is_global_var (decl))
    return true;

  if (pt->vars
      && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
    return true;

  /* If the solution includes ESCAPED, check it.  */
  if (pt->escaped
      && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
    return true;

  /* If the solution includes the IPA ESCAPED solution, check it.  */
  if (pt->ipa_escaped
      && pt_solution_includes_1 (&ipa_escaped_pt, decl))
    return true;

  return false;
}

bool
pt_solution_includes (struct pt_solution *pt, const_tree decl)
{
  bool res = pt_solution_includes_1 (pt, decl);
  if (res)
    ++pta_stats.pt_solution_includes_may_alias;
  else
    ++pta_stats.pt_solution_includes_no_alias;
  return res;
}
/* Return true if both points-to solutions PT1 and PT2 have a non-empty
   intersection.  */

static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias.  */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_nonlocal))
      || (pt2->nonlocal
	  && pt1->vars_contains_nonlocal))
    return true;

  /* If either points to all escaped memory and the other points to
     any escaped memory they alias.  */
  if ((pt1->escaped
       && (pt2->escaped
	   || pt2->vars_contains_escaped))
      || (pt2->escaped
	  && pt1->vars_contains_escaped))
    return true;

  /* Check the escaped solution if required.
     ??? Do we need to check the local against the IPA escaped sets?  */
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
      && !pt_solution_empty_p (&ipa_escaped_pt))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->ipa_escaped && pt2->ipa_escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->ipa_escaped
	   && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
	  || (pt2->ipa_escaped
	      && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects.  */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}

bool
pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
{
  bool res = pt_solutions_intersect_1 (pt1, pt2);
  if (res)
    ++pta_stats.pt_solutions_intersect_may_alias;
  else
    ++pta_stats.pt_solutions_intersect_no_alias;
  return res;
}
/* Dump points-to information to OUTFILE.  */

static void
dump_sa_points_to_info (FILE *outfile)
{
  unsigned int i;

  fprintf (outfile, "\nPoints-to sets\n\n");

  if (dump_flags & TDF_STATS)
    {
      fprintf (outfile, "Stats:\n");
      fprintf (outfile, "Total vars:               %d\n", stats.total_vars);
      fprintf (outfile, "Non-pointer vars:         %d\n",
	       stats.nonpointer_vars);
      fprintf (outfile, "Statically unified vars:  %d\n",
	       stats.unified_vars_static);
      fprintf (outfile, "Dynamically unified vars: %d\n",
	       stats.unified_vars_dynamic);
      fprintf (outfile, "Iterations:               %d\n", stats.iterations);
      fprintf (outfile, "Number of edges:          %d\n", stats.num_edges);
      fprintf (outfile, "Number of implicit edges: %d\n",
	       stats.num_implicit_edges);
    }

  for (i = 1; i < varmap.length (); i++)
    {
      varinfo_t vi = get_varinfo (i);
      if (!vi->may_have_pointers)
	continue;
      dump_solution_for_var (outfile, i);
    }
}


/* Debug points-to information to stderr.  */

DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
/* Initialize the always-existing constraint variables for NULL, ANYTHING,
   STRING, ESCAPED, NONLOCAL, STOREDANYTHING, and INTEGER.  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;
  varinfo_t var_anything;
  varinfo_t var_nothing;
  varinfo_t var_string;
  varinfo_t var_escaped;
  varinfo_t var_nonlocal;
  varinfo_t var_storedanything;
  varinfo_t var_integer;

  /* Variable ID zero is reserved and should be NULL.  */
  varmap.safe_push (NULL);

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  var_nothing = new_var_info (NULL_TREE, "NULL", false);
  gcc_assert (var_nothing->id == nothing_id);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  var_nothing->may_have_pointers = 0;
  var_nothing->is_global_var = 0;

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  var_anything = new_var_info (NULL_TREE, "ANYTHING", false);
  gcc_assert (var_anything->id == anything_id);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING.  */
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  constraints.safe_push (new_constraint (lhs, rhs));

  /* Create the STRING variable, used to represent that a variable
     points to a string literal.  String literals don't contain
     pointers so STRING doesn't point to anything.  */
  var_string = new_var_info (NULL_TREE, "STRING", false);
  gcc_assert (var_string->id == string_id);
  var_string->is_artificial_var = 1;
  var_string->offset = 0;
  var_string->size = ~0;
  var_string->fullsize = ~0;
  var_string->is_special_var = 1;
  var_string->may_have_pointers = 0;

  /* Create the ESCAPED variable, used to represent the set of escaped
     memory.  */
  var_escaped = new_var_info (NULL_TREE, "ESCAPED", false);
  gcc_assert (var_escaped->id == escaped_id);
  var_escaped->is_artificial_var = 1;
  var_escaped->offset = 0;
  var_escaped->size = ~0;
  var_escaped->fullsize = ~0;
  var_escaped->is_special_var = 0;

  /* Create the NONLOCAL variable, used to represent the set of nonlocal
     memory.  */
  var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL", false);
  gcc_assert (var_nonlocal->id == nonlocal_id);
  var_nonlocal->is_artificial_var = 1;
  var_nonlocal->offset = 0;
  var_nonlocal->size = ~0;
  var_nonlocal->fullsize = ~0;
  var_nonlocal->is_special_var = 1;

  /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
     whole variable escapes.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = escaped_id;
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (new_constraint (lhs, rhs));

  /* *ESCAPED = NONLOCAL.  This is true because we have to assume
     everything pointed to by escaped points to what global memory can
     point to.  */
  lhs.type = DEREF;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED.  This is true because
     global memory may point to global memory and escaped memory.  */
  lhs.type = SCALAR;
  lhs.var = nonlocal_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
  rhs.type = ADDRESSOF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the STOREDANYTHING variable, used to represent the set of
     variables stored to *ANYTHING.  */
  var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING", false);
  gcc_assert (var_storedanything->id == storedanything_id);
  var_storedanything->is_artificial_var = 1;
  var_storedanything->offset = 0;
  var_storedanything->size = ~0;
  var_storedanything->fullsize = ~0;
  var_storedanything->is_special_var = 0;

  /* Create the INTEGER variable, used to represent that a variable points
     to what an INTEGER "points to".  */
  var_integer = new_var_info (NULL_TREE, "INTEGER", false);
  gcc_assert (var_integer->id == integer_id);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->is_special_var = 1;

  /* INTEGER = &ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
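
/* Summarized in the constraint notation used throughout this file, the
   special variables set up above obey

     ANYTHING = &ANYTHING
     ESCAPED = *ESCAPED
     ESCAPED = ESCAPED + UNKNOWN_OFFSET
     *ESCAPED = NONLOCAL
     NONLOCAL = &NONLOCAL
     NONLOCAL = &ESCAPED
     INTEGER = &ANYTHING

   which is just a restatement of the process_constraint calls above.  */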
/* Initialize things necessary to perform PTA.  */

static void
init_alias_vars (void)
{
  use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);

  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraints.create (8);
  varmap.create (8);
  vi_for_tree = new hash_map<tree, varinfo_t>;
  call_stmt_vars = new hash_map<gimple *, varinfo_t>;

  memset (&stats, 0, sizeof (stats));
  shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
  init_base_vars ();

  gcc_obstack_init (&fake_var_decl_obstack);

  final_solutions = new hash_map<varinfo_t, pt_solution *>;
  gcc_obstack_init (&final_solutions_obstack);
}
/* Remove the REF and ADDRESS edges from GRAPH, as well as all the
   predecessor edges.  */

static void
remove_preds_and_fake_succs (constraint_graph_t graph)
{
  unsigned int i;

  /* Clear the implicit ref and address nodes from the successor
     lists.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      if (graph->succs[i])
	bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
			    FIRST_REF_NODE * 2);
    }

  /* Free the successor list for the non-ref nodes.  */
  for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
    {
      if (graph->succs[i])
	BITMAP_FREE (graph->succs[i]);
    }

  /* Now reallocate the size of the successor list as, and blow away
     the predecessor bitmaps.  */
  graph->size = varmap.length ();
  graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);

  free (graph->implicit_preds);
  graph->implicit_preds = NULL;
  free (graph->preds);
  graph->preds = NULL;
  bitmap_obstack_release (&predbitmap_obstack);
}
/* Solve the constraint set.  */

static void
solve_constraints (void)
{
  struct scc_info *si;

  /* Sort varinfos so that ones that cannot be pointed to are last.
     This makes bitmaps more efficient.  */
  unsigned int *map = XNEWVEC (unsigned int, varmap.length ());
  for (unsigned i = 0; i < integer_id + 1; ++i)
    map[i] = i;
  /* Start with non-register vars (as possibly address-taken), followed
     by register vars as conservative set of vars never appearing in
     the points-to solution bitmaps.  */
  unsigned j = integer_id + 1;
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    if (! varmap[i]->is_reg_var)
      map[i] = j++;
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    if (varmap[i]->is_reg_var)
      map[i] = j++;
  /* Shuffle varmap according to map.  */
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    {
      while (map[varmap[i]->id] != i)
	std::swap (varmap[i], varmap[map[varmap[i]->id]]);
      gcc_assert (bitmap_empty_p (varmap[i]->solution));
      varmap[i]->id = i;
      varmap[i]->next = map[varmap[i]->next];
      varmap[i]->head = map[varmap[i]->head];
    }
  /* Finally rewrite constraints.  */
  for (unsigned i = 0; i < constraints.length (); ++i)
    {
      constraints[i]->lhs.var = map[constraints[i]->lhs.var];
      constraints[i]->rhs.var = map[constraints[i]->rhs.var];
    }
  free (map);

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  init_graph (varmap.length () * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);

  build_succ_graph ();

  free_var_substitution_info (si);

  /* Attach complex constraints to graph nodes.  */
  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  solve_graph (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }
}
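
/* The permutation applied above can be pictured on a small example
   (x, y, z are made-up names): with varmap = { ..., x(reg), y(mem), z(reg) }
   after the special vars, MAP sends y to the first free slot and x, z after
   it, so all register vars - which never appear in points-to bitmaps - end
   up with the highest ids and the solution bitmaps stay dense at the low
   end.  */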
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  */

static void
compute_points_to_sets (void)
{
  basic_block bb;
  varinfo_t vi;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();

  intra_create_variable_infos (cfun);

  /* Now walk all statements and build the constraint set.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();

	  if (! virtual_operand_p (gimple_phi_result (phi)))
	    find_func_aliases (cfun, phi);
	}

      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);

	  find_func_aliases (cfun, stmt);
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file, 0);
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Post-process solutions for escapes through returns.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (greturn *ret = safe_dyn_cast <greturn *> (last_stmt (e->src)))
      {
	tree val = gimple_return_retval (ret);
	/* ??? Easy to handle simple indirections with some work.
	   Arbitrary references like foo.bar.baz are more difficult
	   (but conservatively easy enough with just looking at the base).
	   Mind to fixup find_func_aliases as well.  */
	if (!val || !SSA_VAR_P (val))
	  continue;

	/* returns happen last in non-IPA so they only influence
	   the ESCAPED solution and we can filter local variables.  */
	varinfo_t escaped_vi = get_varinfo (find (escaped_id));
	varinfo_t vi = lookup_vi_for_tree (val);
	bitmap delta = BITMAP_ALLOC (&pta_obstack);
	bitmap_iterator bi;
	unsigned i;
	for (; vi; vi = vi_next (vi))
	  {
	    varinfo_t part_vi = get_varinfo (find (vi->id));
	    EXECUTE_IF_AND_COMPL_IN_BITMAP (part_vi->solution,
					    escaped_vi->solution, 0, i, bi)
	      {
		varinfo_t pointed_to_vi = get_varinfo (i);
		if (pointed_to_vi->is_global_var
		    /* We delay marking of heap memory as global.  */
		    || pointed_to_vi->is_heap_var)
		  bitmap_set_bit (delta, i);
	      }
	  }

	/* Now compute the transitive closure.  */
	bitmap_ior_into (escaped_vi->solution, delta);
	bitmap new_delta = BITMAP_ALLOC (&pta_obstack);
	while (!bitmap_empty_p (delta))
	  {
	    EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
	      {
		varinfo_t pointed_to_vi = get_varinfo (i);
		pointed_to_vi = get_varinfo (find (pointed_to_vi->id));
		unsigned j;
		bitmap_iterator bi2;
		EXECUTE_IF_AND_COMPL_IN_BITMAP (pointed_to_vi->solution,
						escaped_vi->solution,
						0, j, bi2)
		  {
		    varinfo_t pointed_to_vi2 = get_varinfo (j);
		    if (pointed_to_vi2->is_global_var
			/* We delay marking of heap memory as global.  */
			|| pointed_to_vi2->is_heap_var)
		      bitmap_set_bit (new_delta, j);
		  }
	      }
	    bitmap_ior_into (escaped_vi->solution, new_delta);
	    bitmap_clear (delta);
	    std::swap (delta, new_delta);
	  }
	BITMAP_FREE (delta);
	BITMAP_FREE (new_delta);
      }

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  /* Compute the points-to set for ESCAPED used for call-clobber analysis.  */
  cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl,
						      get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  cfun->gimple_df->escaped.escaped = 0;

  /* Compute the points-to sets for pointer SSA_NAMEs.  */
  unsigned i;
  tree ptr;
  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      if (POINTER_TYPE_P (TREE_TYPE (ptr)))
	find_what_p_points_to (cfun->decl, ptr);
    }

  /* Compute the call-used/clobbered sets.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gcall *stmt;
	  struct pt_solution *pt;

	  stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
	  if (!stmt)
	    continue;

	  pt = gimple_call_use_set (stmt);
	  if (gimple_call_flags (stmt) & ECF_CONST)
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (cfun->decl, vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly used by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }

	  pt = gimple_call_clobber_set (stmt);
	  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (cfun->decl, vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly clobbered by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }
	}
    }

  timevar_pop (TV_TREE_PTA);
}
/* Delete created points-to sets.  */

static void
delete_points_to_sets (void)
{
  unsigned int i;

  delete shared_bitmap_table;
  shared_bitmap_table = NULL;
  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "Points to sets created:%d\n",
	     stats.points_to_sets_created);

  delete vi_for_tree;
  delete call_stmt_vars;
  bitmap_obstack_release (&pta_obstack);
  constraints.release ();

  for (i = 0; i < graph->size; i++)
    graph->complex[i].release ();
  free (graph->complex);

  free (graph->succs);
  free (graph->pe_rep);
  free (graph->indirect_cycles);
  free (graph);

  varmap.release ();
  variable_info_pool.release ();
  constraint_pool.release ();

  obstack_free (&fake_var_decl_obstack, NULL);

  delete final_solutions;
  obstack_free (&final_solutions_obstack, NULL);
}
struct vls_data
{
  unsigned short clique;
  bool escaped_p;
  bitmap rvars;
};

/* Mark "other" loads and stores as belonging to CLIQUE and with
   base zero.  */

static bool
visit_loadstore (gimple *, tree base, tree ref, void *data)
{
  unsigned short clique = ((vls_data *) data)->clique;
  bitmap rvars = ((vls_data *) data)->rvars;
  bool escaped_p = ((vls_data *) data)->escaped_p;
  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    {
      tree ptr = TREE_OPERAND (base, 0);
      if (TREE_CODE (ptr) == SSA_NAME)
	{
	  /* For parameters, get at the points-to set for the actual parm
	     decl.  */
	  if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	      && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
		  || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
	    ptr = SSA_NAME_VAR (ptr);

	  /* We need to make sure 'ptr' doesn't include any of
	     the restrict tags we added bases for in its points-to set.  */
	  varinfo_t vi = lookup_vi_for_tree (ptr);
	  if (! vi)
	    return false;

	  vi = get_varinfo (find (vi->id));
	  if (bitmap_intersect_p (rvars, vi->solution)
	      || (escaped_p && bitmap_bit_p (vi->solution, escaped_id)))
	    return false;
	}

      /* Do not overwrite existing cliques (that includes clique, base
	 pairs we just set).  */
      if (MR_DEPENDENCE_CLIQUE (base) == 0)
	{
	  MR_DEPENDENCE_CLIQUE (base) = clique;
	  MR_DEPENDENCE_BASE (base) = 0;
	}
    }

  /* For plain decl accesses see whether they are accesses to globals
     and rewrite them to MEM_REFs with { clique, 0 }.  */
  else if (VAR_P (base)
	   && is_global_var (base)
	   /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
	      ops callback.  */
	   && base != ref)
    {
      tree *basep = &ref;
      while (handled_component_p (*basep))
	basep = &TREE_OPERAND (*basep, 0);
      gcc_assert (VAR_P (*basep));
      tree ptr = build_fold_addr_expr (*basep);
      tree zero = build_int_cst (TREE_TYPE (ptr), 0);
      *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
      MR_DEPENDENCE_CLIQUE (*basep) = clique;
      MR_DEPENDENCE_BASE (*basep) = 0;
    }

  return false;
}
struct msdi_data
{
  tree ptr;
  unsigned short *clique;
  unsigned short *last_ruid;
  varinfo_t restrict_var;
};

/* If BASE is a MEM_REF then assign a clique, base pair to it, updating
   CLIQUE, *RESTRICT_VAR and LAST_RUID as passed via DATA.
   Return whether dependence info was assigned to BASE.  */

static bool
maybe_set_dependence_info (gimple *, tree base, tree, void *data)
{
  tree ptr = ((msdi_data *)data)->ptr;
  unsigned short &clique = *((msdi_data *)data)->clique;
  unsigned short &last_ruid = *((msdi_data *)data)->last_ruid;
  varinfo_t restrict_var = ((msdi_data *)data)->restrict_var;
  if ((TREE_CODE (base) == MEM_REF
       || TREE_CODE (base) == TARGET_MEM_REF)
      && TREE_OPERAND (base, 0) == ptr)
    {
      /* Do not overwrite existing cliques.  This avoids overwriting dependence
	 info inlined from a function with restrict parameters inlined
	 into a function with restrict parameters.  This usually means we
	 prefer to be precise in innermost loops.  */
      if (MR_DEPENDENCE_CLIQUE (base) == 0)
	{
	  if (clique == 0)
	    {
	      if (cfun->last_clique == 0)
		cfun->last_clique = 1;
	      clique = 1;
	    }
	  if (restrict_var->ruid == 0)
	    restrict_var->ruid = ++last_ruid;
	  MR_DEPENDENCE_CLIQUE (base) = clique;
	  MR_DEPENDENCE_BASE (base) = restrict_var->ruid;
	  return true;
	}
    }
  return false;
}
/* Clear dependence info for the clique DATA.  */

static bool
clear_dependence_clique (gimple *, tree base, tree, void *data)
{
  unsigned short clique = (uintptr_t)data;
  if ((TREE_CODE (base) == MEM_REF
       || TREE_CODE (base) == TARGET_MEM_REF)
      && MR_DEPENDENCE_CLIQUE (base) == clique)
    {
      MR_DEPENDENCE_CLIQUE (base) = 0;
      MR_DEPENDENCE_BASE (base) = 0;
    }

  return false;
}
/* Compute the set of independent memory references based on restrict
   tags and their conservative propagation to the points-to sets.  */

static void
compute_dependence_clique (void)
{
  /* First clear the special "local" clique.  */
  basic_block bb;
  if (cfun->last_clique != 0)
    FOR_EACH_BB_FN (bb, cfun)
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	   !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  walk_stmt_load_store_ops (stmt, (void *)(uintptr_t) 1,
				    clear_dependence_clique,
				    clear_dependence_clique);
	}

  unsigned short clique = 0;
  unsigned short last_ruid = 0;
  bitmap rvars = BITMAP_ALLOC (NULL);
  bool escaped_p = false;
  for (unsigned i = 0; i < num_ssa_names; ++i)
    {
      tree ptr = ssa_name (i);
      if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
	continue;

      /* Avoid all this when ptr is not dereferenced?  */
      tree p = ptr;
      if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	  && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
	      || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
	p = SSA_NAME_VAR (ptr);
      varinfo_t vi = lookup_vi_for_tree (p);
      if (!vi)
	continue;
      vi = get_varinfo (find (vi->id));
      bitmap_iterator bi;
      unsigned j;
      varinfo_t restrict_var = NULL;
      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
	{
	  varinfo_t oi = get_varinfo (j);
	  if (oi->head != j)
	    oi = get_varinfo (oi->head);
	  if (oi->is_restrict_var)
	    {
	      if (restrict_var
		  && restrict_var != oi)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "found restrict pointed-to "
			       "for ");
		      print_generic_expr (dump_file, ptr);
		      fprintf (dump_file, " but not exclusively\n");
		    }
		  restrict_var = NULL;
		  break;
		}
	      restrict_var = oi;
	    }
	  /* NULL is the only other valid points-to entry.  */
	  else if (oi->id != nothing_id)
	    {
	      restrict_var = NULL;
	      break;
	    }
	}
      /* Ok, found that ptr must(!) point to a single(!) restrict
	 variable.  */
      /* ??? PTA isn't really a proper propagation engine to compute
	 this property.
	 ??? We could handle merging of two restricts by unifying them.  */
      if (restrict_var)
	{
	  /* Now look at possible dereferences of ptr.  */
	  imm_use_iterator ui;
	  gimple *use_stmt;
	  bool used = false;
	  msdi_data data = { ptr, &clique, &last_ruid, restrict_var };
	  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
	    used |= walk_stmt_load_store_ops (use_stmt, &data,
					      maybe_set_dependence_info,
					      maybe_set_dependence_info);
	  if (used)
	    {
	      /* Add all subvars to the set of restrict pointed-to
		 variables.  */
	      for (unsigned sv = restrict_var->head; sv != 0;
		   sv = get_varinfo (sv)->next)
		bitmap_set_bit (rvars, sv);
	      varinfo_t escaped = get_varinfo (find (escaped_id));
	      if (bitmap_bit_p (escaped->solution, restrict_var->id))
		escaped_p = true;
	    }
	}
    }

  if (clique != 0)
    {
      /* Assign the BASE id zero to all accesses not based on a restrict
	 pointer.  That way they get disambiguated against restrict
	 accesses but not against each other.  */
      /* ??? For restricts derived from globals (thus not incoming
	 parameters) we can't restrict scoping properly thus the following
	 is too aggressive there.  For now we have excluded those globals from
	 getting into the MR_DEPENDENCE machinery.  */
      vls_data data = { clique, escaped_p, rvars };
      FOR_EACH_BB_FN (bb, cfun)
	for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	     !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    gimple *stmt = gsi_stmt (gsi);
	    walk_stmt_load_store_ops (stmt, &data,
				      visit_loadstore, visit_loadstore);
	  }
    }

  BITMAP_FREE (rvars);
}
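
/* As a concrete illustration (names made up): for

     void f (int *__restrict p, int *__restrict q) { *p = *q; }

   both dereferences are based on restrict parameters, so they receive the
   same MR_DEPENDENCE_CLIQUE but different MR_DEPENDENCE_BASE values, which
   lets the alias oracle disambiguate *p from *q while still allowing either
   to conflict with accesses outside the clique.  */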
/* Compute points-to information for every SSA_NAME pointer in the
   current function and compute the transitive closure of escaped
   variables to re-initialize the call-clobber states of local variables.  */

unsigned int
compute_may_aliases (void)
{
  if (cfun->gimple_df->ipa_pta)
    {
      if (dump_file)
	{
	  fprintf (dump_file, "\nNot re-computing points-to information "
		   "because IPA points-to information is available.\n\n");

	  /* But still dump what we have computed so far.  */
	  dump_alias_info (dump_file);
	}

      return 0;
    }

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  Compute the reachability set of escaped and call-used
     variables.  */
  compute_points_to_sets ();

  /* Debugging dumps.  */
  if (dump_file)
    dump_alias_info (dump_file);

  /* Compute restrict-based memory disambiguations.  */
  compute_dependence_clique ();

  /* Deallocate memory used by aliasing data structures and the internal
     points-to solution.  */
  delete_points_to_sets ();

  gcc_assert (!need_ssa_update_p (cfun));

  return 0;
}
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  */

namespace {

const pass_data pass_data_build_alias =
{
  GIMPLE_PASS, /* type */
  "alias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_alias : public gimple_opt_pass
{
public:
  pass_build_alias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_alias, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_tree_pta; }

}; // class pass_build_alias

} // anon namespace

gimple_opt_pass *
make_pass_build_alias (gcc::context *ctxt)
{
  return new pass_build_alias (ctxt);
}
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  */

namespace {

const pass_data pass_data_build_ealias =
{
  GIMPLE_PASS, /* type */
  "ealias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_ealias : public gimple_opt_pass
{
public:
  pass_build_ealias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_ealias, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_tree_pta; }

}; // class pass_build_ealias

} // anon namespace

gimple_opt_pass *
make_pass_build_ealias (gcc::context *ctxt)
{
  return new pass_build_ealias (ctxt);
}
/* IPA PTA solutions for ESCAPED.  */
struct pt_solution ipa_escaped_pt
  = { true, false, false, false, false,
      false, false, false, false, false, NULL };

/* Associate node with varinfo DATA.  Worker for
   cgraph_for_symbol_thunks_and_aliases.  */
static bool
associate_varinfo_to_alias (struct cgraph_node *node, void *data)
{
  if ((node->alias
       || (node->thunk.thunk_p
	   && ! node->global.inlined_to))
      && node->get_availability () > AVAIL_INTERPOSABLE
      && !node->ifunc_resolver)
    insert_vi_for_tree (node->decl, (varinfo_t)data);
  return false;
}
/* Dump varinfo VI to FILE.  */

static void
dump_varinfo (FILE *file, varinfo_t vi)
{
  if (vi == NULL)
    return;

  fprintf (file, "%u: %s\n", vi->id, vi->name);

  const char *sep = " ";
  if (vi->is_artificial_var)
    fprintf (file, "%sartificial", sep);
  if (vi->is_special_var)
    fprintf (file, "%sspecial", sep);
  if (vi->is_unknown_size_var)
    fprintf (file, "%sunknown-size", sep);
  if (vi->is_full_var)
    fprintf (file, "%sfull", sep);
  if (vi->is_heap_var)
    fprintf (file, "%sheap", sep);
  if (vi->may_have_pointers)
    fprintf (file, "%smay-have-pointers", sep);
  if (vi->only_restrict_pointers)
    fprintf (file, "%sonly-restrict-pointers", sep);
  if (vi->is_restrict_var)
    fprintf (file, "%sis-restrict-var", sep);
  if (vi->is_global_var)
    fprintf (file, "%sglobal", sep);
  if (vi->is_ipa_escape_point)
    fprintf (file, "%sipa-escape-point", sep);
  if (vi->is_fn_info)
    fprintf (file, "%sfn-info", sep);
  if (vi->ruid)
    fprintf (file, "%srestrict-uid:%u", sep, vi->ruid);
  if (vi->next)
    fprintf (file, "%snext:%u", sep, vi->next);
  if (vi->head != vi->id)
    fprintf (file, "%shead:%u", sep, vi->head);
  if (vi->offset)
    fprintf (file, "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset);
  if (vi->size != ~(unsigned HOST_WIDE_INT)0)
    fprintf (file, "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size);
  if (vi->fullsize != ~(unsigned HOST_WIDE_INT)0
      && vi->fullsize != vi->size)
    fprintf (file, "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep,
	     vi->fullsize);
  fprintf (file, "\n");

  if (vi->solution && !bitmap_empty_p (vi->solution))
    {
      bitmap_iterator bi;
      unsigned i;
      fprintf (file, " solution: {");
      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
	fprintf (file, " %u", i);
      fprintf (file, " }\n");
    }

  if (vi->oldsolution && !bitmap_empty_p (vi->oldsolution)
      && !bitmap_equal_p (vi->solution, vi->oldsolution))
    {
      bitmap_iterator bi;
      unsigned i;
      fprintf (file, " oldsolution: {");
      EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi)
	fprintf (file, " %u", i);
      fprintf (file, " }\n");
    }
}
/* Dump varinfo VI to stderr.  */

DEBUG_FUNCTION void
debug_varinfo (varinfo_t vi)
{
  dump_varinfo (stderr, vi);
}

/* Dump varmap to FILE.  */

static void
dump_varmap (FILE *file)
{
  if (varmap.length () == 0)
    return;

  fprintf (file, "variables:\n");

  for (unsigned int i = 0; i < varmap.length (); ++i)
    {
      varinfo_t vi = get_varinfo (i);
      dump_varinfo (file, vi);
    }

  fprintf (file, "\n");
}

/* Dump varmap to stderr.  */

DEBUG_FUNCTION void
debug_varmap (void)
{
  dump_varmap (stderr);
}
/* Compute whether node is referred to non-locally.  Worker for
   cgraph_for_symbol_thunks_and_aliases.  */
static bool
refered_from_nonlocal_fn (struct cgraph_node *node, void *data)
{
  bool *nonlocal_p = (bool *)data;
  *nonlocal_p |= (node->used_from_other_partition
		  || node->externally_visible
		  || node->force_output
		  || lookup_attribute ("noipa", DECL_ATTRIBUTES (node->decl)));
  return false;
}

/* Same for varpool nodes.  */
static bool
refered_from_nonlocal_var (struct varpool_node *node, void *data)
{
  bool *nonlocal_p = (bool *)data;
  *nonlocal_p |= (node->used_from_other_partition
		  || node->externally_visible
		  || node->force_output);
  return false;
}
/* Execute the driver for IPA PTA.  */
static unsigned int
ipa_pta_execute (void)
{
  struct cgraph_node *node;
  varpool_node *var;
  unsigned int from = 0;

  in_ipa_mode = 1;

  init_alias_vars ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      symtab->dump (dump_file);
      fprintf (dump_file, "\n");
    }

  if (dump_file)
    {
      fprintf (dump_file, "Generating generic constraints\n\n");
      dump_constraints (dump_file, from);
      fprintf (dump_file, "\n");
      from = constraints.length ();
    }

  /* Build the constraints.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      varinfo_t vi;
      /* Nodes without a body are not interesting.  Especially do not
	 visit clones at this point for now - we get duplicate decls
	 there for inline clones at least.  */
      if (!node->has_gimple_body_p () || node->global.inlined_to)
	continue;
      node->get_body ();

      gcc_assert (!node->clone_of);

      /* For externally visible or attribute used annotated functions use
	 local constraints for their arguments.
	 For local functions we see all callers and thus do not need initial
	 constraints for parameters.  */
      bool nonlocal_p = (node->used_from_other_partition
			 || node->externally_visible
			 || node->force_output
			 || lookup_attribute ("noipa",
					      DECL_ATTRIBUTES (node->decl)));
      node->call_for_symbol_thunks_and_aliases (refered_from_nonlocal_fn,
						&nonlocal_p, true);

      vi = create_function_info_for (node->decl,
				     alias_get_name (node->decl), false,
				     nonlocal_p);
      if (dump_file
	  && from != constraints.length ())
	{
	  fprintf (dump_file,
		   "Generating initial constraints for %s", node->name ());
	  if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
	    fprintf (dump_file, " (%s)",
		     IDENTIFIER_POINTER
		       (DECL_ASSEMBLER_NAME (node->decl)));
	  fprintf (dump_file, "\n\n");
	  dump_constraints (dump_file, from);
	  fprintf (dump_file, "\n");

	  from = constraints.length ();
	}

      node->call_for_symbol_thunks_and_aliases
	(associate_varinfo_to_alias, vi, true);
    }

  /* Create constraints for global variables and their initializers.  */
  FOR_EACH_VARIABLE (var)
    {
      if (var->alias && var->analyzed)
	continue;

      varinfo_t vi = get_vi_for_tree (var->decl);

      /* For the purpose of IPA PTA unit-local globals are not
	 escape points.  */
      bool nonlocal_p = (var->used_from_other_partition
			 || var->externally_visible
			 || var->force_output);
      var->call_for_symbol_and_aliases (refered_from_nonlocal_var,
					&nonlocal_p, true);
      if (nonlocal_p)
	vi->is_ipa_escape_point = true;
    }

  if (dump_file
      && from != constraints.length ())
    {
      fprintf (dump_file,
	       "Generating constraints for global initializers\n\n");
      dump_constraints (dump_file, from);
      fprintf (dump_file, "\n");
      from = constraints.length ();
    }

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      struct function *func;
      basic_block bb;

      /* Nodes without a body are not interesting.  */
      if (!node->has_gimple_body_p () || node->clone_of)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file,
		   "Generating constraints for %s", node->name ());
	  if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
	    fprintf (dump_file, " (%s)",
		     IDENTIFIER_POINTER
		       (DECL_ASSEMBLER_NAME (node->decl)));
	  fprintf (dump_file, "\n");
	}

      func = DECL_STRUCT_FUNCTION (node->decl);
      gcc_assert (cfun == NULL);

      /* Build constraints for the function body.  */
      FOR_EACH_BB_FN (bb, func)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *phi = gsi.phi ();

	      if (! virtual_operand_p (gimple_phi_result (phi)))
		find_func_aliases (func, phi);
	    }

	  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple *stmt = gsi_stmt (gsi);

	      find_func_aliases (func, stmt);
	      find_func_clobbers (func, stmt);
	    }
	}

      if (dump_file)
	{
	  fprintf (dump_file, "\n");
	  dump_constraints (dump_file, from);
	  fprintf (dump_file, "\n");
	  from = constraints.length ();
	}
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  /* Now post-process solutions to handle locals from different
     runtime instantiations coming in through recursive invocations.  */
  unsigned shadow_var_cnt = 0;
  for (unsigned i = 1; i < varmap.length (); ++i)
    {
      varinfo_t fi = get_varinfo (i);
      if (fi->is_fn_info
	  && fi->decl)
	/* Automatic variables pointed to by their containing functions
	   parameters need this treatment.  */
	for (varinfo_t ai = first_vi_for_offset (fi, fi_parm_base);
	     ai; ai = vi_next (ai))
	  {
	    varinfo_t vi = get_varinfo (find (ai->id));
	    bitmap_iterator bi;
	    unsigned j;
	    EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
	      {
		varinfo_t pt = get_varinfo (j);
		if (pt->shadow_var_uid == 0
		    && pt->decl
		    && auto_var_in_fn_p (pt->decl, fi->decl))
		  {
		    pt->shadow_var_uid = allocate_decl_uid ();
		    shadow_var_cnt++;
		  }
	      }
	  }
      /* As well as global variables which are another way of passing
	 arguments to recursive invocations.  */
      else if (fi->is_global_var)
	{
	  for (varinfo_t ai = fi; ai; ai = vi_next (ai))
	    {
	      varinfo_t vi = get_varinfo (find (ai->id));
	      bitmap_iterator bi;
	      unsigned j;
	      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
		{
		  varinfo_t pt = get_varinfo (j);
		  if (pt->shadow_var_uid == 0
		      && pt->decl
		      && auto_var_p (pt->decl))
		    {
		      pt->shadow_var_uid = allocate_decl_uid ();
		      shadow_var_cnt++;
		    }
		}
	    }
	}
    }
  if (shadow_var_cnt && dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Allocated %u shadow variables for locals "
	     "maybe leaking into recursive invocations of their containing "
	     "functions\n", shadow_var_cnt);

  /* Compute the global points-to sets for ESCAPED.
     ??? Note that the computed escape set is not correct
     for the whole unit as we fail to consider graph edges to
     externally visible functions.  */
  ipa_escaped_pt = find_what_var_points_to (NULL, get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  ipa_escaped_pt.ipa_escaped = 0;

  /* Assign the points-to sets to the SSA names in the unit.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      tree ptr;
      struct function *fn;
      unsigned i;
      basic_block bb;

      /* Nodes without a body are not interesting.  */
      if (!node->has_gimple_body_p () || node->clone_of)
	continue;

      fn = DECL_STRUCT_FUNCTION (node->decl);

      /* Compute the points-to sets for pointer SSA_NAMEs.  */
      FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
	{
	  if (ptr
	      && POINTER_TYPE_P (TREE_TYPE (ptr)))
	    find_what_p_points_to (node->decl, ptr);
	}

      /* Compute the call-use and call-clobber sets for indirect calls
	 and calls to external functions.  */
      FOR_EACH_BB_FN (bb, fn)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gcall *stmt;
	      struct pt_solution *pt;
	      varinfo_t vi, fi;
	      tree decl;

	      stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
	      if (!stmt)
		continue;

	      /* Handle direct calls to functions with body.  */
	      decl = gimple_call_fndecl (stmt);

	      {
		tree called_decl = NULL_TREE;
		if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
		  called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
		else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
		  called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);

		if (called_decl != NULL_TREE
		    && !fndecl_maybe_in_other_partition (called_decl))
		  decl = called_decl;
	      }

	      if (decl
		  && (fi = lookup_vi_for_tree (decl))
		  && fi->is_fn_info)
		{
		  *gimple_call_clobber_set (stmt)
		     = find_what_var_points_to
			 (node->decl, first_vi_for_offset (fi, fi_clobbers));
		  *gimple_call_use_set (stmt)
		     = find_what_var_points_to
			 (node->decl, first_vi_for_offset (fi, fi_uses));
		}
	      /* Handle direct calls to external functions.  */
	      else if (decl && (!fi || fi->decl))
		{
		  pt = gimple_call_use_set (stmt);
		  if (gimple_call_flags (stmt) & ECF_CONST)
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (node->decl, vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly used by calls.  */
		      /* ??? ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }

		  pt = gimple_call_clobber_set (stmt);
		  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (node->decl, vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly clobbered by calls.  */
		      /* ??? ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }
		}
	      /* Handle indirect calls.  */
	      else if ((fi = get_fi_for_callee (stmt)))
		{
		  /* We need to accumulate all clobbers/uses of all possible
		     callees.  */
		  fi = get_varinfo (find (fi->id));
		  /* If we cannot constrain the set of functions we'll end up
		     calling we end up using/clobbering everything.  */
		  if (bitmap_bit_p (fi->solution, anything_id)
		      || bitmap_bit_p (fi->solution, nonlocal_id)
		      || bitmap_bit_p (fi->solution, escaped_id))
		    {
		      pt_solution_reset (gimple_call_clobber_set (stmt));
		      pt_solution_reset (gimple_call_use_set (stmt));
		    }
		  else
		    {
		      bitmap_iterator bi;
		      unsigned i;
		      struct pt_solution *uses, *clobbers;

		      uses = gimple_call_use_set (stmt);
		      clobbers = gimple_call_clobber_set (stmt);
		      memset (uses, 0, sizeof (struct pt_solution));
		      memset (clobbers, 0, sizeof (struct pt_solution));
		      EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
			{
			  struct pt_solution sol;

			  vi = get_varinfo (i);
			  if (!vi->is_fn_info)
			    {
			      /* ??? We could be more precise here?  */
			      uses->nonlocal = 1;
			      uses->ipa_escaped = 1;
			      clobbers->nonlocal = 1;
			      clobbers->ipa_escaped = 1;
			      continue;
			    }

			  if (!uses->anything)
			    {
			      sol = find_what_var_points_to
				      (node->decl,
				       first_vi_for_offset (vi, fi_uses));
			      pt_solution_ior_into (uses, &sol);
			    }
			  if (!clobbers->anything)
			    {
			      sol = find_what_var_points_to
				      (node->decl,
				       first_vi_for_offset (vi, fi_clobbers));
			      pt_solution_ior_into (clobbers, &sol);
			    }
			}
		    }
		}
	    }
	}

      fn->gimple_df->ipa_pta = true;

      /* We have to re-set the final-solution cache after each function
	 because what is a "global" is dependent on function context.  */
      final_solutions->empty ();
      obstack_free (&final_solutions_obstack, NULL);
      gcc_obstack_init (&final_solutions_obstack);
    }

  delete_points_to_sets ();

  in_ipa_mode = 0;

  return 0;
}
namespace {

const pass_data pass_data_ipa_pta =
{
  SIMPLE_IPA_PASS, /* type */
  "pta", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PTA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pta : public simple_ipa_opt_pass
{
public:
  pass_ipa_pta (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (optimize
	      && flag_ipa_pta
	      /* Don't bother doing anything if the program has errors.  */
	      && !seen_error ());
    }

  opt_pass * clone () { return new pass_ipa_pta (m_ctxt); }

  virtual unsigned int execute (function *) { return ipa_pta_execute (); }

}; // class pass_ipa_pta

} // anon namespace

simple_ipa_opt_pass *
make_pass_ipa_pta (gcc::context *ctxt)
{
  return new pass_ipa_pta (ctxt);
}