1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dberlin@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3 of the License, or
11 (at your option) any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "basic-block.h"
32 #include "tree-flow.h"
33 #include "tree-inline.h"
34 #include "diagnostic-core.h"
39 #include "tree-pass.h"
40 #include "alloc-pool.h"
41 #include "splay-tree.h"
45 #include "pointer-set.h"
47 /* The idea behind this analyzer is to generate set constraints from the
48 program, then solve the resulting constraints in order to generate the
51 Set constraints are a way of modeling program analysis problems that
52 involve sets. They consist of an inclusion constraint language,
53 describing the variables (each variable is a set) and operations that
54 are involved on the variables, and a set of rules that derive facts
55 from these operations. To solve a system of set constraints, you derive
56 all possible facts under the rules, which gives you the correct sets
59 See "Efficient Field-sensitive pointer analysis for C" by "David
60 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
61 http://citeseer.ist.psu.edu/pearce04efficient.html
63 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
64 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
65 http://citeseer.ist.psu.edu/heintze01ultrafast.html
67 There are three types of real constraint expressions, DEREF,
68 ADDRESSOF, and SCALAR. Each constraint expression consists
69 of a constraint type, a variable, and an offset.
71 SCALAR is a constraint expression type used to represent x, whether
72 it appears on the LHS or the RHS of a statement.
73 DEREF is a constraint expression type used to represent *x, whether
74 it appears on the LHS or the RHS of a statement.
75 ADDRESSOF is a constraint expression used to represent &x, whether
76 it appears on the LHS or the RHS of a statement.
78 Each pointer variable in the program is assigned an integer id, and
79 each field of a structure variable is assigned an integer id as well.
81 Structure variables are linked to their list of fields through a "next
82 field" in each variable that points to the next field in offset
84 Each variable for a structure field has
86 1. "size", that tells the size in bits of that field.
87 2. "fullsize", that tells the size in bits of the entire structure.
88 3. "offset", that tells the offset in bits from the beginning of the
89 structure to this field.
101 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
102 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
103 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
106 In order to solve the system of set constraints, the following is
109 1. Each constraint variable x has a solution set associated with it,
112 2. Constraints are separated into direct, copy, and complex.
113 Direct constraints are ADDRESSOF constraints that require no extra
114 processing, such as P = &Q
115 Copy constraints are those of the form P = Q.
116 Complex constraints are all the constraints involving dereferences
117 and offsets (including offsetted copies).
119 3. All direct constraints of the form P = &Q are processed, such
120 that Q is added to Sol(P)
122 4. All complex constraints for a given constraint variable are stored in a
123 linked list attached to that variable's node.
125 5. A directed graph is built out of the copy constraints. Each
126 constraint variable is a node in the graph, and an edge from
127 Q to P is added for each copy constraint of the form P = Q
129 6. The graph is then walked, and solution sets are
130 propagated along the copy edges, such that an edge from Q to P
131 causes Sol(P) <- Sol(P) union Sol(Q).
133 7. As we visit each node, all complex constraints associated with
134 that node are processed by adding appropriate copy edges to the graph, or the
135 appropriate variables to the solution set.
137 8. The process of walking the graph is iterated until no solution
140 Prior to walking the graph in steps 6 and 7, We perform static
141 cycle elimination on the constraint graph, as well
142 as off-line variable substitution.
144 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
145 on and turned into anything), but isn't. You can just see what offset
146 inside the pointed-to struct it's going to access.
148 TODO: Constant bounded arrays can be handled as if they were structs of the
149 same number of elements.
151 TODO: Modeling heap and incoming pointers becomes much better if we
152 add fields to them as we discover them, which we could do.
154 TODO: We could handle unions, but to be honest, it's probably not
155 worth the pain or slowdown. */
157 /* IPA-PTA optimizations possible.
159 When the indirect function called is ANYTHING we can add disambiguation
160 based on the function signatures (or simply the parameter count which
161 is the varinfo size). We also do not need to consider functions that
162 do not have their address taken.
164 The is_global_var bit which marks escape points is overly conservative
165 in IPA mode. Split it to is_escape_point and is_global_var - only
166 externally visible globals are escape points in IPA mode. This is
167 also needed to fix the pt_solution_includes_global predicate
168 (and thus ptr_deref_may_alias_global_p).
170 The way we introduce DECL_PT_UID to avoid fixing up all points-to
171 sets in the translation unit when we copy a DECL during inlining
172 pessimizes precision. The advantage is that the DECL_PT_UID keeps
173 compile-time and memory usage overhead low - the points-to sets
174 do not grow or get unshared as they would during a fixup phase.
175 An alternative solution is to delay IPA PTA until after all
176 inlining transformations have been applied.
178 The way we propagate clobber/use information isn't optimized.
179 It should use a new complex constraint that properly filters
180 out local variables of the callee (though that would make
181 the sets invalid after inlining). OTOH we might as well
182 admit defeat to WHOPR and simply do all the clobber/use analysis
183 and propagation after PTA finished but before we threw away
184 points-to information for memory variables. WHOPR and PTA
185 do not play along well anyway - the whole constraint solving
186 would need to be done in WPA phase and it will be very interesting
187 to apply the results to local SSA names during LTRANS phase.
189 We probably should compute a per-function unit-ESCAPE solution
190 propagating it simply like the clobber / uses solutions. The
191 solution can go alongside the non-IPA escaped solution and be
192 used to query which vars escape the unit through a function.
194 We never put function decls in points-to sets so we do not
195 keep the set of called functions for indirect calls.
197 And probably more. */
199 static bool use_field_sensitive
= true;
200 static int in_ipa_mode
= 0;
202 /* Used for predecessor bitmaps. */
203 static bitmap_obstack predbitmap_obstack
;
205 /* Used for points-to sets. */
206 static bitmap_obstack pta_obstack
;
208 /* Used for oldsolution members of variables. */
209 static bitmap_obstack oldpta_obstack
;
211 /* Used for per-solver-iteration bitmaps. */
212 static bitmap_obstack iteration_obstack
;
214 static unsigned int create_variable_info_for (tree
, const char *);
215 typedef struct constraint_graph
*constraint_graph_t
;
216 static void unify_nodes (constraint_graph_t
, unsigned int, unsigned int, bool);
219 typedef struct constraint
*constraint_t
;
222 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
224 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
226 static struct constraint_stats
228 unsigned int total_vars
;
229 unsigned int nonpointer_vars
;
230 unsigned int unified_vars_static
;
231 unsigned int unified_vars_dynamic
;
232 unsigned int iterations
;
233 unsigned int num_edges
;
234 unsigned int num_implicit_edges
;
235 unsigned int points_to_sets_created
;
240 /* ID of this variable */
243 /* True if this is a variable created by the constraint analysis, such as
244 heap variables and constraints we had to break up. */
245 unsigned int is_artificial_var
: 1;
247 /* True if this is a special variable whose solution set should not be
249 unsigned int is_special_var
: 1;
251 /* True for variables whose size is not known or variable. */
252 unsigned int is_unknown_size_var
: 1;
254 /* True for (sub-)fields that represent a whole variable. */
255 unsigned int is_full_var
: 1;
257 /* True if this is a heap variable. */
258 unsigned int is_heap_var
: 1;
260 /* True if this field may contain pointers. */
261 unsigned int may_have_pointers
: 1;
263 /* True if this field has only restrict qualified pointers. */
264 unsigned int only_restrict_pointers
: 1;
266 /* True if this represents a global variable. */
267 unsigned int is_global_var
: 1;
269 /* True if this represents a IPA function info. */
270 unsigned int is_fn_info
: 1;
272 /* A link to the variable for the next field in this structure. */
273 struct variable_info
*next
;
275 /* Offset of this variable, in bits, from the base variable */
276 unsigned HOST_WIDE_INT offset
;
278 /* Size of the variable, in bits. */
279 unsigned HOST_WIDE_INT size
;
281 /* Full size of the base variable, in bits. */
282 unsigned HOST_WIDE_INT fullsize
;
284 /* Name of this variable */
287 /* Tree that this variable is associated with. */
290 /* Points-to set for this variable. */
293 /* Old points-to set for this variable. */
296 typedef struct variable_info
*varinfo_t
;
298 static varinfo_t
first_vi_for_offset (varinfo_t
, unsigned HOST_WIDE_INT
);
299 static varinfo_t
first_or_preceding_vi_for_offset (varinfo_t
,
300 unsigned HOST_WIDE_INT
);
301 static varinfo_t
lookup_vi_for_tree (tree
);
302 static inline bool type_can_have_subvars (const_tree
);
304 /* Pool of variable info structures. */
305 static alloc_pool variable_info_pool
;
309 /* Table of variable info structures for constraint variables.
310 Indexed directly by variable info id. */
311 static vec
<varinfo_t
> varmap
;
313 /* Return the varmap element N */
315 static inline varinfo_t
316 get_varinfo (unsigned int n
)
321 /* Static IDs for the special variables. */
322 enum { nothing_id
= 0, anything_id
= 1, readonly_id
= 2,
323 escaped_id
= 3, nonlocal_id
= 4,
324 storedanything_id
= 5, integer_id
= 6 };
326 /* Return a new variable info structure consisting for a variable
327 named NAME, and using constraint graph node NODE. Append it
328 to the vector of variable info structures. */
331 new_var_info (tree t
, const char *name
)
333 unsigned index
= varmap
.length ();
334 varinfo_t ret
= (varinfo_t
) pool_alloc (variable_info_pool
);
339 /* Vars without decl are artificial and do not have sub-variables. */
340 ret
->is_artificial_var
= (t
== NULL_TREE
);
341 ret
->is_special_var
= false;
342 ret
->is_unknown_size_var
= false;
343 ret
->is_full_var
= (t
== NULL_TREE
);
344 ret
->is_heap_var
= false;
345 ret
->may_have_pointers
= true;
346 ret
->only_restrict_pointers
= false;
347 ret
->is_global_var
= (t
== NULL_TREE
);
348 ret
->is_fn_info
= false;
350 ret
->is_global_var
= (is_global_var (t
)
351 /* We have to treat even local register variables
353 || (TREE_CODE (t
) == VAR_DECL
354 && DECL_HARD_REGISTER (t
)));
355 ret
->solution
= BITMAP_ALLOC (&pta_obstack
);
356 ret
->oldsolution
= NULL
;
361 varmap
.safe_push (ret
);
367 /* A map mapping call statements to per-stmt variables for uses
368 and clobbers specific to the call. */
369 struct pointer_map_t
*call_stmt_vars
;
371 /* Lookup or create the variable for the call statement CALL. */
374 get_call_vi (gimple call
)
379 slot_p
= pointer_map_insert (call_stmt_vars
, call
);
381 return (varinfo_t
) *slot_p
;
383 vi
= new_var_info (NULL_TREE
, "CALLUSED");
387 vi
->is_full_var
= true;
389 vi
->next
= vi2
= new_var_info (NULL_TREE
, "CALLCLOBBERED");
393 vi2
->is_full_var
= true;
395 *slot_p
= (void *) vi
;
399 /* Lookup the variable for the call statement CALL representing
400 the uses. Returns NULL if there is nothing special about this call. */
403 lookup_call_use_vi (gimple call
)
407 slot_p
= pointer_map_contains (call_stmt_vars
, call
);
409 return (varinfo_t
) *slot_p
;
414 /* Lookup the variable for the call statement CALL representing
415 the clobbers. Returns NULL if there is nothing special about this call. */
418 lookup_call_clobber_vi (gimple call
)
420 varinfo_t uses
= lookup_call_use_vi (call
);
427 /* Lookup or create the variable for the call statement CALL representing
431 get_call_use_vi (gimple call
)
433 return get_call_vi (call
);
436 /* Lookup or create the variable for the call statement CALL representing
439 static varinfo_t ATTRIBUTE_UNUSED
440 get_call_clobber_vi (gimple call
)
442 return get_call_vi (call
)->next
;
446 typedef enum {SCALAR
, DEREF
, ADDRESSOF
} constraint_expr_type
;
448 /* An expression that appears in a constraint. */
450 struct constraint_expr
452 /* Constraint type. */
453 constraint_expr_type type
;
455 /* Variable we are referring to in the constraint. */
458 /* Offset, in bits, of this constraint from the beginning of
459 variables it ends up referring to.
461 IOW, in a deref constraint, we would deref, get the result set,
462 then add OFFSET to each member. */
463 HOST_WIDE_INT offset
;
466 /* Use 0x8000... as special unknown offset. */
467 #define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1))
469 typedef struct constraint_expr ce_s
;
470 static void get_constraint_for_1 (tree
, vec
<ce_s
> *, bool, bool);
471 static void get_constraint_for (tree
, vec
<ce_s
> *);
472 static void get_constraint_for_rhs (tree
, vec
<ce_s
> *);
473 static void do_deref (vec
<ce_s
> *);
475 /* Our set constraints are made up of two constraint expressions, one
478 As described in the introduction, our set constraints each represent an
479 operation between set valued variables.
483 struct constraint_expr lhs
;
484 struct constraint_expr rhs
;
487 /* List of constraints that we use to build the constraint graph from. */
489 static vec
<constraint_t
> constraints
;
490 static alloc_pool constraint_pool
;
492 /* The constraint graph is represented as an array of bitmaps
493 containing successor nodes. */
495 struct constraint_graph
497 /* Size of this graph, which may be different than the number of
498 nodes in the variable map. */
501 /* Explicit successors of each node. */
504 /* Implicit predecessors of each node (Used for variable
506 bitmap
*implicit_preds
;
508 /* Explicit predecessors of each node (Used for variable substitution). */
511 /* Indirect cycle representatives, or -1 if the node has no indirect
513 int *indirect_cycles
;
515 /* Representative node for a node. rep[a] == a unless the node has
519 /* Equivalence class representative for a label. This is used for
520 variable substitution. */
523 /* Pointer equivalence label for a node. All nodes with the same
524 pointer equivalence label can be unified together at some point
525 (either during constraint optimization or after the constraint
529 /* Pointer equivalence representative for a label. This is used to
530 handle nodes that are pointer equivalent but not location
531 equivalent. We can unite these once the addressof constraints
532 are transformed into initial points-to sets. */
535 /* Pointer equivalence label for each node, used during variable
537 unsigned int *pointer_label
;
539 /* Location equivalence label for each node, used during location
540 equivalence finding. */
541 unsigned int *loc_label
;
543 /* Pointed-by set for each node, used during location equivalence
544 finding. This is pointed-by rather than pointed-to, because it
545 is constructed using the predecessor graph. */
548 /* Points to sets for pointer equivalence. This is *not* the actual
549 points-to sets for nodes. */
552 /* Bitmap of nodes where the bit is set if the node is a direct
553 node. Used for variable substitution. */
554 sbitmap direct_nodes
;
556 /* Bitmap of nodes where the bit is set if the node is address
557 taken. Used for variable substitution. */
558 bitmap address_taken
;
560 /* Vector of complex constraints for each graph node. Complex
561 constraints are those involving dereferences or offsets that are
563 vec
<constraint_t
> *complex;
566 static constraint_graph_t graph
;
568 /* During variable substitution and the offline version of indirect
569 cycle finding, we create nodes to represent dereferences and
570 address taken constraints. These represent where these start and
572 #define FIRST_REF_NODE (varmap).length ()
573 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
575 /* Return the representative node for NODE, if NODE has been unioned
577 This function performs path compression along the way to finding
578 the representative. */
/* NOTE(review): this extract is missing lines of the original (return
   type, braces, and the trailing "return node;" fall-through) -- verify
   against the full source.  */
581 find (unsigned int node
)
/* NODE must be a valid index into the constraint graph.  */
583 gcc_assert (node
< graph
->size
);
/* If NODE is not its own representative, recurse to the root and
   store it back (path compression) so later lookups are cheap.  */
584 if (graph
->rep
[node
] != node
)
585 return graph
->rep
[node
] = find (graph
->rep
[node
]);
589 /* Union the TO and FROM nodes to the TO nodes.
590 Note that at some point in the future, we may want to do
591 union-by-rank, in which case we are going to have to return the
592 node we unified to. */
/* NOTE(review): return statements and braces are not visible in this
   extract; presumably the function reports whether a union happened.  */
595 unite (unsigned int to
, unsigned int from
)
/* Both indices must be in range for the graph.  */
597 gcc_assert (to
< graph
->size
&& from
< graph
->size
);
/* Redirect FROM to TO only when they differ and FROM is not already
   represented by TO.  */
598 if (to
!= from
&& graph
->rep
[from
] != to
)
600 graph
->rep
[from
] = to
;
606 /* Create a new constraint consisting of LHS and RHS expressions. */
/* NOTE(review): the assignments of LHS/RHS into RET and the return of
   RET fall outside this view -- confirm against the full source.  */
609 new_constraint (const struct constraint_expr lhs
,
610 const struct constraint_expr rhs
)
/* Constraints come from a dedicated alloc pool so they can be freed
   in bulk when constraint solving is done.  */
612 constraint_t ret
= (constraint_t
) pool_alloc (constraint_pool
);
618 /* Print out constraint C to FILE. */
621 dump_constraint (FILE *file
, constraint_t c
)
623 if (c
->lhs
.type
== ADDRESSOF
)
625 else if (c
->lhs
.type
== DEREF
)
627 fprintf (file
, "%s", get_varinfo (c
->lhs
.var
)->name
);
628 if (c
->lhs
.offset
== UNKNOWN_OFFSET
)
629 fprintf (file
, " + UNKNOWN");
630 else if (c
->lhs
.offset
!= 0)
631 fprintf (file
, " + " HOST_WIDE_INT_PRINT_DEC
, c
->lhs
.offset
);
632 fprintf (file
, " = ");
633 if (c
->rhs
.type
== ADDRESSOF
)
635 else if (c
->rhs
.type
== DEREF
)
637 fprintf (file
, "%s", get_varinfo (c
->rhs
.var
)->name
);
638 if (c
->rhs
.offset
== UNKNOWN_OFFSET
)
639 fprintf (file
, " + UNKNOWN");
640 else if (c
->rhs
.offset
!= 0)
641 fprintf (file
, " + " HOST_WIDE_INT_PRINT_DEC
, c
->rhs
.offset
);
645 void debug_constraint (constraint_t
);
646 void debug_constraints (void);
647 void debug_constraint_graph (void);
648 void debug_solution_for_var (unsigned int);
649 void debug_sa_points_to_info (void);
651 /* Print out constraint C to stderr. */
/* Debug helper: dump a single constraint followed by a newline.  */
654 debug_constraint (constraint_t c
)
656 dump_constraint (stderr
, c
);
657 fprintf (stderr
, "\n");
660 /* Print out all constraints to FILE */
/* Dump every constraint in the global `constraints' vector starting
   at index FROM, one per line.  */
663 dump_constraints (FILE *file
, int from
)
667 for (i
= from
; constraints
.iterate (i
, &c
); i
++)
670 dump_constraint (file
, c
);
671 fprintf (file
, "\n");
675 /* Print out all constraints to stderr. */
/* Debug helper: dump the whole constraint list from index 0.  */
678 debug_constraints (void)
680 dump_constraints (stderr
, 0);
683 /* Print the constraint graph in dot format. */
686 dump_constraint_graph (FILE *file
)
690 /* Only print the graph if it has already been initialized: */
694 /* Prints the header of the dot file: */
695 fprintf (file
, "strict digraph {\n");
696 fprintf (file
, " node [\n shape = box\n ]\n");
697 fprintf (file
, " edge [\n fontsize = \"12\"\n ]\n");
698 fprintf (file
, "\n // List of nodes and complex constraints in "
699 "the constraint graph:\n");
701 /* The next lines print the nodes in the graph together with the
702 complex constraints attached to them. */
703 for (i
= 0; i
< graph
->size
; i
++)
707 if (i
< FIRST_REF_NODE
)
708 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
710 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
711 if (graph
->complex[i
].exists ())
715 fprintf (file
, " [label=\"\\N\\n");
716 for (j
= 0; graph
->complex[i
].iterate (j
, &c
); ++j
)
718 dump_constraint (file
, c
);
719 fprintf (file
, "\\l");
721 fprintf (file
, "\"]");
723 fprintf (file
, ";\n");
726 /* Go over the edges. */
727 fprintf (file
, "\n // Edges in the constraint graph:\n");
728 for (i
= 0; i
< graph
->size
; i
++)
734 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[i
], 0, j
, bi
)
736 unsigned to
= find (j
);
739 if (i
< FIRST_REF_NODE
)
740 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
742 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
743 fprintf (file
, " -> ");
744 if (to
< FIRST_REF_NODE
)
745 fprintf (file
, "\"%s\"", get_varinfo (to
)->name
);
747 fprintf (file
, "\"*%s\"", get_varinfo (to
- FIRST_REF_NODE
)->name
);
748 fprintf (file
, ";\n");
752 /* Prints the tail of the dot file. */
753 fprintf (file
, "}\n");
756 /* Print out the constraint graph to stderr. */
759 debug_constraint_graph (void)
761 dump_constraint_graph (stderr
);
766 The solver is a simple worklist solver, that works on the following
769 sbitmap changed_nodes = all zeroes;
771 For each node that is not already collapsed:
773 set bit in changed nodes
775 while (changed_count > 0)
777 compute topological ordering for constraint graph
779 find and collapse cycles in the constraint graph (updating
780 changed if necessary)
782 for each node (n) in the graph in topological order:
785 Process each complex constraint associated with the node,
786 updating changed if necessary.
788 For each outgoing edge from n, propagate the solution from n to
789 the destination of the edge, updating changed as necessary.
793 /* Return true if two constraint expressions A and B are equal. */
/* Equality is component-wise: the constraint type, the variable id,
   and the bit offset must all match.  */
796 constraint_expr_equal (struct constraint_expr a
, struct constraint_expr b
)
798 return a
.type
== b
.type
&& a
.var
== b
.var
&& a
.offset
== b
.offset
;
801 /* Return true if constraint expression A is less than constraint expression
802 B. This is just arbitrary, but consistent, in order to give them an
/* Order by type first; within the same type, by variable and offset.
   NOTE(review): the var-equality test that selects between the two
   inner returns is not visible in this extract -- confirm against the
   full source.  */
806 constraint_expr_less (struct constraint_expr a
, struct constraint_expr b
)
808 if (a
.type
== b
.type
)
811 return a
.offset
< b
.offset
;
813 return a
.var
< b
.var
;
/* Differing types: the type tag itself provides the ordering.  */
816 return a
.type
< b
.type
;
819 /* Return true if constraint A is less than constraint B. This is just
820 arbitrary, but consistent, in order to give them an ordering. */
/* Lexicographic order on (lhs, rhs).
   NOTE(review): the "return true" / "return false" statements of the
   two lhs branches are not visible in this extract.  */
823 constraint_less (const constraint_t
&a
, const constraint_t
&b
)
825 if (constraint_expr_less (a
->lhs
, b
->lhs
))
827 else if (constraint_expr_less (b
->lhs
, a
->lhs
))
/* Equal left-hand sides: the right-hand side breaks the tie.  */
830 return constraint_expr_less (a
->rhs
, b
->rhs
);
833 /* Return true if two constraints A and B are equal. */
/* Equality is component-wise on both constituent expressions.  */
836 constraint_equal (struct constraint a
, struct constraint b
)
838 return constraint_expr_equal (a
.lhs
, b
.lhs
)
839 && constraint_expr_equal (a
.rhs
, b
.rhs
);
843 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
/* NOTE(review): the declarations of PLACE/FOUND and the success /
   not-found returns are not visible in this extract; presumably the
   function returns NULL when LOOKFOR is absent.  */
846 constraint_vec_find (vec
<constraint_t
> vec
,
847 struct constraint lookfor
)
/* Binary search for the would-be position of LOOKFOR.  */
855 place
= vec
.lower_bound (&lookfor
, constraint_less
);
/* Position past the end: LOOKFOR is greater than every element.  */
856 if (place
>= vec
.length ())
/* Element at the position that is not equal: LOOKFOR is absent.  */
859 if (!constraint_equal (*found
, lookfor
))
864 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
867 constraint_set_union (vec
<constraint_t
> *to
,
868 vec
<constraint_t
> *from
)
/* For every constraint in FROM that TO does not already contain,
   insert it at its sorted position so TO stays ordered and
   duplicate-free.  */
873 FOR_EACH_VEC_ELT (*from
, i
, c
)
875 if (constraint_vec_find (*to
, *c
) == NULL
)
877 unsigned int place
= to
->lower_bound (c
, constraint_less
);
878 to
->safe_insert (place
, c
);
883 /* Expands the solution in SET to all sub-fields of variables included.
884 Union the expanded result into RESULT. */
887 solution_set_expand (bitmap result
, bitmap set
)
893 /* In a first pass record all variables we need to add all
894 sub-fields off. This avoids quadratic behavior. */
895 EXECUTE_IF_SET_IN_BITMAP (set
, 0, j
, bi
)
897 varinfo_t v
= get_varinfo (j
);
898 if (v
->is_artificial_var
901 v
= lookup_vi_for_tree (v
->decl
);
903 vars
= BITMAP_ALLOC (NULL
);
904 bitmap_set_bit (vars
, v
->id
);
907 /* In the second pass now do the addition to the solution and
908 to speed up solving add it to the delta as well. */
911 EXECUTE_IF_SET_IN_BITMAP (vars
, 0, j
, bi
)
913 varinfo_t v
= get_varinfo (j
);
914 for (; v
!= NULL
; v
= v
->next
)
915 bitmap_set_bit (result
, v
->id
);
921 /* Take a solution set SET, add OFFSET to each member of the set, and
922 overwrite SET with the result when done. */
925 solution_set_add (bitmap set
, HOST_WIDE_INT offset
)
927 bitmap result
= BITMAP_ALLOC (&iteration_obstack
);
931 /* If the offset is unknown we have to expand the solution to
933 if (offset
== UNKNOWN_OFFSET
)
935 solution_set_expand (set
, set
);
939 EXECUTE_IF_SET_IN_BITMAP (set
, 0, i
, bi
)
941 varinfo_t vi
= get_varinfo (i
);
943 /* If this is a variable with just one field just set its bit
945 if (vi
->is_artificial_var
946 || vi
->is_unknown_size_var
948 bitmap_set_bit (result
, i
);
951 unsigned HOST_WIDE_INT fieldoffset
= vi
->offset
+ offset
;
953 /* If the offset makes the pointer point to before the
954 variable use offset zero for the field lookup. */
956 && fieldoffset
> vi
->offset
)
960 vi
= first_or_preceding_vi_for_offset (vi
, fieldoffset
);
962 bitmap_set_bit (result
, vi
->id
);
963 /* If the result is not exactly at fieldoffset include the next
964 field as well. See get_constraint_for_ptr_offset for more
966 if (vi
->offset
!= fieldoffset
968 bitmap_set_bit (result
, vi
->next
->id
);
972 bitmap_copy (set
, result
);
973 BITMAP_FREE (result
);
976 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
980 set_union_with_increment (bitmap to
, bitmap from
, HOST_WIDE_INT inc
)
/* Fast path (its guarding zero-increment test is not visible in this
   extract): with no offset this is a plain bitmap union.  */
983 return bitmap_ior_into (to
, from
);
/* Otherwise offset a scratch copy of FROM by INC first, then union
   it in.  The temporary lives on the per-iteration obstack.  */
989 tmp
= BITMAP_ALLOC (&iteration_obstack
);
990 bitmap_copy (tmp
, from
);
991 solution_set_add (tmp
, inc
);
992 res
= bitmap_ior_into (to
, tmp
);
998 /* Insert constraint C into the list of complex constraints for graph
/* The per-node complex vector is kept sorted (by constraint_less) so
   duplicate detection is a single comparison at the insert point.  */
1002 insert_into_complex (constraint_graph_t graph
,
1003 unsigned int var
, constraint_t c
)
1005 vec
<constraint_t
> complex = graph
->complex[var
];
1006 unsigned int place
= complex.lower_bound (c
, constraint_less
);
1008 /* Only insert constraints that do not already exist. */
1009 if (place
>= complex.length ()
1010 || !constraint_equal (*c
, *complex[place
]))
1011 graph
->complex[var
].safe_insert (place
, c
);
1015 /* Condense two variable nodes into a single variable node, by moving
1016 all associated info from SRC to TO. */
1019 merge_node_constraints (constraint_graph_t graph
, unsigned int to
,
1025 gcc_assert (find (from
) == to
);
1027 /* Move all complex constraints from src node into to node */
1028 FOR_EACH_VEC_ELT (graph
->complex[from
], i
, c
)
1030 /* In complex constraints for node src, we may have either
1031 a = *src, and *src = a, or an offseted constraint which are
1032 always added to the rhs node's constraints. */
1034 if (c
->rhs
.type
== DEREF
)
1036 else if (c
->lhs
.type
== DEREF
)
1041 constraint_set_union (&graph
->complex[to
], &graph
->complex[from
]);
1042 graph
->complex[from
].release ();
1046 /* Remove edges involving NODE from GRAPH. */
/* Frees NODE's successor bitmap; BITMAP_FREE also nulls the slot.  */
1049 clear_edges_for_node (constraint_graph_t graph
, unsigned int node
)
1051 if (graph
->succs
[node
])
1052 BITMAP_FREE (graph
->succs
[node
]);
1055 /* Merge GRAPH nodes FROM and TO into node TO. */
1058 merge_graph_nodes (constraint_graph_t graph
, unsigned int to
,
1061 if (graph
->indirect_cycles
[from
] != -1)
1063 /* If we have indirect cycles with the from node, and we have
1064 none on the to node, the to node has indirect cycles from the
1065 from node now that they are unified.
1066 If indirect cycles exist on both, unify the nodes that they
1067 are in a cycle with, since we know they are in a cycle with
1069 if (graph
->indirect_cycles
[to
] == -1)
1070 graph
->indirect_cycles
[to
] = graph
->indirect_cycles
[from
];
1073 /* Merge all the successor edges. */
1074 if (graph
->succs
[from
])
1076 if (!graph
->succs
[to
])
1077 graph
->succs
[to
] = BITMAP_ALLOC (&pta_obstack
);
1078 bitmap_ior_into (graph
->succs
[to
],
1079 graph
->succs
[from
]);
1082 clear_edges_for_node (graph
, from
);
1086 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1087 it doesn't exist in the graph already. */
/* NOTE(review): the FROM parameter line and the function braces are
   not visible in this extract.  */
1090 add_implicit_graph_edge (constraint_graph_t graph
, unsigned int to
,
/* Lazily allocate the implicit-predecessor bitmap for TO.  */
1096 if (!graph
->implicit_preds
[to
])
1097 graph
->implicit_preds
[to
] = BITMAP_ALLOC (&predbitmap_obstack
);
/* bitmap_set_bit returns true only when the bit was newly set, so
   the statistics count each implicit edge once.  */
1099 if (bitmap_set_bit (graph
->implicit_preds
[to
], from
))
1100 stats
.num_implicit_edges
++;
1103 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1104 it doesn't exist in the graph already.
1105 Return false if the edge already existed, true otherwise. */
/* NOTE(review): the FROM parameter line and the return statement are
   not visible in this extract.  */
1108 add_pred_graph_edge (constraint_graph_t graph
, unsigned int to
,
/* Lazily allocate the predecessor bitmap for TO.  */
1111 if (!graph
->preds
[to
])
1112 graph
->preds
[to
] = BITMAP_ALLOC (&predbitmap_obstack
);
1113 bitmap_set_bit (graph
->preds
[to
], from
);
1116 /* Add a graph edge to GRAPH, going from FROM to TO if
1117 it doesn't exist in the graph already.
1118 Return false if the edge already existed, true otherwise. */
1121 add_graph_edge (constraint_graph_t graph
, unsigned int to
,
1132 if (!graph
->succs
[from
])
1133 graph
->succs
[from
] = BITMAP_ALLOC (&pta_obstack
);
1134 if (bitmap_set_bit (graph
->succs
[from
], to
))
1137 if (to
< FIRST_REF_NODE
&& from
< FIRST_REF_NODE
)
1145 /* Return true if {DEST.SRC} is an existing graph edge in GRAPH. */
/* NOTE(review): the DEST parameter line is not visible in this
   extract.  Edge existence means DEST's successor bitmap exists and
   contains SRC.  */
1148 valid_graph_edge (constraint_graph_t graph
, unsigned int src
,
1151 return (graph
->succs
[dest
]
1152 && bitmap_bit_p (graph
->succs
[dest
], src
));
1155 /* Initialize the constraint graph structure to contain SIZE nodes. */
1158 init_graph (unsigned int size
)
1162 graph
= XCNEW (struct constraint_graph
);
1164 graph
->succs
= XCNEWVEC (bitmap
, graph
->size
);
1165 graph
->indirect_cycles
= XNEWVEC (int, graph
->size
);
1166 graph
->rep
= XNEWVEC (unsigned int, graph
->size
);
1167 /* ??? Macros do not support template types with multiple arguments,
1168 so we use a typedef to work around it. */
1169 typedef vec
<constraint_t
> vec_constraint_t_heap
;
1170 graph
->complex = XCNEWVEC (vec_constraint_t_heap
, size
);
1171 graph
->pe
= XCNEWVEC (unsigned int, graph
->size
);
1172 graph
->pe_rep
= XNEWVEC (int, graph
->size
);
1174 for (j
= 0; j
< graph
->size
; j
++)
1177 graph
->pe_rep
[j
] = -1;
1178 graph
->indirect_cycles
[j
] = -1;
1182 /* Build the constraint graph, adding only predecessor edges right now. */
1185 build_pred_graph (void)
1191 graph
->implicit_preds
= XCNEWVEC (bitmap
, graph
->size
);
1192 graph
->preds
= XCNEWVEC (bitmap
, graph
->size
);
1193 graph
->pointer_label
= XCNEWVEC (unsigned int, graph
->size
);
1194 graph
->loc_label
= XCNEWVEC (unsigned int, graph
->size
);
1195 graph
->pointed_by
= XCNEWVEC (bitmap
, graph
->size
);
1196 graph
->points_to
= XCNEWVEC (bitmap
, graph
->size
);
1197 graph
->eq_rep
= XNEWVEC (int, graph
->size
);
1198 graph
->direct_nodes
= sbitmap_alloc (graph
->size
);
1199 graph
->address_taken
= BITMAP_ALLOC (&predbitmap_obstack
);
1200 bitmap_clear (graph
->direct_nodes
);
1202 for (j
= 0; j
< FIRST_REF_NODE
; j
++)
1204 if (!get_varinfo (j
)->is_special_var
)
1205 bitmap_set_bit (graph
->direct_nodes
, j
);
1208 for (j
= 0; j
< graph
->size
; j
++)
1209 graph
->eq_rep
[j
] = -1;
1211 for (j
= 0; j
< varmap
.length (); j
++)
1212 graph
->indirect_cycles
[j
] = -1;
1214 FOR_EACH_VEC_ELT (constraints
, i
, c
)
1216 struct constraint_expr lhs
= c
->lhs
;
1217 struct constraint_expr rhs
= c
->rhs
;
1218 unsigned int lhsvar
= lhs
.var
;
1219 unsigned int rhsvar
= rhs
.var
;
1221 if (lhs
.type
== DEREF
)
1224 if (rhs
.offset
== 0 && lhs
.offset
== 0 && rhs
.type
== SCALAR
)
1225 add_pred_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1227 else if (rhs
.type
== DEREF
)
1230 if (rhs
.offset
== 0 && lhs
.offset
== 0 && lhs
.type
== SCALAR
)
1231 add_pred_graph_edge (graph
, lhsvar
, FIRST_REF_NODE
+ rhsvar
);
1233 bitmap_clear_bit (graph
->direct_nodes
, lhsvar
);
1235 else if (rhs
.type
== ADDRESSOF
)
1240 if (graph
->points_to
[lhsvar
] == NULL
)
1241 graph
->points_to
[lhsvar
] = BITMAP_ALLOC (&predbitmap_obstack
);
1242 bitmap_set_bit (graph
->points_to
[lhsvar
], rhsvar
);
1244 if (graph
->pointed_by
[rhsvar
] == NULL
)
1245 graph
->pointed_by
[rhsvar
] = BITMAP_ALLOC (&predbitmap_obstack
);
1246 bitmap_set_bit (graph
->pointed_by
[rhsvar
], lhsvar
);
1248 /* Implicitly, *x = y */
1249 add_implicit_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1251 /* All related variables are no longer direct nodes. */
1252 bitmap_clear_bit (graph
->direct_nodes
, rhsvar
);
1253 v
= get_varinfo (rhsvar
);
1254 if (!v
->is_full_var
)
1256 v
= lookup_vi_for_tree (v
->decl
);
1259 bitmap_clear_bit (graph
->direct_nodes
, v
->id
);
1264 bitmap_set_bit (graph
->address_taken
, rhsvar
);
1266 else if (lhsvar
> anything_id
1267 && lhsvar
!= rhsvar
&& lhs
.offset
== 0 && rhs
.offset
== 0)
1270 add_pred_graph_edge (graph
, lhsvar
, rhsvar
);
1271 /* Implicitly, *x = *y */
1272 add_implicit_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
,
1273 FIRST_REF_NODE
+ rhsvar
);
1275 else if (lhs
.offset
!= 0 || rhs
.offset
!= 0)
1277 if (rhs
.offset
!= 0)
1278 bitmap_clear_bit (graph
->direct_nodes
, lhs
.var
);
1279 else if (lhs
.offset
!= 0)
1280 bitmap_clear_bit (graph
->direct_nodes
, rhs
.var
);
1285 /* Build the constraint graph, adding successor edges. */
1288 build_succ_graph (void)
1293 FOR_EACH_VEC_ELT (constraints
, i
, c
)
1295 struct constraint_expr lhs
;
1296 struct constraint_expr rhs
;
1297 unsigned int lhsvar
;
1298 unsigned int rhsvar
;
1305 lhsvar
= find (lhs
.var
);
1306 rhsvar
= find (rhs
.var
);
1308 if (lhs
.type
== DEREF
)
1310 if (rhs
.offset
== 0 && lhs
.offset
== 0 && rhs
.type
== SCALAR
)
1311 add_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1313 else if (rhs
.type
== DEREF
)
1315 if (rhs
.offset
== 0 && lhs
.offset
== 0 && lhs
.type
== SCALAR
)
1316 add_graph_edge (graph
, lhsvar
, FIRST_REF_NODE
+ rhsvar
);
1318 else if (rhs
.type
== ADDRESSOF
)
1321 gcc_assert (find (rhs
.var
) == rhs
.var
);
1322 bitmap_set_bit (get_varinfo (lhsvar
)->solution
, rhsvar
);
1324 else if (lhsvar
> anything_id
1325 && lhsvar
!= rhsvar
&& lhs
.offset
== 0 && rhs
.offset
== 0)
1327 add_graph_edge (graph
, lhsvar
, rhsvar
);
1331 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1332 receive pointers. */
1333 t
= find (storedanything_id
);
1334 for (i
= integer_id
+ 1; i
< FIRST_REF_NODE
; ++i
)
1336 if (!bitmap_bit_p (graph
->direct_nodes
, i
)
1337 && get_varinfo (i
)->may_have_pointers
)
1338 add_graph_edge (graph
, find (i
), t
);
1341 /* Everything stored to ANYTHING also potentially escapes. */
1342 add_graph_edge (graph
, find (escaped_id
), t
);
1346 /* Changed variables on the last iteration. */
1347 static bitmap changed
;
1349 /* Strongly Connected Component visitation info. */
1356 unsigned int *node_mapping
;
1358 vec
<unsigned> scc_stack
;
1362 /* Recursive routine to find strongly connected components in GRAPH.
1363 SI is the SCC info to store the information in, and N is the id of current
1364 graph node we are processing.
1366 This is Tarjan's strongly connected component finding algorithm, as
1367 modified by Nuutila to keep only non-root nodes on the stack.
1368 The algorithm can be found in "On finding the strongly connected
1369 connected components in a directed graph" by Esko Nuutila and Eljas
1370 Soisalon-Soininen, in Information Processing Letters volume 49,
1371 number 1, pages 9-14. */
1374 scc_visit (constraint_graph_t graph
, struct scc_info
*si
, unsigned int n
)
1378 unsigned int my_dfs
;
1380 bitmap_set_bit (si
->visited
, n
);
1381 si
->dfs
[n
] = si
->current_index
++;
1382 my_dfs
= si
->dfs
[n
];
1384 /* Visit all the successors. */
1385 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[n
], 0, i
, bi
)
1389 if (i
> LAST_REF_NODE
)
1393 if (bitmap_bit_p (si
->deleted
, w
))
1396 if (!bitmap_bit_p (si
->visited
, w
))
1397 scc_visit (graph
, si
, w
);
1399 unsigned int t
= find (w
);
1400 unsigned int nnode
= find (n
);
1401 gcc_assert (nnode
== n
);
1403 if (si
->dfs
[t
] < si
->dfs
[nnode
])
1404 si
->dfs
[n
] = si
->dfs
[t
];
1408 /* See if any components have been identified. */
1409 if (si
->dfs
[n
] == my_dfs
)
1411 if (si
->scc_stack
.length () > 0
1412 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
1414 bitmap scc
= BITMAP_ALLOC (NULL
);
1415 unsigned int lowest_node
;
1418 bitmap_set_bit (scc
, n
);
1420 while (si
->scc_stack
.length () != 0
1421 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
1423 unsigned int w
= si
->scc_stack
.pop ();
1425 bitmap_set_bit (scc
, w
);
1428 lowest_node
= bitmap_first_set_bit (scc
);
1429 gcc_assert (lowest_node
< FIRST_REF_NODE
);
1431 /* Collapse the SCC nodes into a single node, and mark the
1433 EXECUTE_IF_SET_IN_BITMAP (scc
, 0, i
, bi
)
1435 if (i
< FIRST_REF_NODE
)
1437 if (unite (lowest_node
, i
))
1438 unify_nodes (graph
, lowest_node
, i
, false);
1442 unite (lowest_node
, i
);
1443 graph
->indirect_cycles
[i
- FIRST_REF_NODE
] = lowest_node
;
1447 bitmap_set_bit (si
->deleted
, n
);
1450 si
->scc_stack
.safe_push (n
);
1453 /* Unify node FROM into node TO, updating the changed count if
1454 necessary when UPDATE_CHANGED is true. */
1457 unify_nodes (constraint_graph_t graph
, unsigned int to
, unsigned int from
,
1458 bool update_changed
)
1461 gcc_assert (to
!= from
&& find (to
) == to
);
1462 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1463 fprintf (dump_file
, "Unifying %s to %s\n",
1464 get_varinfo (from
)->name
,
1465 get_varinfo (to
)->name
);
1468 stats
.unified_vars_dynamic
++;
1470 stats
.unified_vars_static
++;
1472 merge_graph_nodes (graph
, to
, from
);
1473 merge_node_constraints (graph
, to
, from
);
1475 /* Mark TO as changed if FROM was changed. If TO was already marked
1476 as changed, decrease the changed count. */
1479 && bitmap_bit_p (changed
, from
))
1481 bitmap_clear_bit (changed
, from
);
1482 bitmap_set_bit (changed
, to
);
1484 if (get_varinfo (from
)->solution
)
1486 /* If the solution changes because of the merging, we need to mark
1487 the variable as changed. */
1488 if (bitmap_ior_into (get_varinfo (to
)->solution
,
1489 get_varinfo (from
)->solution
))
1492 bitmap_set_bit (changed
, to
);
1495 BITMAP_FREE (get_varinfo (from
)->solution
);
1496 if (get_varinfo (from
)->oldsolution
)
1497 BITMAP_FREE (get_varinfo (from
)->oldsolution
);
1499 if (stats
.iterations
> 0
1500 && get_varinfo (to
)->oldsolution
)
1501 BITMAP_FREE (get_varinfo (to
)->oldsolution
);
1503 if (valid_graph_edge (graph
, to
, to
))
1505 if (graph
->succs
[to
])
1506 bitmap_clear_bit (graph
->succs
[to
], to
);
1510 /* Information needed to compute the topological ordering of a graph. */
1514 /* sbitmap of visited nodes. */
1516 /* Array that stores the topological order of the graph, *in
1518 vec
<unsigned> topo_order
;
1522 /* Initialize and return a topological info structure. */
1524 static struct topo_info
*
1525 init_topo_info (void)
1527 size_t size
= graph
->size
;
1528 struct topo_info
*ti
= XNEW (struct topo_info
);
1529 ti
->visited
= sbitmap_alloc (size
);
1530 bitmap_clear (ti
->visited
);
1531 ti
->topo_order
.create (1);
1536 /* Free the topological sort info pointed to by TI. */
1539 free_topo_info (struct topo_info
*ti
)
1541 sbitmap_free (ti
->visited
);
1542 ti
->topo_order
.release ();
1546 /* Visit the graph in topological order, and store the order in the
1547 topo_info structure. */
1550 topo_visit (constraint_graph_t graph
, struct topo_info
*ti
,
1556 bitmap_set_bit (ti
->visited
, n
);
1558 if (graph
->succs
[n
])
1559 EXECUTE_IF_SET_IN_BITMAP (graph
->succs
[n
], 0, j
, bi
)
1561 if (!bitmap_bit_p (ti
->visited
, j
))
1562 topo_visit (graph
, ti
, j
);
1565 ti
->topo_order
.safe_push (n
);
1568 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1569 starting solution for y. */
1572 do_sd_constraint (constraint_graph_t graph
, constraint_t c
,
1575 unsigned int lhs
= c
->lhs
.var
;
1577 bitmap sol
= get_varinfo (lhs
)->solution
;
1580 HOST_WIDE_INT roffset
= c
->rhs
.offset
;
1582 /* Our IL does not allow this. */
1583 gcc_assert (c
->lhs
.offset
== 0);
1585 /* If the solution of Y contains anything it is good enough to transfer
1587 if (bitmap_bit_p (delta
, anything_id
))
1589 flag
|= bitmap_set_bit (sol
, anything_id
);
1593 /* If we do not know at with offset the rhs is dereferenced compute
1594 the reachability set of DELTA, conservatively assuming it is
1595 dereferenced at all valid offsets. */
1596 if (roffset
== UNKNOWN_OFFSET
)
1598 solution_set_expand (delta
, delta
);
1599 /* No further offset processing is necessary. */
1603 /* For each variable j in delta (Sol(y)), add
1604 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1605 EXECUTE_IF_SET_IN_BITMAP (delta
, 0, j
, bi
)
1607 varinfo_t v
= get_varinfo (j
);
1608 HOST_WIDE_INT fieldoffset
= v
->offset
+ roffset
;
1612 fieldoffset
= v
->offset
;
1613 else if (roffset
!= 0)
1614 v
= first_vi_for_offset (v
, fieldoffset
);
1615 /* If the access is outside of the variable we can ignore it. */
1623 /* Adding edges from the special vars is pointless.
1624 They don't have sets that can change. */
1625 if (get_varinfo (t
)->is_special_var
)
1626 flag
|= bitmap_ior_into (sol
, get_varinfo (t
)->solution
);
1627 /* Merging the solution from ESCAPED needlessly increases
1628 the set. Use ESCAPED as representative instead. */
1629 else if (v
->id
== escaped_id
)
1630 flag
|= bitmap_set_bit (sol
, escaped_id
);
1631 else if (v
->may_have_pointers
1632 && add_graph_edge (graph
, lhs
, t
))
1633 flag
|= bitmap_ior_into (sol
, get_varinfo (t
)->solution
);
1635 /* If the variable is not exactly at the requested offset
1636 we have to include the next one. */
1637 if (v
->offset
== (unsigned HOST_WIDE_INT
)fieldoffset
1642 fieldoffset
= v
->offset
;
1648 /* If the LHS solution changed, mark the var as changed. */
1651 get_varinfo (lhs
)->solution
= sol
;
1652 bitmap_set_bit (changed
, lhs
);
1656 /* Process a constraint C that represents *(x + off) = y using DELTA
1657 as the starting solution for x. */
1660 do_ds_constraint (constraint_t c
, bitmap delta
)
1662 unsigned int rhs
= c
->rhs
.var
;
1663 bitmap sol
= get_varinfo (rhs
)->solution
;
1666 HOST_WIDE_INT loff
= c
->lhs
.offset
;
1667 bool escaped_p
= false;
1669 /* Our IL does not allow this. */
1670 gcc_assert (c
->rhs
.offset
== 0);
1672 /* If the solution of y contains ANYTHING simply use the ANYTHING
1673 solution. This avoids needlessly increasing the points-to sets. */
1674 if (bitmap_bit_p (sol
, anything_id
))
1675 sol
= get_varinfo (find (anything_id
))->solution
;
1677 /* If the solution for x contains ANYTHING we have to merge the
1678 solution of y into all pointer variables which we do via
1680 if (bitmap_bit_p (delta
, anything_id
))
1682 unsigned t
= find (storedanything_id
);
1683 if (add_graph_edge (graph
, t
, rhs
))
1685 if (bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1686 bitmap_set_bit (changed
, t
);
1691 /* If we do not know at with offset the rhs is dereferenced compute
1692 the reachability set of DELTA, conservatively assuming it is
1693 dereferenced at all valid offsets. */
1694 if (loff
== UNKNOWN_OFFSET
)
1696 solution_set_expand (delta
, delta
);
1700 /* For each member j of delta (Sol(x)), add an edge from y to j and
1701 union Sol(y) into Sol(j) */
1702 EXECUTE_IF_SET_IN_BITMAP (delta
, 0, j
, bi
)
1704 varinfo_t v
= get_varinfo (j
);
1706 HOST_WIDE_INT fieldoffset
= v
->offset
+ loff
;
1709 fieldoffset
= v
->offset
;
1711 v
= first_vi_for_offset (v
, fieldoffset
);
1712 /* If the access is outside of the variable we can ignore it. */
1718 if (v
->may_have_pointers
)
1720 /* If v is a global variable then this is an escape point. */
1721 if (v
->is_global_var
1724 t
= find (escaped_id
);
1725 if (add_graph_edge (graph
, t
, rhs
)
1726 && bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1727 bitmap_set_bit (changed
, t
);
1728 /* Enough to let rhs escape once. */
1732 if (v
->is_special_var
)
1736 if (add_graph_edge (graph
, t
, rhs
)
1737 && bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1738 bitmap_set_bit (changed
, t
);
1741 /* If the variable is not exactly at the requested offset
1742 we have to include the next one. */
1743 if (v
->offset
== (unsigned HOST_WIDE_INT
)fieldoffset
1748 fieldoffset
= v
->offset
;
1754 /* Handle a non-simple (simple meaning requires no iteration),
1755 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1758 do_complex_constraint (constraint_graph_t graph
, constraint_t c
, bitmap delta
)
1760 if (c
->lhs
.type
== DEREF
)
1762 if (c
->rhs
.type
== ADDRESSOF
)
1769 do_ds_constraint (c
, delta
);
1772 else if (c
->rhs
.type
== DEREF
)
1775 if (!(get_varinfo (c
->lhs
.var
)->is_special_var
))
1776 do_sd_constraint (graph
, c
, delta
);
1784 gcc_assert (c
->rhs
.type
== SCALAR
&& c
->lhs
.type
== SCALAR
);
1785 solution
= get_varinfo (c
->rhs
.var
)->solution
;
1786 tmp
= get_varinfo (c
->lhs
.var
)->solution
;
1788 flag
= set_union_with_increment (tmp
, solution
, c
->rhs
.offset
);
1792 get_varinfo (c
->lhs
.var
)->solution
= tmp
;
1793 bitmap_set_bit (changed
, c
->lhs
.var
);
1798 /* Initialize and return a new SCC info structure. */
1800 static struct scc_info
*
1801 init_scc_info (size_t size
)
1803 struct scc_info
*si
= XNEW (struct scc_info
);
1806 si
->current_index
= 0;
1807 si
->visited
= sbitmap_alloc (size
);
1808 bitmap_clear (si
->visited
);
1809 si
->deleted
= sbitmap_alloc (size
);
1810 bitmap_clear (si
->deleted
);
1811 si
->node_mapping
= XNEWVEC (unsigned int, size
);
1812 si
->dfs
= XCNEWVEC (unsigned int, size
);
1814 for (i
= 0; i
< size
; i
++)
1815 si
->node_mapping
[i
] = i
;
1817 si
->scc_stack
.create (1);
1821 /* Free an SCC info structure pointed to by SI */
1824 free_scc_info (struct scc_info
*si
)
1826 sbitmap_free (si
->visited
);
1827 sbitmap_free (si
->deleted
);
1828 free (si
->node_mapping
);
1830 si
->scc_stack
.release ();
1835 /* Find indirect cycles in GRAPH that occur, using strongly connected
1836 components, and note them in the indirect cycles map.
1838 This technique comes from Ben Hardekopf and Calvin Lin,
1839 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1840 Lines of Code", submitted to PLDI 2007. */
1843 find_indirect_cycles (constraint_graph_t graph
)
1846 unsigned int size
= graph
->size
;
1847 struct scc_info
*si
= init_scc_info (size
);
1849 for (i
= 0; i
< MIN (LAST_REF_NODE
, size
); i
++ )
1850 if (!bitmap_bit_p (si
->visited
, i
) && find (i
) == i
)
1851 scc_visit (graph
, si
, i
);
1856 /* Compute a topological ordering for GRAPH, and store the result in the
1857 topo_info structure TI. */
1860 compute_topo_order (constraint_graph_t graph
,
1861 struct topo_info
*ti
)
1864 unsigned int size
= graph
->size
;
1866 for (i
= 0; i
!= size
; ++i
)
1867 if (!bitmap_bit_p (ti
->visited
, i
) && find (i
) == i
)
1868 topo_visit (graph
, ti
, i
);
1871 /* Structure used to for hash value numbering of pointer equivalence
1874 typedef struct equiv_class_label
1877 unsigned int equivalence_class
;
1879 } *equiv_class_label_t
;
1880 typedef const struct equiv_class_label
*const_equiv_class_label_t
;
1882 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1884 static htab_t pointer_equiv_class_table
;
1886 /* A hashtable for mapping a bitmap of labels->location equivalence
1888 static htab_t location_equiv_class_table
;
1890 /* Hash function for a equiv_class_label_t */
1893 equiv_class_label_hash (const void *p
)
1895 const_equiv_class_label_t
const ecl
= (const_equiv_class_label_t
) p
;
1896 return ecl
->hashcode
;
1899 /* Equality function for two equiv_class_label_t's. */
1902 equiv_class_label_eq (const void *p1
, const void *p2
)
1904 const_equiv_class_label_t
const eql1
= (const_equiv_class_label_t
) p1
;
1905 const_equiv_class_label_t
const eql2
= (const_equiv_class_label_t
) p2
;
1906 return (eql1
->hashcode
== eql2
->hashcode
1907 && bitmap_equal_p (eql1
->labels
, eql2
->labels
));
1910 /* Lookup a equivalence class in TABLE by the bitmap of LABELS it
1914 equiv_class_lookup (htab_t table
, bitmap labels
)
1917 struct equiv_class_label ecl
;
1919 ecl
.labels
= labels
;
1920 ecl
.hashcode
= bitmap_hash (labels
);
1922 slot
= htab_find_slot_with_hash (table
, &ecl
,
1923 ecl
.hashcode
, NO_INSERT
);
1927 return ((equiv_class_label_t
) *slot
)->equivalence_class
;
1931 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1935 equiv_class_add (htab_t table
, unsigned int equivalence_class
,
1939 equiv_class_label_t ecl
= XNEW (struct equiv_class_label
);
1941 ecl
->labels
= labels
;
1942 ecl
->equivalence_class
= equivalence_class
;
1943 ecl
->hashcode
= bitmap_hash (labels
);
1945 slot
= htab_find_slot_with_hash (table
, ecl
,
1946 ecl
->hashcode
, INSERT
);
1947 gcc_assert (!*slot
);
1948 *slot
= (void *) ecl
;
1951 /* Perform offline variable substitution.
1953 This is a worst case quadratic time way of identifying variables
1954 that must have equivalent points-to sets, including those caused by
1955 static cycles, and single entry subgraphs, in the constraint graph.
1957 The technique is described in "Exploiting Pointer and Location
1958 Equivalence to Optimize Pointer Analysis. In the 14th International
1959 Static Analysis Symposium (SAS), August 2007." It is known as the
1960 "HU" algorithm, and is equivalent to value numbering the collapsed
1961 constraint graph including evaluating unions.
1963 The general method of finding equivalence classes is as follows:
1964 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1965 Initialize all non-REF nodes to be direct nodes.
1966 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1968 For each constraint containing the dereference, we also do the same
1971 We then compute SCC's in the graph and unify nodes in the same SCC,
1974 For each non-collapsed node x:
1975 Visit all unvisited explicit incoming edges.
1976 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1978 Lookup the equivalence class for pts(x).
1979 If we found one, equivalence_class(x) = found class.
1980 Otherwise, equivalence_class(x) = new class, and new_class is
1981 added to the lookup table.
1983 All direct nodes with the same equivalence class can be replaced
1984 with a single representative node.
1985 All unlabeled nodes (label == 0) are not pointers and all edges
1986 involving them can be eliminated.
1987 We perform these optimizations during rewrite_constraints
1989 In addition to pointer equivalence class finding, we also perform
1990 location equivalence class finding. This is the set of variables
1991 that always appear together in points-to sets. We use this to
1992 compress the size of the points-to sets. */
/* Current maximum pointer equivalence class id.  */
static int pointer_equiv_class;

/* Current maximum location equivalence class id.  */
static int location_equiv_class;
2000 /* Recursive routine to find strongly connected components in GRAPH,
2001 and label it's nodes with DFS numbers. */
2004 condense_visit (constraint_graph_t graph
, struct scc_info
*si
, unsigned int n
)
2008 unsigned int my_dfs
;
2010 gcc_assert (si
->node_mapping
[n
] == n
);
2011 bitmap_set_bit (si
->visited
, n
);
2012 si
->dfs
[n
] = si
->current_index
++;
2013 my_dfs
= si
->dfs
[n
];
2015 /* Visit all the successors. */
2016 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[n
], 0, i
, bi
)
2018 unsigned int w
= si
->node_mapping
[i
];
2020 if (bitmap_bit_p (si
->deleted
, w
))
2023 if (!bitmap_bit_p (si
->visited
, w
))
2024 condense_visit (graph
, si
, w
);
2026 unsigned int t
= si
->node_mapping
[w
];
2027 unsigned int nnode
= si
->node_mapping
[n
];
2028 gcc_assert (nnode
== n
);
2030 if (si
->dfs
[t
] < si
->dfs
[nnode
])
2031 si
->dfs
[n
] = si
->dfs
[t
];
2035 /* Visit all the implicit predecessors. */
2036 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->implicit_preds
[n
], 0, i
, bi
)
2038 unsigned int w
= si
->node_mapping
[i
];
2040 if (bitmap_bit_p (si
->deleted
, w
))
2043 if (!bitmap_bit_p (si
->visited
, w
))
2044 condense_visit (graph
, si
, w
);
2046 unsigned int t
= si
->node_mapping
[w
];
2047 unsigned int nnode
= si
->node_mapping
[n
];
2048 gcc_assert (nnode
== n
);
2050 if (si
->dfs
[t
] < si
->dfs
[nnode
])
2051 si
->dfs
[n
] = si
->dfs
[t
];
2055 /* See if any components have been identified. */
2056 if (si
->dfs
[n
] == my_dfs
)
2058 while (si
->scc_stack
.length () != 0
2059 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
2061 unsigned int w
= si
->scc_stack
.pop ();
2062 si
->node_mapping
[w
] = n
;
2064 if (!bitmap_bit_p (graph
->direct_nodes
, w
))
2065 bitmap_clear_bit (graph
->direct_nodes
, n
);
2067 /* Unify our nodes. */
2068 if (graph
->preds
[w
])
2070 if (!graph
->preds
[n
])
2071 graph
->preds
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2072 bitmap_ior_into (graph
->preds
[n
], graph
->preds
[w
]);
2074 if (graph
->implicit_preds
[w
])
2076 if (!graph
->implicit_preds
[n
])
2077 graph
->implicit_preds
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2078 bitmap_ior_into (graph
->implicit_preds
[n
],
2079 graph
->implicit_preds
[w
]);
2081 if (graph
->points_to
[w
])
2083 if (!graph
->points_to
[n
])
2084 graph
->points_to
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2085 bitmap_ior_into (graph
->points_to
[n
],
2086 graph
->points_to
[w
]);
2089 bitmap_set_bit (si
->deleted
, n
);
2092 si
->scc_stack
.safe_push (n
);
2095 /* Label pointer equivalences. */
2098 label_visit (constraint_graph_t graph
, struct scc_info
*si
, unsigned int n
)
2102 bitmap_set_bit (si
->visited
, n
);
2104 if (!graph
->points_to
[n
])
2105 graph
->points_to
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2107 /* Label and union our incoming edges's points to sets. */
2108 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[n
], 0, i
, bi
)
2110 unsigned int w
= si
->node_mapping
[i
];
2111 if (!bitmap_bit_p (si
->visited
, w
))
2112 label_visit (graph
, si
, w
);
2114 /* Skip unused edges */
2115 if (w
== n
|| graph
->pointer_label
[w
] == 0)
2118 if (graph
->points_to
[w
])
2119 bitmap_ior_into(graph
->points_to
[n
], graph
->points_to
[w
]);
2121 /* Indirect nodes get fresh variables. */
2122 if (!bitmap_bit_p (graph
->direct_nodes
, n
))
2123 bitmap_set_bit (graph
->points_to
[n
], FIRST_REF_NODE
+ n
);
2125 if (!bitmap_empty_p (graph
->points_to
[n
]))
2127 unsigned int label
= equiv_class_lookup (pointer_equiv_class_table
,
2128 graph
->points_to
[n
]);
2131 label
= pointer_equiv_class
++;
2132 equiv_class_add (pointer_equiv_class_table
,
2133 label
, graph
->points_to
[n
]);
2135 graph
->pointer_label
[n
] = label
;
2139 /* Perform offline variable substitution, discovering equivalence
2140 classes, and eliminating non-pointer variables. */
2142 static struct scc_info
*
2143 perform_var_substitution (constraint_graph_t graph
)
2146 unsigned int size
= graph
->size
;
2147 struct scc_info
*si
= init_scc_info (size
);
2149 bitmap_obstack_initialize (&iteration_obstack
);
2150 pointer_equiv_class_table
= htab_create (511, equiv_class_label_hash
,
2151 equiv_class_label_eq
, free
);
2152 location_equiv_class_table
= htab_create (511, equiv_class_label_hash
,
2153 equiv_class_label_eq
, free
);
2154 pointer_equiv_class
= 1;
2155 location_equiv_class
= 1;
2157 /* Condense the nodes, which means to find SCC's, count incoming
2158 predecessors, and unite nodes in SCC's. */
2159 for (i
= 0; i
< FIRST_REF_NODE
; i
++)
2160 if (!bitmap_bit_p (si
->visited
, si
->node_mapping
[i
]))
2161 condense_visit (graph
, si
, si
->node_mapping
[i
]);
2163 bitmap_clear (si
->visited
);
2164 /* Actually the label the nodes for pointer equivalences */
2165 for (i
= 0; i
< FIRST_REF_NODE
; i
++)
2166 if (!bitmap_bit_p (si
->visited
, si
->node_mapping
[i
]))
2167 label_visit (graph
, si
, si
->node_mapping
[i
]);
2169 /* Calculate location equivalence labels. */
2170 for (i
= 0; i
< FIRST_REF_NODE
; i
++)
2177 if (!graph
->pointed_by
[i
])
2179 pointed_by
= BITMAP_ALLOC (&iteration_obstack
);
2181 /* Translate the pointed-by mapping for pointer equivalence
2183 EXECUTE_IF_SET_IN_BITMAP (graph
->pointed_by
[i
], 0, j
, bi
)
2185 bitmap_set_bit (pointed_by
,
2186 graph
->pointer_label
[si
->node_mapping
[j
]]);
2188 /* The original pointed_by is now dead. */
2189 BITMAP_FREE (graph
->pointed_by
[i
]);
2191 /* Look up the location equivalence label if one exists, or make
2193 label
= equiv_class_lookup (location_equiv_class_table
,
2197 label
= location_equiv_class
++;
2198 equiv_class_add (location_equiv_class_table
,
2203 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2204 fprintf (dump_file
, "Found location equivalence for node %s\n",
2205 get_varinfo (i
)->name
);
2206 BITMAP_FREE (pointed_by
);
2208 graph
->loc_label
[i
] = label
;
2212 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2213 for (i
= 0; i
< FIRST_REF_NODE
; i
++)
2215 bool direct_node
= bitmap_bit_p (graph
->direct_nodes
, i
);
2217 "Equivalence classes for %s node id %d:%s are pointer: %d"
2219 direct_node
? "Direct node" : "Indirect node", i
,
2220 get_varinfo (i
)->name
,
2221 graph
->pointer_label
[si
->node_mapping
[i
]],
2222 graph
->loc_label
[si
->node_mapping
[i
]]);
2225 /* Quickly eliminate our non-pointer variables. */
2227 for (i
= 0; i
< FIRST_REF_NODE
; i
++)
2229 unsigned int node
= si
->node_mapping
[i
];
2231 if (graph
->pointer_label
[node
] == 0)
2233 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2235 "%s is a non-pointer variable, eliminating edges.\n",
2236 get_varinfo (node
)->name
);
2237 stats
.nonpointer_vars
++;
2238 clear_edges_for_node (graph
, node
);
2245 /* Free information that was only necessary for variable
2249 free_var_substitution_info (struct scc_info
*si
)
2252 free (graph
->pointer_label
);
2253 free (graph
->loc_label
);
2254 free (graph
->pointed_by
);
2255 free (graph
->points_to
);
2256 free (graph
->eq_rep
);
2257 sbitmap_free (graph
->direct_nodes
);
2258 htab_delete (pointer_equiv_class_table
);
2259 htab_delete (location_equiv_class_table
);
2260 bitmap_obstack_release (&iteration_obstack
);
2263 /* Return an existing node that is equivalent to NODE, which has
2264 equivalence class LABEL, if one exists. Return NODE otherwise. */
2267 find_equivalent_node (constraint_graph_t graph
,
2268 unsigned int node
, unsigned int label
)
2270 /* If the address version of this variable is unused, we can
2271 substitute it for anything else with the same label.
2272 Otherwise, we know the pointers are equivalent, but not the
2273 locations, and we can unite them later. */
2275 if (!bitmap_bit_p (graph
->address_taken
, node
))
2277 gcc_assert (label
< graph
->size
);
2279 if (graph
->eq_rep
[label
] != -1)
2281 /* Unify the two variables since we know they are equivalent. */
2282 if (unite (graph
->eq_rep
[label
], node
))
2283 unify_nodes (graph
, graph
->eq_rep
[label
], node
, false);
2284 return graph
->eq_rep
[label
];
2288 graph
->eq_rep
[label
] = node
;
2289 graph
->pe_rep
[label
] = node
;
2294 gcc_assert (label
< graph
->size
);
2295 graph
->pe
[node
] = label
;
2296 if (graph
->pe_rep
[label
] == -1)
2297 graph
->pe_rep
[label
] = node
;
2303 /* Unite pointer equivalent but not location equivalent nodes in
2304 GRAPH. This may only be performed once variable substitution is
2308 unite_pointer_equivalences (constraint_graph_t graph
)
2312 /* Go through the pointer equivalences and unite them to their
2313 representative, if they aren't already. */
2314 for (i
= 0; i
< FIRST_REF_NODE
; i
++)
2316 unsigned int label
= graph
->pe
[i
];
2319 int label_rep
= graph
->pe_rep
[label
];
2321 if (label_rep
== -1)
2324 label_rep
= find (label_rep
);
2325 if (label_rep
>= 0 && unite (label_rep
, find (i
)))
2326 unify_nodes (graph
, label_rep
, i
, false);
2331 /* Move complex constraints to the GRAPH nodes they belong to. */
2334 move_complex_constraints (constraint_graph_t graph
)
2339 FOR_EACH_VEC_ELT (constraints
, i
, c
)
2343 struct constraint_expr lhs
= c
->lhs
;
2344 struct constraint_expr rhs
= c
->rhs
;
2346 if (lhs
.type
== DEREF
)
2348 insert_into_complex (graph
, lhs
.var
, c
);
2350 else if (rhs
.type
== DEREF
)
2352 if (!(get_varinfo (lhs
.var
)->is_special_var
))
2353 insert_into_complex (graph
, rhs
.var
, c
);
2355 else if (rhs
.type
!= ADDRESSOF
&& lhs
.var
> anything_id
2356 && (lhs
.offset
!= 0 || rhs
.offset
!= 0))
2358 insert_into_complex (graph
, rhs
.var
, c
);
2365 /* Optimize and rewrite complex constraints while performing
2366 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2367 result of perform_variable_substitution. */
2370 rewrite_constraints (constraint_graph_t graph
,
2371 struct scc_info
*si
)
2377 for (j
= 0; j
< graph
->size
; j
++)
2378 gcc_assert (find (j
) == j
);
2380 FOR_EACH_VEC_ELT (constraints
, i
, c
)
2382 struct constraint_expr lhs
= c
->lhs
;
2383 struct constraint_expr rhs
= c
->rhs
;
2384 unsigned int lhsvar
= find (lhs
.var
);
2385 unsigned int rhsvar
= find (rhs
.var
);
2386 unsigned int lhsnode
, rhsnode
;
2387 unsigned int lhslabel
, rhslabel
;
2389 lhsnode
= si
->node_mapping
[lhsvar
];
2390 rhsnode
= si
->node_mapping
[rhsvar
];
2391 lhslabel
= graph
->pointer_label
[lhsnode
];
2392 rhslabel
= graph
->pointer_label
[rhsnode
];
2394 /* See if it is really a non-pointer variable, and if so, ignore
2398 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2401 fprintf (dump_file
, "%s is a non-pointer variable,"
2402 "ignoring constraint:",
2403 get_varinfo (lhs
.var
)->name
);
2404 dump_constraint (dump_file
, c
);
2405 fprintf (dump_file
, "\n");
2407 constraints
[i
] = NULL
;
2413 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2416 fprintf (dump_file
, "%s is a non-pointer variable,"
2417 "ignoring constraint:",
2418 get_varinfo (rhs
.var
)->name
);
2419 dump_constraint (dump_file
, c
);
2420 fprintf (dump_file
, "\n");
2422 constraints
[i
] = NULL
;
2426 lhsvar
= find_equivalent_node (graph
, lhsvar
, lhslabel
);
2427 rhsvar
= find_equivalent_node (graph
, rhsvar
, rhslabel
);
2428 c
->lhs
.var
= lhsvar
;
2429 c
->rhs
.var
= rhsvar
;
2434 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2435 part of an SCC, false otherwise. */
2438 eliminate_indirect_cycles (unsigned int node
)
2440 if (graph
->indirect_cycles
[node
] != -1
2441 && !bitmap_empty_p (get_varinfo (node
)->solution
))
2444 vec
<unsigned> queue
= vNULL
;
2446 unsigned int to
= find (graph
->indirect_cycles
[node
]);
2449 /* We can't touch the solution set and call unify_nodes
2450 at the same time, because unify_nodes is going to do
2451 bitmap unions into it. */
2453 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node
)->solution
, 0, i
, bi
)
2455 if (find (i
) == i
&& i
!= to
)
2458 queue
.safe_push (i
);
2463 queue
.iterate (queuepos
, &i
);
2466 unify_nodes (graph
, to
, i
, true);
/* NOTE(review): fragmented extraction -- interior source lines are missing
   (the embedded original line numbers skip); visible tokens are kept
   verbatim.  Visible logic: worklist fixpoint over the constraint graph --
   seed `changed' with non-collapsed nodes, then repeatedly process nodes in
   topological order, running complex constraints and propagating solution
   deltas (`pts') to successors until `changed' empties.  */
2474 /* Solve the constraint graph GRAPH using our worklist solver.
2475 This is based on the PW* family of solvers from the "Efficient Field
2476 Sensitive Pointer Analysis for C" paper.
2477 It works by iterating over all the graph nodes, processing the complex
2478 constraints and propagating the copy constraints, until everything stops
2479 changing. This corresponds to steps 6-8 in the solving list given above. */
2482 solve_graph (constraint_graph_t graph
)
2484 unsigned int size
= graph
->size
;
2488 changed
= BITMAP_ALLOC (NULL
);
2490 /* Mark all initial non-collapsed nodes as changed. */
2491 for (i
= 0; i
< size
; i
++)
2493 varinfo_t ivi
= get_varinfo (i
);
2494 if (find (i
) == i
&& !bitmap_empty_p (ivi
->solution
)
2495 && ((graph
->succs
[i
] && !bitmap_empty_p (graph
->succs
[i
]))
2496 || graph
->complex[i
].length () > 0))
2497 bitmap_set_bit (changed
, i
);
2500 /* Allocate a bitmap to be used to store the changed bits. */
2501 pts
= BITMAP_ALLOC (&pta_obstack
);
2503 while (!bitmap_empty_p (changed
))
2506 struct topo_info
*ti
= init_topo_info ();
2509 bitmap_obstack_initialize (&iteration_obstack
);
2511 compute_topo_order (graph
, ti
);
2513 while (ti
->topo_order
.length () != 0)
2516 i
= ti
->topo_order
.pop ();
2518 /* If this variable is not a representative, skip it. */
2522 /* In certain indirect cycle cases, we may merge this
2523 variable to another. */
2524 if (eliminate_indirect_cycles (i
) && find (i
) != i
)
2527 /* If the node has changed, we need to process the
2528 complex constraints and outgoing edges again. */
2529 if (bitmap_clear_bit (changed
, i
))
2534 vec
<constraint_t
> complex = graph
->complex[i
];
2535 varinfo_t vi
= get_varinfo (i
);
2536 bool solution_empty
;
2538 /* Compute the changed set of solution bits. */
2539 if (vi
->oldsolution
)
2540 bitmap_and_compl (pts
, vi
->solution
, vi
->oldsolution
);
2542 bitmap_copy (pts
, vi
->solution
);
2544 if (bitmap_empty_p (pts
))
2547 if (vi
->oldsolution
)
2548 bitmap_ior_into (vi
->oldsolution
, pts
);
2551 vi
->oldsolution
= BITMAP_ALLOC (&oldpta_obstack
);
2552 bitmap_copy (vi
->oldsolution
, pts
);
2555 solution
= vi
->solution
;
2556 solution_empty
= bitmap_empty_p (solution
);
2558 /* Process the complex constraints */
2559 FOR_EACH_VEC_ELT (complex, j
, c
)
2561 /* XXX: This is going to unsort the constraints in
2562 some cases, which will occasionally add duplicate
2563 constraints during unification. This does not
2564 affect correctness. */
2565 c
->lhs
.var
= find (c
->lhs
.var
);
2566 c
->rhs
.var
= find (c
->rhs
.var
);
2568 /* The only complex constraint that can change our
2569 solution to non-empty, given an empty solution,
2570 is a constraint where the lhs side is receiving
2571 some set from elsewhere. */
2572 if (!solution_empty
|| c
->lhs
.type
!= DEREF
)
2573 do_complex_constraint (graph
, c
, pts
);
2576 solution_empty
= bitmap_empty_p (solution
);
2578 if (!solution_empty
)
2581 unsigned eff_escaped_id
= find (escaped_id
);
2583 /* Propagate solution to all successors. */
2584 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[i
],
2590 unsigned int to
= find (j
);
2591 tmp
= get_varinfo (to
)->solution
;
2594 /* Don't try to propagate to ourselves. */
2598 /* If we propagate from ESCAPED use ESCAPED as
2600 if (i
== eff_escaped_id
)
2601 flag
= bitmap_set_bit (tmp
, escaped_id
);
2603 flag
= set_union_with_increment (tmp
, pts
, 0);
2607 get_varinfo (to
)->solution
= tmp
;
2608 bitmap_set_bit (changed
, to
);
2614 free_topo_info (ti
);
2615 bitmap_obstack_release (&iteration_obstack
);
2619 BITMAP_FREE (changed
);
2620 bitmap_obstack_release (&oldpta_obstack
);
/* NOTE(review): fragmented extraction; tokens kept verbatim.  */
2623 /* Map from trees to variable infos. */
2624 static struct pointer_map_t
*vi_for_tree
;
/* NOTE(review): fragmented extraction -- interior lines missing; tokens
   kept verbatim.  The assert shows T must not already be in the map.  */
2627 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2630 insert_vi_for_tree (tree t
, varinfo_t vi
)
2632 void **slot
= pointer_map_insert (vi_for_tree
, t
);
2634 gcc_assert (*slot
== NULL
);
/* NOTE(review): fragmented extraction -- interior lines missing (including
   the NULL-slot early return documented below); tokens kept verbatim.  */
2638 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2639 exist in the map, return NULL, otherwise, return the varinfo we found. */
2642 lookup_vi_for_tree (tree t
)
2644 void **slot
= pointer_map_contains (vi_for_tree
, t
);
2648 return (varinfo_t
) *slot
;
/* NOTE(review): fragmented extraction -- interior lines missing; tokens
   kept verbatim.  Visible logic: SSA names format as "<name>_<version>"
   or "_<version>"; decls prefer the assembler name, then get_name, then
   fall back to "D.<uid>"; asprintf results are interned via ggc_strdup.
   The free() of the asprintf buffer is presumably on a missing line --
   TODO confirm against the full source.  */
2651 /* Return a printable name for DECL */
2654 alias_get_name (tree decl
)
2656 const char *res
= NULL
;
2658 int num_printed
= 0;
2663 if (TREE_CODE (decl
) == SSA_NAME
)
2665 res
= get_name (decl
);
2667 num_printed
= asprintf (&temp
, "%s_%u", res
, SSA_NAME_VERSION (decl
));
2669 num_printed
= asprintf (&temp
, "_%u", SSA_NAME_VERSION (decl
));
2670 if (num_printed
> 0)
2672 res
= ggc_strdup (temp
);
2676 else if (DECL_P (decl
))
2678 if (DECL_ASSEMBLER_NAME_SET_P (decl
))
2679 res
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
2682 res
= get_name (decl
);
2685 num_printed
= asprintf (&temp
, "D.%u", DECL_UID (decl
));
2686 if (num_printed
> 0)
2688 res
= ggc_strdup (temp
);
/* NOTE(review): fragmented extraction -- interior lines missing; tokens
   kept verbatim.  The slot-NULL test guarding the create path is on a
   missing line; only the two returns are visible.  */
2700 /* Find the variable id for tree T in the map.
2701 If T doesn't exist in the map, create an entry for it and return it. */
2704 get_vi_for_tree (tree t
)
2706 void **slot
= pointer_map_contains (vi_for_tree
, t
);
2708 return get_varinfo (create_variable_info_for (t
, alias_get_name (t
)));
2710 return (varinfo_t
) *slot
;
/* NOTE(review): fragmented extraction -- interior lines missing (the
   remaining varinfo flag setup and the returned SCALAR expr); tokens
   kept verbatim.  */
2713 /* Get a scalar constraint expression for a new temporary variable. */
2715 static struct constraint_expr
2716 new_scalar_tmp_constraint_exp (const char *name
)
2718 struct constraint_expr tmp
;
2721 vi
= new_var_info (NULL_TREE
, name
);
2725 vi
->is_full_var
= 1;
/* NOTE(review): fragmented extraction -- interior lines missing; tokens
   kept verbatim.  Visible logic: default-def SSA names recurse on the
   underlying PARM/RESULT decl; global VAR_DECLs are redirected to their
   varpool alias target; an anything+readonly result is rewritten to
   &READONLY; without address_p all sub-fields are pushed.  */
2734 /* Get a constraint expression vector from an SSA_VAR_P node.
2735 If address_p is true, the result will be taken its address of. */
2738 get_constraint_for_ssa_var (tree t
, vec
<ce_s
> *results
, bool address_p
)
2740 struct constraint_expr cexpr
;
2743 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2744 gcc_assert (TREE_CODE (t
) == SSA_NAME
|| DECL_P (t
));
2746 /* For parameters, get at the points-to set for the actual parm
2748 if (TREE_CODE (t
) == SSA_NAME
2749 && SSA_NAME_IS_DEFAULT_DEF (t
)
2750 && (TREE_CODE (SSA_NAME_VAR (t
)) == PARM_DECL
2751 || TREE_CODE (SSA_NAME_VAR (t
)) == RESULT_DECL
))
2753 get_constraint_for_ssa_var (SSA_NAME_VAR (t
), results
, address_p
);
2757 /* For global variables resort to the alias target. */
2758 if (TREE_CODE (t
) == VAR_DECL
2759 && (TREE_STATIC (t
) || DECL_EXTERNAL (t
)))
2761 struct varpool_node
*node
= varpool_get_node (t
);
2762 if (node
&& node
->alias
)
2764 node
= varpool_variable_node (node
, NULL
);
2765 t
= node
->symbol
.decl
;
2769 vi
= get_vi_for_tree (t
);
2771 cexpr
.type
= SCALAR
;
2773 /* If we determine the result is "anything", and we know this is readonly,
2774 say it points to readonly memory instead. */
2775 if (cexpr
.var
== anything_id
&& TREE_READONLY (t
))
2778 cexpr
.type
= ADDRESSOF
;
2779 cexpr
.var
= readonly_id
;
2782 /* If we are not taking the address of the constraint expr, add all
2783 sub-fields of the variable as well. */
2785 && !vi
->is_full_var
)
2787 for (; vi
; vi
= vi
->next
)
2790 results
->safe_push (cexpr
);
2795 results
->safe_push (cexpr
);
/* NOTE(review): fragmented extraction -- interior lines missing; tokens
   kept verbatim.  Visible logic: normalizes &ANYTHING on the lhs,
   drops constraints on non-pointer varinfos, and splits DEREF = DEREF
   and DEREF = ADDRESSOF forms through a scalar temporary before the
   constraint is pushed onto the global list.  */
2798 /* Process constraint T, performing various simplifications and then
2799 adding it to our list of overall constraints. */
2802 process_constraint (constraint_t t
)
2804 struct constraint_expr rhs
= t
->rhs
;
2805 struct constraint_expr lhs
= t
->lhs
;
2807 gcc_assert (rhs
.var
< varmap
.length ());
2808 gcc_assert (lhs
.var
< varmap
.length ());
2810 /* If we didn't get any useful constraint from the lhs we get
2811 &ANYTHING as fallback from get_constraint_for. Deal with
2812 it here by turning it into *ANYTHING. */
2813 if (lhs
.type
== ADDRESSOF
2814 && lhs
.var
== anything_id
)
2817 /* ADDRESSOF on the lhs is invalid. */
2818 gcc_assert (lhs
.type
!= ADDRESSOF
);
2820 /* We shouldn't add constraints from things that cannot have pointers.
2821 It's not completely trivial to avoid in the callers, so do it here. */
2822 if (rhs
.type
!= ADDRESSOF
2823 && !get_varinfo (rhs
.var
)->may_have_pointers
)
2826 /* Likewise adding to the solution of a non-pointer var isn't useful. */
2827 if (!get_varinfo (lhs
.var
)->may_have_pointers
)
2830 /* This can happen in our IR with things like n->a = *p */
2831 if (rhs
.type
== DEREF
&& lhs
.type
== DEREF
&& rhs
.var
!= anything_id
)
2833 /* Split into tmp = *rhs, *lhs = tmp */
2834 struct constraint_expr tmplhs
;
2835 tmplhs
= new_scalar_tmp_constraint_exp ("doubledereftmp");
2836 process_constraint (new_constraint (tmplhs
, rhs
));
2837 process_constraint (new_constraint (lhs
, tmplhs
));
2839 else if (rhs
.type
== ADDRESSOF
&& lhs
.type
== DEREF
)
2841 /* Split into tmp = &rhs, *lhs = tmp */
2842 struct constraint_expr tmplhs
;
2843 tmplhs
= new_scalar_tmp_constraint_exp ("derefaddrtmp");
2844 process_constraint (new_constraint (tmplhs
, rhs
));
2845 process_constraint (new_constraint (lhs
, tmplhs
));
2849 gcc_assert (rhs
.type
!= ADDRESSOF
|| rhs
.offset
== 0);
2850 constraints
.safe_push (t
);
/* NOTE(review): fragmented extraction -- interior lines missing (including
   the error return for non-host-integer offsets); tokens kept verbatim.
   Returns DECL_FIELD_OFFSET in bits plus DECL_FIELD_BIT_OFFSET.  */
2855 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2858 static HOST_WIDE_INT
2859 bitpos_of_field (const tree fdecl
)
2861 if (!host_integerp (DECL_FIELD_OFFSET (fdecl
), 0)
2862 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl
), 0))
2865 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl
)) * BITS_PER_UNIT
2866 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl
)));
/* NOTE(review): fragmented extraction -- interior lines missing; tokens
   kept verbatim.  Visible logic: non-constant or overflowing offsets
   degrade to UNKNOWN_OFFSET; for ADDRESSOF results either the full var
   is kept, all subfields are added (unknown offset), or the overlapping
   sub-field (plus its successor when not exactly hit) is selected.  */
2870 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
2871 resulting constraint expressions in *RESULTS. */
2874 get_constraint_for_ptr_offset (tree ptr
, tree offset
,
2877 struct constraint_expr c
;
2879 HOST_WIDE_INT rhsoffset
;
2881 /* If we do not do field-sensitive PTA adding offsets to pointers
2882 does not change the points-to solution. */
2883 if (!use_field_sensitive
)
2885 get_constraint_for_rhs (ptr
, results
);
2889 /* If the offset is not a non-negative integer constant that fits
2890 in a HOST_WIDE_INT, we have to fall back to a conservative
2891 solution which includes all sub-fields of all pointed-to
2892 variables of ptr. */
2893 if (offset
== NULL_TREE
2894 || TREE_CODE (offset
) != INTEGER_CST
)
2895 rhsoffset
= UNKNOWN_OFFSET
;
2898 /* Sign-extend the offset. */
2899 double_int soffset
= tree_to_double_int (offset
)
2900 .sext (TYPE_PRECISION (TREE_TYPE (offset
)));
2901 if (!soffset
.fits_shwi ())
2902 rhsoffset
= UNKNOWN_OFFSET
;
2905 /* Make sure the bit-offset also fits. */
2906 HOST_WIDE_INT rhsunitoffset
= soffset
.low
;
2907 rhsoffset
= rhsunitoffset
* BITS_PER_UNIT
;
2908 if (rhsunitoffset
!= rhsoffset
/ BITS_PER_UNIT
)
2909 rhsoffset
= UNKNOWN_OFFSET
;
2913 get_constraint_for_rhs (ptr
, results
);
2917 /* As we are eventually appending to the solution do not use
2918 vec::iterate here. */
2919 n
= results
->length ();
2920 for (j
= 0; j
< n
; j
++)
2924 curr
= get_varinfo (c
.var
);
2926 if (c
.type
== ADDRESSOF
2927 /* If this varinfo represents a full variable just use it. */
2928 && curr
->is_full_var
)
2930 else if (c
.type
== ADDRESSOF
2931 /* If we do not know the offset add all subfields. */
2932 && rhsoffset
== UNKNOWN_OFFSET
)
2934 varinfo_t temp
= lookup_vi_for_tree (curr
->decl
);
2937 struct constraint_expr c2
;
2939 c2
.type
= ADDRESSOF
;
2941 if (c2
.var
!= c
.var
)
2942 results
->safe_push (c2
);
2947 else if (c
.type
== ADDRESSOF
)
2950 unsigned HOST_WIDE_INT offset
= curr
->offset
+ rhsoffset
;
2952 /* Search the sub-field which overlaps with the
2953 pointed-to offset. If the result is outside of the variable
2954 we have to provide a conservative result, as the variable is
2955 still reachable from the resulting pointer (even though it
2956 technically cannot point to anything). The last and first
2957 sub-fields are such conservative results.
2958 ??? If we always had a sub-field for &object + 1 then
2959 we could represent this in a more precise way. */
2961 && curr
->offset
< offset
)
2963 temp
= first_or_preceding_vi_for_offset (curr
, offset
);
2965 /* If the found variable is not exactly at the pointed to
2966 result, we have to include the next variable in the
2967 solution as well. Otherwise two increments by offset / 2
2968 do not result in the same or a conservative superset
2970 if (temp
->offset
!= offset
2971 && temp
->next
!= NULL
)
2973 struct constraint_expr c2
;
2974 c2
.var
= temp
->next
->id
;
2975 c2
.type
= ADDRESSOF
;
2977 results
->safe_push (c2
);
2983 c
.offset
= rhsoffset
;
/* NOTE(review): fragmented extraction -- interior lines missing; tokens
   kept verbatim.  Visible logic: constant-zero bases become INTEGER;
   union type-punning yields &ANYTHING; otherwise the base is resolved
   with address taken and the bitpos/bitmaxsize extent selects the
   overlapping sub-fields (SCALAR), adjusts the offset (DEREF), or
   degrades to anything (ADDRESSOF via VIEW_CONVERT_EXPR).  */
2990 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
2991 If address_p is true the result will be taken its address of.
2992 If lhs_p is true then the constraint expression is assumed to be used
2996 get_constraint_for_component_ref (tree t
, vec
<ce_s
> *results
,
2997 bool address_p
, bool lhs_p
)
3000 HOST_WIDE_INT bitsize
= -1;
3001 HOST_WIDE_INT bitmaxsize
= -1;
3002 HOST_WIDE_INT bitpos
;
3005 /* Some people like to do cute things like take the address of
3008 while (handled_component_p (forzero
)
3009 || INDIRECT_REF_P (forzero
)
3010 || TREE_CODE (forzero
) == MEM_REF
)
3011 forzero
= TREE_OPERAND (forzero
, 0);
3013 if (CONSTANT_CLASS_P (forzero
) && integer_zerop (forzero
))
3015 struct constraint_expr temp
;
3018 temp
.var
= integer_id
;
3020 results
->safe_push (temp
);
3024 /* Handle type-punning through unions. If we are extracting a pointer
3025 from a union via a possibly type-punning access that pointer
3026 points to anything, similar to a conversion of an integer to
3032 TREE_CODE (u
) == COMPONENT_REF
|| TREE_CODE (u
) == ARRAY_REF
;
3033 u
= TREE_OPERAND (u
, 0))
3034 if (TREE_CODE (u
) == COMPONENT_REF
3035 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u
, 0))) == UNION_TYPE
)
3037 struct constraint_expr temp
;
3040 temp
.var
= anything_id
;
3041 temp
.type
= ADDRESSOF
;
3042 results
->safe_push (temp
);
3047 t
= get_ref_base_and_extent (t
, &bitpos
, &bitsize
, &bitmaxsize
);
3049 /* Pretend to take the address of the base, we'll take care of
3050 adding the required subset of sub-fields below. */
3051 get_constraint_for_1 (t
, results
, true, lhs_p
);
3052 gcc_assert (results
->length () == 1);
3053 struct constraint_expr
&result
= results
->last ();
3055 if (result
.type
== SCALAR
3056 && get_varinfo (result
.var
)->is_full_var
)
3057 /* For single-field vars do not bother about the offset. */
3059 else if (result
.type
== SCALAR
)
3061 /* In languages like C, you can access one past the end of an
3062 array. You aren't allowed to dereference it, so we can
3063 ignore this constraint. When we handle pointer subtraction,
3064 we may have to do something cute here. */
3066 if ((unsigned HOST_WIDE_INT
)bitpos
< get_varinfo (result
.var
)->fullsize
3069 /* It's also not true that the constraint will actually start at the
3070 right offset, it may start in some padding. We only care about
3071 setting the constraint to the first actual field it touches, so
3073 struct constraint_expr cexpr
= result
;
3077 for (curr
= get_varinfo (cexpr
.var
); curr
; curr
= curr
->next
)
3079 if (ranges_overlap_p (curr
->offset
, curr
->size
,
3080 bitpos
, bitmaxsize
))
3082 cexpr
.var
= curr
->id
;
3083 results
->safe_push (cexpr
);
3088 /* If we are going to take the address of this field then
3089 to be able to compute reachability correctly add at least
3090 the last field of the variable. */
3091 if (address_p
&& results
->length () == 0)
3093 curr
= get_varinfo (cexpr
.var
);
3094 while (curr
->next
!= NULL
)
3096 cexpr
.var
= curr
->id
;
3097 results
->safe_push (cexpr
);
3099 else if (results
->length () == 0)
3100 /* Assert that we found *some* field there. The user couldn't be
3101 accessing *only* padding. */
3102 /* Still the user could access one past the end of an array
3103 embedded in a struct resulting in accessing *only* padding. */
3104 /* Or accessing only padding via type-punning to a type
3105 that has a field just in padding space. */
3107 cexpr
.type
= SCALAR
;
3108 cexpr
.var
= anything_id
;
3110 results
->safe_push (cexpr
);
3113 else if (bitmaxsize
== 0)
3115 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3116 fprintf (dump_file
, "Access to zero-sized part of variable,"
3120 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3121 fprintf (dump_file
, "Access to past the end of variable, ignoring\n");
3123 else if (result
.type
== DEREF
)
3125 /* If we do not know exactly where the access goes say so. Note
3126 that only for non-structure accesses we know that we access
3127 at most one subfield of any variable. */
3129 || bitsize
!= bitmaxsize
3130 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t
))
3131 || result
.offset
== UNKNOWN_OFFSET
)
3132 result
.offset
= UNKNOWN_OFFSET
;
3134 result
.offset
+= bitpos
;
3136 else if (result
.type
== ADDRESSOF
)
3138 /* We can end up here for component references on a
3139 VIEW_CONVERT_EXPR <>(&foobar). */
3140 result
.type
= SCALAR
;
3141 result
.var
= anything_id
;
/* NOTE(review): fragmented extraction -- interior lines missing (the
   SCALAR->DEREF and ADDRESSOF->SCALAR branch bodies are absent); tokens
   kept verbatim.  Only the DEREF-of-DEREF temporary split is visible.  */
3149 /* Dereference the constraint expression CONS, and return the result.
3150 DEREF (ADDRESSOF) = SCALAR
3151 DEREF (SCALAR) = DEREF
3152 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3153 This is needed so that we can handle dereferencing DEREF constraints. */
3156 do_deref (vec
<ce_s
> *constraints
)
3158 struct constraint_expr
*c
;
3161 FOR_EACH_VEC_ELT (*constraints
, i
, c
)
3163 if (c
->type
== SCALAR
)
3165 else if (c
->type
== ADDRESSOF
)
3167 else if (c
->type
== DEREF
)
3169 struct constraint_expr tmplhs
;
3170 tmplhs
= new_scalar_tmp_constraint_exp ("dereftmp");
3171 process_constraint (new_constraint (tmplhs
, *c
));
3172 c
->var
= tmplhs
.var
;
/* NOTE(review): fragmented extraction -- interior lines missing; tokens
   kept verbatim.  Resolves T with address taken, then converts SCALAR
   results to ADDRESSOF; DEREF results are handled on missing lines.  */
3179 /* Given a tree T, return the constraint expression for taking the
3183 get_constraint_for_address_of (tree t
, vec
<ce_s
> *results
)
3185 struct constraint_expr
*c
;
3188 get_constraint_for_1 (t
, results
, true, true);
3190 FOR_EACH_VEC_ELT (*results
, i
, c
)
3192 if (c
->type
== DEREF
)
3195 c
->type
= ADDRESSOF
;
/* NOTE(review): fragmented extraction -- interior lines missing (case
   labels such as ADDR_EXPR/MEM_REF/SSA_NAME/CONSTRUCTOR are partly
   absent); tokens kept verbatim.  Visible logic: NULL/empty-constructor
   yields &NOTHING (or &NONLOCAL without null-pointer-check deletion),
   STRING_CST yields READONLY, then dispatch on tree code class with a
   default fallback of &ANYTHING.  */
3199 /* Given a tree T, return the constraint expression for it. */
3202 get_constraint_for_1 (tree t
, vec
<ce_s
> *results
, bool address_p
,
3205 struct constraint_expr temp
;
3207 /* x = integer is all glommed to a single variable, which doesn't
3208 point to anything by itself. That is, of course, unless it is an
3209 integer constant being treated as a pointer, in which case, we
3210 will return that this is really the addressof anything. This
3211 happens below, since it will fall into the default case. The only
3212 case we know something about an integer treated like a pointer is
3213 when it is the NULL pointer, and then we just say it points to
3216 Do not do that if -fno-delete-null-pointer-checks though, because
3217 in that case *NULL does not fail, so it _should_ alias *anything.
3218 It is not worth adding a new option or renaming the existing one,
3219 since this case is relatively obscure. */
3220 if ((TREE_CODE (t
) == INTEGER_CST
3221 && integer_zerop (t
))
3222 /* The only valid CONSTRUCTORs in gimple with pointer typed
3223 elements are zero-initializer. But in IPA mode we also
3224 process global initializers, so verify at least. */
3225 || (TREE_CODE (t
) == CONSTRUCTOR
3226 && CONSTRUCTOR_NELTS (t
) == 0))
3228 if (flag_delete_null_pointer_checks
)
3229 temp
.var
= nothing_id
;
3231 temp
.var
= nonlocal_id
;
3232 temp
.type
= ADDRESSOF
;
3234 results
->safe_push (temp
);
3238 /* String constants are read-only. */
3239 if (TREE_CODE (t
) == STRING_CST
)
3241 temp
.var
= readonly_id
;
3244 results
->safe_push (temp
);
3248 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
3250 case tcc_expression
:
3252 switch (TREE_CODE (t
))
3255 get_constraint_for_address_of (TREE_OPERAND (t
, 0), results
);
3263 switch (TREE_CODE (t
))
3267 struct constraint_expr cs
;
3269 get_constraint_for_ptr_offset (TREE_OPERAND (t
, 0),
3270 TREE_OPERAND (t
, 1), results
);
3273 /* If we are not taking the address then make sure to process
3274 all subvariables we might access. */
3278 cs
= results
->last ();
3279 if (cs
.type
== DEREF
3280 && type_can_have_subvars (TREE_TYPE (t
)))
3282 /* For dereferences this means we have to defer it
3284 results
->last ().offset
= UNKNOWN_OFFSET
;
3287 if (cs
.type
!= SCALAR
)
3290 vi
= get_varinfo (cs
.var
);
3292 if (!vi
->is_full_var
3295 unsigned HOST_WIDE_INT size
;
3296 if (host_integerp (TYPE_SIZE (TREE_TYPE (t
)), 1))
3297 size
= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t
)));
3300 for (; curr
; curr
= curr
->next
)
3302 if (curr
->offset
- vi
->offset
< size
)
3305 results
->safe_push (cs
);
3314 case ARRAY_RANGE_REF
:
3316 get_constraint_for_component_ref (t
, results
, address_p
, lhs_p
);
3318 case VIEW_CONVERT_EXPR
:
3319 get_constraint_for_1 (TREE_OPERAND (t
, 0), results
, address_p
,
3322 /* We are missing handling for TARGET_MEM_REF here. */
3327 case tcc_exceptional
:
3329 switch (TREE_CODE (t
))
3333 get_constraint_for_ssa_var (t
, results
, address_p
);
3340 vec
<ce_s
> tmp
= vNULL
;
3341 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), i
, val
)
3343 struct constraint_expr
*rhsp
;
3345 get_constraint_for_1 (val
, &tmp
, address_p
, lhs_p
);
3346 FOR_EACH_VEC_ELT (tmp
, j
, rhsp
)
3347 results
->safe_push (*rhsp
);
3351 /* We do not know whether the constructor was complete,
3352 so technically we have to add &NOTHING or &ANYTHING
3353 like we do for an empty constructor as well. */
3360 case tcc_declaration
:
3362 get_constraint_for_ssa_var (t
, results
, address_p
);
3367 /* We cannot refer to automatic variables through constants. */
3368 temp
.type
= ADDRESSOF
;
3369 temp
.var
= nonlocal_id
;
3371 results
->safe_push (temp
);
3377 /* The default fallback is a constraint from anything. */
3378 temp
.type
= ADDRESSOF
;
3379 temp
.var
= anything_id
;
3381 results
->safe_push (temp
);
/* NOTE(review): fragmented extraction; tokens kept verbatim.  Thin
   wrapper: lhs_p == true variant of get_constraint_for_1.  */
3384 /* Given a gimple tree T, return the constraint expression vector for it. */
3387 get_constraint_for (tree t
, vec
<ce_s
> *results
)
3389 gcc_assert (results
->length () == 0);
3391 get_constraint_for_1 (t
, results
, false, true);
/* NOTE(review): fragmented extraction; tokens kept verbatim.  Thin
   wrapper: lhs_p == false variant of get_constraint_for_1.  */
3394 /* Given a gimple tree T, return the constraint expression vector for it
3395 to be used as the rhs of a constraint. */
3398 get_constraint_for_rhs (tree t
, vec
<ce_s
> *results
)
3400 gcc_assert (results
->length () == 0);
3402 get_constraint_for_1 (t
, results
, false, false);
/* NOTE(review): fragmented extraction -- interior lines missing; tokens
   kept verbatim.  Visible logic: small cross-products are emitted
   directly; larger ones route both sides through one scalar temporary
   to avoid a quadratic number of constraints.  */
3406 /* Efficiently generates constraints from all entries in *RHSC to all
3407 entries in *LHSC. */
3410 process_all_all_constraints (vec
<ce_s
> lhsc
,
3413 struct constraint_expr
*lhsp
, *rhsp
;
3416 if (lhsc
.length () <= 1 || rhsc
.length () <= 1)
3418 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
3419 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
3420 process_constraint (new_constraint (*lhsp
, *rhsp
));
3424 struct constraint_expr tmp
;
3425 tmp
= new_scalar_tmp_constraint_exp ("allalltmp");
3426 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
3427 process_constraint (new_constraint (tmp
, *rhsp
));
3428 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
3429 process_constraint (new_constraint (*lhsp
, tmp
));
/* NOTE(review): fragmented extraction -- interior lines missing (loop
   advance and the trailing else branch are absent); tokens kept verbatim.
   Visible logic: DEREF-involving copies go through the all-to-all helper
   with UNKNOWN_OFFSET; SCALAR-to-SCALAR copies pair up overlapping
   sub-fields of the two sides field by field.  */
3433 /* Handle aggregate copies by expanding into copies of the respective
3434 fields of the structures. */
3437 do_structure_copy (tree lhsop
, tree rhsop
)
3439 struct constraint_expr
*lhsp
, *rhsp
;
3440 vec
<ce_s
> lhsc
= vNULL
;
3441 vec
<ce_s
> rhsc
= vNULL
;
3444 get_constraint_for (lhsop
, &lhsc
);
3445 get_constraint_for_rhs (rhsop
, &rhsc
);
3448 if (lhsp
->type
== DEREF
3449 || (lhsp
->type
== ADDRESSOF
&& lhsp
->var
== anything_id
)
3450 || rhsp
->type
== DEREF
)
3452 if (lhsp
->type
== DEREF
)
3454 gcc_assert (lhsc
.length () == 1);
3455 lhsp
->offset
= UNKNOWN_OFFSET
;
3457 if (rhsp
->type
== DEREF
)
3459 gcc_assert (rhsc
.length () == 1);
3460 rhsp
->offset
= UNKNOWN_OFFSET
;
3462 process_all_all_constraints (lhsc
, rhsc
);
3464 else if (lhsp
->type
== SCALAR
3465 && (rhsp
->type
== SCALAR
3466 || rhsp
->type
== ADDRESSOF
))
3468 HOST_WIDE_INT lhssize
, lhsmaxsize
, lhsoffset
;
3469 HOST_WIDE_INT rhssize
, rhsmaxsize
, rhsoffset
;
3471 get_ref_base_and_extent (lhsop
, &lhsoffset
, &lhssize
, &lhsmaxsize
);
3472 get_ref_base_and_extent (rhsop
, &rhsoffset
, &rhssize
, &rhsmaxsize
);
3473 for (j
= 0; lhsc
.iterate (j
, &lhsp
);)
3475 varinfo_t lhsv
, rhsv
;
3477 lhsv
= get_varinfo (lhsp
->var
);
3478 rhsv
= get_varinfo (rhsp
->var
);
3479 if (lhsv
->may_have_pointers
3480 && (lhsv
->is_full_var
3481 || rhsv
->is_full_var
3482 || ranges_overlap_p (lhsv
->offset
+ rhsoffset
, lhsv
->size
,
3483 rhsv
->offset
+ lhsoffset
, rhsv
->size
)))
3484 process_constraint (new_constraint (*lhsp
, *rhsp
));
3485 if (!rhsv
->is_full_var
3486 && (lhsv
->is_full_var
3487 || (lhsv
->offset
+ rhsoffset
+ lhsv
->size
3488 > rhsv
->offset
+ lhsoffset
+ rhsv
->size
)))
3491 if (k
>= rhsc
.length ())
/* NOTE(review): fragmented extraction -- interior lines missing (the
   includes.var assignment from ID is absent); tokens kept verbatim.  */
3505 /* Create constraints ID = { rhsc }. */
3508 make_constraints_to (unsigned id
, vec
<ce_s
> rhsc
)
3510 struct constraint_expr
*c
;
3511 struct constraint_expr includes
;
3515 includes
.offset
= 0;
3516 includes
.type
= SCALAR
;
3518 FOR_EACH_VEC_ELT (rhsc
, j
, c
)
3519 process_constraint (new_constraint (includes
, *c
));
/* NOTE(review): fragmented extraction -- interior lines missing (the
   rhsc release is presumably on a missing line -- TODO confirm); tokens
   kept verbatim.  Convenience wrapper over make_constraints_to.  */
3522 /* Create a constraint ID = OP. */
3525 make_constraint_to (unsigned id
, tree op
)
3527 vec
<ce_s
> rhsc
= vNULL
;
3528 get_constraint_for_rhs (op
, &rhsc
);
3529 make_constraints_to (id
, rhsc
);
/* NOTE(review): fragmented extraction -- interior lines missing (lhs/rhs
   var and offset setup are absent); tokens kept verbatim.  Emits a
   single VI = &FROM constraint.  */
3533 /* Create a constraint ID = &FROM. */
3536 make_constraint_from (varinfo_t vi
, int from
)
3538 struct constraint_expr lhs
, rhs
;
3546 rhs
.type
= ADDRESSOF
;
3547 process_constraint (new_constraint (lhs
, rhs
));
/* NOTE(review): fragmented extraction -- interior lines missing (all
   lhs/rhs field setup is absent); tokens kept verbatim.  Emits a single
   VI = FROM copy constraint.  */
3550 /* Create a constraint ID = FROM. */
3553 make_copy_constraint (varinfo_t vi
, int from
)
3555 struct constraint_expr lhs
, rhs
;
3564 process_constraint (new_constraint (lhs
, rhs
));
/* NOTE(review): fragmented extraction; tokens kept verbatim.  Routes OP
   into the ESCAPED solution.  */
3567 /* Make constraints necessary to make OP escape. */
3570 make_escape_constraint (tree op
)
3572 make_constraint_to (escaped_id
, op
);
/* NOTE(review): fragmented extraction -- interior lines missing (the
   VAR = *VAR constraint setup is absent; only the VAR = VAR + UNKNOWN
   half is visible); tokens kept verbatim.  */
3575 /* Add constraints so that the solution of VI is transitively closed. */
3578 make_transitive_closure_constraints (varinfo_t vi
)
3580 struct constraint_expr lhs
, rhs
;
3589 process_constraint (new_constraint (lhs
, rhs
));
3591 /* VAR = VAR + UNKNOWN; */
3597 rhs
.offset
= UNKNOWN_OFFSET
;
3598 process_constraint (new_constraint (lhs
, rhs
));
/* NOTE(review): fragmented extraction; tokens kept verbatim.  Obstack
   backing the decls built by build_fake_var_decl below.  */
3601 /* Temporary storage for fake var decls. */
3602 struct obstack fake_var_decl_obstack
;
/* NOTE(review): fragmented extraction -- interior lines missing (the
   final return of DECL is absent); tokens kept verbatim.  Allocates a
   zeroed tree_var_decl on the obstack, stamps it as a VAR_DECL of TYPE
   with a fresh DECL_UID, and lays it out.  */
3604 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3607 build_fake_var_decl (tree type
)
3609 tree decl
= (tree
) XOBNEW (&fake_var_decl_obstack
, struct tree_var_decl
);
3610 memset (decl
, 0, sizeof (struct tree_var_decl
));
3611 TREE_SET_CODE (decl
, VAR_DECL
);
3612 TREE_TYPE (decl
) = type
;
3613 DECL_UID (decl
) = allocate_decl_uid ();
3614 SET_DECL_PT_UID (decl
, -1);
3615 layout_decl (decl
, 0);
/* NOTE(review): fragmented extraction -- interior lines missing (the
   return of VI is absent); tokens kept verbatim.  Builds a fake external
   pointer-typed decl, wraps it in a varinfo flagged artificial / heap /
   unknown-size / full, and registers it in vi_for_tree.  */
3619 /* Create a new artificial heap variable with NAME.
3620 Return the created variable. */
3623 make_heapvar (const char *name
)
3628 heapvar
= build_fake_var_decl (ptr_type_node
);
3629 DECL_EXTERNAL (heapvar
) = 1;
3631 vi
= new_var_info (heapvar
, name
);
3632 vi
->is_artificial_var
= true;
3633 vi
->is_heap_var
= true;
3634 vi
->is_unknown_size_var
= true;
3638 vi
->is_full_var
= true;
3639 insert_vi_for_tree (heapvar
, vi
);
/* NOTE(review): fragmented extraction -- interior lines missing (the
   return of VI is absent); tokens kept verbatim.  */
3644 /* Create a new artificial heap variable with NAME and make a
3645 constraint from it to LHS. Set flags according to a tag used
3646 for tracking restrict pointers. */
3649 make_constraint_from_restrict (varinfo_t lhs
, const char *name
)
3651 varinfo_t vi
= make_heapvar (name
);
3652 vi
->is_global_var
= 1;
3653 vi
->may_have_pointers
= 1;
3654 make_constraint_from (lhs
, vi
->id
);
/* NOTE(review): fragmented extraction; tokens kept verbatim.  Same as
   make_constraint_from_restrict, plus the tag also points to NONLOCAL.  */
3658 /* Create a new artificial heap variable with NAME and make a
3659 constraint from it to LHS. Set flags according to a tag used
3660 for tracking restrict pointers and make the artificial heap
3661 point to global memory. */
3664 make_constraint_from_global_restrict (varinfo_t lhs
, const char *name
)
3666 varinfo_t vi
= make_constraint_from_restrict (lhs
, name
);
3667 make_copy_constraint (vi
, nonlocal_id
);
/* NOTE(review): fragmented extraction; tokens kept verbatim.  Offsets of
   the per-function sub-varinfos used in IPA mode.  */
3671 /* In IPA mode there are varinfos for different aspects of each
3672 function designator. One for the points-to set of the return
3673 value, one for the variables that are clobbered by the function,
3674 one for its uses and one for each parameter (including a single
3675 glob for remaining variadic arguments). */
3677 enum { fi_clobbers
= 1, fi_uses
= 2,
3678 fi_static_chain
= 3, fi_result
= 4, fi_parm_base
= 5 };
/* NOTE(review): fragmented extraction -- interior lines missing (the
   offset/type field setup, the non-FUNCTION_DECL branch, and the final
   return are absent); tokens kept verbatim.  Visible logic: ANYTHING
   maps to itself; FUNCTION_DECLs look up the sub-varinfo at PART and
   fall back to anything_id when it is missing.  */
3680 /* Get a constraint for the requested part of a function designator FI
3681 when operating in IPA mode. */
3683 static struct constraint_expr
3684 get_function_part_constraint (varinfo_t fi
, unsigned part
)
3686 struct constraint_expr c
;
3688 gcc_assert (in_ipa_mode
);
3690 if (fi
->id
== anything_id
)
3692 /* ??? We probably should have a ANYFN special variable. */
3693 c
.var
= anything_id
;
3697 else if (TREE_CODE (fi
->decl
) == FUNCTION_DECL
)
3699 varinfo_t ai
= first_vi_for_offset (fi
, part
);
3703 c
.var
= anything_id
;
/* NOTE(review): fragmented extraction -- interior lines missing; tokens
   kept verbatim.  Visible logic: per argument, EAF_UNUSED args are
   skipped; NOCLOBBER+NOESCAPE args flow only into the call-use var
   (through a transitive-closure temp unless EAF_DIRECT); NOESCAPE args
   additionally feed the call-clobber var and get *tem = NONLOCAL;
   everything else escapes.  The static chain and an NRV return slot
   escape too, and the call result reads NONLOCAL (plus the call-use var
   when arguments flowed into it).  */
3717 /* For non-IPA mode, generate constraints necessary for a call on the
3721 handle_rhs_call (gimple stmt
, vec
<ce_s
> *results
)
3723 struct constraint_expr rhsc
;
3725 bool returns_uses
= false;
3727 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3729 tree arg
= gimple_call_arg (stmt
, i
);
3730 int flags
= gimple_call_arg_flags (stmt
, i
);
3732 /* If the argument is not used we can ignore it. */
3733 if (flags
& EAF_UNUSED
)
3736 /* As we compute ESCAPED context-insensitive we do not gain
3737 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3738 set. The argument would still get clobbered through the
3740 if ((flags
& EAF_NOCLOBBER
)
3741 && (flags
& EAF_NOESCAPE
))
3743 varinfo_t uses
= get_call_use_vi (stmt
);
3744 if (!(flags
& EAF_DIRECT
))
3746 varinfo_t tem
= new_var_info (NULL_TREE
, "callarg");
3747 make_constraint_to (tem
->id
, arg
);
3748 make_transitive_closure_constraints (tem
);
3749 make_copy_constraint (uses
, tem
->id
);
3752 make_constraint_to (uses
->id
, arg
);
3753 returns_uses
= true;
3755 else if (flags
& EAF_NOESCAPE
)
3757 struct constraint_expr lhs
, rhs
;
3758 varinfo_t uses
= get_call_use_vi (stmt
);
3759 varinfo_t clobbers
= get_call_clobber_vi (stmt
);
3760 varinfo_t tem
= new_var_info (NULL_TREE
, "callarg");
3761 make_constraint_to (tem
->id
, arg
);
3762 if (!(flags
& EAF_DIRECT
))
3763 make_transitive_closure_constraints (tem
);
3764 make_copy_constraint (uses
, tem
->id
);
3765 make_copy_constraint (clobbers
, tem
->id
);
3766 /* Add *tem = nonlocal, do not add *tem = callused as
3767 EAF_NOESCAPE parameters do not escape to other parameters
3768 and all other uses appear in NONLOCAL as well. */
3773 rhs
.var
= nonlocal_id
;
3775 process_constraint (new_constraint (lhs
, rhs
));
3776 returns_uses
= true;
3779 make_escape_constraint (arg
);
3782 /* If we added to the calls uses solution make sure we account for
3783 pointers to it to be returned. */
3786 rhsc
.var
= get_call_use_vi (stmt
)->id
;
3789 results
->safe_push (rhsc
);
3792 /* The static chain escapes as well. */
3793 if (gimple_call_chain (stmt
))
3794 make_escape_constraint (gimple_call_chain (stmt
));
3796 /* And if we applied NRV the address of the return slot escapes as well. */
3797 if (gimple_call_return_slot_opt_p (stmt
)
3798 && gimple_call_lhs (stmt
) != NULL_TREE
3799 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt
))))
3801 vec
<ce_s
> tmpc
= vNULL
;
3802 struct constraint_expr lhsc
, *c
;
3803 get_constraint_for_address_of (gimple_call_lhs (stmt
), &tmpc
);
3804 lhsc
.var
= escaped_id
;
3807 FOR_EACH_VEC_ELT (tmpc
, i
, c
)
3808 process_constraint (new_constraint (lhsc
, *c
));
3812 /* Regular functions return nonlocal memory. */
3813 rhsc
.var
= nonlocal_id
;
3816 results
->safe_push (rhsc
);
/* NOTE(review): fragmented extraction -- interior lines missing; tokens
   kept verbatim.  Visible logic: the lhs constraints also include
   ESCAPED when storing to a global; ERF_RETURNS_ARG overrides rhsc with
   the returned argument; ERF_NOALIAS replaces it with a fresh local
   heap var (pointing to NONLOCAL unless a real builtin malloc);
   otherwise the provided rhsc is used as-is.  */
3819 /* For non-IPA mode, generate constraints necessary for a call
3820 that returns a pointer and assigns it to LHS. This simply makes
3821 the LHS point to global and escaped variables. */
3824 handle_lhs_call (gimple stmt
, tree lhs
, int flags
, vec
<ce_s
> rhsc
,
3827 vec
<ce_s
> lhsc
= vNULL
;
3829 get_constraint_for (lhs
, &lhsc
);
3830 /* If the store is to a global decl make sure to
3831 add proper escape constraints. */
3832 lhs
= get_base_address (lhs
);
3835 && is_global_var (lhs
))
3837 struct constraint_expr tmpc
;
3838 tmpc
.var
= escaped_id
;
3841 lhsc
.safe_push (tmpc
);
3844 /* If the call returns an argument unmodified override the rhs
3846 flags
= gimple_call_return_flags (stmt
);
3847 if (flags
& ERF_RETURNS_ARG
3848 && (flags
& ERF_RETURN_ARG_MASK
) < gimple_call_num_args (stmt
))
3852 arg
= gimple_call_arg (stmt
, flags
& ERF_RETURN_ARG_MASK
);
3853 get_constraint_for (arg
, &rhsc
);
3854 process_all_all_constraints (lhsc
, rhsc
);
3857 else if (flags
& ERF_NOALIAS
)
3860 struct constraint_expr tmpc
;
3862 vi
= make_heapvar ("HEAP");
3863 /* We delay marking allocated storage global until we know if
3865 DECL_EXTERNAL (vi
->decl
) = 0;
3866 vi
->is_global_var
= 0;
3867 /* If this is not a real malloc call assume the memory was
3868 initialized and thus may point to global memory. All
3869 builtin functions with the malloc attribute behave in a sane way. */
3871 || DECL_BUILT_IN_CLASS (fndecl
) != BUILT_IN_NORMAL
)
3872 make_constraint_from (vi
, nonlocal_id
);
3875 tmpc
.type
= ADDRESSOF
;
3876 rhsc
.safe_push (tmpc
);
3877 process_all_all_constraints (lhsc
, rhsc
);
3881 process_all_all_constraints (lhsc
, rhsc
);
3886 /* For non-IPA mode, generate constraints necessary for a call of a
3887 const function that returns a pointer in the statement STMT. */
3890 handle_const_call (gimple stmt
, vec
<ce_s
> *results
)
3892 struct constraint_expr rhsc
;
3895 /* Treat nested const functions the same as pure functions as far
3896 as the static chain is concerned. */
3897 if (gimple_call_chain (stmt
))
3899 varinfo_t uses
= get_call_use_vi (stmt
);
3900 make_transitive_closure_constraints (uses
);
3901 make_constraint_to (uses
->id
, gimple_call_chain (stmt
));
3902 rhsc
.var
= uses
->id
;
3905 results
->safe_push (rhsc
);
3908 /* May return arguments. */
3909 for (k
= 0; k
< gimple_call_num_args (stmt
); ++k
)
3911 tree arg
= gimple_call_arg (stmt
, k
);
3912 vec
<ce_s
> argc
= vNULL
;
3914 struct constraint_expr
*argp
;
3915 get_constraint_for_rhs (arg
, &argc
);
3916 FOR_EACH_VEC_ELT (argc
, i
, argp
)
3917 results
->safe_push (*argp
);
3921 /* May return addresses of globals. */
3922 rhsc
.var
= nonlocal_id
;
3924 rhsc
.type
= ADDRESSOF
;
3925 results
->safe_push (rhsc
);
3928 /* For non-IPA mode, generate constraints necessary for a call to a
3929 pure function in statement STMT. */
3932 handle_pure_call (gimple stmt
, vec
<ce_s
> *results
)
3934 struct constraint_expr rhsc
;
3936 varinfo_t uses
= NULL
;
3938 /* Memory reached from pointer arguments is call-used. */
3939 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3941 tree arg
= gimple_call_arg (stmt
, i
);
3944 uses
= get_call_use_vi (stmt
);
3945 make_transitive_closure_constraints (uses
);
3947 make_constraint_to (uses
->id
, arg
);
3950 /* The static chain is used as well. */
3951 if (gimple_call_chain (stmt
))
3955 uses
= get_call_use_vi (stmt
);
3956 make_transitive_closure_constraints (uses
);
3958 make_constraint_to (uses
->id
, gimple_call_chain (stmt
));
3961 /* Pure functions may return call-used and nonlocal memory. */
3964 rhsc
.var
= uses
->id
;
3967 results
->safe_push (rhsc
);
3969 rhsc
.var
= nonlocal_id
;
3972 results
->safe_push (rhsc
);
3976 /* Return the varinfo for the callee of CALL. */
3979 get_fi_for_callee (gimple call
)
3981 tree decl
, fn
= gimple_call_fn (call
);
3983 if (fn
&& TREE_CODE (fn
) == OBJ_TYPE_REF
)
3984 fn
= OBJ_TYPE_REF_EXPR (fn
);
3986 /* If we can directly resolve the function being called, do so.
3987 Otherwise, it must be some sort of indirect expression that
3988 we should still be able to handle. */
3989 decl
= gimple_call_addr_fndecl (fn
);
3991 return get_vi_for_tree (decl
);
3993 /* If the function is anything other than a SSA name pointer we have no
3994 clue and should be getting ANYFN (well, ANYTHING for now). */
3995 if (!fn
|| TREE_CODE (fn
) != SSA_NAME
)
3996 return get_varinfo (anything_id
);
3998 if (SSA_NAME_IS_DEFAULT_DEF (fn
)
3999 && (TREE_CODE (SSA_NAME_VAR (fn
)) == PARM_DECL
4000 || TREE_CODE (SSA_NAME_VAR (fn
)) == RESULT_DECL
))
4001 fn
= SSA_NAME_VAR (fn
);
4003 return get_vi_for_tree (fn
);
4006 /* Create constraints for the builtin call T. Return true if the call
4007 was handled, otherwise false. */
4010 find_func_aliases_for_builtin_call (gimple t
)
4012 tree fndecl
= gimple_call_fndecl (t
);
4013 vec
<ce_s
> lhsc
= vNULL
;
4014 vec
<ce_s
> rhsc
= vNULL
;
4017 if (fndecl
!= NULL_TREE
4018 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
4019 /* ??? All builtins that are handled here need to be handled
4020 in the alias-oracle query functions explicitly! */
4021 switch (DECL_FUNCTION_CODE (fndecl
))
4023 /* All the following functions return a pointer to the same object
4024 as their first argument points to. The functions do not add
4025 to the ESCAPED solution. The functions make the first argument
4026 pointed to memory point to what the second argument pointed to
4027 memory points to. */
4028 case BUILT_IN_STRCPY
:
4029 case BUILT_IN_STRNCPY
:
4030 case BUILT_IN_BCOPY
:
4031 case BUILT_IN_MEMCPY
:
4032 case BUILT_IN_MEMMOVE
:
4033 case BUILT_IN_MEMPCPY
:
4034 case BUILT_IN_STPCPY
:
4035 case BUILT_IN_STPNCPY
:
4036 case BUILT_IN_STRCAT
:
4037 case BUILT_IN_STRNCAT
:
4038 case BUILT_IN_STRCPY_CHK
:
4039 case BUILT_IN_STRNCPY_CHK
:
4040 case BUILT_IN_MEMCPY_CHK
:
4041 case BUILT_IN_MEMMOVE_CHK
:
4042 case BUILT_IN_MEMPCPY_CHK
:
4043 case BUILT_IN_STPCPY_CHK
:
4044 case BUILT_IN_STPNCPY_CHK
:
4045 case BUILT_IN_STRCAT_CHK
:
4046 case BUILT_IN_STRNCAT_CHK
:
4047 case BUILT_IN_TM_MEMCPY
:
4048 case BUILT_IN_TM_MEMMOVE
:
4050 tree res
= gimple_call_lhs (t
);
4051 tree dest
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (fndecl
)
4052 == BUILT_IN_BCOPY
? 1 : 0));
4053 tree src
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (fndecl
)
4054 == BUILT_IN_BCOPY
? 0 : 1));
4055 if (res
!= NULL_TREE
)
4057 get_constraint_for (res
, &lhsc
);
4058 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_MEMPCPY
4059 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPCPY
4060 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPNCPY
4061 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_MEMPCPY_CHK
4062 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPCPY_CHK
4063 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPNCPY_CHK
)
4064 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &rhsc
);
4066 get_constraint_for (dest
, &rhsc
);
4067 process_all_all_constraints (lhsc
, rhsc
);
4071 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4072 get_constraint_for_ptr_offset (src
, NULL_TREE
, &rhsc
);
4075 process_all_all_constraints (lhsc
, rhsc
);
4080 case BUILT_IN_MEMSET
:
4081 case BUILT_IN_MEMSET_CHK
:
4082 case BUILT_IN_TM_MEMSET
:
4084 tree res
= gimple_call_lhs (t
);
4085 tree dest
= gimple_call_arg (t
, 0);
4088 struct constraint_expr ac
;
4089 if (res
!= NULL_TREE
)
4091 get_constraint_for (res
, &lhsc
);
4092 get_constraint_for (dest
, &rhsc
);
4093 process_all_all_constraints (lhsc
, rhsc
);
4097 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4099 if (flag_delete_null_pointer_checks
4100 && integer_zerop (gimple_call_arg (t
, 1)))
4102 ac
.type
= ADDRESSOF
;
4103 ac
.var
= nothing_id
;
4108 ac
.var
= integer_id
;
4111 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4112 process_constraint (new_constraint (*lhsp
, ac
));
4116 case BUILT_IN_ASSUME_ALIGNED
:
4118 tree res
= gimple_call_lhs (t
);
4119 tree dest
= gimple_call_arg (t
, 0);
4120 if (res
!= NULL_TREE
)
4122 get_constraint_for (res
, &lhsc
);
4123 get_constraint_for (dest
, &rhsc
);
4124 process_all_all_constraints (lhsc
, rhsc
);
4130 /* All the following functions do not return pointers, do not
4131 modify the points-to sets of memory reachable from their
4132 arguments and do not add to the ESCAPED solution. */
4133 case BUILT_IN_SINCOS
:
4134 case BUILT_IN_SINCOSF
:
4135 case BUILT_IN_SINCOSL
:
4136 case BUILT_IN_FREXP
:
4137 case BUILT_IN_FREXPF
:
4138 case BUILT_IN_FREXPL
:
4139 case BUILT_IN_GAMMA_R
:
4140 case BUILT_IN_GAMMAF_R
:
4141 case BUILT_IN_GAMMAL_R
:
4142 case BUILT_IN_LGAMMA_R
:
4143 case BUILT_IN_LGAMMAF_R
:
4144 case BUILT_IN_LGAMMAL_R
:
4146 case BUILT_IN_MODFF
:
4147 case BUILT_IN_MODFL
:
4148 case BUILT_IN_REMQUO
:
4149 case BUILT_IN_REMQUOF
:
4150 case BUILT_IN_REMQUOL
:
4153 case BUILT_IN_STRDUP
:
4154 case BUILT_IN_STRNDUP
:
4155 if (gimple_call_lhs (t
))
4157 handle_lhs_call (t
, gimple_call_lhs (t
), gimple_call_flags (t
),
4159 get_constraint_for_ptr_offset (gimple_call_lhs (t
),
4161 get_constraint_for_ptr_offset (gimple_call_arg (t
, 0),
4165 process_all_all_constraints (lhsc
, rhsc
);
4171 /* Trampolines are special - they set up passing the static
4173 case BUILT_IN_INIT_TRAMPOLINE
:
4175 tree tramp
= gimple_call_arg (t
, 0);
4176 tree nfunc
= gimple_call_arg (t
, 1);
4177 tree frame
= gimple_call_arg (t
, 2);
4179 struct constraint_expr lhs
, *rhsp
;
4182 varinfo_t nfi
= NULL
;
4183 gcc_assert (TREE_CODE (nfunc
) == ADDR_EXPR
);
4184 nfi
= lookup_vi_for_tree (TREE_OPERAND (nfunc
, 0));
4187 lhs
= get_function_part_constraint (nfi
, fi_static_chain
);
4188 get_constraint_for (frame
, &rhsc
);
4189 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4190 process_constraint (new_constraint (lhs
, *rhsp
));
4193 /* Make the frame point to the function for
4194 the trampoline adjustment call. */
4195 get_constraint_for (tramp
, &lhsc
);
4197 get_constraint_for (nfunc
, &rhsc
);
4198 process_all_all_constraints (lhsc
, rhsc
);
4205 /* Else fallthru to generic handling which will let
4206 the frame escape. */
4209 case BUILT_IN_ADJUST_TRAMPOLINE
:
4211 tree tramp
= gimple_call_arg (t
, 0);
4212 tree res
= gimple_call_lhs (t
);
4213 if (in_ipa_mode
&& res
)
4215 get_constraint_for (res
, &lhsc
);
4216 get_constraint_for (tramp
, &rhsc
);
4218 process_all_all_constraints (lhsc
, rhsc
);
4224 CASE_BUILT_IN_TM_STORE (1):
4225 CASE_BUILT_IN_TM_STORE (2):
4226 CASE_BUILT_IN_TM_STORE (4):
4227 CASE_BUILT_IN_TM_STORE (8):
4228 CASE_BUILT_IN_TM_STORE (FLOAT
):
4229 CASE_BUILT_IN_TM_STORE (DOUBLE
):
4230 CASE_BUILT_IN_TM_STORE (LDOUBLE
):
4231 CASE_BUILT_IN_TM_STORE (M64
):
4232 CASE_BUILT_IN_TM_STORE (M128
):
4233 CASE_BUILT_IN_TM_STORE (M256
):
4235 tree addr
= gimple_call_arg (t
, 0);
4236 tree src
= gimple_call_arg (t
, 1);
4238 get_constraint_for (addr
, &lhsc
);
4240 get_constraint_for (src
, &rhsc
);
4241 process_all_all_constraints (lhsc
, rhsc
);
4246 CASE_BUILT_IN_TM_LOAD (1):
4247 CASE_BUILT_IN_TM_LOAD (2):
4248 CASE_BUILT_IN_TM_LOAD (4):
4249 CASE_BUILT_IN_TM_LOAD (8):
4250 CASE_BUILT_IN_TM_LOAD (FLOAT
):
4251 CASE_BUILT_IN_TM_LOAD (DOUBLE
):
4252 CASE_BUILT_IN_TM_LOAD (LDOUBLE
):
4253 CASE_BUILT_IN_TM_LOAD (M64
):
4254 CASE_BUILT_IN_TM_LOAD (M128
):
4255 CASE_BUILT_IN_TM_LOAD (M256
):
4257 tree dest
= gimple_call_lhs (t
);
4258 tree addr
= gimple_call_arg (t
, 0);
4260 get_constraint_for (dest
, &lhsc
);
4261 get_constraint_for (addr
, &rhsc
);
4263 process_all_all_constraints (lhsc
, rhsc
);
4268 /* Variadic argument handling needs to be handled in IPA
4270 case BUILT_IN_VA_START
:
4272 tree valist
= gimple_call_arg (t
, 0);
4273 struct constraint_expr rhs
, *lhsp
;
4275 get_constraint_for (valist
, &lhsc
);
4277 /* The va_list gets access to pointers in variadic
4278 arguments. Which we know in the case of IPA analysis
4279 and otherwise are just all nonlocal variables. */
4282 fi
= lookup_vi_for_tree (cfun
->decl
);
4283 rhs
= get_function_part_constraint (fi
, ~0);
4284 rhs
.type
= ADDRESSOF
;
4288 rhs
.var
= nonlocal_id
;
4289 rhs
.type
= ADDRESSOF
;
4292 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4293 process_constraint (new_constraint (*lhsp
, rhs
));
4295 /* va_list is clobbered. */
4296 make_constraint_to (get_call_clobber_vi (t
)->id
, valist
);
4299 /* va_end doesn't have any effect that matters. */
4300 case BUILT_IN_VA_END
:
4302 /* Alternate return. Simply give up for now. */
4303 case BUILT_IN_RETURN
:
4307 || !(fi
= get_vi_for_tree (cfun
->decl
)))
4308 make_constraint_from (get_varinfo (escaped_id
), anything_id
);
4309 else if (in_ipa_mode
4312 struct constraint_expr lhs
, rhs
;
4313 lhs
= get_function_part_constraint (fi
, fi_result
);
4314 rhs
.var
= anything_id
;
4317 process_constraint (new_constraint (lhs
, rhs
));
4321 /* printf-style functions may have hooks to set pointers to
4322 point to somewhere into the generated string. Leave them
4323 for a later excercise... */
4325 /* Fallthru to general call handling. */;
4331 /* Create constraints for the call T. */
4334 find_func_aliases_for_call (gimple t
)
4336 tree fndecl
= gimple_call_fndecl (t
);
4337 vec
<ce_s
> lhsc
= vNULL
;
4338 vec
<ce_s
> rhsc
= vNULL
;
4341 if (fndecl
!= NULL_TREE
4342 && DECL_BUILT_IN (fndecl
)
4343 && find_func_aliases_for_builtin_call (t
))
4346 fi
= get_fi_for_callee (t
);
4348 || (fndecl
&& !fi
->is_fn_info
))
4350 vec
<ce_s
> rhsc
= vNULL
;
4351 int flags
= gimple_call_flags (t
);
4353 /* Const functions can return their arguments and addresses
4354 of global memory but not of escaped memory. */
4355 if (flags
& (ECF_CONST
|ECF_NOVOPS
))
4357 if (gimple_call_lhs (t
))
4358 handle_const_call (t
, &rhsc
);
4360 /* Pure functions can return addresses in and of memory
4361 reachable from their arguments, but they are not an escape
4362 point for reachable memory of their arguments. */
4363 else if (flags
& (ECF_PURE
|ECF_LOOPING_CONST_OR_PURE
))
4364 handle_pure_call (t
, &rhsc
);
4366 handle_rhs_call (t
, &rhsc
);
4367 if (gimple_call_lhs (t
))
4368 handle_lhs_call (t
, gimple_call_lhs (t
), flags
, rhsc
, fndecl
);
4376 /* Assign all the passed arguments to the appropriate incoming
4377 parameters of the function. */
4378 for (j
= 0; j
< gimple_call_num_args (t
); j
++)
4380 struct constraint_expr lhs
;
4381 struct constraint_expr
*rhsp
;
4382 tree arg
= gimple_call_arg (t
, j
);
4384 get_constraint_for_rhs (arg
, &rhsc
);
4385 lhs
= get_function_part_constraint (fi
, fi_parm_base
+ j
);
4386 while (rhsc
.length () != 0)
4388 rhsp
= &rhsc
.last ();
4389 process_constraint (new_constraint (lhs
, *rhsp
));
4394 /* If we are returning a value, assign it to the result. */
4395 lhsop
= gimple_call_lhs (t
);
4398 struct constraint_expr rhs
;
4399 struct constraint_expr
*lhsp
;
4401 get_constraint_for (lhsop
, &lhsc
);
4402 rhs
= get_function_part_constraint (fi
, fi_result
);
4404 && DECL_RESULT (fndecl
)
4405 && DECL_BY_REFERENCE (DECL_RESULT (fndecl
)))
4407 vec
<ce_s
> tem
= vNULL
;
4408 tem
.safe_push (rhs
);
4413 FOR_EACH_VEC_ELT (lhsc
, j
, lhsp
)
4414 process_constraint (new_constraint (*lhsp
, rhs
));
4417 /* If we pass the result decl by reference, honor that. */
4420 && DECL_RESULT (fndecl
)
4421 && DECL_BY_REFERENCE (DECL_RESULT (fndecl
)))
4423 struct constraint_expr lhs
;
4424 struct constraint_expr
*rhsp
;
4426 get_constraint_for_address_of (lhsop
, &rhsc
);
4427 lhs
= get_function_part_constraint (fi
, fi_result
);
4428 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
4429 process_constraint (new_constraint (lhs
, *rhsp
));
4433 /* If we use a static chain, pass it along. */
4434 if (gimple_call_chain (t
))
4436 struct constraint_expr lhs
;
4437 struct constraint_expr
*rhsp
;
4439 get_constraint_for (gimple_call_chain (t
), &rhsc
);
4440 lhs
= get_function_part_constraint (fi
, fi_static_chain
);
4441 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
4442 process_constraint (new_constraint (lhs
, *rhsp
));
4447 /* Walk statement T setting up aliasing constraints according to the
4448 references found in T. This function is the main part of the
4449 constraint builder. AI points to auxiliary alias information used
4450 when building alias sets and computing alias grouping heuristics. */
4453 find_func_aliases (gimple origt
)
4456 vec
<ce_s
> lhsc
= vNULL
;
4457 vec
<ce_s
> rhsc
= vNULL
;
4458 struct constraint_expr
*c
;
4461 /* Now build constraints expressions. */
4462 if (gimple_code (t
) == GIMPLE_PHI
)
4467 /* For a phi node, assign all the arguments to
4469 get_constraint_for (gimple_phi_result (t
), &lhsc
);
4470 for (i
= 0; i
< gimple_phi_num_args (t
); i
++)
4472 tree strippedrhs
= PHI_ARG_DEF (t
, i
);
4474 STRIP_NOPS (strippedrhs
);
4475 get_constraint_for_rhs (gimple_phi_arg_def (t
, i
), &rhsc
);
4477 FOR_EACH_VEC_ELT (lhsc
, j
, c
)
4479 struct constraint_expr
*c2
;
4480 while (rhsc
.length () > 0)
4483 process_constraint (new_constraint (*c
, *c2
));
4489 /* In IPA mode, we need to generate constraints to pass call
4490 arguments through their calls. There are two cases,
4491 either a GIMPLE_CALL returning a value, or just a plain
4492 GIMPLE_CALL when we are not.
4494 In non-ipa mode, we need to generate constraints for each
4495 pointer passed by address. */
4496 else if (is_gimple_call (t
))
4497 find_func_aliases_for_call (t
);
4499 /* Otherwise, just a regular assignment statement. Only care about
4500 operations with pointer result, others are dealt with as escape
4501 points if they have pointer operands. */
4502 else if (is_gimple_assign (t
))
4504 /* Otherwise, just a regular assignment statement. */
4505 tree lhsop
= gimple_assign_lhs (t
);
4506 tree rhsop
= (gimple_num_ops (t
) == 2) ? gimple_assign_rhs1 (t
) : NULL
;
4508 if (rhsop
&& TREE_CLOBBER_P (rhsop
))
4509 /* Ignore clobbers, they don't actually store anything into
4512 else if (rhsop
&& AGGREGATE_TYPE_P (TREE_TYPE (lhsop
)))
4513 do_structure_copy (lhsop
, rhsop
);
4516 enum tree_code code
= gimple_assign_rhs_code (t
);
4518 get_constraint_for (lhsop
, &lhsc
);
4520 if (code
== POINTER_PLUS_EXPR
)
4521 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t
),
4522 gimple_assign_rhs2 (t
), &rhsc
);
4523 else if (code
== BIT_AND_EXPR
4524 && TREE_CODE (gimple_assign_rhs2 (t
)) == INTEGER_CST
)
4526 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4527 the pointer. Handle it by offsetting it by UNKNOWN. */
4528 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t
),
4531 else if ((CONVERT_EXPR_CODE_P (code
)
4532 && !(POINTER_TYPE_P (gimple_expr_type (t
))
4533 && !POINTER_TYPE_P (TREE_TYPE (rhsop
))))
4534 || gimple_assign_single_p (t
))
4535 get_constraint_for_rhs (rhsop
, &rhsc
);
4536 else if (code
== COND_EXPR
)
4538 /* The result is a merge of both COND_EXPR arms. */
4539 vec
<ce_s
> tmp
= vNULL
;
4540 struct constraint_expr
*rhsp
;
4542 get_constraint_for_rhs (gimple_assign_rhs2 (t
), &rhsc
);
4543 get_constraint_for_rhs (gimple_assign_rhs3 (t
), &tmp
);
4544 FOR_EACH_VEC_ELT (tmp
, i
, rhsp
)
4545 rhsc
.safe_push (*rhsp
);
4548 else if (truth_value_p (code
))
4549 /* Truth value results are not pointer (parts). Or at least
4550 very very unreasonable obfuscation of a part. */
4554 /* All other operations are merges. */
4555 vec
<ce_s
> tmp
= vNULL
;
4556 struct constraint_expr
*rhsp
;
4558 get_constraint_for_rhs (gimple_assign_rhs1 (t
), &rhsc
);
4559 for (i
= 2; i
< gimple_num_ops (t
); ++i
)
4561 get_constraint_for_rhs (gimple_op (t
, i
), &tmp
);
4562 FOR_EACH_VEC_ELT (tmp
, j
, rhsp
)
4563 rhsc
.safe_push (*rhsp
);
4568 process_all_all_constraints (lhsc
, rhsc
);
4570 /* If there is a store to a global variable the rhs escapes. */
4571 if ((lhsop
= get_base_address (lhsop
)) != NULL_TREE
4573 && is_global_var (lhsop
)
4575 || DECL_EXTERNAL (lhsop
) || TREE_PUBLIC (lhsop
)))
4576 make_escape_constraint (rhsop
);
4578 /* Handle escapes through return. */
4579 else if (gimple_code (t
) == GIMPLE_RETURN
4580 && gimple_return_retval (t
) != NULL_TREE
)
4584 || !(fi
= get_vi_for_tree (cfun
->decl
)))
4585 make_escape_constraint (gimple_return_retval (t
));
4586 else if (in_ipa_mode
4589 struct constraint_expr lhs
;
4590 struct constraint_expr
*rhsp
;
4593 lhs
= get_function_part_constraint (fi
, fi_result
);
4594 get_constraint_for_rhs (gimple_return_retval (t
), &rhsc
);
4595 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4596 process_constraint (new_constraint (lhs
, *rhsp
));
4599 /* Handle asms conservatively by adding escape constraints to everything. */
4600 else if (gimple_code (t
) == GIMPLE_ASM
)
4602 unsigned i
, noutputs
;
4603 const char **oconstraints
;
4604 const char *constraint
;
4605 bool allows_mem
, allows_reg
, is_inout
;
4607 noutputs
= gimple_asm_noutputs (t
);
4608 oconstraints
= XALLOCAVEC (const char *, noutputs
);
4610 for (i
= 0; i
< noutputs
; ++i
)
4612 tree link
= gimple_asm_output_op (t
, i
);
4613 tree op
= TREE_VALUE (link
);
4615 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
4616 oconstraints
[i
] = constraint
;
4617 parse_output_constraint (&constraint
, i
, 0, 0, &allows_mem
,
4618 &allows_reg
, &is_inout
);
4620 /* A memory constraint makes the address of the operand escape. */
4621 if (!allows_reg
&& allows_mem
)
4622 make_escape_constraint (build_fold_addr_expr (op
));
4624 /* The asm may read global memory, so outputs may point to
4625 any global memory. */
4628 vec
<ce_s
> lhsc
= vNULL
;
4629 struct constraint_expr rhsc
, *lhsp
;
4631 get_constraint_for (op
, &lhsc
);
4632 rhsc
.var
= nonlocal_id
;
4635 FOR_EACH_VEC_ELT (lhsc
, j
, lhsp
)
4636 process_constraint (new_constraint (*lhsp
, rhsc
));
4640 for (i
= 0; i
< gimple_asm_ninputs (t
); ++i
)
4642 tree link
= gimple_asm_input_op (t
, i
);
4643 tree op
= TREE_VALUE (link
);
4645 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
4647 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0, oconstraints
,
4648 &allows_mem
, &allows_reg
);
4650 /* A memory constraint makes the address of the operand escape. */
4651 if (!allows_reg
&& allows_mem
)
4652 make_escape_constraint (build_fold_addr_expr (op
));
4653 /* Strictly we'd only need the constraint to ESCAPED if
4654 the asm clobbers memory, otherwise using something
4655 along the lines of per-call clobbers/uses would be enough. */
4657 make_escape_constraint (op
);
4666 /* Create a constraint adding to the clobber set of FI the memory
4667 pointed to by PTR. */
4670 process_ipa_clobber (varinfo_t fi
, tree ptr
)
4672 vec
<ce_s
> ptrc
= vNULL
;
4673 struct constraint_expr
*c
, lhs
;
4675 get_constraint_for_rhs (ptr
, &ptrc
);
4676 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
4677 FOR_EACH_VEC_ELT (ptrc
, i
, c
)
4678 process_constraint (new_constraint (lhs
, *c
));
4682 /* Walk statement T setting up clobber and use constraints according to the
4683 references found in T. This function is a main part of the
4684 IPA constraint builder. */
4687 find_func_clobbers (gimple origt
)
4690 vec
<ce_s
> lhsc
= vNULL
;
4691 vec
<ce_s
> rhsc
= vNULL
;
4694 /* Add constraints for clobbered/used in IPA mode.
4695 We are not interested in what automatic variables are clobbered
4696 or used as we only use the information in the caller to which
4697 they do not escape. */
4698 gcc_assert (in_ipa_mode
);
4700 /* If the stmt refers to memory in any way it better had a VUSE. */
4701 if (gimple_vuse (t
) == NULL_TREE
)
4704 /* We'd better have function information for the current function. */
4705 fi
= lookup_vi_for_tree (cfun
->decl
);
4706 gcc_assert (fi
!= NULL
);
4708 /* Account for stores in assignments and calls. */
4709 if (gimple_vdef (t
) != NULL_TREE
4710 && gimple_has_lhs (t
))
4712 tree lhs
= gimple_get_lhs (t
);
4714 while (handled_component_p (tem
))
4715 tem
= TREE_OPERAND (tem
, 0);
4717 && !auto_var_in_fn_p (tem
, cfun
->decl
))
4718 || INDIRECT_REF_P (tem
)
4719 || (TREE_CODE (tem
) == MEM_REF
4720 && !(TREE_CODE (TREE_OPERAND (tem
, 0)) == ADDR_EXPR
4722 (TREE_OPERAND (TREE_OPERAND (tem
, 0), 0), cfun
->decl
))))
4724 struct constraint_expr lhsc
, *rhsp
;
4726 lhsc
= get_function_part_constraint (fi
, fi_clobbers
);
4727 get_constraint_for_address_of (lhs
, &rhsc
);
4728 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4729 process_constraint (new_constraint (lhsc
, *rhsp
));
4734 /* Account for uses in assigments and returns. */
4735 if (gimple_assign_single_p (t
)
4736 || (gimple_code (t
) == GIMPLE_RETURN
4737 && gimple_return_retval (t
) != NULL_TREE
))
4739 tree rhs
= (gimple_assign_single_p (t
)
4740 ? gimple_assign_rhs1 (t
) : gimple_return_retval (t
));
4742 while (handled_component_p (tem
))
4743 tem
= TREE_OPERAND (tem
, 0);
4745 && !auto_var_in_fn_p (tem
, cfun
->decl
))
4746 || INDIRECT_REF_P (tem
)
4747 || (TREE_CODE (tem
) == MEM_REF
4748 && !(TREE_CODE (TREE_OPERAND (tem
, 0)) == ADDR_EXPR
4750 (TREE_OPERAND (TREE_OPERAND (tem
, 0), 0), cfun
->decl
))))
4752 struct constraint_expr lhs
, *rhsp
;
4754 lhs
= get_function_part_constraint (fi
, fi_uses
);
4755 get_constraint_for_address_of (rhs
, &rhsc
);
4756 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4757 process_constraint (new_constraint (lhs
, *rhsp
));
4762 if (is_gimple_call (t
))
4764 varinfo_t cfi
= NULL
;
4765 tree decl
= gimple_call_fndecl (t
);
4766 struct constraint_expr lhs
, rhs
;
4769 /* For builtins we do not have separate function info. For those
4770 we do not generate escapes for we have to generate clobbers/uses. */
4772 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
4773 switch (DECL_FUNCTION_CODE (decl
))
4775 /* The following functions use and clobber memory pointed to
4776 by their arguments. */
4777 case BUILT_IN_STRCPY
:
4778 case BUILT_IN_STRNCPY
:
4779 case BUILT_IN_BCOPY
:
4780 case BUILT_IN_MEMCPY
:
4781 case BUILT_IN_MEMMOVE
:
4782 case BUILT_IN_MEMPCPY
:
4783 case BUILT_IN_STPCPY
:
4784 case BUILT_IN_STPNCPY
:
4785 case BUILT_IN_STRCAT
:
4786 case BUILT_IN_STRNCAT
:
4787 case BUILT_IN_STRCPY_CHK
:
4788 case BUILT_IN_STRNCPY_CHK
:
4789 case BUILT_IN_MEMCPY_CHK
:
4790 case BUILT_IN_MEMMOVE_CHK
:
4791 case BUILT_IN_MEMPCPY_CHK
:
4792 case BUILT_IN_STPCPY_CHK
:
4793 case BUILT_IN_STPNCPY_CHK
:
4794 case BUILT_IN_STRCAT_CHK
:
4795 case BUILT_IN_STRNCAT_CHK
:
4797 tree dest
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (decl
)
4798 == BUILT_IN_BCOPY
? 1 : 0));
4799 tree src
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (decl
)
4800 == BUILT_IN_BCOPY
? 0 : 1));
4802 struct constraint_expr
*rhsp
, *lhsp
;
4803 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4804 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
4805 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4806 process_constraint (new_constraint (lhs
, *lhsp
));
4808 get_constraint_for_ptr_offset (src
, NULL_TREE
, &rhsc
);
4809 lhs
= get_function_part_constraint (fi
, fi_uses
);
4810 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4811 process_constraint (new_constraint (lhs
, *rhsp
));
4815 /* The following function clobbers memory pointed to by
4817 case BUILT_IN_MEMSET
:
4818 case BUILT_IN_MEMSET_CHK
:
4820 tree dest
= gimple_call_arg (t
, 0);
4823 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4824 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
4825 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4826 process_constraint (new_constraint (lhs
, *lhsp
));
4830 /* The following functions clobber their second and third
4832 case BUILT_IN_SINCOS
:
4833 case BUILT_IN_SINCOSF
:
4834 case BUILT_IN_SINCOSL
:
4836 process_ipa_clobber (fi
, gimple_call_arg (t
, 1));
4837 process_ipa_clobber (fi
, gimple_call_arg (t
, 2));
4840 /* The following functions clobber their second argument. */
4841 case BUILT_IN_FREXP
:
4842 case BUILT_IN_FREXPF
:
4843 case BUILT_IN_FREXPL
:
4844 case BUILT_IN_LGAMMA_R
:
4845 case BUILT_IN_LGAMMAF_R
:
4846 case BUILT_IN_LGAMMAL_R
:
4847 case BUILT_IN_GAMMA_R
:
4848 case BUILT_IN_GAMMAF_R
:
4849 case BUILT_IN_GAMMAL_R
:
4851 case BUILT_IN_MODFF
:
4852 case BUILT_IN_MODFL
:
4854 process_ipa_clobber (fi
, gimple_call_arg (t
, 1));
4857 /* The following functions clobber their third argument. */
4858 case BUILT_IN_REMQUO
:
4859 case BUILT_IN_REMQUOF
:
4860 case BUILT_IN_REMQUOL
:
4862 process_ipa_clobber (fi
, gimple_call_arg (t
, 2));
4865 /* The following functions neither read nor clobber memory. */
4866 case BUILT_IN_ASSUME_ALIGNED
:
4869 /* Trampolines are of no interest to us. */
4870 case BUILT_IN_INIT_TRAMPOLINE
:
4871 case BUILT_IN_ADJUST_TRAMPOLINE
:
4873 case BUILT_IN_VA_START
:
4874 case BUILT_IN_VA_END
:
4876 /* printf-style functions may have hooks to set pointers to
4877 point to somewhere into the generated string. Leave them
4878 for a later excercise... */
4880 /* Fallthru to general call handling. */;
4883 /* Parameters passed by value are used. */
4884 lhs
= get_function_part_constraint (fi
, fi_uses
);
4885 for (i
= 0; i
< gimple_call_num_args (t
); i
++)
4887 struct constraint_expr
*rhsp
;
4888 tree arg
= gimple_call_arg (t
, i
);
4890 if (TREE_CODE (arg
) == SSA_NAME
4891 || is_gimple_min_invariant (arg
))
4894 get_constraint_for_address_of (arg
, &rhsc
);
4895 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
4896 process_constraint (new_constraint (lhs
, *rhsp
));
4900 /* Build constraints for propagating clobbers/uses along the
4902 cfi
= get_fi_for_callee (t
);
4903 if (cfi
->id
== anything_id
)
4905 if (gimple_vdef (t
))
4906 make_constraint_from (first_vi_for_offset (fi
, fi_clobbers
),
4908 make_constraint_from (first_vi_for_offset (fi
, fi_uses
),
4913 /* For callees without function info (that's external functions),
4914 ESCAPED is clobbered and used. */
4915 if (gimple_call_fndecl (t
)
4916 && !cfi
->is_fn_info
)
4920 if (gimple_vdef (t
))
4921 make_copy_constraint (first_vi_for_offset (fi
, fi_clobbers
),
4923 make_copy_constraint (first_vi_for_offset (fi
, fi_uses
), escaped_id
);
4925 /* Also honor the call statement use/clobber info. */
4926 if ((vi
= lookup_call_clobber_vi (t
)) != NULL
)
4927 make_copy_constraint (first_vi_for_offset (fi
, fi_clobbers
),
4929 if ((vi
= lookup_call_use_vi (t
)) != NULL
)
4930 make_copy_constraint (first_vi_for_offset (fi
, fi_uses
),
4935 /* Otherwise the caller clobbers and uses what the callee does.
4936 ??? This should use a new complex constraint that filters
4937 local variables of the callee. */
4938 if (gimple_vdef (t
))
4940 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
4941 rhs
= get_function_part_constraint (cfi
, fi_clobbers
);
4942 process_constraint (new_constraint (lhs
, rhs
));
4944 lhs
= get_function_part_constraint (fi
, fi_uses
);
4945 rhs
= get_function_part_constraint (cfi
, fi_uses
);
4946 process_constraint (new_constraint (lhs
, rhs
));
4948 else if (gimple_code (t
) == GIMPLE_ASM
)
4950 /* ??? Ick. We can do better. */
4951 if (gimple_vdef (t
))
4952 make_constraint_from (first_vi_for_offset (fi
, fi_clobbers
),
4954 make_constraint_from (first_vi_for_offset (fi
, fi_uses
),
4962 /* Find the first varinfo in the same variable as START that overlaps with
4963 OFFSET. Return NULL if we can't find one. */
4966 first_vi_for_offset (varinfo_t start
, unsigned HOST_WIDE_INT offset
)
4968 /* If the offset is outside of the variable, bail out. */
4969 if (offset
>= start
->fullsize
)
4972 /* If we cannot reach offset from start, lookup the first field
4973 and start from there. */
4974 if (start
->offset
> offset
)
4975 start
= lookup_vi_for_tree (start
->decl
);
4979 /* We may not find a variable in the field list with the actual
4980 offset when when we have glommed a structure to a variable.
4981 In that case, however, offset should still be within the size
4983 if (offset
>= start
->offset
4984 && (offset
- start
->offset
) < start
->size
)
4993 /* Find the first varinfo in the same variable as START that overlaps with
4994 OFFSET. If there is no such varinfo the varinfo directly preceding
4995 OFFSET is returned. */
4998 first_or_preceding_vi_for_offset (varinfo_t start
,
4999 unsigned HOST_WIDE_INT offset
)
5001 /* If we cannot reach offset from start, lookup the first field
5002 and start from there. */
5003 if (start
->offset
> offset
)
5004 start
= lookup_vi_for_tree (start
->decl
);
5006 /* We may not find a variable in the field list with the actual
5007 offset when when we have glommed a structure to a variable.
5008 In that case, however, offset should still be within the size
5010 If we got beyond the offset we look for return the field
5011 directly preceding offset which may be the last field. */
5013 && offset
>= start
->offset
5014 && !((offset
- start
->offset
) < start
->size
))
5015 start
= start
->next
;
5021 /* This structure is used during pushing fields onto the fieldstack
5022 to track the offset of the field, since bitpos_of_field gives it
5023 relative to its immediate containing type, and we want it relative
5024 to the ultimate containing object. */
5028 /* Offset from the base of the base containing object to this field. */
5029 HOST_WIDE_INT offset
;
5031 /* Size, in bits, of the field. */
5032 unsigned HOST_WIDE_INT size
;
5034 unsigned has_unknown_size
: 1;
5036 unsigned must_have_pointers
: 1;
5038 unsigned may_have_pointers
: 1;
5040 unsigned only_restrict_pointers
: 1;
5042 typedef struct fieldoff fieldoff_s
;
/* qsort comparison function for two fieldoff's PA and PB.  Orders by
   offset first, then by size, so that overlapping fields end up
   adjacent after sorting.  */

static int
fieldoff_compare (const void *pa, const void *pb)
{
  const fieldoff_s *foa = (const fieldoff_s *)pa;
  const fieldoff_s *fob = (const fieldoff_s *)pb;
  unsigned HOST_WIDE_INT foasize, fobsize;

  if (foa->offset < fob->offset)
    return -1;
  else if (foa->offset > fob->offset)
    return 1;

  foasize = foa->size;
  fobsize = fob->size;
  if (foasize < fobsize)
    return -1;
  else if (foasize > fobsize)
    return 1;
  return 0;
}
/* Sort a fieldstack according to the field offset and sizes.  */

static void
sort_fieldstack (vec<fieldoff_s> fieldstack)
{
  fieldstack.qsort (fieldoff_compare);
}
/* Return true if T is a type that can have subvars.  */

static inline bool
type_can_have_subvars (const_tree t)
{
  /* Aggregates without overlapping fields can have subvars.  */
  return TREE_CODE (t) == RECORD_TYPE;
}
/* Return true if V is a tree that we can have subvars for.
   Normally, this is any aggregate type.  Also complex
   types which are not gimple registers can have subvars.  */

static inline bool
var_can_have_subvars (const_tree v)
{
  /* Volatile variables should never have subvars.  */
  if (TREE_THIS_VOLATILE (v))
    return false;

  /* Non decls or memory tags can never have subvars.  */
  if (!DECL_P (v))
    return false;

  return type_can_have_subvars (TREE_TYPE (v));
}
/* Return true if T is a type that does contain pointers.  Recurses
   through array element types; function and method types count as
   pointer-bearing because of their arguments.  */

static bool
type_must_have_pointers (tree type)
{
  if (POINTER_TYPE_P (type))
    return true;

  if (TREE_CODE (type) == ARRAY_TYPE)
    return type_must_have_pointers (TREE_TYPE (type));

  /* A function or method can have pointers as arguments, so track
     those separately.  */
  if (TREE_CODE (type) == FUNCTION_TYPE
      || TREE_CODE (type) == METHOD_TYPE)
    return true;

  return false;
}
/* Return true if the FIELD_DECL T has a type that must contain
   pointers (see type_must_have_pointers).  */

static bool
field_must_have_pointers (tree t)
{
  return type_must_have_pointers (TREE_TYPE (t));
}
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
   the fields of TYPE onto fieldstack, recording their offsets along
   the way.

   OFFSET is used to keep track of the offset in this entire
   structure, rather than just the immediately containing structure.
   Returns false if the caller is supposed to handle the field we
   recursed for.  */

static bool
push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
			     HOST_WIDE_INT offset)
{
  tree field;
  bool empty_p = true;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  /* If the vector of fields is growing too big, bail out early.
     Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
     sure this fails.  */
  if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	HOST_WIDE_INT foff = bitpos_of_field (field);

	/* Fields that cannot themselves be decomposed (no subvars,
	   or union-like types) are pushed as a single entry.  */
	if (!var_can_have_subvars (field)
	    || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
	    || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
	  push = true;
	else if (!push_fields_onto_fieldstack
		    (TREE_TYPE (field), fieldstack, offset + foff)
		 && (DECL_SIZE (field)
		     && !integer_zerop (DECL_SIZE (field))))
	  /* Empty structures may have actual size, like in C++.  So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack.  */
	  push = true;

	if (push)
	  {
	    fieldoff_s *pair = NULL;
	    bool has_unknown_size = false;
	    bool must_have_pointers_p;

	    if (!fieldstack->is_empty ())
	      pair = &fieldstack->last ();

	    /* If there isn't anything at offset zero, create sth.  */
	    if (!pair
		&& offset + foff != 0)
	      {
		fieldoff_s e = {0, offset + foff, false, false, false, false};
		pair = fieldstack->safe_push (e);
	      }

	    if (!DECL_SIZE (field)
		|| !host_integerp (DECL_SIZE (field), 1))
	      has_unknown_size = true;

	    /* If adjacent fields do not contain pointers merge them.  */
	    must_have_pointers_p = field_must_have_pointers (field);
	    if (pair
		&& !has_unknown_size
		&& !must_have_pointers_p
		&& !pair->must_have_pointers
		&& !pair->has_unknown_size
		&& pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
	      {
		pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
	      }
	    else
	      {
		fieldoff_s e;
		e.offset = offset + foff;
		e.has_unknown_size = has_unknown_size;
		if (!has_unknown_size)
		  e.size = TREE_INT_CST_LOW (DECL_SIZE (field));
		else
		  e.size = -1;
		e.must_have_pointers = must_have_pointers_p;
		e.may_have_pointers = true;
		e.only_restrict_pointers
		  = (!has_unknown_size
		     && POINTER_TYPE_P (TREE_TYPE (field))
		     && TYPE_RESTRICT (TREE_TYPE (field)));
		fieldstack->safe_push (e);
	      }
	  }

	empty_p = false;
      }

  return !empty_p;
}
/* Count the number of arguments DECL has, and set IS_VARARGS to true
   if it is a varargs function.  */

static unsigned int
count_num_arguments (tree decl, bool *is_varargs)
{
  unsigned int num = 0;
  tree t;

  /* Capture named arguments for K&R functions.  They do not
     have a prototype and thus no TYPE_ARG_TYPES.  */
  for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
    ++num;

  /* Check if the function has variadic arguments.  A prototyped
     function's argument list is terminated by void_type_node; if we
     fall off the end without seeing it, the function is variadic.  */
  for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
    if (TREE_VALUE (t) == void_type_node)
      break;
  if (!t)
    *is_varargs = true;

  return num;
}
/* Creation function node for DECL, using NAME, and return the index
   of the variable we've created for the function.  The function info
   is laid out as a chain of varinfos: the function itself, then
   clobber and use sets, an optional static chain, an optional result,
   one varinfo per parameter and, for variadic functions, one
   representative for all remaining arguments.  */

static varinfo_t
create_function_info_for (tree decl, const char *name)
{
  struct function *fn = DECL_STRUCT_FUNCTION (decl);
  varinfo_t vi, prev_vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;
  unsigned int num_args = count_num_arguments (decl, &is_varargs);

  /* Create the variable info.  */
  vi = new_var_info (decl, name);
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = fi_parm_base + num_args;
  vi->is_fn_info = 1;
  vi->may_have_pointers = false;
  if (is_varargs)
    vi->fullsize = ~0;
  insert_vi_for_tree (vi->decl, vi);

  prev_vi = vi;

  /* Create a variable for things the function clobbers and one for
     things the function uses.  */
  {
    varinfo_t clobbervi, usevi;
    const char *newname;
    char *tempname;

    asprintf (&tempname, "%s.clobber", name);
    newname = ggc_strdup (tempname);
    free (tempname);

    clobbervi = new_var_info (NULL, newname);
    clobbervi->offset = fi_clobbers;
    clobbervi->size = 1;
    clobbervi->fullsize = vi->fullsize;
    clobbervi->is_full_var = true;
    clobbervi->is_global_var = false;
    gcc_assert (prev_vi->offset < clobbervi->offset);
    prev_vi->next = clobbervi;
    prev_vi = clobbervi;

    asprintf (&tempname, "%s.use", name);
    newname = ggc_strdup (tempname);
    free (tempname);

    usevi = new_var_info (NULL, newname);
    usevi->offset = fi_uses;
    usevi->size = 1;
    usevi->fullsize = vi->fullsize;
    usevi->is_full_var = true;
    usevi->is_global_var = false;
    gcc_assert (prev_vi->offset < usevi->offset);
    prev_vi->next = usevi;
    prev_vi = usevi;
  }

  /* And one for the static chain.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t chainvi;
      const char *newname;
      char *tempname;

      asprintf (&tempname, "%s.chain", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      chainvi = new_var_info (fn->static_chain_decl, newname);
      chainvi->offset = fi_static_chain;
      chainvi->size = 1;
      chainvi->fullsize = vi->fullsize;
      chainvi->is_full_var = true;
      chainvi->is_global_var = false;
      gcc_assert (prev_vi->offset < chainvi->offset);
      prev_vi->next = chainvi;
      prev_vi = chainvi;
      insert_vi_for_tree (fn->static_chain_decl, chainvi);
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      tree resultdecl = decl;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      asprintf (&tempname, "%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newname);
      resultvi->offset = fi_result;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->is_full_var = true;
      if (DECL_RESULT (decl))
	resultvi->may_have_pointers = true;
      gcc_assert (prev_vi->offset < resultvi->offset);
      prev_vi->next = resultvi;
      prev_vi = resultvi;
      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);
    }

  /* Set up variables for each argument.  */
  arg = DECL_ARGUMENTS (decl);
  for (i = 0; i < num_args; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree argdecl = decl;

      if (arg)
	argdecl = arg;

      asprintf (&tempname, "%s.arg%d", name, i);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newname);
      argvi->offset = fi_parm_base + i;
      argvi->size = 1;
      argvi->is_full_var = true;
      argvi->fullsize = vi->fullsize;
      if (arg)
	argvi->may_have_pointers = true;
      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi;
      prev_vi = argvi;
      if (arg)
	{
	  insert_vi_for_tree (arg, argvi);
	  arg = DECL_CHAIN (arg);
	}
    }

  /* Add one representative for all further args.  */
  if (is_varargs)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree decl;

      asprintf (&tempname, "%s.varargs", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      /* We need sth that can be pointed to for va_start.  */
      decl = build_fake_var_decl (ptr_type_node);

      argvi = new_var_info (decl, newname);
      argvi->offset = fi_parm_base + num_args;
      argvi->size = ~0;
      argvi->is_full_var = true;
      argvi->is_heap_var = true;
      argvi->fullsize = vi->fullsize;
      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi;
      prev_vi = argvi;
    }

  return vi;
}
/* Return true if FIELDSTACK contains fields that overlap.
   FIELDSTACK is assumed to be sorted by offset.  */

static bool
check_for_overlaps (vec<fieldoff_s> fieldstack)
{
  fieldoff_s *fo = NULL;
  unsigned int i;
  HOST_WIDE_INT lastoffset = -1;

  FOR_EACH_VEC_ELT (fieldstack, i, fo)
    {
      /* Two entries at the same offset overlap; sortedness makes
	 equal offsets adjacent.  */
      if (fo->offset == lastoffset)
	return true;
      lastoffset = fo->offset;
    }
  return false;
}
/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
   This will also create any varinfo structures necessary for fields
   of DECL.  Returns the head of the varinfo chain.  */

static varinfo_t
create_variable_info_for_1 (tree decl, const char *name)
{
  varinfo_t vi, newvi;
  tree decl_type = TREE_TYPE (decl);
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
  vec<fieldoff_s> fieldstack = vNULL;
  fieldoff_s *fo;
  unsigned int i;

  /* Variables of unknown or non-constant size get a single
     unknown-size varinfo.  */
  if (!declsize
      || !host_integerp (declsize, 1))
    {
      vi = new_var_info (decl, name);
      vi->offset = 0;
      vi->size = ~0;
      vi->fullsize = ~0;
      vi->is_unknown_size_var = true;
      vi->is_full_var = true;
      vi->may_have_pointers = true;
      return vi;
    }

  /* Collect field information.  */
  if (use_field_sensitive
      && var_can_have_subvars (decl)
      /* ???  Force us to not use subfields for global initializers
	 in IPA mode.  Else we'd have to parse arbitrary initializers.  */
      && !(in_ipa_mode
	   && is_global_var (decl)
	   && DECL_INITIAL (decl)))
    {
      fieldoff_s *fo = NULL;
      bool notokay = false;
      unsigned int i;

      push_fields_onto_fieldstack (decl_type, &fieldstack, 0);

      for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
	if (fo->has_unknown_size
	    || fo->offset < 0)
	  {
	    notokay = true;
	    break;
	  }

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}

      if (notokay)
	fieldstack.release ();
    }

  /* If we didn't end up collecting sub-variables create a full
     variable for the decl.  */
  if (fieldstack.length () <= 1
      || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    {
      vi = new_var_info (decl, name);
      vi->offset = 0;
      vi->may_have_pointers = true;
      vi->fullsize = TREE_INT_CST_LOW (declsize);
      vi->size = vi->fullsize;
      vi->is_full_var = true;
      fieldstack.release ();
      return vi;
    }

  /* Otherwise build one varinfo per collected field, all chained
     through ->next and sharing the full size of the decl.  */
  vi = new_var_info (decl, name);
  vi->fullsize = TREE_INT_CST_LOW (declsize);
  for (i = 0, newvi = vi;
       fieldstack.iterate (i, &fo);
       ++i, newvi = newvi->next)
    {
      const char *newname = "NULL";
      char *tempname;

      if (dump_file)
	{
	  asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
		    "+" HOST_WIDE_INT_PRINT_DEC, name, fo->offset, fo->size);
	  newname = ggc_strdup (tempname);
	  free (tempname);
	}
      newvi->name = newname;
      newvi->offset = fo->offset;
      newvi->size = fo->size;
      newvi->fullsize = vi->fullsize;
      newvi->may_have_pointers = fo->may_have_pointers;
      newvi->only_restrict_pointers = fo->only_restrict_pointers;
      if (i + 1 < fieldstack.length ())
	newvi->next = new_var_info (decl, name);
    }

  fieldstack.release ();

  return vi;
}
/* Create a varinfo for DECL named NAME (via create_variable_info_for_1),
   register it in the tree->varinfo map, and generate the initial
   constraints for global variables.  Returns the id of the varinfo.  */

static unsigned int
create_variable_info_for (tree decl, const char *name)
{
  varinfo_t vi = create_variable_info_for_1 (decl, name);
  unsigned int id = vi->id;

  insert_vi_for_tree (decl, vi);

  if (TREE_CODE (decl) != VAR_DECL)
    return id;

  /* Create initial constraints for globals.  */
  for (; vi; vi = vi->next)
    {
      if (!vi->may_have_pointers
	  || !vi->is_global_var)
	continue;

      /* Mark global restrict qualified pointers.  */
      if ((POINTER_TYPE_P (TREE_TYPE (decl))
	   && TYPE_RESTRICT (TREE_TYPE (decl)))
	  || vi->only_restrict_pointers)
	{
	  make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
	  continue;
	}

      /* In non-IPA mode the initializer from nonlocal is all we need.  */
      if (!in_ipa_mode
	  || DECL_HARD_REGISTER (decl))
	make_copy_constraint (vi, nonlocal_id);

      /* In IPA mode parse the initializer and generate proper constraints
	 for it.  */
      else
	{
	  struct varpool_node *vnode = varpool_get_node (decl);

	  /* For escaped variables initialize them from nonlocal.  */
	  if (!varpool_all_refs_explicit_p (vnode))
	    make_copy_constraint (vi, nonlocal_id);

	  /* If this is a global variable with an initializer and we are in
	     IPA mode generate constraints for it.  */
	  if (DECL_INITIAL (decl))
	    {
	      vec<ce_s> rhsc = vNULL;
	      struct constraint_expr lhs, *rhsp;
	      unsigned i;

	      get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
	      lhs.var = vi->id;
	      lhs.offset = 0;
	      lhs.type = SCALAR;
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      /* If this is a variable that escapes from the unit
		 the initializer escapes as well.  */
	      if (!varpool_all_refs_explicit_p (vnode))
		{
		  lhs.var = escaped_id;
		  lhs.offset = 0;
		  lhs.type = SCALAR;
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		}
	      rhsc.release ();
	    }
	}
    }

  return id;
}
/* Print out the points-to solution for VAR to FILE.  */

static void
dump_solution_for_var (FILE *file, unsigned int var)
{
  varinfo_t vi = get_varinfo (var);
  unsigned int i;
  bitmap_iterator bi;

  /* Dump the solution for unified vars anyway, this avoids difficulties
     in scanning dumps in the testsuite.  */
  fprintf (file, "%s = { ", vi->name);
  vi = get_varinfo (find (var));
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    fprintf (file, "%s ", get_varinfo (i)->name);
  fprintf (file, "}");

  /* But note when the variable was unified.  */
  if (vi->id != var)
    fprintf (file, " same as %s", vi->name);

  fprintf (file, "\n");
}
/* Print the points-to solution for VAR to stdout.  */

DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stdout, var);
}
/* Create varinfo structures for all of the variables in the
   function for intraprocedural mode.  */

static void
intra_create_variable_infos (void)
{
  tree t;

  /* For each incoming pointer argument arg, create the constraint ARG
     = NONLOCAL or a dummy variable if it is a restrict qualified
     passed-by-reference argument.  */
  for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
    {
      varinfo_t p = get_vi_for_tree (t);

      /* For restrict qualified pointers to objects passed by
	 reference build a real representative for the pointed-to object.
	 Treat restrict qualified references the same.  */
      if (TYPE_RESTRICT (TREE_TYPE (t))
	  && ((DECL_BY_REFERENCE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
	      || TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
	  && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t))))
	{
	  struct constraint_expr lhsc, rhsc;
	  varinfo_t vi;
	  tree heapvar = build_fake_var_decl (TREE_TYPE (TREE_TYPE (t)));
	  DECL_EXTERNAL (heapvar) = 1;
	  vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
	  insert_vi_for_tree (heapvar, vi);
	  /* Constrain the parameter to point to the fake pointed-to
	     object: P = &PARM_NOALIAS.  */
	  lhsc.var = p->id;
	  lhsc.type = SCALAR;
	  lhsc.offset = 0;
	  rhsc.var = vi->id;
	  rhsc.type = ADDRESSOF;
	  rhsc.offset = 0;
	  process_constraint (new_constraint (lhsc, rhsc));
	  for (; vi; vi = vi->next)
	    if (vi->may_have_pointers)
	      {
		if (vi->only_restrict_pointers)
		  make_constraint_from_global_restrict (vi,
							"GLOBAL_RESTRICT");
		else
		  make_copy_constraint (vi, nonlocal_id);
	      }
	  continue;
	}

      if (POINTER_TYPE_P (TREE_TYPE (t))
	  && TYPE_RESTRICT (TREE_TYPE (t)))
	make_constraint_from_global_restrict (p, "PARM_RESTRICT");
      else
	{
	  for (; p; p = p->next)
	    {
	      if (p->only_restrict_pointers)
		make_constraint_from_global_restrict (p, "PARM_RESTRICT");
	      else if (p->may_have_pointers)
		make_constraint_from (p, nonlocal_id);
	    }
	}
    }

  /* Add a constraint for a result decl that is passed by reference.  */
  if (DECL_RESULT (cfun->decl)
      && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
    {
      varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));

      for (p = result_vi; p; p = p->next)
	make_constraint_from (p, nonlocal_id);
    }

  /* Add a constraint for the incoming static chain parameter.  */
  if (cfun->static_chain_decl != NULL_TREE)
    {
      varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);

      for (p = chain_vi; p; p = p->next)
	make_constraint_from (p, nonlocal_id);
    }
}
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  bitmap pt_vars;
  hashval_t hashcode;
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;

/* Hashtable of shared solution bitmaps, keyed by bitmap contents.  */
static htab_t shared_bitmap_table;
/* Hash function for a shared_bitmap_info_t.  */

static hashval_t
shared_bitmap_hash (const void *p)
{
  const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;

  /* The hash was precomputed from the bitmap contents.  */
  return bi->hashcode;
}
/* Equality function for two shared_bitmap_info_t's.  */

static int
shared_bitmap_eq (const void *p1, const void *p2)
{
  const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
  const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;

  return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
}
/* Lookup a bitmap in the shared bitmap hashtable, and return an already
   existing instance if there is one, NULL otherwise.  */

static bitmap
shared_bitmap_lookup (bitmap pt_vars)
{
  void **slot;
  struct shared_bitmap_info sbi;

  sbi.pt_vars = pt_vars;
  sbi.hashcode = bitmap_hash (pt_vars);

  slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
				   sbi.hashcode, NO_INSERT);
  if (!slot)
    return NULL;
  else
    return ((shared_bitmap_info_t) *slot)->pt_vars;
}
/* Add a bitmap to the shared bitmap hashtable.  The bitmap must not
   already be present (asserted below).  */

static void
shared_bitmap_add (bitmap pt_vars)
{
  void **slot;
  shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);

  sbi->pt_vars = pt_vars;
  sbi->hashcode = bitmap_hash (pt_vars);

  slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
				   sbi->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = (void *) sbi;
}
/* Set bits in INTO corresponding to the variable uids in solution set FROM.
   Also records in PT whether the set contains global variables.  */

static void
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
{
  unsigned int i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* The only artificial variables that are allowed in a may-alias
	 set are heap variables.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;

      if (TREE_CODE (vi->decl) == VAR_DECL
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* If we are in IPA mode we will not recompute points-to
	     sets after inlining so make sure they stay valid.  */
	  if (in_ipa_mode
	      && !DECL_PT_UID_SET_P (vi->decl))
	    SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));

	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables.  */
	  bitmap_set_bit (into, DECL_PT_UID (vi->decl));
	  if (vi->is_global_var)
	    pt->vars_contains_global = true;
	}
    }
}
/* Compute the points-to solution *PT for the variable VI.  */

static void
find_what_var_points_to (varinfo_t orig_vi, struct pt_solution *pt)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;

  memset (pt, 0, sizeof (struct pt_solution));

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (find (orig_vi->id));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->is_heap_var)
	    /* We represent heapvars in the points-to set properly.  */
	    ;
	  else if (vi->id == readonly_id)
	    /* Nobody cares.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything)
    return;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (finished_solution, vi->solution, pt);
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      pt->vars = result;
      bitmap_clear (finished_solution);
    }
}
/* Given a pointer variable P, fill in its points-to set.  */

static void
find_what_p_points_to (tree p)
{
  struct ptr_info_def *pi;
  tree lookup_p = p;
  varinfo_t vi;

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (p) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (p)
      && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
    lookup_p = SSA_NAME_VAR (p);

  vi = lookup_vi_for_tree (lookup_p);
  if (!vi)
    return;

  pi = get_ptr_info (p);
  find_what_var_points_to (vi, &pi->pt);
}
/* Query statistics for points-to solutions.  */

static struct {
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
/* Dump points-to query statistics (disambiguation counts for the
   includes/intersect predicates) to stream S.  */

void
dump_pta_stats (FILE *s)
{
  fprintf (s, "\nPTA query stats:\n");
  fprintf (s, "  pt_solution_includes: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   pta_stats.pt_solution_includes_no_alias,
	   pta_stats.pt_solution_includes_no_alias
	   + pta_stats.pt_solution_includes_may_alias);
  fprintf (s, "  pt_solutions_intersect: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   pta_stats.pt_solutions_intersect_no_alias,
	   pta_stats.pt_solutions_intersect_no_alias
	   + pta_stats.pt_solutions_intersect_may_alias);
}
/* Reset the points-to solution *PT to a conservative default
   (point to anything).  */

void
pt_solution_reset (struct pt_solution *pt)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->anything = true;
}
/* Set the points-to solution *PT to point only to the variables
   in VARS.  VARS_CONTAINS_GLOBAL specifies whether that contains
   global variables and VARS_CONTAINS_RESTRICT specifies whether
   it contains restrict tag variables.  */

void
pt_solution_set (struct pt_solution *pt, bitmap vars, bool vars_contains_global)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = vars;
  pt->vars_contains_global = vars_contains_global;
}
/* Set the points-to solution *PT to point only to the variable VAR.  */

void
pt_solution_set_var (struct pt_solution *pt, tree var)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = BITMAP_GGC_ALLOC ();
  bitmap_set_bit (pt->vars, DECL_PT_UID (var));
  pt->vars_contains_global = is_global_var (var);
}
/* Computes the union of the points-to solutions *DEST and *SRC and
   stores the result in *DEST.  This changes the points-to bitmap
   of *DEST and thus may not be used if that might be shared.
   The points-to bitmap of *SRC and *DEST will not be shared after
   this function if they were not before.  */

void
pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
{
  dest->anything |= src->anything;
  if (dest->anything)
    {
      /* "anything" subsumes everything else; collapse to the
	 conservative default.  */
      pt_solution_reset (dest);
      return;
    }

  dest->nonlocal |= src->nonlocal;
  dest->escaped |= src->escaped;
  dest->ipa_escaped |= src->ipa_escaped;
  dest->null |= src->null;
  dest->vars_contains_global |= src->vars_contains_global;
  if (!src->vars)
    return;

  if (!dest->vars)
    dest->vars = BITMAP_GGC_ALLOC ();
  bitmap_ior_into (dest->vars, src->vars);
}
/* Return true if the points-to solution *PT is empty.  */

bool
pt_solution_empty_p (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal)
    return false;

  if (pt->vars
      && !bitmap_empty_p (pt->vars))
    return false;

  /* If the solution includes ESCAPED, check if that is empty.  */
  if (pt->escaped
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    return false;

  /* If the solution includes ESCAPED, check if that is empty.  */
  if (pt->ipa_escaped
      && !pt_solution_empty_p (&ipa_escaped_pt))
    return false;

  return true;
}
/* Return true if the points-to solution *PT only point to a single var, and
   return the var uid in *UID.  */

bool
pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
{
  if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
      || pt->null || pt->vars == NULL
      || !bitmap_single_bit_set_p (pt->vars))
    return false;

  *uid = bitmap_first_set_bit (pt->vars);
  return true;
}
/* Return true if the points-to solution *PT includes global memory.  */

bool
pt_solution_includes_global (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal
      || pt->vars_contains_global)
    return true;

  if (pt->escaped)
    return pt_solution_includes_global (&cfun->gimple_df->escaped);

  if (pt->ipa_escaped)
    return pt_solution_includes_global (&ipa_escaped_pt);

  /* ???  This predicate is not correct for the IPA-PTA solution
     as we do not properly distinguish between unit escape points
     and global variables.  */
  if (cfun->gimple_df->ipa_pta)
    return true;

  return false;
}
/* Return true if the points-to solution *PT includes the variable
   declaration DECL.  */

static bool
pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
{
  if (pt->anything)
    return true;

  if (pt->nonlocal
      && is_global_var (decl))
    return true;

  if (pt->vars
      && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
    return true;

  /* If the solution includes ESCAPED, check it.  */
  if (pt->escaped
      && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
    return true;

  /* If the solution includes ESCAPED, check it.  */
  if (pt->ipa_escaped
      && pt_solution_includes_1 (&ipa_escaped_pt, decl))
    return true;

  return false;
}
/* Wrapper around pt_solution_includes_1 that also updates the
   points-to query statistics.  */

bool
pt_solution_includes (struct pt_solution *pt, const_tree decl)
{
  bool res = pt_solution_includes_1 (pt, decl);
  if (res)
    ++pta_stats.pt_solution_includes_may_alias;
  else
    ++pta_stats.pt_solution_includes_no_alias;
  return res;
}
/* Return true if both points-to solutions PT1 and PT2 have a non-empty
   intersection.  */

static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias.  */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_global))
      || (pt2->nonlocal
	  && pt1->vars_contains_global))
    return true;

  /* Check the escaped solution if required.  */
  if ((pt1->escaped || pt2->escaped)
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->escaped && pt2->escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->escaped
	   && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
	  || (pt2->escaped
	      && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
	return true;
    }

  /* Check the escaped solution if required.
     ???  Do we need to check the local against the IPA escaped sets?  */
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
      && !pt_solution_empty_p (&ipa_escaped_pt))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->ipa_escaped && pt2->ipa_escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->ipa_escaped
	   && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
	  || (pt2->ipa_escaped
	      && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects.  */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}
/* Wrapper around pt_solutions_intersect_1 that also updates the
   points-to query statistics.  */

bool
pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
{
  bool res = pt_solutions_intersect_1 (pt1, pt2);
  if (res)
    ++pta_stats.pt_solutions_intersect_may_alias;
  else
    ++pta_stats.pt_solutions_intersect_no_alias;
  return res;
}
/* Dump points-to information to OUTFILE.  */

static void
dump_sa_points_to_info (FILE *outfile)
{
  unsigned int i;

  fprintf (outfile, "\nPoints-to sets\n\n");

  if (dump_flags & TDF_STATS)
    {
      fprintf (outfile, "Stats:\n");
      fprintf (outfile, "Total vars:               %d\n", stats.total_vars);
      fprintf (outfile, "Non-pointer vars:          %d\n",
	       stats.nonpointer_vars);
      fprintf (outfile, "Statically unified vars:  %d\n",
	       stats.unified_vars_static);
      fprintf (outfile, "Dynamically unified vars: %d\n",
	       stats.unified_vars_dynamic);
      fprintf (outfile, "Iterations:               %d\n", stats.iterations);
      fprintf (outfile, "Number of edges:          %d\n", stats.num_edges);
      fprintf (outfile, "Number of implicit edges: %d\n",
	       stats.num_implicit_edges);
    }

  for (i = 0; i < varmap.length (); i++)
    {
      varinfo_t vi = get_varinfo (i);
      /* Solutions are only tracked for variables that may contain
	 pointers.  */
      if (!vi->may_have_pointers)
	continue;
      dump_solution_for_var (outfile, i);
    }
}
/* Debug points-to information to stderr.  */

DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
6272 /* Initialize the always-existing constraint variables for NULL
6273 ANYTHING, READONLY, and INTEGER */
6276 init_base_vars (void)
6278 struct constraint_expr lhs
, rhs
;
6279 varinfo_t var_anything
;
6280 varinfo_t var_nothing
;
6281 varinfo_t var_readonly
;
6282 varinfo_t var_escaped
;
6283 varinfo_t var_nonlocal
;
6284 varinfo_t var_storedanything
;
6285 varinfo_t var_integer
;
6287 /* Create the NULL variable, used to represent that a variable points
6289 var_nothing
= new_var_info (NULL_TREE
, "NULL");
6290 gcc_assert (var_nothing
->id
== nothing_id
);
6291 var_nothing
->is_artificial_var
= 1;
6292 var_nothing
->offset
= 0;
6293 var_nothing
->size
= ~0;
6294 var_nothing
->fullsize
= ~0;
6295 var_nothing
->is_special_var
= 1;
6296 var_nothing
->may_have_pointers
= 0;
6297 var_nothing
->is_global_var
= 0;
6299 /* Create the ANYTHING variable, used to represent that a variable
6300 points to some unknown piece of memory. */
6301 var_anything
= new_var_info (NULL_TREE
, "ANYTHING");
6302 gcc_assert (var_anything
->id
== anything_id
);
6303 var_anything
->is_artificial_var
= 1;
6304 var_anything
->size
= ~0;
6305 var_anything
->offset
= 0;
6306 var_anything
->next
= NULL
;
6307 var_anything
->fullsize
= ~0;
6308 var_anything
->is_special_var
= 1;
6310 /* Anything points to anything. This makes deref constraints just
6311 work in the presence of linked list and other p = *p type loops,
6312 by saying that *ANYTHING = ANYTHING. */
6314 lhs
.var
= anything_id
;
6316 rhs
.type
= ADDRESSOF
;
6317 rhs
.var
= anything_id
;
6320 /* This specifically does not use process_constraint because
6321 process_constraint ignores all anything = anything constraints, since all
6322 but this one are redundant. */
6323 constraints
.safe_push (new_constraint (lhs
, rhs
));
6325 /* Create the READONLY variable, used to represent that a variable
6326 points to readonly memory. */
6327 var_readonly
= new_var_info (NULL_TREE
, "READONLY");
6328 gcc_assert (var_readonly
->id
== readonly_id
);
6329 var_readonly
->is_artificial_var
= 1;
6330 var_readonly
->offset
= 0;
6331 var_readonly
->size
= ~0;
6332 var_readonly
->fullsize
= ~0;
6333 var_readonly
->next
= NULL
;
6334 var_readonly
->is_special_var
= 1;
6336 /* readonly memory points to anything, in order to make deref
6337 easier. In reality, it points to anything the particular
6338 readonly variable can point to, but we don't track this
6341 lhs
.var
= readonly_id
;
6343 rhs
.type
= ADDRESSOF
;
6344 rhs
.var
= readonly_id
; /* FIXME */
6346 process_constraint (new_constraint (lhs
, rhs
));
6348 /* Create the ESCAPED variable, used to represent the set of escaped
6350 var_escaped
= new_var_info (NULL_TREE
, "ESCAPED");
6351 gcc_assert (var_escaped
->id
== escaped_id
);
6352 var_escaped
->is_artificial_var
= 1;
6353 var_escaped
->offset
= 0;
6354 var_escaped
->size
= ~0;
6355 var_escaped
->fullsize
= ~0;
6356 var_escaped
->is_special_var
= 0;
6358 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6360 var_nonlocal
= new_var_info (NULL_TREE
, "NONLOCAL");
6361 gcc_assert (var_nonlocal
->id
== nonlocal_id
);
6362 var_nonlocal
->is_artificial_var
= 1;
6363 var_nonlocal
->offset
= 0;
6364 var_nonlocal
->size
= ~0;
6365 var_nonlocal
->fullsize
= ~0;
6366 var_nonlocal
->is_special_var
= 1;
6368 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6370 lhs
.var
= escaped_id
;
6373 rhs
.var
= escaped_id
;
6375 process_constraint (new_constraint (lhs
, rhs
));
6377 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6378 whole variable escapes. */
6380 lhs
.var
= escaped_id
;
6383 rhs
.var
= escaped_id
;
6384 rhs
.offset
= UNKNOWN_OFFSET
;
6385 process_constraint (new_constraint (lhs
, rhs
));
6387 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6388 everything pointed to by escaped points to what global memory can
6391 lhs
.var
= escaped_id
;
6394 rhs
.var
= nonlocal_id
;
6396 process_constraint (new_constraint (lhs
, rhs
));
6398 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6399 global memory may point to global memory and escaped memory. */
6401 lhs
.var
= nonlocal_id
;
6403 rhs
.type
= ADDRESSOF
;
6404 rhs
.var
= nonlocal_id
;
6406 process_constraint (new_constraint (lhs
, rhs
));
6407 rhs
.type
= ADDRESSOF
;
6408 rhs
.var
= escaped_id
;
6410 process_constraint (new_constraint (lhs
, rhs
));
6412 /* Create the STOREDANYTHING variable, used to represent the set of
6413 variables stored to *ANYTHING. */
6414 var_storedanything
= new_var_info (NULL_TREE
, "STOREDANYTHING");
6415 gcc_assert (var_storedanything
->id
== storedanything_id
);
6416 var_storedanything
->is_artificial_var
= 1;
6417 var_storedanything
->offset
= 0;
6418 var_storedanything
->size
= ~0;
6419 var_storedanything
->fullsize
= ~0;
6420 var_storedanything
->is_special_var
= 0;
6422 /* Create the INTEGER variable, used to represent that a variable points
6423 to what an INTEGER "points to". */
6424 var_integer
= new_var_info (NULL_TREE
, "INTEGER");
6425 gcc_assert (var_integer
->id
== integer_id
);
6426 var_integer
->is_artificial_var
= 1;
6427 var_integer
->size
= ~0;
6428 var_integer
->fullsize
= ~0;
6429 var_integer
->offset
= 0;
6430 var_integer
->next
= NULL
;
6431 var_integer
->is_special_var
= 1;
6433 /* INTEGER = ANYTHING, because we don't know where a dereference of
6434 a random integer will point to. */
6436 lhs
.var
= integer_id
;
6438 rhs
.type
= ADDRESSOF
;
6439 rhs
.var
= anything_id
;
6441 process_constraint (new_constraint (lhs
, rhs
));
6444 /* Initialize things necessary to perform PTA */
6447 init_alias_vars (void)
6449 use_field_sensitive
= (MAX_FIELDS_FOR_FIELD_SENSITIVE
> 1);
6451 bitmap_obstack_initialize (&pta_obstack
);
6452 bitmap_obstack_initialize (&oldpta_obstack
);
6453 bitmap_obstack_initialize (&predbitmap_obstack
);
6455 constraint_pool
= create_alloc_pool ("Constraint pool",
6456 sizeof (struct constraint
), 30);
6457 variable_info_pool
= create_alloc_pool ("Variable info pool",
6458 sizeof (struct variable_info
), 30);
6459 constraints
.create (8);
6461 vi_for_tree
= pointer_map_create ();
6462 call_stmt_vars
= pointer_map_create ();
6464 memset (&stats
, 0, sizeof (stats
));
6465 shared_bitmap_table
= htab_create (511, shared_bitmap_hash
,
6466 shared_bitmap_eq
, free
);
6469 gcc_obstack_init (&fake_var_decl_obstack
);
6472 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6473 predecessor edges. */
6476 remove_preds_and_fake_succs (constraint_graph_t graph
)
6480 /* Clear the implicit ref and address nodes from the successor
6482 for (i
= 0; i
< FIRST_REF_NODE
; i
++)
6484 if (graph
->succs
[i
])
6485 bitmap_clear_range (graph
->succs
[i
], FIRST_REF_NODE
,
6486 FIRST_REF_NODE
* 2);
6489 /* Free the successor list for the non-ref nodes. */
6490 for (i
= FIRST_REF_NODE
; i
< graph
->size
; i
++)
6492 if (graph
->succs
[i
])
6493 BITMAP_FREE (graph
->succs
[i
]);
6496 /* Now reallocate the size of the successor list as, and blow away
6497 the predecessor bitmaps. */
6498 graph
->size
= varmap
.length ();
6499 graph
->succs
= XRESIZEVEC (bitmap
, graph
->succs
, graph
->size
);
6501 free (graph
->implicit_preds
);
6502 graph
->implicit_preds
= NULL
;
6503 free (graph
->preds
);
6504 graph
->preds
= NULL
;
6505 bitmap_obstack_release (&predbitmap_obstack
);
6508 /* Solve the constraint set. */
6511 solve_constraints (void)
6513 struct scc_info
*si
;
6517 "\nCollapsing static cycles and doing variable "
6520 init_graph (varmap
.length () * 2);
6523 fprintf (dump_file
, "Building predecessor graph\n");
6524 build_pred_graph ();
6527 fprintf (dump_file
, "Detecting pointer and location "
6529 si
= perform_var_substitution (graph
);
6532 fprintf (dump_file
, "Rewriting constraints and unifying "
6534 rewrite_constraints (graph
, si
);
6536 build_succ_graph ();
6538 free_var_substitution_info (si
);
6540 /* Attach complex constraints to graph nodes. */
6541 move_complex_constraints (graph
);
6544 fprintf (dump_file
, "Uniting pointer but not location equivalent "
6546 unite_pointer_equivalences (graph
);
6549 fprintf (dump_file
, "Finding indirect cycles\n");
6550 find_indirect_cycles (graph
);
6552 /* Implicit nodes and predecessors are no longer necessary at this
6554 remove_preds_and_fake_succs (graph
);
6556 if (dump_file
&& (dump_flags
& TDF_GRAPH
))
6558 fprintf (dump_file
, "\n\n// The constraint graph before solve-graph "
6559 "in dot format:\n");
6560 dump_constraint_graph (dump_file
);
6561 fprintf (dump_file
, "\n\n");
6565 fprintf (dump_file
, "Solving graph\n");
6567 solve_graph (graph
);
6569 if (dump_file
&& (dump_flags
& TDF_GRAPH
))
6571 fprintf (dump_file
, "\n\n// The constraint graph after solve-graph "
6572 "in dot format:\n");
6573 dump_constraint_graph (dump_file
);
6574 fprintf (dump_file
, "\n\n");
6578 dump_sa_points_to_info (dump_file
);
6581 /* Create points-to sets for the current function. See the comments
6582 at the start of the file for an algorithmic overview. */
6585 compute_points_to_sets (void)
6591 timevar_push (TV_TREE_PTA
);
6595 intra_create_variable_infos ();
6597 /* Now walk all statements and build the constraint set. */
6600 gimple_stmt_iterator gsi
;
6602 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6604 gimple phi
= gsi_stmt (gsi
);
6606 if (! virtual_operand_p (gimple_phi_result (phi
)))
6607 find_func_aliases (phi
);
6610 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6612 gimple stmt
= gsi_stmt (gsi
);
6614 find_func_aliases (stmt
);
6620 fprintf (dump_file
, "Points-to analysis\n\nConstraints:\n\n");
6621 dump_constraints (dump_file
, 0);
6624 /* From the constraints compute the points-to sets. */
6625 solve_constraints ();
6627 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
6628 find_what_var_points_to (get_varinfo (escaped_id
),
6629 &cfun
->gimple_df
->escaped
);
6631 /* Make sure the ESCAPED solution (which is used as placeholder in
6632 other solutions) does not reference itself. This simplifies
6633 points-to solution queries. */
6634 cfun
->gimple_df
->escaped
.escaped
= 0;
6636 /* Mark escaped HEAP variables as global. */
6637 FOR_EACH_VEC_ELT (varmap
, i
, vi
)
6639 && !vi
->is_global_var
)
6640 DECL_EXTERNAL (vi
->decl
) = vi
->is_global_var
6641 = pt_solution_includes (&cfun
->gimple_df
->escaped
, vi
->decl
);
6643 /* Compute the points-to sets for pointer SSA_NAMEs. */
6644 for (i
= 0; i
< num_ssa_names
; ++i
)
6646 tree ptr
= ssa_name (i
);
6648 && POINTER_TYPE_P (TREE_TYPE (ptr
)))
6649 find_what_p_points_to (ptr
);
6652 /* Compute the call-used/clobbered sets. */
6655 gimple_stmt_iterator gsi
;
6657 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6659 gimple stmt
= gsi_stmt (gsi
);
6660 struct pt_solution
*pt
;
6661 if (!is_gimple_call (stmt
))
6664 pt
= gimple_call_use_set (stmt
);
6665 if (gimple_call_flags (stmt
) & ECF_CONST
)
6666 memset (pt
, 0, sizeof (struct pt_solution
));
6667 else if ((vi
= lookup_call_use_vi (stmt
)) != NULL
)
6669 find_what_var_points_to (vi
, pt
);
6670 /* Escaped (and thus nonlocal) variables are always
6671 implicitly used by calls. */
6672 /* ??? ESCAPED can be empty even though NONLOCAL
6679 /* If there is nothing special about this call then
6680 we have made everything that is used also escape. */
6681 *pt
= cfun
->gimple_df
->escaped
;
6685 pt
= gimple_call_clobber_set (stmt
);
6686 if (gimple_call_flags (stmt
) & (ECF_CONST
|ECF_PURE
|ECF_NOVOPS
))
6687 memset (pt
, 0, sizeof (struct pt_solution
));
6688 else if ((vi
= lookup_call_clobber_vi (stmt
)) != NULL
)
6690 find_what_var_points_to (vi
, pt
);
6691 /* Escaped (and thus nonlocal) variables are always
6692 implicitly clobbered by calls. */
6693 /* ??? ESCAPED can be empty even though NONLOCAL
6700 /* If there is nothing special about this call then
6701 we have made everything that is used also escape. */
6702 *pt
= cfun
->gimple_df
->escaped
;
6708 timevar_pop (TV_TREE_PTA
);
6712 /* Delete created points-to sets. */
6715 delete_points_to_sets (void)
6719 htab_delete (shared_bitmap_table
);
6720 if (dump_file
&& (dump_flags
& TDF_STATS
))
6721 fprintf (dump_file
, "Points to sets created:%d\n",
6722 stats
.points_to_sets_created
);
6724 pointer_map_destroy (vi_for_tree
);
6725 pointer_map_destroy (call_stmt_vars
);
6726 bitmap_obstack_release (&pta_obstack
);
6727 constraints
.release ();
6729 for (i
= 0; i
< graph
->size
; i
++)
6730 graph
->complex[i
].release ();
6731 free (graph
->complex);
6734 free (graph
->succs
);
6736 free (graph
->pe_rep
);
6737 free (graph
->indirect_cycles
);
6741 free_alloc_pool (variable_info_pool
);
6742 free_alloc_pool (constraint_pool
);
6744 obstack_free (&fake_var_decl_obstack
, NULL
);
6748 /* Compute points-to information for every SSA_NAME pointer in the
6749 current function and compute the transitive closure of escaped
6750 variables to re-initialize the call-clobber states of local variables. */
6753 compute_may_aliases (void)
6755 if (cfun
->gimple_df
->ipa_pta
)
6759 fprintf (dump_file
, "\nNot re-computing points-to information "
6760 "because IPA points-to information is available.\n\n");
6762 /* But still dump what we have remaining it. */
6763 dump_alias_info (dump_file
);
6769 /* For each pointer P_i, determine the sets of variables that P_i may
6770 point-to. Compute the reachability set of escaped and call-used
6772 compute_points_to_sets ();
6774 /* Debugging dumps. */
6776 dump_alias_info (dump_file
);
6778 /* Deallocate memory used by aliasing data structures and the internal
6779 points-to solution. */
6780 delete_points_to_sets ();
6782 gcc_assert (!need_ssa_update_p (cfun
));
6788 gate_tree_pta (void)
6790 return flag_tree_pta
;
6793 /* A dummy pass to cause points-to information to be computed via
6794 TODO_rebuild_alias. */
6796 struct gimple_opt_pass pass_build_alias
=
6801 OPTGROUP_NONE
, /* optinfo_flags */
6802 gate_tree_pta
, /* gate */
6806 0, /* static_pass_number */
6807 TV_NONE
, /* tv_id */
6808 PROP_cfg
| PROP_ssa
, /* properties_required */
6809 0, /* properties_provided */
6810 0, /* properties_destroyed */
6811 0, /* todo_flags_start */
6812 TODO_rebuild_alias
/* todo_flags_finish */
6816 /* A dummy pass to cause points-to information to be computed via
6817 TODO_rebuild_alias. */
6819 struct gimple_opt_pass pass_build_ealias
=
6823 "ealias", /* name */
6824 OPTGROUP_NONE
, /* optinfo_flags */
6825 gate_tree_pta
, /* gate */
6829 0, /* static_pass_number */
6830 TV_NONE
, /* tv_id */
6831 PROP_cfg
| PROP_ssa
, /* properties_required */
6832 0, /* properties_provided */
6833 0, /* properties_destroyed */
6834 0, /* todo_flags_start */
6835 TODO_rebuild_alias
/* todo_flags_finish */
6840 /* Return true if we should execute IPA PTA. */
6846 /* Don't bother doing anything if the program has errors. */
6850 /* IPA PTA solutions for ESCAPED. */
6851 struct pt_solution ipa_escaped_pt
6852 = { true, false, false, false, false, false, NULL
};
6854 /* Associate node with varinfo DATA. Worker for
6855 cgraph_for_node_and_aliases. */
6857 associate_varinfo_to_alias (struct cgraph_node
*node
, void *data
)
6859 if (node
->alias
|| node
->thunk
.thunk_p
)
6860 insert_vi_for_tree (node
->symbol
.decl
, (varinfo_t
)data
);
6864 /* Execute the driver for IPA PTA. */
6866 ipa_pta_execute (void)
6868 struct cgraph_node
*node
;
6869 struct varpool_node
*var
;
6876 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6878 dump_symtab (dump_file
);
6879 fprintf (dump_file
, "\n");
6882 /* Build the constraints. */
6883 FOR_EACH_DEFINED_FUNCTION (node
)
6886 /* Nodes without a body are not interesting. Especially do not
6887 visit clones at this point for now - we get duplicate decls
6888 there for inline clones at least. */
6889 if (!cgraph_function_with_gimple_body_p (node
))
6892 gcc_assert (!node
->clone_of
);
6894 vi
= create_function_info_for (node
->symbol
.decl
,
6895 alias_get_name (node
->symbol
.decl
));
6896 cgraph_for_node_and_aliases (node
, associate_varinfo_to_alias
, vi
, true);
6899 /* Create constraints for global variables and their initializers. */
6900 FOR_EACH_VARIABLE (var
)
6905 get_vi_for_tree (var
->symbol
.decl
);
6911 "Generating constraints for global initializers\n\n");
6912 dump_constraints (dump_file
, 0);
6913 fprintf (dump_file
, "\n");
6915 from
= constraints
.length ();
6917 FOR_EACH_DEFINED_FUNCTION (node
)
6919 struct function
*func
;
6922 /* Nodes without a body are not interesting. */
6923 if (!cgraph_function_with_gimple_body_p (node
))
6929 "Generating constraints for %s", cgraph_node_name (node
));
6930 if (DECL_ASSEMBLER_NAME_SET_P (node
->symbol
.decl
))
6931 fprintf (dump_file
, " (%s)",
6933 (DECL_ASSEMBLER_NAME (node
->symbol
.decl
)));
6934 fprintf (dump_file
, "\n");
6937 func
= DECL_STRUCT_FUNCTION (node
->symbol
.decl
);
6940 /* For externally visible or attribute used annotated functions use
6941 local constraints for their arguments.
6942 For local functions we see all callers and thus do not need initial
6943 constraints for parameters. */
6944 if (node
->symbol
.used_from_other_partition
6945 || node
->symbol
.externally_visible
6946 || node
->symbol
.force_output
)
6948 intra_create_variable_infos ();
6950 /* We also need to make function return values escape. Nothing
6951 escapes by returning from main though. */
6952 if (!MAIN_NAME_P (DECL_NAME (node
->symbol
.decl
)))
6955 fi
= lookup_vi_for_tree (node
->symbol
.decl
);
6956 rvi
= first_vi_for_offset (fi
, fi_result
);
6957 if (rvi
&& rvi
->offset
== fi_result
)
6959 struct constraint_expr includes
;
6960 struct constraint_expr var
;
6961 includes
.var
= escaped_id
;
6962 includes
.offset
= 0;
6963 includes
.type
= SCALAR
;
6967 process_constraint (new_constraint (includes
, var
));
6972 /* Build constriants for the function body. */
6973 FOR_EACH_BB_FN (bb
, func
)
6975 gimple_stmt_iterator gsi
;
6977 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
);
6980 gimple phi
= gsi_stmt (gsi
);
6982 if (! virtual_operand_p (gimple_phi_result (phi
)))
6983 find_func_aliases (phi
);
6986 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6988 gimple stmt
= gsi_stmt (gsi
);
6990 find_func_aliases (stmt
);
6991 find_func_clobbers (stmt
);
6999 fprintf (dump_file
, "\n");
7000 dump_constraints (dump_file
, from
);
7001 fprintf (dump_file
, "\n");
7003 from
= constraints
.length ();
7006 /* From the constraints compute the points-to sets. */
7007 solve_constraints ();
7009 /* Compute the global points-to sets for ESCAPED.
7010 ??? Note that the computed escape set is not correct
7011 for the whole unit as we fail to consider graph edges to
7012 externally visible functions. */
7013 find_what_var_points_to (get_varinfo (escaped_id
), &ipa_escaped_pt
);
7015 /* Make sure the ESCAPED solution (which is used as placeholder in
7016 other solutions) does not reference itself. This simplifies
7017 points-to solution queries. */
7018 ipa_escaped_pt
.ipa_escaped
= 0;
7020 /* Assign the points-to sets to the SSA names in the unit. */
7021 FOR_EACH_DEFINED_FUNCTION (node
)
7024 struct function
*fn
;
7028 struct pt_solution uses
, clobbers
;
7029 struct cgraph_edge
*e
;
7031 /* Nodes without a body are not interesting. */
7032 if (!cgraph_function_with_gimple_body_p (node
))
7035 fn
= DECL_STRUCT_FUNCTION (node
->symbol
.decl
);
7037 /* Compute the points-to sets for pointer SSA_NAMEs. */
7038 FOR_EACH_VEC_ELT (*fn
->gimple_df
->ssa_names
, i
, ptr
)
7041 && POINTER_TYPE_P (TREE_TYPE (ptr
)))
7042 find_what_p_points_to (ptr
);
7045 /* Compute the call-use and call-clobber sets for all direct calls. */
7046 fi
= lookup_vi_for_tree (node
->symbol
.decl
);
7047 gcc_assert (fi
->is_fn_info
);
7048 find_what_var_points_to (first_vi_for_offset (fi
, fi_clobbers
),
7050 find_what_var_points_to (first_vi_for_offset (fi
, fi_uses
), &uses
);
7051 for (e
= node
->callers
; e
; e
= e
->next_caller
)
7056 *gimple_call_clobber_set (e
->call_stmt
) = clobbers
;
7057 *gimple_call_use_set (e
->call_stmt
) = uses
;
7060 /* Compute the call-use and call-clobber sets for indirect calls
7061 and calls to external functions. */
7062 FOR_EACH_BB_FN (bb
, fn
)
7064 gimple_stmt_iterator gsi
;
7066 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
7068 gimple stmt
= gsi_stmt (gsi
);
7069 struct pt_solution
*pt
;
7073 if (!is_gimple_call (stmt
))
7076 /* Handle direct calls to external functions. */
7077 decl
= gimple_call_fndecl (stmt
);
7079 && (!(fi
= lookup_vi_for_tree (decl
))
7080 || !fi
->is_fn_info
))
7082 pt
= gimple_call_use_set (stmt
);
7083 if (gimple_call_flags (stmt
) & ECF_CONST
)
7084 memset (pt
, 0, sizeof (struct pt_solution
));
7085 else if ((vi
= lookup_call_use_vi (stmt
)) != NULL
)
7087 find_what_var_points_to (vi
, pt
);
7088 /* Escaped (and thus nonlocal) variables are always
7089 implicitly used by calls. */
7090 /* ??? ESCAPED can be empty even though NONLOCAL
7093 pt
->ipa_escaped
= 1;
7097 /* If there is nothing special about this call then
7098 we have made everything that is used also escape. */
7099 *pt
= ipa_escaped_pt
;
7103 pt
= gimple_call_clobber_set (stmt
);
7104 if (gimple_call_flags (stmt
) & (ECF_CONST
|ECF_PURE
|ECF_NOVOPS
))
7105 memset (pt
, 0, sizeof (struct pt_solution
));
7106 else if ((vi
= lookup_call_clobber_vi (stmt
)) != NULL
)
7108 find_what_var_points_to (vi
, pt
);
7109 /* Escaped (and thus nonlocal) variables are always
7110 implicitly clobbered by calls. */
7111 /* ??? ESCAPED can be empty even though NONLOCAL
7114 pt
->ipa_escaped
= 1;
7118 /* If there is nothing special about this call then
7119 we have made everything that is used also escape. */
7120 *pt
= ipa_escaped_pt
;
7125 /* Handle indirect calls. */
7127 && (fi
= get_fi_for_callee (stmt
)))
7129 /* We need to accumulate all clobbers/uses of all possible
7131 fi
= get_varinfo (find (fi
->id
));
7132 /* If we cannot constrain the set of functions we'll end up
7133 calling we end up using/clobbering everything. */
7134 if (bitmap_bit_p (fi
->solution
, anything_id
)
7135 || bitmap_bit_p (fi
->solution
, nonlocal_id
)
7136 || bitmap_bit_p (fi
->solution
, escaped_id
))
7138 pt_solution_reset (gimple_call_clobber_set (stmt
));
7139 pt_solution_reset (gimple_call_use_set (stmt
));
7145 struct pt_solution
*uses
, *clobbers
;
7147 uses
= gimple_call_use_set (stmt
);
7148 clobbers
= gimple_call_clobber_set (stmt
);
7149 memset (uses
, 0, sizeof (struct pt_solution
));
7150 memset (clobbers
, 0, sizeof (struct pt_solution
));
7151 EXECUTE_IF_SET_IN_BITMAP (fi
->solution
, 0, i
, bi
)
7153 struct pt_solution sol
;
7155 vi
= get_varinfo (i
);
7156 if (!vi
->is_fn_info
)
7158 /* ??? We could be more precise here? */
7160 uses
->ipa_escaped
= 1;
7161 clobbers
->nonlocal
= 1;
7162 clobbers
->ipa_escaped
= 1;
7166 if (!uses
->anything
)
7168 find_what_var_points_to
7169 (first_vi_for_offset (vi
, fi_uses
), &sol
);
7170 pt_solution_ior_into (uses
, &sol
);
7172 if (!clobbers
->anything
)
7174 find_what_var_points_to
7175 (first_vi_for_offset (vi
, fi_clobbers
), &sol
);
7176 pt_solution_ior_into (clobbers
, &sol
);
7184 fn
->gimple_df
->ipa_pta
= true;
7187 delete_points_to_sets ();
7194 struct simple_ipa_opt_pass pass_ipa_pta
=
7199 OPTGROUP_NONE
, /* optinfo_flags */
7200 gate_ipa_pta
, /* gate */
7201 ipa_pta_execute
, /* execute */
7204 0, /* static_pass_number */
7205 TV_IPA_PTA
, /* tv_id */
7206 0, /* properties_required */
7207 0, /* properties_provided */
7208 0, /* properties_destroyed */
7209 0, /* todo_flags_start */
7210 TODO_update_ssa
/* todo_flags_finish */