2013-01-30 Richard Biener <rguenther@suse.de>
[official-gcc.git] / gcc / tree-ssa-structalias.c
blob1bbe1cc5ae087c2704a13199ba7d851d5c896c33
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "flags.h"
29 #include "basic-block.h"
30 #include "tree.h"
31 #include "tree-flow.h"
32 #include "tree-inline.h"
33 #include "diagnostic-core.h"
34 #include "gimple.h"
35 #include "hashtab.h"
36 #include "function.h"
37 #include "cgraph.h"
38 #include "tree-pass.h"
39 #include "alloc-pool.h"
40 #include "splay-tree.h"
41 #include "params.h"
42 #include "cgraph.h"
43 #include "alias.h"
44 #include "pointer-set.h"
46 /* The idea behind this analyzer is to generate set constraints from the
47 program, then solve the resulting constraints in order to generate the
48 points-to sets.
50 Set constraints are a way of modeling program analysis problems that
51 involve sets. They consist of an inclusion constraint language,
52 describing the variables (each variable is a set) and operations that
53 are involved on the variables, and a set of rules that derive facts
54 from these operations. To solve a system of set constraints, you derive
55 all possible facts under the rules, which gives you the correct sets
56 as a consequence.
58 See "Efficient Field-sensitive pointer analysis for C" by "David
59 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
60 http://citeseer.ist.psu.edu/pearce04efficient.html
62 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
63 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
64 http://citeseer.ist.psu.edu/heintze01ultrafast.html
66 There are three types of real constraint expressions, DEREF,
67 ADDRESSOF, and SCALAR. Each constraint expression consists
68 of a constraint type, a variable, and an offset.
70 SCALAR is a constraint expression type used to represent x, whether
71 it appears on the LHS or the RHS of a statement.
72 DEREF is a constraint expression type used to represent *x, whether
73 it appears on the LHS or the RHS of a statement.
74 ADDRESSOF is a constraint expression used to represent &x, whether
75 it appears on the LHS or the RHS of a statement.
77 Each pointer variable in the program is assigned an integer id, and
78 each field of a structure variable is assigned an integer id as well.
80 Structure variables are linked to their list of fields through a "next
81 field" in each variable that points to the next field in offset
82 order.
83 Each variable for a structure field has
85 1. "size", that tells the size in bits of that field.
86    2. "fullsize", that tells the size in bits of the entire structure.
87 3. "offset", that tells the offset in bits from the beginning of the
88 structure to this field.
90 Thus,
91 struct f
93 int a;
94 int b;
95 } foo;
96 int *bar;
98 looks like
100 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
101 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
102 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
105 In order to solve the system of set constraints, the following is
106 done:
108 1. Each constraint variable x has a solution set associated with it,
109 Sol(x).
111 2. Constraints are separated into direct, copy, and complex.
112 Direct constraints are ADDRESSOF constraints that require no extra
113 processing, such as P = &Q
114 Copy constraints are those of the form P = Q.
115 Complex constraints are all the constraints involving dereferences
116 and offsets (including offsetted copies).
118 3. All direct constraints of the form P = &Q are processed, such
119 that Q is added to Sol(P)
121 4. All complex constraints for a given constraint variable are stored in a
122 linked list attached to that variable's node.
124 5. A directed graph is built out of the copy constraints. Each
125 constraint variable is a node in the graph, and an edge from
126 Q to P is added for each copy constraint of the form P = Q
128 6. The graph is then walked, and solution sets are
129 propagated along the copy edges, such that an edge from Q to P
130 causes Sol(P) <- Sol(P) union Sol(Q).
132 7. As we visit each node, all complex constraints associated with
133 that node are processed by adding appropriate copy edges to the graph, or the
134 appropriate variables to the solution set.
136 8. The process of walking the graph is iterated until no solution
137 sets change.
139 Prior to walking the graph in steps 6 and 7, We perform static
140 cycle elimination on the constraint graph, as well
141 as off-line variable substitution.
143 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
144 on and turned into anything), but isn't. You can just see what offset
145 inside the pointed-to struct it's going to access.
147 TODO: Constant bounded arrays can be handled as if they were structs of the
148 same number of elements.
150 TODO: Modeling heap and incoming pointers becomes much better if we
151 add fields to them as we discover them, which we could do.
153 TODO: We could handle unions, but to be honest, it's probably not
154 worth the pain or slowdown. */
156 /* IPA-PTA optimizations possible.
158 When the indirect function called is ANYTHING we can add disambiguation
159 based on the function signatures (or simply the parameter count which
160 is the varinfo size). We also do not need to consider functions that
161 do not have their address taken.
163 The is_global_var bit which marks escape points is overly conservative
164 in IPA mode. Split it to is_escape_point and is_global_var - only
165 externally visible globals are escape points in IPA mode. This is
166 also needed to fix the pt_solution_includes_global predicate
167 (and thus ptr_deref_may_alias_global_p).
169 The way we introduce DECL_PT_UID to avoid fixing up all points-to
170 sets in the translation unit when we copy a DECL during inlining
171 pessimizes precision. The advantage is that the DECL_PT_UID keeps
172 compile-time and memory usage overhead low - the points-to sets
173 do not grow or get unshared as they would during a fixup phase.
174 An alternative solution is to delay IPA PTA until after all
175 inlining transformations have been applied.
177 The way we propagate clobber/use information isn't optimized.
178 It should use a new complex constraint that properly filters
179 out local variables of the callee (though that would make
180 the sets invalid after inlining). OTOH we might as well
181 admit defeat to WHOPR and simply do all the clobber/use analysis
182 and propagation after PTA finished but before we threw away
183 points-to information for memory variables. WHOPR and PTA
184 do not play along well anyway - the whole constraint solving
185 would need to be done in WPA phase and it will be very interesting
186 to apply the results to local SSA names during LTRANS phase.
188 We probably should compute a per-function unit-ESCAPE solution
189 propagating it simply like the clobber / uses solutions. The
190    solution can go alongside the non-IPA escaped solution and be
191 used to query which vars escape the unit through a function.
193 We never put function decls in points-to sets so we do not
194 keep the set of called functions for indirect calls.
196 And probably more. */
198 static bool use_field_sensitive = true;
199 static int in_ipa_mode = 0;
201 /* Used for predecessor bitmaps. */
202 static bitmap_obstack predbitmap_obstack;
204 /* Used for points-to sets. */
205 static bitmap_obstack pta_obstack;
207 /* Used for oldsolution members of variables. */
208 static bitmap_obstack oldpta_obstack;
210 /* Used for per-solver-iteration bitmaps. */
211 static bitmap_obstack iteration_obstack;
213 static unsigned int create_variable_info_for (tree, const char *);
214 typedef struct constraint_graph *constraint_graph_t;
215 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
217 struct constraint;
218 typedef struct constraint *constraint_t;
221 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
222 if (a) \
223 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
225 static struct constraint_stats
227 unsigned int total_vars;
228 unsigned int nonpointer_vars;
229 unsigned int unified_vars_static;
230 unsigned int unified_vars_dynamic;
231 unsigned int iterations;
232 unsigned int num_edges;
233 unsigned int num_implicit_edges;
234 unsigned int points_to_sets_created;
235 } stats;
237 struct variable_info
239 /* ID of this variable */
240 unsigned int id;
242 /* True if this is a variable created by the constraint analysis, such as
243 heap variables and constraints we had to break up. */
244 unsigned int is_artificial_var : 1;
246 /* True if this is a special variable whose solution set should not be
247 changed. */
248 unsigned int is_special_var : 1;
250 /* True for variables whose size is not known or variable. */
251 unsigned int is_unknown_size_var : 1;
253 /* True for (sub-)fields that represent a whole variable. */
254 unsigned int is_full_var : 1;
256 /* True if this is a heap variable. */
257 unsigned int is_heap_var : 1;
259 /* True if this field may contain pointers. */
260 unsigned int may_have_pointers : 1;
262 /* True if this field has only restrict qualified pointers. */
263 unsigned int only_restrict_pointers : 1;
265 /* True if this represents a global variable. */
266 unsigned int is_global_var : 1;
268 /* True if this represents a IPA function info. */
269 unsigned int is_fn_info : 1;
271 /* A link to the variable for the next field in this structure. */
272 struct variable_info *next;
274 /* Offset of this variable, in bits, from the base variable */
275 unsigned HOST_WIDE_INT offset;
277 /* Size of the variable, in bits. */
278 unsigned HOST_WIDE_INT size;
280 /* Full size of the base variable, in bits. */
281 unsigned HOST_WIDE_INT fullsize;
283 /* Name of this variable */
284 const char *name;
286 /* Tree that this variable is associated with. */
287 tree decl;
289 /* Points-to set for this variable. */
290 bitmap solution;
292 /* Old points-to set for this variable. */
293 bitmap oldsolution;
295 typedef struct variable_info *varinfo_t;
297 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
298 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
299 unsigned HOST_WIDE_INT);
300 static varinfo_t lookup_vi_for_tree (tree);
301 static inline bool type_can_have_subvars (const_tree);
303 /* Pool of variable info structures. */
304 static alloc_pool variable_info_pool;
306 /* Map varinfo to final pt_solution. */
307 static pointer_map_t *final_solutions;
308 struct obstack final_solutions_obstack;
310 /* Table of variable info structures for constraint variables.
311 Indexed directly by variable info id. */
312 static vec<varinfo_t> varmap;
314 /* Return the varmap element N */
316 static inline varinfo_t
317 get_varinfo (unsigned int n)
319 return varmap[n];
322 /* Static IDs for the special variables. */
323 enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
324 escaped_id = 3, nonlocal_id = 4,
325 storedanything_id = 5, integer_id = 6 };
327 /* Return a new variable info structure consisting for a variable
328 named NAME, and using constraint graph node NODE. Append it
329 to the vector of variable info structures. */
331 static varinfo_t
332 new_var_info (tree t, const char *name)
334 unsigned index = varmap.length ();
335 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
337 ret->id = index;
338 ret->name = name;
339 ret->decl = t;
340 /* Vars without decl are artificial and do not have sub-variables. */
341 ret->is_artificial_var = (t == NULL_TREE);
342 ret->is_special_var = false;
343 ret->is_unknown_size_var = false;
344 ret->is_full_var = (t == NULL_TREE);
345 ret->is_heap_var = false;
346 ret->may_have_pointers = true;
347 ret->only_restrict_pointers = false;
348 ret->is_global_var = (t == NULL_TREE);
349 ret->is_fn_info = false;
350 if (t && DECL_P (t))
351 ret->is_global_var = (is_global_var (t)
352 /* We have to treat even local register variables
353 as escape points. */
354 || (TREE_CODE (t) == VAR_DECL
355 && DECL_HARD_REGISTER (t)));
356 ret->solution = BITMAP_ALLOC (&pta_obstack);
357 ret->oldsolution = NULL;
358 ret->next = NULL;
360 stats.total_vars++;
362 varmap.safe_push (ret);
364 return ret;
368 /* A map mapping call statements to per-stmt variables for uses
369 and clobbers specific to the call. */
370 struct pointer_map_t *call_stmt_vars;
372 /* Lookup or create the variable for the call statement CALL. */
374 static varinfo_t
375 get_call_vi (gimple call)
377 void **slot_p;
378 varinfo_t vi, vi2;
380 slot_p = pointer_map_insert (call_stmt_vars, call);
381 if (*slot_p)
382 return (varinfo_t) *slot_p;
384 vi = new_var_info (NULL_TREE, "CALLUSED");
385 vi->offset = 0;
386 vi->size = 1;
387 vi->fullsize = 2;
388 vi->is_full_var = true;
390 vi->next = vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
391 vi2->offset = 1;
392 vi2->size = 1;
393 vi2->fullsize = 2;
394 vi2->is_full_var = true;
396 *slot_p = (void *) vi;
397 return vi;
400 /* Lookup the variable for the call statement CALL representing
401 the uses. Returns NULL if there is nothing special about this call. */
403 static varinfo_t
404 lookup_call_use_vi (gimple call)
406 void **slot_p;
408 slot_p = pointer_map_contains (call_stmt_vars, call);
409 if (slot_p)
410 return (varinfo_t) *slot_p;
412 return NULL;
415 /* Lookup the variable for the call statement CALL representing
416 the clobbers. Returns NULL if there is nothing special about this call. */
418 static varinfo_t
419 lookup_call_clobber_vi (gimple call)
421 varinfo_t uses = lookup_call_use_vi (call);
422 if (!uses)
423 return NULL;
425 return uses->next;
428 /* Lookup or create the variable for the call statement CALL representing
429 the uses. */
431 static varinfo_t
432 get_call_use_vi (gimple call)
434 return get_call_vi (call);
437 /* Lookup or create the variable for the call statement CALL representing
438 the clobbers. */
440 static varinfo_t ATTRIBUTE_UNUSED
441 get_call_clobber_vi (gimple call)
443 return get_call_vi (call)->next;
447 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
449 /* An expression that appears in a constraint. */
451 struct constraint_expr
453 /* Constraint type. */
454 constraint_expr_type type;
456 /* Variable we are referring to in the constraint. */
457 unsigned int var;
459 /* Offset, in bits, of this constraint from the beginning of
460 variables it ends up referring to.
462 IOW, in a deref constraint, we would deref, get the result set,
463 then add OFFSET to each member. */
464 HOST_WIDE_INT offset;
467 /* Use 0x8000... as special unknown offset. */
468 #define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1))
470 typedef struct constraint_expr ce_s;
471 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
472 static void get_constraint_for (tree, vec<ce_s> *);
473 static void get_constraint_for_rhs (tree, vec<ce_s> *);
474 static void do_deref (vec<ce_s> *);
476 /* Our set constraints are made up of two constraint expressions, one
477 LHS, and one RHS.
479 As described in the introduction, our set constraints each represent an
480 operation between set valued variables.
482 struct constraint
484 struct constraint_expr lhs;
485 struct constraint_expr rhs;
488 /* List of constraints that we use to build the constraint graph from. */
490 static vec<constraint_t> constraints;
491 static alloc_pool constraint_pool;
493 /* The constraint graph is represented as an array of bitmaps
494 containing successor nodes. */
496 struct constraint_graph
498 /* Size of this graph, which may be different than the number of
499 nodes in the variable map. */
500 unsigned int size;
502 /* Explicit successors of each node. */
503 bitmap *succs;
505 /* Implicit predecessors of each node (Used for variable
506 substitution). */
507 bitmap *implicit_preds;
509 /* Explicit predecessors of each node (Used for variable substitution). */
510 bitmap *preds;
512 /* Indirect cycle representatives, or -1 if the node has no indirect
513 cycles. */
514 int *indirect_cycles;
516 /* Representative node for a node. rep[a] == a unless the node has
517 been unified. */
518 unsigned int *rep;
520 /* Equivalence class representative for a label. This is used for
521 variable substitution. */
522 int *eq_rep;
524 /* Pointer equivalence label for a node. All nodes with the same
525 pointer equivalence label can be unified together at some point
526 (either during constraint optimization or after the constraint
527 graph is built). */
528 unsigned int *pe;
530 /* Pointer equivalence representative for a label. This is used to
531 handle nodes that are pointer equivalent but not location
532 equivalent. We can unite these once the addressof constraints
533 are transformed into initial points-to sets. */
534 int *pe_rep;
536 /* Pointer equivalence label for each node, used during variable
537 substitution. */
538 unsigned int *pointer_label;
540 /* Location equivalence label for each node, used during location
541 equivalence finding. */
542 unsigned int *loc_label;
544 /* Pointed-by set for each node, used during location equivalence
545 finding. This is pointed-by rather than pointed-to, because it
546 is constructed using the predecessor graph. */
547 bitmap *pointed_by;
549 /* Points to sets for pointer equivalence. This is *not* the actual
550 points-to sets for nodes. */
551 bitmap *points_to;
553 /* Bitmap of nodes where the bit is set if the node is a direct
554 node. Used for variable substitution. */
555 sbitmap direct_nodes;
557 /* Bitmap of nodes where the bit is set if the node is address
558 taken. Used for variable substitution. */
559 bitmap address_taken;
561 /* Vector of complex constraints for each graph node. Complex
562 constraints are those involving dereferences or offsets that are
563 not 0. */
564 vec<constraint_t> *complex;
567 static constraint_graph_t graph;
569 /* During variable substitution and the offline version of indirect
570 cycle finding, we create nodes to represent dereferences and
571 address taken constraints. These represent where these start and
572 end. */
573 #define FIRST_REF_NODE (varmap).length ()
574 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
576 /* Return the representative node for NODE, if NODE has been unioned
577 with another NODE.
578 This function performs path compression along the way to finding
579 the representative. */
581 static unsigned int
582 find (unsigned int node)
584 gcc_assert (node < graph->size);
585 if (graph->rep[node] != node)
586 return graph->rep[node] = find (graph->rep[node]);
587 return node;
590 /* Union the TO and FROM nodes to the TO nodes.
591 Note that at some point in the future, we may want to do
592 union-by-rank, in which case we are going to have to return the
593 node we unified to. */
595 static bool
596 unite (unsigned int to, unsigned int from)
598 gcc_assert (to < graph->size && from < graph->size);
599 if (to != from && graph->rep[from] != to)
601 graph->rep[from] = to;
602 return true;
604 return false;
607 /* Create a new constraint consisting of LHS and RHS expressions. */
609 static constraint_t
610 new_constraint (const struct constraint_expr lhs,
611 const struct constraint_expr rhs)
613 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
614 ret->lhs = lhs;
615 ret->rhs = rhs;
616 return ret;
619 /* Print out constraint C to FILE. */
621 static void
622 dump_constraint (FILE *file, constraint_t c)
624 if (c->lhs.type == ADDRESSOF)
625 fprintf (file, "&");
626 else if (c->lhs.type == DEREF)
627 fprintf (file, "*");
628 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
629 if (c->lhs.offset == UNKNOWN_OFFSET)
630 fprintf (file, " + UNKNOWN");
631 else if (c->lhs.offset != 0)
632 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
633 fprintf (file, " = ");
634 if (c->rhs.type == ADDRESSOF)
635 fprintf (file, "&");
636 else if (c->rhs.type == DEREF)
637 fprintf (file, "*");
638 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
639 if (c->rhs.offset == UNKNOWN_OFFSET)
640 fprintf (file, " + UNKNOWN");
641 else if (c->rhs.offset != 0)
642 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
646 void debug_constraint (constraint_t);
647 void debug_constraints (void);
648 void debug_constraint_graph (void);
649 void debug_solution_for_var (unsigned int);
650 void debug_sa_points_to_info (void);
652 /* Print out constraint C to stderr. */
654 DEBUG_FUNCTION void
655 debug_constraint (constraint_t c)
657 dump_constraint (stderr, c);
658 fprintf (stderr, "\n");
661 /* Print out all constraints to FILE */
663 static void
664 dump_constraints (FILE *file, int from)
666 int i;
667 constraint_t c;
668 for (i = from; constraints.iterate (i, &c); i++)
669 if (c)
671 dump_constraint (file, c);
672 fprintf (file, "\n");
676 /* Print out all constraints to stderr. */
678 DEBUG_FUNCTION void
679 debug_constraints (void)
681 dump_constraints (stderr, 0);
684 /* Print the constraint graph in dot format. */
686 static void
687 dump_constraint_graph (FILE *file)
689 unsigned int i;
691 /* Only print the graph if it has already been initialized: */
692 if (!graph)
693 return;
695 /* Prints the header of the dot file: */
696 fprintf (file, "strict digraph {\n");
697 fprintf (file, " node [\n shape = box\n ]\n");
698 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
699 fprintf (file, "\n // List of nodes and complex constraints in "
700 "the constraint graph:\n");
702 /* The next lines print the nodes in the graph together with the
703 complex constraints attached to them. */
704 for (i = 0; i < graph->size; i++)
706 if (find (i) != i)
707 continue;
708 if (i < FIRST_REF_NODE)
709 fprintf (file, "\"%s\"", get_varinfo (i)->name);
710 else
711 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
712 if (graph->complex[i].exists ())
714 unsigned j;
715 constraint_t c;
716 fprintf (file, " [label=\"\\N\\n");
717 for (j = 0; graph->complex[i].iterate (j, &c); ++j)
719 dump_constraint (file, c);
720 fprintf (file, "\\l");
722 fprintf (file, "\"]");
724 fprintf (file, ";\n");
727 /* Go over the edges. */
728 fprintf (file, "\n // Edges in the constraint graph:\n");
729 for (i = 0; i < graph->size; i++)
731 unsigned j;
732 bitmap_iterator bi;
733 if (find (i) != i)
734 continue;
735 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
737 unsigned to = find (j);
738 if (i == to)
739 continue;
740 if (i < FIRST_REF_NODE)
741 fprintf (file, "\"%s\"", get_varinfo (i)->name);
742 else
743 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
744 fprintf (file, " -> ");
745 if (to < FIRST_REF_NODE)
746 fprintf (file, "\"%s\"", get_varinfo (to)->name);
747 else
748 fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
749 fprintf (file, ";\n");
753 /* Prints the tail of the dot file. */
754 fprintf (file, "}\n");
757 /* Print out the constraint graph to stderr. */
759 DEBUG_FUNCTION void
760 debug_constraint_graph (void)
762 dump_constraint_graph (stderr);
765 /* SOLVER FUNCTIONS
767 The solver is a simple worklist solver, that works on the following
768 algorithm:
770 sbitmap changed_nodes = all zeroes;
771 changed_count = 0;
772 For each node that is not already collapsed:
773 changed_count++;
774 set bit in changed nodes
776 while (changed_count > 0)
778 compute topological ordering for constraint graph
780 find and collapse cycles in the constraint graph (updating
781 changed if necessary)
783 for each node (n) in the graph in topological order:
784 changed_count--;
786 Process each complex constraint associated with the node,
787 updating changed if necessary.
789 For each outgoing edge from n, propagate the solution from n to
790 the destination of the edge, updating changed as necessary.
792 } */
794 /* Return true if two constraint expressions A and B are equal. */
796 static bool
797 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
799 return a.type == b.type && a.var == b.var && a.offset == b.offset;
802 /* Return true if constraint expression A is less than constraint expression
803 B. This is just arbitrary, but consistent, in order to give them an
804 ordering. */
806 static bool
807 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
809 if (a.type == b.type)
811 if (a.var == b.var)
812 return a.offset < b.offset;
813 else
814 return a.var < b.var;
816 else
817 return a.type < b.type;
820 /* Return true if constraint A is less than constraint B. This is just
821 arbitrary, but consistent, in order to give them an ordering. */
823 static bool
824 constraint_less (const constraint_t &a, const constraint_t &b)
826 if (constraint_expr_less (a->lhs, b->lhs))
827 return true;
828 else if (constraint_expr_less (b->lhs, a->lhs))
829 return false;
830 else
831 return constraint_expr_less (a->rhs, b->rhs);
834 /* Return true if two constraints A and B are equal. */
836 static bool
837 constraint_equal (struct constraint a, struct constraint b)
839 return constraint_expr_equal (a.lhs, b.lhs)
840 && constraint_expr_equal (a.rhs, b.rhs);
844 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
846 static constraint_t
847 constraint_vec_find (vec<constraint_t> vec,
848 struct constraint lookfor)
850 unsigned int place;
851 constraint_t found;
853 if (!vec.exists ())
854 return NULL;
856 place = vec.lower_bound (&lookfor, constraint_less);
857 if (place >= vec.length ())
858 return NULL;
859 found = vec[place];
860 if (!constraint_equal (*found, lookfor))
861 return NULL;
862 return found;
865 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
867 static void
868 constraint_set_union (vec<constraint_t> *to,
869 vec<constraint_t> *from)
871 int i;
872 constraint_t c;
874 FOR_EACH_VEC_ELT (*from, i, c)
876 if (constraint_vec_find (*to, *c) == NULL)
878 unsigned int place = to->lower_bound (c, constraint_less);
879 to->safe_insert (place, c);
884 /* Expands the solution in SET to all sub-fields of variables included.
885 Union the expanded result into RESULT. */
887 static void
888 solution_set_expand (bitmap result, bitmap set)
890 bitmap_iterator bi;
891 bitmap vars = NULL;
892 unsigned j;
894 /* In a first pass record all variables we need to add all
895 sub-fields off. This avoids quadratic behavior. */
896 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
898 varinfo_t v = get_varinfo (j);
899 if (v->is_artificial_var
900 || v->is_full_var)
901 continue;
902 v = lookup_vi_for_tree (v->decl);
903 if (vars == NULL)
904 vars = BITMAP_ALLOC (NULL);
905 bitmap_set_bit (vars, v->id);
908 /* In the second pass now do the addition to the solution and
909 to speed up solving add it to the delta as well. */
910 if (vars != NULL)
912 EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
914 varinfo_t v = get_varinfo (j);
915 for (; v != NULL; v = v->next)
916 bitmap_set_bit (result, v->id);
918 BITMAP_FREE (vars);
922 /* Take a solution set SET, add OFFSET to each member of the set, and
923 overwrite SET with the result when done. */
925 static void
926 solution_set_add (bitmap set, HOST_WIDE_INT offset)
928 bitmap result = BITMAP_ALLOC (&iteration_obstack);
929 unsigned int i;
930 bitmap_iterator bi;
932 /* If the offset is unknown we have to expand the solution to
933 all subfields. */
934 if (offset == UNKNOWN_OFFSET)
936 solution_set_expand (set, set);
937 return;
940 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
942 varinfo_t vi = get_varinfo (i);
944 /* If this is a variable with just one field just set its bit
945 in the result. */
946 if (vi->is_artificial_var
947 || vi->is_unknown_size_var
948 || vi->is_full_var)
949 bitmap_set_bit (result, i);
950 else
952 unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset;
954 /* If the offset makes the pointer point to before the
955 variable use offset zero for the field lookup. */
956 if (offset < 0
957 && fieldoffset > vi->offset)
958 fieldoffset = 0;
960 if (offset != 0)
961 vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
963 bitmap_set_bit (result, vi->id);
964 /* If the result is not exactly at fieldoffset include the next
965 field as well. See get_constraint_for_ptr_offset for more
966 rationale. */
967 if (vi->offset != fieldoffset
968 && vi->next != NULL)
969 bitmap_set_bit (result, vi->next->id);
973 bitmap_copy (set, result);
974 BITMAP_FREE (result);
977 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
978 process. */
980 static bool
981 set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc)
983 if (inc == 0)
984 return bitmap_ior_into (to, from);
985 else
987 bitmap tmp;
988 bool res;
990 tmp = BITMAP_ALLOC (&iteration_obstack);
991 bitmap_copy (tmp, from);
992 solution_set_add (tmp, inc);
993 res = bitmap_ior_into (to, tmp);
994 BITMAP_FREE (tmp);
995 return res;
999 /* Insert constraint C into the list of complex constraints for graph
1000 node VAR. */
1002 static void
1003 insert_into_complex (constraint_graph_t graph,
1004 unsigned int var, constraint_t c)
1006 vec<constraint_t> complex = graph->complex[var];
1007 unsigned int place = complex.lower_bound (c, constraint_less);
1009 /* Only insert constraints that do not already exist. */
1010 if (place >= complex.length ()
1011 || !constraint_equal (*c, *complex[place]))
1012 graph->complex[var].safe_insert (place, c);
1016 /* Condense two variable nodes into a single variable node, by moving
1017 all associated info from SRC to TO. */
1019 static void
1020 merge_node_constraints (constraint_graph_t graph, unsigned int to,
1021 unsigned int from)
1023 unsigned int i;
1024 constraint_t c;
1026 gcc_assert (find (from) == to);
1028 /* Move all complex constraints from src node into to node */
1029 FOR_EACH_VEC_ELT (graph->complex[from], i, c)
1031 /* In complex constraints for node src, we may have either
1032 a = *src, and *src = a, or an offseted constraint which are
1033 always added to the rhs node's constraints. */
1035 if (c->rhs.type == DEREF)
1036 c->rhs.var = to;
1037 else if (c->lhs.type == DEREF)
1038 c->lhs.var = to;
1039 else
1040 c->rhs.var = to;
1042 constraint_set_union (&graph->complex[to], &graph->complex[from]);
1043 graph->complex[from].release ();
1047 /* Remove edges involving NODE from GRAPH. */
1049 static void
1050 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1052 if (graph->succs[node])
1053 BITMAP_FREE (graph->succs[node]);
/* Merge GRAPH nodes FROM and TO into node TO, combining indirect-cycle
   information and successor edges, then dropping FROM's edges.  */

static void
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
		   unsigned int from)
{
  if (graph->indirect_cycles[from] != -1)
    {
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other.  */
      if (graph->indirect_cycles[to] == -1)
	graph->indirect_cycles[to] = graph->indirect_cycles[from];
    }

  /* Merge all the successor edges.  */
  if (graph->succs[from])
    {
      if (!graph->succs[to])
	graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
      bitmap_ior_into (graph->succs[to],
		       graph->succs[from]);
    }

  clear_edges_for_node (graph, from);
}
/* Add an indirect graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  Implicit edges are stored as
   predecessor bitmaps (FROM is recorded as a predecessor of TO).  */

static void
add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
			 unsigned int from)
{
  /* Self edges carry no information.  */
  if (to == from)
    return;

  if (!graph->implicit_preds[to])
    graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);

  /* bitmap_set_bit returns true only when the bit was newly set, so
     the statistics count each edge once.  */
  if (bitmap_set_bit (graph->implicit_preds[to], from))
    stats.num_implicit_edges++;
}
/* Add a predecessor graph edge to GRAPH, recording FROM as a
   predecessor of TO if it isn't recorded already.  Note this routine
   returns nothing; duplicates are silently ignored by bitmap_set_bit.  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_set_bit (graph->preds[to], from);
}
1117 /* Add a graph edge to GRAPH, going from FROM to TO if
1118 it doesn't exist in the graph already.
1119 Return false if the edge already existed, true otherwise. */
1121 static bool
1122 add_graph_edge (constraint_graph_t graph, unsigned int to,
1123 unsigned int from)
1125 if (to == from)
1127 return false;
1129 else
1131 bool r = false;
1133 if (!graph->succs[from])
1134 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1135 if (bitmap_set_bit (graph->succs[from], to))
1137 r = true;
1138 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1139 stats.num_edges++;
1141 return r;
/* Return true if {DEST, SRC} is an existing graph edge (an edge from
   DEST to SRC in the successor graph) in GRAPH.  */

static bool
valid_graph_edge (constraint_graph_t graph, unsigned int src,
		  unsigned int dest)
{
  return (graph->succs[dest]
	  && bitmap_bit_p (graph->succs[dest], src));
}
/* Initialize the constraint graph structure to contain SIZE nodes.
   Allocates all per-node arrays and sets every node to be its own
   union-find representative with no known cycles.  */

static void
init_graph (unsigned int size)
{
  unsigned int j;

  graph = XCNEW (struct constraint_graph);
  graph->size = size;
  graph->succs = XCNEWVEC (bitmap, graph->size);
  graph->indirect_cycles = XNEWVEC (int, graph->size);
  graph->rep = XNEWVEC (unsigned int, graph->size);
  /* ??? Macros do not support template types with multiple arguments,
     so we use a typedef to work around it.  */
  typedef vec<constraint_t> vec_constraint_t_heap;
  graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
  graph->pe = XCNEWVEC (unsigned int, graph->size);
  graph->pe_rep = XNEWVEC (int, graph->size);

  for (j = 0; j < graph->size; j++)
    {
      /* Every node starts as its own representative; -1 means "no
	 pointer-equivalence representative / no indirect cycle yet".  */
      graph->rep[j] = j;
      graph->pe_rep[j] = -1;
      graph->indirect_cycles[j] = -1;
    }
}
/* Build the constraint graph, adding only predecessor edges right now.
   This graph (predecessor + implicit-predecessor edges, plus the
   points-to/pointed-by seed bitmaps) is the input to offline variable
   substitution.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  /* All non-special variables start out as direct nodes; constraints
     below demote them as needed.  */
  for (j = 0; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes.  */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
	  v = get_varinfo (rhsvar);
	  if (!v->is_full_var)
	    {
	      /* For a sub-field variable, demote every field of the
		 containing decl, walking the field chain.  */
	      v = lookup_vi_for_tree (v->decl);
	      do
		{
		  bitmap_clear_bit (graph->direct_nodes, v->id);
		  v = v->next;
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Offsetted constraints make the involved variable indirect.  */
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}
/* Build the constraint graph, adding successor edges.  Runs after
   variable substitution, so every variable is mapped through find ()
   to its representative first.  */

static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      /* Constraints may have been NULLed out by earlier rewriting.  */
      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y */
	  gcc_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y.  */
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
	add_graph_edge (graph, find (i), t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, find (escaped_id), t);
}
/* Changed variables on the last iteration.  */
static bitmap changed;

/* Strongly Connected Component visitation info.  */

struct scc_info
{
  /* Nodes already entered by the DFS.  */
  sbitmap visited;
  /* Nodes whose SCC has been fully identified and collapsed.  */
  sbitmap deleted;
  /* Per-node DFS discovery numbers (Tarjan lowlink stored in place).  */
  unsigned int *dfs;
  /* Offline mapping of a node to its SCC representative.  */
  unsigned int *node_mapping;
  /* Next DFS number to hand out.  */
  int current_index;
  /* Nuutila-style stack of non-root candidate nodes.  */
  vec<unsigned> scc_stack;
};
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly
   connected components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      /* Successors past the REF-node range are not graph nodes.  */
      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	scc_visit (graph, si, w);
      {
	unsigned int t = find (w);
	unsigned int nnode = find (n);
	gcc_assert (nnode == n);

	/* Propagate the smallest reachable DFS number (lowlink).  */
	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      if (si->scc_stack.length () > 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  /* Pop every member of this SCC off the stack.  */
	  while (si->scc_stack.length () != 0
		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
	    {
	      unsigned int w = si->scc_stack.pop ();

	      bitmap_set_bit (scc, w);
	    }

	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  /* REF nodes record the cycle for online resolution.  */
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  Merges graph edges,
   complex constraints and solution sets, and removes any self edge
   the merge may have created on TO.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  gcc_assert (to != from && find (to) == to);
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  merge_graph_nodes (graph, to, from);
  merge_node_constraints (graph, to, from);

  /* Mark TO as changed if FROM was changed. If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_bit_p (changed, from))
    {
      bitmap_clear_bit (changed, from);
      bitmap_set_bit (changed, to);
    }
  if (get_varinfo (from)->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      if (bitmap_ior_into (get_varinfo (to)->solution,
			   get_varinfo (from)->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      /* FROM's sets are dead after the merge.  */
      BITMAP_FREE (get_varinfo (from)->solution);
      if (get_varinfo (from)->oldsolution)
	BITMAP_FREE (get_varinfo (from)->oldsolution);

      /* Once solving has started, TO's old solution is stale as well.  */
      if (stats.iterations > 0
	  && get_varinfo (to)->oldsolution)
	BITMAP_FREE (get_varinfo (to)->oldsolution);
    }
  /* Self edges carry no information for the solver; drop any created
     by the merge.  */
  if (valid_graph_edge (graph, to, to))
    {
      if (graph->succs[to])
	bitmap_clear_bit (graph->succs[to], to);
    }
}
/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse*.  */
  vec<unsigned> topo_order;
};
1523 /* Initialize and return a topological info structure. */
1525 static struct topo_info *
1526 init_topo_info (void)
1528 size_t size = graph->size;
1529 struct topo_info *ti = XNEW (struct topo_info);
1530 ti->visited = sbitmap_alloc (size);
1531 bitmap_clear (ti->visited);
1532 ti->topo_order.create (1);
1533 return ti;
1537 /* Free the topological sort info pointed to by TI. */
1539 static void
1540 free_topo_info (struct topo_info *ti)
1542 sbitmap_free (ti->visited);
1543 ti->topo_order.release ();
1544 free (ti);
/* Visit the graph in topological order starting at node N, and store
   the order in the topo_info structure TI.  Post-order push means
   TI->topo_order holds the topological order in reverse.  */

static void
topo_visit (constraint_graph_t graph, struct topo_info *ti,
	    unsigned int n)
{
  bitmap_iterator bi;
  unsigned int j;

  bitmap_set_bit (ti->visited, n);

  if (graph->succs[n])
    EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
      {
	if (!bitmap_bit_p (ti->visited, j))
	  topo_visit (graph, ti, j);
      }

  ti->topo_order.safe_push (n);
}
/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      solution_set_expand (delta, delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned int t;

      if (v->is_full_var)
	fieldoffset = v->offset;
      else if (roffset != 0)
	v = first_vi_for_offset (v, fieldoffset);
      /* If the access is outside of the variable we can ignore it.  */
      if (!v)
	continue;

      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  /* If the variable is not exactly at the requested offset
	     we have to include the next one.  */
	  if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
	      || v->next == NULL)
	    break;

	  v = v->next;
	  fieldoffset = v->offset;
	}
      while (1);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      bitmap_set_bit (changed, lhs);
    }
}
/* Process a constraint C that represents *(x + off) = y using DELTA
   as the starting solution for x.  */

static void
do_ds_constraint (constraint_t c, bitmap delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    bitmap_set_bit (changed, t);
	}
      return;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      solution_set_expand (delta, delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j) */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;

      if (v->is_full_var)
	fieldoffset = v->offset;
      else if (loff != 0)
	v = first_vi_for_offset (v, fieldoffset);
      /* If the access is outside of the variable we can ignore it.  */
      if (!v)
	continue;

      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
		bitmap_set_bit (changed, t);
	    }

	  /* If the variable is not exactly at the requested offset
	     we have to include the next one.  */
	  if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
	      || v->next == NULL)
	    break;

	  v = v->next;
	  fieldoffset = v->offset;
	}
      while (1);
    }
}
/* Handle a non-simple (simple meaning requires no iteration),
   constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved).
   Dispatches to do_ds_constraint / do_sd_constraint, or handles the
   offsetted scalar copy inline.  */

static void
do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
{
  if (c->lhs.type == DEREF)
    {
      if (c->rhs.type == ADDRESSOF)
	{
	  /* *x = &y is rewritten away before solving.  */
	  gcc_unreachable();
	}
      else
	{
	  /* *x = y */
	  do_ds_constraint (c, delta);
	}
    }
  else if (c->rhs.type == DEREF)
    {
      /* x = *y */
      if (!(get_varinfo (c->lhs.var)->is_special_var))
	do_sd_constraint (graph, c, delta);
    }
  else
    {
      bitmap tmp;
      bitmap solution;
      bool flag = false;

      /* x = y + offset.  */
      gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
      solution = get_varinfo (c->rhs.var)->solution;
      tmp = get_varinfo (c->lhs.var)->solution;

      flag = set_union_with_increment (tmp, solution, c->rhs.offset);

      if (flag)
	{
	  get_varinfo (c->lhs.var)->solution = tmp;
	  bitmap_set_bit (changed, c->lhs.var);
	}
    }
}
1799 /* Initialize and return a new SCC info structure. */
1801 static struct scc_info *
1802 init_scc_info (size_t size)
1804 struct scc_info *si = XNEW (struct scc_info);
1805 size_t i;
1807 si->current_index = 0;
1808 si->visited = sbitmap_alloc (size);
1809 bitmap_clear (si->visited);
1810 si->deleted = sbitmap_alloc (size);
1811 bitmap_clear (si->deleted);
1812 si->node_mapping = XNEWVEC (unsigned int, size);
1813 si->dfs = XCNEWVEC (unsigned int, size);
1815 for (i = 0; i < size; i++)
1816 si->node_mapping[i] = i;
1818 si->scc_stack.create (1);
1819 return si;
1822 /* Free an SCC info structure pointed to by SI */
1824 static void
1825 free_scc_info (struct scc_info *si)
1827 sbitmap_free (si->visited);
1828 sbitmap_free (si->deleted);
1829 free (si->node_mapping);
1830 free (si->dfs);
1831 si->scc_stack.release ();
1832 free (si);
/* Find indirect cycles in GRAPH that occur, using strongly connected
   components, and note them in the indirect cycles map.

   This technique comes from Ben Hardekopf and Calvin Lin,
   "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
   Lines of Code", submitted to PLDI 2007.  */

static void
find_indirect_cycles (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  struct scc_info *si = init_scc_info (size);

  /* Only visit representative non-REF nodes.  */
  for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
    if (!bitmap_bit_p (si->visited, i) && find (i) == i)
      scc_visit (graph, si, i);

  free_scc_info (si);
}
1857 /* Compute a topological ordering for GRAPH, and store the result in the
1858 topo_info structure TI. */
1860 static void
1861 compute_topo_order (constraint_graph_t graph,
1862 struct topo_info *ti)
1864 unsigned int i;
1865 unsigned int size = graph->size;
1867 for (i = 0; i != size; ++i)
1868 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1869 topo_visit (graph, ti, i);
/* Structure used to for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
{
  /* Cached hash of LABELS, computed once at insertion.  */
  hashval_t hashcode;
  /* The id assigned to this set of labels.  */
  unsigned int equivalence_class;
  /* The set of labels this class stands for.  */
  bitmap labels;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;

/* A hashtable for mapping a bitmap of labels->pointer equivalence
   classes.  */
static htab_t pointer_equiv_class_table;

/* A hashtable for mapping a bitmap of labels->location equivalence
   classes.  */
static htab_t location_equiv_class_table;
1891 /* Hash function for a equiv_class_label_t */
1893 static hashval_t
1894 equiv_class_label_hash (const void *p)
1896 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1897 return ecl->hashcode;
1900 /* Equality function for two equiv_class_label_t's. */
1902 static int
1903 equiv_class_label_eq (const void *p1, const void *p2)
1905 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1906 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1907 return (eql1->hashcode == eql2->hashcode
1908 && bitmap_equal_p (eql1->labels, eql2->labels));
/* Lookup a equivalence class in TABLE by the bitmap of LABELS it
   contains.  Sets *REF_LABELS to the bitmap LABELS is equivalent to.
   Returns 0 (and NULL in *REF_LABELS, if requested) when no class
   with these labels exists yet.  */

static unsigned int
equiv_class_lookup (htab_t table, bitmap labels, bitmap *ref_labels)
{
  void **slot;
  struct equiv_class_label ecl;

  ecl.labels = labels;
  ecl.hashcode = bitmap_hash (labels);

  slot = htab_find_slot_with_hash (table, &ecl,
				   ecl.hashcode, NO_INSERT);
  if (!slot)
    {
      if (ref_labels)
	*ref_labels = NULL;
      return 0;
    }
  else
    {
      equiv_class_label_t ec = (equiv_class_label_t) *slot;
      if (ref_labels)
	*ref_labels = ec->labels;
      return ec->equivalence_class;
    }
}
/* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
   to TABLE.  The class must not already be present; TABLE takes
   ownership of the entry (it was created with `free' as delete fn).  */

static void
equiv_class_add (htab_t table, unsigned int equivalence_class,
		 bitmap labels)
{
  void **slot;
  equiv_class_label_t ecl = XNEW (struct equiv_class_label);

  ecl->labels = labels;
  ecl->equivalence_class = equivalence_class;
  ecl->hashcode = bitmap_hash (labels);

  slot = htab_find_slot_with_hash (table, ecl,
				   ecl->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = (void *) ecl;
}
1961 /* Perform offline variable substitution.
1963 This is a worst case quadratic time way of identifying variables
1964 that must have equivalent points-to sets, including those caused by
1965 static cycles, and single entry subgraphs, in the constraint graph.
1967 The technique is described in "Exploiting Pointer and Location
1968 Equivalence to Optimize Pointer Analysis. In the 14th International
1969 Static Analysis Symposium (SAS), August 2007." It is known as the
1970 "HU" algorithm, and is equivalent to value numbering the collapsed
1971 constraint graph including evaluating unions.
1973 The general method of finding equivalence classes is as follows:
1974 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1975 Initialize all non-REF nodes to be direct nodes.
1976 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1977 variable}
1978 For each constraint containing the dereference, we also do the same
1979 thing.
1981 We then compute SCC's in the graph and unify nodes in the same SCC,
1982 including pts sets.
1984 For each non-collapsed node x:
1985 Visit all unvisited explicit incoming edges.
1986 Ignoring all non-pointers, set pts(x) = Union of pts(y) for all y
1987 where y->x.
1988 Lookup the equivalence class for pts(x).
1989 If we found one, equivalence_class(x) = found class.
1990 Otherwise, equivalence_class(x) = new class, and new_class is
1991 added to the lookup table.
1993 All direct nodes with the same equivalence class can be replaced
1994 with a single representative node.
1995 All unlabeled nodes (label == 0) are not pointers and all edges
1996 involving them can be eliminated.
1997 We perform these optimizations during rewrite_constraints
1999 In addition to pointer equivalence class finding, we also perform
2000 location equivalence class finding. This is the set of variables
2001 that always appear together in points-to sets. We use this to
2002 compress the size of the points-to sets. */
/* Current maximum pointer equivalence class id.  Class 0 is reserved
   to mean "not a pointer".  */
static int pointer_equiv_class;

/* Current maximum location equivalence class id.  */
static int location_equiv_class;
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  Works on the predecessor
   graph (explicit and implicit edges); SCC members are collapsed into
   N via si->node_mapping and their edge/points-to sets merged.  */

static void
condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  gcc_assert (si->node_mapping[n] == n);
  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);
      {
	unsigned int t = si->node_mapping[w];
	unsigned int nnode = si->node_mapping[n];
	gcc_assert (nnode == n);

	/* Propagate the smallest reachable DFS number (lowlink).  */
	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);
      {
	unsigned int t = si->node_mapping[w];
	unsigned int nnode = si->node_mapping[n];
	gcc_assert (nnode == n);

	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      while (si->scc_stack.length () != 0
	     && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  unsigned int w = si->scc_stack.pop ();
	  si->node_mapping[w] = n;

	  /* One indirect member makes the whole SCC indirect.  */
	  if (!bitmap_bit_p (graph->direct_nodes, w))
	    bitmap_clear_bit (graph->direct_nodes, n);

	  /* Unify our nodes.  */
	  if (graph->preds[w])
	    {
	      if (!graph->preds[n])
		graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->preds[n], graph->preds[w]);
	    }
	  if (graph->implicit_preds[w])
	    {
	      if (!graph->implicit_preds[n])
		graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->implicit_preds[n],
			       graph->implicit_preds[w]);
	    }
	  if (graph->points_to[w])
	    {
	      if (!graph->points_to[n])
		graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->points_to[n],
			       graph->points_to[w]);
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}
/* Label pointer equivalences.  Visits node N depth-first over incoming
   predecessor edges, unions predecessor points-to sets into N's, then
   assigns N the equivalence-class label of the resulting set (label 0
   means "not a pointer").  */

static void
label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap_set_bit (si->visited, n);

  if (!graph->points_to[n])
    graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);

  /* Label and union our incoming edges's points to sets.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      if (!bitmap_bit_p (si->visited, w))
	label_visit (graph, si, w);

      /* Skip unused edges */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
    }
  /* Indirect nodes get fresh variables.  */
  if (!bitmap_bit_p (graph->direct_nodes, n))
    bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);

  if (!bitmap_empty_p (graph->points_to[n]))
    {
      bitmap ref_points_to;
      unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
					       graph->points_to[n],
					       &ref_points_to);
      if (!label)
	{
	  /* First time this set is seen: assign a fresh class id.  */
	  label = pointer_equiv_class++;
	  equiv_class_add (pointer_equiv_class_table,
			   label, graph->points_to[n]);
	}
      else
	{
	  /* Share the canonical bitmap of the existing class.  */
	  BITMAP_FREE (graph->points_to[n]);
	  graph->points_to[n] = ref_points_to;
	}
      graph->pointer_label[n] = label;
    }
}
/* Perform offline variable substitution, discovering equivalence
   classes, and eliminating non-pointer variables.  Returns the SCC
   info, which the caller must later pass to
   free_var_substitution_info.  */

static struct scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  struct scc_info *si = init_scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
					   equiv_class_label_eq, free);
  location_equiv_class_table = htab_create (511, equiv_class_label_hash,
					    equiv_class_label_eq, free);
  /* Class id 0 is reserved for "not a pointer".  */
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      condense_visit (graph, si, si->node_mapping[i]);

  bitmap_clear (si->visited);
  /* Actually label the nodes for pointer equivalences  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      bitmap pointed_by;
      bitmap_iterator bi;
      unsigned int j;
      unsigned int label;

      if (!graph->pointed_by[i])
	continue;
      pointed_by = BITMAP_ALLOC (&iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	{
	  bitmap_set_bit (pointed_by,
			  graph->pointer_label[si->node_mapping[j]]);
	}
      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  */
      label = equiv_class_lookup (location_equiv_class_table,
				  pointed_by, NULL);
      if (label == 0)
	{
	  label = location_equiv_class++;
	  equiv_class_add (location_equiv_class_table,
			   label, pointed_by);
	}
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Found location equivalence for node %s\n",
		     get_varinfo (i)->name);
	  BITMAP_FREE (pointed_by);
	}
      graph->loc_label[i] = label;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 0; i < FIRST_REF_NODE; i++)
      {
	bool direct_node = bitmap_bit_p (graph->direct_nodes, i);
	fprintf (dump_file,
		 "Equivalence classes for %s node id %d:%s are pointer: %d"
		 ", location:%d\n",
		 direct_node ? "Direct node" : "Indirect node", i,
		 get_varinfo (i)->name,
		 graph->pointer_label[si->node_mapping[i]],
		 graph->loc_label[si->node_mapping[i]]);
      }

  /* Quickly eliminate our non-pointer variables.  */

  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      if (graph->pointer_label[node] == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }

  return si;
}
/* Free information that was only necessary for variable
   substitution.  SI is the SCC info produced by
   perform_var_substitution; the per-node label/points-to arrays and
   the equivalence-class hash tables are dead once constraints have
   been rewritten.  */

static void
free_var_substitution_info (struct scc_info *si)
{
  free_scc_info (si);
  free (graph->pointer_label);
  free (graph->loc_label);
  free (graph->pointed_by);
  free (graph->points_to);
  free (graph->eq_rep);
  sbitmap_free (graph->direct_nodes);
  htab_delete (pointer_equiv_class_table);
  htab_delete (location_equiv_class_table);
  /* All bitmaps allocated during substitution came from this obstack.  */
  bitmap_obstack_release (&iteration_obstack);
}
/* Return an existing node that is equivalent to NODE, which has
   equivalence class LABEL, if one exists.  Return NODE otherwise.
   As a side effect, records NODE as the representative of LABEL when
   it is the first member seen, and unifies NODE with an existing
   representative when full (location) equivalence is known.  */

static unsigned int
find_equivalent_node (constraint_graph_t graph,
                      unsigned int node, unsigned int label)
{
  /* If the address version of this variable is unused, we can
     substitute it for anything else with the same label.
     Otherwise, we know the pointers are equivalent, but not the
     locations, and we can unite them later.  */

  if (!bitmap_bit_p (graph->address_taken, node))
    {
      gcc_assert (label < graph->size);

      if (graph->eq_rep[label] != -1)
        {
          /* Unify the two variables since we know they are equivalent.  */
          if (unite (graph->eq_rep[label], node))
            unify_nodes (graph, graph->eq_rep[label], node, false);
          return graph->eq_rep[label];
        }
      else
        {
          /* First member of this class: it becomes both the location
             and pointer equivalence representative.  */
          graph->eq_rep[label] = node;
          graph->pe_rep[label] = node;
        }
    }
  else
    {
      /* Address-taken: only pointer-equivalent, record the label so
         unite_pointer_equivalences can merge later.  */
      gcc_assert (label < graph->size);
      graph->pe[node] = label;
      if (graph->pe_rep[label] == -1)
        graph->pe_rep[label] = node;
    }

  return node;
}
/* Unite pointer equivalent but not location equivalent nodes in
   GRAPH.  This may only be performed once variable substitution is
   finished.  */

static void
unite_pointer_equivalences (constraint_graph_t graph)
{
  unsigned int i;

  /* Go through the pointer equivalences and unite them to their
     representative, if they aren't already.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      unsigned int label = graph->pe[i];
      if (label)   /* Label 0 means "non-pointer", nothing to unite.  */
        {
          int label_rep = graph->pe_rep[label];

          /* No representative was ever recorded for this label.  */
          if (label_rep == -1)
            continue;

          label_rep = find (label_rep);
          if (label_rep >= 0 && unite (label_rep, find (i)))
            unify_nodes (graph, label_rep, i, false);
        }
    }
}
/* Move complex constraints to the GRAPH nodes they belong to.
   A constraint is "complex" when it cannot be represented as a plain
   copy edge: *x = y is stored on x, x = *y on y, and x = y + off
   (field-sensitive offsets) on y.  */

static void
move_complex_constraints (constraint_graph_t graph)
{
  int i;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      if (c)
        {
          struct constraint_expr lhs = c->lhs;
          struct constraint_expr rhs = c->rhs;

          if (lhs.type == DEREF)
            {
              /* *x = y: attach to the dereferenced lhs variable.  */
              insert_into_complex (graph, lhs.var, c);
            }
          else if (rhs.type == DEREF)
            {
              /* x = *y: attach to the dereferenced rhs variable,
                 unless the lhs is a special var (ANYTHING etc.).  */
              if (!(get_varinfo (lhs.var)->is_special_var))
                insert_into_complex (graph, rhs.var, c);
            }
          else if (rhs.type != ADDRESSOF && lhs.var > anything_id
                   && (lhs.offset != 0 || rhs.offset != 0))
            {
              /* Copy with a non-zero field offset is also complex.  */
              insert_into_complex (graph, rhs.var, c);
            }
        }
    }
}
2382 /* Optimize and rewrite complex constraints while performing
2383 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2384 result of perform_variable_substitution. */
2386 static void
2387 rewrite_constraints (constraint_graph_t graph,
2388 struct scc_info *si)
2390 int i;
2391 unsigned int j;
2392 constraint_t c;
2394 for (j = 0; j < graph->size; j++)
2395 gcc_assert (find (j) == j);
2397 FOR_EACH_VEC_ELT (constraints, i, c)
2399 struct constraint_expr lhs = c->lhs;
2400 struct constraint_expr rhs = c->rhs;
2401 unsigned int lhsvar = find (lhs.var);
2402 unsigned int rhsvar = find (rhs.var);
2403 unsigned int lhsnode, rhsnode;
2404 unsigned int lhslabel, rhslabel;
2406 lhsnode = si->node_mapping[lhsvar];
2407 rhsnode = si->node_mapping[rhsvar];
2408 lhslabel = graph->pointer_label[lhsnode];
2409 rhslabel = graph->pointer_label[rhsnode];
2411 /* See if it is really a non-pointer variable, and if so, ignore
2412 the constraint. */
2413 if (lhslabel == 0)
2415 if (dump_file && (dump_flags & TDF_DETAILS))
2418 fprintf (dump_file, "%s is a non-pointer variable,"
2419 "ignoring constraint:",
2420 get_varinfo (lhs.var)->name);
2421 dump_constraint (dump_file, c);
2422 fprintf (dump_file, "\n");
2424 constraints[i] = NULL;
2425 continue;
2428 if (rhslabel == 0)
2430 if (dump_file && (dump_flags & TDF_DETAILS))
2433 fprintf (dump_file, "%s is a non-pointer variable,"
2434 "ignoring constraint:",
2435 get_varinfo (rhs.var)->name);
2436 dump_constraint (dump_file, c);
2437 fprintf (dump_file, "\n");
2439 constraints[i] = NULL;
2440 continue;
2443 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2444 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2445 c->lhs.var = lhsvar;
2446 c->rhs.var = rhsvar;
/* Eliminate indirect cycles involving NODE.  Return true if NODE was
   part of an SCC, false otherwise.  */

static bool
eliminate_indirect_cycles (unsigned int node)
{
  if (graph->indirect_cycles[node] != -1
      && !bitmap_empty_p (get_varinfo (node)->solution))
    {
      unsigned int i;
      vec<unsigned> queue = vNULL;
      int queuepos;
      unsigned int to = find (graph->indirect_cycles[node]);
      bitmap_iterator bi;

      /* We can't touch the solution set and call unify_nodes
         at the same time, because unify_nodes is going to do
         bitmap unions into it.  So first collect the representatives
         to merge into QUEUE, then unify afterwards.  */

      EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
        {
          if (find (i) == i && i != to)
            {
              if (unite (to, i))
                queue.safe_push (i);
            }
        }

      for (queuepos = 0;
           queue.iterate (queuepos, &i);
           queuepos++)
        {
          unify_nodes (graph, to, i, true);
        }
      queue.release ();
      return true;
    }
  return false;
}
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changed.  This corresponds to steps 6-8 in the solving list given above.  */

static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = graph->size;
  unsigned int i;
  bitmap pts;

  changed = BITMAP_ALLOC (NULL);

  /* Mark all initial non-collapsed nodes as changed.  */
  for (i = 0; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
          && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
              || graph->complex[i].length () > 0))
        bitmap_set_bit (changed, i);
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  while (!bitmap_empty_p (changed))
    {
      unsigned int i;
      struct topo_info *ti = init_topo_info ();
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      /* Process nodes in (reverse) topological order so most
         propagation happens within one iteration.  */
      compute_topo_order (graph, ti);

      while (ti->topo_order.length () != 0)
        {
          i = ti->topo_order.pop ();

          /* If this variable is not a representative, skip it.  */
          if (find (i) != i)
            continue;

          /* In certain indirect cycle cases, we may merge this
             variable to another.  */
          if (eliminate_indirect_cycles (i) && find (i) != i)
            continue;

          /* If the node has changed, we need to process the
             complex constraints and outgoing edges again.  */
          if (bitmap_clear_bit (changed, i))
            {
              unsigned int j;
              constraint_t c;
              bitmap solution;
              vec<constraint_t> complex = graph->complex[i];
              varinfo_t vi = get_varinfo (i);
              bool solution_empty;

              /* Compute the changed set of solution bits.  PTS holds
                 only the bits added since the last visit, so complex
                 constraints and propagation work on the delta.  */
              if (vi->oldsolution)
                bitmap_and_compl (pts, vi->solution, vi->oldsolution);
              else
                bitmap_copy (pts, vi->solution);

              if (bitmap_empty_p (pts))
                continue;

              if (vi->oldsolution)
                bitmap_ior_into (vi->oldsolution, pts);
              else
                {
                  vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
                  bitmap_copy (vi->oldsolution, pts);
                }

              solution = vi->solution;
              solution_empty = bitmap_empty_p (solution);

              /* Process the complex constraints */
              FOR_EACH_VEC_ELT (complex, j, c)
                {
                  /* XXX: This is going to unsort the constraints in
                     some cases, which will occasionally add duplicate
                     constraints during unification.  This does not
                     affect correctness.  */
                  c->lhs.var = find (c->lhs.var);
                  c->rhs.var = find (c->rhs.var);

                  /* The only complex constraint that can change our
                     solution to non-empty, given an empty solution,
                     is a constraint where the lhs side is receiving
                     some set from elsewhere.  */
                  if (!solution_empty || c->lhs.type != DEREF)
                    do_complex_constraint (graph, c, pts);
                }

              solution_empty = bitmap_empty_p (solution);

              if (!solution_empty)
                {
                  bitmap_iterator bi;
                  unsigned eff_escaped_id = find (escaped_id);

                  /* Propagate solution to all successors.  */
                  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
                                                0, j, bi)
                    {
                      bitmap tmp;
                      bool flag;

                      unsigned int to = find (j);
                      tmp = get_varinfo (to)->solution;
                      flag = false;

                      /* Don't try to propagate to ourselves.  */
                      if (to == i)
                        continue;

                      /* If we propagate from ESCAPED use ESCAPED as
                         placeholder.  */
                      if (i == eff_escaped_id)
                        flag = bitmap_set_bit (tmp, escaped_id);
                      else
                        flag = set_union_with_increment (tmp, pts, 0);

                      if (flag)
                        {
                          get_varinfo (to)->solution = tmp;
                          bitmap_set_bit (changed, to);
                        }
                    }
                }
            }
        }
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  BITMAP_FREE (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
/* Map from trees to variable infos.  Maintained by
   insert_vi_for_tree and queried by lookup_vi_for_tree /
   get_vi_for_tree.  */
static struct pointer_map_t *vi_for_tree;
2644 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2646 static void
2647 insert_vi_for_tree (tree t, varinfo_t vi)
2649 void **slot = pointer_map_insert (vi_for_tree, t);
2650 gcc_assert (vi);
2651 gcc_assert (*slot == NULL);
2652 *slot = vi;
2655 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2656 exist in the map, return NULL, otherwise, return the varinfo we found. */
2658 static varinfo_t
2659 lookup_vi_for_tree (tree t)
2661 void **slot = pointer_map_contains (vi_for_tree, t);
2662 if (slot == NULL)
2663 return NULL;
2665 return (varinfo_t) *slot;
/* Return a printable name for DECL.
   Only meaningful when dumping: without a dump file the literal
   string "NULL" is returned to avoid the allocation cost.  SSA names
   are printed as "<base>_<version>" (or "_<version>" when unnamed);
   unnamed decls as "D.<uid>".  Built strings are ggc_strdup'ed so the
   result outlives this call.  */

static const char *
alias_get_name (tree decl)
{
  const char *res = NULL;
  char *temp;
  int num_printed = 0;

  if (!dump_file)
    return "NULL";

  if (TREE_CODE (decl) == SSA_NAME)
    {
      res = get_name (decl);
      if (res)
        num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
      else
        num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
      /* asprintf returns a negative value on failure; only use TEMP
         when the formatting succeeded.  */
      if (num_printed > 0)
        {
          res = ggc_strdup (temp);
          free (temp);
        }
    }
  else if (DECL_P (decl))
    {
      if (DECL_ASSEMBLER_NAME_SET_P (decl))
        res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      else
        {
          res = get_name (decl);
          if (!res)
            {
              num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
              if (num_printed > 0)
                {
                  res = ggc_strdup (temp);
                  free (temp);
                }
            }
        }
    }
  if (res != NULL)
    return res;

  return "NULL";
}
2717 /* Find the variable id for tree T in the map.
2718 If T doesn't exist in the map, create an entry for it and return it. */
2720 static varinfo_t
2721 get_vi_for_tree (tree t)
2723 void **slot = pointer_map_contains (vi_for_tree, t);
2724 if (slot == NULL)
2725 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2727 return (varinfo_t) *slot;
2730 /* Get a scalar constraint expression for a new temporary variable. */
2732 static struct constraint_expr
2733 new_scalar_tmp_constraint_exp (const char *name)
2735 struct constraint_expr tmp;
2736 varinfo_t vi;
2738 vi = new_var_info (NULL_TREE, name);
2739 vi->offset = 0;
2740 vi->size = -1;
2741 vi->fullsize = -1;
2742 vi->is_full_var = 1;
2744 tmp.var = vi->id;
2745 tmp.type = SCALAR;
2746 tmp.offset = 0;
2748 return tmp;
/* Get a constraint expression vector from an SSA_VAR_P node.
   If address_p is true, the result will be taken its address of.  */

static void
get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
{
  struct constraint_expr cexpr;
  varinfo_t vi;

  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
  gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (t) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (t)
      && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
          || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
    {
      get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
      return;
    }

  /* For global variables resort to the alias target.  */
  if (TREE_CODE (t) == VAR_DECL
      && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      struct varpool_node *node = varpool_get_node (t);
      if (node && node->alias)
        {
          node = varpool_variable_node (node, NULL);
          t = node->symbol.decl;
        }
    }

  vi = get_vi_for_tree (t);
  cexpr.var = vi->id;
  cexpr.type = SCALAR;
  cexpr.offset = 0;
  /* If we determine the result is "anything", and we know this is readonly,
     say it points to readonly memory instead.  */
  if (cexpr.var == anything_id && TREE_READONLY (t))
    {
      /* This path is believed dead (a decl never maps to ANYTHING).  */
      gcc_unreachable ();
      cexpr.type = ADDRESSOF;
      cexpr.var = readonly_id;
    }

  /* If we are not taking the address of the constraint expr, add all
     sub-fields of the variable as well.  */
  if (!address_p
      && !vi->is_full_var)
    {
      for (; vi; vi = vi->next)
        {
          cexpr.var = vi->id;
          results->safe_push (cexpr);
        }
      return;
    }

  results->safe_push (cexpr);
}
/* Process constraint T, performing various simplifications and then
   adding it to our list of overall constraints.  Constraints with a
   DEREF on both sides, or ADDRESSOF feeding a DEREF, are split into
   two simpler constraints through a fresh temporary.  */

static void
process_constraint (constraint_t t)
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  gcc_assert (rhs.var < varmap.length ());
  gcc_assert (lhs.var < varmap.length ());

  /* If we didn't get any useful constraint from the lhs we get
     &ANYTHING as fallback from get_constraint_for.  Deal with
     it here by turning it into *ANYTHING.  */
  if (lhs.type == ADDRESSOF
      && lhs.var == anything_id)
    lhs.type = DEREF;

  /* ADDRESSOF on the lhs is invalid.  */
  gcc_assert (lhs.type != ADDRESSOF);

  /* We shouldn't add constraints from things that cannot have pointers.
     It's not completely trivial to avoid in the callers, so do it here.  */
  if (rhs.type != ADDRESSOF
      && !get_varinfo (rhs.var)->may_have_pointers)
    return;

  /* Likewise adding to the solution of a non-pointer var isn't useful.  */
  if (!get_varinfo (lhs.var)->may_have_pointers)
    return;

  /* This can happen in our IR with things like n->a = *p */
  if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
    {
      /* Split into tmp = *rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
    {
      /* Split into tmp = &rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else
    {
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
      constraints.safe_push (t);
    }
}
2872 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2873 structure. */
2875 static HOST_WIDE_INT
2876 bitpos_of_field (const tree fdecl)
2878 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2879 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2880 return -1;
2882 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
2883 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
/* Get constraint expressions for offsetting PTR by OFFSET.  Stores the
   resulting constraint expressions in *RESULTS.  */

static void
get_constraint_for_ptr_offset (tree ptr, tree offset,
                               vec<ce_s> *results)
{
  struct constraint_expr c;
  unsigned int j, n;
  HOST_WIDE_INT rhsoffset;

  /* If we do not do field-sensitive PTA adding offsets to pointers
     does not change the points-to solution.  */
  if (!use_field_sensitive)
    {
      get_constraint_for_rhs (ptr, results);
      return;
    }

  /* If the offset is not a non-negative integer constant that fits
     in a HOST_WIDE_INT, we have to fall back to a conservative
     solution which includes all sub-fields of all pointed-to
     variables of ptr.  */
  if (offset == NULL_TREE
      || TREE_CODE (offset) != INTEGER_CST)
    rhsoffset = UNKNOWN_OFFSET;
  else
    {
      /* Sign-extend the offset.  */
      double_int soffset = tree_to_double_int (offset)
                           .sext (TYPE_PRECISION (TREE_TYPE (offset)));
      if (!soffset.fits_shwi ())
        rhsoffset = UNKNOWN_OFFSET;
      else
        {
          /* Make sure the bit-offset also fits: the multiply by
             BITS_PER_UNIT may overflow, which we detect by dividing
             back.  */
          HOST_WIDE_INT rhsunitoffset = soffset.low;
          rhsoffset = rhsunitoffset * BITS_PER_UNIT;
          if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
            rhsoffset = UNKNOWN_OFFSET;
        }
    }

  get_constraint_for_rhs (ptr, results);
  if (rhsoffset == 0)
    return;

  /* As we are eventually appending to the solution do not use
     vec::iterate here.  */
  n = results->length ();
  for (j = 0; j < n; j++)
    {
      varinfo_t curr;
      c = (*results)[j];
      curr = get_varinfo (c.var);

      if (c.type == ADDRESSOF
          /* If this varinfo represents a full variable just use it.  */
          && curr->is_full_var)
        c.offset = 0;
      else if (c.type == ADDRESSOF
               /* If we do not know the offset add all subfields.  */
               && rhsoffset == UNKNOWN_OFFSET)
        {
          varinfo_t temp = lookup_vi_for_tree (curr->decl);
          do
            {
              struct constraint_expr c2;
              c2.var = temp->id;
              c2.type = ADDRESSOF;
              c2.offset = 0;
              if (c2.var != c.var)
                results->safe_push (c2);
              temp = temp->next;
            }
          while (temp);
        }
      else if (c.type == ADDRESSOF)
        {
          varinfo_t temp;
          unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;

          /* Search the sub-field which overlaps with the
             pointed-to offset.  If the result is outside of the variable
             we have to provide a conservative result, as the variable is
             still reachable from the resulting pointer (even though it
             technically cannot point to anything).  The last and first
             sub-fields are such conservative results.
             ??? If we always had a sub-field for &object + 1 then
             we could represent this in a more precise way.  */
          if (rhsoffset < 0
              && curr->offset < offset)
            offset = 0;
          temp = first_or_preceding_vi_for_offset (curr, offset);

          /* If the found variable is not exactly at the pointed to
             result, we have to include the next variable in the
             solution as well.  Otherwise two increments by offset / 2
             do not result in the same or a conservative superset
             solution.  */
          if (temp->offset != offset
              && temp->next != NULL)
            {
              struct constraint_expr c2;
              c2.var = temp->next->id;
              c2.type = ADDRESSOF;
              c2.offset = 0;
              results->safe_push (c2);
            }
          c.var = temp->id;
          c.offset = 0;
        }
      else
        c.offset = rhsoffset;

      (*results)[j] = c;
    }
}
3007 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3008 If address_p is true the result will be taken its address of.
3009 If lhs_p is true then the constraint expression is assumed to be used
3010 as the lhs. */
3012 static void
3013 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3014 bool address_p, bool lhs_p)
3016 tree orig_t = t;
3017 HOST_WIDE_INT bitsize = -1;
3018 HOST_WIDE_INT bitmaxsize = -1;
3019 HOST_WIDE_INT bitpos;
3020 tree forzero;
3022 /* Some people like to do cute things like take the address of
3023 &0->a.b */
3024 forzero = t;
3025 while (handled_component_p (forzero)
3026 || INDIRECT_REF_P (forzero)
3027 || TREE_CODE (forzero) == MEM_REF)
3028 forzero = TREE_OPERAND (forzero, 0);
3030 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3032 struct constraint_expr temp;
3034 temp.offset = 0;
3035 temp.var = integer_id;
3036 temp.type = SCALAR;
3037 results->safe_push (temp);
3038 return;
3041 /* Handle type-punning through unions. If we are extracting a pointer
3042 from a union via a possibly type-punning access that pointer
3043 points to anything, similar to a conversion of an integer to
3044 a pointer. */
3045 if (!lhs_p)
3047 tree u;
3048 for (u = t;
3049 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
3050 u = TREE_OPERAND (u, 0))
3051 if (TREE_CODE (u) == COMPONENT_REF
3052 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
3054 struct constraint_expr temp;
3056 temp.offset = 0;
3057 temp.var = anything_id;
3058 temp.type = ADDRESSOF;
3059 results->safe_push (temp);
3060 return;
3064 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
3066 /* Pretend to take the address of the base, we'll take care of
3067 adding the required subset of sub-fields below. */
3068 get_constraint_for_1 (t, results, true, lhs_p);
3069 gcc_assert (results->length () == 1);
3070 struct constraint_expr &result = results->last ();
3072 if (result.type == SCALAR
3073 && get_varinfo (result.var)->is_full_var)
3074 /* For single-field vars do not bother about the offset. */
3075 result.offset = 0;
3076 else if (result.type == SCALAR)
3078 /* In languages like C, you can access one past the end of an
3079 array. You aren't allowed to dereference it, so we can
3080 ignore this constraint. When we handle pointer subtraction,
3081 we may have to do something cute here. */
3083 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
3084 && bitmaxsize != 0)
3086 /* It's also not true that the constraint will actually start at the
3087 right offset, it may start in some padding. We only care about
3088 setting the constraint to the first actual field it touches, so
3089 walk to find it. */
3090 struct constraint_expr cexpr = result;
3091 varinfo_t curr;
3092 results->pop ();
3093 cexpr.offset = 0;
3094 for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
3096 if (ranges_overlap_p (curr->offset, curr->size,
3097 bitpos, bitmaxsize))
3099 cexpr.var = curr->id;
3100 results->safe_push (cexpr);
3101 if (address_p)
3102 break;
3105 /* If we are going to take the address of this field then
3106 to be able to compute reachability correctly add at least
3107 the last field of the variable. */
3108 if (address_p && results->length () == 0)
3110 curr = get_varinfo (cexpr.var);
3111 while (curr->next != NULL)
3112 curr = curr->next;
3113 cexpr.var = curr->id;
3114 results->safe_push (cexpr);
3116 else if (results->length () == 0)
3117 /* Assert that we found *some* field there. The user couldn't be
3118 accessing *only* padding. */
3119 /* Still the user could access one past the end of an array
3120 embedded in a struct resulting in accessing *only* padding. */
3121 /* Or accessing only padding via type-punning to a type
3122 that has a filed just in padding space. */
3124 cexpr.type = SCALAR;
3125 cexpr.var = anything_id;
3126 cexpr.offset = 0;
3127 results->safe_push (cexpr);
3130 else if (bitmaxsize == 0)
3132 if (dump_file && (dump_flags & TDF_DETAILS))
3133 fprintf (dump_file, "Access to zero-sized part of variable,"
3134 "ignoring\n");
3136 else
3137 if (dump_file && (dump_flags & TDF_DETAILS))
3138 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3140 else if (result.type == DEREF)
3142 /* If we do not know exactly where the access goes say so. Note
3143 that only for non-structure accesses we know that we access
3144 at most one subfiled of any variable. */
3145 if (bitpos == -1
3146 || bitsize != bitmaxsize
3147 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3148 || result.offset == UNKNOWN_OFFSET)
3149 result.offset = UNKNOWN_OFFSET;
3150 else
3151 result.offset += bitpos;
3153 else if (result.type == ADDRESSOF)
3155 /* We can end up here for component references on a
3156 VIEW_CONVERT_EXPR <>(&foobar). */
3157 result.type = SCALAR;
3158 result.var = anything_id;
3159 result.offset = 0;
3161 else
3162 gcc_unreachable ();
3166 /* Dereference the constraint expression CONS, and return the result.
3167 DEREF (ADDRESSOF) = SCALAR
3168 DEREF (SCALAR) = DEREF
3169 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3170 This is needed so that we can handle dereferencing DEREF constraints. */
3172 static void
3173 do_deref (vec<ce_s> *constraints)
3175 struct constraint_expr *c;
3176 unsigned int i = 0;
3178 FOR_EACH_VEC_ELT (*constraints, i, c)
3180 if (c->type == SCALAR)
3181 c->type = DEREF;
3182 else if (c->type == ADDRESSOF)
3183 c->type = SCALAR;
3184 else if (c->type == DEREF)
3186 struct constraint_expr tmplhs;
3187 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3188 process_constraint (new_constraint (tmplhs, *c));
3189 c->var = tmplhs.var;
3191 else
3192 gcc_unreachable ();
3196 /* Given a tree T, return the constraint expression for taking the
3197 address of it. */
3199 static void
3200 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3202 struct constraint_expr *c;
3203 unsigned int i;
3205 get_constraint_for_1 (t, results, true, true);
3207 FOR_EACH_VEC_ELT (*results, i, c)
3209 if (c->type == DEREF)
3210 c->type = SCALAR;
3211 else
3212 c->type = ADDRESSOF;
3216 /* Given a tree T, return the constraint expression for it. */
3218 static void
3219 get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
3220 bool lhs_p)
3222 struct constraint_expr temp;
3224 /* x = integer is all glommed to a single variable, which doesn't
3225 point to anything by itself. That is, of course, unless it is an
3226 integer constant being treated as a pointer, in which case, we
3227 will return that this is really the addressof anything. This
3228 happens below, since it will fall into the default case. The only
3229 case we know something about an integer treated like a pointer is
3230 when it is the NULL pointer, and then we just say it points to
3231 NULL.
3233 Do not do that if -fno-delete-null-pointer-checks though, because
3234 in that case *NULL does not fail, so it _should_ alias *anything.
3235 It is not worth adding a new option or renaming the existing one,
3236 since this case is relatively obscure. */
3237 if ((TREE_CODE (t) == INTEGER_CST
3238 && integer_zerop (t))
3239 /* The only valid CONSTRUCTORs in gimple with pointer typed
3240 elements are zero-initializer. But in IPA mode we also
3241 process global initializers, so verify at least. */
3242 || (TREE_CODE (t) == CONSTRUCTOR
3243 && CONSTRUCTOR_NELTS (t) == 0))
3245 if (flag_delete_null_pointer_checks)
3246 temp.var = nothing_id;
3247 else
3248 temp.var = nonlocal_id;
3249 temp.type = ADDRESSOF;
3250 temp.offset = 0;
3251 results->safe_push (temp);
3252 return;
3255 /* String constants are read-only. */
3256 if (TREE_CODE (t) == STRING_CST)
3258 temp.var = readonly_id;
3259 temp.type = SCALAR;
3260 temp.offset = 0;
3261 results->safe_push (temp);
3262 return;
3265 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3267 case tcc_expression:
3269 switch (TREE_CODE (t))
3271 case ADDR_EXPR:
3272 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3273 return;
3274 default:;
3276 break;
3278 case tcc_reference:
3280 switch (TREE_CODE (t))
3282 case MEM_REF:
3284 struct constraint_expr cs;
3285 varinfo_t vi, curr;
3286 get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3287 TREE_OPERAND (t, 1), results);
3288 do_deref (results);
3290 /* If we are not taking the address then make sure to process
3291 all subvariables we might access. */
3292 if (address_p)
3293 return;
3295 cs = results->last ();
3296 if (cs.type == DEREF
3297 && type_can_have_subvars (TREE_TYPE (t)))
3299 /* For dereferences this means we have to defer it
3300 to solving time. */
3301 results->last ().offset = UNKNOWN_OFFSET;
3302 return;
3304 if (cs.type != SCALAR)
3305 return;
3307 vi = get_varinfo (cs.var);
3308 curr = vi->next;
3309 if (!vi->is_full_var
3310 && curr)
3312 unsigned HOST_WIDE_INT size;
3313 if (host_integerp (TYPE_SIZE (TREE_TYPE (t)), 1))
3314 size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t)));
3315 else
3316 size = -1;
3317 for (; curr; curr = curr->next)
3319 if (curr->offset - vi->offset < size)
3321 cs.var = curr->id;
3322 results->safe_push (cs);
3324 else
3325 break;
3328 return;
3330 case ARRAY_REF:
3331 case ARRAY_RANGE_REF:
3332 case COMPONENT_REF:
3333 get_constraint_for_component_ref (t, results, address_p, lhs_p);
3334 return;
3335 case VIEW_CONVERT_EXPR:
3336 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
3337 lhs_p);
3338 return;
3339 /* We are missing handling for TARGET_MEM_REF here. */
3340 default:;
3342 break;
3344 case tcc_exceptional:
3346 switch (TREE_CODE (t))
3348 case SSA_NAME:
3350 get_constraint_for_ssa_var (t, results, address_p);
3351 return;
3353 case CONSTRUCTOR:
3355 unsigned int i;
3356 tree val;
3357 vec<ce_s> tmp = vNULL;
3358 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3360 struct constraint_expr *rhsp;
3361 unsigned j;
3362 get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3363 FOR_EACH_VEC_ELT (tmp, j, rhsp)
3364 results->safe_push (*rhsp);
3365 tmp.truncate (0);
3367 tmp.release ();
3368 /* We do not know whether the constructor was complete,
3369 so technically we have to add &NOTHING or &ANYTHING
3370 like we do for an empty constructor as well. */
3371 return;
3373 default:;
3375 break;
3377 case tcc_declaration:
3379 get_constraint_for_ssa_var (t, results, address_p);
3380 return;
3382 case tcc_constant:
3384 /* We cannot refer to automatic variables through constants. */
3385 temp.type = ADDRESSOF;
3386 temp.var = nonlocal_id;
3387 temp.offset = 0;
3388 results->safe_push (temp);
3389 return;
3391 default:;
3394 /* The default fallback is a constraint from anything. */
3395 temp.type = ADDRESSOF;
3396 temp.var = anything_id;
3397 temp.offset = 0;
3398 results->safe_push (temp);
3401 /* Given a gimple tree T, return the constraint expression vector for it. */
3403 static void
3404 get_constraint_for (tree t, vec<ce_s> *results)
3406 gcc_assert (results->length () == 0);
3408 get_constraint_for_1 (t, results, false, true);
3411 /* Given a gimple tree T, return the constraint expression vector for it
3412 to be used as the rhs of a constraint. */
3414 static void
3415 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3417 gcc_assert (results->length () == 0);
3419 get_constraint_for_1 (t, results, false, false);
3423 /* Efficiently generates constraints from all entries in *RHSC to all
3424 entries in *LHSC. */
3426 static void
3427 process_all_all_constraints (vec<ce_s> lhsc,
3428 vec<ce_s> rhsc)
3430 struct constraint_expr *lhsp, *rhsp;
3431 unsigned i, j;
3433 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3435 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3436 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3437 process_constraint (new_constraint (*lhsp, *rhsp));
3439 else
3441 struct constraint_expr tmp;
3442 tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3443 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3444 process_constraint (new_constraint (tmp, *rhsp));
3445 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3446 process_constraint (new_constraint (*lhsp, tmp));
/* Handle aggregate copies by expanding into copies of the respective
   fields of the structures.  */

static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr *lhsp, *rhsp;
  vec<ce_s> lhsc = vNULL;
  vec<ce_s> rhsc = vNULL;
  unsigned j;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for_rhs (rhsop, &rhsc);
  lhsp = &lhsc[0];
  rhsp = &rhsc[0];
  if (lhsp->type == DEREF
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
      || rhsp->type == DEREF)
    {
      /* Either side is indirect: we cannot match fields, so make the
         offsets unknown and emit the full cross product.  */
      if (lhsp->type == DEREF)
        {
          gcc_assert (lhsc.length () == 1);
          lhsp->offset = UNKNOWN_OFFSET;
        }
      if (rhsp->type == DEREF)
        {
          gcc_assert (rhsc.length () == 1);
          rhsp->offset = UNKNOWN_OFFSET;
        }
      process_all_all_constraints (lhsc, rhsc);
    }
  else if (lhsp->type == SCALAR
           && (rhsp->type == SCALAR
               || rhsp->type == ADDRESSOF))
    {
      /* Both sides are direct: walk the sub-fields of both sides in
         lockstep (J indexes lhsc, K indexes rhsc) and emit a copy
         constraint for each overlapping pair.  */
      HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
      HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
      unsigned k = 0;
      get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
      get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
      for (j = 0; lhsc.iterate (j, &lhsp);)
        {
          varinfo_t lhsv, rhsv;
          rhsp = &rhsc[k];
          lhsv = get_varinfo (lhsp->var);
          rhsv = get_varinfo (rhsp->var);
          if (lhsv->may_have_pointers
              && (lhsv->is_full_var
                  || rhsv->is_full_var
                  || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
                                       rhsv->offset + lhsoffset, rhsv->size)))
            process_constraint (new_constraint (*lhsp, *rhsp));
          /* Advance whichever cursor ends first in the overlap.  */
          if (!rhsv->is_full_var
              && (lhsv->is_full_var
                  || (lhsv->offset + rhsoffset + lhsv->size
                      > rhsv->offset + lhsoffset + rhsv->size)))
            {
              ++k;
              if (k >= rhsc.length ())
                break;
            }
          else
            ++j;
        }
    }
  else
    gcc_unreachable ();

  lhsc.release ();
  rhsc.release ();
}
3522 /* Create constraints ID = { rhsc }. */
3524 static void
3525 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3527 struct constraint_expr *c;
3528 struct constraint_expr includes;
3529 unsigned int j;
3531 includes.var = id;
3532 includes.offset = 0;
3533 includes.type = SCALAR;
3535 FOR_EACH_VEC_ELT (rhsc, j, c)
3536 process_constraint (new_constraint (includes, *c));
3539 /* Create a constraint ID = OP. */
3541 static void
3542 make_constraint_to (unsigned id, tree op)
3544 vec<ce_s> rhsc = vNULL;
3545 get_constraint_for_rhs (op, &rhsc);
3546 make_constraints_to (id, rhsc);
3547 rhsc.release ();
3550 /* Create a constraint ID = &FROM. */
3552 static void
3553 make_constraint_from (varinfo_t vi, int from)
3555 struct constraint_expr lhs, rhs;
3557 lhs.var = vi->id;
3558 lhs.offset = 0;
3559 lhs.type = SCALAR;
3561 rhs.var = from;
3562 rhs.offset = 0;
3563 rhs.type = ADDRESSOF;
3564 process_constraint (new_constraint (lhs, rhs));
3567 /* Create a constraint ID = FROM. */
3569 static void
3570 make_copy_constraint (varinfo_t vi, int from)
3572 struct constraint_expr lhs, rhs;
3574 lhs.var = vi->id;
3575 lhs.offset = 0;
3576 lhs.type = SCALAR;
3578 rhs.var = from;
3579 rhs.offset = 0;
3580 rhs.type = SCALAR;
3581 process_constraint (new_constraint (lhs, rhs));
3584 /* Make constraints necessary to make OP escape. */
3586 static void
3587 make_escape_constraint (tree op)
3589 make_constraint_to (escaped_id, op);
3592 /* Add constraints to that the solution of VI is transitively closed. */
3594 static void
3595 make_transitive_closure_constraints (varinfo_t vi)
3597 struct constraint_expr lhs, rhs;
3599 /* VAR = *VAR; */
3600 lhs.type = SCALAR;
3601 lhs.var = vi->id;
3602 lhs.offset = 0;
3603 rhs.type = DEREF;
3604 rhs.var = vi->id;
3605 rhs.offset = 0;
3606 process_constraint (new_constraint (lhs, rhs));
3608 /* VAR = VAR + UNKNOWN; */
3609 lhs.type = SCALAR;
3610 lhs.var = vi->id;
3611 lhs.offset = 0;
3612 rhs.type = SCALAR;
3613 rhs.var = vi->id;
3614 rhs.offset = UNKNOWN_OFFSET;
3615 process_constraint (new_constraint (lhs, rhs));
3618 /* Temporary storage for fake var decls. */
3619 struct obstack fake_var_decl_obstack;
3621 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3623 static tree
3624 build_fake_var_decl (tree type)
3626 tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
3627 memset (decl, 0, sizeof (struct tree_var_decl));
3628 TREE_SET_CODE (decl, VAR_DECL);
3629 TREE_TYPE (decl) = type;
3630 DECL_UID (decl) = allocate_decl_uid ();
3631 SET_DECL_PT_UID (decl, -1);
3632 layout_decl (decl, 0);
3633 return decl;
3636 /* Create a new artificial heap variable with NAME.
3637 Return the created variable. */
3639 static varinfo_t
3640 make_heapvar (const char *name)
3642 varinfo_t vi;
3643 tree heapvar;
3645 heapvar = build_fake_var_decl (ptr_type_node);
3646 DECL_EXTERNAL (heapvar) = 1;
3648 vi = new_var_info (heapvar, name);
3649 vi->is_artificial_var = true;
3650 vi->is_heap_var = true;
3651 vi->is_unknown_size_var = true;
3652 vi->offset = 0;
3653 vi->fullsize = ~0;
3654 vi->size = ~0;
3655 vi->is_full_var = true;
3656 insert_vi_for_tree (heapvar, vi);
3658 return vi;
3661 /* Create a new artificial heap variable with NAME and make a
3662 constraint from it to LHS. Set flags according to a tag used
3663 for tracking restrict pointers. */
3665 static varinfo_t
3666 make_constraint_from_restrict (varinfo_t lhs, const char *name)
3668 varinfo_t vi = make_heapvar (name);
3669 vi->is_global_var = 1;
3670 vi->may_have_pointers = 1;
3671 make_constraint_from (lhs, vi->id);
3672 return vi;
3675 /* Create a new artificial heap variable with NAME and make a
3676 constraint from it to LHS. Set flags according to a tag used
3677 for tracking restrict pointers and make the artificial heap
3678 point to global memory. */
3680 static varinfo_t
3681 make_constraint_from_global_restrict (varinfo_t lhs, const char *name)
3683 varinfo_t vi = make_constraint_from_restrict (lhs, name);
3684 make_copy_constraint (vi, nonlocal_id);
3685 return vi;
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  */

enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3697 /* Get a constraint for the requested part of a function designator FI
3698 when operating in IPA mode. */
3700 static struct constraint_expr
3701 get_function_part_constraint (varinfo_t fi, unsigned part)
3703 struct constraint_expr c;
3705 gcc_assert (in_ipa_mode);
3707 if (fi->id == anything_id)
3709 /* ??? We probably should have a ANYFN special variable. */
3710 c.var = anything_id;
3711 c.offset = 0;
3712 c.type = SCALAR;
3714 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3716 varinfo_t ai = first_vi_for_offset (fi, part);
3717 if (ai)
3718 c.var = ai->id;
3719 else
3720 c.var = anything_id;
3721 c.offset = 0;
3722 c.type = SCALAR;
3724 else
3726 c.var = fi->id;
3727 c.offset = part;
3728 c.type = DEREF;
3731 return c;
3734 /* For non-IPA mode, generate constraints necessary for a call on the
3735 RHS. */
3737 static void
3738 handle_rhs_call (gimple stmt, vec<ce_s> *results)
3740 struct constraint_expr rhsc;
3741 unsigned i;
3742 bool returns_uses = false;
3744 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3746 tree arg = gimple_call_arg (stmt, i);
3747 int flags = gimple_call_arg_flags (stmt, i);
3749 /* If the argument is not used we can ignore it. */
3750 if (flags & EAF_UNUSED)
3751 continue;
3753 /* As we compute ESCAPED context-insensitive we do not gain
3754 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3755 set. The argument would still get clobbered through the
3756 escape solution. */
3757 if ((flags & EAF_NOCLOBBER)
3758 && (flags & EAF_NOESCAPE))
3760 varinfo_t uses = get_call_use_vi (stmt);
3761 if (!(flags & EAF_DIRECT))
3763 varinfo_t tem = new_var_info (NULL_TREE, "callarg");
3764 make_constraint_to (tem->id, arg);
3765 make_transitive_closure_constraints (tem);
3766 make_copy_constraint (uses, tem->id);
3768 else
3769 make_constraint_to (uses->id, arg);
3770 returns_uses = true;
3772 else if (flags & EAF_NOESCAPE)
3774 struct constraint_expr lhs, rhs;
3775 varinfo_t uses = get_call_use_vi (stmt);
3776 varinfo_t clobbers = get_call_clobber_vi (stmt);
3777 varinfo_t tem = new_var_info (NULL_TREE, "callarg");
3778 make_constraint_to (tem->id, arg);
3779 if (!(flags & EAF_DIRECT))
3780 make_transitive_closure_constraints (tem);
3781 make_copy_constraint (uses, tem->id);
3782 make_copy_constraint (clobbers, tem->id);
3783 /* Add *tem = nonlocal, do not add *tem = callused as
3784 EAF_NOESCAPE parameters do not escape to other parameters
3785 and all other uses appear in NONLOCAL as well. */
3786 lhs.type = DEREF;
3787 lhs.var = tem->id;
3788 lhs.offset = 0;
3789 rhs.type = SCALAR;
3790 rhs.var = nonlocal_id;
3791 rhs.offset = 0;
3792 process_constraint (new_constraint (lhs, rhs));
3793 returns_uses = true;
3795 else
3796 make_escape_constraint (arg);
3799 /* If we added to the calls uses solution make sure we account for
3800 pointers to it to be returned. */
3801 if (returns_uses)
3803 rhsc.var = get_call_use_vi (stmt)->id;
3804 rhsc.offset = 0;
3805 rhsc.type = SCALAR;
3806 results->safe_push (rhsc);
3809 /* The static chain escapes as well. */
3810 if (gimple_call_chain (stmt))
3811 make_escape_constraint (gimple_call_chain (stmt));
3813 /* And if we applied NRV the address of the return slot escapes as well. */
3814 if (gimple_call_return_slot_opt_p (stmt)
3815 && gimple_call_lhs (stmt) != NULL_TREE
3816 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3818 vec<ce_s> tmpc = vNULL;
3819 struct constraint_expr lhsc, *c;
3820 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
3821 lhsc.var = escaped_id;
3822 lhsc.offset = 0;
3823 lhsc.type = SCALAR;
3824 FOR_EACH_VEC_ELT (tmpc, i, c)
3825 process_constraint (new_constraint (lhsc, *c));
3826 tmpc.release ();
3829 /* Regular functions return nonlocal memory. */
3830 rhsc.var = nonlocal_id;
3831 rhsc.offset = 0;
3832 rhsc.type = SCALAR;
3833 results->safe_push (rhsc);
3836 /* For non-IPA mode, generate constraints necessary for a call
3837 that returns a pointer and assigns it to LHS. This simply makes
3838 the LHS point to global and escaped variables. */
3840 static void
3841 handle_lhs_call (gimple stmt, tree lhs, int flags, vec<ce_s> rhsc,
3842 tree fndecl)
3844 vec<ce_s> lhsc = vNULL;
3846 get_constraint_for (lhs, &lhsc);
3847 /* If the store is to a global decl make sure to
3848 add proper escape constraints. */
3849 lhs = get_base_address (lhs);
3850 if (lhs
3851 && DECL_P (lhs)
3852 && is_global_var (lhs))
3854 struct constraint_expr tmpc;
3855 tmpc.var = escaped_id;
3856 tmpc.offset = 0;
3857 tmpc.type = SCALAR;
3858 lhsc.safe_push (tmpc);
3861 /* If the call returns an argument unmodified override the rhs
3862 constraints. */
3863 flags = gimple_call_return_flags (stmt);
3864 if (flags & ERF_RETURNS_ARG
3865 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
3867 tree arg;
3868 rhsc.create (0);
3869 arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
3870 get_constraint_for (arg, &rhsc);
3871 process_all_all_constraints (lhsc, rhsc);
3872 rhsc.release ();
3874 else if (flags & ERF_NOALIAS)
3876 varinfo_t vi;
3877 struct constraint_expr tmpc;
3878 rhsc.create (0);
3879 vi = make_heapvar ("HEAP");
3880 /* We delay marking allocated storage global until we know if
3881 it escapes. */
3882 DECL_EXTERNAL (vi->decl) = 0;
3883 vi->is_global_var = 0;
3884 /* If this is not a real malloc call assume the memory was
3885 initialized and thus may point to global memory. All
3886 builtin functions with the malloc attribute behave in a sane way. */
3887 if (!fndecl
3888 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
3889 make_constraint_from (vi, nonlocal_id);
3890 tmpc.var = vi->id;
3891 tmpc.offset = 0;
3892 tmpc.type = ADDRESSOF;
3893 rhsc.safe_push (tmpc);
3894 process_all_all_constraints (lhsc, rhsc);
3895 rhsc.release ();
3897 else
3898 process_all_all_constraints (lhsc, rhsc);
3900 lhsc.release ();
3903 /* For non-IPA mode, generate constraints necessary for a call of a
3904 const function that returns a pointer in the statement STMT. */
3906 static void
3907 handle_const_call (gimple stmt, vec<ce_s> *results)
3909 struct constraint_expr rhsc;
3910 unsigned int k;
3912 /* Treat nested const functions the same as pure functions as far
3913 as the static chain is concerned. */
3914 if (gimple_call_chain (stmt))
3916 varinfo_t uses = get_call_use_vi (stmt);
3917 make_transitive_closure_constraints (uses);
3918 make_constraint_to (uses->id, gimple_call_chain (stmt));
3919 rhsc.var = uses->id;
3920 rhsc.offset = 0;
3921 rhsc.type = SCALAR;
3922 results->safe_push (rhsc);
3925 /* May return arguments. */
3926 for (k = 0; k < gimple_call_num_args (stmt); ++k)
3928 tree arg = gimple_call_arg (stmt, k);
3929 vec<ce_s> argc = vNULL;
3930 unsigned i;
3931 struct constraint_expr *argp;
3932 get_constraint_for_rhs (arg, &argc);
3933 FOR_EACH_VEC_ELT (argc, i, argp)
3934 results->safe_push (*argp);
3935 argc.release ();
3938 /* May return addresses of globals. */
3939 rhsc.var = nonlocal_id;
3940 rhsc.offset = 0;
3941 rhsc.type = ADDRESSOF;
3942 results->safe_push (rhsc);
3945 /* For non-IPA mode, generate constraints necessary for a call to a
3946 pure function in statement STMT. */
3948 static void
3949 handle_pure_call (gimple stmt, vec<ce_s> *results)
3951 struct constraint_expr rhsc;
3952 unsigned i;
3953 varinfo_t uses = NULL;
3955 /* Memory reached from pointer arguments is call-used. */
3956 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3958 tree arg = gimple_call_arg (stmt, i);
3959 if (!uses)
3961 uses = get_call_use_vi (stmt);
3962 make_transitive_closure_constraints (uses);
3964 make_constraint_to (uses->id, arg);
3967 /* The static chain is used as well. */
3968 if (gimple_call_chain (stmt))
3970 if (!uses)
3972 uses = get_call_use_vi (stmt);
3973 make_transitive_closure_constraints (uses);
3975 make_constraint_to (uses->id, gimple_call_chain (stmt));
3978 /* Pure functions may return call-used and nonlocal memory. */
3979 if (uses)
3981 rhsc.var = uses->id;
3982 rhsc.offset = 0;
3983 rhsc.type = SCALAR;
3984 results->safe_push (rhsc);
3986 rhsc.var = nonlocal_id;
3987 rhsc.offset = 0;
3988 rhsc.type = SCALAR;
3989 results->safe_push (rhsc);
3993 /* Return the varinfo for the callee of CALL. */
3995 static varinfo_t
3996 get_fi_for_callee (gimple call)
3998 tree decl, fn = gimple_call_fn (call);
4000 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4001 fn = OBJ_TYPE_REF_EXPR (fn);
4003 /* If we can directly resolve the function being called, do so.
4004 Otherwise, it must be some sort of indirect expression that
4005 we should still be able to handle. */
4006 decl = gimple_call_addr_fndecl (fn);
4007 if (decl)
4008 return get_vi_for_tree (decl);
4010 /* If the function is anything other than a SSA name pointer we have no
4011 clue and should be getting ANYFN (well, ANYTHING for now). */
4012 if (!fn || TREE_CODE (fn) != SSA_NAME)
4013 return get_varinfo (anything_id);
4015 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4016 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4017 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4018 fn = SSA_NAME_VAR (fn);
4020 return get_vi_for_tree (fn);
4023 /* Create constraints for the builtin call T. Return true if the call
4024 was handled, otherwise false. */
4026 static bool
4027 find_func_aliases_for_builtin_call (gimple t)
4029 tree fndecl = gimple_call_fndecl (t);
4030 vec<ce_s> lhsc = vNULL;
4031 vec<ce_s> rhsc = vNULL;
4032 varinfo_t fi;
4034 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4035 /* ??? All builtins that are handled here need to be handled
4036 in the alias-oracle query functions explicitly! */
4037 switch (DECL_FUNCTION_CODE (fndecl))
4039 /* All the following functions return a pointer to the same object
4040 as their first argument points to. The functions do not add
4041 to the ESCAPED solution. The functions make the first argument
4042 pointed to memory point to what the second argument pointed to
4043 memory points to. */
4044 case BUILT_IN_STRCPY:
4045 case BUILT_IN_STRNCPY:
4046 case BUILT_IN_BCOPY:
4047 case BUILT_IN_MEMCPY:
4048 case BUILT_IN_MEMMOVE:
4049 case BUILT_IN_MEMPCPY:
4050 case BUILT_IN_STPCPY:
4051 case BUILT_IN_STPNCPY:
4052 case BUILT_IN_STRCAT:
4053 case BUILT_IN_STRNCAT:
4054 case BUILT_IN_STRCPY_CHK:
4055 case BUILT_IN_STRNCPY_CHK:
4056 case BUILT_IN_MEMCPY_CHK:
4057 case BUILT_IN_MEMMOVE_CHK:
4058 case BUILT_IN_MEMPCPY_CHK:
4059 case BUILT_IN_STPCPY_CHK:
4060 case BUILT_IN_STPNCPY_CHK:
4061 case BUILT_IN_STRCAT_CHK:
4062 case BUILT_IN_STRNCAT_CHK:
4063 case BUILT_IN_TM_MEMCPY:
4064 case BUILT_IN_TM_MEMMOVE:
4066 tree res = gimple_call_lhs (t);
4067 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4068 == BUILT_IN_BCOPY ? 1 : 0));
4069 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4070 == BUILT_IN_BCOPY ? 0 : 1));
4071 if (res != NULL_TREE)
4073 get_constraint_for (res, &lhsc);
4074 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4075 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4076 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4077 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4078 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4079 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4080 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4081 else
4082 get_constraint_for (dest, &rhsc);
4083 process_all_all_constraints (lhsc, rhsc);
4084 lhsc.release ();
4085 rhsc.release ();
4087 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4088 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4089 do_deref (&lhsc);
4090 do_deref (&rhsc);
4091 process_all_all_constraints (lhsc, rhsc);
4092 lhsc.release ();
4093 rhsc.release ();
4094 return true;
4096 case BUILT_IN_MEMSET:
4097 case BUILT_IN_MEMSET_CHK:
4098 case BUILT_IN_TM_MEMSET:
4100 tree res = gimple_call_lhs (t);
4101 tree dest = gimple_call_arg (t, 0);
4102 unsigned i;
4103 ce_s *lhsp;
4104 struct constraint_expr ac;
4105 if (res != NULL_TREE)
4107 get_constraint_for (res, &lhsc);
4108 get_constraint_for (dest, &rhsc);
4109 process_all_all_constraints (lhsc, rhsc);
4110 lhsc.release ();
4111 rhsc.release ();
4113 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4114 do_deref (&lhsc);
4115 if (flag_delete_null_pointer_checks
4116 && integer_zerop (gimple_call_arg (t, 1)))
4118 ac.type = ADDRESSOF;
4119 ac.var = nothing_id;
4121 else
4123 ac.type = SCALAR;
4124 ac.var = integer_id;
4126 ac.offset = 0;
4127 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4128 process_constraint (new_constraint (*lhsp, ac));
4129 lhsc.release ();
4130 return true;
4132 case BUILT_IN_ASSUME_ALIGNED:
4134 tree res = gimple_call_lhs (t);
4135 tree dest = gimple_call_arg (t, 0);
4136 if (res != NULL_TREE)
4138 get_constraint_for (res, &lhsc);
4139 get_constraint_for (dest, &rhsc);
4140 process_all_all_constraints (lhsc, rhsc);
4141 lhsc.release ();
4142 rhsc.release ();
4144 return true;
4146 /* All the following functions do not return pointers, do not
4147 modify the points-to sets of memory reachable from their
4148 arguments and do not add to the ESCAPED solution. */
4149 case BUILT_IN_SINCOS:
4150 case BUILT_IN_SINCOSF:
4151 case BUILT_IN_SINCOSL:
4152 case BUILT_IN_FREXP:
4153 case BUILT_IN_FREXPF:
4154 case BUILT_IN_FREXPL:
4155 case BUILT_IN_GAMMA_R:
4156 case BUILT_IN_GAMMAF_R:
4157 case BUILT_IN_GAMMAL_R:
4158 case BUILT_IN_LGAMMA_R:
4159 case BUILT_IN_LGAMMAF_R:
4160 case BUILT_IN_LGAMMAL_R:
4161 case BUILT_IN_MODF:
4162 case BUILT_IN_MODFF:
4163 case BUILT_IN_MODFL:
4164 case BUILT_IN_REMQUO:
4165 case BUILT_IN_REMQUOF:
4166 case BUILT_IN_REMQUOL:
4167 case BUILT_IN_FREE:
4168 return true;
4169 case BUILT_IN_STRDUP:
4170 case BUILT_IN_STRNDUP:
4171 if (gimple_call_lhs (t))
4173 handle_lhs_call (t, gimple_call_lhs (t), gimple_call_flags (t),
4174 vNULL, fndecl);
4175 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4176 NULL_TREE, &lhsc);
4177 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4178 NULL_TREE, &rhsc);
4179 do_deref (&lhsc);
4180 do_deref (&rhsc);
4181 process_all_all_constraints (lhsc, rhsc);
4182 lhsc.release ();
4183 rhsc.release ();
4184 return true;
4186 break;
4187 /* Trampolines are special - they set up passing the static
4188 frame. */
4189 case BUILT_IN_INIT_TRAMPOLINE:
4191 tree tramp = gimple_call_arg (t, 0);
4192 tree nfunc = gimple_call_arg (t, 1);
4193 tree frame = gimple_call_arg (t, 2);
4194 unsigned i;
4195 struct constraint_expr lhs, *rhsp;
4196 if (in_ipa_mode)
4198 varinfo_t nfi = NULL;
4199 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4200 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4201 if (nfi)
4203 lhs = get_function_part_constraint (nfi, fi_static_chain);
4204 get_constraint_for (frame, &rhsc);
4205 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4206 process_constraint (new_constraint (lhs, *rhsp));
4207 rhsc.release ();
4209 /* Make the frame point to the function for
4210 the trampoline adjustment call. */
4211 get_constraint_for (tramp, &lhsc);
4212 do_deref (&lhsc);
4213 get_constraint_for (nfunc, &rhsc);
4214 process_all_all_constraints (lhsc, rhsc);
4215 rhsc.release ();
4216 lhsc.release ();
4218 return true;
4221 /* Else fallthru to generic handling which will let
4222 the frame escape. */
4223 break;
4225 case BUILT_IN_ADJUST_TRAMPOLINE:
4227 tree tramp = gimple_call_arg (t, 0);
4228 tree res = gimple_call_lhs (t);
4229 if (in_ipa_mode && res)
4231 get_constraint_for (res, &lhsc);
4232 get_constraint_for (tramp, &rhsc);
4233 do_deref (&rhsc);
4234 process_all_all_constraints (lhsc, rhsc);
4235 rhsc.release ();
4236 lhsc.release ();
4238 return true;
4240 CASE_BUILT_IN_TM_STORE (1):
4241 CASE_BUILT_IN_TM_STORE (2):
4242 CASE_BUILT_IN_TM_STORE (4):
4243 CASE_BUILT_IN_TM_STORE (8):
4244 CASE_BUILT_IN_TM_STORE (FLOAT):
4245 CASE_BUILT_IN_TM_STORE (DOUBLE):
4246 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4247 CASE_BUILT_IN_TM_STORE (M64):
4248 CASE_BUILT_IN_TM_STORE (M128):
4249 CASE_BUILT_IN_TM_STORE (M256):
4251 tree addr = gimple_call_arg (t, 0);
4252 tree src = gimple_call_arg (t, 1);
4254 get_constraint_for (addr, &lhsc);
4255 do_deref (&lhsc);
4256 get_constraint_for (src, &rhsc);
4257 process_all_all_constraints (lhsc, rhsc);
4258 lhsc.release ();
4259 rhsc.release ();
4260 return true;
4262 CASE_BUILT_IN_TM_LOAD (1):
4263 CASE_BUILT_IN_TM_LOAD (2):
4264 CASE_BUILT_IN_TM_LOAD (4):
4265 CASE_BUILT_IN_TM_LOAD (8):
4266 CASE_BUILT_IN_TM_LOAD (FLOAT):
4267 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4268 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4269 CASE_BUILT_IN_TM_LOAD (M64):
4270 CASE_BUILT_IN_TM_LOAD (M128):
4271 CASE_BUILT_IN_TM_LOAD (M256):
4273 tree dest = gimple_call_lhs (t);
4274 tree addr = gimple_call_arg (t, 0);
4276 get_constraint_for (dest, &lhsc);
4277 get_constraint_for (addr, &rhsc);
4278 do_deref (&rhsc);
4279 process_all_all_constraints (lhsc, rhsc);
4280 lhsc.release ();
4281 rhsc.release ();
4282 return true;
4284 /* Variadic argument handling needs to be handled in IPA
4285 mode as well. */
4286 case BUILT_IN_VA_START:
4288 tree valist = gimple_call_arg (t, 0);
4289 struct constraint_expr rhs, *lhsp;
4290 unsigned i;
4291 get_constraint_for (valist, &lhsc);
4292 do_deref (&lhsc);
4293 /* The va_list gets access to pointers in variadic
4294 arguments. Which we know in the case of IPA analysis
4295 and otherwise are just all nonlocal variables. */
4296 if (in_ipa_mode)
4298 fi = lookup_vi_for_tree (cfun->decl);
4299 rhs = get_function_part_constraint (fi, ~0);
4300 rhs.type = ADDRESSOF;
4302 else
4304 rhs.var = nonlocal_id;
4305 rhs.type = ADDRESSOF;
4306 rhs.offset = 0;
4308 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4309 process_constraint (new_constraint (*lhsp, rhs));
4310 lhsc.release ();
4311 /* va_list is clobbered. */
4312 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4313 return true;
4315 /* va_end doesn't have any effect that matters. */
4316 case BUILT_IN_VA_END:
4317 return true;
4318 /* Alternate return. Simply give up for now. */
4319 case BUILT_IN_RETURN:
4321 fi = NULL;
4322 if (!in_ipa_mode
4323 || !(fi = get_vi_for_tree (cfun->decl)))
4324 make_constraint_from (get_varinfo (escaped_id), anything_id);
4325 else if (in_ipa_mode
4326 && fi != NULL)
4328 struct constraint_expr lhs, rhs;
4329 lhs = get_function_part_constraint (fi, fi_result);
4330 rhs.var = anything_id;
4331 rhs.offset = 0;
4332 rhs.type = SCALAR;
4333 process_constraint (new_constraint (lhs, rhs));
4335 return true;
4337 /* printf-style functions may have hooks to set pointers to
4338 point to somewhere into the generated string. Leave them
4339 for a later excercise... */
4340 default:
4341 /* Fallthru to general call handling. */;
4344 return false;
4347 /* Create constraints for the call T. */
4349 static void
4350 find_func_aliases_for_call (gimple t)
4352 tree fndecl = gimple_call_fndecl (t);
4353 vec<ce_s> lhsc = vNULL;
4354 vec<ce_s> rhsc = vNULL;
4355 varinfo_t fi;
4357 if (fndecl != NULL_TREE
4358 && DECL_BUILT_IN (fndecl)
4359 && find_func_aliases_for_builtin_call (t))
4360 return;
4362 fi = get_fi_for_callee (t);
4363 if (!in_ipa_mode
4364 || (fndecl && !fi->is_fn_info))
4366 vec<ce_s> rhsc = vNULL;
4367 int flags = gimple_call_flags (t);
4369 /* Const functions can return their arguments and addresses
4370 of global memory but not of escaped memory. */
4371 if (flags & (ECF_CONST|ECF_NOVOPS))
4373 if (gimple_call_lhs (t))
4374 handle_const_call (t, &rhsc);
4376 /* Pure functions can return addresses in and of memory
4377 reachable from their arguments, but they are not an escape
4378 point for reachable memory of their arguments. */
4379 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4380 handle_pure_call (t, &rhsc);
4381 else
4382 handle_rhs_call (t, &rhsc);
4383 if (gimple_call_lhs (t))
4384 handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
4385 rhsc.release ();
4387 else
4389 tree lhsop;
4390 unsigned j;
4392 /* Assign all the passed arguments to the appropriate incoming
4393 parameters of the function. */
4394 for (j = 0; j < gimple_call_num_args (t); j++)
4396 struct constraint_expr lhs ;
4397 struct constraint_expr *rhsp;
4398 tree arg = gimple_call_arg (t, j);
4400 get_constraint_for_rhs (arg, &rhsc);
4401 lhs = get_function_part_constraint (fi, fi_parm_base + j);
4402 while (rhsc.length () != 0)
4404 rhsp = &rhsc.last ();
4405 process_constraint (new_constraint (lhs, *rhsp));
4406 rhsc.pop ();
4410 /* If we are returning a value, assign it to the result. */
4411 lhsop = gimple_call_lhs (t);
4412 if (lhsop)
4414 struct constraint_expr rhs;
4415 struct constraint_expr *lhsp;
4417 get_constraint_for (lhsop, &lhsc);
4418 rhs = get_function_part_constraint (fi, fi_result);
4419 if (fndecl
4420 && DECL_RESULT (fndecl)
4421 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4423 vec<ce_s> tem = vNULL;
4424 tem.safe_push (rhs);
4425 do_deref (&tem);
4426 rhs = tem[0];
4427 tem.release ();
4429 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4430 process_constraint (new_constraint (*lhsp, rhs));
4433 /* If we pass the result decl by reference, honor that. */
4434 if (lhsop
4435 && fndecl
4436 && DECL_RESULT (fndecl)
4437 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4439 struct constraint_expr lhs;
4440 struct constraint_expr *rhsp;
4442 get_constraint_for_address_of (lhsop, &rhsc);
4443 lhs = get_function_part_constraint (fi, fi_result);
4444 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4445 process_constraint (new_constraint (lhs, *rhsp));
4446 rhsc.release ();
4449 /* If we use a static chain, pass it along. */
4450 if (gimple_call_chain (t))
4452 struct constraint_expr lhs;
4453 struct constraint_expr *rhsp;
4455 get_constraint_for (gimple_call_chain (t), &rhsc);
4456 lhs = get_function_part_constraint (fi, fi_static_chain);
4457 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4458 process_constraint (new_constraint (lhs, *rhsp));
4463 /* Walk statement T setting up aliasing constraints according to the
4464 references found in T. This function is the main part of the
4465 constraint builder. AI points to auxiliary alias information used
4466 when building alias sets and computing alias grouping heuristics. */
4468 static void
4469 find_func_aliases (gimple origt)
4471 gimple t = origt;
4472 vec<ce_s> lhsc = vNULL;
4473 vec<ce_s> rhsc = vNULL;
4474 struct constraint_expr *c;
4475 varinfo_t fi;
4477 /* Now build constraints expressions. */
4478 if (gimple_code (t) == GIMPLE_PHI)
4480 size_t i;
4481 unsigned int j;
4483 /* For a phi node, assign all the arguments to
4484 the result. */
4485 get_constraint_for (gimple_phi_result (t), &lhsc);
4486 for (i = 0; i < gimple_phi_num_args (t); i++)
4488 tree strippedrhs = PHI_ARG_DEF (t, i);
4490 STRIP_NOPS (strippedrhs);
4491 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
4493 FOR_EACH_VEC_ELT (lhsc, j, c)
4495 struct constraint_expr *c2;
4496 while (rhsc.length () > 0)
4498 c2 = &rhsc.last ();
4499 process_constraint (new_constraint (*c, *c2));
4500 rhsc.pop ();
4505 /* In IPA mode, we need to generate constraints to pass call
4506 arguments through their calls. There are two cases,
4507 either a GIMPLE_CALL returning a value, or just a plain
4508 GIMPLE_CALL when we are not.
4510 In non-ipa mode, we need to generate constraints for each
4511 pointer passed by address. */
4512 else if (is_gimple_call (t))
4513 find_func_aliases_for_call (t);
4515 /* Otherwise, just a regular assignment statement. Only care about
4516 operations with pointer result, others are dealt with as escape
4517 points if they have pointer operands. */
4518 else if (is_gimple_assign (t))
4520 /* Otherwise, just a regular assignment statement. */
4521 tree lhsop = gimple_assign_lhs (t);
4522 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4524 if (rhsop && TREE_CLOBBER_P (rhsop))
4525 /* Ignore clobbers, they don't actually store anything into
4526 the LHS. */
4528 else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4529 do_structure_copy (lhsop, rhsop);
4530 else
4532 enum tree_code code = gimple_assign_rhs_code (t);
4534 get_constraint_for (lhsop, &lhsc);
4536 if (code == POINTER_PLUS_EXPR)
4537 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4538 gimple_assign_rhs2 (t), &rhsc);
4539 else if (code == BIT_AND_EXPR
4540 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
4542 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4543 the pointer. Handle it by offsetting it by UNKNOWN. */
4544 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4545 NULL_TREE, &rhsc);
4547 else if ((CONVERT_EXPR_CODE_P (code)
4548 && !(POINTER_TYPE_P (gimple_expr_type (t))
4549 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4550 || gimple_assign_single_p (t))
4551 get_constraint_for_rhs (rhsop, &rhsc);
4552 else if (code == COND_EXPR)
4554 /* The result is a merge of both COND_EXPR arms. */
4555 vec<ce_s> tmp = vNULL;
4556 struct constraint_expr *rhsp;
4557 unsigned i;
4558 get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
4559 get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
4560 FOR_EACH_VEC_ELT (tmp, i, rhsp)
4561 rhsc.safe_push (*rhsp);
4562 tmp.release ();
4564 else if (truth_value_p (code))
4565 /* Truth value results are not pointer (parts). Or at least
4566 very very unreasonable obfuscation of a part. */
4568 else
4570 /* All other operations are merges. */
4571 vec<ce_s> tmp = vNULL;
4572 struct constraint_expr *rhsp;
4573 unsigned i, j;
4574 get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
4575 for (i = 2; i < gimple_num_ops (t); ++i)
4577 get_constraint_for_rhs (gimple_op (t, i), &tmp);
4578 FOR_EACH_VEC_ELT (tmp, j, rhsp)
4579 rhsc.safe_push (*rhsp);
4580 tmp.truncate (0);
4582 tmp.release ();
4584 process_all_all_constraints (lhsc, rhsc);
4586 /* If there is a store to a global variable the rhs escapes. */
4587 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4588 && DECL_P (lhsop)
4589 && is_global_var (lhsop)
4590 && (!in_ipa_mode
4591 || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
4592 make_escape_constraint (rhsop);
4594 /* Handle escapes through return. */
4595 else if (gimple_code (t) == GIMPLE_RETURN
4596 && gimple_return_retval (t) != NULL_TREE)
4598 fi = NULL;
4599 if (!in_ipa_mode
4600 || !(fi = get_vi_for_tree (cfun->decl)))
4601 make_escape_constraint (gimple_return_retval (t));
4602 else if (in_ipa_mode
4603 && fi != NULL)
4605 struct constraint_expr lhs ;
4606 struct constraint_expr *rhsp;
4607 unsigned i;
4609 lhs = get_function_part_constraint (fi, fi_result);
4610 get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
4611 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4612 process_constraint (new_constraint (lhs, *rhsp));
4615 /* Handle asms conservatively by adding escape constraints to everything. */
4616 else if (gimple_code (t) == GIMPLE_ASM)
4618 unsigned i, noutputs;
4619 const char **oconstraints;
4620 const char *constraint;
4621 bool allows_mem, allows_reg, is_inout;
4623 noutputs = gimple_asm_noutputs (t);
4624 oconstraints = XALLOCAVEC (const char *, noutputs);
4626 for (i = 0; i < noutputs; ++i)
4628 tree link = gimple_asm_output_op (t, i);
4629 tree op = TREE_VALUE (link);
4631 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4632 oconstraints[i] = constraint;
4633 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4634 &allows_reg, &is_inout);
4636 /* A memory constraint makes the address of the operand escape. */
4637 if (!allows_reg && allows_mem)
4638 make_escape_constraint (build_fold_addr_expr (op));
4640 /* The asm may read global memory, so outputs may point to
4641 any global memory. */
4642 if (op)
4644 vec<ce_s> lhsc = vNULL;
4645 struct constraint_expr rhsc, *lhsp;
4646 unsigned j;
4647 get_constraint_for (op, &lhsc);
4648 rhsc.var = nonlocal_id;
4649 rhsc.offset = 0;
4650 rhsc.type = SCALAR;
4651 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4652 process_constraint (new_constraint (*lhsp, rhsc));
4653 lhsc.release ();
4656 for (i = 0; i < gimple_asm_ninputs (t); ++i)
4658 tree link = gimple_asm_input_op (t, i);
4659 tree op = TREE_VALUE (link);
4661 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4663 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
4664 &allows_mem, &allows_reg);
4666 /* A memory constraint makes the address of the operand escape. */
4667 if (!allows_reg && allows_mem)
4668 make_escape_constraint (build_fold_addr_expr (op));
4669 /* Strictly we'd only need the constraint to ESCAPED if
4670 the asm clobbers memory, otherwise using something
4671 along the lines of per-call clobbers/uses would be enough. */
4672 else if (op)
4673 make_escape_constraint (op);
4677 rhsc.release ();
4678 lhsc.release ();
4682 /* Create a constraint adding to the clobber set of FI the memory
4683 pointed to by PTR. */
4685 static void
4686 process_ipa_clobber (varinfo_t fi, tree ptr)
4688 vec<ce_s> ptrc = vNULL;
4689 struct constraint_expr *c, lhs;
4690 unsigned i;
4691 get_constraint_for_rhs (ptr, &ptrc);
4692 lhs = get_function_part_constraint (fi, fi_clobbers);
4693 FOR_EACH_VEC_ELT (ptrc, i, c)
4694 process_constraint (new_constraint (lhs, *c));
4695 ptrc.release ();
4698 /* Walk statement T setting up clobber and use constraints according to the
4699 references found in T. This function is a main part of the
4700 IPA constraint builder. */
4702 static void
4703 find_func_clobbers (gimple origt)
4705 gimple t = origt;
4706 vec<ce_s> lhsc = vNULL;
4707 vec<ce_s> rhsc = vNULL;
4708 varinfo_t fi;
4710 /* Add constraints for clobbered/used in IPA mode.
4711 We are not interested in what automatic variables are clobbered
4712 or used as we only use the information in the caller to which
4713 they do not escape. */
4714 gcc_assert (in_ipa_mode);
4716 /* If the stmt refers to memory in any way it better had a VUSE. */
4717 if (gimple_vuse (t) == NULL_TREE)
4718 return;
4720 /* We'd better have function information for the current function. */
4721 fi = lookup_vi_for_tree (cfun->decl);
4722 gcc_assert (fi != NULL);
4724 /* Account for stores in assignments and calls. */
4725 if (gimple_vdef (t) != NULL_TREE
4726 && gimple_has_lhs (t))
4728 tree lhs = gimple_get_lhs (t);
4729 tree tem = lhs;
/* Strip handled components to get at the base of the store.  */
4730 while (handled_component_p (tem))
4731 tem = TREE_OPERAND (tem, 0);
/* Only stores whose base is not an automatic variable of this function
   are visible to callers: non-local decls, indirect refs, and MEM_REFs
   not based on the address of a local.  */
4732 if ((DECL_P (tem)
4733 && !auto_var_in_fn_p (tem, cfun->decl))
4734 || INDIRECT_REF_P (tem)
4735 || (TREE_CODE (tem) == MEM_REF
4736 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4737 && auto_var_in_fn_p
4738 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
4740 struct constraint_expr lhsc, *rhsp;
4741 unsigned i;
4742 lhsc = get_function_part_constraint (fi, fi_clobbers)
4743 get_constraint_for_address_of (lhs, &rhsc);
4744 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4745 process_constraint (new_constraint (lhsc, *rhsp));
4746 rhsc.release ();
4750 /* Account for uses in assignments and returns. */
4751 if (gimple_assign_single_p (t)
4752 || (gimple_code (t) == GIMPLE_RETURN
4753 && gimple_return_retval (t) != NULL_TREE))
4755 tree rhs = (gimple_assign_single_p (t)
4756 ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
4757 tree tem = rhs;
4758 while (handled_component_p (tem))
4759 tem = TREE_OPERAND (tem, 0);
/* Same filtering as for stores: uses of automatic variables are not
   interesting to callers.  */
4760 if ((DECL_P (tem)
4761 && !auto_var_in_fn_p (tem, cfun->decl))
4762 || INDIRECT_REF_P (tem)
4763 || (TREE_CODE (tem) == MEM_REF
4764 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4765 && auto_var_in_fn_p
4766 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
4768 struct constraint_expr lhs, *rhsp;
4769 unsigned i;
4770 lhs = get_function_part_constraint (fi, fi_uses);
4771 get_constraint_for_address_of (rhs, &rhsc);
4772 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4773 process_constraint (new_constraint (lhs, *rhsp));
4774 rhsc.release ();
4778 if (is_gimple_call (t))
4780 varinfo_t cfi = NULL;
4781 tree decl = gimple_call_fndecl (t);
4782 struct constraint_expr lhs, rhs;
4783 unsigned i, j;
4785 /* For builtins we do not have separate function info. For those
4786 we do not generate escapes for we have to generate clobbers/uses. */
4787 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4788 switch (DECL_FUNCTION_CODE (decl))
4790 /* The following functions use and clobber memory pointed to
4791 by their arguments. */
4792 case BUILT_IN_STRCPY:
4793 case BUILT_IN_STRNCPY:
4794 case BUILT_IN_BCOPY:
4795 case BUILT_IN_MEMCPY:
4796 case BUILT_IN_MEMMOVE:
4797 case BUILT_IN_MEMPCPY:
4798 case BUILT_IN_STPCPY:
4799 case BUILT_IN_STPNCPY:
4800 case BUILT_IN_STRCAT:
4801 case BUILT_IN_STRNCAT:
4802 case BUILT_IN_STRCPY_CHK:
4803 case BUILT_IN_STRNCPY_CHK:
4804 case BUILT_IN_MEMCPY_CHK:
4805 case BUILT_IN_MEMMOVE_CHK:
4806 case BUILT_IN_MEMPCPY_CHK:
4807 case BUILT_IN_STPCPY_CHK:
4808 case BUILT_IN_STPNCPY_CHK:
4809 case BUILT_IN_STRCAT_CHK:
4810 case BUILT_IN_STRNCAT_CHK:
/* bcopy takes (src, dest, n) while the others take (dest, src, n),
   hence the argument-index swap below.  */
4812 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4813 == BUILT_IN_BCOPY ? 1 : 0));
4814 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4815 == BUILT_IN_BCOPY ? 0 : 1));
4816 unsigned i;
4817 struct constraint_expr *rhsp, *lhsp;
4818 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4819 lhs = get_function_part_constraint (fi, fi_clobbers);
4820 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4821 process_constraint (new_constraint (lhs, *lhsp));
4822 lhsc.release ();
4823 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4824 lhs = get_function_part_constraint (fi, fi_uses);
4825 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4826 process_constraint (new_constraint (lhs, *rhsp));
4827 rhsc.release ();
4828 return;
4830 /* The following function clobbers memory pointed to by
4831 its argument. */
4832 case BUILT_IN_MEMSET:
4833 case BUILT_IN_MEMSET_CHK:
4835 tree dest = gimple_call_arg (t, 0);
4836 unsigned i;
4837 ce_s *lhsp;
4838 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4839 lhs = get_function_part_constraint (fi, fi_clobbers);
4840 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4841 process_constraint (new_constraint (lhs, *lhsp));
4842 lhsc.release ();
4843 return;
4845 /* The following functions clobber their second and third
4846 arguments. */
4847 case BUILT_IN_SINCOS:
4848 case BUILT_IN_SINCOSF:
4849 case BUILT_IN_SINCOSL:
4851 process_ipa_clobber (fi, gimple_call_arg (t, 1));
4852 process_ipa_clobber (fi, gimple_call_arg (t, 2));
4853 return;
4855 /* The following functions clobber their second argument. */
4856 case BUILT_IN_FREXP:
4857 case BUILT_IN_FREXPF:
4858 case BUILT_IN_FREXPL:
4859 case BUILT_IN_LGAMMA_R:
4860 case BUILT_IN_LGAMMAF_R:
4861 case BUILT_IN_LGAMMAL_R:
4862 case BUILT_IN_GAMMA_R:
4863 case BUILT_IN_GAMMAF_R:
4864 case BUILT_IN_GAMMAL_R:
4865 case BUILT_IN_MODF:
4866 case BUILT_IN_MODFF:
4867 case BUILT_IN_MODFL:
4869 process_ipa_clobber (fi, gimple_call_arg (t, 1));
4870 return;
4872 /* The following functions clobber their third argument. */
4873 case BUILT_IN_REMQUO:
4874 case BUILT_IN_REMQUOF:
4875 case BUILT_IN_REMQUOL:
4877 process_ipa_clobber (fi, gimple_call_arg (t, 2));
4878 return;
4880 /* The following functions neither read nor clobber memory. */
4881 case BUILT_IN_ASSUME_ALIGNED:
4882 case BUILT_IN_FREE:
4883 return;
4884 /* Trampolines are of no interest to us. */
4885 case BUILT_IN_INIT_TRAMPOLINE:
4886 case BUILT_IN_ADJUST_TRAMPOLINE:
4887 return;
4888 case BUILT_IN_VA_START:
4889 case BUILT_IN_VA_END:
4890 return;
4891 /* printf-style functions may have hooks to set pointers to
4892 point to somewhere into the generated string. Leave them
4893 for a later exercise... */
4894 default:
4895 /* Fallthru to general call handling. */;
4898 /* Parameters passed by value are used. */
4899 lhs = get_function_part_constraint (fi, fi_uses);
4900 for (i = 0; i < gimple_call_num_args (t); i++)
4902 struct constraint_expr *rhsp;
4903 tree arg = gimple_call_arg (t, i);
/* Registers and constants carry no memory the callee could read.  */
4905 if (TREE_CODE (arg) == SSA_NAME
4906 || is_gimple_min_invariant (arg))
4907 continue;
4909 get_constraint_for_address_of (arg, &rhsc);
4910 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4911 process_constraint (new_constraint (lhs, *rhsp));
4912 rhsc.release ();
4915 /* Build constraints for propagating clobbers/uses along the
4916 callgraph edges. */
4917 cfi = get_fi_for_callee (t);
4918 if (cfi->id == anything_id)
4920 if (gimple_vdef (t))
4921 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
4922 anything_id);
4923 make_constraint_from (first_vi_for_offset (fi, fi_uses),
4924 anything_id);
4925 return;
4928 /* For callees without function info (that's external functions),
4929 ESCAPED is clobbered and used. */
4930 if (gimple_call_fndecl (t)
4931 && !cfi->is_fn_info)
4933 varinfo_t vi;
4935 if (gimple_vdef (t))
4936 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
4937 escaped_id);
4938 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
4940 /* Also honor the call statement use/clobber info. */
4941 if ((vi = lookup_call_clobber_vi (t)) != NULL)
4942 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
4943 vi->id);
4944 if ((vi = lookup_call_use_vi (t)) != NULL)
4945 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
4946 vi->id);
4947 return;
4950 /* Otherwise the caller clobbers and uses what the callee does.
4951 ??? This should use a new complex constraint that filters
4952 local variables of the callee. */
4953 if (gimple_vdef (t))
4955 lhs = get_function_part_constraint (fi, fi_clobbers);
4956 rhs = get_function_part_constraint (cfi, fi_clobbers);
4957 process_constraint (new_constraint (lhs, rhs));
4959 lhs = get_function_part_constraint (fi, fi_uses);
4960 rhs = get_function_part_constraint (cfi, fi_uses);
4961 process_constraint (new_constraint (lhs, rhs));
4963 else if (gimple_code (t) == GIMPLE_ASM)
4965 /* ??? Ick. We can do better. */
4966 if (gimple_vdef (t))
4967 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
4968 anything_id);
4969 make_constraint_from (first_vi_for_offset (fi, fi_uses),
4970 anything_id);
4973 rhsc.release ();
4977 /* Find the first varinfo in the same variable as START that overlaps with
4978 OFFSET. Return NULL if we can't find one. */
4980 static varinfo_t
4981 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
4983 /* If the offset is outside of the variable, bail out. */
4984 if (offset >= start->fullsize)
4985 return NULL;
4987 /* If we cannot reach offset from start, lookup the first field
4988 and start from there. */
4989 if (start->offset > offset)
4990 start = lookup_vi_for_tree (start->decl);
4992 while (start)
4994 /* We may not find a variable in the field list with the actual
4995 offset when when we have glommed a structure to a variable.
4996 In that case, however, offset should still be within the size
4997 of the variable. */
4998 if (offset >= start->offset
4999 && (offset - start->offset) < start->size)
5000 return start;
5002 start= start->next;
5005 return NULL;
5008 /* Find the first varinfo in the same variable as START that overlaps with
5009 OFFSET. If there is no such varinfo the varinfo directly preceding
5010 OFFSET is returned. */
5012 static varinfo_t
5013 first_or_preceding_vi_for_offset (varinfo_t start,
5014 unsigned HOST_WIDE_INT offset)
5016 /* If we cannot reach offset from start, lookup the first field
5017 and start from there. */
5018 if (start->offset > offset)
5019 start = lookup_vi_for_tree (start->decl);
5021 /* We may not find a variable in the field list with the actual
5022 offset when when we have glommed a structure to a variable.
5023 In that case, however, offset should still be within the size
5024 of the variable.
5025 If we got beyond the offset we look for return the field
5026 directly preceding offset which may be the last field. */
5027 while (start->next
5028 && offset >= start->offset
5029 && !((offset - start->offset) < start->size))
5030 start = start->next;
5032 return start;
5036 /* This structure is used during pushing fields onto the fieldstack
5037 to track the offset of the field, since bitpos_of_field gives it
5038 relative to its immediate containing type, and we want it relative
5039 to the ultimate containing object. */
5041 struct fieldoff
5043 /* Offset from the base of the base containing object to this field. */
5044 HOST_WIDE_INT offset;
5046 /* Size, in bits, of the field. */
5047 unsigned HOST_WIDE_INT size;
/* True if the field has no constant DECL_SIZE; SIZE is then set to -1
   by push_fields_onto_fieldstack.  */
5049 unsigned has_unknown_size : 1;
/* True if the field's type is known to contain pointers
   (see field_must_have_pointers).  */
5051 unsigned must_have_pointers : 1;
/* Conservative flag: the field may contain pointers.  */
5053 unsigned may_have_pointers : 1;
/* True if the field is a restrict-qualified pointer of known size.  */
5055 unsigned only_restrict_pointers : 1;
/* NOTE: field order matters -- positional aggregate initializers of
   fieldoff_s elsewhere in this file depend on it.  */
5057 typedef struct fieldoff fieldoff_s;
5060 /* qsort comparison function for two fieldoff's PA and PB */
5062 static int
5063 fieldoff_compare (const void *pa, const void *pb)
5065 const fieldoff_s *foa = (const fieldoff_s *)pa;
5066 const fieldoff_s *fob = (const fieldoff_s *)pb;
5067 unsigned HOST_WIDE_INT foasize, fobsize;
5069 if (foa->offset < fob->offset)
5070 return -1;
5071 else if (foa->offset > fob->offset)
5072 return 1;
5074 foasize = foa->size;
5075 fobsize = fob->size;
5076 if (foasize < fobsize)
5077 return -1;
5078 else if (foasize > fobsize)
5079 return 1;
5080 return 0;
5083 /* Sort a fieldstack according to the field offset and sizes. */
5084 static void
5085 sort_fieldstack (vec<fieldoff_s> fieldstack)
/* fieldoff_compare orders by offset first, size second.  */
5087 fieldstack.qsort (fieldoff_compare);
5090 /* Return true if T is a type that can have subvars. */
5092 static inline bool
5093 type_can_have_subvars (const_tree t)
5095 /* Aggregates without overlapping fields can have subvars. */
5096 return TREE_CODE (t) == RECORD_TYPE;
5099 /* Return true if V is a tree that we can have subvars for.
5100 Normally, this is any aggregate type. Also complex
5101 types which are not gimple registers can have subvars. */
5103 static inline bool
5104 var_can_have_subvars (const_tree v)
5106 /* Volatile variables should never have subvars. */
5107 if (TREE_THIS_VOLATILE (v))
5108 return false;
5110 /* Non decls or memory tags can never have subvars. */
5111 if (!DECL_P (v))
5112 return false;
5114 return type_can_have_subvars (TREE_TYPE (v));
5117 /* Return true if T is a type that does contain pointers. */
5119 static bool
5120 type_must_have_pointers (tree type)
5122 if (POINTER_TYPE_P (type))
5123 return true;
5125 if (TREE_CODE (type) == ARRAY_TYPE)
5126 return type_must_have_pointers (TREE_TYPE (type));
5128 /* A function or method can have pointers as arguments, so track
5129 those separately. */
5130 if (TREE_CODE (type) == FUNCTION_TYPE
5131 || TREE_CODE (type) == METHOD_TYPE)
5132 return true;
5134 return false;
5137 static bool
5138 field_must_have_pointers (tree t)
5140 return type_must_have_pointers (TREE_TYPE (t));
5143 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5144 the fields of TYPE onto fieldstack, recording their offsets along
5145 the way.
5147 OFFSET is used to keep track of the offset in this entire
5148 structure, rather than just the immediately containing structure.
5149 Returns false if the caller is supposed to handle the field we
5150 recursed for. */
5152 static bool
5153 push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
5154 HOST_WIDE_INT offset)
5156 tree field;
5157 bool empty_p = true;
5159 if (TREE_CODE (type) != RECORD_TYPE)
5160 return false;
5162 /* If the vector of fields is growing too big, bail out early.
5163 Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
5164 sure this fails. */
5165 if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5166 return false;
5168 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5169 if (TREE_CODE (field) == FIELD_DECL)
5171 bool push = false;
5172 HOST_WIDE_INT foff = bitpos_of_field (field);
/* Push the field itself when it cannot be decomposed into subvars
   (including union members); otherwise recurse into its type.  */
5174 if (!var_can_have_subvars (field)
5175 || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
5176 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
5177 push = true;
5178 else if (!push_fields_onto_fieldstack
5179 (TREE_TYPE (field), fieldstack, offset + foff)
5180 && (DECL_SIZE (field)
5181 && !integer_zerop (DECL_SIZE (field))))
5182 /* Empty structures may have actual size, like in C++. So
5183 see if we didn't push any subfields and the size is
5184 nonzero, push the field onto the stack. */
5185 push = true;
5187 if (push)
5189 fieldoff_s *pair = NULL;
5190 bool has_unknown_size = false;
5191 bool must_have_pointers_p;
5193 if (!fieldstack->is_empty ())
5194 pair = &fieldstack->last ();
5196 /* If there isn't anything at offset zero, create sth. */
/* The pad entry below is positional: {offset = 0, size = offset+foff,
   has_unknown_size/must/may/only_restrict = false}.  */
5197 if (!pair
5198 && offset + foff != 0)
5200 fieldoff_s e = {0, offset + foff, false, false, false, false};
5201 pair = fieldstack->safe_push (e);
5204 if (!DECL_SIZE (field)
5205 || !host_integerp (DECL_SIZE (field), 1))
5206 has_unknown_size = true;
5208 /* If adjacent fields do not contain pointers merge them. */
5209 must_have_pointers_p = field_must_have_pointers (field);
5210 if (pair
5211 && !has_unknown_size
5212 && !must_have_pointers_p
5213 && !pair->must_have_pointers
5214 && !pair->has_unknown_size
5215 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5217 pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
5219 else
5221 fieldoff_s e;
5222 e.offset = offset + foff;
5223 e.has_unknown_size = has_unknown_size;
5224 if (!has_unknown_size)
5225 e.size = TREE_INT_CST_LOW (DECL_SIZE (field));
5226 else
5227 e.size = -1;
5228 e.must_have_pointers = must_have_pointers_p;
5229 e.may_have_pointers = true;
5230 e.only_restrict_pointers
5231 = (!has_unknown_size
5232 && POINTER_TYPE_P (TREE_TYPE (field))
5233 && TYPE_RESTRICT (TREE_TYPE (field)));
5234 fieldstack->safe_push (e);
5238 empty_p = false;
/* Return whether we pushed at least one field.  */
5241 return !empty_p;
5244 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5245 if it is a varargs function. */
5247 static unsigned int
5248 count_num_arguments (tree decl, bool *is_varargs)
5250 unsigned int num = 0;
5251 tree t;
5253 /* Capture named arguments for K&R functions. They do not
5254 have a prototype and thus no TYPE_ARG_TYPES. */
5255 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5256 ++num;
5258 /* Check if the function has variadic arguments. */
5259 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5260 if (TREE_VALUE (t) == void_type_node)
5261 break;
5262 if (!t)
5263 *is_varargs = true;
5265 return num;
5268 /* Creation function node for DECL, using NAME, and return the index
5269 of the variable we've created for the function. */
/* The function is modeled as a chain of varinfos: the function itself,
   then sub-variables at offsets fi_clobbers, fi_uses, fi_static_chain,
   fi_result and fi_parm_base + N for each parameter (see the offset
   asserts below, which keep the chain sorted by offset).  */
5271 static varinfo_t
5272 create_function_info_for (tree decl, const char *name)
5274 struct function *fn = DECL_STRUCT_FUNCTION (decl);
5275 varinfo_t vi, prev_vi;
5276 tree arg;
5277 unsigned int i;
5278 bool is_varargs = false;
5279 unsigned int num_args = count_num_arguments (decl, &is_varargs);
5281 /* Create the variable info. */
5283 vi = new_var_info (decl, name);
5284 vi->offset = 0;
5285 vi->size = 1;
5286 vi->fullsize = fi_parm_base + num_args;
5287 vi->is_fn_info = 1;
5288 vi->may_have_pointers = false;
5289 if (is_varargs)
5290 vi->fullsize = ~0;
5291 insert_vi_for_tree (vi->decl, vi);
5293 prev_vi = vi;
5295 /* Create a variable for things the function clobbers and one for
5296 things the function uses. */
5298 varinfo_t clobbervi, usevi;
5299 const char *newname;
5300 char *tempname;
5302 asprintf (&tempname, "%s.clobber", name);
5303 newname = ggc_strdup (tempname);
5304 free (tempname);
5306 clobbervi = new_var_info (NULL, newname);
5307 clobbervi->offset = fi_clobbers;
5308 clobbervi->size = 1;
5309 clobbervi->fullsize = vi->fullsize;
5310 clobbervi->is_full_var = true;
5311 clobbervi->is_global_var = false;
5312 gcc_assert (prev_vi->offset < clobbervi->offset);
5313 prev_vi->next = clobbervi;
5314 prev_vi = clobbervi;
5316 asprintf (&tempname, "%s.use", name);
5317 newname = ggc_strdup (tempname);
5318 free (tempname);
5320 usevi = new_var_info (NULL, newname);
5321 usevi->offset = fi_uses;
5322 usevi->size = 1;
5323 usevi->fullsize = vi->fullsize;
5324 usevi->is_full_var = true;
5325 usevi->is_global_var = false;
5326 gcc_assert (prev_vi->offset < usevi->offset);
5327 prev_vi->next = usevi;
5328 prev_vi = usevi;
5331 /* And one for the static chain. */
5332 if (fn->static_chain_decl != NULL_TREE)
5334 varinfo_t chainvi;
5335 const char *newname;
5336 char *tempname;
5338 asprintf (&tempname, "%s.chain", name);
5339 newname = ggc_strdup (tempname);
5340 free (tempname);
5342 chainvi = new_var_info (fn->static_chain_decl, newname);
5343 chainvi->offset = fi_static_chain;
5344 chainvi->size = 1;
5345 chainvi->fullsize = vi->fullsize;
5346 chainvi->is_full_var = true;
5347 chainvi->is_global_var = false;
5348 gcc_assert (prev_vi->offset < chainvi->offset);
5349 prev_vi->next = chainvi;
5350 prev_vi = chainvi;
5351 insert_vi_for_tree (fn->static_chain_decl, chainvi);
5354 /* Create a variable for the return var. */
5355 if (DECL_RESULT (decl) != NULL
5356 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
5358 varinfo_t resultvi;
5359 const char *newname;
5360 char *tempname;
5361 tree resultdecl = decl;
5363 if (DECL_RESULT (decl))
5364 resultdecl = DECL_RESULT (decl);
5366 asprintf (&tempname, "%s.result", name);
5367 newname = ggc_strdup (tempname);
5368 free (tempname);
5370 resultvi = new_var_info (resultdecl, newname);
5371 resultvi->offset = fi_result;
5372 resultvi->size = 1;
5373 resultvi->fullsize = vi->fullsize;
5374 resultvi->is_full_var = true;
5375 if (DECL_RESULT (decl))
5376 resultvi->may_have_pointers = true;
5377 gcc_assert (prev_vi->offset < resultvi->offset);
5378 prev_vi->next = resultvi;
5379 prev_vi = resultvi;
5380 if (DECL_RESULT (decl))
5381 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
5384 /* Set up variables for each argument. */
5385 arg = DECL_ARGUMENTS (decl);
5386 for (i = 0; i < num_args; i++)
5388 varinfo_t argvi;
5389 const char *newname;
5390 char *tempname;
5391 tree argdecl = decl;
5393 if (arg)
5394 argdecl = arg;
5396 asprintf (&tempname, "%s.arg%d", name, i);
5397 newname = ggc_strdup (tempname);
5398 free (tempname);
5400 argvi = new_var_info (argdecl, newname);
5401 argvi->offset = fi_parm_base + i;
5402 argvi->size = 1;
5403 argvi->is_full_var = true;
5404 argvi->fullsize = vi->fullsize;
5405 if (arg)
5406 argvi->may_have_pointers = true;
5407 gcc_assert (prev_vi->offset < argvi->offset);
5408 prev_vi->next = argvi;
5409 prev_vi = argvi;
5410 if (arg)
5412 insert_vi_for_tree (arg, argvi);
5413 arg = DECL_CHAIN (arg);
5417 /* Add one representative for all further args. */
5418 if (is_varargs)
5420 varinfo_t argvi;
5421 const char *newname;
5422 char *tempname;
5423 tree decl;
5425 asprintf (&tempname, "%s.varargs", name);
5426 newname = ggc_strdup (tempname);
5427 free (tempname);
5429 /* We need sth that can be pointed to for va_start. */
5430 decl = build_fake_var_decl (ptr_type_node);
5432 argvi = new_var_info (decl, newname);
5433 argvi->offset = fi_parm_base + num_args;
5434 argvi->size = ~0;
5435 argvi->is_full_var = true;
5436 argvi->is_heap_var = true;
5437 argvi->fullsize = vi->fullsize;
5438 gcc_assert (prev_vi->offset < argvi->offset);
5439 prev_vi->next = argvi;
5440 prev_vi = argvi;
/* Return the head varinfo representing the function itself.  */
5443 return vi;
5447 /* Return true if FIELDSTACK contains fields that overlap.
5448 FIELDSTACK is assumed to be sorted by offset. */
5450 static bool
5451 check_for_overlaps (vec<fieldoff_s> fieldstack)
5453 fieldoff_s *fo = NULL;
5454 unsigned int i;
5455 HOST_WIDE_INT lastoffset = -1;
5457 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5459 if (fo->offset == lastoffset)
5460 return true;
5461 lastoffset = fo->offset;
5463 return false;
5466 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5467 This will also create any varinfo structures necessary for fields
5468 of DECL. */
5470 static varinfo_t
5471 create_variable_info_for_1 (tree decl, const char *name)
5473 varinfo_t vi, newvi;
5474 tree decl_type = TREE_TYPE (decl);
5475 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5476 vec<fieldoff_s> fieldstack = vNULL;
5477 fieldoff_s *fo;
5478 unsigned int i;
/* Variables without a constant size get a single unknown-size varinfo.  */
5480 if (!declsize
5481 || !host_integerp (declsize, 1))
5483 vi = new_var_info (decl, name);
5484 vi->offset = 0;
5485 vi->size = ~0;
5486 vi->fullsize = ~0;
5487 vi->is_unknown_size_var = true;
5488 vi->is_full_var = true;
5489 vi->may_have_pointers = true;
5490 return vi;
5493 /* Collect field information. */
5494 if (use_field_sensitive
5495 && var_can_have_subvars (decl)
5496 /* ??? Force us to not use subfields for global initializers
5497 in IPA mode. Else we'd have to parse arbitrary initializers. */
5498 && !(in_ipa_mode
5499 && is_global_var (decl)
5500 && DECL_INITIAL (decl)))
5502 fieldoff_s *fo = NULL;
5503 bool notokay = false;
5504 unsigned int i;
5506 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
/* Bail out on fields with unknown size or negative offsets.  */
5508 for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
5509 if (fo->has_unknown_size
5510 || fo->offset < 0)
5512 notokay = true;
5513 break;
5516 /* We can't sort them if we have a field with a variable sized type,
5517 which will make notokay = true. In that case, we are going to return
5518 without creating varinfos for the fields anyway, so sorting them is a
5519 waste to boot. */
5520 if (!notokay)
5522 sort_fieldstack (fieldstack);
5523 /* Due to some C++ FE issues, like PR 22488, we might end up
5524 what appear to be overlapping fields even though they,
5525 in reality, do not overlap. Until the C++ FE is fixed,
5526 we will simply disable field-sensitivity for these cases. */
5527 notokay = check_for_overlaps (fieldstack);
5530 if (notokay)
5531 fieldstack.release ();
5534 /* If we didn't end up collecting sub-variables create a full
5535 variable for the decl. */
5536 if (fieldstack.length () <= 1
5537 || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5539 vi = new_var_info (decl, name);
5540 vi->offset = 0;
5541 vi->may_have_pointers = true;
5542 vi->fullsize = TREE_INT_CST_LOW (declsize);
5543 vi->size = vi->fullsize;
5544 vi->is_full_var = true;
5545 fieldstack.release ();
5546 return vi;
/* Otherwise create one varinfo per field, chained via ->next in
   fieldstack (i.e. offset) order.  */
5549 vi = new_var_info (decl, name);
5550 vi->fullsize = TREE_INT_CST_LOW (declsize);
5551 for (i = 0, newvi = vi;
5552 fieldstack.iterate (i, &fo);
5553 ++i, newvi = newvi->next)
5555 const char *newname = "NULL";
5556 char *tempname;
/* Only build the descriptive "name.offset+size" names when dumping.  */
5558 if (dump_file)
5560 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
5561 "+" HOST_WIDE_INT_PRINT_DEC, name, fo->offset, fo->size);
5562 newname = ggc_strdup (tempname);
5563 free (tempname);
5565 newvi->name = newname;
5566 newvi->offset = fo->offset;
5567 newvi->size = fo->size;
5568 newvi->fullsize = vi->fullsize;
5569 newvi->may_have_pointers = fo->may_have_pointers;
5570 newvi->only_restrict_pointers = fo->only_restrict_pointers;
5571 if (i + 1 < fieldstack.length ())
5572 newvi->next = new_var_info (decl, name);
5575 fieldstack.release ();
5577 return vi;
/* Create varinfo(s) for DECL named NAME, record them in the
   tree -> varinfo map and, for global VAR_DECLs, emit the initial
   constraints (restrict handling, NONLOCAL copies, and in IPA mode
   constraints derived from the initializer).  Returns the id of the
   head varinfo.  */
5580 static unsigned int
5581 create_variable_info_for (tree decl, const char *name)
5583 varinfo_t vi = create_variable_info_for_1 (decl, name);
5584 unsigned int id = vi->id;
5586 insert_vi_for_tree (decl, vi);
5588 if (TREE_CODE (decl) != VAR_DECL)
5589 return id;
5591 /* Create initial constraints for globals. */
5593 for (; vi; vi = vi->next)
5594 if (!vi->may_have_pointers
5595 || !vi->is_global_var)
5596 continue;
5598 /* Mark global restrict qualified pointers. */
5599 if ((POINTER_TYPE_P (TREE_TYPE (decl))
5600 && TYPE_RESTRICT (TREE_TYPE (decl)))
5601 || vi->only_restrict_pointers)
5603 make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
5604 continue;
5607 /* In non-IPA mode the initializer from nonlocal is all we need. */
5608 if (!in_ipa_mode
5609 || DECL_HARD_REGISTER (decl))
5610 make_copy_constraint (vi, nonlocal_id);
5612 /* In IPA mode parse the initializer and generate proper constraints
5613 for it. */
5614 else
5616 struct varpool_node *vnode = varpool_get_node (decl);
5618 /* For escaped variables initialize them from nonlocal. */
5619 if (!varpool_all_refs_explicit_p (vnode))
5620 make_copy_constraint (vi, nonlocal_id);
5622 /* If this is a global variable with an initializer and we are in
5623 IPA mode generate constraints for it. */
5624 if (DECL_INITIAL (decl)
5625 && vnode->analyzed)
5627 vec<ce_s> rhsc = vNULL;
5628 struct constraint_expr lhs, *rhsp;
5629 unsigned i;
5630 get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
5631 lhs.var = vi->id;
5632 lhs.offset = 0;
5633 lhs.type = SCALAR;
5634 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5635 process_constraint (new_constraint (lhs, *rhsp));
5636 /* If this is a variable that escapes from the unit
5637 the initializer escapes as well. */
5638 if (!varpool_all_refs_explicit_p (vnode))
5640 lhs.var = escaped_id;
5641 lhs.offset = 0;
5642 lhs.type = SCALAR;
5643 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5644 process_constraint (new_constraint (lhs, *rhsp));
5646 rhsc.release ();
5651 return id;
5654 /* Print out the points-to solution for VAR to FILE. */
5656 static void
5657 dump_solution_for_var (FILE *file, unsigned int var)
5659 varinfo_t vi = get_varinfo (var);
5660 unsigned int i;
5661 bitmap_iterator bi;
5663 /* Dump the solution for unified vars anyway, this avoids difficulties
5664 in scanning dumps in the testsuite. */
5665 fprintf (file, "%s = { ", vi->name);
5666 vi = get_varinfo (find (var));
5667 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5668 fprintf (file, "%s ", get_varinfo (i)->name);
5669 fprintf (file, "}");
5671 /* But note when the variable was unified. */
5672 if (vi->id != var)
5673 fprintf (file, " same as %s", vi->name);
5675 fprintf (file, "\n");
5678 /* Print the points-to solution for VAR to stdout. Convenience
wrapper around dump_solution_for_var for use from a debugger. */
5680 DEBUG_FUNCTION void
5681 debug_solution_for_var (unsigned int var)
5683 dump_solution_for_var (stdout, var);
5686 /* Create varinfo structures for all of the variables in the
5687 function for intraprocedural mode. */
5689 static void
5690 intra_create_variable_infos (void)
5692 tree t;
5694 /* For each incoming pointer argument arg, create the constraint ARG
5695 = NONLOCAL or a dummy variable if it is a restrict qualified
5696 passed-by-reference argument. */
5697 for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
5699 varinfo_t p = get_vi_for_tree (t);
5701 /* For restrict qualified pointers to objects passed by
5702 reference build a real representative for the pointed-to object.
5703 Treat restrict qualified references the same. */
5704 if (TYPE_RESTRICT (TREE_TYPE (t))
5705 && ((DECL_BY_REFERENCE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
5706 || TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
5707 && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t))))
5709 struct constraint_expr lhsc, rhsc;
5710 varinfo_t vi;
/* Build a fake decl standing in for the pointed-to object and make
   the parameter point to (take the address of) it.  */
5711 tree heapvar = build_fake_var_decl (TREE_TYPE (TREE_TYPE (t)));
5712 DECL_EXTERNAL (heapvar) = 1;
5713 vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
5714 insert_vi_for_tree (heapvar, vi);
5715 lhsc.var = p->id;
5716 lhsc.type = SCALAR;
5717 lhsc.offset = 0;
5718 rhsc.var = vi->id;
5719 rhsc.type = ADDRESSOF;
5720 rhsc.offset = 0;
5721 process_constraint (new_constraint (lhsc, rhsc));
5722 for (; vi; vi = vi->next)
5723 if (vi->may_have_pointers)
5725 if (vi->only_restrict_pointers)
5726 make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
5727 else
5728 make_copy_constraint (vi, nonlocal_id);
5730 continue;
/* Non-by-reference restrict pointer parameters get their own
   restrict representative; everything else may point to NONLOCAL.  */
5733 if (POINTER_TYPE_P (TREE_TYPE (t))
5734 && TYPE_RESTRICT (TREE_TYPE (t)))
5735 make_constraint_from_global_restrict (p, "PARM_RESTRICT");
5736 else
5738 for (; p; p = p->next)
5740 if (p->only_restrict_pointers)
5741 make_constraint_from_global_restrict (p, "PARM_RESTRICT");
5742 else if (p->may_have_pointers)
5743 make_constraint_from (p, nonlocal_id);
5748 /* Add a constraint for a result decl that is passed by reference. */
5749 if (DECL_RESULT (cfun->decl)
5750 && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
5752 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));
5754 for (p = result_vi; p; p = p->next)
5755 make_constraint_from (p, nonlocal_id);
5758 /* Add a constraint for the incoming static chain parameter. */
5759 if (cfun->static_chain_decl != NULL_TREE)
5761 varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);
5763 for (p = chain_vi; p; p = p->next)
5764 make_constraint_from (p, nonlocal_id);
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  bitmap pt_vars;	/* The canonical points-to bitmap.  */
  hashval_t hashcode;	/* Cached hash of PT_VARS, computed once.  */
} *shared_bitmap_info_t;

typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;

/* Hash table of shared_bitmap_info entries, keyed by bitmap contents.  */
static htab_t shared_bitmap_table;
5780 /* Hash function for a shared_bitmap_info_t */
5782 static hashval_t
5783 shared_bitmap_hash (const void *p)
5785 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
5786 return bi->hashcode;
5789 /* Equality function for two shared_bitmap_info_t's. */
5791 static int
5792 shared_bitmap_eq (const void *p1, const void *p2)
5794 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
5795 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
5796 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
5799 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5800 existing instance if there is one, NULL otherwise. */
5802 static bitmap
5803 shared_bitmap_lookup (bitmap pt_vars)
5805 void **slot;
5806 struct shared_bitmap_info sbi;
5808 sbi.pt_vars = pt_vars;
5809 sbi.hashcode = bitmap_hash (pt_vars);
5811 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
5812 sbi.hashcode, NO_INSERT);
5813 if (!slot)
5814 return NULL;
5815 else
5816 return ((shared_bitmap_info_t) *slot)->pt_vars;
5820 /* Add a bitmap to the shared bitmap hashtable. */
5822 static void
5823 shared_bitmap_add (bitmap pt_vars)
5825 void **slot;
5826 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
5828 sbi->pt_vars = pt_vars;
5829 sbi->hashcode = bitmap_hash (pt_vars);
5831 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
5832 sbi->hashcode, INSERT);
5833 gcc_assert (!*slot);
5834 *slot = (void *) sbi;
/* Set bits in INTO corresponding to the variable uids in solution set
   FROM.  PT is updated as a side effect: its vars_contains_global flag
   is set if any global variable enters the set.  */

static void
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
{
  unsigned int i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* The only artificial variables that are allowed in a may-alias
	 set are heap variables.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;

      if (TREE_CODE (vi->decl) == VAR_DECL
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* If we are in IPA mode we will not recompute points-to
	     sets after inlining so make sure they stay valid.  */
	  if (in_ipa_mode
	      && !DECL_PT_UID_SET_P (vi->decl))
	    SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));

	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables.  */
	  bitmap_set_bit (into, DECL_PT_UID (vi->decl));
	  if (vi->is_global_var)
	    pt->vars_contains_global = true;
	}
    }
}
/* Compute the points-to solution for the variable ORIG_VI and return
   it.  Results are cached in FINAL_SOLUTIONS keyed on the
   representative variable, and the variable bitmaps are shared via
   the shared-bitmap table when identical.  */

static struct pt_solution
find_what_var_points_to (varinfo_t orig_vi)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;
  void **slot;
  struct pt_solution *pt;

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (find (orig_vi->id));

  /* See if we have already computed the solution and return it.  */
  slot = pointer_map_insert (final_solutions, vi);
  if (*slot != NULL)
    return *(struct pt_solution *)*slot;

  *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
  memset (pt, 0, sizeof (struct pt_solution));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      /* Deliberately shadows the outer VI: this is the solution
	 member, not the variable being solved for.  */
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->is_heap_var)
	    /* We represent heapvars in the points-to set properly.  */
	    ;
	  else if (vi->id == readonly_id)
	    /* Nobody cares.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything)
    return *pt;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (finished_solution, vi->solution, pt);
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      /* An identical bitmap already exists; reuse it and discard the
	 one just built.  */
      pt->vars = result;
      bitmap_clear (finished_solution);
    }

  return *pt;
}
5956 /* Given a pointer variable P, fill in its points-to set. */
5958 static void
5959 find_what_p_points_to (tree p)
5961 struct ptr_info_def *pi;
5962 tree lookup_p = p;
5963 varinfo_t vi;
5965 /* For parameters, get at the points-to set for the actual parm
5966 decl. */
5967 if (TREE_CODE (p) == SSA_NAME
5968 && SSA_NAME_IS_DEFAULT_DEF (p)
5969 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
5970 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
5971 lookup_p = SSA_NAME_VAR (p);
5973 vi = lookup_vi_for_tree (lookup_p);
5974 if (!vi)
5975 return;
5977 pi = get_ptr_info (p);
5978 pi->pt = find_what_var_points_to (vi);
/* Query statistics for points-to solutions.  Counts how often the
   disambiguation predicates below answered may-alias vs. no-alias.  */

static struct {
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
5991 void
5992 dump_pta_stats (FILE *s)
5994 fprintf (s, "\nPTA query stats:\n");
5995 fprintf (s, " pt_solution_includes: "
5996 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
5997 HOST_WIDE_INT_PRINT_DEC" queries\n",
5998 pta_stats.pt_solution_includes_no_alias,
5999 pta_stats.pt_solution_includes_no_alias
6000 + pta_stats.pt_solution_includes_may_alias);
6001 fprintf (s, " pt_solutions_intersect: "
6002 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6003 HOST_WIDE_INT_PRINT_DEC" queries\n",
6004 pta_stats.pt_solutions_intersect_no_alias,
6005 pta_stats.pt_solutions_intersect_no_alias
6006 + pta_stats.pt_solutions_intersect_may_alias);
/* Reset the points-to solution *PT to a conservative default
   (point to anything).  */

void
pt_solution_reset (struct pt_solution *pt)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->anything = true;
}
/* Set the points-to solution *PT to point only to the variables
   in VARS.  VARS_CONTAINS_GLOBAL specifies whether that contains
   global variables.  */

void
pt_solution_set (struct pt_solution *pt, bitmap vars, bool vars_contains_global)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = vars;
  pt->vars_contains_global = vars_contains_global;
}
/* Set the points-to solution *PT to point only to the variable VAR.  */

void
pt_solution_set_var (struct pt_solution *pt, tree var)
{
  memset (pt, 0, sizeof (struct pt_solution));
  /* Allocate a fresh single-bit set in GC memory, keyed on the decl's
     points-to UID.  */
  pt->vars = BITMAP_GGC_ALLOC ();
  bitmap_set_bit (pt->vars, DECL_PT_UID (var));
  pt->vars_contains_global = is_global_var (var);
}
/* Computes the union of the points-to solutions *DEST and *SRC and
   stores the result in *DEST.  This changes the points-to bitmap
   of *DEST and thus may not be used if that might be shared.
   The points-to bitmap of *SRC and *DEST will not be shared after
   this function if they were not before.  */

static void
pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
{
  dest->anything |= src->anything;
  if (dest->anything)
    {
      /* "Anything" subsumes everything else; collapse to the
	 conservative default.  */
      pt_solution_reset (dest);
      return;
    }

  dest->nonlocal |= src->nonlocal;
  dest->escaped |= src->escaped;
  dest->ipa_escaped |= src->ipa_escaped;
  dest->null |= src->null;
  dest->vars_contains_global |= src->vars_contains_global;
  if (!src->vars)
    return;

  if (!dest->vars)
    dest->vars = BITMAP_GGC_ALLOC ();
  bitmap_ior_into (dest->vars, src->vars);
}
/* Return true if the points-to solution *PT is empty.  */

bool
pt_solution_empty_p (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal)
    return false;

  if (pt->vars
      && !bitmap_empty_p (pt->vars))
    return false;

  /* If the solution includes ESCAPED, check if that is empty.  */
  if (pt->escaped
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    return false;

  /* If the solution includes the IPA ESCAPED set, check if that is
     empty.  */
  if (pt->ipa_escaped
      && !pt_solution_empty_p (&ipa_escaped_pt))
    return false;

  return true;
}
6099 /* Return true if the points-to solution *PT only point to a single var, and
6100 return the var uid in *UID. */
6102 bool
6103 pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
6105 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6106 || pt->null || pt->vars == NULL
6107 || !bitmap_single_bit_set_p (pt->vars))
6108 return false;
6110 *uid = bitmap_first_set_bit (pt->vars);
6111 return true;
/* Return true if the points-to solution *PT includes global memory.  */

bool
pt_solution_includes_global (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal
      || pt->vars_contains_global)
    return true;

  /* Recurse into the placeholder ESCAPED solutions.  */
  if (pt->escaped)
    return pt_solution_includes_global (&cfun->gimple_df->escaped);

  if (pt->ipa_escaped)
    return pt_solution_includes_global (&ipa_escaped_pt);

  /* ??? This predicate is not correct for the IPA-PTA solution
     as we do not properly distinguish between unit escape points
     and global variables.  */
  if (cfun->gimple_df->ipa_pta)
    return true;

  return false;
}
/* Return true if the points-to solution *PT includes the variable
   declaration DECL.  Worker for pt_solution_includes, which adds
   statistics bookkeeping.  */

static bool
pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
{
  if (pt->anything)
    return true;

  if (pt->nonlocal
      && is_global_var (decl))
    return true;

  if (pt->vars
      && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
    return true;

  /* If the solution includes ESCAPED, check it.  */
  if (pt->escaped
      && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
    return true;

  /* If the solution includes the IPA ESCAPED set, check it.  */
  if (pt->ipa_escaped
      && pt_solution_includes_1 (&ipa_escaped_pt, decl))
    return true;

  return false;
}
6169 bool
6170 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6172 bool res = pt_solution_includes_1 (pt, decl);
6173 if (res)
6174 ++pta_stats.pt_solution_includes_may_alias;
6175 else
6176 ++pta_stats.pt_solution_includes_no_alias;
6177 return res;
/* Return true if both points-to solutions PT1 and PT2 have a non-empty
   intersection.  Worker for pt_solutions_intersect, which adds
   statistics bookkeeping.  */

static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias.  */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_global))
      || (pt2->nonlocal
	  && pt1->vars_contains_global))
    return true;

  /* Check the escaped solution if required.  */
  if ((pt1->escaped || pt2->escaped)
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->escaped && pt2->escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->escaped
	   && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
	  || (pt2->escaped
	      && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
	return true;
    }

  /* Check the IPA escaped solution if required.
     ??? Do we need to check the local against the IPA escaped sets?  */
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
      && !pt_solution_empty_p (&ipa_escaped_pt))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->ipa_escaped && pt2->ipa_escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->ipa_escaped
	   && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
	  || (pt2->ipa_escaped
	      && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects.  */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}
6241 bool
6242 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6244 bool res = pt_solutions_intersect_1 (pt1, pt2);
6245 if (res)
6246 ++pta_stats.pt_solutions_intersect_may_alias;
6247 else
6248 ++pta_stats.pt_solutions_intersect_no_alias;
6249 return res;
/* Dump points-to information to OUTFILE.  With TDF_STATS also dumps
   solver statistics.  */

static void
dump_sa_points_to_info (FILE *outfile)
{
  unsigned int i;

  fprintf (outfile, "\nPoints-to sets\n\n");

  if (dump_flags & TDF_STATS)
    {
      fprintf (outfile, "Stats:\n");
      fprintf (outfile, "Total vars:               %d\n", stats.total_vars);
      fprintf (outfile, "Non-pointer vars:          %d\n",
	       stats.nonpointer_vars);
      fprintf (outfile, "Statically unified vars:  %d\n",
	       stats.unified_vars_static);
      fprintf (outfile, "Dynamically unified vars: %d\n",
	       stats.unified_vars_dynamic);
      fprintf (outfile, "Iterations:               %d\n", stats.iterations);
      fprintf (outfile, "Number of edges:          %d\n", stats.num_edges);
      fprintf (outfile, "Number of implicit edges: %d\n",
	       stats.num_implicit_edges);
    }

  /* Dump each variable's solution; non-pointer variables carry no
     useful information.  */
  for (i = 0; i < varmap.length (); i++)
    {
      varinfo_t vi = get_varinfo (i);
      if (!vi->may_have_pointers)
	continue;
      dump_solution_for_var (outfile, i);
    }
}
/* Debug points-to information to stderr.  Convenience wrapper for use
   from a debugger.  */

DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
/* Initialize the always-existing constraint variables for NULL,
   ANYTHING, READONLY, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER.
   The ids these receive must match the corresponding *_id constants,
   which the gcc_asserts below verify.  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;
  varinfo_t var_anything;
  varinfo_t var_nothing;
  varinfo_t var_readonly;
  varinfo_t var_escaped;
  varinfo_t var_nonlocal;
  varinfo_t var_storedanything;
  varinfo_t var_integer;

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  var_nothing = new_var_info (NULL_TREE, "NULL");
  gcc_assert (var_nothing->id == nothing_id);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  var_nothing->may_have_pointers = 0;
  var_nothing->is_global_var = 0;

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  var_anything = new_var_info (NULL_TREE, "ANYTHING");
  gcc_assert (var_anything->id == anything_id);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->next = NULL;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING.  */
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  constraints.safe_push (new_constraint (lhs, rhs));

  /* Create the READONLY variable, used to represent that a variable
     points to readonly memory.  */
  var_readonly = new_var_info (NULL_TREE, "READONLY");
  gcc_assert (var_readonly->id == readonly_id);
  var_readonly->is_artificial_var = 1;
  var_readonly->offset = 0;
  var_readonly->size = ~0;
  var_readonly->fullsize = ~0;
  var_readonly->next = NULL;
  var_readonly->is_special_var = 1;

  /* readonly memory points to anything, in order to make deref
     easier.  In reality, it points to anything the particular
     readonly variable can point to, but we don't track this
     separately.  */
  lhs.type = SCALAR;
  lhs.var = readonly_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = readonly_id;  /* FIXME */
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the ESCAPED variable, used to represent the set of escaped
     memory.  */
  var_escaped = new_var_info (NULL_TREE, "ESCAPED");
  gcc_assert (var_escaped->id == escaped_id);
  var_escaped->is_artificial_var = 1;
  var_escaped->offset = 0;
  var_escaped->size = ~0;
  var_escaped->fullsize = ~0;
  var_escaped->is_special_var = 0;

  /* Create the NONLOCAL variable, used to represent the set of nonlocal
     memory.  */
  var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
  gcc_assert (var_nonlocal->id == nonlocal_id);
  var_nonlocal->is_artificial_var = 1;
  var_nonlocal->offset = 0;
  var_nonlocal->size = ~0;
  var_nonlocal->fullsize = ~0;
  var_nonlocal->is_special_var = 1;

  /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
     whole variable escapes.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = escaped_id;
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (new_constraint (lhs, rhs));

  /* *ESCAPED = NONLOCAL.  This is true because we have to assume
     everything pointed to by escaped points to what global memory can
     point to.  */
  lhs.type = DEREF;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED.  This is true because
     global memory may point to global memory and escaped memory.  */
  lhs.type = SCALAR;
  lhs.var = nonlocal_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
  rhs.type = ADDRESSOF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the STOREDANYTHING variable, used to represent the set of
     variables stored to *ANYTHING.  */
  var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
  gcc_assert (var_storedanything->id == storedanything_id);
  var_storedanything->is_artificial_var = 1;
  var_storedanything->offset = 0;
  var_storedanything->size = ~0;
  var_storedanything->fullsize = ~0;
  var_storedanything->is_special_var = 0;

  /* Create the INTEGER variable, used to represent that a variable points
     to what an INTEGER "points to".  */
  var_integer = new_var_info (NULL_TREE, "INTEGER");
  gcc_assert (var_integer->id == integer_id);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->next = NULL;
  var_integer->is_special_var = 1;

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
/* Initialize things necessary to perform PTA: obstacks, allocation
   pools, the constraint and variable vectors, the lookup maps and the
   base constraint variables.  */

static void
init_alias_vars (void)
{
  use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);

  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraint_pool = create_alloc_pool ("Constraint pool",
				       sizeof (struct constraint), 30);
  variable_info_pool = create_alloc_pool ("Variable info pool",
					  sizeof (struct variable_info), 30);
  constraints.create (8);
  varmap.create (8);
  vi_for_tree = pointer_map_create ();
  call_stmt_vars = pointer_map_create ();

  memset (&stats, 0, sizeof (stats));
  /* Entries are malloc'd by shared_bitmap_add, hence free as the
     deletion callback.  */
  shared_bitmap_table = htab_create (511, shared_bitmap_hash,
				     shared_bitmap_eq, free);
  init_base_vars ();

  gcc_obstack_init (&fake_var_decl_obstack);

  final_solutions = pointer_map_create ();
  gcc_obstack_init (&final_solutions_obstack);
}
/* Remove the REF and ADDRESS edges from GRAPH, as well as all the
   predecessor edges.  Called once the predecessor-graph phase is
   done; afterwards only the real successor edges remain.  */

static void
remove_preds_and_fake_succs (constraint_graph_t graph)
{
  unsigned int i;

  /* Clear the implicit ref and address nodes from the successor
     lists.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      if (graph->succs[i])
	bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
			    FIRST_REF_NODE * 2);
    }

  /* Free the successor list for the non-ref nodes.  */
  for (i = FIRST_REF_NODE; i < graph->size; i++)
    {
      if (graph->succs[i])
	BITMAP_FREE (graph->succs[i]);
    }

  /* Now reallocate the size of the successor list as, and blow away
     the predecessor bitmaps.  */
  graph->size = varmap.length ();
  graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);

  free (graph->implicit_preds);
  graph->implicit_preds = NULL;
  free (graph->preds);
  graph->preds = NULL;
  /* All predecessor bitmaps live on this obstack; release it
     wholesale.  */
  bitmap_obstack_release (&predbitmap_obstack);
}
/* Solve the constraint set: build the constraint graph, perform
   offline variable substitution, then iterate the solver to a fixed
   point.  */

static void
solve_constraints (void)
{
  struct scc_info *si;

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  /* Double the size: the upper half holds the implicit REF nodes.  */
  init_graph (varmap.length () * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);

  build_succ_graph ();

  free_var_substitution_info (si);

  /* Attach complex constraints to graph nodes.  */
  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  solve_graph (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    dump_sa_points_to_info (dump_file);
}
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  */

static void
compute_points_to_sets (void)
{
  basic_block bb;
  unsigned i;
  varinfo_t vi;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();

  intra_create_variable_infos ();

  /* Now walk all statements and build the constraint set.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);

	  /* Virtual PHIs carry no pointer values of their own.  */
	  if (! virtual_operand_p (gimple_phi_result (phi)))
	    find_func_aliases (phi);
	}

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  find_func_aliases (stmt);
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file, 0);
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the points-to set for ESCAPED used for call-clobber analysis.  */
  cfun->gimple_df->escaped = find_what_var_points_to (get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  cfun->gimple_df->escaped.escaped = 0;

  /* Mark escaped HEAP variables as global.  */
  FOR_EACH_VEC_ELT (varmap, i, vi)
    if (vi->is_heap_var
	&& !vi->is_global_var)
      DECL_EXTERNAL (vi->decl) = vi->is_global_var
	= pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);

  /* Compute the points-to sets for pointer SSA_NAMEs.  */
  for (i = 0; i < num_ssa_names; ++i)
    {
      tree ptr = ssa_name (i);
      if (ptr
	  && POINTER_TYPE_P (TREE_TYPE (ptr)))
	find_what_p_points_to (ptr);
    }

  /* Compute the call-used/clobbered sets.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  struct pt_solution *pt;
	  if (!is_gimple_call (stmt))
	    continue;

	  pt = gimple_call_use_set (stmt);
	  if (gimple_call_flags (stmt) & ECF_CONST)
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly used by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }

	  pt = gimple_call_clobber_set (stmt);
	  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly clobbered by calls.  */
	      /* ??? ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }
	}
    }

  timevar_pop (TV_TREE_PTA);
}
/* Delete created points-to sets and free all solver data structures.
   The computed pt_solutions stored in the IL survive; only the
   internal solver state is torn down.  */

static void
delete_points_to_sets (void)
{
  unsigned int i;

  htab_delete (shared_bitmap_table);
  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "Points to sets created:%d\n",
	     stats.points_to_sets_created);

  pointer_map_destroy (vi_for_tree);
  pointer_map_destroy (call_stmt_vars);
  bitmap_obstack_release (&pta_obstack);
  constraints.release ();

  for (i = 0; i < graph->size; i++)
    graph->complex[i].release ();
  free (graph->complex);

  free (graph->rep);
  free (graph->succs);
  free (graph->pe);
  free (graph->pe_rep);
  free (graph->indirect_cycles);
  free (graph);

  varmap.release ();
  free_alloc_pool (variable_info_pool);
  free_alloc_pool (constraint_pool);

  obstack_free (&fake_var_decl_obstack, NULL);

  pointer_map_destroy (final_solutions);
  obstack_free (&final_solutions_obstack, NULL);
}
/* Compute points-to information for every SSA_NAME pointer in the
   current function and compute the transitive closure of escaped
   variables to re-initialize the call-clobber states of local variables.  */

unsigned int
compute_may_aliases (void)
{
  if (cfun->gimple_df->ipa_pta)
    {
      if (dump_file)
	{
	  fprintf (dump_file, "\nNot re-computing points-to information "
		   "because IPA points-to information is available.\n\n");

	  /* But still dump what we have remaining.  */
	  dump_alias_info (dump_file);
	}

      return 0;
    }

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  Compute the reachability set of escaped and call-used
     variables.  */
  compute_points_to_sets ();

  /* Debugging dumps.  */
  if (dump_file)
    dump_alias_info (dump_file);

  /* Deallocate memory used by aliasing data structures and the internal
     points-to solution.  */
  delete_points_to_sets ();

  gcc_assert (!need_ssa_update_p (cfun));

  return 0;
}
/* Gate for the alias passes below: run only when tree points-to
   analysis is enabled.  */

static bool
gate_tree_pta (void)
{
  return flag_tree_pta;
}
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  The pass itself has no execute function; all
   the work happens in the todo.  */

struct gimple_opt_pass pass_build_alias =
{
 {
  GIMPLE_PASS,
  "alias",		    /* name */
  OPTGROUP_NONE,	    /* optinfo_flags */
  gate_tree_pta,	    /* gate */
  NULL,			    /* execute */
  NULL,			    /* sub */
  NULL,			    /* next */
  0,			    /* static_pass_number */
  TV_NONE,		    /* tv_id */
  PROP_cfg | PROP_ssa,	    /* properties_required */
  0,			    /* properties_provided */
  0,			    /* properties_destroyed */
  0,			    /* todo_flags_start */
  TODO_rebuild_alias	    /* todo_flags_finish */
 }
};
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  Identical to pass_build_alias apart from the
   name; no execute function, the finish flag does the work.  */

struct gimple_opt_pass pass_build_ealias =
{
 {
  GIMPLE_PASS,
  "ealias",			/* name */
  OPTGROUP_NONE,		/* optinfo_flags */
  gate_tree_pta,		/* gate */
  NULL,				/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_NONE,			/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_rebuild_alias		/* todo_flags_finish */
 }
};
6870 /* Return true if we should execute IPA PTA. */
6871 static bool
6872 gate_ipa_pta (void)
6874 return (optimize
6875 && flag_ipa_pta
6876 /* Don't bother doing anything if the program has errors. */
6877 && !seen_error ());
/* IPA PTA solutions for ESCAPED.  */
/* NOTE(review): positional aggregate initializer — the leading 'true'
   initializes the first bitfield of struct pt_solution and the trailing
   NULL its bitmap member; verify the order against the struct
   declaration whenever that struct changes.  */
struct pt_solution ipa_escaped_pt
  = { true, false, false, false, false, false, NULL };
6884 /* Associate node with varinfo DATA. Worker for
6885 cgraph_for_node_and_aliases. */
6886 static bool
6887 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
6889 if (node->alias || node->thunk.thunk_p)
6890 insert_vi_for_tree (node->symbol.decl, (varinfo_t)data);
6891 return false;
6894 /* Execute the driver for IPA PTA. */
6895 static unsigned int
6896 ipa_pta_execute (void)
6898 struct cgraph_node *node;
6899 struct varpool_node *var;
6900 int from;
6902 in_ipa_mode = 1;
6904 init_alias_vars ();
6906 if (dump_file && (dump_flags & TDF_DETAILS))
6908 dump_symtab (dump_file);
6909 fprintf (dump_file, "\n");
6912 /* Build the constraints. */
6913 FOR_EACH_DEFINED_FUNCTION (node)
6915 varinfo_t vi;
6916 /* Nodes without a body are not interesting. Especially do not
6917 visit clones at this point for now - we get duplicate decls
6918 there for inline clones at least. */
6919 if (!cgraph_function_with_gimple_body_p (node))
6920 continue;
6922 gcc_assert (!node->clone_of);
6924 vi = create_function_info_for (node->symbol.decl,
6925 alias_get_name (node->symbol.decl));
6926 cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
6929 /* Create constraints for global variables and their initializers. */
6930 FOR_EACH_VARIABLE (var)
6932 if (var->alias)
6933 continue;
6935 get_vi_for_tree (var->symbol.decl);
6938 if (dump_file)
6940 fprintf (dump_file,
6941 "Generating constraints for global initializers\n\n");
6942 dump_constraints (dump_file, 0);
6943 fprintf (dump_file, "\n");
6945 from = constraints.length ();
6947 FOR_EACH_DEFINED_FUNCTION (node)
6949 struct function *func;
6950 basic_block bb;
6952 /* Nodes without a body are not interesting. */
6953 if (!cgraph_function_with_gimple_body_p (node))
6954 continue;
6956 if (dump_file)
6958 fprintf (dump_file,
6959 "Generating constraints for %s", cgraph_node_name (node));
6960 if (DECL_ASSEMBLER_NAME_SET_P (node->symbol.decl))
6961 fprintf (dump_file, " (%s)",
6962 IDENTIFIER_POINTER
6963 (DECL_ASSEMBLER_NAME (node->symbol.decl)));
6964 fprintf (dump_file, "\n");
6967 func = DECL_STRUCT_FUNCTION (node->symbol.decl);
6968 push_cfun (func);
6970 /* For externally visible or attribute used annotated functions use
6971 local constraints for their arguments.
6972 For local functions we see all callers and thus do not need initial
6973 constraints for parameters. */
6974 if (node->symbol.used_from_other_partition
6975 || node->symbol.externally_visible
6976 || node->symbol.force_output)
6978 intra_create_variable_infos ();
6980 /* We also need to make function return values escape. Nothing
6981 escapes by returning from main though. */
6982 if (!MAIN_NAME_P (DECL_NAME (node->symbol.decl)))
6984 varinfo_t fi, rvi;
6985 fi = lookup_vi_for_tree (node->symbol.decl);
6986 rvi = first_vi_for_offset (fi, fi_result);
6987 if (rvi && rvi->offset == fi_result)
6989 struct constraint_expr includes;
6990 struct constraint_expr var;
6991 includes.var = escaped_id;
6992 includes.offset = 0;
6993 includes.type = SCALAR;
6994 var.var = rvi->id;
6995 var.offset = 0;
6996 var.type = SCALAR;
6997 process_constraint (new_constraint (includes, var));
7002 /* Build constriants for the function body. */
7003 FOR_EACH_BB_FN (bb, func)
7005 gimple_stmt_iterator gsi;
7007 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7008 gsi_next (&gsi))
7010 gimple phi = gsi_stmt (gsi);
7012 if (! virtual_operand_p (gimple_phi_result (phi)))
7013 find_func_aliases (phi);
7016 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7018 gimple stmt = gsi_stmt (gsi);
7020 find_func_aliases (stmt);
7021 find_func_clobbers (stmt);
7025 pop_cfun ();
7027 if (dump_file)
7029 fprintf (dump_file, "\n");
7030 dump_constraints (dump_file, from);
7031 fprintf (dump_file, "\n");
7033 from = constraints.length ();
7036 /* From the constraints compute the points-to sets. */
7037 solve_constraints ();
7039 /* Compute the global points-to sets for ESCAPED.
7040 ??? Note that the computed escape set is not correct
7041 for the whole unit as we fail to consider graph edges to
7042 externally visible functions. */
7043 ipa_escaped_pt = find_what_var_points_to (get_varinfo (escaped_id));
7045 /* Make sure the ESCAPED solution (which is used as placeholder in
7046 other solutions) does not reference itself. This simplifies
7047 points-to solution queries. */
7048 ipa_escaped_pt.ipa_escaped = 0;
7050 /* Assign the points-to sets to the SSA names in the unit. */
7051 FOR_EACH_DEFINED_FUNCTION (node)
7053 tree ptr;
7054 struct function *fn;
7055 unsigned i;
7056 varinfo_t fi;
7057 basic_block bb;
7058 struct pt_solution uses, clobbers;
7059 struct cgraph_edge *e;
7061 /* Nodes without a body are not interesting. */
7062 if (!cgraph_function_with_gimple_body_p (node))
7063 continue;
7065 fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
7067 /* Compute the points-to sets for pointer SSA_NAMEs. */
7068 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
7070 if (ptr
7071 && POINTER_TYPE_P (TREE_TYPE (ptr)))
7072 find_what_p_points_to (ptr);
7075 /* Compute the call-use and call-clobber sets for all direct calls. */
7076 fi = lookup_vi_for_tree (node->symbol.decl);
7077 gcc_assert (fi->is_fn_info);
7078 clobbers
7079 = find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers));
7080 uses = find_what_var_points_to (first_vi_for_offset (fi, fi_uses));
7081 for (e = node->callers; e; e = e->next_caller)
7083 if (!e->call_stmt)
7084 continue;
7086 *gimple_call_clobber_set (e->call_stmt) = clobbers;
7087 *gimple_call_use_set (e->call_stmt) = uses;
7090 /* Compute the call-use and call-clobber sets for indirect calls
7091 and calls to external functions. */
7092 FOR_EACH_BB_FN (bb, fn)
7094 gimple_stmt_iterator gsi;
7096 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7098 gimple stmt = gsi_stmt (gsi);
7099 struct pt_solution *pt;
7100 varinfo_t vi;
7101 tree decl;
7103 if (!is_gimple_call (stmt))
7104 continue;
7106 /* Handle direct calls to external functions. */
7107 decl = gimple_call_fndecl (stmt);
7108 if (decl
7109 && (!(fi = lookup_vi_for_tree (decl))
7110 || !fi->is_fn_info))
7112 pt = gimple_call_use_set (stmt);
7113 if (gimple_call_flags (stmt) & ECF_CONST)
7114 memset (pt, 0, sizeof (struct pt_solution));
7115 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7117 *pt = find_what_var_points_to (vi);
7118 /* Escaped (and thus nonlocal) variables are always
7119 implicitly used by calls. */
7120 /* ??? ESCAPED can be empty even though NONLOCAL
7121 always escaped. */
7122 pt->nonlocal = 1;
7123 pt->ipa_escaped = 1;
7125 else
7127 /* If there is nothing special about this call then
7128 we have made everything that is used also escape. */
7129 *pt = ipa_escaped_pt;
7130 pt->nonlocal = 1;
7133 pt = gimple_call_clobber_set (stmt);
7134 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7135 memset (pt, 0, sizeof (struct pt_solution));
7136 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7138 *pt = find_what_var_points_to (vi);
7139 /* Escaped (and thus nonlocal) variables are always
7140 implicitly clobbered by calls. */
7141 /* ??? ESCAPED can be empty even though NONLOCAL
7142 always escaped. */
7143 pt->nonlocal = 1;
7144 pt->ipa_escaped = 1;
7146 else
7148 /* If there is nothing special about this call then
7149 we have made everything that is used also escape. */
7150 *pt = ipa_escaped_pt;
7151 pt->nonlocal = 1;
7155 /* Handle indirect calls. */
7156 if (!decl
7157 && (fi = get_fi_for_callee (stmt)))
7159 /* We need to accumulate all clobbers/uses of all possible
7160 callees. */
7161 fi = get_varinfo (find (fi->id));
7162 /* If we cannot constrain the set of functions we'll end up
7163 calling we end up using/clobbering everything. */
7164 if (bitmap_bit_p (fi->solution, anything_id)
7165 || bitmap_bit_p (fi->solution, nonlocal_id)
7166 || bitmap_bit_p (fi->solution, escaped_id))
7168 pt_solution_reset (gimple_call_clobber_set (stmt));
7169 pt_solution_reset (gimple_call_use_set (stmt));
7171 else
7173 bitmap_iterator bi;
7174 unsigned i;
7175 struct pt_solution *uses, *clobbers;
7177 uses = gimple_call_use_set (stmt);
7178 clobbers = gimple_call_clobber_set (stmt);
7179 memset (uses, 0, sizeof (struct pt_solution));
7180 memset (clobbers, 0, sizeof (struct pt_solution));
7181 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
7183 struct pt_solution sol;
7185 vi = get_varinfo (i);
7186 if (!vi->is_fn_info)
7188 /* ??? We could be more precise here? */
7189 uses->nonlocal = 1;
7190 uses->ipa_escaped = 1;
7191 clobbers->nonlocal = 1;
7192 clobbers->ipa_escaped = 1;
7193 continue;
7196 if (!uses->anything)
7198 sol = find_what_var_points_to
7199 (first_vi_for_offset (vi, fi_uses));
7200 pt_solution_ior_into (uses, &sol);
7202 if (!clobbers->anything)
7204 sol = find_what_var_points_to
7205 (first_vi_for_offset (vi, fi_clobbers));
7206 pt_solution_ior_into (clobbers, &sol);
7214 fn->gimple_df->ipa_pta = true;
7217 delete_points_to_sets ();
7219 in_ipa_mode = 0;
7221 return 0;
/* The IPA points-to analysis pass; gated on -fipa-pta, driven by
   ipa_pta_execute above.  */
struct simple_ipa_opt_pass pass_ipa_pta =
{
 {
  SIMPLE_IPA_PASS,
  "pta",			/* name */
  OPTGROUP_NONE,		/* optinfo_flags */
  gate_ipa_pta,			/* gate */
  ipa_pta_execute,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_IPA_PTA,			/* tv_id */
  0,				/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_update_ssa		/* todo_flags_finish */
 }
};