N3323
[official-gcc.git] / gcc / tree-ssa-structalias.c
blob37751a74fd5dc65acaccc6137e05fcbc02aa23bb
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "flags.h"
29 #include "basic-block.h"
30 #include "tree.h"
31 #include "tree-flow.h"
32 #include "tree-inline.h"
33 #include "diagnostic-core.h"
34 #include "gimple.h"
35 #include "hashtab.h"
36 #include "function.h"
37 #include "cgraph.h"
38 #include "tree-pass.h"
39 #include "alloc-pool.h"
40 #include "splay-tree.h"
41 #include "params.h"
42 #include "cgraph.h"
43 #include "alias.h"
44 #include "pointer-set.h"
46 /* The idea behind this analyzer is to generate set constraints from the
47 program, then solve the resulting constraints in order to generate the
48 points-to sets.
50 Set constraints are a way of modeling program analysis problems that
51 involve sets. They consist of an inclusion constraint language,
52 describing the variables (each variable is a set) and operations that
53 are involved on the variables, and a set of rules that derive facts
54 from these operations. To solve a system of set constraints, you derive
55 all possible facts under the rules, which gives you the correct sets
56 as a consequence.
58 See "Efficient Field-sensitive pointer analysis for C" by "David
59 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
60 http://citeseer.ist.psu.edu/pearce04efficient.html
62 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
63 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
64 http://citeseer.ist.psu.edu/heintze01ultrafast.html
66 There are three types of real constraint expressions, DEREF,
67 ADDRESSOF, and SCALAR. Each constraint expression consists
68 of a constraint type, a variable, and an offset.
70 SCALAR is a constraint expression type used to represent x, whether
71 it appears on the LHS or the RHS of a statement.
72 DEREF is a constraint expression type used to represent *x, whether
73 it appears on the LHS or the RHS of a statement.
74 ADDRESSOF is a constraint expression used to represent &x, whether
75 it appears on the LHS or the RHS of a statement.
77 Each pointer variable in the program is assigned an integer id, and
78 each field of a structure variable is assigned an integer id as well.
80 Structure variables are linked to their list of fields through a "next
81 field" in each variable that points to the next field in offset
82 order.
83 Each variable for a structure field has
85 1. "size", that tells the size in bits of that field.
86 2. "fullsize", that tells the size in bits of the entire structure.
87 3. "offset", that tells the offset in bits from the beginning of the
88 structure to this field.
90 Thus,
91 struct f
93 int a;
94 int b;
95 } foo;
96 int *bar;
98 looks like
100 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
101 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
102 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
105 In order to solve the system of set constraints, the following is
106 done:
108 1. Each constraint variable x has a solution set associated with it,
109 Sol(x).
111 2. Constraints are separated into direct, copy, and complex.
112 Direct constraints are ADDRESSOF constraints that require no extra
113 processing, such as P = &Q
114 Copy constraints are those of the form P = Q.
115 Complex constraints are all the constraints involving dereferences
116 and offsets (including offsetted copies).
118 3. All direct constraints of the form P = &Q are processed, such
119 that Q is added to Sol(P)
121 4. All complex constraints for a given constraint variable are stored in a
122 linked list attached to that variable's node.
124 5. A directed graph is built out of the copy constraints. Each
125 constraint variable is a node in the graph, and an edge from
126 Q to P is added for each copy constraint of the form P = Q
128 6. The graph is then walked, and solution sets are
129 propagated along the copy edges, such that an edge from Q to P
130 causes Sol(P) <- Sol(P) union Sol(Q).
132 7. As we visit each node, all complex constraints associated with
133 that node are processed by adding appropriate copy edges to the graph, or the
134 appropriate variables to the solution set.
136 8. The process of walking the graph is iterated until no solution
137 sets change.
139 Prior to walking the graph in steps 6 and 7, We perform static
140 cycle elimination on the constraint graph, as well
141 as off-line variable substitution.
143 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
144 on and turned into anything), but isn't. You can just see what offset
145 inside the pointed-to struct it's going to access.
147 TODO: Constant bounded arrays can be handled as if they were structs of the
148 same number of elements.
150 TODO: Modeling heap and incoming pointers becomes much better if we
151 add fields to them as we discover them, which we could do.
153 TODO: We could handle unions, but to be honest, it's probably not
154 worth the pain or slowdown. */
156 /* IPA-PTA optimizations possible.
158 When the indirect function called is ANYTHING we can add disambiguation
159 based on the function signatures (or simply the parameter count which
160 is the varinfo size). We also do not need to consider functions that
161 do not have their address taken.
163 The is_global_var bit which marks escape points is overly conservative
164 in IPA mode. Split it to is_escape_point and is_global_var - only
165 externally visible globals are escape points in IPA mode. This is
166 also needed to fix the pt_solution_includes_global predicate
167 (and thus ptr_deref_may_alias_global_p).
169 The way we introduce DECL_PT_UID to avoid fixing up all points-to
170 sets in the translation unit when we copy a DECL during inlining
171 pessimizes precision. The advantage is that the DECL_PT_UID keeps
172 compile-time and memory usage overhead low - the points-to sets
173 do not grow or get unshared as they would during a fixup phase.
174 An alternative solution is to delay IPA PTA until after all
175 inlining transformations have been applied.
177 The way we propagate clobber/use information isn't optimized.
178 It should use a new complex constraint that properly filters
179 out local variables of the callee (though that would make
180 the sets invalid after inlining). OTOH we might as well
181 admit defeat to WHOPR and simply do all the clobber/use analysis
182 and propagation after PTA finished but before we threw away
183 points-to information for memory variables. WHOPR and PTA
184 do not play along well anyway - the whole constraint solving
185 would need to be done in WPA phase and it will be very interesting
186 to apply the results to local SSA names during LTRANS phase.
188 We probably should compute a per-function unit-ESCAPE solution
189 propagating it simply like the clobber / uses solutions. The
190 solution can go alongside the non-IPA escaped solution and be
191 used to query which vars escape the unit through a function.
193 We never put function decls in points-to sets so we do not
194 keep the set of called functions for indirect calls.
196 And probably more. */
/* Whether the analysis is field-sensitive (distinguishes structure
   fields); may be turned off to trade precision for speed.  */
static bool use_field_sensitive = true;

/* Nonzero while running the interprocedural (IPA) variant of the
   analysis.  */
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps. */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets. */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables. */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps. */
static bitmap_obstack iteration_obstack;
/* Forward declarations for routines defined later in this file.  */
static unsigned int create_variable_info_for (tree, const char *);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

struct constraint;
typedef struct constraint *constraint_t;

/* Iterate over bitmap A with iterator B, index C and start bit D, but
   only when A is non-NULL.  Expands to an `if' guarding the loop, so
   it composes with a following statement or braced block exactly like
   EXECUTE_IF_SET_IN_BITMAP does.  */
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
/* Statistics gathered while building and solving the constraint
   system, reported in the dump files.  */
static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;
/* One constraint variable.  Each user variable, each field of a
   structure variable, and each artificial variable created by the
   analysis gets one of these.  */
struct variable_info
{
  /* ID of this variable  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable.  */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var : 1;

  /* True if this field may contain pointers.  */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers.  */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a global variable.  */
  unsigned int is_global_var : 1;

  /* True if this represents a IPA function info.  */
  unsigned int is_fn_info : 1;

  /* The ID of the variable for the next field in this structure
     or zero for the last field in this structure.  */
  unsigned next;

  /* The ID of the variable for the first field in this structure.  */
  unsigned head;

  /* Offset of this variable, in bits, from the base variable  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* Name of this variable  */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;
/* Field lookup helpers, defined later in this file.  */
static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
						   unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
static inline bool type_can_have_subvars (const_tree);

/* Pool of variable info structures.  */
static alloc_pool variable_info_pool;

/* Map varinfo to final pt_solution.  */
static pointer_map_t *final_solutions;
struct obstack final_solutions_obstack;

/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id.  */
static vec<varinfo_t> varmap;
/* Return the varmap element N.  N must be a valid variable id
   previously handed out by new_var_info.  */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return varmap[n];
}
/* Return the next variable in the list of sub-variables of VI
   or NULL if VI is the last sub-variable.  Id zero terminates the
   chain (see the special-id enum below); presumably varmap[0] holds
   NULL so the terminator maps to NULL here — confirm at the varmap
   initialization site.  */

static inline varinfo_t
vi_next (varinfo_t vi)
{
  return get_varinfo (vi->next);
}
/* Static IDs for the special variables.  Variable ID zero is unused
   and used as terminator for the sub-variable chain.  */
enum { nothing_id = 1, anything_id = 2, readonly_id = 3,
       escaped_id = 4, nonlocal_id = 5,
       storedanything_id = 6, integer_id = 7 };
341 /* Return a new variable info structure consisting for a variable
342 named NAME, and using constraint graph node NODE. Append it
343 to the vector of variable info structures. */
345 static varinfo_t
346 new_var_info (tree t, const char *name)
348 unsigned index = varmap.length ();
349 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
351 ret->id = index;
352 ret->name = name;
353 ret->decl = t;
354 /* Vars without decl are artificial and do not have sub-variables. */
355 ret->is_artificial_var = (t == NULL_TREE);
356 ret->is_special_var = false;
357 ret->is_unknown_size_var = false;
358 ret->is_full_var = (t == NULL_TREE);
359 ret->is_heap_var = false;
360 ret->may_have_pointers = true;
361 ret->only_restrict_pointers = false;
362 ret->is_global_var = (t == NULL_TREE);
363 ret->is_fn_info = false;
364 if (t && DECL_P (t))
365 ret->is_global_var = (is_global_var (t)
366 /* We have to treat even local register variables
367 as escape points. */
368 || (TREE_CODE (t) == VAR_DECL
369 && DECL_HARD_REGISTER (t)));
370 ret->solution = BITMAP_ALLOC (&pta_obstack);
371 ret->oldsolution = NULL;
372 ret->next = 0;
373 ret->head = ret->id;
375 stats.total_vars++;
377 varmap.safe_push (ret);
379 return ret;
/* A map mapping call statements to per-stmt variables for uses
   and clobbers specific to the call.  */
struct pointer_map_t *call_stmt_vars;
387 /* Lookup or create the variable for the call statement CALL. */
389 static varinfo_t
390 get_call_vi (gimple call)
392 void **slot_p;
393 varinfo_t vi, vi2;
395 slot_p = pointer_map_insert (call_stmt_vars, call);
396 if (*slot_p)
397 return (varinfo_t) *slot_p;
399 vi = new_var_info (NULL_TREE, "CALLUSED");
400 vi->offset = 0;
401 vi->size = 1;
402 vi->fullsize = 2;
403 vi->is_full_var = true;
405 vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
406 vi2->offset = 1;
407 vi2->size = 1;
408 vi2->fullsize = 2;
409 vi2->is_full_var = true;
411 vi->next = vi2->id;
413 *slot_p = (void *) vi;
414 return vi;
417 /* Lookup the variable for the call statement CALL representing
418 the uses. Returns NULL if there is nothing special about this call. */
420 static varinfo_t
421 lookup_call_use_vi (gimple call)
423 void **slot_p;
425 slot_p = pointer_map_contains (call_stmt_vars, call);
426 if (slot_p)
427 return (varinfo_t) *slot_p;
429 return NULL;
432 /* Lookup the variable for the call statement CALL representing
433 the clobbers. Returns NULL if there is nothing special about this call. */
435 static varinfo_t
436 lookup_call_clobber_vi (gimple call)
438 varinfo_t uses = lookup_call_use_vi (call);
439 if (!uses)
440 return NULL;
442 return vi_next (uses);
/* Lookup or create the variable for the call statement CALL representing
   the uses.  The use variable is the first of the pair created by
   get_call_vi.  */

static varinfo_t
get_call_use_vi (gimple call)
{
  return get_call_vi (call);
}
/* Lookup or create the variable for the call statement CALL representing
   the clobbers.  The clobber variable is chained directly after the
   use variable created by get_call_vi.  */

static varinfo_t ATTRIBUTE_UNUSED
get_call_clobber_vi (gimple call)
{
  return vi_next (get_call_vi (call));
}
/* The three kinds of real constraint expression: x (SCALAR),
   *x (DEREF) and &x (ADDRESSOF).  */
typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.  */
  HOST_WIDE_INT offset;
};
/* Use 0x8000... (the most negative HOST_WIDE_INT value) as special
   unknown offset.  Compute the bit pattern by shifting in the
   unsigned domain and converting back: left-shifting a negative
   value, as the previous (HOST_WIDE_INT)-1 << ... form did, is
   undefined behavior in C/C++, while the conversion of the
   out-of-range unsigned value to the signed type is well-defined
   (modulo 2^N) on the compilers that build GCC.  */
#define UNKNOWN_OFFSET \
  ((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
/* Shorthand for vectors of constraint expressions.  */
typedef struct constraint_expr ce_s;
static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
static void get_constraint_for (tree, vec<ce_s> *);
static void get_constraint_for_rhs (tree, vec<ce_s> *);
static void do_deref (vec<ce_s> *);
/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.  */

struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from.  */

static vec<constraint_t> constraints;
static alloc_pool constraint_pool;
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map.  */
  unsigned int size;

  /* Explicit successors of each node. */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution). */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution).  */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified.  */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution.  */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built).  */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets.  */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution.  */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding.  */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph.  */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes.  */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution.  */
  bitmap address_taken;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  */
  vec<constraint_t> *complex;
};

/* The single constraint graph instance the solver operates on.  */
static constraint_graph_t graph;
/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  Note that FIRST_REF_NODE re-evaluates varmap's length on
   every use, so it tracks additions to the variable map.  */
#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
593 /* Return the representative node for NODE, if NODE has been unioned
594 with another NODE.
595 This function performs path compression along the way to finding
596 the representative. */
598 static unsigned int
599 find (unsigned int node)
601 gcc_checking_assert (node < graph->size);
602 if (graph->rep[node] != node)
603 return graph->rep[node] = find (graph->rep[node]);
604 return node;
607 /* Union the TO and FROM nodes to the TO nodes.
608 Note that at some point in the future, we may want to do
609 union-by-rank, in which case we are going to have to return the
610 node we unified to. */
612 static bool
613 unite (unsigned int to, unsigned int from)
615 gcc_checking_assert (to < graph->size && from < graph->size);
616 if (to != from && graph->rep[from] != to)
618 graph->rep[from] = to;
619 return true;
621 return false;
624 /* Create a new constraint consisting of LHS and RHS expressions. */
626 static constraint_t
627 new_constraint (const struct constraint_expr lhs,
628 const struct constraint_expr rhs)
630 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
631 ret->lhs = lhs;
632 ret->rhs = rhs;
633 return ret;
636 /* Print out constraint C to FILE. */
638 static void
639 dump_constraint (FILE *file, constraint_t c)
641 if (c->lhs.type == ADDRESSOF)
642 fprintf (file, "&");
643 else if (c->lhs.type == DEREF)
644 fprintf (file, "*");
645 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
646 if (c->lhs.offset == UNKNOWN_OFFSET)
647 fprintf (file, " + UNKNOWN");
648 else if (c->lhs.offset != 0)
649 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
650 fprintf (file, " = ");
651 if (c->rhs.type == ADDRESSOF)
652 fprintf (file, "&");
653 else if (c->rhs.type == DEREF)
654 fprintf (file, "*");
655 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
656 if (c->rhs.offset == UNKNOWN_OFFSET)
657 fprintf (file, " + UNKNOWN");
658 else if (c->rhs.offset != 0)
659 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
/* Prototypes for the debug entry points below; extern so they are
   callable from a debugger even when otherwise unreferenced.  */
void debug_constraint (constraint_t);
void debug_constraints (void);
void debug_constraint_graph (void);
void debug_solution_for_var (unsigned int);
void debug_sa_points_to_info (void);
669 /* Print out constraint C to stderr. */
671 DEBUG_FUNCTION void
672 debug_constraint (constraint_t c)
674 dump_constraint (stderr, c);
675 fprintf (stderr, "\n");
678 /* Print out all constraints to FILE */
680 static void
681 dump_constraints (FILE *file, int from)
683 int i;
684 constraint_t c;
685 for (i = from; constraints.iterate (i, &c); i++)
686 if (c)
688 dump_constraint (file, c);
689 fprintf (file, "\n");
/* Print out all constraints to stderr.  */

DEBUG_FUNCTION void
debug_constraints (void)
{
  dump_constraints (stderr, 0);
}
/* Print the constraint graph in dot format.  Only unified-away nodes
   (find (i) != i) and the unused REF boundary node are skipped; each
   remaining node is emitted with its complex constraints as a label,
   followed by all explicit successor edges between representatives.  */

static void
dump_constraint_graph (FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      if (find (i) != i)
	continue;
      /* Nodes at or past FIRST_REF_NODE stand for *var; prefix them
	 with '*'.  */
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->complex[i].exists ())
	{
	  unsigned j;
	  constraint_t c;
	  fprintf (file, " [label=\"\\N\\n");
	  for (j = 0; graph->complex[i].iterate (j, &c); ++j)
	    {
	      dump_constraint (file, c);
	      fprintf (file, "\\l");
	    }
	  fprintf (file, "\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (find (i) != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
	{
	  unsigned to = find (j);
	  /* Skip self edges introduced by unification.  */
	  if (i == to)
	    continue;
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (to < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (to)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
/* Print out the constraint graph to stderr.  */

DEBUG_FUNCTION void
debug_constraint_graph (void)
{
  dump_constraint_graph (stderr);
}
784 /* SOLVER FUNCTIONS
786 The solver is a simple worklist solver, that works on the following
787 algorithm:
789 sbitmap changed_nodes = all zeroes;
790 changed_count = 0;
791 For each node that is not already collapsed:
792 changed_count++;
793 set bit in changed nodes
795 while (changed_count > 0)
797 compute topological ordering for constraint graph
799 find and collapse cycles in the constraint graph (updating
800 changed if necessary)
802 for each node (n) in the graph in topological order:
803 changed_count--;
805 Process each complex constraint associated with the node,
806 updating changed if necessary.
808 For each outgoing edge from n, propagate the solution from n to
809 the destination of the edge, updating changed as necessary.
811 } */
813 /* Return true if two constraint expressions A and B are equal. */
815 static bool
816 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
818 return a.type == b.type && a.var == b.var && a.offset == b.offset;
821 /* Return true if constraint expression A is less than constraint expression
822 B. This is just arbitrary, but consistent, in order to give them an
823 ordering. */
825 static bool
826 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
828 if (a.type == b.type)
830 if (a.var == b.var)
831 return a.offset < b.offset;
832 else
833 return a.var < b.var;
835 else
836 return a.type < b.type;
839 /* Return true if constraint A is less than constraint B. This is just
840 arbitrary, but consistent, in order to give them an ordering. */
842 static bool
843 constraint_less (const constraint_t &a, const constraint_t &b)
845 if (constraint_expr_less (a->lhs, b->lhs))
846 return true;
847 else if (constraint_expr_less (b->lhs, a->lhs))
848 return false;
849 else
850 return constraint_expr_less (a->rhs, b->rhs);
853 /* Return true if two constraints A and B are equal. */
855 static bool
856 constraint_equal (struct constraint a, struct constraint b)
858 return constraint_expr_equal (a.lhs, b.lhs)
859 && constraint_expr_equal (a.rhs, b.rhs);
863 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
865 static constraint_t
866 constraint_vec_find (vec<constraint_t> vec,
867 struct constraint lookfor)
869 unsigned int place;
870 constraint_t found;
872 if (!vec.exists ())
873 return NULL;
875 place = vec.lower_bound (&lookfor, constraint_less);
876 if (place >= vec.length ())
877 return NULL;
878 found = vec[place];
879 if (!constraint_equal (*found, lookfor))
880 return NULL;
881 return found;
884 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
886 static void
887 constraint_set_union (vec<constraint_t> *to,
888 vec<constraint_t> *from)
890 int i;
891 constraint_t c;
893 FOR_EACH_VEC_ELT (*from, i, c)
895 if (constraint_vec_find (*to, *c) == NULL)
897 unsigned int place = to->lower_bound (c, constraint_less);
898 to->safe_insert (place, c);
/* Expands the solution in SET to all sub-fields of variables included.  */

static void
solution_set_expand (bitmap set)
{
  bitmap_iterator bi;
  unsigned j;

  /* In a first pass expand to the head of the variables we need to
     add all sub-fields off.  This avoids quadratic behavior.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      if (v->is_artificial_var
	  || v->is_full_var)
	continue;
      /* NOTE(review): bits are set in SET while iterating over it -
	 this assumes the bitmap iterator tolerates in-place insertions;
	 confirm against the bitmap.h iteration contract.  */
      bitmap_set_bit (set, v->head);
    }

  /* In the second pass now expand all head variables with subfields.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      if (v->is_artificial_var
	  || v->is_full_var
	  || v->head != j)
	continue;
      /* Add every sub-variable of the head to the set.  */
      for (v = vi_next (v); v != NULL; v = vi_next (v))
	bitmap_set_bit (set, v->id);
    }
}
/* Union solution sets TO and FROM, and add INC to each member of FROM in the
   process.  Returns true if TO changed.  */

static bool
set_union_with_increment  (bitmap to, bitmap from, HOST_WIDE_INT inc)
{
  bool changed = false;
  bitmap_iterator bi;
  unsigned int i;

  /* If the solution of FROM contains anything it is good enough to transfer
     this to TO.  */
  if (bitmap_bit_p (from, anything_id))
    return bitmap_set_bit (to, anything_id);

  /* For zero offset simply union the solution into the destination.  */
  if (inc == 0)
    return bitmap_ior_into (to, from);

  /* If the offset is unknown we have to expand the solution to
     all subfields.  */
  if (inc == UNKNOWN_OFFSET)
    {
      bitmap tmp = BITMAP_ALLOC (&iteration_obstack);
      bitmap_copy (tmp, from);
      solution_set_expand (tmp);
      changed |= bitmap_ior_into (to, tmp);
      BITMAP_FREE (tmp);
      return changed;
    }

  /* For non-zero offset union the offsetted solution into the destination.  */
  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* If this is a variable with just one field just set its bit
	 in the result.  */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	changed |= bitmap_set_bit (to, i);
      else
	{
	  /* Unsigned arithmetic: a negative INC larger than the
	     current offset wraps around, which the check below
	     detects.  */
	  unsigned HOST_WIDE_INT fieldoffset = vi->offset + inc;

	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.  */
	  if (inc < 0
	      && fieldoffset > vi->offset)
	    fieldoffset = 0;

	  vi = first_or_preceding_vi_for_offset (vi, fieldoffset);

	  changed |= bitmap_set_bit (to, vi->id);
	  /* If the result is not exactly at fieldoffset include the next
	     field as well.  See get_constraint_for_ptr_offset for more
	     rationale.  */
	  if (vi->offset != fieldoffset
	      && vi->next != 0)
	    changed |= bitmap_set_bit (to, vi->next);
	}
    }

  return changed;
}
1002 /* Insert constraint C into the list of complex constraints for graph
1003 node VAR. */
1005 static void
1006 insert_into_complex (constraint_graph_t graph,
1007 unsigned int var, constraint_t c)
1009 vec<constraint_t> complex = graph->complex[var];
1010 unsigned int place = complex.lower_bound (c, constraint_less);
1012 /* Only insert constraints that do not already exist. */
1013 if (place >= complex.length ()
1014 || !constraint_equal (*c, *complex[place]))
1015 graph->complex[var].safe_insert (place, c);
/* Condense two variable nodes into a single variable node, by moving
   all associated info from FROM to TO.  Must only be called after
   FROM has already been unified into TO (asserted below).  */

static void
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
{
  unsigned int i;
  constraint_t c;

  gcc_checking_assert (find (from) == to);

  /* Move all complex constraints from src node into to node  */
  FOR_EACH_VEC_ELT (graph->complex[from], i, c)
    {
      /* In complex constraints for node src, we may have either
	 a = *src, and *src = a, or an offseted constraint which are
	 always added to the rhs node's constraints.  The constraints
	 are retargeted in place before being merged.  */

      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else if (c->lhs.type == DEREF)
	c->lhs.var = to;
      else
	c->rhs.var = to;
    }
  constraint_set_union (&graph->complex[to], &graph->complex[from]);
  graph->complex[from].release ();
}
/* Remove edges involving NODE from GRAPH.  Only the explicit
   successor bitmap is stored per node, so freeing it is all that is
   needed.  */

static void
clear_edges_for_node (constraint_graph_t graph, unsigned int node)
{
  if (graph->succs[node])
    BITMAP_FREE (graph->succs[node]);
}
/* Merge GRAPH nodes FROM and TO into node TO.  Transfers indirect
   cycle information and successor edges, then clears FROM's edges.  */

static void
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
		   unsigned int from)
{
  if (graph->indirect_cycles[from] != -1)
    {
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other.  */
      if (graph->indirect_cycles[to] == -1)
	graph->indirect_cycles[to] = graph->indirect_cycles[from];
    }

  /* Merge all the successor edges.  */
  if (graph->succs[from])
    {
      if (!graph->succs[to])
	graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
      bitmap_ior_into (graph->succs[to],
		       graph->succs[from]);
    }

  clear_edges_for_node (graph, from);
}
1090 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1091 it doesn't exist in the graph already. */
1093 static void
1094 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1095 unsigned int from)
1097 if (to == from)
1098 return;
1100 if (!graph->implicit_preds[to])
1101 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1103 if (bitmap_set_bit (graph->implicit_preds[to], from))
1104 stats.num_implicit_edges++;
1107 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1108    it doesn't exist in the graph already.
     Unlike add_graph_edge, no indication is given whether the edge was
     already present; the predecessor bit is simply set.  */
1111 static void
1112 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1113 		     unsigned int from)
1115   if (!graph->preds[to])
1116     graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1117   bitmap_set_bit (graph->preds[to], from);
1120 /* Add a graph edge to GRAPH, going from FROM to TO if
1121 it doesn't exist in the graph already.
1122 Return false if the edge already existed, true otherwise. */
1124 static bool
1125 add_graph_edge (constraint_graph_t graph, unsigned int to,
1126 unsigned int from)
1128 if (to == from)
1130 return false;
1132 else
1134 bool r = false;
1136 if (!graph->succs[from])
1137 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1138 if (bitmap_set_bit (graph->succs[from], to))
1140 r = true;
1141 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1142 stats.num_edges++;
1144 return r;
1149 /* Initialize the constraint graph structure to contain SIZE nodes. */
1151 static void
1152 init_graph (unsigned int size)
1154 unsigned int j;
1156 graph = XCNEW (struct constraint_graph);
1157 graph->size = size;
1158 graph->succs = XCNEWVEC (bitmap, graph->size);
1159 graph->indirect_cycles = XNEWVEC (int, graph->size);
1160 graph->rep = XNEWVEC (unsigned int, graph->size);
1161 /* ??? Macros do not support template types with multiple arguments,
1162 so we use a typedef to work around it. */
1163 typedef vec<constraint_t> vec_constraint_t_heap;
1164 graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1165 graph->pe = XCNEWVEC (unsigned int, graph->size);
1166 graph->pe_rep = XNEWVEC (int, graph->size);
1168 for (j = 0; j < graph->size; j++)
1170 graph->rep[j] = j;
1171 graph->pe_rep[j] = -1;
1172 graph->indirect_cycles[j] = -1;
1176 /* Build the constraint graph, adding only predecessor edges right now. */
1178 static void
1179 build_pred_graph (void)
1181   int i;
1182   constraint_t c;
1183   unsigned int j;
1185   graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1186   graph->preds = XCNEWVEC (bitmap, graph->size);
1187   graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1188   graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1189   graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1190   graph->points_to = XCNEWVEC (bitmap, graph->size);
1191   graph->eq_rep = XNEWVEC (int, graph->size);
1192   graph->direct_nodes = sbitmap_alloc (graph->size);
1193   graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1194   bitmap_clear (graph->direct_nodes);
     /* All non-special variable nodes start out direct; constraints
	below demote them as needed.  */
1196   for (j = 1; j < FIRST_REF_NODE; j++)
1198       if (!get_varinfo (j)->is_special_var)
1199 	bitmap_set_bit (graph->direct_nodes, j);
1202   for (j = 0; j < graph->size; j++)
1203     graph->eq_rep[j] = -1;
1205   for (j = 0; j < varmap.length (); j++)
1206     graph->indirect_cycles[j] = -1;
     /* Translate each constraint into predecessor / implicit edges as
	described by the per-case comments.  Node FIRST_REF_NODE + n
	stands for the dereference *n.  */
1208   FOR_EACH_VEC_ELT (constraints, i, c)
1210       struct constraint_expr lhs = c->lhs;
1211       struct constraint_expr rhs = c->rhs;
1212       unsigned int lhsvar = lhs.var;
1213       unsigned int rhsvar = rhs.var;
1215       if (lhs.type == DEREF)
1217 	  /* *x = y.  */
1218 	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1219 	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1221       else if (rhs.type == DEREF)
1223 	  /* x = *y */
1224 	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1225 	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1226 	  else
1227 	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
1229       else if (rhs.type == ADDRESSOF)
1231 	  varinfo_t v;
1233 	  /* x = &y */
1234 	  if (graph->points_to[lhsvar] == NULL)
1235 	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1236 	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1238 	  if (graph->pointed_by[rhsvar] == NULL)
1239 	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1240 	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1242 	  /* Implicitly, *x = y */
1243 	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1245 	  /* All related variables are no longer direct nodes.  */
1246 	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
1247       v = get_varinfo (rhsvar);
1248       if (!v->is_full_var)
     /* For a field-decomposed variable, demote every sub-field
	starting from the head of its variable chain.  */
1250 	  v = get_varinfo (v->head);
1253 	      bitmap_clear_bit (graph->direct_nodes, v->id);
1254 	      v = vi_next (v);
1256 	  while (v != NULL);
1258 	  bitmap_set_bit (graph->address_taken, rhsvar);
1260       else if (lhsvar > anything_id
1261 	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1263 	  /* x = y */
1264 	  add_pred_graph_edge (graph, lhsvar, rhsvar);
1265 	  /* Implicitly, *x = *y */
1266 	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1267 				   FIRST_REF_NODE + rhsvar);
1269       else if (lhs.offset != 0 || rhs.offset != 0)
     /* Offsetted copies make the offsetted side non-direct.  */
1271 	  if (rhs.offset != 0)
1272 	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
1273 	  else if (lhs.offset != 0)
1274 	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
1279 /* Build the constraint graph, adding successor edges.  */
1281 static void
1282 build_succ_graph (void)
1284   unsigned i, t;
1285   constraint_t c;
1287   FOR_EACH_VEC_ELT (constraints, i, c)
1289       struct constraint_expr lhs;
1290       struct constraint_expr rhs;
1291       unsigned int lhsvar;
1292       unsigned int rhsvar;
1294       if (!c)
1295 	continue;
1297       lhs = c->lhs;
1298       rhs = c->rhs;
     /* Work on union-find representatives; nodes may already have
	been unified by offline variable substitution.  */
1299       lhsvar = find (lhs.var);
1300       rhsvar = find (rhs.var);
1302       if (lhs.type == DEREF)
1304 	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1305 	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1307       else if (rhs.type == DEREF)
1309 	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1310 	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1312       else if (rhs.type == ADDRESSOF)
1314 	  /* x = &y */
1315 	  gcc_checking_assert (find (rhs.var) == rhs.var);
1316 	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1318       else if (lhsvar > anything_id
1319 	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1321 	  add_graph_edge (graph, lhsvar, rhsvar);
1325   /* Add edges from STOREDANYTHING to all non-direct nodes that can
1326      receive pointers.  */
1327   t = find (storedanything_id);
1328   for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1330       if (!bitmap_bit_p (graph->direct_nodes, i)
1331 	  && get_varinfo (i)->may_have_pointers)
1332 	add_graph_edge (graph, find (i), t);
1335   /* Everything stored to ANYTHING also potentially escapes.  */
1336   add_graph_edge (graph, find (escaped_id), t);
1340 /* Changed variables on the last iteration. */
1341 static bitmap changed;
1343 /* Strongly Connected Component visitation info.  */
1345 struct scc_info
     /* Nodes already entered by the DFS walk.  */
1347   sbitmap visited;
     /* Nodes whose SCC has been fully processed.  */
1348   sbitmap deleted;
     /* Per-node DFS number / lowlink value (Tarjan/Nuutila).  */
1349   unsigned int *dfs;
     /* Mapping of each node to its SCC representative.  */
1350   unsigned int *node_mapping;
     /* Next DFS number to hand out.  */
1351   int current_index;
     /* Stack of candidate SCC members; per Nuutila, only non-root
	nodes are kept on it.  */
1352   vec<unsigned> scc_stack;
1356 /* Recursive routine to find strongly connected components in GRAPH.
1357    SI is the SCC info to store the information in, and N is the id of current
1358    graph node we are processing.
1360    This is Tarjan's strongly connected component finding algorithm, as
1361    modified by Nuutila to keep only non-root nodes on the stack.
1362    The algorithm can be found in "On finding the strongly connected
1363    connected components in a directed graph" by Esko Nuutila and Eljas
1364    Soisalon-Soininen, in Information Processing Letters volume 49,
1365    number 1, pages 9-14.  */
1367 static void
1368 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1370   unsigned int i;
1371   bitmap_iterator bi;
1372   unsigned int my_dfs;
1374   bitmap_set_bit (si->visited, n);
1375   si->dfs[n] = si->current_index ++;
1376   my_dfs = si->dfs[n];
1378   /* Visit all the successors.  */
1379   EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1381       unsigned int w;
     /* Successor bits above LAST_REF_NODE are not graph nodes.  */
1383       if (i > LAST_REF_NODE)
1384 	break;
1386       w = find (i);
1387       if (bitmap_bit_p (si->deleted, w))
1388 	continue;
1390       if (!bitmap_bit_p (si->visited, w))
1391 	scc_visit (graph, si, w);
     /* Propagate the lowest reachable DFS number up to N.  */
1393       unsigned int t = find (w);
1394       gcc_checking_assert (find (n) == n);
1395       if (si->dfs[t] < si->dfs[n])
1396 	si->dfs[n] = si->dfs[t];
1399   /* See if any components have been identified.  */
1400   if (si->dfs[n] == my_dfs)
1402       if (si->scc_stack.length () > 0
1403 	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
1405 	  bitmap scc = BITMAP_ALLOC (NULL);
1406 	  unsigned int lowest_node;
1407 	  bitmap_iterator bi;
1409 	  bitmap_set_bit (scc, n);
     /* Pop every stacked node belonging to this SCC.  */
1411 	  while (si->scc_stack.length () != 0
1412 		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1414 	      unsigned int w = si->scc_stack.pop ();
1416 	      bitmap_set_bit (scc, w);
1419 	  lowest_node = bitmap_first_set_bit (scc);
1420 	  gcc_assert (lowest_node < FIRST_REF_NODE);
1422 	  /* Collapse the SCC nodes into a single node, and mark the
1423 	     indirect cycles.  */
1424 	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1426 	      if (i < FIRST_REF_NODE)
1428 		  if (unite (lowest_node, i))
1429 		    unify_nodes (graph, lowest_node, i, false);
1431 	      else
     /* Ref nodes record the cycle in indirect_cycles instead of
	being unified immediately.  */
1433 		  unite (lowest_node, i);
1434 		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1438       bitmap_set_bit (si->deleted, n);
1440   else
1441     si->scc_stack.safe_push (n);
1444 /* Unify node FROM into node TO, updating the changed count if
1445    necessary when UPDATE_CHANGED is true.  TO must already be the
     union-find representative (checked below).  */
1447 static void
1448 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1449 	     bool update_changed)
1451   gcc_checking_assert (to != from && find (to) == to);
1453   if (dump_file && (dump_flags & TDF_DETAILS))
1454     fprintf (dump_file, "Unifying %s to %s\n",
1455 	     get_varinfo (from)->name,
1456 	     get_varinfo (to)->name);
1458   if (update_changed)
1459     stats.unified_vars_dynamic++;
1460   else
1461     stats.unified_vars_static++;
1463   merge_graph_nodes (graph, to, from);
1464   merge_node_constraints (graph, to, from);
1466   /* Mark TO as changed if FROM was changed. If TO was already marked
1467      as changed, decrease the changed count. */
1469   if (update_changed
1470       && bitmap_clear_bit (changed, from))
1471     bitmap_set_bit (changed, to);
1472   varinfo_t fromvi = get_varinfo (from);
1473   if (fromvi->solution)
1475       /* If the solution changes because of the merging, we need to mark
1476 	 the variable as changed.  */
1477       varinfo_t tovi = get_varinfo (to);
1478       if (bitmap_ior_into (tovi->solution, fromvi->solution))
1480 	  if (update_changed)
1481 	    bitmap_set_bit (changed, to);
     /* FROM's solution is dead now that it is folded into TO.  */
1484       BITMAP_FREE (fromvi->solution);
1485       if (fromvi->oldsolution)
1486 	BITMAP_FREE (fromvi->oldsolution);
1488       if (stats.iterations > 0
1489 	  && tovi->oldsolution)
1490 	BITMAP_FREE (tovi->oldsolution);
     /* A self-edge on TO is useless after unification.  */
1492   if (graph->succs[to])
1493     bitmap_clear_bit (graph->succs[to], to);
1496 /* Information needed to compute the topological ordering of a graph.
     Filled in by topo_visit.  */
1498 struct topo_info
1500   /* sbitmap of visited nodes.  */
1501   sbitmap visited;
1502   /* Array that stores the topological order of the graph, *in
1503      reverse*.  */
1504   vec<unsigned> topo_order;
1508 /* Initialize and return a topological info structure. */
1510 static struct topo_info *
1511 init_topo_info (void)
1513 size_t size = graph->size;
1514 struct topo_info *ti = XNEW (struct topo_info);
1515 ti->visited = sbitmap_alloc (size);
1516 bitmap_clear (ti->visited);
1517 ti->topo_order.create (1);
1518 return ti;
1522 /* Free the topological sort info pointed to by TI. */
1524 static void
1525 free_topo_info (struct topo_info *ti)
1527 sbitmap_free (ti->visited);
1528 ti->topo_order.release ();
1529 free (ti);
1532 /* Visit the graph in topological order, and store the order in the
1533 topo_info structure. */
1535 static void
1536 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1537 unsigned int n)
1539 bitmap_iterator bi;
1540 unsigned int j;
1542 bitmap_set_bit (ti->visited, n);
1544 if (graph->succs[n])
1545 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1547 if (!bitmap_bit_p (ti->visited, j))
1548 topo_visit (graph, ti, j);
1551 ti->topo_order.safe_push (n);
1554 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1555    starting solution for y.  */
1557 static void
1558 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1559 		  bitmap delta)
1561   unsigned int lhs = c->lhs.var;
1562   bool flag = false;
1563   bitmap sol = get_varinfo (lhs)->solution;
1564   unsigned int j;
1565   bitmap_iterator bi;
1566   HOST_WIDE_INT roffset = c->rhs.offset;
1568   /* Our IL does not allow this.  */
1569   gcc_checking_assert (c->lhs.offset == 0);
1571   /* If the solution of Y contains anything it is good enough to transfer
1572      this to the LHS.  */
1573   if (bitmap_bit_p (delta, anything_id))
1575       flag |= bitmap_set_bit (sol, anything_id);
1576       goto done;
1579   /* If we do not know at which offset the rhs is dereferenced compute
1580      the reachability set of DELTA, conservatively assuming it is
1581      dereferenced at all valid offsets.  */
1582   if (roffset == UNKNOWN_OFFSET)
1584       solution_set_expand (delta);
1585       /* No further offset processing is necessary.  */
1586       roffset = 0;
1589   /* For each variable j in delta (Sol(y)), add
1590      an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
1591   EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1593       varinfo_t v = get_varinfo (j);
1594       HOST_WIDE_INT fieldoffset = v->offset + roffset;
1595       unsigned int t;
1597       if (v->is_full_var)
1598 	fieldoffset = v->offset;
1599       else if (roffset != 0)
1600 	v = first_vi_for_offset (v, fieldoffset);
1601       /* If the access is outside of the variable we can ignore it.  */
1602       if (!v)
1603 	continue;
1607 	  t = find (v->id);
1609 	  /* Adding edges from the special vars is pointless.
1610 	     They don't have sets that can change.  */
1611 	  if (get_varinfo (t)->is_special_var)
1612 	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1613 	  /* Merging the solution from ESCAPED needlessly increases
1614 	     the set.  Use ESCAPED as representative instead.  */
1615 	  else if (v->id == escaped_id)
1616 	    flag |= bitmap_set_bit (sol, escaped_id);
1617 	  else if (v->may_have_pointers
1618 		   && add_graph_edge (graph, lhs, t))
1619 	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1621 	  /* If the variable is not exactly at the requested offset
1622 	     we have to include the next one.  */
1623 	  if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1624 	      || v->next == 0)
1625 	    break;
     /* Walk to the following sub-field of the decomposed variable.  */
1627 	  v = vi_next (v);
1628 	  fieldoffset = v->offset;
1630       while (1);
1633  done:
1634   /* If the LHS solution changed, mark the var as changed.  */
1635   if (flag)
1637       get_varinfo (lhs)->solution = sol;
1638       bitmap_set_bit (changed, lhs);
1642 /* Process a constraint C that represents *(x + off) = y using DELTA
1643    as the starting solution for x.  */
1645 static void
1646 do_ds_constraint (constraint_t c, bitmap delta)
1648   unsigned int rhs = c->rhs.var;
1649   bitmap sol = get_varinfo (rhs)->solution;
1650   unsigned int j;
1651   bitmap_iterator bi;
1652   HOST_WIDE_INT loff = c->lhs.offset;
1653   bool escaped_p = false;
1655   /* Our IL does not allow this.  */
1656   gcc_checking_assert (c->rhs.offset == 0);
1658   /* If the solution of y contains ANYTHING simply use the ANYTHING
1659      solution.  This avoids needlessly increasing the points-to sets.  */
1660   if (bitmap_bit_p (sol, anything_id))
1661     sol = get_varinfo (find (anything_id))->solution;
1663   /* If the solution for x contains ANYTHING we have to merge the
1664      solution of y into all pointer variables which we do via
1665      STOREDANYTHING.  */
1666   if (bitmap_bit_p (delta, anything_id))
1668       unsigned t = find (storedanything_id);
1669       if (add_graph_edge (graph, t, rhs))
1671 	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1672 	    bitmap_set_bit (changed, t);
1674       return;
1677   /* If we do not know at which offset the rhs is dereferenced compute
1678      the reachability set of DELTA, conservatively assuming it is
1679      dereferenced at all valid offsets.  */
1680   if (loff == UNKNOWN_OFFSET)
1682       solution_set_expand (delta);
1683       loff = 0;
1686   /* For each member j of delta (Sol(x)), add an edge from y to j and
1687      union Sol(y) into Sol(j) */
1688   EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1690       varinfo_t v = get_varinfo (j);
1691       unsigned int t;
1692       HOST_WIDE_INT fieldoffset = v->offset + loff;
1694       if (v->is_full_var)
1695 	fieldoffset = v->offset;
1696       else if (loff != 0)
1697 	v = first_vi_for_offset (v, fieldoffset);
1698       /* If the access is outside of the variable we can ignore it.  */
1699       if (!v)
1700 	continue;
1704 	  if (v->may_have_pointers)
1706 	      /* If v is a global variable then this is an escape point.  */
1707 	      if (v->is_global_var
1708 		  && !escaped_p)
1710 		  t = find (escaped_id);
1711 		  if (add_graph_edge (graph, t, rhs)
1712 		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
1713 		    bitmap_set_bit (changed, t);
1714 		  /* Enough to let rhs escape once.  */
1715 		  escaped_p = true;
1718 	      if (v->is_special_var)
1719 		break;
1721 	      t = find (v->id);
1722 	      if (add_graph_edge (graph, t, rhs)
1723 		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
1724 		bitmap_set_bit (changed, t);
1727 	  /* If the variable is not exactly at the requested offset
1728 	     we have to include the next one.  */
1729 	  if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1730 	      || v->next == 0)
1731 	    break;
     /* Walk to the following sub-field of the decomposed variable.  */
1733 	  v = vi_next (v);
1734 	  fieldoffset = v->offset;
1736       while (1);
1740 /* Handle a non-simple (simple meaning requires no iteration),
1741    constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved).
     DELTA is the current solution of the pointer being dereferenced
     (or of the RHS for the offsetted-copy case).  */
1743 static void
1744 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1746   if (c->lhs.type == DEREF)
1748       if (c->rhs.type == ADDRESSOF)
     /* *x = &y is rewritten away before solving.  */
1750 	  gcc_unreachable();
1752       else
1754 	  /* *x = y */
1755 	  do_ds_constraint (c, delta);
1758   else if (c->rhs.type == DEREF)
1760       /* x = *y */
1761       if (!(get_varinfo (c->lhs.var)->is_special_var))
1762 	do_sd_constraint (graph, c, delta);
1764   else
1766       bitmap tmp;
1767       bitmap solution;
1768       bool flag = false;
     /* x = y + offset: union the offsetted RHS solution into LHS.  */
1770       gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1771       solution = get_varinfo (c->rhs.var)->solution;
1772       tmp = get_varinfo (c->lhs.var)->solution;
1774       flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1776       if (flag)
1777 	bitmap_set_bit (changed, c->lhs.var);
1781 /* Initialize and return a new SCC info structure. */
1783 static struct scc_info *
1784 init_scc_info (size_t size)
1786 struct scc_info *si = XNEW (struct scc_info);
1787 size_t i;
1789 si->current_index = 0;
1790 si->visited = sbitmap_alloc (size);
1791 bitmap_clear (si->visited);
1792 si->deleted = sbitmap_alloc (size);
1793 bitmap_clear (si->deleted);
1794 si->node_mapping = XNEWVEC (unsigned int, size);
1795 si->dfs = XCNEWVEC (unsigned int, size);
1797 for (i = 0; i < size; i++)
1798 si->node_mapping[i] = i;
1800 si->scc_stack.create (1);
1801 return si;
1804 /* Free an SCC info structure pointed to by SI */
1806 static void
1807 free_scc_info (struct scc_info *si)
1809 sbitmap_free (si->visited);
1810 sbitmap_free (si->deleted);
1811 free (si->node_mapping);
1812 free (si->dfs);
1813 si->scc_stack.release ();
1814 free (si);
1818 /* Find indirect cycles in GRAPH that occur, using strongly connected
1819 components, and note them in the indirect cycles map.
1821 This technique comes from Ben Hardekopf and Calvin Lin,
1822 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1823 Lines of Code", submitted to PLDI 2007. */
1825 static void
1826 find_indirect_cycles (constraint_graph_t graph)
1828 unsigned int i;
1829 unsigned int size = graph->size;
1830 struct scc_info *si = init_scc_info (size);
1832 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1833 if (!bitmap_bit_p (si->visited, i) && find (i) == i)
1834 scc_visit (graph, si, i);
1836 free_scc_info (si);
1839 /* Compute a topological ordering for GRAPH, and store the result in the
1840 topo_info structure TI. */
1842 static void
1843 compute_topo_order (constraint_graph_t graph,
1844 struct topo_info *ti)
1846 unsigned int i;
1847 unsigned int size = graph->size;
1849 for (i = 0; i != size; ++i)
1850 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1851 topo_visit (graph, ti, i);
1854 /* Structure used for hash value numbering of pointer equivalence
1855    classes.  */
1857 typedef struct equiv_class_label
     /* Precomputed hash of LABELS, used as the htab hash.  */
1859   hashval_t hashcode;
     /* Equivalence class id assigned to LABELS; 0 means not yet
	assigned.  */
1860   unsigned int equivalence_class;
     /* The bitmap of labels this entry stands for.  */
1861   bitmap labels;
1862 } *equiv_class_label_t;
1863 typedef const struct equiv_class_label *const_equiv_class_label_t;
1865 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1866    classes.  */
1867 static htab_t pointer_equiv_class_table;
1869 /* A hashtable for mapping a bitmap of labels->location equivalence
1870    classes.  */
1871 static htab_t location_equiv_class_table;
1873 /* Hash function for a equiv_class_label_t */
1875 static hashval_t
1876 equiv_class_label_hash (const void *p)
1878 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1879 return ecl->hashcode;
1882 /* Equality function for two equiv_class_label_t's. */
1884 static int
1885 equiv_class_label_eq (const void *p1, const void *p2)
1887 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1888 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1889 return (eql1->hashcode == eql2->hashcode
1890 && bitmap_equal_p (eql1->labels, eql2->labels));
1893 /* Look up the equivalence class entry for the bitmap of LABELS in
1894    TABLE, inserting a fresh entry with equivalence_class == 0 (meaning
1895    "not yet assigned") if none exists.  Returns the entry.  */
1897 static equiv_class_label *
1898 equiv_class_lookup_or_add (htab_t table, bitmap labels)
1900   equiv_class_label **slot;
1901   equiv_class_label ecl;
1903   ecl.labels = labels;
1904   ecl.hashcode = bitmap_hash (labels);
1905   slot = (equiv_class_label **) htab_find_slot_with_hash (table, &ecl,
1906 							  ecl.hashcode, INSERT);
1907   if (!*slot)
     /* Not present yet: create a new entry that takes over LABELS.  */
1909       *slot = XNEW (struct equiv_class_label);
1910       (*slot)->labels = labels;
1911       (*slot)->hashcode = ecl.hashcode;
1912       (*slot)->equivalence_class = 0;
1915   return *slot;
1918 /* Perform offline variable substitution.
1920 This is a worst case quadratic time way of identifying variables
1921 that must have equivalent points-to sets, including those caused by
1922 static cycles, and single entry subgraphs, in the constraint graph.
1924 The technique is described in "Exploiting Pointer and Location
1925 Equivalence to Optimize Pointer Analysis. In the 14th International
1926 Static Analysis Symposium (SAS), August 2007." It is known as the
1927 "HU" algorithm, and is equivalent to value numbering the collapsed
1928 constraint graph including evaluating unions.
1930 The general method of finding equivalence classes is as follows:
1931 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1932 Initialize all non-REF nodes to be direct nodes.
1933 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1934 variable}
1935 For each constraint containing the dereference, we also do the same
1936 thing.
1938 We then compute SCC's in the graph and unify nodes in the same SCC,
1939 including pts sets.
1941 For each non-collapsed node x:
1942 Visit all unvisited explicit incoming edges.
1943 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1944 where y->x.
1945 Lookup the equivalence class for pts(x).
1946 If we found one, equivalence_class(x) = found class.
1947 Otherwise, equivalence_class(x) = new class, and new_class is
1948 added to the lookup table.
1950 All direct nodes with the same equivalence class can be replaced
1951 with a single representative node.
1952 All unlabeled nodes (label == 0) are not pointers and all edges
1953 involving them can be eliminated.
1954 We perform these optimizations during rewrite_constraints
1956 In addition to pointer equivalence class finding, we also perform
1957 location equivalence class finding. This is the set of variables
1958 that always appear together in points-to sets. We use this to
1959 compress the size of the points-to sets. */
1961 /* Current maximum pointer equivalence class id. */
1962 static int pointer_equiv_class;
1964 /* Current maximum location equivalence class id. */
1965 static int location_equiv_class;
1967 /* Recursive routine to find strongly connected components in GRAPH,
1968    and label its nodes with DFS numbers.  Works on the predecessor
     (and implicit-predecessor) edges, unifying SCC members into their
     root via SI->node_mapping.  */
1970 static void
1971 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1973   unsigned int i;
1974   bitmap_iterator bi;
1975   unsigned int my_dfs;
1977   gcc_checking_assert (si->node_mapping[n] == n);
1978   bitmap_set_bit (si->visited, n);
1979   si->dfs[n] = si->current_index ++;
1980   my_dfs = si->dfs[n];
1982   /* Visit all the successors.  */
1983   EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1985       unsigned int w = si->node_mapping[i];
1987       if (bitmap_bit_p (si->deleted, w))
1988 	continue;
1990       if (!bitmap_bit_p (si->visited, w))
1991 	condense_visit (graph, si, w);
1993       unsigned int t = si->node_mapping[w];
1994       gcc_checking_assert (si->node_mapping[n] == n);
1995       if (si->dfs[t] < si->dfs[n])
1996 	si->dfs[n] = si->dfs[t];
1999   /* Visit all the implicit predecessors.  */
2000   EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2002       unsigned int w = si->node_mapping[i];
2004       if (bitmap_bit_p (si->deleted, w))
2005 	continue;
2007       if (!bitmap_bit_p (si->visited, w))
2008 	condense_visit (graph, si, w);
2010       unsigned int t = si->node_mapping[w];
2011       gcc_assert (si->node_mapping[n] == n);
2012       if (si->dfs[t] < si->dfs[n])
2013 	si->dfs[n] = si->dfs[t];
2016   /* See if any components have been identified.  */
2017   if (si->dfs[n] == my_dfs)
2019       while (si->scc_stack.length () != 0
2020 	     && si->dfs[si->scc_stack.last ()] >= my_dfs)
2022 	  unsigned int w = si->scc_stack.pop ();
2023 	  si->node_mapping[w] = n;
     /* The root is only direct if every member of the SCC is.  */
2025 	  if (!bitmap_bit_p (graph->direct_nodes, w))
2026 	    bitmap_clear_bit (graph->direct_nodes, n);
2028 	  /* Unify our nodes.  */
2029 	  if (graph->preds[w])
2031 	      if (!graph->preds[n])
2032 		graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2033 	      bitmap_ior_into (graph->preds[n], graph->preds[w]);
2035 	  if (graph->implicit_preds[w])
2037 	      if (!graph->implicit_preds[n])
2038 		graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2039 	      bitmap_ior_into (graph->implicit_preds[n],
2040 			       graph->implicit_preds[w]);
2042 	  if (graph->points_to[w])
2044 	      if (!graph->points_to[n])
2045 		graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2046 	      bitmap_ior_into (graph->points_to[n],
2047 			       graph->points_to[w]);
2050       bitmap_set_bit (si->deleted, n);
2052   else
2053     si->scc_stack.safe_push (n);
2056 /* Label pointer equivalences: assign graph->pointer_label[N] so that
     nodes with equal points-to label sets share an equivalence class.
     Label 0 means "not a pointer".  */
2058 static void
2059 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2061   unsigned int i, first_pred;
2062   bitmap_iterator bi;
2064   bitmap_set_bit (si->visited, n);
2066   /* Label and union our incoming edges's points to sets.  */
2067   first_pred = -1U;
2068   EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2070       unsigned int w = si->node_mapping[i];
2071       if (!bitmap_bit_p (si->visited, w))
2072 	label_visit (graph, si, w);
2074       /* Skip unused edges */
2075       if (w == n || graph->pointer_label[w] == 0)
2076 	continue;
2078       if (graph->points_to[w])
2080 	  if (!graph->points_to[n])
     /* Delay allocating a set for N until we see a second non-empty
	predecessor; a single predecessor can share its set.  */
2082 	      if (first_pred == -1U)
2083 		first_pred = w;
2084 	      else
2086 		  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2087 		  bitmap_ior (graph->points_to[n],
2088 			      graph->points_to[first_pred],
2089 			      graph->points_to[w]);
2092 	  else
2093 	    bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
2097   /* Indirect nodes get fresh variables and a new pointer equiv class.  */
2098   if (!bitmap_bit_p (graph->direct_nodes, n))
2100       if (!graph->points_to[n])
2102 	  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2103 	  if (first_pred != -1U)
2104 	    bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
2106       bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2107       graph->pointer_label[n] = pointer_equiv_class++;
2108       equiv_class_label_t ecl;
2109       ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2110 				       graph->points_to[n]);
2111       ecl->equivalence_class = graph->pointer_label[n];
2112       return;
2115   /* If there was only a single non-empty predecessor the pointer equiv
2116      class is the same.  */
2117   if (!graph->points_to[n])
2119       if (first_pred != -1U)
2121 	  graph->pointer_label[n] = graph->pointer_label[first_pred];
2122 	  graph->points_to[n] = graph->points_to[first_pred];
     /* Otherwise N has no points-to set and keeps label 0
	("not a pointer").  */
2124       return;
2127   if (!bitmap_empty_p (graph->points_to[n]))
2129       equiv_class_label_t ecl;
2130       ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2131 				       graph->points_to[n]);
2132       if (ecl->equivalence_class == 0)
2133 	ecl->equivalence_class = pointer_equiv_class++;
2134       else
     /* Share the existing class's bitmap to save memory.  */
2136 	  BITMAP_FREE (graph->points_to[n]);
2137 	  graph->points_to[n] = ecl->labels;
2139       graph->pointer_label[n] = ecl->equivalence_class;
2143 /* Print the pred graph in dot format, using SI's node mapping to show
     only SCC representatives.  */
2145 static void
2146 dump_pred_graph (struct scc_info *si, FILE *file)
2148   unsigned int i;
2150   /* Only print the graph if it has already been initialized:  */
2151   if (!graph)
2152     return;
2154   /* Prints the header of the dot file:  */
2155   fprintf (file, "strict digraph {\n");
2156   fprintf (file, "  node [\n    shape = box\n  ]\n");
2157   fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
2158   fprintf (file, "\n  // List of nodes and complex constraints in "
2159 	   "the constraint graph:\n");
2161   /* The next lines print the nodes in the graph together with the
2162      complex constraints attached to them.  */
2163   for (i = 1; i < graph->size; i++)
2165       if (i == FIRST_REF_NODE)
2166 	continue;
2167       if (si->node_mapping[i] != i)
2168 	continue;
2169       if (i < FIRST_REF_NODE)
2170 	fprintf (file, "\"%s\"", get_varinfo (i)->name);
2171       else
2172 	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2173       if (graph->points_to[i]
2174 	  && !bitmap_empty_p (graph->points_to[i]))
2176 	  fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2177 	  unsigned j;
2178 	  bitmap_iterator bi;
2179 	  EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2180 	    fprintf (file, " %d", j);
2181 	  fprintf (file, " }\"]");
2183       fprintf (file, ";\n");
2186   /* Go over the edges.  */
2187   fprintf (file, "\n  // Edges in the constraint graph:\n");
2188   for (i = 1; i < graph->size; i++)
2190       unsigned j;
2191       bitmap_iterator bi;
2192       if (si->node_mapping[i] != i)
2193 	continue;
2194       EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2196 	  unsigned from = si->node_mapping[j];
2197 	  if (from < FIRST_REF_NODE)
2198 	    fprintf (file, "\"%s\"", get_varinfo (from)->name);
2199 	  else
2200 	    fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2201 	  fprintf (file, " -> ");
2202 	  if (i < FIRST_REF_NODE)
2203 	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
2204 	  else
2205 	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2206 	  fprintf (file, ";\n");
2210   /* Prints the tail of the dot file.  */
2211   fprintf (file, "}\n");
2214 /* Perform offline variable substitution, discovering equivalence
2215 classes, and eliminating non-pointer variables. */
2217 static struct scc_info *
2218 perform_var_substitution (constraint_graph_t graph)
2220 unsigned int i;
2221 unsigned int size = graph->size;
2222 struct scc_info *si = init_scc_info (size);
2224 bitmap_obstack_initialize (&iteration_obstack);
2225 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
2226 equiv_class_label_eq, free);
2227 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
2228 equiv_class_label_eq, free);
2229 pointer_equiv_class = 1;
2230 location_equiv_class = 1;
2232 /* Condense the nodes, which means to find SCC's, count incoming
2233 predecessors, and unite nodes in SCC's. */
2234 for (i = 1; i < FIRST_REF_NODE; i++)
2235 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2236 condense_visit (graph, si, si->node_mapping[i]);
2238 if (dump_file && (dump_flags & TDF_GRAPH))
2240 fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
2241 "in dot format:\n");
2242 dump_pred_graph (si, dump_file);
2243 fprintf (dump_file, "\n\n");
2246 bitmap_clear (si->visited);
2247 /* Actually the label the nodes for pointer equivalences */
2248 for (i = 1; i < FIRST_REF_NODE; i++)
2249 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2250 label_visit (graph, si, si->node_mapping[i]);
2252 /* Calculate location equivalence labels. */
2253 for (i = 1; i < FIRST_REF_NODE; i++)
2255 bitmap pointed_by;
2256 bitmap_iterator bi;
2257 unsigned int j;
2259 if (!graph->pointed_by[i])
2260 continue;
2261 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2263 /* Translate the pointed-by mapping for pointer equivalence
2264 labels. */
2265 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2267 bitmap_set_bit (pointed_by,
2268 graph->pointer_label[si->node_mapping[j]]);
2270 /* The original pointed_by is now dead. */
2271 BITMAP_FREE (graph->pointed_by[i]);
2273 /* Look up the location equivalence label if one exists, or make
2274 one otherwise. */
2275 equiv_class_label_t ecl;
2276 ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
2277 if (ecl->equivalence_class == 0)
2278 ecl->equivalence_class = location_equiv_class++;
2279 else
2281 if (dump_file && (dump_flags & TDF_DETAILS))
2282 fprintf (dump_file, "Found location equivalence for node %s\n",
2283 get_varinfo (i)->name);
2284 BITMAP_FREE (pointed_by);
2286 graph->loc_label[i] = ecl->equivalence_class;
2290 if (dump_file && (dump_flags & TDF_DETAILS))
2291 for (i = 1; i < FIRST_REF_NODE; i++)
2293 unsigned j = si->node_mapping[i];
2294 if (j != i)
2296 fprintf (dump_file, "%s node id %d ",
2297 bitmap_bit_p (graph->direct_nodes, i)
2298 ? "Direct" : "Indirect", i);
2299 if (i < FIRST_REF_NODE)
2300 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2301 else
2302 fprintf (dump_file, "\"*%s\"",
2303 get_varinfo (i - FIRST_REF_NODE)->name);
2304 fprintf (dump_file, " mapped to SCC leader node id %d ", j);
2305 if (j < FIRST_REF_NODE)
2306 fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
2307 else
2308 fprintf (dump_file, "\"*%s\"\n",
2309 get_varinfo (j - FIRST_REF_NODE)->name);
2311 else
2313 fprintf (dump_file,
2314 "Equivalence classes for %s node id %d ",
2315 bitmap_bit_p (graph->direct_nodes, i)
2316 ? "direct" : "indirect", i);
2317 if (i < FIRST_REF_NODE)
2318 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2319 else
2320 fprintf (dump_file, "\"*%s\"",
2321 get_varinfo (i - FIRST_REF_NODE)->name);
2322 fprintf (dump_file,
2323 ": pointer %d, location %d\n",
2324 graph->pointer_label[i], graph->loc_label[i]);
2328 /* Quickly eliminate our non-pointer variables. */
2330 for (i = 1; i < FIRST_REF_NODE; i++)
2332 unsigned int node = si->node_mapping[i];
2334 if (graph->pointer_label[node] == 0)
2336 if (dump_file && (dump_flags & TDF_DETAILS))
2337 fprintf (dump_file,
2338 "%s is a non-pointer variable, eliminating edges.\n",
2339 get_varinfo (node)->name);
2340 stats.nonpointer_vars++;
2341 clear_edges_for_node (graph, node);
2345 return si;
2348 /* Free information that was only necessary for variable
2349 substitution. */
2351 static void
2352 free_var_substitution_info (struct scc_info *si)
2354 free_scc_info (si);
2355 free (graph->pointer_label);
2356 free (graph->loc_label);
2357 free (graph->pointed_by);
2358 free (graph->points_to);
2359 free (graph->eq_rep);
2360 sbitmap_free (graph->direct_nodes);
2361 htab_delete (pointer_equiv_class_table);
2362 htab_delete (location_equiv_class_table);
2363 bitmap_obstack_release (&iteration_obstack);
2366 /* Return an existing node that is equivalent to NODE, which has
2367 equivalence class LABEL, if one exists. Return NODE otherwise. */
2369 static unsigned int
2370 find_equivalent_node (constraint_graph_t graph,
2371 unsigned int node, unsigned int label)
2373 /* If the address version of this variable is unused, we can
2374 substitute it for anything else with the same label.
2375 Otherwise, we know the pointers are equivalent, but not the
2376 locations, and we can unite them later. */
2378 if (!bitmap_bit_p (graph->address_taken, node))
2380 gcc_checking_assert (label < graph->size);
2382 if (graph->eq_rep[label] != -1)
2384 /* Unify the two variables since we know they are equivalent. */
2385 if (unite (graph->eq_rep[label], node))
2386 unify_nodes (graph, graph->eq_rep[label], node, false);
2387 return graph->eq_rep[label];
2389 else
2391 graph->eq_rep[label] = node;
2392 graph->pe_rep[label] = node;
2395 else
2397 gcc_checking_assert (label < graph->size);
2398 graph->pe[node] = label;
2399 if (graph->pe_rep[label] == -1)
2400 graph->pe_rep[label] = node;
2403 return node;
2406 /* Unite pointer equivalent but not location equivalent nodes in
2407 GRAPH. This may only be performed once variable substitution is
2408 finished. */
2410 static void
2411 unite_pointer_equivalences (constraint_graph_t graph)
2413 unsigned int i;
2415 /* Go through the pointer equivalences and unite them to their
2416 representative, if they aren't already. */
2417 for (i = 1; i < FIRST_REF_NODE; i++)
2419 unsigned int label = graph->pe[i];
2420 if (label)
2422 int label_rep = graph->pe_rep[label];
2424 if (label_rep == -1)
2425 continue;
2427 label_rep = find (label_rep);
2428 if (label_rep >= 0 && unite (label_rep, find (i)))
2429 unify_nodes (graph, label_rep, i, false);
2434 /* Move complex constraints to the GRAPH nodes they belong to. */
2436 static void
2437 move_complex_constraints (constraint_graph_t graph)
2439 int i;
2440 constraint_t c;
2442 FOR_EACH_VEC_ELT (constraints, i, c)
2444 if (c)
2446 struct constraint_expr lhs = c->lhs;
2447 struct constraint_expr rhs = c->rhs;
2449 if (lhs.type == DEREF)
2451 insert_into_complex (graph, lhs.var, c);
2453 else if (rhs.type == DEREF)
2455 if (!(get_varinfo (lhs.var)->is_special_var))
2456 insert_into_complex (graph, rhs.var, c);
2458 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2459 && (lhs.offset != 0 || rhs.offset != 0))
2461 insert_into_complex (graph, rhs.var, c);
2468 /* Optimize and rewrite complex constraints while performing
2469 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2470 result of perform_variable_substitution. */
2472 static void
2473 rewrite_constraints (constraint_graph_t graph,
2474 struct scc_info *si)
2476 int i;
2477 constraint_t c;
2479 #ifdef ENABLE_CHECKING
2480 for (unsigned int j = 0; j < graph->size; j++)
2481 gcc_assert (find (j) == j);
2482 #endif
2484 FOR_EACH_VEC_ELT (constraints, i, c)
2486 struct constraint_expr lhs = c->lhs;
2487 struct constraint_expr rhs = c->rhs;
2488 unsigned int lhsvar = find (lhs.var);
2489 unsigned int rhsvar = find (rhs.var);
2490 unsigned int lhsnode, rhsnode;
2491 unsigned int lhslabel, rhslabel;
2493 lhsnode = si->node_mapping[lhsvar];
2494 rhsnode = si->node_mapping[rhsvar];
2495 lhslabel = graph->pointer_label[lhsnode];
2496 rhslabel = graph->pointer_label[rhsnode];
2498 /* See if it is really a non-pointer variable, and if so, ignore
2499 the constraint. */
2500 if (lhslabel == 0)
2502 if (dump_file && (dump_flags & TDF_DETAILS))
2505 fprintf (dump_file, "%s is a non-pointer variable,"
2506 "ignoring constraint:",
2507 get_varinfo (lhs.var)->name);
2508 dump_constraint (dump_file, c);
2509 fprintf (dump_file, "\n");
2511 constraints[i] = NULL;
2512 continue;
2515 if (rhslabel == 0)
2517 if (dump_file && (dump_flags & TDF_DETAILS))
2520 fprintf (dump_file, "%s is a non-pointer variable,"
2521 "ignoring constraint:",
2522 get_varinfo (rhs.var)->name);
2523 dump_constraint (dump_file, c);
2524 fprintf (dump_file, "\n");
2526 constraints[i] = NULL;
2527 continue;
2530 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2531 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2532 c->lhs.var = lhsvar;
2533 c->rhs.var = rhsvar;
2537 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2538 part of an SCC, false otherwise. */
2540 static bool
2541 eliminate_indirect_cycles (unsigned int node)
2543 if (graph->indirect_cycles[node] != -1
2544 && !bitmap_empty_p (get_varinfo (node)->solution))
2546 unsigned int i;
2547 vec<unsigned> queue = vNULL;
2548 int queuepos;
2549 unsigned int to = find (graph->indirect_cycles[node]);
2550 bitmap_iterator bi;
2552 /* We can't touch the solution set and call unify_nodes
2553 at the same time, because unify_nodes is going to do
2554 bitmap unions into it. */
2556 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2558 if (find (i) == i && i != to)
2560 if (unite (to, i))
2561 queue.safe_push (i);
2565 for (queuepos = 0;
2566 queue.iterate (queuepos, &i);
2567 queuepos++)
2569 unify_nodes (graph, to, i, true);
2571 queue.release ();
2572 return true;
2574 return false;
2577 /* Solve the constraint graph GRAPH using our worklist solver.
2578 This is based on the PW* family of solvers from the "Efficient Field
2579 Sensitive Pointer Analysis for C" paper.
2580 It works by iterating over all the graph nodes, processing the complex
2581 constraints and propagating the copy constraints, until everything stops
2582 changed. This corresponds to steps 6-8 in the solving list given above. */
2584 static void
2585 solve_graph (constraint_graph_t graph)
2587 unsigned int size = graph->size;
2588 unsigned int i;
2589 bitmap pts;
2591 changed = BITMAP_ALLOC (NULL);
2593 /* Mark all initial non-collapsed nodes as changed. */
2594 for (i = 1; i < size; i++)
2596 varinfo_t ivi = get_varinfo (i);
2597 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2598 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2599 || graph->complex[i].length () > 0))
2600 bitmap_set_bit (changed, i);
2603 /* Allocate a bitmap to be used to store the changed bits. */
2604 pts = BITMAP_ALLOC (&pta_obstack);
2606 while (!bitmap_empty_p (changed))
2608 unsigned int i;
2609 struct topo_info *ti = init_topo_info ();
2610 stats.iterations++;
2612 bitmap_obstack_initialize (&iteration_obstack);
2614 compute_topo_order (graph, ti);
2616 while (ti->topo_order.length () != 0)
2619 i = ti->topo_order.pop ();
2621 /* If this variable is not a representative, skip it. */
2622 if (find (i) != i)
2623 continue;
2625 /* In certain indirect cycle cases, we may merge this
2626 variable to another. */
2627 if (eliminate_indirect_cycles (i) && find (i) != i)
2628 continue;
2630 /* If the node has changed, we need to process the
2631 complex constraints and outgoing edges again. */
2632 if (bitmap_clear_bit (changed, i))
2634 unsigned int j;
2635 constraint_t c;
2636 bitmap solution;
2637 vec<constraint_t> complex = graph->complex[i];
2638 varinfo_t vi = get_varinfo (i);
2639 bool solution_empty;
2641 /* Compute the changed set of solution bits. If anything
2642 is in the solution just propagate that. */
2643 if (bitmap_bit_p (vi->solution, anything_id))
2645 /* If anything is also in the old solution there is
2646 nothing to do.
2647 ??? But we shouldn't ended up with "changed" set ... */
2648 if (vi->oldsolution
2649 && bitmap_bit_p (vi->oldsolution, anything_id))
2650 continue;
2651 bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
2653 else if (vi->oldsolution)
2654 bitmap_and_compl (pts, vi->solution, vi->oldsolution);
2655 else
2656 bitmap_copy (pts, vi->solution);
2658 if (bitmap_empty_p (pts))
2659 continue;
2661 if (vi->oldsolution)
2662 bitmap_ior_into (vi->oldsolution, pts);
2663 else
2665 vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
2666 bitmap_copy (vi->oldsolution, pts);
2669 solution = vi->solution;
2670 solution_empty = bitmap_empty_p (solution);
2672 /* Process the complex constraints */
2673 FOR_EACH_VEC_ELT (complex, j, c)
2675 /* XXX: This is going to unsort the constraints in
2676 some cases, which will occasionally add duplicate
2677 constraints during unification. This does not
2678 affect correctness. */
2679 c->lhs.var = find (c->lhs.var);
2680 c->rhs.var = find (c->rhs.var);
2682 /* The only complex constraint that can change our
2683 solution to non-empty, given an empty solution,
2684 is a constraint where the lhs side is receiving
2685 some set from elsewhere. */
2686 if (!solution_empty || c->lhs.type != DEREF)
2687 do_complex_constraint (graph, c, pts);
2690 solution_empty = bitmap_empty_p (solution);
2692 if (!solution_empty)
2694 bitmap_iterator bi;
2695 unsigned eff_escaped_id = find (escaped_id);
2697 /* Propagate solution to all successors. */
2698 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2699 0, j, bi)
2701 bitmap tmp;
2702 bool flag;
2704 unsigned int to = find (j);
2705 tmp = get_varinfo (to)->solution;
2706 flag = false;
2708 /* Don't try to propagate to ourselves. */
2709 if (to == i)
2710 continue;
2712 /* If we propagate from ESCAPED use ESCAPED as
2713 placeholder. */
2714 if (i == eff_escaped_id)
2715 flag = bitmap_set_bit (tmp, escaped_id);
2716 else
2717 flag = bitmap_ior_into (tmp, pts);
2719 if (flag)
2720 bitmap_set_bit (changed, to);
2725 free_topo_info (ti);
2726 bitmap_obstack_release (&iteration_obstack);
2729 BITMAP_FREE (pts);
2730 BITMAP_FREE (changed);
2731 bitmap_obstack_release (&oldpta_obstack);
2734 /* Map from trees to variable infos. */
2735 static struct pointer_map_t *vi_for_tree;
2738 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2740 static void
2741 insert_vi_for_tree (tree t, varinfo_t vi)
2743 void **slot = pointer_map_insert (vi_for_tree, t);
2744 gcc_assert (vi);
2745 gcc_assert (*slot == NULL);
2746 *slot = vi;
2749 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2750 exist in the map, return NULL, otherwise, return the varinfo we found. */
2752 static varinfo_t
2753 lookup_vi_for_tree (tree t)
2755 void **slot = pointer_map_contains (vi_for_tree, t);
2756 if (slot == NULL)
2757 return NULL;
2759 return (varinfo_t) *slot;
2762 /* Return a printable name for DECL */
2764 static const char *
2765 alias_get_name (tree decl)
2767 const char *res = NULL;
2768 char *temp;
2769 int num_printed = 0;
2771 if (!dump_file)
2772 return "NULL";
2774 if (TREE_CODE (decl) == SSA_NAME)
2776 res = get_name (decl);
2777 if (res)
2778 num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
2779 else
2780 num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
2781 if (num_printed > 0)
2783 res = ggc_strdup (temp);
2784 free (temp);
2787 else if (DECL_P (decl))
2789 if (DECL_ASSEMBLER_NAME_SET_P (decl))
2790 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2791 else
2793 res = get_name (decl);
2794 if (!res)
2796 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2797 if (num_printed > 0)
2799 res = ggc_strdup (temp);
2800 free (temp);
2805 if (res != NULL)
2806 return res;
2808 return "NULL";
2811 /* Find the variable id for tree T in the map.
2812 If T doesn't exist in the map, create an entry for it and return it. */
2814 static varinfo_t
2815 get_vi_for_tree (tree t)
2817 void **slot = pointer_map_contains (vi_for_tree, t);
2818 if (slot == NULL)
2819 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2821 return (varinfo_t) *slot;
2824 /* Get a scalar constraint expression for a new temporary variable. */
2826 static struct constraint_expr
2827 new_scalar_tmp_constraint_exp (const char *name)
2829 struct constraint_expr tmp;
2830 varinfo_t vi;
2832 vi = new_var_info (NULL_TREE, name);
2833 vi->offset = 0;
2834 vi->size = -1;
2835 vi->fullsize = -1;
2836 vi->is_full_var = 1;
2838 tmp.var = vi->id;
2839 tmp.type = SCALAR;
2840 tmp.offset = 0;
2842 return tmp;
2845 /* Get a constraint expression vector from an SSA_VAR_P node.
2846 If address_p is true, the result will be taken its address of. */
2848 static void
2849 get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
2851 struct constraint_expr cexpr;
2852 varinfo_t vi;
2854 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2855 gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
2857 /* For parameters, get at the points-to set for the actual parm
2858 decl. */
2859 if (TREE_CODE (t) == SSA_NAME
2860 && SSA_NAME_IS_DEFAULT_DEF (t)
2861 && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2862 || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
2864 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2865 return;
2868 /* For global variables resort to the alias target. */
2869 if (TREE_CODE (t) == VAR_DECL
2870 && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
2872 struct varpool_node *node = varpool_get_node (t);
2873 if (node && node->alias)
2875 node = varpool_variable_node (node, NULL);
2876 t = node->symbol.decl;
2880 vi = get_vi_for_tree (t);
2881 cexpr.var = vi->id;
2882 cexpr.type = SCALAR;
2883 cexpr.offset = 0;
2884 /* If we determine the result is "anything", and we know this is readonly,
2885 say it points to readonly memory instead. */
2886 if (cexpr.var == anything_id && TREE_READONLY (t))
2888 gcc_unreachable ();
2889 cexpr.type = ADDRESSOF;
2890 cexpr.var = readonly_id;
2893 /* If we are not taking the address of the constraint expr, add all
2894 sub-fiels of the variable as well. */
2895 if (!address_p
2896 && !vi->is_full_var)
2898 for (; vi; vi = vi_next (vi))
2900 cexpr.var = vi->id;
2901 results->safe_push (cexpr);
2903 return;
2906 results->safe_push (cexpr);
2909 /* Process constraint T, performing various simplifications and then
2910 adding it to our list of overall constraints. */
2912 static void
2913 process_constraint (constraint_t t)
2915 struct constraint_expr rhs = t->rhs;
2916 struct constraint_expr lhs = t->lhs;
2918 gcc_assert (rhs.var < varmap.length ());
2919 gcc_assert (lhs.var < varmap.length ());
2921 /* If we didn't get any useful constraint from the lhs we get
2922 &ANYTHING as fallback from get_constraint_for. Deal with
2923 it here by turning it into *ANYTHING. */
2924 if (lhs.type == ADDRESSOF
2925 && lhs.var == anything_id)
2926 lhs.type = DEREF;
2928 /* ADDRESSOF on the lhs is invalid. */
2929 gcc_assert (lhs.type != ADDRESSOF);
2931 /* We shouldn't add constraints from things that cannot have pointers.
2932 It's not completely trivial to avoid in the callers, so do it here. */
2933 if (rhs.type != ADDRESSOF
2934 && !get_varinfo (rhs.var)->may_have_pointers)
2935 return;
2937 /* Likewise adding to the solution of a non-pointer var isn't useful. */
2938 if (!get_varinfo (lhs.var)->may_have_pointers)
2939 return;
2941 /* This can happen in our IR with things like n->a = *p */
2942 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2944 /* Split into tmp = *rhs, *lhs = tmp */
2945 struct constraint_expr tmplhs;
2946 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
2947 process_constraint (new_constraint (tmplhs, rhs));
2948 process_constraint (new_constraint (lhs, tmplhs));
2950 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2952 /* Split into tmp = &rhs, *lhs = tmp */
2953 struct constraint_expr tmplhs;
2954 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
2955 process_constraint (new_constraint (tmplhs, rhs));
2956 process_constraint (new_constraint (lhs, tmplhs));
2958 else
2960 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2961 constraints.safe_push (t);
2966 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2967 structure. */
2969 static HOST_WIDE_INT
2970 bitpos_of_field (const tree fdecl)
2972 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2973 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2974 return -1;
2976 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
2977 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2981 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
2982 resulting constraint expressions in *RESULTS. */
2984 static void
2985 get_constraint_for_ptr_offset (tree ptr, tree offset,
2986 vec<ce_s> *results)
2988 struct constraint_expr c;
2989 unsigned int j, n;
2990 HOST_WIDE_INT rhsoffset;
2992 /* If we do not do field-sensitive PTA adding offsets to pointers
2993 does not change the points-to solution. */
2994 if (!use_field_sensitive)
2996 get_constraint_for_rhs (ptr, results);
2997 return;
3000 /* If the offset is not a non-negative integer constant that fits
3001 in a HOST_WIDE_INT, we have to fall back to a conservative
3002 solution which includes all sub-fields of all pointed-to
3003 variables of ptr. */
3004 if (offset == NULL_TREE
3005 || TREE_CODE (offset) != INTEGER_CST)
3006 rhsoffset = UNKNOWN_OFFSET;
3007 else
3009 /* Sign-extend the offset. */
3010 double_int soffset = tree_to_double_int (offset)
3011 .sext (TYPE_PRECISION (TREE_TYPE (offset)));
3012 if (!soffset.fits_shwi ())
3013 rhsoffset = UNKNOWN_OFFSET;
3014 else
3016 /* Make sure the bit-offset also fits. */
3017 HOST_WIDE_INT rhsunitoffset = soffset.low;
3018 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
3019 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3020 rhsoffset = UNKNOWN_OFFSET;
3024 get_constraint_for_rhs (ptr, results);
3025 if (rhsoffset == 0)
3026 return;
3028 /* As we are eventually appending to the solution do not use
3029 vec::iterate here. */
3030 n = results->length ();
3031 for (j = 0; j < n; j++)
3033 varinfo_t curr;
3034 c = (*results)[j];
3035 curr = get_varinfo (c.var);
3037 if (c.type == ADDRESSOF
3038 /* If this varinfo represents a full variable just use it. */
3039 && curr->is_full_var)
3040 c.offset = 0;
3041 else if (c.type == ADDRESSOF
3042 /* If we do not know the offset add all subfields. */
3043 && rhsoffset == UNKNOWN_OFFSET)
3045 varinfo_t temp = get_varinfo (curr->head);
3048 struct constraint_expr c2;
3049 c2.var = temp->id;
3050 c2.type = ADDRESSOF;
3051 c2.offset = 0;
3052 if (c2.var != c.var)
3053 results->safe_push (c2);
3054 temp = vi_next (temp);
3056 while (temp);
3058 else if (c.type == ADDRESSOF)
3060 varinfo_t temp;
3061 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3063 /* Search the sub-field which overlaps with the
3064 pointed-to offset. If the result is outside of the variable
3065 we have to provide a conservative result, as the variable is
3066 still reachable from the resulting pointer (even though it
3067 technically cannot point to anything). The last and first
3068 sub-fields are such conservative results.
3069 ??? If we always had a sub-field for &object + 1 then
3070 we could represent this in a more precise way. */
3071 if (rhsoffset < 0
3072 && curr->offset < offset)
3073 offset = 0;
3074 temp = first_or_preceding_vi_for_offset (curr, offset);
3076 /* If the found variable is not exactly at the pointed to
3077 result, we have to include the next variable in the
3078 solution as well. Otherwise two increments by offset / 2
3079 do not result in the same or a conservative superset
3080 solution. */
3081 if (temp->offset != offset
3082 && temp->next != 0)
3084 struct constraint_expr c2;
3085 c2.var = temp->next;
3086 c2.type = ADDRESSOF;
3087 c2.offset = 0;
3088 results->safe_push (c2);
3090 c.var = temp->id;
3091 c.offset = 0;
3093 else
3094 c.offset = rhsoffset;
3096 (*results)[j] = c;
3101 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3102 If address_p is true the result will be taken its address of.
3103 If lhs_p is true then the constraint expression is assumed to be used
3104 as the lhs. */
3106 static void
3107 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3108 bool address_p, bool lhs_p)
3110 tree orig_t = t;
3111 HOST_WIDE_INT bitsize = -1;
3112 HOST_WIDE_INT bitmaxsize = -1;
3113 HOST_WIDE_INT bitpos;
3114 tree forzero;
3116 /* Some people like to do cute things like take the address of
3117 &0->a.b */
3118 forzero = t;
3119 while (handled_component_p (forzero)
3120 || INDIRECT_REF_P (forzero)
3121 || TREE_CODE (forzero) == MEM_REF)
3122 forzero = TREE_OPERAND (forzero, 0);
3124 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3126 struct constraint_expr temp;
3128 temp.offset = 0;
3129 temp.var = integer_id;
3130 temp.type = SCALAR;
3131 results->safe_push (temp);
3132 return;
3135 /* Handle type-punning through unions. If we are extracting a pointer
3136 from a union via a possibly type-punning access that pointer
3137 points to anything, similar to a conversion of an integer to
3138 a pointer. */
3139 if (!lhs_p)
3141 tree u;
3142 for (u = t;
3143 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
3144 u = TREE_OPERAND (u, 0))
3145 if (TREE_CODE (u) == COMPONENT_REF
3146 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
3148 struct constraint_expr temp;
3150 temp.offset = 0;
3151 temp.var = anything_id;
3152 temp.type = ADDRESSOF;
3153 results->safe_push (temp);
3154 return;
3158 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
3160 /* Pretend to take the address of the base, we'll take care of
3161 adding the required subset of sub-fields below. */
3162 get_constraint_for_1 (t, results, true, lhs_p);
3163 gcc_assert (results->length () == 1);
3164 struct constraint_expr &result = results->last ();
3166 if (result.type == SCALAR
3167 && get_varinfo (result.var)->is_full_var)
3168 /* For single-field vars do not bother about the offset. */
3169 result.offset = 0;
3170 else if (result.type == SCALAR)
3172 /* In languages like C, you can access one past the end of an
3173 array. You aren't allowed to dereference it, so we can
3174 ignore this constraint. When we handle pointer subtraction,
3175 we may have to do something cute here. */
3177 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
3178 && bitmaxsize != 0)
3180 /* It's also not true that the constraint will actually start at the
3181 right offset, it may start in some padding. We only care about
3182 setting the constraint to the first actual field it touches, so
3183 walk to find it. */
3184 struct constraint_expr cexpr = result;
3185 varinfo_t curr;
3186 results->pop ();
3187 cexpr.offset = 0;
3188 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3190 if (ranges_overlap_p (curr->offset, curr->size,
3191 bitpos, bitmaxsize))
3193 cexpr.var = curr->id;
3194 results->safe_push (cexpr);
3195 if (address_p)
3196 break;
3199 /* If we are going to take the address of this field then
3200 to be able to compute reachability correctly add at least
3201 the last field of the variable. */
3202 if (address_p && results->length () == 0)
3204 curr = get_varinfo (cexpr.var);
3205 while (curr->next != 0)
3206 curr = vi_next (curr);
3207 cexpr.var = curr->id;
3208 results->safe_push (cexpr);
3210 else if (results->length () == 0)
3211 /* Assert that we found *some* field there. The user couldn't be
3212 accessing *only* padding. */
3213 /* Still the user could access one past the end of an array
3214 embedded in a struct resulting in accessing *only* padding. */
3215 /* Or accessing only padding via type-punning to a type
3216 that has a filed just in padding space. */
3218 cexpr.type = SCALAR;
3219 cexpr.var = anything_id;
3220 cexpr.offset = 0;
3221 results->safe_push (cexpr);
3224 else if (bitmaxsize == 0)
3226 if (dump_file && (dump_flags & TDF_DETAILS))
3227 fprintf (dump_file, "Access to zero-sized part of variable,"
3228 "ignoring\n");
3230 else
3231 if (dump_file && (dump_flags & TDF_DETAILS))
3232 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3234 else if (result.type == DEREF)
3236 /* If we do not know exactly where the access goes say so. Note
3237 that only for non-structure accesses we know that we access
3238 at most one subfiled of any variable. */
3239 if (bitpos == -1
3240 || bitsize != bitmaxsize
3241 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3242 || result.offset == UNKNOWN_OFFSET)
3243 result.offset = UNKNOWN_OFFSET;
3244 else
3245 result.offset += bitpos;
3247 else if (result.type == ADDRESSOF)
3249 /* We can end up here for component references on a
3250 VIEW_CONVERT_EXPR <>(&foobar). */
3251 result.type = SCALAR;
3252 result.var = anything_id;
3253 result.offset = 0;
3255 else
3256 gcc_unreachable ();
3260 /* Dereference the constraint expression CONS, and return the result.
3261 DEREF (ADDRESSOF) = SCALAR
3262 DEREF (SCALAR) = DEREF
3263 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3264 This is needed so that we can handle dereferencing DEREF constraints. */
3266 static void
3267 do_deref (vec<ce_s> *constraints)
3269 struct constraint_expr *c;
3270 unsigned int i = 0;
3272 FOR_EACH_VEC_ELT (*constraints, i, c)
3274 if (c->type == SCALAR)
3275 c->type = DEREF;
3276 else if (c->type == ADDRESSOF)
3277 c->type = SCALAR;
3278 else if (c->type == DEREF)
3280 struct constraint_expr tmplhs;
3281 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3282 process_constraint (new_constraint (tmplhs, *c));
3283 c->var = tmplhs.var;
3285 else
3286 gcc_unreachable ();
3290 /* Given a tree T, return the constraint expression for taking the
3291 address of it. */
3293 static void
3294 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3296 struct constraint_expr *c;
3297 unsigned int i;
3299 get_constraint_for_1 (t, results, true, true);
3301 FOR_EACH_VEC_ELT (*results, i, c)
3303 if (c->type == DEREF)
3304 c->type = SCALAR;
3305 else
3306 c->type = ADDRESSOF;
3310 /* Given a tree T, return the constraint expression for it. */
3312 static void
3313 get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
3314 bool lhs_p)
3316 struct constraint_expr temp;
3318 /* x = integer is all glommed to a single variable, which doesn't
3319 point to anything by itself. That is, of course, unless it is an
3320 integer constant being treated as a pointer, in which case, we
3321 will return that this is really the addressof anything. This
3322 happens below, since it will fall into the default case. The only
3323 case we know something about an integer treated like a pointer is
3324 when it is the NULL pointer, and then we just say it points to
3325 NULL.
3327 Do not do that if -fno-delete-null-pointer-checks though, because
3328 in that case *NULL does not fail, so it _should_ alias *anything.
3329 It is not worth adding a new option or renaming the existing one,
3330 since this case is relatively obscure. */
3331 if ((TREE_CODE (t) == INTEGER_CST
3332 && integer_zerop (t))
3333 /* The only valid CONSTRUCTORs in gimple with pointer typed
3334 elements are zero-initializer. But in IPA mode we also
3335 process global initializers, so verify at least. */
3336 || (TREE_CODE (t) == CONSTRUCTOR
3337 && CONSTRUCTOR_NELTS (t) == 0))
3339 if (flag_delete_null_pointer_checks)
3340 temp.var = nothing_id;
3341 else
3342 temp.var = nonlocal_id;
3343 temp.type = ADDRESSOF;
3344 temp.offset = 0;
3345 results->safe_push (temp);
3346 return;
3349 /* String constants are read-only. */
3350 if (TREE_CODE (t) == STRING_CST)
3352 temp.var = readonly_id;
3353 temp.type = SCALAR;
3354 temp.offset = 0;
3355 results->safe_push (temp);
3356 return;
3359 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3361 case tcc_expression:
3363 switch (TREE_CODE (t))
3365 case ADDR_EXPR:
3366 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3367 return;
3368 default:;
3370 break;
3372 case tcc_reference:
3374 switch (TREE_CODE (t))
3376 case MEM_REF:
3378 struct constraint_expr cs;
3379 varinfo_t vi, curr;
3380 get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3381 TREE_OPERAND (t, 1), results);
3382 do_deref (results);
3384 /* If we are not taking the address then make sure to process
3385 all subvariables we might access. */
3386 if (address_p)
3387 return;
3389 cs = results->last ();
3390 if (cs.type == DEREF
3391 && type_can_have_subvars (TREE_TYPE (t)))
3393 /* For dereferences this means we have to defer it
3394 to solving time. */
3395 results->last ().offset = UNKNOWN_OFFSET;
3396 return;
3398 if (cs.type != SCALAR)
3399 return;
3401 vi = get_varinfo (cs.var);
3402 curr = vi_next (vi);
3403 if (!vi->is_full_var
3404 && curr)
3406 unsigned HOST_WIDE_INT size;
3407 if (host_integerp (TYPE_SIZE (TREE_TYPE (t)), 1))
3408 size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t)));
3409 else
3410 size = -1;
3411 for (; curr; curr = vi_next (curr))
3413 if (curr->offset - vi->offset < size)
3415 cs.var = curr->id;
3416 results->safe_push (cs);
3418 else
3419 break;
3422 return;
3424 case ARRAY_REF:
3425 case ARRAY_RANGE_REF:
3426 case COMPONENT_REF:
3427 get_constraint_for_component_ref (t, results, address_p, lhs_p);
3428 return;
3429 case VIEW_CONVERT_EXPR:
3430 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
3431 lhs_p);
3432 return;
3433 /* We are missing handling for TARGET_MEM_REF here. */
3434 default:;
3436 break;
3438 case tcc_exceptional:
3440 switch (TREE_CODE (t))
3442 case SSA_NAME:
3444 get_constraint_for_ssa_var (t, results, address_p);
3445 return;
3447 case CONSTRUCTOR:
3449 unsigned int i;
3450 tree val;
3451 vec<ce_s> tmp = vNULL;
3452 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3454 struct constraint_expr *rhsp;
3455 unsigned j;
3456 get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3457 FOR_EACH_VEC_ELT (tmp, j, rhsp)
3458 results->safe_push (*rhsp);
3459 tmp.truncate (0);
3461 tmp.release ();
3462 /* We do not know whether the constructor was complete,
3463 so technically we have to add &NOTHING or &ANYTHING
3464 like we do for an empty constructor as well. */
3465 return;
3467 default:;
3469 break;
3471 case tcc_declaration:
3473 get_constraint_for_ssa_var (t, results, address_p);
3474 return;
3476 case tcc_constant:
3478 /* We cannot refer to automatic variables through constants. */
3479 temp.type = ADDRESSOF;
3480 temp.var = nonlocal_id;
3481 temp.offset = 0;
3482 results->safe_push (temp);
3483 return;
3485 default:;
3488 /* The default fallback is a constraint from anything. */
3489 temp.type = ADDRESSOF;
3490 temp.var = anything_id;
3491 temp.offset = 0;
3492 results->safe_push (temp);
3495 /* Given a gimple tree T, return the constraint expression vector for it. */
3497 static void
3498 get_constraint_for (tree t, vec<ce_s> *results)
3500 gcc_assert (results->length () == 0);
3502 get_constraint_for_1 (t, results, false, true);
3505 /* Given a gimple tree T, return the constraint expression vector for it
3506 to be used as the rhs of a constraint. */
3508 static void
3509 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3511 gcc_assert (results->length () == 0);
3513 get_constraint_for_1 (t, results, false, false);
3517 /* Efficiently generates constraints from all entries in *RHSC to all
3518 entries in *LHSC. */
3520 static void
3521 process_all_all_constraints (vec<ce_s> lhsc,
3522 vec<ce_s> rhsc)
3524 struct constraint_expr *lhsp, *rhsp;
3525 unsigned i, j;
3527 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3529 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3530 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3531 process_constraint (new_constraint (*lhsp, *rhsp));
3533 else
3535 struct constraint_expr tmp;
3536 tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3537 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3538 process_constraint (new_constraint (tmp, *rhsp));
3539 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3540 process_constraint (new_constraint (*lhsp, tmp));
3544 /* Handle aggregate copies by expanding into copies of the respective
3545 fields of the structures. */
3547 static void
3548 do_structure_copy (tree lhsop, tree rhsop)
3550 struct constraint_expr *lhsp, *rhsp;
3551 vec<ce_s> lhsc = vNULL;
3552 vec<ce_s> rhsc = vNULL;
3553 unsigned j;
3555 get_constraint_for (lhsop, &lhsc);
3556 get_constraint_for_rhs (rhsop, &rhsc);
3557 lhsp = &lhsc[0];
3558 rhsp = &rhsc[0];
  /* If either side is a dereference, or the destination is completely
     unknown, individual fields cannot be matched up -- fall back to an
     all-to-all copy with unknown offsets.  */
3559 if (lhsp->type == DEREF
3560 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3561 || rhsp->type == DEREF)
3563 if (lhsp->type == DEREF)
3565 gcc_assert (lhsc.length () == 1);
3566 lhsp->offset = UNKNOWN_OFFSET;
3568 if (rhsp->type == DEREF)
3570 gcc_assert (rhsc.length () == 1);
3571 rhsp->offset = UNKNOWN_OFFSET;
3573 process_all_all_constraints (lhsc, rhsc);
  /* Otherwise both sides decompose into sub-fields; walk the
     destination fields with cursor J and the source fields with
     cursor K, emitting copies for overlapping field pairs.  */
3575 else if (lhsp->type == SCALAR
3576 && (rhsp->type == SCALAR
3577 || rhsp->type == ADDRESSOF))
3579 HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
3580 HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
3581 unsigned k = 0;
3582 get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
3583 get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
3584 for (j = 0; lhsc.iterate (j, &lhsp);)
3586 varinfo_t lhsv, rhsv;
3587 rhsp = &rhsc[k];
3588 lhsv = get_varinfo (lhsp->var);
3589 rhsv = get_varinfo (rhsp->var);
  /* Emit a copy when the destination field may contain pointers
     and the two fields' extents overlap (full variables always
     count as overlapping).  */
3590 if (lhsv->may_have_pointers
3591 && (lhsv->is_full_var
3592 || rhsv->is_full_var
3593 || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3594 rhsv->offset + lhsoffset, rhsv->size)))
3595 process_constraint (new_constraint (*lhsp, *rhsp));
  /* Advance the source cursor while the destination field extends
     past the end of the current source field; otherwise move to
     the next destination field.  */
3596 if (!rhsv->is_full_var
3597 && (lhsv->is_full_var
3598 || (lhsv->offset + rhsoffset + lhsv->size
3599 > rhsv->offset + lhsoffset + rhsv->size)))
3601 ++k;
3602 if (k >= rhsc.length ())
3603 break;
3605 else
3606 ++j;
  /* Any other combination of constraint kinds is not expected for
     an aggregate assignment.  */
3609 else
3610 gcc_unreachable ();
3612 lhsc.release ();
3613 rhsc.release ();
3616 /* Create constraints ID = { rhsc }. */
3618 static void
3619 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3621 struct constraint_expr *c;
3622 struct constraint_expr includes;
3623 unsigned int j;
3625 includes.var = id;
3626 includes.offset = 0;
3627 includes.type = SCALAR;
3629 FOR_EACH_VEC_ELT (rhsc, j, c)
3630 process_constraint (new_constraint (includes, *c));
3633 /* Create a constraint ID = OP. */
3635 static void
3636 make_constraint_to (unsigned id, tree op)
3638 vec<ce_s> rhsc = vNULL;
3639 get_constraint_for_rhs (op, &rhsc);
3640 make_constraints_to (id, rhsc);
3641 rhsc.release ();
3644 /* Create a constraint ID = &FROM. */
3646 static void
3647 make_constraint_from (varinfo_t vi, int from)
3649 struct constraint_expr lhs, rhs;
3651 lhs.var = vi->id;
3652 lhs.offset = 0;
3653 lhs.type = SCALAR;
3655 rhs.var = from;
3656 rhs.offset = 0;
3657 rhs.type = ADDRESSOF;
3658 process_constraint (new_constraint (lhs, rhs));
3661 /* Create a constraint ID = FROM. */
3663 static void
3664 make_copy_constraint (varinfo_t vi, int from)
3666 struct constraint_expr lhs, rhs;
3668 lhs.var = vi->id;
3669 lhs.offset = 0;
3670 lhs.type = SCALAR;
3672 rhs.var = from;
3673 rhs.offset = 0;
3674 rhs.type = SCALAR;
3675 process_constraint (new_constraint (lhs, rhs));
3678 /* Make constraints necessary to make OP escape. */
3680 static void
3681 make_escape_constraint (tree op)
3683 make_constraint_to (escaped_id, op);
3686 /* Add constraints to that the solution of VI is transitively closed. */
3688 static void
3689 make_transitive_closure_constraints (varinfo_t vi)
3691 struct constraint_expr lhs, rhs;
3693 /* VAR = *VAR; */
3694 lhs.type = SCALAR;
3695 lhs.var = vi->id;
3696 lhs.offset = 0;
3697 rhs.type = DEREF;
3698 rhs.var = vi->id;
3699 rhs.offset = 0;
3700 process_constraint (new_constraint (lhs, rhs));
3702 /* VAR = VAR + UNKNOWN; */
3703 lhs.type = SCALAR;
3704 lhs.var = vi->id;
3705 lhs.offset = 0;
3706 rhs.type = SCALAR;
3707 rhs.var = vi->id;
3708 rhs.offset = UNKNOWN_OFFSET;
3709 process_constraint (new_constraint (lhs, rhs));
3712 /* Temporary storage for fake var decls. */
3713 struct obstack fake_var_decl_obstack;
3715 /* Build a fake VAR_DECL acting as referrer to a DECL_UID.
   The decl is carved out of FAKE_VAR_DECL_OBSTACK rather than GC
   memory, so it lives only as long as that obstack.  TYPE becomes
   the decl's TREE_TYPE.  */
3717 static tree
3718 build_fake_var_decl (tree type)
3720 tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
3721 memset (decl, 0, sizeof (struct tree_var_decl));
3722 TREE_SET_CODE (decl, VAR_DECL);
3723 TREE_TYPE (decl) = type;
  /* Give the fake decl a fresh unique DECL_UID.  */
3724 DECL_UID (decl) = allocate_decl_uid ();
  /* NOTE(review): a PT_UID of -1 presumably means "no points-to UID
     assigned yet" -- confirm against the SET_DECL_PT_UID users.  */
3725 SET_DECL_PT_UID (decl, -1);
3726 layout_decl (decl, 0);
3727 return decl;
3730 /* Create a new artificial heap variable with NAME.
3731 Return the created variable. */
3733 static varinfo_t
3734 make_heapvar (const char *name)
3736 varinfo_t vi;
3737 tree heapvar;
3739 heapvar = build_fake_var_decl (ptr_type_node);
3740 DECL_EXTERNAL (heapvar) = 1;
3742 vi = new_var_info (heapvar, name);
3743 vi->is_artificial_var = true;
3744 vi->is_heap_var = true;
3745 vi->is_unknown_size_var = true;
3746 vi->offset = 0;
3747 vi->fullsize = ~0;
3748 vi->size = ~0;
3749 vi->is_full_var = true;
3750 insert_vi_for_tree (heapvar, vi);
3752 return vi;
3755 /* Create a new artificial heap variable with NAME and make a
3756 constraint from it to LHS. Set flags according to a tag used
3757 for tracking restrict pointers. */
3759 static varinfo_t
3760 make_constraint_from_restrict (varinfo_t lhs, const char *name)
3762 varinfo_t vi = make_heapvar (name);
3763 vi->is_global_var = 1;
3764 vi->may_have_pointers = 1;
3765 make_constraint_from (lhs, vi->id);
3766 return vi;
3769 /* Create a new artificial heap variable with NAME and make a
3770 constraint from it to LHS. Set flags according to a tag used
3771 for tracking restrict pointers and make the artificial heap
3772 point to global memory. */
3774 static varinfo_t
3775 make_constraint_from_global_restrict (varinfo_t lhs, const char *name)
3777 varinfo_t vi = make_constraint_from_restrict (lhs, name);
3778 make_copy_constraint (vi, nonlocal_id);
3779 return vi;
/* In IPA mode there are varinfos for the different aspects of each
   function designator: one for the points-to set of the return
   value (fi_result), one for the variables that are clobbered by
   the function (fi_clobbers), one for its uses (fi_uses), one for
   the static chain (fi_static_chain) and one per parameter starting
   at fi_parm_base (including a single glob for remaining variadic
   arguments).  */
3788 enum { fi_clobbers = 1, fi_uses = 2,
3789 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3791 /* Get a constraint for the requested part of a function designator FI
3792 when operating in IPA mode. */
3794 static struct constraint_expr
3795 get_function_part_constraint (varinfo_t fi, unsigned part)
3797 struct constraint_expr c;
3799 gcc_assert (in_ipa_mode);
3801 if (fi->id == anything_id)
3803 /* ??? We probably should have a ANYFN special variable. */
3804 c.var = anything_id;
3805 c.offset = 0;
3806 c.type = SCALAR;
3808 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3810 varinfo_t ai = first_vi_for_offset (fi, part);
3811 if (ai)
3812 c.var = ai->id;
3813 else
3814 c.var = anything_id;
3815 c.offset = 0;
3816 c.type = SCALAR;
3818 else
3820 c.var = fi->id;
3821 c.offset = part;
3822 c.type = DEREF;
3825 return c;
/* For non-IPA mode, generate constraints necessary for a call on the
   RHS.  Constraint expressions describing what a lhs assigned from
   the call may point to are pushed onto RESULTS.  */
3831 static void
3832 handle_rhs_call (gimple stmt, vec<ce_s> *results)
3834 struct constraint_expr rhsc;
3835 unsigned i;
3836 bool returns_uses = false;
3838 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3840 tree arg = gimple_call_arg (stmt, i);
3841 int flags = gimple_call_arg_flags (stmt, i);
3843 /* If the argument is not used we can ignore it. */
3844 if (flags & EAF_UNUSED)
3845 continue;
3847 /* As we compute ESCAPED context-insensitive we do not gain
3848 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3849 set. The argument would still get clobbered through the
3850 escape solution. */
3851 if ((flags & EAF_NOCLOBBER)
3852 && (flags & EAF_NOESCAPE))
3854 varinfo_t uses = get_call_use_vi (stmt);
3855 if (!(flags & EAF_DIRECT))
  /* For indirect uses everything transitively reachable from the
     argument is call-used, so close the temporary's solution.  */
3857 varinfo_t tem = new_var_info (NULL_TREE, "callarg");
3858 make_constraint_to (tem->id, arg);
3859 make_transitive_closure_constraints (tem);
3860 make_copy_constraint (uses, tem->id);
3862 else
3863 make_constraint_to (uses->id, arg);
3864 returns_uses = true;
3866 else if (flags & EAF_NOESCAPE)
3868 struct constraint_expr lhs, rhs;
3869 varinfo_t uses = get_call_use_vi (stmt);
3870 varinfo_t clobbers = get_call_clobber_vi (stmt);
3871 varinfo_t tem = new_var_info (NULL_TREE, "callarg");
3872 make_constraint_to (tem->id, arg);
3873 if (!(flags & EAF_DIRECT))
3874 make_transitive_closure_constraints (tem);
3875 make_copy_constraint (uses, tem->id);
3876 make_copy_constraint (clobbers, tem->id);
3877 /* Add *tem = nonlocal, do not add *tem = callused as
3878 EAF_NOESCAPE parameters do not escape to other parameters
3879 and all other uses appear in NONLOCAL as well. */
3880 lhs.type = DEREF;
3881 lhs.var = tem->id;
3882 lhs.offset = 0;
3883 rhs.type = SCALAR;
3884 rhs.var = nonlocal_id;
3885 rhs.offset = 0;
3886 process_constraint (new_constraint (lhs, rhs));
3887 returns_uses = true;
3889 else
  /* Neither flag set -- the argument fully escapes.  */
3890 make_escape_constraint (arg);
3893 /* If we added to the calls uses solution make sure we account for
3894 pointers to it to be returned. */
3895 if (returns_uses)
3897 rhsc.var = get_call_use_vi (stmt)->id;
3898 rhsc.offset = 0;
3899 rhsc.type = SCALAR;
3900 results->safe_push (rhsc);
3903 /* The static chain escapes as well. */
3904 if (gimple_call_chain (stmt))
3905 make_escape_constraint (gimple_call_chain (stmt));
3907 /* And if we applied NRV the address of the return slot escapes as well. */
3908 if (gimple_call_return_slot_opt_p (stmt)
3909 && gimple_call_lhs (stmt) != NULL_TREE
3910 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3912 vec<ce_s> tmpc = vNULL;
3913 struct constraint_expr lhsc, *c;
3914 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
3915 lhsc.var = escaped_id;
3916 lhsc.offset = 0;
3917 lhsc.type = SCALAR;
3918 FOR_EACH_VEC_ELT (tmpc, i, c)
3919 process_constraint (new_constraint (lhsc, *c));
3920 tmpc.release ();
3923 /* Regular functions return nonlocal memory. */
3924 rhsc.var = nonlocal_id;
3925 rhsc.offset = 0;
3926 rhsc.type = SCALAR;
3927 results->safe_push (rhsc);
3930 /* For non-IPA mode, generate constraints necessary for a call
3931 that returns a pointer and assigns it to LHS. This simply makes
3932 the LHS point to global and escaped variables. */
3934 static void
3935 handle_lhs_call (gimple stmt, tree lhs, int flags, vec<ce_s> rhsc,
3936 tree fndecl)
3938 vec<ce_s> lhsc = vNULL;
3940 get_constraint_for (lhs, &lhsc);
3941 /* If the store is to a global decl make sure to
3942 add proper escape constraints. */
3943 lhs = get_base_address (lhs);
3944 if (lhs
3945 && DECL_P (lhs)
3946 && is_global_var (lhs))
3948 struct constraint_expr tmpc;
3949 tmpc.var = escaped_id;
3950 tmpc.offset = 0;
3951 tmpc.type = SCALAR;
3952 lhsc.safe_push (tmpc);
3955 /* If the call returns an argument unmodified override the rhs
3956 constraints. */
  /* NOTE(review): the incoming FLAGS parameter is overwritten here --
     only the call's return flags are actually used below.  */
3957 flags = gimple_call_return_flags (stmt);
3958 if (flags & ERF_RETURNS_ARG
3959 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
3961 tree arg;
3962 rhsc.create (0);
3963 arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
3964 get_constraint_for (arg, &rhsc);
3965 process_all_all_constraints (lhsc, rhsc);
3966 rhsc.release ();
3968 else if (flags & ERF_NOALIAS)
3970 varinfo_t vi;
3971 struct constraint_expr tmpc;
3972 rhsc.create (0);
3973 vi = make_heapvar ("HEAP");
3974 /* We delay marking allocated storage global until we know if
3975 it escapes. */
3976 DECL_EXTERNAL (vi->decl) = 0;
3977 vi->is_global_var = 0;
3978 /* If this is not a real malloc call assume the memory was
3979 initialized and thus may point to global memory. All
3980 builtin functions with the malloc attribute behave in a sane way. */
3981 if (!fndecl
3982 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
3983 make_constraint_from (vi, nonlocal_id);
3984 tmpc.var = vi->id;
3985 tmpc.offset = 0;
3986 tmpc.type = ADDRESSOF;
3987 rhsc.safe_push (tmpc);
3988 process_all_all_constraints (lhsc, rhsc);
3989 rhsc.release ();
3991 else
  /* No special return semantics -- the lhs points to whatever the
     caller collected in RHSC.  */
3992 process_all_all_constraints (lhsc, rhsc);
3994 lhsc.release ();
/* For non-IPA mode, generate constraints necessary for a call of a
   const function that returns a pointer in the statement STMT.
   Constraint expressions for what the result may point to are
   pushed onto RESULTS.  */
4001 static void
4002 handle_const_call (gimple stmt, vec<ce_s> *results)
4003 struct constraint_expr rhsc;
4004 unsigned int k;
4006 /* Treat nested const functions the same as pure functions as far
4007 as the static chain is concerned. */
4008 if (gimple_call_chain (stmt))
4010 varinfo_t uses = get_call_use_vi (stmt);
4011 make_transitive_closure_constraints (uses);
4012 make_constraint_to (uses->id, gimple_call_chain (stmt));
4013 rhsc.var = uses->id;
4014 rhsc.offset = 0;
4015 rhsc.type = SCALAR;
4016 results->safe_push (rhsc);
4019 /* May return arguments. */
4020 for (k = 0; k < gimple_call_num_args (stmt); ++k)
4022 tree arg = gimple_call_arg (stmt, k);
4023 vec<ce_s> argc = vNULL;
4024 unsigned i;
4025 struct constraint_expr *argp;
4026 get_constraint_for_rhs (arg, &argc);
4027 FOR_EACH_VEC_ELT (argc, i, argp)
4028 results->safe_push (*argp);
4029 argc.release ();
4032 /* May return addresses of globals. */
4033 rhsc.var = nonlocal_id;
4034 rhsc.offset = 0;
4035 rhsc.type = ADDRESSOF;
4036 results->safe_push (rhsc);
/* For non-IPA mode, generate constraints necessary for a call to a
   pure function in statement STMT.  Constraint expressions for what
   the result may point to are pushed onto RESULTS.  */
4042 static void
4043 handle_pure_call (gimple stmt, vec<ce_s> *results)
4045 struct constraint_expr rhsc;
4046 unsigned i;
  /* Lazily created call-use varinfo; stays NULL for argument-less
     calls without a static chain.  */
4047 varinfo_t uses = NULL;
4049 /* Memory reached from pointer arguments is call-used. */
4050 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4052 tree arg = gimple_call_arg (stmt, i);
4053 if (!uses)
4055 uses = get_call_use_vi (stmt);
4056 make_transitive_closure_constraints (uses);
4058 make_constraint_to (uses->id, arg);
4061 /* The static chain is used as well. */
4062 if (gimple_call_chain (stmt))
4064 if (!uses)
4066 uses = get_call_use_vi (stmt);
4067 make_transitive_closure_constraints (uses);
4069 make_constraint_to (uses->id, gimple_call_chain (stmt));
4072 /* Pure functions may return call-used and nonlocal memory. */
4073 if (uses)
4075 rhsc.var = uses->id;
4076 rhsc.offset = 0;
4077 rhsc.type = SCALAR;
4078 results->safe_push (rhsc);
4080 rhsc.var = nonlocal_id;
4081 rhsc.offset = 0;
4082 rhsc.type = SCALAR;
4083 results->safe_push (rhsc);
4087 /* Return the varinfo for the callee of CALL. */
4089 static varinfo_t
4090 get_fi_for_callee (gimple call)
4092 tree decl, fn = gimple_call_fn (call);
4094 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4095 fn = OBJ_TYPE_REF_EXPR (fn);
4097 /* If we can directly resolve the function being called, do so.
4098 Otherwise, it must be some sort of indirect expression that
4099 we should still be able to handle. */
4100 decl = gimple_call_addr_fndecl (fn);
4101 if (decl)
4102 return get_vi_for_tree (decl);
4104 /* If the function is anything other than a SSA name pointer we have no
4105 clue and should be getting ANYFN (well, ANYTHING for now). */
4106 if (!fn || TREE_CODE (fn) != SSA_NAME)
4107 return get_varinfo (anything_id);
4109 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4110 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4111 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4112 fn = SSA_NAME_VAR (fn);
4114 return get_vi_for_tree (fn);
4117 /* Create constraints for the builtin call T. Return true if the call
4118 was handled, otherwise false. */
4120 static bool
4121 find_func_aliases_for_builtin_call (gimple t)
4123 tree fndecl = gimple_call_fndecl (t);
4124 vec<ce_s> lhsc = vNULL;
4125 vec<ce_s> rhsc = vNULL;
4126 varinfo_t fi;
4128 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4129 /* ??? All builtins that are handled here need to be handled
4130 in the alias-oracle query functions explicitly! */
4131 switch (DECL_FUNCTION_CODE (fndecl))
4133 /* All the following functions return a pointer to the same object
4134 as their first argument points to. The functions do not add
4135 to the ESCAPED solution. The functions make the first argument
4136 pointed to memory point to what the second argument pointed to
4137 memory points to. */
4138 case BUILT_IN_STRCPY:
4139 case BUILT_IN_STRNCPY:
4140 case BUILT_IN_BCOPY:
4141 case BUILT_IN_MEMCPY:
4142 case BUILT_IN_MEMMOVE:
4143 case BUILT_IN_MEMPCPY:
4144 case BUILT_IN_STPCPY:
4145 case BUILT_IN_STPNCPY:
4146 case BUILT_IN_STRCAT:
4147 case BUILT_IN_STRNCAT:
4148 case BUILT_IN_STRCPY_CHK:
4149 case BUILT_IN_STRNCPY_CHK:
4150 case BUILT_IN_MEMCPY_CHK:
4151 case BUILT_IN_MEMMOVE_CHK:
4152 case BUILT_IN_MEMPCPY_CHK:
4153 case BUILT_IN_STPCPY_CHK:
4154 case BUILT_IN_STPNCPY_CHK:
4155 case BUILT_IN_STRCAT_CHK:
4156 case BUILT_IN_STRNCAT_CHK:
4157 case BUILT_IN_TM_MEMCPY:
4158 case BUILT_IN_TM_MEMMOVE:
4160 tree res = gimple_call_lhs (t);
  /* bcopy has (src, dest) argument order, everything else
     (dest, src).  */
4161 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4162 == BUILT_IN_BCOPY ? 1 : 0));
4163 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4164 == BUILT_IN_BCOPY ? 0 : 1));
4165 if (res != NULL_TREE)
4167 get_constraint_for (res, &lhsc);
  /* mempcpy/stpcpy-style functions return dest plus an unknown
     offset, the rest return dest itself.  */
4168 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4169 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4170 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4171 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4172 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4173 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4174 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4175 else
4176 get_constraint_for (dest, &rhsc);
4177 process_all_all_constraints (lhsc, rhsc);
4178 lhsc.release ();
4179 rhsc.release ();
  /* *dest = *src for the copied memory itself.  */
4181 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4182 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4183 do_deref (&lhsc);
4184 do_deref (&rhsc);
4185 process_all_all_constraints (lhsc, rhsc);
4186 lhsc.release ();
4187 rhsc.release ();
4188 return true;
  /* memset makes the destination's pointed-to memory point to NULL
     (for a zero value with null-pointer checks enabled) or to the
     special INTEGER variable.  */
4190 case BUILT_IN_MEMSET:
4191 case BUILT_IN_MEMSET_CHK:
4192 case BUILT_IN_TM_MEMSET:
4194 tree res = gimple_call_lhs (t);
4195 tree dest = gimple_call_arg (t, 0);
4196 unsigned i;
4197 ce_s *lhsp;
4198 struct constraint_expr ac;
4199 if (res != NULL_TREE)
4201 get_constraint_for (res, &lhsc);
4202 get_constraint_for (dest, &rhsc);
4203 process_all_all_constraints (lhsc, rhsc);
4204 lhsc.release ();
4205 rhsc.release ();
4207 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4208 do_deref (&lhsc);
4209 if (flag_delete_null_pointer_checks
4210 && integer_zerop (gimple_call_arg (t, 1)))
4212 ac.type = ADDRESSOF;
4213 ac.var = nothing_id;
4215 else
4217 ac.type = SCALAR;
4218 ac.var = integer_id;
4220 ac.offset = 0;
4221 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4222 process_constraint (new_constraint (*lhsp, ac));
4223 lhsc.release ();
4224 return true;
4226 case BUILT_IN_ASSUME_ALIGNED:
4228 tree res = gimple_call_lhs (t);
4229 tree dest = gimple_call_arg (t, 0);
4230 if (res != NULL_TREE)
4232 get_constraint_for (res, &lhsc);
4233 get_constraint_for (dest, &rhsc);
4234 process_all_all_constraints (lhsc, rhsc);
4235 lhsc.release ();
4236 rhsc.release ();
4238 return true;
4240 /* All the following functions do not return pointers, do not
4241 modify the points-to sets of memory reachable from their
4242 arguments and do not add to the ESCAPED solution. */
4243 case BUILT_IN_SINCOS:
4244 case BUILT_IN_SINCOSF:
4245 case BUILT_IN_SINCOSL:
4246 case BUILT_IN_FREXP:
4247 case BUILT_IN_FREXPF:
4248 case BUILT_IN_FREXPL:
4249 case BUILT_IN_GAMMA_R:
4250 case BUILT_IN_GAMMAF_R:
4251 case BUILT_IN_GAMMAL_R:
4252 case BUILT_IN_LGAMMA_R:
4253 case BUILT_IN_LGAMMAF_R:
4254 case BUILT_IN_LGAMMAL_R:
4255 case BUILT_IN_MODF:
4256 case BUILT_IN_MODFF:
4257 case BUILT_IN_MODFL:
4258 case BUILT_IN_REMQUO:
4259 case BUILT_IN_REMQUOF:
4260 case BUILT_IN_REMQUOL:
4261 case BUILT_IN_FREE:
4262 return true;
4263 case BUILT_IN_STRDUP:
4264 case BUILT_IN_STRNDUP:
4265 if (gimple_call_lhs (t))
  /* Treat the result like any allocating call, then copy the
     pointed-to contents: *lhs = *arg.  */
4267 handle_lhs_call (t, gimple_call_lhs (t), gimple_call_flags (t),
4268 vNULL, fndecl);
4269 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4270 NULL_TREE, &lhsc);
4271 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4272 NULL_TREE, &rhsc);
4273 do_deref (&lhsc);
4274 do_deref (&rhsc);
4275 process_all_all_constraints (lhsc, rhsc);
4276 lhsc.release ();
4277 rhsc.release ();
4278 return true;
4280 break;
4281 /* String / character search functions return a pointer into the
4282 source string or NULL. */
4283 case BUILT_IN_INDEX:
4284 case BUILT_IN_STRCHR:
4285 case BUILT_IN_STRRCHR:
4286 case BUILT_IN_MEMCHR:
4287 case BUILT_IN_STRSTR:
4288 case BUILT_IN_STRPBRK:
4289 if (gimple_call_lhs (t))
4291 tree src = gimple_call_arg (t, 0);
4292 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4293 constraint_expr nul;
4294 nul.var = nothing_id;
4295 nul.offset = 0;
4296 nul.type = ADDRESSOF;
4297 rhsc.safe_push (nul);
4298 get_constraint_for (gimple_call_lhs (t), &lhsc);
4299 process_all_all_constraints (lhsc, rhsc);
4300 lhsc.release();
4301 rhsc.release();
4303 return true;
4304 /* Trampolines are special - they set up passing the static
4305 frame. */
4306 case BUILT_IN_INIT_TRAMPOLINE:
4308 tree tramp = gimple_call_arg (t, 0);
4309 tree nfunc = gimple_call_arg (t, 1);
4310 tree frame = gimple_call_arg (t, 2);
4311 unsigned i;
4312 struct constraint_expr lhs, *rhsp;
4313 if (in_ipa_mode)
4315 varinfo_t nfi = NULL;
4316 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4317 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4318 if (nfi)
  /* The nested function receives the frame via its static
     chain part.  */
4320 lhs = get_function_part_constraint (nfi, fi_static_chain);
4321 get_constraint_for (frame, &rhsc);
4322 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4323 process_constraint (new_constraint (lhs, *rhsp));
4324 rhsc.release ();
4326 /* Make the frame point to the function for
4327 the trampoline adjustment call. */
4328 get_constraint_for (tramp, &lhsc);
4329 do_deref (&lhsc);
4330 get_constraint_for (nfunc, &rhsc);
4331 process_all_all_constraints (lhsc, rhsc);
4332 rhsc.release ();
4333 lhsc.release ();
4335 return true;
4338 /* Else fallthru to generic handling which will let
4339 the frame escape. */
4340 break;
4342 case BUILT_IN_ADJUST_TRAMPOLINE:
4344 tree tramp = gimple_call_arg (t, 0);
4345 tree res = gimple_call_lhs (t);
4346 if (in_ipa_mode && res)
4348 get_constraint_for (res, &lhsc);
4349 get_constraint_for (tramp, &rhsc);
4350 do_deref (&rhsc);
4351 process_all_all_constraints (lhsc, rhsc);
4352 rhsc.release ();
4353 lhsc.release ();
4355 return true;
  /* Transactional memory stores are simply *addr = src.  */
4357 CASE_BUILT_IN_TM_STORE (1):
4358 CASE_BUILT_IN_TM_STORE (2):
4359 CASE_BUILT_IN_TM_STORE (4):
4360 CASE_BUILT_IN_TM_STORE (8):
4361 CASE_BUILT_IN_TM_STORE (FLOAT):
4362 CASE_BUILT_IN_TM_STORE (DOUBLE):
4363 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4364 CASE_BUILT_IN_TM_STORE (M64):
4365 CASE_BUILT_IN_TM_STORE (M128):
4366 CASE_BUILT_IN_TM_STORE (M256):
4368 tree addr = gimple_call_arg (t, 0);
4369 tree src = gimple_call_arg (t, 1);
4371 get_constraint_for (addr, &lhsc);
4372 do_deref (&lhsc);
4373 get_constraint_for (src, &rhsc);
4374 process_all_all_constraints (lhsc, rhsc);
4375 lhsc.release ();
4376 rhsc.release ();
4377 return true;
  /* Transactional memory loads are simply dest = *addr.  */
4379 CASE_BUILT_IN_TM_LOAD (1):
4380 CASE_BUILT_IN_TM_LOAD (2):
4381 CASE_BUILT_IN_TM_LOAD (4):
4382 CASE_BUILT_IN_TM_LOAD (8):
4383 CASE_BUILT_IN_TM_LOAD (FLOAT):
4384 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4385 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4386 CASE_BUILT_IN_TM_LOAD (M64):
4387 CASE_BUILT_IN_TM_LOAD (M128):
4388 CASE_BUILT_IN_TM_LOAD (M256):
4390 tree dest = gimple_call_lhs (t);
4391 tree addr = gimple_call_arg (t, 0);
4393 get_constraint_for (dest, &lhsc);
4394 get_constraint_for (addr, &rhsc);
4395 do_deref (&rhsc);
4396 process_all_all_constraints (lhsc, rhsc);
4397 lhsc.release ();
4398 rhsc.release ();
4399 return true;
4401 /* Variadic argument handling needs to be handled in IPA
4402 mode as well. */
4403 case BUILT_IN_VA_START:
4405 tree valist = gimple_call_arg (t, 0);
4406 struct constraint_expr rhs, *lhsp;
4407 unsigned i;
4408 get_constraint_for (valist, &lhsc);
4409 do_deref (&lhsc);
4410 /* The va_list gets access to pointers in variadic
4411 arguments. Which we know in the case of IPA analysis
4412 and otherwise are just all nonlocal variables. */
4413 if (in_ipa_mode)
4415 fi = lookup_vi_for_tree (cfun->decl);
4416 rhs = get_function_part_constraint (fi, ~0);
4417 rhs.type = ADDRESSOF;
4419 else
4421 rhs.var = nonlocal_id;
4422 rhs.type = ADDRESSOF;
4423 rhs.offset = 0;
4425 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4426 process_constraint (new_constraint (*lhsp, rhs));
4427 lhsc.release ();
4428 /* va_list is clobbered. */
4429 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4430 return true;
4432 /* va_end doesn't have any effect that matters. */
4433 case BUILT_IN_VA_END:
4434 return true;
4435 /* Alternate return. Simply give up for now. */
4436 case BUILT_IN_RETURN:
4438 fi = NULL;
4439 if (!in_ipa_mode
4440 || !(fi = get_vi_for_tree (cfun->decl)))
4441 make_constraint_from (get_varinfo (escaped_id), anything_id);
4442 else if (in_ipa_mode
4443 && fi != NULL)
4445 struct constraint_expr lhs, rhs;
4446 lhs = get_function_part_constraint (fi, fi_result);
4447 rhs.var = anything_id;
4448 rhs.offset = 0;
4449 rhs.type = SCALAR;
4450 process_constraint (new_constraint (lhs, rhs));
4452 return true;
4454 /* printf-style functions may have hooks to set pointers to
4455 point to somewhere into the generated string. Leave them
4456 for a later exercise... */
4457 default:
4458 /* Fallthru to general call handling. */;
  /* Not a recognized builtin -- let the generic call handling
     deal with it.  */
4461 return false;
4464 /* Create constraints for the call T. */
4466 static void
4467 find_func_aliases_for_call (gimple t)
4469 tree fndecl = gimple_call_fndecl (t);
4470 vec<ce_s> lhsc = vNULL;
4471 vec<ce_s> rhsc = vNULL;
4472 varinfo_t fi;
  /* Builtins with dedicated handling short-circuit everything
     below.  */
4474 if (fndecl != NULL_TREE
4475 && DECL_BUILT_IN (fndecl)
4476 && find_func_aliases_for_builtin_call (t))
4477 return;
4479 fi = get_fi_for_callee (t);
4480 if (!in_ipa_mode
4481 || (fndecl && !fi->is_fn_info))
  /* Non-IPA handling (also used in IPA mode when the callee has no
     function info): model the call through its ECF flags.  */
  /* NOTE(review): this RHSC shadows the function-scope RHSC declared
     above; the outer vector is unused in this branch.  */
4483 vec<ce_s> rhsc = vNULL;
4484 int flags = gimple_call_flags (t);
4486 /* Const functions can return their arguments and addresses
4487 of global memory but not of escaped memory. */
4488 if (flags & (ECF_CONST|ECF_NOVOPS))
4490 if (gimple_call_lhs (t))
4491 handle_const_call (t, &rhsc);
4493 /* Pure functions can return addresses in and of memory
4494 reachable from their arguments, but they are not an escape
4495 point for reachable memory of their arguments. */
4496 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4497 handle_pure_call (t, &rhsc);
4498 else
4499 handle_rhs_call (t, &rhsc);
4500 if (gimple_call_lhs (t))
4501 handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
4502 rhsc.release ();
4504 else
  /* IPA mode with function info: wire arguments, return value and
     static chain directly to the callee's parts.  */
4506 tree lhsop;
4507 unsigned j;
4509 /* Assign all the passed arguments to the appropriate incoming
4510 parameters of the function. */
4511 for (j = 0; j < gimple_call_num_args (t); j++)
4513 struct constraint_expr lhs ;
4514 struct constraint_expr *rhsp;
4515 tree arg = gimple_call_arg (t, j);
4517 get_constraint_for_rhs (arg, &rhsc);
4518 lhs = get_function_part_constraint (fi, fi_parm_base + j);
4519 while (rhsc.length () != 0)
4521 rhsp = &rhsc.last ();
4522 process_constraint (new_constraint (lhs, *rhsp));
4523 rhsc.pop ();
4527 /* If we are returning a value, assign it to the result. */
4528 lhsop = gimple_call_lhs (t);
4529 if (lhsop)
4531 struct constraint_expr rhs;
4532 struct constraint_expr *lhsp;
4534 get_constraint_for (lhsop, &lhsc);
4535 rhs = get_function_part_constraint (fi, fi_result);
4536 if (fndecl
4537 && DECL_RESULT (fndecl)
4538 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
  /* For a by-reference result the callee's fi_result holds a
     pointer to the value -- dereference it.  */
4540 vec<ce_s> tem = vNULL;
4541 tem.safe_push (rhs);
4542 do_deref (&tem);
4543 rhs = tem[0];
4544 tem.release ();
4546 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4547 process_constraint (new_constraint (*lhsp, rhs));
4550 /* If we pass the result decl by reference, honor that. */
4551 if (lhsop
4552 && fndecl
4553 && DECL_RESULT (fndecl)
4554 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4556 struct constraint_expr lhs;
4557 struct constraint_expr *rhsp;
4559 get_constraint_for_address_of (lhsop, &rhsc);
4560 lhs = get_function_part_constraint (fi, fi_result);
4561 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4562 process_constraint (new_constraint (lhs, *rhsp));
4563 rhsc.release ();
4566 /* If we use a static chain, pass it along. */
4567 if (gimple_call_chain (t))
4569 struct constraint_expr lhs;
4570 struct constraint_expr *rhsp;
4572 get_constraint_for (gimple_call_chain (t), &rhsc);
4573 lhs = get_function_part_constraint (fi, fi_static_chain);
4574 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4575 process_constraint (new_constraint (lhs, *rhsp));
4580 /* Walk statement T setting up aliasing constraints according to the
4581 references found in T. This function is the main part of the
4582 constraint builder. It dispatches on the GIMPLE statement kind:
PHIs, calls, assignments, returns and asms each get their own
constraint-generation strategy. */
4585 static void
4586 find_func_aliases (gimple origt)
4588 gimple t = origt;
4589 vec<ce_s> lhsc = vNULL;
4590 vec<ce_s> rhsc = vNULL;
4591 struct constraint_expr *c;
4592 varinfo_t fi;
4594 /* Now build constraints expressions. */
4595 if (gimple_code (t) == GIMPLE_PHI)
4597 size_t i;
4598 unsigned int j;
4600 /* For a phi node, assign all the arguments to
4601 the result. */
4602 get_constraint_for (gimple_phi_result (t), &lhsc);
4603 for (i = 0; i < gimple_phi_num_args (t); i++)
4605 tree strippedrhs = PHI_ARG_DEF (t, i);
4607 STRIP_NOPS (strippedrhs);
4608 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
4610 FOR_EACH_VEC_ELT (lhsc, j, c)
4612 struct constraint_expr *c2;
/* NOTE(review): RHSC is consumed destructively here, so only the
first LHSC element sees it -- presumably LHSC has a single
element for a PHI result; confirm against get_constraint_for. */
4613 while (rhsc.length () > 0)
4615 c2 = &rhsc.last ();
4616 process_constraint (new_constraint (*c, *c2));
4617 rhsc.pop ();
4622 /* In IPA mode, we need to generate constraints to pass call
4623 arguments through their calls. There are two cases,
4624 either a GIMPLE_CALL returning a value, or just a plain
4625 GIMPLE_CALL when we are not.
4627 In non-ipa mode, we need to generate constraints for each
4628 pointer passed by address. */
4629 else if (is_gimple_call (t))
4630 find_func_aliases_for_call (t);
4632 /* Otherwise, just a regular assignment statement. Only care about
4633 operations with pointer result, others are dealt with as escape
4634 points if they have pointer operands. */
4635 else if (is_gimple_assign (t))
4637 /* Otherwise, just a regular assignment statement. */
4638 tree lhsop = gimple_assign_lhs (t);
4639 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4641 if (rhsop && TREE_CLOBBER_P (rhsop))
4642 /* Ignore clobbers, they don't actually store anything into
4643 the LHS. */
4645 else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4646 do_structure_copy (lhsop, rhsop);
4647 else
4649 enum tree_code code = gimple_assign_rhs_code (t);
4651 get_constraint_for (lhsop, &lhsc);
4653 if (FLOAT_TYPE_P (TREE_TYPE (lhsop)))
4654 /* If the operation produces a floating point result then
4655 assume the value is not produced to transfer a pointer. */
4657 else if (code == POINTER_PLUS_EXPR)
4658 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4659 gimple_assign_rhs2 (t), &rhsc);
4660 else if (code == BIT_AND_EXPR
4661 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
4663 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4664 the pointer. Handle it by offsetting it by UNKNOWN. */
4665 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4666 NULL_TREE, &rhsc);
4668 else if ((CONVERT_EXPR_CODE_P (code)
4669 && !(POINTER_TYPE_P (gimple_expr_type (t))
4670 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4671 || gimple_assign_single_p (t))
4672 get_constraint_for_rhs (rhsop, &rhsc);
4673 else if (code == COND_EXPR)
4675 /* The result is a merge of both COND_EXPR arms. */
4676 vec<ce_s> tmp = vNULL;
4677 struct constraint_expr *rhsp;
4678 unsigned i;
4679 get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
4680 get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
4681 FOR_EACH_VEC_ELT (tmp, i, rhsp)
4682 rhsc.safe_push (*rhsp);
4683 tmp.release ();
4685 else if (truth_value_p (code))
4686 /* Truth value results are not pointer (parts). Or at least
4687 very very unreasonable obfuscation of a part. */
4689 else
4691 /* All other operations are merges. */
4692 vec<ce_s> tmp = vNULL;
4693 struct constraint_expr *rhsp;
4694 unsigned i, j;
4695 get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
4696 for (i = 2; i < gimple_num_ops (t); ++i)
4698 get_constraint_for_rhs (gimple_op (t, i), &tmp);
4699 FOR_EACH_VEC_ELT (tmp, j, rhsp)
4700 rhsc.safe_push (*rhsp);
/* Reuse TMP for the next operand without reallocating. */
4701 tmp.truncate (0);
4703 tmp.release ();
4705 process_all_all_constraints (lhsc, rhsc);
4707 /* If there is a store to a global variable the rhs escapes. */
4708 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4709 && DECL_P (lhsop)
4710 && is_global_var (lhsop)
4711 && (!in_ipa_mode
4712 || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
4713 make_escape_constraint (rhsop);
4715 /* Handle escapes through return. */
4716 else if (gimple_code (t) == GIMPLE_RETURN
4717 && gimple_return_retval (t) != NULL_TREE)
4719 fi = NULL;
4720 if (!in_ipa_mode
4721 || !(fi = get_vi_for_tree (cfun->decl)))
4722 make_escape_constraint (gimple_return_retval (t));
4723 else if (in_ipa_mode
4724 && fi != NULL)
4726 struct constraint_expr lhs ;
4727 struct constraint_expr *rhsp;
4728 unsigned i;
4730 lhs = get_function_part_constraint (fi, fi_result);
4731 get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
4732 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4733 process_constraint (new_constraint (lhs, *rhsp));
4736 /* Handle asms conservatively by adding escape constraints to everything. */
4737 else if (gimple_code (t) == GIMPLE_ASM)
4739 unsigned i, noutputs;
4740 const char **oconstraints;
4741 const char *constraint;
4742 bool allows_mem, allows_reg, is_inout;
4744 noutputs = gimple_asm_noutputs (t);
4745 oconstraints = XALLOCAVEC (const char *, noutputs);
4747 for (i = 0; i < noutputs; ++i)
4749 tree link = gimple_asm_output_op (t, i);
4750 tree op = TREE_VALUE (link);
4752 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4753 oconstraints[i] = constraint;
4754 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4755 &allows_reg, &is_inout);
4757 /* A memory constraint makes the address of the operand escape. */
4758 if (!allows_reg && allows_mem)
4759 make_escape_constraint (build_fold_addr_expr (op));
4761 /* The asm may read global memory, so outputs may point to
4762 any global memory. */
4763 if (op)
4765 vec<ce_s> lhsc = vNULL;
4766 struct constraint_expr rhsc, *lhsp;
4767 unsigned j;
4768 get_constraint_for (op, &lhsc);
4769 rhsc.var = nonlocal_id;
4770 rhsc.offset = 0;
4771 rhsc.type = SCALAR;
4772 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4773 process_constraint (new_constraint (*lhsp, rhsc));
4774 lhsc.release ();
4777 for (i = 0; i < gimple_asm_ninputs (t); ++i)
4779 tree link = gimple_asm_input_op (t, i);
4780 tree op = TREE_VALUE (link);
4782 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4784 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
4785 &allows_mem, &allows_reg);
4787 /* A memory constraint makes the address of the operand escape. */
4788 if (!allows_reg && allows_mem)
4789 make_escape_constraint (build_fold_addr_expr (op));
4790 /* Strictly we'd only need the constraint to ESCAPED if
4791 the asm clobbers memory, otherwise using something
4792 along the lines of per-call clobbers/uses would be enough. */
4793 else if (op)
4794 make_escape_constraint (op);
4798 rhsc.release ();
4799 lhsc.release ();
4803 /* Create a constraint adding to the clobber set of FI the memory
4804 pointed to by PTR. */
4806 static void
4807 process_ipa_clobber (varinfo_t fi, tree ptr)
4809 vec<ce_s> ptrc = vNULL;
4810 struct constraint_expr *c, lhs;
4811 unsigned i;
4812 get_constraint_for_rhs (ptr, &ptrc);
4813 lhs = get_function_part_constraint (fi, fi_clobbers);
4814 FOR_EACH_VEC_ELT (ptrc, i, c)
4815 process_constraint (new_constraint (lhs, *c));
4816 ptrc.release ();
4819 /* Walk statement T setting up clobber and use constraints according to the
4820 references found in T. This function is a main part of the
4821 IPA constraint builder. Runs only in IPA mode and feeds the
per-function fi_clobbers/fi_uses sets. */
4823 static void
4824 find_func_clobbers (gimple origt)
4826 gimple t = origt;
4827 vec<ce_s> lhsc = vNULL;
4828 vec<ce_s> rhsc = vNULL;
4829 varinfo_t fi;
4831 /* Add constraints for clobbered/used in IPA mode.
4832 We are not interested in what automatic variables are clobbered
4833 or used as we only use the information in the caller to which
4834 they do not escape. */
4835 gcc_assert (in_ipa_mode);
4837 /* If the stmt refers to memory in any way it better had a VUSE. */
4838 if (gimple_vuse (t) == NULL_TREE)
4839 return;
4841 /* We'd better have function information for the current function. */
4842 fi = lookup_vi_for_tree (cfun->decl);
4843 gcc_assert (fi != NULL);
4845 /* Account for stores in assignments and calls. */
4846 if (gimple_vdef (t) != NULL_TREE
4847 && gimple_has_lhs (t))
4849 tree lhs = gimple_get_lhs (t);
4850 tree tem = lhs;
/* Strip component references to find the base of the store. */
4851 while (handled_component_p (tem))
4852 tem = TREE_OPERAND (tem, 0);
/* Only non-local stores matter: non-automatic decls, indirect
stores, and MEM_REFs whose base is not a local's address. */
4853 if ((DECL_P (tem)
4854 && !auto_var_in_fn_p (tem, cfun->decl))
4855 || INDIRECT_REF_P (tem)
4856 || (TREE_CODE (tem) == MEM_REF
4857 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4858 && auto_var_in_fn_p
4859 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
4861 struct constraint_expr lhsc, *rhsp;
4862 unsigned i;
4863 lhsc = get_function_part_constraint (fi, fi_clobbers);
4864 get_constraint_for_address_of (lhs, &rhsc);
4865 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4866 process_constraint (new_constraint (lhsc, *rhsp));
4867 rhsc.release ();
4871 /* Account for uses in assignments and returns. */
4872 if (gimple_assign_single_p (t)
4873 || (gimple_code (t) == GIMPLE_RETURN
4874 && gimple_return_retval (t) != NULL_TREE))
4876 tree rhs = (gimple_assign_single_p (t)
4877 ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
4878 tree tem = rhs;
4879 while (handled_component_p (tem))
4880 tem = TREE_OPERAND (tem, 0);
/* Same non-local filter as for stores above. */
4881 if ((DECL_P (tem)
4882 && !auto_var_in_fn_p (tem, cfun->decl))
4883 || INDIRECT_REF_P (tem)
4884 || (TREE_CODE (tem) == MEM_REF
4885 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4886 && auto_var_in_fn_p
4887 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
4889 struct constraint_expr lhs, *rhsp;
4890 unsigned i;
4891 lhs = get_function_part_constraint (fi, fi_uses);
4892 get_constraint_for_address_of (rhs, &rhsc);
4893 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4894 process_constraint (new_constraint (lhs, *rhsp));
4895 rhsc.release ();
4899 if (is_gimple_call (t))
4901 varinfo_t cfi = NULL;
4902 tree decl = gimple_call_fndecl (t);
4903 struct constraint_expr lhs, rhs;
4904 unsigned i, j;
4906 /* For builtins we do not have separate function info. For those
4907 we do not generate escapes for we have to generate clobbers/uses. */
4908 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4909 switch (DECL_FUNCTION_CODE (decl))
4911 /* The following functions use and clobber memory pointed to
4912 by their arguments. */
4913 case BUILT_IN_STRCPY:
4914 case BUILT_IN_STRNCPY:
4915 case BUILT_IN_BCOPY:
4916 case BUILT_IN_MEMCPY:
4917 case BUILT_IN_MEMMOVE:
4918 case BUILT_IN_MEMPCPY:
4919 case BUILT_IN_STPCPY:
4920 case BUILT_IN_STPNCPY:
4921 case BUILT_IN_STRCAT:
4922 case BUILT_IN_STRNCAT:
4923 case BUILT_IN_STRCPY_CHK:
4924 case BUILT_IN_STRNCPY_CHK:
4925 case BUILT_IN_MEMCPY_CHK:
4926 case BUILT_IN_MEMMOVE_CHK:
4927 case BUILT_IN_MEMPCPY_CHK:
4928 case BUILT_IN_STPCPY_CHK:
4929 case BUILT_IN_STPNCPY_CHK:
4930 case BUILT_IN_STRCAT_CHK:
4931 case BUILT_IN_STRNCAT_CHK:
/* bcopy has its source/destination arguments swapped relative
to memcpy and friends. */
4933 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4934 == BUILT_IN_BCOPY ? 1 : 0));
4935 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4936 == BUILT_IN_BCOPY ? 0 : 1));
4937 unsigned i;
4938 struct constraint_expr *rhsp, *lhsp;
4939 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4940 lhs = get_function_part_constraint (fi, fi_clobbers);
4941 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4942 process_constraint (new_constraint (lhs, *lhsp));
4943 lhsc.release ();
4944 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4945 lhs = get_function_part_constraint (fi, fi_uses);
4946 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4947 process_constraint (new_constraint (lhs, *rhsp));
4948 rhsc.release ();
4949 return;
4951 /* The following function clobbers memory pointed to by
4952 its argument. */
4953 case BUILT_IN_MEMSET:
4954 case BUILT_IN_MEMSET_CHK:
4956 tree dest = gimple_call_arg (t, 0);
4957 unsigned i;
4958 ce_s *lhsp;
4959 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4960 lhs = get_function_part_constraint (fi, fi_clobbers);
4961 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4962 process_constraint (new_constraint (lhs, *lhsp));
4963 lhsc.release ();
4964 return;
4966 /* The following functions clobber their second and third
4967 arguments. */
4968 case BUILT_IN_SINCOS:
4969 case BUILT_IN_SINCOSF:
4970 case BUILT_IN_SINCOSL:
4972 process_ipa_clobber (fi, gimple_call_arg (t, 1));
4973 process_ipa_clobber (fi, gimple_call_arg (t, 2));
4974 return;
4976 /* The following functions clobber their second argument. */
4977 case BUILT_IN_FREXP:
4978 case BUILT_IN_FREXPF:
4979 case BUILT_IN_FREXPL:
4980 case BUILT_IN_LGAMMA_R:
4981 case BUILT_IN_LGAMMAF_R:
4982 case BUILT_IN_LGAMMAL_R:
4983 case BUILT_IN_GAMMA_R:
4984 case BUILT_IN_GAMMAF_R:
4985 case BUILT_IN_GAMMAL_R:
4986 case BUILT_IN_MODF:
4987 case BUILT_IN_MODFF:
4988 case BUILT_IN_MODFL:
4990 process_ipa_clobber (fi, gimple_call_arg (t, 1));
4991 return;
4993 /* The following functions clobber their third argument. */
4994 case BUILT_IN_REMQUO:
4995 case BUILT_IN_REMQUOF:
4996 case BUILT_IN_REMQUOL:
4998 process_ipa_clobber (fi, gimple_call_arg (t, 2));
4999 return;
5001 /* The following functions neither read nor clobber memory. */
5002 case BUILT_IN_ASSUME_ALIGNED:
5003 case BUILT_IN_FREE:
5004 return;
5005 /* Trampolines are of no interest to us. */
5006 case BUILT_IN_INIT_TRAMPOLINE:
5007 case BUILT_IN_ADJUST_TRAMPOLINE:
5008 return;
5009 case BUILT_IN_VA_START:
5010 case BUILT_IN_VA_END:
5011 return;
5012 /* printf-style functions may have hooks to set pointers to
5013 point to somewhere into the generated string. Leave them
5014 for a later exercise... */
5015 default:
5016 /* Fallthru to general call handling. */;
5019 /* Parameters passed by value are used. */
5020 lhs = get_function_part_constraint (fi, fi_uses);
5021 for (i = 0; i < gimple_call_num_args (t); i++)
5023 struct constraint_expr *rhsp;
5024 tree arg = gimple_call_arg (t, i);
/* SSA names and invariants do not reside in memory. */
5026 if (TREE_CODE (arg) == SSA_NAME
5027 || is_gimple_min_invariant (arg))
5028 continue;
5030 get_constraint_for_address_of (arg, &rhsc);
5031 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5032 process_constraint (new_constraint (lhs, *rhsp));
5033 rhsc.release ();
5036 /* Build constraints for propagating clobbers/uses along the
5037 callgraph edges. */
5038 cfi = get_fi_for_callee (t);
5039 if (cfi->id == anything_id)
5041 if (gimple_vdef (t))
5042 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5043 anything_id)
;
5044 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5045 anything_id);
5046 return;
5049 /* For callees without function info (that's external functions),
5050 ESCAPED is clobbered and used. */
5051 if (gimple_call_fndecl (t)
5052 && !cfi->is_fn_info)
5054 varinfo_t vi;
5056 if (gimple_vdef (t))
5057 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5058 escaped_id);
5059 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
5061 /* Also honor the call statement use/clobber info. */
5062 if ((vi = lookup_call_clobber_vi (t)) != NULL)
5063 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5064 vi->id);
5065 if ((vi = lookup_call_use_vi (t)) != NULL)
5066 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
5067 vi->id);
5068 return;
5071 /* Otherwise the caller clobbers and uses what the callee does.
5072 ??? This should use a new complex constraint that filters
5073 local variables of the callee. */
5074 if (gimple_vdef (t))
5076 lhs = get_function_part_constraint (fi, fi_clobbers);
5077 rhs = get_function_part_constraint (cfi, fi_clobbers);
5078 process_constraint (new_constraint (lhs, rhs));
5080 lhs = get_function_part_constraint (fi, fi_uses);
5081 rhs = get_function_part_constraint (cfi, fi_uses);
5082 process_constraint (new_constraint (lhs, rhs));
5084 else if (gimple_code (t) == GIMPLE_ASM)
5086 /* ??? Ick. We can do better. */
5087 if (gimple_vdef (t))
5088 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5089 anything_id);
5090 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5091 anything_id);
5094 rhsc.release ();
5098 /* Find the first varinfo in the same variable as START that overlaps with
5099 OFFSET. Return NULL if we can't find one. */
5101 static varinfo_t
5102 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5104 /* If the offset is outside of the variable, bail out. */
5105 if (offset >= start->fullsize)
5106 return NULL;
5108 /* If we cannot reach offset from start, lookup the first field
5109 and start from there. */
5110 if (start->offset > offset)
5111 start = get_varinfo (start->head);
5113 while (start)
5115 /* We may not find a variable in the field list with the actual
5116 offset when when we have glommed a structure to a variable.
5117 In that case, however, offset should still be within the size
5118 of the variable. */
5119 if (offset >= start->offset
5120 && (offset - start->offset) < start->size)
5121 return start;
5123 start = vi_next (start);
5126 return NULL;
5129 /* Find the first varinfo in the same variable as START that overlaps with
5130 OFFSET. If there is no such varinfo the varinfo directly preceding
5131 OFFSET is returned. */
5133 static varinfo_t
5134 first_or_preceding_vi_for_offset (varinfo_t start,
5135 unsigned HOST_WIDE_INT offset)
5137 /* If we cannot reach offset from start, lookup the first field
5138 and start from there. */
5139 if (start->offset > offset)
5140 start = get_varinfo (start->head);
5142 /* We may not find a variable in the field list with the actual
5143 offset when when we have glommed a structure to a variable.
5144 In that case, however, offset should still be within the size
5145 of the variable.
5146 If we got beyond the offset we look for return the field
5147 directly preceding offset which may be the last field. */
5148 while (start->next
5149 && offset >= start->offset
5150 && !((offset - start->offset) < start->size))
5151 start = vi_next (start);
5153 return start;
5157 /* This structure is used during pushing fields onto the fieldstack
5158 to track the offset of the field, since bitpos_of_field gives it
5159 relative to its immediate containing type, and we want it relative
5160 to the ultimate containing object. */
5162 struct fieldoff
5164 /* Offset from the base of the base containing object to this field. */
5165 HOST_WIDE_INT offset;
5167 /* Size, in bits, of the field. */
5168 unsigned HOST_WIDE_INT size;
/* True if DECL_SIZE of the field is absent or not a host integer;
SIZE is then (unsigned HOST_WIDE_INT)-1. */
5170 unsigned has_unknown_size : 1;
/* True if the field's type is known to contain pointers
(see field_must_have_pointers). */
5172 unsigned must_have_pointers : 1;
/* Conservative flag: the field may contain pointers. */
5174 unsigned may_have_pointers : 1;
/* True if the field is a restrict-qualified pointer. */
5176 unsigned only_restrict_pointers : 1;
5178 typedef struct fieldoff fieldoff_s;
5181 /* qsort comparison function for two fieldoff's PA and PB */
5183 static int
5184 fieldoff_compare (const void *pa, const void *pb)
5186 const fieldoff_s *foa = (const fieldoff_s *)pa;
5187 const fieldoff_s *fob = (const fieldoff_s *)pb;
5188 unsigned HOST_WIDE_INT foasize, fobsize;
5190 if (foa->offset < fob->offset)
5191 return -1;
5192 else if (foa->offset > fob->offset)
5193 return 1;
5195 foasize = foa->size;
5196 fobsize = fob->size;
5197 if (foasize < fobsize)
5198 return -1;
5199 else if (foasize > fobsize)
5200 return 1;
5201 return 0;
5204 /* Sort FIELDSTACK by field offset, then by size, both ascending
(see fieldoff_compare). */
5205 static void
5206 sort_fieldstack (vec<fieldoff_s> fieldstack)
5208 fieldstack.qsort (fieldoff_compare);
5211 /* Return true if T is a type that can have subvars. */
5213 static inline bool
5214 type_can_have_subvars (const_tree t)
5216 /* Aggregates without overlapping fields can have subvars. */
5217 return TREE_CODE (t) == RECORD_TYPE;
5220 /* Return true if V is a tree that we can have subvars for.
5221 Normally, this is any aggregate type. Also complex
5222 types which are not gimple registers can have subvars. */
5224 static inline bool
5225 var_can_have_subvars (const_tree v)
5227 /* Volatile variables should never have subvars. */
5228 if (TREE_THIS_VOLATILE (v))
5229 return false;
5231 /* Non decls or memory tags can never have subvars. */
5232 if (!DECL_P (v))
5233 return false;
5235 return type_can_have_subvars (TREE_TYPE (v));
5238 /* Return true if T is a type that does contain pointers. */
5240 static bool
5241 type_must_have_pointers (tree type)
5243 if (POINTER_TYPE_P (type))
5244 return true;
5246 if (TREE_CODE (type) == ARRAY_TYPE)
5247 return type_must_have_pointers (TREE_TYPE (type));
5249 /* A function or method can have pointers as arguments, so track
5250 those separately. */
5251 if (TREE_CODE (type) == FUNCTION_TYPE
5252 || TREE_CODE (type) == METHOD_TYPE)
5253 return true;
5255 return false;
5258 static bool
5259 field_must_have_pointers (tree t)
5261 return type_must_have_pointers (TREE_TYPE (t));
5264 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5265 the fields of TYPE onto fieldstack, recording their offsets along
5266 the way.
5268 OFFSET is used to keep track of the offset in this entire
5269 structure, rather than just the immediately containing structure.
5270 Returns false if the caller is supposed to handle the field we
5271 recursed for. */
5273 static bool
5274 push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
5275 HOST_WIDE_INT offset)
5277 tree field;
5278 bool empty_p = true;
5280 if (TREE_CODE (type) != RECORD_TYPE)
5281 return false;
5283 /* If the vector of fields is growing too big, bail out early.
5284 Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
5285 sure this fails. */
5286 if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5287 return false;
5289 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5290 if (TREE_CODE (field) == FIELD_DECL)
5292 bool push = false;
5293 HOST_WIDE_INT foff = bitpos_of_field (field);
/* Fields without subvars (including unions) are pushed as-is;
otherwise recurse into the field's record type. */
5295 if (!var_can_have_subvars (field)
5296 || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
5297 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
5298 push = true;
5299 else if (!push_fields_onto_fieldstack
5300 (TREE_TYPE (field), fieldstack, offset + foff)
5301 && (DECL_SIZE (field)
5302 && !integer_zerop (DECL_SIZE (field))))
5303 /* Empty structures may have actual size, like in C++. So
5304 see if we didn't push any subfields and the size is
5305 nonzero, push the field onto the stack. */
5306 push = true;
5308 if (push)
5310 fieldoff_s *pair = NULL;
5311 bool has_unknown_size = false;
5312 bool must_have_pointers_p;
5314 if (!fieldstack->is_empty ())
5315 pair = &fieldstack->last ();
5317 /* If there isn't anything at offset zero, create sth. */
5318 if (!pair
5319 && offset + foff != 0)
5321 fieldoff_s e = {0, offset + foff, false, false, false, false};
5322 pair = fieldstack->safe_push (e);
5325 if (!DECL_SIZE (field)
5326 || !host_integerp (DECL_SIZE (field), 1))
5327 has_unknown_size = true;
5329 /* If adjacent fields do not contain pointers merge them. */
5330 must_have_pointers_p = field_must_have_pointers (field);
/* Merging requires known sizes, no pointers on either side,
and that this field starts exactly where the last ends. */
5331 if (pair
5332 && !has_unknown_size
5333 && !must_have_pointers_p
5334 && !pair->must_have_pointers
5335 && !pair->has_unknown_size
5336 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5338 pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
5340 else
5342 fieldoff_s e;
5343 e.offset = offset + foff;
5344 e.has_unknown_size = has_unknown_size;
5345 if (!has_unknown_size)
5346 e.size = TREE_INT_CST_LOW (DECL_SIZE (field));
5347 else
5348 e.size = -1;
5349 e.must_have_pointers = must_have_pointers_p;
5350 e.may_have_pointers = true;
5351 e.only_restrict_pointers
5352 = (!has_unknown_size
5353 && POINTER_TYPE_P (TREE_TYPE (field))
5354 && TYPE_RESTRICT (TREE_TYPE (field)));
5355 fieldstack->safe_push (e);
5359 empty_p = false;
5362 return !empty_p;
5365 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5366 if it is a varargs function. */
5368 static unsigned int
5369 count_num_arguments (tree decl, bool *is_varargs)
5371 unsigned int num = 0;
5372 tree t;
5374 /* Capture named arguments for K&R functions. They do not
5375 have a prototype and thus no TYPE_ARG_TYPES. */
5376 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5377 ++num;
5379 /* Check if the function has variadic arguments. */
5380 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5381 if (TREE_VALUE (t) == void_type_node)
5382 break;
5383 if (!t)
5384 *is_varargs = true;
5386 return num;
5389 /* Create function information for DECL, using NAME, and return the
5390 varinfo we've created for the function. (NOTE(review): the old
comment said "return the index" -- the function returns a varinfo_t.)
The function's sub-variables (clobbers, uses, static chain, result,
arguments, varargs representative) are chained via prev_vi->next in
increasing offset order, as the gcc_asserts below check. */
5392 static varinfo_t
5393 create_function_info_for (tree decl, const char *name)
5395 struct function *fn = DECL_STRUCT_FUNCTION (decl);
5396 varinfo_t vi, prev_vi;
5397 tree arg;
5398 unsigned int i;
5399 bool is_varargs = false;
5400 unsigned int num_args = count_num_arguments (decl, &is_varargs);
5402 /* Create the variable info. */
5404 vi = new_var_info (decl, name);
5405 vi->offset = 0;
5406 vi->size = 1;
5407 vi->fullsize = fi_parm_base + num_args;
5408 vi->is_fn_info = 1;
5409 vi->may_have_pointers = false;
5410 if (is_varargs)
5411 vi->fullsize = ~0;
5412 insert_vi_for_tree (vi->decl, vi);
5414 prev_vi = vi;
5416 /* Create a variable for things the function clobbers and one for
5417 things the function uses. */
5419 varinfo_t clobbervi, usevi;
5420 const char *newname;
5421 char *tempname;
5423 asprintf (&tempname, "%s.clobber", name);
5424 newname = ggc_strdup (tempname);
5425 free (tempname);
5427 clobbervi = new_var_info (NULL, newname);
5428 clobbervi->offset = fi_clobbers;
5429 clobbervi->size = 1;
5430 clobbervi->fullsize = vi->fullsize;
5431 clobbervi->is_full_var = true;
5432 clobbervi->is_global_var = false;
5433 gcc_assert (prev_vi->offset < clobbervi->offset);
5434 prev_vi->next = clobbervi->id;
5435 prev_vi = clobbervi;
5437 asprintf (&tempname, "%s.use", name);
5438 newname = ggc_strdup (tempname);
5439 free (tempname);
5441 usevi = new_var_info (NULL, newname);
5442 usevi->offset = fi_uses;
5443 usevi->size = 1;
5444 usevi->fullsize = vi->fullsize;
5445 usevi->is_full_var = true;
5446 usevi->is_global_var = false;
5447 gcc_assert (prev_vi->offset < usevi->offset);
5448 prev_vi->next = usevi->id;
5449 prev_vi = usevi;
5452 /* And one for the static chain. */
5453 if (fn->static_chain_decl != NULL_TREE)
5455 varinfo_t chainvi;
5456 const char *newname;
5457 char *tempname;
5459 asprintf (&tempname, "%s.chain", name);
5460 newname = ggc_strdup (tempname);
5461 free (tempname);
5463 chainvi = new_var_info (fn->static_chain_decl, newname);
5464 chainvi->offset = fi_static_chain;
5465 chainvi->size = 1;
5466 chainvi->fullsize = vi->fullsize;
5467 chainvi->is_full_var = true;
5468 chainvi->is_global_var = false;
5469 gcc_assert (prev_vi->offset < chainvi->offset);
5470 prev_vi->next = chainvi->id;
5471 prev_vi = chainvi;
5472 insert_vi_for_tree (fn->static_chain_decl, chainvi);
5475 /* Create a variable for the return var. */
5476 if (DECL_RESULT (decl) != NULL
5477 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
5479 varinfo_t resultvi;
5480 const char *newname;
5481 char *tempname;
5482 tree resultdecl = decl;
5484 if (DECL_RESULT (decl))
5485 resultdecl = DECL_RESULT (decl);
5487 asprintf (&tempname, "%s.result", name);
5488 newname = ggc_strdup (tempname);
5489 free (tempname);
5491 resultvi = new_var_info (resultdecl, newname);
5492 resultvi->offset = fi_result;
5493 resultvi->size = 1;
5494 resultvi->fullsize = vi->fullsize;
5495 resultvi->is_full_var = true;
5496 if (DECL_RESULT (decl))
5497 resultvi->may_have_pointers = true;
5498 gcc_assert (prev_vi->offset < resultvi->offset);
5499 prev_vi->next = resultvi->id;
5500 prev_vi = resultvi;
5501 if (DECL_RESULT (decl))
5502 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
5505 /* Set up variables for each argument. */
5506 arg = DECL_ARGUMENTS (decl);
5507 for (i = 0; i < num_args; i++)
5509 varinfo_t argvi;
5510 const char *newname;
5511 char *tempname;
5512 tree argdecl = decl;
5514 if (arg)
5515 argdecl = arg;
5517 asprintf (&tempname, "%s.arg%d", name, i);
5518 newname = ggc_strdup (tempname);
5519 free (tempname);
5521 argvi = new_var_info (argdecl, newname);
5522 argvi->offset = fi_parm_base + i;
5523 argvi->size = 1;
5524 argvi->is_full_var = true;
5525 argvi->fullsize = vi->fullsize;
5526 if (arg)
5527 argvi->may_have_pointers = true;
5528 gcc_assert (prev_vi->offset < argvi->offset);
5529 prev_vi->next = argvi->id;
5530 prev_vi = argvi;
5531 if (arg)
5533 insert_vi_for_tree (arg, argvi);
5534 arg = DECL_CHAIN (arg);
5538 /* Add one representative for all further args. */
5539 if (is_varargs)
5541 varinfo_t argvi;
5542 const char *newname;
5543 char *tempname;
5544 tree decl;
5546 asprintf (&tempname, "%s.varargs", name);
5547 newname = ggc_strdup (tempname);
5548 free (tempname);
5550 /* We need sth that can be pointed to for va_start. */
5551 decl = build_fake_var_decl (ptr_type_node);
5553 argvi = new_var_info (decl, newname);
5554 argvi->offset = fi_parm_base + num_args;
5555 argvi->size = ~0;
5556 argvi->is_full_var = true;
5557 argvi->is_heap_var = true;
5558 argvi->fullsize = vi->fullsize;
5559 gcc_assert (prev_vi->offset < argvi->offset);
5560 prev_vi->next = argvi->id;
5561 prev_vi = argvi;
5564 return vi;
5568 /* Return true if FIELDSTACK contains fields that overlap.
5569 FIELDSTACK is assumed to be sorted by offset. */
5571 static bool
5572 check_for_overlaps (vec<fieldoff_s> fieldstack)
5574 fieldoff_s *fo = NULL;
5575 unsigned int i;
5576 HOST_WIDE_INT lastoffset = -1;
5578 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5580 if (fo->offset == lastoffset)
5581 return true;
5582 lastoffset = fo->offset;
5584 return false;
5587 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5588 This will also create any varinfo structures necessary for fields
5589 of DECL. Returns the head varinfo; field varinfos are chained
from it via ->next in offset order. */
5591 static varinfo_t
5592 create_variable_info_for_1 (tree decl, const char *name)
5594 varinfo_t vi, newvi;
5595 tree decl_type = TREE_TYPE (decl);
5596 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5597 vec<fieldoff_s> fieldstack = vNULL;
5598 fieldoff_s *fo;
5599 unsigned int i;
/* Variables whose size is unknown or not a host integer get a single
unknown-size varinfo. */
5601 if (!declsize
5602 || !host_integerp (declsize, 1))
5604 vi = new_var_info (decl, name);
5605 vi->offset = 0;
5606 vi->size = ~0;
5607 vi->fullsize = ~0;
5608 vi->is_unknown_size_var = true;
5609 vi->is_full_var = true;
5610 vi->may_have_pointers = true;
5611 return vi;
5614 /* Collect field information. */
5615 if (use_field_sensitive
5616 && var_can_have_subvars (decl)
5617 /* ??? Force us to not use subfields for global initializers
5618 in IPA mode. Else we'd have to parse arbitrary initializers. */
5619 && !(in_ipa_mode
5620 && is_global_var (decl)
5621 && DECL_INITIAL (decl)))
5623 fieldoff_s *fo = NULL;
5624 bool notokay = false;
5625 unsigned int i;
5627 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
5629 for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
5630 if (fo->has_unknown_size
5631 || fo->offset < 0)
5633 notokay = true;
5634 break;
5637 /* We can't sort them if we have a field with a variable sized type,
5638 which will make notokay = true. In that case, we are going to return
5639 without creating varinfos for the fields anyway, so sorting them is a
5640 waste to boot. */
5641 if (!notokay)
5643 sort_fieldstack (fieldstack);
5644 /* Due to some C++ FE issues, like PR 22488, we might end up
5645 what appear to be overlapping fields even though they,
5646 in reality, do not overlap. Until the C++ FE is fixed,
5647 we will simply disable field-sensitivity for these cases. */
5648 notokay = check_for_overlaps (fieldstack);
5651 if (notokay)
5652 fieldstack.release ();
5655 /* If we didn't end up collecting sub-variables create a full
5656 variable for the decl. */
5657 if (fieldstack.length () <= 1
5658 || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5660 vi = new_var_info (decl, name);
5661 vi->offset = 0;
5662 vi->may_have_pointers = true;
5663 vi->fullsize = TREE_INT_CST_LOW (declsize);
5664 vi->size = vi->fullsize;
5665 vi->is_full_var = true;
5666 fieldstack.release ();
5667 return vi;
/* Otherwise create one varinfo per collected field, chained from
the head varinfo VI. */
5670 vi = new_var_info (decl, name);
5671 vi->fullsize = TREE_INT_CST_LOW (declsize);
5672 for (i = 0, newvi = vi;
5673 fieldstack.iterate (i, &fo);
5674 ++i, newvi = vi_next (newvi))
5676 const char *newname = "NULL";
5677 char *tempname;
/* Only build descriptive "name.offset+size" labels when dumping. */
5679 if (dump_file)
5681 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
5682 "+" HOST_WIDE_INT_PRINT_DEC, name, fo->offset, fo->size);
5683 newname = ggc_strdup (tempname);
5684 free (tempname);
5686 newvi->name = newname;
5687 newvi->offset = fo->offset;
5688 newvi->size = fo->size;
5689 newvi->fullsize = vi->fullsize;
5690 newvi->may_have_pointers = fo->may_have_pointers;
5691 newvi->only_restrict_pointers = fo->only_restrict_pointers;
5692 if (i + 1 < fieldstack.length ())
5694 varinfo_t tem = new_var_info (decl, name);
5695 newvi->next = tem->id;
5696 tem->head = vi->id;
5700 fieldstack.release ();
5702 return vi;
/* Create a varinfo for DECL, register it in the VI-for-tree map and,
   for global VAR_DECLs, emit initial points-to constraints for the
   whole field chain.  Returns the id of the head varinfo.  */

static unsigned int
create_variable_info_for (tree decl, const char *name)
{
  varinfo_t vi = create_variable_info_for_1 (decl, name);
  unsigned int id = vi->id;

  insert_vi_for_tree (decl, vi);

  if (TREE_CODE (decl) != VAR_DECL)
    return id;

  /* Create initial constraints for globals.  */
  for (; vi; vi = vi_next (vi))
    {
      if (!vi->may_have_pointers
	  || !vi->is_global_var)
	continue;

      /* Mark global restrict qualified pointers.  */
      if ((POINTER_TYPE_P (TREE_TYPE (decl))
	   && TYPE_RESTRICT (TREE_TYPE (decl)))
	  || vi->only_restrict_pointers)
	{
	  make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
	  continue;
	}

      /* In non-IPA mode the initializer from nonlocal is all we need.  */
      if (!in_ipa_mode
	  || DECL_HARD_REGISTER (decl))
	make_copy_constraint (vi, nonlocal_id);

      /* In IPA mode parse the initializer and generate proper constraints
	 for it.  */
      else
	{
	  struct varpool_node *vnode = varpool_get_node (decl);

	  /* For escaped variables initialize them from nonlocal.  */
	  if (!varpool_all_refs_explicit_p (vnode))
	    make_copy_constraint (vi, nonlocal_id);

	  /* If this is a global variable with an initializer and we are in
	     IPA mode generate constraints for it.  */
	  if (DECL_INITIAL (decl)
	      && vnode->analyzed)
	    {
	      vec<ce_s> rhsc = vNULL;
	      struct constraint_expr lhs, *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
	      lhs.var = vi->id;
	      lhs.offset = 0;
	      lhs.type = SCALAR;
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      /* If this is a variable that escapes from the unit
		 the initializer escapes as well.  */
	      if (!varpool_all_refs_explicit_p (vnode))
		{
		  lhs.var = escaped_id;
		  lhs.offset = 0;
		  lhs.type = SCALAR;
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		}
	      rhsc.release ();
	    }
	}
    }

  return id;
}
5779 /* Print out the points-to solution for VAR to FILE. */
5781 static void
5782 dump_solution_for_var (FILE *file, unsigned int var)
5784 varinfo_t vi = get_varinfo (var);
5785 unsigned int i;
5786 bitmap_iterator bi;
5788 /* Dump the solution for unified vars anyway, this avoids difficulties
5789 in scanning dumps in the testsuite. */
5790 fprintf (file, "%s = { ", vi->name);
5791 vi = get_varinfo (find (var));
5792 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5793 fprintf (file, "%s ", get_varinfo (i)->name);
5794 fprintf (file, "}");
5796 /* But note when the variable was unified. */
5797 if (vi->id != var)
5798 fprintf (file, " same as %s", vi->name);
5800 fprintf (file, "\n");
/* Print the points-to solution for VAR to stdout.  Convenience wrapper
   around dump_solution_for_var for use from a debugger.  */

DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stdout, var);
}
/* Create varinfo structures for all of the variables in the
   function for intraprocedural mode.  */

static void
intra_create_variable_infos (void)
{
  tree t;

  /* For each incoming pointer argument arg, create the constraint ARG
     = NONLOCAL or a dummy variable if it is a restrict qualified
     passed-by-reference argument.  */
  for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
    {
      varinfo_t p = get_vi_for_tree (t);

      /* For restrict qualified pointers to objects passed by
	 reference build a real representative for the pointed-to object.
	 Treat restrict qualified references the same.  */
      if (TYPE_RESTRICT (TREE_TYPE (t))
	  && ((DECL_BY_REFERENCE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
	      || TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
	  && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t))))
	{
	  struct constraint_expr lhsc, rhsc;
	  varinfo_t vi;
	  tree heapvar = build_fake_var_decl (TREE_TYPE (TREE_TYPE (t)));
	  DECL_EXTERNAL (heapvar) = 1;
	  vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
	  insert_vi_for_tree (heapvar, vi);
	  /* The parameter points to the fake representative object.  */
	  lhsc.var = p->id;
	  lhsc.type = SCALAR;
	  lhsc.offset = 0;
	  rhsc.var = vi->id;
	  rhsc.type = ADDRESSOF;
	  rhsc.offset = 0;
	  process_constraint (new_constraint (lhsc, rhsc));
	  /* Conservatively initialize each pointer-holding field of the
	     fake object.  */
	  for (; vi; vi = vi_next (vi))
	    if (vi->may_have_pointers)
	      {
		if (vi->only_restrict_pointers)
		  make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
		else
		  make_copy_constraint (vi, nonlocal_id);
	      }
	  continue;
	}

      if (POINTER_TYPE_P (TREE_TYPE (t))
	  && TYPE_RESTRICT (TREE_TYPE (t)))
	make_constraint_from_global_restrict (p, "PARM_RESTRICT");
      else
	{
	  /* Ordinary parameter: each pointer-holding field may point to
	     nonlocal memory.  */
	  for (; p; p = vi_next (p))
	    {
	      if (p->only_restrict_pointers)
		make_constraint_from_global_restrict (p, "PARM_RESTRICT");
	      else if (p->may_have_pointers)
		make_constraint_from (p, nonlocal_id);
	    }
	}
    }

  /* Add a constraint for a result decl that is passed by reference.  */
  if (DECL_RESULT (cfun->decl)
      && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
    {
      varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));

      for (p = result_vi; p; p = vi_next (p))
	make_constraint_from (p, nonlocal_id);
    }

  /* Add a constraint for the incoming static chain parameter.  */
  if (cfun->static_chain_decl != NULL_TREE)
    {
      varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);

      for (p = chain_vi; p; p = vi_next (p))
	make_constraint_from (p, nonlocal_id);
    }
}
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  /* The canonical points-to bitmap being shared.  */
  bitmap pt_vars;
  /* Cached hash of PT_VARS so lookups need not rehash.  */
  hashval_t hashcode;
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;

/* Hashtable of shared_bitmap_info entries, keyed by bitmap contents.  */
static htab_t shared_bitmap_table;
5905 /* Hash function for a shared_bitmap_info_t */
5907 static hashval_t
5908 shared_bitmap_hash (const void *p)
5910 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
5911 return bi->hashcode;
5914 /* Equality function for two shared_bitmap_info_t's. */
5916 static int
5917 shared_bitmap_eq (const void *p1, const void *p2)
5919 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
5920 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
5921 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
5924 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5925 existing instance if there is one, NULL otherwise. */
5927 static bitmap
5928 shared_bitmap_lookup (bitmap pt_vars)
5930 void **slot;
5931 struct shared_bitmap_info sbi;
5933 sbi.pt_vars = pt_vars;
5934 sbi.hashcode = bitmap_hash (pt_vars);
5936 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
5937 sbi.hashcode, NO_INSERT);
5938 if (!slot)
5939 return NULL;
5940 else
5941 return ((shared_bitmap_info_t) *slot)->pt_vars;
5945 /* Add a bitmap to the shared bitmap hashtable. */
5947 static void
5948 shared_bitmap_add (bitmap pt_vars)
5950 void **slot;
5951 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
5953 sbi->pt_vars = pt_vars;
5954 sbi->hashcode = bitmap_hash (pt_vars);
5956 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
5957 sbi->hashcode, INSERT);
5958 gcc_assert (!*slot);
5959 *slot = (void *) sbi;
/* Set bits in INTO corresponding to the variable uids in solution set FROM.
   Also record in *PT whether the set contains any global variable.  */

static void
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
{
  unsigned int i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* The only artificial variables that are allowed in a may-alias
	 set are heap variables.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;

      if (TREE_CODE (vi->decl) == VAR_DECL
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* If we are in IPA mode we will not recompute points-to
	     sets after inlining so make sure they stay valid.  */
	  if (in_ipa_mode
	      && !DECL_PT_UID_SET_P (vi->decl))
	    SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));

	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables.  */
	  bitmap_set_bit (into, DECL_PT_UID (vi->decl));
	  if (vi->is_global_var)
	    pt->vars_contains_global = true;
	}
    }
}
/* Compute and return the points-to solution for ORIG_VI.  Results are
   cached in FINAL_SOLUTIONS keyed on the unification representative,
   and the vars bitmaps are shared via the shared-bitmap table.  */

static struct pt_solution
find_what_var_points_to (varinfo_t orig_vi)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;
  void **slot;
  struct pt_solution *pt;

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (find (orig_vi->id));

  /* See if we have already computed the solution and return it.  */
  slot = pointer_map_insert (final_solutions, vi);
  if (*slot != NULL)
    return *(struct pt_solution *)*slot;

  *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
  memset (pt, 0, sizeof (struct pt_solution));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      /* In IPA mode ESCAPED means unit-escaped.  */
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->is_heap_var)
	    /* We represent heapvars in the points-to set properly.  */
	    ;
	  else if (vi->id == readonly_id)
	    /* Nobody cares.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything)
    return *pt;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (finished_solution, vi->solution, pt);
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      /* An identical bitmap already exists; reuse it and discard ours.  */
      pt->vars = result;
      bitmap_clear (finished_solution);
    }

  return *pt;
}
/* Given a pointer variable P, fill in its points-to set.  Does nothing
   if no varinfo was created for P (or its underlying decl).  */

static void
find_what_p_points_to (tree p)
{
  struct ptr_info_def *pi;
  tree lookup_p = p;
  varinfo_t vi;

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (p) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (p)
      && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
    lookup_p = SSA_NAME_VAR (p);

  vi = lookup_vi_for_tree (lookup_p);
  if (!vi)
    return;

  pi = get_ptr_info (p);
  pi->pt = find_what_var_points_to (vi);
}
/* Query statistics for points-to solutions.  Each pair counts queries
   that could ("may_alias") and could not ("no_alias") be disambiguated.  */

static struct {
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
6116 void
6117 dump_pta_stats (FILE *s)
6119 fprintf (s, "\nPTA query stats:\n");
6120 fprintf (s, " pt_solution_includes: "
6121 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6122 HOST_WIDE_INT_PRINT_DEC" queries\n",
6123 pta_stats.pt_solution_includes_no_alias,
6124 pta_stats.pt_solution_includes_no_alias
6125 + pta_stats.pt_solution_includes_may_alias);
6126 fprintf (s, " pt_solutions_intersect: "
6127 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6128 HOST_WIDE_INT_PRINT_DEC" queries\n",
6129 pta_stats.pt_solutions_intersect_no_alias,
6130 pta_stats.pt_solutions_intersect_no_alias
6131 + pta_stats.pt_solutions_intersect_may_alias);
6135 /* Reset the points-to solution *PT to a conservative default
6136 (point to anything). */
6138 void
6139 pt_solution_reset (struct pt_solution *pt)
6141 memset (pt, 0, sizeof (struct pt_solution));
6142 pt->anything = true;
/* Set the points-to solution *PT to point only to the variables
   in VARS.  VARS_CONTAINS_GLOBAL specifies whether that contains
   global variables.  */

void
pt_solution_set (struct pt_solution *pt, bitmap vars, bool vars_contains_global)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = vars;
  pt->vars_contains_global = vars_contains_global;
}
6158 /* Set the points-to solution *PT to point only to the variable VAR. */
6160 void
6161 pt_solution_set_var (struct pt_solution *pt, tree var)
6163 memset (pt, 0, sizeof (struct pt_solution));
6164 pt->vars = BITMAP_GGC_ALLOC ();
6165 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6166 pt->vars_contains_global = is_global_var (var);
/* Computes the union of the points-to solutions *DEST and *SRC and
   stores the result in *DEST.  This changes the points-to bitmap
   of *DEST and thus may not be used if that might be shared.
   The points-to bitmap of *SRC and *DEST will not be shared after
   this function if they were not before.  */

static void
pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
{
  dest->anything |= src->anything;
  if (dest->anything)
    {
      /* "Anything" subsumes all other information; normalize DEST.  */
      pt_solution_reset (dest);
      return;
    }

  dest->nonlocal |= src->nonlocal;
  dest->escaped |= src->escaped;
  dest->ipa_escaped |= src->ipa_escaped;
  dest->null |= src->null;
  dest->vars_contains_global |= src->vars_contains_global;
  if (!src->vars)
    return;

  if (!dest->vars)
    dest->vars = BITMAP_GGC_ALLOC ();
  bitmap_ior_into (dest->vars, src->vars);
}
/* Return true if the points-to solution *PT is empty.  */

bool
pt_solution_empty_p (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal)
    return false;

  if (pt->vars
      && !bitmap_empty_p (pt->vars))
    return false;

  /* If the solution includes ESCAPED, check if that is empty.  */
  if (pt->escaped
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    return false;

  /* If the solution includes the IPA ESCAPED set, check if that is
     empty.  */
  if (pt->ipa_escaped
      && !pt_solution_empty_p (&ipa_escaped_pt))
    return false;

  return true;
}
6224 /* Return true if the points-to solution *PT only point to a single var, and
6225 return the var uid in *UID. */
6227 bool
6228 pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
6230 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6231 || pt->null || pt->vars == NULL
6232 || !bitmap_single_bit_set_p (pt->vars))
6233 return false;
6235 *uid = bitmap_first_set_bit (pt->vars);
6236 return true;
/* Return true if the points-to solution *PT includes global memory.  */

bool
pt_solution_includes_global (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal
      || pt->vars_contains_global)
    return true;

  /* ESCAPED is a placeholder; defer to the real escaped solution.  */
  if (pt->escaped)
    return pt_solution_includes_global (&cfun->gimple_df->escaped);

  if (pt->ipa_escaped)
    return pt_solution_includes_global (&ipa_escaped_pt);

  /* ??? This predicate is not correct for the IPA-PTA solution
     as we do not properly distinguish between unit escape points
     and global variables.  */
  if (cfun->gimple_df->ipa_pta)
    return true;

  return false;
}
/* Return true if the points-to solution *PT includes the variable
   declaration DECL.  */

static bool
pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
{
  if (pt->anything)
    return true;

  if (pt->nonlocal
      && is_global_var (decl))
    return true;

  if (pt->vars
      && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
    return true;

  /* If the solution includes ESCAPED, check it.  */
  if (pt->escaped
      && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
    return true;

  /* If the solution includes the IPA ESCAPED set, check it.  */
  if (pt->ipa_escaped
      && pt_solution_includes_1 (&ipa_escaped_pt, decl))
    return true;

  return false;
}
6294 bool
6295 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6297 bool res = pt_solution_includes_1 (pt, decl);
6298 if (res)
6299 ++pta_stats.pt_solution_includes_may_alias;
6300 else
6301 ++pta_stats.pt_solution_includes_no_alias;
6302 return res;
/* Return true if both points-to solutions PT1 and PT2 have a non-empty
   intersection.  */

static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias.  */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_global))
      || (pt2->nonlocal
	  && pt1->vars_contains_global))
    return true;

  /* Check the escaped solution if required.  */
  if ((pt1->escaped || pt2->escaped)
      && !pt_solution_empty_p (&cfun->gimple_df->escaped))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->escaped && pt2->escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->escaped
	   && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
	  || (pt2->escaped
	      && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
	return true;
    }

  /* Check the escaped solution if required.
     ??? Do we need to check the local against the IPA escaped sets?  */
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
      && !pt_solution_empty_p (&ipa_escaped_pt))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->ipa_escaped && pt2->ipa_escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->ipa_escaped
	   && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
	  || (pt2->ipa_escaped
	      && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects.  */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}
6366 bool
6367 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6369 bool res = pt_solutions_intersect_1 (pt1, pt2);
6370 if (res)
6371 ++pta_stats.pt_solutions_intersect_may_alias;
6372 else
6373 ++pta_stats.pt_solutions_intersect_no_alias;
6374 return res;
6378 /* Dump points-to information to OUTFILE. */
6380 static void
6381 dump_sa_points_to_info (FILE *outfile)
6383 unsigned int i;
6385 fprintf (outfile, "\nPoints-to sets\n\n");
6387 if (dump_flags & TDF_STATS)
6389 fprintf (outfile, "Stats:\n");
6390 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6391 fprintf (outfile, "Non-pointer vars: %d\n",
6392 stats.nonpointer_vars);
6393 fprintf (outfile, "Statically unified vars: %d\n",
6394 stats.unified_vars_static);
6395 fprintf (outfile, "Dynamically unified vars: %d\n",
6396 stats.unified_vars_dynamic);
6397 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6398 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6399 fprintf (outfile, "Number of implicit edges: %d\n",
6400 stats.num_implicit_edges);
6403 for (i = 1; i < varmap.length (); i++)
6405 varinfo_t vi = get_varinfo (i);
6406 if (!vi->may_have_pointers)
6407 continue;
6408 dump_solution_for_var (outfile, i);
/* Debug points-to information to stderr.  Convenience wrapper around
   dump_sa_points_to_info for use from a debugger.  */

DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
/* Initialize the always-existing constraint variables for NULL
   ANYTHING, READONLY, and INTEGER, plus ESCAPED, NONLOCAL and
   STOREDANYTHING, and install their base constraints.  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;
  varinfo_t var_anything;
  varinfo_t var_nothing;
  varinfo_t var_readonly;
  varinfo_t var_escaped;
  varinfo_t var_nonlocal;
  varinfo_t var_storedanything;
  varinfo_t var_integer;

  /* Variable ID zero is reserved and should be NULL.  */
  varmap.safe_push (NULL);

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  var_nothing = new_var_info (NULL_TREE, "NULL");
  gcc_assert (var_nothing->id == nothing_id);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  var_nothing->may_have_pointers = 0;
  var_nothing->is_global_var = 0;

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  var_anything = new_var_info (NULL_TREE, "ANYTHING");
  gcc_assert (var_anything->id == anything_id);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING.  */
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  constraints.safe_push (new_constraint (lhs, rhs));

  /* Create the READONLY variable, used to represent that a variable
     points to readonly memory.  */
  var_readonly = new_var_info (NULL_TREE, "READONLY");
  gcc_assert (var_readonly->id == readonly_id);
  var_readonly->is_artificial_var = 1;
  var_readonly->offset = 0;
  var_readonly->size = ~0;
  var_readonly->fullsize = ~0;
  var_readonly->is_special_var = 1;

  /* readonly memory points to anything, in order to make deref
     easier.  In reality, it points to anything the particular
     readonly variable can point to, but we don't track this
     separately.  */
  lhs.type = SCALAR;
  lhs.var = readonly_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = readonly_id;  /* FIXME */
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the ESCAPED variable, used to represent the set of escaped
     memory.  */
  var_escaped = new_var_info (NULL_TREE, "ESCAPED");
  gcc_assert (var_escaped->id == escaped_id);
  var_escaped->is_artificial_var = 1;
  var_escaped->offset = 0;
  var_escaped->size = ~0;
  var_escaped->fullsize = ~0;
  var_escaped->is_special_var = 0;

  /* Create the NONLOCAL variable, used to represent the set of nonlocal
     memory.  */
  var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
  gcc_assert (var_nonlocal->id == nonlocal_id);
  var_nonlocal->is_artificial_var = 1;
  var_nonlocal->offset = 0;
  var_nonlocal->size = ~0;
  var_nonlocal->fullsize = ~0;
  var_nonlocal->is_special_var = 1;

  /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
     whole variable escapes.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = escaped_id;
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (new_constraint (lhs, rhs));

  /* *ESCAPED = NONLOCAL.  This is true because we have to assume
     everything pointed to by escaped points to what global memory can
     point to.  */
  lhs.type = DEREF;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED.  This is true because
     global memory may point to global memory and escaped memory.  */
  lhs.type = SCALAR;
  lhs.var = nonlocal_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
  rhs.type = ADDRESSOF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the STOREDANYTHING variable, used to represent the set of
     variables stored to *ANYTHING.  */
  var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
  gcc_assert (var_storedanything->id == storedanything_id);
  var_storedanything->is_artificial_var = 1;
  var_storedanything->offset = 0;
  var_storedanything->size = ~0;
  var_storedanything->fullsize = ~0;
  var_storedanything->is_special_var = 0;

  /* Create the INTEGER variable, used to represent that a variable points
     to what an INTEGER "points to".  */
  var_integer = new_var_info (NULL_TREE, "INTEGER");
  gcc_assert (var_integer->id == integer_id);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->is_special_var = 1;

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
/* Initialize things necessary to perform PTA: obstacks, allocation
   pools, the varmap/constraint vectors, lookup maps, statistics and
   the base constraint variables.  */

static void
init_alias_vars (void)
{
  /* Field sensitivity is only useful if more than one field per
     variable may be tracked.  */
  use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);

  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraint_pool = create_alloc_pool ("Constraint pool",
				       sizeof (struct constraint), 30);
  variable_info_pool = create_alloc_pool ("Variable info pool",
					  sizeof (struct variable_info), 30);
  constraints.create (8);
  varmap.create (8);
  vi_for_tree = pointer_map_create ();
  call_stmt_vars = pointer_map_create ();

  memset (&stats, 0, sizeof (stats));
  shared_bitmap_table = htab_create (511, shared_bitmap_hash,
				     shared_bitmap_eq, free);
  init_base_vars ();

  gcc_obstack_init (&fake_var_decl_obstack);

  final_solutions = pointer_map_create ();
  gcc_obstack_init (&final_solutions_obstack);
}
/* Remove the REF and ADDRESS edges from GRAPH, as well as all the
   predecessor edges.  */

static void
remove_preds_and_fake_succs (constraint_graph_t graph)
{
  unsigned int i;

  /* Clear the implicit ref and address nodes from the successor
     lists.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      if (graph->succs[i])
	bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
			    FIRST_REF_NODE * 2);
    }

  /* Free the successor list for the non-ref nodes.  */
  for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
    {
      if (graph->succs[i])
	BITMAP_FREE (graph->succs[i]);
    }

  /* Now reallocate the size of the successor list as, and blow away
     the predecessor bitmaps.  */
  graph->size = varmap.length ();
  graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);

  free (graph->implicit_preds);
  graph->implicit_preds = NULL;
  free (graph->preds);
  graph->preds = NULL;
  bitmap_obstack_release (&predbitmap_obstack);
}
/* Solve the constraint set: build the predecessor graph, perform
   offline variable substitution, rewrite constraints, then run the
   online solver on the successor graph.  */

static void
solve_constraints (void)
{
  struct scc_info *si;

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  /* Double the node count to leave room for the implicit REF nodes.  */
  init_graph (varmap.length () * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);

  build_succ_graph ();

  free_var_substitution_info (si);

  /* Attach complex constraints to graph nodes.  */
  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  solve_graph (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    dump_sa_points_to_info (dump_file);
}
6734 /* Create points-to sets for the current function. See the comments
6735 at the start of the file for an algorithmic overview. */
6737 static void
6738 compute_points_to_sets (void)
6740 basic_block bb;
6741 unsigned i;
6742 varinfo_t vi;
6744 timevar_push (TV_TREE_PTA);
6746 init_alias_vars ();
6748 intra_create_variable_infos ();
6750 /* Now walk all statements and build the constraint set. */
6751 FOR_EACH_BB (bb)
6753 gimple_stmt_iterator gsi;
6755 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6757 gimple phi = gsi_stmt (gsi);
6759 if (! virtual_operand_p (gimple_phi_result (phi)))
6760 find_func_aliases (phi);
6763 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6765 gimple stmt = gsi_stmt (gsi);
6767 find_func_aliases (stmt);
6771 if (dump_file)
6773 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
6774 dump_constraints (dump_file, 0);
6777 /* From the constraints compute the points-to sets. */
6778 solve_constraints ();
6780 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
6781 cfun->gimple_df->escaped = find_what_var_points_to (get_varinfo (escaped_id));
6783 /* Make sure the ESCAPED solution (which is used as placeholder in
6784 other solutions) does not reference itself. This simplifies
6785 points-to solution queries. */
6786 cfun->gimple_df->escaped.escaped = 0;
6788 /* Mark escaped HEAP variables as global. */
6789 FOR_EACH_VEC_ELT (varmap, i, vi)
6790 if (vi
6791 && vi->is_heap_var
6792 && !vi->is_global_var)
6793 DECL_EXTERNAL (vi->decl) = vi->is_global_var
6794 = pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);
6796 /* Compute the points-to sets for pointer SSA_NAMEs. */
6797 for (i = 0; i < num_ssa_names; ++i)
6799 tree ptr = ssa_name (i);
6800 if (ptr
6801 && POINTER_TYPE_P (TREE_TYPE (ptr)))
6802 find_what_p_points_to (ptr);
6805 /* Compute the call-used/clobbered sets. */
6806 FOR_EACH_BB (bb)
6808 gimple_stmt_iterator gsi;
6810 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6812 gimple stmt = gsi_stmt (gsi);
6813 struct pt_solution *pt;
6814 if (!is_gimple_call (stmt))
6815 continue;
6817 pt = gimple_call_use_set (stmt);
6818 if (gimple_call_flags (stmt) & ECF_CONST)
6819 memset (pt, 0, sizeof (struct pt_solution));
6820 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
6822 *pt = find_what_var_points_to (vi);
6823 /* Escaped (and thus nonlocal) variables are always
6824 implicitly used by calls. */
6825 /* ??? ESCAPED can be empty even though NONLOCAL
6826 always escaped. */
6827 pt->nonlocal = 1;
6828 pt->escaped = 1;
6830 else
6832 /* If there is nothing special about this call then
6833 we have made everything that is used also escape. */
6834 *pt = cfun->gimple_df->escaped;
6835 pt->nonlocal = 1;
6838 pt = gimple_call_clobber_set (stmt);
6839 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
6840 memset (pt, 0, sizeof (struct pt_solution));
6841 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
6843 *pt = find_what_var_points_to (vi);
6844 /* Escaped (and thus nonlocal) variables are always
6845 implicitly clobbered by calls. */
6846 /* ??? ESCAPED can be empty even though NONLOCAL
6847 always escaped. */
6848 pt->nonlocal = 1;
6849 pt->escaped = 1;
6851 else
6853 /* If there is nothing special about this call then
6854 we have made everything that is used also escape. */
6855 *pt = cfun->gimple_df->escaped;
6856 pt->nonlocal = 1;
6861 timevar_pop (TV_TREE_PTA);
6865 /* Delete created points-to sets. */
6867 static void
6868 delete_points_to_sets (void)
6870 unsigned int i;
6872 htab_delete (shared_bitmap_table);
6873 if (dump_file && (dump_flags & TDF_STATS))
6874 fprintf (dump_file, "Points to sets created:%d\n",
6875 stats.points_to_sets_created);
6877 pointer_map_destroy (vi_for_tree);
6878 pointer_map_destroy (call_stmt_vars);
6879 bitmap_obstack_release (&pta_obstack);
6880 constraints.release ();
6882 for (i = 0; i < graph->size; i++)
6883 graph->complex[i].release ();
6884 free (graph->complex);
6886 free (graph->rep);
6887 free (graph->succs);
6888 free (graph->pe);
6889 free (graph->pe_rep);
6890 free (graph->indirect_cycles);
6891 free (graph);
6893 varmap.release ();
6894 free_alloc_pool (variable_info_pool);
6895 free_alloc_pool (constraint_pool);
6897 obstack_free (&fake_var_decl_obstack, NULL);
6899 pointer_map_destroy (final_solutions);
6900 obstack_free (&final_solutions_obstack, NULL);
6904 /* Compute points-to information for every SSA_NAME pointer in the
6905 current function and compute the transitive closure of escaped
6906 variables to re-initialize the call-clobber states of local variables. */
6908 unsigned int
6909 compute_may_aliases (void)
6911 if (cfun->gimple_df->ipa_pta)
6913 if (dump_file)
6915 fprintf (dump_file, "\nNot re-computing points-to information "
6916 "because IPA points-to information is available.\n\n");
6918 /* But still dump what we have remaining it. */
6919 dump_alias_info (dump_file);
6922 return 0;
6925 /* For each pointer P_i, determine the sets of variables that P_i may
6926 point-to. Compute the reachability set of escaped and call-used
6927 variables. */
6928 compute_points_to_sets ();
6930 /* Debugging dumps. */
6931 if (dump_file)
6932 dump_alias_info (dump_file);
6934 /* Deallocate memory used by aliasing data structures and the internal
6935 points-to solution. */
6936 delete_points_to_sets ();
6938 gcc_assert (!need_ssa_update_p (cfun));
6940 return 0;
6943 static bool
6944 gate_tree_pta (void)
6946 return flag_tree_pta;
6949 /* A dummy pass to cause points-to information to be computed via
6950 TODO_rebuild_alias. */
6952 struct gimple_opt_pass pass_build_alias =
6955 GIMPLE_PASS,
6956 "alias", /* name */
6957 OPTGROUP_NONE, /* optinfo_flags */
6958 gate_tree_pta, /* gate */
6959 NULL, /* execute */
6960 NULL, /* sub */
6961 NULL, /* next */
6962 0, /* static_pass_number */
6963 TV_NONE, /* tv_id */
6964 PROP_cfg | PROP_ssa, /* properties_required */
6965 0, /* properties_provided */
6966 0, /* properties_destroyed */
6967 0, /* todo_flags_start */
6968 TODO_rebuild_alias /* todo_flags_finish */
6972 /* A dummy pass to cause points-to information to be computed via
6973 TODO_rebuild_alias. */
6975 struct gimple_opt_pass pass_build_ealias =
6978 GIMPLE_PASS,
6979 "ealias", /* name */
6980 OPTGROUP_NONE, /* optinfo_flags */
6981 gate_tree_pta, /* gate */
6982 NULL, /* execute */
6983 NULL, /* sub */
6984 NULL, /* next */
6985 0, /* static_pass_number */
6986 TV_NONE, /* tv_id */
6987 PROP_cfg | PROP_ssa, /* properties_required */
6988 0, /* properties_provided */
6989 0, /* properties_destroyed */
6990 0, /* todo_flags_start */
6991 TODO_rebuild_alias /* todo_flags_finish */
6996 /* Return true if we should execute IPA PTA. */
6997 static bool
6998 gate_ipa_pta (void)
7000 return (optimize
7001 && flag_ipa_pta
7002 /* Don't bother doing anything if the program has errors. */
7003 && !seen_error ());
7006 /* IPA PTA solutions for ESCAPED. */
7007 struct pt_solution ipa_escaped_pt
7008 = { true, false, false, false, false, false, NULL };
7010 /* Associate node with varinfo DATA. Worker for
7011 cgraph_for_node_and_aliases. */
7012 static bool
7013 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
7015 if (node->alias || node->thunk.thunk_p)
7016 insert_vi_for_tree (node->symbol.decl, (varinfo_t)data);
7017 return false;
7020 /* Execute the driver for IPA PTA. */
7021 static unsigned int
7022 ipa_pta_execute (void)
7024 struct cgraph_node *node;
7025 struct varpool_node *var;
7026 int from;
7028 in_ipa_mode = 1;
7030 init_alias_vars ();
7032 if (dump_file && (dump_flags & TDF_DETAILS))
7034 dump_symtab (dump_file);
7035 fprintf (dump_file, "\n");
7038 /* Build the constraints. */
7039 FOR_EACH_DEFINED_FUNCTION (node)
7041 varinfo_t vi;
7042 /* Nodes without a body are not interesting. Especially do not
7043 visit clones at this point for now - we get duplicate decls
7044 there for inline clones at least. */
7045 if (!cgraph_function_with_gimple_body_p (node))
7046 continue;
7048 gcc_assert (!node->clone_of);
7050 vi = create_function_info_for (node->symbol.decl,
7051 alias_get_name (node->symbol.decl));
7052 cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
7055 /* Create constraints for global variables and their initializers. */
7056 FOR_EACH_VARIABLE (var)
7058 if (var->alias)
7059 continue;
7061 get_vi_for_tree (var->symbol.decl);
7064 if (dump_file)
7066 fprintf (dump_file,
7067 "Generating constraints for global initializers\n\n");
7068 dump_constraints (dump_file, 0);
7069 fprintf (dump_file, "\n");
7071 from = constraints.length ();
7073 FOR_EACH_DEFINED_FUNCTION (node)
7075 struct function *func;
7076 basic_block bb;
7078 /* Nodes without a body are not interesting. */
7079 if (!cgraph_function_with_gimple_body_p (node))
7080 continue;
7082 if (dump_file)
7084 fprintf (dump_file,
7085 "Generating constraints for %s", cgraph_node_name (node));
7086 if (DECL_ASSEMBLER_NAME_SET_P (node->symbol.decl))
7087 fprintf (dump_file, " (%s)",
7088 IDENTIFIER_POINTER
7089 (DECL_ASSEMBLER_NAME (node->symbol.decl)));
7090 fprintf (dump_file, "\n");
7093 func = DECL_STRUCT_FUNCTION (node->symbol.decl);
7094 push_cfun (func);
7096 /* For externally visible or attribute used annotated functions use
7097 local constraints for their arguments.
7098 For local functions we see all callers and thus do not need initial
7099 constraints for parameters. */
7100 if (node->symbol.used_from_other_partition
7101 || node->symbol.externally_visible
7102 || node->symbol.force_output)
7104 intra_create_variable_infos ();
7106 /* We also need to make function return values escape. Nothing
7107 escapes by returning from main though. */
7108 if (!MAIN_NAME_P (DECL_NAME (node->symbol.decl)))
7110 varinfo_t fi, rvi;
7111 fi = lookup_vi_for_tree (node->symbol.decl);
7112 rvi = first_vi_for_offset (fi, fi_result);
7113 if (rvi && rvi->offset == fi_result)
7115 struct constraint_expr includes;
7116 struct constraint_expr var;
7117 includes.var = escaped_id;
7118 includes.offset = 0;
7119 includes.type = SCALAR;
7120 var.var = rvi->id;
7121 var.offset = 0;
7122 var.type = SCALAR;
7123 process_constraint (new_constraint (includes, var));
7128 /* Build constriants for the function body. */
7129 FOR_EACH_BB_FN (bb, func)
7131 gimple_stmt_iterator gsi;
7133 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7134 gsi_next (&gsi))
7136 gimple phi = gsi_stmt (gsi);
7138 if (! virtual_operand_p (gimple_phi_result (phi)))
7139 find_func_aliases (phi);
7142 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7144 gimple stmt = gsi_stmt (gsi);
7146 find_func_aliases (stmt);
7147 find_func_clobbers (stmt);
7151 pop_cfun ();
7153 if (dump_file)
7155 fprintf (dump_file, "\n");
7156 dump_constraints (dump_file, from);
7157 fprintf (dump_file, "\n");
7159 from = constraints.length ();
7162 /* From the constraints compute the points-to sets. */
7163 solve_constraints ();
7165 /* Compute the global points-to sets for ESCAPED.
7166 ??? Note that the computed escape set is not correct
7167 for the whole unit as we fail to consider graph edges to
7168 externally visible functions. */
7169 ipa_escaped_pt = find_what_var_points_to (get_varinfo (escaped_id));
7171 /* Make sure the ESCAPED solution (which is used as placeholder in
7172 other solutions) does not reference itself. This simplifies
7173 points-to solution queries. */
7174 ipa_escaped_pt.ipa_escaped = 0;
7176 /* Assign the points-to sets to the SSA names in the unit. */
7177 FOR_EACH_DEFINED_FUNCTION (node)
7179 tree ptr;
7180 struct function *fn;
7181 unsigned i;
7182 varinfo_t fi;
7183 basic_block bb;
7184 struct pt_solution uses, clobbers;
7185 struct cgraph_edge *e;
7187 /* Nodes without a body are not interesting. */
7188 if (!cgraph_function_with_gimple_body_p (node))
7189 continue;
7191 fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
7193 /* Compute the points-to sets for pointer SSA_NAMEs. */
7194 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
7196 if (ptr
7197 && POINTER_TYPE_P (TREE_TYPE (ptr)))
7198 find_what_p_points_to (ptr);
7201 /* Compute the call-use and call-clobber sets for all direct calls. */
7202 fi = lookup_vi_for_tree (node->symbol.decl);
7203 gcc_assert (fi->is_fn_info);
7204 clobbers
7205 = find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers));
7206 uses = find_what_var_points_to (first_vi_for_offset (fi, fi_uses));
7207 for (e = node->callers; e; e = e->next_caller)
7209 if (!e->call_stmt)
7210 continue;
7212 *gimple_call_clobber_set (e->call_stmt) = clobbers;
7213 *gimple_call_use_set (e->call_stmt) = uses;
7216 /* Compute the call-use and call-clobber sets for indirect calls
7217 and calls to external functions. */
7218 FOR_EACH_BB_FN (bb, fn)
7220 gimple_stmt_iterator gsi;
7222 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7224 gimple stmt = gsi_stmt (gsi);
7225 struct pt_solution *pt;
7226 varinfo_t vi;
7227 tree decl;
7229 if (!is_gimple_call (stmt))
7230 continue;
7232 /* Handle direct calls to external functions. */
7233 decl = gimple_call_fndecl (stmt);
7234 if (decl
7235 && (!(fi = lookup_vi_for_tree (decl))
7236 || !fi->is_fn_info))
7238 pt = gimple_call_use_set (stmt);
7239 if (gimple_call_flags (stmt) & ECF_CONST)
7240 memset (pt, 0, sizeof (struct pt_solution));
7241 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7243 *pt = find_what_var_points_to (vi);
7244 /* Escaped (and thus nonlocal) variables are always
7245 implicitly used by calls. */
7246 /* ??? ESCAPED can be empty even though NONLOCAL
7247 always escaped. */
7248 pt->nonlocal = 1;
7249 pt->ipa_escaped = 1;
7251 else
7253 /* If there is nothing special about this call then
7254 we have made everything that is used also escape. */
7255 *pt = ipa_escaped_pt;
7256 pt->nonlocal = 1;
7259 pt = gimple_call_clobber_set (stmt);
7260 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7261 memset (pt, 0, sizeof (struct pt_solution));
7262 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7264 *pt = find_what_var_points_to (vi);
7265 /* Escaped (and thus nonlocal) variables are always
7266 implicitly clobbered by calls. */
7267 /* ??? ESCAPED can be empty even though NONLOCAL
7268 always escaped. */
7269 pt->nonlocal = 1;
7270 pt->ipa_escaped = 1;
7272 else
7274 /* If there is nothing special about this call then
7275 we have made everything that is used also escape. */
7276 *pt = ipa_escaped_pt;
7277 pt->nonlocal = 1;
7281 /* Handle indirect calls. */
7282 if (!decl
7283 && (fi = get_fi_for_callee (stmt)))
7285 /* We need to accumulate all clobbers/uses of all possible
7286 callees. */
7287 fi = get_varinfo (find (fi->id));
7288 /* If we cannot constrain the set of functions we'll end up
7289 calling we end up using/clobbering everything. */
7290 if (bitmap_bit_p (fi->solution, anything_id)
7291 || bitmap_bit_p (fi->solution, nonlocal_id)
7292 || bitmap_bit_p (fi->solution, escaped_id))
7294 pt_solution_reset (gimple_call_clobber_set (stmt));
7295 pt_solution_reset (gimple_call_use_set (stmt));
7297 else
7299 bitmap_iterator bi;
7300 unsigned i;
7301 struct pt_solution *uses, *clobbers;
7303 uses = gimple_call_use_set (stmt);
7304 clobbers = gimple_call_clobber_set (stmt);
7305 memset (uses, 0, sizeof (struct pt_solution));
7306 memset (clobbers, 0, sizeof (struct pt_solution));
7307 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
7309 struct pt_solution sol;
7311 vi = get_varinfo (i);
7312 if (!vi->is_fn_info)
7314 /* ??? We could be more precise here? */
7315 uses->nonlocal = 1;
7316 uses->ipa_escaped = 1;
7317 clobbers->nonlocal = 1;
7318 clobbers->ipa_escaped = 1;
7319 continue;
7322 if (!uses->anything)
7324 sol = find_what_var_points_to
7325 (first_vi_for_offset (vi, fi_uses));
7326 pt_solution_ior_into (uses, &sol);
7328 if (!clobbers->anything)
7330 sol = find_what_var_points_to
7331 (first_vi_for_offset (vi, fi_clobbers));
7332 pt_solution_ior_into (clobbers, &sol);
7340 fn->gimple_df->ipa_pta = true;
7343 delete_points_to_sets ();
7345 in_ipa_mode = 0;
7347 return 0;
7350 struct simple_ipa_opt_pass pass_ipa_pta =
7353 SIMPLE_IPA_PASS,
7354 "pta", /* name */
7355 OPTGROUP_NONE, /* optinfo_flags */
7356 gate_ipa_pta, /* gate */
7357 ipa_pta_execute, /* execute */
7358 NULL, /* sub */
7359 NULL, /* next */
7360 0, /* static_pass_number */
7361 TV_IPA_PTA, /* tv_id */
7362 0, /* properties_required */
7363 0, /* properties_provided */
7364 0, /* properties_destroyed */
7365 0, /* todo_flags_start */
7366 TODO_update_ssa /* todo_flags_finish */