gcc/tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dberlin@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3 of the License, or
11 (at your option) any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "obstack.h"
28 #include "bitmap.h"
29 #include "flags.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "tree.h"
33 #include "tree-flow.h"
34 #include "tree-inline.h"
35 #include "diagnostic-core.h"
36 #include "toplev.h"
37 #include "gimple.h"
38 #include "hashtab.h"
39 #include "function.h"
40 #include "cgraph.h"
41 #include "tree-pass.h"
42 #include "timevar.h"
43 #include "alloc-pool.h"
44 #include "splay-tree.h"
45 #include "params.h"
46 #include "cgraph.h"
47 #include "alias.h"
48 #include "pointer-set.h"
50 /* The idea behind this analyzer is to generate set constraints from the
51 program, then solve the resulting constraints in order to generate the
52 points-to sets.
54 Set constraints are a way of modeling program analysis problems that
55 involve sets. They consist of an inclusion constraint language,
56 describing the variables (each variable is a set) and operations that
57 are involved on the variables, and a set of rules that derive facts
58 from these operations. To solve a system of set constraints, you derive
59 all possible facts under the rules, which gives you the correct sets
60 as a consequence.
62 See "Efficient Field-sensitive pointer analysis for C" by "David
63 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
64 http://citeseer.ist.psu.edu/pearce04efficient.html
66 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
67 of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
68 http://citeseer.ist.psu.edu/heintze01ultrafast.html
70 There are three types of real constraint expressions, DEREF,
71 ADDRESSOF, and SCALAR. Each constraint expression consists
72 of a constraint type, a variable, and an offset.
74 SCALAR is a constraint expression type used to represent x, whether
75 it appears on the LHS or the RHS of a statement.
76 DEREF is a constraint expression type used to represent *x, whether
77 it appears on the LHS or the RHS of a statement.
78 ADDRESSOF is a constraint expression used to represent &x, whether
79 it appears on the LHS or the RHS of a statement.
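   As a short illustration (the variable names are hypothetical), the
   assignments below give rise to the following constraint expressions:

     p = &a;    lhs: SCALAR p     rhs: ADDRESSOF a
     p = q;     lhs: SCALAR p     rhs: SCALAR q
     p = *q;    lhs: SCALAR p     rhs: DEREF q
     *p = q;    lhs: DEREF p      rhs: SCALAR q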
81 Each pointer variable in the program is assigned an integer id, and
82 each field of a structure variable is assigned an integer id as well.
84 Structure variables are linked to their list of fields through a "next
85 field" in each variable that points to the next field in offset
86 order.
87 Each variable for a structure field has
89 1. "size", that tells the size in bits of that field.
90 2. "fullsize, that tells the size in bits of the entire structure.
91 3. "offset", that tells the offset in bits from the beginning of the
92 structure to this field.
94 Thus,
95 struct f
97 int a;
98 int b;
99 } foo;
100 int *bar;
102 looks like
104 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
105 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
106 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
109 In order to solve the system of set constraints, the following is
110 done:
112 1. Each constraint variable x has a solution set associated with it,
113 Sol(x).
115 2. Constraints are separated into direct, copy, and complex.
116 Direct constraints are ADDRESSOF constraints that require no extra
117 processing, such as P = &Q
118 Copy constraints are those of the form P = Q.
119 Complex constraints are all the constraints involving dereferences
120 and offsets (including offsetted copies).
122 3. All direct constraints of the form P = &Q are processed, such
123 that Q is added to Sol(P)
125 4. All complex constraints for a given constraint variable are stored in a
126 linked list attached to that variable's node.
128 5. A directed graph is built out of the copy constraints. Each
129 constraint variable is a node in the graph, and an edge from
130 Q to P is added for each copy constraint of the form P = Q
132 6. The graph is then walked, and solution sets are
133 propagated along the copy edges, such that an edge from Q to P
134 causes Sol(P) <- Sol(P) union Sol(Q).
136 7. As we visit each node, all complex constraints associated with
137 that node are processed by adding appropriate copy edges to the graph, or the
138 appropriate variables to the solution set.
140 8. The process of walking the graph is iterated until no solution
141 sets change.
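   As a small worked example (hypothetical variables), given the
   constraints p = &a, q = p and r = *q: step 3 seeds Sol(p) = { a },
   step 5 adds a copy edge from p to q, step 6 propagates it so that
   Sol(q) = { a }, and processing the complex constraint r = *q in
   step 7 adds a copy edge from a to r, after which further iteration
   makes Sol(r) include Sol(a).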
143 Prior to walking the graph in steps 6 and 7, we perform static
144 cycle elimination on the constraint graph, as well
145 as off-line variable substitution.
147 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
148 on and turned into anything), but isn't. You can just see what offset
149 inside the pointed-to struct it's going to access.
151 TODO: Constant bounded arrays can be handled as if they were structs of the
152 same number of elements.
154 TODO: Modeling heap and incoming pointers becomes much better if we
155 add fields to them as we discover them, which we could do.
157 TODO: We could handle unions, but to be honest, it's probably not
158 worth the pain or slowdown. */
160 /* IPA-PTA optimizations possible.
162 When the indirect function called is ANYTHING we can add disambiguation
163 based on the function signatures (or simply the parameter count which
164 is the varinfo size). We also do not need to consider functions that
165 do not have their address taken.
167 The is_global_var bit which marks escape points is overly conservative
168 in IPA mode. Split it into is_escape_point and is_global_var - only
169 externally visible globals are escape points in IPA mode. This is
170 also needed to fix the pt_solution_includes_global predicate
171 (and thus ptr_deref_may_alias_global_p).
173 The way we introduce DECL_PT_UID to avoid fixing up all points-to
174 sets in the translation unit when we copy a DECL during inlining
175 pessimizes precision. The advantage is that the DECL_PT_UID keeps
176 compile-time and memory usage overhead low - the points-to sets
177 do not grow or get unshared as they would during a fixup phase.
178 An alternative solution is to delay IPA PTA until after all
179 inlining transformations have been applied.
181 The way we propagate clobber/use information isn't optimized.
182 It should use a new complex constraint that properly filters
183 out local variables of the callee (though that would make
184 the sets invalid after inlining). OTOH we might as well
185 admit defeat to WHOPR and simply do all the clobber/use analysis
186 and propagation after PTA finished but before we threw away
187 points-to information for memory variables. WHOPR and PTA
188 do not play along well anyway - the whole constraint solving
189 would need to be done in WPA phase and it will be very interesting
190 to apply the results to local SSA names during LTRANS phase.
192 We probably should compute a per-function unit-ESCAPE solution
193 propagating it simply like the clobber / uses solutions. The
194 solution can go alongside the non-IPA escaped solution and be
195 used to query which vars escape the unit through a function.
197 We never put function decls in points-to sets so we do not
198 keep the set of called functions for indirect calls.
200 And probably more. */
201 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct heapvar_map)))
202 htab_t heapvar_for_stmt;
204 static bool use_field_sensitive = true;
205 static int in_ipa_mode = 0;
207 /* Used for predecessor bitmaps. */
208 static bitmap_obstack predbitmap_obstack;
210 /* Used for points-to sets. */
211 static bitmap_obstack pta_obstack;
213 /* Used for oldsolution members of variables. */
214 static bitmap_obstack oldpta_obstack;
216 /* Used for per-solver-iteration bitmaps. */
217 static bitmap_obstack iteration_obstack;
219 static unsigned int create_variable_info_for (tree, const char *);
220 typedef struct constraint_graph *constraint_graph_t;
221 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
223 struct constraint;
224 typedef struct constraint *constraint_t;
226 DEF_VEC_P(constraint_t);
227 DEF_VEC_ALLOC_P(constraint_t,heap);
229 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
230 if (a) \
231 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
233 static struct constraint_stats
235 unsigned int total_vars;
236 unsigned int nonpointer_vars;
237 unsigned int unified_vars_static;
238 unsigned int unified_vars_dynamic;
239 unsigned int iterations;
240 unsigned int num_edges;
241 unsigned int num_implicit_edges;
242 unsigned int points_to_sets_created;
243 } stats;
245 struct variable_info
247 /* ID of this variable */
248 unsigned int id;
250 /* True if this is a variable created by the constraint analysis, such as
251 heap variables and constraints we had to break up. */
252 unsigned int is_artificial_var : 1;
254 /* True if this is a special variable whose solution set should not be
255 changed. */
256 unsigned int is_special_var : 1;
258 /* True for variables whose size is not known or variable. */
259 unsigned int is_unknown_size_var : 1;
261 /* True for (sub-)fields that represent a whole variable. */
262 unsigned int is_full_var : 1;
264 /* True if this is a heap variable. */
265 unsigned int is_heap_var : 1;
267 /* True if this is a variable tracking a restrict pointer source. */
268 unsigned int is_restrict_var : 1;
270 /* True if this field may contain pointers. */
271 unsigned int may_have_pointers : 1;
273 /* True if this field has only restrict qualified pointers. */
274 unsigned int only_restrict_pointers : 1;
276 /* True if this represents a global variable. */
277 unsigned int is_global_var : 1;
279 /* True if this represents an IPA function info. */
280 unsigned int is_fn_info : 1;
282 /* A link to the variable for the next field in this structure. */
283 struct variable_info *next;
285 /* Offset of this variable, in bits, from the base variable */
286 unsigned HOST_WIDE_INT offset;
288 /* Size of the variable, in bits. */
289 unsigned HOST_WIDE_INT size;
291 /* Full size of the base variable, in bits. */
292 unsigned HOST_WIDE_INT fullsize;
294 /* Name of this variable */
295 const char *name;
297 /* Tree that this variable is associated with. */
298 tree decl;
300 /* Points-to set for this variable. */
301 bitmap solution;
303 /* Old points-to set for this variable. */
304 bitmap oldsolution;
306 typedef struct variable_info *varinfo_t;
308 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
309 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
310 unsigned HOST_WIDE_INT);
311 static varinfo_t lookup_vi_for_tree (tree);
313 /* Pool of variable info structures. */
314 static alloc_pool variable_info_pool;
316 DEF_VEC_P(varinfo_t);
318 DEF_VEC_ALLOC_P(varinfo_t, heap);
320 /* Table of variable info structures for constraint variables.
321 Indexed directly by variable info id. */
322 static VEC(varinfo_t,heap) *varmap;
324 /* Return the varmap element N */
326 static inline varinfo_t
327 get_varinfo (unsigned int n)
329 return VEC_index (varinfo_t, varmap, n);
332 /* Static IDs for the special variables. */
333 enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
334 escaped_id = 3, nonlocal_id = 4,
335 storedanything_id = 5, integer_id = 6 };
337 struct GTY(()) heapvar_map {
338 struct tree_map map;
339 unsigned HOST_WIDE_INT offset;
342 static int
343 heapvar_map_eq (const void *p1, const void *p2)
345 const struct heapvar_map *h1 = (const struct heapvar_map *)p1;
346 const struct heapvar_map *h2 = (const struct heapvar_map *)p2;
347 return (h1->map.base.from == h2->map.base.from
348 && h1->offset == h2->offset);
351 static unsigned int
352 heapvar_map_hash (struct heapvar_map *h)
354 return iterative_hash_host_wide_int (h->offset,
355 htab_hash_pointer (h->map.base.from));
358 /* Lookup a heap var for FROM, and return it if we find one. */
360 static tree
361 heapvar_lookup (tree from, unsigned HOST_WIDE_INT offset)
363 struct heapvar_map *h, in;
364 in.map.base.from = from;
365 in.offset = offset;
366 h = (struct heapvar_map *) htab_find_with_hash (heapvar_for_stmt, &in,
367 heapvar_map_hash (&in));
368 if (h)
369 return h->map.to;
370 return NULL_TREE;
373 /* Insert a mapping FROM->TO in the heap var for statement
374 hashtable. */
376 static void
377 heapvar_insert (tree from, unsigned HOST_WIDE_INT offset, tree to)
379 struct heapvar_map *h;
380 void **loc;
382 h = ggc_alloc_heapvar_map ();
383 h->map.base.from = from;
384 h->offset = offset;
385 h->map.hash = heapvar_map_hash (h);
386 h->map.to = to;
387 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->map.hash, INSERT);
388 gcc_assert (*loc == NULL);
389 *(struct heapvar_map **) loc = h;
392 /* Return a new variable info structure for a variable named NAME,
393 associated with the tree T. Append it
394 to the vector of variable info structures. */
396 static varinfo_t
397 new_var_info (tree t, const char *name)
399 unsigned index = VEC_length (varinfo_t, varmap);
400 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
402 ret->id = index;
403 ret->name = name;
404 ret->decl = t;
405 /* Vars without decl are artificial and do not have sub-variables. */
406 ret->is_artificial_var = (t == NULL_TREE);
407 ret->is_special_var = false;
408 ret->is_unknown_size_var = false;
409 ret->is_full_var = (t == NULL_TREE);
410 ret->is_heap_var = false;
411 ret->is_restrict_var = false;
412 ret->may_have_pointers = true;
413 ret->only_restrict_pointers = false;
414 ret->is_global_var = (t == NULL_TREE);
415 ret->is_fn_info = false;
416 if (t && DECL_P (t))
417 ret->is_global_var = is_global_var (t);
418 ret->solution = BITMAP_ALLOC (&pta_obstack);
419 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
420 ret->next = NULL;
422 stats.total_vars++;
424 VEC_safe_push (varinfo_t, heap, varmap, ret);
426 return ret;
430 /* A map mapping call statements to per-stmt variables for uses
431 and clobbers specific to the call. */
432 struct pointer_map_t *call_stmt_vars;
434 /* Lookup or create the variable for the call statement CALL. */
436 static varinfo_t
437 get_call_vi (gimple call)
439 void **slot_p;
440 varinfo_t vi, vi2;
442 slot_p = pointer_map_insert (call_stmt_vars, call);
443 if (*slot_p)
444 return (varinfo_t) *slot_p;
446 vi = new_var_info (NULL_TREE, "CALLUSED");
447 vi->offset = 0;
448 vi->size = 1;
449 vi->fullsize = 2;
450 vi->is_full_var = true;
452 vi->next = vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
453 vi2->offset = 1;
454 vi2->size = 1;
455 vi2->fullsize = 2;
456 vi2->is_full_var = true;
458 *slot_p = (void *) vi;
459 return vi;
462 /* Lookup the variable for the call statement CALL representing
463 the uses. Returns NULL if there is nothing special about this call. */
465 static varinfo_t
466 lookup_call_use_vi (gimple call)
468 void **slot_p;
470 slot_p = pointer_map_contains (call_stmt_vars, call);
471 if (slot_p)
472 return (varinfo_t) *slot_p;
474 return NULL;
477 /* Lookup the variable for the call statement CALL representing
478 the clobbers. Returns NULL if there is nothing special about this call. */
480 static varinfo_t
481 lookup_call_clobber_vi (gimple call)
483 varinfo_t uses = lookup_call_use_vi (call);
484 if (!uses)
485 return NULL;
487 return uses->next;
490 /* Lookup or create the variable for the call statement CALL representing
491 the uses. */
493 static varinfo_t
494 get_call_use_vi (gimple call)
496 return get_call_vi (call);
499 /* Lookup or create the variable for the call statement CALL representing
500 the clobbers. */
502 static varinfo_t ATTRIBUTE_UNUSED
503 get_call_clobber_vi (gimple call)
505 return get_call_vi (call)->next;
509 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
511 /* An expression that appears in a constraint. */
513 struct constraint_expr
515 /* Constraint type. */
516 constraint_expr_type type;
518 /* Variable we are referring to in the constraint. */
519 unsigned int var;
521 /* Offset, in bits, of this constraint from the beginning of
522 variables it ends up referring to.
524 IOW, in a deref constraint, we would deref, get the result set,
525 then add OFFSET to each member. */
526 HOST_WIDE_INT offset;
529 /* Use 0x8000... as special unknown offset. */
530 #define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1))
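/* For illustration (hypothetical code): for x = *(y + 1) where y has
   type int * and int is 32 bits wide, the RHS constraint expression is
   roughly { type = DEREF, var = id of y, offset = 32 }, i.e. the
   pointer adjustment is scaled to bits.  When the adjustment is not a
   compile-time constant, UNKNOWN_OFFSET is used instead (see
   get_constraint_for_ptr_offset).  */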
532 typedef struct constraint_expr ce_s;
533 DEF_VEC_O(ce_s);
534 DEF_VEC_ALLOC_O(ce_s, heap);
535 static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool);
536 static void get_constraint_for (tree, VEC(ce_s, heap) **);
537 static void do_deref (VEC (ce_s, heap) **);
539 /* Our set constraints are made up of two constraint expressions, one
540 LHS, and one RHS.
542 As described in the introduction, our set constraints each represent an
543 operation between set valued variables.
545 struct constraint
547 struct constraint_expr lhs;
548 struct constraint_expr rhs;
551 /* List of constraints that we use to build the constraint graph from. */
553 static VEC(constraint_t,heap) *constraints;
554 static alloc_pool constraint_pool;
556 /* The constraint graph is represented as an array of bitmaps
557 containing successor nodes. */
559 struct constraint_graph
561 /* Size of this graph, which may be different than the number of
562 nodes in the variable map. */
563 unsigned int size;
565 /* Explicit successors of each node. */
566 bitmap *succs;
568 /* Implicit predecessors of each node (Used for variable
569 substitution). */
570 bitmap *implicit_preds;
572 /* Explicit predecessors of each node (Used for variable substitution). */
573 bitmap *preds;
575 /* Indirect cycle representatives, or -1 if the node has no indirect
576 cycles. */
577 int *indirect_cycles;
579 /* Representative node for a node. rep[a] == a unless the node has
580 been unified. */
581 unsigned int *rep;
583 /* Equivalence class representative for a label. This is used for
584 variable substitution. */
585 int *eq_rep;
587 /* Pointer equivalence label for a node. All nodes with the same
588 pointer equivalence label can be unified together at some point
589 (either during constraint optimization or after the constraint
590 graph is built). */
591 unsigned int *pe;
593 /* Pointer equivalence representative for a label. This is used to
594 handle nodes that are pointer equivalent but not location
595 equivalent. We can unite these once the addressof constraints
596 are transformed into initial points-to sets. */
597 int *pe_rep;
599 /* Pointer equivalence label for each node, used during variable
600 substitution. */
601 unsigned int *pointer_label;
603 /* Location equivalence label for each node, used during location
604 equivalence finding. */
605 unsigned int *loc_label;
607 /* Pointed-by set for each node, used during location equivalence
608 finding. This is pointed-by rather than pointed-to, because it
609 is constructed using the predecessor graph. */
610 bitmap *pointed_by;
612 /* Points to sets for pointer equivalence. This is *not* the actual
613 points-to sets for nodes. */
614 bitmap *points_to;
616 /* Bitmap of nodes where the bit is set if the node is a direct
617 node. Used for variable substitution. */
618 sbitmap direct_nodes;
620 /* Bitmap of nodes where the bit is set if the node is address
621 taken. Used for variable substitution. */
622 bitmap address_taken;
624 /* Vector of complex constraints for each graph node. Complex
625 constraints are those involving dereferences or offsets that are
626 not 0. */
627 VEC(constraint_t,heap) **complex;
630 static constraint_graph_t graph;
632 /* During variable substitution and the offline version of indirect
633 cycle finding, we create nodes to represent dereferences and
634 address taken constraints. These represent where these start and
635 end. */
636 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
637 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
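/* For example, the dereference "*N" of the constraint variable with id N
   is represented during these phases by the node FIRST_REF_NODE + N.  */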
639 /* Return the representative node for NODE, if NODE has been unioned
640 with another NODE.
641 This function performs path compression along the way to finding
642 the representative. */
644 static unsigned int
645 find (unsigned int node)
647 gcc_assert (node < graph->size);
648 if (graph->rep[node] != node)
649 return graph->rep[node] = find (graph->rep[node]);
650 return node;
653 /* Union the FROM and TO nodes into the TO node.
654 Note that at some point in the future, we may want to do
655 union-by-rank, in which case we are going to have to return the
656 node we unified to. */
658 static bool
659 unite (unsigned int to, unsigned int from)
661 gcc_assert (to < graph->size && from < graph->size);
662 if (to != from && graph->rep[from] != to)
664 graph->rep[from] = to;
665 return true;
667 return false;
670 /* Create a new constraint consisting of LHS and RHS expressions. */
672 static constraint_t
673 new_constraint (const struct constraint_expr lhs,
674 const struct constraint_expr rhs)
676 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
677 ret->lhs = lhs;
678 ret->rhs = rhs;
679 return ret;
682 /* Print out constraint C to FILE. */
684 static void
685 dump_constraint (FILE *file, constraint_t c)
687 if (c->lhs.type == ADDRESSOF)
688 fprintf (file, "&");
689 else if (c->lhs.type == DEREF)
690 fprintf (file, "*");
691 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
692 if (c->lhs.offset == UNKNOWN_OFFSET)
693 fprintf (file, " + UNKNOWN");
694 else if (c->lhs.offset != 0)
695 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
696 fprintf (file, " = ");
697 if (c->rhs.type == ADDRESSOF)
698 fprintf (file, "&");
699 else if (c->rhs.type == DEREF)
700 fprintf (file, "*");
701 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
702 if (c->rhs.offset == UNKNOWN_OFFSET)
703 fprintf (file, " + UNKNOWN");
704 else if (c->rhs.offset != 0)
705 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
706 fprintf (file, "\n");
710 void debug_constraint (constraint_t);
711 void debug_constraints (void);
712 void debug_constraint_graph (void);
713 void debug_solution_for_var (unsigned int);
714 void debug_sa_points_to_info (void);
716 /* Print out constraint C to stderr. */
718 DEBUG_FUNCTION void
719 debug_constraint (constraint_t c)
721 dump_constraint (stderr, c);
724 /* Print out all constraints to FILE */
726 static void
727 dump_constraints (FILE *file, int from)
729 int i;
730 constraint_t c;
731 for (i = from; VEC_iterate (constraint_t, constraints, i, c); i++)
732 dump_constraint (file, c);
735 /* Print out all constraints to stderr. */
737 DEBUG_FUNCTION void
738 debug_constraints (void)
740 dump_constraints (stderr, 0);
743 /* Print out to FILE the edge in the constraint graph that is created by
744 constraint c. The edge may have a label, depending on the type of
745 constraint that it represents. If complex1, e.g. a = *b, then the label
746 is "=*"; if complex2, e.g. *a = b, then the label is "*="; if
747 complex with an offset, e.g. a = b + 8, then the label is "+".
748 Otherwise the edge has no label. */
750 static void
751 dump_constraint_edge (FILE *file, constraint_t c)
753 if (c->rhs.type != ADDRESSOF)
755 const char *src = get_varinfo (c->rhs.var)->name;
756 const char *dst = get_varinfo (c->lhs.var)->name;
757 fprintf (file, " \"%s\" -> \"%s\" ", src, dst);
758 /* Due to preprocessing of constraints, instructions like *a = *b are
759 illegal; thus, we do not have to handle such cases. */
760 if (c->lhs.type == DEREF)
761 fprintf (file, " [ label=\"*=\" ] ;\n");
762 else if (c->rhs.type == DEREF)
763 fprintf (file, " [ label=\"=*\" ] ;\n");
764 else
766 /* We must check the case where the constraint is an offset.
767 In this case, it is treated as a complex constraint. */
768 if (c->rhs.offset != c->lhs.offset)
769 fprintf (file, " [ label=\"+\" ] ;\n");
770 else
771 fprintf (file, " ;\n");
776 /* Print the constraint graph in dot format. */
778 static void
779 dump_constraint_graph (FILE *file)
781 unsigned int i=0, size;
782 constraint_t c;
784 /* Only print the graph if it has already been initialized: */
785 if (!graph)
786 return;
788 /* Print the constraints used to produce the constraint graph. The
789 constraints will be printed as comments in the dot file: */
790 fprintf (file, "\n\n/* Constraints used in the constraint graph:\n");
791 dump_constraints (file, 0);
792 fprintf (file, "*/\n");
794 /* Prints the header of the dot file: */
795 fprintf (file, "\n\n// The constraint graph in dot format:\n");
796 fprintf (file, "strict digraph {\n");
797 fprintf (file, " node [\n shape = box\n ]\n");
798 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
799 fprintf (file, "\n // List of nodes in the constraint graph:\n");
801 /* The next lines print the nodes in the graph. In order to get the
802 number of nodes in the graph, we must choose the minimum between the
803 length of the vector varmap and graph->size. If the graph has not
804 yet been initialized, then graph->size == 0, otherwise we must only
805 read nodes that have an entry in VEC (varinfo_t, varmap). */
806 size = VEC_length (varinfo_t, varmap);
807 size = size < graph->size ? size : graph->size;
808 for (i = 0; i < size; i++)
810 const char *name = get_varinfo (graph->rep[i])->name;
811 fprintf (file, " \"%s\" ;\n", name);
814 /* Go over the list of constraints printing the edges in the constraint
815 graph. */
816 fprintf (file, "\n // The constraint edges:\n");
817 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
818 if (c)
819 dump_constraint_edge (file, c);
821 /* Prints the tail of the dot file. By now, only the closing bracket. */
822 fprintf (file, "}\n\n\n");
825 /* Print out the constraint graph to stderr. */
827 DEBUG_FUNCTION void
828 debug_constraint_graph (void)
830 dump_constraint_graph (stderr);
833 /* SOLVER FUNCTIONS
835 The solver is a simple worklist solver, that works on the following
836 algorithm:
838 sbitmap changed_nodes = all zeroes;
839 changed_count = 0;
840 For each node that is not already collapsed:
841 changed_count++;
842 set bit in changed nodes
844 while (changed_count > 0)
846 compute topological ordering for constraint graph
848 find and collapse cycles in the constraint graph (updating
849 changed if necessary)
851 for each node (n) in the graph in topological order:
852 changed_count--;
854 Process each complex constraint associated with the node,
855 updating changed if necessary.
857 For each outgoing edge from n, propagate the solution from n to
858 the destination of the edge, updating changed as necessary.
860 } */
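/* A condensed sketch of that driver loop, expressed with the helpers
   defined in this file (simplified; the real solver additionally
   performs on-line cycle elimination and works with solution deltas):

     while (changed_count > 0)
       {
         struct topo_info *ti = init_topo_info ();
         compute_topo_order (graph, ti);
         while (VEC_length (unsigned, ti->topo_order) != 0)
           {
             unsigned int i = find (VEC_pop (unsigned, ti->topo_order));
             if (!TEST_BIT (changed, i))
               continue;
             RESET_BIT (changed, i);
             changed_count--;
             ... process the complex constraints attached to node i,
             then propagate Sol(i) along each successor edge ...
           }
         free_topo_info (ti);
       }  */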
862 /* Return true if two constraint expressions A and B are equal. */
864 static bool
865 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
867 return a.type == b.type && a.var == b.var && a.offset == b.offset;
870 /* Return true if constraint expression A is less than constraint expression
871 B. This is just arbitrary, but consistent, in order to give them an
872 ordering. */
874 static bool
875 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
877 if (a.type == b.type)
879 if (a.var == b.var)
880 return a.offset < b.offset;
881 else
882 return a.var < b.var;
884 else
885 return a.type < b.type;
888 /* Return true if constraint A is less than constraint B. This is just
889 arbitrary, but consistent, in order to give them an ordering. */
891 static bool
892 constraint_less (const constraint_t a, const constraint_t b)
894 if (constraint_expr_less (a->lhs, b->lhs))
895 return true;
896 else if (constraint_expr_less (b->lhs, a->lhs))
897 return false;
898 else
899 return constraint_expr_less (a->rhs, b->rhs);
902 /* Return true if two constraints A and B are equal. */
904 static bool
905 constraint_equal (struct constraint a, struct constraint b)
907 return constraint_expr_equal (a.lhs, b.lhs)
908 && constraint_expr_equal (a.rhs, b.rhs);
912 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
914 static constraint_t
915 constraint_vec_find (VEC(constraint_t,heap) *vec,
916 struct constraint lookfor)
918 unsigned int place;
919 constraint_t found;
921 if (vec == NULL)
922 return NULL;
924 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
925 if (place >= VEC_length (constraint_t, vec))
926 return NULL;
927 found = VEC_index (constraint_t, vec, place);
928 if (!constraint_equal (*found, lookfor))
929 return NULL;
930 return found;
933 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
935 static void
936 constraint_set_union (VEC(constraint_t,heap) **to,
937 VEC(constraint_t,heap) **from)
939 int i;
940 constraint_t c;
942 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
944 if (constraint_vec_find (*to, *c) == NULL)
946 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
947 constraint_less);
948 VEC_safe_insert (constraint_t, heap, *to, place, c);
953 /* Expands the solution in SET to all sub-fields of variables included.
954 Union the expanded result into RESULT. */
956 static void
957 solution_set_expand (bitmap result, bitmap set)
959 bitmap_iterator bi;
960 bitmap vars = NULL;
961 unsigned j;
963 /* In a first pass record all variables we need to add all
964 sub-fields of. This avoids quadratic behavior. */
965 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
967 varinfo_t v = get_varinfo (j);
968 if (v->is_artificial_var
969 || v->is_full_var)
970 continue;
971 v = lookup_vi_for_tree (v->decl);
972 if (vars == NULL)
973 vars = BITMAP_ALLOC (NULL);
974 bitmap_set_bit (vars, v->id);
977 /* In the second pass now do the addition to the solution and
978 to speed up solving add it to the delta as well. */
979 if (vars != NULL)
981 EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
983 varinfo_t v = get_varinfo (j);
984 for (; v != NULL; v = v->next)
985 bitmap_set_bit (result, v->id);
987 BITMAP_FREE (vars);
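/* For instance, with the struct f layout from the introductory comment,
   expanding a set that contains only foo.b also adds foo.a, since all
   sub-fields of foo.b's base variable are included.  */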
991 /* Take a solution set SET, add OFFSET to each member of the set, and
992 overwrite SET with the result when done. */
994 static void
995 solution_set_add (bitmap set, HOST_WIDE_INT offset)
997 bitmap result = BITMAP_ALLOC (&iteration_obstack);
998 unsigned int i;
999 bitmap_iterator bi;
1001 /* If the offset is unknown we have to expand the solution to
1002 all subfields. */
1003 if (offset == UNKNOWN_OFFSET)
1005 solution_set_expand (set, set);
1006 return;
1009 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
1011 varinfo_t vi = get_varinfo (i);
1013 /* If this is a variable with just one field just set its bit
1014 in the result. */
1015 if (vi->is_artificial_var
1016 || vi->is_unknown_size_var
1017 || vi->is_full_var)
1018 bitmap_set_bit (result, i);
1019 else
1021 unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset;
1023 /* If the offset makes the pointer point to before the
1024 variable use offset zero for the field lookup. */
1025 if (offset < 0
1026 && fieldoffset > vi->offset)
1027 fieldoffset = 0;
1029 if (offset != 0)
1030 vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
1032 bitmap_set_bit (result, vi->id);
1033 /* If the result is not exactly at fieldoffset include the next
1034 field as well. See get_constraint_for_ptr_offset for more
1035 rationale. */
1036 if (vi->offset != fieldoffset
1037 && vi->next != NULL)
1038 bitmap_set_bit (result, vi->next->id);
1042 bitmap_copy (set, result);
1043 BITMAP_FREE (result);
1046 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
1047 process. */
1049 static bool
1050 set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc)
1052 if (inc == 0)
1053 return bitmap_ior_into (to, from);
1054 else
1056 bitmap tmp;
1057 bool res;
1059 tmp = BITMAP_ALLOC (&iteration_obstack);
1060 bitmap_copy (tmp, from);
1061 solution_set_add (tmp, inc);
1062 res = bitmap_ior_into (to, tmp);
1063 BITMAP_FREE (tmp);
1064 return res;
1068 /* Insert constraint C into the list of complex constraints for graph
1069 node VAR. */
1071 static void
1072 insert_into_complex (constraint_graph_t graph,
1073 unsigned int var, constraint_t c)
1075 VEC (constraint_t, heap) *complex = graph->complex[var];
1076 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
1077 constraint_less);
1079 /* Only insert constraints that do not already exist. */
1080 if (place >= VEC_length (constraint_t, complex)
1081 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
1082 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
1086 /* Condense two variable nodes into a single variable node, by moving
1087 all associated info from FROM to TO. */
1089 static void
1090 merge_node_constraints (constraint_graph_t graph, unsigned int to,
1091 unsigned int from)
1093 unsigned int i;
1094 constraint_t c;
1096 gcc_assert (find (from) == to);
1098 /* Move all complex constraints from src node into to node */
1099 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
1101 /* In complex constraints for node src, we may have either
1102 a = *src or *src = a, or an offsetted constraint, which is
1103 always added to the rhs node's constraints. */
1105 if (c->rhs.type == DEREF)
1106 c->rhs.var = to;
1107 else if (c->lhs.type == DEREF)
1108 c->lhs.var = to;
1109 else
1110 c->rhs.var = to;
1112 constraint_set_union (&graph->complex[to], &graph->complex[from]);
1113 VEC_free (constraint_t, heap, graph->complex[from]);
1114 graph->complex[from] = NULL;
1118 /* Remove edges involving NODE from GRAPH. */
1120 static void
1121 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1123 if (graph->succs[node])
1124 BITMAP_FREE (graph->succs[node]);
1127 /* Merge GRAPH nodes FROM and TO into node TO. */
1129 static void
1130 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1131 unsigned int from)
1133 if (graph->indirect_cycles[from] != -1)
1135 /* If we have indirect cycles with the from node, and we have
1136 none on the to node, the to node has indirect cycles from the
1137 from node now that they are unified.
1138 If indirect cycles exist on both, unify the nodes that they
1139 are in a cycle with, since we know they are in a cycle with
1140 each other. */
1141 if (graph->indirect_cycles[to] == -1)
1142 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1145 /* Merge all the successor edges. */
1146 if (graph->succs[from])
1148 if (!graph->succs[to])
1149 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1150 bitmap_ior_into (graph->succs[to],
1151 graph->succs[from]);
1154 clear_edges_for_node (graph, from);
1158 /* Add an implicit graph edge to GRAPH, going from TO to FROM if
1159 it doesn't exist in the graph already. */
1161 static void
1162 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1163 unsigned int from)
1165 if (to == from)
1166 return;
1168 if (!graph->implicit_preds[to])
1169 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1171 if (bitmap_set_bit (graph->implicit_preds[to], from))
1172 stats.num_implicit_edges++;
1175 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1176 it doesn't exist in the graph already. */
1179 static void
1180 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1181 unsigned int from)
1183 if (!graph->preds[to])
1184 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1185 bitmap_set_bit (graph->preds[to], from);
1188 /* Add a graph edge to GRAPH, going from FROM to TO if
1189 it doesn't exist in the graph already.
1190 Return false if the edge already existed, true otherwise. */
1192 static bool
1193 add_graph_edge (constraint_graph_t graph, unsigned int to,
1194 unsigned int from)
1196 if (to == from)
1198 return false;
1200 else
1202 bool r = false;
1204 if (!graph->succs[from])
1205 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1206 if (bitmap_set_bit (graph->succs[from], to))
1208 r = true;
1209 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1210 stats.num_edges++;
1212 return r;
1217 /* Return true if {DEST, SRC} is an existing graph edge in GRAPH. */
1219 static bool
1220 valid_graph_edge (constraint_graph_t graph, unsigned int src,
1221 unsigned int dest)
1223 return (graph->succs[dest]
1224 && bitmap_bit_p (graph->succs[dest], src));
1227 /* Initialize the constraint graph structure to contain SIZE nodes. */
1229 static void
1230 init_graph (unsigned int size)
1232 unsigned int j;
1234 graph = XCNEW (struct constraint_graph);
1235 graph->size = size;
1236 graph->succs = XCNEWVEC (bitmap, graph->size);
1237 graph->indirect_cycles = XNEWVEC (int, graph->size);
1238 graph->rep = XNEWVEC (unsigned int, graph->size);
1239 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
1240 graph->pe = XCNEWVEC (unsigned int, graph->size);
1241 graph->pe_rep = XNEWVEC (int, graph->size);
1243 for (j = 0; j < graph->size; j++)
1245 graph->rep[j] = j;
1246 graph->pe_rep[j] = -1;
1247 graph->indirect_cycles[j] = -1;
1251 /* Build the constraint graph, adding only predecessor edges right now. */
1253 static void
1254 build_pred_graph (void)
1256 int i;
1257 constraint_t c;
1258 unsigned int j;
1260 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1261 graph->preds = XCNEWVEC (bitmap, graph->size);
1262 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1263 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1264 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1265 graph->points_to = XCNEWVEC (bitmap, graph->size);
1266 graph->eq_rep = XNEWVEC (int, graph->size);
1267 graph->direct_nodes = sbitmap_alloc (graph->size);
1268 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1269 sbitmap_zero (graph->direct_nodes);
1271 for (j = 0; j < FIRST_REF_NODE; j++)
1273 if (!get_varinfo (j)->is_special_var)
1274 SET_BIT (graph->direct_nodes, j);
1277 for (j = 0; j < graph->size; j++)
1278 graph->eq_rep[j] = -1;
1280 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1281 graph->indirect_cycles[j] = -1;
1283 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1285 struct constraint_expr lhs = c->lhs;
1286 struct constraint_expr rhs = c->rhs;
1287 unsigned int lhsvar = lhs.var;
1288 unsigned int rhsvar = rhs.var;
1290 if (lhs.type == DEREF)
1292 /* *x = y. */
1293 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1294 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1296 else if (rhs.type == DEREF)
1298 /* x = *y */
1299 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1300 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1301 else
1302 RESET_BIT (graph->direct_nodes, lhsvar);
1304 else if (rhs.type == ADDRESSOF)
1306 varinfo_t v;
1308 /* x = &y */
1309 if (graph->points_to[lhsvar] == NULL)
1310 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1311 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1313 if (graph->pointed_by[rhsvar] == NULL)
1314 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1315 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1317 /* Implicitly, *x = y */
1318 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1320 /* All related variables are no longer direct nodes. */
1321 RESET_BIT (graph->direct_nodes, rhsvar);
1322 v = get_varinfo (rhsvar);
1323 if (!v->is_full_var)
1325 v = lookup_vi_for_tree (v->decl);
1328 RESET_BIT (graph->direct_nodes, v->id);
1329 v = v->next;
1331 while (v != NULL);
1333 bitmap_set_bit (graph->address_taken, rhsvar);
1335 else if (lhsvar > anything_id
1336 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1338 /* x = y */
1339 add_pred_graph_edge (graph, lhsvar, rhsvar);
1340 /* Implicitly, *x = *y */
1341 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1342 FIRST_REF_NODE + rhsvar);
1344 else if (lhs.offset != 0 || rhs.offset != 0)
1346 if (rhs.offset != 0)
1347 RESET_BIT (graph->direct_nodes, lhs.var);
1348 else if (lhs.offset != 0)
1349 RESET_BIT (graph->direct_nodes, rhs.var);
1354 /* Build the constraint graph, adding successor edges. */
1356 static void
1357 build_succ_graph (void)
1359 unsigned i, t;
1360 constraint_t c;
1362 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1364 struct constraint_expr lhs;
1365 struct constraint_expr rhs;
1366 unsigned int lhsvar;
1367 unsigned int rhsvar;
1369 if (!c)
1370 continue;
1372 lhs = c->lhs;
1373 rhs = c->rhs;
1374 lhsvar = find (lhs.var);
1375 rhsvar = find (rhs.var);
1377 if (lhs.type == DEREF)
1379 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1380 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1382 else if (rhs.type == DEREF)
1384 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1385 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1387 else if (rhs.type == ADDRESSOF)
1389 /* x = &y */
1390 gcc_assert (find (rhs.var) == rhs.var);
1391 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1393 else if (lhsvar > anything_id
1394 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1396 add_graph_edge (graph, lhsvar, rhsvar);
1400 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1401 receive pointers. */
1402 t = find (storedanything_id);
1403 for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1405 if (!TEST_BIT (graph->direct_nodes, i)
1406 && get_varinfo (i)->may_have_pointers)
1407 add_graph_edge (graph, find (i), t);
1410 /* Everything stored to ANYTHING also potentially escapes. */
1411 add_graph_edge (graph, find (escaped_id), t);
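/* As an illustration (hypothetical constraints): for p = &a and q = p,
   the ADDRESSOF case above seeds Sol(p) with a directly, and the copy
   constraint adds the successor edge p -> q along which the solver
   later propagates the solution.  */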
1415 /* Changed variables on the last iteration. */
1416 static unsigned int changed_count;
1417 static sbitmap changed;
1419 /* Strongly Connected Component visitation info. */
1421 struct scc_info
1423 sbitmap visited;
1424 sbitmap deleted;
1425 unsigned int *dfs;
1426 unsigned int *node_mapping;
1427 int current_index;
1428 VEC(unsigned,heap) *scc_stack;
1432 /* Recursive routine to find strongly connected components in GRAPH.
1433 SI is the SCC info to store the information in, and N is the id of current
1434 graph node we are processing.
1436 This is Tarjan's strongly connected component finding algorithm, as
1437 modified by Nuutila to keep only non-root nodes on the stack.
1438 The algorithm can be found in "On finding the strongly connected
1439 components in a directed graph" by Esko Nuutila and Eljas
1440 Soisalon-Soininen, in Information Processing Letters volume 49,
1441 number 1, pages 9-14. */
1443 static void
1444 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1446 unsigned int i;
1447 bitmap_iterator bi;
1448 unsigned int my_dfs;
1450 SET_BIT (si->visited, n);
1451 si->dfs[n] = si->current_index ++;
1452 my_dfs = si->dfs[n];
1454 /* Visit all the successors. */
1455 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1457 unsigned int w;
1459 if (i > LAST_REF_NODE)
1460 break;
1462 w = find (i);
1463 if (TEST_BIT (si->deleted, w))
1464 continue;
1466 if (!TEST_BIT (si->visited, w))
1467 scc_visit (graph, si, w);
1469 unsigned int t = find (w);
1470 unsigned int nnode = find (n);
1471 gcc_assert (nnode == n);
1473 if (si->dfs[t] < si->dfs[nnode])
1474 si->dfs[n] = si->dfs[t];
1478 /* See if any components have been identified. */
1479 if (si->dfs[n] == my_dfs)
1481 if (VEC_length (unsigned, si->scc_stack) > 0
1482 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1484 bitmap scc = BITMAP_ALLOC (NULL);
1485 unsigned int lowest_node;
1486 bitmap_iterator bi;
1488 bitmap_set_bit (scc, n);
1490 while (VEC_length (unsigned, si->scc_stack) != 0
1491 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1493 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1495 bitmap_set_bit (scc, w);
1498 lowest_node = bitmap_first_set_bit (scc);
1499 gcc_assert (lowest_node < FIRST_REF_NODE);
1501 /* Collapse the SCC nodes into a single node, and mark the
1502 indirect cycles. */
1503 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1505 if (i < FIRST_REF_NODE)
1507 if (unite (lowest_node, i))
1508 unify_nodes (graph, lowest_node, i, false);
1510 else
1512 unite (lowest_node, i);
1513 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1517 SET_BIT (si->deleted, n);
1519 else
1520 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1523 /* Unify node FROM into node TO, updating the changed count if
1524 necessary when UPDATE_CHANGED is true. */
1526 static void
1527 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1528 bool update_changed)
1531 gcc_assert (to != from && find (to) == to);
1532 if (dump_file && (dump_flags & TDF_DETAILS))
1533 fprintf (dump_file, "Unifying %s to %s\n",
1534 get_varinfo (from)->name,
1535 get_varinfo (to)->name);
1537 if (update_changed)
1538 stats.unified_vars_dynamic++;
1539 else
1540 stats.unified_vars_static++;
1542 merge_graph_nodes (graph, to, from);
1543 merge_node_constraints (graph, to, from);
1545 /* Mark TO as changed if FROM was changed. If TO was already marked
1546 as changed, decrease the changed count. */
1548 if (update_changed && TEST_BIT (changed, from))
1550 RESET_BIT (changed, from);
1551 if (!TEST_BIT (changed, to))
1552 SET_BIT (changed, to);
1553 else
1555 gcc_assert (changed_count > 0);
1556 changed_count--;
1559 if (get_varinfo (from)->solution)
1561 /* If the solution changes because of the merging, we need to mark
1562 the variable as changed. */
1563 if (bitmap_ior_into (get_varinfo (to)->solution,
1564 get_varinfo (from)->solution))
1566 if (update_changed && !TEST_BIT (changed, to))
1568 SET_BIT (changed, to);
1569 changed_count++;
1573 BITMAP_FREE (get_varinfo (from)->solution);
1574 BITMAP_FREE (get_varinfo (from)->oldsolution);
1576 if (stats.iterations > 0)
1578 BITMAP_FREE (get_varinfo (to)->oldsolution);
1579 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
1582 if (valid_graph_edge (graph, to, to))
1584 if (graph->succs[to])
1585 bitmap_clear_bit (graph->succs[to], to);
1589 /* Information needed to compute the topological ordering of a graph. */
1591 struct topo_info
1593 /* sbitmap of visited nodes. */
1594 sbitmap visited;
1595 /* Array that stores the topological order of the graph, *in
1596 reverse*. */
1597 VEC(unsigned,heap) *topo_order;
1601 /* Initialize and return a topological info structure. */
1603 static struct topo_info *
1604 init_topo_info (void)
1606 size_t size = graph->size;
1607 struct topo_info *ti = XNEW (struct topo_info);
1608 ti->visited = sbitmap_alloc (size);
1609 sbitmap_zero (ti->visited);
1610 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1611 return ti;
1615 /* Free the topological sort info pointed to by TI. */
1617 static void
1618 free_topo_info (struct topo_info *ti)
1620 sbitmap_free (ti->visited);
1621 VEC_free (unsigned, heap, ti->topo_order);
1622 free (ti);
1625 /* Visit the graph in topological order, and store the order in the
1626 topo_info structure. */
1628 static void
1629 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1630 unsigned int n)
1632 bitmap_iterator bi;
1633 unsigned int j;
1635 SET_BIT (ti->visited, n);
1637 if (graph->succs[n])
1638 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1640 if (!TEST_BIT (ti->visited, j))
1641 topo_visit (graph, ti, j);
1644 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1647 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1648 starting solution for y. */
1650 static void
1651 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1652 bitmap delta)
1654 unsigned int lhs = c->lhs.var;
1655 bool flag = false;
1656 bitmap sol = get_varinfo (lhs)->solution;
1657 unsigned int j;
1658 bitmap_iterator bi;
1659 HOST_WIDE_INT roffset = c->rhs.offset;
1661 /* Our IL does not allow this. */
1662 gcc_assert (c->lhs.offset == 0);
1664 /* If the solution of Y contains anything it is good enough to transfer
1665 this to the LHS. */
1666 if (bitmap_bit_p (delta, anything_id))
1668 flag |= bitmap_set_bit (sol, anything_id);
1669 goto done;
1672 /* If we do not know at which offset the rhs is dereferenced, compute
1673 the reachability set of DELTA, conservatively assuming it is
1674 dereferenced at all valid offsets. */
1675 if (roffset == UNKNOWN_OFFSET)
1677 solution_set_expand (delta, delta);
1678 /* No further offset processing is necessary. */
1679 roffset = 0;
1682 /* For each variable j in delta (Sol(y)), add
1683 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1684 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1686 varinfo_t v = get_varinfo (j);
1687 HOST_WIDE_INT fieldoffset = v->offset + roffset;
1688 unsigned int t;
1690 if (v->is_full_var)
1691 fieldoffset = v->offset;
1692 else if (roffset != 0)
1693 v = first_vi_for_offset (v, fieldoffset);
1694 /* If the access is outside of the variable we can ignore it. */
1695 if (!v)
1696 continue;
1700 t = find (v->id);
1702 /* Adding edges from the special vars is pointless.
1703 They don't have sets that can change. */
1704 if (get_varinfo (t)->is_special_var)
1705 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1706 /* Merging the solution from ESCAPED needlessly increases
1707 the set. Use ESCAPED as representative instead. */
1708 else if (v->id == escaped_id)
1709 flag |= bitmap_set_bit (sol, escaped_id);
1710 else if (v->may_have_pointers
1711 && add_graph_edge (graph, lhs, t))
1712 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1714 /* If the variable is not exactly at the requested offset
1715 we have to include the next one. */
1716 if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1717 || v->next == NULL)
1718 break;
1720 v = v->next;
1721 fieldoffset = v->offset;
1723 while (1);
1726 done:
1727 /* If the LHS solution changed, mark the var as changed. */
1728 if (flag)
1730 get_varinfo (lhs)->solution = sol;
1731 if (!TEST_BIT (changed, lhs))
1733 SET_BIT (changed, lhs);
1734 changed_count++;
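/* For example (hypothetical variables), for x = *y with
   Sol(y) = { a, b } and a, b ordinary non-special variables, this adds
   the graph edges a -> x and b -> x and unions Sol(a) and Sol(b) into
   Sol(x).  */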
1739 /* Process a constraint C that represents *(x + off) = y using DELTA
1740 as the starting solution for x. */
1742 static void
1743 do_ds_constraint (constraint_t c, bitmap delta)
1745 unsigned int rhs = c->rhs.var;
1746 bitmap sol = get_varinfo (rhs)->solution;
1747 unsigned int j;
1748 bitmap_iterator bi;
1749 HOST_WIDE_INT loff = c->lhs.offset;
1750 bool escaped_p = false;
1752 /* Our IL does not allow this. */
1753 gcc_assert (c->rhs.offset == 0);
1755 /* If the solution of y contains ANYTHING simply use the ANYTHING
1756 solution. This avoids needlessly increasing the points-to sets. */
1757 if (bitmap_bit_p (sol, anything_id))
1758 sol = get_varinfo (find (anything_id))->solution;
1760 /* If the solution for x contains ANYTHING we have to merge the
1761 solution of y into all pointer variables which we do via
1762 STOREDANYTHING. */
1763 if (bitmap_bit_p (delta, anything_id))
1765 unsigned t = find (storedanything_id);
1766 if (add_graph_edge (graph, t, rhs))
1768 if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1770 if (!TEST_BIT (changed, t))
1772 SET_BIT (changed, t);
1773 changed_count++;
1777 return;
1780 /* If we do not know at which offset the lhs is dereferenced, compute
1781 the reachability set of DELTA, conservatively assuming it is
1782 dereferenced at all valid offsets. */
1783 if (loff == UNKNOWN_OFFSET)
1785 solution_set_expand (delta, delta);
1786 loff = 0;
1789 /* For each member j of delta (Sol(x)), add an edge from y to j and
1790 union Sol(y) into Sol(j) */
1791 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1793 varinfo_t v = get_varinfo (j);
1794 unsigned int t;
1795 HOST_WIDE_INT fieldoffset = v->offset + loff;
1797 if (v->is_full_var)
1798 fieldoffset = v->offset;
1799 else if (loff != 0)
1800 v = first_vi_for_offset (v, fieldoffset);
1801 /* If the access is outside of the variable we can ignore it. */
1802 if (!v)
1803 continue;
1807 if (v->may_have_pointers)
1809 /* If v is a global variable then this is an escape point. */
1810 if (v->is_global_var
1811 && !escaped_p)
1813 t = find (escaped_id);
1814 if (add_graph_edge (graph, t, rhs)
1815 && bitmap_ior_into (get_varinfo (t)->solution, sol)
1816 && !TEST_BIT (changed, t))
1818 SET_BIT (changed, t);
1819 changed_count++;
1821 /* Enough to let rhs escape once. */
1822 escaped_p = true;
1825 if (v->is_special_var)
1826 break;
1828 t = find (v->id);
1829 if (add_graph_edge (graph, t, rhs)
1830 && bitmap_ior_into (get_varinfo (t)->solution, sol)
1831 && !TEST_BIT (changed, t))
1833 SET_BIT (changed, t);
1834 changed_count++;
1838 /* If the variable is not exactly at the requested offset
1839 we have to include the next one. */
1840 if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1841 || v->next == NULL)
1842 break;
1844 v = v->next;
1845 fieldoffset = v->offset;
1847 while (1);
1851 /* Handle a non-simple (simple meaning requires no iteration)
1852 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1854 static void
1855 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1857 if (c->lhs.type == DEREF)
1859 if (c->rhs.type == ADDRESSOF)
1861 gcc_unreachable();
1863 else
1865 /* *x = y */
1866 do_ds_constraint (c, delta);
1869 else if (c->rhs.type == DEREF)
1871 /* x = *y */
1872 if (!(get_varinfo (c->lhs.var)->is_special_var))
1873 do_sd_constraint (graph, c, delta);
1875 else
1877 bitmap tmp;
1878 bitmap solution;
1879 bool flag = false;
1881 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1882 solution = get_varinfo (c->rhs.var)->solution;
1883 tmp = get_varinfo (c->lhs.var)->solution;
1885 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1887 if (flag)
1889 get_varinfo (c->lhs.var)->solution = tmp;
1890 if (!TEST_BIT (changed, c->lhs.var))
1892 SET_BIT (changed, c->lhs.var);
1893 changed_count++;
1899 /* Initialize and return a new SCC info structure. */
1901 static struct scc_info *
1902 init_scc_info (size_t size)
1904 struct scc_info *si = XNEW (struct scc_info);
1905 size_t i;
1907 si->current_index = 0;
1908 si->visited = sbitmap_alloc (size);
1909 sbitmap_zero (si->visited);
1910 si->deleted = sbitmap_alloc (size);
1911 sbitmap_zero (si->deleted);
1912 si->node_mapping = XNEWVEC (unsigned int, size);
1913 si->dfs = XCNEWVEC (unsigned int, size);
1915 for (i = 0; i < size; i++)
1916 si->node_mapping[i] = i;
1918 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1919 return si;
1922 /* Free an SCC info structure pointed to by SI */
1924 static void
1925 free_scc_info (struct scc_info *si)
1927 sbitmap_free (si->visited);
1928 sbitmap_free (si->deleted);
1929 free (si->node_mapping);
1930 free (si->dfs);
1931 VEC_free (unsigned, heap, si->scc_stack);
1932 free (si);
1936 /* Find indirect cycles in GRAPH that occur, using strongly connected
1937 components, and note them in the indirect cycles map.
1939 This technique comes from Ben Hardekopf and Calvin Lin,
1940 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1941 Lines of Code", submitted to PLDI 2007. */
1943 static void
1944 find_indirect_cycles (constraint_graph_t graph)
1946 unsigned int i;
1947 unsigned int size = graph->size;
1948 struct scc_info *si = init_scc_info (size);
1950 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1951 if (!TEST_BIT (si->visited, i) && find (i) == i)
1952 scc_visit (graph, si, i);
1954 free_scc_info (si);
1957 /* Compute a topological ordering for GRAPH, and store the result in the
1958 topo_info structure TI. */
1960 static void
1961 compute_topo_order (constraint_graph_t graph,
1962 struct topo_info *ti)
1964 unsigned int i;
1965 unsigned int size = graph->size;
1967 for (i = 0; i != size; ++i)
1968 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1969 topo_visit (graph, ti, i);
1972 /* Structure used for hash value numbering of pointer equivalence
1973 classes. */
1975 typedef struct equiv_class_label
1977 hashval_t hashcode;
1978 unsigned int equivalence_class;
1979 bitmap labels;
1980 } *equiv_class_label_t;
1981 typedef const struct equiv_class_label *const_equiv_class_label_t;
1983 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1984 classes. */
1985 static htab_t pointer_equiv_class_table;
1987 /* A hashtable for mapping a bitmap of labels->location equivalence
1988 classes. */
1989 static htab_t location_equiv_class_table;
1991 /* Hash function for an equiv_class_label_t. */
1993 static hashval_t
1994 equiv_class_label_hash (const void *p)
1996 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1997 return ecl->hashcode;
2000 /* Equality function for two equiv_class_label_t's. */
2002 static int
2003 equiv_class_label_eq (const void *p1, const void *p2)
2005 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
2006 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
2007 return (eql1->hashcode == eql2->hashcode
2008 && bitmap_equal_p (eql1->labels, eql2->labels));
2011 /* Look up an equivalence class in TABLE by the bitmap of LABELS it
2012 contains. */
2014 static unsigned int
2015 equiv_class_lookup (htab_t table, bitmap labels)
2017 void **slot;
2018 struct equiv_class_label ecl;
2020 ecl.labels = labels;
2021 ecl.hashcode = bitmap_hash (labels);
2023 slot = htab_find_slot_with_hash (table, &ecl,
2024 ecl.hashcode, NO_INSERT);
2025 if (!slot)
2026 return 0;
2027 else
2028 return ((equiv_class_label_t) *slot)->equivalence_class;
2032 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
2033 to TABLE. */
2035 static void
2036 equiv_class_add (htab_t table, unsigned int equivalence_class,
2037 bitmap labels)
2039 void **slot;
2040 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
2042 ecl->labels = labels;
2043 ecl->equivalence_class = equivalence_class;
2044 ecl->hashcode = bitmap_hash (labels);
2046 slot = htab_find_slot_with_hash (table, ecl,
2047 ecl->hashcode, INSERT);
2048 gcc_assert (!*slot);
2049 *slot = (void *) ecl;
2052 /* Perform offline variable substitution.
2054 This is a worst case quadratic time way of identifying variables
2055 that must have equivalent points-to sets, including those caused by
2056 static cycles, and single entry subgraphs, in the constraint graph.
2058 The technique is described in "Exploiting Pointer and Location
2059 Equivalence to Optimize Pointer Analysis", in the 14th International
2060 Static Analysis Symposium (SAS), August 2007. It is known as the
2061 "HU" algorithm, and is equivalent to value numbering the collapsed
2062 constraint graph including evaluating unions.
2064 The general method of finding equivalence classes is as follows:
2065 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
2066 Initialize all non-REF nodes to be direct nodes.
2067 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
2068 variable}
2069 For each constraint containing the dereference, we also do the same
2070 thing.
2072 We then compute SCC's in the graph and unify nodes in the same SCC,
2073 including pts sets.
2075 For each non-collapsed node x:
2076 Visit all unvisited explicit incoming edges.
2077 Ignoring all non-pointers, set pts(x) = Union of pts(y) for all y
2078 where y->x.
2079 Lookup the equivalence class for pts(x).
2080 If we found one, equivalence_class(x) = found class.
2081 Otherwise, equivalence_class(x) = new class, and new_class is
2082 added to the lookup table.
2084 All direct nodes with the same equivalence class can be replaced
2085 with a single representative node.
2086 All unlabeled nodes (label == 0) are not pointers and all edges
2087 involving them can be eliminated.
2088 We perform these optimizations during rewrite_constraints.
2090 In addition to pointer equivalence class finding, we also perform
2091 location equivalence class finding. This is the set of variables
2092 that always appear together in points-to sets. We use this to
2093 compress the size of the points-to sets. */
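/* A small worked example (illustrative only): given the constraints

     a = &x;   b = a;   c = a;

   a, b and c all end up with the same set of labels and hence the same
   pointer equivalence class, so these direct nodes can be collapsed onto
   a single representative; a node that never receives a label is not a
   pointer and all of its edges can be dropped.  */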
2095 /* Current maximum pointer equivalence class id. */
2096 static int pointer_equiv_class;
2098 /* Current maximum location equivalence class id. */
2099 static int location_equiv_class;
2101 /* Recursive routine to find strongly connected components in GRAPH,
2102 and label its nodes with DFS numbers. */
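/* This is essentially Tarjan's SCC algorithm run over the explicit and
   implicit predecessor edges of the offline graph; members of a found
   SCC are collapsed onto the SCC root via si->node_mapping and their
   pred/points-to bitmaps are unioned into the root.  */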
2104 static void
2105 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2107 unsigned int i;
2108 bitmap_iterator bi;
2109 unsigned int my_dfs;
2111 gcc_assert (si->node_mapping[n] == n);
2112 SET_BIT (si->visited, n);
2113 si->dfs[n] = si->current_index ++;
2114 my_dfs = si->dfs[n];
2116 /* Visit all the predecessors. */
2117 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2119 unsigned int w = si->node_mapping[i];
2121 if (TEST_BIT (si->deleted, w))
2122 continue;
2124 if (!TEST_BIT (si->visited, w))
2125 condense_visit (graph, si, w);
2127 unsigned int t = si->node_mapping[w];
2128 unsigned int nnode = si->node_mapping[n];
2129 gcc_assert (nnode == n);
2131 if (si->dfs[t] < si->dfs[nnode])
2132 si->dfs[n] = si->dfs[t];
2136 /* Visit all the implicit predecessors. */
2137 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2139 unsigned int w = si->node_mapping[i];
2141 if (TEST_BIT (si->deleted, w))
2142 continue;
2144 if (!TEST_BIT (si->visited, w))
2145 condense_visit (graph, si, w);
2147 unsigned int t = si->node_mapping[w];
2148 unsigned int nnode = si->node_mapping[n];
2149 gcc_assert (nnode == n);
2151 if (si->dfs[t] < si->dfs[nnode])
2152 si->dfs[n] = si->dfs[t];
2156 /* See if any components have been identified. */
2157 if (si->dfs[n] == my_dfs)
2159 while (VEC_length (unsigned, si->scc_stack) != 0
2160 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
2162 unsigned int w = VEC_pop (unsigned, si->scc_stack);
2163 si->node_mapping[w] = n;
2165 if (!TEST_BIT (graph->direct_nodes, w))
2166 RESET_BIT (graph->direct_nodes, n);
2168 /* Unify our nodes. */
2169 if (graph->preds[w])
2171 if (!graph->preds[n])
2172 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2173 bitmap_ior_into (graph->preds[n], graph->preds[w]);
2175 if (graph->implicit_preds[w])
2177 if (!graph->implicit_preds[n])
2178 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2179 bitmap_ior_into (graph->implicit_preds[n],
2180 graph->implicit_preds[w]);
2182 if (graph->points_to[w])
2184 if (!graph->points_to[n])
2185 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2186 bitmap_ior_into (graph->points_to[n],
2187 graph->points_to[w]);
2190 SET_BIT (si->deleted, n);
2192 else
2193 VEC_safe_push (unsigned, heap, si->scc_stack, n);
2196 /* Label pointer equivalences. */
2198 static void
2199 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2201 unsigned int i;
2202 bitmap_iterator bi;
2203 SET_BIT (si->visited, n);
2205 if (!graph->points_to[n])
2206 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2208 /* Label and union our incoming edges' points-to sets. */
2209 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2211 unsigned int w = si->node_mapping[i];
2212 if (!TEST_BIT (si->visited, w))
2213 label_visit (graph, si, w);
2215 /* Skip unused edges */
2216 if (w == n || graph->pointer_label[w] == 0)
2217 continue;
2219 if (graph->points_to[w])
2220 bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
2222 /* Indirect nodes get fresh variables. */
2223 if (!TEST_BIT (graph->direct_nodes, n))
2224 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2226 if (!bitmap_empty_p (graph->points_to[n]))
2228 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
2229 graph->points_to[n]);
2230 if (!label)
2232 label = pointer_equiv_class++;
2233 equiv_class_add (pointer_equiv_class_table,
2234 label, graph->points_to[n]);
2236 graph->pointer_label[n] = label;
2240 /* Perform offline variable substitution, discovering equivalence
2241 classes, and eliminating non-pointer variables. */
2243 static struct scc_info *
2244 perform_var_substitution (constraint_graph_t graph)
2246 unsigned int i;
2247 unsigned int size = graph->size;
2248 struct scc_info *si = init_scc_info (size);
2250 bitmap_obstack_initialize (&iteration_obstack);
2251 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
2252 equiv_class_label_eq, free);
2253 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
2254 equiv_class_label_eq, free);
2255 pointer_equiv_class = 1;
2256 location_equiv_class = 1;
2258 /* Condense the nodes, which means to find SCC's, count incoming
2259 predecessors, and unite nodes in SCC's. */
2260 for (i = 0; i < FIRST_REF_NODE; i++)
2261 if (!TEST_BIT (si->visited, si->node_mapping[i]))
2262 condense_visit (graph, si, si->node_mapping[i]);
2264 sbitmap_zero (si->visited);
2265 /* Actually label the nodes for pointer equivalences. */
2266 for (i = 0; i < FIRST_REF_NODE; i++)
2267 if (!TEST_BIT (si->visited, si->node_mapping[i]))
2268 label_visit (graph, si, si->node_mapping[i]);
2270 /* Calculate location equivalence labels. */
2271 for (i = 0; i < FIRST_REF_NODE; i++)
2273 bitmap pointed_by;
2274 bitmap_iterator bi;
2275 unsigned int j;
2276 unsigned int label;
2278 if (!graph->pointed_by[i])
2279 continue;
2280 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2282 /* Translate the pointed-by mapping for pointer equivalence
2283 labels. */
2284 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2286 bitmap_set_bit (pointed_by,
2287 graph->pointer_label[si->node_mapping[j]]);
2289 /* The original pointed_by is now dead. */
2290 BITMAP_FREE (graph->pointed_by[i]);
2292 /* Look up the location equivalence label if one exists, or make
2293 one otherwise. */
2294 label = equiv_class_lookup (location_equiv_class_table,
2295 pointed_by);
2296 if (label == 0)
2298 label = location_equiv_class++;
2299 equiv_class_add (location_equiv_class_table,
2300 label, pointed_by);
2302 else
2304 if (dump_file && (dump_flags & TDF_DETAILS))
2305 fprintf (dump_file, "Found location equivalence for node %s\n",
2306 get_varinfo (i)->name);
2307 BITMAP_FREE (pointed_by);
2309 graph->loc_label[i] = label;
2313 if (dump_file && (dump_flags & TDF_DETAILS))
2314 for (i = 0; i < FIRST_REF_NODE; i++)
2316 bool direct_node = TEST_BIT (graph->direct_nodes, i);
2317 fprintf (dump_file,
2318 "Equivalence classes for %s node id %d:%s are pointer: %d"
2319 ", location:%d\n",
2320 direct_node ? "Direct node" : "Indirect node", i,
2321 get_varinfo (i)->name,
2322 graph->pointer_label[si->node_mapping[i]],
2323 graph->loc_label[si->node_mapping[i]]);
2326 /* Quickly eliminate our non-pointer variables. */
2328 for (i = 0; i < FIRST_REF_NODE; i++)
2330 unsigned int node = si->node_mapping[i];
2332 if (graph->pointer_label[node] == 0)
2334 if (dump_file && (dump_flags & TDF_DETAILS))
2335 fprintf (dump_file,
2336 "%s is a non-pointer variable, eliminating edges.\n",
2337 get_varinfo (node)->name);
2338 stats.nonpointer_vars++;
2339 clear_edges_for_node (graph, node);
2343 return si;
2346 /* Free information that was only necessary for variable
2347 substitution. */
2349 static void
2350 free_var_substitution_info (struct scc_info *si)
2352 free_scc_info (si);
2353 free (graph->pointer_label);
2354 free (graph->loc_label);
2355 free (graph->pointed_by);
2356 free (graph->points_to);
2357 free (graph->eq_rep);
2358 sbitmap_free (graph->direct_nodes);
2359 htab_delete (pointer_equiv_class_table);
2360 htab_delete (location_equiv_class_table);
2361 bitmap_obstack_release (&iteration_obstack);
2364 /* Return an existing node that is equivalent to NODE, which has
2365 equivalence class LABEL, if one exists. Return NODE otherwise. */
2367 static unsigned int
2368 find_equivalent_node (constraint_graph_t graph,
2369 unsigned int node, unsigned int label)
2371 /* If the address version of this variable is unused, we can
2372 substitute it for anything else with the same label.
2373 Otherwise, we know the pointers are equivalent, but not the
2374 locations, and we can unite them later. */
2376 if (!bitmap_bit_p (graph->address_taken, node))
2378 gcc_assert (label < graph->size);
2380 if (graph->eq_rep[label] != -1)
2382 /* Unify the two variables since we know they are equivalent. */
2383 if (unite (graph->eq_rep[label], node))
2384 unify_nodes (graph, graph->eq_rep[label], node, false);
2385 return graph->eq_rep[label];
2387 else
2389 graph->eq_rep[label] = node;
2390 graph->pe_rep[label] = node;
2393 else
2395 gcc_assert (label < graph->size);
2396 graph->pe[node] = label;
2397 if (graph->pe_rep[label] == -1)
2398 graph->pe_rep[label] = node;
2401 return node;
2404 /* Unite pointer equivalent but not location equivalent nodes in
2405 GRAPH. This may only be performed once variable substitution is
2406 finished. */
2408 static void
2409 unite_pointer_equivalences (constraint_graph_t graph)
2411 unsigned int i;
2413 /* Go through the pointer equivalences and unite them to their
2414 representative, if they aren't already. */
2415 for (i = 0; i < FIRST_REF_NODE; i++)
2417 unsigned int label = graph->pe[i];
2418 if (label)
2420 int label_rep = graph->pe_rep[label];
2422 if (label_rep == -1)
2423 continue;
2425 label_rep = find (label_rep);
2426 if (label_rep >= 0 && unite (label_rep, find (i)))
2427 unify_nodes (graph, label_rep, i, false);
2432 /* Move complex constraints to the GRAPH nodes they belong to. */
2434 static void
2435 move_complex_constraints (constraint_graph_t graph)
2437 int i;
2438 constraint_t c;
2440 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2442 if (c)
2444 struct constraint_expr lhs = c->lhs;
2445 struct constraint_expr rhs = c->rhs;
2447 if (lhs.type == DEREF)
2449 insert_into_complex (graph, lhs.var, c);
2451 else if (rhs.type == DEREF)
2453 if (!(get_varinfo (lhs.var)->is_special_var))
2454 insert_into_complex (graph, rhs.var, c);
2456 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2457 && (lhs.offset != 0 || rhs.offset != 0))
2459 insert_into_complex (graph, rhs.var, c);
2466 /* Optimize and rewrite complex constraints while performing
2467 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2468 result of perform_var_substitution. */
2470 static void
2471 rewrite_constraints (constraint_graph_t graph,
2472 struct scc_info *si)
2474 int i;
2475 unsigned int j;
2476 constraint_t c;
2478 for (j = 0; j < graph->size; j++)
2479 gcc_assert (find (j) == j);
2481 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2483 struct constraint_expr lhs = c->lhs;
2484 struct constraint_expr rhs = c->rhs;
2485 unsigned int lhsvar = find (lhs.var);
2486 unsigned int rhsvar = find (rhs.var);
2487 unsigned int lhsnode, rhsnode;
2488 unsigned int lhslabel, rhslabel;
2490 lhsnode = si->node_mapping[lhsvar];
2491 rhsnode = si->node_mapping[rhsvar];
2492 lhslabel = graph->pointer_label[lhsnode];
2493 rhslabel = graph->pointer_label[rhsnode];
2495 /* See if it is really a non-pointer variable, and if so, ignore
2496 the constraint. */
2497 if (lhslabel == 0)
2499 if (dump_file && (dump_flags & TDF_DETAILS))
2502 fprintf (dump_file, "%s is a non-pointer variable, "
2503 "ignoring constraint:",
2504 get_varinfo (lhs.var)->name);
2505 dump_constraint (dump_file, c);
2507 VEC_replace (constraint_t, constraints, i, NULL);
2508 continue;
2511 if (rhslabel == 0)
2513 if (dump_file && (dump_flags & TDF_DETAILS))
2516 fprintf (dump_file, "%s is a non-pointer variable, "
2517 "ignoring constraint:",
2518 get_varinfo (rhs.var)->name);
2519 dump_constraint (dump_file, c);
2521 VEC_replace (constraint_t, constraints, i, NULL);
2522 continue;
2525 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2526 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2527 c->lhs.var = lhsvar;
2528 c->rhs.var = rhsvar;
2533 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2534 part of an SCC, false otherwise. */
2536 static bool
2537 eliminate_indirect_cycles (unsigned int node)
2539 if (graph->indirect_cycles[node] != -1
2540 && !bitmap_empty_p (get_varinfo (node)->solution))
2542 unsigned int i;
2543 VEC(unsigned,heap) *queue = NULL;
2544 int queuepos;
2545 unsigned int to = find (graph->indirect_cycles[node]);
2546 bitmap_iterator bi;
2548 /* We can't touch the solution set and call unify_nodes
2549 at the same time, because unify_nodes is going to do
2550 bitmap unions into it. */
2552 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2554 if (find (i) == i && i != to)
2556 if (unite (to, i))
2557 VEC_safe_push (unsigned, heap, queue, i);
2561 for (queuepos = 0;
2562 VEC_iterate (unsigned, queue, queuepos, i);
2563 queuepos++)
2565 unify_nodes (graph, to, i, true);
2567 VEC_free (unsigned, heap, queue);
2568 return true;
2570 return false;
2573 /* Solve the constraint graph GRAPH using our worklist solver.
2574 This is based on the PW* family of solvers from the "Efficient Field
2575 Sensitive Pointer Analysis for C" paper.
2576 It works by iterating over all the graph nodes, processing the complex
2577 constraints and propagating the copy constraints, until everything stops
2578 changing. This corresponds to steps 6-8 in the solving list given above. */
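/* In outline (a sketch of the loop below):

     while some node is marked changed:
       compute a topological order of the current graph;
       for each representative node N in that order:
         if N is marked changed:
           delta = solution (N) - oldsolution (N);
           re-run N's complex constraints against delta;
           propagate delta along N's copy edges, marking any successor
           whose solution grew as changed.  */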
2580 static void
2581 solve_graph (constraint_graph_t graph)
2583 unsigned int size = graph->size;
2584 unsigned int i;
2585 bitmap pts;
2587 changed_count = 0;
2588 changed = sbitmap_alloc (size);
2589 sbitmap_zero (changed);
2591 /* Mark all initial non-collapsed nodes as changed. */
2592 for (i = 0; i < size; i++)
2594 varinfo_t ivi = get_varinfo (i);
2595 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2596 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2597 || VEC_length (constraint_t, graph->complex[i]) > 0))
2599 SET_BIT (changed, i);
2600 changed_count++;
2604 /* Allocate a bitmap to be used to store the changed bits. */
2605 pts = BITMAP_ALLOC (&pta_obstack);
2607 while (changed_count > 0)
2609 unsigned int i;
2610 struct topo_info *ti = init_topo_info ();
2611 stats.iterations++;
2613 bitmap_obstack_initialize (&iteration_obstack);
2615 compute_topo_order (graph, ti);
2617 while (VEC_length (unsigned, ti->topo_order) != 0)
2620 i = VEC_pop (unsigned, ti->topo_order);
2622 /* If this variable is not a representative, skip it. */
2623 if (find (i) != i)
2624 continue;
2626 /* In certain indirect cycle cases, we may merge this
2627 variable to another. */
2628 if (eliminate_indirect_cycles (i) && find (i) != i)
2629 continue;
2631 /* If the node has changed, we need to process the
2632 complex constraints and outgoing edges again. */
2633 if (TEST_BIT (changed, i))
2635 unsigned int j;
2636 constraint_t c;
2637 bitmap solution;
2638 VEC(constraint_t,heap) *complex = graph->complex[i];
2639 bool solution_empty;
2641 RESET_BIT (changed, i);
2642 changed_count--;
2644 /* Compute the changed set of solution bits. */
2645 bitmap_and_compl (pts, get_varinfo (i)->solution,
2646 get_varinfo (i)->oldsolution);
2648 if (bitmap_empty_p (pts))
2649 continue;
2651 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2653 solution = get_varinfo (i)->solution;
2654 solution_empty = bitmap_empty_p (solution);
2656 /* Process the complex constraints */
2657 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2659 /* XXX: This is going to unsort the constraints in
2660 some cases, which will occasionally add duplicate
2661 constraints during unification. This does not
2662 affect correctness. */
2663 c->lhs.var = find (c->lhs.var);
2664 c->rhs.var = find (c->rhs.var);
2666 /* The only complex constraint that can change our
2667 solution to non-empty, given an empty solution,
2668 is a constraint where the lhs side is receiving
2669 some set from elsewhere. */
2670 if (!solution_empty || c->lhs.type != DEREF)
2671 do_complex_constraint (graph, c, pts);
2674 solution_empty = bitmap_empty_p (solution);
2676 if (!solution_empty)
2678 bitmap_iterator bi;
2679 unsigned eff_escaped_id = find (escaped_id);
2681 /* Propagate solution to all successors. */
2682 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2683 0, j, bi)
2685 bitmap tmp;
2686 bool flag;
2688 unsigned int to = find (j);
2689 tmp = get_varinfo (to)->solution;
2690 flag = false;
2692 /* Don't try to propagate to ourselves. */
2693 if (to == i)
2694 continue;
2696 /* If we propagate from ESCAPED use ESCAPED as
2697 placeholder. */
2698 if (i == eff_escaped_id)
2699 flag = bitmap_set_bit (tmp, escaped_id);
2700 else
2701 flag = set_union_with_increment (tmp, pts, 0);
2703 if (flag)
2705 get_varinfo (to)->solution = tmp;
2706 if (!TEST_BIT (changed, to))
2708 SET_BIT (changed, to);
2709 changed_count++;
2716 free_topo_info (ti);
2717 bitmap_obstack_release (&iteration_obstack);
2720 BITMAP_FREE (pts);
2721 sbitmap_free (changed);
2722 bitmap_obstack_release (&oldpta_obstack);
2725 /* Map from trees to variable infos. */
2726 static struct pointer_map_t *vi_for_tree;
2729 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2731 static void
2732 insert_vi_for_tree (tree t, varinfo_t vi)
2734 void **slot = pointer_map_insert (vi_for_tree, t);
2735 gcc_assert (vi);
2736 gcc_assert (*slot == NULL);
2737 *slot = vi;
2740 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2741 exist in the map, return NULL, otherwise, return the varinfo we found. */
2743 static varinfo_t
2744 lookup_vi_for_tree (tree t)
2746 void **slot = pointer_map_contains (vi_for_tree, t);
2747 if (slot == NULL)
2748 return NULL;
2750 return (varinfo_t) *slot;
2753 /* Return a printable name for DECL */
2755 static const char *
2756 alias_get_name (tree decl)
2758 const char *res;
2759 char *temp;
2760 int num_printed = 0;
2762 if (DECL_ASSEMBLER_NAME_SET_P (decl))
2763 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2764 else
2765 res = get_name (decl);
2766 if (res != NULL)
2767 return res;
2769 res = "NULL";
2770 if (!dump_file)
2771 return res;
2773 if (TREE_CODE (decl) == SSA_NAME)
2775 num_printed = asprintf (&temp, "%s_%u",
2776 alias_get_name (SSA_NAME_VAR (decl)),
2777 SSA_NAME_VERSION (decl));
2779 else if (DECL_P (decl))
2781 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2783 if (num_printed > 0)
2785 res = ggc_strdup (temp);
2786 free (temp);
2788 return res;
2791 /* Find the variable id for tree T in the map.
2792 If T doesn't exist in the map, create an entry for it and return it. */
2794 static varinfo_t
2795 get_vi_for_tree (tree t)
2797 void **slot = pointer_map_contains (vi_for_tree, t);
2798 if (slot == NULL)
2799 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2801 return (varinfo_t) *slot;
2804 /* Get a scalar constraint expression for a new temporary variable. */
2806 static struct constraint_expr
2807 new_scalar_tmp_constraint_exp (const char *name)
2809 struct constraint_expr tmp;
2810 varinfo_t vi;
2812 vi = new_var_info (NULL_TREE, name);
2813 vi->offset = 0;
2814 vi->size = -1;
2815 vi->fullsize = -1;
2816 vi->is_full_var = 1;
2818 tmp.var = vi->id;
2819 tmp.type = SCALAR;
2820 tmp.offset = 0;
2822 return tmp;
2825 /* Get a constraint expression vector from an SSA_VAR_P node.
2826 If address_p is true, the result will have its address taken. */
2828 static void
2829 get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p)
2831 struct constraint_expr cexpr;
2832 varinfo_t vi;
2834 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2835 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2837 /* For parameters, get at the points-to set for the actual parm
2838 decl. */
2839 if (TREE_CODE (t) == SSA_NAME
2840 && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2841 || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
2842 && SSA_NAME_IS_DEFAULT_DEF (t))
2844 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2845 return;
2848 vi = get_vi_for_tree (t);
2849 cexpr.var = vi->id;
2850 cexpr.type = SCALAR;
2851 cexpr.offset = 0;
2852 /* If we determine the result is "anything", and we know this is readonly,
2853 say it points to readonly memory instead. */
2854 if (cexpr.var == anything_id && TREE_READONLY (t))
2856 gcc_unreachable ();
2857 cexpr.type = ADDRESSOF;
2858 cexpr.var = readonly_id;
2861 /* If we are not taking the address of the constraint expr, add all
2862 sub-fields of the variable as well. */
2863 if (!address_p
2864 && !vi->is_full_var)
2866 for (; vi; vi = vi->next)
2868 cexpr.var = vi->id;
2869 VEC_safe_push (ce_s, heap, *results, &cexpr);
2871 return;
2874 VEC_safe_push (ce_s, heap, *results, &cexpr);
2877 /* Process constraint T, performing various simplifications and then
2878 adding it to our list of overall constraints. */
2880 static void
2881 process_constraint (constraint_t t)
2883 struct constraint_expr rhs = t->rhs;
2884 struct constraint_expr lhs = t->lhs;
2886 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2887 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2889 /* If we didn't get any useful constraint from the lhs we get
2890 &ANYTHING as fallback from get_constraint_for. Deal with
2891 it here by turning it into *ANYTHING. */
2892 if (lhs.type == ADDRESSOF
2893 && lhs.var == anything_id)
2894 lhs.type = DEREF;
2896 /* ADDRESSOF on the lhs is invalid. */
2897 gcc_assert (lhs.type != ADDRESSOF);
2899 /* We shouldn't add constraints from things that cannot have pointers.
2900 It's not completely trivial to avoid in the callers, so do it here. */
2901 if (rhs.type != ADDRESSOF
2902 && !get_varinfo (rhs.var)->may_have_pointers)
2903 return;
2905 /* Likewise adding to the solution of a non-pointer var isn't useful. */
2906 if (!get_varinfo (lhs.var)->may_have_pointers)
2907 return;
2909 /* This can happen in our IR with things like n->a = *p */
2910 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2912 /* Split into tmp = *rhs, *lhs = tmp */
2913 struct constraint_expr tmplhs;
2914 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
2915 process_constraint (new_constraint (tmplhs, rhs));
2916 process_constraint (new_constraint (lhs, tmplhs));
2918 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2920 /* Split into tmp = &rhs, *lhs = tmp */
2921 struct constraint_expr tmplhs;
2922 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
2923 process_constraint (new_constraint (tmplhs, rhs));
2924 process_constraint (new_constraint (lhs, tmplhs));
2926 else
2928 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2929 VEC_safe_push (constraint_t, heap, constraints, t);
2933 /* Return true if T is a type that could contain pointers. */
2935 static bool
2936 type_could_have_pointers (tree type)
2938 if (POINTER_TYPE_P (type))
2939 return true;
2941 if (TREE_CODE (type) == ARRAY_TYPE)
2942 return type_could_have_pointers (TREE_TYPE (type));
2944 /* A function or method can consume pointers.
2945 ??? We could be more precise here. */
2946 if (TREE_CODE (type) == FUNCTION_TYPE
2947 || TREE_CODE (type) == METHOD_TYPE)
2948 return true;
2950 return AGGREGATE_TYPE_P (type);
2953 /* Return true if T is a variable of a type that could contain
2954 pointers. */
2956 static bool
2957 could_have_pointers (tree t)
2959 return (((TREE_CODE (t) == VAR_DECL
2960 || TREE_CODE (t) == PARM_DECL
2961 || TREE_CODE (t) == RESULT_DECL)
2962 && (TREE_PUBLIC (t) || DECL_EXTERNAL (t) || TREE_ADDRESSABLE (t)))
2963 || type_could_have_pointers (TREE_TYPE (t)));
2966 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2967 structure. */
2969 static HOST_WIDE_INT
2970 bitpos_of_field (const tree fdecl)
2973 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2974 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2975 return -1;
2977 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * 8
2978 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2982 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
2983 resulting constraint expressions in *RESULTS. */
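/* For instance (a sketch of the cases handled below): for q = p + 4 an
   ADDRESSOF constraint &s in the solution of p is re-targeted at the
   sub-field of s overlapping bit offset 4 * BITS_PER_UNIT, while a
   non-constant offset falls back to UNKNOWN_OFFSET and therefore to all
   sub-fields.  */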
2985 static void
2986 get_constraint_for_ptr_offset (tree ptr, tree offset,
2987 VEC (ce_s, heap) **results)
2989 struct constraint_expr c;
2990 unsigned int j, n;
2991 HOST_WIDE_INT rhsunitoffset, rhsoffset;
2993 /* If we do not do field-sensitive PTA adding offsets to pointers
2994 does not change the points-to solution. */
2995 if (!use_field_sensitive)
2997 get_constraint_for (ptr, results);
2998 return;
3001 /* If the offset is not a non-negative integer constant that fits
3002 in a HOST_WIDE_INT, we have to fall back to a conservative
3003 solution which includes all sub-fields of all pointed-to
3004 variables of ptr. */
3005 if (offset == NULL_TREE
3006 || !host_integerp (offset, 0))
3007 rhsoffset = UNKNOWN_OFFSET;
3008 else
3010 /* Make sure the bit-offset also fits. */
3011 rhsunitoffset = TREE_INT_CST_LOW (offset);
3012 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
3013 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3014 rhsoffset = UNKNOWN_OFFSET;
3017 get_constraint_for (ptr, results);
3018 if (rhsoffset == 0)
3019 return;
3021 /* As we are eventually appending to the solution do not use
3022 VEC_iterate here. */
3023 n = VEC_length (ce_s, *results);
3024 for (j = 0; j < n; j++)
3026 varinfo_t curr;
3027 c = *VEC_index (ce_s, *results, j);
3028 curr = get_varinfo (c.var);
3030 if (c.type == ADDRESSOF
3031 /* If this varinfo represents a full variable just use it. */
3032 && curr->is_full_var)
3033 c.offset = 0;
3034 else if (c.type == ADDRESSOF
3035 /* If we do not know the offset add all subfields. */
3036 && rhsoffset == UNKNOWN_OFFSET)
3038 varinfo_t temp = lookup_vi_for_tree (curr->decl);
3041 struct constraint_expr c2;
3042 c2.var = temp->id;
3043 c2.type = ADDRESSOF;
3044 c2.offset = 0;
3045 if (c2.var != c.var)
3046 VEC_safe_push (ce_s, heap, *results, &c2);
3047 temp = temp->next;
3049 while (temp);
3051 else if (c.type == ADDRESSOF)
3053 varinfo_t temp;
3054 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3056 /* Search the sub-field which overlaps with the
3057 pointed-to offset. If the result is outside of the variable
3058 we have to provide a conservative result, as the variable is
3059 still reachable from the resulting pointer (even though it
3060 technically cannot point to anything). The last and first
3061 sub-fields are such conservative results.
3062 ??? If we always had a sub-field for &object + 1 then
3063 we could represent this in a more precise way. */
3064 if (rhsoffset < 0
3065 && curr->offset < offset)
3066 offset = 0;
3067 temp = first_or_preceding_vi_for_offset (curr, offset);
3069 /* If the found variable is not exactly at the pointed to
3070 result, we have to include the next variable in the
3071 solution as well. Otherwise two increments by offset / 2
3072 do not result in the same or a conservative superset
3073 solution. */
3074 if (temp->offset != offset
3075 && temp->next != NULL)
3077 struct constraint_expr c2;
3078 c2.var = temp->next->id;
3079 c2.type = ADDRESSOF;
3080 c2.offset = 0;
3081 VEC_safe_push (ce_s, heap, *results, &c2);
3083 c.var = temp->id;
3084 c.offset = 0;
3086 else
3087 c.offset = rhsoffset;
3089 VEC_replace (ce_s, *results, j, &c);
3094 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3095 If address_p is true, the result will have its address taken. */
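/* For example (a sketch of what follows): for an access like s.a.b we
   first build a constraint for the base s as if taking its address, so
   its sub-fields are not expanded yet, and then replace it by the
   sub-fields of s that overlap the accessed bit range
   [bitpos, bitpos + bitmaxsize).  */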
3097 static void
3098 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
3099 bool address_p)
3101 tree orig_t = t;
3102 HOST_WIDE_INT bitsize = -1;
3103 HOST_WIDE_INT bitmaxsize = -1;
3104 HOST_WIDE_INT bitpos;
3105 tree forzero;
3106 struct constraint_expr *result;
3108 /* Some people like to do cute things like take the address of
3109 &0->a.b */
3110 forzero = t;
3111 while (handled_component_p (forzero)
3112 || INDIRECT_REF_P (forzero)
3113 || TREE_CODE (forzero) == MEM_REF)
3114 forzero = TREE_OPERAND (forzero, 0);
3116 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3118 struct constraint_expr temp;
3120 temp.offset = 0;
3121 temp.var = integer_id;
3122 temp.type = SCALAR;
3123 VEC_safe_push (ce_s, heap, *results, &temp);
3124 return;
3127 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
3129 /* Pretend to take the address of the base, we'll take care of
3130 adding the required subset of sub-fields below. */
3131 get_constraint_for_1 (t, results, true);
3132 gcc_assert (VEC_length (ce_s, *results) == 1);
3133 result = VEC_last (ce_s, *results);
3135 if (result->type == SCALAR
3136 && get_varinfo (result->var)->is_full_var)
3137 /* For single-field vars do not bother about the offset. */
3138 result->offset = 0;
3139 else if (result->type == SCALAR)
3141 /* In languages like C, you can access one past the end of an
3142 array. You aren't allowed to dereference it, so we can
3143 ignore this constraint. When we handle pointer subtraction,
3144 we may have to do something cute here. */
3146 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result->var)->fullsize
3147 && bitmaxsize != 0)
3149 /* It's also not true that the constraint will actually start at the
3150 right offset; it may start in some padding. We only care about
3151 setting the constraint to the first actual field it touches, so
3152 walk to find it. */
3153 struct constraint_expr cexpr = *result;
3154 varinfo_t curr;
3155 VEC_pop (ce_s, *results);
3156 cexpr.offset = 0;
3157 for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
3159 if (ranges_overlap_p (curr->offset, curr->size,
3160 bitpos, bitmaxsize))
3162 cexpr.var = curr->id;
3163 VEC_safe_push (ce_s, heap, *results, &cexpr);
3164 if (address_p)
3165 break;
3168 /* If we are going to take the address of this field then
3169 to be able to compute reachability correctly add at least
3170 the last field of the variable. */
3171 if (address_p
3172 && VEC_length (ce_s, *results) == 0)
3174 curr = get_varinfo (cexpr.var);
3175 while (curr->next != NULL)
3176 curr = curr->next;
3177 cexpr.var = curr->id;
3178 VEC_safe_push (ce_s, heap, *results, &cexpr);
3180 else if (VEC_length (ce_s, *results) == 0)
3181 /* Assert that we found *some* field there. The user couldn't be
3182 accessing *only* padding. */
3183 /* Still the user could access one past the end of an array
3184 embedded in a struct resulting in accessing *only* padding. */
3185 /* Or accessing only padding via type-punning to a type
3186 that has a field just in padding space. */
3188 cexpr.type = SCALAR;
3189 cexpr.var = anything_id;
3190 cexpr.offset = 0;
3191 VEC_safe_push (ce_s, heap, *results, &cexpr);
3194 else if (bitmaxsize == 0)
3196 if (dump_file && (dump_flags & TDF_DETAILS))
3197 fprintf (dump_file, "Access to zero-sized part of variable, "
3198 "ignoring\n");
3200 else
3201 if (dump_file && (dump_flags & TDF_DETAILS))
3202 fprintf (dump_file, "Access past the end of variable, ignoring\n");
3204 else if (result->type == DEREF)
3206 /* If we do not know exactly where the access goes say so. Note
3207 that only for non-structure accesses do we know that we access
3208 at most one subfield of any variable. */
3209 if (bitpos == -1
3210 || bitsize != bitmaxsize
3211 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t)))
3212 result->offset = UNKNOWN_OFFSET;
3213 else
3214 result->offset = bitpos;
3216 else if (result->type == ADDRESSOF)
3218 /* We can end up here for component references on a
3219 VIEW_CONVERT_EXPR <>(&foobar). */
3220 result->type = SCALAR;
3221 result->var = anything_id;
3222 result->offset = 0;
3224 else
3225 gcc_unreachable ();
3229 /* Dereference the constraint expression CONS, and return the result.
3230 DEREF (ADDRESSOF) = SCALAR
3231 DEREF (SCALAR) = DEREF
3232 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3233 This is needed so that we can handle dereferencing DEREF constraints. */
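/* E.g. dereferencing a DEREF constraint, as produced for **p, introduces
   a temporary:  dereftmp = *p;  and the result becomes DEREF (dereftmp),
   which is what the DEREF case below implements.  */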
3235 static void
3236 do_deref (VEC (ce_s, heap) **constraints)
3238 struct constraint_expr *c;
3239 unsigned int i = 0;
3241 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
3243 if (c->type == SCALAR)
3244 c->type = DEREF;
3245 else if (c->type == ADDRESSOF)
3246 c->type = SCALAR;
3247 else if (c->type == DEREF)
3249 struct constraint_expr tmplhs;
3250 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3251 process_constraint (new_constraint (tmplhs, *c));
3252 c->var = tmplhs.var;
3254 else
3255 gcc_unreachable ();
3259 static void get_constraint_for_1 (tree, VEC (ce_s, heap) **, bool);
3261 /* Given a tree T, return the constraint expression for taking the
3262 address of it. */
3264 static void
3265 get_constraint_for_address_of (tree t, VEC (ce_s, heap) **results)
3267 struct constraint_expr *c;
3268 unsigned int i;
3270 get_constraint_for_1 (t, results, true);
3272 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
3274 if (c->type == DEREF)
3275 c->type = SCALAR;
3276 else
3277 c->type = ADDRESSOF;
3281 /* Given a tree T, return the constraint expression for it. */
3283 static void
3284 get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
3286 struct constraint_expr temp;
3288 /* x = integer is all glommed to a single variable, which doesn't
3289 point to anything by itself. That is, of course, unless it is an
3290 integer constant being treated as a pointer, in which case, we
3291 will return that this is really the addressof anything. This
3292 happens below, since it will fall into the default case. The only
3293 case we know something about an integer treated like a pointer is
3294 when it is the NULL pointer, and then we just say it points to
3295 NULL.
3297 Do not do that if -fno-delete-null-pointer-checks though, because
3298 in that case *NULL does not fail, so it _should_ alias *anything.
3299 It is not worth adding a new option or renaming the existing one,
3300 since this case is relatively obscure. */
3301 if ((TREE_CODE (t) == INTEGER_CST
3302 && integer_zerop (t))
3303 /* The only valid CONSTRUCTORs in gimple with pointer typed
3304 elements are zero-initializers. But in IPA mode we also
3305 process global initializers, so verify at least. */
3306 || (TREE_CODE (t) == CONSTRUCTOR
3307 && CONSTRUCTOR_NELTS (t) == 0))
3309 if (flag_delete_null_pointer_checks)
3310 temp.var = nothing_id;
3311 else
3312 temp.var = anything_id;
3313 temp.type = ADDRESSOF;
3314 temp.offset = 0;
3315 VEC_safe_push (ce_s, heap, *results, &temp);
3316 return;
3319 /* String constants are read-only. */
3320 if (TREE_CODE (t) == STRING_CST)
3322 temp.var = readonly_id;
3323 temp.type = SCALAR;
3324 temp.offset = 0;
3325 VEC_safe_push (ce_s, heap, *results, &temp);
3326 return;
3329 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3331 case tcc_expression:
3333 switch (TREE_CODE (t))
3335 case ADDR_EXPR:
3336 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3337 return;
3338 default:;
3340 break;
3342 case tcc_reference:
3344 switch (TREE_CODE (t))
3346 case MEM_REF:
3348 get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3349 TREE_OPERAND (t, 1), results);
3350 do_deref (results);
3351 return;
3353 case ARRAY_REF:
3354 case ARRAY_RANGE_REF:
3355 case COMPONENT_REF:
3356 get_constraint_for_component_ref (t, results, address_p);
3357 return;
3358 case VIEW_CONVERT_EXPR:
3359 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
3360 return;
3361 /* We are missing handling for TARGET_MEM_REF here. */
3362 default:;
3364 break;
3366 case tcc_exceptional:
3368 switch (TREE_CODE (t))
3370 case SSA_NAME:
3372 get_constraint_for_ssa_var (t, results, address_p);
3373 return;
3375 case CONSTRUCTOR:
3377 unsigned int i;
3378 tree val;
3379 VEC (ce_s, heap) *tmp = NULL;
3380 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3382 struct constraint_expr *rhsp;
3383 unsigned j;
3384 get_constraint_for_1 (val, &tmp, address_p);
3385 for (j = 0; VEC_iterate (ce_s, tmp, j, rhsp); ++j)
3386 VEC_safe_push (ce_s, heap, *results, rhsp);
3387 VEC_truncate (ce_s, tmp, 0);
3389 VEC_free (ce_s, heap, tmp);
3390 /* We do not know whether the constructor was complete,
3391 so technically we have to add &NOTHING or &ANYTHING
3392 like we do for an empty constructor as well. */
3393 return;
3395 default:;
3397 break;
3399 case tcc_declaration:
3401 get_constraint_for_ssa_var (t, results, address_p);
3402 return;
3404 default:;
3407 /* The default fallback is a constraint from anything. */
3408 temp.type = ADDRESSOF;
3409 temp.var = anything_id;
3410 temp.offset = 0;
3411 VEC_safe_push (ce_s, heap, *results, &temp);
3414 /* Given a gimple tree T, return the constraint expression vector for it. */
3416 static void
3417 get_constraint_for (tree t, VEC (ce_s, heap) **results)
3419 gcc_assert (VEC_length (ce_s, *results) == 0);
3421 get_constraint_for_1 (t, results, false);
3425 /* Efficiently generates constraints from all entries in *RHSC to all
3426 entries in *LHSC. */
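/* When both vectors have more than one entry an intermediate temporary
   ("allalltmp") is used, so only length (RHSC) + length (LHSC)
   constraints are generated instead of the full cross product.  */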
3428 static void
3429 process_all_all_constraints (VEC (ce_s, heap) *lhsc, VEC (ce_s, heap) *rhsc)
3431 struct constraint_expr *lhsp, *rhsp;
3432 unsigned i, j;
3434 if (VEC_length (ce_s, lhsc) <= 1
3435 || VEC_length (ce_s, rhsc) <= 1)
3437 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
3438 for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); ++j)
3439 process_constraint (new_constraint (*lhsp, *rhsp));
3441 else
3443 struct constraint_expr tmp;
3444 tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3445 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
3446 process_constraint (new_constraint (tmp, *rhsp));
3447 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
3448 process_constraint (new_constraint (*lhsp, tmp));
3452 /* Handle aggregate copies by expanding into copies of the respective
3453 fields of the structures. */
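/* Roughly: if either side is accessed through a pointer (DEREF), fields
   cannot be matched and we fall back to an all-to-all copy with
   UNKNOWN_OFFSET; for a direct copy like s1 = s2 we walk both field
   lists in parallel and only generate copies between fields that
   overlap.  */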
3455 static void
3456 do_structure_copy (tree lhsop, tree rhsop)
3458 struct constraint_expr *lhsp, *rhsp;
3459 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3460 unsigned j;
3462 get_constraint_for (lhsop, &lhsc);
3463 get_constraint_for (rhsop, &rhsc);
3464 lhsp = VEC_index (ce_s, lhsc, 0);
3465 rhsp = VEC_index (ce_s, rhsc, 0);
3466 if (lhsp->type == DEREF
3467 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3468 || rhsp->type == DEREF)
3470 if (lhsp->type == DEREF)
3472 gcc_assert (VEC_length (ce_s, lhsc) == 1);
3473 lhsp->offset = UNKNOWN_OFFSET;
3475 if (rhsp->type == DEREF)
3477 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3478 rhsp->offset = UNKNOWN_OFFSET;
3480 process_all_all_constraints (lhsc, rhsc);
3482 else if (lhsp->type == SCALAR
3483 && (rhsp->type == SCALAR
3484 || rhsp->type == ADDRESSOF))
3486 HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
3487 HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
3488 unsigned k = 0;
3489 get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
3490 get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
3491 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp);)
3493 varinfo_t lhsv, rhsv;
3494 rhsp = VEC_index (ce_s, rhsc, k);
3495 lhsv = get_varinfo (lhsp->var);
3496 rhsv = get_varinfo (rhsp->var);
3497 if (lhsv->may_have_pointers
3498 && ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3499 rhsv->offset + lhsoffset, rhsv->size))
3500 process_constraint (new_constraint (*lhsp, *rhsp));
3501 if (lhsv->offset + rhsoffset + lhsv->size
3502 > rhsv->offset + lhsoffset + rhsv->size)
3504 ++k;
3505 if (k >= VEC_length (ce_s, rhsc))
3506 break;
3508 else
3509 ++j;
3512 else
3513 gcc_unreachable ();
3515 VEC_free (ce_s, heap, lhsc);
3516 VEC_free (ce_s, heap, rhsc);
3519 /* Create a constraint ID = OP. */
3521 static void
3522 make_constraint_to (unsigned id, tree op)
3524 VEC(ce_s, heap) *rhsc = NULL;
3525 struct constraint_expr *c;
3526 struct constraint_expr includes;
3527 unsigned int j;
3529 includes.var = id;
3530 includes.offset = 0;
3531 includes.type = SCALAR;
3533 get_constraint_for (op, &rhsc);
3534 for (j = 0; VEC_iterate (ce_s, rhsc, j, c); j++)
3535 process_constraint (new_constraint (includes, *c));
3536 VEC_free (ce_s, heap, rhsc);
3539 /* Create a constraint ID = &FROM. */
3541 static void
3542 make_constraint_from (varinfo_t vi, int from)
3544 struct constraint_expr lhs, rhs;
3546 lhs.var = vi->id;
3547 lhs.offset = 0;
3548 lhs.type = SCALAR;
3550 rhs.var = from;
3551 rhs.offset = 0;
3552 rhs.type = ADDRESSOF;
3553 process_constraint (new_constraint (lhs, rhs));
3556 /* Create a constraint ID = FROM. */
3558 static void
3559 make_copy_constraint (varinfo_t vi, int from)
3561 struct constraint_expr lhs, rhs;
3563 lhs.var = vi->id;
3564 lhs.offset = 0;
3565 lhs.type = SCALAR;
3567 rhs.var = from;
3568 rhs.offset = 0;
3569 rhs.type = SCALAR;
3570 process_constraint (new_constraint (lhs, rhs));
3573 /* Make constraints necessary to make OP escape. */
3575 static void
3576 make_escape_constraint (tree op)
3578 make_constraint_to (escaped_id, op);
3581 /* Add constraints so that the solution of VI is transitively closed. */
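/* This is done with the two self-constraints below, VI = *VI and
   VI = VI + UNKNOWN: the first repeatedly pulls in everything reachable
   through a dereference, the second adds all sub-fields, so iterating
   them to a fixed point closes the solution.  */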
3583 static void
3584 make_transitive_closure_constraints (varinfo_t vi)
3586 struct constraint_expr lhs, rhs;
3588 /* VAR = *VAR; */
3589 lhs.type = SCALAR;
3590 lhs.var = vi->id;
3591 lhs.offset = 0;
3592 rhs.type = DEREF;
3593 rhs.var = vi->id;
3594 rhs.offset = 0;
3595 process_constraint (new_constraint (lhs, rhs));
3597 /* VAR = VAR + UNKNOWN; */
3598 lhs.type = SCALAR;
3599 lhs.var = vi->id;
3600 lhs.offset = 0;
3601 rhs.type = SCALAR;
3602 rhs.var = vi->id;
3603 rhs.offset = UNKNOWN_OFFSET;
3604 process_constraint (new_constraint (lhs, rhs));
3607 /* Create a new artificial heap variable with NAME.
3608 Return the created variable. */
3610 static varinfo_t
3611 make_heapvar_for (varinfo_t lhs, const char *name)
3613 varinfo_t vi;
3614 tree heapvar = heapvar_lookup (lhs->decl, lhs->offset);
3616 if (heapvar == NULL_TREE)
3618 var_ann_t ann;
3619 heapvar = create_tmp_var_raw (ptr_type_node, name);
3620 DECL_EXTERNAL (heapvar) = 1;
3622 heapvar_insert (lhs->decl, lhs->offset, heapvar);
3624 ann = get_var_ann (heapvar);
3625 ann->is_heapvar = 1;
3628 /* For global vars we need to add a heapvar to the list of referenced
3629 vars of a different function than it was created for originally. */
3630 if (cfun && gimple_referenced_vars (cfun))
3631 add_referenced_var (heapvar);
3633 vi = new_var_info (heapvar, name);
3634 vi->is_artificial_var = true;
3635 vi->is_heap_var = true;
3636 vi->is_unknown_size_var = true;
3637 vi->offset = 0;
3638 vi->fullsize = ~0;
3639 vi->size = ~0;
3640 vi->is_full_var = true;
3641 insert_vi_for_tree (heapvar, vi);
3643 return vi;
3646 /* Create a new artificial heap variable with NAME and make a
3647 constraint from it to LHS. Return the created variable. */
3649 static varinfo_t
3650 make_constraint_from_heapvar (varinfo_t lhs, const char *name)
3652 varinfo_t vi = make_heapvar_for (lhs, name);
3653 make_constraint_from (lhs, vi->id);
3655 return vi;
3658 /* Create a new artificial heap variable with NAME and make a
3659 constraint from it to LHS. Set flags according to a tag used
3660 for tracking restrict pointers. */
3662 static void
3663 make_constraint_from_restrict (varinfo_t lhs, const char *name)
3665 varinfo_t vi;
3666 vi = make_constraint_from_heapvar (lhs, name);
3667 vi->is_restrict_var = 1;
3668 vi->is_global_var = 0;
3669 vi->is_special_var = 1;
3670 vi->may_have_pointers = 0;
3673 /* In IPA mode there are varinfos for different aspects of each
3674 function designator. One for the points-to set of the return
3675 value, one for the variables that are clobbered by the function,
3676 one for its uses and one for each parameter (including a single
3677 glob for remaining variadic arguments). */
3679 enum { fi_clobbers = 1, fi_uses = 2,
3680 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
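/* So, for example, a function  int *f (int *p, int *q)  gets (elsewhere,
   when its varinfo is created) one sub-variable for its clobbers at
   offset fi_clobbers, one for its uses at fi_uses, one for the static
   chain, one for the return value and one per parameter starting at
   fi_parm_base; get_function_part_constraint below indexes into that
   layout.  */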
3682 /* Get a constraint for the requested part of a function designator FI
3683 when operating in IPA mode. */
3685 static struct constraint_expr
3686 get_function_part_constraint (varinfo_t fi, unsigned part)
3688 struct constraint_expr c;
3690 gcc_assert (in_ipa_mode);
3692 if (fi->id == anything_id)
3694 /* ??? We probably should have a ANYFN special variable. */
3695 c.var = anything_id;
3696 c.offset = 0;
3697 c.type = SCALAR;
3699 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3701 varinfo_t ai = first_vi_for_offset (fi, part);
3702 if (ai)
3703 c.var = ai->id;
3704 else
3705 c.var = anything_id;
3706 c.offset = 0;
3707 c.type = SCALAR;
3709 else
3711 c.var = fi->id;
3712 c.offset = part;
3713 c.type = DEREF;
3716 return c;
3719 /* For non-IPA mode, generate constraints necessary for a call on the
3720 RHS. */
3722 static void
3723 handle_rhs_call (gimple stmt, VEC(ce_s, heap) **results)
3725 struct constraint_expr rhsc;
3726 unsigned i;
3727 bool returns_uses = false;
3729 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3731 tree arg = gimple_call_arg (stmt, i);
3732 int flags = gimple_call_arg_flags (stmt, i);
3734 /* If the argument is not used or it does not contain pointers
3735 we can ignore it. */
3736 if ((flags & EAF_UNUSED)
3737 || !could_have_pointers (arg))
3738 continue;
3740 /* As we compute ESCAPED context-insensitive we do not gain
3741 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3742 set. The argument would still get clobbered through the
3743 escape solution.
3744 ??? We might get away with less (and more precise) constraints
3745 if using a temporary for transitively closing things. */
3746 if ((flags & EAF_NOCLOBBER)
3747 && (flags & EAF_NOESCAPE))
3749 varinfo_t uses = get_call_use_vi (stmt);
3750 if (!(flags & EAF_DIRECT))
3751 make_transitive_closure_constraints (uses);
3752 make_constraint_to (uses->id, arg);
3753 returns_uses = true;
3755 else if (flags & EAF_NOESCAPE)
3757 varinfo_t uses = get_call_use_vi (stmt);
3758 varinfo_t clobbers = get_call_clobber_vi (stmt);
3759 if (!(flags & EAF_DIRECT))
3761 make_transitive_closure_constraints (uses);
3762 make_transitive_closure_constraints (clobbers);
3764 make_constraint_to (uses->id, arg);
3765 make_constraint_to (clobbers->id, arg);
3766 returns_uses = true;
3768 else
3769 make_escape_constraint (arg);
3772 /* If we added to the calls uses solution make sure we account for
3773 pointers to it to be returned. */
3774 if (returns_uses)
3776 rhsc.var = get_call_use_vi (stmt)->id;
3777 rhsc.offset = 0;
3778 rhsc.type = SCALAR;
3779 VEC_safe_push (ce_s, heap, *results, &rhsc);
3782 /* The static chain escapes as well. */
3783 if (gimple_call_chain (stmt))
3784 make_escape_constraint (gimple_call_chain (stmt));
3786 /* And if we applied NRV the address of the return slot escapes as well. */
3787 if (gimple_call_return_slot_opt_p (stmt)
3788 && gimple_call_lhs (stmt) != NULL_TREE
3789 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3791 VEC(ce_s, heap) *tmpc = NULL;
3792 struct constraint_expr lhsc, *c;
3793 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
3794 lhsc.var = escaped_id;
3795 lhsc.offset = 0;
3796 lhsc.type = SCALAR;
3797 for (i = 0; VEC_iterate (ce_s, tmpc, i, c); ++i)
3798 process_constraint (new_constraint (lhsc, *c));
3799 VEC_free (ce_s, heap, tmpc);
3802 /* Regular functions return nonlocal memory. */
3803 rhsc.var = nonlocal_id;
3804 rhsc.offset = 0;
3805 rhsc.type = SCALAR;
3806 VEC_safe_push (ce_s, heap, *results, &rhsc);
3809 /* For non-IPA mode, generate constraints necessary for a call
3810 that returns a pointer and assigns it to LHS. This simply makes
3811 the LHS point to global and escaped variables. */
3813 static void
3814 handle_lhs_call (gimple stmt, tree lhs, int flags, VEC(ce_s, heap) *rhsc,
3815 tree fndecl)
3817 VEC(ce_s, heap) *lhsc = NULL;
3819 get_constraint_for (lhs, &lhsc);
3820 /* If the store is to a global decl make sure to
3821 add proper escape constraints. */
3822 lhs = get_base_address (lhs);
3823 if (lhs
3824 && DECL_P (lhs)
3825 && is_global_var (lhs))
3827 struct constraint_expr tmpc;
3828 tmpc.var = escaped_id;
3829 tmpc.offset = 0;
3830 tmpc.type = SCALAR;
3831 VEC_safe_push (ce_s, heap, lhsc, &tmpc);
3834 /* If the call returns an argument unmodified override the rhs
3835 constraints. */
3836 flags = gimple_call_return_flags (stmt);
3837 if (flags & ERF_RETURNS_ARG
3838 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
3840 tree arg;
3841 rhsc = NULL;
3842 arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
3843 get_constraint_for (arg, &rhsc);
3844 process_all_all_constraints (lhsc, rhsc);
3845 VEC_free (ce_s, heap, rhsc);
3847 else if (flags & ERF_NOALIAS)
3849 varinfo_t vi;
3850 struct constraint_expr tmpc;
3851 rhsc = NULL;
3852 vi = make_heapvar_for (get_vi_for_tree (lhs), "HEAP");
3853 /* We delay marking allocated storage global until we know if
3854 it escapes. */
3855 DECL_EXTERNAL (vi->decl) = 0;
3856 vi->is_global_var = 0;
3857 /* If this is not a real malloc call assume the memory was
3858 initialized and thus may point to global memory. All
3859 builtin functions with the malloc attribute behave in a sane way. */
3860 if (!fndecl
3861 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
3862 make_constraint_from (vi, nonlocal_id);
3863 tmpc.var = vi->id;
3864 tmpc.offset = 0;
3865 tmpc.type = ADDRESSOF;
3866 VEC_safe_push (ce_s, heap, rhsc, &tmpc);
3869 process_all_all_constraints (lhsc, rhsc);
3871 VEC_free (ce_s, heap, lhsc);
3874 /* For non-IPA mode, generate constraints necessary for a call of a
3875 const function that returns a pointer in the statement STMT. */
3877 static void
3878 handle_const_call (gimple stmt, VEC(ce_s, heap) **results)
3880 struct constraint_expr rhsc;
3881 unsigned int k;
3883 /* Treat nested const functions the same as pure functions as far
3884 as the static chain is concerned. */
3885 if (gimple_call_chain (stmt))
3887 varinfo_t uses = get_call_use_vi (stmt);
3888 make_transitive_closure_constraints (uses);
3889 make_constraint_to (uses->id, gimple_call_chain (stmt));
3890 rhsc.var = uses->id;
3891 rhsc.offset = 0;
3892 rhsc.type = SCALAR;
3893 VEC_safe_push (ce_s, heap, *results, &rhsc);
3896 /* May return arguments. */
3897 for (k = 0; k < gimple_call_num_args (stmt); ++k)
3899 tree arg = gimple_call_arg (stmt, k);
3901 if (could_have_pointers (arg))
3903 VEC(ce_s, heap) *argc = NULL;
3904 unsigned i;
3905 struct constraint_expr *argp;
3906 get_constraint_for (arg, &argc);
3907 for (i = 0; VEC_iterate (ce_s, argc, i, argp); ++i)
3908 VEC_safe_push (ce_s, heap, *results, argp);
3909 VEC_free(ce_s, heap, argc);
3913 /* May return addresses of globals. */
3914 rhsc.var = nonlocal_id;
3915 rhsc.offset = 0;
3916 rhsc.type = ADDRESSOF;
3917 VEC_safe_push (ce_s, heap, *results, &rhsc);
3920 /* For non-IPA mode, generate constraints necessary for a call to a
3921 pure function in statement STMT. */
3923 static void
3924 handle_pure_call (gimple stmt, VEC(ce_s, heap) **results)
3926 struct constraint_expr rhsc;
3927 unsigned i;
3928 varinfo_t uses = NULL;
3930 /* Memory reached from pointer arguments is call-used. */
3931 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3933 tree arg = gimple_call_arg (stmt, i);
3935 if (could_have_pointers (arg))
3937 if (!uses)
3939 uses = get_call_use_vi (stmt);
3940 make_transitive_closure_constraints (uses);
3942 make_constraint_to (uses->id, arg);
3946 /* The static chain is used as well. */
3947 if (gimple_call_chain (stmt))
3949 if (!uses)
3951 uses = get_call_use_vi (stmt);
3952 make_transitive_closure_constraints (uses);
3954 make_constraint_to (uses->id, gimple_call_chain (stmt));
3957 /* Pure functions may return call-used and nonlocal memory. */
3958 if (uses)
3960 rhsc.var = uses->id;
3961 rhsc.offset = 0;
3962 rhsc.type = SCALAR;
3963 VEC_safe_push (ce_s, heap, *results, &rhsc);
3965 rhsc.var = nonlocal_id;
3966 rhsc.offset = 0;
3967 rhsc.type = SCALAR;
3968 VEC_safe_push (ce_s, heap, *results, &rhsc);
3972 /* Return the varinfo for the callee of CALL. */
3974 static varinfo_t
3975 get_fi_for_callee (gimple call)
3977 tree decl;
3979 /* If we can directly resolve the function being called, do so.
3980 Otherwise, it must be some sort of indirect expression that
3981 we should still be able to handle. */
3982 decl = gimple_call_fndecl (call);
3983 if (decl)
3984 return get_vi_for_tree (decl);
3986 decl = gimple_call_fn (call);
3987 /* The function can be either an SSA name pointer or,
3988 worse, an OBJ_TYPE_REF. In this case we have no
3989 clue and should be getting ANYFN (well, ANYTHING for now). */
3990 if (TREE_CODE (decl) == SSA_NAME)
3992 if (TREE_CODE (decl) == SSA_NAME
3993 && (TREE_CODE (SSA_NAME_VAR (decl)) == PARM_DECL
3994 || TREE_CODE (SSA_NAME_VAR (decl)) == RESULT_DECL)
3995 && SSA_NAME_IS_DEFAULT_DEF (decl))
3996 decl = SSA_NAME_VAR (decl);
3997 return get_vi_for_tree (decl);
3999 else if (TREE_CODE (decl) == INTEGER_CST
4000 || TREE_CODE (decl) == OBJ_TYPE_REF)
4001 return get_varinfo (anything_id);
4002 else
4003 gcc_unreachable ();
4006 /* Walk statement T setting up aliasing constraints according to the
4007 references found in T. This function is the main part of the
4008 constraint builder. AI points to auxiliary alias information used
4009 when building alias sets and computing alias grouping heuristics. */
4011 static void
4012 find_func_aliases (gimple origt)
4014 gimple t = origt;
4015 VEC(ce_s, heap) *lhsc = NULL;
4016 VEC(ce_s, heap) *rhsc = NULL;
4017 struct constraint_expr *c;
4018 varinfo_t fi;
4020 /* Now build constraints expressions. */
4021 if (gimple_code (t) == GIMPLE_PHI)
4023 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (gimple_phi_result (t))));
4025 /* Only care about pointers and structures containing
4026 pointers. */
4027 if (could_have_pointers (gimple_phi_result (t)))
4029 size_t i;
4030 unsigned int j;
4032 /* For a phi node, assign all the arguments to
4033 the result. */
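/* E.g.  x_3 = PHI <a_1, b_2>  simply yields the copy constraints
   x_3 = a_1  and  x_3 = b_2.  */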
4034 get_constraint_for (gimple_phi_result (t), &lhsc);
4035 for (i = 0; i < gimple_phi_num_args (t); i++)
4037 tree strippedrhs = PHI_ARG_DEF (t, i);
4039 STRIP_NOPS (strippedrhs);
4040 get_constraint_for (gimple_phi_arg_def (t, i), &rhsc);
4042 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
4044 struct constraint_expr *c2;
4045 while (VEC_length (ce_s, rhsc) > 0)
4047 c2 = VEC_last (ce_s, rhsc);
4048 process_constraint (new_constraint (*c, *c2));
4049 VEC_pop (ce_s, rhsc);
4055 /* In IPA mode, we need to generate constraints to pass call
4056 arguments through their calls. There are two cases,
4057 either a GIMPLE_CALL returning a value, or just a plain
4058 GIMPLE_CALL when the return value is not used.
4060 In non-ipa mode, we need to generate constraints for each
4061 pointer passed by address. */
4062 else if (is_gimple_call (t))
4064 tree fndecl = gimple_call_fndecl (t);
4065 if (fndecl != NULL_TREE
4066 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
4067 /* ??? All builtins that are handled here need to be handled
4068 in the alias-oracle query functions explicitly! */
4069 switch (DECL_FUNCTION_CODE (fndecl))
4071 /* All the following functions return a pointer to the same object
4072 as their first argument points to. The functions do not add
4073 to the ESCAPED solution. The functions make the memory pointed to
4074 by the first argument point to what the memory pointed to by the
4075 second argument points to. */
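/* E.g.  d = memcpy (dst, src, n)  roughly yields  d = dst  and
   *dst = *src.  */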
4076 case BUILT_IN_STRCPY:
4077 case BUILT_IN_STRNCPY:
4078 case BUILT_IN_BCOPY:
4079 case BUILT_IN_MEMCPY:
4080 case BUILT_IN_MEMMOVE:
4081 case BUILT_IN_MEMPCPY:
4082 case BUILT_IN_STPCPY:
4083 case BUILT_IN_STPNCPY:
4084 case BUILT_IN_STRCAT:
4085 case BUILT_IN_STRNCAT:
4087 tree res = gimple_call_lhs (t);
4088 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4089 == BUILT_IN_BCOPY ? 1 : 0));
4090 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4091 == BUILT_IN_BCOPY ? 0 : 1));
4092 if (res != NULL_TREE)
4094 get_constraint_for (res, &lhsc);
4095 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4096 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4097 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY)
4098 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4099 else
4100 get_constraint_for (dest, &rhsc);
4101 process_all_all_constraints (lhsc, rhsc);
4102 VEC_free (ce_s, heap, lhsc);
4103 VEC_free (ce_s, heap, rhsc);
4105 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4106 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4107 do_deref (&lhsc);
4108 do_deref (&rhsc);
4109 process_all_all_constraints (lhsc, rhsc);
4110 VEC_free (ce_s, heap, lhsc);
4111 VEC_free (ce_s, heap, rhsc);
4112 return;
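/* memset does not copy any pointers; below, the destination contents are
   made to point to NOTHING for a zero pattern (when
   -fdelete-null-pointer-checks is in effect), otherwise to the INTEGER
   catch-all.  */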
4114 case BUILT_IN_MEMSET:
4116 tree res = gimple_call_lhs (t);
4117 tree dest = gimple_call_arg (t, 0);
4118 unsigned i;
4119 ce_s *lhsp;
4120 struct constraint_expr ac;
4121 if (res != NULL_TREE)
4123 get_constraint_for (res, &lhsc);
4124 get_constraint_for (dest, &rhsc);
4125 process_all_all_constraints (lhsc, rhsc);
4126 VEC_free (ce_s, heap, lhsc);
4127 VEC_free (ce_s, heap, rhsc);
4129 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4130 do_deref (&lhsc);
4131 if (flag_delete_null_pointer_checks
4132 && integer_zerop (gimple_call_arg (t, 1)))
4134 ac.type = ADDRESSOF;
4135 ac.var = nothing_id;
4137 else
4139 ac.type = SCALAR;
4140 ac.var = integer_id;
4142 ac.offset = 0;
4143 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
4144 process_constraint (new_constraint (*lhsp, ac));
4145 VEC_free (ce_s, heap, lhsc);
4146 return;
4148 /* All the following functions do not return pointers, do not
4149 modify the points-to sets of memory reachable from their
4150 arguments and do not add to the ESCAPED solution. */
4151 case BUILT_IN_SINCOS:
4152 case BUILT_IN_SINCOSF:
4153 case BUILT_IN_SINCOSL:
4154 case BUILT_IN_FREXP:
4155 case BUILT_IN_FREXPF:
4156 case BUILT_IN_FREXPL:
4157 case BUILT_IN_GAMMA_R:
4158 case BUILT_IN_GAMMAF_R:
4159 case BUILT_IN_GAMMAL_R:
4160 case BUILT_IN_LGAMMA_R:
4161 case BUILT_IN_LGAMMAF_R:
4162 case BUILT_IN_LGAMMAL_R:
4163 case BUILT_IN_MODF:
4164 case BUILT_IN_MODFF:
4165 case BUILT_IN_MODFL:
4166 case BUILT_IN_REMQUO:
4167 case BUILT_IN_REMQUOF:
4168 case BUILT_IN_REMQUOL:
4169 case BUILT_IN_FREE:
4170 return;
4171 /* Trampolines are special - they set up passing the static
4172 frame. */
4173 case BUILT_IN_INIT_TRAMPOLINE:
4175 tree tramp = gimple_call_arg (t, 0);
4176 tree nfunc = gimple_call_arg (t, 1);
4177 tree frame = gimple_call_arg (t, 2);
4178 unsigned i;
4179 struct constraint_expr lhs, *rhsp;
4180 if (in_ipa_mode)
4182 varinfo_t nfi = NULL;
4183 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4184 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4185 if (nfi)
4187 lhs = get_function_part_constraint (nfi, fi_static_chain);
4188 get_constraint_for (frame, &rhsc);
4189 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
4190 process_constraint (new_constraint (lhs, *rhsp));
4191 VEC_free (ce_s, heap, rhsc);
4193 /* Make the frame point to the function for
4194 the trampoline adjustment call. */
4195 get_constraint_for (tramp, &lhsc);
4196 do_deref (&lhsc);
4197 get_constraint_for (nfunc, &rhsc);
4198 process_all_all_constraints (lhsc, rhsc);
4199 VEC_free (ce_s, heap, rhsc);
4200 VEC_free (ce_s, heap, lhsc);
4202 return;
4205 /* Else fallthru to generic handling which will let
4206 the frame escape. */
4207 break;
4209 case BUILT_IN_ADJUST_TRAMPOLINE:
4211 tree tramp = gimple_call_arg (t, 0);
4212 tree res = gimple_call_lhs (t);
4213 if (in_ipa_mode && res)
4215 get_constraint_for (res, &lhsc);
4216 get_constraint_for (tramp, &rhsc);
4217 do_deref (&rhsc);
4218 process_all_all_constraints (lhsc, rhsc);
4219 VEC_free (ce_s, heap, rhsc);
4220 VEC_free (ce_s, heap, lhsc);
4222 return;
4224 /* Variadic arguments need to be handled in IPA
4225 mode as well. */
4226 case BUILT_IN_VA_START:
4228 if (in_ipa_mode)
4230 tree valist = gimple_call_arg (t, 0);
4231 struct constraint_expr rhs, *lhsp;
4232 unsigned i;
4233 /* The va_list gets access to pointers in variadic
4234 arguments. */
4235 fi = lookup_vi_for_tree (cfun->decl);
4236 gcc_assert (fi != NULL);
4237 get_constraint_for (valist, &lhsc);
4238 do_deref (&lhsc);
4239 rhs = get_function_part_constraint (fi, ~0);
4240 rhs.type = ADDRESSOF;
4241 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
4242 process_constraint (new_constraint (*lhsp, rhs));
4243 VEC_free (ce_s, heap, lhsc);
4244 /* va_list is clobbered. */
4245 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4246 return;
4248 break;
4250 /* va_end doesn't have any effect that matters. */
4251 case BUILT_IN_VA_END:
4252 return;
4253 /* Alternate return. Simply give up for now. */
4254 case BUILT_IN_RETURN:
4256 fi = NULL;
4257 if (!in_ipa_mode
4258 || !(fi = get_vi_for_tree (cfun->decl)))
4259 make_constraint_from (get_varinfo (escaped_id), anything_id);
4260 else if (in_ipa_mode
4261 && fi != NULL)
4263 struct constraint_expr lhs, rhs;
4264 lhs = get_function_part_constraint (fi, fi_result);
4265 rhs.var = anything_id;
4266 rhs.offset = 0;
4267 rhs.type = SCALAR;
4268 process_constraint (new_constraint (lhs, rhs));
4270 return;
4272 /* printf-style functions may have hooks to set pointers to
4273 point somewhere into the generated string. Leave them
4274 for a later exercise... */
4275 default:
4276 /* Fallthru to general call handling. */;
4278 if (!in_ipa_mode
4279 || (fndecl
4280 && (!(fi = lookup_vi_for_tree (fndecl))
4281 || !fi->is_fn_info)))
4283 VEC(ce_s, heap) *rhsc = NULL;
4284 int flags = gimple_call_flags (t);
4286 /* Const functions can return their arguments and addresses
4287 of global memory but not of escaped memory. */
4288 if (flags & (ECF_CONST|ECF_NOVOPS))
4290 if (gimple_call_lhs (t)
4291 && could_have_pointers (gimple_call_lhs (t)))
4292 handle_const_call (t, &rhsc);
4294 /* Pure functions can return addresses in and of memory
4295 reachable from their arguments, but they are not an escape
4296 point for reachable memory of their arguments. */
4297 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4298 handle_pure_call (t, &rhsc);
4299 else
4300 handle_rhs_call (t, &rhsc);
4301 if (gimple_call_lhs (t)
4302 && could_have_pointers (gimple_call_lhs (t)))
4303 handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
4304 VEC_free (ce_s, heap, rhsc);
4306 else
4308 tree lhsop;
4309 unsigned j;
4311 fi = get_fi_for_callee (t);
4313 /* Assign all the passed arguments to the appropriate incoming
4314 parameters of the function. */
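/* E.g. a direct call  foo (a, b)  yields  foo.arg0 = a  and
   foo.arg1 = b  against FOO's incoming parameter sub-variables.  */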
4315 for (j = 0; j < gimple_call_num_args (t); j++)
4317 struct constraint_expr lhs;
4318 struct constraint_expr *rhsp;
4319 tree arg = gimple_call_arg (t, j);
4321 if (!could_have_pointers (arg))
4322 continue;
4324 get_constraint_for (arg, &rhsc);
4325 lhs = get_function_part_constraint (fi, fi_parm_base + j);
4326 while (VEC_length (ce_s, rhsc) != 0)
4328 rhsp = VEC_last (ce_s, rhsc);
4329 process_constraint (new_constraint (lhs, *rhsp));
4330 VEC_pop (ce_s, rhsc);
4334 /* If we are returning a value, assign it to the result. */
4335 lhsop = gimple_call_lhs (t);
4336 if (lhsop
4337 && type_could_have_pointers (TREE_TYPE (lhsop)))
4339 struct constraint_expr rhs;
4340 struct constraint_expr *lhsp;
4342 get_constraint_for (lhsop, &lhsc);
4343 rhs = get_function_part_constraint (fi, fi_result);
4344 if (fndecl
4345 && DECL_RESULT (fndecl)
4346 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4348 VEC(ce_s, heap) *tem = NULL;
4349 VEC_safe_push (ce_s, heap, tem, &rhs);
4350 do_deref (&tem);
4351 rhs = *VEC_index (ce_s, tem, 0);
4352 VEC_free (ce_s, heap, tem);
4354 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
4355 process_constraint (new_constraint (*lhsp, rhs));
4358 /* If we pass the result decl by reference, honor that. */
4359 if (lhsop
4360 && fndecl
4361 && DECL_RESULT (fndecl)
4362 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4364 struct constraint_expr lhs;
4365 struct constraint_expr *rhsp;
4367 get_constraint_for_address_of (lhsop, &rhsc);
4368 lhs = get_function_part_constraint (fi, fi_result);
4369 for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
4370 process_constraint (new_constraint (lhs, *rhsp));
4371 VEC_free (ce_s, heap, rhsc);
4374 /* If we use a static chain, pass it along. */
4375 if (gimple_call_chain (t))
4377 struct constraint_expr lhs;
4378 struct constraint_expr *rhsp;
4380 get_constraint_for (gimple_call_chain (t), &rhsc);
4381 lhs = get_function_part_constraint (fi, fi_static_chain);
4382 for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
4383 process_constraint (new_constraint (lhs, *rhsp));
4387 /* Otherwise, just a regular assignment statement. Only care about
4388 operations with a pointer result; others are dealt with as escape
4389 points if they have pointer operands. */
4390 else if (is_gimple_assign (t)
4391 && type_could_have_pointers (TREE_TYPE (gimple_assign_lhs (t))))
4393 /* Otherwise, just a regular assignment statement. */
4394 tree lhsop = gimple_assign_lhs (t);
4395 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4397 if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4398 do_structure_copy (lhsop, rhsop);
4399 else
4401 struct constraint_expr temp;
4402 get_constraint_for (lhsop, &lhsc);
4404 if (gimple_assign_rhs_code (t) == POINTER_PLUS_EXPR)
4405 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4406 gimple_assign_rhs2 (t), &rhsc);
4407 else if (gimple_assign_rhs_code (t) == BIT_AND_EXPR
4408 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
4410 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4411 the pointer. Handle it by offsetting it by UNKNOWN. */
4412 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4413 NULL_TREE, &rhsc);
4415 else if ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
4416 && !(POINTER_TYPE_P (gimple_expr_type (t))
4417 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4418 || gimple_assign_single_p (t))
4419 get_constraint_for (rhsop, &rhsc);
4420 else
4422 temp.type = ADDRESSOF;
4423 temp.var = anything_id;
4424 temp.offset = 0;
4425 VEC_safe_push (ce_s, heap, rhsc, &temp);
4427 process_all_all_constraints (lhsc, rhsc);
4429 /* If there is a store to a global variable the rhs escapes. */
4430 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4431 && DECL_P (lhsop)
4432 && is_global_var (lhsop)
4433 && (!in_ipa_mode
4434 || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
4435 make_escape_constraint (rhsop);
4436 /* If this is a conversion of a non-restrict pointer to a
4437 restrict pointer track it with a new heapvar. */
4438 else if (gimple_assign_cast_p (t)
4439 && POINTER_TYPE_P (TREE_TYPE (rhsop))
4440 && POINTER_TYPE_P (TREE_TYPE (lhsop))
4441 && !TYPE_RESTRICT (TREE_TYPE (rhsop))
4442 && TYPE_RESTRICT (TREE_TYPE (lhsop)))
4443 make_constraint_from_restrict (get_vi_for_tree (lhsop),
4444 "CAST_RESTRICT");
4446 /* For conversions of pointers to non-pointers the pointer escapes. */
4447 else if (gimple_assign_cast_p (t)
4448 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (t)))
4449 && !POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (t))))
4451 make_escape_constraint (gimple_assign_rhs1 (t));
4453 /* Handle escapes through return. */
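/* E.g. for  return p;  this makes P's points-to set escape in non-IPA
   mode and constrains the current function's .result sub-variable by P
   in IPA mode.  */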
4454 else if (gimple_code (t) == GIMPLE_RETURN
4455 && gimple_return_retval (t) != NULL_TREE
4456 && could_have_pointers (gimple_return_retval (t)))
4458 fi = NULL;
4459 if (!in_ipa_mode
4460 || !(fi = get_vi_for_tree (cfun->decl)))
4461 make_escape_constraint (gimple_return_retval (t));
4462 else if (in_ipa_mode
4463 && fi != NULL)
4465 struct constraint_expr lhs;
4466 struct constraint_expr *rhsp;
4467 unsigned i;
4469 lhs = get_function_part_constraint (fi, fi_result);
4470 get_constraint_for (gimple_return_retval (t), &rhsc);
4471 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
4472 process_constraint (new_constraint (lhs, *rhsp));
4475 /* Handle asms conservatively by adding escape constraints to everything. */
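/* E.g. an output operand with an "=m" constraint makes the address of
   the operand escape, and any pointer output is additionally made to
   point to NONLOCAL memory.  */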
4476 else if (gimple_code (t) == GIMPLE_ASM)
4478 unsigned i, noutputs;
4479 const char **oconstraints;
4480 const char *constraint;
4481 bool allows_mem, allows_reg, is_inout;
4483 noutputs = gimple_asm_noutputs (t);
4484 oconstraints = XALLOCAVEC (const char *, noutputs);
4486 for (i = 0; i < noutputs; ++i)
4488 tree link = gimple_asm_output_op (t, i);
4489 tree op = TREE_VALUE (link);
4491 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4492 oconstraints[i] = constraint;
4493 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4494 &allows_reg, &is_inout);
4496 /* A memory constraint makes the address of the operand escape. */
4497 if (!allows_reg && allows_mem)
4498 make_escape_constraint (build_fold_addr_expr (op));
4500 /* The asm may read global memory, so outputs may point to
4501 any global memory. */
4502 if (op && could_have_pointers (op))
4504 VEC(ce_s, heap) *lhsc = NULL;
4505 struct constraint_expr rhsc, *lhsp;
4506 unsigned j;
4507 get_constraint_for (op, &lhsc);
4508 rhsc.var = nonlocal_id;
4509 rhsc.offset = 0;
4510 rhsc.type = SCALAR;
4511 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
4512 process_constraint (new_constraint (*lhsp, rhsc));
4513 VEC_free (ce_s, heap, lhsc);
4516 for (i = 0; i < gimple_asm_ninputs (t); ++i)
4518 tree link = gimple_asm_input_op (t, i);
4519 tree op = TREE_VALUE (link);
4521 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4523 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
4524 &allows_mem, &allows_reg);
4526 /* A memory constraint makes the address of the operand escape. */
4527 if (!allows_reg && allows_mem)
4528 make_escape_constraint (build_fold_addr_expr (op));
4529 /* Strictly we'd only need the constraint to ESCAPED if
4530 the asm clobbers memory, otherwise using something
4531 along the lines of per-call clobbers/uses would be enough. */
4532 else if (op && could_have_pointers (op))
4533 make_escape_constraint (op);
4537 VEC_free (ce_s, heap, rhsc);
4538 VEC_free (ce_s, heap, lhsc);
4542 /* Create a constraint adding to the clobber set of FI the memory
4543 pointed to by PTR. */
4545 static void
4546 process_ipa_clobber (varinfo_t fi, tree ptr)
4548 VEC(ce_s, heap) *ptrc = NULL;
4549 struct constraint_expr *c, lhs;
4550 unsigned i;
4551 get_constraint_for (ptr, &ptrc);
4552 lhs = get_function_part_constraint (fi, fi_clobbers);
4553 for (i = 0; VEC_iterate (ce_s, ptrc, i, c); i++)
4554 process_constraint (new_constraint (lhs, *c));
4555 VEC_free (ce_s, heap, ptrc);
4558 /* Walk statement T setting up clobber and use constraints according to the
4559 references found in T. This function is a main part of the
4560 IPA constraint builder. */
4562 static void
4563 find_func_clobbers (gimple origt)
4565 gimple t = origt;
4566 VEC(ce_s, heap) *lhsc = NULL;
4567 VEC(ce_s, heap) *rhsc = NULL;
4568 varinfo_t fi;
4570 /* Add constraints for clobbered/used in IPA mode.
4571 We are not interested in what automatic variables are clobbered
4572 or used, as we only use the information in the caller, to which
4573 they do not escape. */
4574 gcc_assert (in_ipa_mode);
4576 /* If the stmt refers to memory in any way it had better have a VUSE. */
4577 if (gimple_vuse (t) == NULL_TREE)
4578 return;
4580 /* We'd better have function information for the current function. */
4581 fi = lookup_vi_for_tree (cfun->decl);
4582 gcc_assert (fi != NULL);
4584 /* Account for stores in assignments and calls. */
4585 if (gimple_vdef (t) != NULL_TREE
4586 && gimple_has_lhs (t))
4588 tree lhs = gimple_get_lhs (t);
4589 tree tem = lhs;
4590 while (handled_component_p (tem))
4591 tem = TREE_OPERAND (tem, 0);
4592 if ((DECL_P (tem)
4593 && !auto_var_in_fn_p (tem, cfun->decl))
4594 || INDIRECT_REF_P (tem)
4595 || (TREE_CODE (tem) == MEM_REF
4596 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4597 && auto_var_in_fn_p
4598 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
4600 struct constraint_expr lhsc, *rhsp;
4601 unsigned i;
4602 lhsc = get_function_part_constraint (fi, fi_clobbers);
4603 get_constraint_for_address_of (lhs, &rhsc);
4604 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
4605 process_constraint (new_constraint (lhsc, *rhsp));
4606 VEC_free (ce_s, heap, rhsc);
4610 /* Account for uses in assignments and returns. */
4611 if (gimple_assign_single_p (t)
4612 || (gimple_code (t) == GIMPLE_RETURN
4613 && gimple_return_retval (t) != NULL_TREE))
4615 tree rhs = (gimple_assign_single_p (t)
4616 ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
4617 tree tem = rhs;
4618 while (handled_component_p (tem))
4619 tem = TREE_OPERAND (tem, 0);
4620 if ((DECL_P (tem)
4621 && !auto_var_in_fn_p (tem, cfun->decl))
4622 || INDIRECT_REF_P (tem)
4623 || (TREE_CODE (tem) == MEM_REF
4624 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4625 && auto_var_in_fn_p
4626 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
4628 struct constraint_expr lhs, *rhsp;
4629 unsigned i;
4630 lhs = get_function_part_constraint (fi, fi_uses);
4631 get_constraint_for_address_of (rhs, &rhsc);
4632 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
4633 process_constraint (new_constraint (lhs, *rhsp));
4634 VEC_free (ce_s, heap, rhsc);
4638 if (is_gimple_call (t))
4640 varinfo_t cfi = NULL;
4641 tree decl = gimple_call_fndecl (t);
4642 struct constraint_expr lhs, rhs;
4643 unsigned i, j;
4645 /* For builtins we do not have separate function info. For those
4646 builtins we do not generate escapes for, we have to generate clobbers/uses. */
4647 if (decl
4648 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
4649 switch (DECL_FUNCTION_CODE (decl))
4651 /* The following functions use and clobber memory pointed to
4652 by their arguments. */
4653 case BUILT_IN_STRCPY:
4654 case BUILT_IN_STRNCPY:
4655 case BUILT_IN_BCOPY:
4656 case BUILT_IN_MEMCPY:
4657 case BUILT_IN_MEMMOVE:
4658 case BUILT_IN_MEMPCPY:
4659 case BUILT_IN_STPCPY:
4660 case BUILT_IN_STPNCPY:
4661 case BUILT_IN_STRCAT:
4662 case BUILT_IN_STRNCAT:
4664 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4665 == BUILT_IN_BCOPY ? 1 : 0));
4666 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4667 == BUILT_IN_BCOPY ? 0 : 1));
4668 unsigned i;
4669 struct constraint_expr *rhsp, *lhsp;
4670 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4671 lhs = get_function_part_constraint (fi, fi_clobbers);
4672 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); i++)
4673 process_constraint (new_constraint (lhs, *lhsp));
4674 VEC_free (ce_s, heap, lhsc);
4675 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4676 lhs = get_function_part_constraint (fi, fi_uses);
4677 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
4678 process_constraint (new_constraint (lhs, *rhsp));
4679 VEC_free (ce_s, heap, rhsc);
4680 return;
4682 /* The following function clobbers memory pointed to by
4683 its argument. */
4684 case BUILT_IN_MEMSET:
4686 tree dest = gimple_call_arg (t, 0);
4687 unsigned i;
4688 ce_s *lhsp;
4689 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4690 lhs = get_function_part_constraint (fi, fi_clobbers);
4691 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); i++)
4692 process_constraint (new_constraint (lhs, *lhsp));
4693 VEC_free (ce_s, heap, lhsc);
4694 return;
4696 /* The following functions clobber their second and third
4697 arguments. */
4698 case BUILT_IN_SINCOS:
4699 case BUILT_IN_SINCOSF:
4700 case BUILT_IN_SINCOSL:
4702 process_ipa_clobber (fi, gimple_call_arg (t, 1));
4703 process_ipa_clobber (fi, gimple_call_arg (t, 2));
4704 return;
4706 /* The following functions clobber their second argument. */
4707 case BUILT_IN_FREXP:
4708 case BUILT_IN_FREXPF:
4709 case BUILT_IN_FREXPL:
4710 case BUILT_IN_LGAMMA_R:
4711 case BUILT_IN_LGAMMAF_R:
4712 case BUILT_IN_LGAMMAL_R:
4713 case BUILT_IN_GAMMA_R:
4714 case BUILT_IN_GAMMAF_R:
4715 case BUILT_IN_GAMMAL_R:
4716 case BUILT_IN_MODF:
4717 case BUILT_IN_MODFF:
4718 case BUILT_IN_MODFL:
4720 process_ipa_clobber (fi, gimple_call_arg (t, 1));
4721 return;
4723 /* The following functions clobber their third argument. */
4724 case BUILT_IN_REMQUO:
4725 case BUILT_IN_REMQUOF:
4726 case BUILT_IN_REMQUOL:
4728 process_ipa_clobber (fi, gimple_call_arg (t, 2));
4729 return;
4731 /* The following functions neither read nor clobber memory. */
4732 case BUILT_IN_FREE:
4733 return;
4734 /* Trampolines are of no interest to us. */
4735 case BUILT_IN_INIT_TRAMPOLINE:
4736 case BUILT_IN_ADJUST_TRAMPOLINE:
4737 return;
4738 case BUILT_IN_VA_START:
4739 case BUILT_IN_VA_END:
4740 return;
4741 /* printf-style functions may have hooks to set pointers to
4742 point somewhere into the generated string. Leave them
4743 for a later exercise... */
4744 default:
4745 /* Fallthru to general call handling. */;
4748 /* Parameters passed by value are used. */
4749 lhs = get_function_part_constraint (fi, fi_uses);
4750 for (i = 0; i < gimple_call_num_args (t); i++)
4752 struct constraint_expr *rhsp;
4753 tree arg = gimple_call_arg (t, i);
4755 if (TREE_CODE (arg) == SSA_NAME
4756 || is_gimple_min_invariant (arg))
4757 continue;
4759 get_constraint_for_address_of (arg, &rhsc);
4760 for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
4761 process_constraint (new_constraint (lhs, *rhsp));
4762 VEC_free (ce_s, heap, rhsc);
4765 /* Build constraints for propagating clobbers/uses along the
4766 callgraph edges. */
4767 cfi = get_fi_for_callee (t);
4768 if (cfi->id == anything_id)
4770 if (gimple_vdef (t))
4771 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
4772 anything_id);
4773 make_constraint_from (first_vi_for_offset (fi, fi_uses),
4774 anything_id);
4775 return;
4778 /* For callees without function info (that's external functions),
4779 ESCAPED is clobbered and used. */
4780 if (gimple_call_fndecl (t)
4781 && !cfi->is_fn_info)
4783 varinfo_t vi;
4785 if (gimple_vdef (t))
4786 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
4787 escaped_id);
4788 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
4790 /* Also honor the call statement use/clobber info. */
4791 if ((vi = lookup_call_clobber_vi (t)) != NULL)
4792 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
4793 vi->id);
4794 if ((vi = lookup_call_use_vi (t)) != NULL)
4795 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
4796 vi->id);
4797 return;
4800 /* Otherwise the caller clobbers and uses what the callee does.
4801 ??? This should use a new complex constraint that filters
4802 local variables of the callee. */
4803 if (gimple_vdef (t))
4805 lhs = get_function_part_constraint (fi, fi_clobbers);
4806 rhs = get_function_part_constraint (cfi, fi_clobbers);
4807 process_constraint (new_constraint (lhs, rhs));
4809 lhs = get_function_part_constraint (fi, fi_uses);
4810 rhs = get_function_part_constraint (cfi, fi_uses);
4811 process_constraint (new_constraint (lhs, rhs));
4813 else if (gimple_code (t) == GIMPLE_ASM)
4815 /* ??? Ick. We can do better. */
4816 if (gimple_vdef (t))
4817 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
4818 anything_id);
4819 make_constraint_from (first_vi_for_offset (fi, fi_uses),
4820 anything_id);
4823 VEC_free (ce_s, heap, rhsc);
4827 /* Find the first varinfo in the same variable as START that overlaps with
4828 OFFSET. Return NULL if we can't find one. */
4830 static varinfo_t
4831 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
4833 /* If the offset is outside of the variable, bail out. */
4834 if (offset >= start->fullsize)
4835 return NULL;
4837 /* If we cannot reach offset from start, look up the first field
4838 and start from there. */
4839 if (start->offset > offset)
4840 start = lookup_vi_for_tree (start->decl);
4842 while (start)
4844 /* We may not find a variable in the field list with the actual
4845 offset when we have glommed a structure to a variable.
4846 In that case, however, offset should still be within the size
4847 of the variable. */
4848 if (offset >= start->offset
4849 && (offset - start->offset) < start->size)
4850 return start;
4852 start = start->next;
4855 return NULL;
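/* For instance, a variable S of type  struct { int a; int *p; }  has
   sub-variables for A at offset 0 and P at offset 32 (assuming a 32-bit
   int); first_vi_for_offset (S, 32) then returns the varinfo for P, while
   an offset at or beyond S's full size yields NULL.  */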
4858 /* Find the first varinfo in the same variable as START that overlaps with
4859 OFFSET. If there is no such varinfo the varinfo directly preceding
4860 OFFSET is returned. */
4862 static varinfo_t
4863 first_or_preceding_vi_for_offset (varinfo_t start,
4864 unsigned HOST_WIDE_INT offset)
4866 /* If we cannot reach offset from start, look up the first field
4867 and start from there. */
4868 if (start->offset > offset)
4869 start = lookup_vi_for_tree (start->decl);
4871 /* We may not find a variable in the field list with the actual
4872 offset when we have glommed a structure to a variable.
4873 In that case, however, offset should still be within the size
4874 of the variable.
4875 If we got beyond the offset we look for, return the field
4876 directly preceding the offset, which may be the last field. */
4877 while (start->next
4878 && offset >= start->offset
4879 && !((offset - start->offset) < start->size))
4880 start = start->next;
4882 return start;
4886 /* This structure is used during pushing fields onto the fieldstack
4887 to track the offset of the field, since bitpos_of_field gives it
4888 relative to its immediate containing type, and we want it relative
4889 to the ultimate containing object. */
4891 struct fieldoff
4893 /* Offset from the start of the ultimate containing object to this field. */
4894 HOST_WIDE_INT offset;
4896 /* Size, in bits, of the field. */
4897 unsigned HOST_WIDE_INT size;
4899 unsigned has_unknown_size : 1;
4901 unsigned may_have_pointers : 1;
4903 unsigned only_restrict_pointers : 1;
4905 typedef struct fieldoff fieldoff_s;
4907 DEF_VEC_O(fieldoff_s);
4908 DEF_VEC_ALLOC_O(fieldoff_s,heap);
4910 /* qsort comparison function for two fieldoff's PA and PB */
4912 static int
4913 fieldoff_compare (const void *pa, const void *pb)
4915 const fieldoff_s *foa = (const fieldoff_s *)pa;
4916 const fieldoff_s *fob = (const fieldoff_s *)pb;
4917 unsigned HOST_WIDE_INT foasize, fobsize;
4919 if (foa->offset < fob->offset)
4920 return -1;
4921 else if (foa->offset > fob->offset)
4922 return 1;
4924 foasize = foa->size;
4925 fobsize = fob->size;
4926 if (foasize < fobsize)
4927 return -1;
4928 else if (foasize > fobsize)
4929 return 1;
4930 return 0;
4933 /* Sort a fieldstack according to the field offset and sizes. */
4934 static void
4935 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
4937 qsort (VEC_address (fieldoff_s, fieldstack),
4938 VEC_length (fieldoff_s, fieldstack),
4939 sizeof (fieldoff_s),
4940 fieldoff_compare);
4943 /* Return true if V is a tree that we can have subvars for.
4944 Normally, this is any aggregate type. Also complex
4945 types which are not gimple registers can have subvars. */
4947 static inline bool
4948 var_can_have_subvars (const_tree v)
4950 /* Volatile variables should never have subvars. */
4951 if (TREE_THIS_VOLATILE (v))
4952 return false;
4954 /* Non decls or memory tags can never have subvars. */
4955 if (!DECL_P (v))
4956 return false;
4958 /* Aggregates without overlapping fields can have subvars. */
4959 if (TREE_CODE (TREE_TYPE (v)) == RECORD_TYPE)
4960 return true;
4962 return false;
4965 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
4966 the fields of TYPE onto fieldstack, recording their offsets along
4967 the way.
4969 OFFSET is used to keep track of the offset in this entire
4970 structure, rather than just the immediately containing structure.
4971 Returns false if the caller is supposed to handle the field we
4972 recursed for. */
4974 static bool
4975 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4976 HOST_WIDE_INT offset, bool must_have_pointers_p)
4978 tree field;
4979 bool empty_p = true;
4981 if (TREE_CODE (type) != RECORD_TYPE)
4982 return false;
4984 /* If the vector of fields is growing too big, bail out early.
4985 Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE; make
4986 sure that check fails. */
4987 if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
4988 return false;
4990 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4991 if (TREE_CODE (field) == FIELD_DECL)
4993 bool push = false;
4994 HOST_WIDE_INT foff = bitpos_of_field (field);
4996 if (!var_can_have_subvars (field)
4997 || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4998 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
4999 push = true;
5000 else if (!push_fields_onto_fieldstack
5001 (TREE_TYPE (field), fieldstack, offset + foff,
5002 must_have_pointers_p)
5003 && (DECL_SIZE (field)
5004 && !integer_zerop (DECL_SIZE (field))))
5005 /* Empty structures may have actual size, like in C++. So
5006 if we did not push any subfields and the size is
5007 nonzero, push the field itself onto the stack. */
5008 push = true;
5010 if (push)
5012 fieldoff_s *pair = NULL;
5013 bool has_unknown_size = false;
5015 if (!VEC_empty (fieldoff_s, *fieldstack))
5016 pair = VEC_last (fieldoff_s, *fieldstack);
5018 if (!DECL_SIZE (field)
5019 || !host_integerp (DECL_SIZE (field), 1))
5020 has_unknown_size = true;
5022 /* If adjacent fields do not contain pointers merge them. */
5023 if (pair
5024 && !pair->may_have_pointers
5025 && !pair->has_unknown_size
5026 && !has_unknown_size
5027 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff
5028 && !must_have_pointers_p
5029 && !could_have_pointers (field))
5031 pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
5033 else
5035 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
5036 pair->offset = offset + foff;
5037 pair->has_unknown_size = has_unknown_size;
5038 if (!has_unknown_size)
5039 pair->size = TREE_INT_CST_LOW (DECL_SIZE (field));
5040 else
5041 pair->size = -1;
5042 pair->may_have_pointers
5043 = must_have_pointers_p || could_have_pointers (field);
5044 pair->only_restrict_pointers
5045 = (!has_unknown_size
5046 && POINTER_TYPE_P (TREE_TYPE (field))
5047 && TYPE_RESTRICT (TREE_TYPE (field)));
5051 empty_p = false;
5054 return !empty_p;
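/* As an illustration, for a local, non-addressable variable of type
   struct { int *p; long a; long b; }  (with the usual layout) the
   fieldstack receives one entry for P with may_have_pointers set and a
   single merged entry covering A and B, since adjacent fields that cannot
   contain pointers are combined above.  */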
5057 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5058 if it is a varargs function. */
5060 static unsigned int
5061 count_num_arguments (tree decl, bool *is_varargs)
5063 unsigned int num = 0;
5064 tree t;
5066 /* Capture named arguments for K&R functions. They do not
5067 have a prototype and thus no TYPE_ARG_TYPES. */
5068 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5069 ++num;
5071 /* Check if the function has variadic arguments. */
5072 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5073 if (TREE_VALUE (t) == void_type_node)
5074 break;
5075 if (!t)
5076 *is_varargs = true;
5078 return num;
5081 /* Create the function info node for DECL, using NAME, and return the
5082 varinfo we've created for the function. */
5084 static varinfo_t
5085 create_function_info_for (tree decl, const char *name)
5087 struct function *fn = DECL_STRUCT_FUNCTION (decl);
5088 varinfo_t vi, prev_vi;
5089 tree arg;
5090 unsigned int i;
5091 bool is_varargs = false;
5092 unsigned int num_args = count_num_arguments (decl, &is_varargs);
5094 /* Create the variable info. */
5096 vi = new_var_info (decl, name);
5097 vi->offset = 0;
5098 vi->size = 1;
5099 vi->fullsize = fi_parm_base + num_args;
5100 vi->is_fn_info = 1;
5101 vi->may_have_pointers = false;
5102 if (is_varargs)
5103 vi->fullsize = ~0;
5104 insert_vi_for_tree (vi->decl, vi);
5106 prev_vi = vi;
5108 /* Create a variable for things the function clobbers and one for
5109 things the function uses. */
5111 varinfo_t clobbervi, usevi;
5112 const char *newname;
5113 char *tempname;
5115 asprintf (&tempname, "%s.clobber", name);
5116 newname = ggc_strdup (tempname);
5117 free (tempname);
5119 clobbervi = new_var_info (NULL, newname);
5120 clobbervi->offset = fi_clobbers;
5121 clobbervi->size = 1;
5122 clobbervi->fullsize = vi->fullsize;
5123 clobbervi->is_full_var = true;
5124 clobbervi->is_global_var = false;
5125 gcc_assert (prev_vi->offset < clobbervi->offset);
5126 prev_vi->next = clobbervi;
5127 prev_vi = clobbervi;
5129 asprintf (&tempname, "%s.use", name);
5130 newname = ggc_strdup (tempname);
5131 free (tempname);
5133 usevi = new_var_info (NULL, newname);
5134 usevi->offset = fi_uses;
5135 usevi->size = 1;
5136 usevi->fullsize = vi->fullsize;
5137 usevi->is_full_var = true;
5138 usevi->is_global_var = false;
5139 gcc_assert (prev_vi->offset < usevi->offset);
5140 prev_vi->next = usevi;
5141 prev_vi = usevi;
5144 /* And one for the static chain. */
5145 if (fn->static_chain_decl != NULL_TREE)
5147 varinfo_t chainvi;
5148 const char *newname;
5149 char *tempname;
5151 asprintf (&tempname, "%s.chain", name);
5152 newname = ggc_strdup (tempname);
5153 free (tempname);
5155 chainvi = new_var_info (fn->static_chain_decl, newname);
5156 chainvi->offset = fi_static_chain;
5157 chainvi->size = 1;
5158 chainvi->fullsize = vi->fullsize;
5159 chainvi->is_full_var = true;
5160 chainvi->is_global_var = false;
5161 gcc_assert (prev_vi->offset < chainvi->offset);
5162 prev_vi->next = chainvi;
5163 prev_vi = chainvi;
5164 insert_vi_for_tree (fn->static_chain_decl, chainvi);
5167 /* Create a variable for the return var. */
5168 if (DECL_RESULT (decl) != NULL
5169 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
5171 varinfo_t resultvi;
5172 const char *newname;
5173 char *tempname;
5174 tree resultdecl = decl;
5176 if (DECL_RESULT (decl))
5177 resultdecl = DECL_RESULT (decl);
5179 asprintf (&tempname, "%s.result", name);
5180 newname = ggc_strdup (tempname);
5181 free (tempname);
5183 resultvi = new_var_info (resultdecl, newname);
5184 resultvi->offset = fi_result;
5185 resultvi->size = 1;
5186 resultvi->fullsize = vi->fullsize;
5187 resultvi->is_full_var = true;
5188 if (DECL_RESULT (decl))
5189 resultvi->may_have_pointers = could_have_pointers (DECL_RESULT (decl));
5190 gcc_assert (prev_vi->offset < resultvi->offset);
5191 prev_vi->next = resultvi;
5192 prev_vi = resultvi;
5193 if (DECL_RESULT (decl))
5194 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
5197 /* Set up variables for each argument. */
5198 arg = DECL_ARGUMENTS (decl);
5199 for (i = 0; i < num_args; i++)
5201 varinfo_t argvi;
5202 const char *newname;
5203 char *tempname;
5204 tree argdecl = decl;
5206 if (arg)
5207 argdecl = arg;
5209 asprintf (&tempname, "%s.arg%d", name, i);
5210 newname = ggc_strdup (tempname);
5211 free (tempname);
5213 argvi = new_var_info (argdecl, newname);
5214 argvi->offset = fi_parm_base + i;
5215 argvi->size = 1;
5216 argvi->is_full_var = true;
5217 argvi->fullsize = vi->fullsize;
5218 if (arg)
5219 argvi->may_have_pointers = could_have_pointers (arg);
5220 gcc_assert (prev_vi->offset < argvi->offset);
5221 prev_vi->next = argvi;
5222 prev_vi = argvi;
5223 if (arg)
5225 insert_vi_for_tree (arg, argvi);
5226 arg = DECL_CHAIN (arg);
5230 /* Add one representative for all further args. */
5231 if (is_varargs)
5233 varinfo_t argvi;
5234 const char *newname;
5235 char *tempname;
5236 tree decl;
5238 asprintf (&tempname, "%s.varargs", name);
5239 newname = ggc_strdup (tempname);
5240 free (tempname);
5242 /* We need something that can be pointed to for va_start. */
5243 decl = create_tmp_var_raw (ptr_type_node, name);
5244 get_var_ann (decl);
5246 argvi = new_var_info (decl, newname);
5247 argvi->offset = fi_parm_base + num_args;
5248 argvi->size = ~0;
5249 argvi->is_full_var = true;
5250 argvi->is_heap_var = true;
5251 argvi->fullsize = vi->fullsize;
5252 gcc_assert (prev_vi->offset < argvi->offset);
5253 prev_vi->next = argvi;
5254 prev_vi = argvi;
5257 return vi;
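/* The chain built above for a function FOO is thus, in offset order, FOO
   itself followed by FOO.clobber, FOO.use, FOO.chain and FOO.result when
   present, the FOO.argN variables and, for variadic functions, a trailing
   FOO.varargs representative.  */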
5261 /* Return true if FIELDSTACK contains fields that overlap.
5262 FIELDSTACK is assumed to be sorted by offset. */
5264 static bool
5265 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
5267 fieldoff_s *fo = NULL;
5268 unsigned int i;
5269 HOST_WIDE_INT lastoffset = -1;
5271 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
5273 if (fo->offset == lastoffset)
5274 return true;
5275 lastoffset = fo->offset;
5277 return false;
5280 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5281 This will also create any varinfo structures necessary for fields
5282 of DECL. */
5284 static varinfo_t
5285 create_variable_info_for_1 (tree decl, const char *name)
5287 varinfo_t vi, newvi;
5288 tree decl_type = TREE_TYPE (decl);
5289 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5290 VEC (fieldoff_s,heap) *fieldstack = NULL;
5291 fieldoff_s *fo;
5292 unsigned int i;
5294 if (!declsize
5295 || !host_integerp (declsize, 1))
5297 vi = new_var_info (decl, name);
5298 vi->offset = 0;
5299 vi->size = ~0;
5300 vi->fullsize = ~0;
5301 vi->is_unknown_size_var = true;
5302 vi->is_full_var = true;
5303 vi->may_have_pointers = could_have_pointers (decl);
5304 return vi;
5307 /* Collect field information. */
5308 if (use_field_sensitive
5309 && var_can_have_subvars (decl)
5310 /* ??? Force us to not use subfields for global initializers
5311 in IPA mode. Else we'd have to parse arbitrary initializers. */
5312 && !(in_ipa_mode
5313 && is_global_var (decl)
5314 && DECL_INITIAL (decl)))
5316 fieldoff_s *fo = NULL;
5317 bool notokay = false;
5318 unsigned int i;
5320 push_fields_onto_fieldstack (decl_type, &fieldstack, 0,
5321 TREE_PUBLIC (decl)
5322 || DECL_EXTERNAL (decl)
5323 || TREE_ADDRESSABLE (decl));
5325 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
5326 if (fo->has_unknown_size
5327 || fo->offset < 0)
5329 notokay = true;
5330 break;
5333 /* We can't sort them if we have a field with a variable sized type,
5334 which will make notokay = true. In that case, we are going to return
5335 without creating varinfos for the fields anyway, so sorting them is a
5336 waste to boot. */
5337 if (!notokay)
5339 sort_fieldstack (fieldstack);
5340 /* Due to some C++ FE issues, like PR 22488, we might end up with
5341 what appear to be overlapping fields even though they,
5342 in reality, do not overlap. Until the C++ FE is fixed,
5343 we will simply disable field-sensitivity for these cases. */
5344 notokay = check_for_overlaps (fieldstack);
5347 if (notokay)
5348 VEC_free (fieldoff_s, heap, fieldstack);
5351 /* If we didn't end up collecting sub-variables create a full
5352 variable for the decl. */
5353 if (VEC_length (fieldoff_s, fieldstack) <= 1
5354 || VEC_length (fieldoff_s, fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5356 vi = new_var_info (decl, name);
5357 vi->offset = 0;
5358 vi->may_have_pointers = could_have_pointers (decl);
5359 vi->fullsize = TREE_INT_CST_LOW (declsize);
5360 vi->size = vi->fullsize;
5361 vi->is_full_var = true;
5362 VEC_free (fieldoff_s, heap, fieldstack);
5363 return vi;
5366 vi = new_var_info (decl, name);
5367 vi->fullsize = TREE_INT_CST_LOW (declsize);
5368 for (i = 0, newvi = vi;
5369 VEC_iterate (fieldoff_s, fieldstack, i, fo);
5370 ++i, newvi = newvi->next)
5372 const char *newname = "NULL";
5373 char *tempname;
5375 if (dump_file)
5377 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
5378 "+" HOST_WIDE_INT_PRINT_DEC, name, fo->offset, fo->size);
5379 newname = ggc_strdup (tempname);
5380 free (tempname);
5382 newvi->name = newname;
5383 newvi->offset = fo->offset;
5384 newvi->size = fo->size;
5385 newvi->fullsize = vi->fullsize;
5386 newvi->may_have_pointers = fo->may_have_pointers;
5387 newvi->only_restrict_pointers = fo->only_restrict_pointers;
5388 if (i + 1 < VEC_length (fieldoff_s, fieldstack))
5389 newvi->next = new_var_info (decl, name);
5392 VEC_free (fieldoff_s, heap, fieldstack);
5394 return vi;
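/* In dumps the sub-variables created above show up as NAME.OFFSET+SIZE,
   e.g. a field at bit offset 32 of size 64 in a variable S appears as
   s.32+64.  */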
5397 static unsigned int
5398 create_variable_info_for (tree decl, const char *name)
5400 varinfo_t vi = create_variable_info_for_1 (decl, name);
5401 unsigned int id = vi->id;
5403 insert_vi_for_tree (decl, vi);
5405 /* Create initial constraints for globals. */
5406 for (; vi; vi = vi->next)
5408 if (!vi->may_have_pointers
5409 || !vi->is_global_var)
5410 continue;
5412 /* Mark global restrict qualified pointers. */
5413 if ((POINTER_TYPE_P (TREE_TYPE (decl))
5414 && TYPE_RESTRICT (TREE_TYPE (decl)))
5415 || vi->only_restrict_pointers)
5416 make_constraint_from_restrict (vi, "GLOBAL_RESTRICT");
5418 /* For escaped variables initialize them from nonlocal. */
5419 if (!in_ipa_mode
5420 || DECL_EXTERNAL (decl) || TREE_PUBLIC (decl))
5421 make_copy_constraint (vi, nonlocal_id);
5423 /* If this is a global variable with an initializer and we are in
5424 IPA mode, generate constraints for it. In non-IPA mode
5425 the initialization from NONLOCAL is all we need. */
5426 if (in_ipa_mode
5427 && DECL_INITIAL (decl))
5429 VEC (ce_s, heap) *rhsc = NULL;
5430 struct constraint_expr lhs, *rhsp;
5431 unsigned i;
5432 get_constraint_for (DECL_INITIAL (decl), &rhsc);
5433 lhs.var = vi->id;
5434 lhs.offset = 0;
5435 lhs.type = SCALAR;
5436 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
5437 process_constraint (new_constraint (lhs, *rhsp));
5438 /* If this is a variable that escapes from the unit
5439 the initializer escapes as well. */
5440 if (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl))
5442 lhs.var = escaped_id;
5443 lhs.offset = 0;
5444 lhs.type = SCALAR;
5445 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
5446 process_constraint (new_constraint (lhs, *rhsp));
5448 VEC_free (ce_s, heap, rhsc);
5452 return id;
5455 /* Print out the points-to solution for VAR to FILE. */
5457 static void
5458 dump_solution_for_var (FILE *file, unsigned int var)
5460 varinfo_t vi = get_varinfo (var);
5461 unsigned int i;
5462 bitmap_iterator bi;
5464 /* Dump the solution for unified vars anyway; this avoids difficulties
5465 in scanning dumps in the testsuite. */
5466 fprintf (file, "%s = { ", vi->name);
5467 vi = get_varinfo (find (var));
5468 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5469 fprintf (file, "%s ", get_varinfo (i)->name);
5470 fprintf (file, "}");
5472 /* But note when the variable was unified. */
5473 if (vi->id != var)
5474 fprintf (file, " same as %s", vi->name);
5476 fprintf (file, "\n");
5479 /* Print the points-to solution for VAR to stdout. */
5481 DEBUG_FUNCTION void
5482 debug_solution_for_var (unsigned int var)
5484 dump_solution_for_var (stdout, var);
5487 /* Create varinfo structures for all of the variables in the
5488 function for intraprocedural mode. */
5490 static void
5491 intra_create_variable_infos (void)
5493 tree t;
5495 /* For each incoming pointer argument arg, create the constraint ARG
5496 = NONLOCAL or a dummy variable if it is a restrict qualified
5497 passed-by-reference argument. */
5498 for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
5500 varinfo_t p;
5502 if (!could_have_pointers (t))
5503 continue;
5505 /* For restrict qualified pointers to objects passed by
5506 reference build a real representative for the pointed-to object. */
5507 if (DECL_BY_REFERENCE (t)
5508 && POINTER_TYPE_P (TREE_TYPE (t))
5509 && TYPE_RESTRICT (TREE_TYPE (t)))
5511 struct constraint_expr lhsc, rhsc;
5512 varinfo_t vi;
5513 tree heapvar = heapvar_lookup (t, 0);
5514 if (heapvar == NULL_TREE)
5516 var_ann_t ann;
5517 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
5518 "PARM_NOALIAS");
5519 DECL_EXTERNAL (heapvar) = 1;
5520 heapvar_insert (t, 0, heapvar);
5521 ann = get_var_ann (heapvar);
5522 ann->is_heapvar = 1;
5524 if (gimple_referenced_vars (cfun))
5525 add_referenced_var (heapvar);
5526 lhsc.var = get_vi_for_tree (t)->id;
5527 lhsc.type = SCALAR;
5528 lhsc.offset = 0;
5529 rhsc.var = (vi = get_vi_for_tree (heapvar))->id;
5530 rhsc.type = ADDRESSOF;
5531 rhsc.offset = 0;
5532 process_constraint (new_constraint (lhsc, rhsc));
5533 vi->is_restrict_var = 1;
5534 continue;
5537 for (p = get_vi_for_tree (t); p; p = p->next)
5539 if (p->may_have_pointers)
5540 make_constraint_from (p, nonlocal_id);
5541 if (p->only_restrict_pointers)
5542 make_constraint_from_restrict (p, "PARM_RESTRICT");
5544 if (POINTER_TYPE_P (TREE_TYPE (t))
5545 && TYPE_RESTRICT (TREE_TYPE (t)))
5546 make_constraint_from_restrict (get_vi_for_tree (t), "PARM_RESTRICT");
5549 /* Add a constraint for a result decl that is passed by reference. */
5550 if (DECL_RESULT (cfun->decl)
5551 && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
5553 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));
5555 for (p = result_vi; p; p = p->next)
5556 make_constraint_from (p, nonlocal_id);
5559 /* Add a constraint for the incoming static chain parameter. */
5560 if (cfun->static_chain_decl != NULL_TREE)
5562 varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);
5564 for (p = chain_vi; p; p = p->next)
5565 make_constraint_from (p, nonlocal_id);
5569 /* Structure used to put solution bitmaps in a hashtable so they can
5570 be shared among variables with the same points-to set. */
5572 typedef struct shared_bitmap_info
5574 bitmap pt_vars;
5575 hashval_t hashcode;
5576 } *shared_bitmap_info_t;
5577 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
5579 static htab_t shared_bitmap_table;
5581 /* Hash function for a shared_bitmap_info_t */
5583 static hashval_t
5584 shared_bitmap_hash (const void *p)
5586 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
5587 return bi->hashcode;
5590 /* Equality function for two shared_bitmap_info_t's. */
5592 static int
5593 shared_bitmap_eq (const void *p1, const void *p2)
5595 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
5596 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
5597 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
5600 /* Look up a bitmap in the shared bitmap hashtable, and return an already
5601 existing instance if there is one, NULL otherwise. */
5603 static bitmap
5604 shared_bitmap_lookup (bitmap pt_vars)
5606 void **slot;
5607 struct shared_bitmap_info sbi;
5609 sbi.pt_vars = pt_vars;
5610 sbi.hashcode = bitmap_hash (pt_vars);
5612 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
5613 sbi.hashcode, NO_INSERT);
5614 if (!slot)
5615 return NULL;
5616 else
5617 return ((shared_bitmap_info_t) *slot)->pt_vars;
5621 /* Add a bitmap to the shared bitmap hashtable. */
5623 static void
5624 shared_bitmap_add (bitmap pt_vars)
5626 void **slot;
5627 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
5629 sbi->pt_vars = pt_vars;
5630 sbi->hashcode = bitmap_hash (pt_vars);
5632 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
5633 sbi->hashcode, INSERT);
5634 gcc_assert (!*slot);
5635 *slot = (void *) sbi;
5639 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
5641 static void
5642 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
5644 unsigned int i;
5645 bitmap_iterator bi;
5647 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
5649 varinfo_t vi = get_varinfo (i);
5651 /* The only artificial variables that are allowed in a may-alias
5652 set are heap variables. */
5653 if (vi->is_artificial_var && !vi->is_heap_var)
5654 continue;
5656 if (TREE_CODE (vi->decl) == VAR_DECL
5657 || TREE_CODE (vi->decl) == PARM_DECL
5658 || TREE_CODE (vi->decl) == RESULT_DECL)
5660 /* If we are in IPA mode we will not recompute points-to
5661 sets after inlining so make sure they stay valid. */
5662 if (in_ipa_mode
5663 && !DECL_PT_UID_SET_P (vi->decl))
5664 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
5666 /* Add the decl to the points-to set. Note that the points-to
5667 set contains global variables. */
5668 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
5669 if (vi->is_global_var)
5670 pt->vars_contains_global = true;
5676 /* Compute the points-to solution *PT for the variable VI. */
5678 static void
5679 find_what_var_points_to (varinfo_t orig_vi, struct pt_solution *pt)
5681 unsigned int i;
5682 bitmap_iterator bi;
5683 bitmap finished_solution;
5684 bitmap result;
5685 varinfo_t vi;
5687 memset (pt, 0, sizeof (struct pt_solution));
5689 /* This variable may have been collapsed, let's get the real
5690 variable. */
5691 vi = get_varinfo (find (orig_vi->id));
5693 /* Translate artificial variables into SSA_NAME_PTR_INFO
5694 attributes. */
5695 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5697 varinfo_t vi = get_varinfo (i);
5699 if (vi->is_artificial_var)
5701 if (vi->id == nothing_id)
5702 pt->null = 1;
5703 else if (vi->id == escaped_id)
5705 if (in_ipa_mode)
5706 pt->ipa_escaped = 1;
5707 else
5708 pt->escaped = 1;
5710 else if (vi->id == nonlocal_id)
5711 pt->nonlocal = 1;
5712 else if (vi->is_heap_var)
5713 /* We represent heapvars in the points-to set properly. */
5715 else if (vi->id == readonly_id)
5716 /* Nobody cares. */
5718 else if (vi->id == anything_id
5719 || vi->id == integer_id)
5720 pt->anything = 1;
5722 if (vi->is_restrict_var)
5723 pt->vars_contains_restrict = true;
5726 /* Instead of doing extra work, simply do not create
5727 elaborate points-to information for pt_anything pointers. */
5728 if (pt->anything
5729 && (orig_vi->is_artificial_var
5730 || !pt->vars_contains_restrict))
5731 return;
5733 /* Share the final set of variables when possible. */
5734 finished_solution = BITMAP_GGC_ALLOC ();
5735 stats.points_to_sets_created++;
5737 set_uids_in_ptset (finished_solution, vi->solution, pt);
5738 result = shared_bitmap_lookup (finished_solution);
5739 if (!result)
5741 shared_bitmap_add (finished_solution);
5742 pt->vars = finished_solution;
5744 else
5746 pt->vars = result;
5747 bitmap_clear (finished_solution);
5751 /* Given a pointer variable P, fill in its points-to set. */
5753 static void
5754 find_what_p_points_to (tree p)
5756 struct ptr_info_def *pi;
5757 tree lookup_p = p;
5758 varinfo_t vi;
5760 /* For parameters, get at the points-to set for the actual parm
5761 decl. */
5762 if (TREE_CODE (p) == SSA_NAME
5763 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
5764 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL)
5765 && SSA_NAME_IS_DEFAULT_DEF (p))
5766 lookup_p = SSA_NAME_VAR (p);
5768 vi = lookup_vi_for_tree (lookup_p);
5769 if (!vi)
5770 return;
5772 pi = get_ptr_info (p);
5773 find_what_var_points_to (vi, &pi->pt);
5777 /* Query statistics for points-to solutions. */
5779 static struct {
5780 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
5781 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
5782 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
5783 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
5784 } pta_stats;
5786 void
5787 dump_pta_stats (FILE *s)
5789 fprintf (s, "\nPTA query stats:\n");
5790 fprintf (s, " pt_solution_includes: "
5791 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
5792 HOST_WIDE_INT_PRINT_DEC" queries\n",
5793 pta_stats.pt_solution_includes_no_alias,
5794 pta_stats.pt_solution_includes_no_alias
5795 + pta_stats.pt_solution_includes_may_alias);
5796 fprintf (s, " pt_solutions_intersect: "
5797 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
5798 HOST_WIDE_INT_PRINT_DEC" queries\n",
5799 pta_stats.pt_solutions_intersect_no_alias,
5800 pta_stats.pt_solutions_intersect_no_alias
5801 + pta_stats.pt_solutions_intersect_may_alias);
5805 /* Reset the points-to solution *PT to a conservative default
5806 (point to anything). */
5808 void
5809 pt_solution_reset (struct pt_solution *pt)
5811 memset (pt, 0, sizeof (struct pt_solution));
5812 pt->anything = true;
5815 /* Set the points-to solution *PT to point only to the variables
5816 in VARS. VARS_CONTAINS_GLOBAL specifies whether that contains
5817 global variables and VARS_CONTAINS_RESTRICT specifies whether
5818 it contains restrict tag variables. */
5820 void
5821 pt_solution_set (struct pt_solution *pt, bitmap vars,
5822 bool vars_contains_global, bool vars_contains_restrict)
5824 memset (pt, 0, sizeof (struct pt_solution));
5825 pt->vars = vars;
5826 pt->vars_contains_global = vars_contains_global;
5827 pt->vars_contains_restrict = vars_contains_restrict;
5830 /* Set the points-to solution *PT to point only to the variable VAR. */
5832 void
5833 pt_solution_set_var (struct pt_solution *pt, tree var)
5835 memset (pt, 0, sizeof (struct pt_solution));
5836 pt->vars = BITMAP_GGC_ALLOC ();
5837 bitmap_set_bit (pt->vars, DECL_UID (var));
5838 pt->vars_contains_global = is_global_var (var);
5841 /* Computes the union of the points-to solutions *DEST and *SRC and
5842 stores the result in *DEST. This changes the points-to bitmap
5843 of *DEST and thus may not be used if that might be shared.
5844 The points-to bitmap of *SRC and *DEST will not be shared after
5845 this function if they were not before. */
5847 static void
5848 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
5850 dest->anything |= src->anything;
5851 if (dest->anything)
5853 pt_solution_reset (dest);
5854 return;
5857 dest->nonlocal |= src->nonlocal;
5858 dest->escaped |= src->escaped;
5859 dest->ipa_escaped |= src->ipa_escaped;
5860 dest->null |= src->null;
5861 dest->vars_contains_global |= src->vars_contains_global;
5862 dest->vars_contains_restrict |= src->vars_contains_restrict;
5863 if (!src->vars)
5864 return;
5866 if (!dest->vars)
5867 dest->vars = BITMAP_GGC_ALLOC ();
5868 bitmap_ior_into (dest->vars, src->vars);
5871 /* Return true if the points-to solution *PT is empty. */
5873 bool
5874 pt_solution_empty_p (struct pt_solution *pt)
5876 if (pt->anything
5877 || pt->nonlocal)
5878 return false;
5880 if (pt->vars
5881 && !bitmap_empty_p (pt->vars))
5882 return false;
5884 /* If the solution includes ESCAPED, check if that is empty. */
5885 if (pt->escaped
5886 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
5887 return false;
5889 /* If the solution includes the IPA ESCAPED solution, check if that is empty. */
5890 if (pt->ipa_escaped
5891 && !pt_solution_empty_p (&ipa_escaped_pt))
5892 return false;
5894 return true;
5897 /* Return true if the points-to solution *PT includes global memory. */
5899 bool
5900 pt_solution_includes_global (struct pt_solution *pt)
5902 if (pt->anything
5903 || pt->nonlocal
5904 || pt->vars_contains_global)
5905 return true;
5907 if (pt->escaped)
5908 return pt_solution_includes_global (&cfun->gimple_df->escaped);
5910 if (pt->ipa_escaped)
5911 return pt_solution_includes_global (&ipa_escaped_pt);
5913 /* ??? This predicate is not correct for the IPA-PTA solution
5914 as we do not properly distinguish between unit escape points
5915 and global variables. */
5916 if (cfun->gimple_df->ipa_pta)
5917 return true;
5919 return false;
5922 /* Return true if the points-to solution *PT includes the variable
5923 declaration DECL. */
5925 static bool
5926 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
5928 if (pt->anything)
5929 return true;
5931 if (pt->nonlocal
5932 && is_global_var (decl))
5933 return true;
5935 if (pt->vars
5936 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
5937 return true;
5939 /* If the solution includes ESCAPED, check it. */
5940 if (pt->escaped
5941 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
5942 return true;
5944 /* If the solution includes the IPA ESCAPED solution, check it. */
5945 if (pt->ipa_escaped
5946 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
5947 return true;
5949 return false;
5952 bool
5953 pt_solution_includes (struct pt_solution *pt, const_tree decl)
5955 bool res = pt_solution_includes_1 (pt, decl);
5956 if (res)
5957 ++pta_stats.pt_solution_includes_may_alias;
5958 else
5959 ++pta_stats.pt_solution_includes_no_alias;
5960 return res;
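
/* Hedged usage sketch: ask whether the current function's ESCAPED solution
   includes DECL, i.e. whether DECL may be reached through escaped pointers.
   Purely illustrative; the helper name is made up.  */

static bool ATTRIBUTE_UNUSED
example_decl_reached_by_escaped_p (tree decl)
{
  return pt_solution_includes (&cfun->gimple_df->escaped, decl);
}
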
5963 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
5964 intersection. */
5966 static bool
5967 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
5969 if (pt1->anything || pt2->anything)
5970 return true;
5972 /* If either points to unknown global memory and the other points to
5973 any global memory, they alias. */
5974 if ((pt1->nonlocal
5975 && (pt2->nonlocal
5976 || pt2->vars_contains_global))
5977 || (pt2->nonlocal
5978 && pt1->vars_contains_global))
5979 return true;
5981 /* Check the escaped solution if required. */
5982 if ((pt1->escaped || pt2->escaped)
5983 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
5985 /* If both point to escaped memory and that solution
5986 is not empty, they alias. */
5987 if (pt1->escaped && pt2->escaped)
5988 return true;
5990 /* If either points to escaped memory see if the escaped solution
5991 intersects with the other. */
5992 if ((pt1->escaped
5993 && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
5994 || (pt2->escaped
5995 && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
5996 return true;
5999 /* Check the IPA escaped solution if required.
6000 ??? Do we need to check the local against the IPA escaped sets? */
6001 if ((pt1->ipa_escaped || pt2->ipa_escaped)
6002 && !pt_solution_empty_p (&ipa_escaped_pt))
6004 /* If both point to escaped memory and that solution
6005 is not empty, they alias. */
6006 if (pt1->ipa_escaped && pt2->ipa_escaped)
6007 return true;
6009 /* If either points to escaped memory see if the escaped solution
6010 intersects with the other. */
6011 if ((pt1->ipa_escaped
6012 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
6013 || (pt2->ipa_escaped
6014 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
6015 return true;
6018 /* Now both pointers alias if their points-to solution intersects. */
6019 return (pt1->vars
6020 && pt2->vars
6021 && bitmap_intersect_p (pt1->vars, pt2->vars));
6024 bool
6025 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6027 bool res = pt_solutions_intersect_1 (pt1, pt2);
6028 if (res)
6029 ++pta_stats.pt_solutions_intersect_may_alias;
6030 else
6031 ++pta_stats.pt_solutions_intersect_no_alias;
6032 return res;
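
/* Hedged sketch of a conservative may-alias query built from the primitives
   above.  Real consumers use the solutions computed for SSA name pointers;
   constructing singleton solutions here is only for illustration.  */

static bool ATTRIBUTE_UNUSED
example_decls_may_alias_p (tree var1, tree var2)
{
  struct pt_solution pt1, pt2;

  pt1_solution_set_var (&pt1, var1); /* sic: see note below */
  pt_solution_set_var (&pt2, var2);

  /* True iff the two solutions intersect (via their variable bitmaps or
     the conservative ANYTHING/NONLOCAL/ESCAPED components).  */
  return pt_solutions_intersect (&pt1, &pt2);
}
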
6035 /* Return true if both points-to solutions PT1 and PT2 for two restrict
6036 qualified pointers are possibly based on the same pointer. */
6038 bool
6039 pt_solutions_same_restrict_base (struct pt_solution *pt1,
6040 struct pt_solution *pt2)
6042 /* If we deal with points-to solutions of two restrict qualified
6043 pointers, solely rely on the pointed-to variable bitmap intersection.
6044 For two pointers that are based on each other the bitmaps will
6045 intersect. */
6046 if (pt1->vars_contains_restrict
6047 && pt2->vars_contains_restrict)
6049 gcc_assert (pt1->vars && pt2->vars);
6050 return bitmap_intersect_p (pt1->vars, pt2->vars);
6053 return true;
6057 /* Dump points-to information to OUTFILE. */
6059 static void
6060 dump_sa_points_to_info (FILE *outfile)
6062 unsigned int i;
6064 fprintf (outfile, "\nPoints-to sets\n\n");
6066 if (dump_flags & TDF_STATS)
6068 fprintf (outfile, "Stats:\n");
6069 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6070 fprintf (outfile, "Non-pointer vars: %d\n",
6071 stats.nonpointer_vars);
6072 fprintf (outfile, "Statically unified vars: %d\n",
6073 stats.unified_vars_static);
6074 fprintf (outfile, "Dynamically unified vars: %d\n",
6075 stats.unified_vars_dynamic);
6076 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6077 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6078 fprintf (outfile, "Number of implicit edges: %d\n",
6079 stats.num_implicit_edges);
6082 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
6084 varinfo_t vi = get_varinfo (i);
6085 if (!vi->may_have_pointers)
6086 continue;
6087 dump_solution_for_var (outfile, i);
6092 /* Debug points-to information to stderr. */
6094 DEBUG_FUNCTION void
6095 debug_sa_points_to_info (void)
6097 dump_sa_points_to_info (stderr);
6101 /* Initialize the always-existing constraint variables for NULL,
6102 ANYTHING, READONLY, ESCAPED, NONLOCAL, STOREDANYTHING, and INTEGER. */
6104 static void
6105 init_base_vars (void)
6107 struct constraint_expr lhs, rhs;
6108 varinfo_t var_anything;
6109 varinfo_t var_nothing;
6110 varinfo_t var_readonly;
6111 varinfo_t var_escaped;
6112 varinfo_t var_nonlocal;
6113 varinfo_t var_storedanything;
6114 varinfo_t var_integer;
6116 /* Create the NULL variable, used to represent that a variable points
6117 to NULL. */
6118 var_nothing = new_var_info (NULL_TREE, "NULL");
6119 gcc_assert (var_nothing->id == nothing_id);
6120 var_nothing->is_artificial_var = 1;
6121 var_nothing->offset = 0;
6122 var_nothing->size = ~0;
6123 var_nothing->fullsize = ~0;
6124 var_nothing->is_special_var = 1;
6125 var_nothing->may_have_pointers = 0;
6126 var_nothing->is_global_var = 0;
6128 /* Create the ANYTHING variable, used to represent that a variable
6129 points to some unknown piece of memory. */
6130 var_anything = new_var_info (NULL_TREE, "ANYTHING");
6131 gcc_assert (var_anything->id == anything_id);
6132 var_anything->is_artificial_var = 1;
6133 var_anything->size = ~0;
6134 var_anything->offset = 0;
6135 var_anything->next = NULL;
6136 var_anything->fullsize = ~0;
6137 var_anything->is_special_var = 1;
6139 /* Anything points to anything. This makes deref constraints just
6140 work in the presence of linked list and other p = *p type loops,
6141 by saying that *ANYTHING = ANYTHING. */
6142 lhs.type = SCALAR;
6143 lhs.var = anything_id;
6144 lhs.offset = 0;
6145 rhs.type = ADDRESSOF;
6146 rhs.var = anything_id;
6147 rhs.offset = 0;
6149 /* This specifically does not use process_constraint because
6150 process_constraint ignores all anything = anything constraints, since all
6151 but this one are redundant. */
6152 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
6154 /* Create the READONLY variable, used to represent that a variable
6155 points to readonly memory. */
6156 var_readonly = new_var_info (NULL_TREE, "READONLY");
6157 gcc_assert (var_readonly->id == readonly_id);
6158 var_readonly->is_artificial_var = 1;
6159 var_readonly->offset = 0;
6160 var_readonly->size = ~0;
6161 var_readonly->fullsize = ~0;
6162 var_readonly->next = NULL;
6163 var_readonly->is_special_var = 1;
6165 /* readonly memory points to anything, in order to make deref
6166 easier. In reality, it points to anything the particular
6167 readonly variable can point to, but we don't track this
6168 separately. */
6169 lhs.type = SCALAR;
6170 lhs.var = readonly_id;
6171 lhs.offset = 0;
6172 rhs.type = ADDRESSOF;
6173 rhs.var = readonly_id; /* FIXME */
6174 rhs.offset = 0;
6175 process_constraint (new_constraint (lhs, rhs));
6177 /* Create the ESCAPED variable, used to represent the set of escaped
6178 memory. */
6179 var_escaped = new_var_info (NULL_TREE, "ESCAPED");
6180 gcc_assert (var_escaped->id == escaped_id);
6181 var_escaped->is_artificial_var = 1;
6182 var_escaped->offset = 0;
6183 var_escaped->size = ~0;
6184 var_escaped->fullsize = ~0;
6185 var_escaped->is_special_var = 0;
6187 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6188 memory. */
6189 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
6190 gcc_assert (var_nonlocal->id == nonlocal_id);
6191 var_nonlocal->is_artificial_var = 1;
6192 var_nonlocal->offset = 0;
6193 var_nonlocal->size = ~0;
6194 var_nonlocal->fullsize = ~0;
6195 var_nonlocal->is_special_var = 1;
6197 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6198 lhs.type = SCALAR;
6199 lhs.var = escaped_id;
6200 lhs.offset = 0;
6201 rhs.type = DEREF;
6202 rhs.var = escaped_id;
6203 rhs.offset = 0;
6204 process_constraint (new_constraint (lhs, rhs));
6206 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6207 whole variable escapes. */
6208 lhs.type = SCALAR;
6209 lhs.var = escaped_id;
6210 lhs.offset = 0;
6211 rhs.type = SCALAR;
6212 rhs.var = escaped_id;
6213 rhs.offset = UNKNOWN_OFFSET;
6214 process_constraint (new_constraint (lhs, rhs));
6216 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6217 everything pointed to by escaped points to what global memory can
6218 point to. */
6219 lhs.type = DEREF;
6220 lhs.var = escaped_id;
6221 lhs.offset = 0;
6222 rhs.type = SCALAR;
6223 rhs.var = nonlocal_id;
6224 rhs.offset = 0;
6225 process_constraint (new_constraint (lhs, rhs));
6227 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6228 global memory may point to global memory and escaped memory. */
6229 lhs.type = SCALAR;
6230 lhs.var = nonlocal_id;
6231 lhs.offset = 0;
6232 rhs.type = ADDRESSOF;
6233 rhs.var = nonlocal_id;
6234 rhs.offset = 0;
6235 process_constraint (new_constraint (lhs, rhs));
6236 rhs.type = ADDRESSOF;
6237 rhs.var = escaped_id;
6238 rhs.offset = 0;
6239 process_constraint (new_constraint (lhs, rhs));
6241 /* Create the STOREDANYTHING variable, used to represent the set of
6242 variables stored to *ANYTHING. */
6243 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
6244 gcc_assert (var_storedanything->id == storedanything_id);
6245 var_storedanything->is_artificial_var = 1;
6246 var_storedanything->offset = 0;
6247 var_storedanything->size = ~0;
6248 var_storedanything->fullsize = ~0;
6249 var_storedanything->is_special_var = 0;
6251 /* Create the INTEGER variable, used to represent that a variable points
6252 to what an INTEGER "points to". */
6253 var_integer = new_var_info (NULL_TREE, "INTEGER");
6254 gcc_assert (var_integer->id == integer_id);
6255 var_integer->is_artificial_var = 1;
6256 var_integer->size = ~0;
6257 var_integer->fullsize = ~0;
6258 var_integer->offset = 0;
6259 var_integer->next = NULL;
6260 var_integer->is_special_var = 1;
6262 /* INTEGER = ANYTHING, because we don't know where a dereference of
6263 a random integer will point to. */
6264 lhs.type = SCALAR;
6265 lhs.var = integer_id;
6266 lhs.offset = 0;
6267 rhs.type = ADDRESSOF;
6268 rhs.var = anything_id;
6269 rhs.offset = 0;
6270 process_constraint (new_constraint (lhs, rhs));
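
/* Hedged illustration (not called anywhere): how a simple "p = &q"
   assignment would be expressed with the constraint machinery used in
   init_base_vars above.  P_ID and Q_ID stand for the variable ids the
   analyzer would have assigned to p and q; both are hypothetical here.  */

static void ATTRIBUTE_UNUSED
example_address_of_constraint (unsigned int p_id, unsigned int q_id)
{
  struct constraint_expr lhs, rhs;

  /* p = &q: p's points-to set includes q.  */
  lhs.type = SCALAR;
  lhs.var = p_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = q_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
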
6273 /* Initialize things necessary to perform PTA. */
6275 static void
6276 init_alias_vars (void)
6278 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
6280 bitmap_obstack_initialize (&pta_obstack);
6281 bitmap_obstack_initialize (&oldpta_obstack);
6282 bitmap_obstack_initialize (&predbitmap_obstack);
6284 constraint_pool = create_alloc_pool ("Constraint pool",
6285 sizeof (struct constraint), 30);
6286 variable_info_pool = create_alloc_pool ("Variable info pool",
6287 sizeof (struct variable_info), 30);
6288 constraints = VEC_alloc (constraint_t, heap, 8);
6289 varmap = VEC_alloc (varinfo_t, heap, 8);
6290 vi_for_tree = pointer_map_create ();
6291 call_stmt_vars = pointer_map_create ();
6293 memset (&stats, 0, sizeof (stats));
6294 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
6295 shared_bitmap_eq, free);
6296 init_base_vars ();
6299 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6300 predecessor edges. */
6302 static void
6303 remove_preds_and_fake_succs (constraint_graph_t graph)
6305 unsigned int i;
6307 /* Clear the implicit ref and address nodes from the successor
6308 lists. */
6309 for (i = 0; i < FIRST_REF_NODE; i++)
6311 if (graph->succs[i])
6312 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6313 FIRST_REF_NODE * 2);
6316 /* Free the successor list for the non-ref nodes. */
6317 for (i = FIRST_REF_NODE; i < graph->size; i++)
6319 if (graph->succs[i])
6320 BITMAP_FREE (graph->succs[i]);
6323 /* Now reallocate the successor list to its new size, and blow away
6324 the predecessor bitmaps. */
6325 graph->size = VEC_length (varinfo_t, varmap);
6326 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6328 free (graph->implicit_preds);
6329 graph->implicit_preds = NULL;
6330 free (graph->preds);
6331 graph->preds = NULL;
6332 bitmap_obstack_release (&predbitmap_obstack);
6335 /* Initialize the heapvar for statement mapping. */
6337 static void
6338 init_alias_heapvars (void)
6340 if (!heapvar_for_stmt)
6341 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, heapvar_map_eq,
6342 NULL);
6345 /* Delete the heapvar for statement mapping. */
6347 void
6348 delete_alias_heapvars (void)
6350 if (heapvar_for_stmt)
6351 htab_delete (heapvar_for_stmt);
6352 heapvar_for_stmt = NULL;
6355 /* Solve the constraint set. */
6357 static void
6358 solve_constraints (void)
6360 struct scc_info *si;
6362 if (dump_file)
6363 fprintf (dump_file,
6364 "\nCollapsing static cycles and doing variable "
6365 "substitution\n");
6367 init_graph (VEC_length (varinfo_t, varmap) * 2);
6369 if (dump_file)
6370 fprintf (dump_file, "Building predecessor graph\n");
6371 build_pred_graph ();
6373 if (dump_file)
6374 fprintf (dump_file, "Detecting pointer and location "
6375 "equivalences\n");
6376 si = perform_var_substitution (graph);
6378 if (dump_file)
6379 fprintf (dump_file, "Rewriting constraints and unifying "
6380 "variables\n");
6381 rewrite_constraints (graph, si);
6383 build_succ_graph ();
6384 free_var_substitution_info (si);
6386 if (dump_file && (dump_flags & TDF_GRAPH))
6387 dump_constraint_graph (dump_file);
6389 move_complex_constraints (graph);
6391 if (dump_file)
6392 fprintf (dump_file, "Uniting pointer but not location equivalent "
6393 "variables\n");
6394 unite_pointer_equivalences (graph);
6396 if (dump_file)
6397 fprintf (dump_file, "Finding indirect cycles\n");
6398 find_indirect_cycles (graph);
6400 /* Implicit nodes and predecessors are no longer necessary at this
6401 point. */
6402 remove_preds_and_fake_succs (graph);
6404 if (dump_file)
6405 fprintf (dump_file, "Solving graph\n");
6407 solve_graph (graph);
6409 if (dump_file)
6410 dump_sa_points_to_info (dump_file);
6413 /* Create points-to sets for the current function. See the comments
6414 at the start of the file for an algorithmic overview. */
6416 static void
6417 compute_points_to_sets (void)
6419 basic_block bb;
6420 unsigned i;
6421 varinfo_t vi;
6423 timevar_push (TV_TREE_PTA);
6425 init_alias_vars ();
6426 init_alias_heapvars ();
6428 intra_create_variable_infos ();
6430 /* Now walk all statements and build the constraint set. */
6431 FOR_EACH_BB (bb)
6433 gimple_stmt_iterator gsi;
6435 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6437 gimple phi = gsi_stmt (gsi);
6439 if (is_gimple_reg (gimple_phi_result (phi)))
6440 find_func_aliases (phi);
6443 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6445 gimple stmt = gsi_stmt (gsi);
6447 find_func_aliases (stmt);
6451 if (dump_file)
6453 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
6454 dump_constraints (dump_file, 0);
6457 /* From the constraints compute the points-to sets. */
6458 solve_constraints ();
6460 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
6461 find_what_var_points_to (get_varinfo (escaped_id),
6462 &cfun->gimple_df->escaped);
6464 /* Make sure the ESCAPED solution (which is used as placeholder in
6465 other solutions) does not reference itself. This simplifies
6466 points-to solution queries. */
6467 cfun->gimple_df->escaped.escaped = 0;
6469 /* Mark escaped HEAP variables as global. */
6470 for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); ++i)
6471 if (vi->is_heap_var
6472 && !vi->is_restrict_var
6473 && !vi->is_global_var)
6474 DECL_EXTERNAL (vi->decl) = vi->is_global_var
6475 = pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);
6477 /* Compute the points-to sets for pointer SSA_NAMEs. */
6478 for (i = 0; i < num_ssa_names; ++i)
6480 tree ptr = ssa_name (i);
6481 if (ptr
6482 && POINTER_TYPE_P (TREE_TYPE (ptr)))
6483 find_what_p_points_to (ptr);
6486 /* Compute the call-used/clobbered sets. */
6487 FOR_EACH_BB (bb)
6489 gimple_stmt_iterator gsi;
6491 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6493 gimple stmt = gsi_stmt (gsi);
6494 struct pt_solution *pt;
6495 if (!is_gimple_call (stmt))
6496 continue;
6498 pt = gimple_call_use_set (stmt);
6499 if (gimple_call_flags (stmt) & ECF_CONST)
6500 memset (pt, 0, sizeof (struct pt_solution));
6501 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
6503 find_what_var_points_to (vi, pt);
6504 /* Escaped (and thus nonlocal) variables are always
6505 implicitly used by calls. */
6506 /* ??? ESCAPED can be empty even though NONLOCAL
6507 always escapes. */
6508 pt->nonlocal = 1;
6509 pt->escaped = 1;
6511 else
6513 /* If there is nothing special about this call then
6514 we have made everything that is used also escape. */
6515 *pt = cfun->gimple_df->escaped;
6516 pt->nonlocal = 1;
6519 pt = gimple_call_clobber_set (stmt);
6520 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
6521 memset (pt, 0, sizeof (struct pt_solution));
6522 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
6524 find_what_var_points_to (vi, pt);
6525 /* Escaped (and thus nonlocal) variables are always
6526 implicitly clobbered by calls. */
6527 /* ??? ESCAPED can be empty even though NONLOCAL
6528 always escapes. */
6529 pt->nonlocal = 1;
6530 pt->escaped = 1;
6532 else
6534 /* If there is nothing special about this call then
6535 we have made everything that is used also escape. */
6536 *pt = cfun->gimple_df->escaped;
6537 pt->nonlocal = 1;
6542 timevar_pop (TV_TREE_PTA);
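
/* Hedged usage sketch: once compute_points_to_sets has filled in the
   per-call-site solutions, a client could ask whether a call statement may
   clobber DECL.  Illustrative only; the helper name is made up.  */

static bool ATTRIBUTE_UNUSED
example_call_may_clobber_decl_p (gimple call, tree decl)
{
  /* The clobber set was either cleared (const/pure/novops calls), taken
     from the call's clobber varinfo, or defaulted to ESCAPED above.  */
  return pt_solution_includes (gimple_call_clobber_set (call), decl);
}
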
6546 /* Delete created points-to sets. */
6548 static void
6549 delete_points_to_sets (void)
6551 unsigned int i;
6553 htab_delete (shared_bitmap_table);
6554 if (dump_file && (dump_flags & TDF_STATS))
6555 fprintf (dump_file, "Points to sets created:%d\n",
6556 stats.points_to_sets_created);
6558 pointer_map_destroy (vi_for_tree);
6559 pointer_map_destroy (call_stmt_vars);
6560 bitmap_obstack_release (&pta_obstack);
6561 VEC_free (constraint_t, heap, constraints);
6563 for (i = 0; i < graph->size; i++)
6564 VEC_free (constraint_t, heap, graph->complex[i]);
6565 free (graph->complex);
6567 free (graph->rep);
6568 free (graph->succs);
6569 free (graph->pe);
6570 free (graph->pe_rep);
6571 free (graph->indirect_cycles);
6572 free (graph);
6574 VEC_free (varinfo_t, heap, varmap);
6575 free_alloc_pool (variable_info_pool);
6576 free_alloc_pool (constraint_pool);
6580 /* Compute points-to information for every SSA_NAME pointer in the
6581 current function and compute the transitive closure of escaped
6582 variables to re-initialize the call-clobber states of local variables. */
6584 unsigned int
6585 compute_may_aliases (void)
6587 if (cfun->gimple_df->ipa_pta)
6589 if (dump_file)
6591 fprintf (dump_file, "\nNot re-computing points-to information "
6592 "because IPA points-to information is available.\n\n");
6594 /* But still dump what we have remaining. */
6595 dump_alias_info (dump_file);
6597 if (dump_flags & TDF_DETAILS)
6598 dump_referenced_vars (dump_file);
6601 return 0;
6604 /* For each pointer P_i, determine the sets of variables that P_i may
6605 point-to. Compute the reachability set of escaped and call-used
6606 variables. */
6607 compute_points_to_sets ();
6609 /* Debugging dumps. */
6610 if (dump_file)
6612 dump_alias_info (dump_file);
6614 if (dump_flags & TDF_DETAILS)
6615 dump_referenced_vars (dump_file);
6618 /* Deallocate memory used by aliasing data structures and the internal
6619 points-to solution. */
6620 delete_points_to_sets ();
6622 gcc_assert (!need_ssa_update_p (cfun));
6624 return 0;
6627 static bool
6628 gate_tree_pta (void)
6630 return flag_tree_pta;
6633 /* A dummy pass to cause points-to information to be computed via
6634 TODO_rebuild_alias. */
6636 struct gimple_opt_pass pass_build_alias =
6639 GIMPLE_PASS,
6640 "alias", /* name */
6641 gate_tree_pta, /* gate */
6642 NULL, /* execute */
6643 NULL, /* sub */
6644 NULL, /* next */
6645 0, /* static_pass_number */
6646 TV_NONE, /* tv_id */
6647 PROP_cfg | PROP_ssa, /* properties_required */
6648 0, /* properties_provided */
6649 0, /* properties_destroyed */
6650 0, /* todo_flags_start */
6651 TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */
6655 /* A dummy pass to cause points-to information to be computed via
6656 TODO_rebuild_alias. */
6658 struct gimple_opt_pass pass_build_ealias =
6661 GIMPLE_PASS,
6662 "ealias", /* name */
6663 gate_tree_pta, /* gate */
6664 NULL, /* execute */
6665 NULL, /* sub */
6666 NULL, /* next */
6667 0, /* static_pass_number */
6668 TV_NONE, /* tv_id */
6669 PROP_cfg | PROP_ssa, /* properties_required */
6670 0, /* properties_provided */
6671 0, /* properties_destroyed */
6672 0, /* todo_flags_start */
6673 TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */
6678 /* Return true if we should execute IPA PTA. */
6679 static bool
6680 gate_ipa_pta (void)
6682 return (optimize
6683 && flag_ipa_pta
6684 /* Don't bother doing anything if the program has errors. */
6685 && !seen_error ());
6688 /* IPA PTA solution for ESCAPED. */
6689 struct pt_solution ipa_escaped_pt
6690 = { true, false, false, false, false, false, false, NULL };
6692 /* Execute the driver for IPA PTA. */
6693 static unsigned int
6694 ipa_pta_execute (void)
6696 struct cgraph_node *node;
6697 struct varpool_node *var;
6698 int from;
6700 in_ipa_mode = 1;
6702 init_alias_heapvars ();
6703 init_alias_vars ();
6705 /* Build the constraints. */
6706 for (node = cgraph_nodes; node; node = node->next)
6708 struct cgraph_node *alias;
6709 varinfo_t vi;
6711 /* Nodes without a body are not interesting. Especially do not
6712 visit clones at this point for now - we get duplicate decls
6713 there for inline clones at least. */
6714 if (!gimple_has_body_p (node->decl)
6715 || node->clone_of)
6716 continue;
6718 vi = create_function_info_for (node->decl,
6719 alias_get_name (node->decl));
6721 /* Associate the varinfo node with all aliases. */
6722 for (alias = node->same_body; alias; alias = alias->next)
6723 insert_vi_for_tree (alias->decl, vi);
6726 /* Create constraints for global variables and their initializers. */
6727 for (var = varpool_nodes; var; var = var->next)
6729 struct varpool_node *alias;
6730 varinfo_t vi;
6732 vi = get_vi_for_tree (var->decl);
6734 /* Associate the varinfo node with all aliases. */
6735 for (alias = var->extra_name; alias; alias = alias->next)
6736 insert_vi_for_tree (alias->decl, vi);
6739 if (dump_file)
6741 fprintf (dump_file,
6742 "Generating constraints for global initializers\n\n");
6743 dump_constraints (dump_file, 0);
6744 fprintf (dump_file, "\n");
6746 from = VEC_length (constraint_t, constraints);
6748 for (node = cgraph_nodes; node; node = node->next)
6750 struct function *func;
6751 basic_block bb;
6752 tree old_func_decl;
6754 /* Nodes without a body are not interesting. */
6755 if (!gimple_has_body_p (node->decl)
6756 || node->clone_of)
6757 continue;
6759 if (dump_file)
6761 fprintf (dump_file,
6762 "Generating constraints for %s", cgraph_node_name (node));
6763 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
6764 fprintf (dump_file, " (%s)",
6765 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
6766 fprintf (dump_file, "\n");
6769 func = DECL_STRUCT_FUNCTION (node->decl);
6770 old_func_decl = current_function_decl;
6771 push_cfun (func);
6772 current_function_decl = node->decl;
6774 /* For externally visible functions use local constraints for
6775 their arguments. For local functions we see all callers
6776 and thus do not need initial constraints for parameters. */
6777 if (node->local.externally_visible)
6778 intra_create_variable_infos ();
6780 /* Build constraints for the function body. */
6781 FOR_EACH_BB_FN (bb, func)
6783 gimple_stmt_iterator gsi;
6785 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6786 gsi_next (&gsi))
6788 gimple phi = gsi_stmt (gsi);
6790 if (is_gimple_reg (gimple_phi_result (phi)))
6791 find_func_aliases (phi);
6794 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6796 gimple stmt = gsi_stmt (gsi);
6798 find_func_aliases (stmt);
6799 find_func_clobbers (stmt);
6803 current_function_decl = old_func_decl;
6804 pop_cfun ();
6806 if (dump_file)
6808 fprintf (dump_file, "\n");
6809 dump_constraints (dump_file, from);
6810 fprintf (dump_file, "\n");
6812 from = VEC_length (constraint_t, constraints);
6815 /* From the constraints compute the points-to sets. */
6816 solve_constraints ();
6818 /* Compute the global points-to sets for ESCAPED.
6819 ??? Note that the computed escape set is not correct
6820 for the whole unit as we fail to consider graph edges to
6821 externally visible functions. */
6822 find_what_var_points_to (get_varinfo (escaped_id), &ipa_escaped_pt);
6824 /* Make sure the ESCAPED solution (which is used as placeholder in
6825 other solutions) does not reference itself. This simplifies
6826 points-to solution queries. */
6827 ipa_escaped_pt.ipa_escaped = 0;
6829 /* Assign the points-to sets to the SSA names in the unit. */
6830 for (node = cgraph_nodes; node; node = node->next)
6832 tree ptr;
6833 struct function *fn;
6834 unsigned i;
6835 varinfo_t fi;
6836 basic_block bb;
6837 struct pt_solution uses, clobbers;
6838 struct cgraph_edge *e;
6840 /* Nodes without a body are not interesting. */
6841 if (!gimple_has_body_p (node->decl)
6842 || node->clone_of)
6843 continue;
6845 fn = DECL_STRUCT_FUNCTION (node->decl);
6847 /* Compute the points-to sets for pointer SSA_NAMEs. */
6848 for (i = 0; VEC_iterate (tree, fn->gimple_df->ssa_names, i, ptr); ++i)
6850 if (ptr
6851 && POINTER_TYPE_P (TREE_TYPE (ptr)))
6852 find_what_p_points_to (ptr);
6855 /* Compute the call-use and call-clobber sets for all direct calls. */
6856 fi = lookup_vi_for_tree (node->decl);
6857 gcc_assert (fi->is_fn_info);
6858 find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers),
6859 &clobbers);
6860 find_what_var_points_to (first_vi_for_offset (fi, fi_uses), &uses);
6861 for (e = node->callers; e; e = e->next_caller)
6863 if (!e->call_stmt)
6864 continue;
6866 *gimple_call_clobber_set (e->call_stmt) = clobbers;
6867 *gimple_call_use_set (e->call_stmt) = uses;
6870 /* Compute the call-use and call-clobber sets for indirect calls
6871 and calls to external functions. */
6872 FOR_EACH_BB_FN (bb, fn)
6874 gimple_stmt_iterator gsi;
6876 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6878 gimple stmt = gsi_stmt (gsi);
6879 struct pt_solution *pt;
6880 varinfo_t vi;
6881 tree decl;
6883 if (!is_gimple_call (stmt))
6884 continue;
6886 /* Handle direct calls to external functions. */
6887 decl = gimple_call_fndecl (stmt);
6888 if (decl
6889 && (!(fi = lookup_vi_for_tree (decl))
6890 || !fi->is_fn_info))
6892 pt = gimple_call_use_set (stmt);
6893 if (gimple_call_flags (stmt) & ECF_CONST)
6894 memset (pt, 0, sizeof (struct pt_solution));
6895 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
6897 find_what_var_points_to (vi, pt);
6898 /* Escaped (and thus nonlocal) variables are always
6899 implicitly used by calls. */
6900 /* ??? ESCAPED can be empty even though NONLOCAL
6901 always escapes. */
6902 pt->nonlocal = 1;
6903 pt->ipa_escaped = 1;
6905 else
6907 /* If there is nothing special about this call then
6908 we have made everything that is used also escape. */
6909 *pt = ipa_escaped_pt;
6910 pt->nonlocal = 1;
6913 pt = gimple_call_clobber_set (stmt);
6914 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
6915 memset (pt, 0, sizeof (struct pt_solution));
6916 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
6918 find_what_var_points_to (vi, pt);
6919 /* Escaped (and thus nonlocal) variables are always
6920 implicitly clobbered by calls. */
6921 /* ??? ESCAPED can be empty even though NONLOCAL
6922 always escapes. */
6923 pt->nonlocal = 1;
6924 pt->ipa_escaped = 1;
6926 else
6928 /* If there is nothing special about this call then
6929 we have made everything that is used also escape. */
6930 *pt = ipa_escaped_pt;
6931 pt->nonlocal = 1;
6935 /* Handle indirect calls. */
6936 if (!decl
6937 && (fi = get_fi_for_callee (stmt)))
6939 /* We need to accumulate all clobbers/uses of all possible
6940 callees. */
6941 fi = get_varinfo (find (fi->id));
6942 /* If we cannot constrain the set of functions we'll end up
6943 calling, we end up using/clobbering everything. */
6944 if (bitmap_bit_p (fi->solution, anything_id)
6945 || bitmap_bit_p (fi->solution, nonlocal_id)
6946 || bitmap_bit_p (fi->solution, escaped_id))
6948 pt_solution_reset (gimple_call_clobber_set (stmt));
6949 pt_solution_reset (gimple_call_use_set (stmt));
6951 else
6953 bitmap_iterator bi;
6954 unsigned i;
6955 struct pt_solution *uses, *clobbers;
6957 uses = gimple_call_use_set (stmt);
6958 clobbers = gimple_call_clobber_set (stmt);
6959 memset (uses, 0, sizeof (struct pt_solution));
6960 memset (clobbers, 0, sizeof (struct pt_solution));
6961 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
6963 struct pt_solution sol;
6965 vi = get_varinfo (i);
6966 if (!vi->is_fn_info)
6968 /* ??? We could be more precise here? */
6969 uses->nonlocal = 1;
6970 uses->ipa_escaped = 1;
6971 clobbers->nonlocal = 1;
6972 clobbers->ipa_escaped = 1;
6973 continue;
6976 if (!uses->anything)
6978 find_what_var_points_to
6979 (first_vi_for_offset (vi, fi_uses), &sol);
6980 pt_solution_ior_into (uses, &sol);
6982 if (!clobbers->anything)
6984 find_what_var_points_to
6985 (first_vi_for_offset (vi, fi_clobbers), &sol);
6986 pt_solution_ior_into (clobbers, &sol);
6994 fn->gimple_df->ipa_pta = true;
6997 delete_points_to_sets ();
6999 in_ipa_mode = 0;
7001 return 0;
7004 struct simple_ipa_opt_pass pass_ipa_pta =
7007 SIMPLE_IPA_PASS,
7008 "pta", /* name */
7009 gate_ipa_pta, /* gate */
7010 ipa_pta_execute, /* execute */
7011 NULL, /* sub */
7012 NULL, /* next */
7013 0, /* static_pass_number */
7014 TV_IPA_PTA, /* tv_id */
7015 0, /* properties_required */
7016 0, /* properties_provided */
7017 0, /* properties_destroyed */
7018 0, /* todo_flags_start */
7019 TODO_update_ssa /* todo_flags_finish */
7024 #include "gt-tree-ssa-structalias.h"