[official-gcc.git] / gcc / tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dberlin@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3 of the License, or
11 (at your option) any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "obstack.h"
28 #include "bitmap.h"
29 #include "flags.h"
30 #include "rtl.h"
31 #include "tm_p.h"
32 #include "hard-reg-set.h"
33 #include "basic-block.h"
34 #include "output.h"
35 #include "tree.h"
36 #include "tree-flow.h"
37 #include "tree-inline.h"
38 #include "varray.h"
39 #include "diagnostic.h"
40 #include "toplev.h"
41 #include "gimple.h"
42 #include "hashtab.h"
43 #include "function.h"
44 #include "cgraph.h"
45 #include "tree-pass.h"
46 #include "timevar.h"
47 #include "alloc-pool.h"
48 #include "splay-tree.h"
49 #include "params.h"
50 #include "cgraph.h"
51 #include "alias.h"
52 #include "pointer-set.h"
54 /* The idea behind this analyzer is to generate set constraints from the
55 program, then solve the resulting constraints in order to generate the
56 points-to sets.
58 Set constraints are a way of modeling program analysis problems that
59 involve sets. They consist of an inclusion constraint language,
60 describing the variables (each variable is a set) and operations that
61 are involved on the variables, and a set of rules that derive facts
62 from these operations. To solve a system of set constraints, you derive
63 all possible facts under the rules, which gives you the correct sets
64 as a consequence.
66 See "Efficient Field-sensitive pointer analysis for C" by "David
67 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
68 http://citeseer.ist.psu.edu/pearce04efficient.html
70 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
71 of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
72 http://citeseer.ist.psu.edu/heintze01ultrafast.html
74 There are three types of real constraint expressions, DEREF,
75 ADDRESSOF, and SCALAR. Each constraint expression consists
76 of a constraint type, a variable, and an offset.
78 SCALAR is a constraint expression type used to represent x, whether
79 it appears on the LHS or the RHS of a statement.
80 DEREF is a constraint expression type used to represent *x, whether
81 it appears on the LHS or the RHS of a statement.
82 ADDRESSOF is a constraint expression used to represent &x, whether
83 it appears on the LHS or the RHS of a statement.
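   For illustration (hypothetical pointers p, q, r and a variable x), the
   assignments below give rise to the following constraint expressions on
   each side, written as <type, variable, offset>:

     p = &x;      LHS <SCALAR, p, 0>     RHS <ADDRESSOF, x, 0>
     q = p;       LHS <SCALAR, q, 0>     RHS <SCALAR, p, 0>
     r = *q;      LHS <SCALAR, r, 0>     RHS <DEREF, q, 0>
     *r = p;      LHS <DEREF, r, 0>      RHS <SCALAR, p, 0>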
85 Each pointer variable in the program is assigned an integer id, and
86 each field of a structure variable is assigned an integer id as well.
88 Structure variables are linked to their list of fields through a "next
89 field" in each variable that points to the next field in offset
90 order.
91 Each variable for a structure field has
93 1. "size", that tells the size in bits of that field.
94 2. "fullsize, that tells the size in bits of the entire structure.
95 3. "offset", that tells the offset in bits from the beginning of the
96 structure to this field.
98 Thus,
99 struct f
101 int a;
102 int b;
103 } foo;
104 int *bar;
106 looks like
108 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
109 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
110 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
113 In order to solve the system of set constraints, the following is
114 done:
116 1. Each constraint variable x has a solution set associated with it,
117 Sol(x).
119 2. Constraints are separated into direct, copy, and complex.
120 Direct constraints are ADDRESSOF constraints that require no extra
121 processing, such as P = &Q
122 Copy constraints are those of the form P = Q.
123 Complex constraints are all the constraints involving dereferences
124 and offsets (including offsetted copies).
126 3. All direct constraints of the form P = &Q are processed, such
127 that Q is added to Sol(P)
129 4. All complex constraints for a given constraint variable are stored in a
130 linked list attached to that variable's node.
132 5. A directed graph is built out of the copy constraints. Each
133 constraint variable is a node in the graph, and an edge from
134 Q to P is added for each copy constraint of the form P = Q
136 6. The graph is then walked, and solution sets are
137 propagated along the copy edges, such that an edge from Q to P
138 causes Sol(P) <- Sol(P) union Sol(Q).
140 7. As we visit each node, all complex constraints associated with
141 that node are processed by adding appropriate copy edges to the graph, or the
142 appropriate variables to the solution set.
144 8. The process of walking the graph is iterated until no solution
145 sets change.
147 Prior to walking the graph in steps 6 and 7, we perform static
148 cycle elimination on the constraint graph, as well
149 as off-line variable substitution.
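   As a small worked example (hypothetical variables p, q, r and a):
   given the constraints

     p = &a     q = p     r = *q

   step 3 seeds Sol(p) = {a}, step 5 adds the copy edge p -> q, and the
   complex constraint r = *q is attached to q (step 4).  Propagation in
   step 6 gives Sol(q) = {a}; processing the complex constraint in step 7
   then adds the copy edge a -> r, so Sol(r) picks up Sol(a).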
151 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
152 on and turned into anything), but isn't. You can just see what offset
153 inside the pointed-to struct it's going to access.
155 TODO: Constant bounded arrays can be handled as if they were structs of the
156 same number of elements.
158 TODO: Modeling heap and incoming pointers becomes much better if we
159 add fields to them as we discover them, which we could do.
161 TODO: We could handle unions, but to be honest, it's probably not
162 worth the pain or slowdown. */
164 /* IPA-PTA optimizations possible.
166 When the indirect function called is ANYTHING we can add disambiguation
167 based on the function signatures (or simply the parameter count which
168 is the varinfo size). We also do not need to consider functions that
169 do not have their address taken.
171 The is_global_var bit which marks escape points is overly conservative
172 in IPA mode. Split it to is_escape_point and is_global_var - only
173 externally visible globals are escape points in IPA mode. This is
174 also needed to fix the pt_solution_includes_global predicate
175 (and thus ptr_deref_may_alias_global_p).
177 The way we introduce DECL_PT_UID to avoid fixing up all points-to
178 sets in the translation unit when we copy a DECL during inlining
179 pessimizes precision. The advantage is that the DECL_PT_UID keeps
180 compile-time and memory usage overhead low - the points-to sets
181 do not grow or get unshared as they would during a fixup phase.
182 An alternative solution is to delay IPA PTA until after all
183 inlining transformations have been applied.
185 The way we propagate clobber/use information isn't optimized.
186 It should use a new complex constraint that properly filters
187 out local variables of the callee (though that would make
188 the sets invalid after inlining). OTOH we might as well
189 admit defeat to WHOPR and simply do all the clobber/use analysis
190 and propagation after PTA finished but before we threw away
191 points-to information for memory variables. WHOPR and PTA
192 do not play along well anyway - the whole constraint solving
193 would need to be done in WPA phase and it will be very interesting
194 to apply the results to local SSA names during LTRANS phase.
196 We probably should compute a per-function unit-ESCAPE solution
197 propagating it simply like the clobber / uses solutions. The
198 solution can go alongside the non-IPA escaped solution and be
199 used to query which vars escape the unit through a function.
201 We never put function decls in points-to sets so we do not
202 keep the set of called functions for indirect calls.
204 And probably more. */
206 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
207 htab_t heapvar_for_stmt;
209 static bool use_field_sensitive = true;
210 static int in_ipa_mode = 0;
212 /* Used for predecessor bitmaps. */
213 static bitmap_obstack predbitmap_obstack;
215 /* Used for points-to sets. */
216 static bitmap_obstack pta_obstack;
218 /* Used for oldsolution members of variables. */
219 static bitmap_obstack oldpta_obstack;
221 /* Used for per-solver-iteration bitmaps. */
222 static bitmap_obstack iteration_obstack;
224 static unsigned int create_variable_info_for (tree, const char *);
225 typedef struct constraint_graph *constraint_graph_t;
226 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
228 struct constraint;
229 typedef struct constraint *constraint_t;
231 DEF_VEC_P(constraint_t);
232 DEF_VEC_ALLOC_P(constraint_t,heap);
234 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
235 if (a) \
236 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
238 static struct constraint_stats
240 unsigned int total_vars;
241 unsigned int nonpointer_vars;
242 unsigned int unified_vars_static;
243 unsigned int unified_vars_dynamic;
244 unsigned int iterations;
245 unsigned int num_edges;
246 unsigned int num_implicit_edges;
247 unsigned int points_to_sets_created;
248 } stats;
250 struct variable_info
252 /* ID of this variable */
253 unsigned int id;
255 /* True if this is a variable created by the constraint analysis, such as
256 heap variables and constraints we had to break up. */
257 unsigned int is_artificial_var : 1;
259 /* True if this is a special variable whose solution set should not be
260 changed. */
261 unsigned int is_special_var : 1;
263 /* True for variables whose size is not known or variable. */
264 unsigned int is_unknown_size_var : 1;
266 /* True for (sub-)fields that represent a whole variable. */
267 unsigned int is_full_var : 1;
269 /* True if this is a heap variable. */
270 unsigned int is_heap_var : 1;
272 /* True if this is a variable tracking a restrict pointer source. */
273 unsigned int is_restrict_var : 1;
275 /* True if this field may contain pointers. */
276 unsigned int may_have_pointers : 1;
278 /* True if this field has only restrict qualified pointers. */
279 unsigned int only_restrict_pointers : 1;
281 /* True if this represents a global variable. */
282 unsigned int is_global_var : 1;
285 /* True if this represents an IPA function info. */
285 unsigned int is_fn_info : 1;
287 /* A link to the variable for the next field in this structure. */
288 struct variable_info *next;
290 /* Offset of this variable, in bits, from the base variable */
291 unsigned HOST_WIDE_INT offset;
293 /* Size of the variable, in bits. */
294 unsigned HOST_WIDE_INT size;
296 /* Full size of the base variable, in bits. */
297 unsigned HOST_WIDE_INT fullsize;
299 /* Name of this variable */
300 const char *name;
302 /* Tree that this variable is associated with. */
303 tree decl;
305 /* Points-to set for this variable. */
306 bitmap solution;
308 /* Old points-to set for this variable. */
309 bitmap oldsolution;
311 typedef struct variable_info *varinfo_t;
313 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
314 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
315 unsigned HOST_WIDE_INT);
316 static varinfo_t lookup_vi_for_tree (tree);
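/* An illustrative sketch (unused, and simplified relative to the real
   first_vi_for_offset declared just above): walk the "next" chain of
   sub-field variables starting at START and return the field whose bit
   range covers OFFSET, or NULL if OFFSET lies outside the variable.  */

static varinfo_t ATTRIBUTE_UNUSED
sketch_field_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
{
  varinfo_t v;
  for (v = start; v != NULL; v = v->next)
    if (offset >= v->offset && offset < v->offset + v->size)
      return v;
  return NULL;
}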
318 /* Pool of variable info structures. */
319 static alloc_pool variable_info_pool;
321 DEF_VEC_P(varinfo_t);
323 DEF_VEC_ALLOC_P(varinfo_t, heap);
325 /* Table of variable info structures for constraint variables.
326 Indexed directly by variable info id. */
327 static VEC(varinfo_t,heap) *varmap;
329 /* Return the varmap element N */
331 static inline varinfo_t
332 get_varinfo (unsigned int n)
334 return VEC_index (varinfo_t, varmap, n);
337 /* Static IDs for the special variables. */
338 enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
339 escaped_id = 3, nonlocal_id = 4,
340 storedanything_id = 5, integer_id = 6 };
342 struct GTY(()) heapvar_map {
343 struct tree_map map;
344 unsigned HOST_WIDE_INT offset;
347 static int
348 heapvar_map_eq (const void *p1, const void *p2)
350 const struct heapvar_map *h1 = (const struct heapvar_map *)p1;
351 const struct heapvar_map *h2 = (const struct heapvar_map *)p2;
352 return (h1->map.base.from == h2->map.base.from
353 && h1->offset == h2->offset);
356 static unsigned int
357 heapvar_map_hash (struct heapvar_map *h)
359 return iterative_hash_host_wide_int (h->offset,
360 htab_hash_pointer (h->map.base.from));
363 /* Lookup a heap var for FROM at OFFSET, and return it if we find one. */
365 static tree
366 heapvar_lookup (tree from, unsigned HOST_WIDE_INT offset)
368 struct heapvar_map *h, in;
369 in.map.base.from = from;
370 in.offset = offset;
371 h = (struct heapvar_map *) htab_find_with_hash (heapvar_for_stmt, &in,
372 heapvar_map_hash (&in));
373 if (h)
374 return h->map.to;
375 return NULL_TREE;
378 /* Insert a mapping (FROM, OFFSET) -> TO in the heap var for statement
379 hashtable. */
381 static void
382 heapvar_insert (tree from, unsigned HOST_WIDE_INT offset, tree to)
384 struct heapvar_map *h;
385 void **loc;
387 h = GGC_NEW (struct heapvar_map);
388 h->map.base.from = from;
389 h->offset = offset;
390 h->map.hash = heapvar_map_hash (h);
391 h->map.to = to;
392 loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->map.hash, INSERT);
393 gcc_assert (*loc == NULL);
394 *(struct heapvar_map **) loc = h;
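/* An illustrative sketch only of the lookup-or-create discipline the two
   helpers above support; make_heap_decl is a hypothetical stand-in for
   however the caller builds the artificial heap variable.  */

static tree ATTRIBUTE_UNUSED
sketch_heapvar_get (tree from, unsigned HOST_WIDE_INT offset)
{
  tree to = heapvar_lookup (from, offset);
  if (to == NULL_TREE)
    {
      to = make_heap_decl ();	/* Hypothetical helper, not in this file.  */
      heapvar_insert (from, offset, to);
    }
  return to;
}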
397 /* Return a new variable info structure for a variable named NAME,
398 associated with tree T (NULL_TREE for artificial variables). Append it
399 to the vector of variable info structures. */
401 static varinfo_t
402 new_var_info (tree t, const char *name)
404 unsigned index = VEC_length (varinfo_t, varmap);
405 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
407 ret->id = index;
408 ret->name = name;
409 ret->decl = t;
410 /* Vars without decl are artificial and do not have sub-variables. */
411 ret->is_artificial_var = (t == NULL_TREE);
412 ret->is_special_var = false;
413 ret->is_unknown_size_var = false;
414 ret->is_full_var = (t == NULL_TREE);
415 ret->is_heap_var = false;
416 ret->is_restrict_var = false;
417 ret->may_have_pointers = true;
418 ret->only_restrict_pointers = false;
419 ret->is_global_var = (t == NULL_TREE);
420 ret->is_fn_info = false;
421 if (t && DECL_P (t))
422 ret->is_global_var = is_global_var (t);
423 ret->solution = BITMAP_ALLOC (&pta_obstack);
424 ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
425 ret->next = NULL;
427 stats.total_vars++;
429 VEC_safe_push (varinfo_t, heap, varmap, ret);
431 return ret;
435 /* A map mapping call statements to per-stmt variables for uses
436 and clobbers specific to the call. */
437 struct pointer_map_t *call_stmt_vars;
439 /* Lookup or create the variable for the call statement CALL. */
441 static varinfo_t
442 get_call_vi (gimple call)
444 void **slot_p;
445 varinfo_t vi, vi2;
447 slot_p = pointer_map_insert (call_stmt_vars, call);
448 if (*slot_p)
449 return (varinfo_t) *slot_p;
451 vi = new_var_info (NULL_TREE, "CALLUSED");
452 vi->offset = 0;
453 vi->size = 1;
454 vi->fullsize = 2;
455 vi->is_full_var = true;
457 vi->next = vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
458 vi2->offset = 1;
459 vi2->size = 1;
460 vi2->fullsize = 2;
461 vi2->is_full_var = true;
463 *slot_p = (void *) vi;
464 return vi;
467 /* Lookup the variable for the call statement CALL representing
468 the uses. Returns NULL if there is nothing special about this call. */
470 static varinfo_t
471 lookup_call_use_vi (gimple call)
473 void **slot_p;
475 slot_p = pointer_map_contains (call_stmt_vars, call);
476 if (slot_p)
477 return (varinfo_t) *slot_p;
479 return NULL;
482 /* Lookup the variable for the call statement CALL representing
483 the clobbers. Returns NULL if there is nothing special about this call. */
485 static varinfo_t
486 lookup_call_clobber_vi (gimple call)
488 varinfo_t uses = lookup_call_use_vi (call);
489 if (!uses)
490 return NULL;
492 return uses->next;
495 /* Lookup or create the variable for the call statement CALL representing
496 the uses. */
498 static varinfo_t
499 get_call_use_vi (gimple call)
501 return get_call_vi (call);
504 /* Lookup or create the variable for the call statement CALL representing
505 the clobbers. */
507 static varinfo_t ATTRIBUTE_UNUSED
508 get_call_clobber_vi (gimple call)
510 return get_call_vi (call)->next;
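/* Illustrative only: the per-call variables always come as a pair created
   by get_call_vi, with the clobber varinfo chained directly after the use
   varinfo, so the two accessors above stay consistent.  */

static void ATTRIBUTE_UNUSED
sketch_check_call_vi_pair (gimple call)
{
  varinfo_t uses = get_call_use_vi (call);
  varinfo_t clobbers = get_call_clobber_vi (call);
  gcc_assert (clobbers == uses->next);
  gcc_assert (uses->offset == 0 && clobbers->offset == 1);
}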
514 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
516 /* An expression that appears in a constraint. */
518 struct constraint_expr
520 /* Constraint type. */
521 constraint_expr_type type;
523 /* Variable we are referring to in the constraint. */
524 unsigned int var;
526 /* Offset, in bits, of this constraint from the beginning of
527 variables it ends up referring to.
529 IOW, in a deref constraint, we would deref, get the result set,
530 then add OFFSET to each member. */
531 HOST_WIDE_INT offset;
534 /* Use 0x8000... as special unknown offset. */
535 #define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1))
537 typedef struct constraint_expr ce_s;
538 DEF_VEC_O(ce_s);
539 DEF_VEC_ALLOC_O(ce_s, heap);
540 static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool);
541 static void get_constraint_for (tree, VEC(ce_s, heap) **);
542 static void do_deref (VEC (ce_s, heap) **);
544 /* Our set constraints are made up of two constraint expressions, one
545 LHS, and one RHS.
547 As described in the introduction, our set constraints each represent an
548 operation between set valued variables.
550 struct constraint
552 struct constraint_expr lhs;
553 struct constraint_expr rhs;
556 /* List of constraints that we use to build the constraint graph from. */
558 static VEC(constraint_t,heap) *constraints;
559 static alloc_pool constraint_pool;
561 /* The constraint graph is represented as an array of bitmaps
562 containing successor nodes. */
564 struct constraint_graph
566 /* Size of this graph, which may be different than the number of
567 nodes in the variable map. */
568 unsigned int size;
570 /* Explicit successors of each node. */
571 bitmap *succs;
573 /* Implicit predecessors of each node (Used for variable
574 substitution). */
575 bitmap *implicit_preds;
577 /* Explicit predecessors of each node (Used for variable substitution). */
578 bitmap *preds;
580 /* Indirect cycle representatives, or -1 if the node has no indirect
581 cycles. */
582 int *indirect_cycles;
584 /* Representative node for a node. rep[a] == a unless the node has
585 been unified. */
586 unsigned int *rep;
588 /* Equivalence class representative for a label. This is used for
589 variable substitution. */
590 int *eq_rep;
592 /* Pointer equivalence label for a node. All nodes with the same
593 pointer equivalence label can be unified together at some point
594 (either during constraint optimization or after the constraint
595 graph is built). */
596 unsigned int *pe;
598 /* Pointer equivalence representative for a label. This is used to
599 handle nodes that are pointer equivalent but not location
600 equivalent. We can unite these once the addressof constraints
601 are transformed into initial points-to sets. */
602 int *pe_rep;
604 /* Pointer equivalence label for each node, used during variable
605 substitution. */
606 unsigned int *pointer_label;
608 /* Location equivalence label for each node, used during location
609 equivalence finding. */
610 unsigned int *loc_label;
612 /* Pointed-by set for each node, used during location equivalence
613 finding. This is pointed-by rather than pointed-to, because it
614 is constructed using the predecessor graph. */
615 bitmap *pointed_by;
617 /* Points to sets for pointer equivalence. This is *not* the actual
618 points-to sets for nodes. */
619 bitmap *points_to;
621 /* Bitmap of nodes where the bit is set if the node is a direct
622 node. Used for variable substitution. */
623 sbitmap direct_nodes;
625 /* Bitmap of nodes where the bit is set if the node is address
626 taken. Used for variable substitution. */
627 bitmap address_taken;
629 /* Vector of complex constraints for each graph node. Complex
630 constraints are those involving dereferences or offsets that are
631 not 0. */
632 VEC(constraint_t,heap) **complex;
635 static constraint_graph_t graph;
637 /* During variable substitution and the offline version of indirect
638 cycle finding, we create nodes to represent dereferences and
639 address taken constraints. These represent where these start and
640 end. */
641 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
642 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
644 /* Return the representative node for NODE, if NODE has been unioned
645 with another NODE.
646 This function performs path compression along the way to finding
647 the representative. */
649 static unsigned int
650 find (unsigned int node)
652 gcc_assert (node < graph->size);
653 if (graph->rep[node] != node)
654 return graph->rep[node] = find (graph->rep[node]);
655 return node;
658 /* Union the TO and FROM nodes into the TO node.
659 Note that at some point in the future, we may want to do
660 union-by-rank, in which case we are going to have to return the
661 node we unified to. */
663 static bool
664 unite (unsigned int to, unsigned int from)
666 gcc_assert (to < graph->size && from < graph->size);
667 if (to != from && graph->rep[from] != to)
669 graph->rep[from] = to;
670 return true;
672 return false;
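/* Illustrative only: once FROM has been united into TO, both nodes answer
   with the same representative, and the path compression in find keeps
   later lookups cheap.  */

static void ATTRIBUTE_UNUSED
sketch_unite_example (unsigned int to, unsigned int from)
{
  if (unite (to, from))
    gcc_assert (find (to) == find (from));
}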
675 /* Create a new constraint consisting of LHS and RHS expressions. */
677 static constraint_t
678 new_constraint (const struct constraint_expr lhs,
679 const struct constraint_expr rhs)
681 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
682 ret->lhs = lhs;
683 ret->rhs = rhs;
684 return ret;
687 /* Print out constraint C to FILE. */
689 static void
690 dump_constraint (FILE *file, constraint_t c)
692 if (c->lhs.type == ADDRESSOF)
693 fprintf (file, "&");
694 else if (c->lhs.type == DEREF)
695 fprintf (file, "*");
696 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
697 if (c->lhs.offset == UNKNOWN_OFFSET)
698 fprintf (file, " + UNKNOWN");
699 else if (c->lhs.offset != 0)
700 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
701 fprintf (file, " = ");
702 if (c->rhs.type == ADDRESSOF)
703 fprintf (file, "&");
704 else if (c->rhs.type == DEREF)
705 fprintf (file, "*");
706 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
707 if (c->rhs.offset == UNKNOWN_OFFSET)
708 fprintf (file, " + UNKNOWN");
709 else if (c->rhs.offset != 0)
710 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
711 fprintf (file, "\n");
715 void debug_constraint (constraint_t);
716 void debug_constraints (void);
717 void debug_constraint_graph (void);
718 void debug_solution_for_var (unsigned int);
719 void debug_sa_points_to_info (void);
721 /* Print out constraint C to stderr. */
723 void
724 debug_constraint (constraint_t c)
726 dump_constraint (stderr, c);
729 /* Print out all constraints to FILE */
731 static void
732 dump_constraints (FILE *file, int from)
734 int i;
735 constraint_t c;
736 for (i = from; VEC_iterate (constraint_t, constraints, i, c); i++)
737 dump_constraint (file, c);
740 /* Print out all constraints to stderr. */
742 void
743 debug_constraints (void)
745 dump_constraints (stderr, 0);
748 /* Print out to FILE the edge in the constraint graph that is created by
749 constraint C. The edge may have a label, depending on the type of
750 constraint that it represents. If complex1, e.g. a = *b, then the label
751 is "=*"; if complex2, e.g. *a = b, then the label is "*="; if
752 complex with an offset, e.g. a = b + 8, then the label is "+".
753 Otherwise the edge has no label. */
755 static void
756 dump_constraint_edge (FILE *file, constraint_t c)
758 if (c->rhs.type != ADDRESSOF)
760 const char *src = get_varinfo (c->rhs.var)->name;
761 const char *dst = get_varinfo (c->lhs.var)->name;
762 fprintf (file, " \"%s\" -> \"%s\" ", src, dst);
763 /* Due to preprocessing of constraints, instructions like *a = *b are
764 illegal; thus, we do not have to handle such cases. */
765 if (c->lhs.type == DEREF)
766 fprintf (file, " [ label=\"*=\" ] ;\n");
767 else if (c->rhs.type == DEREF)
768 fprintf (file, " [ label=\"=*\" ] ;\n");
769 else
771 /* We must check the case where the constraint is an offset.
772 In this case, it is treated as a complex constraint. */
773 if (c->rhs.offset != c->lhs.offset)
774 fprintf (file, " [ label=\"+\" ] ;\n");
775 else
776 fprintf (file, " ;\n");
781 /* Print the constraint graph in dot format. */
783 static void
784 dump_constraint_graph (FILE *file)
786 unsigned int i=0, size;
787 constraint_t c;
789 /* Only print the graph if it has already been initialized: */
790 if (!graph)
791 return;
793 /* Print the constraints used to produce the constraint graph. The
794 constraints will be printed as comments in the dot file: */
795 fprintf (file, "\n\n/* Constraints used in the constraint graph:\n");
796 dump_constraints (file, 0);
797 fprintf (file, "*/\n");
799 /* Prints the header of the dot file: */
800 fprintf (file, "\n\n// The constraint graph in dot format:\n");
801 fprintf (file, "strict digraph {\n");
802 fprintf (file, " node [\n shape = box\n ]\n");
803 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
804 fprintf (file, "\n // List of nodes in the constraint graph:\n");
806 /* The next lines print the nodes in the graph. In order to get the
807 number of nodes in the graph, we must choose the minimum of the length of
808 the vector VEC (varinfo_t, varmap) and graph->size. If the graph has not
809 yet been initialized, then graph->size == 0, otherwise we must only
810 read nodes that have an entry in VEC (varinfo_t, varmap). */
811 size = VEC_length (varinfo_t, varmap);
812 size = size < graph->size ? size : graph->size;
813 for (i = 0; i < size; i++)
815 const char *name = get_varinfo (graph->rep[i])->name;
816 fprintf (file, " \"%s\" ;\n", name);
819 /* Go over the list of constraints printing the edges in the constraint
820 graph. */
821 fprintf (file, "\n // The constraint edges:\n");
822 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
823 if (c)
824 dump_constraint_edge (file, c);
826 /* Prints the tail of the dot file. For now, only the closing bracket. */
827 fprintf (file, "}\n\n\n");
830 /* Print out the constraint graph to stderr. */
832 void
833 debug_constraint_graph (void)
835 dump_constraint_graph (stderr);
838 /* SOLVER FUNCTIONS
840 The solver is a simple worklist solver, that works on the following
841 algorithm:
843 sbitmap changed_nodes = all zeroes;
844 changed_count = 0;
845 For each node that is not already collapsed:
846 changed_count++;
847 set bit in changed nodes
849 while (changed_count > 0)
851 compute topological ordering for constraint graph
853 find and collapse cycles in the constraint graph (updating
854 changed if necessary)
856 for each node (n) in the graph in topological order:
857 changed_count--;
859 Process each complex constraint associated with the node,
860 updating changed if necessary.
862 For each outgoing edge from n, propagate the solution from n to
863 the destination of the edge, updating changed as necessary.
865 } */
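/* A self-contained toy version of the propagation step described above
   (toy types, not the real solver): points-to sets are plain bit masks,
   the graph is a small adjacency matrix, and solutions are propagated
   along copy edges until a fixed point is reached.  */

#define TOY_NNODES 4

static void ATTRIBUTE_UNUSED
toy_propagate (unsigned char succ[TOY_NNODES][TOY_NNODES],
	       unsigned int sol[TOY_NNODES])
{
  bool changed = true;
  while (changed)
    {
      unsigned int n, m;
      changed = false;
      for (n = 0; n < TOY_NNODES; n++)
	for (m = 0; m < TOY_NNODES; m++)
	  if (succ[n][m] && (sol[m] | sol[n]) != sol[m])
	    {
	      /* Sol(m) <- Sol(m) union Sol(n) along the copy edge n -> m.  */
	      sol[m] |= sol[n];
	      changed = true;
	    }
    }
}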
867 /* Return true if two constraint expressions A and B are equal. */
869 static bool
870 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
872 return a.type == b.type && a.var == b.var && a.offset == b.offset;
875 /* Return true if constraint expression A is less than constraint expression
876 B. This is just arbitrary, but consistent, in order to give them an
877 ordering. */
879 static bool
880 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
882 if (a.type == b.type)
884 if (a.var == b.var)
885 return a.offset < b.offset;
886 else
887 return a.var < b.var;
889 else
890 return a.type < b.type;
893 /* Return true if constraint A is less than constraint B. This is just
894 arbitrary, but consistent, in order to give them an ordering. */
896 static bool
897 constraint_less (const constraint_t a, const constraint_t b)
899 if (constraint_expr_less (a->lhs, b->lhs))
900 return true;
901 else if (constraint_expr_less (b->lhs, a->lhs))
902 return false;
903 else
904 return constraint_expr_less (a->rhs, b->rhs);
907 /* Return true if two constraints A and B are equal. */
909 static bool
910 constraint_equal (struct constraint a, struct constraint b)
912 return constraint_expr_equal (a.lhs, b.lhs)
913 && constraint_expr_equal (a.rhs, b.rhs);
917 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
919 static constraint_t
920 constraint_vec_find (VEC(constraint_t,heap) *vec,
921 struct constraint lookfor)
923 unsigned int place;
924 constraint_t found;
926 if (vec == NULL)
927 return NULL;
929 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
930 if (place >= VEC_length (constraint_t, vec))
931 return NULL;
932 found = VEC_index (constraint_t, vec, place);
933 if (!constraint_equal (*found, lookfor))
934 return NULL;
935 return found;
938 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
940 static void
941 constraint_set_union (VEC(constraint_t,heap) **to,
942 VEC(constraint_t,heap) **from)
944 int i;
945 constraint_t c;
947 for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
949 if (constraint_vec_find (*to, *c) == NULL)
951 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
952 constraint_less);
953 VEC_safe_insert (constraint_t, heap, *to, place, c);
958 /* Expands the solution in SET to all sub-fields of variables included.
959 Union the expanded result into RESULT. */
961 static void
962 solution_set_expand (bitmap result, bitmap set)
964 bitmap_iterator bi;
965 bitmap vars = NULL;
966 unsigned j;
968 /* In a first pass record all variables we need to add all
969 sub-fields of. This avoids quadratic behavior.
970 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
972 varinfo_t v = get_varinfo (j);
973 if (v->is_artificial_var
974 || v->is_full_var)
975 continue;
976 v = lookup_vi_for_tree (v->decl);
977 if (vars == NULL)
978 vars = BITMAP_ALLOC (NULL);
979 bitmap_set_bit (vars, v->id);
982 /* In the second pass now do the addition to the solution and
983 to speed up solving add it to the delta as well. */
984 if (vars != NULL)
986 EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
988 varinfo_t v = get_varinfo (j);
989 for (; v != NULL; v = v->next)
990 bitmap_set_bit (result, v->id);
992 BITMAP_FREE (vars);
996 /* Take a solution set SET, add OFFSET to each member of the set, and
997 overwrite SET with the result when done. */
999 static void
1000 solution_set_add (bitmap set, HOST_WIDE_INT offset)
1002 bitmap result = BITMAP_ALLOC (&iteration_obstack);
1003 unsigned int i;
1004 bitmap_iterator bi;
1006 /* If the offset is unknown we have to expand the solution to
1007 all subfields. */
1008 if (offset == UNKNOWN_OFFSET)
1010 solution_set_expand (set, set);
1011 return;
1014 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
1016 varinfo_t vi = get_varinfo (i);
1018 /* If this is a variable with just one field just set its bit
1019 in the result. */
1020 if (vi->is_artificial_var
1021 || vi->is_unknown_size_var
1022 || vi->is_full_var)
1023 bitmap_set_bit (result, i);
1024 else
1026 unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset;
1028 /* If the offset makes the pointer point to before the
1029 variable use offset zero for the field lookup. */
1030 if (offset < 0
1031 && fieldoffset > vi->offset)
1032 fieldoffset = 0;
1034 if (offset != 0)
1035 vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
1037 bitmap_set_bit (result, vi->id);
1038 /* If the result is not exactly at fieldoffset include the next
1039 field as well. See get_constraint_for_ptr_offset for more
1040 rationale. */
1041 if (vi->offset != fieldoffset
1042 && vi->next != NULL)
1043 bitmap_set_bit (result, vi->next->id);
1047 bitmap_copy (set, result);
1048 BITMAP_FREE (result);
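/* As an illustration of solution_set_add above: with the struct f layout
   from the introduction (foo.a at offset 0 and foo.b at offset 32, each
   32 bits wide), adding an offset of 32 to a set containing foo.a yields
   a set containing foo.b, while an offset that would point before the
   variable falls back to a lookup at offset zero, i.e. the first field.  */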
1051 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
1052 process. */
1054 static bool
1055 set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc)
1057 if (inc == 0)
1058 return bitmap_ior_into (to, from);
1059 else
1061 bitmap tmp;
1062 bool res;
1064 tmp = BITMAP_ALLOC (&iteration_obstack);
1065 bitmap_copy (tmp, from);
1066 solution_set_add (tmp, inc);
1067 res = bitmap_ior_into (to, tmp);
1068 BITMAP_FREE (tmp);
1069 return res;
1073 /* Insert constraint C into the list of complex constraints for graph
1074 node VAR. */
1076 static void
1077 insert_into_complex (constraint_graph_t graph,
1078 unsigned int var, constraint_t c)
1080 VEC (constraint_t, heap) *complex = graph->complex[var];
1081 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
1082 constraint_less);
1084 /* Only insert constraints that do not already exist. */
1085 if (place >= VEC_length (constraint_t, complex)
1086 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
1087 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
1091 /* Condense two variable nodes into a single variable node, by moving
1092 all associated info from FROM to TO. */
1094 static void
1095 merge_node_constraints (constraint_graph_t graph, unsigned int to,
1096 unsigned int from)
1098 unsigned int i;
1099 constraint_t c;
1101 gcc_assert (find (from) == to);
1103 /* Move all complex constraints from src node into to node */
1104 for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
1106 /* In complex constraints for node src, we may have
1107 a = *src, *src = a, or an offsetted constraint, which are
1108 always added to the rhs node's constraints. */
1110 if (c->rhs.type == DEREF)
1111 c->rhs.var = to;
1112 else if (c->lhs.type == DEREF)
1113 c->lhs.var = to;
1114 else
1115 c->rhs.var = to;
1117 constraint_set_union (&graph->complex[to], &graph->complex[from]);
1118 VEC_free (constraint_t, heap, graph->complex[from]);
1119 graph->complex[from] = NULL;
1123 /* Remove edges involving NODE from GRAPH. */
1125 static void
1126 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1128 if (graph->succs[node])
1129 BITMAP_FREE (graph->succs[node]);
1132 /* Merge GRAPH nodes FROM and TO into node TO. */
1134 static void
1135 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1136 unsigned int from)
1138 if (graph->indirect_cycles[from] != -1)
1140 /* If we have indirect cycles with the from node, and we have
1141 none on the to node, the to node has indirect cycles from the
1142 from node now that they are unified.
1143 If indirect cycles exist on both, unify the nodes that they
1144 are in a cycle with, since we know they are in a cycle with
1145 each other. */
1146 if (graph->indirect_cycles[to] == -1)
1147 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1150 /* Merge all the successor edges. */
1151 if (graph->succs[from])
1153 if (!graph->succs[to])
1154 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1155 bitmap_ior_into (graph->succs[to],
1156 graph->succs[from]);
1159 clear_edges_for_node (graph, from);
1163 /* Add an implicit graph edge to GRAPH, recording FROM as an implicit
1164 predecessor of TO, if it doesn't exist in the graph already. */
1166 static void
1167 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1168 unsigned int from)
1170 if (to == from)
1171 return;
1173 if (!graph->implicit_preds[to])
1174 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1176 if (bitmap_set_bit (graph->implicit_preds[to], from))
1177 stats.num_implicit_edges++;
1180 /* Add a predecessor graph edge to GRAPH, recording FROM as a
1181 predecessor of TO, if it doesn't exist in the graph already. */
1184 static void
1185 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1186 unsigned int from)
1188 if (!graph->preds[to])
1189 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1190 bitmap_set_bit (graph->preds[to], from);
1193 /* Add a graph edge to GRAPH, going from FROM to TO if
1194 it doesn't exist in the graph already.
1195 Return false if the edge already existed, true otherwise. */
1197 static bool
1198 add_graph_edge (constraint_graph_t graph, unsigned int to,
1199 unsigned int from)
1201 if (to == from)
1203 return false;
1205 else
1207 bool r = false;
1209 if (!graph->succs[from])
1210 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1211 if (bitmap_set_bit (graph->succs[from], to))
1213 r = true;
1214 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1215 stats.num_edges++;
1217 return r;
1222 /* Return true if the graph edge DEST -> SRC exists in GRAPH. */
1224 static bool
1225 valid_graph_edge (constraint_graph_t graph, unsigned int src,
1226 unsigned int dest)
1228 return (graph->succs[dest]
1229 && bitmap_bit_p (graph->succs[dest], src));
1232 /* Initialize the constraint graph structure to contain SIZE nodes. */
1234 static void
1235 init_graph (unsigned int size)
1237 unsigned int j;
1239 graph = XCNEW (struct constraint_graph);
1240 graph->size = size;
1241 graph->succs = XCNEWVEC (bitmap, graph->size);
1242 graph->indirect_cycles = XNEWVEC (int, graph->size);
1243 graph->rep = XNEWVEC (unsigned int, graph->size);
1244 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
1245 graph->pe = XCNEWVEC (unsigned int, graph->size);
1246 graph->pe_rep = XNEWVEC (int, graph->size);
1248 for (j = 0; j < graph->size; j++)
1250 graph->rep[j] = j;
1251 graph->pe_rep[j] = -1;
1252 graph->indirect_cycles[j] = -1;
1256 /* Build the constraint graph, adding only predecessor edges right now. */
1258 static void
1259 build_pred_graph (void)
1261 int i;
1262 constraint_t c;
1263 unsigned int j;
1265 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1266 graph->preds = XCNEWVEC (bitmap, graph->size);
1267 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1268 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1269 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1270 graph->points_to = XCNEWVEC (bitmap, graph->size);
1271 graph->eq_rep = XNEWVEC (int, graph->size);
1272 graph->direct_nodes = sbitmap_alloc (graph->size);
1273 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1274 sbitmap_zero (graph->direct_nodes);
1276 for (j = 0; j < FIRST_REF_NODE; j++)
1278 if (!get_varinfo (j)->is_special_var)
1279 SET_BIT (graph->direct_nodes, j);
1282 for (j = 0; j < graph->size; j++)
1283 graph->eq_rep[j] = -1;
1285 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1286 graph->indirect_cycles[j] = -1;
1288 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1290 struct constraint_expr lhs = c->lhs;
1291 struct constraint_expr rhs = c->rhs;
1292 unsigned int lhsvar = lhs.var;
1293 unsigned int rhsvar = rhs.var;
1295 if (lhs.type == DEREF)
1297 /* *x = y. */
1298 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1299 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1301 else if (rhs.type == DEREF)
1303 /* x = *y */
1304 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1305 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1306 else
1307 RESET_BIT (graph->direct_nodes, lhsvar);
1309 else if (rhs.type == ADDRESSOF)
1311 varinfo_t v;
1313 /* x = &y */
1314 if (graph->points_to[lhsvar] == NULL)
1315 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1316 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1318 if (graph->pointed_by[rhsvar] == NULL)
1319 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1320 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1322 /* Implicitly, *x = y */
1323 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1325 /* All related variables are no longer direct nodes. */
1326 RESET_BIT (graph->direct_nodes, rhsvar);
1327 v = get_varinfo (rhsvar);
1328 if (!v->is_full_var)
1330 v = lookup_vi_for_tree (v->decl);
1333 RESET_BIT (graph->direct_nodes, v->id);
1334 v = v->next;
1336 while (v != NULL);
1338 bitmap_set_bit (graph->address_taken, rhsvar);
1340 else if (lhsvar > anything_id
1341 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1343 /* x = y */
1344 add_pred_graph_edge (graph, lhsvar, rhsvar);
1345 /* Implicitly, *x = *y */
1346 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1347 FIRST_REF_NODE + rhsvar);
1349 else if (lhs.offset != 0 || rhs.offset != 0)
1351 if (rhs.offset != 0)
1352 RESET_BIT (graph->direct_nodes, lhs.var);
1353 else if (lhs.offset != 0)
1354 RESET_BIT (graph->direct_nodes, rhs.var);
1359 /* Build the constraint graph, adding successor edges. */
1361 static void
1362 build_succ_graph (void)
1364 unsigned i, t;
1365 constraint_t c;
1367 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
1369 struct constraint_expr lhs;
1370 struct constraint_expr rhs;
1371 unsigned int lhsvar;
1372 unsigned int rhsvar;
1374 if (!c)
1375 continue;
1377 lhs = c->lhs;
1378 rhs = c->rhs;
1379 lhsvar = find (lhs.var);
1380 rhsvar = find (rhs.var);
1382 if (lhs.type == DEREF)
1384 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1385 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1387 else if (rhs.type == DEREF)
1389 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1390 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1392 else if (rhs.type == ADDRESSOF)
1394 /* x = &y */
1395 gcc_assert (find (rhs.var) == rhs.var);
1396 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1398 else if (lhsvar > anything_id
1399 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1401 add_graph_edge (graph, lhsvar, rhsvar);
1405 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1406 receive pointers. */
1407 t = find (storedanything_id);
1408 for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1410 if (!TEST_BIT (graph->direct_nodes, i)
1411 && get_varinfo (i)->may_have_pointers)
1412 add_graph_edge (graph, find (i), t);
1415 /* Everything stored to ANYTHING also potentially escapes. */
1416 add_graph_edge (graph, find (escaped_id), t);
1420 /* Changed variables on the last iteration. */
1421 static unsigned int changed_count;
1422 static sbitmap changed;
1424 /* Strongly Connected Component visitation info. */
1426 struct scc_info
1428 sbitmap visited;
1429 sbitmap deleted;
1430 unsigned int *dfs;
1431 unsigned int *node_mapping;
1432 int current_index;
1433 VEC(unsigned,heap) *scc_stack;
1437 /* Recursive routine to find strongly connected components in GRAPH.
1438 SI is the SCC info to store the information in, and N is the id of current
1439 graph node we are processing.
1441 This is Tarjan's strongly connected component finding algorithm, as
1442 modified by Nuutila to keep only non-root nodes on the stack.
1443 The algorithm can be found in "On finding the strongly connected
1444 components in a directed graph" by Esko Nuutila and Eljas
1445 Soisalon-Soininen, in Information Processing Letters volume 49,
1446 number 1, pages 9-14. */
1448 static void
1449 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1451 unsigned int i;
1452 bitmap_iterator bi;
1453 unsigned int my_dfs;
1455 SET_BIT (si->visited, n);
1456 si->dfs[n] = si->current_index ++;
1457 my_dfs = si->dfs[n];
1459 /* Visit all the successors. */
1460 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1462 unsigned int w;
1464 if (i > LAST_REF_NODE)
1465 break;
1467 w = find (i);
1468 if (TEST_BIT (si->deleted, w))
1469 continue;
1471 if (!TEST_BIT (si->visited, w))
1472 scc_visit (graph, si, w);
1474 unsigned int t = find (w);
1475 unsigned int nnode = find (n);
1476 gcc_assert (nnode == n);
1478 if (si->dfs[t] < si->dfs[nnode])
1479 si->dfs[n] = si->dfs[t];
1483 /* See if any components have been identified. */
1484 if (si->dfs[n] == my_dfs)
1486 if (VEC_length (unsigned, si->scc_stack) > 0
1487 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1489 bitmap scc = BITMAP_ALLOC (NULL);
1490 unsigned int lowest_node;
1491 bitmap_iterator bi;
1493 bitmap_set_bit (scc, n);
1495 while (VEC_length (unsigned, si->scc_stack) != 0
1496 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1498 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1500 bitmap_set_bit (scc, w);
1503 lowest_node = bitmap_first_set_bit (scc);
1504 gcc_assert (lowest_node < FIRST_REF_NODE);
1506 /* Collapse the SCC nodes into a single node, and mark the
1507 indirect cycles. */
1508 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1510 if (i < FIRST_REF_NODE)
1512 if (unite (lowest_node, i))
1513 unify_nodes (graph, lowest_node, i, false);
1515 else
1517 unite (lowest_node, i);
1518 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1522 SET_BIT (si->deleted, n);
1524 else
1525 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1528 /* Unify node FROM into node TO, updating the changed count if
1529 necessary when UPDATE_CHANGED is true. */
1531 static void
1532 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1533 bool update_changed)
1536 gcc_assert (to != from && find (to) == to);
1537 if (dump_file && (dump_flags & TDF_DETAILS))
1538 fprintf (dump_file, "Unifying %s to %s\n",
1539 get_varinfo (from)->name,
1540 get_varinfo (to)->name);
1542 if (update_changed)
1543 stats.unified_vars_dynamic++;
1544 else
1545 stats.unified_vars_static++;
1547 merge_graph_nodes (graph, to, from);
1548 merge_node_constraints (graph, to, from);
1550 /* Mark TO as changed if FROM was changed. If TO was already marked
1551 as changed, decrease the changed count. */
1553 if (update_changed && TEST_BIT (changed, from))
1555 RESET_BIT (changed, from);
1556 if (!TEST_BIT (changed, to))
1557 SET_BIT (changed, to);
1558 else
1560 gcc_assert (changed_count > 0);
1561 changed_count--;
1564 if (get_varinfo (from)->solution)
1566 /* If the solution changes because of the merging, we need to mark
1567 the variable as changed. */
1568 if (bitmap_ior_into (get_varinfo (to)->solution,
1569 get_varinfo (from)->solution))
1571 if (update_changed && !TEST_BIT (changed, to))
1573 SET_BIT (changed, to);
1574 changed_count++;
1578 BITMAP_FREE (get_varinfo (from)->solution);
1579 BITMAP_FREE (get_varinfo (from)->oldsolution);
1581 if (stats.iterations > 0)
1583 BITMAP_FREE (get_varinfo (to)->oldsolution);
1584 get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
1587 if (valid_graph_edge (graph, to, to))
1589 if (graph->succs[to])
1590 bitmap_clear_bit (graph->succs[to], to);
1594 /* Information needed to compute the topological ordering of a graph. */
1596 struct topo_info
1598 /* sbitmap of visited nodes. */
1599 sbitmap visited;
1600 /* Array that stores the topological order of the graph, *in
1601 reverse*. */
1602 VEC(unsigned,heap) *topo_order;
1606 /* Initialize and return a topological info structure. */
1608 static struct topo_info *
1609 init_topo_info (void)
1611 size_t size = graph->size;
1612 struct topo_info *ti = XNEW (struct topo_info);
1613 ti->visited = sbitmap_alloc (size);
1614 sbitmap_zero (ti->visited);
1615 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1616 return ti;
1620 /* Free the topological sort info pointed to by TI. */
1622 static void
1623 free_topo_info (struct topo_info *ti)
1625 sbitmap_free (ti->visited);
1626 VEC_free (unsigned, heap, ti->topo_order);
1627 free (ti);
1630 /* Visit the graph in topological order, and store the order in the
1631 topo_info structure. */
1633 static void
1634 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1635 unsigned int n)
1637 bitmap_iterator bi;
1638 unsigned int j;
1640 SET_BIT (ti->visited, n);
1642 if (graph->succs[n])
1643 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1645 if (!TEST_BIT (ti->visited, j))
1646 topo_visit (graph, ti, j);
1649 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1652 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1653 starting solution for y. */
1655 static void
1656 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1657 bitmap delta)
1659 unsigned int lhs = c->lhs.var;
1660 bool flag = false;
1661 bitmap sol = get_varinfo (lhs)->solution;
1662 unsigned int j;
1663 bitmap_iterator bi;
1664 HOST_WIDE_INT roffset = c->rhs.offset;
1666 /* Our IL does not allow this. */
1667 gcc_assert (c->lhs.offset == 0);
1669 /* If the solution of Y contains ANYTHING it is good enough to transfer
1670 this to the LHS. */
1671 if (bitmap_bit_p (delta, anything_id))
1673 flag |= bitmap_set_bit (sol, anything_id);
1674 goto done;
1677 /* If we do not know at which offset the rhs is dereferenced compute
1678 the reachability set of DELTA, conservatively assuming it is
1679 dereferenced at all valid offsets. */
1680 if (roffset == UNKNOWN_OFFSET)
1682 solution_set_expand (delta, delta);
1683 /* No further offset processing is necessary. */
1684 roffset = 0;
1687 /* For each variable j in delta (Sol(y)), add
1688 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1689 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1691 varinfo_t v = get_varinfo (j);
1692 HOST_WIDE_INT fieldoffset = v->offset + roffset;
1693 unsigned int t;
1695 if (v->is_full_var)
1696 fieldoffset = v->offset;
1697 else if (roffset != 0)
1698 v = first_vi_for_offset (v, fieldoffset);
1699 /* If the access is outside of the variable we can ignore it. */
1700 if (!v)
1701 continue;
1705 t = find (v->id);
1707 /* Adding edges from the special vars is pointless.
1708 They don't have sets that can change. */
1709 if (get_varinfo (t)->is_special_var)
1710 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1711 /* Merging the solution from ESCAPED needlessly increases
1712 the set. Use ESCAPED as representative instead. */
1713 else if (v->id == escaped_id)
1714 flag |= bitmap_set_bit (sol, escaped_id);
1715 else if (v->may_have_pointers
1716 && add_graph_edge (graph, lhs, t))
1717 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1719 /* If the variable is not exactly at the requested offset
1720 we have to include the next one. */
1721 if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1722 || v->next == NULL)
1723 break;
1725 v = v->next;
1726 fieldoffset = v->offset;
1728 while (1);
1731 done:
1732 /* If the LHS solution changed, mark the var as changed. */
1733 if (flag)
1735 get_varinfo (lhs)->solution = sol;
1736 if (!TEST_BIT (changed, lhs))
1738 SET_BIT (changed, lhs);
1739 changed_count++;
1744 /* Process a constraint C that represents *(x + off) = y using DELTA
1745 as the starting solution for x. */
1747 static void
1748 do_ds_constraint (constraint_t c, bitmap delta)
1750 unsigned int rhs = c->rhs.var;
1751 bitmap sol = get_varinfo (rhs)->solution;
1752 unsigned int j;
1753 bitmap_iterator bi;
1754 HOST_WIDE_INT loff = c->lhs.offset;
1755 bool escaped_p = false;
1757 /* Our IL does not allow this. */
1758 gcc_assert (c->rhs.offset == 0);
1760 /* If the solution of y contains ANYTHING simply use the ANYTHING
1761 solution. This avoids needlessly increasing the points-to sets. */
1762 if (bitmap_bit_p (sol, anything_id))
1763 sol = get_varinfo (find (anything_id))->solution;
1765 /* If the solution for x contains ANYTHING we have to merge the
1766 solution of y into all pointer variables which we do via
1767 STOREDANYTHING. */
1768 if (bitmap_bit_p (delta, anything_id))
1770 unsigned t = find (storedanything_id);
1771 if (add_graph_edge (graph, t, rhs))
1773 if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1775 if (!TEST_BIT (changed, t))
1777 SET_BIT (changed, t);
1778 changed_count++;
1782 return;
1785 /* If we do not know at which offset the lhs is dereferenced compute
1786 the reachability set of DELTA, conservatively assuming it is
1787 dereferenced at all valid offsets. */
1788 if (loff == UNKNOWN_OFFSET)
1790 solution_set_expand (delta, delta);
1791 loff = 0;
1794 /* For each member j of delta (Sol(x)), add an edge from y to j and
1795 union Sol(y) into Sol(j) */
1796 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1798 varinfo_t v = get_varinfo (j);
1799 unsigned int t;
1800 HOST_WIDE_INT fieldoffset = v->offset + loff;
1802 if (v->is_full_var)
1803 fieldoffset = v->offset;
1804 else if (loff != 0)
1805 v = first_vi_for_offset (v, fieldoffset);
1806 /* If the access is outside of the variable we can ignore it. */
1807 if (!v)
1808 continue;
1812 if (v->may_have_pointers)
1814 /* If v is a global variable then this is an escape point. */
1815 if (v->is_global_var
1816 && !escaped_p)
1818 t = find (escaped_id);
1819 if (add_graph_edge (graph, t, rhs)
1820 && bitmap_ior_into (get_varinfo (t)->solution, sol)
1821 && !TEST_BIT (changed, t))
1823 SET_BIT (changed, t);
1824 changed_count++;
1826 /* Enough to let rhs escape once. */
1827 escaped_p = true;
1830 if (v->is_special_var)
1831 break;
1833 t = find (v->id);
1834 if (add_graph_edge (graph, t, rhs)
1835 && bitmap_ior_into (get_varinfo (t)->solution, sol)
1836 && !TEST_BIT (changed, t))
1838 SET_BIT (changed, t);
1839 changed_count++;
1843 /* If the variable is not exactly at the requested offset
1844 we have to include the next one. */
1845 if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1846 || v->next == NULL)
1847 break;
1849 v = v->next;
1850 fieldoffset = v->offset;
1852 while (1);
1856 /* Handle a non-simple (simple meaning requires no iteration)
1857 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1859 static void
1860 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1862 if (c->lhs.type == DEREF)
1864 if (c->rhs.type == ADDRESSOF)
1866 gcc_unreachable();
1868 else
1870 /* *x = y */
1871 do_ds_constraint (c, delta);
1874 else if (c->rhs.type == DEREF)
1876 /* x = *y */
1877 if (!(get_varinfo (c->lhs.var)->is_special_var))
1878 do_sd_constraint (graph, c, delta);
1880 else
1882 bitmap tmp;
1883 bitmap solution;
1884 bool flag = false;
1886 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1887 solution = get_varinfo (c->rhs.var)->solution;
1888 tmp = get_varinfo (c->lhs.var)->solution;
1890 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1892 if (flag)
1894 get_varinfo (c->lhs.var)->solution = tmp;
1895 if (!TEST_BIT (changed, c->lhs.var))
1897 SET_BIT (changed, c->lhs.var);
1898 changed_count++;
1904 /* Initialize and return a new SCC info structure. */
1906 static struct scc_info *
1907 init_scc_info (size_t size)
1909 struct scc_info *si = XNEW (struct scc_info);
1910 size_t i;
1912 si->current_index = 0;
1913 si->visited = sbitmap_alloc (size);
1914 sbitmap_zero (si->visited);
1915 si->deleted = sbitmap_alloc (size);
1916 sbitmap_zero (si->deleted);
1917 si->node_mapping = XNEWVEC (unsigned int, size);
1918 si->dfs = XCNEWVEC (unsigned int, size);
1920 for (i = 0; i < size; i++)
1921 si->node_mapping[i] = i;
1923 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1924 return si;
1927 /* Free an SCC info structure pointed to by SI */
1929 static void
1930 free_scc_info (struct scc_info *si)
1932 sbitmap_free (si->visited);
1933 sbitmap_free (si->deleted);
1934 free (si->node_mapping);
1935 free (si->dfs);
1936 VEC_free (unsigned, heap, si->scc_stack);
1937 free (si);
1941 /* Find indirect cycles in GRAPH that occur, using strongly connected
1942 components, and note them in the indirect cycles map.
1944 This technique comes from Ben Hardekopf and Calvin Lin,
1945 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1946 Lines of Code", submitted to PLDI 2007. */
1948 static void
1949 find_indirect_cycles (constraint_graph_t graph)
1951 unsigned int i;
1952 unsigned int size = graph->size;
1953 struct scc_info *si = init_scc_info (size);
1955 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1956 if (!TEST_BIT (si->visited, i) && find (i) == i)
1957 scc_visit (graph, si, i);
1959 free_scc_info (si);
1962 /* Compute a topological ordering for GRAPH, and store the result in the
1963 topo_info structure TI. */
1965 static void
1966 compute_topo_order (constraint_graph_t graph,
1967 struct topo_info *ti)
1969 unsigned int i;
1970 unsigned int size = graph->size;
1972 for (i = 0; i != size; ++i)
1973 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1974 topo_visit (graph, ti, i);
1977 /* Structure used for hash value numbering of pointer equivalence
1978 classes. */
1980 typedef struct equiv_class_label
1982 hashval_t hashcode;
1983 unsigned int equivalence_class;
1984 bitmap labels;
1985 } *equiv_class_label_t;
1986 typedef const struct equiv_class_label *const_equiv_class_label_t;
1988 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1989 classes. */
1990 static htab_t pointer_equiv_class_table;
1992 /* A hashtable for mapping a bitmap of labels->location equivalence
1993 classes. */
1994 static htab_t location_equiv_class_table;
1996 /* Hash function for an equiv_class_label_t. */
1998 static hashval_t
1999 equiv_class_label_hash (const void *p)
2001 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
2002 return ecl->hashcode;
2005 /* Equality function for two equiv_class_label_t's. */
2007 static int
2008 equiv_class_label_eq (const void *p1, const void *p2)
2010 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
2011 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
2012 return (eql1->hashcode == eql2->hashcode
2013 && bitmap_equal_p (eql1->labels, eql2->labels));
2016 /* Look up an equivalence class in TABLE by the bitmap of LABELS it
2017 contains. */
2019 static unsigned int
2020 equiv_class_lookup (htab_t table, bitmap labels)
2022 void **slot;
2023 struct equiv_class_label ecl;
2025 ecl.labels = labels;
2026 ecl.hashcode = bitmap_hash (labels);
2028 slot = htab_find_slot_with_hash (table, &ecl,
2029 ecl.hashcode, NO_INSERT);
2030 if (!slot)
2031 return 0;
2032 else
2033 return ((equiv_class_label_t) *slot)->equivalence_class;
2037 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
2038 to TABLE. */
2040 static void
2041 equiv_class_add (htab_t table, unsigned int equivalence_class,
2042 bitmap labels)
2044 void **slot;
2045 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
2047 ecl->labels = labels;
2048 ecl->equivalence_class = equivalence_class;
2049 ecl->hashcode = bitmap_hash (labels);
2051 slot = htab_find_slot_with_hash (table, ecl,
2052 ecl->hashcode, INSERT);
2053 gcc_assert (!*slot);
2054 *slot = (void *) ecl;
2057 /* Perform offline variable substitution.
2059 This is a worst case quadratic time way of identifying variables
2060 that must have equivalent points-to sets, including those caused by
2061 static cycles and single-entry subgraphs in the constraint graph.
2063 The technique is described in "Exploiting Pointer and Location
2064 Equivalence to Optimize Pointer Analysis", in the 14th International
2065 Static Analysis Symposium (SAS), August 2007. It is known as the
2066 "HU" algorithm, and is equivalent to value numbering the collapsed
2067 constraint graph including evaluating unions.
2069 The general method of finding equivalence classes is as follows:
2070 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
2071 Initialize all non-REF nodes to be direct nodes.
2072 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
2073 variable}
2074 For each constraint containing the dereference, we also do the same
2075 thing.
2077 We then compute SCC's in the graph and unify nodes in the same SCC,
2078 including pts sets.
2080 For each non-collapsed node x:
2081 Visit all unvisited explicit incoming edges.
2082 Ignoring all non-pointers, set pts(x) = Union of pts(y) for all y
2083 where y->x.
2084 Lookup the equivalence class for pts(x).
2085 If we found one, equivalence_class(x) = found class.
2086 Otherwise, equivalence_class(x) = new class, and new_class is
2087 added to the lookup table.
2089 All direct nodes with the same equivalence class can be replaced
2090 with a single representative node.
2091 All unlabeled nodes (label == 0) are not pointers and all edges
2092 involving them can be eliminated.
2093 We perform these optimizations during rewrite_constraints
2095 In addition to pointer equivalence class finding, we also perform
2096 location equivalence class finding. This is the set of variables
2097 that always appear together in points-to sets. We use this to
2098 compress the size of the points-to sets. */
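/* Small worked example (illustrative, not from the original source):
   given the constraints

     a = &x;   b = a;   c = a;   d = b;

   the points-to label introduced for &x flows unchanged along the copy
   edges, so pts(a) = pts(b) = pts(c) = pts(d); all four nodes receive
   the same pointer equivalence class and can be collapsed to a single
   representative, while a variable that never receives any label stays
   at label 0 and is dropped as a non-pointer.  */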
2100 /* Current maximum pointer equivalence class id. */
2101 static int pointer_equiv_class;
2103 /* Current maximum location equivalence class id. */
2104 static int location_equiv_class;
2106 /* Recursive routine to find strongly connected components in GRAPH,
2107 and label its nodes with DFS numbers. */
2109 static void
2110 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2112 unsigned int i;
2113 bitmap_iterator bi;
2114 unsigned int my_dfs;
2116 gcc_assert (si->node_mapping[n] == n);
2117 SET_BIT (si->visited, n);
2118 si->dfs[n] = si->current_index ++;
2119 my_dfs = si->dfs[n];
2121 /* Visit all the successors. */
2122 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2124 unsigned int w = si->node_mapping[i];
2126 if (TEST_BIT (si->deleted, w))
2127 continue;
2129 if (!TEST_BIT (si->visited, w))
2130 condense_visit (graph, si, w);
2132 unsigned int t = si->node_mapping[w];
2133 unsigned int nnode = si->node_mapping[n];
2134 gcc_assert (nnode == n);
2136 if (si->dfs[t] < si->dfs[nnode])
2137 si->dfs[n] = si->dfs[t];
2141 /* Visit all the implicit predecessors. */
2142 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2144 unsigned int w = si->node_mapping[i];
2146 if (TEST_BIT (si->deleted, w))
2147 continue;
2149 if (!TEST_BIT (si->visited, w))
2150 condense_visit (graph, si, w);
2152 unsigned int t = si->node_mapping[w];
2153 unsigned int nnode = si->node_mapping[n];
2154 gcc_assert (nnode == n);
2156 if (si->dfs[t] < si->dfs[nnode])
2157 si->dfs[n] = si->dfs[t];
2161 /* See if any components have been identified. */
2162 if (si->dfs[n] == my_dfs)
2164 while (VEC_length (unsigned, si->scc_stack) != 0
2165 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
2167 unsigned int w = VEC_pop (unsigned, si->scc_stack);
2168 si->node_mapping[w] = n;
2170 if (!TEST_BIT (graph->direct_nodes, w))
2171 RESET_BIT (graph->direct_nodes, n);
2173 /* Unify our nodes. */
2174 if (graph->preds[w])
2176 if (!graph->preds[n])
2177 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2178 bitmap_ior_into (graph->preds[n], graph->preds[w]);
2180 if (graph->implicit_preds[w])
2182 if (!graph->implicit_preds[n])
2183 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2184 bitmap_ior_into (graph->implicit_preds[n],
2185 graph->implicit_preds[w]);
2187 if (graph->points_to[w])
2189 if (!graph->points_to[n])
2190 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2191 bitmap_ior_into (graph->points_to[n],
2192 graph->points_to[w]);
2195 SET_BIT (si->deleted, n);
2197 else
2198 VEC_safe_push (unsigned, heap, si->scc_stack, n);
2201 /* Label pointer equivalences. */
2203 static void
2204 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2206 unsigned int i;
2207 bitmap_iterator bi;
2208 SET_BIT (si->visited, n);
2210 if (!graph->points_to[n])
2211 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2213 /* Label and union our incoming edges' points-to sets. */
2214 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2216 unsigned int w = si->node_mapping[i];
2217 if (!TEST_BIT (si->visited, w))
2218 label_visit (graph, si, w);
2220 /* Skip unused edges */
2221 if (w == n || graph->pointer_label[w] == 0)
2222 continue;
2224 if (graph->points_to[w])
2225 bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
2227 /* Indirect nodes get fresh variables. */
2228 if (!TEST_BIT (graph->direct_nodes, n))
2229 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2231 if (!bitmap_empty_p (graph->points_to[n]))
2233 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
2234 graph->points_to[n]);
2235 if (!label)
2237 label = pointer_equiv_class++;
2238 equiv_class_add (pointer_equiv_class_table,
2239 label, graph->points_to[n]);
2241 graph->pointer_label[n] = label;
2245 /* Perform offline variable substitution, discovering equivalence
2246 classes, and eliminating non-pointer variables. */
2248 static struct scc_info *
2249 perform_var_substitution (constraint_graph_t graph)
2251 unsigned int i;
2252 unsigned int size = graph->size;
2253 struct scc_info *si = init_scc_info (size);
2255 bitmap_obstack_initialize (&iteration_obstack);
2256 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
2257 equiv_class_label_eq, free);
2258 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
2259 equiv_class_label_eq, free);
2260 pointer_equiv_class = 1;
2261 location_equiv_class = 1;
2263 /* Condense the nodes, which means to find SCC's, count incoming
2264 predecessors, and unite nodes in SCC's. */
2265 for (i = 0; i < FIRST_REF_NODE; i++)
2266 if (!TEST_BIT (si->visited, si->node_mapping[i]))
2267 condense_visit (graph, si, si->node_mapping[i]);
2269 sbitmap_zero (si->visited);
2270 /* Actually label the nodes for pointer equivalences. */
2271 for (i = 0; i < FIRST_REF_NODE; i++)
2272 if (!TEST_BIT (si->visited, si->node_mapping[i]))
2273 label_visit (graph, si, si->node_mapping[i]);
2275 /* Calculate location equivalence labels. */
2276 for (i = 0; i < FIRST_REF_NODE; i++)
2278 bitmap pointed_by;
2279 bitmap_iterator bi;
2280 unsigned int j;
2281 unsigned int label;
2283 if (!graph->pointed_by[i])
2284 continue;
2285 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2287 /* Translate the pointed-by mapping for pointer equivalence
2288 labels. */
2289 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2291 bitmap_set_bit (pointed_by,
2292 graph->pointer_label[si->node_mapping[j]]);
2294 /* The original pointed_by is now dead. */
2295 BITMAP_FREE (graph->pointed_by[i]);
2297 /* Look up the location equivalence label if one exists, or make
2298 one otherwise. */
2299 label = equiv_class_lookup (location_equiv_class_table,
2300 pointed_by);
2301 if (label == 0)
2303 label = location_equiv_class++;
2304 equiv_class_add (location_equiv_class_table,
2305 label, pointed_by);
2307 else
2309 if (dump_file && (dump_flags & TDF_DETAILS))
2310 fprintf (dump_file, "Found location equivalence for node %s\n",
2311 get_varinfo (i)->name);
2312 BITMAP_FREE (pointed_by);
2314 graph->loc_label[i] = label;
2318 if (dump_file && (dump_flags & TDF_DETAILS))
2319 for (i = 0; i < FIRST_REF_NODE; i++)
2321 bool direct_node = TEST_BIT (graph->direct_nodes, i);
2322 fprintf (dump_file,
2323 "Equivalence classes for %s node id %d:%s are pointer: %d"
2324 ", location:%d\n",
2325 direct_node ? "Direct node" : "Indirect node", i,
2326 get_varinfo (i)->name,
2327 graph->pointer_label[si->node_mapping[i]],
2328 graph->loc_label[si->node_mapping[i]]);
2331 /* Quickly eliminate our non-pointer variables. */
2333 for (i = 0; i < FIRST_REF_NODE; i++)
2335 unsigned int node = si->node_mapping[i];
2337 if (graph->pointer_label[node] == 0)
2339 if (dump_file && (dump_flags & TDF_DETAILS))
2340 fprintf (dump_file,
2341 "%s is a non-pointer variable, eliminating edges.\n",
2342 get_varinfo (node)->name);
2343 stats.nonpointer_vars++;
2344 clear_edges_for_node (graph, node);
2348 return si;
2351 /* Free information that was only necessary for variable
2352 substitution. */
2354 static void
2355 free_var_substitution_info (struct scc_info *si)
2357 free_scc_info (si);
2358 free (graph->pointer_label);
2359 free (graph->loc_label);
2360 free (graph->pointed_by);
2361 free (graph->points_to);
2362 free (graph->eq_rep);
2363 sbitmap_free (graph->direct_nodes);
2364 htab_delete (pointer_equiv_class_table);
2365 htab_delete (location_equiv_class_table);
2366 bitmap_obstack_release (&iteration_obstack);
2369 /* Return an existing node that is equivalent to NODE, which has
2370 equivalence class LABEL, if one exists. Return NODE otherwise. */
2372 static unsigned int
2373 find_equivalent_node (constraint_graph_t graph,
2374 unsigned int node, unsigned int label)
2376 /* If the address version of this variable is unused, we can
2377 substitute it for anything else with the same label.
2378 Otherwise, we know the pointers are equivalent, but not the
2379 locations, and we can unite them later. */
2381 if (!bitmap_bit_p (graph->address_taken, node))
2383 gcc_assert (label < graph->size);
2385 if (graph->eq_rep[label] != -1)
2387 /* Unify the two variables since we know they are equivalent. */
2388 if (unite (graph->eq_rep[label], node))
2389 unify_nodes (graph, graph->eq_rep[label], node, false);
2390 return graph->eq_rep[label];
2392 else
2394 graph->eq_rep[label] = node;
2395 graph->pe_rep[label] = node;
2398 else
2400 gcc_assert (label < graph->size);
2401 graph->pe[node] = label;
2402 if (graph->pe_rep[label] == -1)
2403 graph->pe_rep[label] = node;
2406 return node;
2409 /* Unite pointer equivalent but not location equivalent nodes in
2410 GRAPH. This may only be performed once variable substitution is
2411 finished. */
2413 static void
2414 unite_pointer_equivalences (constraint_graph_t graph)
2416 unsigned int i;
2418 /* Go through the pointer equivalences and unite them to their
2419 representative, if they aren't already. */
2420 for (i = 0; i < FIRST_REF_NODE; i++)
2422 unsigned int label = graph->pe[i];
2423 if (label)
2425 int label_rep = graph->pe_rep[label];
2427 if (label_rep == -1)
2428 continue;
2430 label_rep = find (label_rep);
2431 if (label_rep >= 0 && unite (label_rep, find (i)))
2432 unify_nodes (graph, label_rep, i, false);
2437 /* Move complex constraints to the GRAPH nodes they belong to. */
2439 static void
2440 move_complex_constraints (constraint_graph_t graph)
2442 int i;
2443 constraint_t c;
2445 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2447 if (c)
2449 struct constraint_expr lhs = c->lhs;
2450 struct constraint_expr rhs = c->rhs;
2452 if (lhs.type == DEREF)
2454 insert_into_complex (graph, lhs.var, c);
2456 else if (rhs.type == DEREF)
2458 if (!(get_varinfo (lhs.var)->is_special_var))
2459 insert_into_complex (graph, rhs.var, c);
2461 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2462 && (lhs.offset != 0 || rhs.offset != 0))
2464 insert_into_complex (graph, rhs.var, c);
2471 /* Optimize and rewrite complex constraints while performing
2472 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2473 result of perform_var_substitution. */
2475 static void
2476 rewrite_constraints (constraint_graph_t graph,
2477 struct scc_info *si)
2479 int i;
2480 unsigned int j;
2481 constraint_t c;
2483 for (j = 0; j < graph->size; j++)
2484 gcc_assert (find (j) == j);
2486 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2488 struct constraint_expr lhs = c->lhs;
2489 struct constraint_expr rhs = c->rhs;
2490 unsigned int lhsvar = find (lhs.var);
2491 unsigned int rhsvar = find (rhs.var);
2492 unsigned int lhsnode, rhsnode;
2493 unsigned int lhslabel, rhslabel;
2495 lhsnode = si->node_mapping[lhsvar];
2496 rhsnode = si->node_mapping[rhsvar];
2497 lhslabel = graph->pointer_label[lhsnode];
2498 rhslabel = graph->pointer_label[rhsnode];
2500 /* See if it is really a non-pointer variable, and if so, ignore
2501 the constraint. */
2502 if (lhslabel == 0)
2504 if (dump_file && (dump_flags & TDF_DETAILS))
2507 fprintf (dump_file, "%s is a non-pointer variable, "
2508 "ignoring constraint:",
2509 get_varinfo (lhs.var)->name);
2510 dump_constraint (dump_file, c);
2512 VEC_replace (constraint_t, constraints, i, NULL);
2513 continue;
2516 if (rhslabel == 0)
2518 if (dump_file && (dump_flags & TDF_DETAILS))
2521 fprintf (dump_file, "%s is a non-pointer variable, "
2522 "ignoring constraint:",
2523 get_varinfo (rhs.var)->name);
2524 dump_constraint (dump_file, c);
2526 VEC_replace (constraint_t, constraints, i, NULL);
2527 continue;
2530 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2531 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2532 c->lhs.var = lhsvar;
2533 c->rhs.var = rhsvar;
2538 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2539 part of an SCC, false otherwise. */
2541 static bool
2542 eliminate_indirect_cycles (unsigned int node)
2544 if (graph->indirect_cycles[node] != -1
2545 && !bitmap_empty_p (get_varinfo (node)->solution))
2547 unsigned int i;
2548 VEC(unsigned,heap) *queue = NULL;
2549 int queuepos;
2550 unsigned int to = find (graph->indirect_cycles[node]);
2551 bitmap_iterator bi;
2553 /* We can't touch the solution set and call unify_nodes
2554 at the same time, because unify_nodes is going to do
2555 bitmap unions into it. */
2557 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2559 if (find (i) == i && i != to)
2561 if (unite (to, i))
2562 VEC_safe_push (unsigned, heap, queue, i);
2566 for (queuepos = 0;
2567 VEC_iterate (unsigned, queue, queuepos, i);
2568 queuepos++)
2570 unify_nodes (graph, to, i, true);
2572 VEC_free (unsigned, heap, queue);
2573 return true;
2575 return false;
2578 /* Solve the constraint graph GRAPH using our worklist solver.
2579 This is based on the PW* family of solvers from the "Efficient Field
2580 Sensitive Pointer Analysis for C" paper.
2581 It works by iterating over all the graph nodes, processing the complex
2582 constraints and propagating the copy constraints, until everything stops
2583 changing. This corresponds to steps 6-8 in the solving list given above. */
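/* Illustrative sketch (not part of the original source): with copy
   edges a -> b -> c and an initial solution Sol(a) = { x }, the first
   pass over the topological order propagates x into Sol(b) and then
   into Sol(c), after which nothing is marked changed, changed_count
   drops to zero and the solver below terminates.  Complex constraints
   attached to a node are re-run whenever that node's solution grows.  */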
2585 static void
2586 solve_graph (constraint_graph_t graph)
2588 unsigned int size = graph->size;
2589 unsigned int i;
2590 bitmap pts;
2592 changed_count = 0;
2593 changed = sbitmap_alloc (size);
2594 sbitmap_zero (changed);
2596 /* Mark all initial non-collapsed nodes as changed. */
2597 for (i = 0; i < size; i++)
2599 varinfo_t ivi = get_varinfo (i);
2600 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2601 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2602 || VEC_length (constraint_t, graph->complex[i]) > 0))
2604 SET_BIT (changed, i);
2605 changed_count++;
2609 /* Allocate a bitmap to be used to store the changed bits. */
2610 pts = BITMAP_ALLOC (&pta_obstack);
2612 while (changed_count > 0)
2614 unsigned int i;
2615 struct topo_info *ti = init_topo_info ();
2616 stats.iterations++;
2618 bitmap_obstack_initialize (&iteration_obstack);
2620 compute_topo_order (graph, ti);
2622 while (VEC_length (unsigned, ti->topo_order) != 0)
2625 i = VEC_pop (unsigned, ti->topo_order);
2627 /* If this variable is not a representative, skip it. */
2628 if (find (i) != i)
2629 continue;
2631 /* In certain indirect cycle cases, we may merge this
2632 variable to another. */
2633 if (eliminate_indirect_cycles (i) && find (i) != i)
2634 continue;
2636 /* If the node has changed, we need to process the
2637 complex constraints and outgoing edges again. */
2638 if (TEST_BIT (changed, i))
2640 unsigned int j;
2641 constraint_t c;
2642 bitmap solution;
2643 VEC(constraint_t,heap) *complex = graph->complex[i];
2644 bool solution_empty;
2646 RESET_BIT (changed, i);
2647 changed_count--;
2649 /* Compute the changed set of solution bits. */
2650 bitmap_and_compl (pts, get_varinfo (i)->solution,
2651 get_varinfo (i)->oldsolution);
2653 if (bitmap_empty_p (pts))
2654 continue;
2656 bitmap_ior_into (get_varinfo (i)->oldsolution, pts);
2658 solution = get_varinfo (i)->solution;
2659 solution_empty = bitmap_empty_p (solution);
2661 /* Process the complex constraints */
2662 for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
2664 /* XXX: This is going to unsort the constraints in
2665 some cases, which will occasionally add duplicate
2666 constraints during unification. This does not
2667 affect correctness. */
2668 c->lhs.var = find (c->lhs.var);
2669 c->rhs.var = find (c->rhs.var);
2671 /* The only complex constraint that can change our
2672 solution to non-empty, given an empty solution,
2673 is a constraint where the lhs side is receiving
2674 some set from elsewhere. */
2675 if (!solution_empty || c->lhs.type != DEREF)
2676 do_complex_constraint (graph, c, pts);
2679 solution_empty = bitmap_empty_p (solution);
2681 if (!solution_empty)
2683 bitmap_iterator bi;
2684 unsigned eff_escaped_id = find (escaped_id);
2686 /* Propagate solution to all successors. */
2687 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2688 0, j, bi)
2690 bitmap tmp;
2691 bool flag;
2693 unsigned int to = find (j);
2694 tmp = get_varinfo (to)->solution;
2695 flag = false;
2697 /* Don't try to propagate to ourselves. */
2698 if (to == i)
2699 continue;
2701 /* If we propagate from ESCAPED use ESCAPED as
2702 placeholder. */
2703 if (i == eff_escaped_id)
2704 flag = bitmap_set_bit (tmp, escaped_id);
2705 else
2706 flag = set_union_with_increment (tmp, pts, 0);
2708 if (flag)
2710 get_varinfo (to)->solution = tmp;
2711 if (!TEST_BIT (changed, to))
2713 SET_BIT (changed, to);
2714 changed_count++;
2721 free_topo_info (ti);
2722 bitmap_obstack_release (&iteration_obstack);
2725 BITMAP_FREE (pts);
2726 sbitmap_free (changed);
2727 bitmap_obstack_release (&oldpta_obstack);
2730 /* Map from trees to variable infos. */
2731 static struct pointer_map_t *vi_for_tree;
2734 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2736 static void
2737 insert_vi_for_tree (tree t, varinfo_t vi)
2739 void **slot = pointer_map_insert (vi_for_tree, t);
2740 gcc_assert (vi);
2741 gcc_assert (*slot == NULL);
2742 *slot = vi;
2745 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2746 exist in the map, return NULL, otherwise, return the varinfo we found. */
2748 static varinfo_t
2749 lookup_vi_for_tree (tree t)
2751 void **slot = pointer_map_contains (vi_for_tree, t);
2752 if (slot == NULL)
2753 return NULL;
2755 return (varinfo_t) *slot;
2758 /* Return a printable name for DECL */
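/* Examples of the names produced below (illustrative): SSA name
   version 5 of a variable "i" prints as "i_5" and an unnamed DECL as
   "D.<uid>"; when no dump file is active an unnamed entity simply gets
   the string "NULL" without any allocation.  */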
2760 static const char *
2761 alias_get_name (tree decl)
2763 const char *res = get_name (decl);
2764 char *temp;
2765 int num_printed = 0;
2767 if (res != NULL)
2768 return res;
2770 res = "NULL";
2771 if (!dump_file)
2772 return res;
2774 if (TREE_CODE (decl) == SSA_NAME)
2776 num_printed = asprintf (&temp, "%s_%u",
2777 alias_get_name (SSA_NAME_VAR (decl)),
2778 SSA_NAME_VERSION (decl));
2780 else if (DECL_P (decl))
2782 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2784 if (num_printed > 0)
2786 res = ggc_strdup (temp);
2787 free (temp);
2789 return res;
2792 /* Find the variable id for tree T in the map.
2793 If T doesn't exist in the map, create an entry for it and return it. */
2795 static varinfo_t
2796 get_vi_for_tree (tree t)
2798 void **slot = pointer_map_contains (vi_for_tree, t);
2799 if (slot == NULL)
2800 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2802 return (varinfo_t) *slot;
2805 /* Get a scalar constraint expression for a new temporary variable. */
2807 static struct constraint_expr
2808 new_scalar_tmp_constraint_exp (const char *name)
2810 struct constraint_expr tmp;
2811 varinfo_t vi;
2813 vi = new_var_info (NULL_TREE, name);
2814 vi->offset = 0;
2815 vi->size = -1;
2816 vi->fullsize = -1;
2817 vi->is_full_var = 1;
2819 tmp.var = vi->id;
2820 tmp.type = SCALAR;
2821 tmp.offset = 0;
2823 return tmp;
2826 /* Get a constraint expression vector from an SSA_VAR_P node.
2827 If address_p is true, the result will have its address taken. */
2829 static void
2830 get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p)
2832 struct constraint_expr cexpr;
2833 varinfo_t vi;
2835 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2836 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2838 /* For parameters, get at the points-to set for the actual parm
2839 decl. */
2840 if (TREE_CODE (t) == SSA_NAME
2841 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2842 && SSA_NAME_IS_DEFAULT_DEF (t))
2844 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2845 return;
2848 vi = get_vi_for_tree (t);
2849 cexpr.var = vi->id;
2850 cexpr.type = SCALAR;
2851 cexpr.offset = 0;
2852 /* If we determine the result is "anything", and we know this is readonly,
2853 say it points to readonly memory instead. */
2854 if (cexpr.var == anything_id && TREE_READONLY (t))
2856 gcc_unreachable ();
2857 cexpr.type = ADDRESSOF;
2858 cexpr.var = readonly_id;
2861 /* If we are not taking the address of the constraint expr, add all
2862 sub-fields of the variable as well. */
2863 if (!address_p
2864 && !vi->is_full_var)
2866 for (; vi; vi = vi->next)
2868 cexpr.var = vi->id;
2869 VEC_safe_push (ce_s, heap, *results, &cexpr);
2871 return;
2874 VEC_safe_push (ce_s, heap, *results, &cexpr);
2877 /* Process constraint T, performing various simplifications and then
2878 adding it to our list of overall constraints. */
2880 static void
2881 process_constraint (constraint_t t)
2883 struct constraint_expr rhs = t->rhs;
2884 struct constraint_expr lhs = t->lhs;
2886 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2887 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2889 /* If we didn't get any useful constraint from the lhs we get
2890 &ANYTHING as fallback from get_constraint_for. Deal with
2891 it here by turning it into *ANYTHING. */
2892 if (lhs.type == ADDRESSOF
2893 && lhs.var == anything_id)
2894 lhs.type = DEREF;
2896 /* ADDRESSOF on the lhs is invalid. */
2897 gcc_assert (lhs.type != ADDRESSOF);
2899 /* We shouldn't add constraints from things that cannot have pointers.
2900 It's not completely trivial to avoid in the callers, so do it here. */
2901 if (rhs.type != ADDRESSOF
2902 && !get_varinfo (rhs.var)->may_have_pointers)
2903 return;
2905 /* Likewise adding to the solution of a non-pointer var isn't useful. */
2906 if (!get_varinfo (lhs.var)->may_have_pointers)
2907 return;
2909 /* This can happen in our IR with things like n->a = *p */
2910 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2912 /* Split into tmp = *rhs, *lhs = tmp */
2913 struct constraint_expr tmplhs;
2914 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
2915 process_constraint (new_constraint (tmplhs, rhs));
2916 process_constraint (new_constraint (lhs, tmplhs));
2918 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2920 /* Split into tmp = &rhs, *lhs = tmp */
2921 struct constraint_expr tmplhs;
2922 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
2923 process_constraint (new_constraint (tmplhs, rhs));
2924 process_constraint (new_constraint (lhs, tmplhs));
2926 else
2928 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2929 VEC_safe_push (constraint_t, heap, constraints, t);
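/* Illustrative example (not part of the original source) of the
   splitting above: for the statement

     n->a = *p;

   both sides are dereferences, so instead of an unsupported
   *lhs = *rhs constraint we emit

     doubledereftmp = *p;
     n->a = doubledereftmp;

   and an ADDRESSOF right-hand side under a DEREF left-hand side is
   split through a "derefaddrtmp" temporary in the same way.  */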
2933 /* Return true if T is a type that could contain pointers. */
2935 static bool
2936 type_could_have_pointers (tree type)
2938 if (POINTER_TYPE_P (type))
2939 return true;
2941 if (TREE_CODE (type) == ARRAY_TYPE)
2942 return type_could_have_pointers (TREE_TYPE (type));
2944 return AGGREGATE_TYPE_P (type);
2947 /* Return true if T is a variable of a type that could contain
2948 pointers. */
2950 static bool
2951 could_have_pointers (tree t)
2953 return type_could_have_pointers (TREE_TYPE (t));
2956 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2957 structure. */
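/* Example (illustrative): a field with a DECL_FIELD_OFFSET of 4 bytes
   and a DECL_FIELD_BIT_OFFSET of 16 yields 4 * 8 + 16 = 48 bits;
   fields whose offsets are not host integers make the function
   return -1.  */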
2959 static HOST_WIDE_INT
2960 bitpos_of_field (const tree fdecl)
2963 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2964 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2965 return -1;
2967 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * 8
2968 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2972 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
2973 resulting constraint expressions in *RESULTS. */
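/* Illustrative example (assumptions, not from the original source):
   given

     struct S { int *a; int *b; } s;
     int **q = &s.a + 1;

   the base constraint is ADDRESSOF for the sub-field s.a, so the byte
   offset (here sizeof (int *)) is scaled to bits, added to the field
   offset, and the sub-field search below yields ADDRESSOF s.b for q.
   For a plain pointer variable the constraint stays SCALAR and the
   offset is simply attached to it; a non-constant offset degrades to
   UNKNOWN_OFFSET (for an address, all sub-fields are added).  */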
2975 static void
2976 get_constraint_for_ptr_offset (tree ptr, tree offset,
2977 VEC (ce_s, heap) **results)
2979 struct constraint_expr c;
2980 unsigned int j, n;
2981 HOST_WIDE_INT rhsunitoffset, rhsoffset;
2983 /* If we do not do field-sensitive PTA, adding offsets to pointers
2984 does not change the points-to solution. */
2985 if (!use_field_sensitive)
2987 get_constraint_for (ptr, results);
2988 return;
2991 /* If the offset is not a non-negative integer constant that fits
2992 in a HOST_WIDE_INT, we have to fall back to a conservative
2993 solution which includes all sub-fields of all pointed-to
2994 variables of ptr. */
2995 if (offset == NULL_TREE
2996 || !host_integerp (offset, 0))
2997 rhsoffset = UNKNOWN_OFFSET;
2998 else
3000 /* Make sure the bit-offset also fits. */
3001 rhsunitoffset = TREE_INT_CST_LOW (offset);
3002 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
3003 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3004 rhsoffset = UNKNOWN_OFFSET;
3007 get_constraint_for (ptr, results);
3008 if (rhsoffset == 0)
3009 return;
3011 /* As we are eventually appending to the solution do not use
3012 VEC_iterate here. */
3013 n = VEC_length (ce_s, *results);
3014 for (j = 0; j < n; j++)
3016 varinfo_t curr;
3017 c = *VEC_index (ce_s, *results, j);
3018 curr = get_varinfo (c.var);
3020 if (c.type == ADDRESSOF
3021 /* If this varinfo represents a full variable just use it. */
3022 && curr->is_full_var)
3023 c.offset = 0;
3024 else if (c.type == ADDRESSOF
3025 /* If we do not know the offset add all subfields. */
3026 && rhsoffset == UNKNOWN_OFFSET)
3028 varinfo_t temp = lookup_vi_for_tree (curr->decl);
3031 struct constraint_expr c2;
3032 c2.var = temp->id;
3033 c2.type = ADDRESSOF;
3034 c2.offset = 0;
3035 if (c2.var != c.var)
3036 VEC_safe_push (ce_s, heap, *results, &c2);
3037 temp = temp->next;
3039 while (temp);
3041 else if (c.type == ADDRESSOF)
3043 varinfo_t temp;
3044 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3046 /* Search the sub-field which overlaps with the
3047 pointed-to offset. If the result is outside of the variable
3048 we have to provide a conservative result, as the variable is
3049 still reachable from the resulting pointer (even though it
3050 technically cannot point to anything). The last and first
3051 sub-fields are such conservative results.
3052 ??? If we always had a sub-field for &object + 1 then
3053 we could represent this in a more precise way. */
3054 if (rhsoffset < 0
3055 && curr->offset < offset)
3056 offset = 0;
3057 temp = first_or_preceding_vi_for_offset (curr, offset);
3059 /* If the found variable is not exactly at the pointed to
3060 result, we have to include the next variable in the
3061 solution as well. Otherwise two increments by offset / 2
3062 do not result in the same or a conservative superset
3063 solution. */
3064 if (temp->offset != offset
3065 && temp->next != NULL)
3067 struct constraint_expr c2;
3068 c2.var = temp->next->id;
3069 c2.type = ADDRESSOF;
3070 c2.offset = 0;
3071 VEC_safe_push (ce_s, heap, *results, &c2);
3073 c.var = temp->id;
3074 c.offset = 0;
3076 else
3077 c.offset = rhsoffset;
3079 VEC_replace (ce_s, *results, j, &c);
3084 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3085 If address_p is true the result will have its address taken. */
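/* Illustrative example (not part of the original source): for

     struct S { int *a; int *b; } s;
     ... = s.b;

   get_ref_base_and_extent returns the base s together with the bit
   range of the access, and the loop below walks s's field list to the
   sub-field varinfo overlapping that range, so the resulting
   constraint names the field b rather than the whole of s.  */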
3087 static void
3088 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
3089 bool address_p)
3091 tree orig_t = t;
3092 HOST_WIDE_INT bitsize = -1;
3093 HOST_WIDE_INT bitmaxsize = -1;
3094 HOST_WIDE_INT bitpos;
3095 tree forzero;
3096 struct constraint_expr *result;
3098 /* Some people like to do cute things like take the address of
3099 &0->a.b */
3100 forzero = t;
3101 while (handled_component_p (forzero)
3102 || INDIRECT_REF_P (forzero))
3103 forzero = TREE_OPERAND (forzero, 0);
3105 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3107 struct constraint_expr temp;
3109 temp.offset = 0;
3110 temp.var = integer_id;
3111 temp.type = SCALAR;
3112 VEC_safe_push (ce_s, heap, *results, &temp);
3113 return;
3116 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
3118 /* Pretend to take the address of the base, we'll take care of
3119 adding the required subset of sub-fields below. */
3120 get_constraint_for_1 (t, results, true);
3121 gcc_assert (VEC_length (ce_s, *results) == 1);
3122 result = VEC_last (ce_s, *results);
3124 if (result->type == SCALAR
3125 && get_varinfo (result->var)->is_full_var)
3126 /* For single-field vars do not bother about the offset. */
3127 result->offset = 0;
3128 else if (result->type == SCALAR)
3130 /* In languages like C, you can access one past the end of an
3131 array. You aren't allowed to dereference it, so we can
3132 ignore this constraint. When we handle pointer subtraction,
3133 we may have to do something cute here. */
3135 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result->var)->fullsize
3136 && bitmaxsize != 0)
3138 /* It's also not true that the constraint will actually start at the
3139 right offset; it may start in some padding. We only care about
3140 setting the constraint to the first actual field it touches, so
3141 walk to find it. */
3142 struct constraint_expr cexpr = *result;
3143 varinfo_t curr;
3144 VEC_pop (ce_s, *results);
3145 cexpr.offset = 0;
3146 for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
3148 if (ranges_overlap_p (curr->offset, curr->size,
3149 bitpos, bitmaxsize))
3151 cexpr.var = curr->id;
3152 VEC_safe_push (ce_s, heap, *results, &cexpr);
3153 if (address_p)
3154 break;
3157 /* If we are going to take the address of this field then
3158 to be able to compute reachability correctly add at least
3159 the last field of the variable. */
3160 if (address_p
3161 && VEC_length (ce_s, *results) == 0)
3163 curr = get_varinfo (cexpr.var);
3164 while (curr->next != NULL)
3165 curr = curr->next;
3166 cexpr.var = curr->id;
3167 VEC_safe_push (ce_s, heap, *results, &cexpr);
3169 else
3170 /* Assert that we found *some* field there. The user couldn't be
3171 accessing *only* padding. */
3172 /* Still the user could access one past the end of an array
3173 embedded in a struct resulting in accessing *only* padding. */
3174 gcc_assert (VEC_length (ce_s, *results) >= 1
3175 || ref_contains_array_ref (orig_t));
3177 else if (bitmaxsize == 0)
3179 if (dump_file && (dump_flags & TDF_DETAILS))
3180 fprintf (dump_file, "Access to zero-sized part of variable, "
3181 "ignoring\n");
3183 else
3184 if (dump_file && (dump_flags & TDF_DETAILS))
3185 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3187 else if (result->type == DEREF)
3189 /* If we do not know exactly where the access goes say so. Note
3190 that only for non-structure accesses do we know that we access
3191 at most one subfield of any variable. */
3192 if (bitpos == -1
3193 || bitsize != bitmaxsize
3194 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t)))
3195 result->offset = UNKNOWN_OFFSET;
3196 else
3197 result->offset = bitpos;
3199 else if (result->type == ADDRESSOF)
3201 /* We can end up here for component references on a
3202 VIEW_CONVERT_EXPR <>(&foobar). */
3203 result->type = SCALAR;
3204 result->var = anything_id;
3205 result->offset = 0;
3207 else
3208 gcc_unreachable ();
3212 /* Dereference the constraint expression CONS, and return the result.
3213 DEREF (ADDRESSOF) = SCALAR
3214 DEREF (SCALAR) = DEREF
3215 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3216 This is needed so that we can handle dereferencing DEREF constraints. */
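/* Illustrative example (not part of the original source): a results
   vector of { SCALAR p } becomes { DEREF p }, an { ADDRESSOF s } entry
   cancels to { SCALAR s }, and a { DEREF p } entry is first copied
   into a fresh "dereftmp" variable so the result becomes
   { DEREF dereftmp }.  */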
3218 static void
3219 do_deref (VEC (ce_s, heap) **constraints)
3221 struct constraint_expr *c;
3222 unsigned int i = 0;
3224 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
3226 if (c->type == SCALAR)
3227 c->type = DEREF;
3228 else if (c->type == ADDRESSOF)
3229 c->type = SCALAR;
3230 else if (c->type == DEREF)
3232 struct constraint_expr tmplhs;
3233 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3234 process_constraint (new_constraint (tmplhs, *c));
3235 c->var = tmplhs.var;
3237 else
3238 gcc_unreachable ();
3242 static void get_constraint_for_1 (tree, VEC (ce_s, heap) **, bool);
3244 /* Given a tree T, return the constraint expression for taking the
3245 address of it. */
3247 static void
3248 get_constraint_for_address_of (tree t, VEC (ce_s, heap) **results)
3250 struct constraint_expr *c;
3251 unsigned int i;
3253 get_constraint_for_1 (t, results, true);
3255 for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
3257 if (c->type == DEREF)
3258 c->type = SCALAR;
3259 else
3260 c->type = ADDRESSOF;
3264 /* Given a tree T, return the constraint expression for it. */
3266 static void
3267 get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
3269 struct constraint_expr temp;
3271 /* x = integer is all glommed to a single variable, which doesn't
3272 point to anything by itself. That is, of course, unless it is an
3273 integer constant being treated as a pointer, in which case, we
3274 will return that this is really the addressof anything. This
3275 happens below, since it will fall into the default case. The only
3276 case we know something about an integer treated like a pointer is
3277 when it is the NULL pointer, and then we just say it points to
3278 NULL.
3280 Do not do that if -fno-delete-null-pointer-checks though, because
3281 in that case *NULL does not fail, so it _should_ alias *anything.
3282 It is not worth adding a new option or renaming the existing one,
3283 since this case is relatively obscure. */
3284 if (flag_delete_null_pointer_checks
3285 && ((TREE_CODE (t) == INTEGER_CST
3286 && integer_zerop (t))
3287 /* The only valid CONSTRUCTORs in gimple with pointer typed
3288 elements are zero-initializer. */
3289 || TREE_CODE (t) == CONSTRUCTOR))
3291 temp.var = nothing_id;
3292 temp.type = ADDRESSOF;
3293 temp.offset = 0;
3294 VEC_safe_push (ce_s, heap, *results, &temp);
3295 return;
3298 /* String constants are read-only. */
3299 if (TREE_CODE (t) == STRING_CST)
3301 temp.var = readonly_id;
3302 temp.type = SCALAR;
3303 temp.offset = 0;
3304 VEC_safe_push (ce_s, heap, *results, &temp);
3305 return;
3308 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3310 case tcc_expression:
3312 switch (TREE_CODE (t))
3314 case ADDR_EXPR:
3315 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3316 return;
3317 default:;
3319 break;
3321 case tcc_reference:
3323 switch (TREE_CODE (t))
3325 case INDIRECT_REF:
3327 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
3328 do_deref (results);
3329 return;
3331 case ARRAY_REF:
3332 case ARRAY_RANGE_REF:
3333 case COMPONENT_REF:
3334 get_constraint_for_component_ref (t, results, address_p);
3335 return;
3336 case VIEW_CONVERT_EXPR:
3337 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
3338 return;
3339 /* We are missing handling for TARGET_MEM_REF here. */
3340 default:;
3342 break;
3344 case tcc_exceptional:
3346 switch (TREE_CODE (t))
3348 case SSA_NAME:
3350 get_constraint_for_ssa_var (t, results, address_p);
3351 return;
3353 default:;
3355 break;
3357 case tcc_declaration:
3359 get_constraint_for_ssa_var (t, results, address_p);
3360 return;
3362 default:;
3365 /* The default fallback is a constraint from anything. */
3366 temp.type = ADDRESSOF;
3367 temp.var = anything_id;
3368 temp.offset = 0;
3369 VEC_safe_push (ce_s, heap, *results, &temp);
3372 /* Given a gimple tree T, return the constraint expression vector for it. */
3374 static void
3375 get_constraint_for (tree t, VEC (ce_s, heap) **results)
3377 gcc_assert (VEC_length (ce_s, *results) == 0);
3379 get_constraint_for_1 (t, results, false);
3383 /* Efficiently generates constraints from all entries in *RHSC to all
3384 entries in *LHSC. */
3386 static void
3387 process_all_all_constraints (VEC (ce_s, heap) *lhsc, VEC (ce_s, heap) *rhsc)
3389 struct constraint_expr *lhsp, *rhsp;
3390 unsigned i, j;
3392 if (VEC_length (ce_s, lhsc) <= 1
3393 || VEC_length (ce_s, rhsc) <= 1)
3395 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
3396 for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); ++j)
3397 process_constraint (new_constraint (*lhsp, *rhsp));
3399 else
3401 struct constraint_expr tmp;
3402 tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3403 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
3404 process_constraint (new_constraint (tmp, *rhsp));
3405 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
3406 process_constraint (new_constraint (*lhsp, tmp));
3410 /* Handle aggregate copies by expanding into copies of the respective
3411 fields of the structures. */
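/* Illustrative example (not part of the original source): for the
   aggregate assignment

     struct S { int *a; int *b; } x, y;
     x = y;

   both sides expand to their field constraints and the SCALAR/SCALAR
   path below pairs up overlapping fields, yielding roughly

     x.a = y.a;
     x.b = y.b;

   whereas if either side is reached through a dereference the offsets
   are unknown and an all-to-all set of constraints is emitted via
   process_all_all_constraints.  */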
3413 static void
3414 do_structure_copy (tree lhsop, tree rhsop)
3416 struct constraint_expr *lhsp, *rhsp;
3417 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3418 unsigned j;
3420 get_constraint_for (lhsop, &lhsc);
3421 get_constraint_for (rhsop, &rhsc);
3422 lhsp = VEC_index (ce_s, lhsc, 0);
3423 rhsp = VEC_index (ce_s, rhsc, 0);
3424 if (lhsp->type == DEREF
3425 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3426 || rhsp->type == DEREF)
3428 if (lhsp->type == DEREF)
3430 gcc_assert (VEC_length (ce_s, lhsc) == 1);
3431 lhsp->offset = UNKNOWN_OFFSET;
3433 if (rhsp->type == DEREF)
3435 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3436 rhsp->offset = UNKNOWN_OFFSET;
3438 process_all_all_constraints (lhsc, rhsc);
3440 else if (lhsp->type == SCALAR
3441 && (rhsp->type == SCALAR
3442 || rhsp->type == ADDRESSOF))
3444 HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
3445 HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
3446 unsigned k = 0;
3447 get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
3448 get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
3449 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp);)
3451 varinfo_t lhsv, rhsv;
3452 rhsp = VEC_index (ce_s, rhsc, k);
3453 lhsv = get_varinfo (lhsp->var);
3454 rhsv = get_varinfo (rhsp->var);
3455 if (lhsv->may_have_pointers
3456 && ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3457 rhsv->offset + lhsoffset, rhsv->size))
3458 process_constraint (new_constraint (*lhsp, *rhsp));
3459 if (lhsv->offset + rhsoffset + lhsv->size
3460 > rhsv->offset + lhsoffset + rhsv->size)
3462 ++k;
3463 if (k >= VEC_length (ce_s, rhsc))
3464 break;
3466 else
3467 ++j;
3470 else
3471 gcc_unreachable ();
3473 VEC_free (ce_s, heap, lhsc);
3474 VEC_free (ce_s, heap, rhsc);
3477 /* Create a constraint ID = OP. */
3479 static void
3480 make_constraint_to (unsigned id, tree op)
3482 VEC(ce_s, heap) *rhsc = NULL;
3483 struct constraint_expr *c;
3484 struct constraint_expr includes;
3485 unsigned int j;
3487 includes.var = id;
3488 includes.offset = 0;
3489 includes.type = SCALAR;
3491 get_constraint_for (op, &rhsc);
3492 for (j = 0; VEC_iterate (ce_s, rhsc, j, c); j++)
3493 process_constraint (new_constraint (includes, *c));
3494 VEC_free (ce_s, heap, rhsc);
3497 /* Create a constraint ID = &FROM. */
3499 static void
3500 make_constraint_from (varinfo_t vi, int from)
3502 struct constraint_expr lhs, rhs;
3504 lhs.var = vi->id;
3505 lhs.offset = 0;
3506 lhs.type = SCALAR;
3508 rhs.var = from;
3509 rhs.offset = 0;
3510 rhs.type = ADDRESSOF;
3511 process_constraint (new_constraint (lhs, rhs));
3514 /* Create a constraint ID = FROM. */
3516 static void
3517 make_copy_constraint (varinfo_t vi, int from)
3519 struct constraint_expr lhs, rhs;
3521 lhs.var = vi->id;
3522 lhs.offset = 0;
3523 lhs.type = SCALAR;
3525 rhs.var = from;
3526 rhs.offset = 0;
3527 rhs.type = SCALAR;
3528 process_constraint (new_constraint (lhs, rhs));
3531 /* Make constraints necessary to make OP escape. */
3533 static void
3534 make_escape_constraint (tree op)
3536 make_constraint_to (escaped_id, op);
3539 /* Add constraints so that the solution of VI is transitively closed. */
3541 static void
3542 make_transitive_closure_constraints (varinfo_t vi)
3544 struct constraint_expr lhs, rhs;
3546 /* VAR = *VAR; */
3547 lhs.type = SCALAR;
3548 lhs.var = vi->id;
3549 lhs.offset = 0;
3550 rhs.type = DEREF;
3551 rhs.var = vi->id;
3552 rhs.offset = 0;
3553 process_constraint (new_constraint (lhs, rhs));
3555 /* VAR = VAR + UNKNOWN; */
3556 lhs.type = SCALAR;
3557 lhs.var = vi->id;
3558 lhs.offset = 0;
3559 rhs.type = SCALAR;
3560 rhs.var = vi->id;
3561 rhs.offset = UNKNOWN_OFFSET;
3562 process_constraint (new_constraint (lhs, rhs));
3565 /* Create a new artificial heap variable with NAME and make a
3566 constraint from it to LHS. Return the created variable. */
3568 static varinfo_t
3569 make_constraint_from_heapvar (varinfo_t lhs, const char *name)
3571 varinfo_t vi;
3572 tree heapvar = heapvar_lookup (lhs->decl, lhs->offset);
3574 if (heapvar == NULL_TREE)
3576 var_ann_t ann;
3577 heapvar = create_tmp_var_raw (ptr_type_node, name);
3578 DECL_EXTERNAL (heapvar) = 1;
3580 heapvar_insert (lhs->decl, lhs->offset, heapvar);
3582 ann = get_var_ann (heapvar);
3583 ann->is_heapvar = 1;
3586 /* For global vars the heapvar may need to be added to the list of
3587 referenced vars of a function other than the one it was created for. */
3588 if (cfun && gimple_referenced_vars (cfun))
3589 add_referenced_var (heapvar);
3591 vi = new_var_info (heapvar, name);
3592 vi->is_artificial_var = true;
3593 vi->is_heap_var = true;
3594 vi->is_unknown_size_var = true;
3595 vi->offset = 0;
3596 vi->fullsize = ~0;
3597 vi->size = ~0;
3598 vi->is_full_var = true;
3599 insert_vi_for_tree (heapvar, vi);
3601 make_constraint_from (lhs, vi->id);
3603 return vi;
3606 /* Create a new artificial heap variable with NAME and make a
3607 constraint from it to LHS. Set flags according to a tag used
3608 for tracking restrict pointers. */
3610 static void
3611 make_constraint_from_restrict (varinfo_t lhs, const char *name)
3613 varinfo_t vi;
3614 vi = make_constraint_from_heapvar (lhs, name);
3615 vi->is_restrict_var = 1;
3616 vi->is_global_var = 0;
3617 vi->is_special_var = 1;
3618 vi->may_have_pointers = 0;
3621 /* In IPA mode there are varinfos for different aspects of each
3622 function designator. One for the points-to set of the return
3623 value, one for the variables that are clobbered by the function,
3624 one for its uses and one for each parameter (including a single
3625 glob for remaining variadic arguments). */
3627 enum { fi_clobbers = 1, fi_uses = 2,
3628 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
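/* Illustrative reading (an addition; the exact parameter layout is an
   assumption not shown in this excerpt): for a function f in IPA mode,
   f + fi_result describes what f may return and f + fi_clobbers /
   f + fi_uses the memory it may clobber or use, while parameters are
   addressed from fi_parm_base onwards, e.g. the second parameter at
   fi_parm_base + 1.  get_function_part_constraint below selects the
   sub-varinfo at the requested offset, or falls back to a DEREF with
   that offset when the callee is not a known FUNCTION_DECL.  */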
3630 /* Get a constraint for the requested part of a function designator FI
3631 when operating in IPA mode. */
3633 static struct constraint_expr
3634 get_function_part_constraint (varinfo_t fi, unsigned part)
3636 struct constraint_expr c;
3638 gcc_assert (in_ipa_mode);
3640 if (fi->id == anything_id)
3642 /* ??? We probably should have a ANYFN special variable. */
3643 c.var = anything_id;
3644 c.offset = 0;
3645 c.type = SCALAR;
3647 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3649 varinfo_t ai = first_vi_for_offset (fi, part);
3650 if (ai)
3651 c.var = ai->id;
3652 else
3653 c.var = anything_id;
3654 c.offset = 0;
3655 c.type = SCALAR;
3657 else
3659 c.var = fi->id;
3660 c.offset = part;
3661 c.type = DEREF;
3664 return c;
3667 /* For non-IPA mode, generate constraints necessary for a call on the
3668 RHS. */
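/* Illustrative example (not part of the original source): for a call

     q = foo (p);

   to an otherwise unknown function, handle_rhs_call below adds the
   points-to set of every pointer argument to ESCAPED, lets the static
   chain and any NRV return slot escape as well, and describes the call
   result as pointing to NONLOCAL memory, which the caller later copies
   into the constraints for q.  */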
3670 static void
3671 handle_rhs_call (gimple stmt, VEC(ce_s, heap) **results)
3673 struct constraint_expr rhsc;
3674 unsigned i;
3676 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3678 tree arg = gimple_call_arg (stmt, i);
3680 /* Find those pointers being passed, and make sure they end up
3681 pointing to anything. */
3682 if (could_have_pointers (arg))
3683 make_escape_constraint (arg);
3686 /* The static chain escapes as well. */
3687 if (gimple_call_chain (stmt))
3688 make_escape_constraint (gimple_call_chain (stmt));
3690 /* And if we applied NRV the address of the return slot escapes as well. */
3691 if (gimple_call_return_slot_opt_p (stmt)
3692 && gimple_call_lhs (stmt) != NULL_TREE
3693 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3695 VEC(ce_s, heap) *tmpc = NULL;
3696 struct constraint_expr lhsc, *c;
3697 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
3698 lhsc.var = escaped_id;
3699 lhsc.offset = 0;
3700 lhsc.type = SCALAR;
3701 for (i = 0; VEC_iterate (ce_s, tmpc, i, c); ++i)
3702 process_constraint (new_constraint (lhsc, *c));
3703 VEC_free(ce_s, heap, tmpc);
3706 /* Regular functions return nonlocal memory. */
3707 rhsc.var = nonlocal_id;
3708 rhsc.offset = 0;
3709 rhsc.type = SCALAR;
3710 VEC_safe_push (ce_s, heap, *results, &rhsc);
3713 /* For non-IPA mode, generate constraints necessary for a call
3714 that returns a pointer and assigns it to LHS. This simply makes
3715 the LHS point to global and escaped variables. */
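/* Illustrative example (not part of the original source): for

     p = malloc (n);

   the ECF_MALLOC path below creates an artificial "HEAP" variable and
   the constraint p = &HEAP.  The heap variable is kept non-global
   until the solution shows it escapes, and for a malloc-attributed
   function that is not a recognized builtin the fresh memory is
   conservatively assumed to point to NONLOCAL memory already.  */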
3717 static void
3718 handle_lhs_call (tree lhs, int flags, VEC(ce_s, heap) *rhsc, tree fndecl)
3720 VEC(ce_s, heap) *lhsc = NULL;
3722 get_constraint_for (lhs, &lhsc);
3724 if (flags & ECF_MALLOC)
3726 varinfo_t vi;
3727 vi = make_constraint_from_heapvar (get_vi_for_tree (lhs), "HEAP");
3728 /* We delay marking allocated storage global until we know if
3729 it escapes. */
3730 DECL_EXTERNAL (vi->decl) = 0;
3731 vi->is_global_var = 0;
3732 /* If this is not a real malloc call assume the memory was
3733 initialized and thus may point to global memory. All
3734 builtin functions with the malloc attribute behave in a sane way. */
3735 if (!fndecl
3736 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
3737 make_constraint_from (vi, nonlocal_id);
3739 else if (VEC_length (ce_s, rhsc) > 0)
3741 /* If the store is to a global decl make sure to
3742 add proper escape constraints. */
3743 lhs = get_base_address (lhs);
3744 if (lhs
3745 && DECL_P (lhs)
3746 && is_global_var (lhs))
3748 struct constraint_expr tmpc;
3749 tmpc.var = escaped_id;
3750 tmpc.offset = 0;
3751 tmpc.type = SCALAR;
3752 VEC_safe_push (ce_s, heap, lhsc, &tmpc);
3754 process_all_all_constraints (lhsc, rhsc);
3756 VEC_free (ce_s, heap, lhsc);
3759 /* For non-IPA mode, generate constraints necessary for a call of a
3760 const function that returns a pointer in the statement STMT. */
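/* Illustrative example (not part of the original source): for a call

     q = cf (p);

   where cf is declared const, the returned pointer may be one of the
   arguments or the address of global memory, so the results collected
   below amount to q = p and q = &NONLOCAL; the call-use variable is
   only involved when the call carries a static chain.  */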
3762 static void
3763 handle_const_call (gimple stmt, VEC(ce_s, heap) **results)
3765 struct constraint_expr rhsc;
3766 unsigned int k;
3768 /* Treat nested const functions the same as pure functions as far
3769 as the static chain is concerned. */
3770 if (gimple_call_chain (stmt))
3772 varinfo_t uses = get_call_use_vi (stmt);
3773 make_transitive_closure_constraints (uses);
3774 make_constraint_to (uses->id, gimple_call_chain (stmt));
3775 rhsc.var = uses->id;
3776 rhsc.offset = 0;
3777 rhsc.type = SCALAR;
3778 VEC_safe_push (ce_s, heap, *results, &rhsc);
3781 /* May return arguments. */
3782 for (k = 0; k < gimple_call_num_args (stmt); ++k)
3784 tree arg = gimple_call_arg (stmt, k);
3786 if (could_have_pointers (arg))
3788 VEC(ce_s, heap) *argc = NULL;
3789 unsigned i;
3790 struct constraint_expr *argp;
3791 get_constraint_for (arg, &argc);
3792 for (i = 0; VEC_iterate (ce_s, argc, i, argp); ++i)
3793 VEC_safe_push (ce_s, heap, *results, argp);
3794 VEC_free(ce_s, heap, argc);
3798 /* May return addresses of globals. */
3799 rhsc.var = nonlocal_id;
3800 rhsc.offset = 0;
3801 rhsc.type = ADDRESSOF;
3802 VEC_safe_push (ce_s, heap, *results, &rhsc);
3805 /* For non-IPA mode, generate constraints necessary for a call to a
3806 pure function in statement STMT. */
3808 static void
3809 handle_pure_call (gimple stmt, VEC(ce_s, heap) **results)
3811 struct constraint_expr rhsc;
3812 unsigned i;
3813 varinfo_t uses = NULL;
3815 /* Memory reached from pointer arguments is call-used. */
3816 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3818 tree arg = gimple_call_arg (stmt, i);
3820 if (could_have_pointers (arg))
3822 if (!uses)
3824 uses = get_call_use_vi (stmt);
3825 make_transitive_closure_constraints (uses);
3827 make_constraint_to (uses->id, arg);
3831 /* The static chain is used as well. */
3832 if (gimple_call_chain (stmt))
3834 if (!uses)
3836 uses = get_call_use_vi (stmt);
3837 make_transitive_closure_constraints (uses);
3839 make_constraint_to (uses->id, gimple_call_chain (stmt));
3842 /* Pure functions may return call-used and nonlocal memory. */
3843 if (uses)
3845 rhsc.var = uses->id;
3846 rhsc.offset = 0;
3847 rhsc.type = SCALAR;
3848 VEC_safe_push (ce_s, heap, *results, &rhsc);
3850 rhsc.var = nonlocal_id;
3851 rhsc.offset = 0;
3852 rhsc.type = SCALAR;
3853 VEC_safe_push (ce_s, heap, *results, &rhsc);
3857 /* Return the varinfo for the callee of CALL. */
3859 static varinfo_t
3860 get_fi_for_callee (gimple call)
3862 tree decl;
3864 /* If we can directly resolve the function being called, do so.
3865 Otherwise, it must be some sort of indirect expression that
3866 we should still be able to handle. */
3867 decl = gimple_call_fndecl (call);
3868 if (decl)
3869 return get_vi_for_tree (decl);
3871 decl = gimple_call_fn (call);
3872 /* The function can be either an SSA name pointer or,
3873 worse, an OBJ_TYPE_REF. In this case we have no
3874 clue and should be getting ANYFN (well, ANYTHING for now). */
3875 if (TREE_CODE (decl) == SSA_NAME)
3877 if (TREE_CODE (decl) == SSA_NAME
3878 && TREE_CODE (SSA_NAME_VAR (decl)) == PARM_DECL
3879 && SSA_NAME_IS_DEFAULT_DEF (decl))
3880 decl = SSA_NAME_VAR (decl);
3881 return get_vi_for_tree (decl);
3883 else if (TREE_CODE (decl) == INTEGER_CST
3884 || TREE_CODE (decl) == OBJ_TYPE_REF)
3885 return get_varinfo (anything_id);
3886 else
3887 gcc_unreachable ();
3890 /* Walk statement T setting up aliasing constraints according to the
3891 references found in T. This function is the main part of the
3892 constraint builder. */
3895 static void
3896 find_func_aliases (gimple origt)
3898 gimple t = origt;
3899 VEC(ce_s, heap) *lhsc = NULL;
3900 VEC(ce_s, heap) *rhsc = NULL;
3901 struct constraint_expr *c;
3902 varinfo_t fi;
3904 /* Now build constraints expressions. */
3905 if (gimple_code (t) == GIMPLE_PHI)
3907 gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (gimple_phi_result (t))));
3909 /* Only care about pointers and structures containing
3910 pointers. */
3911 if (could_have_pointers (gimple_phi_result (t)))
3913 size_t i;
3914 unsigned int j;
3916 /* For a phi node, assign all the arguments to
3917 the result. */
3918 get_constraint_for (gimple_phi_result (t), &lhsc);
3919 for (i = 0; i < gimple_phi_num_args (t); i++)
3921 tree strippedrhs = PHI_ARG_DEF (t, i);
3923 STRIP_NOPS (strippedrhs);
3924 get_constraint_for (gimple_phi_arg_def (t, i), &rhsc);
3926 for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
3928 struct constraint_expr *c2;
3929 while (VEC_length (ce_s, rhsc) > 0)
3931 c2 = VEC_last (ce_s, rhsc);
3932 process_constraint (new_constraint (*c, *c2));
3933 VEC_pop (ce_s, rhsc);
3939 /* In IPA mode, we need to generate constraints to pass call
3940 arguments through their calls. There are two cases,
3941 either a GIMPLE_CALL returning a value, or just a plain
3942 GIMPLE_CALL when there is no return value.
3944 In non-ipa mode, we need to generate constraints for each
3945 pointer passed by address. */
3946 else if (is_gimple_call (t))
3948 tree fndecl = gimple_call_fndecl (t);
3949 if (fndecl != NULL_TREE
3950 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3951 /* ??? All builtins that are handled here need to be handled
3952 in the alias-oracle query functions explicitly! */
3953 switch (DECL_FUNCTION_CODE (fndecl))
3955 /* All the following functions return a pointer to the same object
3956 as their first argument points to. The functions do not add
3957 to the ESCAPED solution. The functions make the first argument
3958 pointed to memory point to what the second argument pointed to
3959 memory points to. */
3960 case BUILT_IN_STRCPY:
3961 case BUILT_IN_STRNCPY:
3962 case BUILT_IN_BCOPY:
3963 case BUILT_IN_MEMCPY:
3964 case BUILT_IN_MEMMOVE:
3965 case BUILT_IN_MEMPCPY:
3966 case BUILT_IN_STPCPY:
3967 case BUILT_IN_STPNCPY:
3968 case BUILT_IN_STRCAT:
3969 case BUILT_IN_STRNCAT:
3971 tree res = gimple_call_lhs (t);
3972 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
3973 == BUILT_IN_BCOPY ? 1 : 0));
3974 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
3975 == BUILT_IN_BCOPY ? 0 : 1));
3976 if (res != NULL_TREE)
3978 get_constraint_for (res, &lhsc);
3979 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
3980 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
3981 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY)
3982 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
3983 else
3984 get_constraint_for (dest, &rhsc);
3985 process_all_all_constraints (lhsc, rhsc);
3986 VEC_free (ce_s, heap, lhsc);
3987 VEC_free (ce_s, heap, rhsc);
3989 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
3990 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
3991 do_deref (&lhsc);
3992 do_deref (&rhsc);
3993 process_all_all_constraints (lhsc, rhsc);
3994 VEC_free (ce_s, heap, lhsc);
3995 VEC_free (ce_s, heap, rhsc);
3996 return;
3998 case BUILT_IN_MEMSET:
4000 tree res = gimple_call_lhs (t);
4001 tree dest = gimple_call_arg (t, 0);
4002 unsigned i;
4003 ce_s *lhsp;
4004 struct constraint_expr ac;
4005 if (res != NULL_TREE)
4007 get_constraint_for (res, &lhsc);
4008 get_constraint_for (dest, &rhsc);
4009 process_all_all_constraints (lhsc, rhsc);
4010 VEC_free (ce_s, heap, lhsc);
4011 VEC_free (ce_s, heap, rhsc);
4013 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4014 do_deref (&lhsc);
4015 if (flag_delete_null_pointer_checks
4016 && integer_zerop (gimple_call_arg (t, 1)))
4018 ac.type = ADDRESSOF;
4019 ac.var = nothing_id;
4021 else
4023 ac.type = SCALAR;
4024 ac.var = integer_id;
4026 ac.offset = 0;
4027 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
4028 process_constraint (new_constraint (*lhsp, ac));
4029 VEC_free (ce_s, heap, lhsc);
4030 return;
4032 /* All the following functions do not return pointers, do not
4033 modify the points-to sets of memory reachable from their
4034 arguments and do not add to the ESCAPED solution. */
4035 case BUILT_IN_SINCOS:
4036 case BUILT_IN_SINCOSF:
4037 case BUILT_IN_SINCOSL:
4038 case BUILT_IN_FREXP:
4039 case BUILT_IN_FREXPF:
4040 case BUILT_IN_FREXPL:
4041 case BUILT_IN_GAMMA_R:
4042 case BUILT_IN_GAMMAF_R:
4043 case BUILT_IN_GAMMAL_R:
4044 case BUILT_IN_LGAMMA_R:
4045 case BUILT_IN_LGAMMAF_R:
4046 case BUILT_IN_LGAMMAL_R:
4047 case BUILT_IN_MODF:
4048 case BUILT_IN_MODFF:
4049 case BUILT_IN_MODFL:
4050 case BUILT_IN_REMQUO:
4051 case BUILT_IN_REMQUOF:
4052 case BUILT_IN_REMQUOL:
4053 case BUILT_IN_FREE:
4054 return;
4055 /* Trampolines are special - they set up passing the static
4056 frame. */
4057 case BUILT_IN_INIT_TRAMPOLINE:
4059 tree tramp = gimple_call_arg (t, 0);
4060 tree nfunc = gimple_call_arg (t, 1);
4061 tree frame = gimple_call_arg (t, 2);
4062 unsigned i;
4063 struct constraint_expr lhs, *rhsp;
4064 if (in_ipa_mode)
4066 varinfo_t nfi = NULL;
4067 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4068 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4069 if (nfi)
4071 lhs = get_function_part_constraint (nfi, fi_static_chain);
4072 get_constraint_for (frame, &rhsc);
4073 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
4074 process_constraint (new_constraint (lhs, *rhsp));
4075 VEC_free (ce_s, heap, rhsc);
4077 /* Make the frame point to the function for
4078 the trampoline adjustment call. */
4079 get_constraint_for (tramp, &lhsc);
4080 do_deref (&lhsc);
4081 get_constraint_for (nfunc, &rhsc);
4082 process_all_all_constraints (lhsc, rhsc);
4083 VEC_free (ce_s, heap, rhsc);
4084 VEC_free (ce_s, heap, lhsc);
4086 return;
4089 /* Else fallthru to generic handling which will let
4090 the frame escape. */
4091 break;
4093 case BUILT_IN_ADJUST_TRAMPOLINE:
4095 tree tramp = gimple_call_arg (t, 0);
4096 tree res = gimple_call_lhs (t);
4097 if (in_ipa_mode && res)
4099 get_constraint_for (res, &lhsc);
4100 get_constraint_for (tramp, &rhsc);
4101 do_deref (&rhsc);
4102 process_all_all_constraints (lhsc, rhsc);
4103 VEC_free (ce_s, heap, rhsc);
4104 VEC_free (ce_s, heap, lhsc);
4106 return;
4108 /* Variadic arguments need to be handled in IPA
4109 mode as well. */
4110 case BUILT_IN_VA_START:
4112 if (in_ipa_mode)
4114 tree valist = gimple_call_arg (t, 0);
4115 struct constraint_expr rhs, *lhsp;
4116 unsigned i;
4117 /* The va_list gets access to pointers in variadic
4118 arguments. */
4119 fi = lookup_vi_for_tree (cfun->decl);
4120 gcc_assert (fi != NULL);
4121 get_constraint_for (valist, &lhsc);
4122 do_deref (&lhsc);
4123 rhs = get_function_part_constraint (fi, ~0);
4124 rhs.type = ADDRESSOF;
4125 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
4126 process_constraint (new_constraint (*lhsp, rhs));
4127 VEC_free (ce_s, heap, lhsc);
4128 /* va_list is clobbered. */
4129 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4130 return;
4132 break;
4134 /* va_end doesn't have any effect that matters. */
4135 case BUILT_IN_VA_END:
4136 return;
4137 /* printf-style functions may have hooks to set pointers to
4138 point to somewhere into the generated string. Leave them
4139 for a later exercise... */
4140 default:
4141 /* Fallthru to general call handling. */;
4143 if (!in_ipa_mode
4144 || (fndecl
4145 && (!(fi = lookup_vi_for_tree (fndecl))
4146 || !fi->is_fn_info)))
4148 VEC(ce_s, heap) *rhsc = NULL;
4149 int flags = gimple_call_flags (t);
4151 /* Const functions can return their arguments and addresses
4152 of global memory but not of escaped memory. */
4153 if (flags & (ECF_CONST|ECF_NOVOPS))
4155 if (gimple_call_lhs (t)
4156 && could_have_pointers (gimple_call_lhs (t)))
4157 handle_const_call (t, &rhsc);
4159 /* Pure functions can return addresses in and of memory
4160 reachable from their arguments, but they are not an escape
4161 point for reachable memory of their arguments. */
4162 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4163 handle_pure_call (t, &rhsc);
4164 else
4165 handle_rhs_call (t, &rhsc);
4166 if (gimple_call_lhs (t)
4167 && could_have_pointers (gimple_call_lhs (t)))
4168 handle_lhs_call (gimple_call_lhs (t), flags, rhsc, fndecl);
4169 VEC_free (ce_s, heap, rhsc);
4171 else
4173 tree lhsop;
4174 unsigned j;
4176 fi = get_fi_for_callee (t);
4178 /* Assign all the passed arguments to the appropriate incoming
4179 parameters of the function. */
4180 for (j = 0; j < gimple_call_num_args (t); j++)
4182 struct constraint_expr lhs;
4183 struct constraint_expr *rhsp;
4184 tree arg = gimple_call_arg (t, j);
4186 if (!could_have_pointers (arg))
4187 continue;
4189 get_constraint_for (arg, &rhsc);
4190 lhs = get_function_part_constraint (fi, fi_parm_base + j);
4191 while (VEC_length (ce_s, rhsc) != 0)
4193 rhsp = VEC_last (ce_s, rhsc);
4194 process_constraint (new_constraint (lhs, *rhsp));
4195 VEC_pop (ce_s, rhsc);
4199 /* If we are returning a value, assign it to the result. */
4200 lhsop = gimple_call_lhs (t);
4201 if (lhsop
4202 && could_have_pointers (lhsop))
4204 struct constraint_expr rhs;
4205 struct constraint_expr *lhsp;
4207 get_constraint_for (lhsop, &lhsc);
4208 rhs = get_function_part_constraint (fi, fi_result);
4209 if (fndecl
4210 && DECL_RESULT (fndecl)
4211 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4213 VEC(ce_s, heap) *tem = NULL;
4214 VEC_safe_push (ce_s, heap, tem, &rhs);
4215 do_deref (&tem);
4216 rhs = *VEC_index (ce_s, tem, 0);
4217 VEC_free (ce_s, heap, tem);
4219 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
4220 process_constraint (new_constraint (*lhsp, rhs));
4223 /* If we pass the result decl by reference, honor that. */
4224 if (lhsop
4225 && fndecl
4226 && DECL_RESULT (fndecl)
4227 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4229 struct constraint_expr lhs;
4230 struct constraint_expr *rhsp;
4232 get_constraint_for_address_of (lhsop, &rhsc);
4233 lhs = get_function_part_constraint (fi, fi_result);
4234 for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
4235 process_constraint (new_constraint (lhs, *rhsp));
4236 VEC_free (ce_s, heap, rhsc);
4239 /* If we use a static chain, pass it along. */
4240 if (gimple_call_chain (t))
4242 struct constraint_expr lhs;
4243 struct constraint_expr *rhsp;
4245 get_constraint_for (gimple_call_chain (t), &rhsc);
4246 lhs = get_function_part_constraint (fi, fi_static_chain);
4247 for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
4248 process_constraint (new_constraint (lhs, *rhsp));
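      /* A rough sketch of the IPA case handled above: for a call
             r = foo (a, b);
         with function info for foo this produces
             foo.arg0 = a,  foo.arg1 = b,  r = foo.result
         and  foo.chain = <static chain>  if one is passed
         (the foo.* names are informal for the function parts).  */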
4252 /* Otherwise, just a regular assignment statement. We only care
4253 about operations with a pointer result; others are dealt with as
4254 escape points if they have pointer operands. */
4255 else if (is_gimple_assign (t)
4256 && could_have_pointers (gimple_assign_lhs (t)))
4258 /* Otherwise, just a regular assignment statement. */
4259 tree lhsop = gimple_assign_lhs (t);
4260 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4262 if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4263 do_structure_copy (lhsop, rhsop);
4264 else
4266 struct constraint_expr temp;
4267 get_constraint_for (lhsop, &lhsc);
4269 if (gimple_assign_rhs_code (t) == POINTER_PLUS_EXPR)
4270 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4271 gimple_assign_rhs2 (t), &rhsc);
4272 else if ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
4273 && !(POINTER_TYPE_P (gimple_expr_type (t))
4274 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4275 || gimple_assign_single_p (t))
4276 get_constraint_for (rhsop, &rhsc);
4277 else
4279 temp.type = ADDRESSOF;
4280 temp.var = anything_id;
4281 temp.offset = 0;
4282 VEC_safe_push (ce_s, heap, rhsc, &temp);
4284 process_all_all_constraints (lhsc, rhsc);
4286 /* If there is a store to a global variable the rhs escapes. */
4287 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4288 && DECL_P (lhsop)
4289 && is_global_var (lhsop)
4290 && (!in_ipa_mode
4291 || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
4292 make_escape_constraint (rhsop);
4293 /* If this is a conversion of a non-restrict pointer to a
4294 restrict pointer track it with a new heapvar. */
4295 else if (gimple_assign_cast_p (t)
4296 && POINTER_TYPE_P (TREE_TYPE (rhsop))
4297 && POINTER_TYPE_P (TREE_TYPE (lhsop))
4298 && !TYPE_RESTRICT (TREE_TYPE (rhsop))
4299 && TYPE_RESTRICT (TREE_TYPE (lhsop)))
4300 make_constraint_from_restrict (get_vi_for_tree (lhsop),
4301 "CAST_RESTRICT");
4303 /* For conversions of pointers to non-pointers the pointer escapes. */
4304 else if (gimple_assign_cast_p (t)
4305 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (t)))
4306 && !POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (t))))
4308 make_escape_constraint (gimple_assign_rhs1 (t));
4310 /* Handle escapes through return. */
4311 else if (gimple_code (t) == GIMPLE_RETURN
4312 && gimple_return_retval (t) != NULL_TREE
4313 && could_have_pointers (gimple_return_retval (t)))
4315 fi = NULL;
4316 if (!in_ipa_mode
4317 || !(fi = get_vi_for_tree (cfun->decl)))
4318 make_escape_constraint (gimple_return_retval (t));
4319 else if (in_ipa_mode
4320 && fi != NULL)
4322 struct constraint_expr lhs;
4323 struct constraint_expr *rhsp;
4324 unsigned i;
4326 lhs = get_function_part_constraint (fi, fi_result);
4327 get_constraint_for (gimple_return_retval (t), &rhsc);
4328 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
4329 process_constraint (new_constraint (lhs, *rhsp));
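      /* E.g. for  return p;  this produces  <fn>.result = p  when
         function info for the current function is available (IPA
         mode); otherwise p simply escapes.  */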
4332 /* Handle asms conservatively by adding escape constraints to everything. */
4333 else if (gimple_code (t) == GIMPLE_ASM)
4335 unsigned i, noutputs;
4336 const char **oconstraints;
4337 const char *constraint;
4338 bool allows_mem, allows_reg, is_inout;
4340 noutputs = gimple_asm_noutputs (t);
4341 oconstraints = XALLOCAVEC (const char *, noutputs);
4343 for (i = 0; i < noutputs; ++i)
4345 tree link = gimple_asm_output_op (t, i);
4346 tree op = TREE_VALUE (link);
4348 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4349 oconstraints[i] = constraint;
4350 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4351 &allows_reg, &is_inout);
4353 /* A memory constraint makes the address of the operand escape. */
4354 if (!allows_reg && allows_mem)
4355 make_escape_constraint (build_fold_addr_expr (op));
4357 /* The asm may read global memory, so outputs may point to
4358 any global memory. */
4359 if (op && could_have_pointers (op))
4361 VEC(ce_s, heap) *lhsc = NULL;
4362 struct constraint_expr rhsc, *lhsp;
4363 unsigned j;
4364 get_constraint_for (op, &lhsc);
4365 rhsc.var = nonlocal_id;
4366 rhsc.offset = 0;
4367 rhsc.type = SCALAR;
4368 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
4369 process_constraint (new_constraint (*lhsp, rhsc));
4370 VEC_free (ce_s, heap, lhsc);
4373 for (i = 0; i < gimple_asm_ninputs (t); ++i)
4375 tree link = gimple_asm_input_op (t, i);
4376 tree op = TREE_VALUE (link);
4378 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4380 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
4381 &allows_mem, &allows_reg);
4383 /* A memory constraint makes the address of the operand escape. */
4384 if (!allows_reg && allows_mem)
4385 make_escape_constraint (build_fold_addr_expr (op));
4386 /* Strictly we'd only need the constraint to ESCAPED if
4387 the asm clobbers memory, otherwise using something
4388 along the lines of per-call clobbers/uses would be enough. */
4389 else if (op && could_have_pointers (op))
4390 make_escape_constraint (op);
4394 VEC_free (ce_s, heap, rhsc);
4395 VEC_free (ce_s, heap, lhsc);
4399 /* Create a constraint adding to the clobber set of FI the memory
4400 pointed to by PTR. */
4402 static void
4403 process_ipa_clobber (varinfo_t fi, tree ptr)
4405 VEC(ce_s, heap) *ptrc = NULL;
4406 struct constraint_expr *c, lhs;
4407 unsigned i;
4408 get_constraint_for (ptr, &ptrc);
4409 lhs = get_function_part_constraint (fi, fi_clobbers);
4410 for (i = 0; VEC_iterate (ce_s, ptrc, i, c); i++)
4411 process_constraint (new_constraint (lhs, *c));
4412 VEC_free (ce_s, heap, ptrc);
4415 /* Walk statement T setting up clobber and use constraints according to the
4416 references found in T. This function is a main part of the
4417 IPA constraint builder. */
4419 static void
4420 find_func_clobbers (gimple origt)
4422 gimple t = origt;
4423 VEC(ce_s, heap) *lhsc = NULL;
4424 VEC(ce_s, heap) *rhsc = NULL;
4425 varinfo_t fi;
4427 /* Add constraints for clobbered/used in IPA mode.
4428 We are not interested in what automatic variables are clobbered
4429 or used as we only use the information in the caller to which
4430 they do not escape. */
4431 gcc_assert (in_ipa_mode);
4433 /* If the stmt refers to memory in any way it had better have a VUSE. */
4434 if (gimple_vuse (t) == NULL_TREE)
4435 return;
4437 /* We'd better have function information for the current function. */
4438 fi = lookup_vi_for_tree (cfun->decl);
4439 gcc_assert (fi != NULL);
4441 /* Account for stores in assignments and calls. */
4442 if (gimple_vdef (t) != NULL_TREE
4443 && gimple_has_lhs (t))
4445 tree lhs = gimple_get_lhs (t);
4446 tree tem = lhs;
4447 while (handled_component_p (tem))
4448 tem = TREE_OPERAND (tem, 0);
4449 if ((DECL_P (tem)
4450 && !auto_var_in_fn_p (tem, cfun->decl))
4451 || INDIRECT_REF_P (tem))
4453 struct constraint_expr lhsc, *rhsp;
4454 unsigned i;
4455 lhsc = get_function_part_constraint (fi, fi_clobbers);
4456 get_constraint_for_address_of (lhs, &rhsc);
4457 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
4458 process_constraint (new_constraint (lhsc, *rhsp));
4459 VEC_free (ce_s, heap, rhsc);
4463 /* Account for uses in assignments and returns. */
4464 if (gimple_assign_single_p (t)
4465 || (gimple_code (t) == GIMPLE_RETURN
4466 && gimple_return_retval (t) != NULL_TREE))
4468 tree rhs = (gimple_assign_single_p (t)
4469 ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
4470 tree tem = rhs;
4471 while (handled_component_p (tem))
4472 tem = TREE_OPERAND (tem, 0);
4473 if ((DECL_P (tem)
4474 && !auto_var_in_fn_p (tem, cfun->decl))
4475 || INDIRECT_REF_P (tem))
4477 struct constraint_expr lhs, *rhsp;
4478 unsigned i;
4479 lhs = get_function_part_constraint (fi, fi_uses);
4480 get_constraint_for_address_of (rhs, &rhsc);
4481 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
4482 process_constraint (new_constraint (lhs, *rhsp));
4483 VEC_free (ce_s, heap, rhsc);
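      /* Informally: in a function FOO, a store  glob.x = q;  to a
         non-automatic variable adds &glob.x to FOO's fi_clobbers,
         and a load  tmp = glob.x;  adds &glob.x to FOO's fi_uses
         (glob being an illustrative global).  */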
4487 if (is_gimple_call (t))
4489 varinfo_t cfi = NULL;
4490 tree decl = gimple_call_fndecl (t);
4491 struct constraint_expr lhs, rhs;
4492 unsigned i, j;
4494 /* For builtins we do not have separate function info. For those
4495 that we do not generate escapes for, we have to generate clobbers/uses. */
4496 if (decl
4497 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
4498 switch (DECL_FUNCTION_CODE (decl))
4500 /* The following functions use and clobber memory pointed to
4501 by their arguments. */
4502 case BUILT_IN_STRCPY:
4503 case BUILT_IN_STRNCPY:
4504 case BUILT_IN_BCOPY:
4505 case BUILT_IN_MEMCPY:
4506 case BUILT_IN_MEMMOVE:
4507 case BUILT_IN_MEMPCPY:
4508 case BUILT_IN_STPCPY:
4509 case BUILT_IN_STPNCPY:
4510 case BUILT_IN_STRCAT:
4511 case BUILT_IN_STRNCAT:
4513 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4514 == BUILT_IN_BCOPY ? 1 : 0));
4515 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4516 == BUILT_IN_BCOPY ? 0 : 1));
4517 unsigned i;
4518 struct constraint_expr *rhsp, *lhsp;
4519 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4520 lhs = get_function_part_constraint (fi, fi_clobbers);
4521 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); i++)
4522 process_constraint (new_constraint (lhs, *lhsp));
4523 VEC_free (ce_s, heap, lhsc);
4524 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4525 lhs = get_function_part_constraint (fi, fi_uses);
4526 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); i++)
4527 process_constraint (new_constraint (lhs, *rhsp));
4528 VEC_free (ce_s, heap, rhsc);
4529 return;
4531 /* The following function clobbers memory pointed to by
4532 its argument. */
4533 case BUILT_IN_MEMSET:
4535 tree dest = gimple_call_arg (t, 0);
4536 unsigned i;
4537 ce_s *lhsp;
4538 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4539 lhs = get_function_part_constraint (fi, fi_clobbers);
4540 for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); i++)
4541 process_constraint (new_constraint (lhs, *lhsp));
4542 VEC_free (ce_s, heap, lhsc);
4543 return;
4545 /* The following functions clobber their second and third
4546 arguments. */
4547 case BUILT_IN_SINCOS:
4548 case BUILT_IN_SINCOSF:
4549 case BUILT_IN_SINCOSL:
4551 process_ipa_clobber (fi, gimple_call_arg (t, 1));
4552 process_ipa_clobber (fi, gimple_call_arg (t, 2));
4553 return;
4555 /* The following functions clobber their second argument. */
4556 case BUILT_IN_FREXP:
4557 case BUILT_IN_FREXPF:
4558 case BUILT_IN_FREXPL:
4559 case BUILT_IN_LGAMMA_R:
4560 case BUILT_IN_LGAMMAF_R:
4561 case BUILT_IN_LGAMMAL_R:
4562 case BUILT_IN_GAMMA_R:
4563 case BUILT_IN_GAMMAF_R:
4564 case BUILT_IN_GAMMAL_R:
4565 case BUILT_IN_MODF:
4566 case BUILT_IN_MODFF:
4567 case BUILT_IN_MODFL:
4569 process_ipa_clobber (fi, gimple_call_arg (t, 1));
4570 return;
4572 /* The following functions clobber their third argument. */
4573 case BUILT_IN_REMQUO:
4574 case BUILT_IN_REMQUOF:
4575 case BUILT_IN_REMQUOL:
4577 process_ipa_clobber (fi, gimple_call_arg (t, 2));
4578 return;
4580 /* The following functions neither read nor clobber memory. */
4581 case BUILT_IN_FREE:
4582 return;
4583 /* Trampolines are of no interest to us. */
4584 case BUILT_IN_INIT_TRAMPOLINE:
4585 case BUILT_IN_ADJUST_TRAMPOLINE:
4586 return;
4587 case BUILT_IN_VA_START:
4588 case BUILT_IN_VA_END:
4589 return;
4590 /* printf-style functions may have hooks to set pointers to
4591 point to somewhere into the generated string. Leave them
4592 for a later exercise... */
4593 default:
4594 /* Fallthru to general call handling. */;
4597 /* Parameters passed by value are used. */
4598 lhs = get_function_part_constraint (fi, fi_uses);
4599 for (i = 0; i < gimple_call_num_args (t); i++)
4601 struct constraint_expr *rhsp;
4602 tree arg = gimple_call_arg (t, i);
4604 if (TREE_CODE (arg) == SSA_NAME
4605 || is_gimple_min_invariant (arg))
4606 continue;
4608 get_constraint_for_address_of (arg, &rhsc);
4609 for (j = 0; VEC_iterate (ce_s, rhsc, j, rhsp); j++)
4610 process_constraint (new_constraint (lhs, *rhsp));
4611 VEC_free (ce_s, heap, rhsc);
4614 /* Build constraints for propagating clobbers/uses along the
4615 callgraph edges. */
4616 cfi = get_fi_for_callee (t);
4617 if (cfi->id == anything_id)
4619 if (gimple_vdef (t))
4620 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
4621 anything_id);
4622 make_constraint_from (first_vi_for_offset (fi, fi_uses),
4623 anything_id);
4624 return;
4627 /* For callees without function info (that's external functions),
4628 ESCAPED is clobbered and used. */
4629 if (gimple_call_fndecl (t)
4630 && !cfi->is_fn_info)
4632 varinfo_t vi;
4634 if (gimple_vdef (t))
4635 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
4636 escaped_id);
4637 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
4639 /* Also honor the call statement use/clobber info. */
4640 if ((vi = lookup_call_clobber_vi (t)) != NULL)
4641 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
4642 vi->id);
4643 if ((vi = lookup_call_use_vi (t)) != NULL)
4644 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
4645 vi->id);
4646 return;
4649 /* Otherwise the caller clobbers and uses what the callee does.
4650 ??? This should use a new complex constraint that filters
4651 local variables of the callee. */
4652 if (gimple_vdef (t))
4654 lhs = get_function_part_constraint (fi, fi_clobbers);
4655 rhs = get_function_part_constraint (cfi, fi_clobbers);
4656 process_constraint (new_constraint (lhs, rhs));
4658 lhs = get_function_part_constraint (fi, fi_uses);
4659 rhs = get_function_part_constraint (cfi, fi_uses);
4660 process_constraint (new_constraint (lhs, rhs));
4662 else if (gimple_code (t) == GIMPLE_ASM)
4664 /* ??? Ick. We can do better. */
4665 if (gimple_vdef (t))
4666 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
4667 anything_id);
4668 make_constraint_from (first_vi_for_offset (fi, fi_uses),
4669 anything_id);
4672 VEC_free (ce_s, heap, rhsc);
4676 /* Find the first varinfo in the same variable as START that overlaps with
4677 OFFSET. Return NULL if we can't find one. */
4679 static varinfo_t
4680 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
4682 /* If the offset is outside of the variable, bail out. */
4683 if (offset >= start->fullsize)
4684 return NULL;
4686 /* If we cannot reach offset from start, lookup the first field
4687 and start from there. */
4688 if (start->offset > offset)
4689 start = lookup_vi_for_tree (start->decl);
4691 while (start)
4693 /* We may not find a variable in the field list with the actual
4694 offset when we have glommed a structure to a variable.
4695 In that case, however, offset should still be within the size
4696 of the variable. */
4697 if (offset >= start->offset
4698 && (offset - start->offset) < start->size)
4699 return start;
4701 start = start->next;
4704 return NULL;
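   /* For instance (illustrative field layout): if START's variable has
      subfields at bit offsets 0, 32 and 64 with sizes 32, 32 and 64,
      an OFFSET of 40 returns the field at offset 32, while an OFFSET
      at or beyond the full size returns NULL.  */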
4707 /* Find the first varinfo in the same variable as START that overlaps with
4708 OFFSET. If there is no such varinfo the varinfo directly preceding
4709 OFFSET is returned. */
4711 static varinfo_t
4712 first_or_preceding_vi_for_offset (varinfo_t start,
4713 unsigned HOST_WIDE_INT offset)
4715 /* If we cannot reach offset from start, lookup the first field
4716 and start from there. */
4717 if (start->offset > offset)
4718 start = lookup_vi_for_tree (start->decl);
4720 /* We may not find a variable in the field list with the actual
4721 offset when we have glommed a structure to a variable.
4722 In that case, however, offset should still be within the size
4723 of the variable.
4724 If we get beyond the offset we are looking for, return the field
4725 directly preceding it, which may be the last field. */
4726 while (start->next
4727 && offset >= start->offset
4728 && !((offset - start->offset) < start->size))
4729 start = start->next;
4731 return start;
4735 /* This structure is used during pushing fields onto the fieldstack
4736 to track the offset of the field, since bitpos_of_field gives it
4737 relative to its immediate containing type, and we want it relative
4738 to the ultimate containing object. */
4740 struct fieldoff
4742 /* Offset from the base of the base containing object to this field. */
4743 HOST_WIDE_INT offset;
4745 /* Size, in bits, of the field. */
4746 unsigned HOST_WIDE_INT size;
4748 unsigned has_unknown_size : 1;
4750 unsigned may_have_pointers : 1;
4752 unsigned only_restrict_pointers : 1;
4754 typedef struct fieldoff fieldoff_s;
4756 DEF_VEC_O(fieldoff_s);
4757 DEF_VEC_ALLOC_O(fieldoff_s,heap);
4759 /* qsort comparison function for two fieldoff's PA and PB */
4761 static int
4762 fieldoff_compare (const void *pa, const void *pb)
4764 const fieldoff_s *foa = (const fieldoff_s *)pa;
4765 const fieldoff_s *fob = (const fieldoff_s *)pb;
4766 unsigned HOST_WIDE_INT foasize, fobsize;
4768 if (foa->offset < fob->offset)
4769 return -1;
4770 else if (foa->offset > fob->offset)
4771 return 1;
4773 foasize = foa->size;
4774 fobsize = fob->size;
4775 if (foasize < fobsize)
4776 return -1;
4777 else if (foasize > fobsize)
4778 return 1;
4779 return 0;
4782 /* Sort a fieldstack according to the field offset and sizes. */
4783 static void
4784 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
4786 qsort (VEC_address (fieldoff_s, fieldstack),
4787 VEC_length (fieldoff_s, fieldstack),
4788 sizeof (fieldoff_s),
4789 fieldoff_compare);
4792 /* Return true if V is a tree that we can have subvars for.
4793 Normally, this is any aggregate type. Also complex
4794 types which are not gimple registers can have subvars. */
4796 static inline bool
4797 var_can_have_subvars (const_tree v)
4799 /* Volatile variables should never have subvars. */
4800 if (TREE_THIS_VOLATILE (v))
4801 return false;
4803 /* Non decls or memory tags can never have subvars. */
4804 if (!DECL_P (v))
4805 return false;
4807 /* Aggregates without overlapping fields can have subvars. */
4808 if (TREE_CODE (TREE_TYPE (v)) == RECORD_TYPE)
4809 return true;
4811 return false;
4814 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
4815 the fields of TYPE onto fieldstack, recording their offsets along
4816 the way.
4818 OFFSET is used to keep track of the offset in this entire
4819 structure, rather than just the immediately containing structure.
4820 Returns false if the caller is supposed to handle the field we
4821 recursed for. */
4823 static bool
4824 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4825 HOST_WIDE_INT offset)
4827 tree field;
4828 bool empty_p = true;
4830 if (TREE_CODE (type) != RECORD_TYPE)
4831 return false;
4833 /* If the vector of fields is growing too big, bail out early.
4834 Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE; make
4835 sure this check fails. */
4836 if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
4837 return false;
4839 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4840 if (TREE_CODE (field) == FIELD_DECL)
4842 bool push = false;
4843 HOST_WIDE_INT foff = bitpos_of_field (field);
4845 if (!var_can_have_subvars (field)
4846 || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4847 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
4848 push = true;
4849 else if (!push_fields_onto_fieldstack
4850 (TREE_TYPE (field), fieldstack, offset + foff)
4851 && (DECL_SIZE (field)
4852 && !integer_zerop (DECL_SIZE (field))))
4853 /* Empty structures may have actual size, like in C++. So,
4854 if we didn't push any subfields and the size is
4855 nonzero, push the field itself onto the stack. */
4856 push = true;
4858 if (push)
4860 fieldoff_s *pair = NULL;
4861 bool has_unknown_size = false;
4863 if (!VEC_empty (fieldoff_s, *fieldstack))
4864 pair = VEC_last (fieldoff_s, *fieldstack);
4866 if (!DECL_SIZE (field)
4867 || !host_integerp (DECL_SIZE (field), 1))
4868 has_unknown_size = true;
4870 /* If adjacent fields do not contain pointers merge them. */
4871 if (pair
4872 && !pair->may_have_pointers
4873 && !pair->has_unknown_size
4874 && !has_unknown_size
4875 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff
4876 && !could_have_pointers (field))
4878 pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
4880 else
4882 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4883 pair->offset = offset + foff;
4884 pair->has_unknown_size = has_unknown_size;
4885 if (!has_unknown_size)
4886 pair->size = TREE_INT_CST_LOW (DECL_SIZE (field));
4887 else
4888 pair->size = -1;
4889 pair->may_have_pointers = could_have_pointers (field);
4890 pair->only_restrict_pointers
4891 = (!has_unknown_size
4892 && POINTER_TYPE_P (TREE_TYPE (field))
4893 && TYPE_RESTRICT (TREE_TYPE (field)));
4897 empty_p = false;
4900 return !empty_p;
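   /* An illustrative example (actual offsets depend on the target's
      type layout): for
          struct S { struct { int i; int *p; } in; int *q; };
      the fieldstack receives one entry each for in.i, in.p and q, with
      offsets relative to the start of S rather than to the inner
      struct, and only the pointer fields marked may_have_pointers.  */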
4903 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4904 if it is a varargs function. */
4906 static unsigned int
4907 count_num_arguments (tree decl, bool *is_varargs)
4909 unsigned int num = 0;
4910 tree t;
4912 /* Capture named arguments for K&R functions. They do not
4913 have a prototype and thus no TYPE_ARG_TYPES. */
4914 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
4915 ++num;
4917 /* Check if the function has variadic arguments. */
4918 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
4919 if (TREE_VALUE (t) == void_type_node)
4920 break;
4921 if (!t)
4922 *is_varargs = true;
4924 return num;
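   /* E.g. for  int f (int a, char *b, ...)  this returns 2 and sets
      *IS_VARARGS to true; for an unprototyped K&R definition the
      named arguments are still counted via DECL_ARGUMENTS.  */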
4927 /* Create function information for DECL, using NAME, and return the
4928 index of the variable we've created for the function. */
4930 static unsigned int
4931 create_function_info_for (tree decl, const char *name)
4933 struct function *fn = DECL_STRUCT_FUNCTION (decl);
4934 varinfo_t vi, prev_vi;
4935 tree arg;
4936 unsigned int i;
4937 bool is_varargs = false;
4938 unsigned int num_args = count_num_arguments (decl, &is_varargs);
4940 /* Create the variable info. */
4942 vi = new_var_info (decl, name);
4943 vi->offset = 0;
4944 vi->size = 1;
4945 vi->fullsize = fi_parm_base + num_args;
4946 vi->is_fn_info = 1;
4947 vi->may_have_pointers = false;
4948 if (is_varargs)
4949 vi->fullsize = ~0;
4950 insert_vi_for_tree (vi->decl, vi);
4952 prev_vi = vi;
4954 /* Create a variable for things the function clobbers and one for
4955 things the function uses. */
4957 varinfo_t clobbervi, usevi;
4958 const char *newname;
4959 char *tempname;
4961 asprintf (&tempname, "%s.clobber", name);
4962 newname = ggc_strdup (tempname);
4963 free (tempname);
4965 clobbervi = new_var_info (NULL, newname);
4966 clobbervi->offset = fi_clobbers;
4967 clobbervi->size = 1;
4968 clobbervi->fullsize = vi->fullsize;
4969 clobbervi->is_full_var = true;
4970 clobbervi->is_global_var = false;
4971 gcc_assert (prev_vi->offset < clobbervi->offset);
4972 prev_vi->next = clobbervi;
4973 prev_vi = clobbervi;
4975 asprintf (&tempname, "%s.use", name);
4976 newname = ggc_strdup (tempname);
4977 free (tempname);
4979 usevi = new_var_info (NULL, newname);
4980 usevi->offset = fi_uses;
4981 usevi->size = 1;
4982 usevi->fullsize = vi->fullsize;
4983 usevi->is_full_var = true;
4984 usevi->is_global_var = false;
4985 gcc_assert (prev_vi->offset < usevi->offset);
4986 prev_vi->next = usevi;
4987 prev_vi = usevi;
4990 /* And one for the static chain. */
4991 if (fn->static_chain_decl != NULL_TREE)
4993 varinfo_t chainvi;
4994 const char *newname;
4995 char *tempname;
4997 asprintf (&tempname, "%s.chain", name);
4998 newname = ggc_strdup (tempname);
4999 free (tempname);
5001 chainvi = new_var_info (fn->static_chain_decl, newname);
5002 chainvi->offset = fi_static_chain;
5003 chainvi->size = 1;
5004 chainvi->fullsize = vi->fullsize;
5005 chainvi->is_full_var = true;
5006 chainvi->is_global_var = false;
5007 gcc_assert (prev_vi->offset < chainvi->offset);
5008 prev_vi->next = chainvi;
5009 prev_vi = chainvi;
5010 insert_vi_for_tree (fn->static_chain_decl, chainvi);
5013 /* Create a variable for the return var. */
5014 if (DECL_RESULT (decl) != NULL
5015 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
5017 varinfo_t resultvi;
5018 const char *newname;
5019 char *tempname;
5020 tree resultdecl = decl;
5022 if (DECL_RESULT (decl))
5023 resultdecl = DECL_RESULT (decl);
5025 asprintf (&tempname, "%s.result", name);
5026 newname = ggc_strdup (tempname);
5027 free (tempname);
5029 resultvi = new_var_info (resultdecl, newname);
5030 resultvi->offset = fi_result;
5031 resultvi->size = 1;
5032 resultvi->fullsize = vi->fullsize;
5033 resultvi->is_full_var = true;
5034 if (DECL_RESULT (decl))
5035 resultvi->may_have_pointers = could_have_pointers (DECL_RESULT (decl));
5036 gcc_assert (prev_vi->offset < resultvi->offset);
5037 prev_vi->next = resultvi;
5038 prev_vi = resultvi;
5039 if (DECL_RESULT (decl))
5040 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
5043 /* Set up variables for each argument. */
5044 arg = DECL_ARGUMENTS (decl);
5045 for (i = 0; i < num_args; i++)
5047 varinfo_t argvi;
5048 const char *newname;
5049 char *tempname;
5050 tree argdecl = decl;
5052 if (arg)
5053 argdecl = arg;
5055 asprintf (&tempname, "%s.arg%d", name, i);
5056 newname = ggc_strdup (tempname);
5057 free (tempname);
5059 argvi = new_var_info (argdecl, newname);
5060 argvi->offset = fi_parm_base + i;
5061 argvi->size = 1;
5062 argvi->is_full_var = true;
5063 argvi->fullsize = vi->fullsize;
5064 if (arg)
5065 argvi->may_have_pointers = could_have_pointers (arg);
5066 gcc_assert (prev_vi->offset < argvi->offset);
5067 prev_vi->next = argvi;
5068 prev_vi = argvi;
5069 if (arg)
5071 insert_vi_for_tree (arg, argvi);
5072 arg = TREE_CHAIN (arg);
5076 /* Add one representative for all further args. */
5077 if (is_varargs)
5079 varinfo_t argvi;
5080 const char *newname;
5081 char *tempname;
5082 tree decl;
5084 asprintf (&tempname, "%s.varargs", name);
5085 newname = ggc_strdup (tempname);
5086 free (tempname);
5088 /* We need something that can be pointed to for va_start. */
5089 decl = create_tmp_var_raw (ptr_type_node, name);
5090 get_var_ann (decl);
5092 argvi = new_var_info (decl, newname);
5093 argvi->offset = fi_parm_base + num_args;
5094 argvi->size = ~0;
5095 argvi->is_full_var = true;
5096 argvi->is_heap_var = true;
5097 argvi->fullsize = vi->fullsize;
5098 gcc_assert (prev_vi->offset < argvi->offset);
5099 prev_vi->next = argvi;
5100 prev_vi = argvi;
5103 return vi->id;
5107 /* Return true if FIELDSTACK contains fields that overlap.
5108 FIELDSTACK is assumed to be sorted by offset. */
5110 static bool
5111 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
5113 fieldoff_s *fo = NULL;
5114 unsigned int i;
5115 HOST_WIDE_INT lastoffset = -1;
5117 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
5119 if (fo->offset == lastoffset)
5120 return true;
5121 lastoffset = fo->offset;
5123 return false;
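   /* E.g. a sorted fieldstack with offsets 0, 32, 32, 64 makes this
      return true, because two fields start at offset 32.  */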
5126 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5127 This will also create any varinfo structures necessary for fields
5128 of DECL. */
5130 static varinfo_t
5131 create_variable_info_for_1 (tree decl, const char *name)
5133 varinfo_t vi, newvi;
5134 tree decl_type = TREE_TYPE (decl);
5135 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5136 VEC (fieldoff_s,heap) *fieldstack = NULL;
5137 fieldoff_s *fo;
5138 unsigned int i;
5140 if (!declsize
5141 || !host_integerp (declsize, 1))
5143 vi = new_var_info (decl, name);
5144 vi->offset = 0;
5145 vi->size = ~0;
5146 vi->fullsize = ~0;
5147 vi->is_unknown_size_var = true;
5148 vi->is_full_var = true;
5149 vi->may_have_pointers = could_have_pointers (decl);
5150 return vi;
5153 /* Collect field information. */
5154 if (use_field_sensitive
5155 && var_can_have_subvars (decl)
5156 /* ??? Force us to not use subfields for global initializers
5157 in IPA mode. Else we'd have to parse arbitrary initializers. */
5158 && !(in_ipa_mode
5159 && is_global_var (decl)
5160 && DECL_INITIAL (decl)))
5162 fieldoff_s *fo = NULL;
5163 bool notokay = false;
5164 unsigned int i;
5166 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
5168 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
5169 if (fo->has_unknown_size
5170 || fo->offset < 0)
5172 notokay = true;
5173 break;
5176 /* We can't sort them if we have a field with a variable sized type,
5177 which will make notokay = true. In that case, we are going to return
5178 without creating varinfos for the fields anyway, so sorting them is a
5179 waste to boot. */
5180 if (!notokay)
5182 sort_fieldstack (fieldstack);
5183 /* Due to some C++ FE issues, like PR 22488, we might end up
5184 with what appear to be overlapping fields even though they,
5185 in reality, do not overlap. Until the C++ FE is fixed,
5186 we will simply disable field-sensitivity for these cases. */
5187 notokay = check_for_overlaps (fieldstack);
5190 if (notokay)
5191 VEC_free (fieldoff_s, heap, fieldstack);
5194 /* If we didn't end up collecting sub-variables create a full
5195 variable for the decl. */
5196 if (VEC_length (fieldoff_s, fieldstack) <= 1
5197 || VEC_length (fieldoff_s, fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5199 vi = new_var_info (decl, name);
5200 vi->offset = 0;
5201 vi->may_have_pointers = could_have_pointers (decl);
5202 vi->fullsize = TREE_INT_CST_LOW (declsize);
5203 vi->size = vi->fullsize;
5204 vi->is_full_var = true;
5205 VEC_free (fieldoff_s, heap, fieldstack);
5206 return vi;
5209 vi = new_var_info (decl, name);
5210 vi->fullsize = TREE_INT_CST_LOW (declsize);
5211 for (i = 0, newvi = vi;
5212 VEC_iterate (fieldoff_s, fieldstack, i, fo);
5213 ++i, newvi = newvi->next)
5215 const char *newname = "NULL";
5216 char *tempname;
5218 if (dump_file)
5220 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
5221 "+" HOST_WIDE_INT_PRINT_DEC, name, fo->offset, fo->size);
5222 newname = ggc_strdup (tempname);
5223 free (tempname);
5225 newvi->name = newname;
5226 newvi->offset = fo->offset;
5227 newvi->size = fo->size;
5228 newvi->fullsize = vi->fullsize;
5229 newvi->may_have_pointers = fo->may_have_pointers;
5230 newvi->only_restrict_pointers = fo->only_restrict_pointers;
5231 if (i + 1 < VEC_length (fieldoff_s, fieldstack))
5232 newvi->next = new_var_info (decl, name);
5235 VEC_free (fieldoff_s, heap, fieldstack);
5237 return vi;
5240 static unsigned int
5241 create_variable_info_for (tree decl, const char *name)
5243 varinfo_t vi = create_variable_info_for_1 (decl, name);
5244 unsigned int id = vi->id;
5246 insert_vi_for_tree (decl, vi);
5248 /* Create initial constraints for globals. */
5249 for (; vi; vi = vi->next)
5251 if (!vi->may_have_pointers
5252 || !vi->is_global_var)
5253 continue;
5255 /* Mark global restrict qualified pointers. */
5256 if ((POINTER_TYPE_P (TREE_TYPE (decl))
5257 && TYPE_RESTRICT (TREE_TYPE (decl)))
5258 || vi->only_restrict_pointers)
5259 make_constraint_from_restrict (vi, "GLOBAL_RESTRICT");
5261 /* For escaped variables initialize them from nonlocal. */
5262 if (!in_ipa_mode
5263 || DECL_EXTERNAL (decl) || TREE_PUBLIC (decl))
5264 make_copy_constraint (vi, nonlocal_id);
5266 /* If this is a global variable with an initializer and we are in
5267 IPA mode generate constraints for it. In non-IPA mode
5268 the initializer from nonlocal is all we need. */
5269 if (in_ipa_mode
5270 && DECL_INITIAL (decl))
5272 VEC (ce_s, heap) *rhsc = NULL;
5273 struct constraint_expr lhs, *rhsp;
5274 unsigned i;
5275 get_constraint_for (DECL_INITIAL (decl), &rhsc);
5276 lhs.var = vi->id;
5277 lhs.offset = 0;
5278 lhs.type = SCALAR;
5279 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
5280 process_constraint (new_constraint (lhs, *rhsp));
5281 /* If this is a variable that escapes from the unit
5282 the initializer escapes as well. */
5283 if (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl))
5285 lhs.var = escaped_id;
5286 lhs.offset = 0;
5287 lhs.type = SCALAR;
5288 for (i = 0; VEC_iterate (ce_s, rhsc, i, rhsp); ++i)
5289 process_constraint (new_constraint (lhs, *rhsp));
5291 VEC_free (ce_s, heap, rhsc);
5295 return id;
5298 /* Print out the points-to solution for VAR to FILE. */
5300 static void
5301 dump_solution_for_var (FILE *file, unsigned int var)
5303 varinfo_t vi = get_varinfo (var);
5304 unsigned int i;
5305 bitmap_iterator bi;
5307 /* Dump the solution for unified vars anyway; this avoids difficulties
5308 in scanning dumps in the testsuite. */
5309 fprintf (file, "%s = { ", vi->name);
5310 vi = get_varinfo (find (var));
5311 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5312 fprintf (file, "%s ", get_varinfo (i)->name);
5313 fprintf (file, "}");
5315 /* But note when the variable was unified. */
5316 if (vi->id != var)
5317 fprintf (file, " same as %s", vi->name);
5319 fprintf (file, "\n");
5322 /* Print the points-to solution for VAR to stdout. */
5324 void
5325 debug_solution_for_var (unsigned int var)
5327 dump_solution_for_var (stdout, var);
5330 /* Create varinfo structures for all of the variables in the
5331 function for intraprocedural mode. */
5333 static void
5334 intra_create_variable_infos (void)
5336 tree t;
5338 /* For each incoming pointer argument arg, create the constraint ARG
5339 = NONLOCAL or a dummy variable if it is a restrict qualified
5340 passed-by-reference argument. */
5341 for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
5343 varinfo_t p;
5345 if (!could_have_pointers (t))
5346 continue;
5348 /* For restrict qualified pointers to objects passed by
5349 reference build a real representative for the pointed-to object. */
5350 if (DECL_BY_REFERENCE (t)
5351 && POINTER_TYPE_P (TREE_TYPE (t))
5352 && TYPE_RESTRICT (TREE_TYPE (t)))
5354 struct constraint_expr lhsc, rhsc;
5355 varinfo_t vi;
5356 tree heapvar = heapvar_lookup (t, 0);
5357 if (heapvar == NULL_TREE)
5359 var_ann_t ann;
5360 heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
5361 "PARM_NOALIAS");
5362 DECL_EXTERNAL (heapvar) = 1;
5363 heapvar_insert (t, 0, heapvar);
5364 ann = get_var_ann (heapvar);
5365 ann->is_heapvar = 1;
5367 if (gimple_referenced_vars (cfun))
5368 add_referenced_var (heapvar);
5369 lhsc.var = get_vi_for_tree (t)->id;
5370 lhsc.type = SCALAR;
5371 lhsc.offset = 0;
5372 rhsc.var = (vi = get_vi_for_tree (heapvar))->id;
5373 rhsc.type = ADDRESSOF;
5374 rhsc.offset = 0;
5375 process_constraint (new_constraint (lhsc, rhsc));
5376 vi->is_restrict_var = 1;
5377 continue;
5380 for (p = get_vi_for_tree (t); p; p = p->next)
5382 if (p->may_have_pointers)
5383 make_constraint_from (p, nonlocal_id);
5384 if (p->only_restrict_pointers)
5385 make_constraint_from_restrict (p, "PARM_RESTRICT");
5387 if (POINTER_TYPE_P (TREE_TYPE (t))
5388 && TYPE_RESTRICT (TREE_TYPE (t)))
5389 make_constraint_from_restrict (get_vi_for_tree (t), "PARM_RESTRICT");
5392 /* Add a constraint for a result decl that is passed by reference. */
5393 if (DECL_RESULT (cfun->decl)
5394 && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
5396 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));
5398 for (p = result_vi; p; p = p->next)
5399 make_constraint_from (p, nonlocal_id);
5402 /* Add a constraint for the incoming static chain parameter. */
5403 if (cfun->static_chain_decl != NULL_TREE)
5405 varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);
5407 for (p = chain_vi; p; p = p->next)
5408 make_constraint_from (p, nonlocal_id);
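   /* Informal summary of the entry state set up above: an incoming
      pointer argument  int *p  is made to point to NONLOCAL; a restrict
      qualified argument additionally gets a PARM_RESTRICT
      representative; a restrict qualified pointer passed by reference
      gets its own PARM_NOALIAS heap variable instead of the NONLOCAL
      constraint; and a by-reference result decl and the static chain
      also point to NONLOCAL.  */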
5412 /* Structure used to put solution bitmaps in a hashtable so they can
5413 be shared among variables with the same points-to set. */
5415 typedef struct shared_bitmap_info
5417 bitmap pt_vars;
5418 hashval_t hashcode;
5419 } *shared_bitmap_info_t;
5420 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
5422 static htab_t shared_bitmap_table;
5424 /* Hash function for a shared_bitmap_info_t */
5426 static hashval_t
5427 shared_bitmap_hash (const void *p)
5429 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
5430 return bi->hashcode;
5433 /* Equality function for two shared_bitmap_info_t's. */
5435 static int
5436 shared_bitmap_eq (const void *p1, const void *p2)
5438 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
5439 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
5440 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
5443 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5444 existing instance if there is one, NULL otherwise. */
5446 static bitmap
5447 shared_bitmap_lookup (bitmap pt_vars)
5449 void **slot;
5450 struct shared_bitmap_info sbi;
5452 sbi.pt_vars = pt_vars;
5453 sbi.hashcode = bitmap_hash (pt_vars);
5455 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
5456 sbi.hashcode, NO_INSERT);
5457 if (!slot)
5458 return NULL;
5459 else
5460 return ((shared_bitmap_info_t) *slot)->pt_vars;
5464 /* Add a bitmap to the shared bitmap hashtable. */
5466 static void
5467 shared_bitmap_add (bitmap pt_vars)
5469 void **slot;
5470 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
5472 sbi->pt_vars = pt_vars;
5473 sbi->hashcode = bitmap_hash (pt_vars);
5475 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
5476 sbi->hashcode, INSERT);
5477 gcc_assert (!*slot);
5478 *slot = (void *) sbi;
5482 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
5484 static void
5485 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
5487 unsigned int i;
5488 bitmap_iterator bi;
5490 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
5492 varinfo_t vi = get_varinfo (i);
5494 /* The only artificial variables that are allowed in a may-alias
5495 set are heap variables. */
5496 if (vi->is_artificial_var && !vi->is_heap_var)
5497 continue;
5499 if (TREE_CODE (vi->decl) == VAR_DECL
5500 || TREE_CODE (vi->decl) == PARM_DECL
5501 || TREE_CODE (vi->decl) == RESULT_DECL)
5503 /* If we are in IPA mode we will not recompute points-to
5504 sets after inlining so make sure they stay valid. */
5505 if (in_ipa_mode
5506 && !DECL_PT_UID_SET_P (vi->decl))
5507 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
5509 /* Add the decl to the points-to set. Note that the points-to
5510 set contains global variables. */
5511 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
5512 if (vi->is_global_var)
5513 pt->vars_contains_global = true;
5519 /* Compute the points-to solution *PT for the variable VI. */
5521 static void
5522 find_what_var_points_to (varinfo_t orig_vi, struct pt_solution *pt)
5524 unsigned int i;
5525 bitmap_iterator bi;
5526 bitmap finished_solution;
5527 bitmap result;
5528 varinfo_t vi;
5530 memset (pt, 0, sizeof (struct pt_solution));
5532 /* This variable may have been collapsed, let's get the real
5533 variable. */
5534 vi = get_varinfo (find (orig_vi->id));
5536 /* Translate artificial variables into SSA_NAME_PTR_INFO
5537 attributes. */
5538 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5540 varinfo_t vi = get_varinfo (i);
5542 if (vi->is_artificial_var)
5544 if (vi->id == nothing_id)
5545 pt->null = 1;
5546 else if (vi->id == escaped_id)
5548 if (in_ipa_mode)
5549 pt->ipa_escaped = 1;
5550 else
5551 pt->escaped = 1;
5553 else if (vi->id == nonlocal_id)
5554 pt->nonlocal = 1;
5555 else if (vi->is_heap_var)
5556 /* We represent heapvars in the points-to set properly. */
5558 else if (vi->id == readonly_id)
5559 /* Nobody cares. */
5561 else if (vi->id == anything_id
5562 || vi->id == integer_id)
5563 pt->anything = 1;
5565 if (vi->is_restrict_var)
5566 pt->vars_contains_restrict = true;
5569 /* Instead of doing extra work, simply do not create
5570 elaborate points-to information for pt_anything pointers. */
5571 if (pt->anything
5572 && (orig_vi->is_artificial_var
5573 || !pt->vars_contains_restrict))
5574 return;
5576 /* Share the final set of variables when possible. */
5577 finished_solution = BITMAP_GGC_ALLOC ();
5578 stats.points_to_sets_created++;
5580 set_uids_in_ptset (finished_solution, vi->solution, pt);
5581 result = shared_bitmap_lookup (finished_solution);
5582 if (!result)
5584 shared_bitmap_add (finished_solution);
5585 pt->vars = finished_solution;
5587 else
5589 pt->vars = result;
5590 bitmap_clear (finished_solution);
5594 /* Given a pointer variable P, fill in its points-to set. */
5596 static void
5597 find_what_p_points_to (tree p)
5599 struct ptr_info_def *pi;
5600 tree lookup_p = p;
5601 varinfo_t vi;
5603 /* For parameters, get at the points-to set for the actual parm
5604 decl. */
5605 if (TREE_CODE (p) == SSA_NAME
5606 && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
5607 && SSA_NAME_IS_DEFAULT_DEF (p))
5608 lookup_p = SSA_NAME_VAR (p);
5610 vi = lookup_vi_for_tree (lookup_p);
5611 if (!vi)
5612 return;
5614 pi = get_ptr_info (p);
5615 find_what_var_points_to (vi, &pi->pt);
5619 /* Query statistics for points-to solutions. */
5621 static struct {
5622 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
5623 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
5624 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
5625 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
5626 } pta_stats;
5628 void
5629 dump_pta_stats (FILE *s)
5631 fprintf (s, "\nPTA query stats:\n");
5632 fprintf (s, " pt_solution_includes: "
5633 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
5634 HOST_WIDE_INT_PRINT_DEC" queries\n",
5635 pta_stats.pt_solution_includes_no_alias,
5636 pta_stats.pt_solution_includes_no_alias
5637 + pta_stats.pt_solution_includes_may_alias);
5638 fprintf (s, " pt_solutions_intersect: "
5639 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
5640 HOST_WIDE_INT_PRINT_DEC" queries\n",
5641 pta_stats.pt_solutions_intersect_no_alias,
5642 pta_stats.pt_solutions_intersect_no_alias
5643 + pta_stats.pt_solutions_intersect_may_alias);
5647 /* Reset the points-to solution *PT to a conservative default
5648 (point to anything). */
5650 void
5651 pt_solution_reset (struct pt_solution *pt)
5653 memset (pt, 0, sizeof (struct pt_solution));
5654 pt->anything = true;
5657 /* Set the points-to solution *PT to point only to the variables
5658 in VARS. VARS_CONTAINS_GLOBAL specifies whether that contains
5659 global variables and VARS_CONTAINS_RESTRICT specifies whether
5660 it contains restrict tag variables. */
5662 void
5663 pt_solution_set (struct pt_solution *pt, bitmap vars,
5664 bool vars_contains_global, bool vars_contains_restrict)
5666 memset (pt, 0, sizeof (struct pt_solution));
5667 pt->vars = vars;
5668 pt->vars_contains_global = vars_contains_global;
5669 pt->vars_contains_restrict = vars_contains_restrict;
5672 /* Computes the union of the points-to solutions *DEST and *SRC and
5673 stores the result in *DEST. This changes the points-to bitmap
5674 of *DEST and thus may not be used if that might be shared.
5675 The points-to bitmap of *SRC and *DEST will not be shared after
5676 this function if they were not before. */
5678 static void
5679 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
5681 dest->anything |= src->anything;
5682 if (dest->anything)
5684 pt_solution_reset (dest);
5685 return;
5688 dest->nonlocal |= src->nonlocal;
5689 dest->escaped |= src->escaped;
5690 dest->ipa_escaped |= src->ipa_escaped;
5691 dest->null |= src->null;
5692 dest->vars_contains_global |= src->vars_contains_global;
5693 dest->vars_contains_restrict |= src->vars_contains_restrict;
5694 if (!src->vars)
5695 return;
5697 if (!dest->vars)
5698 dest->vars = BITMAP_GGC_ALLOC ();
5699 bitmap_ior_into (dest->vars, src->vars);
5702 /* Return true if the points-to solution *PT is empty. */
5704 bool
5705 pt_solution_empty_p (struct pt_solution *pt)
5707 if (pt->anything
5708 || pt->nonlocal)
5709 return false;
5711 if (pt->vars
5712 && !bitmap_empty_p (pt->vars))
5713 return false;
5715 /* If the solution includes ESCAPED, check if that is empty. */
5716 if (pt->escaped
5717 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
5718 return false;
5720 /* If the solution includes the IPA ESCAPED solution, check if that is empty. */
5721 if (pt->ipa_escaped
5722 && !pt_solution_empty_p (&ipa_escaped_pt))
5723 return false;
5725 return true;
5728 /* Return true if the points-to solution *PT includes global memory. */
5730 bool
5731 pt_solution_includes_global (struct pt_solution *pt)
5733 if (pt->anything
5734 || pt->nonlocal
5735 || pt->vars_contains_global)
5736 return true;
5738 if (pt->escaped)
5739 return pt_solution_includes_global (&cfun->gimple_df->escaped);
5741 if (pt->ipa_escaped)
5742 return pt_solution_includes_global (&ipa_escaped_pt);
5744 /* ??? This predicate is not correct for the IPA-PTA solution
5745 as we do not properly distinguish between unit escape points
5746 and global variables. */
5747 if (cfun->gimple_df->ipa_pta)
5748 return true;
5750 return false;
5753 /* Return true if the points-to solution *PT includes the variable
5754 declaration DECL. */
5756 static bool
5757 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
5759 if (pt->anything)
5760 return true;
5762 if (pt->nonlocal
5763 && is_global_var (decl))
5764 return true;
5766 if (pt->vars
5767 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
5768 return true;
5770 /* If the solution includes ESCAPED, check it. */
5771 if (pt->escaped
5772 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
5773 return true;
5775 /* If the solution includes the IPA ESCAPED solution, check it. */
5776 if (pt->ipa_escaped
5777 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
5778 return true;
5780 return false;
5783 bool
5784 pt_solution_includes (struct pt_solution *pt, const_tree decl)
5786 bool res = pt_solution_includes_1 (pt, decl);
5787 if (res)
5788 ++pta_stats.pt_solution_includes_may_alias;
5789 else
5790 ++pta_stats.pt_solution_includes_no_alias;
5791 return res;
5794 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
5795 intersection. */
5797 static bool
5798 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
5800 if (pt1->anything || pt2->anything)
5801 return true;
5803 /* If either points to unknown global memory and the other points to
5804 any global memory, they alias. */
5805 if ((pt1->nonlocal
5806 && (pt2->nonlocal
5807 || pt2->vars_contains_global))
5808 || (pt2->nonlocal
5809 && pt1->vars_contains_global))
5810 return true;
5812 /* Check the escaped solution if required. */
5813 if ((pt1->escaped || pt2->escaped)
5814 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
5816 /* If both point to escaped memory and that solution
5817 is not empty they alias. */
5818 if (pt1->escaped && pt2->escaped)
5819 return true;
5821 /* If either points to escaped memory see if the escaped solution
5822 intersects with the other. */
5823 if ((pt1->escaped
5824 && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
5825 || (pt2->escaped
5826 && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
5827 return true;
5830 /* Check the escaped solution if required.
5831 ??? Do we need to check the local against the IPA escaped sets? */
5832 if ((pt1->ipa_escaped || pt2->ipa_escaped)
5833 && !pt_solution_empty_p (&ipa_escaped_pt))
5835 /* If both point to escaped memory and that solution
5836 is not empty they alias. */
5837 if (pt1->ipa_escaped && pt2->ipa_escaped)
5838 return true;
5840 /* If either points to escaped memory see if the escaped solution
5841 intersects with the other. */
5842 if ((pt1->ipa_escaped
5843 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
5844 || (pt2->ipa_escaped
5845 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
5846 return true;
5849 /* Now both pointers alias if their points-to solution intersects. */
5850 return (pt1->vars
5851 && pt2->vars
5852 && bitmap_intersect_p (pt1->vars, pt2->vars));
5855 bool
5856 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
5858 bool res = pt_solutions_intersect_1 (pt1, pt2);
5859 if (res)
5860 ++pta_stats.pt_solutions_intersect_may_alias;
5861 else
5862 ++pta_stats.pt_solutions_intersect_no_alias;
5863 return res;
5866 /* Return true if both points-to solutions PT1 and PT2 for two restrict
5867 qualified pointers are possibly based on the same pointer. */
5869 bool
5870 pt_solutions_same_restrict_base (struct pt_solution *pt1,
5871 struct pt_solution *pt2)
5873 /* If we deal with points-to solutions of two restrict qualified
5874 pointers, rely solely on the pointed-to variable bitmap intersection.
5875 For two pointers that are based on each other the bitmaps will
5876 intersect. */
5877 if (pt1->vars_contains_restrict
5878 && pt2->vars_contains_restrict)
5880 gcc_assert (pt1->vars && pt2->vars);
5881 return bitmap_intersect_p (pt1->vars, pt2->vars);
5884 return true;
5888 /* Dump points-to information to OUTFILE. */
5890 static void
5891 dump_sa_points_to_info (FILE *outfile)
5893 unsigned int i;
5895 fprintf (outfile, "\nPoints-to sets\n\n");
5897 if (dump_flags & TDF_STATS)
5899 fprintf (outfile, "Stats:\n");
5900 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
5901 fprintf (outfile, "Non-pointer vars: %d\n",
5902 stats.nonpointer_vars);
5903 fprintf (outfile, "Statically unified vars: %d\n",
5904 stats.unified_vars_static);
5905 fprintf (outfile, "Dynamically unified vars: %d\n",
5906 stats.unified_vars_dynamic);
5907 fprintf (outfile, "Iterations: %d\n", stats.iterations);
5908 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
5909 fprintf (outfile, "Number of implicit edges: %d\n",
5910 stats.num_implicit_edges);
5913 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
5915 varinfo_t vi = get_varinfo (i);
5916 if (!vi->may_have_pointers)
5917 continue;
5918 dump_solution_for_var (outfile, i);
5923 /* Debug points-to information to stderr. */
5925 void
5926 debug_sa_points_to_info (void)
5928 dump_sa_points_to_info (stderr);
5932 /* Initialize the always-existing constraint variables for NULL,
5933 ANYTHING, READONLY, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER. */
5935 static void
5936 init_base_vars (void)
5938 struct constraint_expr lhs, rhs;
5939 varinfo_t var_anything;
5940 varinfo_t var_nothing;
5941 varinfo_t var_readonly;
5942 varinfo_t var_escaped;
5943 varinfo_t var_nonlocal;
5944 varinfo_t var_storedanything;
5945 varinfo_t var_integer;
5947 /* Create the NULL variable, used to represent that a variable points
5948 to NULL. */
5949 var_nothing = new_var_info (NULL_TREE, "NULL");
5950 gcc_assert (var_nothing->id == nothing_id);
5951 var_nothing->is_artificial_var = 1;
5952 var_nothing->offset = 0;
5953 var_nothing->size = ~0;
5954 var_nothing->fullsize = ~0;
5955 var_nothing->is_special_var = 1;
5956 var_nothing->may_have_pointers = 0;
5957 var_nothing->is_global_var = 0;
5959 /* Create the ANYTHING variable, used to represent that a variable
5960 points to some unknown piece of memory. */
5961 var_anything = new_var_info (NULL_TREE, "ANYTHING");
5962 gcc_assert (var_anything->id == anything_id);
5963 var_anything->is_artificial_var = 1;
5964 var_anything->size = ~0;
5965 var_anything->offset = 0;
5966 var_anything->next = NULL;
5967 var_anything->fullsize = ~0;
5968 var_anything->is_special_var = 1;
5970 /* Anything points to anything. This makes deref constraints just
5971 work in the presence of linked lists and other p = *p type loops,
5972 by saying that *ANYTHING = ANYTHING. */
5973 lhs.type = SCALAR;
5974 lhs.var = anything_id;
5975 lhs.offset = 0;
5976 rhs.type = ADDRESSOF;
5977 rhs.var = anything_id;
5978 rhs.offset = 0;
5980 /* This specifically does not use process_constraint because
5981 process_constraint ignores all anything = anything constraints, since all
5982 but this one are redundant. */
5983 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
5985 /* Create the READONLY variable, used to represent that a variable
5986 points to readonly memory. */
5987 var_readonly = new_var_info (NULL_TREE, "READONLY");
5988 gcc_assert (var_readonly->id == readonly_id);
5989 var_readonly->is_artificial_var = 1;
5990 var_readonly->offset = 0;
5991 var_readonly->size = ~0;
5992 var_readonly->fullsize = ~0;
5993 var_readonly->next = NULL;
5994 var_readonly->is_special_var = 1;
5996 /* readonly memory points to anything, in order to make deref
5997 easier. In reality, it points to anything the particular
5998 readonly variable can point to, but we don't track this
5999 separately. */
6000 lhs.type = SCALAR;
6001 lhs.var = readonly_id;
6002 lhs.offset = 0;
6003 rhs.type = ADDRESSOF;
6004 rhs.var = readonly_id; /* FIXME */
6005 rhs.offset = 0;
6006 process_constraint (new_constraint (lhs, rhs));
6008 /* Create the ESCAPED variable, used to represent the set of escaped
6009 memory. */
6010 var_escaped = new_var_info (NULL_TREE, "ESCAPED");
6011 gcc_assert (var_escaped->id == escaped_id);
6012 var_escaped->is_artificial_var = 1;
6013 var_escaped->offset = 0;
6014 var_escaped->size = ~0;
6015 var_escaped->fullsize = ~0;
6016 var_escaped->is_special_var = 0;
6018 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6019 memory. */
6020 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
6021 gcc_assert (var_nonlocal->id == nonlocal_id);
6022 var_nonlocal->is_artificial_var = 1;
6023 var_nonlocal->offset = 0;
6024 var_nonlocal->size = ~0;
6025 var_nonlocal->fullsize = ~0;
6026 var_nonlocal->is_special_var = 1;
6028 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6029 lhs.type = SCALAR;
6030 lhs.var = escaped_id;
6031 lhs.offset = 0;
6032 rhs.type = DEREF;
6033 rhs.var = escaped_id;
6034 rhs.offset = 0;
6035 process_constraint (new_constraint (lhs, rhs));
6037 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6038 whole variable escapes. */
6039 lhs.type = SCALAR;
6040 lhs.var = escaped_id;
6041 lhs.offset = 0;
6042 rhs.type = SCALAR;
6043 rhs.var = escaped_id;
6044 rhs.offset = UNKNOWN_OFFSET;
6045 process_constraint (new_constraint (lhs, rhs));
6047 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6048 everything pointed to by escaped points to what global memory can
6049 point to. */
6050 lhs.type = DEREF;
6051 lhs.var = escaped_id;
6052 lhs.offset = 0;
6053 rhs.type = SCALAR;
6054 rhs.var = nonlocal_id;
6055 rhs.offset = 0;
6056 process_constraint (new_constraint (lhs, rhs));
6058 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6059 global memory may point to global memory and escaped memory. */
6060 lhs.type = SCALAR;
6061 lhs.var = nonlocal_id;
6062 lhs.offset = 0;
6063 rhs.type = ADDRESSOF;
6064 rhs.var = nonlocal_id;
6065 rhs.offset = 0;
6066 process_constraint (new_constraint (lhs, rhs));
6067 rhs.type = ADDRESSOF;
6068 rhs.var = escaped_id;
6069 rhs.offset = 0;
6070 process_constraint (new_constraint (lhs, rhs));
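/* To recap, the base constraints built so far are: ANYTHING = &ANYTHING,
   READONLY = &READONLY, ESCAPED = *ESCAPED, ESCAPED = ESCAPED + UNKNOWN_OFFSET,
   *ESCAPED = NONLOCAL, NONLOCAL = &NONLOCAL and NONLOCAL = &ESCAPED.  */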
6072 /* Create the STOREDANYTHING variable, used to represent the set of
6073 variables stored to *ANYTHING. */
6074 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
6075 gcc_assert (var_storedanything->id == storedanything_id);
6076 var_storedanything->is_artificial_var = 1;
6077 var_storedanything->offset = 0;
6078 var_storedanything->size = ~0;
6079 var_storedanything->fullsize = ~0;
6080 var_storedanything->is_special_var = 0;
6082 /* Create the INTEGER variable, used to represent that a variable points
6083 to what an INTEGER "points to". */
6084 var_integer = new_var_info (NULL_TREE, "INTEGER");
6085 gcc_assert (var_integer->id == integer_id);
6086 var_integer->is_artificial_var = 1;
6087 var_integer->size = ~0;
6088 var_integer->fullsize = ~0;
6089 var_integer->offset = 0;
6090 var_integer->next = NULL;
6091 var_integer->is_special_var = 1;
6093 /* INTEGER = &ANYTHING, because we don't know where a dereference of
6094 a random integer will point to. */
6095 lhs.type = SCALAR;
6096 lhs.var = integer_id;
6097 lhs.offset = 0;
6098 rhs.type = ADDRESSOF;
6099 rhs.var = anything_id;
6100 rhs.offset = 0;
6101 process_constraint (new_constraint (lhs, rhs));
6104 /* Initialize things necessary to perform PTA. */
6106 static void
6107 init_alias_vars (void)
6109 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
6111 bitmap_obstack_initialize (&pta_obstack);
6112 bitmap_obstack_initialize (&oldpta_obstack);
6113 bitmap_obstack_initialize (&predbitmap_obstack);
6115 constraint_pool = create_alloc_pool ("Constraint pool",
6116 sizeof (struct constraint), 30);
6117 variable_info_pool = create_alloc_pool ("Variable info pool",
6118 sizeof (struct variable_info), 30);
6119 constraints = VEC_alloc (constraint_t, heap, 8);
6120 varmap = VEC_alloc (varinfo_t, heap, 8);
6121 vi_for_tree = pointer_map_create ();
6122 call_stmt_vars = pointer_map_create ();
6124 memset (&stats, 0, sizeof (stats));
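/* The shared bitmap table lets identical final points-to bitmaps be
   shared between variables instead of being duplicated.  */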
6125 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
6126 shared_bitmap_eq, free);
6127 init_base_vars ();
6130 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6131 predecessor edges. */
6133 static void
6134 remove_preds_and_fake_succs (constraint_graph_t graph)
6136 unsigned int i;
6138 /* Clear the implicit ref and address nodes from the successor
6139 lists. */
6140 for (i = 0; i < FIRST_REF_NODE; i++)
6142 if (graph->succs[i])
6143 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6144 FIRST_REF_NODE * 2);
6147 /* Free the successor list for the non-ref nodes. */
6148 for (i = FIRST_REF_NODE; i < graph->size; i++)
6150 if (graph->succs[i])
6151 BITMAP_FREE (graph->succs[i]);
6154 /* Now shrink the successor list to the number of real variables, and
6155 blow away the predecessor bitmaps. */
6156 graph->size = VEC_length (varinfo_t, varmap);
6157 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6159 free (graph->implicit_preds);
6160 graph->implicit_preds = NULL;
6161 free (graph->preds);
6162 graph->preds = NULL;
6163 bitmap_obstack_release (&predbitmap_obstack);
6166 /* Initialize the heapvar for statement mapping. */
6168 static void
6169 init_alias_heapvars (void)
6171 if (!heapvar_for_stmt)
6172 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, heapvar_map_eq,
6173 NULL);
6176 /* Delete the heapvar for statement mapping. */
6178 void
6179 delete_alias_heapvars (void)
6181 if (heapvar_for_stmt)
6182 htab_delete (heapvar_for_stmt);
6183 heapvar_for_stmt = NULL;
6186 /* Solve the constraint set. */
6188 static void
6189 solve_constraints (void)
6191 struct scc_info *si;
6193 if (dump_file)
6194 fprintf (dump_file,
6195 "\nCollapsing static cycles and doing variable "
6196 "substitution\n");
6198 init_graph (VEC_length (varinfo_t, varmap) * 2);
6200 if (dump_file)
6201 fprintf (dump_file, "Building predecessor graph\n");
6202 build_pred_graph ();
6204 if (dump_file)
6205 fprintf (dump_file, "Detecting pointer and location "
6206 "equivalences\n");
6207 si = perform_var_substitution (graph);
6209 if (dump_file)
6210 fprintf (dump_file, "Rewriting constraints and unifying "
6211 "variables\n");
6212 rewrite_constraints (graph, si);
6214 build_succ_graph ();
6215 free_var_substitution_info (si);
6217 if (dump_file && (dump_flags & TDF_GRAPH))
6218 dump_constraint_graph (dump_file);
6220 move_complex_constraints (graph);
6222 if (dump_file)
6223 fprintf (dump_file, "Uniting pointer but not location equivalent "
6224 "variables\n");
6225 unite_pointer_equivalences (graph);
6227 if (dump_file)
6228 fprintf (dump_file, "Finding indirect cycles\n");
6229 find_indirect_cycles (graph);
6231 /* Implicit nodes and predecessors are no longer necessary at this
6232 point. */
6233 remove_preds_and_fake_succs (graph);
6235 if (dump_file)
6236 fprintf (dump_file, "Solving graph\n");
6238 solve_graph (graph);
6240 if (dump_file)
6241 dump_sa_points_to_info (dump_file);
6244 /* Create points-to sets for the current function. See the comments
6245 at the start of the file for an algorithmic overview. */
6247 static void
6248 compute_points_to_sets (void)
6250 basic_block bb;
6251 unsigned i;
6252 varinfo_t vi;
6254 timevar_push (TV_TREE_PTA);
6256 init_alias_vars ();
6257 init_alias_heapvars ();
6259 intra_create_variable_infos ();
6261 /* Now walk all statements and build the constraint set. */
6262 FOR_EACH_BB (bb)
6264 gimple_stmt_iterator gsi;
6266 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6268 gimple phi = gsi_stmt (gsi);
6270 if (is_gimple_reg (gimple_phi_result (phi)))
6271 find_func_aliases (phi);
6274 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6276 gimple stmt = gsi_stmt (gsi);
6278 find_func_aliases (stmt);
6282 if (dump_file)
6284 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
6285 dump_constraints (dump_file, 0);
6288 /* From the constraints compute the points-to sets. */
6289 solve_constraints ();
6291 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
6292 find_what_var_points_to (get_varinfo (escaped_id),
6293 &cfun->gimple_df->escaped);
6295 /* Make sure the ESCAPED solution (which is used as placeholder in
6296 other solutions) does not reference itself. This simplifies
6297 points-to solution queries. */
6298 cfun->gimple_df->escaped.escaped = 0;
6300 /* Mark escaped HEAP variables as global. */
6301 for (i = 0; VEC_iterate (varinfo_t, varmap, i, vi); ++i)
6302 if (vi->is_heap_var
6303 && !vi->is_restrict_var
6304 && !vi->is_global_var)
6305 DECL_EXTERNAL (vi->decl) = vi->is_global_var
6306 = pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);
6308 /* Compute the points-to sets for pointer SSA_NAMEs. */
6309 for (i = 0; i < num_ssa_names; ++i)
6311 tree ptr = ssa_name (i);
6312 if (ptr
6313 && POINTER_TYPE_P (TREE_TYPE (ptr)))
6314 find_what_p_points_to (ptr);
6317 /* Compute the call-used/clobbered sets. */
6318 FOR_EACH_BB (bb)
6320 gimple_stmt_iterator gsi;
6322 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6324 gimple stmt = gsi_stmt (gsi);
6325 struct pt_solution *pt;
6326 if (!is_gimple_call (stmt))
6327 continue;
6329 pt = gimple_call_use_set (stmt);
6330 if (gimple_call_flags (stmt) & ECF_CONST)
6331 memset (pt, 0, sizeof (struct pt_solution));
6332 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
6334 find_what_var_points_to (vi, pt);
6335 /* Escaped (and thus nonlocal) variables are always
6336 implicitly used by calls. */
6337 /* ??? ESCAPED can be empty even though NONLOCAL
6338 always escapes. */
6339 pt->nonlocal = 1;
6340 pt->escaped = 1;
6342 else
6344 /* If there is nothing special about this call then
6345 we have made everything that is used also escape. */
6346 *pt = cfun->gimple_df->escaped;
6347 pt->nonlocal = 1;
6350 pt = gimple_call_clobber_set (stmt);
6351 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
6352 memset (pt, 0, sizeof (struct pt_solution));
6353 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
6355 find_what_var_points_to (vi, pt);
6356 /* Escaped (and thus nonlocal) variables are always
6357 implicitly clobbered by calls. */
6358 /* ??? ESCAPED can be empty even though NONLOCAL
6359 always escapes. */
6360 pt->nonlocal = 1;
6361 pt->escaped = 1;
6363 else
6365 /* If there is nothing special about this call then
6366 we have made everything that is used also escape. */
6367 *pt = cfun->gimple_df->escaped;
6368 pt->nonlocal = 1;
6373 timevar_pop (TV_TREE_PTA);
6377 /* Delete created points-to sets. */
6379 static void
6380 delete_points_to_sets (void)
6382 unsigned int i;
6384 htab_delete (shared_bitmap_table);
6385 if (dump_file && (dump_flags & TDF_STATS))
6386 fprintf (dump_file, "Points to sets created:%d\n",
6387 stats.points_to_sets_created);
6389 pointer_map_destroy (vi_for_tree);
6390 pointer_map_destroy (call_stmt_vars);
6391 bitmap_obstack_release (&pta_obstack);
6392 VEC_free (constraint_t, heap, constraints);
6394 for (i = 0; i < graph->size; i++)
6395 VEC_free (constraint_t, heap, graph->complex[i]);
6396 free (graph->complex);
6398 free (graph->rep);
6399 free (graph->succs);
6400 free (graph->pe);
6401 free (graph->pe_rep);
6402 free (graph->indirect_cycles);
6403 free (graph);
6405 VEC_free (varinfo_t, heap, varmap);
6406 free_alloc_pool (variable_info_pool);
6407 free_alloc_pool (constraint_pool);
6411 /* Compute points-to information for every SSA_NAME pointer in the
6412 current function and compute the transitive closure of escaped
6413 variables to re-initialize the call-clobber states of local variables. */
6415 unsigned int
6416 compute_may_aliases (void)
6418 if (cfun->gimple_df->ipa_pta)
6420 if (dump_file)
6422 fprintf (dump_file, "\nNot re-computing points-to information "
6423 "because IPA points-to information is available.\n\n");
6425 /* But still dump what information we do have. */
6426 dump_alias_info (dump_file);
6428 if (dump_flags & TDF_DETAILS)
6429 dump_referenced_vars (dump_file);
6432 return 0;
6435 /* For each pointer P_i, determine the sets of variables that P_i may
6436 point-to. Compute the reachability set of escaped and call-used
6437 variables. */
6438 compute_points_to_sets ();
6440 /* Debugging dumps. */
6441 if (dump_file)
6443 dump_alias_info (dump_file);
6445 if (dump_flags & TDF_DETAILS)
6446 dump_referenced_vars (dump_file);
6449 /* Deallocate memory used by aliasing data structures and the internal
6450 points-to solution. */
6451 delete_points_to_sets ();
6453 gcc_assert (!need_ssa_update_p (cfun));
6455 return 0;
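/* Gate for the pass: points-to analysis is only performed when
   -ftree-pta is enabled.  */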
6458 static bool
6459 gate_tree_pta (void)
6461 return flag_tree_pta;
6464 /* A dummy pass to cause points-to information to be computed via
6465 TODO_rebuild_alias. */
6467 struct gimple_opt_pass pass_build_alias =
6470 GIMPLE_PASS,
6471 "alias", /* name */
6472 gate_tree_pta, /* gate */
6473 NULL, /* execute */
6474 NULL, /* sub */
6475 NULL, /* next */
6476 0, /* static_pass_number */
6477 TV_NONE, /* tv_id */
6478 PROP_cfg | PROP_ssa, /* properties_required */
6479 0, /* properties_provided */
6480 0, /* properties_destroyed */
6481 0, /* todo_flags_start */
6482 TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */
6486 /* A dummy pass to cause points-to information to be computed via
6487 TODO_rebuild_alias. */
6489 struct gimple_opt_pass pass_build_ealias =
6492 GIMPLE_PASS,
6493 "ealias", /* name */
6494 gate_tree_pta, /* gate */
6495 NULL, /* execute */
6496 NULL, /* sub */
6497 NULL, /* next */
6498 0, /* static_pass_number */
6499 TV_NONE, /* tv_id */
6500 PROP_cfg | PROP_ssa, /* properties_required */
6501 0, /* properties_provided */
6502 0, /* properties_destroyed */
6503 0, /* todo_flags_start */
6504 TODO_rebuild_alias | TODO_dump_func /* todo_flags_finish */
6509 /* Return true if we should execute IPA PTA. */
6510 static bool
6511 gate_ipa_pta (void)
6513 return (optimize
6514 && flag_ipa_pta
6515 /* Don't bother doing anything if the program has errors. */
6516 && !(errorcount || sorrycount));
6519 /* IPA PTA solutions for ESCAPED. */
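/* The aggregate initializer below is a conservative "points to anything"
   default; ipa_pta_execute overwrites it with the real ESCAPED solution
   via find_what_var_points_to.  */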
6520 struct pt_solution ipa_escaped_pt
6521 = { true, false, false, false, false, false, false, NULL };
6523 /* Execute the driver for IPA PTA. */
6524 static unsigned int
6525 ipa_pta_execute (void)
6527 struct cgraph_node *node;
6528 struct varpool_node *var;
6529 int from;
6531 in_ipa_mode = 1;
6533 init_alias_heapvars ();
6534 init_alias_vars ();
6536 /* Build the constraints. */
6537 for (node = cgraph_nodes; node; node = node->next)
6539 /* Nodes without a body are not interesting. Especially do not
6540 visit clones at this point for now; we get duplicate decls
6541 there for inline clones at least. */
6542 if (!gimple_has_body_p (node->decl)
6543 || node->clone_of)
6544 continue;
6546 create_function_info_for (node->decl,
6547 cgraph_node_name (node));
6550 /* Create constraints for global variables and their initializers. */
6551 for (var = varpool_nodes; var; var = var->next)
6552 get_vi_for_tree (var->decl);
6554 if (dump_file)
6556 fprintf (dump_file,
6557 "Generating constraints for global initializers\n\n");
6558 dump_constraints (dump_file, 0);
6559 fprintf (dump_file, "\n");
6561 from = VEC_length (constraint_t, constraints);
6563 for (node = cgraph_nodes; node; node = node->next)
6565 struct function *func;
6566 basic_block bb;
6567 tree old_func_decl;
6569 /* Nodes without a body are not interesting. */
6570 if (!gimple_has_body_p (node->decl)
6571 || node->clone_of)
6572 continue;
6574 if (dump_file)
6575 fprintf (dump_file,
6576 "Generating constraints for %s\n",
6577 cgraph_node_name (node));
6579 func = DECL_STRUCT_FUNCTION (node->decl);
6580 old_func_decl = current_function_decl;
6581 push_cfun (func);
6582 current_function_decl = node->decl;
6584 /* For externally visible functions use local constraints for
6585 their arguments. For local functions we see all callers
6586 and thus do not need initial constraints for parameters. */
6587 if (node->local.externally_visible)
6588 intra_create_variable_infos ();
6590 /* Build constraints for the function body. */
6591 FOR_EACH_BB_FN (bb, func)
6593 gimple_stmt_iterator gsi;
6595 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6596 gsi_next (&gsi))
6598 gimple phi = gsi_stmt (gsi);
6600 if (is_gimple_reg (gimple_phi_result (phi)))
6601 find_func_aliases (phi);
6604 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6606 gimple stmt = gsi_stmt (gsi);
6608 find_func_aliases (stmt);
6609 find_func_clobbers (stmt);
6613 current_function_decl = old_func_decl;
6614 pop_cfun ();
6616 if (dump_file)
6618 fprintf (dump_file, "\n");
6619 dump_constraints (dump_file, from);
6620 fprintf (dump_file, "\n");
6622 from = VEC_length (constraint_t, constraints);
6625 /* From the constraints compute the points-to sets. */
6626 solve_constraints ();
6628 /* Compute the global points-to sets for ESCAPED.
6629 ??? Note that the computed escape set is not correct
6630 for the whole unit as we fail to consider graph edges to
6631 externally visible functions. */
6632 find_what_var_points_to (get_varinfo (escaped_id), &ipa_escaped_pt);
6634 /* Make sure the ESCAPED solution (which is used as placeholder in
6635 other solutions) does not reference itself. This simplifies
6636 points-to solution queries. */
6637 ipa_escaped_pt.ipa_escaped = 0;
6639 /* Assign the points-to sets to the SSA names in the unit. */
6640 for (node = cgraph_nodes; node; node = node->next)
6642 tree ptr;
6643 struct function *fn;
6644 unsigned i;
6645 varinfo_t fi;
6646 basic_block bb;
6647 struct pt_solution uses, clobbers;
6648 struct cgraph_edge *e;
6650 /* Nodes without a body are not interesting. */
6651 if (!gimple_has_body_p (node->decl)
6652 || node->clone_of)
6653 continue;
6655 fn = DECL_STRUCT_FUNCTION (node->decl);
6657 /* Compute the points-to sets for pointer SSA_NAMEs. */
6658 for (i = 0; VEC_iterate (tree, fn->gimple_df->ssa_names, i, ptr); ++i)
6660 if (ptr
6661 && POINTER_TYPE_P (TREE_TYPE (ptr)))
6662 find_what_p_points_to (ptr);
6665 /* Compute the call-use and call-clobber sets for all direct calls. */
6666 fi = lookup_vi_for_tree (node->decl);
6667 gcc_assert (fi->is_fn_info);
6668 find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers),
6669 &clobbers);
6670 find_what_var_points_to (first_vi_for_offset (fi, fi_uses), &uses);
6671 for (e = node->callers; e; e = e->next_caller)
6673 if (!e->call_stmt)
6674 continue;
6676 *gimple_call_clobber_set (e->call_stmt) = clobbers;
6677 *gimple_call_use_set (e->call_stmt) = uses;
6680 /* Compute the call-use and call-clobber sets for indirect calls
6681 and calls to external functions. */
6682 FOR_EACH_BB_FN (bb, fn)
6684 gimple_stmt_iterator gsi;
6686 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6688 gimple stmt = gsi_stmt (gsi);
6689 struct pt_solution *pt;
6690 varinfo_t vi;
6691 tree decl;
6693 if (!is_gimple_call (stmt))
6694 continue;
6696 /* Handle direct calls to external functions. */
6697 decl = gimple_call_fndecl (stmt);
6698 if (decl
6699 && (!(fi = lookup_vi_for_tree (decl))
6700 || !fi->is_fn_info))
6702 pt = gimple_call_use_set (stmt);
6703 if (gimple_call_flags (stmt) & ECF_CONST)
6704 memset (pt, 0, sizeof (struct pt_solution));
6705 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
6707 find_what_var_points_to (vi, pt);
6708 /* Escaped (and thus nonlocal) variables are always
6709 implicitly used by calls. */
6710 /* ??? ESCAPED can be empty even though NONLOCAL
6711 always escapes. */
6712 pt->nonlocal = 1;
6713 pt->ipa_escaped = 1;
6715 else
6717 /* If there is nothing special about this call then
6718 we have made everything that is used also escape. */
6719 *pt = ipa_escaped_pt;
6720 pt->nonlocal = 1;
6723 pt = gimple_call_clobber_set (stmt);
6724 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
6725 memset (pt, 0, sizeof (struct pt_solution));
6726 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
6728 find_what_var_points_to (vi, pt);
6729 /* Escaped (and thus nonlocal) variables are always
6730 implicitly clobbered by calls. */
6731 /* ??? ESCAPED can be empty even though NONLOCAL
6732 always escapes. */
6733 pt->nonlocal = 1;
6734 pt->ipa_escaped = 1;
6736 else
6738 /* If there is nothing special about this call then
6739 we have made everything that is used also escape. */
6740 *pt = ipa_escaped_pt;
6741 pt->nonlocal = 1;
6745 /* Handle indirect calls. */
6746 if (!decl
6747 && (fi = get_fi_for_callee (stmt)))
6749 /* We need to accumulate all clobbers/uses of all possible
6750 callees. */
6751 fi = get_varinfo (find (fi->id));
6752 /* If we cannot constrain the set of functions that may be
6753 called, we end up using/clobbering everything. */
6754 if (bitmap_bit_p (fi->solution, anything_id)
6755 || bitmap_bit_p (fi->solution, nonlocal_id)
6756 || bitmap_bit_p (fi->solution, escaped_id))
6758 pt_solution_reset (gimple_call_clobber_set (stmt));
6759 pt_solution_reset (gimple_call_use_set (stmt));
6761 else
6763 bitmap_iterator bi;
6764 unsigned i;
6765 struct pt_solution *uses, *clobbers;
6767 uses = gimple_call_use_set (stmt);
6768 clobbers = gimple_call_clobber_set (stmt);
6769 memset (uses, 0, sizeof (struct pt_solution));
6770 memset (clobbers, 0, sizeof (struct pt_solution));
6771 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
6773 struct pt_solution sol;
6775 vi = get_varinfo (i);
6776 if (!vi->is_fn_info)
6778 /* ??? We could be more precise here? */
6779 uses->nonlocal = 1;
6780 uses->ipa_escaped = 1;
6781 clobbers->nonlocal = 1;
6782 clobbers->ipa_escaped = 1;
6783 continue;
6786 if (!uses->anything)
6788 find_what_var_points_to
6789 (first_vi_for_offset (vi, fi_uses), &sol);
6790 pt_solution_ior_into (uses, &sol);
6792 if (!clobbers->anything)
6794 find_what_var_points_to
6795 (first_vi_for_offset (vi, fi_clobbers), &sol);
6796 pt_solution_ior_into (clobbers, &sol);
6804 fn->gimple_df->ipa_pta = true;
6807 delete_points_to_sets ();
6809 in_ipa_mode = 0;
6811 return 0;
6814 struct simple_ipa_opt_pass pass_ipa_pta =
6817 SIMPLE_IPA_PASS,
6818 "pta", /* name */
6819 gate_ipa_pta, /* gate */
6820 ipa_pta_execute, /* execute */
6821 NULL, /* sub */
6822 NULL, /* next */
6823 0, /* static_pass_number */
6824 TV_IPA_PTA, /* tv_id */
6825 0, /* properties_required */
6826 0, /* properties_provided */
6827 0, /* properties_destroyed */
6828 0, /* todo_flags_start */
6829 TODO_update_ssa /* todo_flags_finish */
6834 #include "gt-tree-ssa-structalias.h"