gcc/tree-ssa-structalias.c (official-gcc.git)
1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dberlin@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3 of the License, or
11 (at your option) any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "ggc.h"
27 #include "obstack.h"
28 #include "bitmap.h"
29 #include "flags.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "tree.h"
33 #include "tree-flow.h"
34 #include "tree-inline.h"
35 #include "diagnostic-core.h"
36 #include "gimple.h"
37 #include "hashtab.h"
38 #include "function.h"
39 #include "cgraph.h"
40 #include "tree-pass.h"
41 #include "timevar.h"
42 #include "alloc-pool.h"
43 #include "splay-tree.h"
44 #include "params.h"
45 #include "cgraph.h"
46 #include "alias.h"
47 #include "pointer-set.h"
49 /* The idea behind this analyzer is to generate set constraints from the
50 program, then solve the resulting constraints in order to generate the
51 points-to sets.
53 Set constraints are a way of modeling program analysis problems that
54 involve sets. They consist of an inclusion constraint language,
55 describing the variables (each variable is a set) and operations that
56 are involved on the variables, and a set of rules that derive facts
57 from these operations. To solve a system of set constraints, you derive
58 all possible facts under the rules, which gives you the correct sets
59 as a consequence.
61 See "Efficient Field-sensitive pointer analysis for C" by "David
62 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
63 http://citeseer.ist.psu.edu/pearce04efficient.html
65 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
66 of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
67 http://citeseer.ist.psu.edu/heintze01ultrafast.html
69 There are three types of real constraint expressions, DEREF,
70 ADDRESSOF, and SCALAR. Each constraint expression consists
71 of a constraint type, a variable, and an offset.
73 SCALAR is a constraint expression type used to represent x, whether
74 it appears on the LHS or the RHS of a statement.
75 DEREF is a constraint expression type used to represent *x, whether
76 it appears on the LHS or the RHS of a statement.
77 ADDRESSOF is a constraint expression used to represent &x, whether
78 it appears on the LHS or the RHS of a statement.
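   As a purely illustrative sketch (not taken from the implementation,
   the variable names are hypothetical), the following valid C fragment
   shows which expression types its statements would give rise to:

     int a;
     int *p, *q;
     int **r;

     p = &a;        p = &a        ADDRESSOF on the RHS
     q = p;         q = p         SCALAR on both sides
     *r = q;        *r = q        DEREF on the LHS
     q = *r;        q = *r        DEREF on the RHS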
80 Each pointer variable in the program is assigned an integer id, and
81 each field of a structure variable is assigned an integer id as well.
83 Structure variables are linked to their list of fields through a "next
84 field" in each variable that points to the next field in offset
85 order.
86 Each variable for a structure field has
88 1. "size", that tells the size in bits of that field.
89 2. "fullsize, that tells the size in bits of the entire structure.
90 3. "offset", that tells the offset in bits from the beginning of the
91 structure to this field.
93 Thus,
94 struct f
95 {
96 int a;
97 int b;
98 } foo;
99 int *bar;
101 looks like
103 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
104 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
105 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
108 In order to solve the system of set constraints, the following is
109 done:
111 1. Each constraint variable x has a solution set associated with it,
112 Sol(x).
114 2. Constraints are separated into direct, copy, and complex.
115 Direct constraints are ADDRESSOF constraints that require no extra
116 processing, such as P = &Q
117 Copy constraints are those of the form P = Q.
118 Complex constraints are all the constraints involving dereferences
119 and offsets (including offsetted copies).
121 3. All direct constraints of the form P = &Q are processed, such
122 that Q is added to Sol(P)
124 4. All complex constraints for a given constraint variable are stored in a
125 linked list attached to that variable's node.
127 5. A directed graph is built out of the copy constraints. Each
128 constraint variable is a node in the graph, and an edge from
129 Q to P is added for each copy constraint of the form P = Q
131 6. The graph is then walked, and solution sets are
132 propagated along the copy edges, such that an edge from Q to P
133 causes Sol(P) <- Sol(P) union Sol(Q).
135 7. As we visit each node, all complex constraints associated with
136 that node are processed by adding appropriate copy edges to the graph, or the
137 appropriate variables to the solution set.
139 8. The process of walking the graph is iterated until no solution
140 sets change.
142 Prior to walking the graph in steps 6 and 7, we perform static
143 cycle elimination on the constraint graph, as well
144 as off-line variable substitution (a small worked example follows below).
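   As the small worked example referred to above (illustrative only,
   with hypothetical variables), consider the constraints

     p = &a     q = p     *q = &b

   Step 3 puts a into Sol(p).  Step 5 adds a copy edge from p to q and
   step 6 propagates along it, so Sol(q) = {a}.  The complex constraint
   *q = &b is handled in step 7: for each member x of Sol(q), b is added
   to Sol(x), giving Sol(a) = {b}.  Step 8 iterates once more, nothing
   further changes, and the final result is Sol(p) = Sol(q) = {a},
   Sol(a) = {b}.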
146 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
147 on and turned into anything), but isn't. You can just see what offset
148 inside the pointed-to struct it's going to access.
150 TODO: Constant bounded arrays can be handled as if they were structs of the
151 same number of elements.
153 TODO: Modeling heap and incoming pointers becomes much better if we
154 add fields to them as we discover them, which we could do.
156 TODO: We could handle unions, but to be honest, it's probably not
157 worth the pain or slowdown. */
159 /* IPA-PTA optimizations possible.
161 When the indirect function called is ANYTHING we can add disambiguation
162 based on the function signatures (or simply the parameter count which
163 is the varinfo size). We also do not need to consider functions that
164 do not have their address taken.
166 The is_global_var bit which marks escape points is overly conservative
167 in IPA mode. Split it to is_escape_point and is_global_var - only
168 externally visible globals are escape points in IPA mode. This is
169 also needed to fix the pt_solution_includes_global predicate
170 (and thus ptr_deref_may_alias_global_p).
172 The way we introduce DECL_PT_UID to avoid fixing up all points-to
173 sets in the translation unit when we copy a DECL during inlining
174 pessimizes precision. The advantage is that the DECL_PT_UID keeps
175 compile-time and memory usage overhead low - the points-to sets
176 do not grow or get unshared as they would during a fixup phase.
177 An alternative solution is to delay IPA PTA until after all
178 inlining transformations have been applied.
180 The way we propagate clobber/use information isn't optimized.
181 It should use a new complex constraint that properly filters
182 out local variables of the callee (though that would make
183 the sets invalid after inlining). OTOH we might as well
184 admit defeat to WHOPR and simply do all the clobber/use analysis
185 and propagation after PTA finished but before we threw away
186 points-to information for memory variables. WHOPR and PTA
187 do not play along well anyway - the whole constraint solving
188 would need to be done in WPA phase and it will be very interesting
189 to apply the results to local SSA names during LTRANS phase.
191 We probably should compute a per-function unit-ESCAPE solution
192 propagating it simply like the clobber / uses solutions. The
193 solution can go alongside the non-IPA escaped solution and be
194 used to query which vars escape the unit through a function.
196 We never put function decls in points-to sets so we do not
197 keep the set of called functions for indirect calls.
199 And probably more. */
201 static bool use_field_sensitive = true;
202 static int in_ipa_mode = 0;
204 /* Used for predecessor bitmaps. */
205 static bitmap_obstack predbitmap_obstack;
207 /* Used for points-to sets. */
208 static bitmap_obstack pta_obstack;
210 /* Used for oldsolution members of variables. */
211 static bitmap_obstack oldpta_obstack;
213 /* Used for per-solver-iteration bitmaps. */
214 static bitmap_obstack iteration_obstack;
216 static unsigned int create_variable_info_for (tree, const char *);
217 typedef struct constraint_graph *constraint_graph_t;
218 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
220 struct constraint;
221 typedef struct constraint *constraint_t;
223 DEF_VEC_P(constraint_t);
224 DEF_VEC_ALLOC_P(constraint_t,heap);
226 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
227 if (a) \
228 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
230 static struct constraint_stats
232 unsigned int total_vars;
233 unsigned int nonpointer_vars;
234 unsigned int unified_vars_static;
235 unsigned int unified_vars_dynamic;
236 unsigned int iterations;
237 unsigned int num_edges;
238 unsigned int num_implicit_edges;
239 unsigned int points_to_sets_created;
240 } stats;
242 struct variable_info
244 /* ID of this variable */
245 unsigned int id;
247 /* True if this is a variable created by the constraint analysis, such as
248 heap variables and constraints we had to break up. */
249 unsigned int is_artificial_var : 1;
251 /* True if this is a special variable whose solution set should not be
252 changed. */
253 unsigned int is_special_var : 1;
255 /* True for variables whose size is not known or variable. */
256 unsigned int is_unknown_size_var : 1;
258 /* True for (sub-)fields that represent a whole variable. */
259 unsigned int is_full_var : 1;
261 /* True if this is a heap variable. */
262 unsigned int is_heap_var : 1;
264 /* True if this field may contain pointers. */
265 unsigned int may_have_pointers : 1;
267 /* True if this field has only restrict qualified pointers. */
268 unsigned int only_restrict_pointers : 1;
270 /* True if this represents a global variable. */
271 unsigned int is_global_var : 1;
274 /* True if this represents an IPA function info. */
274 unsigned int is_fn_info : 1;
276 /* A link to the variable for the next field in this structure. */
277 struct variable_info *next;
279 /* Offset of this variable, in bits, from the base variable */
280 unsigned HOST_WIDE_INT offset;
282 /* Size of the variable, in bits. */
283 unsigned HOST_WIDE_INT size;
285 /* Full size of the base variable, in bits. */
286 unsigned HOST_WIDE_INT fullsize;
288 /* Name of this variable */
289 const char *name;
291 /* Tree that this variable is associated with. */
292 tree decl;
294 /* Points-to set for this variable. */
295 bitmap solution;
297 /* Old points-to set for this variable. */
298 bitmap oldsolution;
300 typedef struct variable_info *varinfo_t;
302 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
303 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
304 unsigned HOST_WIDE_INT);
305 static varinfo_t lookup_vi_for_tree (tree);
306 static inline bool type_can_have_subvars (const_tree);
308 /* Pool of variable info structures. */
309 static alloc_pool variable_info_pool;
311 DEF_VEC_P(varinfo_t);
313 DEF_VEC_ALLOC_P(varinfo_t, heap);
315 /* Table of variable info structures for constraint variables.
316 Indexed directly by variable info id. */
317 static VEC(varinfo_t,heap) *varmap;
319 /* Return the varmap element N */
321 static inline varinfo_t
322 get_varinfo (unsigned int n)
324 return VEC_index (varinfo_t, varmap, n);
327 /* Static IDs for the special variables. */
328 enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
329 escaped_id = 3, nonlocal_id = 4,
330 storedanything_id = 5, integer_id = 6 };
332 /* Return a new variable info structure for a variable named NAME
333 associated with tree T. Append it to the vector of variable info
334 structures. */
336 static varinfo_t
337 new_var_info (tree t, const char *name)
339 unsigned index = VEC_length (varinfo_t, varmap);
340 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
342 ret->id = index;
343 ret->name = name;
344 ret->decl = t;
345 /* Vars without decl are artificial and do not have sub-variables. */
346 ret->is_artificial_var = (t == NULL_TREE);
347 ret->is_special_var = false;
348 ret->is_unknown_size_var = false;
349 ret->is_full_var = (t == NULL_TREE);
350 ret->is_heap_var = false;
351 ret->may_have_pointers = true;
352 ret->only_restrict_pointers = false;
353 ret->is_global_var = (t == NULL_TREE);
354 ret->is_fn_info = false;
355 if (t && DECL_P (t))
356 ret->is_global_var = (is_global_var (t)
357 /* We have to treat even local register variables
358 as escape points. */
359 || (TREE_CODE (t) == VAR_DECL
360 && DECL_HARD_REGISTER (t)));
361 ret->solution = BITMAP_ALLOC (&pta_obstack);
362 ret->oldsolution = NULL;
363 ret->next = NULL;
365 stats.total_vars++;
367 VEC_safe_push (varinfo_t, heap, varmap, ret);
369 return ret;
373 /* A map mapping call statements to per-stmt variables for uses
374 and clobbers specific to the call. */
375 struct pointer_map_t *call_stmt_vars;
377 /* Lookup or create the variable for the call statement CALL. */
379 static varinfo_t
380 get_call_vi (gimple call)
382 void **slot_p;
383 varinfo_t vi, vi2;
385 slot_p = pointer_map_insert (call_stmt_vars, call);
386 if (*slot_p)
387 return (varinfo_t) *slot_p;
389 vi = new_var_info (NULL_TREE, "CALLUSED");
390 vi->offset = 0;
391 vi->size = 1;
392 vi->fullsize = 2;
393 vi->is_full_var = true;
395 vi->next = vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
396 vi2->offset = 1;
397 vi2->size = 1;
398 vi2->fullsize = 2;
399 vi2->is_full_var = true;
401 *slot_p = (void *) vi;
402 return vi;
405 /* Lookup the variable for the call statement CALL representing
406 the uses. Returns NULL if there is nothing special about this call. */
408 static varinfo_t
409 lookup_call_use_vi (gimple call)
411 void **slot_p;
413 slot_p = pointer_map_contains (call_stmt_vars, call);
414 if (slot_p)
415 return (varinfo_t) *slot_p;
417 return NULL;
420 /* Lookup the variable for the call statement CALL representing
421 the clobbers. Returns NULL if there is nothing special about this call. */
423 static varinfo_t
424 lookup_call_clobber_vi (gimple call)
426 varinfo_t uses = lookup_call_use_vi (call);
427 if (!uses)
428 return NULL;
430 return uses->next;
433 /* Lookup or create the variable for the call statement CALL representing
434 the uses. */
436 static varinfo_t
437 get_call_use_vi (gimple call)
439 return get_call_vi (call);
442 /* Lookup or create the variable for the call statement CALL representing
443 the clobbers. */
445 static varinfo_t ATTRIBUTE_UNUSED
446 get_call_clobber_vi (gimple call)
448 return get_call_vi (call)->next;
452 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
454 /* An expression that appears in a constraint. */
456 struct constraint_expr
458 /* Constraint type. */
459 constraint_expr_type type;
461 /* Variable we are referring to in the constraint. */
462 unsigned int var;
464 /* Offset, in bits, of this constraint from the beginning of
465 variables it ends up referring to.
467 IOW, in a deref constraint, we would deref, get the result set,
468 then add OFFSET to each member. */
469 HOST_WIDE_INT offset;
472 /* Use 0x8000... as special unknown offset. */
473 #define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1))
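/* Illustrative note, assuming the common case of a 64-bit
   HOST_WIDE_INT: the shift above yields the most negative
   representable value (bit pattern 0x8000000000000000), which can
   never coincide with a real bit offset.  */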
475 typedef struct constraint_expr ce_s;
476 DEF_VEC_O(ce_s);
477 DEF_VEC_ALLOC_O(ce_s, heap);
478 static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool, bool);
479 static void get_constraint_for (tree, VEC(ce_s, heap) **);
480 static void get_constraint_for_rhs (tree, VEC(ce_s, heap) **);
481 static void do_deref (VEC (ce_s, heap) **);
483 /* Our set constraints are made up of two constraint expressions, one
484 LHS, and one RHS.
486 As described in the introduction, our set constraints each represent an
487 operation between set valued variables.
488 */
489 struct constraint
491 struct constraint_expr lhs;
492 struct constraint_expr rhs;
495 /* List of constraints that we use to build the constraint graph from. */
497 static VEC(constraint_t,heap) *constraints;
498 static alloc_pool constraint_pool;
500 /* The constraint graph is represented as an array of bitmaps
501 containing successor nodes. */
503 struct constraint_graph
505 /* Size of this graph, which may be different than the number of
506 nodes in the variable map. */
507 unsigned int size;
509 /* Explicit successors of each node. */
510 bitmap *succs;
512 /* Implicit predecessors of each node (Used for variable
513 substitution). */
514 bitmap *implicit_preds;
516 /* Explicit predecessors of each node (Used for variable substitution). */
517 bitmap *preds;
519 /* Indirect cycle representatives, or -1 if the node has no indirect
520 cycles. */
521 int *indirect_cycles;
523 /* Representative node for a node. rep[a] == a unless the node has
524 been unified. */
525 unsigned int *rep;
527 /* Equivalence class representative for a label. This is used for
528 variable substitution. */
529 int *eq_rep;
531 /* Pointer equivalence label for a node. All nodes with the same
532 pointer equivalence label can be unified together at some point
533 (either during constraint optimization or after the constraint
534 graph is built). */
535 unsigned int *pe;
537 /* Pointer equivalence representative for a label. This is used to
538 handle nodes that are pointer equivalent but not location
539 equivalent. We can unite these once the addressof constraints
540 are transformed into initial points-to sets. */
541 int *pe_rep;
543 /* Pointer equivalence label for each node, used during variable
544 substitution. */
545 unsigned int *pointer_label;
547 /* Location equivalence label for each node, used during location
548 equivalence finding. */
549 unsigned int *loc_label;
551 /* Pointed-by set for each node, used during location equivalence
552 finding. This is pointed-by rather than pointed-to, because it
553 is constructed using the predecessor graph. */
554 bitmap *pointed_by;
556 /* Points-to sets for pointer equivalence. This is *not* the actual
557 points-to sets for nodes. */
558 bitmap *points_to;
560 /* Bitmap of nodes where the bit is set if the node is a direct
561 node. Used for variable substitution. */
562 sbitmap direct_nodes;
564 /* Bitmap of nodes where the bit is set if the node is address
565 taken. Used for variable substitution. */
566 bitmap address_taken;
568 /* Vector of complex constraints for each graph node. Complex
569 constraints are those involving dereferences or offsets that are
570 not 0. */
571 VEC(constraint_t,heap) **complex;
574 static constraint_graph_t graph;
576 /* During variable substitution and the offline version of indirect
577 cycle finding, we create nodes to represent dereferences and
578 address taken constraints. FIRST_REF_NODE and LAST_REF_NODE mark
579 where these extra nodes start and end. */
580 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
581 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
583 /* Return the representative node for NODE, if NODE has been unioned
584 with another NODE.
585 This function performs path compression along the way to finding
586 the representative. */
588 static unsigned int
589 find (unsigned int node)
591 gcc_assert (node < graph->size);
592 if (graph->rep[node] != node)
593 return graph->rep[node] = find (graph->rep[node]);
594 return node;
597 /* Union the FROM and TO nodes into the TO node.
598 Note that at some point in the future, we may want to do
599 union-by-rank, in which case we are going to have to return the
600 node we unified to. */
602 static bool
603 unite (unsigned int to, unsigned int from)
605 gcc_assert (to < graph->size && from < graph->size);
606 if (to != from && graph->rep[from] != to)
608 graph->rep[from] = to;
609 return true;
611 return false;
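/* A purely illustrative example of the union-find behaviour above (not
   part of the implementation): starting from rep = { 0, 1, 2 },
   unite (1, 2) sets rep[2] = 1 and unite (0, 1) sets rep[1] = 0.  A
   subsequent find (2) walks 2 -> 1 -> 0, returns 0, and path
   compression rewrites rep[2] to 0 so later lookups are constant
   time.  */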
614 /* Create a new constraint consisting of LHS and RHS expressions. */
616 static constraint_t
617 new_constraint (const struct constraint_expr lhs,
618 const struct constraint_expr rhs)
620 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
621 ret->lhs = lhs;
622 ret->rhs = rhs;
623 return ret;
626 /* Print out constraint C to FILE. */
628 static void
629 dump_constraint (FILE *file, constraint_t c)
631 if (c->lhs.type == ADDRESSOF)
632 fprintf (file, "&");
633 else if (c->lhs.type == DEREF)
634 fprintf (file, "*");
635 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
636 if (c->lhs.offset == UNKNOWN_OFFSET)
637 fprintf (file, " + UNKNOWN");
638 else if (c->lhs.offset != 0)
639 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
640 fprintf (file, " = ");
641 if (c->rhs.type == ADDRESSOF)
642 fprintf (file, "&");
643 else if (c->rhs.type == DEREF)
644 fprintf (file, "*");
645 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
646 if (c->rhs.offset == UNKNOWN_OFFSET)
647 fprintf (file, " + UNKNOWN");
648 else if (c->rhs.offset != 0)
649 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
653 void debug_constraint (constraint_t);
654 void debug_constraints (void);
655 void debug_constraint_graph (void);
656 void debug_solution_for_var (unsigned int);
657 void debug_sa_points_to_info (void);
659 /* Print out constraint C to stderr. */
661 DEBUG_FUNCTION void
662 debug_constraint (constraint_t c)
664 dump_constraint (stderr, c);
665 fprintf (stderr, "\n");
668 /* Print out all constraints to FILE */
670 static void
671 dump_constraints (FILE *file, int from)
673 int i;
674 constraint_t c;
675 for (i = from; VEC_iterate (constraint_t, constraints, i, c); i++)
676 if (c)
678 dump_constraint (file, c);
679 fprintf (file, "\n");
683 /* Print out all constraints to stderr. */
685 DEBUG_FUNCTION void
686 debug_constraints (void)
688 dump_constraints (stderr, 0);
691 /* Print the constraint graph in dot format. */
693 static void
694 dump_constraint_graph (FILE *file)
696 unsigned int i;
698 /* Only print the graph if it has already been initialized: */
699 if (!graph)
700 return;
702 /* Prints the header of the dot file: */
703 fprintf (file, "strict digraph {\n");
704 fprintf (file, " node [\n shape = box\n ]\n");
705 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
706 fprintf (file, "\n // List of nodes and complex constraints in "
707 "the constraint graph:\n");
709 /* The next lines print the nodes in the graph together with the
710 complex constraints attached to them. */
711 for (i = 0; i < graph->size; i++)
713 if (find (i) != i)
714 continue;
715 if (i < FIRST_REF_NODE)
716 fprintf (file, "\"%s\"", get_varinfo (i)->name);
717 else
718 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
719 if (graph->complex[i])
721 unsigned j;
722 constraint_t c;
723 fprintf (file, " [label=\"\\N\\n");
724 for (j = 0; VEC_iterate (constraint_t, graph->complex[i], j, c); ++j)
726 dump_constraint (file, c);
727 fprintf (file, "\\l");
729 fprintf (file, "\"]");
731 fprintf (file, ";\n");
734 /* Go over the edges. */
735 fprintf (file, "\n // Edges in the constraint graph:\n");
736 for (i = 0; i < graph->size; i++)
738 unsigned j;
739 bitmap_iterator bi;
740 if (find (i) != i)
741 continue;
742 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
744 unsigned to = find (j);
745 if (i == to)
746 continue;
747 if (i < FIRST_REF_NODE)
748 fprintf (file, "\"%s\"", get_varinfo (i)->name);
749 else
750 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
751 fprintf (file, " -> ");
752 if (to < FIRST_REF_NODE)
753 fprintf (file, "\"%s\"", get_varinfo (to)->name);
754 else
755 fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
756 fprintf (file, ";\n");
760 /* Prints the tail of the dot file. */
761 fprintf (file, "}\n");
764 /* Print out the constraint graph to stderr. */
766 DEBUG_FUNCTION void
767 debug_constraint_graph (void)
769 dump_constraint_graph (stderr);
772 /* SOLVER FUNCTIONS
774 The solver is a simple worklist solver that works on the following
775 algorithm:
777 sbitmap changed_nodes = all zeroes;
778 changed_count = 0;
779 For each node that is not already collapsed:
780 changed_count++;
781 set bit in changed nodes
783 while (changed_count > 0)
785 compute topological ordering for constraint graph
787 find and collapse cycles in the constraint graph (updating
788 changed if necessary)
790 for each node (n) in the graph in topological order:
791 changed_count--;
793 Process each complex constraint associated with the node,
794 updating changed if necessary.
796 For each outgoing edge from n, propagate the solution from n to
797 the destination of the edge, updating changed as necessary.
799 } */
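/* In the code below, the "changed" set from the sketch above is kept
   in the bitmap CHANGED, the topological ordering is produced by
   compute_topo_order / topo_visit, and cycles are collapsed by
   unifying nodes via scc_visit and unify_nodes.  */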
801 /* Return true if two constraint expressions A and B are equal. */
803 static bool
804 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
806 return a.type == b.type && a.var == b.var && a.offset == b.offset;
809 /* Return true if constraint expression A is less than constraint expression
810 B. This is just arbitrary, but consistent, in order to give them an
811 ordering. */
813 static bool
814 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
816 if (a.type == b.type)
818 if (a.var == b.var)
819 return a.offset < b.offset;
820 else
821 return a.var < b.var;
823 else
824 return a.type < b.type;
827 /* Return true if constraint A is less than constraint B. This is just
828 arbitrary, but consistent, in order to give them an ordering. */
830 static bool
831 constraint_less (const constraint_t a, const constraint_t b)
833 if (constraint_expr_less (a->lhs, b->lhs))
834 return true;
835 else if (constraint_expr_less (b->lhs, a->lhs))
836 return false;
837 else
838 return constraint_expr_less (a->rhs, b->rhs);
841 /* Return true if two constraints A and B are equal. */
843 static bool
844 constraint_equal (struct constraint a, struct constraint b)
846 return constraint_expr_equal (a.lhs, b.lhs)
847 && constraint_expr_equal (a.rhs, b.rhs);
851 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
853 static constraint_t
854 constraint_vec_find (VEC(constraint_t,heap) *vec,
855 struct constraint lookfor)
857 unsigned int place;
858 constraint_t found;
860 if (vec == NULL)
861 return NULL;
863 place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
864 if (place >= VEC_length (constraint_t, vec))
865 return NULL;
866 found = VEC_index (constraint_t, vec, place);
867 if (!constraint_equal (*found, lookfor))
868 return NULL;
869 return found;
872 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
874 static void
875 constraint_set_union (VEC(constraint_t,heap) **to,
876 VEC(constraint_t,heap) **from)
878 int i;
879 constraint_t c;
881 FOR_EACH_VEC_ELT (constraint_t, *from, i, c)
883 if (constraint_vec_find (*to, *c) == NULL)
885 unsigned int place = VEC_lower_bound (constraint_t, *to, c,
886 constraint_less);
887 VEC_safe_insert (constraint_t, heap, *to, place, c);
892 /* Expands the solution in SET to all sub-fields of variables included.
893 Union the expanded result into RESULT. */
895 static void
896 solution_set_expand (bitmap result, bitmap set)
898 bitmap_iterator bi;
899 bitmap vars = NULL;
900 unsigned j;
902 /* In a first pass record all variables we need to add all
903 sub-fields of. This avoids quadratic behavior. */
904 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
906 varinfo_t v = get_varinfo (j);
907 if (v->is_artificial_var
908 || v->is_full_var)
909 continue;
910 v = lookup_vi_for_tree (v->decl);
911 if (vars == NULL)
912 vars = BITMAP_ALLOC (NULL);
913 bitmap_set_bit (vars, v->id);
916 /* In the second pass now do the addition to the solution and
917 to speed up solving add it to the delta as well. */
918 if (vars != NULL)
920 EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
922 varinfo_t v = get_varinfo (j);
923 for (; v != NULL; v = v->next)
924 bitmap_set_bit (result, v->id);
926 BITMAP_FREE (vars);
930 /* Take a solution set SET, add OFFSET to each member of the set, and
931 overwrite SET with the result when done. */
933 static void
934 solution_set_add (bitmap set, HOST_WIDE_INT offset)
936 bitmap result = BITMAP_ALLOC (&iteration_obstack);
937 unsigned int i;
938 bitmap_iterator bi;
940 /* If the offset is unknown we have to expand the solution to
941 all subfields. */
942 if (offset == UNKNOWN_OFFSET)
944 solution_set_expand (set, set);
945 return;
948 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
950 varinfo_t vi = get_varinfo (i);
952 /* If this is a variable with just one field just set its bit
953 in the result. */
954 if (vi->is_artificial_var
955 || vi->is_unknown_size_var
956 || vi->is_full_var)
957 bitmap_set_bit (result, i);
958 else
960 unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset;
962 /* If the offset makes the pointer point before the
963 variable, use offset zero for the field lookup. */
964 if (offset < 0
965 && fieldoffset > vi->offset)
966 fieldoffset = 0;
968 if (offset != 0)
969 vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
971 bitmap_set_bit (result, vi->id);
972 /* If the result is not exactly at fieldoffset include the next
973 field as well. See get_constraint_for_ptr_offset for more
974 rationale. */
975 if (vi->offset != fieldoffset
976 && vi->next != NULL)
977 bitmap_set_bit (result, vi->next->id);
981 bitmap_copy (set, result);
982 BITMAP_FREE (result);
985 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
986 process. */
988 static bool
989 set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc)
991 if (inc == 0)
992 return bitmap_ior_into (to, from);
993 else
995 bitmap tmp;
996 bool res;
998 tmp = BITMAP_ALLOC (&iteration_obstack);
999 bitmap_copy (tmp, from);
1000 solution_set_add (tmp, inc);
1001 res = bitmap_ior_into (to, tmp);
1002 BITMAP_FREE (tmp);
1003 return res;
1007 /* Insert constraint C into the list of complex constraints for graph
1008 node VAR. */
1010 static void
1011 insert_into_complex (constraint_graph_t graph,
1012 unsigned int var, constraint_t c)
1014 VEC (constraint_t, heap) *complex = graph->complex[var];
1015 unsigned int place = VEC_lower_bound (constraint_t, complex, c,
1016 constraint_less);
1018 /* Only insert constraints that do not already exist. */
1019 if (place >= VEC_length (constraint_t, complex)
1020 || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
1021 VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
1025 /* Condense two variable nodes into a single variable node, by moving
1026 all associated info from FROM to TO. */
1028 static void
1029 merge_node_constraints (constraint_graph_t graph, unsigned int to,
1030 unsigned int from)
1032 unsigned int i;
1033 constraint_t c;
1035 gcc_assert (find (from) == to);
1037 /* Move all complex constraints from the FROM node into the TO node. */
1038 FOR_EACH_VEC_ELT (constraint_t, graph->complex[from], i, c)
1040 /* In complex constraints for node FROM, we may have either
1041 a = *FROM or *FROM = a, or an offsetted constraint, which is
1042 always added to the rhs node's constraints. */
1044 if (c->rhs.type == DEREF)
1045 c->rhs.var = to;
1046 else if (c->lhs.type == DEREF)
1047 c->lhs.var = to;
1048 else
1049 c->rhs.var = to;
1051 constraint_set_union (&graph->complex[to], &graph->complex[from]);
1052 VEC_free (constraint_t, heap, graph->complex[from]);
1053 graph->complex[from] = NULL;
1057 /* Remove edges involving NODE from GRAPH. */
1059 static void
1060 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1062 if (graph->succs[node])
1063 BITMAP_FREE (graph->succs[node]);
1066 /* Merge GRAPH nodes FROM and TO into node TO. */
1068 static void
1069 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1070 unsigned int from)
1072 if (graph->indirect_cycles[from] != -1)
1074 /* If we have indirect cycles with the from node, and we have
1075 none on the to node, the to node has indirect cycles from the
1076 from node now that they are unified.
1077 If indirect cycles exist on both, unify the nodes that they
1078 are in a cycle with, since we know they are in a cycle with
1079 each other. */
1080 if (graph->indirect_cycles[to] == -1)
1081 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1084 /* Merge all the successor edges. */
1085 if (graph->succs[from])
1087 if (!graph->succs[to])
1088 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1089 bitmap_ior_into (graph->succs[to],
1090 graph->succs[from]);
1093 clear_edges_for_node (graph, from);
1097 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1098 it doesn't exist in the graph already. */
1100 static void
1101 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1102 unsigned int from)
1104 if (to == from)
1105 return;
1107 if (!graph->implicit_preds[to])
1108 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1110 if (bitmap_set_bit (graph->implicit_preds[to], from))
1111 stats.num_implicit_edges++;
1114 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1115 it doesn't exist in the graph already. */
1118 static void
1119 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1120 unsigned int from)
1122 if (!graph->preds[to])
1123 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1124 bitmap_set_bit (graph->preds[to], from);
1127 /* Add a graph edge to GRAPH, going from FROM to TO if
1128 it doesn't exist in the graph already.
1129 Return false if the edge already existed, true otherwise. */
1131 static bool
1132 add_graph_edge (constraint_graph_t graph, unsigned int to,
1133 unsigned int from)
1135 if (to == from)
1137 return false;
1139 else
1141 bool r = false;
1143 if (!graph->succs[from])
1144 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1145 if (bitmap_set_bit (graph->succs[from], to))
1147 r = true;
1148 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1149 stats.num_edges++;
1151 return r;
1156 /* Return true if DEST -> SRC is an existing graph edge in GRAPH. */
1158 static bool
1159 valid_graph_edge (constraint_graph_t graph, unsigned int src,
1160 unsigned int dest)
1162 return (graph->succs[dest]
1163 && bitmap_bit_p (graph->succs[dest], src));
1166 /* Initialize the constraint graph structure to contain SIZE nodes. */
1168 static void
1169 init_graph (unsigned int size)
1171 unsigned int j;
1173 graph = XCNEW (struct constraint_graph);
1174 graph->size = size;
1175 graph->succs = XCNEWVEC (bitmap, graph->size);
1176 graph->indirect_cycles = XNEWVEC (int, graph->size);
1177 graph->rep = XNEWVEC (unsigned int, graph->size);
1178 graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
1179 graph->pe = XCNEWVEC (unsigned int, graph->size);
1180 graph->pe_rep = XNEWVEC (int, graph->size);
1182 for (j = 0; j < graph->size; j++)
1184 graph->rep[j] = j;
1185 graph->pe_rep[j] = -1;
1186 graph->indirect_cycles[j] = -1;
1190 /* Build the constraint graph, adding only predecessor edges right now. */
1192 static void
1193 build_pred_graph (void)
1195 int i;
1196 constraint_t c;
1197 unsigned int j;
1199 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1200 graph->preds = XCNEWVEC (bitmap, graph->size);
1201 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1202 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1203 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1204 graph->points_to = XCNEWVEC (bitmap, graph->size);
1205 graph->eq_rep = XNEWVEC (int, graph->size);
1206 graph->direct_nodes = sbitmap_alloc (graph->size);
1207 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1208 sbitmap_zero (graph->direct_nodes);
1210 for (j = 0; j < FIRST_REF_NODE; j++)
1212 if (!get_varinfo (j)->is_special_var)
1213 SET_BIT (graph->direct_nodes, j);
1216 for (j = 0; j < graph->size; j++)
1217 graph->eq_rep[j] = -1;
1219 for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
1220 graph->indirect_cycles[j] = -1;
1222 FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
1224 struct constraint_expr lhs = c->lhs;
1225 struct constraint_expr rhs = c->rhs;
1226 unsigned int lhsvar = lhs.var;
1227 unsigned int rhsvar = rhs.var;
1229 if (lhs.type == DEREF)
1231 /* *x = y. */
1232 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1233 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1235 else if (rhs.type == DEREF)
1237 /* x = *y */
1238 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1239 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1240 else
1241 RESET_BIT (graph->direct_nodes, lhsvar);
1243 else if (rhs.type == ADDRESSOF)
1245 varinfo_t v;
1247 /* x = &y */
1248 if (graph->points_to[lhsvar] == NULL)
1249 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1250 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1252 if (graph->pointed_by[rhsvar] == NULL)
1253 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1254 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1256 /* Implicitly, *x = y */
1257 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1259 /* All related variables are no longer direct nodes. */
1260 RESET_BIT (graph->direct_nodes, rhsvar);
1261 v = get_varinfo (rhsvar);
1262 if (!v->is_full_var)
1264 v = lookup_vi_for_tree (v->decl);
1267 RESET_BIT (graph->direct_nodes, v->id);
1268 v = v->next;
1270 while (v != NULL);
1272 bitmap_set_bit (graph->address_taken, rhsvar);
1274 else if (lhsvar > anything_id
1275 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1277 /* x = y */
1278 add_pred_graph_edge (graph, lhsvar, rhsvar);
1279 /* Implicitly, *x = *y */
1280 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1281 FIRST_REF_NODE + rhsvar);
1283 else if (lhs.offset != 0 || rhs.offset != 0)
1285 if (rhs.offset != 0)
1286 RESET_BIT (graph->direct_nodes, lhs.var);
1287 else if (lhs.offset != 0)
1288 RESET_BIT (graph->direct_nodes, rhs.var);
1293 /* Build the constraint graph, adding successor edges. */
1295 static void
1296 build_succ_graph (void)
1298 unsigned i, t;
1299 constraint_t c;
1301 FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
1303 struct constraint_expr lhs;
1304 struct constraint_expr rhs;
1305 unsigned int lhsvar;
1306 unsigned int rhsvar;
1308 if (!c)
1309 continue;
1311 lhs = c->lhs;
1312 rhs = c->rhs;
1313 lhsvar = find (lhs.var);
1314 rhsvar = find (rhs.var);
1316 if (lhs.type == DEREF)
1318 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1319 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1321 else if (rhs.type == DEREF)
1323 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1324 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1326 else if (rhs.type == ADDRESSOF)
1328 /* x = &y */
1329 gcc_assert (find (rhs.var) == rhs.var);
1330 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1332 else if (lhsvar > anything_id
1333 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1335 add_graph_edge (graph, lhsvar, rhsvar);
1339 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1340 receive pointers. */
1341 t = find (storedanything_id);
1342 for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1344 if (!TEST_BIT (graph->direct_nodes, i)
1345 && get_varinfo (i)->may_have_pointers)
1346 add_graph_edge (graph, find (i), t);
1349 /* Everything stored to ANYTHING also potentially escapes. */
1350 add_graph_edge (graph, find (escaped_id), t);
1354 /* Changed variables on the last iteration. */
1355 static bitmap changed;
1357 /* Strongly Connected Component visitation info. */
1359 struct scc_info
1361 sbitmap visited;
1362 sbitmap deleted;
1363 unsigned int *dfs;
1364 unsigned int *node_mapping;
1365 int current_index;
1366 VEC(unsigned,heap) *scc_stack;
1370 /* Recursive routine to find strongly connected components in GRAPH.
1371 SI is the SCC info to store the information in, and N is the id of the
1372 current graph node we are processing.
1374 This is Tarjan's strongly connected component finding algorithm, as
1375 modified by Nuutila to keep only non-root nodes on the stack.
1376 The algorithm can be found in "On finding the strongly connected
1377 components in a directed graph" by Esko Nuutila and Eljas
1378 Soisalon-Soininen, in Information Processing Letters volume 49,
1379 number 1, pages 9-14. */
1381 static void
1382 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1384 unsigned int i;
1385 bitmap_iterator bi;
1386 unsigned int my_dfs;
1388 SET_BIT (si->visited, n);
1389 si->dfs[n] = si->current_index ++;
1390 my_dfs = si->dfs[n];
1392 /* Visit all the successors. */
1393 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1395 unsigned int w;
1397 if (i > LAST_REF_NODE)
1398 break;
1400 w = find (i);
1401 if (TEST_BIT (si->deleted, w))
1402 continue;
1404 if (!TEST_BIT (si->visited, w))
1405 scc_visit (graph, si, w);
1407 unsigned int t = find (w);
1408 unsigned int nnode = find (n);
1409 gcc_assert (nnode == n);
1411 if (si->dfs[t] < si->dfs[nnode])
1412 si->dfs[n] = si->dfs[t];
1416 /* See if any components have been identified. */
1417 if (si->dfs[n] == my_dfs)
1419 if (VEC_length (unsigned, si->scc_stack) > 0
1420 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1422 bitmap scc = BITMAP_ALLOC (NULL);
1423 unsigned int lowest_node;
1424 bitmap_iterator bi;
1426 bitmap_set_bit (scc, n);
1428 while (VEC_length (unsigned, si->scc_stack) != 0
1429 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
1431 unsigned int w = VEC_pop (unsigned, si->scc_stack);
1433 bitmap_set_bit (scc, w);
1436 lowest_node = bitmap_first_set_bit (scc);
1437 gcc_assert (lowest_node < FIRST_REF_NODE);
1439 /* Collapse the SCC nodes into a single node, and mark the
1440 indirect cycles. */
1441 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1443 if (i < FIRST_REF_NODE)
1445 if (unite (lowest_node, i))
1446 unify_nodes (graph, lowest_node, i, false);
1448 else
1450 unite (lowest_node, i);
1451 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1455 SET_BIT (si->deleted, n);
1457 else
1458 VEC_safe_push (unsigned, heap, si->scc_stack, n);
1461 /* Unify node FROM into node TO, updating the changed count if
1462 necessary when UPDATE_CHANGED is true. */
1464 static void
1465 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1466 bool update_changed)
1469 gcc_assert (to != from && find (to) == to);
1470 if (dump_file && (dump_flags & TDF_DETAILS))
1471 fprintf (dump_file, "Unifying %s to %s\n",
1472 get_varinfo (from)->name,
1473 get_varinfo (to)->name);
1475 if (update_changed)
1476 stats.unified_vars_dynamic++;
1477 else
1478 stats.unified_vars_static++;
1480 merge_graph_nodes (graph, to, from);
1481 merge_node_constraints (graph, to, from);
1483 /* Mark TO as changed if FROM was changed. FROM's bit in the changed
1484 bitmap is cleared since FROM no longer represents itself. */
1486 if (update_changed
1487 && bitmap_bit_p (changed, from))
1489 bitmap_clear_bit (changed, from);
1490 bitmap_set_bit (changed, to);
1492 if (get_varinfo (from)->solution)
1494 /* If the solution changes because of the merging, we need to mark
1495 the variable as changed. */
1496 if (bitmap_ior_into (get_varinfo (to)->solution,
1497 get_varinfo (from)->solution))
1499 if (update_changed)
1500 bitmap_set_bit (changed, to);
1503 BITMAP_FREE (get_varinfo (from)->solution);
1504 if (get_varinfo (from)->oldsolution)
1505 BITMAP_FREE (get_varinfo (from)->oldsolution);
1507 if (stats.iterations > 0
1508 && get_varinfo (to)->oldsolution)
1509 BITMAP_FREE (get_varinfo (to)->oldsolution);
1511 if (valid_graph_edge (graph, to, to))
1513 if (graph->succs[to])
1514 bitmap_clear_bit (graph->succs[to], to);
1518 /* Information needed to compute the topological ordering of a graph. */
1520 struct topo_info
1522 /* sbitmap of visited nodes. */
1523 sbitmap visited;
1524 /* Array that stores the topological order of the graph, *in
1525 reverse*. */
1526 VEC(unsigned,heap) *topo_order;
1530 /* Initialize and return a topological info structure. */
1532 static struct topo_info *
1533 init_topo_info (void)
1535 size_t size = graph->size;
1536 struct topo_info *ti = XNEW (struct topo_info);
1537 ti->visited = sbitmap_alloc (size);
1538 sbitmap_zero (ti->visited);
1539 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1540 return ti;
1544 /* Free the topological sort info pointed to by TI. */
1546 static void
1547 free_topo_info (struct topo_info *ti)
1549 sbitmap_free (ti->visited);
1550 VEC_free (unsigned, heap, ti->topo_order);
1551 free (ti);
1554 /* Visit the graph in topological order, and store the order in the
1555 topo_info structure. */
1557 static void
1558 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1559 unsigned int n)
1561 bitmap_iterator bi;
1562 unsigned int j;
1564 SET_BIT (ti->visited, n);
1566 if (graph->succs[n])
1567 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1569 if (!TEST_BIT (ti->visited, j))
1570 topo_visit (graph, ti, j);
1573 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1576 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1577 starting solution for y. */
1579 static void
1580 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1581 bitmap delta)
1583 unsigned int lhs = c->lhs.var;
1584 bool flag = false;
1585 bitmap sol = get_varinfo (lhs)->solution;
1586 unsigned int j;
1587 bitmap_iterator bi;
1588 HOST_WIDE_INT roffset = c->rhs.offset;
1590 /* Our IL does not allow this. */
1591 gcc_assert (c->lhs.offset == 0);
1593 /* If the solution of Y contains anything it is good enough to transfer
1594 this to the LHS. */
1595 if (bitmap_bit_p (delta, anything_id))
1597 flag |= bitmap_set_bit (sol, anything_id);
1598 goto done;
1601 /* If we do not know at which offset the rhs is dereferenced, compute
1602 the reachability set of DELTA, conservatively assuming it is
1603 dereferenced at all valid offsets. */
1604 if (roffset == UNKNOWN_OFFSET)
1606 solution_set_expand (delta, delta);
1607 /* No further offset processing is necessary. */
1608 roffset = 0;
1611 /* For each variable j in delta (Sol(y)), add
1612 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1613 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1615 varinfo_t v = get_varinfo (j);
1616 HOST_WIDE_INT fieldoffset = v->offset + roffset;
1617 unsigned int t;
1619 if (v->is_full_var)
1620 fieldoffset = v->offset;
1621 else if (roffset != 0)
1622 v = first_vi_for_offset (v, fieldoffset);
1623 /* If the access is outside of the variable we can ignore it. */
1624 if (!v)
1625 continue;
1629 t = find (v->id);
1631 /* Adding edges from the special vars is pointless.
1632 They don't have sets that can change. */
1633 if (get_varinfo (t)->is_special_var)
1634 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1635 /* Merging the solution from ESCAPED needlessly increases
1636 the set. Use ESCAPED as representative instead. */
1637 else if (v->id == escaped_id)
1638 flag |= bitmap_set_bit (sol, escaped_id);
1639 else if (v->may_have_pointers
1640 && add_graph_edge (graph, lhs, t))
1641 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1643 /* If the variable is not exactly at the requested offset
1644 we have to include the next one. */
1645 if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1646 || v->next == NULL)
1647 break;
1649 v = v->next;
1650 fieldoffset = v->offset;
1652 while (1);
1655 done:
1656 /* If the LHS solution changed, mark the var as changed. */
1657 if (flag)
1659 get_varinfo (lhs)->solution = sol;
1660 bitmap_set_bit (changed, lhs);
1664 /* Process a constraint C that represents *(x + off) = y using DELTA
1665 as the starting solution for x. */
1667 static void
1668 do_ds_constraint (constraint_t c, bitmap delta)
1670 unsigned int rhs = c->rhs.var;
1671 bitmap sol = get_varinfo (rhs)->solution;
1672 unsigned int j;
1673 bitmap_iterator bi;
1674 HOST_WIDE_INT loff = c->lhs.offset;
1675 bool escaped_p = false;
1677 /* Our IL does not allow this. */
1678 gcc_assert (c->rhs.offset == 0);
1680 /* If the solution of y contains ANYTHING simply use the ANYTHING
1681 solution. This avoids needlessly increasing the points-to sets. */
1682 if (bitmap_bit_p (sol, anything_id))
1683 sol = get_varinfo (find (anything_id))->solution;
1685 /* If the solution for x contains ANYTHING we have to merge the
1686 solution of y into all pointer variables which we do via
1687 STOREDANYTHING. */
1688 if (bitmap_bit_p (delta, anything_id))
1690 unsigned t = find (storedanything_id);
1691 if (add_graph_edge (graph, t, rhs))
1693 if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1694 bitmap_set_bit (changed, t);
1696 return;
1699 /* If we do not know at which offset the lhs is dereferenced, compute
1700 the reachability set of DELTA, conservatively assuming it is
1701 dereferenced at all valid offsets. */
1702 if (loff == UNKNOWN_OFFSET)
1704 solution_set_expand (delta, delta);
1705 loff = 0;
1708 /* For each member j of delta (Sol(x)), add an edge from y to j and
1709 union Sol(y) into Sol(j) */
1710 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1712 varinfo_t v = get_varinfo (j);
1713 unsigned int t;
1714 HOST_WIDE_INT fieldoffset = v->offset + loff;
1716 if (v->is_full_var)
1717 fieldoffset = v->offset;
1718 else if (loff != 0)
1719 v = first_vi_for_offset (v, fieldoffset);
1720 /* If the access is outside of the variable we can ignore it. */
1721 if (!v)
1722 continue;
1726 if (v->may_have_pointers)
1728 /* If v is a global variable then this is an escape point. */
1729 if (v->is_global_var
1730 && !escaped_p)
1732 t = find (escaped_id);
1733 if (add_graph_edge (graph, t, rhs)
1734 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1735 bitmap_set_bit (changed, t);
1736 /* Enough to let rhs escape once. */
1737 escaped_p = true;
1740 if (v->is_special_var)
1741 break;
1743 t = find (v->id);
1744 if (add_graph_edge (graph, t, rhs)
1745 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1746 bitmap_set_bit (changed, t);
1749 /* If the variable is not exactly at the requested offset
1750 we have to include the next one. */
1751 if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1752 || v->next == NULL)
1753 break;
1755 v = v->next;
1756 fieldoffset = v->offset;
1758 while (1);
1762 /* Handle a non-simple constraint (simple meaning it requires no
1763 iteration), IE *x = &y, x = *y, *x = y, or x = y with offsets involved. */
1765 static void
1766 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1768 if (c->lhs.type == DEREF)
1770 if (c->rhs.type == ADDRESSOF)
1772 gcc_unreachable();
1774 else
1776 /* *x = y */
1777 do_ds_constraint (c, delta);
1780 else if (c->rhs.type == DEREF)
1782 /* x = *y */
1783 if (!(get_varinfo (c->lhs.var)->is_special_var))
1784 do_sd_constraint (graph, c, delta);
1786 else
1788 bitmap tmp;
1789 bitmap solution;
1790 bool flag = false;
1792 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1793 solution = get_varinfo (c->rhs.var)->solution;
1794 tmp = get_varinfo (c->lhs.var)->solution;
1796 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1798 if (flag)
1800 get_varinfo (c->lhs.var)->solution = tmp;
1801 bitmap_set_bit (changed, c->lhs.var);
1806 /* Initialize and return a new SCC info structure. */
1808 static struct scc_info *
1809 init_scc_info (size_t size)
1811 struct scc_info *si = XNEW (struct scc_info);
1812 size_t i;
1814 si->current_index = 0;
1815 si->visited = sbitmap_alloc (size);
1816 sbitmap_zero (si->visited);
1817 si->deleted = sbitmap_alloc (size);
1818 sbitmap_zero (si->deleted);
1819 si->node_mapping = XNEWVEC (unsigned int, size);
1820 si->dfs = XCNEWVEC (unsigned int, size);
1822 for (i = 0; i < size; i++)
1823 si->node_mapping[i] = i;
1825 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1826 return si;
1829 /* Free an SCC info structure pointed to by SI */
1831 static void
1832 free_scc_info (struct scc_info *si)
1834 sbitmap_free (si->visited);
1835 sbitmap_free (si->deleted);
1836 free (si->node_mapping);
1837 free (si->dfs);
1838 VEC_free (unsigned, heap, si->scc_stack);
1839 free (si);
1843 /* Find indirect cycles in GRAPH that occur, using strongly connected
1844 components, and note them in the indirect cycles map.
1846 This technique comes from Ben Hardekopf and Calvin Lin,
1847 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1848 Lines of Code", submitted to PLDI 2007. */
1850 static void
1851 find_indirect_cycles (constraint_graph_t graph)
1853 unsigned int i;
1854 unsigned int size = graph->size;
1855 struct scc_info *si = init_scc_info (size);
1857 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1858 if (!TEST_BIT (si->visited, i) && find (i) == i)
1859 scc_visit (graph, si, i);
1861 free_scc_info (si);
1864 /* Compute a topological ordering for GRAPH, and store the result in the
1865 topo_info structure TI. */
1867 static void
1868 compute_topo_order (constraint_graph_t graph,
1869 struct topo_info *ti)
1871 unsigned int i;
1872 unsigned int size = graph->size;
1874 for (i = 0; i != size; ++i)
1875 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1876 topo_visit (graph, ti, i);
1879 /* Structure used for hash value numbering of pointer equivalence
1880 classes. */
1882 typedef struct equiv_class_label
1884 hashval_t hashcode;
1885 unsigned int equivalence_class;
1886 bitmap labels;
1887 } *equiv_class_label_t;
1888 typedef const struct equiv_class_label *const_equiv_class_label_t;
1890 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1891 classes. */
1892 static htab_t pointer_equiv_class_table;
1894 /* A hashtable for mapping a bitmap of labels->location equivalence
1895 classes. */
1896 static htab_t location_equiv_class_table;
1898 /* Hash function for an equiv_class_label_t. */
1900 static hashval_t
1901 equiv_class_label_hash (const void *p)
1903 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1904 return ecl->hashcode;
1907 /* Equality function for two equiv_class_label_t's. */
1909 static int
1910 equiv_class_label_eq (const void *p1, const void *p2)
1912 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1913 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1914 return (eql1->hashcode == eql2->hashcode
1915 && bitmap_equal_p (eql1->labels, eql2->labels));
1918 /* Lookup an equivalence class in TABLE by the bitmap of LABELS it
1919 contains. Sets *REF_LABELS to the bitmap LABELS is equivalent to. */
1921 static unsigned int
1922 equiv_class_lookup (htab_t table, bitmap labels, bitmap *ref_labels)
1924 void **slot;
1925 struct equiv_class_label ecl;
1927 ecl.labels = labels;
1928 ecl.hashcode = bitmap_hash (labels);
1930 slot = htab_find_slot_with_hash (table, &ecl,
1931 ecl.hashcode, NO_INSERT);
1932 if (!slot)
1934 if (ref_labels)
1935 *ref_labels = NULL;
1936 return 0;
1938 else
1940 equiv_class_label_t ec = (equiv_class_label_t) *slot;
1941 if (ref_labels)
1942 *ref_labels = ec->labels;
1943 return ec->equivalence_class;
1948 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1949 to TABLE. */
1951 static void
1952 equiv_class_add (htab_t table, unsigned int equivalence_class,
1953 bitmap labels)
1955 void **slot;
1956 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1958 ecl->labels = labels;
1959 ecl->equivalence_class = equivalence_class;
1960 ecl->hashcode = bitmap_hash (labels);
1962 slot = htab_find_slot_with_hash (table, ecl,
1963 ecl->hashcode, INSERT);
1964 gcc_assert (!*slot);
1965 *slot = (void *) ecl;
1968 /* Perform offline variable substitution.
1970 This is a worst case quadratic time way of identifying variables
1971 that must have equivalent points-to sets, including those caused by
1972 static cycles, and single entry subgraphs, in the constraint graph.
1974 The technique is described in "Exploiting Pointer and Location
1975 Equivalence to Optimize Pointer Analysis", in the 14th International
1976 Static Analysis Symposium (SAS), August 2007. It is known as the
1977 "HU" algorithm, and is equivalent to value numbering the collapsed
1978 constraint graph including evaluating unions.
1980 The general method of finding equivalence classes is as follows:
1981 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1982 Initialize all non-REF nodes to be direct nodes.
1983 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1984 variable}
1985 For each constraint containing the dereference, we also do the same
1986 thing.
1988 We then compute SCC's in the graph and unify nodes in the same SCC,
1989 including pts sets.
1991 For each non-collapsed node x:
1992 Visit all unvisited explicit incoming edges.
1993 Ignoring all non-pointers, set pts(x) = Union of pts(y) for y
1994 where y->x.
1995 Lookup the equivalence class for pts(x).
1996 If we found one, equivalence_class(x) = found class.
1997 Otherwise, equivalence_class(x) = new class, and new_class is
1998 added to the lookup table.
2000 All direct nodes with the same equivalence class can be replaced
2001 with a single representative node.
2002 All unlabeled nodes (label == 0) are not pointers and all edges
2003 involving them can be eliminated.
2004 We perform these optimizations during rewrite_constraints.
2006 In addition to pointer equivalence class finding, we also perform
2007 location equivalence class finding. This is the set of variables
2008 that always appear together in points-to sets. We use this to
2009 compress the size of the points-to sets. */
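/* Illustrative sketch, not part of GCC and guarded out: a toy version of the
   HU labelling described above.  Points-to sets are bit masks of "fresh"
   variables, copy edges are processed in topological order, and two nodes
   receive the same pointer-equivalence label iff their sets are equal, so
   they may later be collapsed to one representative.  Node ids and
   constraints are invented for the sketch.  */
#if 0
#include <stdio.h>

#define N 4

int
main (void)
{
  /* Toy constraints over nodes a=0, b=1, c=2, d=3:
       a = &x   (gives a the fresh variable f1)
       d = &y   (gives d the fresh variable f2)
       b = a    (copy edge a -> b)
       c = a    (copy edge a -> c)  */
  unsigned long pts[N] = { 0x1, 0, 0, 0x2 };
  int edge_from[2] = { 0, 0 };
  int edge_to[2] = { 1, 2 };
  unsigned int label[N];
  unsigned int next_label = 1;
  int i, j;

  /* Propagate along copy edges (already in topological order here).  */
  for (i = 0; i < 2; i++)
    pts[edge_to[i]] |= pts[edge_from[i]];

  /* Value-number the points-to sets: equal sets share a label.  */
  for (i = 0; i < N; i++)
    {
      label[i] = 0;
      for (j = 0; j < i; j++)
        if (pts[j] == pts[i])
          {
            label[i] = label[j];
            break;
          }
      if (label[i] == 0)
        label[i] = next_label++;
      printf ("node %d: pts = 0x%lx, label = %u\n", i, pts[i], label[i]);
    }
  /* a, b and c end up with the same label and can be collapsed; d gets a
     label of its own.  A label of 0 would mean "not a pointer".  */
  return 0;
}
#endif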
2011 /* Current maximum pointer equivalence class id. */
2012 static int pointer_equiv_class;
2014 /* Current maximum location equivalence class id. */
2015 static int location_equiv_class;
2017 /* Recursive routine to find strongly connected components in GRAPH,
2018 and label its nodes with DFS numbers. */
2020 static void
2021 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2023 unsigned int i;
2024 bitmap_iterator bi;
2025 unsigned int my_dfs;
2027 gcc_assert (si->node_mapping[n] == n);
2028 SET_BIT (si->visited, n);
2029 si->dfs[n] = si->current_index ++;
2030 my_dfs = si->dfs[n];
2032 /* Visit all the explicit predecessors. */
2033 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2035 unsigned int w = si->node_mapping[i];
2037 if (TEST_BIT (si->deleted, w))
2038 continue;
2040 if (!TEST_BIT (si->visited, w))
2041 condense_visit (graph, si, w);
2043 unsigned int t = si->node_mapping[w];
2044 unsigned int nnode = si->node_mapping[n];
2045 gcc_assert (nnode == n);
2047 if (si->dfs[t] < si->dfs[nnode])
2048 si->dfs[n] = si->dfs[t];
2052 /* Visit all the implicit predecessors. */
2053 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2055 unsigned int w = si->node_mapping[i];
2057 if (TEST_BIT (si->deleted, w))
2058 continue;
2060 if (!TEST_BIT (si->visited, w))
2061 condense_visit (graph, si, w);
2063 unsigned int t = si->node_mapping[w];
2064 unsigned int nnode = si->node_mapping[n];
2065 gcc_assert (nnode == n);
2067 if (si->dfs[t] < si->dfs[nnode])
2068 si->dfs[n] = si->dfs[t];
2072 /* See if any components have been identified. */
2073 if (si->dfs[n] == my_dfs)
2075 while (VEC_length (unsigned, si->scc_stack) != 0
2076 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
2078 unsigned int w = VEC_pop (unsigned, si->scc_stack);
2079 si->node_mapping[w] = n;
2081 if (!TEST_BIT (graph->direct_nodes, w))
2082 RESET_BIT (graph->direct_nodes, n);
2084 /* Unify our nodes. */
2085 if (graph->preds[w])
2087 if (!graph->preds[n])
2088 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2089 bitmap_ior_into (graph->preds[n], graph->preds[w]);
2091 if (graph->implicit_preds[w])
2093 if (!graph->implicit_preds[n])
2094 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2095 bitmap_ior_into (graph->implicit_preds[n],
2096 graph->implicit_preds[w]);
2098 if (graph->points_to[w])
2100 if (!graph->points_to[n])
2101 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2102 bitmap_ior_into (graph->points_to[n],
2103 graph->points_to[w]);
2106 SET_BIT (si->deleted, n);
2108 else
2109 VEC_safe_push (unsigned, heap, si->scc_stack, n);
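/* Illustrative sketch, not part of GCC and guarded out: the classic Tarjan
   SCC scheme that condense_visit above is a variant of.  Nodes that end up
   in the same strongly connected component are mapped to a single
   representative, much as node_mapping does for the constraint graph.  The
   graph below is invented for the sketch.  */
#if 0
#include <stdio.h>

#define N 4

static int adj[N][N] = {      /* 0 -> 1 -> 2 -> 0 is a cycle; 2 -> 3 leaves it.  */
  { 0, 1, 0, 0 },
  { 0, 0, 1, 0 },
  { 1, 0, 0, 1 },
  { 0, 0, 0, 0 }
};
static int dfs[N], low[N], onstack[N], rep[N], visited[N];
static int stack[N], sp, counter;

static void
visit (int n)
{
  int w;
  visited[n] = 1;
  dfs[n] = low[n] = counter++;
  stack[sp++] = n;
  onstack[n] = 1;
  for (w = 0; w < N; w++)
    if (adj[n][w])
      {
        if (!visited[w])
          {
            visit (w);
            if (low[w] < low[n])
              low[n] = low[w];
          }
        else if (onstack[w] && dfs[w] < low[n])
          low[n] = dfs[w];
      }
  if (low[n] == dfs[n])
    {
      /* n is the root of an SCC; pop its members and map them to n.  */
      do
        {
          w = stack[--sp];
          onstack[w] = 0;
          rep[w] = n;
        }
      while (w != n);
    }
}

int
main (void)
{
  int i;
  for (i = 0; i < N; i++)
    if (!visited[i])
      visit (i);
  for (i = 0; i < N; i++)
    printf ("node %d -> representative %d\n", i, rep[i]);
  /* Nodes 0, 1 and 2 share one representative; node 3 is its own.  */
  return 0;
}
#endif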
2112 /* Label pointer equivalences. */
2114 static void
2115 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2117 unsigned int i;
2118 bitmap_iterator bi;
2119 SET_BIT (si->visited, n);
2121 if (!graph->points_to[n])
2122 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2124 /* Label and union our incoming edges' points-to sets. */
2125 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2127 unsigned int w = si->node_mapping[i];
2128 if (!TEST_BIT (si->visited, w))
2129 label_visit (graph, si, w);
2131 /* Skip unused edges */
2132 if (w == n || graph->pointer_label[w] == 0)
2133 continue;
2135 if (graph->points_to[w])
2136 bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
2138 /* Indirect nodes get fresh variables. */
2139 if (!TEST_BIT (graph->direct_nodes, n))
2140 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2142 if (!bitmap_empty_p (graph->points_to[n]))
2144 bitmap ref_points_to;
2145 unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
2146 graph->points_to[n],
2147 &ref_points_to);
2148 if (!label)
2150 label = pointer_equiv_class++;
2151 equiv_class_add (pointer_equiv_class_table,
2152 label, graph->points_to[n]);
2154 else
2156 BITMAP_FREE (graph->points_to[n]);
2157 graph->points_to[n] = ref_points_to;
2159 graph->pointer_label[n] = label;
2163 /* Perform offline variable substitution, discovering equivalence
2164 classes, and eliminating non-pointer variables. */
2166 static struct scc_info *
2167 perform_var_substitution (constraint_graph_t graph)
2169 unsigned int i;
2170 unsigned int size = graph->size;
2171 struct scc_info *si = init_scc_info (size);
2173 bitmap_obstack_initialize (&iteration_obstack);
2174 pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
2175 equiv_class_label_eq, free);
2176 location_equiv_class_table = htab_create (511, equiv_class_label_hash,
2177 equiv_class_label_eq, free);
2178 pointer_equiv_class = 1;
2179 location_equiv_class = 1;
2181 /* Condense the nodes, which means to find SCC's, count incoming
2182 predecessors, and unite nodes in SCC's. */
2183 for (i = 0; i < FIRST_REF_NODE; i++)
2184 if (!TEST_BIT (si->visited, si->node_mapping[i]))
2185 condense_visit (graph, si, si->node_mapping[i]);
2187 sbitmap_zero (si->visited);
2188 /* Actually label the nodes for pointer equivalences. */
2189 for (i = 0; i < FIRST_REF_NODE; i++)
2190 if (!TEST_BIT (si->visited, si->node_mapping[i]))
2191 label_visit (graph, si, si->node_mapping[i]);
2193 /* Calculate location equivalence labels. */
2194 for (i = 0; i < FIRST_REF_NODE; i++)
2196 bitmap pointed_by;
2197 bitmap_iterator bi;
2198 unsigned int j;
2199 unsigned int label;
2201 if (!graph->pointed_by[i])
2202 continue;
2203 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2205 /* Translate the pointed-by mapping for pointer equivalence
2206 labels. */
2207 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2209 bitmap_set_bit (pointed_by,
2210 graph->pointer_label[si->node_mapping[j]]);
2212 /* The original pointed_by is now dead. */
2213 BITMAP_FREE (graph->pointed_by[i]);
2215 /* Look up the location equivalence label if one exists, or make
2216 one otherwise. */
2217 label = equiv_class_lookup (location_equiv_class_table,
2218 pointed_by, NULL);
2219 if (label == 0)
2221 label = location_equiv_class++;
2222 equiv_class_add (location_equiv_class_table,
2223 label, pointed_by);
2225 else
2227 if (dump_file && (dump_flags & TDF_DETAILS))
2228 fprintf (dump_file, "Found location equivalence for node %s\n",
2229 get_varinfo (i)->name);
2230 BITMAP_FREE (pointed_by);
2232 graph->loc_label[i] = label;
2236 if (dump_file && (dump_flags & TDF_DETAILS))
2237 for (i = 0; i < FIRST_REF_NODE; i++)
2239 bool direct_node = TEST_BIT (graph->direct_nodes, i);
2240 fprintf (dump_file,
2241 "Equivalence classes for %s node id %d:%s are pointer: %d"
2242 ", location:%d\n",
2243 direct_node ? "Direct node" : "Indirect node", i,
2244 get_varinfo (i)->name,
2245 graph->pointer_label[si->node_mapping[i]],
2246 graph->loc_label[si->node_mapping[i]]);
2249 /* Quickly eliminate our non-pointer variables. */
2251 for (i = 0; i < FIRST_REF_NODE; i++)
2253 unsigned int node = si->node_mapping[i];
2255 if (graph->pointer_label[node] == 0)
2257 if (dump_file && (dump_flags & TDF_DETAILS))
2258 fprintf (dump_file,
2259 "%s is a non-pointer variable, eliminating edges.\n",
2260 get_varinfo (node)->name);
2261 stats.nonpointer_vars++;
2262 clear_edges_for_node (graph, node);
2266 return si;
2269 /* Free information that was only necessary for variable
2270 substitution. */
2272 static void
2273 free_var_substitution_info (struct scc_info *si)
2275 free_scc_info (si);
2276 free (graph->pointer_label);
2277 free (graph->loc_label);
2278 free (graph->pointed_by);
2279 free (graph->points_to);
2280 free (graph->eq_rep);
2281 sbitmap_free (graph->direct_nodes);
2282 htab_delete (pointer_equiv_class_table);
2283 htab_delete (location_equiv_class_table);
2284 bitmap_obstack_release (&iteration_obstack);
2287 /* Return an existing node that is equivalent to NODE, which has
2288 equivalence class LABEL, if one exists. Return NODE otherwise. */
2290 static unsigned int
2291 find_equivalent_node (constraint_graph_t graph,
2292 unsigned int node, unsigned int label)
2294 /* If the address version of this variable is unused, we can
2295 substitute it for anything else with the same label.
2296 Otherwise, we know the pointers are equivalent, but not the
2297 locations, and we can unite them later. */
2299 if (!bitmap_bit_p (graph->address_taken, node))
2301 gcc_assert (label < graph->size);
2303 if (graph->eq_rep[label] != -1)
2305 /* Unify the two variables since we know they are equivalent. */
2306 if (unite (graph->eq_rep[label], node))
2307 unify_nodes (graph, graph->eq_rep[label], node, false);
2308 return graph->eq_rep[label];
2310 else
2312 graph->eq_rep[label] = node;
2313 graph->pe_rep[label] = node;
2316 else
2318 gcc_assert (label < graph->size);
2319 graph->pe[node] = label;
2320 if (graph->pe_rep[label] == -1)
2321 graph->pe_rep[label] = node;
2324 return node;
2327 /* Unite pointer equivalent but not location equivalent nodes in
2328 GRAPH. This may only be performed once variable substitution is
2329 finished. */
2331 static void
2332 unite_pointer_equivalences (constraint_graph_t graph)
2334 unsigned int i;
2336 /* Go through the pointer equivalences and unite them to their
2337 representative, if they aren't already. */
2338 for (i = 0; i < FIRST_REF_NODE; i++)
2340 unsigned int label = graph->pe[i];
2341 if (label)
2343 int label_rep = graph->pe_rep[label];
2345 if (label_rep == -1)
2346 continue;
2348 label_rep = find (label_rep);
2349 if (label_rep >= 0 && unite (label_rep, find (i)))
2350 unify_nodes (graph, label_rep, i, false);
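/* Illustrative sketch, not part of GCC and guarded out: the find/unite
   discipline used throughout this file, reduced to a minimal union-find
   with path compression.  After unite_reps (a, b) both variables share one
   representative and therefore one solution set.  Names are invented for
   the sketch.  */
#if 0
#include <stdio.h>

#define N 8

static int parent[N];

static int
find_rep (int x)
{
  if (parent[x] != x)
    parent[x] = find_rep (parent[x]);   /* Path compression.  */
  return parent[x];
}

/* Merge the sets of A and B; return 1 if they were distinct.  */
static int
unite_reps (int a, int b)
{
  a = find_rep (a);
  b = find_rep (b);
  if (a == b)
    return 0;
  parent[b] = a;
  return 1;
}

int
main (void)
{
  int i;
  for (i = 0; i < N; i++)
    parent[i] = i;
  unite_reps (2, 5);
  unite_reps (5, 7);
  printf ("rep of 7 is %d, same as rep of 2: %s\n",
          find_rep (7), find_rep (7) == find_rep (2) ? "yes" : "no");
  return 0;
}
#endif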
2355 /* Move complex constraints to the GRAPH nodes they belong to. */
2357 static void
2358 move_complex_constraints (constraint_graph_t graph)
2360 int i;
2361 constraint_t c;
2363 FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
2365 if (c)
2367 struct constraint_expr lhs = c->lhs;
2368 struct constraint_expr rhs = c->rhs;
2370 if (lhs.type == DEREF)
2372 insert_into_complex (graph, lhs.var, c);
2374 else if (rhs.type == DEREF)
2376 if (!(get_varinfo (lhs.var)->is_special_var))
2377 insert_into_complex (graph, rhs.var, c);
2379 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2380 && (lhs.offset != 0 || rhs.offset != 0))
2382 insert_into_complex (graph, rhs.var, c);
2389 /* Optimize and rewrite complex constraints while performing
2390 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2391 result of perform_var_substitution. */
2393 static void
2394 rewrite_constraints (constraint_graph_t graph,
2395 struct scc_info *si)
2397 int i;
2398 unsigned int j;
2399 constraint_t c;
2401 for (j = 0; j < graph->size; j++)
2402 gcc_assert (find (j) == j);
2404 FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
2406 struct constraint_expr lhs = c->lhs;
2407 struct constraint_expr rhs = c->rhs;
2408 unsigned int lhsvar = find (lhs.var);
2409 unsigned int rhsvar = find (rhs.var);
2410 unsigned int lhsnode, rhsnode;
2411 unsigned int lhslabel, rhslabel;
2413 lhsnode = si->node_mapping[lhsvar];
2414 rhsnode = si->node_mapping[rhsvar];
2415 lhslabel = graph->pointer_label[lhsnode];
2416 rhslabel = graph->pointer_label[rhsnode];
2418 /* See if it is really a non-pointer variable, and if so, ignore
2419 the constraint. */
2420 if (lhslabel == 0)
2422 if (dump_file && (dump_flags & TDF_DETAILS))
2425 fprintf (dump_file, "%s is a non-pointer variable, "
2426 "ignoring constraint:",
2427 get_varinfo (lhs.var)->name);
2428 dump_constraint (dump_file, c);
2429 fprintf (dump_file, "\n");
2431 VEC_replace (constraint_t, constraints, i, NULL);
2432 continue;
2435 if (rhslabel == 0)
2437 if (dump_file && (dump_flags & TDF_DETAILS))
2440 fprintf (dump_file, "%s is a non-pointer variable, "
2441 "ignoring constraint:",
2442 get_varinfo (rhs.var)->name);
2443 dump_constraint (dump_file, c);
2444 fprintf (dump_file, "\n");
2446 VEC_replace (constraint_t, constraints, i, NULL);
2447 continue;
2450 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2451 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2452 c->lhs.var = lhsvar;
2453 c->rhs.var = rhsvar;
2458 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2459 part of an SCC, false otherwise. */
2461 static bool
2462 eliminate_indirect_cycles (unsigned int node)
2464 if (graph->indirect_cycles[node] != -1
2465 && !bitmap_empty_p (get_varinfo (node)->solution))
2467 unsigned int i;
2468 VEC(unsigned,heap) *queue = NULL;
2469 int queuepos;
2470 unsigned int to = find (graph->indirect_cycles[node]);
2471 bitmap_iterator bi;
2473 /* We can't touch the solution set and call unify_nodes
2474 at the same time, because unify_nodes is going to do
2475 bitmap unions into it. */
2477 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2479 if (find (i) == i && i != to)
2481 if (unite (to, i))
2482 VEC_safe_push (unsigned, heap, queue, i);
2486 for (queuepos = 0;
2487 VEC_iterate (unsigned, queue, queuepos, i);
2488 queuepos++)
2490 unify_nodes (graph, to, i, true);
2492 VEC_free (unsigned, heap, queue);
2493 return true;
2495 return false;
2498 /* Solve the constraint graph GRAPH using our worklist solver.
2499 This is based on the PW* family of solvers from the "Efficient Field
2500 Sensitive Pointer Analysis for C" paper.
2501 It works by iterating over all the graph nodes, processing the complex
2502 constraints and propagating the copy constraints, until nothing changes
2503 anymore. This corresponds to steps 6-8 in the solving list given above. */
2505 static void
2506 solve_graph (constraint_graph_t graph)
2508 unsigned int size = graph->size;
2509 unsigned int i;
2510 bitmap pts;
2512 changed = BITMAP_ALLOC (NULL);
2514 /* Mark all initial non-collapsed nodes as changed. */
2515 for (i = 0; i < size; i++)
2517 varinfo_t ivi = get_varinfo (i);
2518 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2519 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2520 || VEC_length (constraint_t, graph->complex[i]) > 0))
2521 bitmap_set_bit (changed, i);
2524 /* Allocate a bitmap to be used to store the changed bits. */
2525 pts = BITMAP_ALLOC (&pta_obstack);
2527 while (!bitmap_empty_p (changed))
2529 unsigned int i;
2530 struct topo_info *ti = init_topo_info ();
2531 stats.iterations++;
2533 bitmap_obstack_initialize (&iteration_obstack);
2535 compute_topo_order (graph, ti);
2537 while (VEC_length (unsigned, ti->topo_order) != 0)
2540 i = VEC_pop (unsigned, ti->topo_order);
2542 /* If this variable is not a representative, skip it. */
2543 if (find (i) != i)
2544 continue;
2546 /* In certain indirect cycle cases, we may merge this
2547 variable to another. */
2548 if (eliminate_indirect_cycles (i) && find (i) != i)
2549 continue;
2551 /* If the node has changed, we need to process the
2552 complex constraints and outgoing edges again. */
2553 if (bitmap_clear_bit (changed, i))
2555 unsigned int j;
2556 constraint_t c;
2557 bitmap solution;
2558 VEC(constraint_t,heap) *complex = graph->complex[i];
2559 varinfo_t vi = get_varinfo (i);
2560 bool solution_empty;
2562 /* Compute the changed set of solution bits. */
2563 if (vi->oldsolution)
2564 bitmap_and_compl (pts, vi->solution, vi->oldsolution);
2565 else
2566 bitmap_copy (pts, vi->solution);
2568 if (bitmap_empty_p (pts))
2569 continue;
2571 if (vi->oldsolution)
2572 bitmap_ior_into (vi->oldsolution, pts);
2573 else
2575 vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
2576 bitmap_copy (vi->oldsolution, pts);
2579 solution = vi->solution;
2580 solution_empty = bitmap_empty_p (solution);
2582 /* Process the complex constraints */
2583 FOR_EACH_VEC_ELT (constraint_t, complex, j, c)
2585 /* XXX: This is going to unsort the constraints in
2586 some cases, which will occasionally add duplicate
2587 constraints during unification. This does not
2588 affect correctness. */
2589 c->lhs.var = find (c->lhs.var);
2590 c->rhs.var = find (c->rhs.var);
2592 /* The only complex constraint that can change our
2593 solution to non-empty, given an empty solution,
2594 is a constraint where the lhs side is receiving
2595 some set from elsewhere. */
2596 if (!solution_empty || c->lhs.type != DEREF)
2597 do_complex_constraint (graph, c, pts);
2600 solution_empty = bitmap_empty_p (solution);
2602 if (!solution_empty)
2604 bitmap_iterator bi;
2605 unsigned eff_escaped_id = find (escaped_id);
2607 /* Propagate solution to all successors. */
2608 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2609 0, j, bi)
2611 bitmap tmp;
2612 bool flag;
2614 unsigned int to = find (j);
2615 tmp = get_varinfo (to)->solution;
2616 flag = false;
2618 /* Don't try to propagate to ourselves. */
2619 if (to == i)
2620 continue;
2622 /* If we propagate from ESCAPED use ESCAPED as
2623 placeholder. */
2624 if (i == eff_escaped_id)
2625 flag = bitmap_set_bit (tmp, escaped_id);
2626 else
2627 flag = set_union_with_increment (tmp, pts, 0);
2629 if (flag)
2631 get_varinfo (to)->solution = tmp;
2632 bitmap_set_bit (changed, to);
2638 free_topo_info (ti);
2639 bitmap_obstack_release (&iteration_obstack);
2642 BITMAP_FREE (pts);
2643 BITMAP_FREE (changed);
2644 bitmap_obstack_release (&oldpta_obstack);
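/* Illustrative sketch, not part of GCC and guarded out: the core of the
   worklist propagation performed by solve_graph above, with points-to sets
   as plain bit masks and only simple copy edges.  A node is re-queued
   whenever its solution grows, and the loop stops once nothing changes.
   The graph and seeds are invented for the sketch.  */
#if 0
#include <stdio.h>

#define N 4

int
main (void)
{
  /* succ[i][j] != 0 means there is a copy edge i -> j, i.e. the solution
     of i flows into the solution of j.  Nodes 1 and 2 form a cycle.  */
  int succ[N][N] = {
    { 0, 1, 0, 0 },
    { 0, 0, 1, 0 },
    { 0, 1, 0, 1 },
    { 0, 0, 0, 0 }
  };
  unsigned long sol[N] = { 0x1, 0x2, 0, 0 };   /* Initial ADDRESSOF seeds.  */
  int changed[N] = { 1, 1, 1, 1 };
  int i, j, any = 1;

  while (any)
    {
      any = 0;
      for (i = 0; i < N; i++)
        {
          if (!changed[i])
            continue;
          changed[i] = 0;
          for (j = 0; j < N; j++)
            if (succ[i][j] && (sol[j] | sol[i]) != sol[j])
              {
                sol[j] |= sol[i];
                changed[j] = 1;
                any = 1;
              }
        }
    }
  for (i = 0; i < N; i++)
    printf ("sol(%d) = 0x%lx\n", i, sol[i]);
  /* Fixpoint: sol(0) = 0x1 and sol(1) = sol(2) = sol(3) = 0x3.  */
  return 0;
}
#endif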
2647 /* Map from trees to variable infos. */
2648 static struct pointer_map_t *vi_for_tree;
2651 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2653 static void
2654 insert_vi_for_tree (tree t, varinfo_t vi)
2656 void **slot = pointer_map_insert (vi_for_tree, t);
2657 gcc_assert (vi);
2658 gcc_assert (*slot == NULL);
2659 *slot = vi;
2662 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2663 exist in the map, return NULL; otherwise return the varinfo we found. */
2665 static varinfo_t
2666 lookup_vi_for_tree (tree t)
2668 void **slot = pointer_map_contains (vi_for_tree, t);
2669 if (slot == NULL)
2670 return NULL;
2672 return (varinfo_t) *slot;
2675 /* Return a printable name for DECL */
2677 static const char *
2678 alias_get_name (tree decl)
2680 const char *res;
2681 char *temp;
2682 int num_printed = 0;
2684 if (DECL_ASSEMBLER_NAME_SET_P (decl))
2685 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2686 else
2687 res = get_name (decl);
2688 if (res != NULL)
2689 return res;
2691 res = "NULL";
2692 if (!dump_file)
2693 return res;
2695 if (TREE_CODE (decl) == SSA_NAME)
2697 num_printed = asprintf (&temp, "%s_%u",
2698 alias_get_name (SSA_NAME_VAR (decl)),
2699 SSA_NAME_VERSION (decl));
2701 else if (DECL_P (decl))
2703 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2705 if (num_printed > 0)
2707 res = ggc_strdup (temp);
2708 free (temp);
2710 return res;
2713 /* Find the variable id for tree T in the map.
2714 If T doesn't exist in the map, create an entry for it and return it. */
2716 static varinfo_t
2717 get_vi_for_tree (tree t)
2719 void **slot = pointer_map_contains (vi_for_tree, t);
2720 if (slot == NULL)
2721 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2723 return (varinfo_t) *slot;
2726 /* Get a scalar constraint expression for a new temporary variable. */
2728 static struct constraint_expr
2729 new_scalar_tmp_constraint_exp (const char *name)
2731 struct constraint_expr tmp;
2732 varinfo_t vi;
2734 vi = new_var_info (NULL_TREE, name);
2735 vi->offset = 0;
2736 vi->size = -1;
2737 vi->fullsize = -1;
2738 vi->is_full_var = 1;
2740 tmp.var = vi->id;
2741 tmp.type = SCALAR;
2742 tmp.offset = 0;
2744 return tmp;
2747 /* Get a constraint expression vector from an SSA_VAR_P node.
2748 If address_p is true, the address of the result will be taken. */
2750 static void
2751 get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p)
2753 struct constraint_expr cexpr;
2754 varinfo_t vi;
2756 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2757 gcc_assert (SSA_VAR_P (t) || DECL_P (t));
2759 /* For parameters, get at the points-to set for the actual parm
2760 decl. */
2761 if (TREE_CODE (t) == SSA_NAME
2762 && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2763 || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
2764 && SSA_NAME_IS_DEFAULT_DEF (t))
2766 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2767 return;
2770 /* For global variables resort to the alias target. */
2771 if (TREE_CODE (t) == VAR_DECL
2772 && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
2774 struct varpool_node *node = varpool_get_node (t);
2775 if (node && node->alias)
2777 node = varpool_variable_node (node, NULL);
2778 t = node->decl;
2782 vi = get_vi_for_tree (t);
2783 cexpr.var = vi->id;
2784 cexpr.type = SCALAR;
2785 cexpr.offset = 0;
2786 /* If we determine the result is "anything", and we know this is readonly,
2787 say it points to readonly memory instead. */
2788 if (cexpr.var == anything_id && TREE_READONLY (t))
2790 gcc_unreachable ();
2791 cexpr.type = ADDRESSOF;
2792 cexpr.var = readonly_id;
2795 /* If we are not taking the address of the constraint expr, add all
2796 sub-fields of the variable as well. */
2797 if (!address_p
2798 && !vi->is_full_var)
2800 for (; vi; vi = vi->next)
2802 cexpr.var = vi->id;
2803 VEC_safe_push (ce_s, heap, *results, &cexpr);
2805 return;
2808 VEC_safe_push (ce_s, heap, *results, &cexpr);
2811 /* Process constraint T, performing various simplifications and then
2812 adding it to our list of overall constraints. */
2814 static void
2815 process_constraint (constraint_t t)
2817 struct constraint_expr rhs = t->rhs;
2818 struct constraint_expr lhs = t->lhs;
2820 gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
2821 gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
2823 /* If we didn't get any useful constraint from the lhs we get
2824 &ANYTHING as fallback from get_constraint_for. Deal with
2825 it here by turning it into *ANYTHING. */
2826 if (lhs.type == ADDRESSOF
2827 && lhs.var == anything_id)
2828 lhs.type = DEREF;
2830 /* ADDRESSOF on the lhs is invalid. */
2831 gcc_assert (lhs.type != ADDRESSOF);
2833 /* We shouldn't add constraints from things that cannot have pointers.
2834 It's not completely trivial to avoid in the callers, so do it here. */
2835 if (rhs.type != ADDRESSOF
2836 && !get_varinfo (rhs.var)->may_have_pointers)
2837 return;
2839 /* Likewise adding to the solution of a non-pointer var isn't useful. */
2840 if (!get_varinfo (lhs.var)->may_have_pointers)
2841 return;
2843 /* This can happen in our IR with things like n->a = *p */
2844 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2846 /* Split into tmp = *rhs, *lhs = tmp */
2847 struct constraint_expr tmplhs;
2848 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
2849 process_constraint (new_constraint (tmplhs, rhs));
2850 process_constraint (new_constraint (lhs, tmplhs));
2852 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2854 /* Split into tmp = &rhs, *lhs = tmp */
2855 struct constraint_expr tmplhs;
2856 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
2857 process_constraint (new_constraint (tmplhs, rhs));
2858 process_constraint (new_constraint (lhs, tmplhs));
2860 else
2862 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2863 VEC_safe_push (constraint_t, heap, constraints, t);
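/* Illustrative sketch, not part of GCC and guarded out: the splitting that
   process_constraint performs for constraints the solver cannot handle
   directly.  "*a = *b" becomes "tmp = *b; *a = tmp" and "*a = &b" becomes
   "tmp = &b; *a = tmp", so every stored constraint has at most one complex
   side.  Types and names below are invented for the sketch.  */
#if 0
#include <stdio.h>

enum ce_type { CE_SCALAR, CE_DEREF, CE_ADDRESSOF };
struct ce { enum ce_type type; const char *name; };

static char tmp_names[8][16];
static int tmp_counter;

static struct ce
new_tmp (void)
{
  struct ce t;
  snprintf (tmp_names[tmp_counter], sizeof tmp_names[0],
            "tmp%d", tmp_counter + 1);
  t.type = CE_SCALAR;
  t.name = tmp_names[tmp_counter++];
  return t;
}

static void
print_ce (struct ce e)
{
  printf ("%s%s",
          e.type == CE_DEREF ? "*" : e.type == CE_ADDRESSOF ? "&" : "",
          e.name);
}

/* Emit LHS = RHS, splitting through a temporary when the lhs is a
   dereference and the rhs is a dereference or an address.  */
static void
emit (struct ce lhs, struct ce rhs)
{
  if (lhs.type == CE_DEREF
      && (rhs.type == CE_DEREF || rhs.type == CE_ADDRESSOF))
    {
      struct ce tmp = new_tmp ();
      emit (tmp, rhs);   /* tmp = *b  or  tmp = &b  */
      emit (lhs, tmp);   /* *a = tmp  */
      return;
    }
  print_ce (lhs);
  printf (" = ");
  print_ce (rhs);
  printf ("\n");
}

int
main (void)
{
  struct ce a = { CE_DEREF, "a" }, b = { CE_DEREF, "b" };
  struct ce c = { CE_DEREF, "c" }, d = { CE_ADDRESSOF, "d" };
  emit (a, b);   /* Prints: tmp1 = *b then *a = tmp1.  */
  emit (c, d);   /* Prints: tmp2 = &d then *c = tmp2.  */
  return 0;
}
#endif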
2868 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2869 structure. */
2871 static HOST_WIDE_INT
2872 bitpos_of_field (const tree fdecl)
2874 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2875 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2876 return -1;
2878 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
2879 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2883 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
2884 resulting constraint expressions in *RESULTS. */
2886 static void
2887 get_constraint_for_ptr_offset (tree ptr, tree offset,
2888 VEC (ce_s, heap) **results)
2890 struct constraint_expr c;
2891 unsigned int j, n;
2892 HOST_WIDE_INT rhsoffset;
2894 /* If we do not do field-sensitive PTA, adding offsets to pointers
2895 does not change the points-to solution. */
2896 if (!use_field_sensitive)
2898 get_constraint_for_rhs (ptr, results);
2899 return;
2902 /* If the offset is not a non-negative integer constant that fits
2903 in a HOST_WIDE_INT, we have to fall back to a conservative
2904 solution which includes all sub-fields of all pointed-to
2905 variables of ptr. */
2906 if (offset == NULL_TREE
2907 || TREE_CODE (offset) != INTEGER_CST)
2908 rhsoffset = UNKNOWN_OFFSET;
2909 else
2911 /* Sign-extend the offset. */
2912 double_int soffset
2913 = double_int_sext (tree_to_double_int (offset),
2914 TYPE_PRECISION (TREE_TYPE (offset)));
2915 if (!double_int_fits_in_shwi_p (soffset))
2916 rhsoffset = UNKNOWN_OFFSET;
2917 else
2919 /* Make sure the bit-offset also fits. */
2920 HOST_WIDE_INT rhsunitoffset = soffset.low;
2921 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
2922 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
2923 rhsoffset = UNKNOWN_OFFSET;
2927 get_constraint_for_rhs (ptr, results);
2928 if (rhsoffset == 0)
2929 return;
2931 /* As we are eventually appending to the solution do not use
2932 VEC_iterate here. */
2933 n = VEC_length (ce_s, *results);
2934 for (j = 0; j < n; j++)
2936 varinfo_t curr;
2937 c = *VEC_index (ce_s, *results, j);
2938 curr = get_varinfo (c.var);
2940 if (c.type == ADDRESSOF
2941 /* If this varinfo represents a full variable just use it. */
2942 && curr->is_full_var)
2943 c.offset = 0;
2944 else if (c.type == ADDRESSOF
2945 /* If we do not know the offset add all subfields. */
2946 && rhsoffset == UNKNOWN_OFFSET)
2948 varinfo_t temp = lookup_vi_for_tree (curr->decl);
2951 struct constraint_expr c2;
2952 c2.var = temp->id;
2953 c2.type = ADDRESSOF;
2954 c2.offset = 0;
2955 if (c2.var != c.var)
2956 VEC_safe_push (ce_s, heap, *results, &c2);
2957 temp = temp->next;
2959 while (temp);
2961 else if (c.type == ADDRESSOF)
2963 varinfo_t temp;
2964 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
2966 /* Search the sub-field which overlaps with the
2967 pointed-to offset. If the result is outside of the variable
2968 we have to provide a conservative result, as the variable is
2969 still reachable from the resulting pointer (even though it
2970 technically cannot point to anything). The last and first
2971 sub-fields are such conservative results.
2972 ??? If we always had a sub-field for &object + 1 then
2973 we could represent this in a more precise way. */
2974 if (rhsoffset < 0
2975 && curr->offset < offset)
2976 offset = 0;
2977 temp = first_or_preceding_vi_for_offset (curr, offset);
2979 /* If the found variable is not exactly at the pointed to
2980 result, we have to include the next variable in the
2981 solution as well. Otherwise two increments by offset / 2
2982 do not result in the same or a conservative superset
2983 solution. */
2984 if (temp->offset != offset
2985 && temp->next != NULL)
2987 struct constraint_expr c2;
2988 c2.var = temp->next->id;
2989 c2.type = ADDRESSOF;
2990 c2.offset = 0;
2991 VEC_safe_push (ce_s, heap, *results, &c2);
2993 c.var = temp->id;
2994 c.offset = 0;
2996 else
2997 c.offset = rhsoffset;
2999 VEC_replace (ce_s, *results, j, &c);
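/* Illustrative sketch, not part of GCC and guarded out: the idea behind the
   overflow check used above when turning a byte offset into a bit offset.
   This standalone variant checks before multiplying, which avoids signed
   overflow while having the same effect: an offset that cannot be
   represented is treated as unknown.  Names and limits are invented for
   the sketch.  */
#if 0
#include <stdio.h>
#include <limits.h>

#define EXAMPLE_BITS_PER_UNIT 8
#define EXAMPLE_UNKNOWN_OFFSET LONG_MIN

static long
byte_to_bit_offset (long unitoffset)
{
  if (unitoffset > LONG_MAX / EXAMPLE_BITS_PER_UNIT
      || unitoffset < LONG_MIN / EXAMPLE_BITS_PER_UNIT)
    return EXAMPLE_UNKNOWN_OFFSET;
  return unitoffset * EXAMPLE_BITS_PER_UNIT;
}

int
main (void)
{
  long small = byte_to_bit_offset (16);
  long huge = byte_to_bit_offset (LONG_MAX / 2);
  printf ("16 bytes -> %ld bits\n", small);                       /* 128 */
  printf ("huge offset -> %s\n",
          huge == EXAMPLE_UNKNOWN_OFFSET ? "unknown" : "ok");     /* unknown */
  return 0;
}
#endif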
3004 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3005 If address_p is true, the address of the result will be taken.
3006 If lhs_p is true then the constraint expression is assumed to be used
3007 as the lhs. */
3009 static void
3010 get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
3011 bool address_p, bool lhs_p)
3013 tree orig_t = t;
3014 HOST_WIDE_INT bitsize = -1;
3015 HOST_WIDE_INT bitmaxsize = -1;
3016 HOST_WIDE_INT bitpos;
3017 tree forzero;
3018 struct constraint_expr *result;
3020 /* Some people like to do cute things like take the address of
3021 &0->a.b */
3022 forzero = t;
3023 while (handled_component_p (forzero)
3024 || INDIRECT_REF_P (forzero)
3025 || TREE_CODE (forzero) == MEM_REF)
3026 forzero = TREE_OPERAND (forzero, 0);
3028 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3030 struct constraint_expr temp;
3032 temp.offset = 0;
3033 temp.var = integer_id;
3034 temp.type = SCALAR;
3035 VEC_safe_push (ce_s, heap, *results, &temp);
3036 return;
3039 /* Handle type-punning through unions. If we are extracting a pointer
3040 from a union via a possibly type-punning access that pointer
3041 points to anything, similar to a conversion of an integer to
3042 a pointer. */
3043 if (!lhs_p)
3045 tree u;
3046 for (u = t;
3047 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
3048 u = TREE_OPERAND (u, 0))
3049 if (TREE_CODE (u) == COMPONENT_REF
3050 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
3052 struct constraint_expr temp;
3054 temp.offset = 0;
3055 temp.var = anything_id;
3056 temp.type = ADDRESSOF;
3057 VEC_safe_push (ce_s, heap, *results, &temp);
3058 return;
3062 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
3064 /* Pretend to take the address of the base, we'll take care of
3065 adding the required subset of sub-fields below. */
3066 get_constraint_for_1 (t, results, true, lhs_p);
3067 gcc_assert (VEC_length (ce_s, *results) == 1);
3068 result = VEC_last (ce_s, *results);
3070 if (result->type == SCALAR
3071 && get_varinfo (result->var)->is_full_var)
3072 /* For single-field vars do not bother about the offset. */
3073 result->offset = 0;
3074 else if (result->type == SCALAR)
3076 /* In languages like C, you can access one past the end of an
3077 array. You aren't allowed to dereference it, so we can
3078 ignore this constraint. When we handle pointer subtraction,
3079 we may have to do something cute here. */
3081 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result->var)->fullsize
3082 && bitmaxsize != 0)
3084 /* It's also not true that the constraint will actually start at the
3085 right offset; it may start in some padding. We only care about
3086 setting the constraint to the first actual field it touches, so
3087 walk to find it. */
3088 struct constraint_expr cexpr = *result;
3089 varinfo_t curr;
3090 VEC_pop (ce_s, *results);
3091 cexpr.offset = 0;
3092 for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
3094 if (ranges_overlap_p (curr->offset, curr->size,
3095 bitpos, bitmaxsize))
3097 cexpr.var = curr->id;
3098 VEC_safe_push (ce_s, heap, *results, &cexpr);
3099 if (address_p)
3100 break;
3103 /* If we are going to take the address of this field then
3104 to be able to compute reachability correctly add at least
3105 the last field of the variable. */
3106 if (address_p
3107 && VEC_length (ce_s, *results) == 0)
3109 curr = get_varinfo (cexpr.var);
3110 while (curr->next != NULL)
3111 curr = curr->next;
3112 cexpr.var = curr->id;
3113 VEC_safe_push (ce_s, heap, *results, &cexpr);
3115 else if (VEC_length (ce_s, *results) == 0)
3116 /* Assert that we found *some* field there. The user couldn't be
3117 accessing *only* padding. */
3118 /* Still the user could access one past the end of an array
3119 embedded in a struct resulting in accessing *only* padding. */
3120 /* Or accessing only padding via type-punning to a type
3121 that has a field just in padding space. */
3123 cexpr.type = SCALAR;
3124 cexpr.var = anything_id;
3125 cexpr.offset = 0;
3126 VEC_safe_push (ce_s, heap, *results, &cexpr);
3129 else if (bitmaxsize == 0)
3131 if (dump_file && (dump_flags & TDF_DETAILS))
3132 fprintf (dump_file, "Access to zero-sized part of variable, "
3133 "ignoring\n");
3135 else
3136 if (dump_file && (dump_flags & TDF_DETAILS))
3137 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3139 else if (result->type == DEREF)
3141 /* If we do not know exactly where the access goes, say so. Note
3142 that only for non-structure accesses do we know that we access
3143 at most one subfield of any variable. */
3144 if (bitpos == -1
3145 || bitsize != bitmaxsize
3146 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3147 || result->offset == UNKNOWN_OFFSET)
3148 result->offset = UNKNOWN_OFFSET;
3149 else
3150 result->offset += bitpos;
3152 else if (result->type == ADDRESSOF)
3154 /* We can end up here for component references on a
3155 VIEW_CONVERT_EXPR <>(&foobar). */
3156 result->type = SCALAR;
3157 result->var = anything_id;
3158 result->offset = 0;
3160 else
3161 gcc_unreachable ();
3165 /* Dereference the constraint expression CONS, and return the result.
3166 DEREF (ADDRESSOF) = SCALAR
3167 DEREF (SCALAR) = DEREF
3168 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3169 This is needed so that we can handle dereferencing DEREF constraints. */
3171 static void
3172 do_deref (VEC (ce_s, heap) **constraints)
3174 struct constraint_expr *c;
3175 unsigned int i = 0;
3177 FOR_EACH_VEC_ELT (ce_s, *constraints, i, c)
3179 if (c->type == SCALAR)
3180 c->type = DEREF;
3181 else if (c->type == ADDRESSOF)
3182 c->type = SCALAR;
3183 else if (c->type == DEREF)
3185 struct constraint_expr tmplhs;
3186 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3187 process_constraint (new_constraint (tmplhs, *c));
3188 c->var = tmplhs.var;
3190 else
3191 gcc_unreachable ();
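/* Illustrative sketch, not part of GCC and guarded out: the rewrite rules
   implemented by do_deref, applied to a single constraint expression.  The
   DEREF-of-DEREF case is only noted in a comment because, as above, it
   needs a fresh temporary constraint.  The enum and names are invented for
   the sketch.  */
#if 0
#include <stdio.h>

enum ce_type { CE_SCALAR, CE_DEREF, CE_ADDRESSOF };

static const char *names[] = { "x", "*x", "&x" };

/* Dereference E once: DEREF (&x) = x, DEREF (x) = *x.
   DEREF (*x) would become "tmp = *x; result = *tmp".  */
static enum ce_type
deref_once (enum ce_type e)
{
  if (e == CE_ADDRESSOF)
    return CE_SCALAR;
  if (e == CE_SCALAR)
    return CE_DEREF;
  return CE_DEREF;   /* Placeholder for the temporary-based case.  */
}

int
main (void)
{
  printf ("DEREF (%s) -> %s\n",
          names[CE_ADDRESSOF], names[deref_once (CE_ADDRESSOF)]);
  printf ("DEREF (%s) -> %s\n",
          names[CE_SCALAR], names[deref_once (CE_SCALAR)]);
  return 0;
}
#endif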
3195 /* Given a tree T, return the constraint expression for taking the
3196 address of it. */
3198 static void
3199 get_constraint_for_address_of (tree t, VEC (ce_s, heap) **results)
3201 struct constraint_expr *c;
3202 unsigned int i;
3204 get_constraint_for_1 (t, results, true, true);
3206 FOR_EACH_VEC_ELT (ce_s, *results, i, c)
3208 if (c->type == DEREF)
3209 c->type = SCALAR;
3210 else
3211 c->type = ADDRESSOF;
3215 /* Given a tree T, return the constraint expression for it. */
3217 static void
3218 get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p,
3219 bool lhs_p)
3221 struct constraint_expr temp;
3223 /* x = integer is all glommed to a single variable, which doesn't
3224 point to anything by itself. That is, of course, unless it is an
3225 integer constant being treated as a pointer, in which case, we
3226 will return that this is really the addressof anything. This
3227 happens below, since it will fall into the default case. The only
3228 case we know something about an integer treated like a pointer is
3229 when it is the NULL pointer, and then we just say it points to
3230 NULL.
3232 Do not do that if -fno-delete-null-pointer-checks though, because
3233 in that case *NULL does not fail, so it _should_ alias *anything.
3234 It is not worth adding a new option or renaming the existing one,
3235 since this case is relatively obscure. */
3236 if ((TREE_CODE (t) == INTEGER_CST
3237 && integer_zerop (t))
3238 /* The only valid CONSTRUCTORs in gimple with pointer typed
3239 elements are zero-initializer. But in IPA mode we also
3240 process global initializers, so verify at least. */
3241 || (TREE_CODE (t) == CONSTRUCTOR
3242 && CONSTRUCTOR_NELTS (t) == 0))
3244 if (flag_delete_null_pointer_checks)
3245 temp.var = nothing_id;
3246 else
3247 temp.var = nonlocal_id;
3248 temp.type = ADDRESSOF;
3249 temp.offset = 0;
3250 VEC_safe_push (ce_s, heap, *results, &temp);
3251 return;
3254 /* String constants are read-only. */
3255 if (TREE_CODE (t) == STRING_CST)
3257 temp.var = readonly_id;
3258 temp.type = SCALAR;
3259 temp.offset = 0;
3260 VEC_safe_push (ce_s, heap, *results, &temp);
3261 return;
3264 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3266 case tcc_expression:
3268 switch (TREE_CODE (t))
3270 case ADDR_EXPR:
3271 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3272 return;
3273 default:;
3275 break;
3277 case tcc_reference:
3279 switch (TREE_CODE (t))
3281 case MEM_REF:
3283 struct constraint_expr cs;
3284 varinfo_t vi, curr;
3285 get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3286 TREE_OPERAND (t, 1), results);
3287 do_deref (results);
3289 /* If we are not taking the address then make sure to process
3290 all subvariables we might access. */
3291 if (address_p)
3292 return;
3294 cs = *VEC_last (ce_s, *results);
3295 if (cs.type == DEREF
3296 && type_can_have_subvars (TREE_TYPE (t)))
3298 /* For dereferences this means we have to defer it
3299 to solving time. */
3300 VEC_last (ce_s, *results)->offset = UNKNOWN_OFFSET;
3301 return;
3303 if (cs.type != SCALAR)
3304 return;
3306 vi = get_varinfo (cs.var);
3307 curr = vi->next;
3308 if (!vi->is_full_var
3309 && curr)
3311 unsigned HOST_WIDE_INT size;
3312 if (host_integerp (TYPE_SIZE (TREE_TYPE (t)), 1))
3313 size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t)));
3314 else
3315 size = -1;
3316 for (; curr; curr = curr->next)
3318 if (curr->offset - vi->offset < size)
3320 cs.var = curr->id;
3321 VEC_safe_push (ce_s, heap, *results, &cs);
3323 else
3324 break;
3327 return;
3329 case ARRAY_REF:
3330 case ARRAY_RANGE_REF:
3331 case COMPONENT_REF:
3332 get_constraint_for_component_ref (t, results, address_p, lhs_p);
3333 return;
3334 case VIEW_CONVERT_EXPR:
3335 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
3336 lhs_p);
3337 return;
3338 /* We are missing handling for TARGET_MEM_REF here. */
3339 default:;
3341 break;
3343 case tcc_exceptional:
3345 switch (TREE_CODE (t))
3347 case SSA_NAME:
3349 get_constraint_for_ssa_var (t, results, address_p);
3350 return;
3352 case CONSTRUCTOR:
3354 unsigned int i;
3355 tree val;
3356 VEC (ce_s, heap) *tmp = NULL;
3357 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3359 struct constraint_expr *rhsp;
3360 unsigned j;
3361 get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3362 FOR_EACH_VEC_ELT (ce_s, tmp, j, rhsp)
3363 VEC_safe_push (ce_s, heap, *results, rhsp);
3364 VEC_truncate (ce_s, tmp, 0);
3366 VEC_free (ce_s, heap, tmp);
3367 /* We do not know whether the constructor was complete,
3368 so technically we have to add &NOTHING or &ANYTHING
3369 like we do for an empty constructor as well. */
3370 return;
3372 default:;
3374 break;
3376 case tcc_declaration:
3378 get_constraint_for_ssa_var (t, results, address_p);
3379 return;
3381 case tcc_constant:
3383 /* We cannot refer to automatic variables through constants. */
3384 temp.type = ADDRESSOF;
3385 temp.var = nonlocal_id;
3386 temp.offset = 0;
3387 VEC_safe_push (ce_s, heap, *results, &temp);
3388 return;
3390 default:;
3393 /* The default fallback is a constraint from anything. */
3394 temp.type = ADDRESSOF;
3395 temp.var = anything_id;
3396 temp.offset = 0;
3397 VEC_safe_push (ce_s, heap, *results, &temp);
3400 /* Given a gimple tree T, return the constraint expression vector for it. */
3402 static void
3403 get_constraint_for (tree t, VEC (ce_s, heap) **results)
3405 gcc_assert (VEC_length (ce_s, *results) == 0);
3407 get_constraint_for_1 (t, results, false, true);
3410 /* Given a gimple tree T, return the constraint expression vector for it
3411 to be used as the rhs of a constraint. */
3413 static void
3414 get_constraint_for_rhs (tree t, VEC (ce_s, heap) **results)
3416 gcc_assert (VEC_length (ce_s, *results) == 0);
3418 get_constraint_for_1 (t, results, false, false);
3422 /* Efficiently generates constraints from all entries in *RHSC to all
3423 entries in *LHSC. */
3425 static void
3426 process_all_all_constraints (VEC (ce_s, heap) *lhsc, VEC (ce_s, heap) *rhsc)
3428 struct constraint_expr *lhsp, *rhsp;
3429 unsigned i, j;
3431 if (VEC_length (ce_s, lhsc) <= 1
3432 || VEC_length (ce_s, rhsc) <= 1)
3434 FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
3435 FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
3436 process_constraint (new_constraint (*lhsp, *rhsp));
3438 else
3440 struct constraint_expr tmp;
3441 tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3442 FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
3443 process_constraint (new_constraint (tmp, *rhsp));
3444 FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
3445 process_constraint (new_constraint (*lhsp, tmp));
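/* Illustrative sketch, not part of GCC and guarded out: why
   process_all_all_constraints introduces a temporary once both sides have
   more than one element.  Copying every rhs into a temporary and the
   temporary into every lhs needs n + m constraints instead of the n * m
   a direct cross product would generate.  The counts are invented for the
   sketch.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned n_lhs = 5, n_rhs = 7;
  printf ("direct cross product: %u constraints\n", n_lhs * n_rhs);  /* 35 */
  printf ("through one temporary: %u constraints\n", n_lhs + n_rhs); /* 12 */
  return 0;
}
#endif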
3449 /* Handle aggregate copies by expanding into copies of the respective
3450 fields of the structures. */
3452 static void
3453 do_structure_copy (tree lhsop, tree rhsop)
3455 struct constraint_expr *lhsp, *rhsp;
3456 VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
3457 unsigned j;
3459 get_constraint_for (lhsop, &lhsc);
3460 get_constraint_for_rhs (rhsop, &rhsc);
3461 lhsp = VEC_index (ce_s, lhsc, 0);
3462 rhsp = VEC_index (ce_s, rhsc, 0);
3463 if (lhsp->type == DEREF
3464 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3465 || rhsp->type == DEREF)
3467 if (lhsp->type == DEREF)
3469 gcc_assert (VEC_length (ce_s, lhsc) == 1);
3470 lhsp->offset = UNKNOWN_OFFSET;
3472 if (rhsp->type == DEREF)
3474 gcc_assert (VEC_length (ce_s, rhsc) == 1);
3475 rhsp->offset = UNKNOWN_OFFSET;
3477 process_all_all_constraints (lhsc, rhsc);
3479 else if (lhsp->type == SCALAR
3480 && (rhsp->type == SCALAR
3481 || rhsp->type == ADDRESSOF))
3483 HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
3484 HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
3485 unsigned k = 0;
3486 get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
3487 get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
3488 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp);)
3490 varinfo_t lhsv, rhsv;
3491 rhsp = VEC_index (ce_s, rhsc, k);
3492 lhsv = get_varinfo (lhsp->var);
3493 rhsv = get_varinfo (rhsp->var);
3494 if (lhsv->may_have_pointers
3495 && (lhsv->is_full_var
3496 || rhsv->is_full_var
3497 || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3498 rhsv->offset + lhsoffset, rhsv->size)))
3499 process_constraint (new_constraint (*lhsp, *rhsp));
3500 if (!rhsv->is_full_var
3501 && (lhsv->is_full_var
3502 || (lhsv->offset + rhsoffset + lhsv->size
3503 > rhsv->offset + lhsoffset + rhsv->size)))
3505 ++k;
3506 if (k >= VEC_length (ce_s, rhsc))
3507 break;
3509 else
3510 ++j;
3513 else
3514 gcc_unreachable ();
3516 VEC_free (ce_s, heap, lhsc);
3517 VEC_free (ce_s, heap, rhsc);
3520 /* Create constraints ID = { rhsc }. */
3522 static void
3523 make_constraints_to (unsigned id, VEC(ce_s, heap) *rhsc)
3525 struct constraint_expr *c;
3526 struct constraint_expr includes;
3527 unsigned int j;
3529 includes.var = id;
3530 includes.offset = 0;
3531 includes.type = SCALAR;
3533 FOR_EACH_VEC_ELT (ce_s, rhsc, j, c)
3534 process_constraint (new_constraint (includes, *c));
3537 /* Create a constraint ID = OP. */
3539 static void
3540 make_constraint_to (unsigned id, tree op)
3542 VEC(ce_s, heap) *rhsc = NULL;
3543 get_constraint_for_rhs (op, &rhsc);
3544 make_constraints_to (id, rhsc);
3545 VEC_free (ce_s, heap, rhsc);
3548 /* Create a constraint ID = &FROM. */
3550 static void
3551 make_constraint_from (varinfo_t vi, int from)
3553 struct constraint_expr lhs, rhs;
3555 lhs.var = vi->id;
3556 lhs.offset = 0;
3557 lhs.type = SCALAR;
3559 rhs.var = from;
3560 rhs.offset = 0;
3561 rhs.type = ADDRESSOF;
3562 process_constraint (new_constraint (lhs, rhs));
3565 /* Create a constraint ID = FROM. */
3567 static void
3568 make_copy_constraint (varinfo_t vi, int from)
3570 struct constraint_expr lhs, rhs;
3572 lhs.var = vi->id;
3573 lhs.offset = 0;
3574 lhs.type = SCALAR;
3576 rhs.var = from;
3577 rhs.offset = 0;
3578 rhs.type = SCALAR;
3579 process_constraint (new_constraint (lhs, rhs));
3582 /* Make constraints necessary to make OP escape. */
3584 static void
3585 make_escape_constraint (tree op)
3587 make_constraint_to (escaped_id, op);
3590 /* Add constraints so that the solution of VI is transitively closed. */
3592 static void
3593 make_transitive_closure_constraints (varinfo_t vi)
3595 struct constraint_expr lhs, rhs;
3597 /* VAR = *VAR; */
3598 lhs.type = SCALAR;
3599 lhs.var = vi->id;
3600 lhs.offset = 0;
3601 rhs.type = DEREF;
3602 rhs.var = vi->id;
3603 rhs.offset = 0;
3604 process_constraint (new_constraint (lhs, rhs));
3606 /* VAR = VAR + UNKNOWN; */
3607 lhs.type = SCALAR;
3608 lhs.var = vi->id;
3609 lhs.offset = 0;
3610 rhs.type = SCALAR;
3611 rhs.var = vi->id;
3612 rhs.offset = UNKNOWN_OFFSET;
3613 process_constraint (new_constraint (lhs, rhs));
3616 /* Temporary storage for fake var decls. */
3617 struct obstack fake_var_decl_obstack;
3619 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3621 static tree
3622 build_fake_var_decl (tree type)
3624 tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
3625 memset (decl, 0, sizeof (struct tree_var_decl));
3626 TREE_SET_CODE (decl, VAR_DECL);
3627 TREE_TYPE (decl) = type;
3628 DECL_UID (decl) = allocate_decl_uid ();
3629 SET_DECL_PT_UID (decl, -1);
3630 layout_decl (decl, 0);
3631 return decl;
3634 /* Create a new artificial heap variable with NAME.
3635 Return the created variable. */
3637 static varinfo_t
3638 make_heapvar (const char *name)
3640 varinfo_t vi;
3641 tree heapvar;
3643 heapvar = build_fake_var_decl (ptr_type_node);
3644 DECL_EXTERNAL (heapvar) = 1;
3646 vi = new_var_info (heapvar, name);
3647 vi->is_artificial_var = true;
3648 vi->is_heap_var = true;
3649 vi->is_unknown_size_var = true;
3650 vi->offset = 0;
3651 vi->fullsize = ~0;
3652 vi->size = ~0;
3653 vi->is_full_var = true;
3654 insert_vi_for_tree (heapvar, vi);
3656 return vi;
3659 /* Create a new artificial heap variable with NAME and make a
3660 constraint from it to LHS. Set flags according to a tag used
3661 for tracking restrict pointers. */
3663 static varinfo_t
3664 make_constraint_from_restrict (varinfo_t lhs, const char *name)
3666 varinfo_t vi = make_heapvar (name);
3667 vi->is_global_var = 1;
3668 vi->may_have_pointers = 1;
3669 make_constraint_from (lhs, vi->id);
3670 return vi;
3673 /* Create a new artificial heap variable with NAME and make a
3674 constraint from it to LHS. Set flags according to a tag used
3675 for tracking restrict pointers and make the artificial heap
3676 point to global memory. */
3678 static varinfo_t
3679 make_constraint_from_global_restrict (varinfo_t lhs, const char *name)
3681 varinfo_t vi = make_constraint_from_restrict (lhs, name);
3682 make_copy_constraint (vi, nonlocal_id);
3683 return vi;
3686 /* In IPA mode there are varinfos for different aspects of each
3687 function designator. One for the points-to set of the return
3688 value, one for the variables that are clobbered by the function,
3689 one for its uses and one for each parameter (including a single
3690 glob for remaining variadic arguments). */
3692 enum { fi_clobbers = 1, fi_uses = 2,
3693 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3695 /* Get a constraint for the requested part of a function designator FI
3696 when operating in IPA mode. */
3698 static struct constraint_expr
3699 get_function_part_constraint (varinfo_t fi, unsigned part)
3701 struct constraint_expr c;
3703 gcc_assert (in_ipa_mode);
3705 if (fi->id == anything_id)
3707 /* ??? We probably should have a ANYFN special variable. */
3708 c.var = anything_id;
3709 c.offset = 0;
3710 c.type = SCALAR;
3712 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3714 varinfo_t ai = first_vi_for_offset (fi, part);
3715 if (ai)
3716 c.var = ai->id;
3717 else
3718 c.var = anything_id;
3719 c.offset = 0;
3720 c.type = SCALAR;
3722 else
3724 c.var = fi->id;
3725 c.offset = part;
3726 c.type = DEREF;
3729 return c;
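/* Illustrative sketch, not part of GCC and guarded out: how the fi_*
   offsets above address the sub-variables of a function info.  Parameter K
   lives at offset fi_parm_base + K, after the clobbers, uses, static chain
   and result slots.  The EX_FI_* names are invented for the sketch and
   mirror the enum above.  */
#if 0
#include <stdio.h>

enum { EX_FI_CLOBBERS = 1, EX_FI_USES = 2,
       EX_FI_STATIC_CHAIN = 3, EX_FI_RESULT = 4, EX_FI_PARM_BASE = 5 };

int
main (void)
{
  unsigned k;
  printf ("clobbers at offset %d, uses at %d, result at %d\n",
          EX_FI_CLOBBERS, EX_FI_USES, EX_FI_RESULT);
  for (k = 0; k < 3; k++)
    printf ("parameter %u at offset %u\n", k, EX_FI_PARM_BASE + k);
  return 0;
}
#endif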
3732 /* For non-IPA mode, generate constraints necessary for a call on the
3733 RHS. */
3735 static void
3736 handle_rhs_call (gimple stmt, VEC(ce_s, heap) **results)
3738 struct constraint_expr rhsc;
3739 unsigned i;
3740 bool returns_uses = false;
3742 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3744 tree arg = gimple_call_arg (stmt, i);
3745 int flags = gimple_call_arg_flags (stmt, i);
3747 /* If the argument is not used we can ignore it. */
3748 if (flags & EAF_UNUSED)
3749 continue;
3751 /* As we compute ESCAPED context-insensitive we do not gain
3752 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3753 set. The argument would still get clobbered through the
3754 escape solution. */
3755 if ((flags & EAF_NOCLOBBER)
3756 && (flags & EAF_NOESCAPE))
3758 varinfo_t uses = get_call_use_vi (stmt);
3759 if (!(flags & EAF_DIRECT))
3761 varinfo_t tem = new_var_info (NULL_TREE, "callarg");
3762 make_constraint_to (tem->id, arg);
3763 make_transitive_closure_constraints (tem);
3764 make_copy_constraint (uses, tem->id);
3766 else
3767 make_constraint_to (uses->id, arg);
3768 returns_uses = true;
3770 else if (flags & EAF_NOESCAPE)
3772 struct constraint_expr lhs, rhs;
3773 varinfo_t uses = get_call_use_vi (stmt);
3774 varinfo_t clobbers = get_call_clobber_vi (stmt);
3775 varinfo_t tem = new_var_info (NULL_TREE, "callarg");
3776 make_constraint_to (tem->id, arg);
3777 if (!(flags & EAF_DIRECT))
3778 make_transitive_closure_constraints (tem);
3779 make_copy_constraint (uses, tem->id);
3780 make_copy_constraint (clobbers, tem->id);
3781 /* Add *tem = nonlocal, do not add *tem = callused as
3782 EAF_NOESCAPE parameters do not escape to other parameters
3783 and all other uses appear in NONLOCAL as well. */
3784 lhs.type = DEREF;
3785 lhs.var = tem->id;
3786 lhs.offset = 0;
3787 rhs.type = SCALAR;
3788 rhs.var = nonlocal_id;
3789 rhs.offset = 0;
3790 process_constraint (new_constraint (lhs, rhs));
3791 returns_uses = true;
3793 else
3794 make_escape_constraint (arg);
3797 /* If we added to the call's uses solution, make sure we account for
3798 pointers to it being returned. */
3799 if (returns_uses)
3801 rhsc.var = get_call_use_vi (stmt)->id;
3802 rhsc.offset = 0;
3803 rhsc.type = SCALAR;
3804 VEC_safe_push (ce_s, heap, *results, &rhsc);
3807 /* The static chain escapes as well. */
3808 if (gimple_call_chain (stmt))
3809 make_escape_constraint (gimple_call_chain (stmt));
3811 /* And if we applied NRV the address of the return slot escapes as well. */
3812 if (gimple_call_return_slot_opt_p (stmt)
3813 && gimple_call_lhs (stmt) != NULL_TREE
3814 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3816 VEC(ce_s, heap) *tmpc = NULL;
3817 struct constraint_expr lhsc, *c;
3818 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
3819 lhsc.var = escaped_id;
3820 lhsc.offset = 0;
3821 lhsc.type = SCALAR;
3822 FOR_EACH_VEC_ELT (ce_s, tmpc, i, c)
3823 process_constraint (new_constraint (lhsc, *c));
3824 VEC_free(ce_s, heap, tmpc);
3827 /* Regular functions return nonlocal memory. */
3828 rhsc.var = nonlocal_id;
3829 rhsc.offset = 0;
3830 rhsc.type = SCALAR;
3831 VEC_safe_push (ce_s, heap, *results, &rhsc);
3834 /* For non-IPA mode, generate constraints necessary for a call
3835 that returns a pointer and assigns it to LHS. This simply makes
3836 the LHS point to global and escaped variables. */
3838 static void
3839 handle_lhs_call (gimple stmt, tree lhs, int flags, VEC(ce_s, heap) *rhsc,
3840 tree fndecl)
3842 VEC(ce_s, heap) *lhsc = NULL;
3844 get_constraint_for (lhs, &lhsc);
3845 /* If the store is to a global decl make sure to
3846 add proper escape constraints. */
3847 lhs = get_base_address (lhs);
3848 if (lhs
3849 && DECL_P (lhs)
3850 && is_global_var (lhs))
3852 struct constraint_expr tmpc;
3853 tmpc.var = escaped_id;
3854 tmpc.offset = 0;
3855 tmpc.type = SCALAR;
3856 VEC_safe_push (ce_s, heap, lhsc, &tmpc);
3859 /* If the call returns an argument unmodified, override the rhs
3860 constraints. */
3861 flags = gimple_call_return_flags (stmt);
3862 if (flags & ERF_RETURNS_ARG
3863 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
3865 tree arg;
3866 rhsc = NULL;
3867 arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
3868 get_constraint_for (arg, &rhsc);
3869 process_all_all_constraints (lhsc, rhsc);
3870 VEC_free (ce_s, heap, rhsc);
3872 else if (flags & ERF_NOALIAS)
3874 varinfo_t vi;
3875 struct constraint_expr tmpc;
3876 rhsc = NULL;
3877 vi = make_heapvar ("HEAP");
3878 /* We delay marking allocated storage global until we know if
3879 it escapes. */
3880 DECL_EXTERNAL (vi->decl) = 0;
3881 vi->is_global_var = 0;
3882 /* If this is not a real malloc call assume the memory was
3883 initialized and thus may point to global memory. All
3884 builtin functions with the malloc attribute behave in a sane way. */
3885 if (!fndecl
3886 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
3887 make_constraint_from (vi, nonlocal_id);
3888 tmpc.var = vi->id;
3889 tmpc.offset = 0;
3890 tmpc.type = ADDRESSOF;
3891 VEC_safe_push (ce_s, heap, rhsc, &tmpc);
3892 process_all_all_constraints (lhsc, rhsc);
3893 VEC_free (ce_s, heap, rhsc);
3895 else
3896 process_all_all_constraints (lhsc, rhsc);
3898 VEC_free (ce_s, heap, lhsc);
3901 /* For non-IPA mode, generate constraints necessary for a call of a
3902 const function that returns a pointer in the statement STMT. */
3904 static void
3905 handle_const_call (gimple stmt, VEC(ce_s, heap) **results)
3907 struct constraint_expr rhsc;
3908 unsigned int k;
3910 /* Treat nested const functions the same as pure functions as far
3911 as the static chain is concerned. */
3912 if (gimple_call_chain (stmt))
3914 varinfo_t uses = get_call_use_vi (stmt);
3915 make_transitive_closure_constraints (uses);
3916 make_constraint_to (uses->id, gimple_call_chain (stmt));
3917 rhsc.var = uses->id;
3918 rhsc.offset = 0;
3919 rhsc.type = SCALAR;
3920 VEC_safe_push (ce_s, heap, *results, &rhsc);
3923 /* May return arguments. */
3924 for (k = 0; k < gimple_call_num_args (stmt); ++k)
3926 tree arg = gimple_call_arg (stmt, k);
3927 VEC(ce_s, heap) *argc = NULL;
3928 unsigned i;
3929 struct constraint_expr *argp;
3930 get_constraint_for_rhs (arg, &argc);
3931 FOR_EACH_VEC_ELT (ce_s, argc, i, argp)
3932 VEC_safe_push (ce_s, heap, *results, argp);
3933 VEC_free(ce_s, heap, argc);
3936 /* May return addresses of globals. */
3937 rhsc.var = nonlocal_id;
3938 rhsc.offset = 0;
3939 rhsc.type = ADDRESSOF;
3940 VEC_safe_push (ce_s, heap, *results, &rhsc);
3943 /* For non-IPA mode, generate constraints necessary for a call to a
3944 pure function in statement STMT. */
3946 static void
3947 handle_pure_call (gimple stmt, VEC(ce_s, heap) **results)
3949 struct constraint_expr rhsc;
3950 unsigned i;
3951 varinfo_t uses = NULL;
3953 /* Memory reached from pointer arguments is call-used. */
3954 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3956 tree arg = gimple_call_arg (stmt, i);
3957 if (!uses)
3959 uses = get_call_use_vi (stmt);
3960 make_transitive_closure_constraints (uses);
3962 make_constraint_to (uses->id, arg);
3965 /* The static chain is used as well. */
3966 if (gimple_call_chain (stmt))
3968 if (!uses)
3970 uses = get_call_use_vi (stmt);
3971 make_transitive_closure_constraints (uses);
3973 make_constraint_to (uses->id, gimple_call_chain (stmt));
3976 /* Pure functions may return call-used and nonlocal memory. */
3977 if (uses)
3979 rhsc.var = uses->id;
3980 rhsc.offset = 0;
3981 rhsc.type = SCALAR;
3982 VEC_safe_push (ce_s, heap, *results, &rhsc);
3984 rhsc.var = nonlocal_id;
3985 rhsc.offset = 0;
3986 rhsc.type = SCALAR;
3987 VEC_safe_push (ce_s, heap, *results, &rhsc);
3991 /* Return the varinfo for the callee of CALL. */
3993 static varinfo_t
3994 get_fi_for_callee (gimple call)
3996 tree decl, fn = gimple_call_fn (call);
3998 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
3999 fn = OBJ_TYPE_REF_EXPR (fn);
4001 /* If we can directly resolve the function being called, do so.
4002 Otherwise, it must be some sort of indirect expression that
4003 we should still be able to handle. */
4004 decl = gimple_call_addr_fndecl (fn);
4005 if (decl)
4006 return get_vi_for_tree (decl);
4008 /* If the function is anything other than a SSA name pointer we have no
4009 clue and should be getting ANYFN (well, ANYTHING for now). */
4010 if (!fn || TREE_CODE (fn) != SSA_NAME)
4011 return get_varinfo (anything_id);
4013 if ((TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4014 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL)
4015 && SSA_NAME_IS_DEFAULT_DEF (fn))
4016 fn = SSA_NAME_VAR (fn);
4018 return get_vi_for_tree (fn);
4021 /* Create constraints for the builtin call T. Return true if the call
4022 was handled, otherwise false. */
4024 static bool
4025 find_func_aliases_for_builtin_call (gimple t)
4027 tree fndecl = gimple_call_fndecl (t);
4028 VEC(ce_s, heap) *lhsc = NULL;
4029 VEC(ce_s, heap) *rhsc = NULL;
4030 varinfo_t fi;
4032 if (gimple_call_builtin_class_p (t, BUILT_IN_NORMAL))
4033 /* ??? All builtins that are handled here need to be handled
4034 in the alias-oracle query functions explicitly! */
4035 switch (DECL_FUNCTION_CODE (fndecl))
4037 /* All the following functions return a pointer to the same object
4038 as their first argument points to. The functions do not add
4039 to the ESCAPED solution. The functions make the memory pointed to
4040 by the first argument point to what the memory pointed to by the
4041 second argument points to. */
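/* As a hedged illustration, for

     r = __builtin_memcpy (d, s, n);

   the code below roughly generates

     r = d
     *d = *s

   without making D or S escape.  (Sketch only; the real constraints
   are built via get_constraint_for_ptr_offset and
   process_all_all_constraints.)  */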
4042 case BUILT_IN_STRCPY:
4043 case BUILT_IN_STRNCPY:
4044 case BUILT_IN_BCOPY:
4045 case BUILT_IN_MEMCPY:
4046 case BUILT_IN_MEMMOVE:
4047 case BUILT_IN_MEMPCPY:
4048 case BUILT_IN_STPCPY:
4049 case BUILT_IN_STPNCPY:
4050 case BUILT_IN_STRCAT:
4051 case BUILT_IN_STRNCAT:
4052 case BUILT_IN_STRCPY_CHK:
4053 case BUILT_IN_STRNCPY_CHK:
4054 case BUILT_IN_MEMCPY_CHK:
4055 case BUILT_IN_MEMMOVE_CHK:
4056 case BUILT_IN_MEMPCPY_CHK:
4057 case BUILT_IN_STPCPY_CHK:
4058 case BUILT_IN_STPNCPY_CHK:
4059 case BUILT_IN_STRCAT_CHK:
4060 case BUILT_IN_STRNCAT_CHK:
4061 case BUILT_IN_TM_MEMCPY:
4062 case BUILT_IN_TM_MEMMOVE:
4064 tree res = gimple_call_lhs (t);
4065 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4066 == BUILT_IN_BCOPY ? 1 : 0));
4067 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4068 == BUILT_IN_BCOPY ? 0 : 1));
4069 if (res != NULL_TREE)
4071 get_constraint_for (res, &lhsc);
4072 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4073 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4074 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4075 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4076 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4077 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4078 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4079 else
4080 get_constraint_for (dest, &rhsc);
4081 process_all_all_constraints (lhsc, rhsc);
4082 VEC_free (ce_s, heap, lhsc);
4083 VEC_free (ce_s, heap, rhsc);
4085 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4086 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4087 do_deref (&lhsc);
4088 do_deref (&rhsc);
4089 process_all_all_constraints (lhsc, rhsc);
4090 VEC_free (ce_s, heap, lhsc);
4091 VEC_free (ce_s, heap, rhsc);
4092 return true;
4094 case BUILT_IN_MEMSET:
4095 case BUILT_IN_MEMSET_CHK:
4096 case BUILT_IN_TM_MEMSET:
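/* A hedged sketch of what follows: for

     r = __builtin_memset (d, c, n);

   we generate r = d, and *d = &NOTHING when the stored value is a
   literal zero and -fdelete-null-pointer-checks is in effect,
   otherwise *d = INTEGER.  */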
4098 tree res = gimple_call_lhs (t);
4099 tree dest = gimple_call_arg (t, 0);
4100 unsigned i;
4101 ce_s *lhsp;
4102 struct constraint_expr ac;
4103 if (res != NULL_TREE)
4105 get_constraint_for (res, &lhsc);
4106 get_constraint_for (dest, &rhsc);
4107 process_all_all_constraints (lhsc, rhsc);
4108 VEC_free (ce_s, heap, lhsc);
4109 VEC_free (ce_s, heap, rhsc);
4111 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4112 do_deref (&lhsc);
4113 if (flag_delete_null_pointer_checks
4114 && integer_zerop (gimple_call_arg (t, 1)))
4116 ac.type = ADDRESSOF;
4117 ac.var = nothing_id;
4119 else
4121 ac.type = SCALAR;
4122 ac.var = integer_id;
4124 ac.offset = 0;
4125 FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
4126 process_constraint (new_constraint (*lhsp, ac));
4127 VEC_free (ce_s, heap, lhsc);
4128 return true;
4130 case BUILT_IN_ASSUME_ALIGNED:
4132 tree res = gimple_call_lhs (t);
4133 tree dest = gimple_call_arg (t, 0);
4134 if (res != NULL_TREE)
4136 get_constraint_for (res, &lhsc);
4137 get_constraint_for (dest, &rhsc);
4138 process_all_all_constraints (lhsc, rhsc);
4139 VEC_free (ce_s, heap, lhsc);
4140 VEC_free (ce_s, heap, rhsc);
4142 return true;
4144 /* All the following functions do not return pointers, do not
4145 modify the points-to sets of memory reachable from their
4146 arguments and do not add to the ESCAPED solution. */
4147 case BUILT_IN_SINCOS:
4148 case BUILT_IN_SINCOSF:
4149 case BUILT_IN_SINCOSL:
4150 case BUILT_IN_FREXP:
4151 case BUILT_IN_FREXPF:
4152 case BUILT_IN_FREXPL:
4153 case BUILT_IN_GAMMA_R:
4154 case BUILT_IN_GAMMAF_R:
4155 case BUILT_IN_GAMMAL_R:
4156 case BUILT_IN_LGAMMA_R:
4157 case BUILT_IN_LGAMMAF_R:
4158 case BUILT_IN_LGAMMAL_R:
4159 case BUILT_IN_MODF:
4160 case BUILT_IN_MODFF:
4161 case BUILT_IN_MODFL:
4162 case BUILT_IN_REMQUO:
4163 case BUILT_IN_REMQUOF:
4164 case BUILT_IN_REMQUOL:
4165 case BUILT_IN_FREE:
4166 return true;
4167 case BUILT_IN_STRDUP:
4168 case BUILT_IN_STRNDUP:
4169 if (gimple_call_lhs (t))
4171 handle_lhs_call (t, gimple_call_lhs (t), gimple_call_flags (t),
4172 NULL, fndecl);
4173 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4174 NULL_TREE, &lhsc);
4175 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4176 NULL_TREE, &rhsc);
4177 do_deref (&lhsc);
4178 do_deref (&rhsc);
4179 process_all_all_constraints (lhsc, rhsc);
4180 VEC_free (ce_s, heap, lhsc);
4181 VEC_free (ce_s, heap, rhsc);
4182 return true;
4184 break;
4185 /* Trampolines are special - they set up passing the static
4186 frame. */
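/* A hedged sketch of the IPA-mode handling below: for

     __builtin_init_trampoline (tramp, &f, frame);

   the static-chain slot of F's function info is made to point to
   FRAME, and *TRAMP is made to point to F so that the result of a
   later __builtin_adjust_trampoline can be resolved back to F.  */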
4187 case BUILT_IN_INIT_TRAMPOLINE:
4189 tree tramp = gimple_call_arg (t, 0);
4190 tree nfunc = gimple_call_arg (t, 1);
4191 tree frame = gimple_call_arg (t, 2);
4192 unsigned i;
4193 struct constraint_expr lhs, *rhsp;
4194 if (in_ipa_mode)
4196 varinfo_t nfi = NULL;
4197 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4198 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4199 if (nfi)
4201 lhs = get_function_part_constraint (nfi, fi_static_chain);
4202 get_constraint_for (frame, &rhsc);
4203 FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
4204 process_constraint (new_constraint (lhs, *rhsp));
4205 VEC_free (ce_s, heap, rhsc);
4207 /* Make the frame point to the function for
4208 the trampoline adjustment call. */
4209 get_constraint_for (tramp, &lhsc);
4210 do_deref (&lhsc);
4211 get_constraint_for (nfunc, &rhsc);
4212 process_all_all_constraints (lhsc, rhsc);
4213 VEC_free (ce_s, heap, rhsc);
4214 VEC_free (ce_s, heap, lhsc);
4216 return true;
4219 /* Else fallthru to generic handling which will let
4220 the frame escape. */
4221 break;
4223 case BUILT_IN_ADJUST_TRAMPOLINE:
4225 tree tramp = gimple_call_arg (t, 0);
4226 tree res = gimple_call_lhs (t);
4227 if (in_ipa_mode && res)
4229 get_constraint_for (res, &lhsc);
4230 get_constraint_for (tramp, &rhsc);
4231 do_deref (&rhsc);
4232 process_all_all_constraints (lhsc, rhsc);
4233 VEC_free (ce_s, heap, rhsc);
4234 VEC_free (ce_s, heap, lhsc);
4236 return true;
4238 CASE_BUILT_IN_TM_STORE (1):
4239 CASE_BUILT_IN_TM_STORE (2):
4240 CASE_BUILT_IN_TM_STORE (4):
4241 CASE_BUILT_IN_TM_STORE (8):
4242 CASE_BUILT_IN_TM_STORE (FLOAT):
4243 CASE_BUILT_IN_TM_STORE (DOUBLE):
4244 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4245 CASE_BUILT_IN_TM_STORE (M64):
4246 CASE_BUILT_IN_TM_STORE (M128):
4247 CASE_BUILT_IN_TM_STORE (M256):
4249 tree addr = gimple_call_arg (t, 0);
4250 tree src = gimple_call_arg (t, 1);
4252 get_constraint_for (addr, &lhsc);
4253 do_deref (&lhsc);
4254 get_constraint_for (src, &rhsc);
4255 process_all_all_constraints (lhsc, rhsc);
4256 VEC_free (ce_s, heap, lhsc);
4257 VEC_free (ce_s, heap, rhsc);
4258 return true;
4260 CASE_BUILT_IN_TM_LOAD (1):
4261 CASE_BUILT_IN_TM_LOAD (2):
4262 CASE_BUILT_IN_TM_LOAD (4):
4263 CASE_BUILT_IN_TM_LOAD (8):
4264 CASE_BUILT_IN_TM_LOAD (FLOAT):
4265 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4266 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4267 CASE_BUILT_IN_TM_LOAD (M64):
4268 CASE_BUILT_IN_TM_LOAD (M128):
4269 CASE_BUILT_IN_TM_LOAD (M256):
4271 tree dest = gimple_call_lhs (t);
4272 tree addr = gimple_call_arg (t, 0);
4274 get_constraint_for (dest, &lhsc);
4275 get_constraint_for (addr, &rhsc);
4276 do_deref (&rhsc);
4277 process_all_all_constraints (lhsc, rhsc);
4278 VEC_free (ce_s, heap, lhsc);
4279 VEC_free (ce_s, heap, rhsc);
4280 return true;
4282 /* Variadic arguments need to be handled in IPA
4283 mode as well. */
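/* A hedged example of the effect below: for

     __builtin_va_start (ap, last);

   *AP is made to point into the current function's function info
   (covering its parameters) in IPA mode, or simply to NONLOCAL
   otherwise, and AP itself is added to the call's clobber set.  */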
4284 case BUILT_IN_VA_START:
4286 tree valist = gimple_call_arg (t, 0);
4287 struct constraint_expr rhs, *lhsp;
4288 unsigned i;
4289 get_constraint_for (valist, &lhsc);
4290 do_deref (&lhsc);
4291 /* The va_list gets access to pointers in variadic
4292 arguments, which we know in the case of IPA analysis
4293 and which otherwise are just all nonlocal variables. */
4294 if (in_ipa_mode)
4296 fi = lookup_vi_for_tree (cfun->decl);
4297 rhs = get_function_part_constraint (fi, ~0);
4298 rhs.type = ADDRESSOF;
4300 else
4302 rhs.var = nonlocal_id;
4303 rhs.type = ADDRESSOF;
4304 rhs.offset = 0;
4306 FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
4307 process_constraint (new_constraint (*lhsp, rhs));
4308 VEC_free (ce_s, heap, lhsc);
4309 /* va_list is clobbered. */
4310 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4311 return true;
4313 /* va_end doesn't have any effect that matters. */
4314 case BUILT_IN_VA_END:
4315 return true;
4316 /* Alternate return. Simply give up for now. */
4317 case BUILT_IN_RETURN:
4319 fi = NULL;
4320 if (!in_ipa_mode
4321 || !(fi = get_vi_for_tree (cfun->decl)))
4322 make_constraint_from (get_varinfo (escaped_id), anything_id);
4323 else if (in_ipa_mode
4324 && fi != NULL)
4326 struct constraint_expr lhs, rhs;
4327 lhs = get_function_part_constraint (fi, fi_result);
4328 rhs.var = anything_id;
4329 rhs.offset = 0;
4330 rhs.type = SCALAR;
4331 process_constraint (new_constraint (lhs, rhs));
4333 return true;
4335 /* printf-style functions may have hooks to set pointers to
4336 point somewhere into the generated string. Leave them
4337 for a later exercise... */
4338 default:
4339 /* Fallthru to general call handling. */;
4342 return false;
4345 /* Create constraints for the call T. */
4347 static void
4348 find_func_aliases_for_call (gimple t)
4350 tree fndecl = gimple_call_fndecl (t);
4351 VEC(ce_s, heap) *lhsc = NULL;
4352 VEC(ce_s, heap) *rhsc = NULL;
4353 varinfo_t fi;
4355 if (fndecl != NULL_TREE
4356 && DECL_BUILT_IN (fndecl)
4357 && find_func_aliases_for_builtin_call (t))
4358 return;
4360 fi = get_fi_for_callee (t);
4361 if (!in_ipa_mode
4362 || (fndecl && !fi->is_fn_info))
4364 VEC(ce_s, heap) *rhsc = NULL;
4365 int flags = gimple_call_flags (t);
4367 /* Const functions can return their arguments and addresses
4368 of global memory but not of escaped memory. */
4369 if (flags & (ECF_CONST|ECF_NOVOPS))
4371 if (gimple_call_lhs (t))
4372 handle_const_call (t, &rhsc);
4374 /* Pure functions can return addresses in and of memory
4375 reachable from their arguments, but they are not an escape
4376 point for reachable memory of their arguments. */
4377 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4378 handle_pure_call (t, &rhsc);
4379 else
4380 handle_rhs_call (t, &rhsc);
4381 if (gimple_call_lhs (t))
4382 handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
4383 VEC_free (ce_s, heap, rhsc);
4385 else
4387 tree lhsop;
4388 unsigned j;
4390 /* Assign all the passed arguments to the appropriate incoming
4391 parameters of the function. */
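/* As a hedged example, a call x = foo (a, b) in IPA mode roughly
   produces

     foo.arg0 = a
     foo.arg1 = b
     x = foo.result

   using the sub-variables of FOO's function info created by
   create_function_info_for.  */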
4392 for (j = 0; j < gimple_call_num_args (t); j++)
4394 struct constraint_expr lhs;
4395 struct constraint_expr *rhsp;
4396 tree arg = gimple_call_arg (t, j);
4398 get_constraint_for_rhs (arg, &rhsc);
4399 lhs = get_function_part_constraint (fi, fi_parm_base + j);
4400 while (VEC_length (ce_s, rhsc) != 0)
4402 rhsp = VEC_last (ce_s, rhsc);
4403 process_constraint (new_constraint (lhs, *rhsp));
4404 VEC_pop (ce_s, rhsc);
4408 /* If we are returning a value, assign it to the result. */
4409 lhsop = gimple_call_lhs (t);
4410 if (lhsop)
4412 struct constraint_expr rhs;
4413 struct constraint_expr *lhsp;
4415 get_constraint_for (lhsop, &lhsc);
4416 rhs = get_function_part_constraint (fi, fi_result);
4417 if (fndecl
4418 && DECL_RESULT (fndecl)
4419 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4421 VEC(ce_s, heap) *tem = NULL;
4422 VEC_safe_push (ce_s, heap, tem, &rhs);
4423 do_deref (&tem);
4424 rhs = *VEC_index (ce_s, tem, 0);
4425 VEC_free(ce_s, heap, tem);
4427 FOR_EACH_VEC_ELT (ce_s, lhsc, j, lhsp)
4428 process_constraint (new_constraint (*lhsp, rhs));
4431 /* If we pass the result decl by reference, honor that. */
4432 if (lhsop
4433 && fndecl
4434 && DECL_RESULT (fndecl)
4435 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4437 struct constraint_expr lhs;
4438 struct constraint_expr *rhsp;
4440 get_constraint_for_address_of (lhsop, &rhsc);
4441 lhs = get_function_part_constraint (fi, fi_result);
4442 FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
4443 process_constraint (new_constraint (lhs, *rhsp));
4444 VEC_free (ce_s, heap, rhsc);
4447 /* If we use a static chain, pass it along. */
4448 if (gimple_call_chain (t))
4450 struct constraint_expr lhs;
4451 struct constraint_expr *rhsp;
4453 get_constraint_for (gimple_call_chain (t), &rhsc);
4454 lhs = get_function_part_constraint (fi, fi_static_chain);
4455 FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
4456 process_constraint (new_constraint (lhs, *rhsp));
4461 /* Walk statement T setting up aliasing constraints according to the
4462 references found in T. This function is the main part of the
4463 constraint builder. */
4466 static void
4467 find_func_aliases (gimple origt)
4469 gimple t = origt;
4470 VEC(ce_s, heap) *lhsc = NULL;
4471 VEC(ce_s, heap) *rhsc = NULL;
4472 struct constraint_expr *c;
4473 varinfo_t fi;
4475 /* Now build constraints expressions. */
4476 if (gimple_code (t) == GIMPLE_PHI)
4478 size_t i;
4479 unsigned int j;
4481 /* For a phi node, assign all the arguments to
4482 the result. */
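/* For example, x_3 = PHI <x_1, x_2> simply yields the copy
   constraints x_3 = x_1 and x_3 = x_2.  */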
4483 get_constraint_for (gimple_phi_result (t), &lhsc);
4484 for (i = 0; i < gimple_phi_num_args (t); i++)
4486 tree strippedrhs = PHI_ARG_DEF (t, i);
4488 STRIP_NOPS (strippedrhs);
4489 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
4491 FOR_EACH_VEC_ELT (ce_s, lhsc, j, c)
4493 struct constraint_expr *c2;
4494 while (VEC_length (ce_s, rhsc) > 0)
4496 c2 = VEC_last (ce_s, rhsc);
4497 process_constraint (new_constraint (*c, *c2));
4498 VEC_pop (ce_s, rhsc);
4503 /* In IPA mode, we need to generate constraints to pass call
4504 arguments through their calls. There are two cases,
4505 either a GIMPLE_CALL returning a value, or just a plain
4506 GIMPLE_CALL when it does not.
4508 In non-ipa mode, we need to generate constraints for each
4509 pointer passed by address. */
4510 else if (is_gimple_call (t))
4511 find_func_aliases_for_call (t);
4513 /* Otherwise, just a regular assignment statement. Only care about
4514 operations with pointer result, others are dealt with as escape
4515 points if they have pointer operands. */
4516 else if (is_gimple_assign (t))
4518 /* Otherwise, just a regular assignment statement. */
4519 tree lhsop = gimple_assign_lhs (t);
4520 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4522 if (rhsop && TREE_CLOBBER_P (rhsop))
4523 /* Ignore clobbers, they don't actually store anything into
4524 the LHS. */
4526 else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4527 do_structure_copy (lhsop, rhsop);
4528 else
4530 enum tree_code code = gimple_assign_rhs_code (t);
4532 get_constraint_for (lhsop, &lhsc);
4534 if (code == POINTER_PLUS_EXPR)
4535 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4536 gimple_assign_rhs2 (t), &rhsc);
4537 else if (code == BIT_AND_EXPR
4538 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
4540 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4541 the pointer. Handle it by offsetting it by UNKNOWN. */
4542 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4543 NULL_TREE, &rhsc);
4545 else if ((CONVERT_EXPR_CODE_P (code)
4546 && !(POINTER_TYPE_P (gimple_expr_type (t))
4547 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4548 || gimple_assign_single_p (t))
4549 get_constraint_for_rhs (rhsop, &rhsc);
4550 else if (truth_value_p (code))
4551 /* Truth value results are not pointers (or pointer parts), or at
4552 least a very unreasonable obfuscation of one. */
4554 else
4556 /* All other operations are merges. */
4557 VEC (ce_s, heap) *tmp = NULL;
4558 struct constraint_expr *rhsp;
4559 unsigned i, j;
4560 get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
4561 for (i = 2; i < gimple_num_ops (t); ++i)
4563 get_constraint_for_rhs (gimple_op (t, i), &tmp);
4564 FOR_EACH_VEC_ELT (ce_s, tmp, j, rhsp)
4565 VEC_safe_push (ce_s, heap, rhsc, rhsp);
4566 VEC_truncate (ce_s, tmp, 0);
4568 VEC_free (ce_s, heap, tmp);
4570 process_all_all_constraints (lhsc, rhsc);
4572 /* If there is a store to a global variable the rhs escapes. */
4573 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4574 && DECL_P (lhsop)
4575 && is_global_var (lhsop)
4576 && (!in_ipa_mode
4577 || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
4578 make_escape_constraint (rhsop);
4580 /* Handle escapes through return. */
4581 else if (gimple_code (t) == GIMPLE_RETURN
4582 && gimple_return_retval (t) != NULL_TREE)
4584 fi = NULL;
4585 if (!in_ipa_mode
4586 || !(fi = get_vi_for_tree (cfun->decl)))
4587 make_escape_constraint (gimple_return_retval (t));
4588 else if (in_ipa_mode
4589 && fi != NULL)
4591 struct constraint_expr lhs;
4592 struct constraint_expr *rhsp;
4593 unsigned i;
4595 lhs = get_function_part_constraint (fi, fi_result);
4596 get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
4597 FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
4598 process_constraint (new_constraint (lhs, *rhsp));
4601 /* Handle asms conservatively by adding escape constraints to everything. */
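/* A hedged example: for

     asm ("..." : "=m" (x) : "r" (p));

   the address of X escapes because of the memory output constraint,
   X itself may end up pointing to NONLOCAL, and the register input P
   is conservatively made to escape.  */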
4602 else if (gimple_code (t) == GIMPLE_ASM)
4604 unsigned i, noutputs;
4605 const char **oconstraints;
4606 const char *constraint;
4607 bool allows_mem, allows_reg, is_inout;
4609 noutputs = gimple_asm_noutputs (t);
4610 oconstraints = XALLOCAVEC (const char *, noutputs);
4612 for (i = 0; i < noutputs; ++i)
4614 tree link = gimple_asm_output_op (t, i);
4615 tree op = TREE_VALUE (link);
4617 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4618 oconstraints[i] = constraint;
4619 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4620 &allows_reg, &is_inout);
4622 /* A memory constraint makes the address of the operand escape. */
4623 if (!allows_reg && allows_mem)
4624 make_escape_constraint (build_fold_addr_expr (op));
4626 /* The asm may read global memory, so outputs may point to
4627 any global memory. */
4628 if (op)
4630 VEC(ce_s, heap) *lhsc = NULL;
4631 struct constraint_expr rhsc, *lhsp;
4632 unsigned j;
4633 get_constraint_for (op, &lhsc);
4634 rhsc.var = nonlocal_id;
4635 rhsc.offset = 0;
4636 rhsc.type = SCALAR;
4637 FOR_EACH_VEC_ELT (ce_s, lhsc, j, lhsp)
4638 process_constraint (new_constraint (*lhsp, rhsc));
4639 VEC_free (ce_s, heap, lhsc);
4642 for (i = 0; i < gimple_asm_ninputs (t); ++i)
4644 tree link = gimple_asm_input_op (t, i);
4645 tree op = TREE_VALUE (link);
4647 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4649 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
4650 &allows_mem, &allows_reg);
4652 /* A memory constraint makes the address of the operand escape. */
4653 if (!allows_reg && allows_mem)
4654 make_escape_constraint (build_fold_addr_expr (op));
4655 /* Strictly we'd only need the constraint to ESCAPED if
4656 the asm clobbers memory, otherwise using something
4657 along the lines of per-call clobbers/uses would be enough. */
4658 else if (op)
4659 make_escape_constraint (op);
4663 VEC_free (ce_s, heap, rhsc);
4664 VEC_free (ce_s, heap, lhsc);
4668 /* Create a constraint adding to the clobber set of FI the memory
4669 pointed to by PTR. */
4671 static void
4672 process_ipa_clobber (varinfo_t fi, tree ptr)
4674 VEC(ce_s, heap) *ptrc = NULL;
4675 struct constraint_expr *c, lhs;
4676 unsigned i;
4677 get_constraint_for_rhs (ptr, &ptrc);
4678 lhs = get_function_part_constraint (fi, fi_clobbers);
4679 FOR_EACH_VEC_ELT (ce_s, ptrc, i, c)
4680 process_constraint (new_constraint (lhs, *c));
4681 VEC_free (ce_s, heap, ptrc);
4684 /* Walk statement T setting up clobber and use constraints according to the
4685 references found in T. This function is a main part of the
4686 IPA constraint builder. */
4688 static void
4689 find_func_clobbers (gimple origt)
4691 gimple t = origt;
4692 VEC(ce_s, heap) *lhsc = NULL;
4693 VEC(ce_s, heap) *rhsc = NULL;
4694 varinfo_t fi;
4696 /* Add constraints for clobbered/used in IPA mode.
4697 We are not interested in what automatic variables are clobbered
4698 or used, as we only use the information in the caller, to which
4699 they do not escape. */
4700 gcc_assert (in_ipa_mode);
4702 /* If the stmt refers to memory in any way it had better have a VUSE. */
4703 if (gimple_vuse (t) == NULL_TREE)
4704 return;
4706 /* We'd better have function information for the current function. */
4707 fi = lookup_vi_for_tree (cfun->decl);
4708 gcc_assert (fi != NULL);
4710 /* Account for stores in assignments and calls. */
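/* A hedged example: in function FN, a store such as *p = x or
   glob = x adds the address of the stored-to location to FN.clobbers,
   unless the destination is an automatic variable of FN itself, which
   callers never care about.  */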
4711 if (gimple_vdef (t) != NULL_TREE
4712 && gimple_has_lhs (t))
4714 tree lhs = gimple_get_lhs (t);
4715 tree tem = lhs;
4716 while (handled_component_p (tem))
4717 tem = TREE_OPERAND (tem, 0);
4718 if ((DECL_P (tem)
4719 && !auto_var_in_fn_p (tem, cfun->decl))
4720 || INDIRECT_REF_P (tem)
4721 || (TREE_CODE (tem) == MEM_REF
4722 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4723 && auto_var_in_fn_p
4724 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
4726 struct constraint_expr lhsc, *rhsp;
4727 unsigned i;
4728 lhsc = get_function_part_constraint (fi, fi_clobbers);
4729 get_constraint_for_address_of (lhs, &rhsc);
4730 FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
4731 process_constraint (new_constraint (lhsc, *rhsp));
4732 VEC_free (ce_s, heap, rhsc);
4736 /* Account for uses in assignments and returns. */
4737 if (gimple_assign_single_p (t)
4738 || (gimple_code (t) == GIMPLE_RETURN
4739 && gimple_return_retval (t) != NULL_TREE))
4741 tree rhs = (gimple_assign_single_p (t)
4742 ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
4743 tree tem = rhs;
4744 while (handled_component_p (tem))
4745 tem = TREE_OPERAND (tem, 0);
4746 if ((DECL_P (tem)
4747 && !auto_var_in_fn_p (tem, cfun->decl))
4748 || INDIRECT_REF_P (tem)
4749 || (TREE_CODE (tem) == MEM_REF
4750 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4751 && auto_var_in_fn_p
4752 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
4754 struct constraint_expr lhs, *rhsp;
4755 unsigned i;
4756 lhs = get_function_part_constraint (fi, fi_uses);
4757 get_constraint_for_address_of (rhs, &rhsc);
4758 FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
4759 process_constraint (new_constraint (lhs, *rhsp));
4760 VEC_free (ce_s, heap, rhsc);
4764 if (is_gimple_call (t))
4766 varinfo_t cfi = NULL;
4767 tree decl = gimple_call_fndecl (t);
4768 struct constraint_expr lhs, rhs;
4769 unsigned i, j;
4771 /* For builtins we do not have separate function info. For those
4772 we do not generate escapes for, we have to generate clobbers/uses. */
4773 if (gimple_call_builtin_class_p (t, BUILT_IN_NORMAL))
4774 switch (DECL_FUNCTION_CODE (decl))
4776 /* The following functions use and clobber memory pointed to
4777 by their arguments. */
4778 case BUILT_IN_STRCPY:
4779 case BUILT_IN_STRNCPY:
4780 case BUILT_IN_BCOPY:
4781 case BUILT_IN_MEMCPY:
4782 case BUILT_IN_MEMMOVE:
4783 case BUILT_IN_MEMPCPY:
4784 case BUILT_IN_STPCPY:
4785 case BUILT_IN_STPNCPY:
4786 case BUILT_IN_STRCAT:
4787 case BUILT_IN_STRNCAT:
4788 case BUILT_IN_STRCPY_CHK:
4789 case BUILT_IN_STRNCPY_CHK:
4790 case BUILT_IN_MEMCPY_CHK:
4791 case BUILT_IN_MEMMOVE_CHK:
4792 case BUILT_IN_MEMPCPY_CHK:
4793 case BUILT_IN_STPCPY_CHK:
4794 case BUILT_IN_STPNCPY_CHK:
4795 case BUILT_IN_STRCAT_CHK:
4796 case BUILT_IN_STRNCAT_CHK:
4798 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4799 == BUILT_IN_BCOPY ? 1 : 0));
4800 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4801 == BUILT_IN_BCOPY ? 0 : 1));
4802 unsigned i;
4803 struct constraint_expr *rhsp, *lhsp;
4804 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4805 lhs = get_function_part_constraint (fi, fi_clobbers);
4806 FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
4807 process_constraint (new_constraint (lhs, *lhsp));
4808 VEC_free (ce_s, heap, lhsc);
4809 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4810 lhs = get_function_part_constraint (fi, fi_uses);
4811 FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
4812 process_constraint (new_constraint (lhs, *rhsp));
4813 VEC_free (ce_s, heap, rhsc);
4814 return;
4816 /* The following function clobbers memory pointed to by
4817 its argument. */
4818 case BUILT_IN_MEMSET:
4819 case BUILT_IN_MEMSET_CHK:
4821 tree dest = gimple_call_arg (t, 0);
4822 unsigned i;
4823 ce_s *lhsp;
4824 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4825 lhs = get_function_part_constraint (fi, fi_clobbers);
4826 FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
4827 process_constraint (new_constraint (lhs, *lhsp));
4828 VEC_free (ce_s, heap, lhsc);
4829 return;
4831 /* The following functions clobber their second and third
4832 arguments. */
4833 case BUILT_IN_SINCOS:
4834 case BUILT_IN_SINCOSF:
4835 case BUILT_IN_SINCOSL:
4837 process_ipa_clobber (fi, gimple_call_arg (t, 1));
4838 process_ipa_clobber (fi, gimple_call_arg (t, 2));
4839 return;
4841 /* The following functions clobber their second argument. */
4842 case BUILT_IN_FREXP:
4843 case BUILT_IN_FREXPF:
4844 case BUILT_IN_FREXPL:
4845 case BUILT_IN_LGAMMA_R:
4846 case BUILT_IN_LGAMMAF_R:
4847 case BUILT_IN_LGAMMAL_R:
4848 case BUILT_IN_GAMMA_R:
4849 case BUILT_IN_GAMMAF_R:
4850 case BUILT_IN_GAMMAL_R:
4851 case BUILT_IN_MODF:
4852 case BUILT_IN_MODFF:
4853 case BUILT_IN_MODFL:
4855 process_ipa_clobber (fi, gimple_call_arg (t, 1));
4856 return;
4858 /* The following functions clobber their third argument. */
4859 case BUILT_IN_REMQUO:
4860 case BUILT_IN_REMQUOF:
4861 case BUILT_IN_REMQUOL:
4863 process_ipa_clobber (fi, gimple_call_arg (t, 2));
4864 return;
4866 /* The following functions neither read nor clobber memory. */
4867 case BUILT_IN_ASSUME_ALIGNED:
4868 case BUILT_IN_FREE:
4869 return;
4870 /* Trampolines are of no interest to us. */
4871 case BUILT_IN_INIT_TRAMPOLINE:
4872 case BUILT_IN_ADJUST_TRAMPOLINE:
4873 return;
4874 case BUILT_IN_VA_START:
4875 case BUILT_IN_VA_END:
4876 return;
4877 /* printf-style functions may have hooks to set pointers to
4878 point somewhere into the generated string. Leave them
4879 for a later exercise... */
4880 default:
4881 /* Fallthru to general call handling. */;
4884 /* Parameters passed by value are used. */
4885 lhs = get_function_part_constraint (fi, fi_uses);
4886 for (i = 0; i < gimple_call_num_args (t); i++)
4888 struct constraint_expr *rhsp;
4889 tree arg = gimple_call_arg (t, i);
4891 if (TREE_CODE (arg) == SSA_NAME
4892 || is_gimple_min_invariant (arg))
4893 continue;
4895 get_constraint_for_address_of (arg, &rhsc);
4896 FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
4897 process_constraint (new_constraint (lhs, *rhsp));
4898 VEC_free (ce_s, heap, rhsc);
4901 /* Build constraints for propagating clobbers/uses along the
4902 callgraph edges. */
4903 cfi = get_fi_for_callee (t);
4904 if (cfi->id == anything_id)
4906 if (gimple_vdef (t))
4907 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
4908 anything_id);
4909 make_constraint_from (first_vi_for_offset (fi, fi_uses),
4910 anything_id);
4911 return;
4914 /* For callees without function info (that's external functions),
4915 ESCAPED is clobbered and used. */
4916 if (gimple_call_fndecl (t)
4917 && !cfi->is_fn_info)
4919 varinfo_t vi;
4921 if (gimple_vdef (t))
4922 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
4923 escaped_id);
4924 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
4926 /* Also honor the call statement use/clobber info. */
4927 if ((vi = lookup_call_clobber_vi (t)) != NULL)
4928 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
4929 vi->id);
4930 if ((vi = lookup_call_use_vi (t)) != NULL)
4931 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
4932 vi->id);
4933 return;
4936 /* Otherwise the caller clobbers and uses what the callee does.
4937 ??? This should use a new complex constraint that filters
4938 local variables of the callee. */
4939 if (gimple_vdef (t))
4941 lhs = get_function_part_constraint (fi, fi_clobbers);
4942 rhs = get_function_part_constraint (cfi, fi_clobbers);
4943 process_constraint (new_constraint (lhs, rhs));
4945 lhs = get_function_part_constraint (fi, fi_uses);
4946 rhs = get_function_part_constraint (cfi, fi_uses);
4947 process_constraint (new_constraint (lhs, rhs));
4949 else if (gimple_code (t) == GIMPLE_ASM)
4951 /* ??? Ick. We can do better. */
4952 if (gimple_vdef (t))
4953 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
4954 anything_id);
4955 make_constraint_from (first_vi_for_offset (fi, fi_uses),
4956 anything_id);
4959 VEC_free (ce_s, heap, rhsc);
4963 /* Find the first varinfo in the same variable as START that overlaps with
4964 OFFSET. Return NULL if we can't find one. */
4966 static varinfo_t
4967 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
4969 /* If the offset is outside of the variable, bail out. */
4970 if (offset >= start->fullsize)
4971 return NULL;
4973 /* If we cannot reach offset from start, look up the first field
4974 and start from there. */
4975 if (start->offset > offset)
4976 start = lookup_vi_for_tree (start->decl);
4978 while (start)
4980 /* We may not find a variable in the field list with the actual
4981 offset when we have glommed a structure to a variable.
4982 In that case, however, offset should still be within the size
4983 of the variable. */
4984 if (offset >= start->offset
4985 && (offset - start->offset) < start->size)
4986 return start;
4988 start = start->next;
4991 return NULL;
4994 /* Find the first varinfo in the same variable as START that overlaps with
4995 OFFSET. If there is no such varinfo the varinfo directly preceding
4996 OFFSET is returned. */
4998 static varinfo_t
4999 first_or_preceding_vi_for_offset (varinfo_t start,
5000 unsigned HOST_WIDE_INT offset)
5002 /* If we cannot reach offset from start, look up the first field
5003 and start from there. */
5004 if (start->offset > offset)
5005 start = lookup_vi_for_tree (start->decl);
5007 /* We may not find a variable in the field list with the actual
5008 offset when we have glommed a structure to a variable.
5009 In that case, however, offset should still be within the size
5010 of the variable.
5011 If we got beyond the offset we look for, return the field
5012 directly preceding that offset, which may be the last field. */
5013 while (start->next
5014 && offset >= start->offset
5015 && !((offset - start->offset) < start->size))
5016 start = start->next;
5018 return start;
5022 /* This structure is used during pushing fields onto the fieldstack
5023 to track the offset of the field, since bitpos_of_field gives it
5024 relative to its immediate containing type, and we want it relative
5025 to the ultimate containing object. */
5027 struct fieldoff
5029 /* Offset from the base of the base containing object to this field. */
5030 HOST_WIDE_INT offset;
5032 /* Size, in bits, of the field. */
5033 unsigned HOST_WIDE_INT size;
5035 unsigned has_unknown_size : 1;
5037 unsigned must_have_pointers : 1;
5039 unsigned may_have_pointers : 1;
5041 unsigned only_restrict_pointers : 1;
5043 typedef struct fieldoff fieldoff_s;
5045 DEF_VEC_O(fieldoff_s);
5046 DEF_VEC_ALLOC_O(fieldoff_s,heap);
5048 /* qsort comparison function for two fieldoff's PA and PB */
5050 static int
5051 fieldoff_compare (const void *pa, const void *pb)
5053 const fieldoff_s *foa = (const fieldoff_s *)pa;
5054 const fieldoff_s *fob = (const fieldoff_s *)pb;
5055 unsigned HOST_WIDE_INT foasize, fobsize;
5057 if (foa->offset < fob->offset)
5058 return -1;
5059 else if (foa->offset > fob->offset)
5060 return 1;
5062 foasize = foa->size;
5063 fobsize = fob->size;
5064 if (foasize < fobsize)
5065 return -1;
5066 else if (foasize > fobsize)
5067 return 1;
5068 return 0;
5071 /* Sort a fieldstack according to the field offset and sizes. */
5072 static void
5073 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
5075 VEC_qsort (fieldoff_s, fieldstack, fieldoff_compare);
5078 /* Return true if T is a type that can have subvars. */
5080 static inline bool
5081 type_can_have_subvars (const_tree t)
5083 /* Aggregates without overlapping fields can have subvars. */
5084 return TREE_CODE (t) == RECORD_TYPE;
5087 /* Return true if V is a tree that we can have subvars for.
5088 Normally, this is any aggregate type. Also complex
5089 types which are not gimple registers can have subvars. */
5091 static inline bool
5092 var_can_have_subvars (const_tree v)
5094 /* Volatile variables should never have subvars. */
5095 if (TREE_THIS_VOLATILE (v))
5096 return false;
5098 /* Non decls or memory tags can never have subvars. */
5099 if (!DECL_P (v))
5100 return false;
5102 return type_can_have_subvars (TREE_TYPE (v));
5105 /* Return true if T is a type that does contain pointers. */
5107 static bool
5108 type_must_have_pointers (tree type)
5110 if (POINTER_TYPE_P (type))
5111 return true;
5113 if (TREE_CODE (type) == ARRAY_TYPE)
5114 return type_must_have_pointers (TREE_TYPE (type));
5116 /* A function or method can have pointers as arguments, so track
5117 those separately. */
5118 if (TREE_CODE (type) == FUNCTION_TYPE
5119 || TREE_CODE (type) == METHOD_TYPE)
5120 return true;
5122 return false;
5125 static bool
5126 field_must_have_pointers (tree t)
5128 return type_must_have_pointers (TREE_TYPE (t));
5131 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5132 the fields of TYPE onto fieldstack, recording their offsets along
5133 the way.
5135 OFFSET is used to keep track of the offset in this entire
5136 structure, rather than just the immediately containing structure.
5137 Returns false if the caller is supposed to handle the field we
5138 recursed for. */
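/* As a hedged example, assuming 32-bit int and 64-bit pointers,

     struct S { int a; int b; int *p; };

   pushes two fieldoffs: one merged entry at offset 0 of size 64
   covering A and B (adjacent fields without pointers are merged), and
   one entry at offset 64 of size 64 for P with must_have_pointers
   set.  */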
5140 static bool
5141 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
5142 HOST_WIDE_INT offset)
5144 tree field;
5145 bool empty_p = true;
5147 if (TREE_CODE (type) != RECORD_TYPE)
5148 return false;
5150 /* If the vector of fields is growing too big, bail out early.
5151 Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, so make
5152 sure this check fails in that case. */
5153 if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5154 return false;
5156 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5157 if (TREE_CODE (field) == FIELD_DECL)
5159 bool push = false;
5160 HOST_WIDE_INT foff = bitpos_of_field (field);
5162 if (!var_can_have_subvars (field)
5163 || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
5164 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
5165 push = true;
5166 else if (!push_fields_onto_fieldstack
5167 (TREE_TYPE (field), fieldstack, offset + foff)
5168 && (DECL_SIZE (field)
5169 && !integer_zerop (DECL_SIZE (field))))
5170 /* Empty structures may have actual size, like in C++. So
5171 if we did not push any subfields and the size is
5172 nonzero, push the field itself onto the stack. */
5173 push = true;
5175 if (push)
5177 fieldoff_s *pair = NULL;
5178 bool has_unknown_size = false;
5179 bool must_have_pointers_p;
5181 if (!VEC_empty (fieldoff_s, *fieldstack))
5182 pair = VEC_last (fieldoff_s, *fieldstack);
5184 /* If there isn't anything at offset zero, create something. */
5185 if (!pair
5186 && offset + foff != 0)
5188 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
5189 pair->offset = 0;
5190 pair->size = offset + foff;
5191 pair->has_unknown_size = false;
5192 pair->must_have_pointers = false;
5193 pair->may_have_pointers = false;
5194 pair->only_restrict_pointers = false;
5197 if (!DECL_SIZE (field)
5198 || !host_integerp (DECL_SIZE (field), 1))
5199 has_unknown_size = true;
5201 /* If adjacent fields do not contain pointers merge them. */
5202 must_have_pointers_p = field_must_have_pointers (field);
5203 if (pair
5204 && !has_unknown_size
5205 && !must_have_pointers_p
5206 && !pair->must_have_pointers
5207 && !pair->has_unknown_size
5208 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5210 pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
5212 else
5214 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
5215 pair->offset = offset + foff;
5216 pair->has_unknown_size = has_unknown_size;
5217 if (!has_unknown_size)
5218 pair->size = TREE_INT_CST_LOW (DECL_SIZE (field));
5219 else
5220 pair->size = -1;
5221 pair->must_have_pointers = must_have_pointers_p;
5222 pair->may_have_pointers = true;
5223 pair->only_restrict_pointers
5224 = (!has_unknown_size
5225 && POINTER_TYPE_P (TREE_TYPE (field))
5226 && TYPE_RESTRICT (TREE_TYPE (field)));
5230 empty_p = false;
5233 return !empty_p;
5236 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5237 if it is a varargs function. */
5239 static unsigned int
5240 count_num_arguments (tree decl, bool *is_varargs)
5242 unsigned int num = 0;
5243 tree t;
5245 /* Capture named arguments for K&R functions. They do not
5246 have a prototype and thus no TYPE_ARG_TYPES. */
5247 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5248 ++num;
5250 /* Check if the function has variadic arguments. */
5251 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5252 if (TREE_VALUE (t) == void_type_node)
5253 break;
5254 if (!t)
5255 *is_varargs = true;
5257 return num;
5260 /* Create the function info variable for DECL, using NAME, and return
5261 the varinfo we've created for the function. */
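/* A hedged overview of the layout built here: the function info is a
   chain of sub-variables at fixed offsets -- the function decl itself
   at offset 0, then entries for clobbers (fi_clobbers), uses
   (fi_uses), the static chain (fi_static_chain) if there is one, the
   result (fi_result), one entry per named parameter starting at
   fi_parm_base, and a final catch-all entry when the function is
   variadic.  */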
5263 static varinfo_t
5264 create_function_info_for (tree decl, const char *name)
5266 struct function *fn = DECL_STRUCT_FUNCTION (decl);
5267 varinfo_t vi, prev_vi;
5268 tree arg;
5269 unsigned int i;
5270 bool is_varargs = false;
5271 unsigned int num_args = count_num_arguments (decl, &is_varargs);
5273 /* Create the variable info. */
5275 vi = new_var_info (decl, name);
5276 vi->offset = 0;
5277 vi->size = 1;
5278 vi->fullsize = fi_parm_base + num_args;
5279 vi->is_fn_info = 1;
5280 vi->may_have_pointers = false;
5281 if (is_varargs)
5282 vi->fullsize = ~0;
5283 insert_vi_for_tree (vi->decl, vi);
5285 prev_vi = vi;
5287 /* Create a variable for things the function clobbers and one for
5288 things the function uses. */
5290 varinfo_t clobbervi, usevi;
5291 const char *newname;
5292 char *tempname;
5294 asprintf (&tempname, "%s.clobber", name);
5295 newname = ggc_strdup (tempname);
5296 free (tempname);
5298 clobbervi = new_var_info (NULL, newname);
5299 clobbervi->offset = fi_clobbers;
5300 clobbervi->size = 1;
5301 clobbervi->fullsize = vi->fullsize;
5302 clobbervi->is_full_var = true;
5303 clobbervi->is_global_var = false;
5304 gcc_assert (prev_vi->offset < clobbervi->offset);
5305 prev_vi->next = clobbervi;
5306 prev_vi = clobbervi;
5308 asprintf (&tempname, "%s.use", name);
5309 newname = ggc_strdup (tempname);
5310 free (tempname);
5312 usevi = new_var_info (NULL, newname);
5313 usevi->offset = fi_uses;
5314 usevi->size = 1;
5315 usevi->fullsize = vi->fullsize;
5316 usevi->is_full_var = true;
5317 usevi->is_global_var = false;
5318 gcc_assert (prev_vi->offset < usevi->offset);
5319 prev_vi->next = usevi;
5320 prev_vi = usevi;
5323 /* And one for the static chain. */
5324 if (fn->static_chain_decl != NULL_TREE)
5326 varinfo_t chainvi;
5327 const char *newname;
5328 char *tempname;
5330 asprintf (&tempname, "%s.chain", name);
5331 newname = ggc_strdup (tempname);
5332 free (tempname);
5334 chainvi = new_var_info (fn->static_chain_decl, newname);
5335 chainvi->offset = fi_static_chain;
5336 chainvi->size = 1;
5337 chainvi->fullsize = vi->fullsize;
5338 chainvi->is_full_var = true;
5339 chainvi->is_global_var = false;
5340 gcc_assert (prev_vi->offset < chainvi->offset);
5341 prev_vi->next = chainvi;
5342 prev_vi = chainvi;
5343 insert_vi_for_tree (fn->static_chain_decl, chainvi);
5346 /* Create a variable for the return var. */
5347 if (DECL_RESULT (decl) != NULL
5348 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
5350 varinfo_t resultvi;
5351 const char *newname;
5352 char *tempname;
5353 tree resultdecl = decl;
5355 if (DECL_RESULT (decl))
5356 resultdecl = DECL_RESULT (decl);
5358 asprintf (&tempname, "%s.result", name);
5359 newname = ggc_strdup (tempname);
5360 free (tempname);
5362 resultvi = new_var_info (resultdecl, newname);
5363 resultvi->offset = fi_result;
5364 resultvi->size = 1;
5365 resultvi->fullsize = vi->fullsize;
5366 resultvi->is_full_var = true;
5367 if (DECL_RESULT (decl))
5368 resultvi->may_have_pointers = true;
5369 gcc_assert (prev_vi->offset < resultvi->offset);
5370 prev_vi->next = resultvi;
5371 prev_vi = resultvi;
5372 if (DECL_RESULT (decl))
5373 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
5376 /* Set up variables for each argument. */
5377 arg = DECL_ARGUMENTS (decl);
5378 for (i = 0; i < num_args; i++)
5380 varinfo_t argvi;
5381 const char *newname;
5382 char *tempname;
5383 tree argdecl = decl;
5385 if (arg)
5386 argdecl = arg;
5388 asprintf (&tempname, "%s.arg%d", name, i);
5389 newname = ggc_strdup (tempname);
5390 free (tempname);
5392 argvi = new_var_info (argdecl, newname);
5393 argvi->offset = fi_parm_base + i;
5394 argvi->size = 1;
5395 argvi->is_full_var = true;
5396 argvi->fullsize = vi->fullsize;
5397 if (arg)
5398 argvi->may_have_pointers = true;
5399 gcc_assert (prev_vi->offset < argvi->offset);
5400 prev_vi->next = argvi;
5401 prev_vi = argvi;
5402 if (arg)
5404 insert_vi_for_tree (arg, argvi);
5405 arg = DECL_CHAIN (arg);
5409 /* Add one representative for all further args. */
5410 if (is_varargs)
5412 varinfo_t argvi;
5413 const char *newname;
5414 char *tempname;
5415 tree decl;
5417 asprintf (&tempname, "%s.varargs", name);
5418 newname = ggc_strdup (tempname);
5419 free (tempname);
5421 /* We need something that can be pointed to for va_start. */
5422 decl = build_fake_var_decl (ptr_type_node);
5424 argvi = new_var_info (decl, newname);
5425 argvi->offset = fi_parm_base + num_args;
5426 argvi->size = ~0;
5427 argvi->is_full_var = true;
5428 argvi->is_heap_var = true;
5429 argvi->fullsize = vi->fullsize;
5430 gcc_assert (prev_vi->offset < argvi->offset);
5431 prev_vi->next = argvi;
5432 prev_vi = argvi;
5435 return vi;
5439 /* Return true if FIELDSTACK contains fields that overlap.
5440 FIELDSTACK is assumed to be sorted by offset. */
5442 static bool
5443 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
5445 fieldoff_s *fo = NULL;
5446 unsigned int i;
5447 HOST_WIDE_INT lastoffset = -1;
5449 FOR_EACH_VEC_ELT (fieldoff_s, fieldstack, i, fo)
5451 if (fo->offset == lastoffset)
5452 return true;
5453 lastoffset = fo->offset;
5455 return false;
5458 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5459 This will also create any varinfo structures necessary for fields
5460 of DECL. */
5462 static varinfo_t
5463 create_variable_info_for_1 (tree decl, const char *name)
5465 varinfo_t vi, newvi;
5466 tree decl_type = TREE_TYPE (decl);
5467 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5468 VEC (fieldoff_s,heap) *fieldstack = NULL;
5469 fieldoff_s *fo;
5470 unsigned int i;
5472 if (!declsize
5473 || !host_integerp (declsize, 1))
5475 vi = new_var_info (decl, name);
5476 vi->offset = 0;
5477 vi->size = ~0;
5478 vi->fullsize = ~0;
5479 vi->is_unknown_size_var = true;
5480 vi->is_full_var = true;
5481 vi->may_have_pointers = true;
5482 return vi;
5485 /* Collect field information. */
5486 if (use_field_sensitive
5487 && var_can_have_subvars (decl)
5488 /* ??? Force us to not use subfields for global initializers
5489 in IPA mode. Else we'd have to parse arbitrary initializers. */
5490 && !(in_ipa_mode
5491 && is_global_var (decl)
5492 && DECL_INITIAL (decl)))
5494 fieldoff_s *fo = NULL;
5495 bool notokay = false;
5496 unsigned int i;
5498 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
5500 for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
5501 if (fo->has_unknown_size
5502 || fo->offset < 0)
5504 notokay = true;
5505 break;
5508 /* We can't sort them if we have a field with a variable sized type,
5509 which will make notokay = true. In that case, we are going to return
5510 without creating varinfos for the fields anyway, so sorting them is a
5511 waste to boot. */
5512 if (!notokay)
5514 sort_fieldstack (fieldstack);
5515 /* Due to some C++ FE issues, like PR 22488, we might end up with
5516 what appear to be overlapping fields even though they,
5517 in reality, do not overlap. Until the C++ FE is fixed,
5518 we will simply disable field-sensitivity for these cases. */
5519 notokay = check_for_overlaps (fieldstack);
5522 if (notokay)
5523 VEC_free (fieldoff_s, heap, fieldstack);
5526 /* If we didn't end up collecting sub-variables create a full
5527 variable for the decl. */
5528 if (VEC_length (fieldoff_s, fieldstack) <= 1
5529 || VEC_length (fieldoff_s, fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5531 vi = new_var_info (decl, name);
5532 vi->offset = 0;
5533 vi->may_have_pointers = true;
5534 vi->fullsize = TREE_INT_CST_LOW (declsize);
5535 vi->size = vi->fullsize;
5536 vi->is_full_var = true;
5537 VEC_free (fieldoff_s, heap, fieldstack);
5538 return vi;
5541 vi = new_var_info (decl, name);
5542 vi->fullsize = TREE_INT_CST_LOW (declsize);
5543 for (i = 0, newvi = vi;
5544 VEC_iterate (fieldoff_s, fieldstack, i, fo);
5545 ++i, newvi = newvi->next)
5547 const char *newname = "NULL";
5548 char *tempname;
5550 if (dump_file)
5552 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
5553 "+" HOST_WIDE_INT_PRINT_DEC, name, fo->offset, fo->size);
5554 newname = ggc_strdup (tempname);
5555 free (tempname);
5557 newvi->name = newname;
5558 newvi->offset = fo->offset;
5559 newvi->size = fo->size;
5560 newvi->fullsize = vi->fullsize;
5561 newvi->may_have_pointers = fo->may_have_pointers;
5562 newvi->only_restrict_pointers = fo->only_restrict_pointers;
5563 if (i + 1 < VEC_length (fieldoff_s, fieldstack))
5564 newvi->next = new_var_info (decl, name);
5567 VEC_free (fieldoff_s, heap, fieldstack);
5569 return vi;
5572 static unsigned int
5573 create_variable_info_for (tree decl, const char *name)
5575 varinfo_t vi = create_variable_info_for_1 (decl, name);
5576 unsigned int id = vi->id;
5578 insert_vi_for_tree (decl, vi);
5580 if (TREE_CODE (decl) != VAR_DECL)
5581 return id;
5583 /* Create initial constraints for globals. */
5584 for (; vi; vi = vi->next)
5586 if (!vi->may_have_pointers
5587 || !vi->is_global_var)
5588 continue;
5590 /* Mark global restrict qualified pointers. */
5591 if ((POINTER_TYPE_P (TREE_TYPE (decl))
5592 && TYPE_RESTRICT (TREE_TYPE (decl)))
5593 || vi->only_restrict_pointers)
5595 make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
5596 continue;
5599 /* In non-IPA mode the initializer from nonlocal is all we need. */
5600 if (!in_ipa_mode
5601 || DECL_HARD_REGISTER (decl))
5602 make_copy_constraint (vi, nonlocal_id);
5604 /* In IPA mode parse the initializer and generate proper constraints
5605 for it. */
5606 else
5608 struct varpool_node *vnode = varpool_get_node (decl);
5610 /* For escaped variables initialize them from nonlocal. */
5611 if (!varpool_all_refs_explicit_p (vnode))
5612 make_copy_constraint (vi, nonlocal_id);
5614 /* If this is a global variable with an initializer and we are in
5615 IPA mode generate constraints for it. */
5616 if (DECL_INITIAL (decl))
5618 VEC (ce_s, heap) *rhsc = NULL;
5619 struct constraint_expr lhs, *rhsp;
5620 unsigned i;
5621 get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
5622 lhs.var = vi->id;
5623 lhs.offset = 0;
5624 lhs.type = SCALAR;
5625 FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
5626 process_constraint (new_constraint (lhs, *rhsp));
5627 /* If this is a variable that escapes from the unit
5628 the initializer escapes as well. */
5629 if (!varpool_all_refs_explicit_p (vnode))
5631 lhs.var = escaped_id;
5632 lhs.offset = 0;
5633 lhs.type = SCALAR;
5634 FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
5635 process_constraint (new_constraint (lhs, *rhsp));
5637 VEC_free (ce_s, heap, rhsc);
5642 return id;
5645 /* Print out the points-to solution for VAR to FILE. */
5647 static void
5648 dump_solution_for_var (FILE *file, unsigned int var)
5650 varinfo_t vi = get_varinfo (var);
5651 unsigned int i;
5652 bitmap_iterator bi;
5654 /* Dump the solution for unified vars anyway; this avoids difficulties
5655 in scanning dumps in the testsuite. */
5656 fprintf (file, "%s = { ", vi->name);
5657 vi = get_varinfo (find (var));
5658 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5659 fprintf (file, "%s ", get_varinfo (i)->name);
5660 fprintf (file, "}");
5662 /* But note when the variable was unified. */
5663 if (vi->id != var)
5664 fprintf (file, " same as %s", vi->name);
5666 fprintf (file, "\n");
5669 /* Print the points-to solution for VAR to stdout. */
5671 DEBUG_FUNCTION void
5672 debug_solution_for_var (unsigned int var)
5674 dump_solution_for_var (stdout, var);
5677 /* Create varinfo structures for all of the variables in the
5678 function for intraprocedural mode. */
5680 static void
5681 intra_create_variable_infos (void)
5683 tree t;
5685 /* For each incoming pointer argument arg, create the constraint ARG
5686 = NONLOCAL or a dummy variable if it is a restrict qualified
5687 passed-by-reference argument. */
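/* As a hedged example, for

     void foo (int *p, int * restrict q);

   P gets the constraint P = NONLOCAL, while Q is instead constrained
   against a global restrict tag via make_constraint_from_global_restrict.
   Restrict-qualified arguments passed by reference additionally get a
   fresh PARM_NOALIAS representative for the pointed-to object, as set
   up in the loop below.  */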
5688 for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
5690 varinfo_t p = get_vi_for_tree (t);
5692 /* For restrict qualified pointers to objects passed by
5693 reference build a real representative for the pointed-to object.
5694 Treat restrict qualified references the same. */
5695 if (TYPE_RESTRICT (TREE_TYPE (t))
5696 && ((DECL_BY_REFERENCE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
5697 || TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
5698 && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t))))
5700 struct constraint_expr lhsc, rhsc;
5701 varinfo_t vi;
5702 tree heapvar = build_fake_var_decl (TREE_TYPE (TREE_TYPE (t)));
5703 DECL_EXTERNAL (heapvar) = 1;
5704 vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
5705 insert_vi_for_tree (heapvar, vi);
5706 lhsc.var = p->id;
5707 lhsc.type = SCALAR;
5708 lhsc.offset = 0;
5709 rhsc.var = vi->id;
5710 rhsc.type = ADDRESSOF;
5711 rhsc.offset = 0;
5712 process_constraint (new_constraint (lhsc, rhsc));
5713 for (; vi; vi = vi->next)
5714 if (vi->may_have_pointers)
5716 if (vi->only_restrict_pointers)
5717 make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
5718 else
5719 make_copy_constraint (vi, nonlocal_id);
5721 continue;
5724 if (POINTER_TYPE_P (TREE_TYPE (t))
5725 && TYPE_RESTRICT (TREE_TYPE (t)))
5726 make_constraint_from_global_restrict (p, "PARM_RESTRICT");
5727 else
5729 for (; p; p = p->next)
5731 if (p->only_restrict_pointers)
5732 make_constraint_from_global_restrict (p, "PARM_RESTRICT");
5733 else if (p->may_have_pointers)
5734 make_constraint_from (p, nonlocal_id);
5739 /* Add a constraint for a result decl that is passed by reference. */
5740 if (DECL_RESULT (cfun->decl)
5741 && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
5743 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));
5745 for (p = result_vi; p; p = p->next)
5746 make_constraint_from (p, nonlocal_id);
5749 /* Add a constraint for the incoming static chain parameter. */
5750 if (cfun->static_chain_decl != NULL_TREE)
5752 varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);
5754 for (p = chain_vi; p; p = p->next)
5755 make_constraint_from (p, nonlocal_id);
5759 /* Structure used to put solution bitmaps in a hashtable so they can
5760 be shared among variables with the same points-to set. */
5762 typedef struct shared_bitmap_info
5764 bitmap pt_vars;
5765 hashval_t hashcode;
5766 } *shared_bitmap_info_t;
5767 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
5769 static htab_t shared_bitmap_table;
5771 /* Hash function for a shared_bitmap_info_t */
5773 static hashval_t
5774 shared_bitmap_hash (const void *p)
5776 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
5777 return bi->hashcode;
5780 /* Equality function for two shared_bitmap_info_t's. */
5782 static int
5783 shared_bitmap_eq (const void *p1, const void *p2)
5785 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
5786 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
5787 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
5790 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5791 existing instance if there is one, NULL otherwise. */
5793 static bitmap
5794 shared_bitmap_lookup (bitmap pt_vars)
5796 void **slot;
5797 struct shared_bitmap_info sbi;
5799 sbi.pt_vars = pt_vars;
5800 sbi.hashcode = bitmap_hash (pt_vars);
5802 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
5803 sbi.hashcode, NO_INSERT);
5804 if (!slot)
5805 return NULL;
5806 else
5807 return ((shared_bitmap_info_t) *slot)->pt_vars;
5811 /* Add a bitmap to the shared bitmap hashtable. */
5813 static void
5814 shared_bitmap_add (bitmap pt_vars)
5816 void **slot;
5817 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
5819 sbi->pt_vars = pt_vars;
5820 sbi->hashcode = bitmap_hash (pt_vars);
5822 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
5823 sbi->hashcode, INSERT);
5824 gcc_assert (!*slot);
5825 *slot = (void *) sbi;
5829 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
5831 static void
5832 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
5834 unsigned int i;
5835 bitmap_iterator bi;
5837 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
5839 varinfo_t vi = get_varinfo (i);
5841 /* The only artificial variables that are allowed in a may-alias
5842 set are heap variables. */
5843 if (vi->is_artificial_var && !vi->is_heap_var)
5844 continue;
5846 if (TREE_CODE (vi->decl) == VAR_DECL
5847 || TREE_CODE (vi->decl) == PARM_DECL
5848 || TREE_CODE (vi->decl) == RESULT_DECL)
5850 /* If we are in IPA mode we will not recompute points-to
5851 sets after inlining so make sure they stay valid. */
5852 if (in_ipa_mode
5853 && !DECL_PT_UID_SET_P (vi->decl))
5854 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
5856 /* Add the decl to the points-to set. Note that the points-to
5857 set contains global variables. */
5858 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
5859 if (vi->is_global_var)
5860 pt->vars_contains_global = true;
5866 /* Compute the points-to solution *PT for the variable VI. */
5868 static void
5869 find_what_var_points_to (varinfo_t orig_vi, struct pt_solution *pt)
5871 unsigned int i;
5872 bitmap_iterator bi;
5873 bitmap finished_solution;
5874 bitmap result;
5875 varinfo_t vi;
5877 memset (pt, 0, sizeof (struct pt_solution));
5879 /* This variable may have been collapsed, let's get the real
5880 variable. */
5881 vi = get_varinfo (find (orig_vi->id));
5883 /* Translate artificial variables into SSA_NAME_PTR_INFO
5884 attributes. */
5885 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5887 varinfo_t vi = get_varinfo (i);
5889 if (vi->is_artificial_var)
5891 if (vi->id == nothing_id)
5892 pt->null = 1;
5893 else if (vi->id == escaped_id)
5895 if (in_ipa_mode)
5896 pt->ipa_escaped = 1;
5897 else
5898 pt->escaped = 1;
5900 else if (vi->id == nonlocal_id)
5901 pt->nonlocal = 1;
5902 else if (vi->is_heap_var)
5903 /* We represent heapvars in the points-to set properly. */
5905 else if (vi->id == readonly_id)
5906 /* Nobody cares. */
5908 else if (vi->id == anything_id
5909 || vi->id == integer_id)
5910 pt->anything = 1;
5914 /* Instead of doing extra work, simply do not create
5915 elaborate points-to information for pt_anything pointers. */
5916 if (pt->anything)
5917 return;
5919 /* Share the final set of variables when possible. */
5920 finished_solution = BITMAP_GGC_ALLOC ();
5921 stats.points_to_sets_created++;
5923 set_uids_in_ptset (finished_solution, vi->solution, pt);
5924 result = shared_bitmap_lookup (finished_solution);
5925 if (!result)
5927 shared_bitmap_add (finished_solution);
5928 pt->vars = finished_solution;
5930 else
5932 pt->vars = result;
5933 bitmap_clear (finished_solution);
5937 /* Given a pointer variable P, fill in its points-to set. */
5939 static void
5940 find_what_p_points_to (tree p)
5942 struct ptr_info_def *pi;
5943 tree lookup_p = p;
5944 varinfo_t vi;
5946 /* For parameters, get at the points-to set for the actual parm
5947 decl. */
5948 if (TREE_CODE (p) == SSA_NAME
5949 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
5950 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL)
5951 && SSA_NAME_IS_DEFAULT_DEF (p))
5952 lookup_p = SSA_NAME_VAR (p);
5954 vi = lookup_vi_for_tree (lookup_p);
5955 if (!vi)
5956 return;
5958 pi = get_ptr_info (p);
5959 find_what_var_points_to (vi, &pi->pt);
5963 /* Query statistics for points-to solutions. */
5965 static struct {
5966 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
5967 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
5968 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
5969 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
5970 } pta_stats;
5972 void
5973 dump_pta_stats (FILE *s)
5975 fprintf (s, "\nPTA query stats:\n");
5976 fprintf (s, " pt_solution_includes: "
5977 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
5978 HOST_WIDE_INT_PRINT_DEC" queries\n",
5979 pta_stats.pt_solution_includes_no_alias,
5980 pta_stats.pt_solution_includes_no_alias
5981 + pta_stats.pt_solution_includes_may_alias);
5982 fprintf (s, " pt_solutions_intersect: "
5983 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
5984 HOST_WIDE_INT_PRINT_DEC" queries\n",
5985 pta_stats.pt_solutions_intersect_no_alias,
5986 pta_stats.pt_solutions_intersect_no_alias
5987 + pta_stats.pt_solutions_intersect_may_alias);
5991 /* Reset the points-to solution *PT to a conservative default
5992 (point to anything). */
5994 void
5995 pt_solution_reset (struct pt_solution *pt)
5997 memset (pt, 0, sizeof (struct pt_solution));
5998 pt->anything = true;
6001 /* Set the points-to solution *PT to point only to the variables
6002 in VARS.  VARS_CONTAINS_GLOBAL specifies whether the bitmap VARS
6003 contains global variables. */
6006 void
6007 pt_solution_set (struct pt_solution *pt, bitmap vars, bool vars_contains_global)
6009 memset (pt, 0, sizeof (struct pt_solution));
6010 pt->vars = vars;
6011 pt->vars_contains_global = vars_contains_global;
6014 /* Set the points-to solution *PT to point only to the variable VAR. */
6016 void
6017 pt_solution_set_var (struct pt_solution *pt, tree var)
6019 memset (pt, 0, sizeof (struct pt_solution));
6020 pt->vars = BITMAP_GGC_ALLOC ();
6021 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6022 pt->vars_contains_global = is_global_var (var);
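/* Hypothetical usage sketch (the real callers of this API live outside
   this file): a pass that proves a pointer PTR can only point to the
   declaration DECL might do

     struct pt_solution pt;
     pt_solution_set_var (&pt, decl);
     get_ptr_info (ptr)->pt = pt;

   after which the oracle queries below apply to PTR as usual.  */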
6025 /* Compute the union of the points-to solutions *DEST and *SRC and
6026 store the result in *DEST.  This changes the points-to bitmap of
6027 *DEST and thus must not be used if that bitmap might be shared.
6028 This function does not introduce sharing between the bitmaps of
6029 *SRC and *DEST. */
6031 static void
6032 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6034 dest->anything |= src->anything;
6035 if (dest->anything)
6037 pt_solution_reset (dest);
6038 return;
6041 dest->nonlocal |= src->nonlocal;
6042 dest->escaped |= src->escaped;
6043 dest->ipa_escaped |= src->ipa_escaped;
6044 dest->null |= src->null;
6045 dest->vars_contains_global |= src->vars_contains_global;
6046 if (!src->vars)
6047 return;
6049 if (!dest->vars)
6050 dest->vars = BITMAP_GGC_ALLOC ();
6051 bitmap_ior_into (dest->vars, src->vars);
6054 /* Return true if the points-to solution *PT is empty. */
6056 bool
6057 pt_solution_empty_p (struct pt_solution *pt)
6059 if (pt->anything
6060 || pt->nonlocal)
6061 return false;
6063 if (pt->vars
6064 && !bitmap_empty_p (pt->vars))
6065 return false;
6067 /* If the solution includes ESCAPED, check if that is empty. */
6068 if (pt->escaped
6069 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6070 return false;
6072 /* Likewise for the IPA ESCAPED solution. */
6073 if (pt->ipa_escaped
6074 && !pt_solution_empty_p (&ipa_escaped_pt))
6075 return false;
6077 return true;
6080 /* Return true if the points-to solution *PT points to exactly one
6081 variable and, if so, return that variable's uid in *UID. */
6083 bool
6084 pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
6086 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6087 || pt->null || pt->vars == NULL
6088 || !bitmap_single_bit_set_p (pt->vars))
6089 return false;
6091 *uid = bitmap_first_set_bit (pt->vars);
6092 return true;
6095 /* Return true if the points-to solution *PT includes global memory. */
6097 bool
6098 pt_solution_includes_global (struct pt_solution *pt)
6100 if (pt->anything
6101 || pt->nonlocal
6102 || pt->vars_contains_global)
6103 return true;
6105 if (pt->escaped)
6106 return pt_solution_includes_global (&cfun->gimple_df->escaped);
6108 if (pt->ipa_escaped)
6109 return pt_solution_includes_global (&ipa_escaped_pt);
6111 /* ??? This predicate is not correct for the IPA-PTA solution
6112 as we do not properly distinguish between unit escape points
6113 and global variables. */
6114 if (cfun->gimple_df->ipa_pta)
6115 return true;
6117 return false;
6120 /* Return true if the points-to solution *PT includes the variable
6121 declaration DECL. */
6123 static bool
6124 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
6126 if (pt->anything)
6127 return true;
6129 if (pt->nonlocal
6130 && is_global_var (decl))
6131 return true;
6133 if (pt->vars
6134 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
6135 return true;
6137 /* If the solution includes ESCAPED, check it. */
6138 if (pt->escaped
6139 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
6140 return true;
6142 /* Likewise for the IPA ESCAPED solution. */
6143 if (pt->ipa_escaped
6144 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
6145 return true;
6147 return false;
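/* Wrapper around pt_solution_includes_1 that additionally keeps the
   query statistics printed by dump_pta_stats.  */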
6150 bool
6151 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6153 bool res = pt_solution_includes_1 (pt, decl);
6154 if (res)
6155 ++pta_stats.pt_solution_includes_may_alias;
6156 else
6157 ++pta_stats.pt_solution_includes_no_alias;
6158 return res;
6161 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
6162 intersection. */
6164 static bool
6165 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
6167 if (pt1->anything || pt2->anything)
6168 return true;
6170 /* If either points to unknown global memory and the other points to
6171 any global memory, they alias. */
6172 if ((pt1->nonlocal
6173 && (pt2->nonlocal
6174 || pt2->vars_contains_global))
6175 || (pt2->nonlocal
6176 && pt1->vars_contains_global))
6177 return true;
6179 /* Check the escaped solution if required. */
6180 if ((pt1->escaped || pt2->escaped)
6181 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6183 /* If both point to escaped memory and that solution
6184 is not empty they alias. */
6185 if (pt1->escaped && pt2->escaped)
6186 return true;
6188 /* If either points to escaped memory see if the escaped solution
6189 intersects with the other. */
6190 if ((pt1->escaped
6191 && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
6192 || (pt2->escaped
6193 && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
6194 return true;
6197 /* Check the IPA escaped solution if required.
6198 ??? Do we need to check the local against the IPA escaped sets? */
6199 if ((pt1->ipa_escaped || pt2->ipa_escaped)
6200 && !pt_solution_empty_p (&ipa_escaped_pt))
6202 /* If both point to escaped memory and that solution
6203 is not empty they alias. */
6204 if (pt1->ipa_escaped && pt2->ipa_escaped)
6205 return true;
6207 /* If either points to escaped memory see if the escaped solution
6208 intersects with the other. */
6209 if ((pt1->ipa_escaped
6210 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
6211 || (pt2->ipa_escaped
6212 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
6213 return true;
6216 /* Now both pointers alias if their points-to solution intersects. */
6217 return (pt1->vars
6218 && pt2->vars
6219 && bitmap_intersect_p (pt1->vars, pt2->vars));
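/* Wrapper around pt_solutions_intersect_1 that additionally keeps the
   query statistics printed by dump_pta_stats.  */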
6222 bool
6223 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6225 bool res = pt_solutions_intersect_1 (pt1, pt2);
6226 if (res)
6227 ++pta_stats.pt_solutions_intersect_may_alias;
6228 else
6229 ++pta_stats.pt_solutions_intersect_no_alias;
6230 return res;
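/* Illustrative caller sketch (hypothetical; the real consumers live in
   the alias oracle): two indirect references through PTR1 and PTR2 can
   be disambiguated roughly as

     struct ptr_info_def *pi1 = SSA_NAME_PTR_INFO (ptr1);
     struct ptr_info_def *pi2 = SSA_NAME_PTR_INFO (ptr2);
     if (pi1 && pi2
         && !pt_solutions_intersect (&pi1->pt, &pi2->pt))
       return false;

   where returning false means the two accesses cannot alias; each such
   answer is counted as a disambiguation in the statistics above.  */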
6234 /* Dump points-to information to OUTFILE. */
6236 static void
6237 dump_sa_points_to_info (FILE *outfile)
6239 unsigned int i;
6241 fprintf (outfile, "\nPoints-to sets\n\n");
6243 if (dump_flags & TDF_STATS)
6245 fprintf (outfile, "Stats:\n");
6246 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6247 fprintf (outfile, "Non-pointer vars: %d\n",
6248 stats.nonpointer_vars);
6249 fprintf (outfile, "Statically unified vars: %d\n",
6250 stats.unified_vars_static);
6251 fprintf (outfile, "Dynamically unified vars: %d\n",
6252 stats.unified_vars_dynamic);
6253 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6254 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6255 fprintf (outfile, "Number of implicit edges: %d\n",
6256 stats.num_implicit_edges);
6259 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
6261 varinfo_t vi = get_varinfo (i);
6262 if (!vi->may_have_pointers)
6263 continue;
6264 dump_solution_for_var (outfile, i);
6269 /* Debug points-to information to stderr. */
6271 DEBUG_FUNCTION void
6272 debug_sa_points_to_info (void)
6274 dump_sa_points_to_info (stderr);
6278 /* Initialize the always-existing constraint variables for NULL,
6279 ANYTHING, READONLY, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER. */
6281 static void
6282 init_base_vars (void)
6284 struct constraint_expr lhs, rhs;
6285 varinfo_t var_anything;
6286 varinfo_t var_nothing;
6287 varinfo_t var_readonly;
6288 varinfo_t var_escaped;
6289 varinfo_t var_nonlocal;
6290 varinfo_t var_storedanything;
6291 varinfo_t var_integer;
6293 /* Create the NULL variable, used to represent that a variable points
6294 to NULL. */
6295 var_nothing = new_var_info (NULL_TREE, "NULL");
6296 gcc_assert (var_nothing->id == nothing_id);
6297 var_nothing->is_artificial_var = 1;
6298 var_nothing->offset = 0;
6299 var_nothing->size = ~0;
6300 var_nothing->fullsize = ~0;
6301 var_nothing->is_special_var = 1;
6302 var_nothing->may_have_pointers = 0;
6303 var_nothing->is_global_var = 0;
6305 /* Create the ANYTHING variable, used to represent that a variable
6306 points to some unknown piece of memory. */
6307 var_anything = new_var_info (NULL_TREE, "ANYTHING");
6308 gcc_assert (var_anything->id == anything_id);
6309 var_anything->is_artificial_var = 1;
6310 var_anything->size = ~0;
6311 var_anything->offset = 0;
6312 var_anything->next = NULL;
6313 var_anything->fullsize = ~0;
6314 var_anything->is_special_var = 1;
6316 /* Anything points to anything. This makes deref constraints just
6317 work in the presence of linked list and other p = *p type loops,
6318 by saying that *ANYTHING = ANYTHING. */
6319 lhs.type = SCALAR;
6320 lhs.var = anything_id;
6321 lhs.offset = 0;
6322 rhs.type = ADDRESSOF;
6323 rhs.var = anything_id;
6324 rhs.offset = 0;
6326 /* This specifically does not use process_constraint because
6327 process_constraint ignores all anything = anything constraints, since all
6328 but this one are redundant. */
6329 VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
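/* Rough illustration (schematic, not the verbatim constraints we build):
   for user code such as

     struct node { struct node *next; };
     struct node *p = head;
     while (p)
       p = p->next;

   the load in the loop yields a constraint of the shape p = *p (at the
   offset of "next").  Because *ANYTHING = ANYTHING, a solution that has
   degraded to ANYTHING stays ANYTHING under such dereferences and the
   fixpoint is reached immediately.  */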
6331 /* Create the READONLY variable, used to represent that a variable
6332 points to readonly memory. */
6333 var_readonly = new_var_info (NULL_TREE, "READONLY");
6334 gcc_assert (var_readonly->id == readonly_id);
6335 var_readonly->is_artificial_var = 1;
6336 var_readonly->offset = 0;
6337 var_readonly->size = ~0;
6338 var_readonly->fullsize = ~0;
6339 var_readonly->next = NULL;
6340 var_readonly->is_special_var = 1;
6342 /* Readonly memory points to anything, in order to make deref
6343 easier. In reality, it points to anything the particular
6344 readonly variable can point to, but we don't track this
6345 separately. */
6346 lhs.type = SCALAR;
6347 lhs.var = readonly_id;
6348 lhs.offset = 0;
6349 rhs.type = ADDRESSOF;
6350 rhs.var = readonly_id; /* FIXME */
6351 rhs.offset = 0;
6352 process_constraint (new_constraint (lhs, rhs));
6354 /* Create the ESCAPED variable, used to represent the set of escaped
6355 memory. */
6356 var_escaped = new_var_info (NULL_TREE, "ESCAPED");
6357 gcc_assert (var_escaped->id == escaped_id);
6358 var_escaped->is_artificial_var = 1;
6359 var_escaped->offset = 0;
6360 var_escaped->size = ~0;
6361 var_escaped->fullsize = ~0;
6362 var_escaped->is_special_var = 0;
6364 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6365 memory. */
6366 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
6367 gcc_assert (var_nonlocal->id == nonlocal_id);
6368 var_nonlocal->is_artificial_var = 1;
6369 var_nonlocal->offset = 0;
6370 var_nonlocal->size = ~0;
6371 var_nonlocal->fullsize = ~0;
6372 var_nonlocal->is_special_var = 1;
6374 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6375 lhs.type = SCALAR;
6376 lhs.var = escaped_id;
6377 lhs.offset = 0;
6378 rhs.type = DEREF;
6379 rhs.var = escaped_id;
6380 rhs.offset = 0;
6381 process_constraint (new_constraint (lhs, rhs));
6383 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6384 whole variable escapes. */
6385 lhs.type = SCALAR;
6386 lhs.var = escaped_id;
6387 lhs.offset = 0;
6388 rhs.type = SCALAR;
6389 rhs.var = escaped_id;
6390 rhs.offset = UNKNOWN_OFFSET;
6391 process_constraint (new_constraint (lhs, rhs));
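/* Sketch of the effect (illustrative): if only a field's address escapes,
   e.g.

     struct S { int a; int b; } s;
     record_somewhere (&s.b);

   then at first only the subvariable for s.b enters the ESCAPED solution;
   the UNKNOWN_OFFSET self-constraint then pulls in the remaining fields
   of s, so the whole object is treated as escaped.  */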
6393 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6394 everything pointed to by escaped points to what global memory can
6395 point to. */
6396 lhs.type = DEREF;
6397 lhs.var = escaped_id;
6398 lhs.offset = 0;
6399 rhs.type = SCALAR;
6400 rhs.var = nonlocal_id;
6401 rhs.offset = 0;
6402 process_constraint (new_constraint (lhs, rhs));
6404 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6405 global memory may point to global memory and escaped memory. */
6406 lhs.type = SCALAR;
6407 lhs.var = nonlocal_id;
6408 lhs.offset = 0;
6409 rhs.type = ADDRESSOF;
6410 rhs.var = nonlocal_id;
6411 rhs.offset = 0;
6412 process_constraint (new_constraint (lhs, rhs));
6413 rhs.type = ADDRESSOF;
6414 rhs.var = escaped_id;
6415 rhs.offset = 0;
6416 process_constraint (new_constraint (lhs, rhs));
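/* Illustrative example (a sketch): given

     int *g;                  (a global)
     int *q = g;              (load from nonlocal memory)

   the loaded value is modelled by NONLOCAL, so q's solution ends up
   containing NONLOCAL and ESCAPED, i.e. q may point to any global or to
   any local whose address has escaped.  */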
6418 /* Create the STOREDANYTHING variable, used to represent the set of
6419 variables stored to *ANYTHING. */
6420 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
6421 gcc_assert (var_storedanything->id == storedanything_id);
6422 var_storedanything->is_artificial_var = 1;
6423 var_storedanything->offset = 0;
6424 var_storedanything->size = ~0;
6425 var_storedanything->fullsize = ~0;
6426 var_storedanything->is_special_var = 0;
6428 /* Create the INTEGER variable, used to represent that a variable points
6429 to what an INTEGER "points to". */
6430 var_integer = new_var_info (NULL_TREE, "INTEGER");
6431 gcc_assert (var_integer->id == integer_id);
6432 var_integer->is_artificial_var = 1;
6433 var_integer->size = ~0;
6434 var_integer->fullsize = ~0;
6435 var_integer->offset = 0;
6436 var_integer->next = NULL;
6437 var_integer->is_special_var = 1;
6439 /* INTEGER = ANYTHING, because we don't know where a dereference of
6440 a random integer will point to. */
6441 lhs.type = SCALAR;
6442 lhs.var = integer_id;
6443 lhs.offset = 0;
6444 rhs.type = ADDRESSOF;
6445 rhs.var = anything_id;
6446 rhs.offset = 0;
6447 process_constraint (new_constraint (lhs, rhs));
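/* For illustration (schematic): code such as

     int *p = (int *) some_integer_value;

   leaves p with a solution containing ANYTHING, because nothing is known
   about where an integer turned into a pointer may point.  */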
6450 /* Initialize things necessary to perform PTA. */
6452 static void
6453 init_alias_vars (void)
6455 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
6457 bitmap_obstack_initialize (&pta_obstack);
6458 bitmap_obstack_initialize (&oldpta_obstack);
6459 bitmap_obstack_initialize (&predbitmap_obstack);
6461 constraint_pool = create_alloc_pool ("Constraint pool",
6462 sizeof (struct constraint), 30);
6463 variable_info_pool = create_alloc_pool ("Variable info pool",
6464 sizeof (struct variable_info), 30);
6465 constraints = VEC_alloc (constraint_t, heap, 8);
6466 varmap = VEC_alloc (varinfo_t, heap, 8);
6467 vi_for_tree = pointer_map_create ();
6468 call_stmt_vars = pointer_map_create ();
6470 memset (&stats, 0, sizeof (stats));
6471 shared_bitmap_table = htab_create (511, shared_bitmap_hash,
6472 shared_bitmap_eq, free);
6473 init_base_vars ();
6475 gcc_obstack_init (&fake_var_decl_obstack);
6478 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6479 predecessor edges. */
6481 static void
6482 remove_preds_and_fake_succs (constraint_graph_t graph)
6484 unsigned int i;
6486 /* Clear the implicit ref and address nodes from the successor
6487 lists. */
6488 for (i = 0; i < FIRST_REF_NODE; i++)
6490 if (graph->succs[i])
6491 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6492 FIRST_REF_NODE * 2);
6495 /* Free the successor list for the non-ref nodes. */
6496 for (i = FIRST_REF_NODE; i < graph->size; i++)
6498 if (graph->succs[i])
6499 BITMAP_FREE (graph->succs[i]);
6502 /* Now reallocate the successor list to the remaining number of
6503 variables and blow away the predecessor bitmaps. */
6504 graph->size = VEC_length (varinfo_t, varmap);
6505 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6507 free (graph->implicit_preds);
6508 graph->implicit_preds = NULL;
6509 free (graph->preds);
6510 graph->preds = NULL;
6511 bitmap_obstack_release (&predbitmap_obstack);
6514 /* Solve the constraint set. */
6516 static void
6517 solve_constraints (void)
6519 struct scc_info *si;
6521 if (dump_file)
6522 fprintf (dump_file,
6523 "\nCollapsing static cycles and doing variable "
6524 "substitution\n");
6526 init_graph (VEC_length (varinfo_t, varmap) * 2);
6528 if (dump_file)
6529 fprintf (dump_file, "Building predecessor graph\n");
6530 build_pred_graph ();
6532 if (dump_file)
6533 fprintf (dump_file, "Detecting pointer and location "
6534 "equivalences\n");
6535 si = perform_var_substitution (graph);
6537 if (dump_file)
6538 fprintf (dump_file, "Rewriting constraints and unifying "
6539 "variables\n");
6540 rewrite_constraints (graph, si);
6542 build_succ_graph ();
6544 free_var_substitution_info (si);
6546 /* Attach complex constraints to graph nodes. */
6547 move_complex_constraints (graph);
6549 if (dump_file)
6550 fprintf (dump_file, "Uniting pointer but not location equivalent "
6551 "variables\n");
6552 unite_pointer_equivalences (graph);
6554 if (dump_file)
6555 fprintf (dump_file, "Finding indirect cycles\n");
6556 find_indirect_cycles (graph);
6558 /* Implicit nodes and predecessors are no longer necessary at this
6559 point. */
6560 remove_preds_and_fake_succs (graph);
6562 if (dump_file && (dump_flags & TDF_GRAPH))
6564 fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
6565 "in dot format:\n");
6566 dump_constraint_graph (dump_file);
6567 fprintf (dump_file, "\n\n");
6570 if (dump_file)
6571 fprintf (dump_file, "Solving graph\n");
6573 solve_graph (graph);
6575 if (dump_file && (dump_flags & TDF_GRAPH))
6577 fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
6578 "in dot format:\n");
6579 dump_constraint_graph (dump_file);
6580 fprintf (dump_file, "\n\n");
6583 if (dump_file)
6584 dump_sa_points_to_info (dump_file);
6587 /* Create points-to sets for the current function. See the comments
6588 at the start of the file for an algorithmic overview. */
6590 static void
6591 compute_points_to_sets (void)
6593 basic_block bb;
6594 unsigned i;
6595 varinfo_t vi;
6597 timevar_push (TV_TREE_PTA);
6599 init_alias_vars ();
6601 intra_create_variable_infos ();
6603 /* Now walk all statements and build the constraint set. */
6604 FOR_EACH_BB (bb)
6606 gimple_stmt_iterator gsi;
6608 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6610 gimple phi = gsi_stmt (gsi);
6612 if (is_gimple_reg (gimple_phi_result (phi)))
6613 find_func_aliases (phi);
6616 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6618 gimple stmt = gsi_stmt (gsi);
6620 find_func_aliases (stmt);
6624 if (dump_file)
6626 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
6627 dump_constraints (dump_file, 0);
6630 /* From the constraints compute the points-to sets. */
6631 solve_constraints ();
6633 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
6634 find_what_var_points_to (get_varinfo (escaped_id),
6635 &cfun->gimple_df->escaped);
6637 /* Make sure the ESCAPED solution (which is used as a placeholder in
6638 other solutions) does not reference itself. This simplifies
6639 points-to solution queries. */
6640 cfun->gimple_df->escaped.escaped = 0;
6642 /* Mark escaped HEAP variables as global. */
6643 FOR_EACH_VEC_ELT (varinfo_t, varmap, i, vi)
6644 if (vi->is_heap_var
6645 && !vi->is_global_var)
6646 DECL_EXTERNAL (vi->decl) = vi->is_global_var
6647 = pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);
6649 /* Compute the points-to sets for pointer SSA_NAMEs. */
6650 for (i = 0; i < num_ssa_names; ++i)
6652 tree ptr = ssa_name (i);
6653 if (ptr
6654 && POINTER_TYPE_P (TREE_TYPE (ptr)))
6655 find_what_p_points_to (ptr);
6658 /* Compute the call-used/clobbered sets. */
6659 FOR_EACH_BB (bb)
6661 gimple_stmt_iterator gsi;
6663 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6665 gimple stmt = gsi_stmt (gsi);
6666 struct pt_solution *pt;
6667 if (!is_gimple_call (stmt))
6668 continue;
6670 pt = gimple_call_use_set (stmt);
6671 if (gimple_call_flags (stmt) & ECF_CONST)
6672 memset (pt, 0, sizeof (struct pt_solution));
6673 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
6675 find_what_var_points_to (vi, pt);
6676 /* Escaped (and thus nonlocal) variables are always
6677 implicitly used by calls. */
6678 /* ??? ESCAPED can be empty even though NONLOCAL
6679 always escapes. */
6680 pt->nonlocal = 1;
6681 pt->escaped = 1;
6683 else
6685 /* If there is nothing special about this call then
6686 we have made everything that is used also escape. */
6687 *pt = cfun->gimple_df->escaped;
6688 pt->nonlocal = 1;
6691 pt = gimple_call_clobber_set (stmt);
6692 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
6693 memset (pt, 0, sizeof (struct pt_solution));
6694 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
6696 find_what_var_points_to (vi, pt);
6697 /* Escaped (and thus nonlocal) variables are always
6698 implicitly clobbered by calls. */
6699 /* ??? ESCAPED can be empty even though NONLOCAL
6700 always escapes. */
6701 pt->nonlocal = 1;
6702 pt->escaped = 1;
6704 else
6706 /* If there is nothing special about this call then
6707 we have made everything that is clobbered also escape. */
6708 *pt = cfun->gimple_df->escaped;
6709 pt->nonlocal = 1;
6714 timevar_pop (TV_TREE_PTA);
6718 /* Delete created points-to sets. */
6720 static void
6721 delete_points_to_sets (void)
6723 unsigned int i;
6725 htab_delete (shared_bitmap_table);
6726 if (dump_file && (dump_flags & TDF_STATS))
6727 fprintf (dump_file, "Points to sets created:%d\n",
6728 stats.points_to_sets_created);
6730 pointer_map_destroy (vi_for_tree);
6731 pointer_map_destroy (call_stmt_vars);
6732 bitmap_obstack_release (&pta_obstack);
6733 VEC_free (constraint_t, heap, constraints);
6735 for (i = 0; i < graph->size; i++)
6736 VEC_free (constraint_t, heap, graph->complex[i]);
6737 free (graph->complex);
6739 free (graph->rep);
6740 free (graph->succs);
6741 free (graph->pe);
6742 free (graph->pe_rep);
6743 free (graph->indirect_cycles);
6744 free (graph);
6746 VEC_free (varinfo_t, heap, varmap);
6747 free_alloc_pool (variable_info_pool);
6748 free_alloc_pool (constraint_pool);
6750 obstack_free (&fake_var_decl_obstack, NULL);
6754 /* Compute points-to information for every SSA_NAME pointer in the
6755 current function and compute the transitive closure of escaped
6756 variables to re-initialize the call-clobber states of local variables. */
6758 unsigned int
6759 compute_may_aliases (void)
6761 if (cfun->gimple_df->ipa_pta)
6763 if (dump_file)
6765 fprintf (dump_file, "\nNot re-computing points-to information "
6766 "because IPA points-to information is available.\n\n");
6768 /* But still dump what we have. */
6769 dump_alias_info (dump_file);
6771 if (dump_flags & TDF_DETAILS)
6772 dump_referenced_vars (dump_file);
6775 return 0;
6778 /* For each pointer P_i, determine the sets of variables that P_i may
6779 point-to. Compute the reachability set of escaped and call-used
6780 variables. */
6781 compute_points_to_sets ();
6783 /* Debugging dumps. */
6784 if (dump_file)
6786 dump_alias_info (dump_file);
6788 if (dump_flags & TDF_DETAILS)
6789 dump_referenced_vars (dump_file);
6792 /* Deallocate memory used by aliasing data structures and the internal
6793 points-to solution. */
6794 delete_points_to_sets ();
6796 gcc_assert (!need_ssa_update_p (cfun));
6798 return 0;
6801 static bool
6802 gate_tree_pta (void)
6804 return flag_tree_pta;
6807 /* A dummy pass to cause points-to information to be computed via
6808 TODO_rebuild_alias. */
6810 struct gimple_opt_pass pass_build_alias =
6813 GIMPLE_PASS,
6814 "alias", /* name */
6815 gate_tree_pta, /* gate */
6816 NULL, /* execute */
6817 NULL, /* sub */
6818 NULL, /* next */
6819 0, /* static_pass_number */
6820 TV_NONE, /* tv_id */
6821 PROP_cfg | PROP_ssa, /* properties_required */
6822 0, /* properties_provided */
6823 0, /* properties_destroyed */
6824 0, /* todo_flags_start */
6825 TODO_rebuild_alias /* todo_flags_finish */
6829 /* A dummy pass to cause points-to information to be computed via
6830 TODO_rebuild_alias. */
6832 struct gimple_opt_pass pass_build_ealias =
6835 GIMPLE_PASS,
6836 "ealias", /* name */
6837 gate_tree_pta, /* gate */
6838 NULL, /* execute */
6839 NULL, /* sub */
6840 NULL, /* next */
6841 0, /* static_pass_number */
6842 TV_NONE, /* tv_id */
6843 PROP_cfg | PROP_ssa, /* properties_required */
6844 0, /* properties_provided */
6845 0, /* properties_destroyed */
6846 0, /* todo_flags_start */
6847 TODO_rebuild_alias /* todo_flags_finish */
6852 /* Return true if we should execute IPA PTA. */
6853 static bool
6854 gate_ipa_pta (void)
6856 return (optimize
6857 && flag_ipa_pta
6858 /* Don't bother doing anything if the program has errors. */
6859 && !seen_error ());
6862 /* The IPA PTA solution for ESCAPED. */
6863 struct pt_solution ipa_escaped_pt
6864 = { true, false, false, false, false, false, NULL };
6866 /* Associate NODE with the varinfo DATA if NODE is an alias or a thunk.
6867 Worker for cgraph_for_node_and_aliases. */
6868 static bool
6869 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
6871 if (node->alias || node->thunk.thunk_p)
6872 insert_vi_for_tree (node->decl, (varinfo_t)data);
6873 return false;
6876 /* Execute the driver for IPA PTA. */
6877 static unsigned int
6878 ipa_pta_execute (void)
6880 struct cgraph_node *node;
6881 struct varpool_node *var;
6882 int from;
6884 in_ipa_mode = 1;
6886 init_alias_vars ();
6888 if (dump_file && (dump_flags & TDF_DETAILS))
6890 dump_cgraph (dump_file);
6891 fprintf (dump_file, "\n");
6894 /* Build the constraints. */
6895 for (node = cgraph_nodes; node; node = node->next)
6897 varinfo_t vi;
6898 /* Nodes without a body are not interesting. Especially do not
6899 visit clones at this point for now - we get duplicate decls
6900 there for inline clones at least. */
6901 if (!cgraph_function_with_gimple_body_p (node))
6902 continue;
6904 gcc_assert (!node->clone_of);
6906 vi = create_function_info_for (node->decl,
6907 alias_get_name (node->decl));
6908 cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
6911 /* Create constraints for global variables and their initializers. */
6912 for (var = varpool_nodes; var; var = var->next)
6914 if (var->alias)
6915 continue;
6917 get_vi_for_tree (var->decl);
6920 if (dump_file)
6922 fprintf (dump_file,
6923 "Generating constraints for global initializers\n\n");
6924 dump_constraints (dump_file, 0);
6925 fprintf (dump_file, "\n");
6927 from = VEC_length (constraint_t, constraints);
6929 for (node = cgraph_nodes; node; node = node->next)
6931 struct function *func;
6932 basic_block bb;
6933 tree old_func_decl;
6935 /* Nodes without a body are not interesting. */
6936 if (!cgraph_function_with_gimple_body_p (node))
6937 continue;
6939 if (dump_file)
6941 fprintf (dump_file,
6942 "Generating constraints for %s", cgraph_node_name (node));
6943 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
6944 fprintf (dump_file, " (%s)",
6945 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
6946 fprintf (dump_file, "\n");
6949 func = DECL_STRUCT_FUNCTION (node->decl);
6950 old_func_decl = current_function_decl;
6951 push_cfun (func);
6952 current_function_decl = node->decl;
6954 /* For externally visible functions and functions annotated with
6955 attribute ((used)), use conservative local constraints for their
6956 arguments.  For local functions we see all callers and thus do not
6957 need initial constraints for parameters. */
6958 if (node->reachable_from_other_partition
6959 || node->local.externally_visible
6960 || node->needed)
6962 intra_create_variable_infos ();
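/* For example (illustrative): if this node is an externally visible
   "void f (int *p)", unknown callers may pass the address of any global
   or of any escaped object, so the same conservative initial constraints
   as in the per-function mode are created for p here.  */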
6964 /* We also need to make function return values escape. Nothing
6965 escapes by returning from main though. */
6966 if (!MAIN_NAME_P (DECL_NAME (node->decl)))
6968 varinfo_t fi, rvi;
6969 fi = lookup_vi_for_tree (node->decl);
6970 rvi = first_vi_for_offset (fi, fi_result);
6971 if (rvi && rvi->offset == fi_result)
6973 struct constraint_expr includes;
6974 struct constraint_expr var;
6975 includes.var = escaped_id;
6976 includes.offset = 0;
6977 includes.type = SCALAR;
6978 var.var = rvi->id;
6979 var.offset = 0;
6980 var.type = SCALAR;
6981 process_constraint (new_constraint (includes, var));
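/* Sketch (illustrative): for an externally visible

     int *wrap (int *p) { return p; }

   the value held in the RESULT_DECL reaches unknown callers, so the
   constraint just added makes ESCAPED include it; main is exempt because
   no caller consumes its return value as a pointer.  */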
6986 /* Build constraints for the function body. */
6987 FOR_EACH_BB_FN (bb, func)
6989 gimple_stmt_iterator gsi;
6991 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6992 gsi_next (&gsi))
6994 gimple phi = gsi_stmt (gsi);
6996 if (is_gimple_reg (gimple_phi_result (phi)))
6997 find_func_aliases (phi);
7000 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7002 gimple stmt = gsi_stmt (gsi);
7004 find_func_aliases (stmt);
7005 find_func_clobbers (stmt);
7009 current_function_decl = old_func_decl;
7010 pop_cfun ();
7012 if (dump_file)
7014 fprintf (dump_file, "\n");
7015 dump_constraints (dump_file, from);
7016 fprintf (dump_file, "\n");
7018 from = VEC_length (constraint_t, constraints);
7021 /* From the constraints compute the points-to sets. */
7022 solve_constraints ();
7024 /* Compute the global points-to sets for ESCAPED.
7025 ??? Note that the computed escape set is not correct
7026 for the whole unit as we fail to consider graph edges to
7027 externally visible functions. */
7028 find_what_var_points_to (get_varinfo (escaped_id), &ipa_escaped_pt);
7030 /* Make sure the ESCAPED solution (which is used as a placeholder in
7031 other solutions) does not reference itself. This simplifies
7032 points-to solution queries. */
7033 ipa_escaped_pt.ipa_escaped = 0;
7035 /* Assign the points-to sets to the SSA names in the unit. */
7036 for (node = cgraph_nodes; node; node = node->next)
7038 tree ptr;
7039 struct function *fn;
7040 unsigned i;
7041 varinfo_t fi;
7042 basic_block bb;
7043 struct pt_solution uses, clobbers;
7044 struct cgraph_edge *e;
7046 /* Nodes without a body are not interesting. */
7047 if (!cgraph_function_with_gimple_body_p (node))
7048 continue;
7050 fn = DECL_STRUCT_FUNCTION (node->decl);
7052 /* Compute the points-to sets for pointer SSA_NAMEs. */
7053 FOR_EACH_VEC_ELT (tree, fn->gimple_df->ssa_names, i, ptr)
7055 if (ptr
7056 && POINTER_TYPE_P (TREE_TYPE (ptr)))
7057 find_what_p_points_to (ptr);
7060 /* Compute the call-use and call-clobber sets for all direct calls. */
7061 fi = lookup_vi_for_tree (node->decl);
7062 gcc_assert (fi->is_fn_info);
7063 find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers),
7064 &clobbers);
7065 find_what_var_points_to (first_vi_for_offset (fi, fi_uses), &uses);
7066 for (e = node->callers; e; e = e->next_caller)
7068 if (!e->call_stmt)
7069 continue;
7071 *gimple_call_clobber_set (e->call_stmt) = clobbers;
7072 *gimple_call_use_set (e->call_stmt) = uses;
7075 /* Compute the call-use and call-clobber sets for indirect calls
7076 and calls to external functions. */
7077 FOR_EACH_BB_FN (bb, fn)
7079 gimple_stmt_iterator gsi;
7081 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7083 gimple stmt = gsi_stmt (gsi);
7084 struct pt_solution *pt;
7085 varinfo_t vi;
7086 tree decl;
7088 if (!is_gimple_call (stmt))
7089 continue;
7091 /* Handle direct calls to external functions. */
7092 decl = gimple_call_fndecl (stmt);
7093 if (decl
7094 && (!(fi = lookup_vi_for_tree (decl))
7095 || !fi->is_fn_info))
7097 pt = gimple_call_use_set (stmt);
7098 if (gimple_call_flags (stmt) & ECF_CONST)
7099 memset (pt, 0, sizeof (struct pt_solution));
7100 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7102 find_what_var_points_to (vi, pt);
7103 /* Escaped (and thus nonlocal) variables are always
7104 implicitly used by calls. */
7105 /* ??? ESCAPED can be empty even though NONLOCAL
7106 always escapes. */
7107 pt->nonlocal = 1;
7108 pt->ipa_escaped = 1;
7110 else
7112 /* If there is nothing special about this call then
7113 we have made everything that is used also escape. */
7114 *pt = ipa_escaped_pt;
7115 pt->nonlocal = 1;
7118 pt = gimple_call_clobber_set (stmt);
7119 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7120 memset (pt, 0, sizeof (struct pt_solution));
7121 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7123 find_what_var_points_to (vi, pt);
7124 /* Escaped (and thus nonlocal) variables are always
7125 implicitly clobbered by calls. */
7126 /* ??? ESCAPED can be empty even though NONLOCAL
7127 always escapes. */
7128 pt->nonlocal = 1;
7129 pt->ipa_escaped = 1;
7131 else
7133 /* If there is nothing special about this call then
7134 we have made everything that is clobbered also escape. */
7135 *pt = ipa_escaped_pt;
7136 pt->nonlocal = 1;
7140 /* Handle indirect calls. */
7141 if (!decl
7142 && (fi = get_fi_for_callee (stmt)))
7144 /* We need to accumulate all clobbers/uses of all possible
7145 callees. */
7146 fi = get_varinfo (find (fi->id));
7147 /* If we cannot constrain the set of functions we'll end up
7148 calling, we end up using/clobbering everything. */
7149 if (bitmap_bit_p (fi->solution, anything_id)
7150 || bitmap_bit_p (fi->solution, nonlocal_id)
7151 || bitmap_bit_p (fi->solution, escaped_id))
7153 pt_solution_reset (gimple_call_clobber_set (stmt));
7154 pt_solution_reset (gimple_call_use_set (stmt));
7156 else
7158 bitmap_iterator bi;
7159 unsigned i;
7160 struct pt_solution *uses, *clobbers;
7162 uses = gimple_call_use_set (stmt);
7163 clobbers = gimple_call_clobber_set (stmt);
7164 memset (uses, 0, sizeof (struct pt_solution));
7165 memset (clobbers, 0, sizeof (struct pt_solution));
7166 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
7168 struct pt_solution sol;
7170 vi = get_varinfo (i);
7171 if (!vi->is_fn_info)
7173 /* ??? We could be more precise here? */
7174 uses->nonlocal = 1;
7175 uses->ipa_escaped = 1;
7176 clobbers->nonlocal = 1;
7177 clobbers->ipa_escaped = 1;
7178 continue;
7181 if (!uses->anything)
7183 find_what_var_points_to
7184 (first_vi_for_offset (vi, fi_uses), &sol);
7185 pt_solution_ior_into (uses, &sol);
7187 if (!clobbers->anything)
7189 find_what_var_points_to
7190 (first_vi_for_offset (vi, fi_clobbers), &sol);
7191 pt_solution_ior_into (clobbers, &sol);
7199 fn->gimple_df->ipa_pta = true;
7202 delete_points_to_sets ();
7204 in_ipa_mode = 0;
7206 return 0;
7209 struct simple_ipa_opt_pass pass_ipa_pta =
7212 SIMPLE_IPA_PASS,
7213 "pta", /* name */
7214 gate_ipa_pta, /* gate */
7215 ipa_pta_execute, /* execute */
7216 NULL, /* sub */
7217 NULL, /* next */
7218 0, /* static_pass_number */
7219 TV_IPA_PTA, /* tv_id */
7220 0, /* properties_required */
7221 0, /* properties_provided */
7222 0, /* properties_destroyed */
7223 0, /* todo_flags_start */
7224 TODO_update_ssa /* todo_flags_finish */