2013-09-12 Paolo Carlini <paolo.carlini@oracle.com>
[official-gcc.git] / gcc / tree-ssa-structalias.c
blob112fba636fb0c02a2f7492d9920d54f894ff8126
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "flags.h"
29 #include "basic-block.h"
30 #include "tree.h"
31 #include "tree-ssa.h"
32 #include "tree-inline.h"
33 #include "diagnostic-core.h"
34 #include "gimple.h"
35 #include "hash-table.h"
36 #include "function.h"
37 #include "cgraph.h"
38 #include "tree-pass.h"
39 #include "alloc-pool.h"
40 #include "splay-tree.h"
41 #include "params.h"
42 #include "cgraph.h"
43 #include "alias.h"
44 #include "pointer-set.h"
46 /* The idea behind this analyzer is to generate set constraints from the
47 program, then solve the resulting constraints in order to generate the
48 points-to sets.
50 Set constraints are a way of modeling program analysis problems that
51 involve sets. They consist of an inclusion constraint language,
52 describing the variables (each variable is a set) and operations that
53 are involved on the variables, and a set of rules that derive facts
54 from these operations. To solve a system of set constraints, you derive
55 all possible facts under the rules, which gives you the correct sets
56 as a consequence.
58 See "Efficient Field-sensitive pointer analysis for C" by "David
59 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
60 http://citeseer.ist.psu.edu/pearce04efficient.html
62 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
63 of C Code in a Second" by ""Nevin Heintze and Olivier Tardieu" at
64 http://citeseer.ist.psu.edu/heintze01ultrafast.html
66 There are three types of real constraint expressions, DEREF,
67 ADDRESSOF, and SCALAR. Each constraint expression consists
68 of a constraint type, a variable, and an offset.
70 SCALAR is a constraint expression type used to represent x, whether
71 it appears on the LHS or the RHS of a statement.
72 DEREF is a constraint expression type used to represent *x, whether
73 it appears on the LHS or the RHS of a statement.
74 ADDRESSOF is a constraint expression used to represent &x, whether
75 it appears on the LHS or the RHS of a statement.
77 Each pointer variable in the program is assigned an integer id, and
78 each field of a structure variable is assigned an integer id as well.
80 Structure variables are linked to their list of fields through a "next
81 field" in each variable that points to the next field in offset
82 order.
83 Each variable for a structure field has
85 1. "size", that tells the size in bits of that field.
86 2. "fullsize, that tells the size in bits of the entire structure.
87 3. "offset", that tells the offset in bits from the beginning of the
88 structure to this field.
90 Thus,
91 struct f
93 int a;
94 int b;
95 } foo;
96 int *bar;
98 looks like
100 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
101 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
102 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
105 In order to solve the system of set constraints, the following is
106 done:
108 1. Each constraint variable x has a solution set associated with it,
109 Sol(x).
111 2. Constraints are separated into direct, copy, and complex.
112 Direct constraints are ADDRESSOF constraints that require no extra
113 processing, such as P = &Q
114 Copy constraints are those of the form P = Q.
115 Complex constraints are all the constraints involving dereferences
116 and offsets (including offsetted copies).
118 3. All direct constraints of the form P = &Q are processed, such
119 that Q is added to Sol(P)
121 4. All complex constraints for a given constraint variable are stored in a
122 linked list attached to that variable's node.
124 5. A directed graph is built out of the copy constraints. Each
125 constraint variable is a node in the graph, and an edge from
126 Q to P is added for each copy constraint of the form P = Q
128 6. The graph is then walked, and solution sets are
129 propagated along the copy edges, such that an edge from Q to P
130 causes Sol(P) <- Sol(P) union Sol(Q).
132 7. As we visit each node, all complex constraints associated with
133 that node are processed by adding appropriate copy edges to the graph, or the
134 appropriate variables to the solution set.
136 8. The process of walking the graph is iterated until no solution
137 sets change.
139 Prior to walking the graph in steps 6 and 7, We perform static
140 cycle elimination on the constraint graph, as well
141 as off-line variable substitution.
143 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
144 on and turned into anything), but isn't. You can just see what offset
145 inside the pointed-to struct it's going to access.
147 TODO: Constant bounded arrays can be handled as if they were structs of the
148 same number of elements.
150 TODO: Modeling heap and incoming pointers becomes much better if we
151 add fields to them as we discover them, which we could do.
153 TODO: We could handle unions, but to be honest, it's probably not
154 worth the pain or slowdown. */
156 /* IPA-PTA optimizations possible.
158 When the indirect function called is ANYTHING we can add disambiguation
159 based on the function signatures (or simply the parameter count which
160 is the varinfo size). We also do not need to consider functions that
161 do not have their address taken.
163 The is_global_var bit which marks escape points is overly conservative
164 in IPA mode. Split it to is_escape_point and is_global_var - only
165 externally visible globals are escape points in IPA mode. This is
166 also needed to fix the pt_solution_includes_global predicate
167 (and thus ptr_deref_may_alias_global_p).
169 The way we introduce DECL_PT_UID to avoid fixing up all points-to
170 sets in the translation unit when we copy a DECL during inlining
171 pessimizes precision. The advantage is that the DECL_PT_UID keeps
172 compile-time and memory usage overhead low - the points-to sets
173 do not grow or get unshared as they would during a fixup phase.
174 An alternative solution is to delay IPA PTA until after all
175 inlining transformations have been applied.
177 The way we propagate clobber/use information isn't optimized.
178 It should use a new complex constraint that properly filters
179 out local variables of the callee (though that would make
180 the sets invalid after inlining). OTOH we might as well
181 admit defeat to WHOPR and simply do all the clobber/use analysis
182 and propagation after PTA finished but before we threw away
183 points-to information for memory variables. WHOPR and PTA
184 do not play along well anyway - the whole constraint solving
185 would need to be done in WPA phase and it will be very interesting
186 to apply the results to local SSA names during LTRANS phase.
188 We probably should compute a per-function unit-ESCAPE solution
189 propagating it simply like the clobber / uses solutions. The
190 solution can go alongside the non-IPA espaced solution and be
191 used to query which vars escape the unit through a function.
193 We never put function decls in points-to sets so we do not
194 keep the set of called functions for indirect calls.
196 And probably more. */
198 static bool use_field_sensitive = true;
199 static int in_ipa_mode = 0;
201 /* Used for predecessor bitmaps. */
202 static bitmap_obstack predbitmap_obstack;
204 /* Used for points-to sets. */
205 static bitmap_obstack pta_obstack;
207 /* Used for oldsolution members of variables. */
208 static bitmap_obstack oldpta_obstack;
210 /* Used for per-solver-iteration bitmaps. */
211 static bitmap_obstack iteration_obstack;
213 static unsigned int create_variable_info_for (tree, const char *);
214 typedef struct constraint_graph *constraint_graph_t;
215 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
217 struct constraint;
218 typedef struct constraint *constraint_t;
221 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
222 if (a) \
223 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
225 static struct constraint_stats
227 unsigned int total_vars;
228 unsigned int nonpointer_vars;
229 unsigned int unified_vars_static;
230 unsigned int unified_vars_dynamic;
231 unsigned int iterations;
232 unsigned int num_edges;
233 unsigned int num_implicit_edges;
234 unsigned int points_to_sets_created;
235 } stats;
237 struct variable_info
239 /* ID of this variable */
240 unsigned int id;
242 /* True if this is a variable created by the constraint analysis, such as
243 heap variables and constraints we had to break up. */
244 unsigned int is_artificial_var : 1;
246 /* True if this is a special variable whose solution set should not be
247 changed. */
248 unsigned int is_special_var : 1;
250 /* True for variables whose size is not known or variable. */
251 unsigned int is_unknown_size_var : 1;
253 /* True for (sub-)fields that represent a whole variable. */
254 unsigned int is_full_var : 1;
256 /* True if this is a heap variable. */
257 unsigned int is_heap_var : 1;
259 /* True if this field may contain pointers. */
260 unsigned int may_have_pointers : 1;
262 /* True if this field has only restrict qualified pointers. */
263 unsigned int only_restrict_pointers : 1;
265 /* True if this represents a global variable. */
266 unsigned int is_global_var : 1;
268 /* True if this represents a IPA function info. */
269 unsigned int is_fn_info : 1;
271 /* The ID of the variable for the next field in this structure
272 or zero for the last field in this structure. */
273 unsigned next;
275 /* The ID of the variable for the first field in this structure. */
276 unsigned head;
278 /* Offset of this variable, in bits, from the base variable */
279 unsigned HOST_WIDE_INT offset;
281 /* Size of the variable, in bits. */
282 unsigned HOST_WIDE_INT size;
284 /* Full size of the base variable, in bits. */
285 unsigned HOST_WIDE_INT fullsize;
287 /* Name of this variable */
288 const char *name;
290 /* Tree that this variable is associated with. */
291 tree decl;
293 /* Points-to set for this variable. */
294 bitmap solution;
296 /* Old points-to set for this variable. */
297 bitmap oldsolution;
299 typedef struct variable_info *varinfo_t;
301 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
302 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
303 unsigned HOST_WIDE_INT);
304 static varinfo_t lookup_vi_for_tree (tree);
305 static inline bool type_can_have_subvars (const_tree);
307 /* Pool of variable info structures. */
308 static alloc_pool variable_info_pool;
310 /* Map varinfo to final pt_solution. */
311 static pointer_map_t *final_solutions;
312 struct obstack final_solutions_obstack;
314 /* Table of variable info structures for constraint variables.
315 Indexed directly by variable info id. */
316 static vec<varinfo_t> varmap;
318 /* Return the varmap element N */
320 static inline varinfo_t
321 get_varinfo (unsigned int n)
323 return varmap[n];
326 /* Return the next variable in the list of sub-variables of VI
327 or NULL if VI is the last sub-variable. */
329 static inline varinfo_t
330 vi_next (varinfo_t vi)
332 return get_varinfo (vi->next);
335 /* Static IDs for the special variables. Variable ID zero is unused
336 and used as terminator for the sub-variable chain. */
337 enum { nothing_id = 1, anything_id = 2, readonly_id = 3,
338 escaped_id = 4, nonlocal_id = 5,
339 storedanything_id = 6, integer_id = 7 };
341 /* Return a new variable info structure consisting for a variable
342 named NAME, and using constraint graph node NODE. Append it
343 to the vector of variable info structures. */
345 static varinfo_t
346 new_var_info (tree t, const char *name)
348 unsigned index = varmap.length ();
349 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
351 ret->id = index;
352 ret->name = name;
353 ret->decl = t;
354 /* Vars without decl are artificial and do not have sub-variables. */
355 ret->is_artificial_var = (t == NULL_TREE);
356 ret->is_special_var = false;
357 ret->is_unknown_size_var = false;
358 ret->is_full_var = (t == NULL_TREE);
359 ret->is_heap_var = false;
360 ret->may_have_pointers = true;
361 ret->only_restrict_pointers = false;
362 ret->is_global_var = (t == NULL_TREE);
363 ret->is_fn_info = false;
364 if (t && DECL_P (t))
365 ret->is_global_var = (is_global_var (t)
366 /* We have to treat even local register variables
367 as escape points. */
368 || (TREE_CODE (t) == VAR_DECL
369 && DECL_HARD_REGISTER (t)));
370 ret->solution = BITMAP_ALLOC (&pta_obstack);
371 ret->oldsolution = NULL;
372 ret->next = 0;
373 ret->head = ret->id;
375 stats.total_vars++;
377 varmap.safe_push (ret);
379 return ret;
383 /* A map mapping call statements to per-stmt variables for uses
384 and clobbers specific to the call. */
385 static struct pointer_map_t *call_stmt_vars;
387 /* Lookup or create the variable for the call statement CALL. */
389 static varinfo_t
390 get_call_vi (gimple call)
392 void **slot_p;
393 varinfo_t vi, vi2;
395 slot_p = pointer_map_insert (call_stmt_vars, call);
396 if (*slot_p)
397 return (varinfo_t) *slot_p;
399 vi = new_var_info (NULL_TREE, "CALLUSED");
400 vi->offset = 0;
401 vi->size = 1;
402 vi->fullsize = 2;
403 vi->is_full_var = true;
405 vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
406 vi2->offset = 1;
407 vi2->size = 1;
408 vi2->fullsize = 2;
409 vi2->is_full_var = true;
411 vi->next = vi2->id;
413 *slot_p = (void *) vi;
414 return vi;
417 /* Lookup the variable for the call statement CALL representing
418 the uses. Returns NULL if there is nothing special about this call. */
420 static varinfo_t
421 lookup_call_use_vi (gimple call)
423 void **slot_p;
425 slot_p = pointer_map_contains (call_stmt_vars, call);
426 if (slot_p)
427 return (varinfo_t) *slot_p;
429 return NULL;
432 /* Lookup the variable for the call statement CALL representing
433 the clobbers. Returns NULL if there is nothing special about this call. */
435 static varinfo_t
436 lookup_call_clobber_vi (gimple call)
438 varinfo_t uses = lookup_call_use_vi (call);
439 if (!uses)
440 return NULL;
442 return vi_next (uses);
445 /* Lookup or create the variable for the call statement CALL representing
446 the uses. */
448 static varinfo_t
449 get_call_use_vi (gimple call)
451 return get_call_vi (call);
454 /* Lookup or create the variable for the call statement CALL representing
455 the clobbers. */
457 static varinfo_t ATTRIBUTE_UNUSED
458 get_call_clobber_vi (gimple call)
460 return vi_next (get_call_vi (call));
464 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
466 /* An expression that appears in a constraint. */
468 struct constraint_expr
470 /* Constraint type. */
471 constraint_expr_type type;
473 /* Variable we are referring to in the constraint. */
474 unsigned int var;
476 /* Offset, in bits, of this constraint from the beginning of
477 variables it ends up referring to.
479 IOW, in a deref constraint, we would deref, get the result set,
480 then add OFFSET to each member. */
481 HOST_WIDE_INT offset;
484 /* Use 0x8000... as special unknown offset. */
485 #define UNKNOWN_OFFSET HOST_WIDE_INT_MIN
487 typedef struct constraint_expr ce_s;
488 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
489 static void get_constraint_for (tree, vec<ce_s> *);
490 static void get_constraint_for_rhs (tree, vec<ce_s> *);
491 static void do_deref (vec<ce_s> *);
493 /* Our set constraints are made up of two constraint expressions, one
494 LHS, and one RHS.
496 As described in the introduction, our set constraints each represent an
497 operation between set valued variables.
499 struct constraint
501 struct constraint_expr lhs;
502 struct constraint_expr rhs;
505 /* List of constraints that we use to build the constraint graph from. */
507 static vec<constraint_t> constraints;
508 static alloc_pool constraint_pool;
510 /* The constraint graph is represented as an array of bitmaps
511 containing successor nodes. */
513 struct constraint_graph
515 /* Size of this graph, which may be different than the number of
516 nodes in the variable map. */
517 unsigned int size;
519 /* Explicit successors of each node. */
520 bitmap *succs;
522 /* Implicit predecessors of each node (Used for variable
523 substitution). */
524 bitmap *implicit_preds;
526 /* Explicit predecessors of each node (Used for variable substitution). */
527 bitmap *preds;
529 /* Indirect cycle representatives, or -1 if the node has no indirect
530 cycles. */
531 int *indirect_cycles;
533 /* Representative node for a node. rep[a] == a unless the node has
534 been unified. */
535 unsigned int *rep;
537 /* Equivalence class representative for a label. This is used for
538 variable substitution. */
539 int *eq_rep;
541 /* Pointer equivalence label for a node. All nodes with the same
542 pointer equivalence label can be unified together at some point
543 (either during constraint optimization or after the constraint
544 graph is built). */
545 unsigned int *pe;
547 /* Pointer equivalence representative for a label. This is used to
548 handle nodes that are pointer equivalent but not location
549 equivalent. We can unite these once the addressof constraints
550 are transformed into initial points-to sets. */
551 int *pe_rep;
553 /* Pointer equivalence label for each node, used during variable
554 substitution. */
555 unsigned int *pointer_label;
557 /* Location equivalence label for each node, used during location
558 equivalence finding. */
559 unsigned int *loc_label;
561 /* Pointed-by set for each node, used during location equivalence
562 finding. This is pointed-by rather than pointed-to, because it
563 is constructed using the predecessor graph. */
564 bitmap *pointed_by;
566 /* Points to sets for pointer equivalence. This is *not* the actual
567 points-to sets for nodes. */
568 bitmap *points_to;
570 /* Bitmap of nodes where the bit is set if the node is a direct
571 node. Used for variable substitution. */
572 sbitmap direct_nodes;
574 /* Bitmap of nodes where the bit is set if the node is address
575 taken. Used for variable substitution. */
576 bitmap address_taken;
578 /* Vector of complex constraints for each graph node. Complex
579 constraints are those involving dereferences or offsets that are
580 not 0. */
581 vec<constraint_t> *complex;
584 static constraint_graph_t graph;
586 /* During variable substitution and the offline version of indirect
587 cycle finding, we create nodes to represent dereferences and
588 address taken constraints. These represent where these start and
589 end. */
590 #define FIRST_REF_NODE (varmap).length ()
591 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
593 /* Return the representative node for NODE, if NODE has been unioned
594 with another NODE.
595 This function performs path compression along the way to finding
596 the representative. */
598 static unsigned int
599 find (unsigned int node)
601 gcc_checking_assert (node < graph->size);
602 if (graph->rep[node] != node)
603 return graph->rep[node] = find (graph->rep[node]);
604 return node;
607 /* Union the TO and FROM nodes to the TO nodes.
608 Note that at some point in the future, we may want to do
609 union-by-rank, in which case we are going to have to return the
610 node we unified to. */
612 static bool
613 unite (unsigned int to, unsigned int from)
615 gcc_checking_assert (to < graph->size && from < graph->size);
616 if (to != from && graph->rep[from] != to)
618 graph->rep[from] = to;
619 return true;
621 return false;
624 /* Create a new constraint consisting of LHS and RHS expressions. */
626 static constraint_t
627 new_constraint (const struct constraint_expr lhs,
628 const struct constraint_expr rhs)
630 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
631 ret->lhs = lhs;
632 ret->rhs = rhs;
633 return ret;
636 /* Print out constraint C to FILE. */
638 static void
639 dump_constraint (FILE *file, constraint_t c)
641 if (c->lhs.type == ADDRESSOF)
642 fprintf (file, "&");
643 else if (c->lhs.type == DEREF)
644 fprintf (file, "*");
645 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
646 if (c->lhs.offset == UNKNOWN_OFFSET)
647 fprintf (file, " + UNKNOWN");
648 else if (c->lhs.offset != 0)
649 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
650 fprintf (file, " = ");
651 if (c->rhs.type == ADDRESSOF)
652 fprintf (file, "&");
653 else if (c->rhs.type == DEREF)
654 fprintf (file, "*");
655 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
656 if (c->rhs.offset == UNKNOWN_OFFSET)
657 fprintf (file, " + UNKNOWN");
658 else if (c->rhs.offset != 0)
659 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
663 void debug_constraint (constraint_t);
664 void debug_constraints (void);
665 void debug_constraint_graph (void);
666 void debug_solution_for_var (unsigned int);
667 void debug_sa_points_to_info (void);
669 /* Print out constraint C to stderr. */
671 DEBUG_FUNCTION void
672 debug_constraint (constraint_t c)
674 dump_constraint (stderr, c);
675 fprintf (stderr, "\n");
678 /* Print out all constraints to FILE */
680 static void
681 dump_constraints (FILE *file, int from)
683 int i;
684 constraint_t c;
685 for (i = from; constraints.iterate (i, &c); i++)
686 if (c)
688 dump_constraint (file, c);
689 fprintf (file, "\n");
693 /* Print out all constraints to stderr. */
695 DEBUG_FUNCTION void
696 debug_constraints (void)
698 dump_constraints (stderr, 0);
701 /* Print the constraint graph in dot format. */
703 static void
704 dump_constraint_graph (FILE *file)
706 unsigned int i;
708 /* Only print the graph if it has already been initialized: */
709 if (!graph)
710 return;
712 /* Prints the header of the dot file: */
713 fprintf (file, "strict digraph {\n");
714 fprintf (file, " node [\n shape = box\n ]\n");
715 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
716 fprintf (file, "\n // List of nodes and complex constraints in "
717 "the constraint graph:\n");
719 /* The next lines print the nodes in the graph together with the
720 complex constraints attached to them. */
721 for (i = 1; i < graph->size; i++)
723 if (i == FIRST_REF_NODE)
724 continue;
725 if (find (i) != i)
726 continue;
727 if (i < FIRST_REF_NODE)
728 fprintf (file, "\"%s\"", get_varinfo (i)->name);
729 else
730 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
731 if (graph->complex[i].exists ())
733 unsigned j;
734 constraint_t c;
735 fprintf (file, " [label=\"\\N\\n");
736 for (j = 0; graph->complex[i].iterate (j, &c); ++j)
738 dump_constraint (file, c);
739 fprintf (file, "\\l");
741 fprintf (file, "\"]");
743 fprintf (file, ";\n");
746 /* Go over the edges. */
747 fprintf (file, "\n // Edges in the constraint graph:\n");
748 for (i = 1; i < graph->size; i++)
750 unsigned j;
751 bitmap_iterator bi;
752 if (find (i) != i)
753 continue;
754 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
756 unsigned to = find (j);
757 if (i == to)
758 continue;
759 if (i < FIRST_REF_NODE)
760 fprintf (file, "\"%s\"", get_varinfo (i)->name);
761 else
762 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
763 fprintf (file, " -> ");
764 if (to < FIRST_REF_NODE)
765 fprintf (file, "\"%s\"", get_varinfo (to)->name);
766 else
767 fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
768 fprintf (file, ";\n");
772 /* Prints the tail of the dot file. */
773 fprintf (file, "}\n");
776 /* Print out the constraint graph to stderr. */
778 DEBUG_FUNCTION void
779 debug_constraint_graph (void)
781 dump_constraint_graph (stderr);
784 /* SOLVER FUNCTIONS
786 The solver is a simple worklist solver, that works on the following
787 algorithm:
789 sbitmap changed_nodes = all zeroes;
790 changed_count = 0;
791 For each node that is not already collapsed:
792 changed_count++;
793 set bit in changed nodes
795 while (changed_count > 0)
797 compute topological ordering for constraint graph
799 find and collapse cycles in the constraint graph (updating
800 changed if necessary)
802 for each node (n) in the graph in topological order:
803 changed_count--;
805 Process each complex constraint associated with the node,
806 updating changed if necessary.
808 For each outgoing edge from n, propagate the solution from n to
809 the destination of the edge, updating changed as necessary.
811 } */
813 /* Return true if two constraint expressions A and B are equal. */
815 static bool
816 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
818 return a.type == b.type && a.var == b.var && a.offset == b.offset;
821 /* Return true if constraint expression A is less than constraint expression
822 B. This is just arbitrary, but consistent, in order to give them an
823 ordering. */
825 static bool
826 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
828 if (a.type == b.type)
830 if (a.var == b.var)
831 return a.offset < b.offset;
832 else
833 return a.var < b.var;
835 else
836 return a.type < b.type;
839 /* Return true if constraint A is less than constraint B. This is just
840 arbitrary, but consistent, in order to give them an ordering. */
842 static bool
843 constraint_less (const constraint_t &a, const constraint_t &b)
845 if (constraint_expr_less (a->lhs, b->lhs))
846 return true;
847 else if (constraint_expr_less (b->lhs, a->lhs))
848 return false;
849 else
850 return constraint_expr_less (a->rhs, b->rhs);
853 /* Return true if two constraints A and B are equal. */
855 static bool
856 constraint_equal (struct constraint a, struct constraint b)
858 return constraint_expr_equal (a.lhs, b.lhs)
859 && constraint_expr_equal (a.rhs, b.rhs);
863 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
865 static constraint_t
866 constraint_vec_find (vec<constraint_t> vec,
867 struct constraint lookfor)
869 unsigned int place;
870 constraint_t found;
872 if (!vec.exists ())
873 return NULL;
875 place = vec.lower_bound (&lookfor, constraint_less);
876 if (place >= vec.length ())
877 return NULL;
878 found = vec[place];
879 if (!constraint_equal (*found, lookfor))
880 return NULL;
881 return found;
884 /* Union two constraint vectors, TO and FROM. Put the result in TO. */
886 static void
887 constraint_set_union (vec<constraint_t> *to,
888 vec<constraint_t> *from)
890 int i;
891 constraint_t c;
893 FOR_EACH_VEC_ELT (*from, i, c)
895 if (constraint_vec_find (*to, *c) == NULL)
897 unsigned int place = to->lower_bound (c, constraint_less);
898 to->safe_insert (place, c);
903 /* Expands the solution in SET to all sub-fields of variables included. */
905 static void
906 solution_set_expand (bitmap set)
908 bitmap_iterator bi;
909 unsigned j;
911 /* In a first pass expand to the head of the variables we need to
912 add all sub-fields off. This avoids quadratic behavior. */
913 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
915 varinfo_t v = get_varinfo (j);
916 if (v->is_artificial_var
917 || v->is_full_var)
918 continue;
919 bitmap_set_bit (set, v->head);
922 /* In the second pass now expand all head variables with subfields. */
923 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
925 varinfo_t v = get_varinfo (j);
926 if (v->is_artificial_var
927 || v->is_full_var
928 || v->head != j)
929 continue;
930 for (v = vi_next (v); v != NULL; v = vi_next (v))
931 bitmap_set_bit (set, v->id);
935 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
936 process. */
938 static bool
939 set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc)
941 bool changed = false;
942 bitmap_iterator bi;
943 unsigned int i;
945 /* If the solution of FROM contains anything it is good enough to transfer
946 this to TO. */
947 if (bitmap_bit_p (from, anything_id))
948 return bitmap_set_bit (to, anything_id);
950 /* For zero offset simply union the solution into the destination. */
951 if (inc == 0)
952 return bitmap_ior_into (to, from);
954 /* If the offset is unknown we have to expand the solution to
955 all subfields. */
956 if (inc == UNKNOWN_OFFSET)
958 bitmap tmp = BITMAP_ALLOC (&iteration_obstack);
959 bitmap_copy (tmp, from);
960 solution_set_expand (tmp);
961 changed |= bitmap_ior_into (to, tmp);
962 BITMAP_FREE (tmp);
963 return changed;
966 /* For non-zero offset union the offsetted solution into the destination. */
967 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
969 varinfo_t vi = get_varinfo (i);
971 /* If this is a variable with just one field just set its bit
972 in the result. */
973 if (vi->is_artificial_var
974 || vi->is_unknown_size_var
975 || vi->is_full_var)
976 changed |= bitmap_set_bit (to, i);
977 else
979 unsigned HOST_WIDE_INT fieldoffset = vi->offset + inc;
981 /* If the offset makes the pointer point to before the
982 variable use offset zero for the field lookup. */
983 if (inc < 0
984 && fieldoffset > vi->offset)
985 fieldoffset = 0;
987 vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
989 changed |= bitmap_set_bit (to, vi->id);
990 /* If the result is not exactly at fieldoffset include the next
991 field as well. See get_constraint_for_ptr_offset for more
992 rationale. */
993 if (vi->offset != fieldoffset
994 && vi->next != 0)
995 changed |= bitmap_set_bit (to, vi->next);
999 return changed;
1002 /* Insert constraint C into the list of complex constraints for graph
1003 node VAR. */
1005 static void
1006 insert_into_complex (constraint_graph_t graph,
1007 unsigned int var, constraint_t c)
1009 vec<constraint_t> complex = graph->complex[var];
1010 unsigned int place = complex.lower_bound (c, constraint_less);
1012 /* Only insert constraints that do not already exist. */
1013 if (place >= complex.length ()
1014 || !constraint_equal (*c, *complex[place]))
1015 graph->complex[var].safe_insert (place, c);
1019 /* Condense two variable nodes into a single variable node, by moving
1020 all associated info from SRC to TO. */
1022 static void
1023 merge_node_constraints (constraint_graph_t graph, unsigned int to,
1024 unsigned int from)
1026 unsigned int i;
1027 constraint_t c;
1029 gcc_checking_assert (find (from) == to);
1031 /* Move all complex constraints from src node into to node */
1032 FOR_EACH_VEC_ELT (graph->complex[from], i, c)
1034 /* In complex constraints for node src, we may have either
1035 a = *src, and *src = a, or an offseted constraint which are
1036 always added to the rhs node's constraints. */
1038 if (c->rhs.type == DEREF)
1039 c->rhs.var = to;
1040 else if (c->lhs.type == DEREF)
1041 c->lhs.var = to;
1042 else
1043 c->rhs.var = to;
1045 constraint_set_union (&graph->complex[to], &graph->complex[from]);
1046 graph->complex[from].release ();
1050 /* Remove edges involving NODE from GRAPH. */
1052 static void
1053 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1055 if (graph->succs[node])
1056 BITMAP_FREE (graph->succs[node]);
1059 /* Merge GRAPH nodes FROM and TO into node TO. */
1061 static void
1062 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1063 unsigned int from)
1065 if (graph->indirect_cycles[from] != -1)
1067 /* If we have indirect cycles with the from node, and we have
1068 none on the to node, the to node has indirect cycles from the
1069 from node now that they are unified.
1070 If indirect cycles exist on both, unify the nodes that they
1071 are in a cycle with, since we know they are in a cycle with
1072 each other. */
1073 if (graph->indirect_cycles[to] == -1)
1074 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1077 /* Merge all the successor edges. */
1078 if (graph->succs[from])
1080 if (!graph->succs[to])
1081 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1082 bitmap_ior_into (graph->succs[to],
1083 graph->succs[from]);
1086 clear_edges_for_node (graph, from);
1090 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1091 it doesn't exist in the graph already. */
1093 static void
1094 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1095 unsigned int from)
1097 if (to == from)
1098 return;
1100 if (!graph->implicit_preds[to])
1101 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1103 if (bitmap_set_bit (graph->implicit_preds[to], from))
1104 stats.num_implicit_edges++;
1107 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1108 it doesn't exist in the graph already.
1109 Return false if the edge already existed, true otherwise. */
1111 static void
1112 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1113 unsigned int from)
1115 if (!graph->preds[to])
1116 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1117 bitmap_set_bit (graph->preds[to], from);
1120 /* Add a graph edge to GRAPH, going from FROM to TO if
1121 it doesn't exist in the graph already.
1122 Return false if the edge already existed, true otherwise. */
1124 static bool
1125 add_graph_edge (constraint_graph_t graph, unsigned int to,
1126 unsigned int from)
1128 if (to == from)
1130 return false;
1132 else
1134 bool r = false;
1136 if (!graph->succs[from])
1137 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1138 if (bitmap_set_bit (graph->succs[from], to))
1140 r = true;
1141 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1142 stats.num_edges++;
1144 return r;
1149 /* Initialize the constraint graph structure to contain SIZE nodes. */
1151 static void
1152 init_graph (unsigned int size)
1154 unsigned int j;
1156 graph = XCNEW (struct constraint_graph);
1157 graph->size = size;
1158 graph->succs = XCNEWVEC (bitmap, graph->size);
1159 graph->indirect_cycles = XNEWVEC (int, graph->size);
1160 graph->rep = XNEWVEC (unsigned int, graph->size);
1161 /* ??? Macros do not support template types with multiple arguments,
1162 so we use a typedef to work around it. */
1163 typedef vec<constraint_t> vec_constraint_t_heap;
1164 graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1165 graph->pe = XCNEWVEC (unsigned int, graph->size);
1166 graph->pe_rep = XNEWVEC (int, graph->size);
1168 for (j = 0; j < graph->size; j++)
1170 graph->rep[j] = j;
1171 graph->pe_rep[j] = -1;
1172 graph->indirect_cycles[j] = -1;
1176 /* Build the constraint graph, adding only predecessor edges right now. */
1178 static void
1179 build_pred_graph (void)
1181 int i;
1182 constraint_t c;
1183 unsigned int j;
1185 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1186 graph->preds = XCNEWVEC (bitmap, graph->size);
1187 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1188 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1189 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1190 graph->points_to = XCNEWVEC (bitmap, graph->size);
1191 graph->eq_rep = XNEWVEC (int, graph->size);
1192 graph->direct_nodes = sbitmap_alloc (graph->size);
1193 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1194 bitmap_clear (graph->direct_nodes);
1196 for (j = 1; j < FIRST_REF_NODE; j++)
1198 if (!get_varinfo (j)->is_special_var)
1199 bitmap_set_bit (graph->direct_nodes, j);
1202 for (j = 0; j < graph->size; j++)
1203 graph->eq_rep[j] = -1;
1205 for (j = 0; j < varmap.length (); j++)
1206 graph->indirect_cycles[j] = -1;
1208 FOR_EACH_VEC_ELT (constraints, i, c)
1210 struct constraint_expr lhs = c->lhs;
1211 struct constraint_expr rhs = c->rhs;
1212 unsigned int lhsvar = lhs.var;
1213 unsigned int rhsvar = rhs.var;
1215 if (lhs.type == DEREF)
1217 /* *x = y. */
1218 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1219 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1221 else if (rhs.type == DEREF)
1223 /* x = *y */
1224 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1225 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1226 else
1227 bitmap_clear_bit (graph->direct_nodes, lhsvar);
1229 else if (rhs.type == ADDRESSOF)
1231 varinfo_t v;
1233 /* x = &y */
1234 if (graph->points_to[lhsvar] == NULL)
1235 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1236 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1238 if (graph->pointed_by[rhsvar] == NULL)
1239 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1240 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1242 /* Implicitly, *x = y */
1243 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1245 /* All related variables are no longer direct nodes. */
1246 bitmap_clear_bit (graph->direct_nodes, rhsvar);
1247 v = get_varinfo (rhsvar);
1248 if (!v->is_full_var)
1250 v = get_varinfo (v->head);
1253 bitmap_clear_bit (graph->direct_nodes, v->id);
1254 v = vi_next (v);
1256 while (v != NULL);
1258 bitmap_set_bit (graph->address_taken, rhsvar);
1260 else if (lhsvar > anything_id
1261 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1263 /* x = y */
1264 add_pred_graph_edge (graph, lhsvar, rhsvar);
1265 /* Implicitly, *x = *y */
1266 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1267 FIRST_REF_NODE + rhsvar);
1269 else if (lhs.offset != 0 || rhs.offset != 0)
1271 if (rhs.offset != 0)
1272 bitmap_clear_bit (graph->direct_nodes, lhs.var);
1273 else if (lhs.offset != 0)
1274 bitmap_clear_bit (graph->direct_nodes, rhs.var);
1279 /* Build the constraint graph, adding successor edges. */
1281 static void
1282 build_succ_graph (void)
1284 unsigned i, t;
1285 constraint_t c;
1287 FOR_EACH_VEC_ELT (constraints, i, c)
1289 struct constraint_expr lhs;
1290 struct constraint_expr rhs;
1291 unsigned int lhsvar;
1292 unsigned int rhsvar;
1294 if (!c)
1295 continue;
1297 lhs = c->lhs;
1298 rhs = c->rhs;
1299 lhsvar = find (lhs.var);
1300 rhsvar = find (rhs.var);
1302 if (lhs.type == DEREF)
1304 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1305 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1307 else if (rhs.type == DEREF)
1309 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1310 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1312 else if (rhs.type == ADDRESSOF)
1314 /* x = &y */
1315 gcc_checking_assert (find (rhs.var) == rhs.var);
1316 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1318 else if (lhsvar > anything_id
1319 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1321 add_graph_edge (graph, lhsvar, rhsvar);
1325 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1326 receive pointers. */
1327 t = find (storedanything_id);
1328 for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1330 if (!bitmap_bit_p (graph->direct_nodes, i)
1331 && get_varinfo (i)->may_have_pointers)
1332 add_graph_edge (graph, find (i), t);
1335 /* Everything stored to ANYTHING also potentially escapes. */
1336 add_graph_edge (graph, find (escaped_id), t);
1340 /* Changed variables on the last iteration. */
1341 static bitmap changed;
1343 /* Strongly Connected Component visitation info. */
1345 struct scc_info
1347 sbitmap visited;
1348 sbitmap deleted;
1349 unsigned int *dfs;
1350 unsigned int *node_mapping;
1351 int current_index;
1352 vec<unsigned> scc_stack;
1356 /* Recursive routine to find strongly connected components in GRAPH.
1357 SI is the SCC info to store the information in, and N is the id of current
1358 graph node we are processing.
1360 This is Tarjan's strongly connected component finding algorithm, as
1361 modified by Nuutila to keep only non-root nodes on the stack.
1362 The algorithm can be found in "On finding the strongly connected
1363 connected components in a directed graph" by Esko Nuutila and Eljas
1364 Soisalon-Soininen, in Information Processing Letters volume 49,
1365 number 1, pages 9-14. */
1367 static void
1368 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1370 unsigned int i;
1371 bitmap_iterator bi;
1372 unsigned int my_dfs;
1374 bitmap_set_bit (si->visited, n);
1375 si->dfs[n] = si->current_index ++;
1376 my_dfs = si->dfs[n];
1378 /* Visit all the successors. */
1379 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1381 unsigned int w;
1383 if (i > LAST_REF_NODE)
1384 break;
1386 w = find (i);
1387 if (bitmap_bit_p (si->deleted, w))
1388 continue;
1390 if (!bitmap_bit_p (si->visited, w))
1391 scc_visit (graph, si, w);
1393 unsigned int t = find (w);
1394 gcc_checking_assert (find (n) == n);
1395 if (si->dfs[t] < si->dfs[n])
1396 si->dfs[n] = si->dfs[t];
1399 /* See if any components have been identified. */
1400 if (si->dfs[n] == my_dfs)
1402 if (si->scc_stack.length () > 0
1403 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1405 bitmap scc = BITMAP_ALLOC (NULL);
1406 unsigned int lowest_node;
1407 bitmap_iterator bi;
1409 bitmap_set_bit (scc, n);
1411 while (si->scc_stack.length () != 0
1412 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1414 unsigned int w = si->scc_stack.pop ();
1416 bitmap_set_bit (scc, w);
1419 lowest_node = bitmap_first_set_bit (scc);
1420 gcc_assert (lowest_node < FIRST_REF_NODE);
1422 /* Collapse the SCC nodes into a single node, and mark the
1423 indirect cycles. */
1424 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1426 if (i < FIRST_REF_NODE)
1428 if (unite (lowest_node, i))
1429 unify_nodes (graph, lowest_node, i, false);
1431 else
1433 unite (lowest_node, i);
1434 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1438 bitmap_set_bit (si->deleted, n);
1440 else
1441 si->scc_stack.safe_push (n);
1444 /* Unify node FROM into node TO, updating the changed count if
1445 necessary when UPDATE_CHANGED is true. */
1447 static void
1448 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1449 bool update_changed)
1451 gcc_checking_assert (to != from && find (to) == to);
1453 if (dump_file && (dump_flags & TDF_DETAILS))
1454 fprintf (dump_file, "Unifying %s to %s\n",
1455 get_varinfo (from)->name,
1456 get_varinfo (to)->name);
1458 if (update_changed)
1459 stats.unified_vars_dynamic++;
1460 else
1461 stats.unified_vars_static++;
1463 merge_graph_nodes (graph, to, from);
1464 merge_node_constraints (graph, to, from);
1466 /* Mark TO as changed if FROM was changed. If TO was already marked
1467 as changed, decrease the changed count. */
1469 if (update_changed
1470 && bitmap_clear_bit (changed, from))
1471 bitmap_set_bit (changed, to);
1472 varinfo_t fromvi = get_varinfo (from);
1473 if (fromvi->solution)
1475 /* If the solution changes because of the merging, we need to mark
1476 the variable as changed. */
1477 varinfo_t tovi = get_varinfo (to);
1478 if (bitmap_ior_into (tovi->solution, fromvi->solution))
1480 if (update_changed)
1481 bitmap_set_bit (changed, to);
1484 BITMAP_FREE (fromvi->solution);
1485 if (fromvi->oldsolution)
1486 BITMAP_FREE (fromvi->oldsolution);
1488 if (stats.iterations > 0
1489 && tovi->oldsolution)
1490 BITMAP_FREE (tovi->oldsolution);
1492 if (graph->succs[to])
1493 bitmap_clear_bit (graph->succs[to], to);
1496 /* Information needed to compute the topological ordering of a graph. */
1498 struct topo_info
1500 /* sbitmap of visited nodes. */
1501 sbitmap visited;
1502 /* Array that stores the topological order of the graph, *in
1503 reverse*. */
1504 vec<unsigned> topo_order;
1508 /* Initialize and return a topological info structure. */
1510 static struct topo_info *
1511 init_topo_info (void)
1513 size_t size = graph->size;
1514 struct topo_info *ti = XNEW (struct topo_info);
1515 ti->visited = sbitmap_alloc (size);
1516 bitmap_clear (ti->visited);
1517 ti->topo_order.create (1);
1518 return ti;
1522 /* Free the topological sort info pointed to by TI. */
1524 static void
1525 free_topo_info (struct topo_info *ti)
1527 sbitmap_free (ti->visited);
1528 ti->topo_order.release ();
1529 free (ti);
1532 /* Visit the graph in topological order, and store the order in the
1533 topo_info structure. */
1535 static void
1536 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1537 unsigned int n)
1539 bitmap_iterator bi;
1540 unsigned int j;
1542 bitmap_set_bit (ti->visited, n);
1544 if (graph->succs[n])
1545 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1547 if (!bitmap_bit_p (ti->visited, j))
1548 topo_visit (graph, ti, j);
1551 ti->topo_order.safe_push (n);
1554 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1555 starting solution for y. */
1557 static void
1558 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1559 bitmap delta)
1561 unsigned int lhs = c->lhs.var;
1562 bool flag = false;
1563 bitmap sol = get_varinfo (lhs)->solution;
1564 unsigned int j;
1565 bitmap_iterator bi;
1566 HOST_WIDE_INT roffset = c->rhs.offset;
1568 /* Our IL does not allow this. */
1569 gcc_checking_assert (c->lhs.offset == 0);
1571 /* If the solution of Y contains anything it is good enough to transfer
1572 this to the LHS. */
1573 if (bitmap_bit_p (delta, anything_id))
1575 flag |= bitmap_set_bit (sol, anything_id);
1576 goto done;
1579 /* If we do not know at with offset the rhs is dereferenced compute
1580 the reachability set of DELTA, conservatively assuming it is
1581 dereferenced at all valid offsets. */
1582 if (roffset == UNKNOWN_OFFSET)
1584 solution_set_expand (delta);
1585 /* No further offset processing is necessary. */
1586 roffset = 0;
1589 /* For each variable j in delta (Sol(y)), add
1590 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1591 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1593 varinfo_t v = get_varinfo (j);
1594 HOST_WIDE_INT fieldoffset = v->offset + roffset;
1595 unsigned int t;
1597 if (v->is_full_var)
1598 fieldoffset = v->offset;
1599 else if (roffset != 0)
1600 v = first_vi_for_offset (v, fieldoffset);
1601 /* If the access is outside of the variable we can ignore it. */
1602 if (!v)
1603 continue;
1607 t = find (v->id);
1609 /* Adding edges from the special vars is pointless.
1610 They don't have sets that can change. */
1611 if (get_varinfo (t)->is_special_var)
1612 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1613 /* Merging the solution from ESCAPED needlessly increases
1614 the set. Use ESCAPED as representative instead. */
1615 else if (v->id == escaped_id)
1616 flag |= bitmap_set_bit (sol, escaped_id);
1617 else if (v->may_have_pointers
1618 && add_graph_edge (graph, lhs, t))
1619 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1621 /* If the variable is not exactly at the requested offset
1622 we have to include the next one. */
1623 if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1624 || v->next == 0)
1625 break;
1627 v = vi_next (v);
1628 fieldoffset = v->offset;
1630 while (1);
1633 done:
1634 /* If the LHS solution changed, mark the var as changed. */
1635 if (flag)
1637 get_varinfo (lhs)->solution = sol;
1638 bitmap_set_bit (changed, lhs);
1642 /* Process a constraint C that represents *(x + off) = y using DELTA
1643 as the starting solution for x. */
1645 static void
1646 do_ds_constraint (constraint_t c, bitmap delta)
1648 unsigned int rhs = c->rhs.var;
1649 bitmap sol = get_varinfo (rhs)->solution;
1650 unsigned int j;
1651 bitmap_iterator bi;
1652 HOST_WIDE_INT loff = c->lhs.offset;
1653 bool escaped_p = false;
1655 /* Our IL does not allow this. */
1656 gcc_checking_assert (c->rhs.offset == 0);
1658 /* If the solution of y contains ANYTHING simply use the ANYTHING
1659 solution. This avoids needlessly increasing the points-to sets. */
1660 if (bitmap_bit_p (sol, anything_id))
1661 sol = get_varinfo (find (anything_id))->solution;
1663 /* If the solution for x contains ANYTHING we have to merge the
1664 solution of y into all pointer variables which we do via
1665 STOREDANYTHING. */
1666 if (bitmap_bit_p (delta, anything_id))
1668 unsigned t = find (storedanything_id);
1669 if (add_graph_edge (graph, t, rhs))
1671 if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1672 bitmap_set_bit (changed, t);
1674 return;
1677 /* If we do not know at with offset the rhs is dereferenced compute
1678 the reachability set of DELTA, conservatively assuming it is
1679 dereferenced at all valid offsets. */
1680 if (loff == UNKNOWN_OFFSET)
1682 solution_set_expand (delta);
1683 loff = 0;
1686 /* For each member j of delta (Sol(x)), add an edge from y to j and
1687 union Sol(y) into Sol(j) */
1688 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1690 varinfo_t v = get_varinfo (j);
1691 unsigned int t;
1692 HOST_WIDE_INT fieldoffset = v->offset + loff;
1694 if (v->is_full_var)
1695 fieldoffset = v->offset;
1696 else if (loff != 0)
1697 v = first_vi_for_offset (v, fieldoffset);
1698 /* If the access is outside of the variable we can ignore it. */
1699 if (!v)
1700 continue;
1704 if (v->may_have_pointers)
1706 /* If v is a global variable then this is an escape point. */
1707 if (v->is_global_var
1708 && !escaped_p)
1710 t = find (escaped_id);
1711 if (add_graph_edge (graph, t, rhs)
1712 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1713 bitmap_set_bit (changed, t);
1714 /* Enough to let rhs escape once. */
1715 escaped_p = true;
1718 if (v->is_special_var)
1719 break;
1721 t = find (v->id);
1722 if (add_graph_edge (graph, t, rhs)
1723 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1724 bitmap_set_bit (changed, t);
1727 /* If the variable is not exactly at the requested offset
1728 we have to include the next one. */
1729 if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1730 || v->next == 0)
1731 break;
1733 v = vi_next (v);
1734 fieldoffset = v->offset;
1736 while (1);
1740 /* Handle a non-simple (simple meaning requires no iteration),
1741 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1743 static void
1744 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1746 if (c->lhs.type == DEREF)
1748 if (c->rhs.type == ADDRESSOF)
1750 gcc_unreachable();
1752 else
1754 /* *x = y */
1755 do_ds_constraint (c, delta);
1758 else if (c->rhs.type == DEREF)
1760 /* x = *y */
1761 if (!(get_varinfo (c->lhs.var)->is_special_var))
1762 do_sd_constraint (graph, c, delta);
1764 else
1766 bitmap tmp;
1767 bitmap solution;
1768 bool flag = false;
1770 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1771 solution = get_varinfo (c->rhs.var)->solution;
1772 tmp = get_varinfo (c->lhs.var)->solution;
1774 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1776 if (flag)
1777 bitmap_set_bit (changed, c->lhs.var);
1781 /* Initialize and return a new SCC info structure. */
1783 static struct scc_info *
1784 init_scc_info (size_t size)
1786 struct scc_info *si = XNEW (struct scc_info);
1787 size_t i;
1789 si->current_index = 0;
1790 si->visited = sbitmap_alloc (size);
1791 bitmap_clear (si->visited);
1792 si->deleted = sbitmap_alloc (size);
1793 bitmap_clear (si->deleted);
1794 si->node_mapping = XNEWVEC (unsigned int, size);
1795 si->dfs = XCNEWVEC (unsigned int, size);
1797 for (i = 0; i < size; i++)
1798 si->node_mapping[i] = i;
1800 si->scc_stack.create (1);
1801 return si;
1804 /* Free an SCC info structure pointed to by SI */
1806 static void
1807 free_scc_info (struct scc_info *si)
1809 sbitmap_free (si->visited);
1810 sbitmap_free (si->deleted);
1811 free (si->node_mapping);
1812 free (si->dfs);
1813 si->scc_stack.release ();
1814 free (si);
1818 /* Find indirect cycles in GRAPH that occur, using strongly connected
1819 components, and note them in the indirect cycles map.
1821 This technique comes from Ben Hardekopf and Calvin Lin,
1822 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1823 Lines of Code", submitted to PLDI 2007. */
1825 static void
1826 find_indirect_cycles (constraint_graph_t graph)
1828 unsigned int i;
1829 unsigned int size = graph->size;
1830 struct scc_info *si = init_scc_info (size);
1832 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1833 if (!bitmap_bit_p (si->visited, i) && find (i) == i)
1834 scc_visit (graph, si, i);
1836 free_scc_info (si);
1839 /* Compute a topological ordering for GRAPH, and store the result in the
1840 topo_info structure TI. */
1842 static void
1843 compute_topo_order (constraint_graph_t graph,
1844 struct topo_info *ti)
1846 unsigned int i;
1847 unsigned int size = graph->size;
1849 for (i = 0; i != size; ++i)
1850 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1851 topo_visit (graph, ti, i);
1854 /* Structure used to for hash value numbering of pointer equivalence
1855 classes. */
1857 typedef struct equiv_class_label
1859 hashval_t hashcode;
1860 unsigned int equivalence_class;
1861 bitmap labels;
1862 } *equiv_class_label_t;
1863 typedef const struct equiv_class_label *const_equiv_class_label_t;
1865 /* Equiv_class_label hashtable helpers. */
1867 struct equiv_class_hasher : typed_free_remove <equiv_class_label>
1869 typedef equiv_class_label value_type;
1870 typedef equiv_class_label compare_type;
1871 static inline hashval_t hash (const value_type *);
1872 static inline bool equal (const value_type *, const compare_type *);
1875 /* Hash function for a equiv_class_label_t */
1877 inline hashval_t
1878 equiv_class_hasher::hash (const value_type *ecl)
1880 return ecl->hashcode;
1883 /* Equality function for two equiv_class_label_t's. */
1885 inline bool
1886 equiv_class_hasher::equal (const value_type *eql1, const compare_type *eql2)
1888 return (eql1->hashcode == eql2->hashcode
1889 && bitmap_equal_p (eql1->labels, eql2->labels));
1892 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1893 classes. */
1894 static hash_table <equiv_class_hasher> pointer_equiv_class_table;
1896 /* A hashtable for mapping a bitmap of labels->location equivalence
1897 classes. */
1898 static hash_table <equiv_class_hasher> location_equiv_class_table;
1900 /* Lookup a equivalence class in TABLE by the bitmap of LABELS with
1901 hash HAS it contains. Sets *REF_LABELS to the bitmap LABELS
1902 is equivalent to. */
1904 static equiv_class_label *
1905 equiv_class_lookup_or_add (hash_table <equiv_class_hasher> table, bitmap labels)
1907 equiv_class_label **slot;
1908 equiv_class_label ecl;
1910 ecl.labels = labels;
1911 ecl.hashcode = bitmap_hash (labels);
1912 slot = table.find_slot_with_hash (&ecl, ecl.hashcode, INSERT);
1913 if (!*slot)
1915 *slot = XNEW (struct equiv_class_label);
1916 (*slot)->labels = labels;
1917 (*slot)->hashcode = ecl.hashcode;
1918 (*slot)->equivalence_class = 0;
1921 return *slot;
1924 /* Perform offline variable substitution.
1926 This is a worst case quadratic time way of identifying variables
1927 that must have equivalent points-to sets, including those caused by
1928 static cycles, and single entry subgraphs, in the constraint graph.
1930 The technique is described in "Exploiting Pointer and Location
1931 Equivalence to Optimize Pointer Analysis. In the 14th International
1932 Static Analysis Symposium (SAS), August 2007." It is known as the
1933 "HU" algorithm, and is equivalent to value numbering the collapsed
1934 constraint graph including evaluating unions.
1936 The general method of finding equivalence classes is as follows:
1937 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1938 Initialize all non-REF nodes to be direct nodes.
1939 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1940 variable}
1941 For each constraint containing the dereference, we also do the same
1942 thing.
1944 We then compute SCC's in the graph and unify nodes in the same SCC,
1945 including pts sets.
1947 For each non-collapsed node x:
1948 Visit all unvisited explicit incoming edges.
1949 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1950 where y->x.
1951 Lookup the equivalence class for pts(x).
1952 If we found one, equivalence_class(x) = found class.
1953 Otherwise, equivalence_class(x) = new class, and new_class is
1954 added to the lookup table.
1956 All direct nodes with the same equivalence class can be replaced
1957 with a single representative node.
1958 All unlabeled nodes (label == 0) are not pointers and all edges
1959 involving them can be eliminated.
1960 We perform these optimizations during rewrite_constraints
1962 In addition to pointer equivalence class finding, we also perform
1963 location equivalence class finding. This is the set of variables
1964 that always appear together in points-to sets. We use this to
1965 compress the size of the points-to sets. */
1967 /* Current maximum pointer equivalence class id. */
1968 static int pointer_equiv_class;
1970 /* Current maximum location equivalence class id. */
1971 static int location_equiv_class;
1973 /* Recursive routine to find strongly connected components in GRAPH,
1974 and label it's nodes with DFS numbers. */
1976 static void
1977 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1979 unsigned int i;
1980 bitmap_iterator bi;
1981 unsigned int my_dfs;
1983 gcc_checking_assert (si->node_mapping[n] == n);
1984 bitmap_set_bit (si->visited, n);
1985 si->dfs[n] = si->current_index ++;
1986 my_dfs = si->dfs[n];
1988 /* Visit all the successors. */
1989 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
1991 unsigned int w = si->node_mapping[i];
1993 if (bitmap_bit_p (si->deleted, w))
1994 continue;
1996 if (!bitmap_bit_p (si->visited, w))
1997 condense_visit (graph, si, w);
1999 unsigned int t = si->node_mapping[w];
2000 gcc_checking_assert (si->node_mapping[n] == n);
2001 if (si->dfs[t] < si->dfs[n])
2002 si->dfs[n] = si->dfs[t];
2005 /* Visit all the implicit predecessors. */
2006 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2008 unsigned int w = si->node_mapping[i];
2010 if (bitmap_bit_p (si->deleted, w))
2011 continue;
2013 if (!bitmap_bit_p (si->visited, w))
2014 condense_visit (graph, si, w);
2016 unsigned int t = si->node_mapping[w];
2017 gcc_assert (si->node_mapping[n] == n);
2018 if (si->dfs[t] < si->dfs[n])
2019 si->dfs[n] = si->dfs[t];
2022 /* See if any components have been identified. */
2023 if (si->dfs[n] == my_dfs)
2025 while (si->scc_stack.length () != 0
2026 && si->dfs[si->scc_stack.last ()] >= my_dfs)
2028 unsigned int w = si->scc_stack.pop ();
2029 si->node_mapping[w] = n;
2031 if (!bitmap_bit_p (graph->direct_nodes, w))
2032 bitmap_clear_bit (graph->direct_nodes, n);
2034 /* Unify our nodes. */
2035 if (graph->preds[w])
2037 if (!graph->preds[n])
2038 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2039 bitmap_ior_into (graph->preds[n], graph->preds[w]);
2041 if (graph->implicit_preds[w])
2043 if (!graph->implicit_preds[n])
2044 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2045 bitmap_ior_into (graph->implicit_preds[n],
2046 graph->implicit_preds[w]);
2048 if (graph->points_to[w])
2050 if (!graph->points_to[n])
2051 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2052 bitmap_ior_into (graph->points_to[n],
2053 graph->points_to[w]);
2056 bitmap_set_bit (si->deleted, n);
2058 else
2059 si->scc_stack.safe_push (n);
2062 /* Label pointer equivalences. */
2064 static void
2065 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2067 unsigned int i, first_pred;
2068 bitmap_iterator bi;
2070 bitmap_set_bit (si->visited, n);
2072 /* Label and union our incoming edges's points to sets. */
2073 first_pred = -1U;
2074 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2076 unsigned int w = si->node_mapping[i];
2077 if (!bitmap_bit_p (si->visited, w))
2078 label_visit (graph, si, w);
2080 /* Skip unused edges */
2081 if (w == n || graph->pointer_label[w] == 0)
2082 continue;
2084 if (graph->points_to[w])
2086 if (!graph->points_to[n])
2088 if (first_pred == -1U)
2089 first_pred = w;
2090 else
2092 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2093 bitmap_ior (graph->points_to[n],
2094 graph->points_to[first_pred],
2095 graph->points_to[w]);
2098 else
2099 bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
2103 /* Indirect nodes get fresh variables and a new pointer equiv class. */
2104 if (!bitmap_bit_p (graph->direct_nodes, n))
2106 if (!graph->points_to[n])
2108 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2109 if (first_pred != -1U)
2110 bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
2112 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2113 graph->pointer_label[n] = pointer_equiv_class++;
2114 equiv_class_label_t ecl;
2115 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2116 graph->points_to[n]);
2117 ecl->equivalence_class = graph->pointer_label[n];
2118 return;
2121 /* If there was only a single non-empty predecessor the pointer equiv
2122 class is the same. */
2123 if (!graph->points_to[n])
2125 if (first_pred != -1U)
2127 graph->pointer_label[n] = graph->pointer_label[first_pred];
2128 graph->points_to[n] = graph->points_to[first_pred];
2130 return;
2133 if (!bitmap_empty_p (graph->points_to[n]))
2135 equiv_class_label_t ecl;
2136 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2137 graph->points_to[n]);
2138 if (ecl->equivalence_class == 0)
2139 ecl->equivalence_class = pointer_equiv_class++;
2140 else
2142 BITMAP_FREE (graph->points_to[n]);
2143 graph->points_to[n] = ecl->labels;
2145 graph->pointer_label[n] = ecl->equivalence_class;
2149 /* Print the pred graph in dot format. */
2151 static void
2152 dump_pred_graph (struct scc_info *si, FILE *file)
2154 unsigned int i;
2156 /* Only print the graph if it has already been initialized: */
2157 if (!graph)
2158 return;
2160 /* Prints the header of the dot file: */
2161 fprintf (file, "strict digraph {\n");
2162 fprintf (file, " node [\n shape = box\n ]\n");
2163 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
2164 fprintf (file, "\n // List of nodes and complex constraints in "
2165 "the constraint graph:\n");
2167 /* The next lines print the nodes in the graph together with the
2168 complex constraints attached to them. */
2169 for (i = 1; i < graph->size; i++)
2171 if (i == FIRST_REF_NODE)
2172 continue;
2173 if (si->node_mapping[i] != i)
2174 continue;
2175 if (i < FIRST_REF_NODE)
2176 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2177 else
2178 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2179 if (graph->points_to[i]
2180 && !bitmap_empty_p (graph->points_to[i]))
2182 fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2183 unsigned j;
2184 bitmap_iterator bi;
2185 EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2186 fprintf (file, " %d", j);
2187 fprintf (file, " }\"]");
2189 fprintf (file, ";\n");
2192 /* Go over the edges. */
2193 fprintf (file, "\n // Edges in the constraint graph:\n");
2194 for (i = 1; i < graph->size; i++)
2196 unsigned j;
2197 bitmap_iterator bi;
2198 if (si->node_mapping[i] != i)
2199 continue;
2200 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2202 unsigned from = si->node_mapping[j];
2203 if (from < FIRST_REF_NODE)
2204 fprintf (file, "\"%s\"", get_varinfo (from)->name);
2205 else
2206 fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2207 fprintf (file, " -> ");
2208 if (i < FIRST_REF_NODE)
2209 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2210 else
2211 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2212 fprintf (file, ";\n");
2216 /* Prints the tail of the dot file. */
2217 fprintf (file, "}\n");
2220 /* Perform offline variable substitution, discovering equivalence
2221 classes, and eliminating non-pointer variables. */
2223 static struct scc_info *
2224 perform_var_substitution (constraint_graph_t graph)
2226 unsigned int i;
2227 unsigned int size = graph->size;
2228 struct scc_info *si = init_scc_info (size);
2230 bitmap_obstack_initialize (&iteration_obstack);
2231 pointer_equiv_class_table.create (511);
2232 location_equiv_class_table.create (511);
2233 pointer_equiv_class = 1;
2234 location_equiv_class = 1;
2236 /* Condense the nodes, which means to find SCC's, count incoming
2237 predecessors, and unite nodes in SCC's. */
2238 for (i = 1; i < FIRST_REF_NODE; i++)
2239 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2240 condense_visit (graph, si, si->node_mapping[i]);
2242 if (dump_file && (dump_flags & TDF_GRAPH))
2244 fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
2245 "in dot format:\n");
2246 dump_pred_graph (si, dump_file);
2247 fprintf (dump_file, "\n\n");
2250 bitmap_clear (si->visited);
2251 /* Actually the label the nodes for pointer equivalences */
2252 for (i = 1; i < FIRST_REF_NODE; i++)
2253 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2254 label_visit (graph, si, si->node_mapping[i]);
2256 /* Calculate location equivalence labels. */
2257 for (i = 1; i < FIRST_REF_NODE; i++)
2259 bitmap pointed_by;
2260 bitmap_iterator bi;
2261 unsigned int j;
2263 if (!graph->pointed_by[i])
2264 continue;
2265 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2267 /* Translate the pointed-by mapping for pointer equivalence
2268 labels. */
2269 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2271 bitmap_set_bit (pointed_by,
2272 graph->pointer_label[si->node_mapping[j]]);
2274 /* The original pointed_by is now dead. */
2275 BITMAP_FREE (graph->pointed_by[i]);
2277 /* Look up the location equivalence label if one exists, or make
2278 one otherwise. */
2279 equiv_class_label_t ecl;
2280 ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
2281 if (ecl->equivalence_class == 0)
2282 ecl->equivalence_class = location_equiv_class++;
2283 else
2285 if (dump_file && (dump_flags & TDF_DETAILS))
2286 fprintf (dump_file, "Found location equivalence for node %s\n",
2287 get_varinfo (i)->name);
2288 BITMAP_FREE (pointed_by);
2290 graph->loc_label[i] = ecl->equivalence_class;
2294 if (dump_file && (dump_flags & TDF_DETAILS))
2295 for (i = 1; i < FIRST_REF_NODE; i++)
2297 unsigned j = si->node_mapping[i];
2298 if (j != i)
2300 fprintf (dump_file, "%s node id %d ",
2301 bitmap_bit_p (graph->direct_nodes, i)
2302 ? "Direct" : "Indirect", i);
2303 if (i < FIRST_REF_NODE)
2304 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2305 else
2306 fprintf (dump_file, "\"*%s\"",
2307 get_varinfo (i - FIRST_REF_NODE)->name);
2308 fprintf (dump_file, " mapped to SCC leader node id %d ", j);
2309 if (j < FIRST_REF_NODE)
2310 fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
2311 else
2312 fprintf (dump_file, "\"*%s\"\n",
2313 get_varinfo (j - FIRST_REF_NODE)->name);
2315 else
2317 fprintf (dump_file,
2318 "Equivalence classes for %s node id %d ",
2319 bitmap_bit_p (graph->direct_nodes, i)
2320 ? "direct" : "indirect", i);
2321 if (i < FIRST_REF_NODE)
2322 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2323 else
2324 fprintf (dump_file, "\"*%s\"",
2325 get_varinfo (i - FIRST_REF_NODE)->name);
2326 fprintf (dump_file,
2327 ": pointer %d, location %d\n",
2328 graph->pointer_label[i], graph->loc_label[i]);
2332 /* Quickly eliminate our non-pointer variables. */
2334 for (i = 1; i < FIRST_REF_NODE; i++)
2336 unsigned int node = si->node_mapping[i];
2338 if (graph->pointer_label[node] == 0)
2340 if (dump_file && (dump_flags & TDF_DETAILS))
2341 fprintf (dump_file,
2342 "%s is a non-pointer variable, eliminating edges.\n",
2343 get_varinfo (node)->name);
2344 stats.nonpointer_vars++;
2345 clear_edges_for_node (graph, node);
2349 return si;
2352 /* Free information that was only necessary for variable
2353 substitution. */
2355 static void
2356 free_var_substitution_info (struct scc_info *si)
2358 free_scc_info (si);
2359 free (graph->pointer_label);
2360 free (graph->loc_label);
2361 free (graph->pointed_by);
2362 free (graph->points_to);
2363 free (graph->eq_rep);
2364 sbitmap_free (graph->direct_nodes);
2365 pointer_equiv_class_table.dispose ();
2366 location_equiv_class_table.dispose ();
2367 bitmap_obstack_release (&iteration_obstack);
2370 /* Return an existing node that is equivalent to NODE, which has
2371 equivalence class LABEL, if one exists. Return NODE otherwise. */
2373 static unsigned int
2374 find_equivalent_node (constraint_graph_t graph,
2375 unsigned int node, unsigned int label)
2377 /* If the address version of this variable is unused, we can
2378 substitute it for anything else with the same label.
2379 Otherwise, we know the pointers are equivalent, but not the
2380 locations, and we can unite them later. */
2382 if (!bitmap_bit_p (graph->address_taken, node))
2384 gcc_checking_assert (label < graph->size);
2386 if (graph->eq_rep[label] != -1)
2388 /* Unify the two variables since we know they are equivalent. */
2389 if (unite (graph->eq_rep[label], node))
2390 unify_nodes (graph, graph->eq_rep[label], node, false);
2391 return graph->eq_rep[label];
2393 else
2395 graph->eq_rep[label] = node;
2396 graph->pe_rep[label] = node;
2399 else
2401 gcc_checking_assert (label < graph->size);
2402 graph->pe[node] = label;
2403 if (graph->pe_rep[label] == -1)
2404 graph->pe_rep[label] = node;
2407 return node;
2410 /* Unite pointer equivalent but not location equivalent nodes in
2411 GRAPH. This may only be performed once variable substitution is
2412 finished. */
2414 static void
2415 unite_pointer_equivalences (constraint_graph_t graph)
2417 unsigned int i;
2419 /* Go through the pointer equivalences and unite them to their
2420 representative, if they aren't already. */
2421 for (i = 1; i < FIRST_REF_NODE; i++)
2423 unsigned int label = graph->pe[i];
2424 if (label)
2426 int label_rep = graph->pe_rep[label];
2428 if (label_rep == -1)
2429 continue;
2431 label_rep = find (label_rep);
2432 if (label_rep >= 0 && unite (label_rep, find (i)))
2433 unify_nodes (graph, label_rep, i, false);
2438 /* Move complex constraints to the GRAPH nodes they belong to. */
2440 static void
2441 move_complex_constraints (constraint_graph_t graph)
2443 int i;
2444 constraint_t c;
2446 FOR_EACH_VEC_ELT (constraints, i, c)
2448 if (c)
2450 struct constraint_expr lhs = c->lhs;
2451 struct constraint_expr rhs = c->rhs;
2453 if (lhs.type == DEREF)
2455 insert_into_complex (graph, lhs.var, c);
2457 else if (rhs.type == DEREF)
2459 if (!(get_varinfo (lhs.var)->is_special_var))
2460 insert_into_complex (graph, rhs.var, c);
2462 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2463 && (lhs.offset != 0 || rhs.offset != 0))
2465 insert_into_complex (graph, rhs.var, c);
2472 /* Optimize and rewrite complex constraints while performing
2473 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2474 result of perform_variable_substitution. */
2476 static void
2477 rewrite_constraints (constraint_graph_t graph,
2478 struct scc_info *si)
2480 int i;
2481 constraint_t c;
2483 #ifdef ENABLE_CHECKING
2484 for (unsigned int j = 0; j < graph->size; j++)
2485 gcc_assert (find (j) == j);
2486 #endif
2488 FOR_EACH_VEC_ELT (constraints, i, c)
2490 struct constraint_expr lhs = c->lhs;
2491 struct constraint_expr rhs = c->rhs;
2492 unsigned int lhsvar = find (lhs.var);
2493 unsigned int rhsvar = find (rhs.var);
2494 unsigned int lhsnode, rhsnode;
2495 unsigned int lhslabel, rhslabel;
2497 lhsnode = si->node_mapping[lhsvar];
2498 rhsnode = si->node_mapping[rhsvar];
2499 lhslabel = graph->pointer_label[lhsnode];
2500 rhslabel = graph->pointer_label[rhsnode];
2502 /* See if it is really a non-pointer variable, and if so, ignore
2503 the constraint. */
2504 if (lhslabel == 0)
2506 if (dump_file && (dump_flags & TDF_DETAILS))
2509 fprintf (dump_file, "%s is a non-pointer variable,"
2510 "ignoring constraint:",
2511 get_varinfo (lhs.var)->name);
2512 dump_constraint (dump_file, c);
2513 fprintf (dump_file, "\n");
2515 constraints[i] = NULL;
2516 continue;
2519 if (rhslabel == 0)
2521 if (dump_file && (dump_flags & TDF_DETAILS))
2524 fprintf (dump_file, "%s is a non-pointer variable,"
2525 "ignoring constraint:",
2526 get_varinfo (rhs.var)->name);
2527 dump_constraint (dump_file, c);
2528 fprintf (dump_file, "\n");
2530 constraints[i] = NULL;
2531 continue;
2534 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2535 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2536 c->lhs.var = lhsvar;
2537 c->rhs.var = rhsvar;
2541 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2542 part of an SCC, false otherwise. */
2544 static bool
2545 eliminate_indirect_cycles (unsigned int node)
2547 if (graph->indirect_cycles[node] != -1
2548 && !bitmap_empty_p (get_varinfo (node)->solution))
2550 unsigned int i;
2551 vec<unsigned> queue = vNULL;
2552 int queuepos;
2553 unsigned int to = find (graph->indirect_cycles[node]);
2554 bitmap_iterator bi;
2556 /* We can't touch the solution set and call unify_nodes
2557 at the same time, because unify_nodes is going to do
2558 bitmap unions into it. */
2560 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2562 if (find (i) == i && i != to)
2564 if (unite (to, i))
2565 queue.safe_push (i);
2569 for (queuepos = 0;
2570 queue.iterate (queuepos, &i);
2571 queuepos++)
2573 unify_nodes (graph, to, i, true);
2575 queue.release ();
2576 return true;
2578 return false;
2581 /* Solve the constraint graph GRAPH using our worklist solver.
2582 This is based on the PW* family of solvers from the "Efficient Field
2583 Sensitive Pointer Analysis for C" paper.
2584 It works by iterating over all the graph nodes, processing the complex
2585 constraints and propagating the copy constraints, until everything stops
2586 changed. This corresponds to steps 6-8 in the solving list given above. */
2588 static void
2589 solve_graph (constraint_graph_t graph)
2591 unsigned int size = graph->size;
2592 unsigned int i;
2593 bitmap pts;
2595 changed = BITMAP_ALLOC (NULL);
2597 /* Mark all initial non-collapsed nodes as changed. */
2598 for (i = 1; i < size; i++)
2600 varinfo_t ivi = get_varinfo (i);
2601 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2602 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2603 || graph->complex[i].length () > 0))
2604 bitmap_set_bit (changed, i);
2607 /* Allocate a bitmap to be used to store the changed bits. */
2608 pts = BITMAP_ALLOC (&pta_obstack);
2610 while (!bitmap_empty_p (changed))
2612 unsigned int i;
2613 struct topo_info *ti = init_topo_info ();
2614 stats.iterations++;
2616 bitmap_obstack_initialize (&iteration_obstack);
2618 compute_topo_order (graph, ti);
2620 while (ti->topo_order.length () != 0)
2623 i = ti->topo_order.pop ();
2625 /* If this variable is not a representative, skip it. */
2626 if (find (i) != i)
2627 continue;
2629 /* In certain indirect cycle cases, we may merge this
2630 variable to another. */
2631 if (eliminate_indirect_cycles (i) && find (i) != i)
2632 continue;
2634 /* If the node has changed, we need to process the
2635 complex constraints and outgoing edges again. */
2636 if (bitmap_clear_bit (changed, i))
2638 unsigned int j;
2639 constraint_t c;
2640 bitmap solution;
2641 vec<constraint_t> complex = graph->complex[i];
2642 varinfo_t vi = get_varinfo (i);
2643 bool solution_empty;
2645 /* Compute the changed set of solution bits. If anything
2646 is in the solution just propagate that. */
2647 if (bitmap_bit_p (vi->solution, anything_id))
2649 /* If anything is also in the old solution there is
2650 nothing to do.
2651 ??? But we shouldn't ended up with "changed" set ... */
2652 if (vi->oldsolution
2653 && bitmap_bit_p (vi->oldsolution, anything_id))
2654 continue;
2655 bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
2657 else if (vi->oldsolution)
2658 bitmap_and_compl (pts, vi->solution, vi->oldsolution);
2659 else
2660 bitmap_copy (pts, vi->solution);
2662 if (bitmap_empty_p (pts))
2663 continue;
2665 if (vi->oldsolution)
2666 bitmap_ior_into (vi->oldsolution, pts);
2667 else
2669 vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
2670 bitmap_copy (vi->oldsolution, pts);
2673 solution = vi->solution;
2674 solution_empty = bitmap_empty_p (solution);
2676 /* Process the complex constraints */
2677 FOR_EACH_VEC_ELT (complex, j, c)
2679 /* XXX: This is going to unsort the constraints in
2680 some cases, which will occasionally add duplicate
2681 constraints during unification. This does not
2682 affect correctness. */
2683 c->lhs.var = find (c->lhs.var);
2684 c->rhs.var = find (c->rhs.var);
2686 /* The only complex constraint that can change our
2687 solution to non-empty, given an empty solution,
2688 is a constraint where the lhs side is receiving
2689 some set from elsewhere. */
2690 if (!solution_empty || c->lhs.type != DEREF)
2691 do_complex_constraint (graph, c, pts);
2694 solution_empty = bitmap_empty_p (solution);
2696 if (!solution_empty)
2698 bitmap_iterator bi;
2699 unsigned eff_escaped_id = find (escaped_id);
2701 /* Propagate solution to all successors. */
2702 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2703 0, j, bi)
2705 bitmap tmp;
2706 bool flag;
2708 unsigned int to = find (j);
2709 tmp = get_varinfo (to)->solution;
2710 flag = false;
2712 /* Don't try to propagate to ourselves. */
2713 if (to == i)
2714 continue;
2716 /* If we propagate from ESCAPED use ESCAPED as
2717 placeholder. */
2718 if (i == eff_escaped_id)
2719 flag = bitmap_set_bit (tmp, escaped_id);
2720 else
2721 flag = bitmap_ior_into (tmp, pts);
2723 if (flag)
2724 bitmap_set_bit (changed, to);
2729 free_topo_info (ti);
2730 bitmap_obstack_release (&iteration_obstack);
2733 BITMAP_FREE (pts);
2734 BITMAP_FREE (changed);
2735 bitmap_obstack_release (&oldpta_obstack);
2738 /* Map from trees to variable infos. */
2739 static struct pointer_map_t *vi_for_tree;
2742 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2744 static void
2745 insert_vi_for_tree (tree t, varinfo_t vi)
2747 void **slot = pointer_map_insert (vi_for_tree, t);
2748 gcc_assert (vi);
2749 gcc_assert (*slot == NULL);
2750 *slot = vi;
2753 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2754 exist in the map, return NULL, otherwise, return the varinfo we found. */
2756 static varinfo_t
2757 lookup_vi_for_tree (tree t)
2759 void **slot = pointer_map_contains (vi_for_tree, t);
2760 if (slot == NULL)
2761 return NULL;
2763 return (varinfo_t) *slot;
2766 /* Return a printable name for DECL */
2768 static const char *
2769 alias_get_name (tree decl)
2771 const char *res = NULL;
2772 char *temp;
2773 int num_printed = 0;
2775 if (!dump_file)
2776 return "NULL";
2778 if (TREE_CODE (decl) == SSA_NAME)
2780 res = get_name (decl);
2781 if (res)
2782 num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
2783 else
2784 num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
2785 if (num_printed > 0)
2787 res = ggc_strdup (temp);
2788 free (temp);
2791 else if (DECL_P (decl))
2793 if (DECL_ASSEMBLER_NAME_SET_P (decl))
2794 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2795 else
2797 res = get_name (decl);
2798 if (!res)
2800 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2801 if (num_printed > 0)
2803 res = ggc_strdup (temp);
2804 free (temp);
2809 if (res != NULL)
2810 return res;
2812 return "NULL";
2815 /* Find the variable id for tree T in the map.
2816 If T doesn't exist in the map, create an entry for it and return it. */
2818 static varinfo_t
2819 get_vi_for_tree (tree t)
2821 void **slot = pointer_map_contains (vi_for_tree, t);
2822 if (slot == NULL)
2823 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2825 return (varinfo_t) *slot;
2828 /* Get a scalar constraint expression for a new temporary variable. */
2830 static struct constraint_expr
2831 new_scalar_tmp_constraint_exp (const char *name)
2833 struct constraint_expr tmp;
2834 varinfo_t vi;
2836 vi = new_var_info (NULL_TREE, name);
2837 vi->offset = 0;
2838 vi->size = -1;
2839 vi->fullsize = -1;
2840 vi->is_full_var = 1;
2842 tmp.var = vi->id;
2843 tmp.type = SCALAR;
2844 tmp.offset = 0;
2846 return tmp;
2849 /* Get a constraint expression vector from an SSA_VAR_P node.
2850 If address_p is true, the result will be taken its address of. */
2852 static void
2853 get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
2855 struct constraint_expr cexpr;
2856 varinfo_t vi;
2858 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2859 gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
2861 /* For parameters, get at the points-to set for the actual parm
2862 decl. */
2863 if (TREE_CODE (t) == SSA_NAME
2864 && SSA_NAME_IS_DEFAULT_DEF (t)
2865 && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2866 || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
2868 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2869 return;
2872 /* For global variables resort to the alias target. */
2873 if (TREE_CODE (t) == VAR_DECL
2874 && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
2876 struct varpool_node *node = varpool_get_node (t);
2877 if (node && node->symbol.alias && node->symbol.analyzed)
2879 node = varpool_variable_node (node, NULL);
2880 t = node->symbol.decl;
2884 vi = get_vi_for_tree (t);
2885 cexpr.var = vi->id;
2886 cexpr.type = SCALAR;
2887 cexpr.offset = 0;
2888 /* If we determine the result is "anything", and we know this is readonly,
2889 say it points to readonly memory instead. */
2890 if (cexpr.var == anything_id && TREE_READONLY (t))
2892 gcc_unreachable ();
2893 cexpr.type = ADDRESSOF;
2894 cexpr.var = readonly_id;
2897 /* If we are not taking the address of the constraint expr, add all
2898 sub-fiels of the variable as well. */
2899 if (!address_p
2900 && !vi->is_full_var)
2902 for (; vi; vi = vi_next (vi))
2904 cexpr.var = vi->id;
2905 results->safe_push (cexpr);
2907 return;
2910 results->safe_push (cexpr);
2913 /* Process constraint T, performing various simplifications and then
2914 adding it to our list of overall constraints. */
2916 static void
2917 process_constraint (constraint_t t)
2919 struct constraint_expr rhs = t->rhs;
2920 struct constraint_expr lhs = t->lhs;
2922 gcc_assert (rhs.var < varmap.length ());
2923 gcc_assert (lhs.var < varmap.length ());
2925 /* If we didn't get any useful constraint from the lhs we get
2926 &ANYTHING as fallback from get_constraint_for. Deal with
2927 it here by turning it into *ANYTHING. */
2928 if (lhs.type == ADDRESSOF
2929 && lhs.var == anything_id)
2930 lhs.type = DEREF;
2932 /* ADDRESSOF on the lhs is invalid. */
2933 gcc_assert (lhs.type != ADDRESSOF);
2935 /* We shouldn't add constraints from things that cannot have pointers.
2936 It's not completely trivial to avoid in the callers, so do it here. */
2937 if (rhs.type != ADDRESSOF
2938 && !get_varinfo (rhs.var)->may_have_pointers)
2939 return;
2941 /* Likewise adding to the solution of a non-pointer var isn't useful. */
2942 if (!get_varinfo (lhs.var)->may_have_pointers)
2943 return;
2945 /* This can happen in our IR with things like n->a = *p */
2946 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2948 /* Split into tmp = *rhs, *lhs = tmp */
2949 struct constraint_expr tmplhs;
2950 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
2951 process_constraint (new_constraint (tmplhs, rhs));
2952 process_constraint (new_constraint (lhs, tmplhs));
2954 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2956 /* Split into tmp = &rhs, *lhs = tmp */
2957 struct constraint_expr tmplhs;
2958 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
2959 process_constraint (new_constraint (tmplhs, rhs));
2960 process_constraint (new_constraint (lhs, tmplhs));
2962 else
2964 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
2965 constraints.safe_push (t);
2970 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2971 structure. */
2973 static HOST_WIDE_INT
2974 bitpos_of_field (const tree fdecl)
2976 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2977 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2978 return -1;
2980 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
2981 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2985 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
2986 resulting constraint expressions in *RESULTS. */
2988 static void
2989 get_constraint_for_ptr_offset (tree ptr, tree offset,
2990 vec<ce_s> *results)
2992 struct constraint_expr c;
2993 unsigned int j, n;
2994 HOST_WIDE_INT rhsoffset;
2996 /* If we do not do field-sensitive PTA adding offsets to pointers
2997 does not change the points-to solution. */
2998 if (!use_field_sensitive)
3000 get_constraint_for_rhs (ptr, results);
3001 return;
3004 /* If the offset is not a non-negative integer constant that fits
3005 in a HOST_WIDE_INT, we have to fall back to a conservative
3006 solution which includes all sub-fields of all pointed-to
3007 variables of ptr. */
3008 if (offset == NULL_TREE
3009 || TREE_CODE (offset) != INTEGER_CST)
3010 rhsoffset = UNKNOWN_OFFSET;
3011 else
3013 /* Sign-extend the offset. */
3014 double_int soffset = tree_to_double_int (offset)
3015 .sext (TYPE_PRECISION (TREE_TYPE (offset)));
3016 if (!soffset.fits_shwi ())
3017 rhsoffset = UNKNOWN_OFFSET;
3018 else
3020 /* Make sure the bit-offset also fits. */
3021 HOST_WIDE_INT rhsunitoffset = soffset.low;
3022 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
3023 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3024 rhsoffset = UNKNOWN_OFFSET;
3028 get_constraint_for_rhs (ptr, results);
3029 if (rhsoffset == 0)
3030 return;
3032 /* As we are eventually appending to the solution do not use
3033 vec::iterate here. */
3034 n = results->length ();
3035 for (j = 0; j < n; j++)
3037 varinfo_t curr;
3038 c = (*results)[j];
3039 curr = get_varinfo (c.var);
3041 if (c.type == ADDRESSOF
3042 /* If this varinfo represents a full variable just use it. */
3043 && curr->is_full_var)
3044 c.offset = 0;
3045 else if (c.type == ADDRESSOF
3046 /* If we do not know the offset add all subfields. */
3047 && rhsoffset == UNKNOWN_OFFSET)
3049 varinfo_t temp = get_varinfo (curr->head);
3052 struct constraint_expr c2;
3053 c2.var = temp->id;
3054 c2.type = ADDRESSOF;
3055 c2.offset = 0;
3056 if (c2.var != c.var)
3057 results->safe_push (c2);
3058 temp = vi_next (temp);
3060 while (temp);
3062 else if (c.type == ADDRESSOF)
3064 varinfo_t temp;
3065 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3067 /* Search the sub-field which overlaps with the
3068 pointed-to offset. If the result is outside of the variable
3069 we have to provide a conservative result, as the variable is
3070 still reachable from the resulting pointer (even though it
3071 technically cannot point to anything). The last and first
3072 sub-fields are such conservative results.
3073 ??? If we always had a sub-field for &object + 1 then
3074 we could represent this in a more precise way. */
3075 if (rhsoffset < 0
3076 && curr->offset < offset)
3077 offset = 0;
3078 temp = first_or_preceding_vi_for_offset (curr, offset);
3080 /* If the found variable is not exactly at the pointed to
3081 result, we have to include the next variable in the
3082 solution as well. Otherwise two increments by offset / 2
3083 do not result in the same or a conservative superset
3084 solution. */
3085 if (temp->offset != offset
3086 && temp->next != 0)
3088 struct constraint_expr c2;
3089 c2.var = temp->next;
3090 c2.type = ADDRESSOF;
3091 c2.offset = 0;
3092 results->safe_push (c2);
3094 c.var = temp->id;
3095 c.offset = 0;
3097 else
3098 c.offset = rhsoffset;
3100 (*results)[j] = c;
3105 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3106 If address_p is true the result will be taken its address of.
3107 If lhs_p is true then the constraint expression is assumed to be used
3108 as the lhs. */
3110 static void
3111 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3112 bool address_p, bool lhs_p)
3114 tree orig_t = t;
3115 HOST_WIDE_INT bitsize = -1;
3116 HOST_WIDE_INT bitmaxsize = -1;
3117 HOST_WIDE_INT bitpos;
3118 tree forzero;
3120 /* Some people like to do cute things like take the address of
3121 &0->a.b */
3122 forzero = t;
3123 while (handled_component_p (forzero)
3124 || INDIRECT_REF_P (forzero)
3125 || TREE_CODE (forzero) == MEM_REF)
3126 forzero = TREE_OPERAND (forzero, 0);
3128 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3130 struct constraint_expr temp;
3132 temp.offset = 0;
3133 temp.var = integer_id;
3134 temp.type = SCALAR;
3135 results->safe_push (temp);
3136 return;
3139 /* Handle type-punning through unions. If we are extracting a pointer
3140 from a union via a possibly type-punning access that pointer
3141 points to anything, similar to a conversion of an integer to
3142 a pointer. */
3143 if (!lhs_p)
3145 tree u;
3146 for (u = t;
3147 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
3148 u = TREE_OPERAND (u, 0))
3149 if (TREE_CODE (u) == COMPONENT_REF
3150 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
3152 struct constraint_expr temp;
3154 temp.offset = 0;
3155 temp.var = anything_id;
3156 temp.type = ADDRESSOF;
3157 results->safe_push (temp);
3158 return;
3162 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
3164 /* Pretend to take the address of the base, we'll take care of
3165 adding the required subset of sub-fields below. */
3166 get_constraint_for_1 (t, results, true, lhs_p);
3167 gcc_assert (results->length () == 1);
3168 struct constraint_expr &result = results->last ();
3170 if (result.type == SCALAR
3171 && get_varinfo (result.var)->is_full_var)
3172 /* For single-field vars do not bother about the offset. */
3173 result.offset = 0;
3174 else if (result.type == SCALAR)
3176 /* In languages like C, you can access one past the end of an
3177 array. You aren't allowed to dereference it, so we can
3178 ignore this constraint. When we handle pointer subtraction,
3179 we may have to do something cute here. */
3181 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
3182 && bitmaxsize != 0)
3184 /* It's also not true that the constraint will actually start at the
3185 right offset, it may start in some padding. We only care about
3186 setting the constraint to the first actual field it touches, so
3187 walk to find it. */
3188 struct constraint_expr cexpr = result;
3189 varinfo_t curr;
3190 results->pop ();
3191 cexpr.offset = 0;
3192 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3194 if (ranges_overlap_p (curr->offset, curr->size,
3195 bitpos, bitmaxsize))
3197 cexpr.var = curr->id;
3198 results->safe_push (cexpr);
3199 if (address_p)
3200 break;
3203 /* If we are going to take the address of this field then
3204 to be able to compute reachability correctly add at least
3205 the last field of the variable. */
3206 if (address_p && results->length () == 0)
3208 curr = get_varinfo (cexpr.var);
3209 while (curr->next != 0)
3210 curr = vi_next (curr);
3211 cexpr.var = curr->id;
3212 results->safe_push (cexpr);
3214 else if (results->length () == 0)
3215 /* Assert that we found *some* field there. The user couldn't be
3216 accessing *only* padding. */
3217 /* Still the user could access one past the end of an array
3218 embedded in a struct resulting in accessing *only* padding. */
3219 /* Or accessing only padding via type-punning to a type
3220 that has a filed just in padding space. */
3222 cexpr.type = SCALAR;
3223 cexpr.var = anything_id;
3224 cexpr.offset = 0;
3225 results->safe_push (cexpr);
3228 else if (bitmaxsize == 0)
3230 if (dump_file && (dump_flags & TDF_DETAILS))
3231 fprintf (dump_file, "Access to zero-sized part of variable,"
3232 "ignoring\n");
3234 else
3235 if (dump_file && (dump_flags & TDF_DETAILS))
3236 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3238 else if (result.type == DEREF)
3240 /* If we do not know exactly where the access goes say so. Note
3241 that only for non-structure accesses we know that we access
3242 at most one subfiled of any variable. */
3243 if (bitpos == -1
3244 || bitsize != bitmaxsize
3245 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3246 || result.offset == UNKNOWN_OFFSET)
3247 result.offset = UNKNOWN_OFFSET;
3248 else
3249 result.offset += bitpos;
3251 else if (result.type == ADDRESSOF)
3253 /* We can end up here for component references on a
3254 VIEW_CONVERT_EXPR <>(&foobar). */
3255 result.type = SCALAR;
3256 result.var = anything_id;
3257 result.offset = 0;
3259 else
3260 gcc_unreachable ();
3264 /* Dereference the constraint expression CONS, and return the result.
3265 DEREF (ADDRESSOF) = SCALAR
3266 DEREF (SCALAR) = DEREF
3267 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3268 This is needed so that we can handle dereferencing DEREF constraints. */
3270 static void
3271 do_deref (vec<ce_s> *constraints)
3273 struct constraint_expr *c;
3274 unsigned int i = 0;
3276 FOR_EACH_VEC_ELT (*constraints, i, c)
3278 if (c->type == SCALAR)
3279 c->type = DEREF;
3280 else if (c->type == ADDRESSOF)
3281 c->type = SCALAR;
3282 else if (c->type == DEREF)
3284 struct constraint_expr tmplhs;
3285 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3286 process_constraint (new_constraint (tmplhs, *c));
3287 c->var = tmplhs.var;
3289 else
3290 gcc_unreachable ();
3294 /* Given a tree T, return the constraint expression for taking the
3295 address of it. */
3297 static void
3298 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3300 struct constraint_expr *c;
3301 unsigned int i;
3303 get_constraint_for_1 (t, results, true, true);
3305 FOR_EACH_VEC_ELT (*results, i, c)
3307 if (c->type == DEREF)
3308 c->type = SCALAR;
3309 else
3310 c->type = ADDRESSOF;
3314 /* Given a tree T, return the constraint expression for it. */
3316 static void
3317 get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
3318 bool lhs_p)
3320 struct constraint_expr temp;
3322 /* x = integer is all glommed to a single variable, which doesn't
3323 point to anything by itself. That is, of course, unless it is an
3324 integer constant being treated as a pointer, in which case, we
3325 will return that this is really the addressof anything. This
3326 happens below, since it will fall into the default case. The only
3327 case we know something about an integer treated like a pointer is
3328 when it is the NULL pointer, and then we just say it points to
3329 NULL.
3331 Do not do that if -fno-delete-null-pointer-checks though, because
3332 in that case *NULL does not fail, so it _should_ alias *anything.
3333 It is not worth adding a new option or renaming the existing one,
3334 since this case is relatively obscure. */
3335 if ((TREE_CODE (t) == INTEGER_CST
3336 && integer_zerop (t))
3337 /* The only valid CONSTRUCTORs in gimple with pointer typed
3338 elements are zero-initializer. But in IPA mode we also
3339 process global initializers, so verify at least. */
3340 || (TREE_CODE (t) == CONSTRUCTOR
3341 && CONSTRUCTOR_NELTS (t) == 0))
3343 if (flag_delete_null_pointer_checks)
3344 temp.var = nothing_id;
3345 else
3346 temp.var = nonlocal_id;
3347 temp.type = ADDRESSOF;
3348 temp.offset = 0;
3349 results->safe_push (temp);
3350 return;
3353 /* String constants are read-only. */
3354 if (TREE_CODE (t) == STRING_CST)
3356 temp.var = readonly_id;
3357 temp.type = SCALAR;
3358 temp.offset = 0;
3359 results->safe_push (temp);
3360 return;
3363 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3365 case tcc_expression:
3367 switch (TREE_CODE (t))
3369 case ADDR_EXPR:
3370 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3371 return;
3372 default:;
3374 break;
3376 case tcc_reference:
3378 switch (TREE_CODE (t))
3380 case MEM_REF:
3382 struct constraint_expr cs;
3383 varinfo_t vi, curr;
3384 get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3385 TREE_OPERAND (t, 1), results);
3386 do_deref (results);
3388 /* If we are not taking the address then make sure to process
3389 all subvariables we might access. */
3390 if (address_p)
3391 return;
3393 cs = results->last ();
3394 if (cs.type == DEREF
3395 && type_can_have_subvars (TREE_TYPE (t)))
3397 /* For dereferences this means we have to defer it
3398 to solving time. */
3399 results->last ().offset = UNKNOWN_OFFSET;
3400 return;
3402 if (cs.type != SCALAR)
3403 return;
3405 vi = get_varinfo (cs.var);
3406 curr = vi_next (vi);
3407 if (!vi->is_full_var
3408 && curr)
3410 unsigned HOST_WIDE_INT size;
3411 if (host_integerp (TYPE_SIZE (TREE_TYPE (t)), 1))
3412 size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t)));
3413 else
3414 size = -1;
3415 for (; curr; curr = vi_next (curr))
3417 if (curr->offset - vi->offset < size)
3419 cs.var = curr->id;
3420 results->safe_push (cs);
3422 else
3423 break;
3426 return;
3428 case ARRAY_REF:
3429 case ARRAY_RANGE_REF:
3430 case COMPONENT_REF:
3431 get_constraint_for_component_ref (t, results, address_p, lhs_p);
3432 return;
3433 case VIEW_CONVERT_EXPR:
3434 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
3435 lhs_p);
3436 return;
3437 /* We are missing handling for TARGET_MEM_REF here. */
3438 default:;
3440 break;
3442 case tcc_exceptional:
3444 switch (TREE_CODE (t))
3446 case SSA_NAME:
3448 get_constraint_for_ssa_var (t, results, address_p);
3449 return;
3451 case CONSTRUCTOR:
3453 unsigned int i;
3454 tree val;
3455 vec<ce_s> tmp = vNULL;
3456 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3458 struct constraint_expr *rhsp;
3459 unsigned j;
3460 get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3461 FOR_EACH_VEC_ELT (tmp, j, rhsp)
3462 results->safe_push (*rhsp);
3463 tmp.truncate (0);
3465 tmp.release ();
3466 /* We do not know whether the constructor was complete,
3467 so technically we have to add &NOTHING or &ANYTHING
3468 like we do for an empty constructor as well. */
3469 return;
3471 default:;
3473 break;
3475 case tcc_declaration:
3477 get_constraint_for_ssa_var (t, results, address_p);
3478 return;
3480 case tcc_constant:
3482 /* We cannot refer to automatic variables through constants. */
3483 temp.type = ADDRESSOF;
3484 temp.var = nonlocal_id;
3485 temp.offset = 0;
3486 results->safe_push (temp);
3487 return;
3489 default:;
3492 /* The default fallback is a constraint from anything. */
3493 temp.type = ADDRESSOF;
3494 temp.var = anything_id;
3495 temp.offset = 0;
3496 results->safe_push (temp);
3499 /* Given a gimple tree T, return the constraint expression vector for it. */
3501 static void
3502 get_constraint_for (tree t, vec<ce_s> *results)
3504 gcc_assert (results->length () == 0);
3506 get_constraint_for_1 (t, results, false, true);
3509 /* Given a gimple tree T, return the constraint expression vector for it
3510 to be used as the rhs of a constraint. */
3512 static void
3513 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3515 gcc_assert (results->length () == 0);
3517 get_constraint_for_1 (t, results, false, false);
3521 /* Efficiently generates constraints from all entries in *RHSC to all
3522 entries in *LHSC. */
3524 static void
3525 process_all_all_constraints (vec<ce_s> lhsc,
3526 vec<ce_s> rhsc)
3528 struct constraint_expr *lhsp, *rhsp;
3529 unsigned i, j;
3531 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3533 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3534 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3535 process_constraint (new_constraint (*lhsp, *rhsp));
3537 else
3539 struct constraint_expr tmp;
3540 tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3541 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3542 process_constraint (new_constraint (tmp, *rhsp));
3543 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3544 process_constraint (new_constraint (*lhsp, tmp));
3548 /* Handle aggregate copies by expanding into copies of the respective
3549 fields of the structures. */
3551 static void
3552 do_structure_copy (tree lhsop, tree rhsop)
3554 struct constraint_expr *lhsp, *rhsp;
3555 vec<ce_s> lhsc = vNULL;
3556 vec<ce_s> rhsc = vNULL;
3557 unsigned j;
3559 get_constraint_for (lhsop, &lhsc);
3560 get_constraint_for_rhs (rhsop, &rhsc);
3561 lhsp = &lhsc[0];
3562 rhsp = &rhsc[0];
3563 if (lhsp->type == DEREF
3564 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3565 || rhsp->type == DEREF)
3567 if (lhsp->type == DEREF)
3569 gcc_assert (lhsc.length () == 1);
3570 lhsp->offset = UNKNOWN_OFFSET;
3572 if (rhsp->type == DEREF)
3574 gcc_assert (rhsc.length () == 1);
3575 rhsp->offset = UNKNOWN_OFFSET;
3577 process_all_all_constraints (lhsc, rhsc);
3579 else if (lhsp->type == SCALAR
3580 && (rhsp->type == SCALAR
3581 || rhsp->type == ADDRESSOF))
3583 HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
3584 HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
3585 unsigned k = 0;
3586 get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
3587 get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
3588 for (j = 0; lhsc.iterate (j, &lhsp);)
3590 varinfo_t lhsv, rhsv;
3591 rhsp = &rhsc[k];
3592 lhsv = get_varinfo (lhsp->var);
3593 rhsv = get_varinfo (rhsp->var);
3594 if (lhsv->may_have_pointers
3595 && (lhsv->is_full_var
3596 || rhsv->is_full_var
3597 || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3598 rhsv->offset + lhsoffset, rhsv->size)))
3599 process_constraint (new_constraint (*lhsp, *rhsp));
3600 if (!rhsv->is_full_var
3601 && (lhsv->is_full_var
3602 || (lhsv->offset + rhsoffset + lhsv->size
3603 > rhsv->offset + lhsoffset + rhsv->size)))
3605 ++k;
3606 if (k >= rhsc.length ())
3607 break;
3609 else
3610 ++j;
3613 else
3614 gcc_unreachable ();
3616 lhsc.release ();
3617 rhsc.release ();
3620 /* Create constraints ID = { rhsc }. */
3622 static void
3623 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3625 struct constraint_expr *c;
3626 struct constraint_expr includes;
3627 unsigned int j;
3629 includes.var = id;
3630 includes.offset = 0;
3631 includes.type = SCALAR;
3633 FOR_EACH_VEC_ELT (rhsc, j, c)
3634 process_constraint (new_constraint (includes, *c));
3637 /* Create a constraint ID = OP. */
3639 static void
3640 make_constraint_to (unsigned id, tree op)
3642 vec<ce_s> rhsc = vNULL;
3643 get_constraint_for_rhs (op, &rhsc);
3644 make_constraints_to (id, rhsc);
3645 rhsc.release ();
3648 /* Create a constraint ID = &FROM. */
3650 static void
3651 make_constraint_from (varinfo_t vi, int from)
3653 struct constraint_expr lhs, rhs;
3655 lhs.var = vi->id;
3656 lhs.offset = 0;
3657 lhs.type = SCALAR;
3659 rhs.var = from;
3660 rhs.offset = 0;
3661 rhs.type = ADDRESSOF;
3662 process_constraint (new_constraint (lhs, rhs));
3665 /* Create a constraint ID = FROM. */
3667 static void
3668 make_copy_constraint (varinfo_t vi, int from)
3670 struct constraint_expr lhs, rhs;
3672 lhs.var = vi->id;
3673 lhs.offset = 0;
3674 lhs.type = SCALAR;
3676 rhs.var = from;
3677 rhs.offset = 0;
3678 rhs.type = SCALAR;
3679 process_constraint (new_constraint (lhs, rhs));
3682 /* Make constraints necessary to make OP escape. */
3684 static void
3685 make_escape_constraint (tree op)
3687 make_constraint_to (escaped_id, op);
3690 /* Add constraints to that the solution of VI is transitively closed. */
3692 static void
3693 make_transitive_closure_constraints (varinfo_t vi)
3695 struct constraint_expr lhs, rhs;
3697 /* VAR = *VAR; */
3698 lhs.type = SCALAR;
3699 lhs.var = vi->id;
3700 lhs.offset = 0;
3701 rhs.type = DEREF;
3702 rhs.var = vi->id;
3703 rhs.offset = 0;
3704 process_constraint (new_constraint (lhs, rhs));
3706 /* VAR = VAR + UNKNOWN; */
3707 lhs.type = SCALAR;
3708 lhs.var = vi->id;
3709 lhs.offset = 0;
3710 rhs.type = SCALAR;
3711 rhs.var = vi->id;
3712 rhs.offset = UNKNOWN_OFFSET;
3713 process_constraint (new_constraint (lhs, rhs));
3716 /* Temporary storage for fake var decls. */
3717 struct obstack fake_var_decl_obstack;
3719 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3721 static tree
3722 build_fake_var_decl (tree type)
3724 tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
3725 memset (decl, 0, sizeof (struct tree_var_decl));
3726 TREE_SET_CODE (decl, VAR_DECL);
3727 TREE_TYPE (decl) = type;
3728 DECL_UID (decl) = allocate_decl_uid ();
3729 SET_DECL_PT_UID (decl, -1);
3730 layout_decl (decl, 0);
3731 return decl;
3734 /* Create a new artificial heap variable with NAME.
3735 Return the created variable. */
3737 static varinfo_t
3738 make_heapvar (const char *name)
3740 varinfo_t vi;
3741 tree heapvar;
3743 heapvar = build_fake_var_decl (ptr_type_node);
3744 DECL_EXTERNAL (heapvar) = 1;
3746 vi = new_var_info (heapvar, name);
3747 vi->is_artificial_var = true;
3748 vi->is_heap_var = true;
3749 vi->is_unknown_size_var = true;
3750 vi->offset = 0;
3751 vi->fullsize = ~0;
3752 vi->size = ~0;
3753 vi->is_full_var = true;
3754 insert_vi_for_tree (heapvar, vi);
3756 return vi;
3759 /* Create a new artificial heap variable with NAME and make a
3760 constraint from it to LHS. Set flags according to a tag used
3761 for tracking restrict pointers. */
3763 static varinfo_t
3764 make_constraint_from_restrict (varinfo_t lhs, const char *name)
3766 varinfo_t vi = make_heapvar (name);
3767 vi->is_global_var = 1;
3768 vi->may_have_pointers = 1;
3769 make_constraint_from (lhs, vi->id);
3770 return vi;
3773 /* Create a new artificial heap variable with NAME and make a
3774 constraint from it to LHS. Set flags according to a tag used
3775 for tracking restrict pointers and make the artificial heap
3776 point to global memory. */
3778 static varinfo_t
3779 make_constraint_from_global_restrict (varinfo_t lhs, const char *name)
3781 varinfo_t vi = make_constraint_from_restrict (lhs, name);
3782 make_copy_constraint (vi, nonlocal_id);
3783 return vi;
3786 /* In IPA mode there are varinfos for different aspects of reach
3787 function designator. One for the points-to set of the return
3788 value, one for the variables that are clobbered by the function,
3789 one for its uses and one for each parameter (including a single
3790 glob for remaining variadic arguments). */
3792 enum { fi_clobbers = 1, fi_uses = 2,
3793 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3795 /* Get a constraint for the requested part of a function designator FI
3796 when operating in IPA mode. */
3798 static struct constraint_expr
3799 get_function_part_constraint (varinfo_t fi, unsigned part)
3801 struct constraint_expr c;
3803 gcc_assert (in_ipa_mode);
3805 if (fi->id == anything_id)
3807 /* ??? We probably should have a ANYFN special variable. */
3808 c.var = anything_id;
3809 c.offset = 0;
3810 c.type = SCALAR;
3812 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3814 varinfo_t ai = first_vi_for_offset (fi, part);
3815 if (ai)
3816 c.var = ai->id;
3817 else
3818 c.var = anything_id;
3819 c.offset = 0;
3820 c.type = SCALAR;
3822 else
3824 c.var = fi->id;
3825 c.offset = part;
3826 c.type = DEREF;
3829 return c;
3832 /* For non-IPA mode, generate constraints necessary for a call on the
3833 RHS. */
3835 static void
3836 handle_rhs_call (gimple stmt, vec<ce_s> *results)
3838 struct constraint_expr rhsc;
3839 unsigned i;
3840 bool returns_uses = false;
3842 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3844 tree arg = gimple_call_arg (stmt, i);
3845 int flags = gimple_call_arg_flags (stmt, i);
3847 /* If the argument is not used we can ignore it. */
3848 if (flags & EAF_UNUSED)
3849 continue;
3851 /* As we compute ESCAPED context-insensitive we do not gain
3852 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3853 set. The argument would still get clobbered through the
3854 escape solution. */
3855 if ((flags & EAF_NOCLOBBER)
3856 && (flags & EAF_NOESCAPE))
3858 varinfo_t uses = get_call_use_vi (stmt);
3859 if (!(flags & EAF_DIRECT))
3861 varinfo_t tem = new_var_info (NULL_TREE, "callarg");
3862 make_constraint_to (tem->id, arg);
3863 make_transitive_closure_constraints (tem);
3864 make_copy_constraint (uses, tem->id);
3866 else
3867 make_constraint_to (uses->id, arg);
3868 returns_uses = true;
3870 else if (flags & EAF_NOESCAPE)
3872 struct constraint_expr lhs, rhs;
3873 varinfo_t uses = get_call_use_vi (stmt);
3874 varinfo_t clobbers = get_call_clobber_vi (stmt);
3875 varinfo_t tem = new_var_info (NULL_TREE, "callarg");
3876 make_constraint_to (tem->id, arg);
3877 if (!(flags & EAF_DIRECT))
3878 make_transitive_closure_constraints (tem);
3879 make_copy_constraint (uses, tem->id);
3880 make_copy_constraint (clobbers, tem->id);
3881 /* Add *tem = nonlocal, do not add *tem = callused as
3882 EAF_NOESCAPE parameters do not escape to other parameters
3883 and all other uses appear in NONLOCAL as well. */
3884 lhs.type = DEREF;
3885 lhs.var = tem->id;
3886 lhs.offset = 0;
3887 rhs.type = SCALAR;
3888 rhs.var = nonlocal_id;
3889 rhs.offset = 0;
3890 process_constraint (new_constraint (lhs, rhs));
3891 returns_uses = true;
3893 else
3894 make_escape_constraint (arg);
3897 /* If we added to the calls uses solution make sure we account for
3898 pointers to it to be returned. */
3899 if (returns_uses)
3901 rhsc.var = get_call_use_vi (stmt)->id;
3902 rhsc.offset = 0;
3903 rhsc.type = SCALAR;
3904 results->safe_push (rhsc);
3907 /* The static chain escapes as well. */
3908 if (gimple_call_chain (stmt))
3909 make_escape_constraint (gimple_call_chain (stmt));
3911 /* And if we applied NRV the address of the return slot escapes as well. */
3912 if (gimple_call_return_slot_opt_p (stmt)
3913 && gimple_call_lhs (stmt) != NULL_TREE
3914 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3916 vec<ce_s> tmpc = vNULL;
3917 struct constraint_expr lhsc, *c;
3918 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
3919 lhsc.var = escaped_id;
3920 lhsc.offset = 0;
3921 lhsc.type = SCALAR;
3922 FOR_EACH_VEC_ELT (tmpc, i, c)
3923 process_constraint (new_constraint (lhsc, *c));
3924 tmpc.release ();
3927 /* Regular functions return nonlocal memory. */
3928 rhsc.var = nonlocal_id;
3929 rhsc.offset = 0;
3930 rhsc.type = SCALAR;
3931 results->safe_push (rhsc);
3934 /* For non-IPA mode, generate constraints necessary for a call
3935 that returns a pointer and assigns it to LHS. This simply makes
3936 the LHS point to global and escaped variables. */
3938 static void
3939 handle_lhs_call (gimple stmt, tree lhs, int flags, vec<ce_s> rhsc,
3940 tree fndecl)
3942 vec<ce_s> lhsc = vNULL;
3944 get_constraint_for (lhs, &lhsc);
3945 /* If the store is to a global decl make sure to
3946 add proper escape constraints. */
3947 lhs = get_base_address (lhs);
3948 if (lhs
3949 && DECL_P (lhs)
3950 && is_global_var (lhs))
3952 struct constraint_expr tmpc;
3953 tmpc.var = escaped_id;
3954 tmpc.offset = 0;
3955 tmpc.type = SCALAR;
3956 lhsc.safe_push (tmpc);
3959 /* If the call returns an argument unmodified override the rhs
3960 constraints. */
3961 flags = gimple_call_return_flags (stmt);
3962 if (flags & ERF_RETURNS_ARG
3963 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
3965 tree arg;
3966 rhsc.create (0);
3967 arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
3968 get_constraint_for (arg, &rhsc);
3969 process_all_all_constraints (lhsc, rhsc);
3970 rhsc.release ();
3972 else if (flags & ERF_NOALIAS)
3974 varinfo_t vi;
3975 struct constraint_expr tmpc;
3976 rhsc.create (0);
3977 vi = make_heapvar ("HEAP");
3978 /* We delay marking allocated storage global until we know if
3979 it escapes. */
3980 DECL_EXTERNAL (vi->decl) = 0;
3981 vi->is_global_var = 0;
3982 /* If this is not a real malloc call assume the memory was
3983 initialized and thus may point to global memory. All
3984 builtin functions with the malloc attribute behave in a sane way. */
3985 if (!fndecl
3986 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
3987 make_constraint_from (vi, nonlocal_id);
3988 tmpc.var = vi->id;
3989 tmpc.offset = 0;
3990 tmpc.type = ADDRESSOF;
3991 rhsc.safe_push (tmpc);
3992 process_all_all_constraints (lhsc, rhsc);
3993 rhsc.release ();
3995 else
3996 process_all_all_constraints (lhsc, rhsc);
3998 lhsc.release ();
4001 /* For non-IPA mode, generate constraints necessary for a call of a
4002 const function that returns a pointer in the statement STMT. */
4004 static void
4005 handle_const_call (gimple stmt, vec<ce_s> *results)
4007 struct constraint_expr rhsc;
4008 unsigned int k;
4010 /* Treat nested const functions the same as pure functions as far
4011 as the static chain is concerned. */
4012 if (gimple_call_chain (stmt))
4014 varinfo_t uses = get_call_use_vi (stmt);
4015 make_transitive_closure_constraints (uses);
4016 make_constraint_to (uses->id, gimple_call_chain (stmt));
4017 rhsc.var = uses->id;
4018 rhsc.offset = 0;
4019 rhsc.type = SCALAR;
4020 results->safe_push (rhsc);
4023 /* May return arguments. */
4024 for (k = 0; k < gimple_call_num_args (stmt); ++k)
4026 tree arg = gimple_call_arg (stmt, k);
4027 vec<ce_s> argc = vNULL;
4028 unsigned i;
4029 struct constraint_expr *argp;
4030 get_constraint_for_rhs (arg, &argc);
4031 FOR_EACH_VEC_ELT (argc, i, argp)
4032 results->safe_push (*argp);
4033 argc.release ();
4036 /* May return addresses of globals. */
4037 rhsc.var = nonlocal_id;
4038 rhsc.offset = 0;
4039 rhsc.type = ADDRESSOF;
4040 results->safe_push (rhsc);
4043 /* For non-IPA mode, generate constraints necessary for a call to a
4044 pure function in statement STMT. */
4046 static void
4047 handle_pure_call (gimple stmt, vec<ce_s> *results)
4049 struct constraint_expr rhsc;
4050 unsigned i;
4051 varinfo_t uses = NULL;
4053 /* Memory reached from pointer arguments is call-used. */
4054 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4056 tree arg = gimple_call_arg (stmt, i);
4057 if (!uses)
4059 uses = get_call_use_vi (stmt);
4060 make_transitive_closure_constraints (uses);
4062 make_constraint_to (uses->id, arg);
4065 /* The static chain is used as well. */
4066 if (gimple_call_chain (stmt))
4068 if (!uses)
4070 uses = get_call_use_vi (stmt);
4071 make_transitive_closure_constraints (uses);
4073 make_constraint_to (uses->id, gimple_call_chain (stmt));
4076 /* Pure functions may return call-used and nonlocal memory. */
4077 if (uses)
4079 rhsc.var = uses->id;
4080 rhsc.offset = 0;
4081 rhsc.type = SCALAR;
4082 results->safe_push (rhsc);
4084 rhsc.var = nonlocal_id;
4085 rhsc.offset = 0;
4086 rhsc.type = SCALAR;
4087 results->safe_push (rhsc);
4091 /* Return the varinfo for the callee of CALL. */
4093 static varinfo_t
4094 get_fi_for_callee (gimple call)
4096 tree decl, fn = gimple_call_fn (call);
4098 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4099 fn = OBJ_TYPE_REF_EXPR (fn);
4101 /* If we can directly resolve the function being called, do so.
4102 Otherwise, it must be some sort of indirect expression that
4103 we should still be able to handle. */
4104 decl = gimple_call_addr_fndecl (fn);
4105 if (decl)
4106 return get_vi_for_tree (decl);
4108 /* If the function is anything other than a SSA name pointer we have no
4109 clue and should be getting ANYFN (well, ANYTHING for now). */
4110 if (!fn || TREE_CODE (fn) != SSA_NAME)
4111 return get_varinfo (anything_id);
4113 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4114 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4115 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4116 fn = SSA_NAME_VAR (fn);
4118 return get_vi_for_tree (fn);
4121 /* Create constraints for the builtin call T. Return true if the call
4122 was handled, otherwise false. */
4124 static bool
4125 find_func_aliases_for_builtin_call (gimple t)
4127 tree fndecl = gimple_call_fndecl (t);
4128 vec<ce_s> lhsc = vNULL;
4129 vec<ce_s> rhsc = vNULL;
4130 varinfo_t fi;
4132 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4133 /* ??? All builtins that are handled here need to be handled
4134 in the alias-oracle query functions explicitly! */
4135 switch (DECL_FUNCTION_CODE (fndecl))
4137 /* All the following functions return a pointer to the same object
4138 as their first argument points to. The functions do not add
4139 to the ESCAPED solution. The functions make the first argument
4140 pointed to memory point to what the second argument pointed to
4141 memory points to. */
4142 case BUILT_IN_STRCPY:
4143 case BUILT_IN_STRNCPY:
4144 case BUILT_IN_BCOPY:
4145 case BUILT_IN_MEMCPY:
4146 case BUILT_IN_MEMMOVE:
4147 case BUILT_IN_MEMPCPY:
4148 case BUILT_IN_STPCPY:
4149 case BUILT_IN_STPNCPY:
4150 case BUILT_IN_STRCAT:
4151 case BUILT_IN_STRNCAT:
4152 case BUILT_IN_STRCPY_CHK:
4153 case BUILT_IN_STRNCPY_CHK:
4154 case BUILT_IN_MEMCPY_CHK:
4155 case BUILT_IN_MEMMOVE_CHK:
4156 case BUILT_IN_MEMPCPY_CHK:
4157 case BUILT_IN_STPCPY_CHK:
4158 case BUILT_IN_STPNCPY_CHK:
4159 case BUILT_IN_STRCAT_CHK:
4160 case BUILT_IN_STRNCAT_CHK:
4161 case BUILT_IN_TM_MEMCPY:
4162 case BUILT_IN_TM_MEMMOVE:
4164 tree res = gimple_call_lhs (t);
4165 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4166 == BUILT_IN_BCOPY ? 1 : 0));
4167 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4168 == BUILT_IN_BCOPY ? 0 : 1));
4169 if (res != NULL_TREE)
4171 get_constraint_for (res, &lhsc);
4172 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4173 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4174 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4175 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4176 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4177 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4178 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4179 else
4180 get_constraint_for (dest, &rhsc);
4181 process_all_all_constraints (lhsc, rhsc);
4182 lhsc.release ();
4183 rhsc.release ();
4185 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4186 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4187 do_deref (&lhsc);
4188 do_deref (&rhsc);
4189 process_all_all_constraints (lhsc, rhsc);
4190 lhsc.release ();
4191 rhsc.release ();
4192 return true;
4194 case BUILT_IN_MEMSET:
4195 case BUILT_IN_MEMSET_CHK:
4196 case BUILT_IN_TM_MEMSET:
4198 tree res = gimple_call_lhs (t);
4199 tree dest = gimple_call_arg (t, 0);
4200 unsigned i;
4201 ce_s *lhsp;
4202 struct constraint_expr ac;
4203 if (res != NULL_TREE)
4205 get_constraint_for (res, &lhsc);
4206 get_constraint_for (dest, &rhsc);
4207 process_all_all_constraints (lhsc, rhsc);
4208 lhsc.release ();
4209 rhsc.release ();
4211 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4212 do_deref (&lhsc);
4213 if (flag_delete_null_pointer_checks
4214 && integer_zerop (gimple_call_arg (t, 1)))
4216 ac.type = ADDRESSOF;
4217 ac.var = nothing_id;
4219 else
4221 ac.type = SCALAR;
4222 ac.var = integer_id;
4224 ac.offset = 0;
4225 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4226 process_constraint (new_constraint (*lhsp, ac));
4227 lhsc.release ();
4228 return true;
4230 case BUILT_IN_ASSUME_ALIGNED:
4232 tree res = gimple_call_lhs (t);
4233 tree dest = gimple_call_arg (t, 0);
4234 if (res != NULL_TREE)
4236 get_constraint_for (res, &lhsc);
4237 get_constraint_for (dest, &rhsc);
4238 process_all_all_constraints (lhsc, rhsc);
4239 lhsc.release ();
4240 rhsc.release ();
4242 return true;
4244 /* All the following functions do not return pointers, do not
4245 modify the points-to sets of memory reachable from their
4246 arguments and do not add to the ESCAPED solution. */
4247 case BUILT_IN_SINCOS:
4248 case BUILT_IN_SINCOSF:
4249 case BUILT_IN_SINCOSL:
4250 case BUILT_IN_FREXP:
4251 case BUILT_IN_FREXPF:
4252 case BUILT_IN_FREXPL:
4253 case BUILT_IN_GAMMA_R:
4254 case BUILT_IN_GAMMAF_R:
4255 case BUILT_IN_GAMMAL_R:
4256 case BUILT_IN_LGAMMA_R:
4257 case BUILT_IN_LGAMMAF_R:
4258 case BUILT_IN_LGAMMAL_R:
4259 case BUILT_IN_MODF:
4260 case BUILT_IN_MODFF:
4261 case BUILT_IN_MODFL:
4262 case BUILT_IN_REMQUO:
4263 case BUILT_IN_REMQUOF:
4264 case BUILT_IN_REMQUOL:
4265 case BUILT_IN_FREE:
4266 return true;
4267 case BUILT_IN_STRDUP:
4268 case BUILT_IN_STRNDUP:
4269 if (gimple_call_lhs (t))
4271 handle_lhs_call (t, gimple_call_lhs (t), gimple_call_flags (t),
4272 vNULL, fndecl);
4273 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4274 NULL_TREE, &lhsc);
4275 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4276 NULL_TREE, &rhsc);
4277 do_deref (&lhsc);
4278 do_deref (&rhsc);
4279 process_all_all_constraints (lhsc, rhsc);
4280 lhsc.release ();
4281 rhsc.release ();
4282 return true;
4284 break;
4285 /* String / character search functions return a pointer into the
4286 source string or NULL. */
4287 case BUILT_IN_INDEX:
4288 case BUILT_IN_STRCHR:
4289 case BUILT_IN_STRRCHR:
4290 case BUILT_IN_MEMCHR:
4291 case BUILT_IN_STRSTR:
4292 case BUILT_IN_STRPBRK:
4293 if (gimple_call_lhs (t))
4295 tree src = gimple_call_arg (t, 0);
4296 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4297 constraint_expr nul;
4298 nul.var = nothing_id;
4299 nul.offset = 0;
4300 nul.type = ADDRESSOF;
4301 rhsc.safe_push (nul);
4302 get_constraint_for (gimple_call_lhs (t), &lhsc);
4303 process_all_all_constraints (lhsc, rhsc);
4304 lhsc.release();
4305 rhsc.release();
4307 return true;
4308 /* Trampolines are special - they set up passing the static
4309 frame. */
4310 case BUILT_IN_INIT_TRAMPOLINE:
4312 tree tramp = gimple_call_arg (t, 0);
4313 tree nfunc = gimple_call_arg (t, 1);
4314 tree frame = gimple_call_arg (t, 2);
4315 unsigned i;
4316 struct constraint_expr lhs, *rhsp;
4317 if (in_ipa_mode)
4319 varinfo_t nfi = NULL;
4320 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4321 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4322 if (nfi)
4324 lhs = get_function_part_constraint (nfi, fi_static_chain);
4325 get_constraint_for (frame, &rhsc);
4326 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4327 process_constraint (new_constraint (lhs, *rhsp));
4328 rhsc.release ();
4330 /* Make the frame point to the function for
4331 the trampoline adjustment call. */
4332 get_constraint_for (tramp, &lhsc);
4333 do_deref (&lhsc);
4334 get_constraint_for (nfunc, &rhsc);
4335 process_all_all_constraints (lhsc, rhsc);
4336 rhsc.release ();
4337 lhsc.release ();
4339 return true;
4342 /* Else fallthru to generic handling which will let
4343 the frame escape. */
4344 break;
4346 case BUILT_IN_ADJUST_TRAMPOLINE:
4348 tree tramp = gimple_call_arg (t, 0);
4349 tree res = gimple_call_lhs (t);
4350 if (in_ipa_mode && res)
4352 get_constraint_for (res, &lhsc);
4353 get_constraint_for (tramp, &rhsc);
4354 do_deref (&rhsc);
4355 process_all_all_constraints (lhsc, rhsc);
4356 rhsc.release ();
4357 lhsc.release ();
4359 return true;
4361 CASE_BUILT_IN_TM_STORE (1):
4362 CASE_BUILT_IN_TM_STORE (2):
4363 CASE_BUILT_IN_TM_STORE (4):
4364 CASE_BUILT_IN_TM_STORE (8):
4365 CASE_BUILT_IN_TM_STORE (FLOAT):
4366 CASE_BUILT_IN_TM_STORE (DOUBLE):
4367 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4368 CASE_BUILT_IN_TM_STORE (M64):
4369 CASE_BUILT_IN_TM_STORE (M128):
4370 CASE_BUILT_IN_TM_STORE (M256):
4372 tree addr = gimple_call_arg (t, 0);
4373 tree src = gimple_call_arg (t, 1);
4375 get_constraint_for (addr, &lhsc);
4376 do_deref (&lhsc);
4377 get_constraint_for (src, &rhsc);
4378 process_all_all_constraints (lhsc, rhsc);
4379 lhsc.release ();
4380 rhsc.release ();
4381 return true;
4383 CASE_BUILT_IN_TM_LOAD (1):
4384 CASE_BUILT_IN_TM_LOAD (2):
4385 CASE_BUILT_IN_TM_LOAD (4):
4386 CASE_BUILT_IN_TM_LOAD (8):
4387 CASE_BUILT_IN_TM_LOAD (FLOAT):
4388 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4389 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4390 CASE_BUILT_IN_TM_LOAD (M64):
4391 CASE_BUILT_IN_TM_LOAD (M128):
4392 CASE_BUILT_IN_TM_LOAD (M256):
4394 tree dest = gimple_call_lhs (t);
4395 tree addr = gimple_call_arg (t, 0);
4397 get_constraint_for (dest, &lhsc);
4398 get_constraint_for (addr, &rhsc);
4399 do_deref (&rhsc);
4400 process_all_all_constraints (lhsc, rhsc);
4401 lhsc.release ();
4402 rhsc.release ();
4403 return true;
4405 /* Variadic argument handling needs to be handled in IPA
4406 mode as well. */
4407 case BUILT_IN_VA_START:
4409 tree valist = gimple_call_arg (t, 0);
4410 struct constraint_expr rhs, *lhsp;
4411 unsigned i;
4412 get_constraint_for (valist, &lhsc);
4413 do_deref (&lhsc);
4414 /* The va_list gets access to pointers in variadic
4415 arguments. Which we know in the case of IPA analysis
4416 and otherwise are just all nonlocal variables. */
4417 if (in_ipa_mode)
4419 fi = lookup_vi_for_tree (cfun->decl);
4420 rhs = get_function_part_constraint (fi, ~0);
4421 rhs.type = ADDRESSOF;
4423 else
4425 rhs.var = nonlocal_id;
4426 rhs.type = ADDRESSOF;
4427 rhs.offset = 0;
4429 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4430 process_constraint (new_constraint (*lhsp, rhs));
4431 lhsc.release ();
4432 /* va_list is clobbered. */
4433 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4434 return true;
4436 /* va_end doesn't have any effect that matters. */
4437 case BUILT_IN_VA_END:
4438 return true;
4439 /* Alternate return. Simply give up for now. */
4440 case BUILT_IN_RETURN:
4442 fi = NULL;
4443 if (!in_ipa_mode
4444 || !(fi = get_vi_for_tree (cfun->decl)))
4445 make_constraint_from (get_varinfo (escaped_id), anything_id);
4446 else if (in_ipa_mode
4447 && fi != NULL)
4449 struct constraint_expr lhs, rhs;
4450 lhs = get_function_part_constraint (fi, fi_result);
4451 rhs.var = anything_id;
4452 rhs.offset = 0;
4453 rhs.type = SCALAR;
4454 process_constraint (new_constraint (lhs, rhs));
4456 return true;
4458 /* printf-style functions may have hooks to set pointers to
4459 point to somewhere into the generated string. Leave them
4460 for a later exercise... */
4461 default:
4462 /* Fallthru to general call handling. */;
4465 return false;
4468 /* Create constraints for the call T. */
4470 static void
4471 find_func_aliases_for_call (gimple t)
4473 tree fndecl = gimple_call_fndecl (t);
4474 vec<ce_s> lhsc = vNULL;
4475 vec<ce_s> rhsc = vNULL;
4476 varinfo_t fi;
4478 if (fndecl != NULL_TREE
4479 && DECL_BUILT_IN (fndecl)
4480 && find_func_aliases_for_builtin_call (t))
4481 return;
4483 fi = get_fi_for_callee (t);
4484 if (!in_ipa_mode
4485 || (fndecl && !fi->is_fn_info))
4487 vec<ce_s> rhsc = vNULL;
4488 int flags = gimple_call_flags (t);
4490 /* Const functions can return their arguments and addresses
4491 of global memory but not of escaped memory. */
4492 if (flags & (ECF_CONST|ECF_NOVOPS))
4494 if (gimple_call_lhs (t))
4495 handle_const_call (t, &rhsc);
4497 /* Pure functions can return addresses in and of memory
4498 reachable from their arguments, but they are not an escape
4499 point for reachable memory of their arguments. */
4500 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4501 handle_pure_call (t, &rhsc);
4502 else
4503 handle_rhs_call (t, &rhsc);
4504 if (gimple_call_lhs (t))
4505 handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
4506 rhsc.release ();
4508 else
4510 tree lhsop;
4511 unsigned j;
4513 /* Assign all the passed arguments to the appropriate incoming
4514 parameters of the function. */
4515 for (j = 0; j < gimple_call_num_args (t); j++)
4517 struct constraint_expr lhs ;
4518 struct constraint_expr *rhsp;
4519 tree arg = gimple_call_arg (t, j);
4521 get_constraint_for_rhs (arg, &rhsc);
4522 lhs = get_function_part_constraint (fi, fi_parm_base + j);
4523 while (rhsc.length () != 0)
4525 rhsp = &rhsc.last ();
4526 process_constraint (new_constraint (lhs, *rhsp));
4527 rhsc.pop ();
4531 /* If we are returning a value, assign it to the result. */
4532 lhsop = gimple_call_lhs (t);
4533 if (lhsop)
4535 struct constraint_expr rhs;
4536 struct constraint_expr *lhsp;
4538 get_constraint_for (lhsop, &lhsc);
4539 rhs = get_function_part_constraint (fi, fi_result);
4540 if (fndecl
4541 && DECL_RESULT (fndecl)
4542 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4544 vec<ce_s> tem = vNULL;
4545 tem.safe_push (rhs);
4546 do_deref (&tem);
4547 rhs = tem[0];
4548 tem.release ();
4550 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4551 process_constraint (new_constraint (*lhsp, rhs));
4554 /* If we pass the result decl by reference, honor that. */
4555 if (lhsop
4556 && fndecl
4557 && DECL_RESULT (fndecl)
4558 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4560 struct constraint_expr lhs;
4561 struct constraint_expr *rhsp;
4563 get_constraint_for_address_of (lhsop, &rhsc);
4564 lhs = get_function_part_constraint (fi, fi_result);
4565 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4566 process_constraint (new_constraint (lhs, *rhsp));
4567 rhsc.release ();
4570 /* If we use a static chain, pass it along. */
4571 if (gimple_call_chain (t))
4573 struct constraint_expr lhs;
4574 struct constraint_expr *rhsp;
4576 get_constraint_for (gimple_call_chain (t), &rhsc);
4577 lhs = get_function_part_constraint (fi, fi_static_chain);
4578 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4579 process_constraint (new_constraint (lhs, *rhsp));
4584 /* Walk statement T setting up aliasing constraints according to the
4585 references found in T. This function is the main part of the
4586 constraint builder. AI points to auxiliary alias information used
4587 when building alias sets and computing alias grouping heuristics. */
4589 static void
4590 find_func_aliases (gimple origt)
4592 gimple t = origt;
4593 vec<ce_s> lhsc = vNULL;
4594 vec<ce_s> rhsc = vNULL;
4595 struct constraint_expr *c;
4596 varinfo_t fi;
4598 /* Now build constraints expressions. */
4599 if (gimple_code (t) == GIMPLE_PHI)
4601 size_t i;
4602 unsigned int j;
4604 /* For a phi node, assign all the arguments to
4605 the result. */
4606 get_constraint_for (gimple_phi_result (t), &lhsc);
4607 for (i = 0; i < gimple_phi_num_args (t); i++)
4609 tree strippedrhs = PHI_ARG_DEF (t, i);
4611 STRIP_NOPS (strippedrhs);
4612 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
4614 FOR_EACH_VEC_ELT (lhsc, j, c)
4616 struct constraint_expr *c2;
4617 while (rhsc.length () > 0)
4619 c2 = &rhsc.last ();
4620 process_constraint (new_constraint (*c, *c2));
4621 rhsc.pop ();
4626 /* In IPA mode, we need to generate constraints to pass call
4627 arguments through their calls. There are two cases,
4628 either a GIMPLE_CALL returning a value, or just a plain
4629 GIMPLE_CALL when we are not.
4631 In non-ipa mode, we need to generate constraints for each
4632 pointer passed by address. */
4633 else if (is_gimple_call (t))
4634 find_func_aliases_for_call (t);
4636 /* Otherwise, just a regular assignment statement. Only care about
4637 operations with pointer result, others are dealt with as escape
4638 points if they have pointer operands. */
4639 else if (is_gimple_assign (t))
4641 /* Otherwise, just a regular assignment statement. */
4642 tree lhsop = gimple_assign_lhs (t);
4643 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4645 if (rhsop && TREE_CLOBBER_P (rhsop))
4646 /* Ignore clobbers, they don't actually store anything into
4647 the LHS. */
4649 else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4650 do_structure_copy (lhsop, rhsop);
4651 else
4653 enum tree_code code = gimple_assign_rhs_code (t);
4655 get_constraint_for (lhsop, &lhsc);
4657 if (FLOAT_TYPE_P (TREE_TYPE (lhsop)))
4658 /* If the operation produces a floating point result then
4659 assume the value is not produced to transfer a pointer. */
4661 else if (code == POINTER_PLUS_EXPR)
4662 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4663 gimple_assign_rhs2 (t), &rhsc);
4664 else if (code == BIT_AND_EXPR
4665 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
4667 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4668 the pointer. Handle it by offsetting it by UNKNOWN. */
4669 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4670 NULL_TREE, &rhsc);
4672 else if ((CONVERT_EXPR_CODE_P (code)
4673 && !(POINTER_TYPE_P (gimple_expr_type (t))
4674 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4675 || gimple_assign_single_p (t))
4676 get_constraint_for_rhs (rhsop, &rhsc);
4677 else if (code == COND_EXPR)
4679 /* The result is a merge of both COND_EXPR arms. */
4680 vec<ce_s> tmp = vNULL;
4681 struct constraint_expr *rhsp;
4682 unsigned i;
4683 get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
4684 get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
4685 FOR_EACH_VEC_ELT (tmp, i, rhsp)
4686 rhsc.safe_push (*rhsp);
4687 tmp.release ();
4689 else if (truth_value_p (code))
4690 /* Truth value results are not pointer (parts). Or at least
4691 very very unreasonable obfuscation of a part. */
4693 else
4695 /* All other operations are merges. */
4696 vec<ce_s> tmp = vNULL;
4697 struct constraint_expr *rhsp;
4698 unsigned i, j;
4699 get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
4700 for (i = 2; i < gimple_num_ops (t); ++i)
4702 get_constraint_for_rhs (gimple_op (t, i), &tmp);
4703 FOR_EACH_VEC_ELT (tmp, j, rhsp)
4704 rhsc.safe_push (*rhsp);
4705 tmp.truncate (0);
4707 tmp.release ();
4709 process_all_all_constraints (lhsc, rhsc);
4711 /* If there is a store to a global variable the rhs escapes. */
4712 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4713 && DECL_P (lhsop)
4714 && is_global_var (lhsop)
4715 && (!in_ipa_mode
4716 || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
4717 make_escape_constraint (rhsop);
4719 /* Handle escapes through return. */
4720 else if (gimple_code (t) == GIMPLE_RETURN
4721 && gimple_return_retval (t) != NULL_TREE)
4723 fi = NULL;
4724 if (!in_ipa_mode
4725 || !(fi = get_vi_for_tree (cfun->decl)))
4726 make_escape_constraint (gimple_return_retval (t));
4727 else if (in_ipa_mode
4728 && fi != NULL)
4730 struct constraint_expr lhs ;
4731 struct constraint_expr *rhsp;
4732 unsigned i;
4734 lhs = get_function_part_constraint (fi, fi_result);
4735 get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
4736 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4737 process_constraint (new_constraint (lhs, *rhsp));
4740 /* Handle asms conservatively by adding escape constraints to everything. */
4741 else if (gimple_code (t) == GIMPLE_ASM)
4743 unsigned i, noutputs;
4744 const char **oconstraints;
4745 const char *constraint;
4746 bool allows_mem, allows_reg, is_inout;
4748 noutputs = gimple_asm_noutputs (t);
4749 oconstraints = XALLOCAVEC (const char *, noutputs);
4751 for (i = 0; i < noutputs; ++i)
4753 tree link = gimple_asm_output_op (t, i);
4754 tree op = TREE_VALUE (link);
4756 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4757 oconstraints[i] = constraint;
4758 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4759 &allows_reg, &is_inout);
4761 /* A memory constraint makes the address of the operand escape. */
4762 if (!allows_reg && allows_mem)
4763 make_escape_constraint (build_fold_addr_expr (op));
4765 /* The asm may read global memory, so outputs may point to
4766 any global memory. */
4767 if (op)
4769 vec<ce_s> lhsc = vNULL;
4770 struct constraint_expr rhsc, *lhsp;
4771 unsigned j;
4772 get_constraint_for (op, &lhsc);
4773 rhsc.var = nonlocal_id;
4774 rhsc.offset = 0;
4775 rhsc.type = SCALAR;
4776 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4777 process_constraint (new_constraint (*lhsp, rhsc));
4778 lhsc.release ();
4781 for (i = 0; i < gimple_asm_ninputs (t); ++i)
4783 tree link = gimple_asm_input_op (t, i);
4784 tree op = TREE_VALUE (link);
4786 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4788 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
4789 &allows_mem, &allows_reg);
4791 /* A memory constraint makes the address of the operand escape. */
4792 if (!allows_reg && allows_mem)
4793 make_escape_constraint (build_fold_addr_expr (op));
4794 /* Strictly we'd only need the constraint to ESCAPED if
4795 the asm clobbers memory, otherwise using something
4796 along the lines of per-call clobbers/uses would be enough. */
4797 else if (op)
4798 make_escape_constraint (op);
4802 rhsc.release ();
4803 lhsc.release ();
4807 /* Create a constraint adding to the clobber set of FI the memory
4808 pointed to by PTR. */
4810 static void
4811 process_ipa_clobber (varinfo_t fi, tree ptr)
4813 vec<ce_s> ptrc = vNULL;
4814 struct constraint_expr *c, lhs;
4815 unsigned i;
4816 get_constraint_for_rhs (ptr, &ptrc);
4817 lhs = get_function_part_constraint (fi, fi_clobbers);
4818 FOR_EACH_VEC_ELT (ptrc, i, c)
4819 process_constraint (new_constraint (lhs, *c));
4820 ptrc.release ();
4823 /* Walk statement T setting up clobber and use constraints according to the
4824 references found in T. This function is a main part of the
4825 IPA constraint builder. */
4827 static void
4828 find_func_clobbers (gimple origt)
4830 gimple t = origt;
4831 vec<ce_s> lhsc = vNULL;
4832 vec<ce_s> rhsc = vNULL;
4833 varinfo_t fi;
4835 /* Add constraints for clobbered/used in IPA mode.
4836 We are not interested in what automatic variables are clobbered
4837 or used as we only use the information in the caller to which
4838 they do not escape. */
4839 gcc_assert (in_ipa_mode);
4841 /* If the stmt refers to memory in any way it better had a VUSE. */
4842 if (gimple_vuse (t) == NULL_TREE)
4843 return;
4845 /* We'd better have function information for the current function. */
4846 fi = lookup_vi_for_tree (cfun->decl);
4847 gcc_assert (fi != NULL);
4849 /* Account for stores in assignments and calls. */
4850 if (gimple_vdef (t) != NULL_TREE
4851 && gimple_has_lhs (t))
4853 tree lhs = gimple_get_lhs (t);
4854 tree tem = lhs;
4855 while (handled_component_p (tem))
4856 tem = TREE_OPERAND (tem, 0);
4857 if ((DECL_P (tem)
4858 && !auto_var_in_fn_p (tem, cfun->decl))
4859 || INDIRECT_REF_P (tem)
4860 || (TREE_CODE (tem) == MEM_REF
4861 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4862 && auto_var_in_fn_p
4863 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
4865 struct constraint_expr lhsc, *rhsp;
4866 unsigned i;
4867 lhsc = get_function_part_constraint (fi, fi_clobbers);
4868 get_constraint_for_address_of (lhs, &rhsc);
4869 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4870 process_constraint (new_constraint (lhsc, *rhsp));
4871 rhsc.release ();
4875 /* Account for uses in assigments and returns. */
4876 if (gimple_assign_single_p (t)
4877 || (gimple_code (t) == GIMPLE_RETURN
4878 && gimple_return_retval (t) != NULL_TREE))
4880 tree rhs = (gimple_assign_single_p (t)
4881 ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
4882 tree tem = rhs;
4883 while (handled_component_p (tem))
4884 tem = TREE_OPERAND (tem, 0);
4885 if ((DECL_P (tem)
4886 && !auto_var_in_fn_p (tem, cfun->decl))
4887 || INDIRECT_REF_P (tem)
4888 || (TREE_CODE (tem) == MEM_REF
4889 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4890 && auto_var_in_fn_p
4891 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
4893 struct constraint_expr lhs, *rhsp;
4894 unsigned i;
4895 lhs = get_function_part_constraint (fi, fi_uses);
4896 get_constraint_for_address_of (rhs, &rhsc);
4897 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4898 process_constraint (new_constraint (lhs, *rhsp));
4899 rhsc.release ();
4903 if (is_gimple_call (t))
4905 varinfo_t cfi = NULL;
4906 tree decl = gimple_call_fndecl (t);
4907 struct constraint_expr lhs, rhs;
4908 unsigned i, j;
4910 /* For builtins we do not have separate function info. For those
4911 we do not generate escapes for we have to generate clobbers/uses. */
4912 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4913 switch (DECL_FUNCTION_CODE (decl))
4915 /* The following functions use and clobber memory pointed to
4916 by their arguments. */
4917 case BUILT_IN_STRCPY:
4918 case BUILT_IN_STRNCPY:
4919 case BUILT_IN_BCOPY:
4920 case BUILT_IN_MEMCPY:
4921 case BUILT_IN_MEMMOVE:
4922 case BUILT_IN_MEMPCPY:
4923 case BUILT_IN_STPCPY:
4924 case BUILT_IN_STPNCPY:
4925 case BUILT_IN_STRCAT:
4926 case BUILT_IN_STRNCAT:
4927 case BUILT_IN_STRCPY_CHK:
4928 case BUILT_IN_STRNCPY_CHK:
4929 case BUILT_IN_MEMCPY_CHK:
4930 case BUILT_IN_MEMMOVE_CHK:
4931 case BUILT_IN_MEMPCPY_CHK:
4932 case BUILT_IN_STPCPY_CHK:
4933 case BUILT_IN_STPNCPY_CHK:
4934 case BUILT_IN_STRCAT_CHK:
4935 case BUILT_IN_STRNCAT_CHK:
4937 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4938 == BUILT_IN_BCOPY ? 1 : 0));
4939 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4940 == BUILT_IN_BCOPY ? 0 : 1));
4941 unsigned i;
4942 struct constraint_expr *rhsp, *lhsp;
4943 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4944 lhs = get_function_part_constraint (fi, fi_clobbers);
4945 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4946 process_constraint (new_constraint (lhs, *lhsp));
4947 lhsc.release ();
4948 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4949 lhs = get_function_part_constraint (fi, fi_uses);
4950 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4951 process_constraint (new_constraint (lhs, *rhsp));
4952 rhsc.release ();
4953 return;
4955 /* The following function clobbers memory pointed to by
4956 its argument. */
4957 case BUILT_IN_MEMSET:
4958 case BUILT_IN_MEMSET_CHK:
4960 tree dest = gimple_call_arg (t, 0);
4961 unsigned i;
4962 ce_s *lhsp;
4963 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4964 lhs = get_function_part_constraint (fi, fi_clobbers);
4965 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4966 process_constraint (new_constraint (lhs, *lhsp));
4967 lhsc.release ();
4968 return;
4970 /* The following functions clobber their second and third
4971 arguments. */
4972 case BUILT_IN_SINCOS:
4973 case BUILT_IN_SINCOSF:
4974 case BUILT_IN_SINCOSL:
4976 process_ipa_clobber (fi, gimple_call_arg (t, 1));
4977 process_ipa_clobber (fi, gimple_call_arg (t, 2));
4978 return;
4980 /* The following functions clobber their second argument. */
4981 case BUILT_IN_FREXP:
4982 case BUILT_IN_FREXPF:
4983 case BUILT_IN_FREXPL:
4984 case BUILT_IN_LGAMMA_R:
4985 case BUILT_IN_LGAMMAF_R:
4986 case BUILT_IN_LGAMMAL_R:
4987 case BUILT_IN_GAMMA_R:
4988 case BUILT_IN_GAMMAF_R:
4989 case BUILT_IN_GAMMAL_R:
4990 case BUILT_IN_MODF:
4991 case BUILT_IN_MODFF:
4992 case BUILT_IN_MODFL:
4994 process_ipa_clobber (fi, gimple_call_arg (t, 1));
4995 return;
4997 /* The following functions clobber their third argument. */
4998 case BUILT_IN_REMQUO:
4999 case BUILT_IN_REMQUOF:
5000 case BUILT_IN_REMQUOL:
5002 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5003 return;
5005 /* The following functions neither read nor clobber memory. */
5006 case BUILT_IN_ASSUME_ALIGNED:
5007 case BUILT_IN_FREE:
5008 return;
5009 /* Trampolines are of no interest to us. */
5010 case BUILT_IN_INIT_TRAMPOLINE:
5011 case BUILT_IN_ADJUST_TRAMPOLINE:
5012 return;
5013 case BUILT_IN_VA_START:
5014 case BUILT_IN_VA_END:
5015 return;
5016 /* printf-style functions may have hooks to set pointers to
5017 point to somewhere into the generated string. Leave them
5018 for a later exercise... */
5019 default:
5020 /* Fallthru to general call handling. */;
5023 /* Parameters passed by value are used. */
5024 lhs = get_function_part_constraint (fi, fi_uses);
5025 for (i = 0; i < gimple_call_num_args (t); i++)
5027 struct constraint_expr *rhsp;
5028 tree arg = gimple_call_arg (t, i);
5030 if (TREE_CODE (arg) == SSA_NAME
5031 || is_gimple_min_invariant (arg))
5032 continue;
5034 get_constraint_for_address_of (arg, &rhsc);
5035 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5036 process_constraint (new_constraint (lhs, *rhsp));
5037 rhsc.release ();
5040 /* Build constraints for propagating clobbers/uses along the
5041 callgraph edges. */
5042 cfi = get_fi_for_callee (t);
5043 if (cfi->id == anything_id)
5045 if (gimple_vdef (t))
5046 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5047 anything_id);
5048 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5049 anything_id);
5050 return;
5053 /* For callees without function info (that's external functions),
5054 ESCAPED is clobbered and used. */
5055 if (gimple_call_fndecl (t)
5056 && !cfi->is_fn_info)
5058 varinfo_t vi;
5060 if (gimple_vdef (t))
5061 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5062 escaped_id);
5063 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
5065 /* Also honor the call statement use/clobber info. */
5066 if ((vi = lookup_call_clobber_vi (t)) != NULL)
5067 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5068 vi->id);
5069 if ((vi = lookup_call_use_vi (t)) != NULL)
5070 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
5071 vi->id);
5072 return;
5075 /* Otherwise the caller clobbers and uses what the callee does.
5076 ??? This should use a new complex constraint that filters
5077 local variables of the callee. */
5078 if (gimple_vdef (t))
5080 lhs = get_function_part_constraint (fi, fi_clobbers);
5081 rhs = get_function_part_constraint (cfi, fi_clobbers);
5082 process_constraint (new_constraint (lhs, rhs));
5084 lhs = get_function_part_constraint (fi, fi_uses);
5085 rhs = get_function_part_constraint (cfi, fi_uses);
5086 process_constraint (new_constraint (lhs, rhs));
5088 else if (gimple_code (t) == GIMPLE_ASM)
5090 /* ??? Ick. We can do better. */
5091 if (gimple_vdef (t))
5092 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5093 anything_id);
5094 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5095 anything_id);
5098 rhsc.release ();
5102 /* Find the first varinfo in the same variable as START that overlaps with
5103 OFFSET. Return NULL if we can't find one. */
5105 static varinfo_t
5106 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5108 /* If the offset is outside of the variable, bail out. */
5109 if (offset >= start->fullsize)
5110 return NULL;
5112 /* If we cannot reach offset from start, lookup the first field
5113 and start from there. */
5114 if (start->offset > offset)
5115 start = get_varinfo (start->head);
5117 while (start)
5119 /* We may not find a variable in the field list with the actual
5120 offset when when we have glommed a structure to a variable.
5121 In that case, however, offset should still be within the size
5122 of the variable. */
5123 if (offset >= start->offset
5124 && (offset - start->offset) < start->size)
5125 return start;
5127 start = vi_next (start);
5130 return NULL;
5133 /* Find the first varinfo in the same variable as START that overlaps with
5134 OFFSET. If there is no such varinfo the varinfo directly preceding
5135 OFFSET is returned. */
5137 static varinfo_t
5138 first_or_preceding_vi_for_offset (varinfo_t start,
5139 unsigned HOST_WIDE_INT offset)
5141 /* If we cannot reach offset from start, lookup the first field
5142 and start from there. */
5143 if (start->offset > offset)
5144 start = get_varinfo (start->head);
5146 /* We may not find a variable in the field list with the actual
5147 offset when when we have glommed a structure to a variable.
5148 In that case, however, offset should still be within the size
5149 of the variable.
5150 If we got beyond the offset we look for return the field
5151 directly preceding offset which may be the last field. */
5152 while (start->next
5153 && offset >= start->offset
5154 && !((offset - start->offset) < start->size))
5155 start = vi_next (start);
5157 return start;
5161 /* This structure is used during pushing fields onto the fieldstack
5162 to track the offset of the field, since bitpos_of_field gives it
5163 relative to its immediate containing type, and we want it relative
5164 to the ultimate containing object. */
5166 struct fieldoff
5168 /* Offset from the base of the base containing object to this field. */
5169 HOST_WIDE_INT offset;
5171 /* Size, in bits, of the field. */
5172 unsigned HOST_WIDE_INT size;
5174 unsigned has_unknown_size : 1;
5176 unsigned must_have_pointers : 1;
5178 unsigned may_have_pointers : 1;
5180 unsigned only_restrict_pointers : 1;
5182 typedef struct fieldoff fieldoff_s;
5185 /* qsort comparison function for two fieldoff's PA and PB */
5187 static int
5188 fieldoff_compare (const void *pa, const void *pb)
5190 const fieldoff_s *foa = (const fieldoff_s *)pa;
5191 const fieldoff_s *fob = (const fieldoff_s *)pb;
5192 unsigned HOST_WIDE_INT foasize, fobsize;
5194 if (foa->offset < fob->offset)
5195 return -1;
5196 else if (foa->offset > fob->offset)
5197 return 1;
5199 foasize = foa->size;
5200 fobsize = fob->size;
5201 if (foasize < fobsize)
5202 return -1;
5203 else if (foasize > fobsize)
5204 return 1;
5205 return 0;
5208 /* Sort a fieldstack according to the field offset and sizes. */
5209 static void
5210 sort_fieldstack (vec<fieldoff_s> fieldstack)
5212 fieldstack.qsort (fieldoff_compare);
5215 /* Return true if T is a type that can have subvars. */
5217 static inline bool
5218 type_can_have_subvars (const_tree t)
5220 /* Aggregates without overlapping fields can have subvars. */
5221 return TREE_CODE (t) == RECORD_TYPE;
5224 /* Return true if V is a tree that we can have subvars for.
5225 Normally, this is any aggregate type. Also complex
5226 types which are not gimple registers can have subvars. */
5228 static inline bool
5229 var_can_have_subvars (const_tree v)
5231 /* Volatile variables should never have subvars. */
5232 if (TREE_THIS_VOLATILE (v))
5233 return false;
5235 /* Non decls or memory tags can never have subvars. */
5236 if (!DECL_P (v))
5237 return false;
5239 return type_can_have_subvars (TREE_TYPE (v));
5242 /* Return true if T is a type that does contain pointers. */
5244 static bool
5245 type_must_have_pointers (tree type)
5247 if (POINTER_TYPE_P (type))
5248 return true;
5250 if (TREE_CODE (type) == ARRAY_TYPE)
5251 return type_must_have_pointers (TREE_TYPE (type));
5253 /* A function or method can have pointers as arguments, so track
5254 those separately. */
5255 if (TREE_CODE (type) == FUNCTION_TYPE
5256 || TREE_CODE (type) == METHOD_TYPE)
5257 return true;
5259 return false;
5262 static bool
5263 field_must_have_pointers (tree t)
5265 return type_must_have_pointers (TREE_TYPE (t));
5268 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5269 the fields of TYPE onto fieldstack, recording their offsets along
5270 the way.
5272 OFFSET is used to keep track of the offset in this entire
5273 structure, rather than just the immediately containing structure.
5274 Returns false if the caller is supposed to handle the field we
5275 recursed for. */
5277 static bool
5278 push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
5279 HOST_WIDE_INT offset)
5281 tree field;
5282 bool empty_p = true;
5284 if (TREE_CODE (type) != RECORD_TYPE)
5285 return false;
5287 /* If the vector of fields is growing too big, bail out early.
5288 Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
5289 sure this fails. */
5290 if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5291 return false;
5293 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5294 if (TREE_CODE (field) == FIELD_DECL)
5296 bool push = false;
5297 HOST_WIDE_INT foff = bitpos_of_field (field);
5299 if (!var_can_have_subvars (field)
5300 || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
5301 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
5302 push = true;
5303 else if (!push_fields_onto_fieldstack
5304 (TREE_TYPE (field), fieldstack, offset + foff)
5305 && (DECL_SIZE (field)
5306 && !integer_zerop (DECL_SIZE (field))))
5307 /* Empty structures may have actual size, like in C++. So
5308 see if we didn't push any subfields and the size is
5309 nonzero, push the field onto the stack. */
5310 push = true;
5312 if (push)
5314 fieldoff_s *pair = NULL;
5315 bool has_unknown_size = false;
5316 bool must_have_pointers_p;
5318 if (!fieldstack->is_empty ())
5319 pair = &fieldstack->last ();
5321 /* If there isn't anything at offset zero, create sth. */
5322 if (!pair
5323 && offset + foff != 0)
5325 fieldoff_s e = {0, offset + foff, false, false, false, false};
5326 pair = fieldstack->safe_push (e);
5329 if (!DECL_SIZE (field)
5330 || !host_integerp (DECL_SIZE (field), 1))
5331 has_unknown_size = true;
5333 /* If adjacent fields do not contain pointers merge them. */
5334 must_have_pointers_p = field_must_have_pointers (field);
5335 if (pair
5336 && !has_unknown_size
5337 && !must_have_pointers_p
5338 && !pair->must_have_pointers
5339 && !pair->has_unknown_size
5340 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5342 pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
5344 else
5346 fieldoff_s e;
5347 e.offset = offset + foff;
5348 e.has_unknown_size = has_unknown_size;
5349 if (!has_unknown_size)
5350 e.size = TREE_INT_CST_LOW (DECL_SIZE (field));
5351 else
5352 e.size = -1;
5353 e.must_have_pointers = must_have_pointers_p;
5354 e.may_have_pointers = true;
5355 e.only_restrict_pointers
5356 = (!has_unknown_size
5357 && POINTER_TYPE_P (TREE_TYPE (field))
5358 && TYPE_RESTRICT (TREE_TYPE (field)));
5359 fieldstack->safe_push (e);
5363 empty_p = false;
5366 return !empty_p;
5369 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5370 if it is a varargs function. */
5372 static unsigned int
5373 count_num_arguments (tree decl, bool *is_varargs)
5375 unsigned int num = 0;
5376 tree t;
5378 /* Capture named arguments for K&R functions. They do not
5379 have a prototype and thus no TYPE_ARG_TYPES. */
5380 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5381 ++num;
5383 /* Check if the function has variadic arguments. */
5384 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5385 if (TREE_VALUE (t) == void_type_node)
5386 break;
5387 if (!t)
5388 *is_varargs = true;
5390 return num;
5393 /* Creation function node for DECL, using NAME, and return the index
5394 of the variable we've created for the function. */
5396 static varinfo_t
5397 create_function_info_for (tree decl, const char *name)
5399 struct function *fn = DECL_STRUCT_FUNCTION (decl);
5400 varinfo_t vi, prev_vi;
5401 tree arg;
5402 unsigned int i;
5403 bool is_varargs = false;
5404 unsigned int num_args = count_num_arguments (decl, &is_varargs);
5406 /* Create the variable info. */
5408 vi = new_var_info (decl, name);
5409 vi->offset = 0;
5410 vi->size = 1;
5411 vi->fullsize = fi_parm_base + num_args;
5412 vi->is_fn_info = 1;
5413 vi->may_have_pointers = false;
5414 if (is_varargs)
5415 vi->fullsize = ~0;
5416 insert_vi_for_tree (vi->decl, vi);
5418 prev_vi = vi;
5420 /* Create a variable for things the function clobbers and one for
5421 things the function uses. */
5423 varinfo_t clobbervi, usevi;
5424 const char *newname;
5425 char *tempname;
5427 asprintf (&tempname, "%s.clobber", name);
5428 newname = ggc_strdup (tempname);
5429 free (tempname);
5431 clobbervi = new_var_info (NULL, newname);
5432 clobbervi->offset = fi_clobbers;
5433 clobbervi->size = 1;
5434 clobbervi->fullsize = vi->fullsize;
5435 clobbervi->is_full_var = true;
5436 clobbervi->is_global_var = false;
5437 gcc_assert (prev_vi->offset < clobbervi->offset);
5438 prev_vi->next = clobbervi->id;
5439 prev_vi = clobbervi;
5441 asprintf (&tempname, "%s.use", name);
5442 newname = ggc_strdup (tempname);
5443 free (tempname);
5445 usevi = new_var_info (NULL, newname);
5446 usevi->offset = fi_uses;
5447 usevi->size = 1;
5448 usevi->fullsize = vi->fullsize;
5449 usevi->is_full_var = true;
5450 usevi->is_global_var = false;
5451 gcc_assert (prev_vi->offset < usevi->offset);
5452 prev_vi->next = usevi->id;
5453 prev_vi = usevi;
5456 /* And one for the static chain. */
5457 if (fn->static_chain_decl != NULL_TREE)
5459 varinfo_t chainvi;
5460 const char *newname;
5461 char *tempname;
5463 asprintf (&tempname, "%s.chain", name);
5464 newname = ggc_strdup (tempname);
5465 free (tempname);
5467 chainvi = new_var_info (fn->static_chain_decl, newname);
5468 chainvi->offset = fi_static_chain;
5469 chainvi->size = 1;
5470 chainvi->fullsize = vi->fullsize;
5471 chainvi->is_full_var = true;
5472 chainvi->is_global_var = false;
5473 gcc_assert (prev_vi->offset < chainvi->offset);
5474 prev_vi->next = chainvi->id;
5475 prev_vi = chainvi;
5476 insert_vi_for_tree (fn->static_chain_decl, chainvi);
5479 /* Create a variable for the return var. */
5480 if (DECL_RESULT (decl) != NULL
5481 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
5483 varinfo_t resultvi;
5484 const char *newname;
5485 char *tempname;
5486 tree resultdecl = decl;
5488 if (DECL_RESULT (decl))
5489 resultdecl = DECL_RESULT (decl);
5491 asprintf (&tempname, "%s.result", name);
5492 newname = ggc_strdup (tempname);
5493 free (tempname);
5495 resultvi = new_var_info (resultdecl, newname);
5496 resultvi->offset = fi_result;
5497 resultvi->size = 1;
5498 resultvi->fullsize = vi->fullsize;
5499 resultvi->is_full_var = true;
5500 if (DECL_RESULT (decl))
5501 resultvi->may_have_pointers = true;
5502 gcc_assert (prev_vi->offset < resultvi->offset);
5503 prev_vi->next = resultvi->id;
5504 prev_vi = resultvi;
5505 if (DECL_RESULT (decl))
5506 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
5509 /* Set up variables for each argument. */
5510 arg = DECL_ARGUMENTS (decl);
5511 for (i = 0; i < num_args; i++)
5513 varinfo_t argvi;
5514 const char *newname;
5515 char *tempname;
5516 tree argdecl = decl;
5518 if (arg)
5519 argdecl = arg;
5521 asprintf (&tempname, "%s.arg%d", name, i);
5522 newname = ggc_strdup (tempname);
5523 free (tempname);
5525 argvi = new_var_info (argdecl, newname);
5526 argvi->offset = fi_parm_base + i;
5527 argvi->size = 1;
5528 argvi->is_full_var = true;
5529 argvi->fullsize = vi->fullsize;
5530 if (arg)
5531 argvi->may_have_pointers = true;
5532 gcc_assert (prev_vi->offset < argvi->offset);
5533 prev_vi->next = argvi->id;
5534 prev_vi = argvi;
5535 if (arg)
5537 insert_vi_for_tree (arg, argvi);
5538 arg = DECL_CHAIN (arg);
5542 /* Add one representative for all further args. */
5543 if (is_varargs)
5545 varinfo_t argvi;
5546 const char *newname;
5547 char *tempname;
5548 tree decl;
5550 asprintf (&tempname, "%s.varargs", name);
5551 newname = ggc_strdup (tempname);
5552 free (tempname);
5554 /* We need sth that can be pointed to for va_start. */
5555 decl = build_fake_var_decl (ptr_type_node);
5557 argvi = new_var_info (decl, newname);
5558 argvi->offset = fi_parm_base + num_args;
5559 argvi->size = ~0;
5560 argvi->is_full_var = true;
5561 argvi->is_heap_var = true;
5562 argvi->fullsize = vi->fullsize;
5563 gcc_assert (prev_vi->offset < argvi->offset);
5564 prev_vi->next = argvi->id;
5565 prev_vi = argvi;
5568 return vi;
5572 /* Return true if FIELDSTACK contains fields that overlap.
5573 FIELDSTACK is assumed to be sorted by offset. */
5575 static bool
5576 check_for_overlaps (vec<fieldoff_s> fieldstack)
5578 fieldoff_s *fo = NULL;
5579 unsigned int i;
5580 HOST_WIDE_INT lastoffset = -1;
5582 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5584 if (fo->offset == lastoffset)
5585 return true;
5586 lastoffset = fo->offset;
5588 return false;
5591 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5592 This will also create any varinfo structures necessary for fields
5593 of DECL. */
5595 static varinfo_t
5596 create_variable_info_for_1 (tree decl, const char *name)
5598 varinfo_t vi, newvi;
5599 tree decl_type = TREE_TYPE (decl);
5600 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5601 vec<fieldoff_s> fieldstack = vNULL;
5602 fieldoff_s *fo;
5603 unsigned int i;
5605 if (!declsize
5606 || !host_integerp (declsize, 1))
5608 vi = new_var_info (decl, name);
5609 vi->offset = 0;
5610 vi->size = ~0;
5611 vi->fullsize = ~0;
5612 vi->is_unknown_size_var = true;
5613 vi->is_full_var = true;
5614 vi->may_have_pointers = true;
5615 return vi;
5618 /* Collect field information. */
5619 if (use_field_sensitive
5620 && var_can_have_subvars (decl)
5621 /* ??? Force us to not use subfields for global initializers
5622 in IPA mode. Else we'd have to parse arbitrary initializers. */
5623 && !(in_ipa_mode
5624 && is_global_var (decl)
5625 && DECL_INITIAL (decl)))
5627 fieldoff_s *fo = NULL;
5628 bool notokay = false;
5629 unsigned int i;
5631 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
5633 for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
5634 if (fo->has_unknown_size
5635 || fo->offset < 0)
5637 notokay = true;
5638 break;
5641 /* We can't sort them if we have a field with a variable sized type,
5642 which will make notokay = true. In that case, we are going to return
5643 without creating varinfos for the fields anyway, so sorting them is a
5644 waste to boot. */
5645 if (!notokay)
5647 sort_fieldstack (fieldstack);
5648 /* Due to some C++ FE issues, like PR 22488, we might end up
5649 what appear to be overlapping fields even though they,
5650 in reality, do not overlap. Until the C++ FE is fixed,
5651 we will simply disable field-sensitivity for these cases. */
5652 notokay = check_for_overlaps (fieldstack);
5655 if (notokay)
5656 fieldstack.release ();
5659 /* If we didn't end up collecting sub-variables create a full
5660 variable for the decl. */
5661 if (fieldstack.length () <= 1
5662 || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5664 vi = new_var_info (decl, name);
5665 vi->offset = 0;
5666 vi->may_have_pointers = true;
5667 vi->fullsize = TREE_INT_CST_LOW (declsize);
5668 vi->size = vi->fullsize;
5669 vi->is_full_var = true;
5670 fieldstack.release ();
5671 return vi;
5674 vi = new_var_info (decl, name);
5675 vi->fullsize = TREE_INT_CST_LOW (declsize);
5676 for (i = 0, newvi = vi;
5677 fieldstack.iterate (i, &fo);
5678 ++i, newvi = vi_next (newvi))
5680 const char *newname = "NULL";
5681 char *tempname;
5683 if (dump_file)
5685 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
5686 "+" HOST_WIDE_INT_PRINT_DEC, name, fo->offset, fo->size);
5687 newname = ggc_strdup (tempname);
5688 free (tempname);
5690 newvi->name = newname;
5691 newvi->offset = fo->offset;
5692 newvi->size = fo->size;
5693 newvi->fullsize = vi->fullsize;
5694 newvi->may_have_pointers = fo->may_have_pointers;
5695 newvi->only_restrict_pointers = fo->only_restrict_pointers;
5696 if (i + 1 < fieldstack.length ())
5698 varinfo_t tem = new_var_info (decl, name);
5699 newvi->next = tem->id;
5700 tem->head = vi->id;
5704 fieldstack.release ();
5706 return vi;
5709 static unsigned int
5710 create_variable_info_for (tree decl, const char *name)
5712 varinfo_t vi = create_variable_info_for_1 (decl, name);
5713 unsigned int id = vi->id;
5715 insert_vi_for_tree (decl, vi);
5717 if (TREE_CODE (decl) != VAR_DECL)
5718 return id;
5720 /* Create initial constraints for globals. */
5721 for (; vi; vi = vi_next (vi))
5723 if (!vi->may_have_pointers
5724 || !vi->is_global_var)
5725 continue;
5727 /* Mark global restrict qualified pointers. */
5728 if ((POINTER_TYPE_P (TREE_TYPE (decl))
5729 && TYPE_RESTRICT (TREE_TYPE (decl)))
5730 || vi->only_restrict_pointers)
5732 make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
5733 continue;
5736 /* In non-IPA mode the initializer from nonlocal is all we need. */
5737 if (!in_ipa_mode
5738 || DECL_HARD_REGISTER (decl))
5739 make_copy_constraint (vi, nonlocal_id);
5741 /* In IPA mode parse the initializer and generate proper constraints
5742 for it. */
5743 else
5745 struct varpool_node *vnode = varpool_get_node (decl);
5747 /* For escaped variables initialize them from nonlocal. */
5748 if (!varpool_all_refs_explicit_p (vnode))
5749 make_copy_constraint (vi, nonlocal_id);
5751 /* If this is a global variable with an initializer and we are in
5752 IPA mode generate constraints for it. */
5753 if (DECL_INITIAL (decl)
5754 && vnode->symbol.definition)
5756 vec<ce_s> rhsc = vNULL;
5757 struct constraint_expr lhs, *rhsp;
5758 unsigned i;
5759 get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
5760 lhs.var = vi->id;
5761 lhs.offset = 0;
5762 lhs.type = SCALAR;
5763 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5764 process_constraint (new_constraint (lhs, *rhsp));
5765 /* If this is a variable that escapes from the unit
5766 the initializer escapes as well. */
5767 if (!varpool_all_refs_explicit_p (vnode))
5769 lhs.var = escaped_id;
5770 lhs.offset = 0;
5771 lhs.type = SCALAR;
5772 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5773 process_constraint (new_constraint (lhs, *rhsp));
5775 rhsc.release ();
5780 return id;
5783 /* Print out the points-to solution for VAR to FILE. */
5785 static void
5786 dump_solution_for_var (FILE *file, unsigned int var)
5788 varinfo_t vi = get_varinfo (var);
5789 unsigned int i;
5790 bitmap_iterator bi;
5792 /* Dump the solution for unified vars anyway, this avoids difficulties
5793 in scanning dumps in the testsuite. */
5794 fprintf (file, "%s = { ", vi->name);
5795 vi = get_varinfo (find (var));
5796 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5797 fprintf (file, "%s ", get_varinfo (i)->name);
5798 fprintf (file, "}");
5800 /* But note when the variable was unified. */
5801 if (vi->id != var)
5802 fprintf (file, " same as %s", vi->name);
5804 fprintf (file, "\n");
5807 /* Print the points-to solution for VAR to stdout. */
5809 DEBUG_FUNCTION void
5810 debug_solution_for_var (unsigned int var)
5812 dump_solution_for_var (stdout, var);
5815 /* Create varinfo structures for all of the variables in the
5816 function for intraprocedural mode. */
5818 static void
5819 intra_create_variable_infos (void)
5821 tree t;
5823 /* For each incoming pointer argument arg, create the constraint ARG
5824 = NONLOCAL or a dummy variable if it is a restrict qualified
5825 passed-by-reference argument. */
5826 for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
5828 varinfo_t p = get_vi_for_tree (t);
5830 /* For restrict qualified pointers to objects passed by
5831 reference build a real representative for the pointed-to object.
5832 Treat restrict qualified references the same. */
5833 if (TYPE_RESTRICT (TREE_TYPE (t))
5834 && ((DECL_BY_REFERENCE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
5835 || TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
5836 && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t))))
5838 struct constraint_expr lhsc, rhsc;
5839 varinfo_t vi;
5840 tree heapvar = build_fake_var_decl (TREE_TYPE (TREE_TYPE (t)));
5841 DECL_EXTERNAL (heapvar) = 1;
5842 vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
5843 insert_vi_for_tree (heapvar, vi);
5844 lhsc.var = p->id;
5845 lhsc.type = SCALAR;
5846 lhsc.offset = 0;
5847 rhsc.var = vi->id;
5848 rhsc.type = ADDRESSOF;
5849 rhsc.offset = 0;
5850 process_constraint (new_constraint (lhsc, rhsc));
5851 for (; vi; vi = vi_next (vi))
5852 if (vi->may_have_pointers)
5854 if (vi->only_restrict_pointers)
5855 make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
5856 else
5857 make_copy_constraint (vi, nonlocal_id);
5859 continue;
5862 if (POINTER_TYPE_P (TREE_TYPE (t))
5863 && TYPE_RESTRICT (TREE_TYPE (t)))
5864 make_constraint_from_global_restrict (p, "PARM_RESTRICT");
5865 else
5867 for (; p; p = vi_next (p))
5869 if (p->only_restrict_pointers)
5870 make_constraint_from_global_restrict (p, "PARM_RESTRICT");
5871 else if (p->may_have_pointers)
5872 make_constraint_from (p, nonlocal_id);
5877 /* Add a constraint for a result decl that is passed by reference. */
5878 if (DECL_RESULT (cfun->decl)
5879 && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
5881 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));
5883 for (p = result_vi; p; p = vi_next (p))
5884 make_constraint_from (p, nonlocal_id);
5887 /* Add a constraint for the incoming static chain parameter. */
5888 if (cfun->static_chain_decl != NULL_TREE)
5890 varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);
5892 for (p = chain_vi; p; p = vi_next (p))
5893 make_constraint_from (p, nonlocal_id);
5897 /* Structure used to put solution bitmaps in a hashtable so they can
5898 be shared among variables with the same points-to set. */
5900 typedef struct shared_bitmap_info
5902 bitmap pt_vars;
5903 hashval_t hashcode;
5904 } *shared_bitmap_info_t;
5905 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
5907 /* Shared_bitmap hashtable helpers. */
5909 struct shared_bitmap_hasher : typed_free_remove <shared_bitmap_info>
5911 typedef shared_bitmap_info value_type;
5912 typedef shared_bitmap_info compare_type;
5913 static inline hashval_t hash (const value_type *);
5914 static inline bool equal (const value_type *, const compare_type *);
5917 /* Hash function for a shared_bitmap_info_t */
5919 inline hashval_t
5920 shared_bitmap_hasher::hash (const value_type *bi)
5922 return bi->hashcode;
5925 /* Equality function for two shared_bitmap_info_t's. */
5927 inline bool
5928 shared_bitmap_hasher::equal (const value_type *sbi1, const compare_type *sbi2)
5930 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
5933 /* Shared_bitmap hashtable. */
5935 static hash_table <shared_bitmap_hasher> shared_bitmap_table;
5937 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5938 existing instance if there is one, NULL otherwise. */
5940 static bitmap
5941 shared_bitmap_lookup (bitmap pt_vars)
5943 shared_bitmap_info **slot;
5944 struct shared_bitmap_info sbi;
5946 sbi.pt_vars = pt_vars;
5947 sbi.hashcode = bitmap_hash (pt_vars);
5949 slot = shared_bitmap_table.find_slot_with_hash (&sbi, sbi.hashcode,
5950 NO_INSERT);
5951 if (!slot)
5952 return NULL;
5953 else
5954 return (*slot)->pt_vars;
5958 /* Add a bitmap to the shared bitmap hashtable. */
5960 static void
5961 shared_bitmap_add (bitmap pt_vars)
5963 shared_bitmap_info **slot;
5964 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
5966 sbi->pt_vars = pt_vars;
5967 sbi->hashcode = bitmap_hash (pt_vars);
5969 slot = shared_bitmap_table.find_slot_with_hash (sbi, sbi->hashcode, INSERT);
5970 gcc_assert (!*slot);
5971 *slot = sbi;
5975 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
5977 static void
5978 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
5980 unsigned int i;
5981 bitmap_iterator bi;
5983 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
5985 varinfo_t vi = get_varinfo (i);
5987 /* The only artificial variables that are allowed in a may-alias
5988 set are heap variables. */
5989 if (vi->is_artificial_var && !vi->is_heap_var)
5990 continue;
5992 if (TREE_CODE (vi->decl) == VAR_DECL
5993 || TREE_CODE (vi->decl) == PARM_DECL
5994 || TREE_CODE (vi->decl) == RESULT_DECL)
5996 /* If we are in IPA mode we will not recompute points-to
5997 sets after inlining so make sure they stay valid. */
5998 if (in_ipa_mode
5999 && !DECL_PT_UID_SET_P (vi->decl))
6000 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
6002 /* Add the decl to the points-to set. Note that the points-to
6003 set contains global variables. */
6004 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
6005 if (vi->is_global_var)
6006 pt->vars_contains_global = true;
6012 /* Compute the points-to solution *PT for the variable VI. */
6014 static struct pt_solution
6015 find_what_var_points_to (varinfo_t orig_vi)
6017 unsigned int i;
6018 bitmap_iterator bi;
6019 bitmap finished_solution;
6020 bitmap result;
6021 varinfo_t vi;
6022 void **slot;
6023 struct pt_solution *pt;
6025 /* This variable may have been collapsed, let's get the real
6026 variable. */
6027 vi = get_varinfo (find (orig_vi->id));
6029 /* See if we have already computed the solution and return it. */
6030 slot = pointer_map_insert (final_solutions, vi);
6031 if (*slot != NULL)
6032 return *(struct pt_solution *)*slot;
6034 *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
6035 memset (pt, 0, sizeof (struct pt_solution));
6037 /* Translate artificial variables into SSA_NAME_PTR_INFO
6038 attributes. */
6039 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6041 varinfo_t vi = get_varinfo (i);
6043 if (vi->is_artificial_var)
6045 if (vi->id == nothing_id)
6046 pt->null = 1;
6047 else if (vi->id == escaped_id)
6049 if (in_ipa_mode)
6050 pt->ipa_escaped = 1;
6051 else
6052 pt->escaped = 1;
6054 else if (vi->id == nonlocal_id)
6055 pt->nonlocal = 1;
6056 else if (vi->is_heap_var)
6057 /* We represent heapvars in the points-to set properly. */
6059 else if (vi->id == readonly_id)
6060 /* Nobody cares. */
6062 else if (vi->id == anything_id
6063 || vi->id == integer_id)
6064 pt->anything = 1;
6068 /* Instead of doing extra work, simply do not create
6069 elaborate points-to information for pt_anything pointers. */
6070 if (pt->anything)
6071 return *pt;
6073 /* Share the final set of variables when possible. */
6074 finished_solution = BITMAP_GGC_ALLOC ();
6075 stats.points_to_sets_created++;
6077 set_uids_in_ptset (finished_solution, vi->solution, pt);
6078 result = shared_bitmap_lookup (finished_solution);
6079 if (!result)
6081 shared_bitmap_add (finished_solution);
6082 pt->vars = finished_solution;
6084 else
6086 pt->vars = result;
6087 bitmap_clear (finished_solution);
6090 return *pt;
6093 /* Given a pointer variable P, fill in its points-to set. */
6095 static void
6096 find_what_p_points_to (tree p)
6098 struct ptr_info_def *pi;
6099 tree lookup_p = p;
6100 varinfo_t vi;
6102 /* For parameters, get at the points-to set for the actual parm
6103 decl. */
6104 if (TREE_CODE (p) == SSA_NAME
6105 && SSA_NAME_IS_DEFAULT_DEF (p)
6106 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
6107 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
6108 lookup_p = SSA_NAME_VAR (p);
6110 vi = lookup_vi_for_tree (lookup_p);
6111 if (!vi)
6112 return;
6114 pi = get_ptr_info (p);
6115 pi->pt = find_what_var_points_to (vi);
6119 /* Query statistics for points-to solutions. */
6121 static struct {
6122 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
6123 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
6124 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
6125 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
6126 } pta_stats;
6128 void
6129 dump_pta_stats (FILE *s)
6131 fprintf (s, "\nPTA query stats:\n");
6132 fprintf (s, " pt_solution_includes: "
6133 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6134 HOST_WIDE_INT_PRINT_DEC" queries\n",
6135 pta_stats.pt_solution_includes_no_alias,
6136 pta_stats.pt_solution_includes_no_alias
6137 + pta_stats.pt_solution_includes_may_alias);
6138 fprintf (s, " pt_solutions_intersect: "
6139 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6140 HOST_WIDE_INT_PRINT_DEC" queries\n",
6141 pta_stats.pt_solutions_intersect_no_alias,
6142 pta_stats.pt_solutions_intersect_no_alias
6143 + pta_stats.pt_solutions_intersect_may_alias);
6147 /* Reset the points-to solution *PT to a conservative default
6148 (point to anything). */
6150 void
6151 pt_solution_reset (struct pt_solution *pt)
6153 memset (pt, 0, sizeof (struct pt_solution));
6154 pt->anything = true;
6157 /* Set the points-to solution *PT to point only to the variables
6158 in VARS. VARS_CONTAINS_GLOBAL specifies whether that contains
6159 global variables and VARS_CONTAINS_RESTRICT specifies whether
6160 it contains restrict tag variables. */
6162 void
6163 pt_solution_set (struct pt_solution *pt, bitmap vars, bool vars_contains_global)
6165 memset (pt, 0, sizeof (struct pt_solution));
6166 pt->vars = vars;
6167 pt->vars_contains_global = vars_contains_global;
6170 /* Set the points-to solution *PT to point only to the variable VAR. */
6172 void
6173 pt_solution_set_var (struct pt_solution *pt, tree var)
6175 memset (pt, 0, sizeof (struct pt_solution));
6176 pt->vars = BITMAP_GGC_ALLOC ();
6177 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6178 pt->vars_contains_global = is_global_var (var);
6181 /* Computes the union of the points-to solutions *DEST and *SRC and
6182 stores the result in *DEST. This changes the points-to bitmap
6183 of *DEST and thus may not be used if that might be shared.
6184 The points-to bitmap of *SRC and *DEST will not be shared after
6185 this function if they were not before. */
6187 static void
6188 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6190 dest->anything |= src->anything;
6191 if (dest->anything)
6193 pt_solution_reset (dest);
6194 return;
6197 dest->nonlocal |= src->nonlocal;
6198 dest->escaped |= src->escaped;
6199 dest->ipa_escaped |= src->ipa_escaped;
6200 dest->null |= src->null;
6201 dest->vars_contains_global |= src->vars_contains_global;
6202 if (!src->vars)
6203 return;
6205 if (!dest->vars)
6206 dest->vars = BITMAP_GGC_ALLOC ();
6207 bitmap_ior_into (dest->vars, src->vars);
6210 /* Return true if the points-to solution *PT is empty. */
6212 bool
6213 pt_solution_empty_p (struct pt_solution *pt)
6215 if (pt->anything
6216 || pt->nonlocal)
6217 return false;
6219 if (pt->vars
6220 && !bitmap_empty_p (pt->vars))
6221 return false;
6223 /* If the solution includes ESCAPED, check if that is empty. */
6224 if (pt->escaped
6225 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6226 return false;
6228 /* If the solution includes ESCAPED, check if that is empty. */
6229 if (pt->ipa_escaped
6230 && !pt_solution_empty_p (&ipa_escaped_pt))
6231 return false;
6233 return true;
6236 /* Return true if the points-to solution *PT only point to a single var, and
6237 return the var uid in *UID. */
6239 bool
6240 pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
6242 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6243 || pt->null || pt->vars == NULL
6244 || !bitmap_single_bit_set_p (pt->vars))
6245 return false;
6247 *uid = bitmap_first_set_bit (pt->vars);
6248 return true;
6251 /* Return true if the points-to solution *PT includes global memory. */
6253 bool
6254 pt_solution_includes_global (struct pt_solution *pt)
6256 if (pt->anything
6257 || pt->nonlocal
6258 || pt->vars_contains_global)
6259 return true;
6261 if (pt->escaped)
6262 return pt_solution_includes_global (&cfun->gimple_df->escaped);
6264 if (pt->ipa_escaped)
6265 return pt_solution_includes_global (&ipa_escaped_pt);
6267 /* ??? This predicate is not correct for the IPA-PTA solution
6268 as we do not properly distinguish between unit escape points
6269 and global variables. */
6270 if (cfun->gimple_df->ipa_pta)
6271 return true;
6273 return false;
6276 /* Return true if the points-to solution *PT includes the variable
6277 declaration DECL. */
6279 static bool
6280 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
6282 if (pt->anything)
6283 return true;
6285 if (pt->nonlocal
6286 && is_global_var (decl))
6287 return true;
6289 if (pt->vars
6290 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
6291 return true;
6293 /* If the solution includes ESCAPED, check it. */
6294 if (pt->escaped
6295 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
6296 return true;
6298 /* If the solution includes ESCAPED, check it. */
6299 if (pt->ipa_escaped
6300 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
6301 return true;
6303 return false;
6306 bool
6307 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6309 bool res = pt_solution_includes_1 (pt, decl);
6310 if (res)
6311 ++pta_stats.pt_solution_includes_may_alias;
6312 else
6313 ++pta_stats.pt_solution_includes_no_alias;
6314 return res;
6317 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
6318 intersection. */
6320 static bool
6321 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
6323 if (pt1->anything || pt2->anything)
6324 return true;
6326 /* If either points to unknown global memory and the other points to
6327 any global memory they alias. */
6328 if ((pt1->nonlocal
6329 && (pt2->nonlocal
6330 || pt2->vars_contains_global))
6331 || (pt2->nonlocal
6332 && pt1->vars_contains_global))
6333 return true;
6335 /* Check the escaped solution if required. */
6336 if ((pt1->escaped || pt2->escaped)
6337 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6339 /* If both point to escaped memory and that solution
6340 is not empty they alias. */
6341 if (pt1->escaped && pt2->escaped)
6342 return true;
6344 /* If either points to escaped memory see if the escaped solution
6345 intersects with the other. */
6346 if ((pt1->escaped
6347 && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt2))
6348 || (pt2->escaped
6349 && pt_solutions_intersect_1 (&cfun->gimple_df->escaped, pt1)))
6350 return true;
6353 /* Check the escaped solution if required.
6354 ??? Do we need to check the local against the IPA escaped sets? */
6355 if ((pt1->ipa_escaped || pt2->ipa_escaped)
6356 && !pt_solution_empty_p (&ipa_escaped_pt))
6358 /* If both point to escaped memory and that solution
6359 is not empty they alias. */
6360 if (pt1->ipa_escaped && pt2->ipa_escaped)
6361 return true;
6363 /* If either points to escaped memory see if the escaped solution
6364 intersects with the other. */
6365 if ((pt1->ipa_escaped
6366 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
6367 || (pt2->ipa_escaped
6368 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
6369 return true;
6372 /* Now both pointers alias if their points-to solution intersects. */
6373 return (pt1->vars
6374 && pt2->vars
6375 && bitmap_intersect_p (pt1->vars, pt2->vars));
6378 bool
6379 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6381 bool res = pt_solutions_intersect_1 (pt1, pt2);
6382 if (res)
6383 ++pta_stats.pt_solutions_intersect_may_alias;
6384 else
6385 ++pta_stats.pt_solutions_intersect_no_alias;
6386 return res;
6390 /* Dump points-to information to OUTFILE. */
6392 static void
6393 dump_sa_points_to_info (FILE *outfile)
6395 unsigned int i;
6397 fprintf (outfile, "\nPoints-to sets\n\n");
6399 if (dump_flags & TDF_STATS)
6401 fprintf (outfile, "Stats:\n");
6402 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6403 fprintf (outfile, "Non-pointer vars: %d\n",
6404 stats.nonpointer_vars);
6405 fprintf (outfile, "Statically unified vars: %d\n",
6406 stats.unified_vars_static);
6407 fprintf (outfile, "Dynamically unified vars: %d\n",
6408 stats.unified_vars_dynamic);
6409 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6410 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6411 fprintf (outfile, "Number of implicit edges: %d\n",
6412 stats.num_implicit_edges);
6415 for (i = 1; i < varmap.length (); i++)
6417 varinfo_t vi = get_varinfo (i);
6418 if (!vi->may_have_pointers)
6419 continue;
6420 dump_solution_for_var (outfile, i);
6425 /* Debug points-to information to stderr. */
6427 DEBUG_FUNCTION void
6428 debug_sa_points_to_info (void)
6430 dump_sa_points_to_info (stderr);
6434 /* Initialize the always-existing constraint variables for NULL
6435 ANYTHING, READONLY, and INTEGER */
6437 static void
6438 init_base_vars (void)
6440 struct constraint_expr lhs, rhs;
6441 varinfo_t var_anything;
6442 varinfo_t var_nothing;
6443 varinfo_t var_readonly;
6444 varinfo_t var_escaped;
6445 varinfo_t var_nonlocal;
6446 varinfo_t var_storedanything;
6447 varinfo_t var_integer;
6449 /* Variable ID zero is reserved and should be NULL. */
6450 varmap.safe_push (NULL);
6452 /* Create the NULL variable, used to represent that a variable points
6453 to NULL. */
6454 var_nothing = new_var_info (NULL_TREE, "NULL");
6455 gcc_assert (var_nothing->id == nothing_id);
6456 var_nothing->is_artificial_var = 1;
6457 var_nothing->offset = 0;
6458 var_nothing->size = ~0;
6459 var_nothing->fullsize = ~0;
6460 var_nothing->is_special_var = 1;
6461 var_nothing->may_have_pointers = 0;
6462 var_nothing->is_global_var = 0;
6464 /* Create the ANYTHING variable, used to represent that a variable
6465 points to some unknown piece of memory. */
6466 var_anything = new_var_info (NULL_TREE, "ANYTHING");
6467 gcc_assert (var_anything->id == anything_id);
6468 var_anything->is_artificial_var = 1;
6469 var_anything->size = ~0;
6470 var_anything->offset = 0;
6471 var_anything->fullsize = ~0;
6472 var_anything->is_special_var = 1;
6474 /* Anything points to anything. This makes deref constraints just
6475 work in the presence of linked list and other p = *p type loops,
6476 by saying that *ANYTHING = ANYTHING. */
6477 lhs.type = SCALAR;
6478 lhs.var = anything_id;
6479 lhs.offset = 0;
6480 rhs.type = ADDRESSOF;
6481 rhs.var = anything_id;
6482 rhs.offset = 0;
6484 /* This specifically does not use process_constraint because
6485 process_constraint ignores all anything = anything constraints, since all
6486 but this one are redundant. */
6487 constraints.safe_push (new_constraint (lhs, rhs));
6489 /* Create the READONLY variable, used to represent that a variable
6490 points to readonly memory. */
6491 var_readonly = new_var_info (NULL_TREE, "READONLY");
6492 gcc_assert (var_readonly->id == readonly_id);
6493 var_readonly->is_artificial_var = 1;
6494 var_readonly->offset = 0;
6495 var_readonly->size = ~0;
6496 var_readonly->fullsize = ~0;
6497 var_readonly->is_special_var = 1;
6499 /* readonly memory points to anything, in order to make deref
6500 easier. In reality, it points to anything the particular
6501 readonly variable can point to, but we don't track this
6502 separately. */
6503 lhs.type = SCALAR;
6504 lhs.var = readonly_id;
6505 lhs.offset = 0;
6506 rhs.type = ADDRESSOF;
6507 rhs.var = readonly_id; /* FIXME */
6508 rhs.offset = 0;
6509 process_constraint (new_constraint (lhs, rhs));
6511 /* Create the ESCAPED variable, used to represent the set of escaped
6512 memory. */
6513 var_escaped = new_var_info (NULL_TREE, "ESCAPED");
6514 gcc_assert (var_escaped->id == escaped_id);
6515 var_escaped->is_artificial_var = 1;
6516 var_escaped->offset = 0;
6517 var_escaped->size = ~0;
6518 var_escaped->fullsize = ~0;
6519 var_escaped->is_special_var = 0;
6521 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6522 memory. */
6523 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
6524 gcc_assert (var_nonlocal->id == nonlocal_id);
6525 var_nonlocal->is_artificial_var = 1;
6526 var_nonlocal->offset = 0;
6527 var_nonlocal->size = ~0;
6528 var_nonlocal->fullsize = ~0;
6529 var_nonlocal->is_special_var = 1;
6531 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6532 lhs.type = SCALAR;
6533 lhs.var = escaped_id;
6534 lhs.offset = 0;
6535 rhs.type = DEREF;
6536 rhs.var = escaped_id;
6537 rhs.offset = 0;
6538 process_constraint (new_constraint (lhs, rhs));
6540 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6541 whole variable escapes. */
6542 lhs.type = SCALAR;
6543 lhs.var = escaped_id;
6544 lhs.offset = 0;
6545 rhs.type = SCALAR;
6546 rhs.var = escaped_id;
6547 rhs.offset = UNKNOWN_OFFSET;
6548 process_constraint (new_constraint (lhs, rhs));
6550 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6551 everything pointed to by escaped points to what global memory can
6552 point to. */
6553 lhs.type = DEREF;
6554 lhs.var = escaped_id;
6555 lhs.offset = 0;
6556 rhs.type = SCALAR;
6557 rhs.var = nonlocal_id;
6558 rhs.offset = 0;
6559 process_constraint (new_constraint (lhs, rhs));
6561 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6562 global memory may point to global memory and escaped memory. */
6563 lhs.type = SCALAR;
6564 lhs.var = nonlocal_id;
6565 lhs.offset = 0;
6566 rhs.type = ADDRESSOF;
6567 rhs.var = nonlocal_id;
6568 rhs.offset = 0;
6569 process_constraint (new_constraint (lhs, rhs));
6570 rhs.type = ADDRESSOF;
6571 rhs.var = escaped_id;
6572 rhs.offset = 0;
6573 process_constraint (new_constraint (lhs, rhs));
6575 /* Create the STOREDANYTHING variable, used to represent the set of
6576 variables stored to *ANYTHING. */
6577 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
6578 gcc_assert (var_storedanything->id == storedanything_id);
6579 var_storedanything->is_artificial_var = 1;
6580 var_storedanything->offset = 0;
6581 var_storedanything->size = ~0;
6582 var_storedanything->fullsize = ~0;
6583 var_storedanything->is_special_var = 0;
6585 /* Create the INTEGER variable, used to represent that a variable points
6586 to what an INTEGER "points to". */
6587 var_integer = new_var_info (NULL_TREE, "INTEGER");
6588 gcc_assert (var_integer->id == integer_id);
6589 var_integer->is_artificial_var = 1;
6590 var_integer->size = ~0;
6591 var_integer->fullsize = ~0;
6592 var_integer->offset = 0;
6593 var_integer->is_special_var = 1;
6595 /* INTEGER = ANYTHING, because we don't know where a dereference of
6596 a random integer will point to. */
6597 lhs.type = SCALAR;
6598 lhs.var = integer_id;
6599 lhs.offset = 0;
6600 rhs.type = ADDRESSOF;
6601 rhs.var = anything_id;
6602 rhs.offset = 0;
6603 process_constraint (new_constraint (lhs, rhs));
6606 /* Initialize things necessary to perform PTA */
6608 static void
6609 init_alias_vars (void)
6611 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
6613 bitmap_obstack_initialize (&pta_obstack);
6614 bitmap_obstack_initialize (&oldpta_obstack);
6615 bitmap_obstack_initialize (&predbitmap_obstack);
6617 constraint_pool = create_alloc_pool ("Constraint pool",
6618 sizeof (struct constraint), 30);
6619 variable_info_pool = create_alloc_pool ("Variable info pool",
6620 sizeof (struct variable_info), 30);
6621 constraints.create (8);
6622 varmap.create (8);
6623 vi_for_tree = pointer_map_create ();
6624 call_stmt_vars = pointer_map_create ();
6626 memset (&stats, 0, sizeof (stats));
6627 shared_bitmap_table.create (511);
6628 init_base_vars ();
6630 gcc_obstack_init (&fake_var_decl_obstack);
6632 final_solutions = pointer_map_create ();
6633 gcc_obstack_init (&final_solutions_obstack);
6636 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6637 predecessor edges. */
6639 static void
6640 remove_preds_and_fake_succs (constraint_graph_t graph)
6642 unsigned int i;
6644 /* Clear the implicit ref and address nodes from the successor
6645 lists. */
6646 for (i = 1; i < FIRST_REF_NODE; i++)
6648 if (graph->succs[i])
6649 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6650 FIRST_REF_NODE * 2);
6653 /* Free the successor list for the non-ref nodes. */
6654 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
6656 if (graph->succs[i])
6657 BITMAP_FREE (graph->succs[i]);
6660 /* Now reallocate the size of the successor list as, and blow away
6661 the predecessor bitmaps. */
6662 graph->size = varmap.length ();
6663 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6665 free (graph->implicit_preds);
6666 graph->implicit_preds = NULL;
6667 free (graph->preds);
6668 graph->preds = NULL;
6669 bitmap_obstack_release (&predbitmap_obstack);
6672 /* Solve the constraint set. */
6674 static void
6675 solve_constraints (void)
6677 struct scc_info *si;
6679 if (dump_file)
6680 fprintf (dump_file,
6681 "\nCollapsing static cycles and doing variable "
6682 "substitution\n");
6684 init_graph (varmap.length () * 2);
6686 if (dump_file)
6687 fprintf (dump_file, "Building predecessor graph\n");
6688 build_pred_graph ();
6690 if (dump_file)
6691 fprintf (dump_file, "Detecting pointer and location "
6692 "equivalences\n");
6693 si = perform_var_substitution (graph);
6695 if (dump_file)
6696 fprintf (dump_file, "Rewriting constraints and unifying "
6697 "variables\n");
6698 rewrite_constraints (graph, si);
6700 build_succ_graph ();
6702 free_var_substitution_info (si);
6704 /* Attach complex constraints to graph nodes. */
6705 move_complex_constraints (graph);
6707 if (dump_file)
6708 fprintf (dump_file, "Uniting pointer but not location equivalent "
6709 "variables\n");
6710 unite_pointer_equivalences (graph);
6712 if (dump_file)
6713 fprintf (dump_file, "Finding indirect cycles\n");
6714 find_indirect_cycles (graph);
6716 /* Implicit nodes and predecessors are no longer necessary at this
6717 point. */
6718 remove_preds_and_fake_succs (graph);
6720 if (dump_file && (dump_flags & TDF_GRAPH))
6722 fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
6723 "in dot format:\n");
6724 dump_constraint_graph (dump_file);
6725 fprintf (dump_file, "\n\n");
6728 if (dump_file)
6729 fprintf (dump_file, "Solving graph\n");
6731 solve_graph (graph);
6733 if (dump_file && (dump_flags & TDF_GRAPH))
6735 fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
6736 "in dot format:\n");
6737 dump_constraint_graph (dump_file);
6738 fprintf (dump_file, "\n\n");
6741 if (dump_file)
6742 dump_sa_points_to_info (dump_file);
6745 /* Create points-to sets for the current function. See the comments
6746 at the start of the file for an algorithmic overview. */
6748 static void
6749 compute_points_to_sets (void)
6751 basic_block bb;
6752 unsigned i;
6753 varinfo_t vi;
6755 timevar_push (TV_TREE_PTA);
6757 init_alias_vars ();
6759 intra_create_variable_infos ();
6761 /* Now walk all statements and build the constraint set. */
6762 FOR_EACH_BB (bb)
6764 gimple_stmt_iterator gsi;
6766 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6768 gimple phi = gsi_stmt (gsi);
6770 if (! virtual_operand_p (gimple_phi_result (phi)))
6771 find_func_aliases (phi);
6774 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6776 gimple stmt = gsi_stmt (gsi);
6778 find_func_aliases (stmt);
6782 if (dump_file)
6784 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
6785 dump_constraints (dump_file, 0);
6788 /* From the constraints compute the points-to sets. */
6789 solve_constraints ();
6791 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
6792 cfun->gimple_df->escaped = find_what_var_points_to (get_varinfo (escaped_id));
6794 /* Make sure the ESCAPED solution (which is used as placeholder in
6795 other solutions) does not reference itself. This simplifies
6796 points-to solution queries. */
6797 cfun->gimple_df->escaped.escaped = 0;
6799 /* Mark escaped HEAP variables as global. */
6800 FOR_EACH_VEC_ELT (varmap, i, vi)
6801 if (vi
6802 && vi->is_heap_var
6803 && !vi->is_global_var)
6804 DECL_EXTERNAL (vi->decl) = vi->is_global_var
6805 = pt_solution_includes (&cfun->gimple_df->escaped, vi->decl);
6807 /* Compute the points-to sets for pointer SSA_NAMEs. */
6808 for (i = 0; i < num_ssa_names; ++i)
6810 tree ptr = ssa_name (i);
6811 if (ptr
6812 && POINTER_TYPE_P (TREE_TYPE (ptr)))
6813 find_what_p_points_to (ptr);
6816 /* Compute the call-used/clobbered sets. */
6817 FOR_EACH_BB (bb)
6819 gimple_stmt_iterator gsi;
6821 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6823 gimple stmt = gsi_stmt (gsi);
6824 struct pt_solution *pt;
6825 if (!is_gimple_call (stmt))
6826 continue;
6828 pt = gimple_call_use_set (stmt);
6829 if (gimple_call_flags (stmt) & ECF_CONST)
6830 memset (pt, 0, sizeof (struct pt_solution));
6831 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
6833 *pt = find_what_var_points_to (vi);
6834 /* Escaped (and thus nonlocal) variables are always
6835 implicitly used by calls. */
6836 /* ??? ESCAPED can be empty even though NONLOCAL
6837 always escaped. */
6838 pt->nonlocal = 1;
6839 pt->escaped = 1;
6841 else
6843 /* If there is nothing special about this call then
6844 we have made everything that is used also escape. */
6845 *pt = cfun->gimple_df->escaped;
6846 pt->nonlocal = 1;
6849 pt = gimple_call_clobber_set (stmt);
6850 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
6851 memset (pt, 0, sizeof (struct pt_solution));
6852 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
6854 *pt = find_what_var_points_to (vi);
6855 /* Escaped (and thus nonlocal) variables are always
6856 implicitly clobbered by calls. */
6857 /* ??? ESCAPED can be empty even though NONLOCAL
6858 always escaped. */
6859 pt->nonlocal = 1;
6860 pt->escaped = 1;
6862 else
6864 /* If there is nothing special about this call then
6865 we have made everything that is used also escape. */
6866 *pt = cfun->gimple_df->escaped;
6867 pt->nonlocal = 1;
6872 timevar_pop (TV_TREE_PTA);
6876 /* Delete created points-to sets. */
6878 static void
6879 delete_points_to_sets (void)
6881 unsigned int i;
6883 shared_bitmap_table.dispose ();
6884 if (dump_file && (dump_flags & TDF_STATS))
6885 fprintf (dump_file, "Points to sets created:%d\n",
6886 stats.points_to_sets_created);
6888 pointer_map_destroy (vi_for_tree);
6889 pointer_map_destroy (call_stmt_vars);
6890 bitmap_obstack_release (&pta_obstack);
6891 constraints.release ();
6893 for (i = 0; i < graph->size; i++)
6894 graph->complex[i].release ();
6895 free (graph->complex);
6897 free (graph->rep);
6898 free (graph->succs);
6899 free (graph->pe);
6900 free (graph->pe_rep);
6901 free (graph->indirect_cycles);
6902 free (graph);
6904 varmap.release ();
6905 free_alloc_pool (variable_info_pool);
6906 free_alloc_pool (constraint_pool);
6908 obstack_free (&fake_var_decl_obstack, NULL);
6910 pointer_map_destroy (final_solutions);
6911 obstack_free (&final_solutions_obstack, NULL);
6915 /* Compute points-to information for every SSA_NAME pointer in the
6916 current function and compute the transitive closure of escaped
6917 variables to re-initialize the call-clobber states of local variables. */
6919 unsigned int
6920 compute_may_aliases (void)
6922 if (cfun->gimple_df->ipa_pta)
6924 if (dump_file)
6926 fprintf (dump_file, "\nNot re-computing points-to information "
6927 "because IPA points-to information is available.\n\n");
6929 /* But still dump what we have remaining it. */
6930 dump_alias_info (dump_file);
6933 return 0;
6936 /* For each pointer P_i, determine the sets of variables that P_i may
6937 point-to. Compute the reachability set of escaped and call-used
6938 variables. */
6939 compute_points_to_sets ();
6941 /* Debugging dumps. */
6942 if (dump_file)
6943 dump_alias_info (dump_file);
6945 /* Deallocate memory used by aliasing data structures and the internal
6946 points-to solution. */
6947 delete_points_to_sets ();
6949 gcc_assert (!need_ssa_update_p (cfun));
6951 return 0;
6954 static bool
6955 gate_tree_pta (void)
6957 return flag_tree_pta;
6960 /* A dummy pass to cause points-to information to be computed via
6961 TODO_rebuild_alias. */
6963 namespace {
6965 const pass_data pass_data_build_alias =
6967 GIMPLE_PASS, /* type */
6968 "alias", /* name */
6969 OPTGROUP_NONE, /* optinfo_flags */
6970 true, /* has_gate */
6971 false, /* has_execute */
6972 TV_NONE, /* tv_id */
6973 ( PROP_cfg | PROP_ssa ), /* properties_required */
6974 0, /* properties_provided */
6975 0, /* properties_destroyed */
6976 0, /* todo_flags_start */
6977 TODO_rebuild_alias, /* todo_flags_finish */
6980 class pass_build_alias : public gimple_opt_pass
6982 public:
6983 pass_build_alias(gcc::context *ctxt)
6984 : gimple_opt_pass(pass_data_build_alias, ctxt)
6987 /* opt_pass methods: */
6988 bool gate () { return gate_tree_pta (); }
6990 }; // class pass_build_alias
6992 } // anon namespace
6994 gimple_opt_pass *
6995 make_pass_build_alias (gcc::context *ctxt)
6997 return new pass_build_alias (ctxt);
7000 /* A dummy pass to cause points-to information to be computed via
7001 TODO_rebuild_alias. */
7003 namespace {
7005 const pass_data pass_data_build_ealias =
7007 GIMPLE_PASS, /* type */
7008 "ealias", /* name */
7009 OPTGROUP_NONE, /* optinfo_flags */
7010 true, /* has_gate */
7011 false, /* has_execute */
7012 TV_NONE, /* tv_id */
7013 ( PROP_cfg | PROP_ssa ), /* properties_required */
7014 0, /* properties_provided */
7015 0, /* properties_destroyed */
7016 0, /* todo_flags_start */
7017 TODO_rebuild_alias, /* todo_flags_finish */
7020 class pass_build_ealias : public gimple_opt_pass
7022 public:
7023 pass_build_ealias(gcc::context *ctxt)
7024 : gimple_opt_pass(pass_data_build_ealias, ctxt)
7027 /* opt_pass methods: */
7028 bool gate () { return gate_tree_pta (); }
7030 }; // class pass_build_ealias
7032 } // anon namespace
7034 gimple_opt_pass *
7035 make_pass_build_ealias (gcc::context *ctxt)
7037 return new pass_build_ealias (ctxt);
7041 /* Return true if we should execute IPA PTA. */
7042 static bool
7043 gate_ipa_pta (void)
7045 return (optimize
7046 && flag_ipa_pta
7047 /* Don't bother doing anything if the program has errors. */
7048 && !seen_error ());
7051 /* IPA PTA solutions for ESCAPED. */
7052 struct pt_solution ipa_escaped_pt
7053 = { true, false, false, false, false, false, NULL };
7055 /* Associate node with varinfo DATA. Worker for
7056 cgraph_for_node_and_aliases. */
7057 static bool
7058 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
7060 if ((node->symbol.alias || node->thunk.thunk_p)
7061 && node->symbol.analyzed)
7062 insert_vi_for_tree (node->symbol.decl, (varinfo_t)data);
7063 return false;
7066 /* Execute the driver for IPA PTA. */
7067 static unsigned int
7068 ipa_pta_execute (void)
7070 struct cgraph_node *node;
7071 struct varpool_node *var;
7072 int from;
7074 in_ipa_mode = 1;
7076 init_alias_vars ();
7078 if (dump_file && (dump_flags & TDF_DETAILS))
7080 dump_symtab (dump_file);
7081 fprintf (dump_file, "\n");
7084 /* Build the constraints. */
7085 FOR_EACH_DEFINED_FUNCTION (node)
7087 varinfo_t vi;
7088 /* Nodes without a body are not interesting. Especially do not
7089 visit clones at this point for now - we get duplicate decls
7090 there for inline clones at least. */
7091 if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
7092 continue;
7093 cgraph_get_body (node);
7095 gcc_assert (!node->clone_of);
7097 vi = create_function_info_for (node->symbol.decl,
7098 alias_get_name (node->symbol.decl));
7099 cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
7102 /* Create constraints for global variables and their initializers. */
7103 FOR_EACH_VARIABLE (var)
7105 if (var->symbol.alias && var->symbol.analyzed)
7106 continue;
7108 get_vi_for_tree (var->symbol.decl);
7111 if (dump_file)
7113 fprintf (dump_file,
7114 "Generating constraints for global initializers\n\n");
7115 dump_constraints (dump_file, 0);
7116 fprintf (dump_file, "\n");
7118 from = constraints.length ();
7120 FOR_EACH_DEFINED_FUNCTION (node)
7122 struct function *func;
7123 basic_block bb;
7125 /* Nodes without a body are not interesting. */
7126 if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
7127 continue;
7129 if (dump_file)
7131 fprintf (dump_file,
7132 "Generating constraints for %s", cgraph_node_name (node));
7133 if (DECL_ASSEMBLER_NAME_SET_P (node->symbol.decl))
7134 fprintf (dump_file, " (%s)",
7135 IDENTIFIER_POINTER
7136 (DECL_ASSEMBLER_NAME (node->symbol.decl)));
7137 fprintf (dump_file, "\n");
7140 func = DECL_STRUCT_FUNCTION (node->symbol.decl);
7141 push_cfun (func);
7143 /* For externally visible or attribute used annotated functions use
7144 local constraints for their arguments.
7145 For local functions we see all callers and thus do not need initial
7146 constraints for parameters. */
7147 if (node->symbol.used_from_other_partition
7148 || node->symbol.externally_visible
7149 || node->symbol.force_output)
7151 intra_create_variable_infos ();
7153 /* We also need to make function return values escape. Nothing
7154 escapes by returning from main though. */
7155 if (!MAIN_NAME_P (DECL_NAME (node->symbol.decl)))
7157 varinfo_t fi, rvi;
7158 fi = lookup_vi_for_tree (node->symbol.decl);
7159 rvi = first_vi_for_offset (fi, fi_result);
7160 if (rvi && rvi->offset == fi_result)
7162 struct constraint_expr includes;
7163 struct constraint_expr var;
7164 includes.var = escaped_id;
7165 includes.offset = 0;
7166 includes.type = SCALAR;
7167 var.var = rvi->id;
7168 var.offset = 0;
7169 var.type = SCALAR;
7170 process_constraint (new_constraint (includes, var));
7175 /* Build constriants for the function body. */
7176 FOR_EACH_BB_FN (bb, func)
7178 gimple_stmt_iterator gsi;
7180 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7181 gsi_next (&gsi))
7183 gimple phi = gsi_stmt (gsi);
7185 if (! virtual_operand_p (gimple_phi_result (phi)))
7186 find_func_aliases (phi);
7189 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7191 gimple stmt = gsi_stmt (gsi);
7193 find_func_aliases (stmt);
7194 find_func_clobbers (stmt);
7198 pop_cfun ();
7200 if (dump_file)
7202 fprintf (dump_file, "\n");
7203 dump_constraints (dump_file, from);
7204 fprintf (dump_file, "\n");
7206 from = constraints.length ();
7209 /* From the constraints compute the points-to sets. */
7210 solve_constraints ();
7212 /* Compute the global points-to sets for ESCAPED.
7213 ??? Note that the computed escape set is not correct
7214 for the whole unit as we fail to consider graph edges to
7215 externally visible functions. */
7216 ipa_escaped_pt = find_what_var_points_to (get_varinfo (escaped_id));
7218 /* Make sure the ESCAPED solution (which is used as placeholder in
7219 other solutions) does not reference itself. This simplifies
7220 points-to solution queries. */
7221 ipa_escaped_pt.ipa_escaped = 0;
7223 /* Assign the points-to sets to the SSA names in the unit. */
7224 FOR_EACH_DEFINED_FUNCTION (node)
7226 tree ptr;
7227 struct function *fn;
7228 unsigned i;
7229 varinfo_t fi;
7230 basic_block bb;
7231 struct pt_solution uses, clobbers;
7232 struct cgraph_edge *e;
7234 /* Nodes without a body are not interesting. */
7235 if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
7236 continue;
7238 fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
7240 /* Compute the points-to sets for pointer SSA_NAMEs. */
7241 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
7243 if (ptr
7244 && POINTER_TYPE_P (TREE_TYPE (ptr)))
7245 find_what_p_points_to (ptr);
7248 /* Compute the call-use and call-clobber sets for all direct calls. */
7249 fi = lookup_vi_for_tree (node->symbol.decl);
7250 gcc_assert (fi->is_fn_info);
7251 clobbers
7252 = find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers));
7253 uses = find_what_var_points_to (first_vi_for_offset (fi, fi_uses));
7254 for (e = node->callers; e; e = e->next_caller)
7256 if (!e->call_stmt)
7257 continue;
7259 *gimple_call_clobber_set (e->call_stmt) = clobbers;
7260 *gimple_call_use_set (e->call_stmt) = uses;
7263 /* Compute the call-use and call-clobber sets for indirect calls
7264 and calls to external functions. */
7265 FOR_EACH_BB_FN (bb, fn)
7267 gimple_stmt_iterator gsi;
7269 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7271 gimple stmt = gsi_stmt (gsi);
7272 struct pt_solution *pt;
7273 varinfo_t vi;
7274 tree decl;
7276 if (!is_gimple_call (stmt))
7277 continue;
7279 /* Handle direct calls to external functions. */
7280 decl = gimple_call_fndecl (stmt);
7281 if (decl
7282 && (!(fi = lookup_vi_for_tree (decl))
7283 || !fi->is_fn_info))
7285 pt = gimple_call_use_set (stmt);
7286 if (gimple_call_flags (stmt) & ECF_CONST)
7287 memset (pt, 0, sizeof (struct pt_solution));
7288 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7290 *pt = find_what_var_points_to (vi);
7291 /* Escaped (and thus nonlocal) variables are always
7292 implicitly used by calls. */
7293 /* ??? ESCAPED can be empty even though NONLOCAL
7294 always escaped. */
7295 pt->nonlocal = 1;
7296 pt->ipa_escaped = 1;
7298 else
7300 /* If there is nothing special about this call then
7301 we have made everything that is used also escape. */
7302 *pt = ipa_escaped_pt;
7303 pt->nonlocal = 1;
7306 pt = gimple_call_clobber_set (stmt);
7307 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7308 memset (pt, 0, sizeof (struct pt_solution));
7309 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7311 *pt = find_what_var_points_to (vi);
7312 /* Escaped (and thus nonlocal) variables are always
7313 implicitly clobbered by calls. */
7314 /* ??? ESCAPED can be empty even though NONLOCAL
7315 always escaped. */
7316 pt->nonlocal = 1;
7317 pt->ipa_escaped = 1;
7319 else
7321 /* If there is nothing special about this call then
7322 we have made everything that is used also escape. */
7323 *pt = ipa_escaped_pt;
7324 pt->nonlocal = 1;
7328 /* Handle indirect calls. */
7329 if (!decl
7330 && (fi = get_fi_for_callee (stmt)))
7332 /* We need to accumulate all clobbers/uses of all possible
7333 callees. */
7334 fi = get_varinfo (find (fi->id));
7335 /* If we cannot constrain the set of functions we'll end up
7336 calling we end up using/clobbering everything. */
7337 if (bitmap_bit_p (fi->solution, anything_id)
7338 || bitmap_bit_p (fi->solution, nonlocal_id)
7339 || bitmap_bit_p (fi->solution, escaped_id))
7341 pt_solution_reset (gimple_call_clobber_set (stmt));
7342 pt_solution_reset (gimple_call_use_set (stmt));
7344 else
7346 bitmap_iterator bi;
7347 unsigned i;
7348 struct pt_solution *uses, *clobbers;
7350 uses = gimple_call_use_set (stmt);
7351 clobbers = gimple_call_clobber_set (stmt);
7352 memset (uses, 0, sizeof (struct pt_solution));
7353 memset (clobbers, 0, sizeof (struct pt_solution));
7354 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
7356 struct pt_solution sol;
7358 vi = get_varinfo (i);
7359 if (!vi->is_fn_info)
7361 /* ??? We could be more precise here? */
7362 uses->nonlocal = 1;
7363 uses->ipa_escaped = 1;
7364 clobbers->nonlocal = 1;
7365 clobbers->ipa_escaped = 1;
7366 continue;
7369 if (!uses->anything)
7371 sol = find_what_var_points_to
7372 (first_vi_for_offset (vi, fi_uses));
7373 pt_solution_ior_into (uses, &sol);
7375 if (!clobbers->anything)
7377 sol = find_what_var_points_to
7378 (first_vi_for_offset (vi, fi_clobbers));
7379 pt_solution_ior_into (clobbers, &sol);
7387 fn->gimple_df->ipa_pta = true;
7390 delete_points_to_sets ();
7392 in_ipa_mode = 0;
7394 return 0;
7397 namespace {
7399 const pass_data pass_data_ipa_pta =
7401 SIMPLE_IPA_PASS, /* type */
7402 "pta", /* name */
7403 OPTGROUP_NONE, /* optinfo_flags */
7404 true, /* has_gate */
7405 true, /* has_execute */
7406 TV_IPA_PTA, /* tv_id */
7407 0, /* properties_required */
7408 0, /* properties_provided */
7409 0, /* properties_destroyed */
7410 0, /* todo_flags_start */
7411 TODO_update_ssa, /* todo_flags_finish */
7414 class pass_ipa_pta : public simple_ipa_opt_pass
7416 public:
7417 pass_ipa_pta(gcc::context *ctxt)
7418 : simple_ipa_opt_pass(pass_data_ipa_pta, ctxt)
7421 /* opt_pass methods: */
7422 bool gate () { return gate_ipa_pta (); }
7423 unsigned int execute () { return ipa_pta_execute (); }
7425 }; // class pass_ipa_pta
7427 } // anon namespace
7429 simple_ipa_opt_pass *
7430 make_pass_ipa_pta (gcc::context *ctxt)
7432 return new pass_ipa_pta (ctxt);