gcc/tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "obstack.h"
26 #include "bitmap.h"
27 #include "sbitmap.h"
28 #include "flags.h"
29 #include "basic-block.h"
30 #include "tree.h"
31 #include "stor-layout.h"
32 #include "stmt.h"
33 #include "pointer-set.h"
34 #include "hash-table.h"
35 #include "tree-ssa-alias.h"
36 #include "internal-fn.h"
37 #include "gimple-expr.h"
38 #include "is-a.h"
39 #include "gimple.h"
40 #include "gimple-iterator.h"
41 #include "gimple-ssa.h"
42 #include "cgraph.h"
43 #include "stringpool.h"
44 #include "tree-ssanames.h"
45 #include "tree-into-ssa.h"
46 #include "expr.h"
47 #include "tree-dfa.h"
48 #include "tree-inline.h"
49 #include "diagnostic-core.h"
50 #include "function.h"
51 #include "tree-pass.h"
52 #include "alloc-pool.h"
53 #include "splay-tree.h"
54 #include "params.h"
55 #include "alias.h"
57 /* The idea behind this analyzer is to generate set constraints from the
58 program, then solve the resulting constraints in order to generate the
59 points-to sets.
61 Set constraints are a way of modeling program analysis problems that
62 involve sets. They consist of an inclusion constraint language,
63 describing the variables (each variable is a set) and operations that
64 are involved on the variables, and a set of rules that derive facts
65 from these operations. To solve a system of set constraints, you derive
66 all possible facts under the rules, which gives you the correct sets
67 as a consequence.
69 See "Efficient Field-sensitive pointer analysis for C" by David
70 J. Pearce, Paul H. J. Kelly and Chris Hankin, at
71 http://citeseer.ist.psu.edu/pearce04efficient.html
73 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
74 of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
75 http://citeseer.ist.psu.edu/heintze01ultrafast.html
77 There are three types of real constraint expressions, DEREF,
78 ADDRESSOF, and SCALAR. Each constraint expression consists
79 of a constraint type, a variable, and an offset.
81 SCALAR is a constraint expression type used to represent x, whether
82 it appears on the LHS or the RHS of a statement.
83 DEREF is a constraint expression type used to represent *x, whether
84 it appears on the LHS or the RHS of a statement.
85 ADDRESSOF is a constraint expression used to represent &x, whether
86 it appears on the LHS or the RHS of a statement.
88 Each pointer variable in the program is assigned an integer id, and
89 each field of a structure variable is assigned an integer id as well.
91 Structure variables are linked to their list of fields through a "next
92 field" in each variable that points to the next field in offset
93 order.
94 Each variable for a structure field has
96 1. "size", that tells the size in bits of that field.
97 2. "fullsize", that tells the size in bits of the entire structure.
98 3. "offset", that tells the offset in bits from the beginning of the
99 structure to this field.
101 Thus,
102 struct f
103 {
104 int a;
105 int b;
106 } foo;
107 int *bar;
109 looks like
111 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
112 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
113 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
116 In order to solve the system of set constraints, the following is
117 done:
119 1. Each constraint variable x has a solution set associated with it,
120 Sol(x).
122 2. Constraints are separated into direct, copy, and complex.
123 Direct constraints are ADDRESSOF constraints that require no extra
124 processing, such as P = &Q
125 Copy constraints are those of the form P = Q.
126 Complex constraints are all the constraints involving dereferences
127 and offsets (including offsetted copies).
129 3. All direct constraints of the form P = &Q are processed, such
130 that Q is added to Sol(P)
132 4. All complex constraints for a given constraint variable are stored in a
133 linked list attached to that variable's node.
135 5. A directed graph is built out of the copy constraints. Each
136 constraint variable is a node in the graph, and an edge from
137 Q to P is added for each copy constraint of the form P = Q
139 6. The graph is then walked, and solution sets are
140 propagated along the copy edges, such that an edge from Q to P
141 causes Sol(P) <- Sol(P) union Sol(Q).
143 7. As we visit each node, all complex constraints associated with
144 that node are processed by adding appropriate copy edges to the graph, or the
145 appropriate variables to the solution set.
147 8. The process of walking the graph is iterated until no solution
148 sets change.
150 Prior to walking the graph in steps 6 and 7, we perform static
151 cycle elimination on the constraint graph, as well
152 as off-line variable substitution.
154 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
155 on and turned into anything), but isn't. You can just see what offset
156 inside the pointed-to struct it's going to access.
158 TODO: Constant bounded arrays can be handled as if they were structs of the
159 same number of elements.
161 TODO: Modeling heap and incoming pointers becomes much better if we
162 add fields to them as we discover them, which we could do.
164 TODO: We could handle unions, but to be honest, it's probably not
165 worth the pain or slowdown. */
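/* As a small hand-worked illustration of the scheme above (an
   editorial example, not actual compiler dump output), the statements

     p = &a;
     q = &b;
     r = p;
     *r = q;

   give rise to the constraints

     p = &a
     q = &b
     r = p
     *r = q

   Step 3 puts a into Sol(p) and b into Sol(q).  Step 5 adds a copy
   edge from p to r, and propagation along it (step 6) makes
   Sol(r) = {a}.  The complex constraint *r = q is attached to r
   (step 4); when r is visited (step 7) the solution of q is unioned
   into the solution of every member of Sol(r), giving Sol(a) = {b}.  */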
167 /* IPA-PTA optimizations possible.
169 When the indirect function called is ANYTHING we can add disambiguation
170 based on the function signatures (or simply the parameter count which
171 is the varinfo size). We also do not need to consider functions that
172 do not have their address taken.
174 The is_global_var bit which marks escape points is overly conservative
175 in IPA mode. Split it to is_escape_point and is_global_var - only
176 externally visible globals are escape points in IPA mode. This is
177 also needed to fix the pt_solution_includes_global predicate
178 (and thus ptr_deref_may_alias_global_p).
180 The way we introduce DECL_PT_UID to avoid fixing up all points-to
181 sets in the translation unit when we copy a DECL during inlining
182 pessimizes precision. The advantage is that the DECL_PT_UID keeps
183 compile-time and memory usage overhead low - the points-to sets
184 do not grow or get unshared as they would during a fixup phase.
185 An alternative solution is to delay IPA PTA until after all
186 inlining transformations have been applied.
188 The way we propagate clobber/use information isn't optimized.
189 It should use a new complex constraint that properly filters
190 out local variables of the callee (though that would make
191 the sets invalid after inlining). OTOH we might as well
192 admit defeat to WHOPR and simply do all the clobber/use analysis
193 and propagation after PTA finished but before we threw away
194 points-to information for memory variables. WHOPR and PTA
195 do not play along well anyway - the whole constraint solving
196 would need to be done in WPA phase and it will be very interesting
197 to apply the results to local SSA names during LTRANS phase.
199 We probably should compute a per-function unit-ESCAPE solution
200 propagating it simply like the clobber / uses solutions. The
201 solution can go alongside the non-IPA escaped solution and be
202 used to query which vars escape the unit through a function.
204 We never put function decls in points-to sets so we do not
205 keep the set of called functions for indirect calls.
207 And probably more. */
209 static bool use_field_sensitive = true;
210 static int in_ipa_mode = 0;
212 /* Used for predecessor bitmaps. */
213 static bitmap_obstack predbitmap_obstack;
215 /* Used for points-to sets. */
216 static bitmap_obstack pta_obstack;
218 /* Used for oldsolution members of variables. */
219 static bitmap_obstack oldpta_obstack;
221 /* Used for per-solver-iteration bitmaps. */
222 static bitmap_obstack iteration_obstack;
224 static unsigned int create_variable_info_for (tree, const char *);
225 typedef struct constraint_graph *constraint_graph_t;
226 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
228 struct constraint;
229 typedef struct constraint *constraint_t;
232 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
233 if (a) \
234 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
236 static struct constraint_stats
238 unsigned int total_vars;
239 unsigned int nonpointer_vars;
240 unsigned int unified_vars_static;
241 unsigned int unified_vars_dynamic;
242 unsigned int iterations;
243 unsigned int num_edges;
244 unsigned int num_implicit_edges;
245 unsigned int points_to_sets_created;
246 } stats;
248 struct variable_info
250 /* ID of this variable */
251 unsigned int id;
253 /* True if this is a variable created by the constraint analysis, such as
254 heap variables and constraints we had to break up. */
255 unsigned int is_artificial_var : 1;
257 /* True if this is a special variable whose solution set should not be
258 changed. */
259 unsigned int is_special_var : 1;
261 /* True for variables whose size is not known or variable. */
262 unsigned int is_unknown_size_var : 1;
264 /* True for (sub-)fields that represent a whole variable. */
265 unsigned int is_full_var : 1;
267 /* True if this is a heap variable. */
268 unsigned int is_heap_var : 1;
270 /* True if this field may contain pointers. */
271 unsigned int may_have_pointers : 1;
273 /* True if this field has only restrict qualified pointers. */
274 unsigned int only_restrict_pointers : 1;
276 /* True if this represents a global variable. */
277 unsigned int is_global_var : 1;
279 /* True if this represents an IPA function info. */
280 unsigned int is_fn_info : 1;
282 /* The ID of the variable for the next field in this structure
283 or zero for the last field in this structure. */
284 unsigned next;
286 /* The ID of the variable for the first field in this structure. */
287 unsigned head;
289 /* Offset of this variable, in bits, from the base variable */
290 unsigned HOST_WIDE_INT offset;
292 /* Size of the variable, in bits. */
293 unsigned HOST_WIDE_INT size;
295 /* Full size of the base variable, in bits. */
296 unsigned HOST_WIDE_INT fullsize;
298 /* Name of this variable */
299 const char *name;
301 /* Tree that this variable is associated with. */
302 tree decl;
304 /* Points-to set for this variable. */
305 bitmap solution;
307 /* Old points-to set for this variable. */
308 bitmap oldsolution;
310 typedef struct variable_info *varinfo_t;
312 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
313 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
314 unsigned HOST_WIDE_INT);
315 static varinfo_t lookup_vi_for_tree (tree);
316 static inline bool type_can_have_subvars (const_tree);
318 /* Pool of variable info structures. */
319 static alloc_pool variable_info_pool;
321 /* Map varinfo to final pt_solution. */
322 static pointer_map_t *final_solutions;
323 struct obstack final_solutions_obstack;
325 /* Table of variable info structures for constraint variables.
326 Indexed directly by variable info id. */
327 static vec<varinfo_t> varmap;
329 /* Return the varmap element N */
331 static inline varinfo_t
332 get_varinfo (unsigned int n)
334 return varmap[n];
337 /* Return the next variable in the list of sub-variables of VI
338 or NULL if VI is the last sub-variable. */
340 static inline varinfo_t
341 vi_next (varinfo_t vi)
343 return get_varinfo (vi->next);
346 /* Static IDs for the special variables. Variable ID zero is never
347 assigned and is used as the terminator for the sub-variable chain. */
348 enum { nothing_id = 1, anything_id = 2, readonly_id = 3,
349 escaped_id = 4, nonlocal_id = 5,
350 storedanything_id = 6, integer_id = 7 };
352 /* Return a new variable info structure for a variable named NAME
353 associated with tree T. Append it to the vector of variable info
354 structures. */
356 static varinfo_t
357 new_var_info (tree t, const char *name)
359 unsigned index = varmap.length ();
360 varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
362 ret->id = index;
363 ret->name = name;
364 ret->decl = t;
365 /* Vars without decl are artificial and do not have sub-variables. */
366 ret->is_artificial_var = (t == NULL_TREE);
367 ret->is_special_var = false;
368 ret->is_unknown_size_var = false;
369 ret->is_full_var = (t == NULL_TREE);
370 ret->is_heap_var = false;
371 ret->may_have_pointers = true;
372 ret->only_restrict_pointers = false;
373 ret->is_global_var = (t == NULL_TREE);
374 ret->is_fn_info = false;
375 if (t && DECL_P (t))
376 ret->is_global_var = (is_global_var (t)
377 /* We have to treat even local register variables
378 as escape points. */
379 || (TREE_CODE (t) == VAR_DECL
380 && DECL_HARD_REGISTER (t)));
381 ret->solution = BITMAP_ALLOC (&pta_obstack);
382 ret->oldsolution = NULL;
383 ret->next = 0;
384 ret->head = ret->id;
386 stats.total_vars++;
388 varmap.safe_push (ret);
390 return ret;
394 /* A map mapping call statements to per-stmt variables for uses
395 and clobbers specific to the call. */
396 static struct pointer_map_t *call_stmt_vars;
398 /* Lookup or create the variable for the call statement CALL. */
400 static varinfo_t
401 get_call_vi (gimple call)
403 void **slot_p;
404 varinfo_t vi, vi2;
406 slot_p = pointer_map_insert (call_stmt_vars, call);
407 if (*slot_p)
408 return (varinfo_t) *slot_p;
410 vi = new_var_info (NULL_TREE, "CALLUSED");
411 vi->offset = 0;
412 vi->size = 1;
413 vi->fullsize = 2;
414 vi->is_full_var = true;
416 vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED");
417 vi2->offset = 1;
418 vi2->size = 1;
419 vi2->fullsize = 2;
420 vi2->is_full_var = true;
422 vi->next = vi2->id;
424 *slot_p = (void *) vi;
425 return vi;
428 /* Lookup the variable for the call statement CALL representing
429 the uses. Returns NULL if there is nothing special about this call. */
431 static varinfo_t
432 lookup_call_use_vi (gimple call)
434 void **slot_p;
436 slot_p = pointer_map_contains (call_stmt_vars, call);
437 if (slot_p)
438 return (varinfo_t) *slot_p;
440 return NULL;
443 /* Lookup the variable for the call statement CALL representing
444 the clobbers. Returns NULL if there is nothing special about this call. */
446 static varinfo_t
447 lookup_call_clobber_vi (gimple call)
449 varinfo_t uses = lookup_call_use_vi (call);
450 if (!uses)
451 return NULL;
453 return vi_next (uses);
456 /* Lookup or create the variable for the call statement CALL representing
457 the uses. */
459 static varinfo_t
460 get_call_use_vi (gimple call)
462 return get_call_vi (call);
465 /* Lookup or create the variable for the call statement CALL representing
466 the clobbers. */
468 static varinfo_t ATTRIBUTE_UNUSED
469 get_call_clobber_vi (gimple call)
471 return vi_next (get_call_vi (call));
475 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
477 /* An expression that appears in a constraint. */
479 struct constraint_expr
481 /* Constraint type. */
482 constraint_expr_type type;
484 /* Variable we are referring to in the constraint. */
485 unsigned int var;
487 /* Offset, in bits, of this constraint from the beginning of
488 variables it ends up referring to.
490 IOW, in a deref constraint, we would deref, get the result set,
491 then add OFFSET to each member. */
492 HOST_WIDE_INT offset;
495 /* Use 0x8000... as special unknown offset. */
496 #define UNKNOWN_OFFSET HOST_WIDE_INT_MIN
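/* As an illustration of the encoding above (the names and ids are made
   up for the example): the statement x = *(y + 32) is represented with
   a LHS constraint expression { type = SCALAR, var = id of x, offset = 0 }
   and a RHS constraint expression { type = DEREF, var = id of y,
   offset = 32 }, with the offset expressed in bits.  */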
498 typedef struct constraint_expr ce_s;
499 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
500 static void get_constraint_for (tree, vec<ce_s> *);
501 static void get_constraint_for_rhs (tree, vec<ce_s> *);
502 static void do_deref (vec<ce_s> *);
504 /* Our set constraints are made up of two constraint expressions, one
505 LHS, and one RHS.
507 As described in the introduction, our set constraints each represent an
508 operation between set valued variables.
510 struct constraint
512 struct constraint_expr lhs;
513 struct constraint_expr rhs;
516 /* List of constraints that we use to build the constraint graph from. */
518 static vec<constraint_t> constraints;
519 static alloc_pool constraint_pool;
521 /* The constraint graph is represented as an array of bitmaps
522 containing successor nodes. */
524 struct constraint_graph
526 /* Size of this graph, which may be different than the number of
527 nodes in the variable map. */
528 unsigned int size;
530 /* Explicit successors of each node. */
531 bitmap *succs;
533 /* Implicit predecessors of each node (Used for variable
534 substitution). */
535 bitmap *implicit_preds;
537 /* Explicit predecessors of each node (Used for variable substitution). */
538 bitmap *preds;
540 /* Indirect cycle representatives, or -1 if the node has no indirect
541 cycles. */
542 int *indirect_cycles;
544 /* Representative node for a node. rep[a] == a unless the node has
545 been unified. */
546 unsigned int *rep;
548 /* Equivalence class representative for a label. This is used for
549 variable substitution. */
550 int *eq_rep;
552 /* Pointer equivalence label for a node. All nodes with the same
553 pointer equivalence label can be unified together at some point
554 (either during constraint optimization or after the constraint
555 graph is built). */
556 unsigned int *pe;
558 /* Pointer equivalence representative for a label. This is used to
559 handle nodes that are pointer equivalent but not location
560 equivalent. We can unite these once the addressof constraints
561 are transformed into initial points-to sets. */
562 int *pe_rep;
564 /* Pointer equivalence label for each node, used during variable
565 substitution. */
566 unsigned int *pointer_label;
568 /* Location equivalence label for each node, used during location
569 equivalence finding. */
570 unsigned int *loc_label;
572 /* Pointed-by set for each node, used during location equivalence
573 finding. This is pointed-by rather than pointed-to, because it
574 is constructed using the predecessor graph. */
575 bitmap *pointed_by;
577 /* Points to sets for pointer equivalence. This is *not* the actual
578 points-to sets for nodes. */
579 bitmap *points_to;
581 /* Bitmap of nodes where the bit is set if the node is a direct
582 node. Used for variable substitution. */
583 sbitmap direct_nodes;
585 /* Bitmap of nodes where the bit is set if the node is address
586 taken. Used for variable substitution. */
587 bitmap address_taken;
589 /* Vector of complex constraints for each graph node. Complex
590 constraints are those involving dereferences or offsets that are
591 not 0. */
592 vec<constraint_t> *complex;
595 static constraint_graph_t graph;
597 /* During variable substitution and the offline version of indirect
598 cycle finding, we create nodes to represent dereferences and
599 address taken constraints. These represent where these start and
600 end. */
601 #define FIRST_REF_NODE (varmap).length ()
602 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
604 /* Return the representative node for NODE, if NODE has been unioned
605 with another NODE.
606 This function performs path compression along the way to finding
607 the representative. */
609 static unsigned int
610 find (unsigned int node)
612 gcc_checking_assert (node < graph->size);
613 if (graph->rep[node] != node)
614 return graph->rep[node] = find (graph->rep[node]);
615 return node;
618 /* Union the TO and FROM nodes into the TO node.
619 Note that at some point in the future, we may want to do
620 union-by-rank, in which case we are going to have to return the
621 node we unified to. */
623 static bool
624 unite (unsigned int to, unsigned int from)
626 gcc_checking_assert (to < graph->size && from < graph->size);
627 if (to != from && graph->rep[from] != to)
629 graph->rep[from] = to;
630 return true;
632 return false;
635 /* Create a new constraint consisting of LHS and RHS expressions. */
637 static constraint_t
638 new_constraint (const struct constraint_expr lhs,
639 const struct constraint_expr rhs)
641 constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
642 ret->lhs = lhs;
643 ret->rhs = rhs;
644 return ret;
647 /* Print out constraint C to FILE. */
649 static void
650 dump_constraint (FILE *file, constraint_t c)
652 if (c->lhs.type == ADDRESSOF)
653 fprintf (file, "&");
654 else if (c->lhs.type == DEREF)
655 fprintf (file, "*");
656 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
657 if (c->lhs.offset == UNKNOWN_OFFSET)
658 fprintf (file, " + UNKNOWN");
659 else if (c->lhs.offset != 0)
660 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
661 fprintf (file, " = ");
662 if (c->rhs.type == ADDRESSOF)
663 fprintf (file, "&");
664 else if (c->rhs.type == DEREF)
665 fprintf (file, "*");
666 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
667 if (c->rhs.offset == UNKNOWN_OFFSET)
668 fprintf (file, " + UNKNOWN");
669 else if (c->rhs.offset != 0)
670 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
674 void debug_constraint (constraint_t);
675 void debug_constraints (void);
676 void debug_constraint_graph (void);
677 void debug_solution_for_var (unsigned int);
678 void debug_sa_points_to_info (void);
680 /* Print out constraint C to stderr. */
682 DEBUG_FUNCTION void
683 debug_constraint (constraint_t c)
685 dump_constraint (stderr, c);
686 fprintf (stderr, "\n");
689 /* Print out all constraints to FILE */
691 static void
692 dump_constraints (FILE *file, int from)
694 int i;
695 constraint_t c;
696 for (i = from; constraints.iterate (i, &c); i++)
697 if (c)
699 dump_constraint (file, c);
700 fprintf (file, "\n");
704 /* Print out all constraints to stderr. */
706 DEBUG_FUNCTION void
707 debug_constraints (void)
709 dump_constraints (stderr, 0);
712 /* Print the constraint graph in dot format. */
714 static void
715 dump_constraint_graph (FILE *file)
717 unsigned int i;
719 /* Only print the graph if it has already been initialized: */
720 if (!graph)
721 return;
723 /* Prints the header of the dot file: */
724 fprintf (file, "strict digraph {\n");
725 fprintf (file, " node [\n shape = box\n ]\n");
726 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
727 fprintf (file, "\n // List of nodes and complex constraints in "
728 "the constraint graph:\n");
730 /* The next lines print the nodes in the graph together with the
731 complex constraints attached to them. */
732 for (i = 1; i < graph->size; i++)
734 if (i == FIRST_REF_NODE)
735 continue;
736 if (find (i) != i)
737 continue;
738 if (i < FIRST_REF_NODE)
739 fprintf (file, "\"%s\"", get_varinfo (i)->name);
740 else
741 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
742 if (graph->complex[i].exists ())
744 unsigned j;
745 constraint_t c;
746 fprintf (file, " [label=\"\\N\\n");
747 for (j = 0; graph->complex[i].iterate (j, &c); ++j)
749 dump_constraint (file, c);
750 fprintf (file, "\\l");
752 fprintf (file, "\"]");
754 fprintf (file, ";\n");
757 /* Go over the edges. */
758 fprintf (file, "\n // Edges in the constraint graph:\n");
759 for (i = 1; i < graph->size; i++)
761 unsigned j;
762 bitmap_iterator bi;
763 if (find (i) != i)
764 continue;
765 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
767 unsigned to = find (j);
768 if (i == to)
769 continue;
770 if (i < FIRST_REF_NODE)
771 fprintf (file, "\"%s\"", get_varinfo (i)->name);
772 else
773 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
774 fprintf (file, " -> ");
775 if (to < FIRST_REF_NODE)
776 fprintf (file, "\"%s\"", get_varinfo (to)->name);
777 else
778 fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
779 fprintf (file, ";\n");
783 /* Prints the tail of the dot file. */
784 fprintf (file, "}\n");
787 /* Print out the constraint graph to stderr. */
789 DEBUG_FUNCTION void
790 debug_constraint_graph (void)
792 dump_constraint_graph (stderr);
795 /* SOLVER FUNCTIONS
797 The solver is a simple worklist solver that works on the following
798 algorithm:
800 sbitmap changed_nodes = all zeroes;
801 changed_count = 0;
802 For each node that is not already collapsed:
803 changed_count++;
804 set bit in changed nodes
806 while (changed_count > 0)
808 compute topological ordering for constraint graph
810 find and collapse cycles in the constraint graph (updating
811 changed if necessary)
813 for each node (n) in the graph in topological order:
814 changed_count--;
816 Process each complex constraint associated with the node,
817 updating changed if necessary.
819 For each outgoing edge from n, propagate the solution from n to
820 the destination of the edge, updating changed as necessary.
822 } */
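/* The following is a minimal, self-contained sketch of the propagation
   loop described above, written against plain arrays instead of the
   solver's bitmaps and constraint graph, and omitting the topological
   ordering, cycle collapsing and complex-constraint processing steps.
   The ex_* and example_* names are illustrative only and are not used
   anywhere else in GCC.  */

#define EX_NODES 3
#define EX_VARS 4

/* ex_succs[n][m] is true if there is a copy edge n -> m.  */
static bool ex_succs[EX_NODES][EX_NODES];
/* ex_sol[n][v] is true if variable v is a member of Sol(n).  */
static bool ex_sol[EX_NODES][EX_VARS];

static void ATTRIBUTE_UNUSED
example_propagate (void)
{
  bool ex_changed[EX_NODES];
  bool any_changed = true;
  unsigned int n, m, v;

  /* Initially every node is considered changed.  */
  for (n = 0; n < EX_NODES; n++)
    ex_changed[n] = true;

  while (any_changed)
    {
      any_changed = false;
      for (n = 0; n < EX_NODES; n++)
        {
          if (!ex_changed[n])
            continue;
          ex_changed[n] = false;

          /* Propagate Sol(n) along every outgoing copy edge n -> m,
             re-queueing m whenever its solution grows.  */
          for (m = 0; m < EX_NODES; m++)
            if (ex_succs[n][m])
              for (v = 0; v < EX_VARS; v++)
                if (ex_sol[n][v] && !ex_sol[m][v])
                  {
                    ex_sol[m][v] = true;
                    ex_changed[m] = true;
                    any_changed = true;
                  }
        }
    }
}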
824 /* Return true if two constraint expressions A and B are equal. */
826 static bool
827 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
829 return a.type == b.type && a.var == b.var && a.offset == b.offset;
832 /* Return true if constraint expression A is less than constraint expression
833 B. This is just arbitrary, but consistent, in order to give them an
834 ordering. */
836 static bool
837 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
839 if (a.type == b.type)
841 if (a.var == b.var)
842 return a.offset < b.offset;
843 else
844 return a.var < b.var;
846 else
847 return a.type < b.type;
850 /* Return true if constraint A is less than constraint B. This is just
851 arbitrary, but consistent, in order to give them an ordering. */
853 static bool
854 constraint_less (const constraint_t &a, const constraint_t &b)
856 if (constraint_expr_less (a->lhs, b->lhs))
857 return true;
858 else if (constraint_expr_less (b->lhs, a->lhs))
859 return false;
860 else
861 return constraint_expr_less (a->rhs, b->rhs);
864 /* Return true if two constraints A and B are equal. */
866 static bool
867 constraint_equal (struct constraint a, struct constraint b)
869 return constraint_expr_equal (a.lhs, b.lhs)
870 && constraint_expr_equal (a.rhs, b.rhs);
874 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
876 static constraint_t
877 constraint_vec_find (vec<constraint_t> vec,
878 struct constraint lookfor)
880 unsigned int place;
881 constraint_t found;
883 if (!vec.exists ())
884 return NULL;
886 place = vec.lower_bound (&lookfor, constraint_less);
887 if (place >= vec.length ())
888 return NULL;
889 found = vec[place];
890 if (!constraint_equal (*found, lookfor))
891 return NULL;
892 return found;
895 /* Union two constraint vectors, TO and FROM. Put the result in TO.
896 Returns true if the TO set is changed. */
898 static bool
899 constraint_set_union (vec<constraint_t> *to,
900 vec<constraint_t> *from)
902 int i;
903 constraint_t c;
904 bool any_change = false;
906 FOR_EACH_VEC_ELT (*from, i, c)
908 if (constraint_vec_find (*to, *c) == NULL)
910 unsigned int place = to->lower_bound (c, constraint_less);
911 to->safe_insert (place, c);
912 any_change = true;
915 return any_change;
918 /* Expands the solution in SET to all sub-fields of variables included. */
920 static void
921 solution_set_expand (bitmap set)
923 bitmap_iterator bi;
924 unsigned j;
926 /* In a first pass expand to the head of the variables we need to
927 add all sub-fields of. This avoids quadratic behavior. */
928 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
930 varinfo_t v = get_varinfo (j);
931 if (v->is_artificial_var
932 || v->is_full_var)
933 continue;
934 bitmap_set_bit (set, v->head);
937 /* In the second pass now expand all head variables with subfields. */
938 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
940 varinfo_t v = get_varinfo (j);
941 if (v->is_artificial_var
942 || v->is_full_var
943 || v->head != j)
944 continue;
945 for (v = vi_next (v); v != NULL; v = vi_next (v))
946 bitmap_set_bit (set, v->id);
950 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
951 process. */
953 static bool
954 set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc)
956 bool changed = false;
957 bitmap_iterator bi;
958 unsigned int i;
960 /* If the solution of FROM contains anything it is good enough to transfer
961 this to TO. */
962 if (bitmap_bit_p (from, anything_id))
963 return bitmap_set_bit (to, anything_id);
965 /* For zero offset simply union the solution into the destination. */
966 if (inc == 0)
967 return bitmap_ior_into (to, from);
969 /* If the offset is unknown we have to expand the solution to
970 all subfields. */
971 if (inc == UNKNOWN_OFFSET)
973 bitmap tmp = BITMAP_ALLOC (&iteration_obstack);
974 bitmap_copy (tmp, from);
975 solution_set_expand (tmp);
976 changed |= bitmap_ior_into (to, tmp);
977 BITMAP_FREE (tmp);
978 return changed;
981 /* For non-zero offset union the offsetted solution into the destination. */
982 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
984 varinfo_t vi = get_varinfo (i);
986 /* If this is a variable with just one field just set its bit
987 in the result. */
988 if (vi->is_artificial_var
989 || vi->is_unknown_size_var
990 || vi->is_full_var)
991 changed |= bitmap_set_bit (to, i);
992 else
994 unsigned HOST_WIDE_INT fieldoffset = vi->offset + inc;
996 /* If the offset makes the pointer point to before the
997 variable use offset zero for the field lookup. */
998 if (inc < 0
999 && fieldoffset > vi->offset)
1000 fieldoffset = 0;
1002 vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
1004 changed |= bitmap_set_bit (to, vi->id);
1005 /* If the result is not exactly at fieldoffset include the next
1006 field as well. See get_constraint_for_ptr_offset for more
1007 rationale. */
1008 if (vi->offset != fieldoffset
1009 && vi->next != 0)
1010 changed |= bitmap_set_bit (to, vi->next);
1014 return changed;
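/* A hand-worked example of the above (editorial illustration), using
   the foo layout from the overview comment where foo.a is at offset 0
   and foo.b at offset 32: unioning a FROM set of { foo.a } into TO
   with INC == 32 looks up the field of foo at offset 0 + 32 and so
   adds foo.b to TO, whereas INC == 0 would simply add foo.a itself.  */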
1017 /* Insert constraint C into the list of complex constraints for graph
1018 node VAR. */
1020 static void
1021 insert_into_complex (constraint_graph_t graph,
1022 unsigned int var, constraint_t c)
1024 vec<constraint_t> complex = graph->complex[var];
1025 unsigned int place = complex.lower_bound (c, constraint_less);
1027 /* Only insert constraints that do not already exist. */
1028 if (place >= complex.length ()
1029 || !constraint_equal (*c, *complex[place]))
1030 graph->complex[var].safe_insert (place, c);
1034 /* Condense two variable nodes into a single variable node, by moving
1035 all associated info from FROM to TO. Returns true if TO node's
1036 constraint set changes after the merge. */
1038 static bool
1039 merge_node_constraints (constraint_graph_t graph, unsigned int to,
1040 unsigned int from)
1042 unsigned int i;
1043 constraint_t c;
1044 bool any_change = false;
1046 gcc_checking_assert (find (from) == to);
1048 /* Move all complex constraints from src node into to node */
1049 FOR_EACH_VEC_ELT (graph->complex[from], i, c)
1051 /* In complex constraints for node FROM, we may have either
1052 a = *FROM, *FROM = a, or an offsetted constraint, which is
1053 always added to the rhs node's constraints. */
1055 if (c->rhs.type == DEREF)
1056 c->rhs.var = to;
1057 else if (c->lhs.type == DEREF)
1058 c->lhs.var = to;
1059 else
1060 c->rhs.var = to;
1063 any_change = constraint_set_union (&graph->complex[to],
1064 &graph->complex[from]);
1065 graph->complex[from].release ();
1066 return any_change;
1070 /* Remove edges involving NODE from GRAPH. */
1072 static void
1073 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1075 if (graph->succs[node])
1076 BITMAP_FREE (graph->succs[node]);
1079 /* Merge GRAPH nodes FROM and TO into node TO. */
1081 static void
1082 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1083 unsigned int from)
1085 if (graph->indirect_cycles[from] != -1)
1087 /* If we have indirect cycles with the from node, and we have
1088 none on the to node, the to node has indirect cycles from the
1089 from node now that they are unified.
1090 If indirect cycles exist on both, unify the nodes that they
1091 are in a cycle with, since we know they are in a cycle with
1092 each other. */
1093 if (graph->indirect_cycles[to] == -1)
1094 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1097 /* Merge all the successor edges. */
1098 if (graph->succs[from])
1100 if (!graph->succs[to])
1101 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1102 bitmap_ior_into (graph->succs[to],
1103 graph->succs[from]);
1106 clear_edges_for_node (graph, from);
1110 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1111 it doesn't exist in the graph already. */
1113 static void
1114 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1115 unsigned int from)
1117 if (to == from)
1118 return;
1120 if (!graph->implicit_preds[to])
1121 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1123 if (bitmap_set_bit (graph->implicit_preds[to], from))
1124 stats.num_implicit_edges++;
1127 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1128 it doesn't exist in the graph already. */
1131 static void
1132 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1133 unsigned int from)
1135 if (!graph->preds[to])
1136 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1137 bitmap_set_bit (graph->preds[to], from);
1140 /* Add a graph edge to GRAPH, going from FROM to TO if
1141 it doesn't exist in the graph already.
1142 Return false if the edge already existed, true otherwise. */
1144 static bool
1145 add_graph_edge (constraint_graph_t graph, unsigned int to,
1146 unsigned int from)
1148 if (to == from)
1150 return false;
1152 else
1154 bool r = false;
1156 if (!graph->succs[from])
1157 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1158 if (bitmap_set_bit (graph->succs[from], to))
1160 r = true;
1161 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1162 stats.num_edges++;
1164 return r;
1169 /* Initialize the constraint graph structure to contain SIZE nodes. */
1171 static void
1172 init_graph (unsigned int size)
1174 unsigned int j;
1176 graph = XCNEW (struct constraint_graph);
1177 graph->size = size;
1178 graph->succs = XCNEWVEC (bitmap, graph->size);
1179 graph->indirect_cycles = XNEWVEC (int, graph->size);
1180 graph->rep = XNEWVEC (unsigned int, graph->size);
1181 /* ??? Macros do not support template types with multiple arguments,
1182 so we use a typedef to work around it. */
1183 typedef vec<constraint_t> vec_constraint_t_heap;
1184 graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1185 graph->pe = XCNEWVEC (unsigned int, graph->size);
1186 graph->pe_rep = XNEWVEC (int, graph->size);
1188 for (j = 0; j < graph->size; j++)
1190 graph->rep[j] = j;
1191 graph->pe_rep[j] = -1;
1192 graph->indirect_cycles[j] = -1;
1196 /* Build the constraint graph, adding only predecessor edges right now. */
1198 static void
1199 build_pred_graph (void)
1201 int i;
1202 constraint_t c;
1203 unsigned int j;
1205 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1206 graph->preds = XCNEWVEC (bitmap, graph->size);
1207 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1208 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1209 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1210 graph->points_to = XCNEWVEC (bitmap, graph->size);
1211 graph->eq_rep = XNEWVEC (int, graph->size);
1212 graph->direct_nodes = sbitmap_alloc (graph->size);
1213 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1214 bitmap_clear (graph->direct_nodes);
1216 for (j = 1; j < FIRST_REF_NODE; j++)
1218 if (!get_varinfo (j)->is_special_var)
1219 bitmap_set_bit (graph->direct_nodes, j);
1222 for (j = 0; j < graph->size; j++)
1223 graph->eq_rep[j] = -1;
1225 for (j = 0; j < varmap.length (); j++)
1226 graph->indirect_cycles[j] = -1;
1228 FOR_EACH_VEC_ELT (constraints, i, c)
1230 struct constraint_expr lhs = c->lhs;
1231 struct constraint_expr rhs = c->rhs;
1232 unsigned int lhsvar = lhs.var;
1233 unsigned int rhsvar = rhs.var;
1235 if (lhs.type == DEREF)
1237 /* *x = y. */
1238 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1239 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1241 else if (rhs.type == DEREF)
1243 /* x = *y */
1244 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1245 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1246 else
1247 bitmap_clear_bit (graph->direct_nodes, lhsvar);
1249 else if (rhs.type == ADDRESSOF)
1251 varinfo_t v;
1253 /* x = &y */
1254 if (graph->points_to[lhsvar] == NULL)
1255 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1256 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1258 if (graph->pointed_by[rhsvar] == NULL)
1259 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1260 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1262 /* Implicitly, *x = y */
1263 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1265 /* All related variables are no longer direct nodes. */
1266 bitmap_clear_bit (graph->direct_nodes, rhsvar);
1267 v = get_varinfo (rhsvar);
1268 if (!v->is_full_var)
1270 v = get_varinfo (v->head);
1273 bitmap_clear_bit (graph->direct_nodes, v->id);
1274 v = vi_next (v);
1276 while (v != NULL);
1278 bitmap_set_bit (graph->address_taken, rhsvar);
1280 else if (lhsvar > anything_id
1281 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1283 /* x = y */
1284 add_pred_graph_edge (graph, lhsvar, rhsvar);
1285 /* Implicitly, *x = *y */
1286 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1287 FIRST_REF_NODE + rhsvar);
1289 else if (lhs.offset != 0 || rhs.offset != 0)
1291 if (rhs.offset != 0)
1292 bitmap_clear_bit (graph->direct_nodes, lhs.var);
1293 else if (lhs.offset != 0)
1294 bitmap_clear_bit (graph->direct_nodes, rhs.var);
1299 /* Build the constraint graph, adding successor edges. */
1301 static void
1302 build_succ_graph (void)
1304 unsigned i, t;
1305 constraint_t c;
1307 FOR_EACH_VEC_ELT (constraints, i, c)
1309 struct constraint_expr lhs;
1310 struct constraint_expr rhs;
1311 unsigned int lhsvar;
1312 unsigned int rhsvar;
1314 if (!c)
1315 continue;
1317 lhs = c->lhs;
1318 rhs = c->rhs;
1319 lhsvar = find (lhs.var);
1320 rhsvar = find (rhs.var);
1322 if (lhs.type == DEREF)
1324 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1325 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1327 else if (rhs.type == DEREF)
1329 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1330 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1332 else if (rhs.type == ADDRESSOF)
1334 /* x = &y */
1335 gcc_checking_assert (find (rhs.var) == rhs.var);
1336 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1338 else if (lhsvar > anything_id
1339 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1341 add_graph_edge (graph, lhsvar, rhsvar);
1345 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1346 receive pointers. */
1347 t = find (storedanything_id);
1348 for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1350 if (!bitmap_bit_p (graph->direct_nodes, i)
1351 && get_varinfo (i)->may_have_pointers)
1352 add_graph_edge (graph, find (i), t);
1355 /* Everything stored to ANYTHING also potentially escapes. */
1356 add_graph_edge (graph, find (escaped_id), t);
1360 /* Changed variables on the last iteration. */
1361 static bitmap changed;
1363 /* Strongly Connected Component visitation info. */
1365 struct scc_info
1367 sbitmap visited;
1368 sbitmap deleted;
1369 unsigned int *dfs;
1370 unsigned int *node_mapping;
1371 int current_index;
1372 vec<unsigned> scc_stack;
1376 /* Recursive routine to find strongly connected components in GRAPH.
1377 SI is the SCC info to store the information in, and N is the id of current
1378 graph node we are processing.
1380 This is Tarjan's strongly connected component finding algorithm, as
1381 modified by Nuutila to keep only non-root nodes on the stack.
1382 The algorithm can be found in "On finding the strongly
1383 connected components in a directed graph" by Esko Nuutila and Eljas
1384 Soisalon-Soininen, in Information Processing Letters volume 49,
1385 number 1, pages 9-14. */
1387 static void
1388 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1390 unsigned int i;
1391 bitmap_iterator bi;
1392 unsigned int my_dfs;
1394 bitmap_set_bit (si->visited, n);
1395 si->dfs[n] = si->current_index ++;
1396 my_dfs = si->dfs[n];
1398 /* Visit all the successors. */
1399 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1401 unsigned int w;
1403 if (i > LAST_REF_NODE)
1404 break;
1406 w = find (i);
1407 if (bitmap_bit_p (si->deleted, w))
1408 continue;
1410 if (!bitmap_bit_p (si->visited, w))
1411 scc_visit (graph, si, w);
1413 unsigned int t = find (w);
1414 gcc_checking_assert (find (n) == n);
1415 if (si->dfs[t] < si->dfs[n])
1416 si->dfs[n] = si->dfs[t];
1419 /* See if any components have been identified. */
1420 if (si->dfs[n] == my_dfs)
1422 if (si->scc_stack.length () > 0
1423 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1425 bitmap scc = BITMAP_ALLOC (NULL);
1426 unsigned int lowest_node;
1427 bitmap_iterator bi;
1429 bitmap_set_bit (scc, n);
1431 while (si->scc_stack.length () != 0
1432 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1434 unsigned int w = si->scc_stack.pop ();
1436 bitmap_set_bit (scc, w);
1439 lowest_node = bitmap_first_set_bit (scc);
1440 gcc_assert (lowest_node < FIRST_REF_NODE);
1442 /* Collapse the SCC nodes into a single node, and mark the
1443 indirect cycles. */
1444 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1446 if (i < FIRST_REF_NODE)
1448 if (unite (lowest_node, i))
1449 unify_nodes (graph, lowest_node, i, false);
1451 else
1453 unite (lowest_node, i);
1454 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1458 bitmap_set_bit (si->deleted, n);
1460 else
1461 si->scc_stack.safe_push (n);
1464 /* Unify node FROM into node TO, updating the changed count if
1465 necessary when UPDATE_CHANGED is true. */
1467 static void
1468 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1469 bool update_changed)
1471 gcc_checking_assert (to != from && find (to) == to);
1473 if (dump_file && (dump_flags & TDF_DETAILS))
1474 fprintf (dump_file, "Unifying %s to %s\n",
1475 get_varinfo (from)->name,
1476 get_varinfo (to)->name);
1478 if (update_changed)
1479 stats.unified_vars_dynamic++;
1480 else
1481 stats.unified_vars_static++;
1483 merge_graph_nodes (graph, to, from);
1484 if (merge_node_constraints (graph, to, from))
1486 if (update_changed)
1487 bitmap_set_bit (changed, to);
1490 /* Mark TO as changed if FROM was changed. */
1493 if (update_changed
1494 && bitmap_clear_bit (changed, from))
1495 bitmap_set_bit (changed, to);
1496 varinfo_t fromvi = get_varinfo (from);
1497 if (fromvi->solution)
1499 /* If the solution changes because of the merging, we need to mark
1500 the variable as changed. */
1501 varinfo_t tovi = get_varinfo (to);
1502 if (bitmap_ior_into (tovi->solution, fromvi->solution))
1504 if (update_changed)
1505 bitmap_set_bit (changed, to);
1508 BITMAP_FREE (fromvi->solution);
1509 if (fromvi->oldsolution)
1510 BITMAP_FREE (fromvi->oldsolution);
1512 if (stats.iterations > 0
1513 && tovi->oldsolution)
1514 BITMAP_FREE (tovi->oldsolution);
1516 if (graph->succs[to])
1517 bitmap_clear_bit (graph->succs[to], to);
1520 /* Information needed to compute the topological ordering of a graph. */
1522 struct topo_info
1524 /* sbitmap of visited nodes. */
1525 sbitmap visited;
1526 /* Array that stores the topological order of the graph, *in
1527 reverse*. */
1528 vec<unsigned> topo_order;
1532 /* Initialize and return a topological info structure. */
1534 static struct topo_info *
1535 init_topo_info (void)
1537 size_t size = graph->size;
1538 struct topo_info *ti = XNEW (struct topo_info);
1539 ti->visited = sbitmap_alloc (size);
1540 bitmap_clear (ti->visited);
1541 ti->topo_order.create (1);
1542 return ti;
1546 /* Free the topological sort info pointed to by TI. */
1548 static void
1549 free_topo_info (struct topo_info *ti)
1551 sbitmap_free (ti->visited);
1552 ti->topo_order.release ();
1553 free (ti);
1556 /* Visit the graph in topological order, and store the order in the
1557 topo_info structure. */
1559 static void
1560 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1561 unsigned int n)
1563 bitmap_iterator bi;
1564 unsigned int j;
1566 bitmap_set_bit (ti->visited, n);
1568 if (graph->succs[n])
1569 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1571 if (!bitmap_bit_p (ti->visited, j))
1572 topo_visit (graph, ti, j);
1575 ti->topo_order.safe_push (n);
1578 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1579 starting solution for y. */
1581 static void
1582 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1583 bitmap delta)
1585 unsigned int lhs = c->lhs.var;
1586 bool flag = false;
1587 bitmap sol = get_varinfo (lhs)->solution;
1588 unsigned int j;
1589 bitmap_iterator bi;
1590 HOST_WIDE_INT roffset = c->rhs.offset;
1592 /* Our IL does not allow this. */
1593 gcc_checking_assert (c->lhs.offset == 0);
1595 /* If the solution of Y contains anything it is good enough to transfer
1596 this to the LHS. */
1597 if (bitmap_bit_p (delta, anything_id))
1599 flag |= bitmap_set_bit (sol, anything_id);
1600 goto done;
1603 /* If we do not know at which offset the rhs is dereferenced, compute
1604 the reachability set of DELTA, conservatively assuming it is
1605 dereferenced at all valid offsets. */
1606 if (roffset == UNKNOWN_OFFSET)
1608 solution_set_expand (delta);
1609 /* No further offset processing is necessary. */
1610 roffset = 0;
1613 /* For each variable j in delta (Sol(y)), add
1614 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1615 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1617 varinfo_t v = get_varinfo (j);
1618 HOST_WIDE_INT fieldoffset = v->offset + roffset;
1619 unsigned int t;
1621 if (v->is_full_var)
1622 fieldoffset = v->offset;
1623 else if (roffset != 0)
1624 v = first_vi_for_offset (v, fieldoffset);
1625 /* If the access is outside of the variable we can ignore it. */
1626 if (!v)
1627 continue;
1631 t = find (v->id);
1633 /* Adding edges from the special vars is pointless.
1634 They don't have sets that can change. */
1635 if (get_varinfo (t)->is_special_var)
1636 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1637 /* Merging the solution from ESCAPED needlessly increases
1638 the set. Use ESCAPED as representative instead. */
1639 else if (v->id == escaped_id)
1640 flag |= bitmap_set_bit (sol, escaped_id);
1641 else if (v->may_have_pointers
1642 && add_graph_edge (graph, lhs, t))
1643 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1645 /* If the variable is not exactly at the requested offset
1646 we have to include the next one. */
1647 if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1648 || v->next == 0)
1649 break;
1651 v = vi_next (v);
1652 fieldoffset = v->offset;
1654 while (1);
1657 done:
1658 /* If the LHS solution changed, mark the var as changed. */
1659 if (flag)
1661 get_varinfo (lhs)->solution = sol;
1662 bitmap_set_bit (changed, lhs);
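/* A hand-worked illustration of the above (editorial example, again
   using the foo layout from the overview comment): for x = *(y + 32)
   with Sol(y) = { foo.a }, the member foo.a offset by 32 bits resolves
   to foo.b, so an edge from foo.b to x is added and Sol(foo.b) is
   unioned into Sol(x).  */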
1666 /* Process a constraint C that represents *(x + off) = y using DELTA
1667 as the starting solution for x. */
1669 static void
1670 do_ds_constraint (constraint_t c, bitmap delta)
1672 unsigned int rhs = c->rhs.var;
1673 bitmap sol = get_varinfo (rhs)->solution;
1674 unsigned int j;
1675 bitmap_iterator bi;
1676 HOST_WIDE_INT loff = c->lhs.offset;
1677 bool escaped_p = false;
1679 /* Our IL does not allow this. */
1680 gcc_checking_assert (c->rhs.offset == 0);
1682 /* If the solution of y contains ANYTHING simply use the ANYTHING
1683 solution. This avoids needlessly increasing the points-to sets. */
1684 if (bitmap_bit_p (sol, anything_id))
1685 sol = get_varinfo (find (anything_id))->solution;
1687 /* If the solution for x contains ANYTHING we have to merge the
1688 solution of y into all pointer variables which we do via
1689 STOREDANYTHING. */
1690 if (bitmap_bit_p (delta, anything_id))
1692 unsigned t = find (storedanything_id);
1693 if (add_graph_edge (graph, t, rhs))
1695 if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1696 bitmap_set_bit (changed, t);
1698 return;
1701 /* If we do not know at which offset the rhs is dereferenced, compute
1702 the reachability set of DELTA, conservatively assuming it is
1703 dereferenced at all valid offsets. */
1704 if (loff == UNKNOWN_OFFSET)
1706 solution_set_expand (delta);
1707 loff = 0;
1710 /* For each member j of delta (Sol(x)), add an edge from y to j and
1711 union Sol(y) into Sol(j) */
1712 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1714 varinfo_t v = get_varinfo (j);
1715 unsigned int t;
1716 HOST_WIDE_INT fieldoffset = v->offset + loff;
1718 if (v->is_full_var)
1719 fieldoffset = v->offset;
1720 else if (loff != 0)
1721 v = first_vi_for_offset (v, fieldoffset);
1722 /* If the access is outside of the variable we can ignore it. */
1723 if (!v)
1724 continue;
1728 if (v->may_have_pointers)
1730 /* If v is a global variable then this is an escape point. */
1731 if (v->is_global_var
1732 && !escaped_p)
1734 t = find (escaped_id);
1735 if (add_graph_edge (graph, t, rhs)
1736 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1737 bitmap_set_bit (changed, t);
1738 /* Enough to let rhs escape once. */
1739 escaped_p = true;
1742 if (v->is_special_var)
1743 break;
1745 t = find (v->id);
1746 if (add_graph_edge (graph, t, rhs)
1747 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1748 bitmap_set_bit (changed, t);
1751 /* If the variable is not exactly at the requested offset
1752 we have to include the next one. */
1753 if (v->offset == (unsigned HOST_WIDE_INT)fieldoffset
1754 || v->next == 0)
1755 break;
1757 v = vi_next (v);
1758 fieldoffset = v->offset;
1760 while (1);
1764 /* Handle a non-simple constraint (simple meaning it requires no
1765 iteration), IE *x = &y, x = *y, *x = y, and x = y with offsets involved. */
1767 static void
1768 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1770 if (c->lhs.type == DEREF)
1772 if (c->rhs.type == ADDRESSOF)
1774 gcc_unreachable ();
1776 else
1778 /* *x = y */
1779 do_ds_constraint (c, delta);
1782 else if (c->rhs.type == DEREF)
1784 /* x = *y */
1785 if (!(get_varinfo (c->lhs.var)->is_special_var))
1786 do_sd_constraint (graph, c, delta);
1788 else
1790 bitmap tmp;
1791 bitmap solution;
1792 bool flag = false;
1794 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1795 solution = get_varinfo (c->rhs.var)->solution;
1796 tmp = get_varinfo (c->lhs.var)->solution;
1798 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1800 if (flag)
1801 bitmap_set_bit (changed, c->lhs.var);
1805 /* Initialize and return a new SCC info structure. */
1807 static struct scc_info *
1808 init_scc_info (size_t size)
1810 struct scc_info *si = XNEW (struct scc_info);
1811 size_t i;
1813 si->current_index = 0;
1814 si->visited = sbitmap_alloc (size);
1815 bitmap_clear (si->visited);
1816 si->deleted = sbitmap_alloc (size);
1817 bitmap_clear (si->deleted);
1818 si->node_mapping = XNEWVEC (unsigned int, size);
1819 si->dfs = XCNEWVEC (unsigned int, size);
1821 for (i = 0; i < size; i++)
1822 si->node_mapping[i] = i;
1824 si->scc_stack.create (1);
1825 return si;
1828 /* Free an SCC info structure pointed to by SI */
1830 static void
1831 free_scc_info (struct scc_info *si)
1833 sbitmap_free (si->visited);
1834 sbitmap_free (si->deleted);
1835 free (si->node_mapping);
1836 free (si->dfs);
1837 si->scc_stack.release ();
1838 free (si);
1842 /* Find indirect cycles in GRAPH that occur, using strongly connected
1843 components, and note them in the indirect cycles map.
1845 This technique comes from Ben Hardekopf and Calvin Lin,
1846 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1847 Lines of Code", submitted to PLDI 2007. */
1849 static void
1850 find_indirect_cycles (constraint_graph_t graph)
1852 unsigned int i;
1853 unsigned int size = graph->size;
1854 struct scc_info *si = init_scc_info (size);
1856 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1857 if (!bitmap_bit_p (si->visited, i) && find (i) == i)
1858 scc_visit (graph, si, i);
1860 free_scc_info (si);
1863 /* Compute a topological ordering for GRAPH, and store the result in the
1864 topo_info structure TI. */
1866 static void
1867 compute_topo_order (constraint_graph_t graph,
1868 struct topo_info *ti)
1870 unsigned int i;
1871 unsigned int size = graph->size;
1873 for (i = 0; i != size; ++i)
1874 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1875 topo_visit (graph, ti, i);
1878 /* Structure used for hash value numbering of pointer equivalence
1879 classes. */
1881 typedef struct equiv_class_label
1883 hashval_t hashcode;
1884 unsigned int equivalence_class;
1885 bitmap labels;
1886 } *equiv_class_label_t;
1887 typedef const struct equiv_class_label *const_equiv_class_label_t;
1889 /* Equiv_class_label hashtable helpers. */
1891 struct equiv_class_hasher : typed_free_remove <equiv_class_label>
1893 typedef equiv_class_label value_type;
1894 typedef equiv_class_label compare_type;
1895 static inline hashval_t hash (const value_type *);
1896 static inline bool equal (const value_type *, const compare_type *);
1899 /* Hash function for an equiv_class_label_t */
1901 inline hashval_t
1902 equiv_class_hasher::hash (const value_type *ecl)
1904 return ecl->hashcode;
1907 /* Equality function for two equiv_class_label_t's. */
1909 inline bool
1910 equiv_class_hasher::equal (const value_type *eql1, const compare_type *eql2)
1912 return (eql1->hashcode == eql2->hashcode
1913 && bitmap_equal_p (eql1->labels, eql2->labels));
1916 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1917 classes. */
1918 static hash_table <equiv_class_hasher> pointer_equiv_class_table;
1920 /* A hashtable for mapping a bitmap of labels->location equivalence
1921 classes. */
1922 static hash_table <equiv_class_hasher> location_equiv_class_table;
1924 /* Lookup the equivalence class in TABLE keyed on the bitmap LABELS,
1925 creating a new entry with a zero equivalence class if none
1926 exists, and return it. */
1928 static equiv_class_label *
1929 equiv_class_lookup_or_add (hash_table <equiv_class_hasher> table, bitmap labels)
1931 equiv_class_label **slot;
1932 equiv_class_label ecl;
1934 ecl.labels = labels;
1935 ecl.hashcode = bitmap_hash (labels);
1936 slot = table.find_slot_with_hash (&ecl, ecl.hashcode, INSERT);
1937 if (!*slot)
1939 *slot = XNEW (struct equiv_class_label);
1940 (*slot)->labels = labels;
1941 (*slot)->hashcode = ecl.hashcode;
1942 (*slot)->equivalence_class = 0;
1945 return *slot;
1948 /* Perform offline variable substitution.
1950 This is a worst case quadratic time way of identifying variables
1951 that must have equivalent points-to sets, including those caused by
1952 static cycles, and single entry subgraphs, in the constraint graph.
1954 The technique is described in "Exploiting Pointer and Location
1955 Equivalence to Optimize Pointer Analysis. In the 14th International
1956 Static Analysis Symposium (SAS), August 2007." It is known as the
1957 "HU" algorithm, and is equivalent to value numbering the collapsed
1958 constraint graph including evaluating unions.
1960 The general method of finding equivalence classes is as follows:
1961 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1962 Initialize all non-REF nodes to be direct nodes.
1963 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1964 variable}
1965 For each constraint containing the dereference, we also do the same
1966 thing.
1968 We then compute SCC's in the graph and unify nodes in the same SCC,
1969 including pts sets.
1971 For each non-collapsed node x:
1972 Visit all unvisited explicit incoming edges.
1973 Ignoring all non-pointers, set pts(x) = Union of pts(y) for all y
1974 where y->x.
1975 Lookup the equivalence class for pts(x).
1976 If we found one, equivalence_class(x) = found class.
1977 Otherwise, equivalence_class(x) = new class, and new_class is
1978 added to the lookup table.
1980 All direct nodes with the same equivalence class can be replaced
1981 with a single representative node.
1982 All unlabeled nodes (label == 0) are not pointers and all edges
1983 involving them can be eliminated.
1984 We perform these optimizations during rewrite_constraints.
1986 In addition to pointer equivalence class finding, we also perform
1987 location equivalence class finding. This is the set of variables
1988 that always appear together in points-to sets. We use this to
1989 compress the size of the points-to sets. */
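/* As a rough, simplified illustration of the pointer equivalence part
   (a sketch, not an example from the paper): given

     a = &x;   b = a;   c = b;

   the label pass sees that b and c only ever receive the points-to
   bits of a, so all three end up with the same pointer equivalence
   label, and b and c can be collapsed into a's representative before
   the main solver runs.  A variable that keeps label zero never
   points to anything, so every constraint and edge mentioning it can
   simply be dropped.  */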
1991 /* Current maximum pointer equivalence class id. */
1992 static int pointer_equiv_class;
1994 /* Current maximum location equivalence class id. */
1995 static int location_equiv_class;
1997 /* Recursive routine to find strongly connected components in GRAPH,
1998 and label its nodes with DFS numbers. */
2000 static void
2001 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2003 unsigned int i;
2004 bitmap_iterator bi;
2005 unsigned int my_dfs;
2007 gcc_checking_assert (si->node_mapping[n] == n);
2008 bitmap_set_bit (si->visited, n);
2009 si->dfs[n] = si->current_index ++;
2010 my_dfs = si->dfs[n];
2012 /* Visit all the successors. */
2013 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2015 unsigned int w = si->node_mapping[i];
2017 if (bitmap_bit_p (si->deleted, w))
2018 continue;
2020 if (!bitmap_bit_p (si->visited, w))
2021 condense_visit (graph, si, w);
2023 unsigned int t = si->node_mapping[w];
2024 gcc_checking_assert (si->node_mapping[n] == n);
2025 if (si->dfs[t] < si->dfs[n])
2026 si->dfs[n] = si->dfs[t];
2029 /* Visit all the implicit predecessors. */
2030 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2032 unsigned int w = si->node_mapping[i];
2034 if (bitmap_bit_p (si->deleted, w))
2035 continue;
2037 if (!bitmap_bit_p (si->visited, w))
2038 condense_visit (graph, si, w);
2040 unsigned int t = si->node_mapping[w];
2041 gcc_assert (si->node_mapping[n] == n);
2042 if (si->dfs[t] < si->dfs[n])
2043 si->dfs[n] = si->dfs[t];
2046 /* See if any components have been identified. */
2047 if (si->dfs[n] == my_dfs)
2049 while (si->scc_stack.length () != 0
2050 && si->dfs[si->scc_stack.last ()] >= my_dfs)
2052 unsigned int w = si->scc_stack.pop ();
2053 si->node_mapping[w] = n;
2055 if (!bitmap_bit_p (graph->direct_nodes, w))
2056 bitmap_clear_bit (graph->direct_nodes, n);
2058 /* Unify our nodes. */
2059 if (graph->preds[w])
2061 if (!graph->preds[n])
2062 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2063 bitmap_ior_into (graph->preds[n], graph->preds[w]);
2065 if (graph->implicit_preds[w])
2067 if (!graph->implicit_preds[n])
2068 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2069 bitmap_ior_into (graph->implicit_preds[n],
2070 graph->implicit_preds[w]);
2072 if (graph->points_to[w])
2074 if (!graph->points_to[n])
2075 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2076 bitmap_ior_into (graph->points_to[n],
2077 graph->points_to[w]);
2080 bitmap_set_bit (si->deleted, n);
2082 else
2083 si->scc_stack.safe_push (n);
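/* A small sketch of the condensation above (illustrative only): the
   copy constraints a = b; b = a; produce predecessor edges in both
   directions, so a and b form one SCC.  The pop loop then maps one
   of them onto the other through si->node_mapping, unions their
   preds, implicit_preds and points_to bitmaps, and clears the
   representative's direct bit if any member was not direct.  */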
2086 /* Label pointer equivalences.
2088 This performs a value numbering of the constraint graph to
2089 discover which variables will always have the same points-to sets
2090 under the current set of constraints.
2092 The way it value numbers is to store the set of points-to bits
2093 generated by the constraints and graph edges. This is just used as a
2094 hash and equality comparison. The *actual set of points-to bits* is
2095 completely irrelevant, in that we don't care about being able to
2096 extract them later.
2098 The equality values (currently bitmaps) just have to satisfy a few
2099 constraints, the main ones being:
2100 1. The combining operation must be order independent.
2101 2. The end result of a given set of operations must be unique iff the
2102 combination of input values is unique.
2103 3. The values must be hashable. */
2105 static void
2106 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2108 unsigned int i, first_pred;
2109 bitmap_iterator bi;
2111 bitmap_set_bit (si->visited, n);
2113 /* Label and union our incoming edges' points-to sets. */
2114 first_pred = -1U;
2115 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2117 unsigned int w = si->node_mapping[i];
2118 if (!bitmap_bit_p (si->visited, w))
2119 label_visit (graph, si, w);
2121 /* Skip unused edges. */
2122 if (w == n || graph->pointer_label[w] == 0)
2123 continue;
2125 if (graph->points_to[w])
2127 if (!graph->points_to[n])
2129 if (first_pred == -1U)
2130 first_pred = w;
2131 else
2133 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2134 bitmap_ior (graph->points_to[n],
2135 graph->points_to[first_pred],
2136 graph->points_to[w]);
2139 else
2140 bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
2144 /* Indirect nodes get fresh variables and a new pointer equiv class. */
2145 if (!bitmap_bit_p (graph->direct_nodes, n))
2147 if (!graph->points_to[n])
2149 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2150 if (first_pred != -1U)
2151 bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
2153 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2154 graph->pointer_label[n] = pointer_equiv_class++;
2155 equiv_class_label_t ecl;
2156 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2157 graph->points_to[n]);
2158 ecl->equivalence_class = graph->pointer_label[n];
2159 return;
2162 /* If there was only a single non-empty predecessor the pointer equiv
2163 class is the same. */
2164 if (!graph->points_to[n])
2166 if (first_pred != -1U)
2168 graph->pointer_label[n] = graph->pointer_label[first_pred];
2169 graph->points_to[n] = graph->points_to[first_pred];
2171 return;
2174 if (!bitmap_empty_p (graph->points_to[n]))
2176 equiv_class_label_t ecl;
2177 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2178 graph->points_to[n]);
2179 if (ecl->equivalence_class == 0)
2180 ecl->equivalence_class = pointer_equiv_class++;
2181 else
2183 BITMAP_FREE (graph->points_to[n]);
2184 graph->points_to[n] = ecl->labels;
2186 graph->pointer_label[n] = ecl->equivalence_class;
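/* A sketch of the labeling above (illustrative only): with a = &x,
   b = a and c = a, both b and c have a as their single non-empty
   predecessor, so they share a's points-to bits and therefore a's
   pointer equivalence label.  An indirect node instead gets a fresh
   FIRST_REF_NODE + n bit added to its set, which guarantees it a
   label of its own.  */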
2190 /* Print the pred graph in dot format. */
2192 static void
2193 dump_pred_graph (struct scc_info *si, FILE *file)
2195 unsigned int i;
2197 /* Only print the graph if it has already been initialized: */
2198 if (!graph)
2199 return;
2201 /* Prints the header of the dot file: */
2202 fprintf (file, "strict digraph {\n");
2203 fprintf (file, " node [\n shape = box\n ]\n");
2204 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
2205 fprintf (file, "\n // List of nodes and complex constraints in "
2206 "the constraint graph:\n");
2208 /* The next lines print the nodes in the graph together with the
2209 complex constraints attached to them. */
2210 for (i = 1; i < graph->size; i++)
2212 if (i == FIRST_REF_NODE)
2213 continue;
2214 if (si->node_mapping[i] != i)
2215 continue;
2216 if (i < FIRST_REF_NODE)
2217 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2218 else
2219 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2220 if (graph->points_to[i]
2221 && !bitmap_empty_p (graph->points_to[i]))
2223 fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2224 unsigned j;
2225 bitmap_iterator bi;
2226 EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2227 fprintf (file, " %d", j);
2228 fprintf (file, " }\"]");
2230 fprintf (file, ";\n");
2233 /* Go over the edges. */
2234 fprintf (file, "\n // Edges in the constraint graph:\n");
2235 for (i = 1; i < graph->size; i++)
2237 unsigned j;
2238 bitmap_iterator bi;
2239 if (si->node_mapping[i] != i)
2240 continue;
2241 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2243 unsigned from = si->node_mapping[j];
2244 if (from < FIRST_REF_NODE)
2245 fprintf (file, "\"%s\"", get_varinfo (from)->name);
2246 else
2247 fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2248 fprintf (file, " -> ");
2249 if (i < FIRST_REF_NODE)
2250 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2251 else
2252 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2253 fprintf (file, ";\n");
2257 /* Prints the tail of the dot file. */
2258 fprintf (file, "}\n");
2261 /* Perform offline variable substitution, discovering equivalence
2262 classes, and eliminating non-pointer variables. */
2264 static struct scc_info *
2265 perform_var_substitution (constraint_graph_t graph)
2267 unsigned int i;
2268 unsigned int size = graph->size;
2269 struct scc_info *si = init_scc_info (size);
2271 bitmap_obstack_initialize (&iteration_obstack);
2272 pointer_equiv_class_table.create (511);
2273 location_equiv_class_table.create (511);
2274 pointer_equiv_class = 1;
2275 location_equiv_class = 1;
2277 /* Condense the nodes, which means to find SCC's, count incoming
2278 predecessors, and unite nodes in SCC's. */
2279 for (i = 1; i < FIRST_REF_NODE; i++)
2280 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2281 condense_visit (graph, si, si->node_mapping[i]);
2283 if (dump_file && (dump_flags & TDF_GRAPH))
2285 fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
2286 "in dot format:\n");
2287 dump_pred_graph (si, dump_file);
2288 fprintf (dump_file, "\n\n");
2291 bitmap_clear (si->visited);
2292 /* Actually label the nodes for pointer equivalences. */
2293 for (i = 1; i < FIRST_REF_NODE; i++)
2294 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2295 label_visit (graph, si, si->node_mapping[i]);
2297 /* Calculate location equivalence labels. */
2298 for (i = 1; i < FIRST_REF_NODE; i++)
2300 bitmap pointed_by;
2301 bitmap_iterator bi;
2302 unsigned int j;
2304 if (!graph->pointed_by[i])
2305 continue;
2306 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2308 /* Translate the pointed-by mapping for pointer equivalence
2309 labels. */
2310 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2312 bitmap_set_bit (pointed_by,
2313 graph->pointer_label[si->node_mapping[j]]);
2315 /* The original pointed_by is now dead. */
2316 BITMAP_FREE (graph->pointed_by[i]);
2318 /* Look up the location equivalence label if one exists, or make
2319 one otherwise. */
2320 equiv_class_label_t ecl;
2321 ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
2322 if (ecl->equivalence_class == 0)
2323 ecl->equivalence_class = location_equiv_class++;
2324 else
2326 if (dump_file && (dump_flags & TDF_DETAILS))
2327 fprintf (dump_file, "Found location equivalence for node %s\n",
2328 get_varinfo (i)->name);
2329 BITMAP_FREE (pointed_by);
2331 graph->loc_label[i] = ecl->equivalence_class;
2335 if (dump_file && (dump_flags & TDF_DETAILS))
2336 for (i = 1; i < FIRST_REF_NODE; i++)
2338 unsigned j = si->node_mapping[i];
2339 if (j != i)
2341 fprintf (dump_file, "%s node id %d ",
2342 bitmap_bit_p (graph->direct_nodes, i)
2343 ? "Direct" : "Indirect", i);
2344 if (i < FIRST_REF_NODE)
2345 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2346 else
2347 fprintf (dump_file, "\"*%s\"",
2348 get_varinfo (i - FIRST_REF_NODE)->name);
2349 fprintf (dump_file, " mapped to SCC leader node id %d ", j);
2350 if (j < FIRST_REF_NODE)
2351 fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
2352 else
2353 fprintf (dump_file, "\"*%s\"\n",
2354 get_varinfo (j - FIRST_REF_NODE)->name);
2356 else
2358 fprintf (dump_file,
2359 "Equivalence classes for %s node id %d ",
2360 bitmap_bit_p (graph->direct_nodes, i)
2361 ? "direct" : "indirect", i);
2362 if (i < FIRST_REF_NODE)
2363 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2364 else
2365 fprintf (dump_file, "\"*%s\"",
2366 get_varinfo (i - FIRST_REF_NODE)->name);
2367 fprintf (dump_file,
2368 ": pointer %d, location %d\n",
2369 graph->pointer_label[i], graph->loc_label[i]);
2373 /* Quickly eliminate our non-pointer variables. */
2375 for (i = 1; i < FIRST_REF_NODE; i++)
2377 unsigned int node = si->node_mapping[i];
2379 if (graph->pointer_label[node] == 0)
2381 if (dump_file && (dump_flags & TDF_DETAILS))
2382 fprintf (dump_file,
2383 "%s is a non-pointer variable, eliminating edges.\n",
2384 get_varinfo (node)->name);
2385 stats.nonpointer_vars++;
2386 clear_edges_for_node (graph, node);
2390 return si;
2393 /* Free information that was only necessary for variable
2394 substitution. */
2396 static void
2397 free_var_substitution_info (struct scc_info *si)
2399 free_scc_info (si);
2400 free (graph->pointer_label);
2401 free (graph->loc_label);
2402 free (graph->pointed_by);
2403 free (graph->points_to);
2404 free (graph->eq_rep);
2405 sbitmap_free (graph->direct_nodes);
2406 pointer_equiv_class_table.dispose ();
2407 location_equiv_class_table.dispose ();
2408 bitmap_obstack_release (&iteration_obstack);
2411 /* Return an existing node that is equivalent to NODE, which has
2412 equivalence class LABEL, if one exists. Return NODE otherwise. */
2414 static unsigned int
2415 find_equivalent_node (constraint_graph_t graph,
2416 unsigned int node, unsigned int label)
2418 /* If the address version of this variable is unused, we can
2419 substitute it for anything else with the same label.
2420 Otherwise, we know the pointers are equivalent, but not the
2421 locations, and we can unite them later. */
2423 if (!bitmap_bit_p (graph->address_taken, node))
2425 gcc_checking_assert (label < graph->size);
2427 if (graph->eq_rep[label] != -1)
2429 /* Unify the two variables since we know they are equivalent. */
2430 if (unite (graph->eq_rep[label], node))
2431 unify_nodes (graph, graph->eq_rep[label], node, false);
2432 return graph->eq_rep[label];
2434 else
2436 graph->eq_rep[label] = node;
2437 graph->pe_rep[label] = node;
2440 else
2442 gcc_checking_assert (label < graph->size);
2443 graph->pe[node] = label;
2444 if (graph->pe_rep[label] == -1)
2445 graph->pe_rep[label] = node;
2448 return node;
2451 /* Unite pointer equivalent but not location equivalent nodes in
2452 GRAPH. This may only be performed once variable substitution is
2453 finished. */
2455 static void
2456 unite_pointer_equivalences (constraint_graph_t graph)
2458 unsigned int i;
2460 /* Go through the pointer equivalences and unite them to their
2461 representative, if they aren't already. */
2462 for (i = 1; i < FIRST_REF_NODE; i++)
2464 unsigned int label = graph->pe[i];
2465 if (label)
2467 int label_rep = graph->pe_rep[label];
2469 if (label_rep == -1)
2470 continue;
2472 label_rep = find (label_rep);
2473 if (label_rep >= 0 && unite (label_rep, find (i)))
2474 unify_nodes (graph, label_rep, i, false);
2479 /* Move complex constraints to the GRAPH nodes they belong to. */
2481 static void
2482 move_complex_constraints (constraint_graph_t graph)
2484 int i;
2485 constraint_t c;
2487 FOR_EACH_VEC_ELT (constraints, i, c)
2489 if (c)
2491 struct constraint_expr lhs = c->lhs;
2492 struct constraint_expr rhs = c->rhs;
2494 if (lhs.type == DEREF)
2496 insert_into_complex (graph, lhs.var, c);
2498 else if (rhs.type == DEREF)
2500 if (!(get_varinfo (lhs.var)->is_special_var))
2501 insert_into_complex (graph, rhs.var, c);
2503 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2504 && (lhs.offset != 0 || rhs.offset != 0))
2506 insert_into_complex (graph, rhs.var, c);
2513 /* Optimize and rewrite complex constraints while performing
2514 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2515 result of perform_variable_substitution. */
2517 static void
2518 rewrite_constraints (constraint_graph_t graph,
2519 struct scc_info *si)
2521 int i;
2522 constraint_t c;
2524 #ifdef ENABLE_CHECKING
2525 for (unsigned int j = 0; j < graph->size; j++)
2526 gcc_assert (find (j) == j);
2527 #endif
2529 FOR_EACH_VEC_ELT (constraints, i, c)
2531 struct constraint_expr lhs = c->lhs;
2532 struct constraint_expr rhs = c->rhs;
2533 unsigned int lhsvar = find (lhs.var);
2534 unsigned int rhsvar = find (rhs.var);
2535 unsigned int lhsnode, rhsnode;
2536 unsigned int lhslabel, rhslabel;
2538 lhsnode = si->node_mapping[lhsvar];
2539 rhsnode = si->node_mapping[rhsvar];
2540 lhslabel = graph->pointer_label[lhsnode];
2541 rhslabel = graph->pointer_label[rhsnode];
2543 /* See if it is really a non-pointer variable, and if so, ignore
2544 the constraint. */
2545 if (lhslabel == 0)
2547 if (dump_file && (dump_flags & TDF_DETAILS))
2550 fprintf (dump_file, "%s is a non-pointer variable,"
2551 "ignoring constraint:",
2552 get_varinfo (lhs.var)->name);
2553 dump_constraint (dump_file, c);
2554 fprintf (dump_file, "\n");
2556 constraints[i] = NULL;
2557 continue;
2560 if (rhslabel == 0)
2562 if (dump_file && (dump_flags & TDF_DETAILS))
2565 fprintf (dump_file, "%s is a non-pointer variable,"
2566 "ignoring constraint:",
2567 get_varinfo (rhs.var)->name);
2568 dump_constraint (dump_file, c);
2569 fprintf (dump_file, "\n");
2571 constraints[i] = NULL;
2572 continue;
2575 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2576 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2577 c->lhs.var = lhsvar;
2578 c->rhs.var = rhsvar;
2582 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2583 part of an SCC, false otherwise. */
2585 static bool
2586 eliminate_indirect_cycles (unsigned int node)
2588 if (graph->indirect_cycles[node] != -1
2589 && !bitmap_empty_p (get_varinfo (node)->solution))
2591 unsigned int i;
2592 auto_vec<unsigned> queue;
2593 int queuepos;
2594 unsigned int to = find (graph->indirect_cycles[node]);
2595 bitmap_iterator bi;
2597 /* We can't touch the solution set and call unify_nodes
2598 at the same time, because unify_nodes is going to do
2599 bitmap unions into it. */
2601 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2603 if (find (i) == i && i != to)
2605 if (unite (to, i))
2606 queue.safe_push (i);
2610 for (queuepos = 0;
2611 queue.iterate (queuepos, &i);
2612 queuepos++)
2614 unify_nodes (graph, to, i, true);
2616 return true;
2618 return false;
2621 /* Solve the constraint graph GRAPH using our worklist solver.
2622 This is based on the PW* family of solvers from the "Efficient Field
2623 Sensitive Pointer Analysis for C" paper.
2624 It works by iterating over all the graph nodes, processing the complex
2625 constraints and propagating the copy constraints, until everything stops
2626 changing. This corresponds to steps 6-8 in the solving list given above. */
2628 static void
2629 solve_graph (constraint_graph_t graph)
2631 unsigned int size = graph->size;
2632 unsigned int i;
2633 bitmap pts;
2635 changed = BITMAP_ALLOC (NULL);
2637 /* Mark all initial non-collapsed nodes as changed. */
2638 for (i = 1; i < size; i++)
2640 varinfo_t ivi = get_varinfo (i);
2641 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2642 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2643 || graph->complex[i].length () > 0))
2644 bitmap_set_bit (changed, i);
2647 /* Allocate a bitmap to be used to store the changed bits. */
2648 pts = BITMAP_ALLOC (&pta_obstack);
2650 while (!bitmap_empty_p (changed))
2652 unsigned int i;
2653 struct topo_info *ti = init_topo_info ();
2654 stats.iterations++;
2656 bitmap_obstack_initialize (&iteration_obstack);
2658 compute_topo_order (graph, ti);
2660 while (ti->topo_order.length () != 0)
2663 i = ti->topo_order.pop ();
2665 /* If this variable is not a representative, skip it. */
2666 if (find (i) != i)
2667 continue;
2669 /* In certain indirect cycle cases, we may merge this
2670 variable to another. */
2671 if (eliminate_indirect_cycles (i) && find (i) != i)
2672 continue;
2674 /* If the node has changed, we need to process the
2675 complex constraints and outgoing edges again. */
2676 if (bitmap_clear_bit (changed, i))
2678 unsigned int j;
2679 constraint_t c;
2680 bitmap solution;
2681 vec<constraint_t> complex = graph->complex[i];
2682 varinfo_t vi = get_varinfo (i);
2683 bool solution_empty;
2685 /* Compute the changed set of solution bits. If anything
2686 is in the solution just propagate that. */
2687 if (bitmap_bit_p (vi->solution, anything_id))
2689 /* If anything is also in the old solution there is
2690 nothing to do.
2691 ??? But we shouldn't have ended up with "changed" set ... */
2692 if (vi->oldsolution
2693 && bitmap_bit_p (vi->oldsolution, anything_id))
2694 continue;
2695 bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
2697 else if (vi->oldsolution)
2698 bitmap_and_compl (pts, vi->solution, vi->oldsolution);
2699 else
2700 bitmap_copy (pts, vi->solution);
2702 if (bitmap_empty_p (pts))
2703 continue;
2705 if (vi->oldsolution)
2706 bitmap_ior_into (vi->oldsolution, pts);
2707 else
2709 vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
2710 bitmap_copy (vi->oldsolution, pts);
2713 solution = vi->solution;
2714 solution_empty = bitmap_empty_p (solution);
2716 /* Process the complex constraints */
2717 FOR_EACH_VEC_ELT (complex, j, c)
2719 /* XXX: This is going to unsort the constraints in
2720 some cases, which will occasionally add duplicate
2721 constraints during unification. This does not
2722 affect correctness. */
2723 c->lhs.var = find (c->lhs.var);
2724 c->rhs.var = find (c->rhs.var);
2726 /* The only complex constraint that can change our
2727 solution to non-empty, given an empty solution,
2728 is a constraint where the lhs side is receiving
2729 some set from elsewhere. */
2730 if (!solution_empty || c->lhs.type != DEREF)
2731 do_complex_constraint (graph, c, pts);
2734 solution_empty = bitmap_empty_p (solution);
2736 if (!solution_empty)
2738 bitmap_iterator bi;
2739 unsigned eff_escaped_id = find (escaped_id);
2741 /* Propagate solution to all successors. */
2742 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2743 0, j, bi)
2745 bitmap tmp;
2746 bool flag;
2748 unsigned int to = find (j);
2749 tmp = get_varinfo (to)->solution;
2750 flag = false;
2752 /* Don't try to propagate to ourselves. */
2753 if (to == i)
2754 continue;
2756 /* If we propagate from ESCAPED use ESCAPED as
2757 placeholder. */
2758 if (i == eff_escaped_id)
2759 flag = bitmap_set_bit (tmp, escaped_id);
2760 else
2761 flag = bitmap_ior_into (tmp, pts);
2763 if (flag)
2764 bitmap_set_bit (changed, to);
2769 free_topo_info (ti);
2770 bitmap_obstack_release (&iteration_obstack);
2773 BITMAP_FREE (pts);
2774 BITMAP_FREE (changed);
2775 bitmap_obstack_release (&oldpta_obstack);
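/* One solver step, roughly (illustrative only): suppose node a has a
   copy edge to b and a's solution just grew by {y}.  a's changed bit
   is set, so the delta {y} is computed against a's oldsolution, a's
   complex constraints are re-evaluated against that delta, and the
   delta is or'ed into b's solution.  If that actually added bits to
   b, b's changed bit is set and b is revisited in a following
   topological pass.  */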
2778 /* Map from trees to variable infos. */
2779 static struct pointer_map_t *vi_for_tree;
2782 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2784 static void
2785 insert_vi_for_tree (tree t, varinfo_t vi)
2787 void **slot = pointer_map_insert (vi_for_tree, t);
2788 gcc_assert (vi);
2789 gcc_assert (*slot == NULL);
2790 *slot = vi;
2793 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2794 exist in the map, return NULL, otherwise, return the varinfo we found. */
2796 static varinfo_t
2797 lookup_vi_for_tree (tree t)
2799 void **slot = pointer_map_contains (vi_for_tree, t);
2800 if (slot == NULL)
2801 return NULL;
2803 return (varinfo_t) *slot;
2806 /* Return a printable name for DECL */
2808 static const char *
2809 alias_get_name (tree decl)
2811 const char *res = NULL;
2812 char *temp;
2813 int num_printed = 0;
2815 if (!dump_file)
2816 return "NULL";
2818 if (TREE_CODE (decl) == SSA_NAME)
2820 res = get_name (decl);
2821 if (res)
2822 num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
2823 else
2824 num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
2825 if (num_printed > 0)
2827 res = ggc_strdup (temp);
2828 free (temp);
2831 else if (DECL_P (decl))
2833 if (DECL_ASSEMBLER_NAME_SET_P (decl))
2834 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2835 else
2837 res = get_name (decl);
2838 if (!res)
2840 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2841 if (num_printed > 0)
2843 res = ggc_strdup (temp);
2844 free (temp);
2849 if (res != NULL)
2850 return res;
2852 return "NULL";
2855 /* Find the variable info for tree T in the map.
2856 If T doesn't exist in the map, create an entry for it and return it. */
2858 static varinfo_t
2859 get_vi_for_tree (tree t)
2861 void **slot = pointer_map_contains (vi_for_tree, t);
2862 if (slot == NULL)
2863 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2865 return (varinfo_t) *slot;
2868 /* Get a scalar constraint expression for a new temporary variable. */
2870 static struct constraint_expr
2871 new_scalar_tmp_constraint_exp (const char *name)
2873 struct constraint_expr tmp;
2874 varinfo_t vi;
2876 vi = new_var_info (NULL_TREE, name);
2877 vi->offset = 0;
2878 vi->size = -1;
2879 vi->fullsize = -1;
2880 vi->is_full_var = 1;
2882 tmp.var = vi->id;
2883 tmp.type = SCALAR;
2884 tmp.offset = 0;
2886 return tmp;
2889 /* Get a constraint expression vector from an SSA_VAR_P node.
2890 If address_p is true, the result will have its address taken. */
2892 static void
2893 get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
2895 struct constraint_expr cexpr;
2896 varinfo_t vi;
2898 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2899 gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
2901 /* For parameters, get at the points-to set for the actual parm
2902 decl. */
2903 if (TREE_CODE (t) == SSA_NAME
2904 && SSA_NAME_IS_DEFAULT_DEF (t)
2905 && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2906 || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
2908 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2909 return;
2912 /* For global variables resort to the alias target. */
2913 if (TREE_CODE (t) == VAR_DECL
2914 && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
2916 struct varpool_node *node = varpool_get_node (t);
2917 if (node && node->alias && node->analyzed)
2919 node = varpool_variable_node (node, NULL);
2920 t = node->decl;
2924 vi = get_vi_for_tree (t);
2925 cexpr.var = vi->id;
2926 cexpr.type = SCALAR;
2927 cexpr.offset = 0;
2928 /* If we determine the result is "anything", and we know this is readonly,
2929 say it points to readonly memory instead. */
2930 if (cexpr.var == anything_id && TREE_READONLY (t))
2932 gcc_unreachable ();
2933 cexpr.type = ADDRESSOF;
2934 cexpr.var = readonly_id;
2937 /* If we are not taking the address of the constraint expr, add all
2938 sub-fields of the variable as well. */
2939 if (!address_p
2940 && !vi->is_full_var)
2942 for (; vi; vi = vi_next (vi))
2944 cexpr.var = vi->id;
2945 results->safe_push (cexpr);
2947 return;
2950 results->safe_push (cexpr);
2953 /* Process constraint T, performing various simplifications and then
2954 adding it to our list of overall constraints. */
2956 static void
2957 process_constraint (constraint_t t)
2959 struct constraint_expr rhs = t->rhs;
2960 struct constraint_expr lhs = t->lhs;
2962 gcc_assert (rhs.var < varmap.length ());
2963 gcc_assert (lhs.var < varmap.length ());
2965 /* If we didn't get any useful constraint from the lhs we get
2966 &ANYTHING as fallback from get_constraint_for. Deal with
2967 it here by turning it into *ANYTHING. */
2968 if (lhs.type == ADDRESSOF
2969 && lhs.var == anything_id)
2970 lhs.type = DEREF;
2972 /* ADDRESSOF on the lhs is invalid. */
2973 gcc_assert (lhs.type != ADDRESSOF);
2975 /* We shouldn't add constraints from things that cannot have pointers.
2976 It's not completely trivial to avoid in the callers, so do it here. */
2977 if (rhs.type != ADDRESSOF
2978 && !get_varinfo (rhs.var)->may_have_pointers)
2979 return;
2981 /* Likewise adding to the solution of a non-pointer var isn't useful. */
2982 if (!get_varinfo (lhs.var)->may_have_pointers)
2983 return;
2985 /* This can happen in our IR with things like n->a = *p */
2986 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
2988 /* Split into tmp = *rhs, *lhs = tmp */
2989 struct constraint_expr tmplhs;
2990 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp");
2991 process_constraint (new_constraint (tmplhs, rhs));
2992 process_constraint (new_constraint (lhs, tmplhs));
2994 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
2996 /* Split into tmp = &rhs, *lhs = tmp */
2997 struct constraint_expr tmplhs;
2998 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp");
2999 process_constraint (new_constraint (tmplhs, rhs));
3000 process_constraint (new_constraint (lhs, tmplhs));
3002 else
3004 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
3005 constraints.safe_push (t);
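/* For example, the double dereference n->a = *p mentioned above is
   split, roughly, into the two constraints

     doubledereftmp = *p;
     *(n + offset of a) = doubledereftmp;

   and *x = &y is likewise split into derefaddrtmp = &y;
   *x = derefaddrtmp, so that no emitted constraint has a dereference
   on both sides or an address-of feeding a store directly.  */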
3010 /* Return the position, in bits, of FIELD_DECL from the beginning of its
3011 structure. */
3013 static HOST_WIDE_INT
3014 bitpos_of_field (const tree fdecl)
3016 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3017 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3018 return -1;
3020 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3021 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
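/* For instance, on a target with BITS_PER_UNIT == 8 a field at
   DECL_FIELD_OFFSET 4 (bytes) and DECL_FIELD_BIT_OFFSET 16 sits at
   4 * 8 + 16 == 48 bits from the start of the structure; if either
   offset does not fit a signed HOST_WIDE_INT (e.g. for a variable
   position) we punt and return -1.  */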
3025 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
3026 resulting constraint expressions in *RESULTS. */
3028 static void
3029 get_constraint_for_ptr_offset (tree ptr, tree offset,
3030 vec<ce_s> *results)
3032 struct constraint_expr c;
3033 unsigned int j, n;
3034 HOST_WIDE_INT rhsoffset;
3036 /* If we do not do field-sensitive PTA adding offsets to pointers
3037 does not change the points-to solution. */
3038 if (!use_field_sensitive)
3040 get_constraint_for_rhs (ptr, results);
3041 return;
3044 /* If the offset is not a non-negative integer constant that fits
3045 in a HOST_WIDE_INT, we have to fall back to a conservative
3046 solution which includes all sub-fields of all pointed-to
3047 variables of ptr. */
3048 if (offset == NULL_TREE
3049 || TREE_CODE (offset) != INTEGER_CST)
3050 rhsoffset = UNKNOWN_OFFSET;
3051 else
3053 /* Sign-extend the offset. */
3054 double_int soffset = tree_to_double_int (offset)
3055 .sext (TYPE_PRECISION (TREE_TYPE (offset)));
3056 if (!soffset.fits_shwi ())
3057 rhsoffset = UNKNOWN_OFFSET;
3058 else
3060 /* Make sure the bit-offset also fits. */
3061 HOST_WIDE_INT rhsunitoffset = soffset.low;
3062 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
3063 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3064 rhsoffset = UNKNOWN_OFFSET;
3068 get_constraint_for_rhs (ptr, results);
3069 if (rhsoffset == 0)
3070 return;
3072 /* As we are eventually appending to the solution do not use
3073 vec::iterate here. */
3074 n = results->length ();
3075 for (j = 0; j < n; j++)
3077 varinfo_t curr;
3078 c = (*results)[j];
3079 curr = get_varinfo (c.var);
3081 if (c.type == ADDRESSOF
3082 /* If this varinfo represents a full variable just use it. */
3083 && curr->is_full_var)
3084 c.offset = 0;
3085 else if (c.type == ADDRESSOF
3086 /* If we do not know the offset add all subfields. */
3087 && rhsoffset == UNKNOWN_OFFSET)
3089 varinfo_t temp = get_varinfo (curr->head);
3092 struct constraint_expr c2;
3093 c2.var = temp->id;
3094 c2.type = ADDRESSOF;
3095 c2.offset = 0;
3096 if (c2.var != c.var)
3097 results->safe_push (c2);
3098 temp = vi_next (temp);
3100 while (temp);
3102 else if (c.type == ADDRESSOF)
3104 varinfo_t temp;
3105 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3107 /* Search the sub-field which overlaps with the
3108 pointed-to offset. If the result is outside of the variable
3109 we have to provide a conservative result, as the variable is
3110 still reachable from the resulting pointer (even though it
3111 technically cannot point to anything). The last and first
3112 sub-fields are such conservative results.
3113 ??? If we always had a sub-field for &object + 1 then
3114 we could represent this in a more precise way. */
3115 if (rhsoffset < 0
3116 && curr->offset < offset)
3117 offset = 0;
3118 temp = first_or_preceding_vi_for_offset (curr, offset);
3120 /* If the found variable is not exactly at the pointed to
3121 result, we have to include the next variable in the
3122 solution as well. Otherwise two increments by offset / 2
3123 do not result in the same or a conservative superset
3124 solution. */
3125 if (temp->offset != offset
3126 && temp->next != 0)
3128 struct constraint_expr c2;
3129 c2.var = temp->next;
3130 c2.type = ADDRESSOF;
3131 c2.offset = 0;
3132 results->safe_push (c2);
3134 c.var = temp->id;
3135 c.offset = 0;
3137 else
3138 c.offset = rhsoffset;
3140 (*results)[j] = c;
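/* A rough example of the above (offsets chosen only for
   illustration): assume a field-sensitive variable s with
   sub-variables s.a at offset 0 and s.b at offset 64.  For an
   ADDRESSOF result offset by a constant that lands inside s.b, the
   constraint is moved to the sub-field overlapping the new offset
   (plus its neighbour if the hit is not exact); for an unknown
   offset every sub-field of s is pushed, which is the conservative
   fallback described above.  */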
3145 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3146 If address_p is true the result will have its address taken.
3147 If lhs_p is true then the constraint expression is assumed to be used
3148 as the lhs. */
3150 static void
3151 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3152 bool address_p, bool lhs_p)
3154 tree orig_t = t;
3155 HOST_WIDE_INT bitsize = -1;
3156 HOST_WIDE_INT bitmaxsize = -1;
3157 HOST_WIDE_INT bitpos;
3158 tree forzero;
3160 /* Some people like to do cute things like take the address of
3161 &0->a.b */
3162 forzero = t;
3163 while (handled_component_p (forzero)
3164 || INDIRECT_REF_P (forzero)
3165 || TREE_CODE (forzero) == MEM_REF)
3166 forzero = TREE_OPERAND (forzero, 0);
3168 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3170 struct constraint_expr temp;
3172 temp.offset = 0;
3173 temp.var = integer_id;
3174 temp.type = SCALAR;
3175 results->safe_push (temp);
3176 return;
3179 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);
3181 /* Pretend to take the address of the base, we'll take care of
3182 adding the required subset of sub-fields below. */
3183 get_constraint_for_1 (t, results, true, lhs_p);
3184 gcc_assert (results->length () == 1);
3185 struct constraint_expr &result = results->last ();
3187 if (result.type == SCALAR
3188 && get_varinfo (result.var)->is_full_var)
3189 /* For single-field vars do not bother about the offset. */
3190 result.offset = 0;
3191 else if (result.type == SCALAR)
3193 /* In languages like C, you can access one past the end of an
3194 array. You aren't allowed to dereference it, so we can
3195 ignore this constraint. When we handle pointer subtraction,
3196 we may have to do something cute here. */
3198 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
3199 && bitmaxsize != 0)
3201 /* It's also not true that the constraint will actually start at the
3202 right offset, it may start in some padding. We only care about
3203 setting the constraint to the first actual field it touches, so
3204 walk to find it. */
3205 struct constraint_expr cexpr = result;
3206 varinfo_t curr;
3207 results->pop ();
3208 cexpr.offset = 0;
3209 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3211 if (ranges_overlap_p (curr->offset, curr->size,
3212 bitpos, bitmaxsize))
3214 cexpr.var = curr->id;
3215 results->safe_push (cexpr);
3216 if (address_p)
3217 break;
3220 /* If we are going to take the address of this field then
3221 to be able to compute reachability correctly add at least
3222 the last field of the variable. */
3223 if (address_p && results->length () == 0)
3225 curr = get_varinfo (cexpr.var);
3226 while (curr->next != 0)
3227 curr = vi_next (curr);
3228 cexpr.var = curr->id;
3229 results->safe_push (cexpr);
3231 else if (results->length () == 0)
3232 /* Assert that we found *some* field there. The user couldn't be
3233 accessing *only* padding. */
3234 /* Still the user could access one past the end of an array
3235 embedded in a struct resulting in accessing *only* padding. */
3236 /* Or accessing only padding via type-punning to a type
3237 that has a field just in padding space. */
3239 cexpr.type = SCALAR;
3240 cexpr.var = anything_id;
3241 cexpr.offset = 0;
3242 results->safe_push (cexpr);
3245 else if (bitmaxsize == 0)
3247 if (dump_file && (dump_flags & TDF_DETAILS))
3248 fprintf (dump_file, "Access to zero-sized part of variable,"
3249 "ignoring\n");
3251 else
3252 if (dump_file && (dump_flags & TDF_DETAILS))
3253 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3255 else if (result.type == DEREF)
3257 /* If we do not know exactly where the access goes say so. Note
3258 that only for non-structure accesses we know that we access
3259 at most one subfield of any variable. */
3260 if (bitpos == -1
3261 || bitsize != bitmaxsize
3262 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3263 || result.offset == UNKNOWN_OFFSET)
3264 result.offset = UNKNOWN_OFFSET;
3265 else
3266 result.offset += bitpos;
3268 else if (result.type == ADDRESSOF)
3270 /* We can end up here for component references on a
3271 VIEW_CONVERT_EXPR <>(&foobar). */
3272 result.type = SCALAR;
3273 result.var = anything_id;
3274 result.offset = 0;
3276 else
3277 gcc_unreachable ();
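/* As an illustration of the SCALAR case above: for
   struct S { int i; int *p; } s; decomposed into sub-variables s.i
   and s.p, an access to s.p yields a bit range overlapping only the
   s.p sub-variable, so a single SCALAR constraint for that sub-field
   is pushed.  For a DEREF result whose precise position is unknown
   the offset is widened to UNKNOWN_OFFSET so that all sub-fields are
   considered at solving time.  */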
3281 /* Dereference the constraint expression CONS, and return the result.
3282 DEREF (ADDRESSOF) = SCALAR
3283 DEREF (SCALAR) = DEREF
3284 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3285 This is needed so that we can handle dereferencing DEREF constraints. */
3287 static void
3288 do_deref (vec<ce_s> *constraints)
3290 struct constraint_expr *c;
3291 unsigned int i = 0;
3293 FOR_EACH_VEC_ELT (*constraints, i, c)
3295 if (c->type == SCALAR)
3296 c->type = DEREF;
3297 else if (c->type == ADDRESSOF)
3298 c->type = SCALAR;
3299 else if (c->type == DEREF)
3301 struct constraint_expr tmplhs;
3302 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp");
3303 process_constraint (new_constraint (tmplhs, *c));
3304 c->var = tmplhs.var;
3306 else
3307 gcc_unreachable ();
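/* For example, dereferencing the results for the expression **p:
   the inner *p is already a DEREF, so a temporary is introduced,
   generating dereftmp = *p, and the outer dereference then operates
   on DEREF (dereftmp).  Dereferencing &x simply yields SCALAR x.  */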
3311 /* Given a tree T, return the constraint expression for taking the
3312 address of it. */
3314 static void
3315 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3317 struct constraint_expr *c;
3318 unsigned int i;
3320 get_constraint_for_1 (t, results, true, true);
3322 FOR_EACH_VEC_ELT (*results, i, c)
3324 if (c->type == DEREF)
3325 c->type = SCALAR;
3326 else
3327 c->type = ADDRESSOF;
3331 /* Given a tree T, return the constraint expression for it. */
3333 static void
3334 get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
3335 bool lhs_p)
3337 struct constraint_expr temp;
3339 /* x = integer is all glommed to a single variable, which doesn't
3340 point to anything by itself. That is, of course, unless it is an
3341 integer constant being treated as a pointer, in which case, we
3342 will return that this is really the addressof anything. This
3343 happens below, since it will fall into the default case. The only
3344 case we know something about an integer treated like a pointer is
3345 when it is the NULL pointer, and then we just say it points to
3346 NULL.
3348 Do not do that if -fno-delete-null-pointer-checks though, because
3349 in that case *NULL does not fail, so it _should_ alias *anything.
3350 It is not worth adding a new option or renaming the existing one,
3351 since this case is relatively obscure. */
3352 if ((TREE_CODE (t) == INTEGER_CST
3353 && integer_zerop (t))
3354 /* The only valid CONSTRUCTORs in gimple with pointer typed
3355 elements are zero-initializers. But in IPA mode we also
3356 process global initializers, so verify at least. */
3357 || (TREE_CODE (t) == CONSTRUCTOR
3358 && CONSTRUCTOR_NELTS (t) == 0))
3360 if (flag_delete_null_pointer_checks)
3361 temp.var = nothing_id;
3362 else
3363 temp.var = nonlocal_id;
3364 temp.type = ADDRESSOF;
3365 temp.offset = 0;
3366 results->safe_push (temp);
3367 return;
3370 /* String constants are read-only. */
3371 if (TREE_CODE (t) == STRING_CST)
3373 temp.var = readonly_id;
3374 temp.type = SCALAR;
3375 temp.offset = 0;
3376 results->safe_push (temp);
3377 return;
3380 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3382 case tcc_expression:
3384 switch (TREE_CODE (t))
3386 case ADDR_EXPR:
3387 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3388 return;
3389 default:;
3391 break;
3393 case tcc_reference:
3395 switch (TREE_CODE (t))
3397 case MEM_REF:
3399 struct constraint_expr cs;
3400 varinfo_t vi, curr;
3401 get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3402 TREE_OPERAND (t, 1), results);
3403 do_deref (results);
3405 /* If we are not taking the address then make sure to process
3406 all subvariables we might access. */
3407 if (address_p)
3408 return;
3410 cs = results->last ();
3411 if (cs.type == DEREF
3412 && type_can_have_subvars (TREE_TYPE (t)))
3414 /* For dereferences this means we have to defer it
3415 to solving time. */
3416 results->last ().offset = UNKNOWN_OFFSET;
3417 return;
3419 if (cs.type != SCALAR)
3420 return;
3422 vi = get_varinfo (cs.var);
3423 curr = vi_next (vi);
3424 if (!vi->is_full_var
3425 && curr)
3427 unsigned HOST_WIDE_INT size;
3428 if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
3429 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
3430 else
3431 size = -1;
3432 for (; curr; curr = vi_next (curr))
3434 if (curr->offset - vi->offset < size)
3436 cs.var = curr->id;
3437 results->safe_push (cs);
3439 else
3440 break;
3443 return;
3445 case ARRAY_REF:
3446 case ARRAY_RANGE_REF:
3447 case COMPONENT_REF:
3448 get_constraint_for_component_ref (t, results, address_p, lhs_p);
3449 return;
3450 case VIEW_CONVERT_EXPR:
3451 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
3452 lhs_p);
3453 return;
3454 /* We are missing handling for TARGET_MEM_REF here. */
3455 default:;
3457 break;
3459 case tcc_exceptional:
3461 switch (TREE_CODE (t))
3463 case SSA_NAME:
3465 get_constraint_for_ssa_var (t, results, address_p);
3466 return;
3468 case CONSTRUCTOR:
3470 unsigned int i;
3471 tree val;
3472 auto_vec<ce_s> tmp;
3473 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3475 struct constraint_expr *rhsp;
3476 unsigned j;
3477 get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3478 FOR_EACH_VEC_ELT (tmp, j, rhsp)
3479 results->safe_push (*rhsp);
3480 tmp.truncate (0);
3482 /* We do not know whether the constructor was complete,
3483 so technically we have to add &NOTHING or &ANYTHING
3484 like we do for an empty constructor as well. */
3485 return;
3487 default:;
3489 break;
3491 case tcc_declaration:
3493 get_constraint_for_ssa_var (t, results, address_p);
3494 return;
3496 case tcc_constant:
3498 /* We cannot refer to automatic variables through constants. */
3499 temp.type = ADDRESSOF;
3500 temp.var = nonlocal_id;
3501 temp.offset = 0;
3502 results->safe_push (temp);
3503 return;
3505 default:;
3508 /* The default fallback is a constraint from anything. */
3509 temp.type = ADDRESSOF;
3510 temp.var = anything_id;
3511 temp.offset = 0;
3512 results->safe_push (temp);
3515 /* Given a gimple tree T, return the constraint expression vector for it. */
3517 static void
3518 get_constraint_for (tree t, vec<ce_s> *results)
3520 gcc_assert (results->length () == 0);
3522 get_constraint_for_1 (t, results, false, true);
3525 /* Given a gimple tree T, return the constraint expression vector for it
3526 to be used as the rhs of a constraint. */
3528 static void
3529 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3531 gcc_assert (results->length () == 0);
3533 get_constraint_for_1 (t, results, false, false);
3537 /* Efficiently generates constraints from all entries in *RHSC to all
3538 entries in *LHSC. */
3540 static void
3541 process_all_all_constraints (vec<ce_s> lhsc,
3542 vec<ce_s> rhsc)
3544 struct constraint_expr *lhsp, *rhsp;
3545 unsigned i, j;
3547 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3549 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3550 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3551 process_constraint (new_constraint (*lhsp, *rhsp));
3553 else
3555 struct constraint_expr tmp;
3556 tmp = new_scalar_tmp_constraint_exp ("allalltmp");
3557 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3558 process_constraint (new_constraint (tmp, *rhsp));
3559 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3560 process_constraint (new_constraint (*lhsp, tmp));
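/* E.g. with three constraint expressions on each side the naive
   expansion would emit 3 * 3 = 9 constraints; going through the
   "allalltmp" variable emits 3 + 3 = 6 instead, at the cost of one
   extra variable.  With a single-element side the direct cross
   product is used since it is already linear.  */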
3564 /* Handle aggregate copies by expanding into copies of the respective
3565 fields of the structures. */
3567 static void
3568 do_structure_copy (tree lhsop, tree rhsop)
3570 struct constraint_expr *lhsp, *rhsp;
3571 auto_vec<ce_s> lhsc;
3572 auto_vec<ce_s> rhsc;
3573 unsigned j;
3575 get_constraint_for (lhsop, &lhsc);
3576 get_constraint_for_rhs (rhsop, &rhsc);
3577 lhsp = &lhsc[0];
3578 rhsp = &rhsc[0];
3579 if (lhsp->type == DEREF
3580 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3581 || rhsp->type == DEREF)
3583 if (lhsp->type == DEREF)
3585 gcc_assert (lhsc.length () == 1);
3586 lhsp->offset = UNKNOWN_OFFSET;
3588 if (rhsp->type == DEREF)
3590 gcc_assert (rhsc.length () == 1);
3591 rhsp->offset = UNKNOWN_OFFSET;
3593 process_all_all_constraints (lhsc, rhsc);
3595 else if (lhsp->type == SCALAR
3596 && (rhsp->type == SCALAR
3597 || rhsp->type == ADDRESSOF))
3599 HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
3600 HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
3601 unsigned k = 0;
3602 get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
3603 get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
3604 for (j = 0; lhsc.iterate (j, &lhsp);)
3606 varinfo_t lhsv, rhsv;
3607 rhsp = &rhsc[k];
3608 lhsv = get_varinfo (lhsp->var);
3609 rhsv = get_varinfo (rhsp->var);
3610 if (lhsv->may_have_pointers
3611 && (lhsv->is_full_var
3612 || rhsv->is_full_var
3613 || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3614 rhsv->offset + lhsoffset, rhsv->size)))
3615 process_constraint (new_constraint (*lhsp, *rhsp));
3616 if (!rhsv->is_full_var
3617 && (lhsv->is_full_var
3618 || (lhsv->offset + rhsoffset + lhsv->size
3619 > rhsv->offset + lhsoffset + rhsv->size)))
3621 ++k;
3622 if (k >= rhsc.length ())
3623 break;
3625 else
3626 ++j;
3629 else
3630 gcc_unreachable ();
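/* Illustrative example: for struct S { int *p; int *q; } a, b; and
   the copy a = b; with both sides decomposed into sub-variables, the
   field walk above pairs the overlapping sub-fields and emits
   roughly a.p = b.p and a.q = b.q.  If either side is accessed
   through a pointer the copy instead degrades to the UNKNOWN_OFFSET
   all-to-all form handled first.  */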
3633 /* Create constraints ID = { rhsc }. */
3635 static void
3636 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3638 struct constraint_expr *c;
3639 struct constraint_expr includes;
3640 unsigned int j;
3642 includes.var = id;
3643 includes.offset = 0;
3644 includes.type = SCALAR;
3646 FOR_EACH_VEC_ELT (rhsc, j, c)
3647 process_constraint (new_constraint (includes, *c));
3650 /* Create a constraint ID = OP. */
3652 static void
3653 make_constraint_to (unsigned id, tree op)
3655 auto_vec<ce_s> rhsc;
3656 get_constraint_for_rhs (op, &rhsc);
3657 make_constraints_to (id, rhsc);
3660 /* Create a constraint ID = &FROM. */
3662 static void
3663 make_constraint_from (varinfo_t vi, int from)
3665 struct constraint_expr lhs, rhs;
3667 lhs.var = vi->id;
3668 lhs.offset = 0;
3669 lhs.type = SCALAR;
3671 rhs.var = from;
3672 rhs.offset = 0;
3673 rhs.type = ADDRESSOF;
3674 process_constraint (new_constraint (lhs, rhs));
3677 /* Create a constraint ID = FROM. */
3679 static void
3680 make_copy_constraint (varinfo_t vi, int from)
3682 struct constraint_expr lhs, rhs;
3684 lhs.var = vi->id;
3685 lhs.offset = 0;
3686 lhs.type = SCALAR;
3688 rhs.var = from;
3689 rhs.offset = 0;
3690 rhs.type = SCALAR;
3691 process_constraint (new_constraint (lhs, rhs));
3694 /* Make constraints necessary to make OP escape. */
3696 static void
3697 make_escape_constraint (tree op)
3699 make_constraint_to (escaped_id, op);
3702 /* Add constraints so that the solution of VI is transitively closed. */
3704 static void
3705 make_transitive_closure_constraints (varinfo_t vi)
3707 struct constraint_expr lhs, rhs;
3709 /* VAR = *VAR; */
3710 lhs.type = SCALAR;
3711 lhs.var = vi->id;
3712 lhs.offset = 0;
3713 rhs.type = DEREF;
3714 rhs.var = vi->id;
3715 rhs.offset = 0;
3716 process_constraint (new_constraint (lhs, rhs));
3718 /* VAR = VAR + UNKNOWN; */
3719 lhs.type = SCALAR;
3720 lhs.var = vi->id;
3721 lhs.offset = 0;
3722 rhs.type = SCALAR;
3723 rhs.var = vi->id;
3724 rhs.offset = UNKNOWN_OFFSET;
3725 process_constraint (new_constraint (lhs, rhs));
3728 /* Temporary storage for fake var decls. */
3729 struct obstack fake_var_decl_obstack;
3731 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3733 static tree
3734 build_fake_var_decl (tree type)
3736 tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
3737 memset (decl, 0, sizeof (struct tree_var_decl));
3738 TREE_SET_CODE (decl, VAR_DECL);
3739 TREE_TYPE (decl) = type;
3740 DECL_UID (decl) = allocate_decl_uid ();
3741 SET_DECL_PT_UID (decl, -1);
3742 layout_decl (decl, 0);
3743 return decl;
3746 /* Create a new artificial heap variable with NAME.
3747 Return the created variable. */
3749 static varinfo_t
3750 make_heapvar (const char *name)
3752 varinfo_t vi;
3753 tree heapvar;
3755 heapvar = build_fake_var_decl (ptr_type_node);
3756 DECL_EXTERNAL (heapvar) = 1;
3758 vi = new_var_info (heapvar, name);
3759 vi->is_artificial_var = true;
3760 vi->is_heap_var = true;
3761 vi->is_unknown_size_var = true;
3762 vi->offset = 0;
3763 vi->fullsize = ~0;
3764 vi->size = ~0;
3765 vi->is_full_var = true;
3766 insert_vi_for_tree (heapvar, vi);
3768 return vi;
3771 /* Create a new artificial heap variable with NAME and make a
3772 constraint from it to LHS. Set flags according to a tag used
3773 for tracking restrict pointers. */
3775 static varinfo_t
3776 make_constraint_from_restrict (varinfo_t lhs, const char *name)
3778 varinfo_t vi = make_heapvar (name);
3779 vi->is_global_var = 1;
3780 vi->may_have_pointers = 1;
3781 make_constraint_from (lhs, vi->id);
3782 return vi;
3785 /* Create a new artificial heap variable with NAME and make a
3786 constraint from it to LHS. Set flags according to a tag used
3787 for tracking restrict pointers and make the artificial heap
3788 point to global memory. */
3790 static varinfo_t
3791 make_constraint_from_global_restrict (varinfo_t lhs, const char *name)
3793 varinfo_t vi = make_constraint_from_restrict (lhs, name);
3794 make_copy_constraint (vi, nonlocal_id);
3795 return vi;
3798 /* In IPA mode there are varinfos for different aspects of each
3799 function designator. One for the points-to set of the return
3800 value, one for the variables that are clobbered by the function,
3801 one for its uses and one for each parameter (including a single
3802 glob for remaining variadic arguments). */
3804 enum { fi_clobbers = 1, fi_uses = 2,
3805 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
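/* E.g. (an illustrative sketch): for int *foo (int *a, int *b) the
   varinfo chain built for foo in IPA mode has sub-variables for the
   clobber and use sets and the return value, with the two parameters
   at offsets fi_parm_base + 0 and fi_parm_base + 1.
   get_function_part_constraint below selects the matching
   sub-variable for a known FUNCTION_DECL and falls back to a DEREF
   with the requested part as offset for indirect callees.  */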
3807 /* Get a constraint for the requested part of a function designator FI
3808 when operating in IPA mode. */
3810 static struct constraint_expr
3811 get_function_part_constraint (varinfo_t fi, unsigned part)
3813 struct constraint_expr c;
3815 gcc_assert (in_ipa_mode);
3817 if (fi->id == anything_id)
3819 /* ??? We probably should have a ANYFN special variable. */
3820 c.var = anything_id;
3821 c.offset = 0;
3822 c.type = SCALAR;
3824 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3826 varinfo_t ai = first_vi_for_offset (fi, part);
3827 if (ai)
3828 c.var = ai->id;
3829 else
3830 c.var = anything_id;
3831 c.offset = 0;
3832 c.type = SCALAR;
3834 else
3836 c.var = fi->id;
3837 c.offset = part;
3838 c.type = DEREF;
3841 return c;
3844 /* For non-IPA mode, generate constraints necessary for a call on the
3845 RHS. */
3847 static void
3848 handle_rhs_call (gimple stmt, vec<ce_s> *results)
3850 struct constraint_expr rhsc;
3851 unsigned i;
3852 bool returns_uses = false;
3854 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3856 tree arg = gimple_call_arg (stmt, i);
3857 int flags = gimple_call_arg_flags (stmt, i);
3859 /* If the argument is not used we can ignore it. */
3860 if (flags & EAF_UNUSED)
3861 continue;
3863 /* As we compute ESCAPED context-insensitive we do not gain
3864 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3865 set. The argument would still get clobbered through the
3866 escape solution. */
3867 if ((flags & EAF_NOCLOBBER)
3868 && (flags & EAF_NOESCAPE))
3870 varinfo_t uses = get_call_use_vi (stmt);
3871 if (!(flags & EAF_DIRECT))
3873 varinfo_t tem = new_var_info (NULL_TREE, "callarg");
3874 make_constraint_to (tem->id, arg);
3875 make_transitive_closure_constraints (tem);
3876 make_copy_constraint (uses, tem->id);
3878 else
3879 make_constraint_to (uses->id, arg);
3880 returns_uses = true;
3882 else if (flags & EAF_NOESCAPE)
3884 struct constraint_expr lhs, rhs;
3885 varinfo_t uses = get_call_use_vi (stmt);
3886 varinfo_t clobbers = get_call_clobber_vi (stmt);
3887 varinfo_t tem = new_var_info (NULL_TREE, "callarg");
3888 make_constraint_to (tem->id, arg);
3889 if (!(flags & EAF_DIRECT))
3890 make_transitive_closure_constraints (tem);
3891 make_copy_constraint (uses, tem->id);
3892 make_copy_constraint (clobbers, tem->id);
3893 /* Add *tem = nonlocal, do not add *tem = callused as
3894 EAF_NOESCAPE parameters do not escape to other parameters
3895 and all other uses appear in NONLOCAL as well. */
3896 lhs.type = DEREF;
3897 lhs.var = tem->id;
3898 lhs.offset = 0;
3899 rhs.type = SCALAR;
3900 rhs.var = nonlocal_id;
3901 rhs.offset = 0;
3902 process_constraint (new_constraint (lhs, rhs));
3903 returns_uses = true;
3905 else
3906 make_escape_constraint (arg);
3909 /* If we added to the calls uses solution make sure we account for
3910 pointers to it to be returned. */
3911 if (returns_uses)
3913 rhsc.var = get_call_use_vi (stmt)->id;
3914 rhsc.offset = 0;
3915 rhsc.type = SCALAR;
3916 results->safe_push (rhsc);
3919 /* The static chain escapes as well. */
3920 if (gimple_call_chain (stmt))
3921 make_escape_constraint (gimple_call_chain (stmt));
3923 /* And if we applied NRV the address of the return slot escapes as well. */
3924 if (gimple_call_return_slot_opt_p (stmt)
3925 && gimple_call_lhs (stmt) != NULL_TREE
3926 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3928 auto_vec<ce_s> tmpc;
3929 struct constraint_expr lhsc, *c;
3930 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
3931 lhsc.var = escaped_id;
3932 lhsc.offset = 0;
3933 lhsc.type = SCALAR;
3934 FOR_EACH_VEC_ELT (tmpc, i, c)
3935 process_constraint (new_constraint (lhsc, *c));
3938 /* Regular functions return nonlocal memory. */
3939 rhsc.var = nonlocal_id;
3940 rhsc.offset = 0;
3941 rhsc.type = SCALAR;
3942 results->safe_push (rhsc);
3945 /* For non-IPA mode, generate constraints necessary for a call
3946 that returns a pointer and assigns it to LHS. This simply makes
3947 the LHS point to global and escaped variables. */
3949 static void
3950 handle_lhs_call (gimple stmt, tree lhs, int flags, vec<ce_s> rhsc,
3951 tree fndecl)
3953 auto_vec<ce_s> lhsc;
3955 get_constraint_for (lhs, &lhsc);
3956 /* If the store is to a global decl make sure to
3957 add proper escape constraints. */
3958 lhs = get_base_address (lhs);
3959 if (lhs
3960 && DECL_P (lhs)
3961 && is_global_var (lhs))
3963 struct constraint_expr tmpc;
3964 tmpc.var = escaped_id;
3965 tmpc.offset = 0;
3966 tmpc.type = SCALAR;
3967 lhsc.safe_push (tmpc);
3970 /* If the call returns an argument unmodified override the rhs
3971 constraints. */
3972 flags = gimple_call_return_flags (stmt);
3973 if (flags & ERF_RETURNS_ARG
3974 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
3976 tree arg;
3977 rhsc.create (0);
3978 arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
3979 get_constraint_for (arg, &rhsc);
3980 process_all_all_constraints (lhsc, rhsc);
3981 rhsc.release ();
3983 else if (flags & ERF_NOALIAS)
3985 varinfo_t vi;
3986 struct constraint_expr tmpc;
3987 rhsc.create (0);
3988 vi = make_heapvar ("HEAP");
3989 /* We mark allocated storage local; it becoming global is handled
3990 by escaping and the setting of vars_contains_escaped_heap. */
3991 DECL_EXTERNAL (vi->decl) = 0;
3992 vi->is_global_var = 0;
3993 /* If this is not a real malloc call assume the memory was
3994 initialized and thus may point to global memory. All
3995 builtin functions with the malloc attribute behave in a sane way. */
3996 if (!fndecl
3997 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
3998 make_constraint_from (vi, nonlocal_id);
3999 tmpc.var = vi->id;
4000 tmpc.offset = 0;
4001 tmpc.type = ADDRESSOF;
4002 rhsc.safe_push (tmpc);
4003 process_all_all_constraints (lhsc, rhsc);
4004 rhsc.release ();
4006 else
4007 process_all_all_constraints (lhsc, rhsc);
4010 /* For non-IPA mode, generate constraints necessary for a call of a
4011 const function that returns a pointer in the statement STMT. */
4013 static void
4014 handle_const_call (gimple stmt, vec<ce_s> *results)
4016 struct constraint_expr rhsc;
4017 unsigned int k;
4019 /* Treat nested const functions the same as pure functions as far
4020 as the static chain is concerned. */
4021 if (gimple_call_chain (stmt))
4023 varinfo_t uses = get_call_use_vi (stmt);
4024 make_transitive_closure_constraints (uses);
4025 make_constraint_to (uses->id, gimple_call_chain (stmt));
4026 rhsc.var = uses->id;
4027 rhsc.offset = 0;
4028 rhsc.type = SCALAR;
4029 results->safe_push (rhsc);
4032 /* May return arguments. */
4033 for (k = 0; k < gimple_call_num_args (stmt); ++k)
4035 tree arg = gimple_call_arg (stmt, k);
4036 auto_vec<ce_s> argc;
4037 unsigned i;
4038 struct constraint_expr *argp;
4039 get_constraint_for_rhs (arg, &argc);
4040 FOR_EACH_VEC_ELT (argc, i, argp)
4041 results->safe_push (*argp);
4044 /* May return addresses of globals. */
4045 rhsc.var = nonlocal_id;
4046 rhsc.offset = 0;
4047 rhsc.type = ADDRESSOF;
4048 results->safe_push (rhsc);
4051 /* For non-IPA mode, generate constraints necessary for a call to a
4052 pure function in statement STMT. */
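/* A sketch of the effect (informal notation): for p_1 = pfn (q_2) the memory
   reachable from q_2 is added to the per-call use variable created below,
   and the result constraints amount to p_1 = CALLUSED and p_1 = NONLOCAL,
   where CALLUSED stands for that per-call use variable.  */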
4054 static void
4055 handle_pure_call (gimple stmt, vec<ce_s> *results)
4057 struct constraint_expr rhsc;
4058 unsigned i;
4059 varinfo_t uses = NULL;
4061 /* Memory reached from pointer arguments is call-used. */
4062 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4064 tree arg = gimple_call_arg (stmt, i);
4065 if (!uses)
4067 uses = get_call_use_vi (stmt);
4068 make_transitive_closure_constraints (uses);
4070 make_constraint_to (uses->id, arg);
4073 /* The static chain is used as well. */
4074 if (gimple_call_chain (stmt))
4076 if (!uses)
4078 uses = get_call_use_vi (stmt);
4079 make_transitive_closure_constraints (uses);
4081 make_constraint_to (uses->id, gimple_call_chain (stmt));
4084 /* Pure functions may return call-used and nonlocal memory. */
4085 if (uses)
4087 rhsc.var = uses->id;
4088 rhsc.offset = 0;
4089 rhsc.type = SCALAR;
4090 results->safe_push (rhsc);
4092 rhsc.var = nonlocal_id;
4093 rhsc.offset = 0;
4094 rhsc.type = SCALAR;
4095 results->safe_push (rhsc);
4099 /* Return the varinfo for the callee of CALL. */
4101 static varinfo_t
4102 get_fi_for_callee (gimple call)
4104 tree decl, fn = gimple_call_fn (call);
4106 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4107 fn = OBJ_TYPE_REF_EXPR (fn);
4109 /* If we can directly resolve the function being called, do so.
4110 Otherwise, it must be some sort of indirect expression that
4111 we should still be able to handle. */
4112 decl = gimple_call_addr_fndecl (fn);
4113 if (decl)
4114 return get_vi_for_tree (decl);
4116   /* If the function is anything other than an SSA name pointer we have no
4117      clue and should be getting ANYFN (well, ANYTHING for now).  */
4118 if (!fn || TREE_CODE (fn) != SSA_NAME)
4119 return get_varinfo (anything_id);
4121 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4122 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4123 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4124 fn = SSA_NAME_VAR (fn);
4126 return get_vi_for_tree (fn);
4129 /* Create constraints for the builtin call T. Return true if the call
4130 was handled, otherwise false. */
4132 static bool
4133 find_func_aliases_for_builtin_call (gimple t)
4135 tree fndecl = gimple_call_fndecl (t);
4136 vec<ce_s> lhsc = vNULL;
4137 vec<ce_s> rhsc = vNULL;
4138 varinfo_t fi;
4140 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4141 /* ??? All builtins that are handled here need to be handled
4142 in the alias-oracle query functions explicitly! */
4143 switch (DECL_FUNCTION_CODE (fndecl))
4145 /* All the following functions return a pointer to the same object
4146 as their first argument points to. The functions do not add
4147          to the ESCAPED solution.  The functions make the memory pointed to
4148          by the first argument point to what the memory pointed to by the
4149          second argument points to.  */
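      /* A sketch for one of them (informal notation):
	   res_1 = memcpy (dst_2, src_3, n_4)
	 generates roughly res_1 = dst_2 and *dst_2 = *src_3 (both sides with
	 unknown offsets); for the mempcpy/stpcpy/stpncpy (and _CHK) variants
	 the return value is dst_2 offset by an unknown amount instead.  */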
4150 case BUILT_IN_STRCPY:
4151 case BUILT_IN_STRNCPY:
4152 case BUILT_IN_BCOPY:
4153 case BUILT_IN_MEMCPY:
4154 case BUILT_IN_MEMMOVE:
4155 case BUILT_IN_MEMPCPY:
4156 case BUILT_IN_STPCPY:
4157 case BUILT_IN_STPNCPY:
4158 case BUILT_IN_STRCAT:
4159 case BUILT_IN_STRNCAT:
4160 case BUILT_IN_STRCPY_CHK:
4161 case BUILT_IN_STRNCPY_CHK:
4162 case BUILT_IN_MEMCPY_CHK:
4163 case BUILT_IN_MEMMOVE_CHK:
4164 case BUILT_IN_MEMPCPY_CHK:
4165 case BUILT_IN_STPCPY_CHK:
4166 case BUILT_IN_STPNCPY_CHK:
4167 case BUILT_IN_STRCAT_CHK:
4168 case BUILT_IN_STRNCAT_CHK:
4169 case BUILT_IN_TM_MEMCPY:
4170 case BUILT_IN_TM_MEMMOVE:
4172 tree res = gimple_call_lhs (t);
4173 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4174 == BUILT_IN_BCOPY ? 1 : 0));
4175 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4176 == BUILT_IN_BCOPY ? 0 : 1));
4177 if (res != NULL_TREE)
4179 get_constraint_for (res, &lhsc);
4180 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4181 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4182 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4183 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4184 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4185 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4186 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4187 else
4188 get_constraint_for (dest, &rhsc);
4189 process_all_all_constraints (lhsc, rhsc);
4190 lhsc.release ();
4191 rhsc.release ();
4193 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4194 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4195 do_deref (&lhsc);
4196 do_deref (&rhsc);
4197 process_all_all_constraints (lhsc, rhsc);
4198 lhsc.release ();
4199 rhsc.release ();
4200 return true;
4202 case BUILT_IN_MEMSET:
4203 case BUILT_IN_MEMSET_CHK:
4204 case BUILT_IN_TM_MEMSET:
4206 tree res = gimple_call_lhs (t);
4207 tree dest = gimple_call_arg (t, 0);
4208 unsigned i;
4209 ce_s *lhsp;
4210 struct constraint_expr ac;
4211 if (res != NULL_TREE)
4213 get_constraint_for (res, &lhsc);
4214 get_constraint_for (dest, &rhsc);
4215 process_all_all_constraints (lhsc, rhsc);
4216 lhsc.release ();
4217 rhsc.release ();
4219 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4220 do_deref (&lhsc);
4221 if (flag_delete_null_pointer_checks
4222 && integer_zerop (gimple_call_arg (t, 1)))
4224 ac.type = ADDRESSOF;
4225 ac.var = nothing_id;
4227 else
4229 ac.type = SCALAR;
4230 ac.var = integer_id;
4232 ac.offset = 0;
4233 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4234 process_constraint (new_constraint (*lhsp, ac));
4235 lhsc.release ();
4236 return true;
4238 case BUILT_IN_ASSUME_ALIGNED:
4240 tree res = gimple_call_lhs (t);
4241 tree dest = gimple_call_arg (t, 0);
4242 if (res != NULL_TREE)
4244 get_constraint_for (res, &lhsc);
4245 get_constraint_for (dest, &rhsc);
4246 process_all_all_constraints (lhsc, rhsc);
4247 lhsc.release ();
4248 rhsc.release ();
4250 return true;
4252 /* All the following functions do not return pointers, do not
4253 modify the points-to sets of memory reachable from their
4254 arguments and do not add to the ESCAPED solution. */
4255 case BUILT_IN_SINCOS:
4256 case BUILT_IN_SINCOSF:
4257 case BUILT_IN_SINCOSL:
4258 case BUILT_IN_FREXP:
4259 case BUILT_IN_FREXPF:
4260 case BUILT_IN_FREXPL:
4261 case BUILT_IN_GAMMA_R:
4262 case BUILT_IN_GAMMAF_R:
4263 case BUILT_IN_GAMMAL_R:
4264 case BUILT_IN_LGAMMA_R:
4265 case BUILT_IN_LGAMMAF_R:
4266 case BUILT_IN_LGAMMAL_R:
4267 case BUILT_IN_MODF:
4268 case BUILT_IN_MODFF:
4269 case BUILT_IN_MODFL:
4270 case BUILT_IN_REMQUO:
4271 case BUILT_IN_REMQUOF:
4272 case BUILT_IN_REMQUOL:
4273 case BUILT_IN_FREE:
4274 return true;
4275 case BUILT_IN_STRDUP:
4276 case BUILT_IN_STRNDUP:
4277 if (gimple_call_lhs (t))
4279 handle_lhs_call (t, gimple_call_lhs (t), gimple_call_flags (t),
4280 vNULL, fndecl);
4281 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4282 NULL_TREE, &lhsc);
4283 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4284 NULL_TREE, &rhsc);
4285 do_deref (&lhsc);
4286 do_deref (&rhsc);
4287 process_all_all_constraints (lhsc, rhsc);
4288 lhsc.release ();
4289 rhsc.release ();
4290 return true;
4292 break;
4293 /* String / character search functions return a pointer into the
4294 source string or NULL. */
4295 case BUILT_IN_INDEX:
4296 case BUILT_IN_STRCHR:
4297 case BUILT_IN_STRRCHR:
4298 case BUILT_IN_MEMCHR:
4299 case BUILT_IN_STRSTR:
4300 case BUILT_IN_STRPBRK:
4301 if (gimple_call_lhs (t))
4303 tree src = gimple_call_arg (t, 0);
4304 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4305 constraint_expr nul;
4306 nul.var = nothing_id;
4307 nul.offset = 0;
4308 nul.type = ADDRESSOF;
4309 rhsc.safe_push (nul);
4310 get_constraint_for (gimple_call_lhs (t), &lhsc);
4311 process_all_all_constraints (lhsc, rhsc);
4312 lhsc.release ();
4313 rhsc.release ();
4315 return true;
4316 /* Trampolines are special - they set up passing the static
4317 frame. */
4318 case BUILT_IN_INIT_TRAMPOLINE:
4320 tree tramp = gimple_call_arg (t, 0);
4321 tree nfunc = gimple_call_arg (t, 1);
4322 tree frame = gimple_call_arg (t, 2);
4323 unsigned i;
4324 struct constraint_expr lhs, *rhsp;
4325 if (in_ipa_mode)
4327 varinfo_t nfi = NULL;
4328 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4329 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4330 if (nfi)
4332 lhs = get_function_part_constraint (nfi, fi_static_chain);
4333 get_constraint_for (frame, &rhsc);
4334 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4335 process_constraint (new_constraint (lhs, *rhsp));
4336 rhsc.release ();
4338 /* Make the frame point to the function for
4339 the trampoline adjustment call. */
4340 get_constraint_for (tramp, &lhsc);
4341 do_deref (&lhsc);
4342 get_constraint_for (nfunc, &rhsc);
4343 process_all_all_constraints (lhsc, rhsc);
4344 rhsc.release ();
4345 lhsc.release ();
4347 return true;
4350 /* Else fallthru to generic handling which will let
4351 the frame escape. */
4352 break;
4354 case BUILT_IN_ADJUST_TRAMPOLINE:
4356 tree tramp = gimple_call_arg (t, 0);
4357 tree res = gimple_call_lhs (t);
4358 if (in_ipa_mode && res)
4360 get_constraint_for (res, &lhsc);
4361 get_constraint_for (tramp, &rhsc);
4362 do_deref (&rhsc);
4363 process_all_all_constraints (lhsc, rhsc);
4364 rhsc.release ();
4365 lhsc.release ();
4367 return true;
4369 CASE_BUILT_IN_TM_STORE (1):
4370 CASE_BUILT_IN_TM_STORE (2):
4371 CASE_BUILT_IN_TM_STORE (4):
4372 CASE_BUILT_IN_TM_STORE (8):
4373 CASE_BUILT_IN_TM_STORE (FLOAT):
4374 CASE_BUILT_IN_TM_STORE (DOUBLE):
4375 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4376 CASE_BUILT_IN_TM_STORE (M64):
4377 CASE_BUILT_IN_TM_STORE (M128):
4378 CASE_BUILT_IN_TM_STORE (M256):
4380 tree addr = gimple_call_arg (t, 0);
4381 tree src = gimple_call_arg (t, 1);
4383 get_constraint_for (addr, &lhsc);
4384 do_deref (&lhsc);
4385 get_constraint_for (src, &rhsc);
4386 process_all_all_constraints (lhsc, rhsc);
4387 lhsc.release ();
4388 rhsc.release ();
4389 return true;
4391 CASE_BUILT_IN_TM_LOAD (1):
4392 CASE_BUILT_IN_TM_LOAD (2):
4393 CASE_BUILT_IN_TM_LOAD (4):
4394 CASE_BUILT_IN_TM_LOAD (8):
4395 CASE_BUILT_IN_TM_LOAD (FLOAT):
4396 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4397 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4398 CASE_BUILT_IN_TM_LOAD (M64):
4399 CASE_BUILT_IN_TM_LOAD (M128):
4400 CASE_BUILT_IN_TM_LOAD (M256):
4402 tree dest = gimple_call_lhs (t);
4403 tree addr = gimple_call_arg (t, 0);
4405 get_constraint_for (dest, &lhsc);
4406 get_constraint_for (addr, &rhsc);
4407 do_deref (&rhsc);
4408 process_all_all_constraints (lhsc, rhsc);
4409 lhsc.release ();
4410 rhsc.release ();
4411 return true;
4413     /* Variadic arguments need to be handled in IPA
4414        mode as well.  */
4415 case BUILT_IN_VA_START:
4417 tree valist = gimple_call_arg (t, 0);
4418 struct constraint_expr rhs, *lhsp;
4419 unsigned i;
4420 get_constraint_for (valist, &lhsc);
4421 do_deref (&lhsc);
4422         /* The va_list gets access to pointers in variadic arguments,
4423            which we know in the case of IPA analysis and which otherwise
4424            are just all nonlocal variables.  */
4425 if (in_ipa_mode)
4427 fi = lookup_vi_for_tree (cfun->decl);
4428 rhs = get_function_part_constraint (fi, ~0);
4429 rhs.type = ADDRESSOF;
4431 else
4433 rhs.var = nonlocal_id;
4434 rhs.type = ADDRESSOF;
4435 rhs.offset = 0;
4437 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4438 process_constraint (new_constraint (*lhsp, rhs));
4439 lhsc.release ();
4440 /* va_list is clobbered. */
4441 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4442 return true;
4444 /* va_end doesn't have any effect that matters. */
4445 case BUILT_IN_VA_END:
4446 return true;
4447 /* Alternate return. Simply give up for now. */
4448 case BUILT_IN_RETURN:
4450 fi = NULL;
4451 if (!in_ipa_mode
4452 || !(fi = get_vi_for_tree (cfun->decl)))
4453 make_constraint_from (get_varinfo (escaped_id), anything_id);
4454 else if (in_ipa_mode
4455 && fi != NULL)
4457 struct constraint_expr lhs, rhs;
4458 lhs = get_function_part_constraint (fi, fi_result);
4459 rhs.var = anything_id;
4460 rhs.offset = 0;
4461 rhs.type = SCALAR;
4462 process_constraint (new_constraint (lhs, rhs));
4464 return true;
4466 /* printf-style functions may have hooks to set pointers to
4467 point to somewhere into the generated string. Leave them
4468 for a later exercise... */
4469 default:
4470 /* Fallthru to general call handling. */;
4473 return false;
4476 /* Create constraints for the call T. */
4478 static void
4479 find_func_aliases_for_call (gimple t)
4481 tree fndecl = gimple_call_fndecl (t);
4482 vec<ce_s> lhsc = vNULL;
4483 vec<ce_s> rhsc = vNULL;
4484 varinfo_t fi;
4486 if (fndecl != NULL_TREE
4487 && DECL_BUILT_IN (fndecl)
4488 && find_func_aliases_for_builtin_call (t))
4489 return;
4491 fi = get_fi_for_callee (t);
4492 if (!in_ipa_mode
4493 || (fndecl && !fi->is_fn_info))
4495 vec<ce_s> rhsc = vNULL;
4496 int flags = gimple_call_flags (t);
4498 /* Const functions can return their arguments and addresses
4499 of global memory but not of escaped memory. */
4500 if (flags & (ECF_CONST|ECF_NOVOPS))
4502 if (gimple_call_lhs (t))
4503 handle_const_call (t, &rhsc);
4505 /* Pure functions can return addresses in and of memory
4506 reachable from their arguments, but they are not an escape
4507 point for reachable memory of their arguments. */
4508 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4509 handle_pure_call (t, &rhsc);
4510 else
4511 handle_rhs_call (t, &rhsc);
4512 if (gimple_call_lhs (t))
4513 handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
4514 rhsc.release ();
4516 else
4518 tree lhsop;
4519 unsigned j;
4521 /* Assign all the passed arguments to the appropriate incoming
4522 parameters of the function. */
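      /* E.g. (informal notation) a direct call foo (a_1, b_2) in IPA mode
	 generates foo.arg0 = a_1 and foo.arg1 = b_2, using the sub-variables
	 of FOO's function info at offsets fi_parm_base + j.  */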
4523 for (j = 0; j < gimple_call_num_args (t); j++)
4525           struct constraint_expr lhs;
4526 struct constraint_expr *rhsp;
4527 tree arg = gimple_call_arg (t, j);
4529 get_constraint_for_rhs (arg, &rhsc);
4530 lhs = get_function_part_constraint (fi, fi_parm_base + j);
4531 while (rhsc.length () != 0)
4533 rhsp = &rhsc.last ();
4534 process_constraint (new_constraint (lhs, *rhsp));
4535 rhsc.pop ();
4539 /* If we are returning a value, assign it to the result. */
4540 lhsop = gimple_call_lhs (t);
4541 if (lhsop)
4543 struct constraint_expr rhs;
4544 struct constraint_expr *lhsp;
4546 get_constraint_for (lhsop, &lhsc);
4547 rhs = get_function_part_constraint (fi, fi_result);
4548 if (fndecl
4549 && DECL_RESULT (fndecl)
4550 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4552 vec<ce_s> tem = vNULL;
4553 tem.safe_push (rhs);
4554 do_deref (&tem);
4555 rhs = tem[0];
4556 tem.release ();
4558 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4559 process_constraint (new_constraint (*lhsp, rhs));
4562 /* If we pass the result decl by reference, honor that. */
4563 if (lhsop
4564 && fndecl
4565 && DECL_RESULT (fndecl)
4566 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
4568 struct constraint_expr lhs;
4569 struct constraint_expr *rhsp;
4571 get_constraint_for_address_of (lhsop, &rhsc);
4572 lhs = get_function_part_constraint (fi, fi_result);
4573 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4574 process_constraint (new_constraint (lhs, *rhsp));
4575 rhsc.release ();
4578 /* If we use a static chain, pass it along. */
4579 if (gimple_call_chain (t))
4581 struct constraint_expr lhs;
4582 struct constraint_expr *rhsp;
4584 get_constraint_for (gimple_call_chain (t), &rhsc);
4585 lhs = get_function_part_constraint (fi, fi_static_chain);
4586 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4587 process_constraint (new_constraint (lhs, *rhsp));
4592 /* Walk statement T setting up aliasing constraints according to the
4593 references found in T. This function is the main part of the
4594 constraint builder. AI points to auxiliary alias information used
4595 when building alias sets and computing alias grouping heuristics. */
4597 static void
4598 find_func_aliases (gimple origt)
4600 gimple t = origt;
4601 vec<ce_s> lhsc = vNULL;
4602 vec<ce_s> rhsc = vNULL;
4603 struct constraint_expr *c;
4604 varinfo_t fi;
4606 /* Now build constraints expressions. */
4607 if (gimple_code (t) == GIMPLE_PHI)
4609 size_t i;
4610 unsigned int j;
4612 /* For a phi node, assign all the arguments to
4613 the result. */
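      /* E.g. (informal notation) x_3 = PHI <x_1, x_2> generates the copy
	 constraints x_3 = x_1 and x_3 = x_2.  */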
4614 get_constraint_for (gimple_phi_result (t), &lhsc);
4615 for (i = 0; i < gimple_phi_num_args (t); i++)
4617 tree strippedrhs = PHI_ARG_DEF (t, i);
4619 STRIP_NOPS (strippedrhs);
4620 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
4622 FOR_EACH_VEC_ELT (lhsc, j, c)
4624 struct constraint_expr *c2;
4625 while (rhsc.length () > 0)
4627 c2 = &rhsc.last ();
4628 process_constraint (new_constraint (*c, *c2));
4629 rhsc.pop ();
4634 /* In IPA mode, we need to generate constraints to pass call
4635 arguments through their calls. There are two cases,
4636      either a GIMPLE_CALL returning a value, or just a plain
4637      GIMPLE_CALL that does not.
4639 In non-ipa mode, we need to generate constraints for each
4640 pointer passed by address. */
4641 else if (is_gimple_call (t))
4642 find_func_aliases_for_call (t);
4644 /* Otherwise, just a regular assignment statement. Only care about
4645      operations with a pointer result; others are dealt with as escape
4646      points if they have pointer operands.  */
4647 else if (is_gimple_assign (t))
4649 /* Otherwise, just a regular assignment statement. */
4650 tree lhsop = gimple_assign_lhs (t);
4651 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4653 if (rhsop && TREE_CLOBBER_P (rhsop))
4654 /* Ignore clobbers, they don't actually store anything into
4655 the LHS. */
4657 else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4658 do_structure_copy (lhsop, rhsop);
4659 else
4661 enum tree_code code = gimple_assign_rhs_code (t);
4663 get_constraint_for (lhsop, &lhsc);
4665 if (FLOAT_TYPE_P (TREE_TYPE (lhsop)))
4666 /* If the operation produces a floating point result then
4667 assume the value is not produced to transfer a pointer. */
4669 else if (code == POINTER_PLUS_EXPR)
4670 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4671 gimple_assign_rhs2 (t), &rhsc);
4672 else if (code == BIT_AND_EXPR
4673 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
4675 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4676 the pointer. Handle it by offsetting it by UNKNOWN. */
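	  /* E.g. (informal notation) p_2 = p_1 & -16, a common alignment
	     idiom, is treated like p_2 = p_1 + UNKNOWN.  */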
4677 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4678 NULL_TREE, &rhsc);
4680 else if ((CONVERT_EXPR_CODE_P (code)
4681 && !(POINTER_TYPE_P (gimple_expr_type (t))
4682 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4683 || gimple_assign_single_p (t))
4684 get_constraint_for_rhs (rhsop, &rhsc);
4685 else if (code == COND_EXPR)
4687 /* The result is a merge of both COND_EXPR arms. */
4688 vec<ce_s> tmp = vNULL;
4689 struct constraint_expr *rhsp;
4690 unsigned i;
4691 get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
4692 get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
4693 FOR_EACH_VEC_ELT (tmp, i, rhsp)
4694 rhsc.safe_push (*rhsp);
4695 tmp.release ();
4697 else if (truth_value_p (code))
4698         /* Truth value results are not pointers (or parts of pointers),
4699            or at best a very unreasonable obfuscation of one.  */
4701 else
4703 /* All other operations are merges. */
4704 vec<ce_s> tmp = vNULL;
4705 struct constraint_expr *rhsp;
4706 unsigned i, j;
4707 get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
4708 for (i = 2; i < gimple_num_ops (t); ++i)
4710 get_constraint_for_rhs (gimple_op (t, i), &tmp);
4711 FOR_EACH_VEC_ELT (tmp, j, rhsp)
4712 rhsc.safe_push (*rhsp);
4713 tmp.truncate (0);
4715 tmp.release ();
4717 process_all_all_constraints (lhsc, rhsc);
4719 /* If there is a store to a global variable the rhs escapes. */
4720 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4721 && DECL_P (lhsop)
4722 && is_global_var (lhsop)
4723 && (!in_ipa_mode
4724 || DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
4725 make_escape_constraint (rhsop);
4727 /* Handle escapes through return. */
4728 else if (gimple_code (t) == GIMPLE_RETURN
4729 && gimple_return_retval (t) != NULL_TREE)
4731 fi = NULL;
4732 if (!in_ipa_mode
4733 || !(fi = get_vi_for_tree (cfun->decl)))
4734 make_escape_constraint (gimple_return_retval (t));
4735 else if (in_ipa_mode
4736 && fi != NULL)
4738           struct constraint_expr lhs;
4739 struct constraint_expr *rhsp;
4740 unsigned i;
4742 lhs = get_function_part_constraint (fi, fi_result);
4743 get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
4744 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4745 process_constraint (new_constraint (lhs, *rhsp));
4748 /* Handle asms conservatively by adding escape constraints to everything. */
4749 else if (gimple_code (t) == GIMPLE_ASM)
4751 unsigned i, noutputs;
4752 const char **oconstraints;
4753 const char *constraint;
4754 bool allows_mem, allows_reg, is_inout;
4756 noutputs = gimple_asm_noutputs (t);
4757 oconstraints = XALLOCAVEC (const char *, noutputs);
4759 for (i = 0; i < noutputs; ++i)
4761 tree link = gimple_asm_output_op (t, i);
4762 tree op = TREE_VALUE (link);
4764 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4765 oconstraints[i] = constraint;
4766 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4767 &allows_reg, &is_inout);
4769 /* A memory constraint makes the address of the operand escape. */
4770 if (!allows_reg && allows_mem)
4771 make_escape_constraint (build_fold_addr_expr (op));
4773 /* The asm may read global memory, so outputs may point to
4774 any global memory. */
4775 if (op)
4777 vec<ce_s> lhsc = vNULL;
4778 struct constraint_expr rhsc, *lhsp;
4779 unsigned j;
4780 get_constraint_for (op, &lhsc);
4781 rhsc.var = nonlocal_id;
4782 rhsc.offset = 0;
4783 rhsc.type = SCALAR;
4784 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4785 process_constraint (new_constraint (*lhsp, rhsc));
4786 lhsc.release ();
4789 for (i = 0; i < gimple_asm_ninputs (t); ++i)
4791 tree link = gimple_asm_input_op (t, i);
4792 tree op = TREE_VALUE (link);
4794 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4796 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
4797 &allows_mem, &allows_reg);
4799 /* A memory constraint makes the address of the operand escape. */
4800 if (!allows_reg && allows_mem)
4801 make_escape_constraint (build_fold_addr_expr (op));
4802 /* Strictly we'd only need the constraint to ESCAPED if
4803 the asm clobbers memory, otherwise using something
4804 along the lines of per-call clobbers/uses would be enough. */
4805 else if (op)
4806 make_escape_constraint (op);
4810 rhsc.release ();
4811 lhsc.release ();
4815 /* Create a constraint adding to the clobber set of FI the memory
4816 pointed to by PTR. */
4818 static void
4819 process_ipa_clobber (varinfo_t fi, tree ptr)
4821 vec<ce_s> ptrc = vNULL;
4822 struct constraint_expr *c, lhs;
4823 unsigned i;
4824 get_constraint_for_rhs (ptr, &ptrc);
4825 lhs = get_function_part_constraint (fi, fi_clobbers);
4826 FOR_EACH_VEC_ELT (ptrc, i, c)
4827 process_constraint (new_constraint (lhs, *c));
4828 ptrc.release ();
4831 /* Walk statement T setting up clobber and use constraints according to the
4832 references found in T. This function is a main part of the
4833 IPA constraint builder. */
4835 static void
4836 find_func_clobbers (gimple origt)
4838 gimple t = origt;
4839 vec<ce_s> lhsc = vNULL;
4840 auto_vec<ce_s> rhsc;
4841 varinfo_t fi;
4843 /* Add constraints for clobbered/used in IPA mode.
4844      We are not interested in what automatic variables are clobbered
4845      or used, as we only use the information in the caller, to which
4846      they do not escape.  */
4847 gcc_assert (in_ipa_mode);
4849   /* If the stmt refers to memory in any way it had better have a VUSE.  */
4850 if (gimple_vuse (t) == NULL_TREE)
4851 return;
4853 /* We'd better have function information for the current function. */
4854 fi = lookup_vi_for_tree (cfun->decl);
4855 gcc_assert (fi != NULL);
4857 /* Account for stores in assignments and calls. */
4858 if (gimple_vdef (t) != NULL_TREE
4859 && gimple_has_lhs (t))
4861 tree lhs = gimple_get_lhs (t);
4862 tree tem = lhs;
4863 while (handled_component_p (tem))
4864 tem = TREE_OPERAND (tem, 0);
4865 if ((DECL_P (tem)
4866 && !auto_var_in_fn_p (tem, cfun->decl))
4867 || INDIRECT_REF_P (tem)
4868 || (TREE_CODE (tem) == MEM_REF
4869 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4870 && auto_var_in_fn_p
4871 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
4873 struct constraint_expr lhsc, *rhsp;
4874 unsigned i;
4875 lhsc = get_function_part_constraint (fi, fi_clobbers);
4876 get_constraint_for_address_of (lhs, &rhsc);
4877 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4878 process_constraint (new_constraint (lhsc, *rhsp));
4879 rhsc.release ();
4883   /* Account for uses in assignments and returns.  */
4884 if (gimple_assign_single_p (t)
4885 || (gimple_code (t) == GIMPLE_RETURN
4886 && gimple_return_retval (t) != NULL_TREE))
4888 tree rhs = (gimple_assign_single_p (t)
4889 ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
4890 tree tem = rhs;
4891 while (handled_component_p (tem))
4892 tem = TREE_OPERAND (tem, 0);
4893 if ((DECL_P (tem)
4894 && !auto_var_in_fn_p (tem, cfun->decl))
4895 || INDIRECT_REF_P (tem)
4896 || (TREE_CODE (tem) == MEM_REF
4897 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4898 && auto_var_in_fn_p
4899 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
4901 struct constraint_expr lhs, *rhsp;
4902 unsigned i;
4903 lhs = get_function_part_constraint (fi, fi_uses);
4904 get_constraint_for_address_of (rhs, &rhsc);
4905 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4906 process_constraint (new_constraint (lhs, *rhsp));
4907 rhsc.release ();
4911 if (is_gimple_call (t))
4913 varinfo_t cfi = NULL;
4914 tree decl = gimple_call_fndecl (t);
4915 struct constraint_expr lhs, rhs;
4916 unsigned i, j;
4918       /* For builtins we do not have separate function info.  For those
4919          we do not generate escapes for, we have to generate clobbers/uses.  */
4920 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4921 switch (DECL_FUNCTION_CODE (decl))
4923 /* The following functions use and clobber memory pointed to
4924 by their arguments. */
4925 case BUILT_IN_STRCPY:
4926 case BUILT_IN_STRNCPY:
4927 case BUILT_IN_BCOPY:
4928 case BUILT_IN_MEMCPY:
4929 case BUILT_IN_MEMMOVE:
4930 case BUILT_IN_MEMPCPY:
4931 case BUILT_IN_STPCPY:
4932 case BUILT_IN_STPNCPY:
4933 case BUILT_IN_STRCAT:
4934 case BUILT_IN_STRNCAT:
4935 case BUILT_IN_STRCPY_CHK:
4936 case BUILT_IN_STRNCPY_CHK:
4937 case BUILT_IN_MEMCPY_CHK:
4938 case BUILT_IN_MEMMOVE_CHK:
4939 case BUILT_IN_MEMPCPY_CHK:
4940 case BUILT_IN_STPCPY_CHK:
4941 case BUILT_IN_STPNCPY_CHK:
4942 case BUILT_IN_STRCAT_CHK:
4943 case BUILT_IN_STRNCAT_CHK:
4945 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4946 == BUILT_IN_BCOPY ? 1 : 0));
4947 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
4948 == BUILT_IN_BCOPY ? 0 : 1));
4949 unsigned i;
4950 struct constraint_expr *rhsp, *lhsp;
4951 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4952 lhs = get_function_part_constraint (fi, fi_clobbers);
4953 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4954 process_constraint (new_constraint (lhs, *lhsp));
4955 lhsc.release ();
4956 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4957 lhs = get_function_part_constraint (fi, fi_uses);
4958 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4959 process_constraint (new_constraint (lhs, *rhsp));
4960 rhsc.release ();
4961 return;
4963         /* The following functions clobber memory pointed to by
4964            their first argument.  */
4965 case BUILT_IN_MEMSET:
4966 case BUILT_IN_MEMSET_CHK:
4968 tree dest = gimple_call_arg (t, 0);
4969 unsigned i;
4970 ce_s *lhsp;
4971 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4972 lhs = get_function_part_constraint (fi, fi_clobbers);
4973 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4974 process_constraint (new_constraint (lhs, *lhsp));
4975 lhsc.release ();
4976 return;
4978 /* The following functions clobber their second and third
4979 arguments. */
4980 case BUILT_IN_SINCOS:
4981 case BUILT_IN_SINCOSF:
4982 case BUILT_IN_SINCOSL:
4984 process_ipa_clobber (fi, gimple_call_arg (t, 1));
4985 process_ipa_clobber (fi, gimple_call_arg (t, 2));
4986 return;
4988 /* The following functions clobber their second argument. */
4989 case BUILT_IN_FREXP:
4990 case BUILT_IN_FREXPF:
4991 case BUILT_IN_FREXPL:
4992 case BUILT_IN_LGAMMA_R:
4993 case BUILT_IN_LGAMMAF_R:
4994 case BUILT_IN_LGAMMAL_R:
4995 case BUILT_IN_GAMMA_R:
4996 case BUILT_IN_GAMMAF_R:
4997 case BUILT_IN_GAMMAL_R:
4998 case BUILT_IN_MODF:
4999 case BUILT_IN_MODFF:
5000 case BUILT_IN_MODFL:
5002 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5003 return;
5005 /* The following functions clobber their third argument. */
5006 case BUILT_IN_REMQUO:
5007 case BUILT_IN_REMQUOF:
5008 case BUILT_IN_REMQUOL:
5010 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5011 return;
5013 /* The following functions neither read nor clobber memory. */
5014 case BUILT_IN_ASSUME_ALIGNED:
5015 case BUILT_IN_FREE:
5016 return;
5017 /* Trampolines are of no interest to us. */
5018 case BUILT_IN_INIT_TRAMPOLINE:
5019 case BUILT_IN_ADJUST_TRAMPOLINE:
5020 return;
5021 case BUILT_IN_VA_START:
5022 case BUILT_IN_VA_END:
5023 return;
5024 /* printf-style functions may have hooks to set pointers to
5025 point to somewhere into the generated string. Leave them
5026 for a later exercise... */
5027 default:
5028 /* Fallthru to general call handling. */;
5031 /* Parameters passed by value are used. */
5032 lhs = get_function_part_constraint (fi, fi_uses);
5033 for (i = 0; i < gimple_call_num_args (t); i++)
5035 struct constraint_expr *rhsp;
5036 tree arg = gimple_call_arg (t, i);
5038 if (TREE_CODE (arg) == SSA_NAME
5039 || is_gimple_min_invariant (arg))
5040 continue;
5042 get_constraint_for_address_of (arg, &rhsc);
5043 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5044 process_constraint (new_constraint (lhs, *rhsp));
5045 rhsc.release ();
5048 /* Build constraints for propagating clobbers/uses along the
5049 callgraph edges. */
5050 cfi = get_fi_for_callee (t);
5051 if (cfi->id == anything_id)
5053 if (gimple_vdef (t))
5054 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5055 anything_id);
5056 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5057 anything_id);
5058 return;
5061 /* For callees without function info (that's external functions),
5062 ESCAPED is clobbered and used. */
5063 if (gimple_call_fndecl (t)
5064 && !cfi->is_fn_info)
5066 varinfo_t vi;
5068 if (gimple_vdef (t))
5069 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5070 escaped_id);
5071 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
5073 /* Also honor the call statement use/clobber info. */
5074 if ((vi = lookup_call_clobber_vi (t)) != NULL)
5075 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5076 vi->id);
5077 if ((vi = lookup_call_use_vi (t)) != NULL)
5078 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
5079 vi->id);
5080 return;
5083 /* Otherwise the caller clobbers and uses what the callee does.
5084 ??? This should use a new complex constraint that filters
5085 local variables of the callee. */
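      /* I.e. (informal notation) for a call from FN to CALLEE this adds
	 FN.clobber = CALLEE.clobber (when the call has a VDEF) and
	 FN.use = CALLEE.use.  */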
5086 if (gimple_vdef (t))
5088 lhs = get_function_part_constraint (fi, fi_clobbers);
5089 rhs = get_function_part_constraint (cfi, fi_clobbers);
5090 process_constraint (new_constraint (lhs, rhs));
5092 lhs = get_function_part_constraint (fi, fi_uses);
5093 rhs = get_function_part_constraint (cfi, fi_uses);
5094 process_constraint (new_constraint (lhs, rhs));
5096 else if (gimple_code (t) == GIMPLE_ASM)
5098 /* ??? Ick. We can do better. */
5099 if (gimple_vdef (t))
5100 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5101 anything_id);
5102 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5103 anything_id);
5108 /* Find the first varinfo in the same variable as START that overlaps with
5109 OFFSET. Return NULL if we can't find one. */
5111 static varinfo_t
5112 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5114 /* If the offset is outside of the variable, bail out. */
5115 if (offset >= start->fullsize)
5116 return NULL;
5118 /* If we cannot reach offset from start, lookup the first field
5119 and start from there. */
5120 if (start->offset > offset)
5121 start = get_varinfo (start->head);
5123 while (start)
5125 /* We may not find a variable in the field list with the actual
5126          offset when we have glommed a structure to a variable.
5127 In that case, however, offset should still be within the size
5128 of the variable. */
5129 if (offset >= start->offset
5130 && (offset - start->offset) < start->size)
5131 return start;
5133 start = vi_next (start);
5136 return NULL;
5139 /* Find the first varinfo in the same variable as START that overlaps with
5140 OFFSET. If there is no such varinfo the varinfo directly preceding
5141 OFFSET is returned. */
5143 static varinfo_t
5144 first_or_preceding_vi_for_offset (varinfo_t start,
5145 unsigned HOST_WIDE_INT offset)
5147 /* If we cannot reach offset from start, lookup the first field
5148 and start from there. */
5149 if (start->offset > offset)
5150 start = get_varinfo (start->head);
5152 /* We may not find a variable in the field list with the actual
5153      offset when we have glommed a structure to a variable.
5154 In that case, however, offset should still be within the size
5155 of the variable.
5156 If we got beyond the offset we look for return the field
5157 directly preceding offset which may be the last field. */
5158 while (start->next
5159 && offset >= start->offset
5160 && !((offset - start->offset) < start->size))
5161 start = vi_next (start);
5163 return start;
5167 /* This structure is used during pushing fields onto the fieldstack
5168 to track the offset of the field, since bitpos_of_field gives it
5169 relative to its immediate containing type, and we want it relative
5170 to the ultimate containing object. */
5172 struct fieldoff
5174 /* Offset from the base of the base containing object to this field. */
5175 HOST_WIDE_INT offset;
5177 /* Size, in bits, of the field. */
5178 unsigned HOST_WIDE_INT size;
5180 unsigned has_unknown_size : 1;
5182 unsigned must_have_pointers : 1;
5184 unsigned may_have_pointers : 1;
5186 unsigned only_restrict_pointers : 1;
5188 typedef struct fieldoff fieldoff_s;
5191 /* qsort comparison function for two fieldoff's PA and PB */
5193 static int
5194 fieldoff_compare (const void *pa, const void *pb)
5196 const fieldoff_s *foa = (const fieldoff_s *)pa;
5197 const fieldoff_s *fob = (const fieldoff_s *)pb;
5198 unsigned HOST_WIDE_INT foasize, fobsize;
5200 if (foa->offset < fob->offset)
5201 return -1;
5202 else if (foa->offset > fob->offset)
5203 return 1;
5205 foasize = foa->size;
5206 fobsize = fob->size;
5207 if (foasize < fobsize)
5208 return -1;
5209 else if (foasize > fobsize)
5210 return 1;
5211 return 0;
5214 /* Sort a fieldstack according to the field offset and sizes. */
5215 static void
5216 sort_fieldstack (vec<fieldoff_s> fieldstack)
5218 fieldstack.qsort (fieldoff_compare);
5221 /* Return true if T is a type that can have subvars. */
5223 static inline bool
5224 type_can_have_subvars (const_tree t)
5226 /* Aggregates without overlapping fields can have subvars. */
5227 return TREE_CODE (t) == RECORD_TYPE;
5230 /* Return true if V is a tree that we can have subvars for.
5231 Normally, this is any aggregate type. Also complex
5232 types which are not gimple registers can have subvars. */
5234 static inline bool
5235 var_can_have_subvars (const_tree v)
5237 /* Volatile variables should never have subvars. */
5238 if (TREE_THIS_VOLATILE (v))
5239 return false;
5241 /* Non decls or memory tags can never have subvars. */
5242 if (!DECL_P (v))
5243 return false;
5245 return type_can_have_subvars (TREE_TYPE (v));
5248 /* Return true if T is a type that does contain pointers. */
5250 static bool
5251 type_must_have_pointers (tree type)
5253 if (POINTER_TYPE_P (type))
5254 return true;
5256 if (TREE_CODE (type) == ARRAY_TYPE)
5257 return type_must_have_pointers (TREE_TYPE (type));
5259 /* A function or method can have pointers as arguments, so track
5260 those separately. */
5261 if (TREE_CODE (type) == FUNCTION_TYPE
5262 || TREE_CODE (type) == METHOD_TYPE)
5263 return true;
5265 return false;
5268 static bool
5269 field_must_have_pointers (tree t)
5271 return type_must_have_pointers (TREE_TYPE (t));
5274 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5275 the fields of TYPE onto fieldstack, recording their offsets along
5276 the way.
5278 OFFSET is used to keep track of the offset in this entire
5279 structure, rather than just the immediately containing structure.
5280 Returns false if the caller is supposed to handle the field we
5281 recursed for. */
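/* A sketch (assuming a typical LP64 target, for illustration only): for
   struct S { int i; int *p; } this pushes two fieldoff entries, one at bit
   offset 0 of size 32 for I and one at bit offset 64 of size 64 for P, the
   latter with must_have_pointers set; adjacent fields that cannot contain
   pointers would instead have been merged into a single entry.  */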
5283 static bool
5284 push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
5285 HOST_WIDE_INT offset)
5287 tree field;
5288 bool empty_p = true;
5290 if (TREE_CODE (type) != RECORD_TYPE)
5291 return false;
5293 /* If the vector of fields is growing too big, bail out early.
5294      Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, so
5295      make sure this fails.  */
5296 if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5297 return false;
5299 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5300 if (TREE_CODE (field) == FIELD_DECL)
5302 bool push = false;
5303 HOST_WIDE_INT foff = bitpos_of_field (field);
5305 if (!var_can_have_subvars (field)
5306 || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
5307 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
5308 push = true;
5309 else if (!push_fields_onto_fieldstack
5310 (TREE_TYPE (field), fieldstack, offset + foff)
5311 && (DECL_SIZE (field)
5312 && !integer_zerop (DECL_SIZE (field))))
5313         /* Empty structures may have actual size, like in C++.  So
5314            if we didn't push any subfields and the size is nonzero,
5315            push the field onto the stack.  */
5316 push = true;
5318 if (push)
5320 fieldoff_s *pair = NULL;
5321 bool has_unknown_size = false;
5322 bool must_have_pointers_p;
5324 if (!fieldstack->is_empty ())
5325 pair = &fieldstack->last ();
5327           /* If there isn't anything at offset zero, create something.  */
5328 if (!pair
5329 && offset + foff != 0)
5331 fieldoff_s e = {0, offset + foff, false, false, false, false};
5332 pair = fieldstack->safe_push (e);
5335 if (!DECL_SIZE (field)
5336 || !tree_fits_uhwi_p (DECL_SIZE (field)))
5337 has_unknown_size = true;
5339 /* If adjacent fields do not contain pointers merge them. */
5340 must_have_pointers_p = field_must_have_pointers (field);
5341 if (pair
5342 && !has_unknown_size
5343 && !must_have_pointers_p
5344 && !pair->must_have_pointers
5345 && !pair->has_unknown_size
5346 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5348 pair->size += tree_to_uhwi (DECL_SIZE (field));
5350 else
5352 fieldoff_s e;
5353 e.offset = offset + foff;
5354 e.has_unknown_size = has_unknown_size;
5355 if (!has_unknown_size)
5356 e.size = tree_to_uhwi (DECL_SIZE (field));
5357 else
5358 e.size = -1;
5359 e.must_have_pointers = must_have_pointers_p;
5360 e.may_have_pointers = true;
5361 e.only_restrict_pointers
5362 = (!has_unknown_size
5363 && POINTER_TYPE_P (TREE_TYPE (field))
5364 && TYPE_RESTRICT (TREE_TYPE (field)));
5365 fieldstack->safe_push (e);
5369 empty_p = false;
5372 return !empty_p;
5375 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5376 if it is a varargs function. */
5378 static unsigned int
5379 count_num_arguments (tree decl, bool *is_varargs)
5381 unsigned int num = 0;
5382 tree t;
5384 /* Capture named arguments for K&R functions. They do not
5385 have a prototype and thus no TYPE_ARG_TYPES. */
5386 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5387 ++num;
5389 /* Check if the function has variadic arguments. */
5390 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5391 if (TREE_VALUE (t) == void_type_node)
5392 break;
5393 if (!t)
5394 *is_varargs = true;
5396 return num;
5399 /* Create function information for DECL, using NAME, and return the
5400    varinfo we've created for the function.  */
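/* The resulting varinfo is a chain of sub-variables laid out by offset:
   the function itself at offset 0, NAME.clobber at fi_clobbers, NAME.use
   at fi_uses, NAME.chain at fi_static_chain (if the function has a static
   chain), NAME.result at fi_result (when there is a return value), one
   NAME.argN per parameter starting at fi_parm_base, and finally a
   NAME.varargs representative for the remaining arguments of varargs
   functions.  */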
5402 static varinfo_t
5403 create_function_info_for (tree decl, const char *name)
5405 struct function *fn = DECL_STRUCT_FUNCTION (decl);
5406 varinfo_t vi, prev_vi;
5407 tree arg;
5408 unsigned int i;
5409 bool is_varargs = false;
5410 unsigned int num_args = count_num_arguments (decl, &is_varargs);
5412 /* Create the variable info. */
5414 vi = new_var_info (decl, name);
5415 vi->offset = 0;
5416 vi->size = 1;
5417 vi->fullsize = fi_parm_base + num_args;
5418 vi->is_fn_info = 1;
5419 vi->may_have_pointers = false;
5420 if (is_varargs)
5421 vi->fullsize = ~0;
5422 insert_vi_for_tree (vi->decl, vi);
5424 prev_vi = vi;
5426 /* Create a variable for things the function clobbers and one for
5427 things the function uses. */
5429 varinfo_t clobbervi, usevi;
5430 const char *newname;
5431 char *tempname;
5433 asprintf (&tempname, "%s.clobber", name);
5434 newname = ggc_strdup (tempname);
5435 free (tempname);
5437 clobbervi = new_var_info (NULL, newname);
5438 clobbervi->offset = fi_clobbers;
5439 clobbervi->size = 1;
5440 clobbervi->fullsize = vi->fullsize;
5441 clobbervi->is_full_var = true;
5442 clobbervi->is_global_var = false;
5443 gcc_assert (prev_vi->offset < clobbervi->offset);
5444 prev_vi->next = clobbervi->id;
5445 prev_vi = clobbervi;
5447 asprintf (&tempname, "%s.use", name);
5448 newname = ggc_strdup (tempname);
5449 free (tempname);
5451 usevi = new_var_info (NULL, newname);
5452 usevi->offset = fi_uses;
5453 usevi->size = 1;
5454 usevi->fullsize = vi->fullsize;
5455 usevi->is_full_var = true;
5456 usevi->is_global_var = false;
5457 gcc_assert (prev_vi->offset < usevi->offset);
5458 prev_vi->next = usevi->id;
5459 prev_vi = usevi;
5462 /* And one for the static chain. */
5463 if (fn->static_chain_decl != NULL_TREE)
5465 varinfo_t chainvi;
5466 const char *newname;
5467 char *tempname;
5469 asprintf (&tempname, "%s.chain", name);
5470 newname = ggc_strdup (tempname);
5471 free (tempname);
5473 chainvi = new_var_info (fn->static_chain_decl, newname);
5474 chainvi->offset = fi_static_chain;
5475 chainvi->size = 1;
5476 chainvi->fullsize = vi->fullsize;
5477 chainvi->is_full_var = true;
5478 chainvi->is_global_var = false;
5479 gcc_assert (prev_vi->offset < chainvi->offset);
5480 prev_vi->next = chainvi->id;
5481 prev_vi = chainvi;
5482 insert_vi_for_tree (fn->static_chain_decl, chainvi);
5485 /* Create a variable for the return var. */
5486 if (DECL_RESULT (decl) != NULL
5487 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
5489 varinfo_t resultvi;
5490 const char *newname;
5491 char *tempname;
5492 tree resultdecl = decl;
5494 if (DECL_RESULT (decl))
5495 resultdecl = DECL_RESULT (decl);
5497 asprintf (&tempname, "%s.result", name);
5498 newname = ggc_strdup (tempname);
5499 free (tempname);
5501 resultvi = new_var_info (resultdecl, newname);
5502 resultvi->offset = fi_result;
5503 resultvi->size = 1;
5504 resultvi->fullsize = vi->fullsize;
5505 resultvi->is_full_var = true;
5506 if (DECL_RESULT (decl))
5507 resultvi->may_have_pointers = true;
5508 gcc_assert (prev_vi->offset < resultvi->offset);
5509 prev_vi->next = resultvi->id;
5510 prev_vi = resultvi;
5511 if (DECL_RESULT (decl))
5512 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
5515 /* Set up variables for each argument. */
5516 arg = DECL_ARGUMENTS (decl);
5517 for (i = 0; i < num_args; i++)
5519 varinfo_t argvi;
5520 const char *newname;
5521 char *tempname;
5522 tree argdecl = decl;
5524 if (arg)
5525 argdecl = arg;
5527 asprintf (&tempname, "%s.arg%d", name, i);
5528 newname = ggc_strdup (tempname);
5529 free (tempname);
5531 argvi = new_var_info (argdecl, newname);
5532 argvi->offset = fi_parm_base + i;
5533 argvi->size = 1;
5534 argvi->is_full_var = true;
5535 argvi->fullsize = vi->fullsize;
5536 if (arg)
5537 argvi->may_have_pointers = true;
5538 gcc_assert (prev_vi->offset < argvi->offset);
5539 prev_vi->next = argvi->id;
5540 prev_vi = argvi;
5541 if (arg)
5543 insert_vi_for_tree (arg, argvi);
5544 arg = DECL_CHAIN (arg);
5548 /* Add one representative for all further args. */
5549 if (is_varargs)
5551 varinfo_t argvi;
5552 const char *newname;
5553 char *tempname;
5554 tree decl;
5556 asprintf (&tempname, "%s.varargs", name);
5557 newname = ggc_strdup (tempname);
5558 free (tempname);
5560       /* We need something that can be pointed to for va_start.  */
5561 decl = build_fake_var_decl (ptr_type_node);
5563 argvi = new_var_info (decl, newname);
5564 argvi->offset = fi_parm_base + num_args;
5565 argvi->size = ~0;
5566 argvi->is_full_var = true;
5567 argvi->is_heap_var = true;
5568 argvi->fullsize = vi->fullsize;
5569 gcc_assert (prev_vi->offset < argvi->offset);
5570 prev_vi->next = argvi->id;
5571 prev_vi = argvi;
5574 return vi;
5578 /* Return true if FIELDSTACK contains fields that overlap.
5579 FIELDSTACK is assumed to be sorted by offset. */
5581 static bool
5582 check_for_overlaps (vec<fieldoff_s> fieldstack)
5584 fieldoff_s *fo = NULL;
5585 unsigned int i;
5586 HOST_WIDE_INT lastoffset = -1;
5588 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5590 if (fo->offset == lastoffset)
5591 return true;
5592 lastoffset = fo->offset;
5594 return false;
5597 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5598 This will also create any varinfo structures necessary for fields
5599 of DECL. */
5601 static varinfo_t
5602 create_variable_info_for_1 (tree decl, const char *name)
5604 varinfo_t vi, newvi;
5605 tree decl_type = TREE_TYPE (decl);
5606 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5607 auto_vec<fieldoff_s> fieldstack;
5608 fieldoff_s *fo;
5609 unsigned int i;
5611 if (!declsize
5612 || !tree_fits_uhwi_p (declsize))
5614 vi = new_var_info (decl, name);
5615 vi->offset = 0;
5616 vi->size = ~0;
5617 vi->fullsize = ~0;
5618 vi->is_unknown_size_var = true;
5619 vi->is_full_var = true;
5620 vi->may_have_pointers = true;
5621 return vi;
5624 /* Collect field information. */
5625 if (use_field_sensitive
5626 && var_can_have_subvars (decl)
5627 /* ??? Force us to not use subfields for global initializers
5628 in IPA mode. Else we'd have to parse arbitrary initializers. */
5629 && !(in_ipa_mode
5630 && is_global_var (decl)
5631 && DECL_INITIAL (decl)))
5633 fieldoff_s *fo = NULL;
5634 bool notokay = false;
5635 unsigned int i;
5637 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
5639 for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
5640 if (fo->has_unknown_size
5641 || fo->offset < 0)
5643 notokay = true;
5644 break;
5647 /* We can't sort them if we have a field with a variable sized type,
5648 which will make notokay = true. In that case, we are going to return
5649 without creating varinfos for the fields anyway, so sorting them is a
5650 waste to boot. */
5651 if (!notokay)
5653 sort_fieldstack (fieldstack);
5654         /* Due to some C++ FE issues, like PR 22488, we might end up with
5655            what appear to be overlapping fields even though they,
5656 in reality, do not overlap. Until the C++ FE is fixed,
5657 we will simply disable field-sensitivity for these cases. */
5658 notokay = check_for_overlaps (fieldstack);
5661 if (notokay)
5662 fieldstack.release ();
5665 /* If we didn't end up collecting sub-variables create a full
5666 variable for the decl. */
5667 if (fieldstack.length () <= 1
5668 || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5670 vi = new_var_info (decl, name);
5671 vi->offset = 0;
5672 vi->may_have_pointers = true;
5673 vi->fullsize = tree_to_uhwi (declsize);
5674 vi->size = vi->fullsize;
5675 vi->is_full_var = true;
5676 fieldstack.release ();
5677 return vi;
5680 vi = new_var_info (decl, name);
5681 vi->fullsize = tree_to_uhwi (declsize);
5682 for (i = 0, newvi = vi;
5683 fieldstack.iterate (i, &fo);
5684 ++i, newvi = vi_next (newvi))
5686 const char *newname = "NULL";
5687 char *tempname;
5689 if (dump_file)
5691 asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
5692 "+" HOST_WIDE_INT_PRINT_DEC, name, fo->offset, fo->size);
5693 newname = ggc_strdup (tempname);
5694 free (tempname);
5696 newvi->name = newname;
5697 newvi->offset = fo->offset;
5698 newvi->size = fo->size;
5699 newvi->fullsize = vi->fullsize;
5700 newvi->may_have_pointers = fo->may_have_pointers;
5701 newvi->only_restrict_pointers = fo->only_restrict_pointers;
5702 if (i + 1 < fieldstack.length ())
5704 varinfo_t tem = new_var_info (decl, name);
5705 newvi->next = tem->id;
5706 tem->head = vi->id;
5710 return vi;
5713 static unsigned int
5714 create_variable_info_for (tree decl, const char *name)
5716 varinfo_t vi = create_variable_info_for_1 (decl, name);
5717 unsigned int id = vi->id;
5719 insert_vi_for_tree (decl, vi);
5721 if (TREE_CODE (decl) != VAR_DECL)
5722 return id;
5724 /* Create initial constraints for globals. */
5725 for (; vi; vi = vi_next (vi))
5727 if (!vi->may_have_pointers
5728 || !vi->is_global_var)
5729 continue;
5731 /* Mark global restrict qualified pointers. */
5732 if ((POINTER_TYPE_P (TREE_TYPE (decl))
5733 && TYPE_RESTRICT (TREE_TYPE (decl)))
5734 || vi->only_restrict_pointers)
5736 make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
5737 continue;
5740 /* In non-IPA mode the initializer from nonlocal is all we need. */
5741 if (!in_ipa_mode
5742 || DECL_HARD_REGISTER (decl))
5743 make_copy_constraint (vi, nonlocal_id);
5745 /* In IPA mode parse the initializer and generate proper constraints
5746 for it. */
5747 else
5749 struct varpool_node *vnode = varpool_get_node (decl);
5751 /* For escaped variables initialize them from nonlocal. */
5752 if (!varpool_all_refs_explicit_p (vnode))
5753 make_copy_constraint (vi, nonlocal_id);
5755 /* If this is a global variable with an initializer and we are in
5756 IPA mode generate constraints for it. */
5757 if (DECL_INITIAL (decl)
5758 && vnode->definition)
5760 auto_vec<ce_s> rhsc;
5761 struct constraint_expr lhs, *rhsp;
5762 unsigned i;
5763 get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
5764 lhs.var = vi->id;
5765 lhs.offset = 0;
5766 lhs.type = SCALAR;
5767 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5768 process_constraint (new_constraint (lhs, *rhsp));
5769 /* If this is a variable that escapes from the unit
5770 the initializer escapes as well. */
5771 if (!varpool_all_refs_explicit_p (vnode))
5773 lhs.var = escaped_id;
5774 lhs.offset = 0;
5775 lhs.type = SCALAR;
5776 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5777 process_constraint (new_constraint (lhs, *rhsp));
5783 return id;
5786 /* Print out the points-to solution for VAR to FILE. */
5788 static void
5789 dump_solution_for_var (FILE *file, unsigned int var)
5791 varinfo_t vi = get_varinfo (var);
5792 unsigned int i;
5793 bitmap_iterator bi;
5795   /* Dump the solution for unified vars anyway; this avoids difficulties
5796      in scanning dumps in the testsuite.  */
5797 fprintf (file, "%s = { ", vi->name);
5798 vi = get_varinfo (find (var));
5799 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
5800 fprintf (file, "%s ", get_varinfo (i)->name);
5801 fprintf (file, "}");
5803 /* But note when the variable was unified. */
5804 if (vi->id != var)
5805 fprintf (file, " same as %s", vi->name);
5807 fprintf (file, "\n");
5810 /* Print the points-to solution for VAR to stdout. */
5812 DEBUG_FUNCTION void
5813 debug_solution_for_var (unsigned int var)
5815 dump_solution_for_var (stdout, var);
5818 /* Create varinfo structures for all of the variables in the
5819 function for intraprocedural mode. */
5821 static void
5822 intra_create_variable_infos (void)
5824 tree t;
5826 /* For each incoming pointer argument arg, create the constraint ARG
5827 = NONLOCAL or a dummy variable if it is a restrict qualified
5828 passed-by-reference argument. */
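  /* A sketch (informal notation): for  void foo (int *p, int * restrict q)
     the ordinary argument roughly gets p = &NONLOCAL, while Q is instead
     made to point to its own PARM_RESTRICT representative; restrict
     qualified by-reference arguments get a PARM_NOALIAS variable for the
     pointed-to object, as set up below.  */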
5829 for (t = DECL_ARGUMENTS (current_function_decl); t; t = DECL_CHAIN (t))
5831 varinfo_t p = get_vi_for_tree (t);
5833 /* For restrict qualified pointers to objects passed by
5834 reference build a real representative for the pointed-to object.
5835 Treat restrict qualified references the same. */
5836 if (TYPE_RESTRICT (TREE_TYPE (t))
5837 && ((DECL_BY_REFERENCE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
5838 || TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
5839 && !type_contains_placeholder_p (TREE_TYPE (TREE_TYPE (t))))
5841 struct constraint_expr lhsc, rhsc;
5842 varinfo_t vi;
5843 tree heapvar = build_fake_var_decl (TREE_TYPE (TREE_TYPE (t)));
5844 DECL_EXTERNAL (heapvar) = 1;
5845 vi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS");
5846 insert_vi_for_tree (heapvar, vi);
5847 lhsc.var = p->id;
5848 lhsc.type = SCALAR;
5849 lhsc.offset = 0;
5850 rhsc.var = vi->id;
5851 rhsc.type = ADDRESSOF;
5852 rhsc.offset = 0;
5853 process_constraint (new_constraint (lhsc, rhsc));
5854 for (; vi; vi = vi_next (vi))
5855 if (vi->may_have_pointers)
5857 if (vi->only_restrict_pointers)
5858 make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT");
5859 else
5860 make_copy_constraint (vi, nonlocal_id);
5862 continue;
5865 if (POINTER_TYPE_P (TREE_TYPE (t))
5866 && TYPE_RESTRICT (TREE_TYPE (t)))
5867 make_constraint_from_global_restrict (p, "PARM_RESTRICT");
5868 else
5870 for (; p; p = vi_next (p))
5872 if (p->only_restrict_pointers)
5873 make_constraint_from_global_restrict (p, "PARM_RESTRICT");
5874 else if (p->may_have_pointers)
5875 make_constraint_from (p, nonlocal_id);
5880 /* Add a constraint for a result decl that is passed by reference. */
5881 if (DECL_RESULT (cfun->decl)
5882 && DECL_BY_REFERENCE (DECL_RESULT (cfun->decl)))
5884 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (cfun->decl));
5886 for (p = result_vi; p; p = vi_next (p))
5887 make_constraint_from (p, nonlocal_id);
5890 /* Add a constraint for the incoming static chain parameter. */
5891 if (cfun->static_chain_decl != NULL_TREE)
5893 varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);
5895 for (p = chain_vi; p; p = vi_next (p))
5896 make_constraint_from (p, nonlocal_id);
5900 /* Structure used to put solution bitmaps in a hashtable so they can
5901 be shared among variables with the same points-to set. */
5903 typedef struct shared_bitmap_info
5905 bitmap pt_vars;
5906 hashval_t hashcode;
5907 } *shared_bitmap_info_t;
5908 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
5910 /* Shared_bitmap hashtable helpers. */
5912 struct shared_bitmap_hasher : typed_free_remove <shared_bitmap_info>
5914 typedef shared_bitmap_info value_type;
5915 typedef shared_bitmap_info compare_type;
5916 static inline hashval_t hash (const value_type *);
5917 static inline bool equal (const value_type *, const compare_type *);
5920 /* Hash function for a shared_bitmap_info_t */
5922 inline hashval_t
5923 shared_bitmap_hasher::hash (const value_type *bi)
5925 return bi->hashcode;
5928 /* Equality function for two shared_bitmap_info_t's. */
5930 inline bool
5931 shared_bitmap_hasher::equal (const value_type *sbi1, const compare_type *sbi2)
5933 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
5936 /* Shared_bitmap hashtable. */
5938 static hash_table <shared_bitmap_hasher> shared_bitmap_table;
5940 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
5941 existing instance if there is one, NULL otherwise. */
5943 static bitmap
5944 shared_bitmap_lookup (bitmap pt_vars)
5946 shared_bitmap_info **slot;
5947 struct shared_bitmap_info sbi;
5949 sbi.pt_vars = pt_vars;
5950 sbi.hashcode = bitmap_hash (pt_vars);
5952 slot = shared_bitmap_table.find_slot_with_hash (&sbi, sbi.hashcode,
5953 NO_INSERT);
5954 if (!slot)
5955 return NULL;
5956 else
5957 return (*slot)->pt_vars;
5961 /* Add a bitmap to the shared bitmap hashtable. */
5963 static void
5964 shared_bitmap_add (bitmap pt_vars)
5966 shared_bitmap_info **slot;
5967 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
5969 sbi->pt_vars = pt_vars;
5970 sbi->hashcode = bitmap_hash (pt_vars);
5972 slot = shared_bitmap_table.find_slot_with_hash (sbi, sbi->hashcode, INSERT);
5973 gcc_assert (!*slot);
5974 *slot = sbi;
5978 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
5980 static void
5981 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
5983 unsigned int i;
5984 bitmap_iterator bi;
5985 varinfo_t escaped_vi = get_varinfo (find (escaped_id));
5986 bool everything_escaped
5987 = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);
5989 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
5991 varinfo_t vi = get_varinfo (i);
5993 /* The only artificial variables that are allowed in a may-alias
5994 set are heap variables. */
5995 if (vi->is_artificial_var && !vi->is_heap_var)
5996 continue;
5998 if (everything_escaped
5999 || (escaped_vi->solution
6000 && bitmap_bit_p (escaped_vi->solution, i)))
6002 pt->vars_contains_escaped = true;
6003 pt->vars_contains_escaped_heap = vi->is_heap_var;
6006 if (TREE_CODE (vi->decl) == VAR_DECL
6007 || TREE_CODE (vi->decl) == PARM_DECL
6008 || TREE_CODE (vi->decl) == RESULT_DECL)
6010 /* If we are in IPA mode we will not recompute points-to
6011 sets after inlining so make sure they stay valid. */
6012 if (in_ipa_mode
6013 && !DECL_PT_UID_SET_P (vi->decl))
6014 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
6016 /* Add the decl to the points-to set. Note that the points-to
6017 set contains global variables. */
6018 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
6019 if (vi->is_global_var)
6020 pt->vars_contains_nonlocal = true;
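/* Illustrative sketch only (guarded out): once the loop above has filled
   INTO, membership of a user decl in the computed set is a plain bitmap
   query on its points-to uid.  DECL here is hypothetical.  */
#if 0
static bool
ptset_contains_decl_sketch (bitmap into, tree decl)
{
  return bitmap_bit_p (into, DECL_PT_UID (decl));
}
#endif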
6026 /* Compute and return the points-to solution for the variable ORIG_VI. */
6028 static struct pt_solution
6029 find_what_var_points_to (varinfo_t orig_vi)
6031 unsigned int i;
6032 bitmap_iterator bi;
6033 bitmap finished_solution;
6034 bitmap result;
6035 varinfo_t vi;
6036 void **slot;
6037 struct pt_solution *pt;
6039 /* This variable may have been collapsed, let's get the real
6040 variable. */
6041 vi = get_varinfo (find (orig_vi->id));
6043 /* See if we have already computed the solution and return it. */
6044 slot = pointer_map_insert (final_solutions, vi);
6045 if (*slot != NULL)
6046 return *(struct pt_solution *)*slot;
6048 *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
6049 memset (pt, 0, sizeof (struct pt_solution));
6051 /* Translate artificial variables into SSA_NAME_PTR_INFO
6052 attributes. */
6053 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6055 varinfo_t vi = get_varinfo (i);
6057 if (vi->is_artificial_var)
6059 if (vi->id == nothing_id)
6060 pt->null = 1;
6061 else if (vi->id == escaped_id)
6063 if (in_ipa_mode)
6064 pt->ipa_escaped = 1;
6065 else
6066 pt->escaped = 1;
6068 else if (vi->id == nonlocal_id)
6069 pt->nonlocal = 1;
6070 else if (vi->is_heap_var)
6071 /* We represent heapvars in the points-to set properly. */
6073 else if (vi->id == readonly_id)
6074 /* Nobody cares. */
6076 else if (vi->id == anything_id
6077 || vi->id == integer_id)
6078 pt->anything = 1;
6082 /* Instead of doing extra work, simply do not create
6083 elaborate points-to information for pt_anything pointers. */
6084 if (pt->anything)
6085 return *pt;
6087 /* Share the final set of variables when possible. */
6088 finished_solution = BITMAP_GGC_ALLOC ();
6089 stats.points_to_sets_created++;
6091 set_uids_in_ptset (finished_solution, vi->solution, pt);
6092 result = shared_bitmap_lookup (finished_solution);
6093 if (!result)
6095 shared_bitmap_add (finished_solution);
6096 pt->vars = finished_solution;
6098 else
6100 pt->vars = result;
6101 bitmap_clear (finished_solution);
6104 return *pt;
6107 /* Given a pointer variable P, fill in its points-to set. */
6109 static void
6110 find_what_p_points_to (tree p)
6112 struct ptr_info_def *pi;
6113 tree lookup_p = p;
6114 varinfo_t vi;
6116 /* For parameters, get at the points-to set for the actual parm
6117 decl. */
6118 if (TREE_CODE (p) == SSA_NAME
6119 && SSA_NAME_IS_DEFAULT_DEF (p)
6120 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
6121 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
6122 lookup_p = SSA_NAME_VAR (p);
6124 vi = lookup_vi_for_tree (lookup_p);
6125 if (!vi)
6126 return;
6128 pi = get_ptr_info (p);
6129 pi->pt = find_what_var_points_to (vi);
6133 /* Query statistics for points-to solutions. */
6135 static struct {
6136 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
6137 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
6138 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
6139 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
6140 } pta_stats;
6142 void
6143 dump_pta_stats (FILE *s)
6145 fprintf (s, "\nPTA query stats:\n");
6146 fprintf (s, " pt_solution_includes: "
6147 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6148 HOST_WIDE_INT_PRINT_DEC" queries\n",
6149 pta_stats.pt_solution_includes_no_alias,
6150 pta_stats.pt_solution_includes_no_alias
6151 + pta_stats.pt_solution_includes_may_alias);
6152 fprintf (s, " pt_solutions_intersect: "
6153 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6154 HOST_WIDE_INT_PRINT_DEC" queries\n",
6155 pta_stats.pt_solutions_intersect_no_alias,
6156 pta_stats.pt_solutions_intersect_no_alias
6157 + pta_stats.pt_solutions_intersect_may_alias);
6161 /* Reset the points-to solution *PT to a conservative default
6162 (point to anything). */
6164 void
6165 pt_solution_reset (struct pt_solution *pt)
6167 memset (pt, 0, sizeof (struct pt_solution));
6168 pt->anything = true;
6171 /* Set the points-to solution *PT to point only to the variables
6172 in VARS. VARS_CONTAINS_NONLOCAL specifies whether VARS contains
6173 global (nonlocal) variables. */
6176 void
6177 pt_solution_set (struct pt_solution *pt, bitmap vars,
6178 bool vars_contains_nonlocal)
6180 memset (pt, 0, sizeof (struct pt_solution));
6181 pt->vars = vars;
6182 pt->vars_contains_nonlocal = vars_contains_nonlocal;
6183 pt->vars_contains_escaped
6184 = (cfun->gimple_df->escaped.anything
6185 || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
6188 /* Set the points-to solution *PT to point only to the variable VAR. */
6190 void
6191 pt_solution_set_var (struct pt_solution *pt, tree var)
6193 memset (pt, 0, sizeof (struct pt_solution));
6194 pt->vars = BITMAP_GGC_ALLOC ();
6195 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6196 pt->vars_contains_nonlocal = is_global_var (var);
6197 pt->vars_contains_escaped
6198 = (cfun->gimple_df->escaped.anything
6199 || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
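/* Illustrative sketch only (guarded out), assuming points-to information
   for the current function has already been computed: a single-variable
   solution built by pt_solution_set_var answers the query functions
   defined further below.  VAR is a hypothetical user decl.  */
#if 0
static void
pt_solution_set_var_sketch (tree var)
{
  struct pt_solution pt;
  unsigned uid;

  pt_solution_set_var (&pt, var);
  /* The solution contains exactly VAR ...  */
  gcc_assert (pt_solution_includes (&pt, var));
  /* ... and is a singleton whose uid is the points-to uid of VAR.  */
  gcc_assert (pt_solution_singleton_p (&pt, &uid)
	      && uid == DECL_PT_UID (var));
}
#endif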
6202 /* Computes the union of the points-to solutions *DEST and *SRC and
6203 stores the result in *DEST. This changes the points-to bitmap
6204 of *DEST and thus must not be used if that bitmap might be shared.
6205 The points-to bitmaps of *SRC and *DEST will not be shared after
6206 this function if they were not before. */
6208 static void
6209 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6211 dest->anything |= src->anything;
6212 if (dest->anything)
6214 pt_solution_reset (dest);
6215 return;
6218 dest->nonlocal |= src->nonlocal;
6219 dest->escaped |= src->escaped;
6220 dest->ipa_escaped |= src->ipa_escaped;
6221 dest->null |= src->null;
6222 dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
6223 dest->vars_contains_escaped |= src->vars_contains_escaped;
6224 dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
6225 if (!src->vars)
6226 return;
6228 if (!dest->vars)
6229 dest->vars = BITMAP_GGC_ALLOC ();
6230 bitmap_ior_into (dest->vars, src->vars);
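/* Illustrative sketch only (guarded out): because pt_solution_ior_into
   mutates DEST's bitmap, a caller holding a possibly shared solution
   (for example one returned by find_what_var_points_to) should build the
   union into a fresh solution, as the indirect-call handling in
   ipa_pta_execute below does.  */
#if 0
static struct pt_solution
pt_solution_union_sketch (struct pt_solution a, struct pt_solution b)
{
  struct pt_solution res;

  memset (&res, 0, sizeof (struct pt_solution));
  /* Neither A's nor B's bitmap is touched; RES gets its own bitmap.  */
  pt_solution_ior_into (&res, &a);
  pt_solution_ior_into (&res, &b);
  return res;
}
#endif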
6233 /* Return true if the points-to solution *PT is empty. */
6235 bool
6236 pt_solution_empty_p (struct pt_solution *pt)
6238 if (pt->anything
6239 || pt->nonlocal)
6240 return false;
6242 if (pt->vars
6243 && !bitmap_empty_p (pt->vars))
6244 return false;
6246 /* If the solution includes ESCAPED, check if that is empty. */
6247 if (pt->escaped
6248 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6249 return false;
6251 /* If the solution includes the IPA ESCAPED set, check if that is empty. */
6252 if (pt->ipa_escaped
6253 && !pt_solution_empty_p (&ipa_escaped_pt))
6254 return false;
6256 return true;
6259 /* Return true if the points-to solution *PT points to a single var only, and
6260 return that var's uid in *UID. */
6262 bool
6263 pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
6265 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6266 || pt->null || pt->vars == NULL
6267 || !bitmap_single_bit_set_p (pt->vars))
6268 return false;
6270 *uid = bitmap_first_set_bit (pt->vars);
6271 return true;
6274 /* Return true if the points-to solution *PT includes global memory. */
6276 bool
6277 pt_solution_includes_global (struct pt_solution *pt)
6279 if (pt->anything
6280 || pt->nonlocal
6281 || pt->vars_contains_nonlocal
6282 /* The following is a hack to make the malloc escape hack work.
6283 In reality we'd need different sets for escaped-through-return
6284 and escaped-to-callees and passes would need to be updated. */
6285 || pt->vars_contains_escaped_heap)
6286 return true;
6288 /* 'escaped' is also a placeholder so we have to look into it. */
6289 if (pt->escaped)
6290 return pt_solution_includes_global (&cfun->gimple_df->escaped);
6292 if (pt->ipa_escaped)
6293 return pt_solution_includes_global (&ipa_escaped_pt);
6295 /* ??? This predicate is not correct for the IPA-PTA solution
6296 as we do not properly distinguish between unit escape points
6297 and global variables. */
6298 if (cfun->gimple_df->ipa_pta)
6299 return true;
6301 return false;
6304 /* Return true if the points-to solution *PT includes the variable
6305 declaration DECL. */
6307 static bool
6308 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
6310 if (pt->anything)
6311 return true;
6313 if (pt->nonlocal
6314 && is_global_var (decl))
6315 return true;
6317 if (pt->vars
6318 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
6319 return true;
6321 /* If the solution includes ESCAPED, check it. */
6322 if (pt->escaped
6323 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
6324 return true;
6326 /* If the solution includes the IPA ESCAPED set, check it. */
6327 if (pt->ipa_escaped
6328 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
6329 return true;
6331 return false;
6334 bool
6335 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6337 bool res = pt_solution_includes_1 (pt, decl);
6338 if (res)
6339 ++pta_stats.pt_solution_includes_may_alias;
6340 else
6341 ++pta_stats.pt_solution_includes_no_alias;
6342 return res;
6345 /* Return true if the points-to solutions PT1 and PT2 have a non-empty
6346 intersection. */
6348 static bool
6349 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
6351 if (pt1->anything || pt2->anything)
6352 return true;
6354 /* If either points to unknown global memory and the other points to
6355 any global memory they alias. */
6356 if ((pt1->nonlocal
6357 && (pt2->nonlocal
6358 || pt2->vars_contains_nonlocal))
6359 || (pt2->nonlocal
6360 && pt1->vars_contains_nonlocal))
6361 return true;
6363 /* If either points to all escaped memory and the other points to
6364 any escaped memory they alias. */
6365 if ((pt1->escaped
6366 && (pt2->escaped
6367 || pt2->vars_contains_escaped))
6368 || (pt2->escaped
6369 && pt1->vars_contains_escaped))
6370 return true;
6372 /* Check the escaped solution if required.
6373 ??? Do we need to check the local against the IPA escaped sets? */
6374 if ((pt1->ipa_escaped || pt2->ipa_escaped)
6375 && !pt_solution_empty_p (&ipa_escaped_pt))
6377 /* If both point to escaped memory and that solution
6378 is not empty they alias. */
6379 if (pt1->ipa_escaped && pt2->ipa_escaped)
6380 return true;
6382 /* If either points to escaped memory see if the escaped solution
6383 intersects with the other. */
6384 if ((pt1->ipa_escaped
6385 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
6386 || (pt2->ipa_escaped
6387 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
6388 return true;
6391 /* Now both pointers alias if their points-to solution intersects. */
6392 return (pt1->vars
6393 && pt2->vars
6394 && bitmap_intersect_p (pt1->vars, pt2->vars));
6397 bool
6398 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6400 bool res = pt_solutions_intersect_1 (pt1, pt2);
6401 if (res)
6402 ++pta_stats.pt_solutions_intersect_may_alias;
6403 else
6404 ++pta_stats.pt_solutions_intersect_no_alias;
6405 return res;
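/* Illustrative sketches only (guarded out): the two statistics-counting
   wrappers above are the oracles an alias query would use.  DECL and the
   pt_solution arguments here are hypothetical.  */
#if 0
static bool
may_access_decl_sketch (struct pt_solution *ptr_pt, tree decl)
{
  /* An access through the pointer may touch DECL only if DECL is in the
     pointer's points-to set.  */
  return pt_solution_includes (ptr_pt, decl);
}

static bool
accesses_may_overlap_sketch (struct pt_solution *pt1, struct pt_solution *pt2)
{
  /* Two indirect accesses may conflict only if their solutions
     intersect.  */
  return pt_solutions_intersect (pt1, pt2);
}
#endif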
6409 /* Dump points-to information to OUTFILE. */
6411 static void
6412 dump_sa_points_to_info (FILE *outfile)
6414 unsigned int i;
6416 fprintf (outfile, "\nPoints-to sets\n\n");
6418 if (dump_flags & TDF_STATS)
6420 fprintf (outfile, "Stats:\n");
6421 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6422 fprintf (outfile, "Non-pointer vars: %d\n",
6423 stats.nonpointer_vars);
6424 fprintf (outfile, "Statically unified vars: %d\n",
6425 stats.unified_vars_static);
6426 fprintf (outfile, "Dynamically unified vars: %d\n",
6427 stats.unified_vars_dynamic);
6428 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6429 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6430 fprintf (outfile, "Number of implicit edges: %d\n",
6431 stats.num_implicit_edges);
6434 for (i = 1; i < varmap.length (); i++)
6436 varinfo_t vi = get_varinfo (i);
6437 if (!vi->may_have_pointers)
6438 continue;
6439 dump_solution_for_var (outfile, i);
6444 /* Debug points-to information to stderr. */
6446 DEBUG_FUNCTION void
6447 debug_sa_points_to_info (void)
6449 dump_sa_points_to_info (stderr);
6453 /* Initialize the always-existing constraint variables NULL, ANYTHING,
6454 READONLY, ESCAPED, NONLOCAL, STOREDANYTHING, and INTEGER. */
6456 static void
6457 init_base_vars (void)
6459 struct constraint_expr lhs, rhs;
6460 varinfo_t var_anything;
6461 varinfo_t var_nothing;
6462 varinfo_t var_readonly;
6463 varinfo_t var_escaped;
6464 varinfo_t var_nonlocal;
6465 varinfo_t var_storedanything;
6466 varinfo_t var_integer;
6468 /* Variable ID zero is reserved and should be NULL. */
6469 varmap.safe_push (NULL);
6471 /* Create the NULL variable, used to represent that a variable points
6472 to NULL. */
6473 var_nothing = new_var_info (NULL_TREE, "NULL");
6474 gcc_assert (var_nothing->id == nothing_id);
6475 var_nothing->is_artificial_var = 1;
6476 var_nothing->offset = 0;
6477 var_nothing->size = ~0;
6478 var_nothing->fullsize = ~0;
6479 var_nothing->is_special_var = 1;
6480 var_nothing->may_have_pointers = 0;
6481 var_nothing->is_global_var = 0;
6483 /* Create the ANYTHING variable, used to represent that a variable
6484 points to some unknown piece of memory. */
6485 var_anything = new_var_info (NULL_TREE, "ANYTHING");
6486 gcc_assert (var_anything->id == anything_id);
6487 var_anything->is_artificial_var = 1;
6488 var_anything->size = ~0;
6489 var_anything->offset = 0;
6490 var_anything->fullsize = ~0;
6491 var_anything->is_special_var = 1;
6493 /* Anything points to anything. This makes deref constraints just
6494 work in the presence of linked lists and other p = *p type loops,
6495 by saying that *ANYTHING = ANYTHING. */
6496 lhs.type = SCALAR;
6497 lhs.var = anything_id;
6498 lhs.offset = 0;
6499 rhs.type = ADDRESSOF;
6500 rhs.var = anything_id;
6501 rhs.offset = 0;
6503 /* This specifically does not use process_constraint because
6504 process_constraint ignores all anything = anything constraints, since all
6505 but this one are redundant. */
6506 constraints.safe_push (new_constraint (lhs, rhs));
6508 /* Create the READONLY variable, used to represent that a variable
6509 points to readonly memory. */
6510 var_readonly = new_var_info (NULL_TREE, "READONLY");
6511 gcc_assert (var_readonly->id == readonly_id);
6512 var_readonly->is_artificial_var = 1;
6513 var_readonly->offset = 0;
6514 var_readonly->size = ~0;
6515 var_readonly->fullsize = ~0;
6516 var_readonly->is_special_var = 1;
6518 /* readonly memory points to anything, in order to make deref
6519 easier. In reality, it points to anything the particular
6520 readonly variable can point to, but we don't track this
6521 separately. */
6522 lhs.type = SCALAR;
6523 lhs.var = readonly_id;
6524 lhs.offset = 0;
6525 rhs.type = ADDRESSOF;
6526 rhs.var = readonly_id; /* FIXME */
6527 rhs.offset = 0;
6528 process_constraint (new_constraint (lhs, rhs));
6530 /* Create the ESCAPED variable, used to represent the set of escaped
6531 memory. */
6532 var_escaped = new_var_info (NULL_TREE, "ESCAPED");
6533 gcc_assert (var_escaped->id == escaped_id);
6534 var_escaped->is_artificial_var = 1;
6535 var_escaped->offset = 0;
6536 var_escaped->size = ~0;
6537 var_escaped->fullsize = ~0;
6538 var_escaped->is_special_var = 0;
6540 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6541 memory. */
6542 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL");
6543 gcc_assert (var_nonlocal->id == nonlocal_id);
6544 var_nonlocal->is_artificial_var = 1;
6545 var_nonlocal->offset = 0;
6546 var_nonlocal->size = ~0;
6547 var_nonlocal->fullsize = ~0;
6548 var_nonlocal->is_special_var = 1;
6550 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6551 lhs.type = SCALAR;
6552 lhs.var = escaped_id;
6553 lhs.offset = 0;
6554 rhs.type = DEREF;
6555 rhs.var = escaped_id;
6556 rhs.offset = 0;
6557 process_constraint (new_constraint (lhs, rhs));
6559 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6560 whole variable escapes. */
6561 lhs.type = SCALAR;
6562 lhs.var = escaped_id;
6563 lhs.offset = 0;
6564 rhs.type = SCALAR;
6565 rhs.var = escaped_id;
6566 rhs.offset = UNKNOWN_OFFSET;
6567 process_constraint (new_constraint (lhs, rhs));
6569 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6570 everything pointed to by escaped points to what global memory can
6571 point to. */
6572 lhs.type = DEREF;
6573 lhs.var = escaped_id;
6574 lhs.offset = 0;
6575 rhs.type = SCALAR;
6576 rhs.var = nonlocal_id;
6577 rhs.offset = 0;
6578 process_constraint (new_constraint (lhs, rhs));
6580 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6581 global memory may point to global memory and escaped memory. */
6582 lhs.type = SCALAR;
6583 lhs.var = nonlocal_id;
6584 lhs.offset = 0;
6585 rhs.type = ADDRESSOF;
6586 rhs.var = nonlocal_id;
6587 rhs.offset = 0;
6588 process_constraint (new_constraint (lhs, rhs));
6589 rhs.type = ADDRESSOF;
6590 rhs.var = escaped_id;
6591 rhs.offset = 0;
6592 process_constraint (new_constraint (lhs, rhs));
6594 /* Create the STOREDANYTHING variable, used to represent the set of
6595 variables stored to *ANYTHING. */
6596 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING");
6597 gcc_assert (var_storedanything->id == storedanything_id);
6598 var_storedanything->is_artificial_var = 1;
6599 var_storedanything->offset = 0;
6600 var_storedanything->size = ~0;
6601 var_storedanything->fullsize = ~0;
6602 var_storedanything->is_special_var = 0;
6604 /* Create the INTEGER variable, used to represent that a variable points
6605 to what an INTEGER "points to". */
6606 var_integer = new_var_info (NULL_TREE, "INTEGER");
6607 gcc_assert (var_integer->id == integer_id);
6608 var_integer->is_artificial_var = 1;
6609 var_integer->size = ~0;
6610 var_integer->fullsize = ~0;
6611 var_integer->offset = 0;
6612 var_integer->is_special_var = 1;
6614 /* INTEGER = ANYTHING, because we don't know where a dereference of
6615 a random integer will point to. */
6616 lhs.type = SCALAR;
6617 lhs.var = integer_id;
6618 lhs.offset = 0;
6619 rhs.type = ADDRESSOF;
6620 rhs.var = anything_id;
6621 rhs.offset = 0;
6622 process_constraint (new_constraint (lhs, rhs));
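/* Illustrative sketch only (guarded out): an ordinary constraint such as
   p = &x for hypothetical user decls P and X is built with the same
   lhs/rhs pattern used for the special variables above.  */
#if 0
static void
add_addressof_constraint_sketch (tree p, tree x)
{
  struct constraint_expr lhs, rhs;

  lhs.type = SCALAR;
  lhs.var = get_vi_for_tree (p)->id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = get_vi_for_tree (x)->id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
#endif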
6625 /* Initialize things necessary to perform PTA. */
6627 static void
6628 init_alias_vars (void)
6630 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
6632 bitmap_obstack_initialize (&pta_obstack);
6633 bitmap_obstack_initialize (&oldpta_obstack);
6634 bitmap_obstack_initialize (&predbitmap_obstack);
6636 constraint_pool = create_alloc_pool ("Constraint pool",
6637 sizeof (struct constraint), 30);
6638 variable_info_pool = create_alloc_pool ("Variable info pool",
6639 sizeof (struct variable_info), 30);
6640 constraints.create (8);
6641 varmap.create (8);
6642 vi_for_tree = pointer_map_create ();
6643 call_stmt_vars = pointer_map_create ();
6645 memset (&stats, 0, sizeof (stats));
6646 shared_bitmap_table.create (511);
6647 init_base_vars ();
6649 gcc_obstack_init (&fake_var_decl_obstack);
6651 final_solutions = pointer_map_create ();
6652 gcc_obstack_init (&final_solutions_obstack);
6655 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6656 predecessor edges. */
6658 static void
6659 remove_preds_and_fake_succs (constraint_graph_t graph)
6661 unsigned int i;
6663 /* Clear the implicit ref and address nodes from the successor
6664 lists. */
6665 for (i = 1; i < FIRST_REF_NODE; i++)
6667 if (graph->succs[i])
6668 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6669 FIRST_REF_NODE * 2);
6672 /* Free the successor list for the non-ref nodes. */
6673 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
6675 if (graph->succs[i])
6676 BITMAP_FREE (graph->succs[i]);
6679 /* Now reallocate the successor list to the new graph size, and blow
6680 away the predecessor bitmaps. */
6681 graph->size = varmap.length ();
6682 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6684 free (graph->implicit_preds);
6685 graph->implicit_preds = NULL;
6686 free (graph->preds);
6687 graph->preds = NULL;
6688 bitmap_obstack_release (&predbitmap_obstack);
6691 /* Solve the constraint set. */
6693 static void
6694 solve_constraints (void)
6696 struct scc_info *si;
6698 if (dump_file)
6699 fprintf (dump_file,
6700 "\nCollapsing static cycles and doing variable "
6701 "substitution\n");
6703 init_graph (varmap.length () * 2);
6705 if (dump_file)
6706 fprintf (dump_file, "Building predecessor graph\n");
6707 build_pred_graph ();
6709 if (dump_file)
6710 fprintf (dump_file, "Detecting pointer and location "
6711 "equivalences\n");
6712 si = perform_var_substitution (graph);
6714 if (dump_file)
6715 fprintf (dump_file, "Rewriting constraints and unifying "
6716 "variables\n");
6717 rewrite_constraints (graph, si);
6719 build_succ_graph ();
6721 free_var_substitution_info (si);
6723 /* Attach complex constraints to graph nodes. */
6724 move_complex_constraints (graph);
6726 if (dump_file)
6727 fprintf (dump_file, "Uniting pointer but not location equivalent "
6728 "variables\n");
6729 unite_pointer_equivalences (graph);
6731 if (dump_file)
6732 fprintf (dump_file, "Finding indirect cycles\n");
6733 find_indirect_cycles (graph);
6735 /* Implicit nodes and predecessors are no longer necessary at this
6736 point. */
6737 remove_preds_and_fake_succs (graph);
6739 if (dump_file && (dump_flags & TDF_GRAPH))
6741 fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
6742 "in dot format:\n");
6743 dump_constraint_graph (dump_file);
6744 fprintf (dump_file, "\n\n");
6747 if (dump_file)
6748 fprintf (dump_file, "Solving graph\n");
6750 solve_graph (graph);
6752 if (dump_file && (dump_flags & TDF_GRAPH))
6754 fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
6755 "in dot format:\n");
6756 dump_constraint_graph (dump_file);
6757 fprintf (dump_file, "\n\n");
6760 if (dump_file)
6761 dump_sa_points_to_info (dump_file);
6764 /* Create points-to sets for the current function. See the comments
6765 at the start of the file for an algorithmic overview. */
6767 static void
6768 compute_points_to_sets (void)
6770 basic_block bb;
6771 unsigned i;
6772 varinfo_t vi;
6774 timevar_push (TV_TREE_PTA);
6776 init_alias_vars ();
6778 intra_create_variable_infos ();
6780 /* Now walk all statements and build the constraint set. */
6781 FOR_EACH_BB (bb)
6783 gimple_stmt_iterator gsi;
6785 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6787 gimple phi = gsi_stmt (gsi);
6789 if (! virtual_operand_p (gimple_phi_result (phi)))
6790 find_func_aliases (phi);
6793 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6795 gimple stmt = gsi_stmt (gsi);
6797 find_func_aliases (stmt);
6801 if (dump_file)
6803 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
6804 dump_constraints (dump_file, 0);
6807 /* From the constraints compute the points-to sets. */
6808 solve_constraints ();
6810 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
6811 cfun->gimple_df->escaped = find_what_var_points_to (get_varinfo (escaped_id));
6813 /* Make sure the ESCAPED solution (which is used as placeholder in
6814 other solutions) does not reference itself. This simplifies
6815 points-to solution queries. */
6816 cfun->gimple_df->escaped.escaped = 0;
6818 /* Compute the points-to sets for pointer SSA_NAMEs. */
6819 for (i = 0; i < num_ssa_names; ++i)
6821 tree ptr = ssa_name (i);
6822 if (ptr
6823 && POINTER_TYPE_P (TREE_TYPE (ptr)))
6824 find_what_p_points_to (ptr);
6827 /* Compute the call-used/clobbered sets. */
6828 FOR_EACH_BB (bb)
6830 gimple_stmt_iterator gsi;
6832 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6834 gimple stmt = gsi_stmt (gsi);
6835 struct pt_solution *pt;
6836 if (!is_gimple_call (stmt))
6837 continue;
6839 pt = gimple_call_use_set (stmt);
6840 if (gimple_call_flags (stmt) & ECF_CONST)
6841 memset (pt, 0, sizeof (struct pt_solution));
6842 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
6844 *pt = find_what_var_points_to (vi);
6845 /* Escaped (and thus nonlocal) variables are always
6846 implicitly used by calls. */
6847 /* ??? ESCAPED can be empty even though NONLOCAL
6848 always escapes. */
6849 pt->nonlocal = 1;
6850 pt->escaped = 1;
6852 else
6854 /* If there is nothing special about this call then
6855 we have made everything that is used also escape. */
6856 *pt = cfun->gimple_df->escaped;
6857 pt->nonlocal = 1;
6860 pt = gimple_call_clobber_set (stmt);
6861 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
6862 memset (pt, 0, sizeof (struct pt_solution));
6863 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
6865 *pt = find_what_var_points_to (vi);
6866 /* Escaped (and thus nonlocal) variables are always
6867 implicitly clobbered by calls. */
6868 /* ??? ESCAPED can be empty even though NONLOCAL
6869 always escapes. */
6870 pt->nonlocal = 1;
6871 pt->escaped = 1;
6873 else
6875 /* If there is nothing special about this call then
6876 we have made everything that is used also escape. */
6877 *pt = cfun->gimple_df->escaped;
6878 pt->nonlocal = 1;
6883 timevar_pop (TV_TREE_PTA);
6887 /* Delete created points-to sets. */
6889 static void
6890 delete_points_to_sets (void)
6892 unsigned int i;
6894 shared_bitmap_table.dispose ();
6895 if (dump_file && (dump_flags & TDF_STATS))
6896 fprintf (dump_file, "Points to sets created:%d\n",
6897 stats.points_to_sets_created);
6899 pointer_map_destroy (vi_for_tree);
6900 pointer_map_destroy (call_stmt_vars);
6901 bitmap_obstack_release (&pta_obstack);
6902 constraints.release ();
6904 for (i = 0; i < graph->size; i++)
6905 graph->complex[i].release ();
6906 free (graph->complex);
6908 free (graph->rep);
6909 free (graph->succs);
6910 free (graph->pe);
6911 free (graph->pe_rep);
6912 free (graph->indirect_cycles);
6913 free (graph);
6915 varmap.release ();
6916 free_alloc_pool (variable_info_pool);
6917 free_alloc_pool (constraint_pool);
6919 obstack_free (&fake_var_decl_obstack, NULL);
6921 pointer_map_destroy (final_solutions);
6922 obstack_free (&final_solutions_obstack, NULL);
6926 /* Compute points-to information for every SSA_NAME pointer in the
6927 current function and compute the transitive closure of escaped
6928 variables to re-initialize the call-clobber states of local variables. */
6930 unsigned int
6931 compute_may_aliases (void)
6933 if (cfun->gimple_df->ipa_pta)
6935 if (dump_file)
6937 fprintf (dump_file, "\nNot re-computing points-to information "
6938 "because IPA points-to information is available.\n\n");
6940 /* But still dump what we do have. */
6941 dump_alias_info (dump_file);
6944 return 0;
6947 /* For each pointer P_i, determine the sets of variables that P_i may
6948 point-to. Compute the reachability set of escaped and call-used
6949 variables. */
6950 compute_points_to_sets ();
6952 /* Debugging dumps. */
6953 if (dump_file)
6954 dump_alias_info (dump_file);
6956 /* Deallocate memory used by aliasing data structures and the internal
6957 points-to solution. */
6958 delete_points_to_sets ();
6960 gcc_assert (!need_ssa_update_p (cfun));
6962 return 0;
6965 static bool
6966 gate_tree_pta (void)
6968 return flag_tree_pta;
6971 /* A dummy pass to cause points-to information to be computed via
6972 TODO_rebuild_alias. */
6974 namespace {
6976 const pass_data pass_data_build_alias =
6978 GIMPLE_PASS, /* type */
6979 "alias", /* name */
6980 OPTGROUP_NONE, /* optinfo_flags */
6981 true, /* has_gate */
6982 false, /* has_execute */
6983 TV_NONE, /* tv_id */
6984 ( PROP_cfg | PROP_ssa ), /* properties_required */
6985 0, /* properties_provided */
6986 0, /* properties_destroyed */
6987 0, /* todo_flags_start */
6988 TODO_rebuild_alias, /* todo_flags_finish */
6991 class pass_build_alias : public gimple_opt_pass
6993 public:
6994 pass_build_alias (gcc::context *ctxt)
6995 : gimple_opt_pass (pass_data_build_alias, ctxt)
6998 /* opt_pass methods: */
6999 bool gate () { return gate_tree_pta (); }
7001 }; // class pass_build_alias
7003 } // anon namespace
7005 gimple_opt_pass *
7006 make_pass_build_alias (gcc::context *ctxt)
7008 return new pass_build_alias (ctxt);
7011 /* A dummy pass to cause points-to information to be computed via
7012 TODO_rebuild_alias. */
7014 namespace {
7016 const pass_data pass_data_build_ealias =
7018 GIMPLE_PASS, /* type */
7019 "ealias", /* name */
7020 OPTGROUP_NONE, /* optinfo_flags */
7021 true, /* has_gate */
7022 false, /* has_execute */
7023 TV_NONE, /* tv_id */
7024 ( PROP_cfg | PROP_ssa ), /* properties_required */
7025 0, /* properties_provided */
7026 0, /* properties_destroyed */
7027 0, /* todo_flags_start */
7028 TODO_rebuild_alias, /* todo_flags_finish */
7031 class pass_build_ealias : public gimple_opt_pass
7033 public:
7034 pass_build_ealias (gcc::context *ctxt)
7035 : gimple_opt_pass (pass_data_build_ealias, ctxt)
7038 /* opt_pass methods: */
7039 bool gate () { return gate_tree_pta (); }
7041 }; // class pass_build_ealias
7043 } // anon namespace
7045 gimple_opt_pass *
7046 make_pass_build_ealias (gcc::context *ctxt)
7048 return new pass_build_ealias (ctxt);
7052 /* Return true if we should execute IPA PTA. */
7053 static bool
7054 gate_ipa_pta (void)
7056 return (optimize
7057 && flag_ipa_pta
7058 /* Don't bother doing anything if the program has errors. */
7059 && !seen_error ());
7062 /* IPA PTA solution for ESCAPED. */
7063 struct pt_solution ipa_escaped_pt
7064 = { true, false, false, false, false, false, false, false, NULL };
7066 /* Associate node with varinfo DATA. Worker for
7067 cgraph_for_node_and_aliases. */
7068 static bool
7069 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
7071 if ((node->alias || node->thunk.thunk_p)
7072 && node->analyzed)
7073 insert_vi_for_tree (node->decl, (varinfo_t)data);
7074 return false;
7077 /* Execute the driver for IPA PTA. */
7078 static unsigned int
7079 ipa_pta_execute (void)
7081 struct cgraph_node *node;
7082 struct varpool_node *var;
7083 int from;
7085 in_ipa_mode = 1;
7087 init_alias_vars ();
7089 if (dump_file && (dump_flags & TDF_DETAILS))
7091 dump_symtab (dump_file);
7092 fprintf (dump_file, "\n");
7095 /* Build the constraints. */
7096 FOR_EACH_DEFINED_FUNCTION (node)
7098 varinfo_t vi;
7099 /* Nodes without a body are not interesting. Especially do not
7100 visit clones at this point for now - we get duplicate decls
7101 there for inline clones at least. */
7102 if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
7103 continue;
7104 cgraph_get_body (node);
7106 gcc_assert (!node->clone_of);
7108 vi = create_function_info_for (node->decl,
7109 alias_get_name (node->decl));
7110 cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
7113 /* Create constraints for global variables and their initializers. */
7114 FOR_EACH_VARIABLE (var)
7116 if (var->alias && var->analyzed)
7117 continue;
7119 get_vi_for_tree (var->decl);
7122 if (dump_file)
7124 fprintf (dump_file,
7125 "Generating constraints for global initializers\n\n");
7126 dump_constraints (dump_file, 0);
7127 fprintf (dump_file, "\n");
7129 from = constraints.length ();
7131 FOR_EACH_DEFINED_FUNCTION (node)
7133 struct function *func;
7134 basic_block bb;
7136 /* Nodes without a body are not interesting. */
7137 if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
7138 continue;
7140 if (dump_file)
7142 fprintf (dump_file,
7143 "Generating constraints for %s", node->name ());
7144 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
7145 fprintf (dump_file, " (%s)",
7146 IDENTIFIER_POINTER
7147 (DECL_ASSEMBLER_NAME (node->decl)));
7148 fprintf (dump_file, "\n");
7151 func = DECL_STRUCT_FUNCTION (node->decl);
7152 push_cfun (func);
7154 /* For externally visible functions or functions annotated with
7155 attribute 'used', use local constraints for their arguments.
7156 For local functions we see all callers and thus do not need initial
7157 constraints for parameters. */
7158 if (node->used_from_other_partition
7159 || node->externally_visible
7160 || node->force_output)
7162 intra_create_variable_infos ();
7164 /* We also need to make function return values escape. Nothing
7165 escapes by returning from main though. */
7166 if (!MAIN_NAME_P (DECL_NAME (node->decl)))
7168 varinfo_t fi, rvi;
7169 fi = lookup_vi_for_tree (node->decl);
7170 rvi = first_vi_for_offset (fi, fi_result);
7171 if (rvi && rvi->offset == fi_result)
7173 struct constraint_expr includes;
7174 struct constraint_expr var;
7175 includes.var = escaped_id;
7176 includes.offset = 0;
7177 includes.type = SCALAR;
7178 var.var = rvi->id;
7179 var.offset = 0;
7180 var.type = SCALAR;
7181 process_constraint (new_constraint (includes, var));
7186 /* Build constraints for the function body. */
7187 FOR_EACH_BB_FN (bb, func)
7189 gimple_stmt_iterator gsi;
7191 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7192 gsi_next (&gsi))
7194 gimple phi = gsi_stmt (gsi);
7196 if (! virtual_operand_p (gimple_phi_result (phi)))
7197 find_func_aliases (phi);
7200 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7202 gimple stmt = gsi_stmt (gsi);
7204 find_func_aliases (stmt);
7205 find_func_clobbers (stmt);
7209 pop_cfun ();
7211 if (dump_file)
7213 fprintf (dump_file, "\n");
7214 dump_constraints (dump_file, from);
7215 fprintf (dump_file, "\n");
7217 from = constraints.length ();
7220 /* From the constraints compute the points-to sets. */
7221 solve_constraints ();
7223 /* Compute the global points-to sets for ESCAPED.
7224 ??? Note that the computed escape set is not correct
7225 for the whole unit as we fail to consider graph edges to
7226 externally visible functions. */
7227 ipa_escaped_pt = find_what_var_points_to (get_varinfo (escaped_id));
7229 /* Make sure the ESCAPED solution (which is used as placeholder in
7230 other solutions) does not reference itself. This simplifies
7231 points-to solution queries. */
7232 ipa_escaped_pt.ipa_escaped = 0;
7234 /* Assign the points-to sets to the SSA names in the unit. */
7235 FOR_EACH_DEFINED_FUNCTION (node)
7237 tree ptr;
7238 struct function *fn;
7239 unsigned i;
7240 varinfo_t fi;
7241 basic_block bb;
7242 struct pt_solution uses, clobbers;
7243 struct cgraph_edge *e;
7245 /* Nodes without a body are not interesting. */
7246 if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
7247 continue;
7249 fn = DECL_STRUCT_FUNCTION (node->decl);
7251 /* Compute the points-to sets for pointer SSA_NAMEs. */
7252 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
7254 if (ptr
7255 && POINTER_TYPE_P (TREE_TYPE (ptr)))
7256 find_what_p_points_to (ptr);
7259 /* Compute the call-use and call-clobber sets for all direct calls. */
7260 fi = lookup_vi_for_tree (node->decl);
7261 gcc_assert (fi->is_fn_info);
7262 clobbers
7263 = find_what_var_points_to (first_vi_for_offset (fi, fi_clobbers));
7264 uses = find_what_var_points_to (first_vi_for_offset (fi, fi_uses));
7265 for (e = node->callers; e; e = e->next_caller)
7267 if (!e->call_stmt)
7268 continue;
7270 *gimple_call_clobber_set (e->call_stmt) = clobbers;
7271 *gimple_call_use_set (e->call_stmt) = uses;
7274 /* Compute the call-use and call-clobber sets for indirect calls
7275 and calls to external functions. */
7276 FOR_EACH_BB_FN (bb, fn)
7278 gimple_stmt_iterator gsi;
7280 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7282 gimple stmt = gsi_stmt (gsi);
7283 struct pt_solution *pt;
7284 varinfo_t vi;
7285 tree decl;
7287 if (!is_gimple_call (stmt))
7288 continue;
7290 /* Handle direct calls to external functions. */
7291 decl = gimple_call_fndecl (stmt);
7292 if (decl
7293 && (!(fi = lookup_vi_for_tree (decl))
7294 || !fi->is_fn_info))
7296 pt = gimple_call_use_set (stmt);
7297 if (gimple_call_flags (stmt) & ECF_CONST)
7298 memset (pt, 0, sizeof (struct pt_solution));
7299 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7301 *pt = find_what_var_points_to (vi);
7302 /* Escaped (and thus nonlocal) variables are always
7303 implicitly used by calls. */
7304 /* ??? ESCAPED can be empty even though NONLOCAL
7305 always escapes. */
7306 pt->nonlocal = 1;
7307 pt->ipa_escaped = 1;
7309 else
7311 /* If there is nothing special about this call then
7312 we have made everything that is used also escape. */
7313 *pt = ipa_escaped_pt;
7314 pt->nonlocal = 1;
7317 pt = gimple_call_clobber_set (stmt);
7318 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7319 memset (pt, 0, sizeof (struct pt_solution));
7320 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7322 *pt = find_what_var_points_to (vi);
7323 /* Escaped (and thus nonlocal) variables are always
7324 implicitly clobbered by calls. */
7325 /* ??? ESCAPED can be empty even though NONLOCAL
7326 always escapes. */
7327 pt->nonlocal = 1;
7328 pt->ipa_escaped = 1;
7330 else
7332 /* If there is nothing special about this call then
7333 we have made everything that is used also escape. */
7334 *pt = ipa_escaped_pt;
7335 pt->nonlocal = 1;
7339 /* Handle indirect calls. */
7340 if (!decl
7341 && (fi = get_fi_for_callee (stmt)))
7343 /* We need to accumulate all clobbers/uses of all possible
7344 callees. */
7345 fi = get_varinfo (find (fi->id));
7346 /* If we cannot constrain the set of functions we'll end up
7347 calling we end up using/clobbering everything. */
7348 if (bitmap_bit_p (fi->solution, anything_id)
7349 || bitmap_bit_p (fi->solution, nonlocal_id)
7350 || bitmap_bit_p (fi->solution, escaped_id))
7352 pt_solution_reset (gimple_call_clobber_set (stmt));
7353 pt_solution_reset (gimple_call_use_set (stmt));
7355 else
7357 bitmap_iterator bi;
7358 unsigned i;
7359 struct pt_solution *uses, *clobbers;
7361 uses = gimple_call_use_set (stmt);
7362 clobbers = gimple_call_clobber_set (stmt);
7363 memset (uses, 0, sizeof (struct pt_solution));
7364 memset (clobbers, 0, sizeof (struct pt_solution));
7365 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
7367 struct pt_solution sol;
7369 vi = get_varinfo (i);
7370 if (!vi->is_fn_info)
7372 /* ??? We could be more precise here. */
7373 uses->nonlocal = 1;
7374 uses->ipa_escaped = 1;
7375 clobbers->nonlocal = 1;
7376 clobbers->ipa_escaped = 1;
7377 continue;
7380 if (!uses->anything)
7382 sol = find_what_var_points_to
7383 (first_vi_for_offset (vi, fi_uses));
7384 pt_solution_ior_into (uses, &sol);
7386 if (!clobbers->anything)
7388 sol = find_what_var_points_to
7389 (first_vi_for_offset (vi, fi_clobbers));
7390 pt_solution_ior_into (clobbers, &sol);
7398 fn->gimple_df->ipa_pta = true;
7401 delete_points_to_sets ();
7403 in_ipa_mode = 0;
7405 return 0;
7408 namespace {
7410 const pass_data pass_data_ipa_pta =
7412 SIMPLE_IPA_PASS, /* type */
7413 "pta", /* name */
7414 OPTGROUP_NONE, /* optinfo_flags */
7415 true, /* has_gate */
7416 true, /* has_execute */
7417 TV_IPA_PTA, /* tv_id */
7418 0, /* properties_required */
7419 0, /* properties_provided */
7420 0, /* properties_destroyed */
7421 0, /* todo_flags_start */
7422 0, /* todo_flags_finish */
7425 class pass_ipa_pta : public simple_ipa_opt_pass
7427 public:
7428 pass_ipa_pta (gcc::context *ctxt)
7429 : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
7432 /* opt_pass methods: */
7433 bool gate () { return gate_ipa_pta (); }
7434 unsigned int execute () { return ipa_pta_execute (); }
7436 }; // class pass_ipa_pta
7438 } // anon namespace
7440 simple_ipa_opt_pass *
7441 make_pass_ipa_pta (gcc::context *ctxt)
7443 return new pass_ipa_pta (ctxt);