Update .po files.
[official-gcc.git] / gcc / tree-ssa-structalias.c
blob0a4149489a9e8687a3c91923b9adc50484931e85
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2016 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "tree-pass.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "diagnostic-core.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "stmt.h"
37 #include "gimple-iterator.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "params.h"
41 #include "gimple-walk.h"
43 /* The idea behind this analyzer is to generate set constraints from the
44 program, then solve the resulting constraints in order to generate the
45 points-to sets.
47 Set constraints are a way of modeling program analysis problems that
48 involve sets. They consist of an inclusion constraint language,
49 describing the variables (each variable is a set) and operations that
50 are involved on the variables, and a set of rules that derive facts
51 from these operations. To solve a system of set constraints, you derive
52 all possible facts under the rules, which gives you the correct sets
53 as a consequence.
55 See "Efficient Field-sensitive pointer analysis for C" by "David
56 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
57 http://citeseer.ist.psu.edu/pearce04efficient.html
59 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
60 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
61 http://citeseer.ist.psu.edu/heintze01ultrafast.html
63 There are three types of real constraint expressions, DEREF,
64 ADDRESSOF, and SCALAR. Each constraint expression consists
65 of a constraint type, a variable, and an offset.
67 SCALAR is a constraint expression type used to represent x, whether
68 it appears on the LHS or the RHS of a statement.
69 DEREF is a constraint expression type used to represent *x, whether
70 it appears on the LHS or the RHS of a statement.
71 ADDRESSOF is a constraint expression used to represent &x, whether
72 it appears on the LHS or the RHS of a statement.
74 Each pointer variable in the program is assigned an integer id, and
75 each field of a structure variable is assigned an integer id as well.
77 Structure variables are linked to their list of fields through a "next
78 field" in each variable that points to the next field in offset
79 order.
80 Each variable for a structure field has
82 1. "size", that tells the size in bits of that field.
83 2. "fullsize", that tells the size in bits of the entire structure.
84 3. "offset", that tells the offset in bits from the beginning of the
85 structure to this field.
87 Thus,
88 struct f
90 int a;
91 int b;
92 } foo;
93 int *bar;
95 looks like
97 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
98 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
99 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
102 In order to solve the system of set constraints, the following is
103 done:
105 1. Each constraint variable x has a solution set associated with it,
106 Sol(x).
108 2. Constraints are separated into direct, copy, and complex.
109 Direct constraints are ADDRESSOF constraints that require no extra
110 processing, such as P = &Q
111 Copy constraints are those of the form P = Q.
112 Complex constraints are all the constraints involving dereferences
113 and offsets (including offsetted copies).
115 3. All direct constraints of the form P = &Q are processed, such
116 that Q is added to Sol(P)
118 4. All complex constraints for a given constraint variable are stored in a
119 linked list attached to that variable's node.
121 5. A directed graph is built out of the copy constraints. Each
122 constraint variable is a node in the graph, and an edge from
123 Q to P is added for each copy constraint of the form P = Q
125 6. The graph is then walked, and solution sets are
126 propagated along the copy edges, such that an edge from Q to P
127 causes Sol(P) <- Sol(P) union Sol(Q).
129 7. As we visit each node, all complex constraints associated with
130 that node are processed by adding appropriate copy edges to the graph, or the
131 appropriate variables to the solution set.
133 8. The process of walking the graph is iterated until no solution
134 sets change.
136 Prior to walking the graph in steps 6 and 7, We perform static
137 cycle elimination on the constraint graph, as well
138 as off-line variable substitution.
140 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
141 on and turned into anything), but isn't. You can just see what offset
142 inside the pointed-to struct it's going to access.
144 TODO: Constant bounded arrays can be handled as if they were structs of the
145 same number of elements.
147 TODO: Modeling heap and incoming pointers becomes much better if we
148 add fields to them as we discover them, which we could do.
150 TODO: We could handle unions, but to be honest, it's probably not
151 worth the pain or slowdown. */
153 /* IPA-PTA optimizations possible.
155 When the indirect function called is ANYTHING we can add disambiguation
156 based on the function signatures (or simply the parameter count which
157 is the varinfo size). We also do not need to consider functions that
158 do not have their address taken.
160 The is_global_var bit which marks escape points is overly conservative
161 in IPA mode. Split it to is_escape_point and is_global_var - only
162 externally visible globals are escape points in IPA mode.
163 There is now is_ipa_escape_point but this is only used in a few
164 selected places.
166 The way we introduce DECL_PT_UID to avoid fixing up all points-to
167 sets in the translation unit when we copy a DECL during inlining
168 pessimizes precision. The advantage is that the DECL_PT_UID keeps
169 compile-time and memory usage overhead low - the points-to sets
170 do not grow or get unshared as they would during a fixup phase.
171 An alternative solution is to delay IPA PTA until after all
172 inlining transformations have been applied.
174 The way we propagate clobber/use information isn't optimized.
175 It should use a new complex constraint that properly filters
176 out local variables of the callee (though that would make
177 the sets invalid after inlining). OTOH we might as well
178 admit defeat to WHOPR and simply do all the clobber/use analysis
179 and propagation after PTA finished but before we threw away
180 points-to information for memory variables. WHOPR and PTA
181 do not play along well anyway - the whole constraint solving
182 would need to be done in WPA phase and it will be very interesting
183 to apply the results to local SSA names during LTRANS phase.
185 We probably should compute a per-function unit-ESCAPE solution
186 propagating it simply like the clobber / uses solutions. The
187 solution can go alongside the non-IPA escaped solution and be
188 used to query which vars escape the unit through a function.
189 This is also required to make the escaped-HEAP trick work in IPA mode.
191 We never put function decls in points-to sets so we do not
192 keep the set of called functions for indirect calls.
194 And probably more. */
/* Whether fields of a structure are tracked as separate constraint
   variables (field-sensitive analysis) or collapsed into one.  */
static bool use_field_sensitive = true;

/* Nonzero while running the whole-program (IPA) flavor of the
   points-to analysis; affects escape handling elsewhere in this file.  */
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps.  */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets.  */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables.  */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps; released after each iteration.  */
static bitmap_obstack iteration_obstack;
/* Forward declarations; definitions live later in this file.  */
static unsigned int create_variable_info_for (tree, const char *, bool);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

struct constraint;
typedef struct constraint *constraint_t;

/* Like EXECUTE_IF_SET_IN_BITMAP but a no-op when bitmap A is NULL
   (several per-node bitmaps in the constraint graph are allocated
   lazily and may be NULL).  */
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
/* Counters describing the size of the constraint system and the work
   the solver did; dumped for statistics only.  */
static struct constraint_stats
{
  /* Number of constraint variables created.  */
  unsigned int total_vars;
  /* Variables found to never contain pointers.  */
  unsigned int nonpointer_vars;
  /* Nodes unified before solving (offline substitution).  */
  unsigned int unified_vars_static;
  /* Nodes unified during solving (online cycle elimination).  */
  unsigned int unified_vars_dynamic;
  /* Worklist iterations performed by the solver.  */
  unsigned int iterations;
  /* Explicit copy edges in the constraint graph.  */
  unsigned int num_edges;
  /* Implicit (predecessor) edges added for substitution.  */
  unsigned int num_implicit_edges;
  /* Final pt_solution structures created.  */
  unsigned int points_to_sets_created;
} stats;
/* Per-constraint-variable record.  Each program variable (and, in
   field-sensitive mode, each field of a structure) gets one of these;
   they are indexed by ID into the varmap vector.  */
struct variable_info
{
  /* ID of this variable  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable.  */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var : 1;

  /* True if this field may contain pointers.  */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers.  */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a heap var created for a restrict qualified
     pointer.  */
  unsigned int is_restrict_var : 1;

  /* True if this represents a global variable.  */
  unsigned int is_global_var : 1;

  /* True if this represents a module escape point for IPA analysis.  */
  unsigned int is_ipa_escape_point : 1;

  /* True if this represents a IPA function info.  */
  unsigned int is_fn_info : 1;

  /* ??? Store somewhere better.  */
  unsigned short ruid;

  /* The ID of the variable for the next field in this structure
     or zero for the last field in this structure.  ID zero is the
     chain terminator, not a valid variable.  */
  unsigned next;

  /* The ID of the variable for the first field in this structure.  */
  unsigned head;

  /* Offset of this variable, in bits, from the base variable  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* Name of this variable  */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  */
  bitmap oldsolution;
};

typedef struct variable_info *varinfo_t;
static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
						   unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
static inline bool type_can_have_subvars (const_tree);
static void make_param_constraints (varinfo_t);

/* Pool of variable info structures.  */
static object_allocator<variable_info> variable_info_pool
  ("Variable info pool");

/* Map varinfo to final pt_solution.  */
static hash_map<varinfo_t, pt_solution *> *final_solutions;
/* Obstack the final pt_solution structures are allocated on.  */
struct obstack final_solutions_obstack;

/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id.  */
static vec<varinfo_t> varmap;
/* Return the varmap element N, i.e. the variable info record whose
   id is N.  No bounds checking beyond vec's own.  */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return varmap[n];
}
/* Return the next variable in the list of sub-variables of VI
   or NULL if VI is the last sub-variable (VI->next == 0, and
   varmap slot zero holds the NULL terminator).  */

static inline varinfo_t
vi_next (varinfo_t vi)
{
  return get_varinfo (vi->next);
}
/* Static IDs for the special variables.  Variable ID zero is unused
   and used as terminator for the sub-variable chain.  The remaining
   ids name the artificial variables created at initialization time
   (NOTHING, ANYTHING, the string constant var, ESCAPED, NONLOCAL,
   STOREDANYTHING and INTEGER).  */
enum { nothing_id = 1, anything_id = 2, string_id = 3,
       escaped_id = 4, nonlocal_id = 5,
       storedanything_id = 6, integer_id = 7 };
/* Return a new variable info structure consisting for a variable
   named NAME associated with tree T (NULL_TREE for artificial vars),
   and append it to the vector of variable info structures.  If ADD_ID
   and we are dumping, make the name unique by appending "(id)".  */

static varinfo_t
new_var_info (tree t, const char *name, bool add_id)
{
  unsigned index = varmap.length ();
  varinfo_t ret = variable_info_pool.allocate ();

  if (dump_file && add_id)
    {
      /* Uniquify the dump name; ggc_strdup keeps it alive for dumping.  */
      char *tempname = xasprintf ("%s(%d)", name, index);
      name = ggc_strdup (tempname);
      free (tempname);
    }

  ret->id = index;
  ret->name = name;
  ret->decl = t;
  /* Vars without decl are artificial and do not have sub-variables.  */
  ret->is_artificial_var = (t == NULL_TREE);
  ret->is_special_var = false;
  ret->is_unknown_size_var = false;
  ret->is_full_var = (t == NULL_TREE);
  ret->is_heap_var = false;
  ret->may_have_pointers = true;
  ret->only_restrict_pointers = false;
  ret->is_restrict_var = false;
  ret->ruid = 0;
  /* Artificial vars are conservatively treated as global until a decl
     proves otherwise below.  */
  ret->is_global_var = (t == NULL_TREE);
  ret->is_ipa_escape_point = false;
  ret->is_fn_info = false;
  if (t && DECL_P (t))
    ret->is_global_var = (is_global_var (t)
			  /* We have to treat even local register variables
			     as escape points.  */
			  || (TREE_CODE (t) == VAR_DECL
			      && DECL_HARD_REGISTER (t)));
  ret->solution = BITMAP_ALLOC (&pta_obstack);
  /* oldsolution is allocated lazily from oldpta_obstack when needed.  */
  ret->oldsolution = NULL;
  ret->next = 0;
  ret->head = ret->id;

  stats.total_vars++;

  varmap.safe_push (ret);

  return ret;
}
/* A map mapping call statements to per-stmt variables for uses
   and clobbers specific to the call.  */
static hash_map<gimple *, varinfo_t> *call_stmt_vars;

/* Lookup or create the variable for the call statement CALL.

   Each call gets a pair of artificial variables: "CALLUSED" (returned)
   chained via ->next to "CALLCLOBBERED".  The offset/size/fullsize
   values lay the pair out as two adjacent one-bit fields of a
   two-bit whole, so vi_next on the use var yields the clobber var.  */

static varinfo_t
get_call_vi (gcall *call)
{
  varinfo_t vi, vi2;

  bool existed;
  varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
  if (existed)
    return *slot_p;

  vi = new_var_info (NULL_TREE, "CALLUSED", true);
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = 2;
  vi->is_full_var = true;

  vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED", true);
  vi2->offset = 1;
  vi2->size = 1;
  vi2->fullsize = 2;
  vi2->is_full_var = true;

  vi->next = vi2->id;

  *slot_p = vi;
  return vi;
}
436 /* Lookup the variable for the call statement CALL representing
437 the uses. Returns NULL if there is nothing special about this call. */
439 static varinfo_t
440 lookup_call_use_vi (gcall *call)
442 varinfo_t *slot_p = call_stmt_vars->get (call);
443 if (slot_p)
444 return *slot_p;
446 return NULL;
449 /* Lookup the variable for the call statement CALL representing
450 the clobbers. Returns NULL if there is nothing special about this call. */
452 static varinfo_t
453 lookup_call_clobber_vi (gcall *call)
455 varinfo_t uses = lookup_call_use_vi (call);
456 if (!uses)
457 return NULL;
459 return vi_next (uses);
/* Lookup or create the variable for the call statement CALL representing
   the uses.  Creation happens in get_call_vi, which also creates the
   paired clobber variable.  */

static varinfo_t
get_call_use_vi (gcall *call)
{
  return get_call_vi (call);
}
/* Lookup or create the variable for the call statement CALL representing
   the clobbers.  The clobber var is chained directly after the use var,
   hence the vi_next.  */

static varinfo_t ATTRIBUTE_UNUSED
get_call_clobber_vi (gcall *call)
{
  return vi_next (get_call_vi (call));
}
/* The three kinds of constraint expression: x (SCALAR), *x (DEREF)
   and &x (ADDRESSOF) — see the overview comment at the top of the
   file.  */
enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.  */
  HOST_WIDE_INT offset;
};

/* Use 0x8000... as special unknown offset.  */
#define UNKNOWN_OFFSET HOST_WIDE_INT_MIN

typedef struct constraint_expr ce_s;
static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
static void get_constraint_for (tree, vec<ce_s> *);
static void get_constraint_for_rhs (tree, vec<ce_s> *);
static void do_deref (vec<ce_s> *);
/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.  */
struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from.  */

static vec<constraint_t> constraints;
static object_allocator<constraint> constraint_pool ("Constraint pool");
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  All of the per-node arrays below are
   indexed by constraint-variable id (plus the REF nodes, see
   FIRST_REF_NODE).  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map.  */
  unsigned int size;

  /* Explicit successors of each node.  */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution).  */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution).  */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified.  */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution.  */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built).  */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets.  */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution.  */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding.  */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph.  */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes.  */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution.  */
  bitmap address_taken;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  */
  vec<constraint_t> *complex;
};

/* The single constraint graph being solved.  */
static constraint_graph_t graph;
/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  REF node i corresponds to *(variable i - FIRST_REF_NODE).  */
#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
610 /* Return the representative node for NODE, if NODE has been unioned
611 with another NODE.
612 This function performs path compression along the way to finding
613 the representative. */
615 static unsigned int
616 find (unsigned int node)
618 gcc_checking_assert (node < graph->size);
619 if (graph->rep[node] != node)
620 return graph->rep[node] = find (graph->rep[node]);
621 return node;
624 /* Union the TO and FROM nodes to the TO nodes.
625 Note that at some point in the future, we may want to do
626 union-by-rank, in which case we are going to have to return the
627 node we unified to. */
629 static bool
630 unite (unsigned int to, unsigned int from)
632 gcc_checking_assert (to < graph->size && from < graph->size);
633 if (to != from && graph->rep[from] != to)
635 graph->rep[from] = to;
636 return true;
638 return false;
/* Create a new constraint consisting of LHS and RHS expressions.
   The constraint is allocated from constraint_pool and is not yet
   entered into the constraints vector.  */

static constraint_t
new_constraint (const struct constraint_expr lhs,
		const struct constraint_expr rhs)
{
  constraint_t ret = constraint_pool.allocate ();
  ret->lhs = lhs;
  ret->rhs = rhs;
  return ret;
}
653 /* Print out constraint C to FILE. */
655 static void
656 dump_constraint (FILE *file, constraint_t c)
658 if (c->lhs.type == ADDRESSOF)
659 fprintf (file, "&");
660 else if (c->lhs.type == DEREF)
661 fprintf (file, "*");
662 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
663 if (c->lhs.offset == UNKNOWN_OFFSET)
664 fprintf (file, " + UNKNOWN");
665 else if (c->lhs.offset != 0)
666 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
667 fprintf (file, " = ");
668 if (c->rhs.type == ADDRESSOF)
669 fprintf (file, "&");
670 else if (c->rhs.type == DEREF)
671 fprintf (file, "*");
672 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
673 if (c->rhs.offset == UNKNOWN_OFFSET)
674 fprintf (file, " + UNKNOWN");
675 else if (c->rhs.offset != 0)
676 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
/* Prototypes for the DEBUG_FUNCTION entry points so they are callable
   from the debugger.  */
void debug_constraint (constraint_t);
void debug_constraints (void);
void debug_constraint_graph (void);
void debug_solution_for_var (unsigned int);
void debug_sa_points_to_info (void);
void debug_varinfo (varinfo_t);
void debug_varmap (void);

/* Print out constraint C to stderr, followed by a newline.  */

DEBUG_FUNCTION void
debug_constraint (constraint_t c)
{
  dump_constraint (stderr, c);
  fprintf (stderr, "\n");
}
697 /* Print out all constraints to FILE */
699 static void
700 dump_constraints (FILE *file, int from)
702 int i;
703 constraint_t c;
704 for (i = from; constraints.iterate (i, &c); i++)
705 if (c)
707 dump_constraint (file, c);
708 fprintf (file, "\n");
/* Print out all constraints to stderr.  */

DEBUG_FUNCTION void
debug_constraints (void)
{
  dump_constraints (stderr, 0);
}
/* Print the constraint graph in dot format.  Nodes are labelled with
   their variable name (REF nodes get a '*' prefix) plus any complex
   constraints; only representative (non-unified) nodes are emitted.
   NOTE(review): interior spacing of the header format strings was lost
   in transit — confirm against the upstream file before relying on
   exact dot output.  */

static void
dump_constraint_graph (FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      if (find (i) != i)
	continue;
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->complex[i].exists ())
	{
	  unsigned j;
	  constraint_t c;
	  fprintf (file, " [label=\"\\N\\n");
	  for (j = 0; graph->complex[i].iterate (j, &c); ++j)
	    {
	      dump_constraint (file, c);
	      fprintf (file, "\\l");
	    }
	  fprintf (file, "\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (find (i) != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
	{
	  unsigned to = find (j);
	  /* Skip self-edges created by unification.  */
	  if (i == to)
	    continue;
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (to < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (to)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
/* Print out the constraint graph to stderr.  */

DEBUG_FUNCTION void
debug_constraint_graph (void)
{
  dump_constraint_graph (stderr);
}
803 /* SOLVER FUNCTIONS
805 The solver is a simple worklist solver, that works on the following
806 algorithm:
808 sbitmap changed_nodes = all zeroes;
809 changed_count = 0;
810 For each node that is not already collapsed:
811 changed_count++;
812 set bit in changed nodes
814 while (changed_count > 0)
816 compute topological ordering for constraint graph
818 find and collapse cycles in the constraint graph (updating
819 changed if necessary)
821 for each node (n) in the graph in topological order:
822 changed_count--;
824 Process each complex constraint associated with the node,
825 updating changed if necessary.
827 For each outgoing edge from n, propagate the solution from n to
828 the destination of the edge, updating changed as necessary.
830 } */
832 /* Return true if two constraint expressions A and B are equal. */
834 static bool
835 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
837 return a.type == b.type && a.var == b.var && a.offset == b.offset;
840 /* Return true if constraint expression A is less than constraint expression
841 B. This is just arbitrary, but consistent, in order to give them an
842 ordering. */
844 static bool
845 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
847 if (a.type == b.type)
849 if (a.var == b.var)
850 return a.offset < b.offset;
851 else
852 return a.var < b.var;
854 else
855 return a.type < b.type;
858 /* Return true if constraint A is less than constraint B. This is just
859 arbitrary, but consistent, in order to give them an ordering. */
861 static bool
862 constraint_less (const constraint_t &a, const constraint_t &b)
864 if (constraint_expr_less (a->lhs, b->lhs))
865 return true;
866 else if (constraint_expr_less (b->lhs, a->lhs))
867 return false;
868 else
869 return constraint_expr_less (a->rhs, b->rhs);
872 /* Return true if two constraints A and B are equal. */
874 static bool
875 constraint_equal (struct constraint a, struct constraint b)
877 return constraint_expr_equal (a.lhs, b.lhs)
878 && constraint_expr_equal (a.rhs, b.rhs);
882 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
884 static constraint_t
885 constraint_vec_find (vec<constraint_t> vec,
886 struct constraint lookfor)
888 unsigned int place;
889 constraint_t found;
891 if (!vec.exists ())
892 return NULL;
894 place = vec.lower_bound (&lookfor, constraint_less);
895 if (place >= vec.length ())
896 return NULL;
897 found = vec[place];
898 if (!constraint_equal (*found, lookfor))
899 return NULL;
900 return found;
903 /* Union two constraint vectors, TO and FROM. Put the result in TO.
904 Returns true of TO set is changed. */
906 static bool
907 constraint_set_union (vec<constraint_t> *to,
908 vec<constraint_t> *from)
910 int i;
911 constraint_t c;
912 bool any_change = false;
914 FOR_EACH_VEC_ELT (*from, i, c)
916 if (constraint_vec_find (*to, *c) == NULL)
918 unsigned int place = to->lower_bound (c, constraint_less);
919 to->safe_insert (place, c);
920 any_change = true;
923 return any_change;
/* Expands the solution in SET to all sub-fields of variables included.
   The expansion is cached in *EXPANDED (allocated from
   iteration_obstack on first use) and reused on later calls.
   Returns the expanded bitmap.  */

static bitmap
solution_set_expand (bitmap set, bitmap *expanded)
{
  bitmap_iterator bi;
  unsigned j;

  if (*expanded)
    return *expanded;

  *expanded = BITMAP_ALLOC (&iteration_obstack);

  /* In a first pass expand to the head of the variables we need to
     add all sub-fields off.  This avoids quadratic behavior.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      /* Artificial and single-field vars have no sub-fields to add.  */
      if (v->is_artificial_var
	  || v->is_full_var)
	continue;
      bitmap_set_bit (*expanded, v->head);
    }

  /* In the second pass now expand all head variables with subfields.  */
  EXECUTE_IF_SET_IN_BITMAP (*expanded, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      if (v->head != j)
	continue;
      /* Walk the sub-variable chain and record every field.  */
      for (v = vi_next (v); v != NULL; v = vi_next (v))
	bitmap_set_bit (*expanded, v->id);
    }

  /* And finally set the rest of the bits from SET.  */
  bitmap_ior_into (*expanded, set);

  return *expanded;
}
/* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
   process.  *EXPANDED_DELTA caches the expansion of DELTA when INC is
   UNKNOWN_OFFSET.  Returns true if TO changed.  */

static bool
set_union_with_increment (bitmap to, bitmap delta, HOST_WIDE_INT inc,
			  bitmap *expanded_delta)
{
  bool changed = false;
  bitmap_iterator bi;
  unsigned int i;

  /* If the solution of DELTA contains anything it is good enough to transfer
     this to TO.  */
  if (bitmap_bit_p (delta, anything_id))
    return bitmap_set_bit (to, anything_id);

  /* If the offset is unknown we have to expand the solution to
     all subfields.  */
  if (inc == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      changed |= bitmap_ior_into (to, delta);
      return changed;
    }

  /* For non-zero offset union the offsetted solution into the destination.  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* If this is a variable with just one field just set its bit
	 in the result.  */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	changed |= bitmap_set_bit (to, i);
      else
	{
	  HOST_WIDE_INT fieldoffset = vi->offset + inc;
	  unsigned HOST_WIDE_INT size = vi->size;

	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.  */
	  if (fieldoffset < 0)
	    vi = get_varinfo (vi->head);
	  else
	    vi = first_or_preceding_vi_for_offset (vi, fieldoffset);

	  do
	    {
	      changed |= bitmap_set_bit (to, vi->id);
	      if (vi->is_full_var
		  || vi->next == 0)
		break;

	      /* We have to include all fields that overlap the current field
		 shifted by inc.  */
	      vi = vi_next (vi);
	    }
	  while (vi->offset < fieldoffset + size);
	}
    }

  return changed;
}
1032 /* Insert constraint C into the list of complex constraints for graph
1033 node VAR. */
1035 static void
1036 insert_into_complex (constraint_graph_t graph,
1037 unsigned int var, constraint_t c)
1039 vec<constraint_t> complex = graph->complex[var];
1040 unsigned int place = complex.lower_bound (c, constraint_less);
1042 /* Only insert constraints that do not already exist. */
1043 if (place >= complex.length ()
1044 || !constraint_equal (*c, *complex[place]))
1045 graph->complex[var].safe_insert (place, c);
/* Condense two variable nodes into a single variable node, by moving
   all associated info from FROM to TO.  Returns true if TO node's
   constraint set changes after the merge.  FROM must already have TO
   as its union-find representative.  */

static bool
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
{
  unsigned int i;
  constraint_t c;
  bool any_change = false;

  gcc_checking_assert (find (from) == to);

  /* Move all complex constraints from src node into to node  */
  FOR_EACH_VEC_ELT (graph->complex[from], i, c)
    {
      /* In complex constraints for node FROM, we may have either
	 a = *FROM, and *FROM = a, or an offseted constraint which are
	 always added to the rhs node's constraints.  */

      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else if (c->lhs.type == DEREF)
	c->lhs.var = to;
      else
	c->rhs.var = to;
    }
  any_change = constraint_set_union (&graph->complex[to],
				     &graph->complex[from]);
  /* FROM's list is dead after the merge; release its storage.  */
  graph->complex[from].release ();
  return any_change;
}
1085 /* Remove edges involving NODE from GRAPH. */
1087 static void
1088 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1090 if (graph->succs[node])
1091 BITMAP_FREE (graph->succs[node]);
1094 /* Merge GRAPH nodes FROM and TO into node TO. */
1096 static void
1097 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1098 unsigned int from)
1100 if (graph->indirect_cycles[from] != -1)
1102 /* If we have indirect cycles with the from node, and we have
1103 none on the to node, the to node has indirect cycles from the
1104 from node now that they are unified.
1105 If indirect cycles exist on both, unify the nodes that they
1106 are in a cycle with, since we know they are in a cycle with
1107 each other. */
1108 if (graph->indirect_cycles[to] == -1)
1109 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1112 /* Merge all the successor edges. */
1113 if (graph->succs[from])
1115 if (!graph->succs[to])
1116 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1117 bitmap_ior_into (graph->succs[to],
1118 graph->succs[from]);
1121 clear_edges_for_node (graph, from);
1125 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1126 it doesn't exist in the graph already. */
1128 static void
1129 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1130 unsigned int from)
1132 if (to == from)
1133 return;
1135 if (!graph->implicit_preds[to])
1136 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1138 if (bitmap_set_bit (graph->implicit_preds[to], from))
1139 stats.num_implicit_edges++;
1142 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1143 it doesn't exist in the graph already.
1144 Return false if the edge already existed, true otherwise. */
1146 static void
1147 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1148 unsigned int from)
1150 if (!graph->preds[to])
1151 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1152 bitmap_set_bit (graph->preds[to], from);
1155 /* Add a graph edge to GRAPH, going from FROM to TO if
1156 it doesn't exist in the graph already.
1157 Return false if the edge already existed, true otherwise. */
1159 static bool
1160 add_graph_edge (constraint_graph_t graph, unsigned int to,
1161 unsigned int from)
1163 if (to == from)
1165 return false;
1167 else
1169 bool r = false;
1171 if (!graph->succs[from])
1172 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1173 if (bitmap_set_bit (graph->succs[from], to))
1175 r = true;
1176 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1177 stats.num_edges++;
1179 return r;
1184 /* Initialize the constraint graph structure to contain SIZE nodes. */
1186 static void
1187 init_graph (unsigned int size)
1189 unsigned int j;
1191 graph = XCNEW (struct constraint_graph);
1192 graph->size = size;
1193 graph->succs = XCNEWVEC (bitmap, graph->size);
1194 graph->indirect_cycles = XNEWVEC (int, graph->size);
1195 graph->rep = XNEWVEC (unsigned int, graph->size);
1196 /* ??? Macros do not support template types with multiple arguments,
1197 so we use a typedef to work around it. */
1198 typedef vec<constraint_t> vec_constraint_t_heap;
1199 graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1200 graph->pe = XCNEWVEC (unsigned int, graph->size);
1201 graph->pe_rep = XNEWVEC (int, graph->size);
1203 for (j = 0; j < graph->size; j++)
1205 graph->rep[j] = j;
1206 graph->pe_rep[j] = -1;
1207 graph->indirect_cycles[j] = -1;
1211 /* Build the constraint graph, adding only predecessor edges right now. */
1213 static void
1214 build_pred_graph (void)
1216 int i;
1217 constraint_t c;
1218 unsigned int j;
1220 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1221 graph->preds = XCNEWVEC (bitmap, graph->size);
1222 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1223 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1224 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1225 graph->points_to = XCNEWVEC (bitmap, graph->size);
1226 graph->eq_rep = XNEWVEC (int, graph->size);
1227 graph->direct_nodes = sbitmap_alloc (graph->size);
1228 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1229 bitmap_clear (graph->direct_nodes);
1231 for (j = 1; j < FIRST_REF_NODE; j++)
1233 if (!get_varinfo (j)->is_special_var)
1234 bitmap_set_bit (graph->direct_nodes, j);
1237 for (j = 0; j < graph->size; j++)
1238 graph->eq_rep[j] = -1;
1240 for (j = 0; j < varmap.length (); j++)
1241 graph->indirect_cycles[j] = -1;
1243 FOR_EACH_VEC_ELT (constraints, i, c)
1245 struct constraint_expr lhs = c->lhs;
1246 struct constraint_expr rhs = c->rhs;
1247 unsigned int lhsvar = lhs.var;
1248 unsigned int rhsvar = rhs.var;
1250 if (lhs.type == DEREF)
1252 /* *x = y. */
1253 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1254 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1256 else if (rhs.type == DEREF)
1258 /* x = *y */
1259 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1260 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1261 else
1262 bitmap_clear_bit (graph->direct_nodes, lhsvar);
1264 else if (rhs.type == ADDRESSOF)
1266 varinfo_t v;
1268 /* x = &y */
1269 if (graph->points_to[lhsvar] == NULL)
1270 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1271 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1273 if (graph->pointed_by[rhsvar] == NULL)
1274 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1275 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1277 /* Implicitly, *x = y */
1278 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1280 /* All related variables are no longer direct nodes. */
1281 bitmap_clear_bit (graph->direct_nodes, rhsvar);
1282 v = get_varinfo (rhsvar);
1283 if (!v->is_full_var)
1285 v = get_varinfo (v->head);
1288 bitmap_clear_bit (graph->direct_nodes, v->id);
1289 v = vi_next (v);
1291 while (v != NULL);
1293 bitmap_set_bit (graph->address_taken, rhsvar);
1295 else if (lhsvar > anything_id
1296 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1298 /* x = y */
1299 add_pred_graph_edge (graph, lhsvar, rhsvar);
1300 /* Implicitly, *x = *y */
1301 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1302 FIRST_REF_NODE + rhsvar);
1304 else if (lhs.offset != 0 || rhs.offset != 0)
1306 if (rhs.offset != 0)
1307 bitmap_clear_bit (graph->direct_nodes, lhs.var);
1308 else if (lhs.offset != 0)
1309 bitmap_clear_bit (graph->direct_nodes, rhs.var);
1314 /* Build the constraint graph, adding successor edges. */
1316 static void
1317 build_succ_graph (void)
1319 unsigned i, t;
1320 constraint_t c;
1322 FOR_EACH_VEC_ELT (constraints, i, c)
1324 struct constraint_expr lhs;
1325 struct constraint_expr rhs;
1326 unsigned int lhsvar;
1327 unsigned int rhsvar;
1329 if (!c)
1330 continue;
1332 lhs = c->lhs;
1333 rhs = c->rhs;
1334 lhsvar = find (lhs.var);
1335 rhsvar = find (rhs.var);
1337 if (lhs.type == DEREF)
1339 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1340 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1342 else if (rhs.type == DEREF)
1344 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1345 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1347 else if (rhs.type == ADDRESSOF)
1349 /* x = &y */
1350 gcc_checking_assert (find (rhs.var) == rhs.var);
1351 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1353 else if (lhsvar > anything_id
1354 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1356 add_graph_edge (graph, lhsvar, rhsvar);
1360 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1361 receive pointers. */
1362 t = find (storedanything_id);
1363 for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1365 if (!bitmap_bit_p (graph->direct_nodes, i)
1366 && get_varinfo (i)->may_have_pointers)
1367 add_graph_edge (graph, find (i), t);
1370 /* Everything stored to ANYTHING also potentially escapes. */
1371 add_graph_edge (graph, find (escaped_id), t);
1375 /* Changed variables on the last iteration. */
1376 static bitmap changed;
1378 /* Strongly Connected Component visitation info. */
1380 struct scc_info
1382 sbitmap visited;
1383 sbitmap deleted;
1384 unsigned int *dfs;
1385 unsigned int *node_mapping;
1386 int current_index;
1387 vec<unsigned> scc_stack;
1391 /* Recursive routine to find strongly connected components in GRAPH.
1392 SI is the SCC info to store the information in, and N is the id of current
1393 graph node we are processing.
1395 This is Tarjan's strongly connected component finding algorithm, as
1396 modified by Nuutila to keep only non-root nodes on the stack.
1397 The algorithm can be found in "On finding the strongly connected
1398 connected components in a directed graph" by Esko Nuutila and Eljas
1399 Soisalon-Soininen, in Information Processing Letters volume 49,
1400 number 1, pages 9-14. */
1402 static void
1403 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1405 unsigned int i;
1406 bitmap_iterator bi;
1407 unsigned int my_dfs;
1409 bitmap_set_bit (si->visited, n);
1410 si->dfs[n] = si->current_index ++;
1411 my_dfs = si->dfs[n];
1413 /* Visit all the successors. */
1414 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1416 unsigned int w;
1418 if (i > LAST_REF_NODE)
1419 break;
1421 w = find (i);
1422 if (bitmap_bit_p (si->deleted, w))
1423 continue;
1425 if (!bitmap_bit_p (si->visited, w))
1426 scc_visit (graph, si, w);
1428 unsigned int t = find (w);
1429 gcc_checking_assert (find (n) == n);
1430 if (si->dfs[t] < si->dfs[n])
1431 si->dfs[n] = si->dfs[t];
1434 /* See if any components have been identified. */
1435 if (si->dfs[n] == my_dfs)
1437 if (si->scc_stack.length () > 0
1438 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1440 bitmap scc = BITMAP_ALLOC (NULL);
1441 unsigned int lowest_node;
1442 bitmap_iterator bi;
1444 bitmap_set_bit (scc, n);
1446 while (si->scc_stack.length () != 0
1447 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1449 unsigned int w = si->scc_stack.pop ();
1451 bitmap_set_bit (scc, w);
1454 lowest_node = bitmap_first_set_bit (scc);
1455 gcc_assert (lowest_node < FIRST_REF_NODE);
1457 /* Collapse the SCC nodes into a single node, and mark the
1458 indirect cycles. */
1459 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1461 if (i < FIRST_REF_NODE)
1463 if (unite (lowest_node, i))
1464 unify_nodes (graph, lowest_node, i, false);
1466 else
1468 unite (lowest_node, i);
1469 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1473 bitmap_set_bit (si->deleted, n);
1475 else
1476 si->scc_stack.safe_push (n);
1479 /* Unify node FROM into node TO, updating the changed count if
1480 necessary when UPDATE_CHANGED is true. */
1482 static void
1483 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1484 bool update_changed)
1486 gcc_checking_assert (to != from && find (to) == to);
1488 if (dump_file && (dump_flags & TDF_DETAILS))
1489 fprintf (dump_file, "Unifying %s to %s\n",
1490 get_varinfo (from)->name,
1491 get_varinfo (to)->name);
1493 if (update_changed)
1494 stats.unified_vars_dynamic++;
1495 else
1496 stats.unified_vars_static++;
1498 merge_graph_nodes (graph, to, from);
1499 if (merge_node_constraints (graph, to, from))
1501 if (update_changed)
1502 bitmap_set_bit (changed, to);
1505 /* Mark TO as changed if FROM was changed. If TO was already marked
1506 as changed, decrease the changed count. */
1508 if (update_changed
1509 && bitmap_clear_bit (changed, from))
1510 bitmap_set_bit (changed, to);
1511 varinfo_t fromvi = get_varinfo (from);
1512 if (fromvi->solution)
1514 /* If the solution changes because of the merging, we need to mark
1515 the variable as changed. */
1516 varinfo_t tovi = get_varinfo (to);
1517 if (bitmap_ior_into (tovi->solution, fromvi->solution))
1519 if (update_changed)
1520 bitmap_set_bit (changed, to);
1523 BITMAP_FREE (fromvi->solution);
1524 if (fromvi->oldsolution)
1525 BITMAP_FREE (fromvi->oldsolution);
1527 if (stats.iterations > 0
1528 && tovi->oldsolution)
1529 BITMAP_FREE (tovi->oldsolution);
1531 if (graph->succs[to])
1532 bitmap_clear_bit (graph->succs[to], to);
1535 /* Information needed to compute the topological ordering of a graph. */
1537 struct topo_info
1539 /* sbitmap of visited nodes. */
1540 sbitmap visited;
1541 /* Array that stores the topological order of the graph, *in
1542 reverse*. */
1543 vec<unsigned> topo_order;
1547 /* Initialize and return a topological info structure. */
1549 static struct topo_info *
1550 init_topo_info (void)
1552 size_t size = graph->size;
1553 struct topo_info *ti = XNEW (struct topo_info);
1554 ti->visited = sbitmap_alloc (size);
1555 bitmap_clear (ti->visited);
1556 ti->topo_order.create (1);
1557 return ti;
1561 /* Free the topological sort info pointed to by TI. */
1563 static void
1564 free_topo_info (struct topo_info *ti)
1566 sbitmap_free (ti->visited);
1567 ti->topo_order.release ();
1568 free (ti);
1571 /* Visit the graph in topological order, and store the order in the
1572 topo_info structure. */
1574 static void
1575 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1576 unsigned int n)
1578 bitmap_iterator bi;
1579 unsigned int j;
1581 bitmap_set_bit (ti->visited, n);
1583 if (graph->succs[n])
1584 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1586 if (!bitmap_bit_p (ti->visited, j))
1587 topo_visit (graph, ti, j);
1590 ti->topo_order.safe_push (n);
1593 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1594 starting solution for y. */
1596 static void
1597 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1598 bitmap delta, bitmap *expanded_delta)
1600 unsigned int lhs = c->lhs.var;
1601 bool flag = false;
1602 bitmap sol = get_varinfo (lhs)->solution;
1603 unsigned int j;
1604 bitmap_iterator bi;
1605 HOST_WIDE_INT roffset = c->rhs.offset;
1607 /* Our IL does not allow this. */
1608 gcc_checking_assert (c->lhs.offset == 0);
1610 /* If the solution of Y contains anything it is good enough to transfer
1611 this to the LHS. */
1612 if (bitmap_bit_p (delta, anything_id))
1614 flag |= bitmap_set_bit (sol, anything_id);
1615 goto done;
1618 /* If we do not know at with offset the rhs is dereferenced compute
1619 the reachability set of DELTA, conservatively assuming it is
1620 dereferenced at all valid offsets. */
1621 if (roffset == UNKNOWN_OFFSET)
1623 delta = solution_set_expand (delta, expanded_delta);
1624 /* No further offset processing is necessary. */
1625 roffset = 0;
1628 /* For each variable j in delta (Sol(y)), add
1629 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1630 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1632 varinfo_t v = get_varinfo (j);
1633 HOST_WIDE_INT fieldoffset = v->offset + roffset;
1634 unsigned HOST_WIDE_INT size = v->size;
1635 unsigned int t;
1637 if (v->is_full_var)
1639 else if (roffset != 0)
1641 if (fieldoffset < 0)
1642 v = get_varinfo (v->head);
1643 else
1644 v = first_or_preceding_vi_for_offset (v, fieldoffset);
1647 /* We have to include all fields that overlap the current field
1648 shifted by roffset. */
1651 t = find (v->id);
1653 /* Adding edges from the special vars is pointless.
1654 They don't have sets that can change. */
1655 if (get_varinfo (t)->is_special_var)
1656 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1657 /* Merging the solution from ESCAPED needlessly increases
1658 the set. Use ESCAPED as representative instead. */
1659 else if (v->id == escaped_id)
1660 flag |= bitmap_set_bit (sol, escaped_id);
1661 else if (v->may_have_pointers
1662 && add_graph_edge (graph, lhs, t))
1663 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1665 if (v->is_full_var
1666 || v->next == 0)
1667 break;
1669 v = vi_next (v);
1671 while (v->offset < fieldoffset + size);
1674 done:
1675 /* If the LHS solution changed, mark the var as changed. */
1676 if (flag)
1678 get_varinfo (lhs)->solution = sol;
1679 bitmap_set_bit (changed, lhs);
1683 /* Process a constraint C that represents *(x + off) = y using DELTA
1684 as the starting solution for x. */
1686 static void
1687 do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
1689 unsigned int rhs = c->rhs.var;
1690 bitmap sol = get_varinfo (rhs)->solution;
1691 unsigned int j;
1692 bitmap_iterator bi;
1693 HOST_WIDE_INT loff = c->lhs.offset;
1694 bool escaped_p = false;
1696 /* Our IL does not allow this. */
1697 gcc_checking_assert (c->rhs.offset == 0);
1699 /* If the solution of y contains ANYTHING simply use the ANYTHING
1700 solution. This avoids needlessly increasing the points-to sets. */
1701 if (bitmap_bit_p (sol, anything_id))
1702 sol = get_varinfo (find (anything_id))->solution;
1704 /* If the solution for x contains ANYTHING we have to merge the
1705 solution of y into all pointer variables which we do via
1706 STOREDANYTHING. */
1707 if (bitmap_bit_p (delta, anything_id))
1709 unsigned t = find (storedanything_id);
1710 if (add_graph_edge (graph, t, rhs))
1712 if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1713 bitmap_set_bit (changed, t);
1715 return;
1718 /* If we do not know at with offset the rhs is dereferenced compute
1719 the reachability set of DELTA, conservatively assuming it is
1720 dereferenced at all valid offsets. */
1721 if (loff == UNKNOWN_OFFSET)
1723 delta = solution_set_expand (delta, expanded_delta);
1724 loff = 0;
1727 /* For each member j of delta (Sol(x)), add an edge from y to j and
1728 union Sol(y) into Sol(j) */
1729 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1731 varinfo_t v = get_varinfo (j);
1732 unsigned int t;
1733 HOST_WIDE_INT fieldoffset = v->offset + loff;
1734 unsigned HOST_WIDE_INT size = v->size;
1736 if (v->is_full_var)
1738 else if (loff != 0)
1740 if (fieldoffset < 0)
1741 v = get_varinfo (v->head);
1742 else
1743 v = first_or_preceding_vi_for_offset (v, fieldoffset);
1746 /* We have to include all fields that overlap the current field
1747 shifted by loff. */
1750 if (v->may_have_pointers)
1752 /* If v is a global variable then this is an escape point. */
1753 if (v->is_global_var
1754 && !escaped_p)
1756 t = find (escaped_id);
1757 if (add_graph_edge (graph, t, rhs)
1758 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1759 bitmap_set_bit (changed, t);
1760 /* Enough to let rhs escape once. */
1761 escaped_p = true;
1764 if (v->is_special_var)
1765 break;
1767 t = find (v->id);
1768 if (add_graph_edge (graph, t, rhs)
1769 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1770 bitmap_set_bit (changed, t);
1773 if (v->is_full_var
1774 || v->next == 0)
1775 break;
1777 v = vi_next (v);
1779 while (v->offset < fieldoffset + size);
1783 /* Handle a non-simple (simple meaning requires no iteration),
1784 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1786 static void
1787 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
1788 bitmap *expanded_delta)
1790 if (c->lhs.type == DEREF)
1792 if (c->rhs.type == ADDRESSOF)
1794 gcc_unreachable ();
1796 else
1798 /* *x = y */
1799 do_ds_constraint (c, delta, expanded_delta);
1802 else if (c->rhs.type == DEREF)
1804 /* x = *y */
1805 if (!(get_varinfo (c->lhs.var)->is_special_var))
1806 do_sd_constraint (graph, c, delta, expanded_delta);
1808 else
1810 bitmap tmp;
1811 bool flag = false;
1813 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
1814 && c->rhs.offset != 0 && c->lhs.offset == 0);
1815 tmp = get_varinfo (c->lhs.var)->solution;
1817 flag = set_union_with_increment (tmp, delta, c->rhs.offset,
1818 expanded_delta);
1820 if (flag)
1821 bitmap_set_bit (changed, c->lhs.var);
1825 /* Initialize and return a new SCC info structure. */
1827 static struct scc_info *
1828 init_scc_info (size_t size)
1830 struct scc_info *si = XNEW (struct scc_info);
1831 size_t i;
1833 si->current_index = 0;
1834 si->visited = sbitmap_alloc (size);
1835 bitmap_clear (si->visited);
1836 si->deleted = sbitmap_alloc (size);
1837 bitmap_clear (si->deleted);
1838 si->node_mapping = XNEWVEC (unsigned int, size);
1839 si->dfs = XCNEWVEC (unsigned int, size);
1841 for (i = 0; i < size; i++)
1842 si->node_mapping[i] = i;
1844 si->scc_stack.create (1);
1845 return si;
1848 /* Free an SCC info structure pointed to by SI */
1850 static void
1851 free_scc_info (struct scc_info *si)
1853 sbitmap_free (si->visited);
1854 sbitmap_free (si->deleted);
1855 free (si->node_mapping);
1856 free (si->dfs);
1857 si->scc_stack.release ();
1858 free (si);
1862 /* Find indirect cycles in GRAPH that occur, using strongly connected
1863 components, and note them in the indirect cycles map.
1865 This technique comes from Ben Hardekopf and Calvin Lin,
1866 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1867 Lines of Code", submitted to PLDI 2007. */
1869 static void
1870 find_indirect_cycles (constraint_graph_t graph)
1872 unsigned int i;
1873 unsigned int size = graph->size;
1874 struct scc_info *si = init_scc_info (size);
1876 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1877 if (!bitmap_bit_p (si->visited, i) && find (i) == i)
1878 scc_visit (graph, si, i);
1880 free_scc_info (si);
1883 /* Compute a topological ordering for GRAPH, and store the result in the
1884 topo_info structure TI. */
1886 static void
1887 compute_topo_order (constraint_graph_t graph,
1888 struct topo_info *ti)
1890 unsigned int i;
1891 unsigned int size = graph->size;
1893 for (i = 0; i != size; ++i)
1894 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1895 topo_visit (graph, ti, i);
1898 /* Structure used to for hash value numbering of pointer equivalence
1899 classes. */
1901 typedef struct equiv_class_label
1903 hashval_t hashcode;
1904 unsigned int equivalence_class;
1905 bitmap labels;
1906 } *equiv_class_label_t;
1907 typedef const struct equiv_class_label *const_equiv_class_label_t;
1909 /* Equiv_class_label hashtable helpers. */
1911 struct equiv_class_hasher : free_ptr_hash <equiv_class_label>
1913 static inline hashval_t hash (const equiv_class_label *);
1914 static inline bool equal (const equiv_class_label *,
1915 const equiv_class_label *);
1918 /* Hash function for a equiv_class_label_t */
1920 inline hashval_t
1921 equiv_class_hasher::hash (const equiv_class_label *ecl)
1923 return ecl->hashcode;
1926 /* Equality function for two equiv_class_label_t's. */
1928 inline bool
1929 equiv_class_hasher::equal (const equiv_class_label *eql1,
1930 const equiv_class_label *eql2)
1932 return (eql1->hashcode == eql2->hashcode
1933 && bitmap_equal_p (eql1->labels, eql2->labels));
1936 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1937 classes. */
1938 static hash_table<equiv_class_hasher> *pointer_equiv_class_table;
1940 /* A hashtable for mapping a bitmap of labels->location equivalence
1941 classes. */
1942 static hash_table<equiv_class_hasher> *location_equiv_class_table;
1944 /* Lookup a equivalence class in TABLE by the bitmap of LABELS with
1945 hash HAS it contains. Sets *REF_LABELS to the bitmap LABELS
1946 is equivalent to. */
1948 static equiv_class_label *
1949 equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
1950 bitmap labels)
1952 equiv_class_label **slot;
1953 equiv_class_label ecl;
1955 ecl.labels = labels;
1956 ecl.hashcode = bitmap_hash (labels);
1957 slot = table->find_slot (&ecl, INSERT);
1958 if (!*slot)
1960 *slot = XNEW (struct equiv_class_label);
1961 (*slot)->labels = labels;
1962 (*slot)->hashcode = ecl.hashcode;
1963 (*slot)->equivalence_class = 0;
1966 return *slot;
1969 /* Perform offline variable substitution.
1971 This is a worst case quadratic time way of identifying variables
1972 that must have equivalent points-to sets, including those caused by
1973 static cycles, and single entry subgraphs, in the constraint graph.
1975 The technique is described in "Exploiting Pointer and Location
1976 Equivalence to Optimize Pointer Analysis. In the 14th International
1977 Static Analysis Symposium (SAS), August 2007." It is known as the
1978 "HU" algorithm, and is equivalent to value numbering the collapsed
1979 constraint graph including evaluating unions.
1981 The general method of finding equivalence classes is as follows:
1982 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1983 Initialize all non-REF nodes to be direct nodes.
1984 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1985 variable}
1986 For each constraint containing the dereference, we also do the same
1987 thing.
1989 We then compute SCC's in the graph and unify nodes in the same SCC,
1990 including pts sets.
1992 For each non-collapsed node x:
1993 Visit all unvisited explicit incoming edges.
1994 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1995 where y->x.
1996 Lookup the equivalence class for pts(x).
1997 If we found one, equivalence_class(x) = found class.
1998 Otherwise, equivalence_class(x) = new class, and new_class is
1999 added to the lookup table.
2001 All direct nodes with the same equivalence class can be replaced
2002 with a single representative node.
2003 All unlabeled nodes (label == 0) are not pointers and all edges
2004 involving them can be eliminated.
2005 We perform these optimizations during rewrite_constraints
2007 In addition to pointer equivalence class finding, we also perform
2008 location equivalence class finding. This is the set of variables
2009 that always appear together in points-to sets. We use this to
2010 compress the size of the points-to sets. */
/* Current maximum pointer equivalence class id.  */
static int pointer_equiv_class;

/* Current maximum location equivalence class id.  */
static int location_equiv_class;
2018 /* Recursive routine to find strongly connected components in GRAPH,
2019 and label it's nodes with DFS numbers. */
2021 static void
2022 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2024 unsigned int i;
2025 bitmap_iterator bi;
2026 unsigned int my_dfs;
2028 gcc_checking_assert (si->node_mapping[n] == n);
2029 bitmap_set_bit (si->visited, n);
2030 si->dfs[n] = si->current_index ++;
2031 my_dfs = si->dfs[n];
2033 /* Visit all the successors. */
2034 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2036 unsigned int w = si->node_mapping[i];
2038 if (bitmap_bit_p (si->deleted, w))
2039 continue;
2041 if (!bitmap_bit_p (si->visited, w))
2042 condense_visit (graph, si, w);
2044 unsigned int t = si->node_mapping[w];
2045 gcc_checking_assert (si->node_mapping[n] == n);
2046 if (si->dfs[t] < si->dfs[n])
2047 si->dfs[n] = si->dfs[t];
2050 /* Visit all the implicit predecessors. */
2051 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2053 unsigned int w = si->node_mapping[i];
2055 if (bitmap_bit_p (si->deleted, w))
2056 continue;
2058 if (!bitmap_bit_p (si->visited, w))
2059 condense_visit (graph, si, w);
2061 unsigned int t = si->node_mapping[w];
2062 gcc_assert (si->node_mapping[n] == n);
2063 if (si->dfs[t] < si->dfs[n])
2064 si->dfs[n] = si->dfs[t];
2067 /* See if any components have been identified. */
2068 if (si->dfs[n] == my_dfs)
2070 while (si->scc_stack.length () != 0
2071 && si->dfs[si->scc_stack.last ()] >= my_dfs)
2073 unsigned int w = si->scc_stack.pop ();
2074 si->node_mapping[w] = n;
2076 if (!bitmap_bit_p (graph->direct_nodes, w))
2077 bitmap_clear_bit (graph->direct_nodes, n);
2079 /* Unify our nodes. */
2080 if (graph->preds[w])
2082 if (!graph->preds[n])
2083 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2084 bitmap_ior_into (graph->preds[n], graph->preds[w]);
2086 if (graph->implicit_preds[w])
2088 if (!graph->implicit_preds[n])
2089 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2090 bitmap_ior_into (graph->implicit_preds[n],
2091 graph->implicit_preds[w]);
2093 if (graph->points_to[w])
2095 if (!graph->points_to[n])
2096 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2097 bitmap_ior_into (graph->points_to[n],
2098 graph->points_to[w]);
2101 bitmap_set_bit (si->deleted, n);
2103 else
2104 si->scc_stack.safe_push (n);
2107 /* Label pointer equivalences.
2109 This performs a value numbering of the constraint graph to
2110 discover which variables will always have the same points-to sets
2111 under the current set of constraints.
2113 The way it value numbers is to store the set of points-to bits
2114 generated by the constraints and graph edges. This is just used as a
2115 hash and equality comparison. The *actual set of points-to bits* is
2116 completely irrelevant, in that we don't care about being able to
2117 extract them later.
2119 The equality values (currently bitmaps) just have to satisfy a few
2120 constraints, the main ones being:
2121 1. The combining operation must be order independent.
2122 2. The end result of a given set of operations must be unique iff the
2123 combination of input values is unique
2124 3. Hashable. */
2126 static void
2127 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2129 unsigned int i, first_pred;
2130 bitmap_iterator bi;
2132 bitmap_set_bit (si->visited, n);
2134 /* Label and union our incoming edges's points to sets. */
2135 first_pred = -1U;
2136 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2138 unsigned int w = si->node_mapping[i];
2139 if (!bitmap_bit_p (si->visited, w))
2140 label_visit (graph, si, w);
2142 /* Skip unused edges */
2143 if (w == n || graph->pointer_label[w] == 0)
2144 continue;
2146 if (graph->points_to[w])
2148 if (!graph->points_to[n])
2150 if (first_pred == -1U)
2151 first_pred = w;
2152 else
2154 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2155 bitmap_ior (graph->points_to[n],
2156 graph->points_to[first_pred],
2157 graph->points_to[w]);
2160 else
2161 bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
2165 /* Indirect nodes get fresh variables and a new pointer equiv class. */
2166 if (!bitmap_bit_p (graph->direct_nodes, n))
2168 if (!graph->points_to[n])
2170 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2171 if (first_pred != -1U)
2172 bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
2174 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2175 graph->pointer_label[n] = pointer_equiv_class++;
2176 equiv_class_label_t ecl;
2177 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2178 graph->points_to[n]);
2179 ecl->equivalence_class = graph->pointer_label[n];
2180 return;
2183 /* If there was only a single non-empty predecessor the pointer equiv
2184 class is the same. */
2185 if (!graph->points_to[n])
2187 if (first_pred != -1U)
2189 graph->pointer_label[n] = graph->pointer_label[first_pred];
2190 graph->points_to[n] = graph->points_to[first_pred];
2192 return;
2195 if (!bitmap_empty_p (graph->points_to[n]))
2197 equiv_class_label_t ecl;
2198 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2199 graph->points_to[n]);
2200 if (ecl->equivalence_class == 0)
2201 ecl->equivalence_class = pointer_equiv_class++;
2202 else
2204 BITMAP_FREE (graph->points_to[n]);
2205 graph->points_to[n] = ecl->labels;
2207 graph->pointer_label[n] = ecl->equivalence_class;
2211 /* Dump the predecessor graph used by offline variable substitution
   to FILE in dot format.  SI supplies the SCC node mapping from
   condense_visit; only representative nodes (si->node_mapping[i] == i)
   are printed.  */
2213 static void
2214 dump_pred_graph (struct scc_info *si, FILE *file)
2216 unsigned int i;
2218 /* Only print the graph if it has already been initialized: */
2219 if (!graph)
2220 return;
2222 /* Prints the header of the dot file: */
2223 fprintf (file, "strict digraph {\n");
2224 fprintf (file, " node [\n shape = box\n ]\n");
2225 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
2226 fprintf (file, "\n // List of nodes and complex constraints in "
2227 "the constraint graph:\n");
2229 /* The next lines print the nodes in the graph together with the
2230 complex constraints attached to them. */
2231 for (i = 1; i < graph->size; i++)
2233 if (i == FIRST_REF_NODE)
2234 continue;
2235 if (si->node_mapping[i] != i)
2236 continue;
2237 if (i < FIRST_REF_NODE)
2238 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2239 else
2240 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
/* Annotate the node with its points-to bitmap when one has been
   computed and is non-empty.  */
2241 if (graph->points_to[i]
2242 && !bitmap_empty_p (graph->points_to[i]))
2244 fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2245 unsigned j;
2246 bitmap_iterator bi;
2247 EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2248 fprintf (file, " %d", j);
2249 fprintf (file, " }\"]");
2251 fprintf (file, ";\n");
2254 /* Go over the edges.  Edges are drawn from the SCC representative
   of the predecessor to the current representative node.  */
2255 fprintf (file, "\n // Edges in the constraint graph:\n");
2256 for (i = 1; i < graph->size; i++)
2258 unsigned j;
2259 bitmap_iterator bi;
2260 if (si->node_mapping[i] != i)
2261 continue;
2262 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2264 unsigned from = si->node_mapping[j];
2265 if (from < FIRST_REF_NODE)
2266 fprintf (file, "\"%s\"", get_varinfo (from)->name);
2267 else
2268 fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2269 fprintf (file, " -> ");
2270 if (i < FIRST_REF_NODE)
2271 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2272 else
2273 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2274 fprintf (file, ";\n");
2278 /* Prints the tail of the dot file. */
2279 fprintf (file, "}\n");
2282 /* Perform offline variable substitution, discovering equivalence
2283 classes, and eliminating non-pointer variables.  Returns the
   scc_info holding the node mapping that rewrite_constraints and
   free_var_substitution_info consume afterwards.  */
2285 static struct scc_info *
2286 perform_var_substitution (constraint_graph_t graph)
2288 unsigned int i;
2289 unsigned int size = graph->size;
2290 struct scc_info *si = init_scc_info (size);
2292 bitmap_obstack_initialize (&iteration_obstack);
2293 pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
2294 location_equiv_class_table
2295 = new hash_table<equiv_class_hasher> (511);
/* Label 0 is reserved to mean "non-pointer", so real classes start
   at 1.  */
2296 pointer_equiv_class = 1;
2297 location_equiv_class = 1;
2299 /* Condense the nodes, which means to find SCC's, count incoming
2300 predecessors, and unite nodes in SCC's. */
2301 for (i = 1; i < FIRST_REF_NODE; i++)
2302 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2303 condense_visit (graph, si, si->node_mapping[i]);
2305 if (dump_file && (dump_flags & TDF_GRAPH))
2307 fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
2308 "in dot format:\n");
2309 dump_pred_graph (si, dump_file);
2310 fprintf (dump_file, "\n\n");
2313 bitmap_clear (si->visited);
2314 /* Now label the nodes for pointer equivalences.  */
2315 for (i = 1; i < FIRST_REF_NODE; i++)
2316 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2317 label_visit (graph, si, si->node_mapping[i]);
2319 /* Calculate location equivalence labels. */
2320 for (i = 1; i < FIRST_REF_NODE; i++)
2322 bitmap pointed_by;
2323 bitmap_iterator bi;
2324 unsigned int j;
2326 if (!graph->pointed_by[i])
2327 continue;
2328 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2330 /* Translate the pointed-by mapping for pointer equivalence
2331 labels. */
2332 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2334 bitmap_set_bit (pointed_by,
2335 graph->pointer_label[si->node_mapping[j]]);
2337 /* The original pointed_by is now dead. */
2338 BITMAP_FREE (graph->pointed_by[i]);
2340 /* Look up the location equivalence label if one exists, or make
2341 one otherwise. */
2342 equiv_class_label_t ecl;
2343 ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
2344 if (ecl->equivalence_class == 0)
2345 ecl->equivalence_class = location_equiv_class++;
2346 else
2348 if (dump_file && (dump_flags & TDF_DETAILS))
2349 fprintf (dump_file, "Found location equivalence for node %s\n",
2350 get_varinfo (i)->name);
2351 BITMAP_FREE (pointed_by);
2353 graph->loc_label[i] = ecl->equivalence_class;
2357 if (dump_file && (dump_flags & TDF_DETAILS))
2358 for (i = 1; i < FIRST_REF_NODE; i++)
2360 unsigned j = si->node_mapping[i];
2361 if (j != i)
2363 fprintf (dump_file, "%s node id %d ",
2364 bitmap_bit_p (graph->direct_nodes, i)
2365 ? "Direct" : "Indirect", i);
2366 if (i < FIRST_REF_NODE)
2367 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2368 else
2369 fprintf (dump_file, "\"*%s\"",
2370 get_varinfo (i - FIRST_REF_NODE)->name);
2371 fprintf (dump_file, " mapped to SCC leader node id %d ", j);
2372 if (j < FIRST_REF_NODE)
2373 fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
2374 else
2375 fprintf (dump_file, "\"*%s\"\n",
2376 get_varinfo (j - FIRST_REF_NODE)->name);
2378 else
2380 fprintf (dump_file,
2381 "Equivalence classes for %s node id %d ",
2382 bitmap_bit_p (graph->direct_nodes, i)
2383 ? "direct" : "indirect", i);
2384 if (i < FIRST_REF_NODE)
2385 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2386 else
2387 fprintf (dump_file, "\"*%s\"",
2388 get_varinfo (i - FIRST_REF_NODE)->name);
2389 fprintf (dump_file,
2390 ": pointer %d, location %d\n",
2391 graph->pointer_label[i], graph->loc_label[i]);
2395 /* Quickly eliminate our non-pointer variables.  Pointer label 0
   marks a provable non-pointer, so its edges cannot matter.  */
2397 for (i = 1; i < FIRST_REF_NODE; i++)
2399 unsigned int node = si->node_mapping[i];
2401 if (graph->pointer_label[node] == 0)
2403 if (dump_file && (dump_flags & TDF_DETAILS))
2404 fprintf (dump_file,
2405 "%s is a non-pointer variable, eliminating edges.\n",
2406 get_varinfo (node)->name);
2407 stats.nonpointer_vars++;
2408 clear_edges_for_node (graph, node);
2412 return si;
2415 /* Free information that was only necessary for variable
2416 substitution.  Counterpart of perform_var_substitution: releases
   SI, the per-node substitution arrays and the two equivalence-class
   hash tables, plus the iteration obstack they were allocated on.  */
2418 static void
2419 free_var_substitution_info (struct scc_info *si)
2421 free_scc_info (si);
2422 free (graph->pointer_label);
2423 free (graph->loc_label);
2424 free (graph->pointed_by);
2425 free (graph->points_to);
2426 free (graph->eq_rep);
2427 sbitmap_free (graph->direct_nodes);
2428 delete pointer_equiv_class_table;
2429 pointer_equiv_class_table = NULL;
2430 delete location_equiv_class_table;
2431 location_equiv_class_table = NULL;
2432 bitmap_obstack_release (&iteration_obstack);
2435 /* Return an existing node that is equivalent to NODE, which has
2436 equivalence class LABEL, if one exists. Return NODE otherwise. */
2438 static unsigned int
2439 find_equivalent_node (constraint_graph_t graph,
2440 unsigned int node, unsigned int label)
2442 /* If the address version of this variable is unused, we can
2443 substitute it for anything else with the same label.
2444 Otherwise, we know the pointers are equivalent, but not the
2445 locations, and we can unite them later. */
2447 if (!bitmap_bit_p (graph->address_taken, node))
2449 gcc_checking_assert (label < graph->size);
2451 if (graph->eq_rep[label] != -1)
2453 /* Unify the two variables since we know they are equivalent. */
2454 if (unite (graph->eq_rep[label], node))
2455 unify_nodes (graph, graph->eq_rep[label], node, false);
2456 return graph->eq_rep[label];
2458 else
2460 graph->eq_rep[label] = node;
2461 graph->pe_rep[label] = node;
2464 else
2466 gcc_checking_assert (label < graph->size);
2467 graph->pe[node] = label;
2468 if (graph->pe_rep[label] == -1)
2469 graph->pe_rep[label] = node;
2472 return node;
2475 /* Unite pointer equivalent but not location equivalent nodes in
2476 GRAPH. This may only be performed once variable substitution is
2477 finished. */
2479 static void
2480 unite_pointer_equivalences (constraint_graph_t graph)
2482 unsigned int i;
2484 /* Go through the pointer equivalences and unite them to their
2485 representative, if they aren't already. */
2486 for (i = 1; i < FIRST_REF_NODE; i++)
2488 unsigned int label = graph->pe[i];
2489 if (label)
2491 int label_rep = graph->pe_rep[label];
2493 if (label_rep == -1)
2494 continue;
2496 label_rep = find (label_rep);
2497 if (label_rep >= 0 && unite (label_rep, find (i)))
2498 unify_nodes (graph, label_rep, i, false);
2503 /* Move complex constraints to the GRAPH nodes they belong to. */
2505 static void
2506 move_complex_constraints (constraint_graph_t graph)
2508 int i;
2509 constraint_t c;
2511 FOR_EACH_VEC_ELT (constraints, i, c)
2513 if (c)
2515 struct constraint_expr lhs = c->lhs;
2516 struct constraint_expr rhs = c->rhs;
2518 if (lhs.type == DEREF)
2520 insert_into_complex (graph, lhs.var, c);
2522 else if (rhs.type == DEREF)
2524 if (!(get_varinfo (lhs.var)->is_special_var))
2525 insert_into_complex (graph, rhs.var, c);
2527 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2528 && (lhs.offset != 0 || rhs.offset != 0))
2530 insert_into_complex (graph, rhs.var, c);
2537 /* Optimize and rewrite complex constraints while performing
2538 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2539 result of perform_variable_substitution. */
2541 static void
2542 rewrite_constraints (constraint_graph_t graph,
2543 struct scc_info *si)
2545 int i;
2546 constraint_t c;
2548 if (flag_checking)
2550 for (unsigned int j = 0; j < graph->size; j++)
2551 gcc_assert (find (j) == j);
2554 FOR_EACH_VEC_ELT (constraints, i, c)
2556 struct constraint_expr lhs = c->lhs;
2557 struct constraint_expr rhs = c->rhs;
2558 unsigned int lhsvar = find (lhs.var);
2559 unsigned int rhsvar = find (rhs.var);
2560 unsigned int lhsnode, rhsnode;
2561 unsigned int lhslabel, rhslabel;
2563 lhsnode = si->node_mapping[lhsvar];
2564 rhsnode = si->node_mapping[rhsvar];
2565 lhslabel = graph->pointer_label[lhsnode];
2566 rhslabel = graph->pointer_label[rhsnode];
2568 /* See if it is really a non-pointer variable, and if so, ignore
2569 the constraint. */
2570 if (lhslabel == 0)
2572 if (dump_file && (dump_flags & TDF_DETAILS))
2575 fprintf (dump_file, "%s is a non-pointer variable,"
2576 "ignoring constraint:",
2577 get_varinfo (lhs.var)->name);
2578 dump_constraint (dump_file, c);
2579 fprintf (dump_file, "\n");
2581 constraints[i] = NULL;
2582 continue;
2585 if (rhslabel == 0)
2587 if (dump_file && (dump_flags & TDF_DETAILS))
2590 fprintf (dump_file, "%s is a non-pointer variable,"
2591 "ignoring constraint:",
2592 get_varinfo (rhs.var)->name);
2593 dump_constraint (dump_file, c);
2594 fprintf (dump_file, "\n");
2596 constraints[i] = NULL;
2597 continue;
2600 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2601 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2602 c->lhs.var = lhsvar;
2603 c->rhs.var = rhsvar;
2607 /* Eliminate indirect cycles involving NODE.  Return true if NODE was
2608 part of an SCC, false otherwise. */
2610 static bool
2611 eliminate_indirect_cycles (unsigned int node)
2613 if (graph->indirect_cycles[node] != -1
2614 && !bitmap_empty_p (get_varinfo (node)->solution))
2616 unsigned int i;
2617 auto_vec<unsigned> queue;
2618 int queuepos;
/* TO is the current representative of the cycle NODE was found to
   participate in.  */
2619 unsigned int to = find (graph->indirect_cycles[node]);
2620 bitmap_iterator bi;
2622 /* We can't touch the solution set and call unify_nodes
2623 at the same time, because unify_nodes is going to do
2624 bitmap unions into it. */
2626 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2628 if (find (i) == i && i != to)
2630 if (unite (to, i))
2631 queue.safe_push (i);
/* The solution bitmap is no longer being iterated, so the deferred
   unifications can be performed now.  */
2635 for (queuepos = 0;
2636 queue.iterate (queuepos, &i);
2637 queuepos++)
2639 unify_nodes (graph, to, i, true);
2641 return true;
2643 return false;
2646 /* Solve the constraint graph GRAPH using our worklist solver.
2647 This is based on the PW* family of solvers from the "Efficient Field
2648 Sensitive Pointer Analysis for C" paper.
2649 It works by iterating over all the graph nodes, processing the complex
2650 constraints and propagating the copy constraints, until nothing
2651 changes anymore.  This corresponds to steps 6-8 in the solving list
   given above. */
2653 static void
2654 solve_graph (constraint_graph_t graph)
2656 unsigned int size = graph->size;
2657 unsigned int i;
2658 bitmap pts;
2660 changed = BITMAP_ALLOC (NULL);
2662 /* Mark all initial non-collapsed nodes as changed. */
2663 for (i = 1; i < size; i++)
2665 varinfo_t ivi = get_varinfo (i);
2666 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2667 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2668 || graph->complex[i].length () > 0))
2669 bitmap_set_bit (changed, i);
2672 /* Allocate a bitmap to be used to store the changed bits. */
2673 pts = BITMAP_ALLOC (&pta_obstack);
2675 while (!bitmap_empty_p (changed))
2677 unsigned int i;
2678 struct topo_info *ti = init_topo_info ();
2679 stats.iterations++;
2681 bitmap_obstack_initialize (&iteration_obstack);
2683 compute_topo_order (graph, ti);
2685 while (ti->topo_order.length () != 0)
2688 i = ti->topo_order.pop ();
2690 /* If this variable is not a representative, skip it. */
2691 if (find (i) != i)
2692 continue;
2694 /* In certain indirect cycle cases, we may merge this
2695 variable to another. */
2696 if (eliminate_indirect_cycles (i) && find (i) != i)
2697 continue;
2699 /* If the node has changed, we need to process the
2700 complex constraints and outgoing edges again. */
2701 if (bitmap_clear_bit (changed, i))
2703 unsigned int j;
2704 constraint_t c;
2705 bitmap solution;
2706 vec<constraint_t> complex = graph->complex[i];
2707 varinfo_t vi = get_varinfo (i);
2708 bool solution_empty;
2710 /* Compute the changed set of solution bits. If anything
2711 is in the solution just propagate that. */
2712 if (bitmap_bit_p (vi->solution, anything_id))
2714 /* If anything is also in the old solution there is
2715 nothing to do.
2716 ??? But then we shouldn't have ended up in the changed
   set at all ... */
2717 if (vi->oldsolution
2718 && bitmap_bit_p (vi->oldsolution, anything_id))
2719 continue;
2720 bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
2722 else if (vi->oldsolution)
2723 bitmap_and_compl (pts, vi->solution, vi->oldsolution);
2724 else
2725 bitmap_copy (pts, vi->solution);
2727 if (bitmap_empty_p (pts))
2728 continue;
2730 if (vi->oldsolution)
2731 bitmap_ior_into (vi->oldsolution, pts);
2732 else
2734 vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
2735 bitmap_copy (vi->oldsolution, pts);
2738 solution = vi->solution;
2739 solution_empty = bitmap_empty_p (solution);
2741 /* Process the complex constraints */
2742 bitmap expanded_pts = NULL;
2743 FOR_EACH_VEC_ELT (complex, j, c)
2745 /* XXX: This is going to unsort the constraints in
2746 some cases, which will occasionally add duplicate
2747 constraints during unification. This does not
2748 affect correctness. */
2749 c->lhs.var = find (c->lhs.var);
2750 c->rhs.var = find (c->rhs.var);
2752 /* The only complex constraint that can change our
2753 solution to non-empty, given an empty solution,
2754 is a constraint where the lhs side is receiving
2755 some set from elsewhere. */
2756 if (!solution_empty || c->lhs.type != DEREF)
2757 do_complex_constraint (graph, c, pts, &expanded_pts);
2759 BITMAP_FREE (expanded_pts);
2761 solution_empty = bitmap_empty_p (solution);
2763 if (!solution_empty)
2765 bitmap_iterator bi;
2766 unsigned eff_escaped_id = find (escaped_id);
2768 /* Propagate solution to all successors. */
2769 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2770 0, j, bi)
2772 bitmap tmp;
2773 bool flag;
2775 unsigned int to = find (j);
2776 tmp = get_varinfo (to)->solution;
2777 flag = false;
2779 /* Don't try to propagate to ourselves. */
2780 if (to == i)
2781 continue;
2783 /* If we propagate from ESCAPED use ESCAPED as
2784 placeholder. */
2785 if (i == eff_escaped_id)
2786 flag = bitmap_set_bit (tmp, escaped_id)
2787 else
2788 flag = bitmap_ior_into (tmp, pts);
2790 if (flag)
2791 bitmap_set_bit (changed, to);
2796 free_topo_info (ti);
2797 bitmap_obstack_release (&iteration_obstack);
2800 BITMAP_FREE (pts);
2801 BITMAP_FREE (changed);
2802 bitmap_obstack_release (&oldpta_obstack);
2805 /* Map from trees to variable infos.  Entries are created lazily by
   get_vi_for_tree and explicitly via insert_vi_for_tree.  */
2806 static hash_map<tree, varinfo_t> *vi_for_tree;
2809 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2811 static void
2812 insert_vi_for_tree (tree t, varinfo_t vi)
2814 gcc_assert (vi);
2815 gcc_assert (!vi_for_tree->put (t, vi));
2818 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2819 exist in the map, return NULL, otherwise, return the varinfo we found. */
2821 static varinfo_t
2822 lookup_vi_for_tree (tree t)
2824 varinfo_t *slot = vi_for_tree->get (t);
2825 if (slot == NULL)
2826 return NULL;
2828 return *slot;
2831 /* Return a printable name for DECL */
2833 static const char *
2834 alias_get_name (tree decl)
2836 const char *res = NULL;
2837 char *temp;
2838 int num_printed = 0;
2840 if (!dump_file)
2841 return "NULL";
2843 if (TREE_CODE (decl) == SSA_NAME)
2845 res = get_name (decl);
2846 if (res)
2847 num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
2848 else
2849 num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
2850 if (num_printed > 0)
2852 res = ggc_strdup (temp);
2853 free (temp);
2856 else if (DECL_P (decl))
2858 if (DECL_ASSEMBLER_NAME_SET_P (decl))
2859 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2860 else
2862 res = get_name (decl);
2863 if (!res)
2865 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2866 if (num_printed > 0)
2868 res = ggc_strdup (temp);
2869 free (temp);
2874 if (res != NULL)
2875 return res;
2877 return "NULL";
2880 /* Find the variable id for tree T in the map.
2881 If T doesn't exist in the map, create an entry for it and return it. */
2883 static varinfo_t
2884 get_vi_for_tree (tree t)
2886 varinfo_t *slot = vi_for_tree->get (t);
2887 if (slot == NULL)
2889 unsigned int id = create_variable_info_for (t, alias_get_name (t), false);
2890 return get_varinfo (id);
2893 return *slot;
2896 /* Get a scalar constraint expression for a new temporary variable. */
2898 static struct constraint_expr
2899 new_scalar_tmp_constraint_exp (const char *name, bool add_id)
2901 struct constraint_expr tmp;
2902 varinfo_t vi;
2904 vi = new_var_info (NULL_TREE, name, add_id);
2905 vi->offset = 0;
2906 vi->size = -1;
2907 vi->fullsize = -1;
2908 vi->is_full_var = 1;
2910 tmp.var = vi->id;
2911 tmp.type = SCALAR;
2912 tmp.offset = 0;
2914 return tmp;
2917 /* Get a constraint expression vector from an SSA_VAR_P node.
2918 If ADDRESS_P is true, the result is used to take its address.  */
2920 static void
2921 get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
2923 struct constraint_expr cexpr;
2924 varinfo_t vi;
2926 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2927 gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
2929 /* For parameters, get at the points-to set for the actual parm
2930 decl. */
2931 if (TREE_CODE (t) == SSA_NAME
2932 && SSA_NAME_IS_DEFAULT_DEF (t)
2933 && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2934 || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
2936 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2937 return;
2940 /* For global variables resort to the alias target. */
2941 if (TREE_CODE (t) == VAR_DECL
2942 && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
2944 varpool_node *node = varpool_node::get (t);
2945 if (node && node->alias && node->analyzed)
2947 node = node->ultimate_alias_target ();
2948 /* Canonicalize the PT uid of all aliases to the ultimate target.
2949 ??? Hopefully the set of aliases can't change in a way that
2950 changes the ultimate alias target. */
2951 gcc_assert ((! DECL_PT_UID_SET_P (node->decl)
2952 || DECL_PT_UID (node->decl) == DECL_UID (node->decl))
2953 && (! DECL_PT_UID_SET_P (t)
2954 || DECL_PT_UID (t) == DECL_UID (node->decl)));
2955 DECL_PT_UID (t) = DECL_UID (node->decl);
2956 t = node->decl;
2960 vi = get_vi_for_tree (t);
2961 cexpr.var = vi->id;
2962 cexpr.type = SCALAR;
2963 cexpr.offset = 0;
2965 /* If we are not taking the address of the constraint expr, add all
2966 sub-fields of the variable as well. */
2967 if (!address_p
2968 && !vi->is_full_var)
2970 for (; vi; vi = vi_next (vi))
2972 cexpr.var = vi->id;
2973 results->safe_push (cexpr);
2975 return;
2978 results->safe_push (cexpr);
2981 /* Process constraint T, performing various simplifications and then
2982 adding it to our list of overall constraints.  May recurse: forms
   the solver cannot represent directly (double dereference, address
   stored through a dereference) are split in two via a fresh scalar
   temporary.  */
2984 static void
2985 process_constraint (constraint_t t)
2987 struct constraint_expr rhs = t->rhs;
2988 struct constraint_expr lhs = t->lhs;
2990 gcc_assert (rhs.var < varmap.length ());
2991 gcc_assert (lhs.var < varmap.length ());
2993 /* If we didn't get any useful constraint from the lhs we get
2994 &ANYTHING as fallback from get_constraint_for. Deal with
2995 it here by turning it into *ANYTHING. */
2996 if (lhs.type == ADDRESSOF
2997 && lhs.var == anything_id)
2998 lhs.type = DEREF;
3000 /* ADDRESSOF on the lhs is invalid. */
3001 gcc_assert (lhs.type != ADDRESSOF);
3003 /* We shouldn't add constraints from things that cannot have pointers.
3004 It's not completely trivial to avoid in the callers, so do it here. */
3005 if (rhs.type != ADDRESSOF
3006 && !get_varinfo (rhs.var)->may_have_pointers)
3007 return;
3009 /* Likewise adding to the solution of a non-pointer var isn't useful. */
3010 if (!get_varinfo (lhs.var)->may_have_pointers)
3011 return;
3013 /* This can happen in our IR with things like n->a = *p */
3014 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
3016 /* Split into tmp = *rhs, *lhs = tmp */
3017 struct constraint_expr tmplhs;
3018 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp", true);
3019 process_constraint (new_constraint (tmplhs, rhs));
3020 process_constraint (new_constraint (lhs, tmplhs));
3022 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
3024 /* Split into tmp = &rhs, *lhs = tmp */
3025 struct constraint_expr tmplhs;
3026 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp", true);
3027 process_constraint (new_constraint (tmplhs, rhs));
3028 process_constraint (new_constraint (lhs, tmplhs));
3030 else
3032 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
3033 constraints.safe_push (t);
3038 /* Return the position, in bits, of FIELD_DECL from the beginning of its
3039 structure. */
3041 static HOST_WIDE_INT
3042 bitpos_of_field (const tree fdecl)
3044 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3045 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3046 return -1;
3048 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3049 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
3053 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
3054 resulting constraint expressions in *RESULTS. */
3056 static void
3057 get_constraint_for_ptr_offset (tree ptr, tree offset,
3058 vec<ce_s> *results)
3060 struct constraint_expr c;
3061 unsigned int j, n;
3062 HOST_WIDE_INT rhsoffset;
3064 /* If we do not do field-sensitive PTA adding offsets to pointers
3065 does not change the points-to solution. */
3066 if (!use_field_sensitive)
3068 get_constraint_for_rhs (ptr, results);
3069 return;
3072 /* If the offset is not an integer constant whose sign-extended
3073 value fits in a HOST_WIDE_INT (in bits), we have to fall back
3074 to a conservative solution which includes all sub-fields of all
3075 pointed-to variables of ptr. */
3076 if (offset == NULL_TREE
3077 || TREE_CODE (offset) != INTEGER_CST)
3078 rhsoffset = UNKNOWN_OFFSET;
3079 else
3081 /* Sign-extend the offset. */
3082 offset_int soffset = offset_int::from (offset, SIGNED);
3083 if (!wi::fits_shwi_p (soffset))
3084 rhsoffset = UNKNOWN_OFFSET;
3085 else
3087 /* Make sure the bit-offset also fits. */
3088 HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
3089 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
3090 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3091 rhsoffset = UNKNOWN_OFFSET;
3095 get_constraint_for_rhs (ptr, results);
3096 if (rhsoffset == 0)
3097 return;
3099 /* As we are eventually appending to the solution do not use
3100 vec::iterate here. */
3101 n = results->length ();
3102 for (j = 0; j < n; j++)
3104 varinfo_t curr;
3105 c = (*results)[j];
3106 curr = get_varinfo (c.var);
3108 if (c.type == ADDRESSOF
3109 /* If this varinfo represents a full variable just use it. */
3110 && curr->is_full_var)
3112 else if (c.type == ADDRESSOF
3113 /* If we do not know the offset add all subfields. */
3114 && rhsoffset == UNKNOWN_OFFSET)
3116 varinfo_t temp = get_varinfo (curr->head);
3119 struct constraint_expr c2;
3120 c2.var = temp->id;
3121 c2.type = ADDRESSOF;
3122 c2.offset = 0;
3123 if (c2.var != c.var)
3124 results->safe_push (c2);
3125 temp = vi_next (temp);
3127 while (temp);
3129 else if (c.type == ADDRESSOF)
3131 varinfo_t temp;
3132 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3134 /* If curr->offset + rhsoffset is less than zero adjust it. */
3135 if (rhsoffset < 0
3136 && curr->offset < offset)
3137 offset = 0;
3139 /* We have to include all fields that overlap the current
3140 field shifted by rhsoffset. And we include at least
3141 the last or the first field of the variable to represent
3142 reachability of off-bound addresses, in particular &object + 1,
3143 conservatively correct. */
3144 temp = first_or_preceding_vi_for_offset (curr, offset);
3145 c.var = temp->id;
3146 c.offset = 0;
3147 temp = vi_next (temp);
3148 while (temp
3149 && temp->offset < offset + curr->size)
3151 struct constraint_expr c2;
3152 c2.var = temp->id;
3153 c2.type = ADDRESSOF;
3154 c2.offset = 0;
3155 results->safe_push (c2);
3156 temp = vi_next (temp);
3159 else if (c.type == SCALAR)
3161 gcc_assert (c.offset == 0);
3162 c.offset = rhsoffset;
3164 else
3165 /* We shouldn't get any DEREFs here. */
3166 gcc_unreachable ();
3168 (*results)[j] = c;
3173 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3174 If ADDRESS_P is true the result is used to take its address.
3175 If lhs_p is true then the constraint expression is assumed to be used
3176 as the lhs. */
3178 static void
3179 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3180 bool address_p, bool lhs_p)
3182 tree orig_t = t;
3183 HOST_WIDE_INT bitsize = -1;
3184 HOST_WIDE_INT bitmaxsize = -1;
3185 HOST_WIDE_INT bitpos;
3186 bool reverse;
3187 tree forzero;
3189 /* Some people like to do cute things like take the address of
3190 &0->a.b */
3191 forzero = t;
3192 while (handled_component_p (forzero)
3193 || INDIRECT_REF_P (forzero)
3194 || TREE_CODE (forzero) == MEM_REF)
3195 forzero = TREE_OPERAND (forzero, 0);
3197 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3199 struct constraint_expr temp;
3201 temp.offset = 0;
3202 temp.var = integer_id;
3203 temp.type = SCALAR;
3204 results->safe_push (temp);
3205 return;
3208 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse);
3210 /* Pretend to take the address of the base, we'll take care of
3211 adding the required subset of sub-fields below. */
3212 get_constraint_for_1 (t, results, true, lhs_p);
3213 gcc_assert (results->length () == 1);
3214 struct constraint_expr &result = results->last ();
3216 if (result.type == SCALAR
3217 && get_varinfo (result.var)->is_full_var)
3218 /* For single-field vars do not bother about the offset. */
3219 result.offset = 0;
3220 else if (result.type == SCALAR)
3222 /* In languages like C, you can access one past the end of an
3223 array. You aren't allowed to dereference it, so we can
3224 ignore this constraint. When we handle pointer subtraction,
3225 we may have to do something cute here. */
3227 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
3228 && bitmaxsize != 0)
3230 /* It's also not true that the constraint will actually start at the
3231 right offset, it may start in some padding. We only care about
3232 setting the constraint to the first actual field it touches, so
3233 walk to find it. */
3234 struct constraint_expr cexpr = result;
3235 varinfo_t curr;
3236 results->pop ();
3237 cexpr.offset = 0;
3238 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3240 if (ranges_overlap_p (curr->offset, curr->size,
3241 bitpos, bitmaxsize))
3243 cexpr.var = curr->id;
3244 results->safe_push (cexpr);
3245 if (address_p)
3246 break;
3249 /* If we are going to take the address of this field then
3250 to be able to compute reachability correctly add at least
3251 the last field of the variable. */
3252 if (address_p && results->length () == 0)
3254 curr = get_varinfo (cexpr.var);
3255 while (curr->next != 0)
3256 curr = vi_next (curr);
3257 cexpr.var = curr->id;
3258 results->safe_push (cexpr);
3260 else if (results->length () == 0)
3261 /* Assert that we found *some* field there. The user couldn't be
3262 accessing *only* padding. */
3263 /* Still the user could access one past the end of an array
3264 embedded in a struct resulting in accessing *only* padding. */
3265 /* Or accessing only padding via type-punning to a type
3266 that has a field just in padding space. */
3268 cexpr.type = SCALAR;
3269 cexpr.var = anything_id;
3270 cexpr.offset = 0;
3271 results->safe_push (cexpr);
3274 else if (bitmaxsize == 0)
3276 if (dump_file && (dump_flags & TDF_DETAILS))
3277 fprintf (dump_file, "Access to zero-sized part of variable,"
3278 "ignoring\n");
3280 else
3281 if (dump_file && (dump_flags & TDF_DETAILS))
3282 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3284 else if (result.type == DEREF)
3286 /* If we do not know exactly where the access goes say so. Note
3287 that only for non-structure accesses we know that we access
3288 at most one subfield of any variable. */
3289 if (bitpos == -1
3290 || bitsize != bitmaxsize
3291 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3292 || result.offset == UNKNOWN_OFFSET)
3293 result.offset = UNKNOWN_OFFSET;
3294 else
3295 result.offset += bitpos;
3297 else if (result.type == ADDRESSOF)
3299 /* We can end up here for component references on a
3300 VIEW_CONVERT_EXPR <>(&foobar). */
3301 result.type = SCALAR;
3302 result.var = anything_id;
3303 result.offset = 0;
3305 else
3306 gcc_unreachable ();
3310 /* Dereference the constraint expression CONS, and return the result.
3311 DEREF (ADDRESSOF) = SCALAR
3312 DEREF (SCALAR) = DEREF
3313 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3314 This is needed so that we can handle dereferencing DEREF constraints. */
3316 static void
3317 do_deref (vec<ce_s> *constraints)
3319 struct constraint_expr *c;
3320 unsigned int i = 0;
3322 FOR_EACH_VEC_ELT (*constraints, i, c)
3324 if (c->type == SCALAR)
3325 c->type = DEREF;
3326 else if (c->type == ADDRESSOF)
3327 c->type = SCALAR;
3328 else if (c->type == DEREF)
3330 struct constraint_expr tmplhs;
3331 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp", true);
3332 process_constraint (new_constraint (tmplhs, *c));
3333 c->var = tmplhs.var;
3335 else
3336 gcc_unreachable ();
3340 /* Given a tree T, return the constraint expression for taking the
3341 address of it. */
3343 static void
3344 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3346 struct constraint_expr *c;
3347 unsigned int i;
3349 get_constraint_for_1 (t, results, true, true);
3351 FOR_EACH_VEC_ELT (*results, i, c)
3353 if (c->type == DEREF)
3354 c->type = SCALAR;
3355 else
3356 c->type = ADDRESSOF;
3360 /* Given a tree T, return the constraint expression for it. */
3362 static void
3363 get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
3364 bool lhs_p)
3366 struct constraint_expr temp;
3368 /* x = integer is all glommed to a single variable, which doesn't
3369 point to anything by itself. That is, of course, unless it is an
3370 integer constant being treated as a pointer, in which case, we
3371 will return that this is really the addressof anything. This
3372 happens below, since it will fall into the default case. The only
3373 case we know something about an integer treated like a pointer is
3374 when it is the NULL pointer, and then we just say it points to
3375 NULL.
3377 Do not do that if -fno-delete-null-pointer-checks though, because
3378 in that case *NULL does not fail, so it _should_ alias *anything.
3379 It is not worth adding a new option or renaming the existing one,
3380 since this case is relatively obscure. */
3381 if ((TREE_CODE (t) == INTEGER_CST
3382 && integer_zerop (t))
3383 /* The only valid CONSTRUCTORs in gimple with pointer typed
3384 elements are zero-initializer. But in IPA mode we also
3385 process global initializers, so verify at least. */
3386 || (TREE_CODE (t) == CONSTRUCTOR
3387 && CONSTRUCTOR_NELTS (t) == 0))
3389 if (flag_delete_null_pointer_checks)
3390 temp.var = nothing_id;
3391 else
3392 temp.var = nonlocal_id;
3393 temp.type = ADDRESSOF;
3394 temp.offset = 0;
3395 results->safe_push (temp);
3396 return;
3399 /* String constants are read-only, ideally we'd have a CONST_DECL
3400 for those. */
3401 if (TREE_CODE (t) == STRING_CST)
3403 temp.var = string_id;
3404 temp.type = SCALAR;
3405 temp.offset = 0;
3406 results->safe_push (temp);
3407 return;
3410 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3412 case tcc_expression:
3414 switch (TREE_CODE (t))
3416 case ADDR_EXPR:
3417 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3418 return;
3419 default:;
3421 break;
3423 case tcc_reference:
3425 switch (TREE_CODE (t))
3427 case MEM_REF:
3429 struct constraint_expr cs;
3430 varinfo_t vi, curr;
3431 get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3432 TREE_OPERAND (t, 1), results);
3433 do_deref (results);
3435 /* If we are not taking the address then make sure to process
3436 all subvariables we might access. */
3437 if (address_p)
3438 return;
3440 cs = results->last ();
3441 if (cs.type == DEREF
3442 && type_can_have_subvars (TREE_TYPE (t)))
3444 /* For dereferences this means we have to defer it
3445 to solving time. */
3446 results->last ().offset = UNKNOWN_OFFSET;
3447 return;
3449 if (cs.type != SCALAR)
3450 return;
3452 vi = get_varinfo (cs.var);
3453 curr = vi_next (vi);
3454 if (!vi->is_full_var
3455 && curr)
3457 unsigned HOST_WIDE_INT size;
3458 if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
3459 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
3460 else
3461 size = -1;
3462 for (; curr; curr = vi_next (curr))
3464 if (curr->offset - vi->offset < size)
3466 cs.var = curr->id;
3467 results->safe_push (cs);
3469 else
3470 break;
3473 return;
3475 case ARRAY_REF:
3476 case ARRAY_RANGE_REF:
3477 case COMPONENT_REF:
3478 case IMAGPART_EXPR:
3479 case REALPART_EXPR:
3480 case BIT_FIELD_REF:
3481 get_constraint_for_component_ref (t, results, address_p, lhs_p);
3482 return;
3483 case VIEW_CONVERT_EXPR:
3484 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
3485 lhs_p);
3486 return;
3487 /* We are missing handling for TARGET_MEM_REF here. */
3488 default:;
3490 break;
3492 case tcc_exceptional:
3494 switch (TREE_CODE (t))
3496 case SSA_NAME:
3498 get_constraint_for_ssa_var (t, results, address_p);
3499 return;
3501 case CONSTRUCTOR:
3503 unsigned int i;
3504 tree val;
3505 auto_vec<ce_s> tmp;
3506 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3508 struct constraint_expr *rhsp;
3509 unsigned j;
3510 get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3511 FOR_EACH_VEC_ELT (tmp, j, rhsp)
3512 results->safe_push (*rhsp);
3513 tmp.truncate (0);
3515 /* We do not know whether the constructor was complete,
3516 so technically we have to add &NOTHING or &ANYTHING
3517 like we do for an empty constructor as well. */
3518 return;
3520 default:;
3522 break;
3524 case tcc_declaration:
3526 get_constraint_for_ssa_var (t, results, address_p);
3527 return;
3529 case tcc_constant:
3531 /* We cannot refer to automatic variables through constants. */
3532 temp.type = ADDRESSOF;
3533 temp.var = nonlocal_id;
3534 temp.offset = 0;
3535 results->safe_push (temp);
3536 return;
3538 default:;
3541 /* The default fallback is a constraint from anything. */
3542 temp.type = ADDRESSOF;
3543 temp.var = anything_id;
3544 temp.offset = 0;
3545 results->safe_push (temp);
3548 /* Given a gimple tree T, return the constraint expression vector for it. */
3550 static void
3551 get_constraint_for (tree t, vec<ce_s> *results)
3553 gcc_assert (results->length () == 0);
3555 get_constraint_for_1 (t, results, false, true);
3558 /* Given a gimple tree T, return the constraint expression vector for it
3559 to be used as the rhs of a constraint. */
3561 static void
3562 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3564 gcc_assert (results->length () == 0);
3566 get_constraint_for_1 (t, results, false, false);
3570 /* Efficiently generates constraints from all entries in *RHSC to all
3571 entries in *LHSC. */
3573 static void
3574 process_all_all_constraints (vec<ce_s> lhsc,
3575 vec<ce_s> rhsc)
3577 struct constraint_expr *lhsp, *rhsp;
3578 unsigned i, j;
3580 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3582 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3583 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3584 process_constraint (new_constraint (*lhsp, *rhsp));
3586 else
3588 struct constraint_expr tmp;
3589 tmp = new_scalar_tmp_constraint_exp ("allalltmp", true);
3590 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3591 process_constraint (new_constraint (tmp, *rhsp));
3592 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3593 process_constraint (new_constraint (*lhsp, tmp));
3597 /* Handle aggregate copies by expanding into copies of the respective
3598 fields of the structures. */
3600 static void
3601 do_structure_copy (tree lhsop, tree rhsop)
3603 struct constraint_expr *lhsp, *rhsp;
3604 auto_vec<ce_s> lhsc;
3605 auto_vec<ce_s> rhsc;
3606 unsigned j;
3608 get_constraint_for (lhsop, &lhsc);
3609 get_constraint_for_rhs (rhsop, &rhsc);
3610 lhsp = &lhsc[0];
3611 rhsp = &rhsc[0];
3612 if (lhsp->type == DEREF
3613 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3614 || rhsp->type == DEREF)
3616 if (lhsp->type == DEREF)
3618 gcc_assert (lhsc.length () == 1);
3619 lhsp->offset = UNKNOWN_OFFSET;
3621 if (rhsp->type == DEREF)
3623 gcc_assert (rhsc.length () == 1);
3624 rhsp->offset = UNKNOWN_OFFSET;
3626 process_all_all_constraints (lhsc, rhsc);
3628 else if (lhsp->type == SCALAR
3629 && (rhsp->type == SCALAR
3630 || rhsp->type == ADDRESSOF))
3632 HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
3633 HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
3634 bool reverse;
3635 unsigned k = 0;
3636 get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize,
3637 &reverse);
3638 get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize,
3639 &reverse);
3640 for (j = 0; lhsc.iterate (j, &lhsp);)
3642 varinfo_t lhsv, rhsv;
3643 rhsp = &rhsc[k];
3644 lhsv = get_varinfo (lhsp->var);
3645 rhsv = get_varinfo (rhsp->var);
3646 if (lhsv->may_have_pointers
3647 && (lhsv->is_full_var
3648 || rhsv->is_full_var
3649 || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3650 rhsv->offset + lhsoffset, rhsv->size)))
3651 process_constraint (new_constraint (*lhsp, *rhsp));
3652 if (!rhsv->is_full_var
3653 && (lhsv->is_full_var
3654 || (lhsv->offset + rhsoffset + lhsv->size
3655 > rhsv->offset + lhsoffset + rhsv->size)))
3657 ++k;
3658 if (k >= rhsc.length ())
3659 break;
3661 else
3662 ++j;
3665 else
3666 gcc_unreachable ();
3669 /* Create constraints ID = { rhsc }. */
3671 static void
3672 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3674 struct constraint_expr *c;
3675 struct constraint_expr includes;
3676 unsigned int j;
3678 includes.var = id;
3679 includes.offset = 0;
3680 includes.type = SCALAR;
3682 FOR_EACH_VEC_ELT (rhsc, j, c)
3683 process_constraint (new_constraint (includes, *c));
3686 /* Create a constraint ID = OP. */
3688 static void
3689 make_constraint_to (unsigned id, tree op)
3691 auto_vec<ce_s> rhsc;
3692 get_constraint_for_rhs (op, &rhsc);
3693 make_constraints_to (id, rhsc);
3696 /* Create a constraint ID = &FROM. */
3698 static void
3699 make_constraint_from (varinfo_t vi, int from)
3701 struct constraint_expr lhs, rhs;
3703 lhs.var = vi->id;
3704 lhs.offset = 0;
3705 lhs.type = SCALAR;
3707 rhs.var = from;
3708 rhs.offset = 0;
3709 rhs.type = ADDRESSOF;
3710 process_constraint (new_constraint (lhs, rhs));
3713 /* Create a constraint ID = FROM. */
3715 static void
3716 make_copy_constraint (varinfo_t vi, int from)
3718 struct constraint_expr lhs, rhs;
3720 lhs.var = vi->id;
3721 lhs.offset = 0;
3722 lhs.type = SCALAR;
3724 rhs.var = from;
3725 rhs.offset = 0;
3726 rhs.type = SCALAR;
3727 process_constraint (new_constraint (lhs, rhs));
3730 /* Make constraints necessary to make OP escape. */
3732 static void
3733 make_escape_constraint (tree op)
3735 make_constraint_to (escaped_id, op);
3738 /* Add constraints to that the solution of VI is transitively closed. */
3740 static void
3741 make_transitive_closure_constraints (varinfo_t vi)
3743 struct constraint_expr lhs, rhs;
3745 /* VAR = *VAR; */
3746 lhs.type = SCALAR;
3747 lhs.var = vi->id;
3748 lhs.offset = 0;
3749 rhs.type = DEREF;
3750 rhs.var = vi->id;
3751 rhs.offset = UNKNOWN_OFFSET;
3752 process_constraint (new_constraint (lhs, rhs));
3755 /* Temporary storage for fake var decls. */
3756 struct obstack fake_var_decl_obstack;
/* Build a fake VAR_DECL of type TYPE acting as referrer to a DECL_UID.
   The decl is obstack-allocated and never reclaimed individually.  */

static tree
build_fake_var_decl (tree type)
{
  tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
  memset (decl, 0, sizeof (struct tree_var_decl));
  TREE_SET_CODE (decl, VAR_DECL);
  TREE_TYPE (decl) = type;
  DECL_UID (decl) = allocate_decl_uid ();
  /* -1 makes the decl share the points-to UID space of "unknown".  */
  SET_DECL_PT_UID (decl, -1);
  layout_decl (decl, 0);
  return decl;
}
3773 /* Create a new artificial heap variable with NAME.
3774 Return the created variable. */
3776 static varinfo_t
3777 make_heapvar (const char *name, bool add_id)
3779 varinfo_t vi;
3780 tree heapvar;
3782 heapvar = build_fake_var_decl (ptr_type_node);
3783 DECL_EXTERNAL (heapvar) = 1;
3785 vi = new_var_info (heapvar, name, add_id);
3786 vi->is_artificial_var = true;
3787 vi->is_heap_var = true;
3788 vi->is_unknown_size_var = true;
3789 vi->offset = 0;
3790 vi->fullsize = ~0;
3791 vi->size = ~0;
3792 vi->is_full_var = true;
3793 insert_vi_for_tree (heapvar, vi);
3795 return vi;
3798 /* Create a new artificial heap variable with NAME and make a
3799 constraint from it to LHS. Set flags according to a tag used
3800 for tracking restrict pointers. */
3802 static varinfo_t
3803 make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id)
3805 varinfo_t vi = make_heapvar (name, add_id);
3806 vi->is_restrict_var = 1;
3807 vi->is_global_var = 1;
3808 vi->may_have_pointers = 1;
3809 make_constraint_from (lhs, vi->id);
3810 return vi;
3813 /* Create a new artificial heap variable with NAME and make a
3814 constraint from it to LHS. Set flags according to a tag used
3815 for tracking restrict pointers and make the artificial heap
3816 point to global memory. */
3818 static varinfo_t
3819 make_constraint_from_global_restrict (varinfo_t lhs, const char *name,
3820 bool add_id)
3822 varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id);
3823 make_copy_constraint (vi, nonlocal_id);
3824 return vi;
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  */

enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3836 /* Get a constraint for the requested part of a function designator FI
3837 when operating in IPA mode. */
3839 static struct constraint_expr
3840 get_function_part_constraint (varinfo_t fi, unsigned part)
3842 struct constraint_expr c;
3844 gcc_assert (in_ipa_mode);
3846 if (fi->id == anything_id)
3848 /* ??? We probably should have a ANYFN special variable. */
3849 c.var = anything_id;
3850 c.offset = 0;
3851 c.type = SCALAR;
3853 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3855 varinfo_t ai = first_vi_for_offset (fi, part);
3856 if (ai)
3857 c.var = ai->id;
3858 else
3859 c.var = anything_id;
3860 c.offset = 0;
3861 c.type = SCALAR;
3863 else
3865 c.var = fi->id;
3866 c.offset = part;
3867 c.type = DEREF;
3870 return c;
3873 /* For non-IPA mode, generate constraints necessary for a call on the
3874 RHS. */
3876 static void
3877 handle_rhs_call (gcall *stmt, vec<ce_s> *results)
3879 struct constraint_expr rhsc;
3880 unsigned i;
3881 bool returns_uses = false;
3883 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3885 tree arg = gimple_call_arg (stmt, i);
3886 int flags = gimple_call_arg_flags (stmt, i);
3888 /* If the argument is not used we can ignore it. */
3889 if (flags & EAF_UNUSED)
3890 continue;
3892 /* As we compute ESCAPED context-insensitive we do not gain
3893 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3894 set. The argument would still get clobbered through the
3895 escape solution. */
3896 if ((flags & EAF_NOCLOBBER)
3897 && (flags & EAF_NOESCAPE))
3899 varinfo_t uses = get_call_use_vi (stmt);
3900 if (!(flags & EAF_DIRECT))
3902 varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
3903 make_constraint_to (tem->id, arg);
3904 make_transitive_closure_constraints (tem);
3905 make_copy_constraint (uses, tem->id);
3907 else
3908 make_constraint_to (uses->id, arg);
3909 returns_uses = true;
3911 else if (flags & EAF_NOESCAPE)
3913 struct constraint_expr lhs, rhs;
3914 varinfo_t uses = get_call_use_vi (stmt);
3915 varinfo_t clobbers = get_call_clobber_vi (stmt);
3916 varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
3917 make_constraint_to (tem->id, arg);
3918 if (!(flags & EAF_DIRECT))
3919 make_transitive_closure_constraints (tem);
3920 make_copy_constraint (uses, tem->id);
3921 make_copy_constraint (clobbers, tem->id);
3922 /* Add *tem = nonlocal, do not add *tem = callused as
3923 EAF_NOESCAPE parameters do not escape to other parameters
3924 and all other uses appear in NONLOCAL as well. */
3925 lhs.type = DEREF;
3926 lhs.var = tem->id;
3927 lhs.offset = 0;
3928 rhs.type = SCALAR;
3929 rhs.var = nonlocal_id;
3930 rhs.offset = 0;
3931 process_constraint (new_constraint (lhs, rhs));
3932 returns_uses = true;
3934 else
3935 make_escape_constraint (arg);
3938 /* If we added to the calls uses solution make sure we account for
3939 pointers to it to be returned. */
3940 if (returns_uses)
3942 rhsc.var = get_call_use_vi (stmt)->id;
3943 rhsc.offset = 0;
3944 rhsc.type = SCALAR;
3945 results->safe_push (rhsc);
3948 /* The static chain escapes as well. */
3949 if (gimple_call_chain (stmt))
3950 make_escape_constraint (gimple_call_chain (stmt));
3952 /* And if we applied NRV the address of the return slot escapes as well. */
3953 if (gimple_call_return_slot_opt_p (stmt)
3954 && gimple_call_lhs (stmt) != NULL_TREE
3955 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3957 auto_vec<ce_s> tmpc;
3958 struct constraint_expr lhsc, *c;
3959 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
3960 lhsc.var = escaped_id;
3961 lhsc.offset = 0;
3962 lhsc.type = SCALAR;
3963 FOR_EACH_VEC_ELT (tmpc, i, c)
3964 process_constraint (new_constraint (lhsc, *c));
3967 /* Regular functions return nonlocal memory. */
3968 rhsc.var = nonlocal_id;
3969 rhsc.offset = 0;
3970 rhsc.type = SCALAR;
3971 results->safe_push (rhsc);
3974 /* For non-IPA mode, generate constraints necessary for a call
3975 that returns a pointer and assigns it to LHS. This simply makes
3976 the LHS point to global and escaped variables. */
3978 static void
3979 handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> rhsc,
3980 tree fndecl)
3982 auto_vec<ce_s> lhsc;
3984 get_constraint_for (lhs, &lhsc);
3985 /* If the store is to a global decl make sure to
3986 add proper escape constraints. */
3987 lhs = get_base_address (lhs);
3988 if (lhs
3989 && DECL_P (lhs)
3990 && is_global_var (lhs))
3992 struct constraint_expr tmpc;
3993 tmpc.var = escaped_id;
3994 tmpc.offset = 0;
3995 tmpc.type = SCALAR;
3996 lhsc.safe_push (tmpc);
3999 /* If the call returns an argument unmodified override the rhs
4000 constraints. */
4001 if (flags & ERF_RETURNS_ARG
4002 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
4004 tree arg;
4005 rhsc.create (0);
4006 arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
4007 get_constraint_for (arg, &rhsc);
4008 process_all_all_constraints (lhsc, rhsc);
4009 rhsc.release ();
4011 else if (flags & ERF_NOALIAS)
4013 varinfo_t vi;
4014 struct constraint_expr tmpc;
4015 rhsc.create (0);
4016 vi = make_heapvar ("HEAP", true);
4017 /* We are marking allocated storage local, we deal with it becoming
4018 global by escaping and setting of vars_contains_escaped_heap. */
4019 DECL_EXTERNAL (vi->decl) = 0;
4020 vi->is_global_var = 0;
4021 /* If this is not a real malloc call assume the memory was
4022 initialized and thus may point to global memory. All
4023 builtin functions with the malloc attribute behave in a sane way. */
4024 if (!fndecl
4025 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
4026 make_constraint_from (vi, nonlocal_id);
4027 tmpc.var = vi->id;
4028 tmpc.offset = 0;
4029 tmpc.type = ADDRESSOF;
4030 rhsc.safe_push (tmpc);
4031 process_all_all_constraints (lhsc, rhsc);
4032 rhsc.release ();
4034 else
4035 process_all_all_constraints (lhsc, rhsc);
4038 /* For non-IPA mode, generate constraints necessary for a call of a
4039 const function that returns a pointer in the statement STMT. */
4041 static void
4042 handle_const_call (gcall *stmt, vec<ce_s> *results)
4044 struct constraint_expr rhsc;
4045 unsigned int k;
4047 /* Treat nested const functions the same as pure functions as far
4048 as the static chain is concerned. */
4049 if (gimple_call_chain (stmt))
4051 varinfo_t uses = get_call_use_vi (stmt);
4052 make_transitive_closure_constraints (uses);
4053 make_constraint_to (uses->id, gimple_call_chain (stmt));
4054 rhsc.var = uses->id;
4055 rhsc.offset = 0;
4056 rhsc.type = SCALAR;
4057 results->safe_push (rhsc);
4060 /* May return arguments. */
4061 for (k = 0; k < gimple_call_num_args (stmt); ++k)
4063 tree arg = gimple_call_arg (stmt, k);
4064 auto_vec<ce_s> argc;
4065 unsigned i;
4066 struct constraint_expr *argp;
4067 get_constraint_for_rhs (arg, &argc);
4068 FOR_EACH_VEC_ELT (argc, i, argp)
4069 results->safe_push (*argp);
4072 /* May return addresses of globals. */
4073 rhsc.var = nonlocal_id;
4074 rhsc.offset = 0;
4075 rhsc.type = ADDRESSOF;
4076 results->safe_push (rhsc);
4079 /* For non-IPA mode, generate constraints necessary for a call to a
4080 pure function in statement STMT. */
4082 static void
4083 handle_pure_call (gcall *stmt, vec<ce_s> *results)
4085 struct constraint_expr rhsc;
4086 unsigned i;
4087 varinfo_t uses = NULL;
4089 /* Memory reached from pointer arguments is call-used. */
4090 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4092 tree arg = gimple_call_arg (stmt, i);
4093 if (!uses)
4095 uses = get_call_use_vi (stmt);
4096 make_transitive_closure_constraints (uses);
4098 make_constraint_to (uses->id, arg);
4101 /* The static chain is used as well. */
4102 if (gimple_call_chain (stmt))
4104 if (!uses)
4106 uses = get_call_use_vi (stmt);
4107 make_transitive_closure_constraints (uses);
4109 make_constraint_to (uses->id, gimple_call_chain (stmt));
4112 /* Pure functions may return call-used and nonlocal memory. */
4113 if (uses)
4115 rhsc.var = uses->id;
4116 rhsc.offset = 0;
4117 rhsc.type = SCALAR;
4118 results->safe_push (rhsc);
4120 rhsc.var = nonlocal_id;
4121 rhsc.offset = 0;
4122 rhsc.type = SCALAR;
4123 results->safe_push (rhsc);
4127 /* Return the varinfo for the callee of CALL. */
4129 static varinfo_t
4130 get_fi_for_callee (gcall *call)
4132 tree decl, fn = gimple_call_fn (call);
4134 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4135 fn = OBJ_TYPE_REF_EXPR (fn);
4137 /* If we can directly resolve the function being called, do so.
4138 Otherwise, it must be some sort of indirect expression that
4139 we should still be able to handle. */
4140 decl = gimple_call_addr_fndecl (fn);
4141 if (decl)
4142 return get_vi_for_tree (decl);
4144 /* If the function is anything other than a SSA name pointer we have no
4145 clue and should be getting ANYFN (well, ANYTHING for now). */
4146 if (!fn || TREE_CODE (fn) != SSA_NAME)
4147 return get_varinfo (anything_id);
4149 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4150 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4151 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4152 fn = SSA_NAME_VAR (fn);
4154 return get_vi_for_tree (fn);
4157 /* Create constraints for assigning call argument ARG to the incoming parameter
4158 INDEX of function FI. */
4160 static void
4161 find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg)
4163 struct constraint_expr lhs;
4164 lhs = get_function_part_constraint (fi, fi_parm_base + index);
4166 auto_vec<ce_s, 2> rhsc;
4167 get_constraint_for_rhs (arg, &rhsc);
4169 unsigned j;
4170 struct constraint_expr *rhsp;
4171 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4172 process_constraint (new_constraint (lhs, *rhsp));
4175 /* Return true if FNDECL may be part of another lto partition. */
4177 static bool
4178 fndecl_maybe_in_other_partition (tree fndecl)
4180 cgraph_node *fn_node = cgraph_node::get (fndecl);
4181 if (fn_node == NULL)
4182 return true;
4184 return fn_node->in_other_partition;
4187 /* Create constraints for the builtin call T. Return true if the call
4188 was handled, otherwise false. */
4190 static bool
4191 find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
4193 tree fndecl = gimple_call_fndecl (t);
4194 auto_vec<ce_s, 2> lhsc;
4195 auto_vec<ce_s, 4> rhsc;
4196 varinfo_t fi;
4198 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4199 /* ??? All builtins that are handled here need to be handled
4200 in the alias-oracle query functions explicitly! */
4201 switch (DECL_FUNCTION_CODE (fndecl))
4203 /* All the following functions return a pointer to the same object
4204 as their first argument points to. The functions do not add
4205 to the ESCAPED solution. The functions make the first argument
4206 pointed to memory point to what the second argument pointed to
4207 memory points to. */
4208 case BUILT_IN_STRCPY:
4209 case BUILT_IN_STRNCPY:
4210 case BUILT_IN_BCOPY:
4211 case BUILT_IN_MEMCPY:
4212 case BUILT_IN_MEMMOVE:
4213 case BUILT_IN_MEMPCPY:
4214 case BUILT_IN_STPCPY:
4215 case BUILT_IN_STPNCPY:
4216 case BUILT_IN_STRCAT:
4217 case BUILT_IN_STRNCAT:
4218 case BUILT_IN_STRCPY_CHK:
4219 case BUILT_IN_STRNCPY_CHK:
4220 case BUILT_IN_MEMCPY_CHK:
4221 case BUILT_IN_MEMMOVE_CHK:
4222 case BUILT_IN_MEMPCPY_CHK:
4223 case BUILT_IN_STPCPY_CHK:
4224 case BUILT_IN_STPNCPY_CHK:
4225 case BUILT_IN_STRCAT_CHK:
4226 case BUILT_IN_STRNCAT_CHK:
4227 case BUILT_IN_TM_MEMCPY:
4228 case BUILT_IN_TM_MEMMOVE:
4230 tree res = gimple_call_lhs (t);
4231 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4232 == BUILT_IN_BCOPY ? 1 : 0));
4233 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4234 == BUILT_IN_BCOPY ? 0 : 1));
4235 if (res != NULL_TREE)
4237 get_constraint_for (res, &lhsc);
4238 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4239 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4240 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4241 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4242 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4243 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4244 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4245 else
4246 get_constraint_for (dest, &rhsc);
4247 process_all_all_constraints (lhsc, rhsc);
4248 lhsc.truncate (0);
4249 rhsc.truncate (0);
4251 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4252 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4253 do_deref (&lhsc);
4254 do_deref (&rhsc);
4255 process_all_all_constraints (lhsc, rhsc);
4256 return true;
4258 case BUILT_IN_MEMSET:
4259 case BUILT_IN_MEMSET_CHK:
4260 case BUILT_IN_TM_MEMSET:
4262 tree res = gimple_call_lhs (t);
4263 tree dest = gimple_call_arg (t, 0);
4264 unsigned i;
4265 ce_s *lhsp;
4266 struct constraint_expr ac;
4267 if (res != NULL_TREE)
4269 get_constraint_for (res, &lhsc);
4270 get_constraint_for (dest, &rhsc);
4271 process_all_all_constraints (lhsc, rhsc);
4272 lhsc.truncate (0);
4274 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4275 do_deref (&lhsc);
4276 if (flag_delete_null_pointer_checks
4277 && integer_zerop (gimple_call_arg (t, 1)))
4279 ac.type = ADDRESSOF;
4280 ac.var = nothing_id;
4282 else
4284 ac.type = SCALAR;
4285 ac.var = integer_id;
4287 ac.offset = 0;
4288 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4289 process_constraint (new_constraint (*lhsp, ac));
4290 return true;
4292 case BUILT_IN_POSIX_MEMALIGN:
4294 tree ptrptr = gimple_call_arg (t, 0);
4295 get_constraint_for (ptrptr, &lhsc);
4296 do_deref (&lhsc);
4297 varinfo_t vi = make_heapvar ("HEAP", true);
4298 /* We are marking allocated storage local, we deal with it becoming
4299 global by escaping and setting of vars_contains_escaped_heap. */
4300 DECL_EXTERNAL (vi->decl) = 0;
4301 vi->is_global_var = 0;
4302 struct constraint_expr tmpc;
4303 tmpc.var = vi->id;
4304 tmpc.offset = 0;
4305 tmpc.type = ADDRESSOF;
4306 rhsc.safe_push (tmpc);
4307 process_all_all_constraints (lhsc, rhsc);
4308 return true;
4310 case BUILT_IN_ASSUME_ALIGNED:
4312 tree res = gimple_call_lhs (t);
4313 tree dest = gimple_call_arg (t, 0);
4314 if (res != NULL_TREE)
4316 get_constraint_for (res, &lhsc);
4317 get_constraint_for (dest, &rhsc);
4318 process_all_all_constraints (lhsc, rhsc);
4320 return true;
4322 /* All the following functions do not return pointers, do not
4323 modify the points-to sets of memory reachable from their
4324 arguments and do not add to the ESCAPED solution. */
4325 case BUILT_IN_SINCOS:
4326 case BUILT_IN_SINCOSF:
4327 case BUILT_IN_SINCOSL:
4328 case BUILT_IN_FREXP:
4329 case BUILT_IN_FREXPF:
4330 case BUILT_IN_FREXPL:
4331 case BUILT_IN_GAMMA_R:
4332 case BUILT_IN_GAMMAF_R:
4333 case BUILT_IN_GAMMAL_R:
4334 case BUILT_IN_LGAMMA_R:
4335 case BUILT_IN_LGAMMAF_R:
4336 case BUILT_IN_LGAMMAL_R:
4337 case BUILT_IN_MODF:
4338 case BUILT_IN_MODFF:
4339 case BUILT_IN_MODFL:
4340 case BUILT_IN_REMQUO:
4341 case BUILT_IN_REMQUOF:
4342 case BUILT_IN_REMQUOL:
4343 case BUILT_IN_FREE:
4344 return true;
4345 case BUILT_IN_STRDUP:
4346 case BUILT_IN_STRNDUP:
4347 case BUILT_IN_REALLOC:
4348 if (gimple_call_lhs (t))
4350 handle_lhs_call (t, gimple_call_lhs (t),
4351 gimple_call_return_flags (t) | ERF_NOALIAS,
4352 vNULL, fndecl);
4353 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4354 NULL_TREE, &lhsc);
4355 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4356 NULL_TREE, &rhsc);
4357 do_deref (&lhsc);
4358 do_deref (&rhsc);
4359 process_all_all_constraints (lhsc, rhsc);
4360 lhsc.truncate (0);
4361 rhsc.truncate (0);
4362 /* For realloc the resulting pointer can be equal to the
4363 argument as well. But only doing this wouldn't be
4364 correct because with ptr == 0 realloc behaves like malloc. */
4365 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
4367 get_constraint_for (gimple_call_lhs (t), &lhsc);
4368 get_constraint_for (gimple_call_arg (t, 0), &rhsc);
4369 process_all_all_constraints (lhsc, rhsc);
4371 return true;
4373 break;
4374 /* String / character search functions return a pointer into the
4375 source string or NULL. */
4376 case BUILT_IN_INDEX:
4377 case BUILT_IN_STRCHR:
4378 case BUILT_IN_STRRCHR:
4379 case BUILT_IN_MEMCHR:
4380 case BUILT_IN_STRSTR:
4381 case BUILT_IN_STRPBRK:
4382 if (gimple_call_lhs (t))
4384 tree src = gimple_call_arg (t, 0);
4385 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4386 constraint_expr nul;
4387 nul.var = nothing_id;
4388 nul.offset = 0;
4389 nul.type = ADDRESSOF;
4390 rhsc.safe_push (nul);
4391 get_constraint_for (gimple_call_lhs (t), &lhsc);
4392 process_all_all_constraints (lhsc, rhsc);
4394 return true;
4395 /* Trampolines are special - they set up passing the static
4396 frame. */
4397 case BUILT_IN_INIT_TRAMPOLINE:
4399 tree tramp = gimple_call_arg (t, 0);
4400 tree nfunc = gimple_call_arg (t, 1);
4401 tree frame = gimple_call_arg (t, 2);
4402 unsigned i;
4403 struct constraint_expr lhs, *rhsp;
4404 if (in_ipa_mode)
4406 varinfo_t nfi = NULL;
4407 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4408 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4409 if (nfi)
4411 lhs = get_function_part_constraint (nfi, fi_static_chain);
4412 get_constraint_for (frame, &rhsc);
4413 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4414 process_constraint (new_constraint (lhs, *rhsp));
4415 rhsc.truncate (0);
4417 /* Make the frame point to the function for
4418 the trampoline adjustment call. */
4419 get_constraint_for (tramp, &lhsc);
4420 do_deref (&lhsc);
4421 get_constraint_for (nfunc, &rhsc);
4422 process_all_all_constraints (lhsc, rhsc);
4424 return true;
4427 /* Else fallthru to generic handling which will let
4428 the frame escape. */
4429 break;
4431 case BUILT_IN_ADJUST_TRAMPOLINE:
4433 tree tramp = gimple_call_arg (t, 0);
4434 tree res = gimple_call_lhs (t);
4435 if (in_ipa_mode && res)
4437 get_constraint_for (res, &lhsc);
4438 get_constraint_for (tramp, &rhsc);
4439 do_deref (&rhsc);
4440 process_all_all_constraints (lhsc, rhsc);
4442 return true;
4444 CASE_BUILT_IN_TM_STORE (1):
4445 CASE_BUILT_IN_TM_STORE (2):
4446 CASE_BUILT_IN_TM_STORE (4):
4447 CASE_BUILT_IN_TM_STORE (8):
4448 CASE_BUILT_IN_TM_STORE (FLOAT):
4449 CASE_BUILT_IN_TM_STORE (DOUBLE):
4450 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4451 CASE_BUILT_IN_TM_STORE (M64):
4452 CASE_BUILT_IN_TM_STORE (M128):
4453 CASE_BUILT_IN_TM_STORE (M256):
4455 tree addr = gimple_call_arg (t, 0);
4456 tree src = gimple_call_arg (t, 1);
4458 get_constraint_for (addr, &lhsc);
4459 do_deref (&lhsc);
4460 get_constraint_for (src, &rhsc);
4461 process_all_all_constraints (lhsc, rhsc);
4462 return true;
4464 CASE_BUILT_IN_TM_LOAD (1):
4465 CASE_BUILT_IN_TM_LOAD (2):
4466 CASE_BUILT_IN_TM_LOAD (4):
4467 CASE_BUILT_IN_TM_LOAD (8):
4468 CASE_BUILT_IN_TM_LOAD (FLOAT):
4469 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4470 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4471 CASE_BUILT_IN_TM_LOAD (M64):
4472 CASE_BUILT_IN_TM_LOAD (M128):
4473 CASE_BUILT_IN_TM_LOAD (M256):
4475 tree dest = gimple_call_lhs (t);
4476 tree addr = gimple_call_arg (t, 0);
4478 get_constraint_for (dest, &lhsc);
4479 get_constraint_for (addr, &rhsc);
4480 do_deref (&rhsc);
4481 process_all_all_constraints (lhsc, rhsc);
4482 return true;
4484 /* Variadic argument handling needs to be handled in IPA
4485 mode as well. */
4486 case BUILT_IN_VA_START:
4488 tree valist = gimple_call_arg (t, 0);
4489 struct constraint_expr rhs, *lhsp;
4490 unsigned i;
4491 get_constraint_for (valist, &lhsc);
4492 do_deref (&lhsc);
4493 /* The va_list gets access to pointers in variadic
4494 arguments. Which we know in the case of IPA analysis
4495 and otherwise are just all nonlocal variables. */
4496 if (in_ipa_mode)
4498 fi = lookup_vi_for_tree (fn->decl);
4499 rhs = get_function_part_constraint (fi, ~0);
4500 rhs.type = ADDRESSOF;
4502 else
4504 rhs.var = nonlocal_id;
4505 rhs.type = ADDRESSOF;
4506 rhs.offset = 0;
4508 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4509 process_constraint (new_constraint (*lhsp, rhs));
4510 /* va_list is clobbered. */
4511 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4512 return true;
4514 /* va_end doesn't have any effect that matters. */
4515 case BUILT_IN_VA_END:
4516 return true;
4517 /* Alternate return. Simply give up for now. */
4518 case BUILT_IN_RETURN:
4520 fi = NULL;
4521 if (!in_ipa_mode
4522 || !(fi = get_vi_for_tree (fn->decl)))
4523 make_constraint_from (get_varinfo (escaped_id), anything_id);
4524 else if (in_ipa_mode
4525 && fi != NULL)
4527 struct constraint_expr lhs, rhs;
4528 lhs = get_function_part_constraint (fi, fi_result);
4529 rhs.var = anything_id;
4530 rhs.offset = 0;
4531 rhs.type = SCALAR;
4532 process_constraint (new_constraint (lhs, rhs));
4534 return true;
4536 case BUILT_IN_GOMP_PARALLEL:
4537 case BUILT_IN_GOACC_PARALLEL:
4539 if (in_ipa_mode)
4541 unsigned int fnpos, argpos;
4542 switch (DECL_FUNCTION_CODE (fndecl))
4544 case BUILT_IN_GOMP_PARALLEL:
4545 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
4546 fnpos = 0;
4547 argpos = 1;
4548 break;
4549 case BUILT_IN_GOACC_PARALLEL:
4550 /* __builtin_GOACC_parallel (device, fn, mapnum, hostaddrs,
4551 sizes, kinds, ...). */
4552 fnpos = 1;
4553 argpos = 3;
4554 break;
4555 default:
4556 gcc_unreachable ();
4559 tree fnarg = gimple_call_arg (t, fnpos);
4560 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
4561 tree fndecl = TREE_OPERAND (fnarg, 0);
4562 if (fndecl_maybe_in_other_partition (fndecl))
4563 /* Fallthru to general call handling. */
4564 break;
4566 tree arg = gimple_call_arg (t, argpos);
4568 varinfo_t fi = get_vi_for_tree (fndecl);
4569 find_func_aliases_for_call_arg (fi, 0, arg);
4570 return true;
4572 /* Else fallthru to generic call handling. */
4573 break;
4575 /* printf-style functions may have hooks to set pointers to
4576 point to somewhere into the generated string. Leave them
4577 for a later exercise... */
4578 default:
4579 /* Fallthru to general call handling. */;
4582 return false;
4585 /* Create constraints for the call T. */
4587 static void
4588 find_func_aliases_for_call (struct function *fn, gcall *t)
4590 tree fndecl = gimple_call_fndecl (t);
4591 varinfo_t fi;
/* Builtins with a known direct decl get precise special-case handling;
   if that handled the call completely we are done. */
4593 if (fndecl != NULL_TREE
4594 && DECL_BUILT_IN (fndecl)
4595 && find_func_aliases_for_builtin_call (fn, t))
4596 return;
4598 fi = get_fi_for_callee (t);
/* Without per-function info (non-IPA mode, or a known decl that has no
   fn info) model the call conservatively through its ECF_* flags. */
4599 if (!in_ipa_mode
4600 || (fndecl && !fi->is_fn_info))
4602 auto_vec<ce_s, 16> rhsc;
4603 int flags = gimple_call_flags (t);
4605 /* Const functions can return their arguments and addresses
4606 of global memory but not of escaped memory. */
4607 if (flags & (ECF_CONST|ECF_NOVOPS))
4609 if (gimple_call_lhs (t))
4610 handle_const_call (t, &rhsc);
4612 /* Pure functions can return addresses in and of memory
4613 reachable from their arguments, but they are not an escape
4614 point for reachable memory of their arguments. */
4615 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4616 handle_pure_call (t, &rhsc);
4617 else
4618 handle_rhs_call (t, &rhsc);
4619 if (gimple_call_lhs (t))
4620 handle_lhs_call (t, gimple_call_lhs (t),
4621 gimple_call_return_flags (t), rhsc, fndecl);
/* IPA path: connect arguments and the result to the callee's
   function-info variable parts. */
4623 else
4625 auto_vec<ce_s, 2> rhsc;
4626 tree lhsop;
4627 unsigned j;
4629 /* Assign all the passed arguments to the appropriate incoming
4630 parameters of the function. */
4631 for (j = 0; j < gimple_call_num_args (t); j++)
4633 tree arg = gimple_call_arg (t, j);
4634 find_func_aliases_for_call_arg (fi, j, arg);
4637 /* If we are returning a value, assign it to the result. */
4638 lhsop = gimple_call_lhs (t);
4639 if (lhsop)
4641 auto_vec<ce_s, 2> lhsc;
4642 struct constraint_expr rhs;
4643 struct constraint_expr *lhsp;
4644 bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (t));
4646 get_constraint_for (lhsop, &lhsc);
4647 rhs = get_function_part_constraint (fi, fi_result);
/* An aggregate result is returned via a hidden reference, so the
   result constraint must be dereferenced first. */
4648 if (aggr_p)
4650 auto_vec<ce_s, 2> tem;
4651 tem.quick_push (rhs);
4652 do_deref (&tem);
4653 gcc_checking_assert (tem.length () == 1);
4654 rhs = tem[0];
4656 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4657 process_constraint (new_constraint (*lhsp, rhs));
4659 /* If we pass the result decl by reference, honor that. */
4660 if (aggr_p)
4662 struct constraint_expr lhs;
4663 struct constraint_expr *rhsp;
4665 get_constraint_for_address_of (lhsop, &rhsc);
4666 lhs = get_function_part_constraint (fi, fi_result);
4667 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4668 process_constraint (new_constraint (lhs, *rhsp));
4669 rhsc.truncate (0);
4673 /* If we use a static chain, pass it along. */
4674 if (gimple_call_chain (t))
4676 struct constraint_expr lhs;
4677 struct constraint_expr *rhsp;
4679 get_constraint_for (gimple_call_chain (t), &rhsc);
4680 lhs = get_function_part_constraint (fi, fi_static_chain);
4681 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4682 process_constraint (new_constraint (lhs, *rhsp));
4687 /* Walk statement T setting up aliasing constraints according to the
4688 references found in T. This function is the main part of the
4689 constraint builder. AI points to auxiliary alias information used
4690 when building alias sets and computing alias grouping heuristics. */
4692 static void
4693 find_func_aliases (struct function *fn, gimple *origt)
4695 gimple *t = origt;
4696 auto_vec<ce_s, 16> lhsc;
4697 auto_vec<ce_s, 16> rhsc;
4698 struct constraint_expr *c;
4699 varinfo_t fi;
/* Dispatch on the statement kind: PHI, call, assignment, return, asm. */
4701 /* Now build constraints expressions. */
4702 if (gimple_code (t) == GIMPLE_PHI)
4704 size_t i;
4705 unsigned int j;
4707 /* For a phi node, assign all the arguments to
4708 the result. */
4709 get_constraint_for (gimple_phi_result (t), &lhsc)
4710 for (i = 0; i < gimple_phi_num_args (t); i++)
4712 tree strippedrhs = PHI_ARG_DEF (t, i);
4714 STRIP_NOPS (strippedrhs);
4715 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
4717 FOR_EACH_VEC_ELT (lhsc, j, c)
4719 struct constraint_expr *c2;
/* Popping as we go empties RHSC for the next PHI argument. */
4720 while (rhsc.length () > 0)
4722 c2 = &rhsc.last ();
4723 process_constraint (new_constraint (*c, *c2));
4724 rhsc.pop ();
4729 /* In IPA mode, we need to generate constraints to pass call
4730 arguments through their calls. There are two cases,
4731 either a GIMPLE_CALL returning a value, or just a plain
4732 GIMPLE_CALL when we are not.
4734 In non-ipa mode, we need to generate constraints for each
4735 pointer passed by address. */
4736 else if (is_gimple_call (t))
4737 find_func_aliases_for_call (fn, as_a <gcall *> (t));
4739 /* Otherwise, just a regular assignment statement. Only care about
4740 operations with pointer result, others are dealt with as escape
4741 points if they have pointer operands. */
4742 else if (is_gimple_assign (t))
4744 /* Otherwise, just a regular assignment statement. */
4745 tree lhsop = gimple_assign_lhs (t);
4746 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4748 if (rhsop && TREE_CLOBBER_P (rhsop))
4749 /* Ignore clobbers, they don't actually store anything into
4750 the LHS. */
4752 else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4753 do_structure_copy (lhsop, rhsop);
4754 else
4756 enum tree_code code = gimple_assign_rhs_code (t);
4758 get_constraint_for (lhsop, &lhsc);
4760 if (code == POINTER_PLUS_EXPR)
4761 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4762 gimple_assign_rhs2 (t), &rhsc);
4763 else if (code == BIT_AND_EXPR
4764 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
4766 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4767 the pointer. Handle it by offsetting it by UNKNOWN. */
4768 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4769 NULL_TREE, &rhsc);
/* Conversions are copies, except a non-pointer converted to a
   pointer, which falls through to the generic merge below. */
4771 else if ((CONVERT_EXPR_CODE_P (code)
4772 && !(POINTER_TYPE_P (gimple_expr_type (t))
4773 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4774 || gimple_assign_single_p (t))
4775 get_constraint_for_rhs (rhsop, &rhsc);
4776 else if (code == COND_EXPR)
4778 /* The result is a merge of both COND_EXPR arms. */
4779 auto_vec<ce_s, 2> tmp;
4780 struct constraint_expr *rhsp;
4781 unsigned i;
4782 get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
4783 get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
4784 FOR_EACH_VEC_ELT (tmp, i, rhsp)
4785 rhsc.safe_push (*rhsp);
4787 else if (truth_value_p (code))
4788 /* Truth value results are not pointer (parts). Or at least
4789 very unreasonable obfuscation of a part. */
4791 else
4793 /* All other operations are merges. */
4794 auto_vec<ce_s, 4> tmp;
4795 struct constraint_expr *rhsp;
4796 unsigned i, j;
4797 get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
4798 for (i = 2; i < gimple_num_ops (t); ++i)
4800 get_constraint_for_rhs (gimple_op (t, i), &tmp);
4801 FOR_EACH_VEC_ELT (tmp, j, rhsp)
4802 rhsc.safe_push (*rhsp);
4803 tmp.truncate (0);
4806 process_all_all_constraints (lhsc, rhsc);
4808 /* If there is a store to a global variable the rhs escapes. */
4809 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4810 && DECL_P (lhsop))
4812 varinfo_t vi = get_vi_for_tree (lhsop);
4813 if ((! in_ipa_mode && vi->is_global_var)
4814 || vi->is_ipa_escape_point)
4815 make_escape_constraint (rhsop);
4818 /* Handle escapes through return. */
4819 else if (gimple_code (t) == GIMPLE_RETURN
4820 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
4822 greturn *return_stmt = as_a <greturn *> (t);
4823 fi = NULL;
4824 if (!in_ipa_mode
4825 || !(fi = get_vi_for_tree (fn->decl)))
4826 make_escape_constraint (gimple_return_retval (return_stmt));
/* In IPA mode the returned value flows into this function's
   fi_result part instead of escaping. */
4827 else if (in_ipa_mode)
4829 struct constraint_expr lhs ;
4830 struct constraint_expr *rhsp;
4831 unsigned i;
4833 lhs = get_function_part_constraint (fi, fi_result);
4834 get_constraint_for_rhs (gimple_return_retval (return_stmt), &rhsc);
4835 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4836 process_constraint (new_constraint (lhs, *rhsp));
4839 /* Handle asms conservatively by adding escape constraints to everything. */
4840 else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
4842 unsigned i, noutputs;
4843 const char **oconstraints;
4844 const char *constraint;
4845 bool allows_mem, allows_reg, is_inout;
4847 noutputs = gimple_asm_noutputs (asm_stmt);
4848 oconstraints = XALLOCAVEC (const char *, noutputs);
4850 for (i = 0; i < noutputs; ++i)
4852 tree link = gimple_asm_output_op (asm_stmt, i);
4853 tree op = TREE_VALUE (link);
4855 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4856 oconstraints[i] = constraint;
4857 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4858 &allows_reg, &is_inout);
4860 /* A memory constraint makes the address of the operand escape. */
4861 if (!allows_reg && allows_mem)
4862 make_escape_constraint (build_fold_addr_expr (op));
4864 /* The asm may read global memory, so outputs may point to
4865 any global memory. */
4866 if (op)
4868 auto_vec<ce_s, 2> lhsc;
4869 struct constraint_expr rhsc, *lhsp;
4870 unsigned j;
4871 get_constraint_for (op, &lhsc);
4872 rhsc.var = nonlocal_id;
4873 rhsc.offset = 0;
4874 rhsc.type = SCALAR;
4875 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4876 process_constraint (new_constraint (*lhsp, rhsc));
4879 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4881 tree link = gimple_asm_input_op (asm_stmt, i);
4882 tree op = TREE_VALUE (link);
4884 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4886 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
4887 &allows_mem, &allows_reg);
4889 /* A memory constraint makes the address of the operand escape. */
4890 if (!allows_reg && allows_mem)
4891 make_escape_constraint (build_fold_addr_expr (op));
4892 /* Strictly we'd only need the constraint to ESCAPED if
4893 the asm clobbers memory, otherwise using something
4894 along the lines of per-call clobbers/uses would be enough. */
4895 else if (op)
4896 make_escape_constraint (op);
4902 /* Create a constraint adding to the clobber set of FI the memory
4903 pointed to by PTR. */
4905 static void
4906 process_ipa_clobber (varinfo_t fi, tree ptr)
4908 vec<ce_s> ptrc = vNULL;
4909 struct constraint_expr *c, lhs;
4910 unsigned i;
4911 get_constraint_for_rhs (ptr, &ptrc);
4912 lhs = get_function_part_constraint (fi, fi_clobbers);
4913 FOR_EACH_VEC_ELT (ptrc, i, c)
4914 process_constraint (new_constraint (lhs, *c));
4915 ptrc.release ();
4918 /* Walk statement T setting up clobber and use constraints according to the
4919 references found in T. This function is a main part of the
4920 IPA constraint builder. */
4922 static void
4923 find_func_clobbers (struct function *fn, gimple *origt)
4925 gimple *t = origt;
4926 auto_vec<ce_s, 16> lhsc;
4927 auto_vec<ce_s, 16> rhsc;
4928 varinfo_t fi;
4930 /* Add constraints for clobbered/used in IPA mode.
4931 We are not interested in what automatic variables are clobbered
4932 or used as we only use the information in the caller to which
4933 they do not escape. */
4934 gcc_assert (in_ipa_mode);
4936 /* If the stmt refers to memory in any way it better had a VUSE. */
4937 if (gimple_vuse (t) == NULL_TREE)
4938 return;
4940 /* We'd better have function information for the current function. */
4941 fi = lookup_vi_for_tree (fn->decl);
4942 gcc_assert (fi != NULL);
4944 /* Account for stores in assignments and calls. */
4945 if (gimple_vdef (t) != NULL_TREE
4946 && gimple_has_lhs (t))
4948 tree lhs = gimple_get_lhs (t);
/* Strip component references down to the base object; only stores
   whose base is not a function-local automatic are recorded. */
4949 tree tem = lhs;
4950 while (handled_component_p (tem))
4951 tem = TREE_OPERAND (tem, 0);
4952 if ((DECL_P (tem)
4953 && !auto_var_in_fn_p (tem, fn->decl))
4954 || INDIRECT_REF_P (tem)
4955 || (TREE_CODE (tem) == MEM_REF
4956 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4957 && auto_var_in_fn_p
4958 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
4960 struct constraint_expr lhsc, *rhsp;
4961 unsigned i;
4962 lhsc = get_function_part_constraint (fi, fi_clobbers);
4963 get_constraint_for_address_of (lhs, &rhsc);
4964 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4965 process_constraint (new_constraint (lhsc, *rhsp));
4966 rhsc.truncate (0);
4970 /* Account for uses in assigments and returns. */
4971 if (gimple_assign_single_p (t)
4972 || (gimple_code (t) == GIMPLE_RETURN
4973 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
4975 tree rhs = (gimple_assign_single_p (t)
4976 ? gimple_assign_rhs1 (t)
4977 : gimple_return_retval (as_a <greturn *> (t)));
/* Same base-object test as for stores above, applied to the read.  */
4978 tree tem = rhs;
4979 while (handled_component_p (tem))
4980 tem = TREE_OPERAND (tem, 0);
4981 if ((DECL_P (tem)
4982 && !auto_var_in_fn_p (tem, fn->decl))
4983 || INDIRECT_REF_P (tem)
4984 || (TREE_CODE (tem) == MEM_REF
4985 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4986 && auto_var_in_fn_p
4987 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
4989 struct constraint_expr lhs, *rhsp;
4990 unsigned i;
4991 lhs = get_function_part_constraint (fi, fi_uses);
4992 get_constraint_for_address_of (rhs, &rhsc);
4993 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4994 process_constraint (new_constraint (lhs, *rhsp));
4995 rhsc.truncate (0);
4999 if (gcall *call_stmt = dyn_cast <gcall *> (t))
5001 varinfo_t cfi = NULL;
5002 tree decl = gimple_call_fndecl (t);
5003 struct constraint_expr lhs, rhs;
5004 unsigned i, j;
5006 /* For builtins we do not have separate function info. For those
5007 we do not generate escapes for we have to generate clobbers/uses. */
5008 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
5009 switch (DECL_FUNCTION_CODE (decl))
5011 /* The following functions use and clobber memory pointed to
5012 by their arguments. */
5013 case BUILT_IN_STRCPY:
5014 case BUILT_IN_STRNCPY:
5015 case BUILT_IN_BCOPY:
5016 case BUILT_IN_MEMCPY:
5017 case BUILT_IN_MEMMOVE:
5018 case BUILT_IN_MEMPCPY:
5019 case BUILT_IN_STPCPY:
5020 case BUILT_IN_STPNCPY:
5021 case BUILT_IN_STRCAT:
5022 case BUILT_IN_STRNCAT:
5023 case BUILT_IN_STRCPY_CHK:
5024 case BUILT_IN_STRNCPY_CHK:
5025 case BUILT_IN_MEMCPY_CHK:
5026 case BUILT_IN_MEMMOVE_CHK:
5027 case BUILT_IN_MEMPCPY_CHK:
5028 case BUILT_IN_STPCPY_CHK:
5029 case BUILT_IN_STPNCPY_CHK:
5030 case BUILT_IN_STRCAT_CHK:
5031 case BUILT_IN_STRNCAT_CHK:
/* bcopy has its source and destination arguments swapped relative
   to the memcpy family, hence the conditional argument indices. */
5033 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5034 == BUILT_IN_BCOPY ? 1 : 0));
5035 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5036 == BUILT_IN_BCOPY ? 0 : 1));
5037 unsigned i;
5038 struct constraint_expr *rhsp, *lhsp;
5039 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5040 lhs = get_function_part_constraint (fi, fi_clobbers);
5041 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5042 process_constraint (new_constraint (lhs, *lhsp));
5043 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
5044 lhs = get_function_part_constraint (fi, fi_uses);
5045 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5046 process_constraint (new_constraint (lhs, *rhsp));
5047 return;
5049 /* The following function clobbers memory pointed to by
5050 its argument. */
5051 case BUILT_IN_MEMSET:
5052 case BUILT_IN_MEMSET_CHK:
5053 case BUILT_IN_POSIX_MEMALIGN:
5055 tree dest = gimple_call_arg (t, 0);
5056 unsigned i;
5057 ce_s *lhsp;
5058 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5059 lhs = get_function_part_constraint (fi, fi_clobbers);
5060 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5061 process_constraint (new_constraint (lhs, *lhsp));
5062 return;
5064 /* The following functions clobber their second and third
5065 arguments. */
5066 case BUILT_IN_SINCOS:
5067 case BUILT_IN_SINCOSF:
5068 case BUILT_IN_SINCOSL:
5070 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5071 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5072 return;
5074 /* The following functions clobber their second argument. */
5075 case BUILT_IN_FREXP:
5076 case BUILT_IN_FREXPF:
5077 case BUILT_IN_FREXPL:
5078 case BUILT_IN_LGAMMA_R:
5079 case BUILT_IN_LGAMMAF_R:
5080 case BUILT_IN_LGAMMAL_R:
5081 case BUILT_IN_GAMMA_R:
5082 case BUILT_IN_GAMMAF_R:
5083 case BUILT_IN_GAMMAL_R:
5084 case BUILT_IN_MODF:
5085 case BUILT_IN_MODFF:
5086 case BUILT_IN_MODFL:
5088 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5089 return;
5091 /* The following functions clobber their third argument. */
5092 case BUILT_IN_REMQUO:
5093 case BUILT_IN_REMQUOF:
5094 case BUILT_IN_REMQUOL:
5096 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5097 return;
5099 /* The following functions neither read nor clobber memory. */
5100 case BUILT_IN_ASSUME_ALIGNED:
5101 case BUILT_IN_FREE:
5102 return;
5103 /* Trampolines are of no interest to us. */
5104 case BUILT_IN_INIT_TRAMPOLINE:
5105 case BUILT_IN_ADJUST_TRAMPOLINE:
5106 return;
5107 case BUILT_IN_VA_START:
5108 case BUILT_IN_VA_END:
5109 return;
5110 case BUILT_IN_GOMP_PARALLEL:
5111 case BUILT_IN_GOACC_PARALLEL:
5113 unsigned int fnpos, argpos;
5114 unsigned int implicit_use_args[2];
5115 unsigned int num_implicit_use_args = 0;
5116 switch (DECL_FUNCTION_CODE (decl))
5118 case BUILT_IN_GOMP_PARALLEL:
5119 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
5120 fnpos = 0;
5121 argpos = 1;
5122 break;
5123 case BUILT_IN_GOACC_PARALLEL:
5124 /* __builtin_GOACC_parallel (device, fn, mapnum, hostaddrs,
5125 sizes, kinds, ...). */
5126 fnpos = 1;
5127 argpos = 3;
5128 implicit_use_args[num_implicit_use_args++] = 4;
5129 implicit_use_args[num_implicit_use_args++] = 5;
5130 break;
5131 default:
5132 gcc_unreachable ();
5135 tree fnarg = gimple_call_arg (t, fnpos);
5136 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
5137 tree fndecl = TREE_OPERAND (fnarg, 0);
5138 if (fndecl_maybe_in_other_partition (fndecl))
5139 /* Fallthru to general call handling. */
5140 break;
5142 varinfo_t cfi = get_vi_for_tree (fndecl);
5144 tree arg = gimple_call_arg (t, argpos);
5146 /* Parameter passed by value is used. */
5147 lhs = get_function_part_constraint (fi, fi_uses);
5148 struct constraint_expr *rhsp;
5149 get_constraint_for (arg, &rhsc);
5150 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5151 process_constraint (new_constraint (lhs, *rhsp));
5152 rhsc.truncate (0);
5154 /* Handle parameters used by the call, but not used in cfi, as
5155 implicitly used by cfi. */
5156 lhs = get_function_part_constraint (cfi, fi_uses);
5157 for (unsigned i = 0; i < num_implicit_use_args; ++i)
5159 tree arg = gimple_call_arg (t, implicit_use_args[i]);
5160 get_constraint_for (arg, &rhsc);
5161 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5162 process_constraint (new_constraint (lhs, *rhsp));
5163 rhsc.truncate (0);
5166 /* The caller clobbers what the callee does. */
5167 lhs = get_function_part_constraint (fi, fi_clobbers);
5168 rhs = get_function_part_constraint (cfi, fi_clobbers);
5169 process_constraint (new_constraint (lhs, rhs));
5171 /* The caller uses what the callee does. */
5172 lhs = get_function_part_constraint (fi, fi_uses);
5173 rhs = get_function_part_constraint (cfi, fi_uses);
5174 process_constraint (new_constraint (lhs, rhs));
5176 return;
5178 /* printf-style functions may have hooks to set pointers to
5179 point to somewhere into the generated string. Leave them
5180 for a later exercise... */
5181 default:
5182 /* Fallthru to general call handling. */;
5185 /* Parameters passed by value are used. */
5186 lhs = get_function_part_constraint (fi, fi_uses);
5187 for (i = 0; i < gimple_call_num_args (t); i++)
5189 struct constraint_expr *rhsp;
5190 tree arg = gimple_call_arg (t, i);
/* SSA names and invariants are not memory; only memory arguments
   contribute to the use set. */
5192 if (TREE_CODE (arg) == SSA_NAME
5193 || is_gimple_min_invariant (arg))
5194 continue;
5196 get_constraint_for_address_of (arg, &rhsc);
5197 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5198 process_constraint (new_constraint (lhs, *rhsp));
5199 rhsc.truncate (0);
5202 /* Build constraints for propagating clobbers/uses along the
5203 callgraph edges. */
5204 cfi = get_fi_for_callee (call_stmt);
5205 if (cfi->id == anything_id)
5207 if (gimple_vdef (t))
5208 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5209 anything_id)
5210 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5211 anything_id);
5212 return;
5215 /* For callees without function info (that's external functions),
5216 ESCAPED is clobbered and used. */
5217 if (gimple_call_fndecl (t)
5218 && !cfi->is_fn_info)
5220 varinfo_t vi;
5222 if (gimple_vdef (t))
5223 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5224 escaped_id);
5225 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
5227 /* Also honor the call statement use/clobber info. */
5228 if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
5229 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5230 vi->id);
5231 if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
5232 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
5233 vi->id);
5234 return;
5237 /* Otherwise the caller clobbers and uses what the callee does.
5238 ??? This should use a new complex constraint that filters
5239 local variables of the callee. */
5240 if (gimple_vdef (t))
5242 lhs = get_function_part_constraint (fi, fi_clobbers);
5243 rhs = get_function_part_constraint (cfi, fi_clobbers);
5244 process_constraint (new_constraint (lhs, rhs));
5246 lhs = get_function_part_constraint (fi, fi_uses);
5247 rhs = get_function_part_constraint (cfi, fi_uses);
5248 process_constraint (new_constraint (lhs, rhs));
5250 else if (gimple_code (t) == GIMPLE_ASM)
5252 /* ??? Ick. We can do better. */
5253 if (gimple_vdef (t))
5254 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5255 anything_id);
5256 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5257 anything_id);
5262 /* Find the first varinfo in the same variable as START that overlaps with
5263 OFFSET. Return NULL if we can't find one. */
5265 static varinfo_t
5266 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5268 /* If the offset is outside of the variable, bail out. */
5269 if (offset >= start->fullsize)
5270 return NULL;
5272 /* If we cannot reach offset from start, lookup the first field
5273 and start from there. */
5274 if (start->offset > offset)
5275 start = get_varinfo (start->head);
5277 while (start)
5279 /* We may not find a variable in the field list with the actual
5280 offset when we have glommed a structure to a variable.
5281 In that case, however, offset should still be within the size
5282 of the variable. */
5283 if (offset >= start->offset
5284 && (offset - start->offset) < start->size)
5285 return start;
5287 start = vi_next (start);
5290 return NULL;
5293 /* Find the first varinfo in the same variable as START that overlaps with
5294 OFFSET. If there is no such varinfo the varinfo directly preceding
5295 OFFSET is returned. */
5297 static varinfo_t
5298 first_or_preceding_vi_for_offset (varinfo_t start,
5299 unsigned HOST_WIDE_INT offset)
5301 /* If we cannot reach offset from start, lookup the first field
5302 and start from there. */
5303 if (start->offset > offset)
5304 start = get_varinfo (start->head);
5306 /* We may not find a variable in the field list with the actual
5307 offset when we have glommed a structure to a variable.
5308 In that case, however, offset should still be within the size
5309 of the variable.
5310 If we got beyond the offset we look for return the field
5311 directly preceding offset which may be the last field. */
5312 while (start->next
5313 && offset >= start->offset
5314 && !((offset - start->offset) < start->size))
5315 start = vi_next (start);
5317 return start;
5321 /* This structure is used during pushing fields onto the fieldstack
5322 to track the offset of the field, since bitpos_of_field gives it
5323 relative to its immediate containing type, and we want it relative
5324 to the ultimate containing object. */
5326 struct fieldoff
5328 /* Offset from the base of the base containing object to this field. */
5329 HOST_WIDE_INT offset;
5331 /* Size, in bits, of the field. */
5332 unsigned HOST_WIDE_INT size;
/* True when DECL_SIZE of the field is absent or not a constant that
   fits an unsigned HOST_WIDE_INT; SIZE is then not meaningful. */
5334 unsigned has_unknown_size : 1;
/* True when the field's type is known to contain pointers. */
5336 unsigned must_have_pointers : 1;
/* True when the field may contain pointers (conservative flag). */
5338 unsigned may_have_pointers : 1;
/* True when the field is a restrict-qualified pointer of known size. */
5340 unsigned only_restrict_pointers : 1;
/* When only_restrict_pointers is set, the pointed-to type of the
   restrict pointer; otherwise NULL_TREE. */
5342 tree restrict_pointed_type;
5344 typedef struct fieldoff fieldoff_s;
5347 /* qsort comparison function for two fieldoff's PA and PB */
5349 static int
5350 fieldoff_compare (const void *pa, const void *pb)
5352 const fieldoff_s *foa = (const fieldoff_s *)pa;
5353 const fieldoff_s *fob = (const fieldoff_s *)pb;
5354 unsigned HOST_WIDE_INT foasize, fobsize;
5356 if (foa->offset < fob->offset)
5357 return -1;
5358 else if (foa->offset > fob->offset)
5359 return 1;
5361 foasize = foa->size;
5362 fobsize = fob->size;
5363 if (foasize < fobsize)
5364 return -1;
5365 else if (foasize > fobsize)
5366 return 1;
5367 return 0;
5370 /* Sort a fieldstack according to the field offset and sizes. */
5371 static void
5372 sort_fieldstack (vec<fieldoff_s> fieldstack)
/* Ordering is by offset, then size, as defined by fieldoff_compare. */
5374 fieldstack.qsort (fieldoff_compare);
5377 /* Return true if T is a type that can have subvars. */
5379 static inline bool
5380 type_can_have_subvars (const_tree t)
5382 /* Aggregates without overlapping fields can have subvars. */
/* Only RECORD_TYPEs qualify; unions and arrays are treated as a
   single blob by the field-sensitive analysis. */
5383 return TREE_CODE (t) == RECORD_TYPE;
5386 /* Return true if V is a tree that we can have subvars for.
5387 Normally, this is any aggregate type. Also complex
5388 types which are not gimple registers can have subvars. */
5390 static inline bool
5391 var_can_have_subvars (const_tree v)
5393 /* Volatile variables should never have subvars. */
5394 if (TREE_THIS_VOLATILE (v))
5395 return false;
5397 /* Non decls or memory tags can never have subvars. */
5398 if (!DECL_P (v))
5399 return false;
5401 return type_can_have_subvars (TREE_TYPE (v));
5404 /* Return true if T is a type that does contain pointers. */
5406 static bool
5407 type_must_have_pointers (tree type)
5409 if (POINTER_TYPE_P (type))
5410 return true;
5412 if (TREE_CODE (type) == ARRAY_TYPE)
5413 return type_must_have_pointers (TREE_TYPE (type));
5415 /* A function or method can have pointers as arguments, so track
5416 those separately. */
5417 if (TREE_CODE (type) == FUNCTION_TYPE
5418 || TREE_CODE (type) == METHOD_TYPE)
5419 return true;
5421 return false;
/* Return true if the type of field T must contain pointers. */
5424 static bool
5425 field_must_have_pointers (tree t)
5427 return type_must_have_pointers (TREE_TYPE (t));
5430 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5431 the fields of TYPE onto fieldstack, recording their offsets along
5432 the way.
5434 OFFSET is used to keep track of the offset in this entire
5435 structure, rather than just the immediately containing structure.
5436 Returns false if the caller is supposed to handle the field we
5437 recursed for. */
5439 static bool
5440 push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
5441 HOST_WIDE_INT offset)
5443 tree field;
5444 bool empty_p = true;
5446 if (TREE_CODE (type) != RECORD_TYPE)
5447 return false;
5449 /* If the vector of fields is growing too big, bail out early.
5450 Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
5451 sure this fails. */
5452 if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5453 return false;
5455 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5456 if (TREE_CODE (field) == FIELD_DECL)
5458 bool push = false;
5459 HOST_WIDE_INT foff = bitpos_of_field (field);
5460 tree field_type = TREE_TYPE (field);
/* Fields we cannot (or do not want to) decompose further are pushed
   as a single entry; otherwise recurse into the field's record. */
5462 if (!var_can_have_subvars (field)
5463 || TREE_CODE (field_type) == QUAL_UNION_TYPE
5464 || TREE_CODE (field_type) == UNION_TYPE)
5465 push = true;
5466 else if (!push_fields_onto_fieldstack
5467 (field_type, fieldstack, offset + foff)
5468 && (DECL_SIZE (field)
5469 && !integer_zerop (DECL_SIZE (field))))
5470 /* Empty structures may have actual size, like in C++. So
5471 see if we didn't push any subfields and the size is
5472 nonzero, push the field onto the stack. */
5473 push = true;
5475 if (push)
5477 fieldoff_s *pair = NULL;
5478 bool has_unknown_size = false;
5479 bool must_have_pointers_p;
5481 if (!fieldstack->is_empty ())
5482 pair = &fieldstack->last ();
5484 /* If there isn't anything at offset zero, create sth. */
5485 if (!pair
5486 && offset + foff != 0)
5488 fieldoff_s e
5489 = {0, offset + foff, false, false, false, false, NULL_TREE};
5490 pair = fieldstack->safe_push (e);
5493 if (!DECL_SIZE (field)
5494 || !tree_fits_uhwi_p (DECL_SIZE (field)))
5495 has_unknown_size = true;
5497 /* If adjacent fields do not contain pointers merge them. */
5498 must_have_pointers_p = field_must_have_pointers (field);
5499 if (pair
5500 && !has_unknown_size
5501 && !must_have_pointers_p
5502 && !pair->must_have_pointers
5503 && !pair->has_unknown_size
5504 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5506 pair->size += tree_to_uhwi (DECL_SIZE (field));
5508 else
5510 fieldoff_s e;
5511 e.offset = offset + foff;
5512 e.has_unknown_size = has_unknown_size;
5513 if (!has_unknown_size)
5514 e.size = tree_to_uhwi (DECL_SIZE (field));
5515 else
/* Size is unused when has_unknown_size is set; -1 is a marker. */
5516 e.size = -1;
5517 e.must_have_pointers = must_have_pointers_p;
5518 e.may_have_pointers = true;
5519 e.only_restrict_pointers
5520 = (!has_unknown_size
5521 && POINTER_TYPE_P (field_type)
5522 && TYPE_RESTRICT (field_type));
5523 if (e.only_restrict_pointers)
5524 e.restrict_pointed_type = TREE_TYPE (field_type);
5525 fieldstack->safe_push (e);
5529 empty_p = false;
5532 return !empty_p;
5535 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5536 if it is a varargs function. */
5538 static unsigned int
5539 count_num_arguments (tree decl, bool *is_varargs)
5541 unsigned int num = 0;
5542 tree t;
5544 /* Capture named arguments for K&R functions. They do not
5545 have a prototype and thus no TYPE_ARG_TYPES. */
5546 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5547 ++num;
5549 /* Check if the function has variadic arguments. */
5550 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5551 if (TREE_VALUE (t) == void_type_node)
5552 break;
5553 if (!t)
5554 *is_varargs = true;
5556 return num;
5559 /* Creation function node for DECL, using NAME, and return the index
5560 of the variable we've created for the function. If NONLOCAL_p, create
5561 initial constraints. */
5563 static varinfo_t
5564 create_function_info_for (tree decl, const char *name, bool add_id,
5565 bool nonlocal_p)
5567 struct function *fn = DECL_STRUCT_FUNCTION (decl);
5568 varinfo_t vi, prev_vi;
5569 tree arg;
5570 unsigned int i;
5571 bool is_varargs = false;
5572 unsigned int num_args = count_num_arguments (decl, &is_varargs);
5574 /* Create the variable info. */
5576 vi = new_var_info (decl, name, add_id);
5577 vi->offset = 0;
5578 vi->size = 1;
/* The function info is laid out like a structure: one "field" per
   fi_* slot (clobbers, uses, chain, result) plus one per argument.  */
5579 vi->fullsize = fi_parm_base + num_args;
5580 vi->is_fn_info = 1;
/* Pointer information lives in the sub-variables created below, not
   in the function variable itself.  */
5581 vi->may_have_pointers = false;
/* A varargs function has an unbounded number of argument slots.  */
5582 if (is_varargs)
5583 vi->fullsize = ~0;
5584 insert_vi_for_tree (vi->decl, vi);
5586 prev_vi = vi;
5588 /* Create a variable for things the function clobbers and one for
5589 things the function uses. */
5591 varinfo_t clobbervi, usevi;
5592 const char *newname;
5593 char *tempname;
5595 tempname = xasprintf ("%s.clobber", name);
5596 newname = ggc_strdup (tempname);
5597 free (tempname);
5599 clobbervi = new_var_info (NULL, newname, false);
5600 clobbervi->offset = fi_clobbers;
5601 clobbervi->size = 1;
5602 clobbervi->fullsize = vi->fullsize;
5603 clobbervi->is_full_var = true;
5604 clobbervi->is_global_var = false;
/* Sub-variables are chained in strictly increasing offset order;
   the asserts below check that invariant at every link.  */
5606 gcc_assert (prev_vi->offset < clobbervi->offset);
5607 prev_vi->next = clobbervi->id;
5608 prev_vi = clobbervi;
5610 tempname = xasprintf ("%s.use", name);
5611 newname = ggc_strdup (tempname);
5612 free (tempname);
5614 usevi = new_var_info (NULL, newname, false);
5615 usevi->offset = fi_uses;
5616 usevi->size = 1;
5617 usevi->fullsize = vi->fullsize;
5618 usevi->is_full_var = true;
5619 usevi->is_global_var = false;
5621 gcc_assert (prev_vi->offset < usevi->offset);
5622 prev_vi->next = usevi->id;
5623 prev_vi = usevi;
5626 /* And one for the static chain. */
5627 if (fn->static_chain_decl != NULL_TREE)
5629 varinfo_t chainvi;
5630 const char *newname;
5631 char *tempname;
5633 tempname = xasprintf ("%s.chain", name);
5634 newname = ggc_strdup (tempname);
5635 free (tempname);
5637 chainvi = new_var_info (fn->static_chain_decl, newname, false);
5638 chainvi->offset = fi_static_chain;
5639 chainvi->size = 1;
5640 chainvi->fullsize = vi->fullsize;
5641 chainvi->is_full_var = true;
5642 chainvi->is_global_var = false;
5644 insert_vi_for_tree (fn->static_chain_decl, chainvi);
/* For externally visible functions the chain may point anywhere.  */
5646 if (nonlocal_p
5647 && chainvi->may_have_pointers)
5648 make_constraint_from (chainvi, nonlocal_id);
5650 gcc_assert (prev_vi->offset < chainvi->offset);
5651 prev_vi->next = chainvi->id;
5652 prev_vi = chainvi;
5655 /* Create a variable for the return var. */
5656 if (DECL_RESULT (decl) != NULL
5657 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
5659 varinfo_t resultvi;
5660 const char *newname;
5661 char *tempname;
5662 tree resultdecl = decl;
/* Use the RESULT_DECL when present; otherwise fall back to DECL so
   the varinfo still has an associated tree.  */
5664 if (DECL_RESULT (decl))
5665 resultdecl = DECL_RESULT (decl);
5667 tempname = xasprintf ("%s.result", name);
5668 newname = ggc_strdup (tempname);
5669 free (tempname);
5671 resultvi = new_var_info (resultdecl, newname, false);
5672 resultvi->offset = fi_result;
5673 resultvi->size = 1;
5674 resultvi->fullsize = vi->fullsize;
5675 resultvi->is_full_var = true;
5676 if (DECL_RESULT (decl))
5677 resultvi->may_have_pointers = true;
5679 if (DECL_RESULT (decl))
5680 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
/* An aggregate returned by invisible reference can be written to
   from outside via the passed-in pointer.  */
5682 if (nonlocal_p
5683 && DECL_RESULT (decl)
5684 && DECL_BY_REFERENCE (DECL_RESULT (decl)))
5685 make_constraint_from (resultvi, nonlocal_id);
5687 gcc_assert (prev_vi->offset < resultvi->offset);
5688 prev_vi->next = resultvi->id;
5689 prev_vi = resultvi;
5692 /* We also need to make function return values escape. Nothing
5693 escapes by returning from main though. */
5694 if (nonlocal_p
5695 && !MAIN_NAME_P (DECL_NAME (decl)))
5697 varinfo_t fi, rvi;
5698 fi = lookup_vi_for_tree (decl);
5699 rvi = first_vi_for_offset (fi, fi_result);
5700 if (rvi && rvi->offset == fi_result)
5701 make_copy_constraint (get_varinfo (escaped_id), rvi->id);
5704 /* Set up variables for each argument. */
5705 arg = DECL_ARGUMENTS (decl);
5706 for (i = 0; i < num_args; i++)
5708 varinfo_t argvi;
5709 const char *newname;
5710 char *tempname;
5711 tree argdecl = decl;
/* ARG may run out before NUM_ARGS for K&R-style declarations; the
   remaining slots then have no PARM_DECL to attach to.  */
5713 if (arg)
5714 argdecl = arg;
5716 tempname = xasprintf ("%s.arg%d", name, i);
5717 newname = ggc_strdup (tempname);
5718 free (tempname);
5720 argvi = new_var_info (argdecl, newname, false);
5721 argvi->offset = fi_parm_base + i;
5722 argvi->size = 1;
5723 argvi->is_full_var = true;
5724 argvi->fullsize = vi->fullsize;
5725 if (arg)
5726 argvi->may_have_pointers = true;
5728 if (arg)
5729 insert_vi_for_tree (arg, argvi);
5731 if (nonlocal_p
5732 && argvi->may_have_pointers)
5733 make_constraint_from (argvi, nonlocal_id);
5735 gcc_assert (prev_vi->offset < argvi->offset);
5736 prev_vi->next = argvi->id;
5737 prev_vi = argvi;
5738 if (arg)
5739 arg = DECL_CHAIN (arg);
5742 /* Add one representative for all further args. */
5743 if (is_varargs)
5745 varinfo_t argvi;
5746 const char *newname;
5747 char *tempname;
5748 tree decl;
5750 tempname = xasprintf ("%s.varargs", name);
5751 newname = ggc_strdup (tempname);
5752 free (tempname);
5754 /* We need sth that can be pointed to for va_start. */
5755 decl = build_fake_var_decl (ptr_type_node);
5757 argvi = new_var_info (decl, newname, false);
5758 argvi->offset = fi_parm_base + num_args;
5759 argvi->size = ~0;
5760 argvi->is_full_var = true;
5761 argvi->is_heap_var = true;
5762 argvi->fullsize = vi->fullsize;
5764 if (nonlocal_p
5765 && argvi->may_have_pointers)
5766 make_constraint_from (argvi, nonlocal_id);
5768 gcc_assert (prev_vi->offset < argvi->offset);
5769 prev_vi->next = argvi->id;
5770 prev_vi = argvi;
5773 return vi;
5777 /* Return true if FIELDSTACK contains fields that overlap.
5778 FIELDSTACK is assumed to be sorted by offset. */
5780 static bool
5781 check_for_overlaps (vec<fieldoff_s> fieldstack)
5783 fieldoff_s *fo = NULL;
5784 unsigned int i;
5785 HOST_WIDE_INT lastoffset = -1;
5787 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5789 if (fo->offset == lastoffset)
5790 return true;
5791 lastoffset = fo->offset;
5793 return false;
5796 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5797 This will also create any varinfo structures necessary for fields
5798 of DECL. DECL is a function parameter if HANDLE_PARAM is set.
5799 HANDLED_STRUCT_TYPE is used to register struct types reached by following
5800 restrict pointers. This is needed to prevent infinite recursion. */
5802 static varinfo_t
5803 create_variable_info_for_1 (tree decl, const char *name, bool add_id,
5804 bool handle_param, bitmap handled_struct_type)
5806 varinfo_t vi, newvi;
5807 tree decl_type = TREE_TYPE (decl);
5808 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5809 auto_vec<fieldoff_s> fieldstack;
5810 fieldoff_s *fo;
5811 unsigned int i;
/* Variables without a known constant size get one conservative
   unknown-size blob and no field decomposition.  */
5813 if (!declsize
5814 || !tree_fits_uhwi_p (declsize))
5816 vi = new_var_info (decl, name, add_id);
5817 vi->offset = 0;
5818 vi->size = ~0;
5819 vi->fullsize = ~0;
5820 vi->is_unknown_size_var = true;
5821 vi->is_full_var = true;
5822 vi->may_have_pointers = true;
5823 return vi;
5826 /* Collect field information. */
5827 if (use_field_sensitive
5828 && var_can_have_subvars (decl)
5829 /* ??? Force us to not use subfields for globals in IPA mode.
5830 Else we'd have to parse arbitrary initializers. */
5831 && !(in_ipa_mode
5832 && is_global_var (decl)))
5834 fieldoff_s *fo = NULL;
5835 bool notokay = false;
5836 unsigned int i;
5838 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
/* Bail out on fields with unknown size or negative offset; the
   whole decl falls back to a single variable below.  */
5840 for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
5841 if (fo->has_unknown_size
5842 || fo->offset < 0)
5844 notokay = true;
5845 break;
5848 /* We can't sort them if we have a field with a variable sized type,
5849 which will make notokay = true. In that case, we are going to return
5850 without creating varinfos for the fields anyway, so sorting them is a
5851 waste to boot. */
5852 if (!notokay)
5854 sort_fieldstack (fieldstack);
5855 /* Due to some C++ FE issues, like PR 22488, we might end up
5856 what appear to be overlapping fields even though they,
5857 in reality, do not overlap. Until the C++ FE is fixed,
5858 we will simply disable field-sensitivity for these cases. */
5859 notokay = check_for_overlaps (fieldstack);
5862 if (notokay)
5863 fieldstack.release ();
5866 /* If we didn't end up collecting sub-variables create a full
5867 variable for the decl. */
5868 if (fieldstack.length () == 0
5869 || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5871 vi = new_var_info (decl, name, add_id);
5872 vi->offset = 0;
5873 vi->may_have_pointers = true;
5874 vi->fullsize = tree_to_uhwi (declsize);
5875 vi->size = vi->fullsize;
5876 vi->is_full_var = true;
5877 if (POINTER_TYPE_P (decl_type)
5878 && TYPE_RESTRICT (decl_type))
5879 vi->only_restrict_pointers = 1;
/* For a restrict-qualified pointer parameter create a dummy heap
   variable it points to.  HANDLED_STRUCT_TYPE guards against
   recursing forever through self-referential struct types.  */
5880 if (vi->only_restrict_pointers
5881 && !type_contains_placeholder_p (TREE_TYPE (decl_type))
5882 && handle_param
5883 && !bitmap_bit_p (handled_struct_type,
5884 TYPE_UID (TREE_TYPE (decl_type))))
5886 varinfo_t rvi;
5887 tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type));
5888 DECL_EXTERNAL (heapvar) = 1;
5889 if (var_can_have_subvars (heapvar))
5890 bitmap_set_bit (handled_struct_type,
5891 TYPE_UID (TREE_TYPE (decl_type)));
5892 rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
5893 true, handled_struct_type);
5894 if (var_can_have_subvars (heapvar))
5895 bitmap_clear_bit (handled_struct_type,
5896 TYPE_UID (TREE_TYPE (decl_type)));
5897 rvi->is_restrict_var = 1;
5898 insert_vi_for_tree (heapvar, rvi);
5899 make_constraint_from (vi, rvi->id);
5900 make_param_constraints (rvi);
5902 fieldstack.release ();
5903 return vi;
/* Otherwise create one sub-variable per collected field, chained in
   field-stack (offset) order off the head varinfo VI.  */
5906 vi = new_var_info (decl, name, add_id);
5907 vi->fullsize = tree_to_uhwi (declsize);
5908 if (fieldstack.length () == 1)
5909 vi->is_full_var = true;
5910 for (i = 0, newvi = vi;
5911 fieldstack.iterate (i, &fo);
5912 ++i, newvi = vi_next (newvi))
5914 const char *newname = NULL;
5915 char *tempname;
/* Only build the "name.offset+size" dump names when dumping; they
   are purely cosmetic.  */
5917 if (dump_file)
5919 if (fieldstack.length () != 1)
5921 tempname
5922 = xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
5923 "+" HOST_WIDE_INT_PRINT_DEC, name,
5924 fo->offset, fo->size);
5925 newname = ggc_strdup (tempname);
5926 free (tempname);
5929 else
5930 newname = "NULL";
5932 if (newname)
5933 newvi->name = newname;
5934 newvi->offset = fo->offset;
5935 newvi->size = fo->size;
5936 newvi->fullsize = vi->fullsize;
5937 newvi->may_have_pointers = fo->may_have_pointers;
5938 newvi->only_restrict_pointers = fo->only_restrict_pointers;
/* Same restrict-parameter handling as above, per field.  */
5939 if (handle_param
5940 && newvi->only_restrict_pointers
5941 && !type_contains_placeholder_p (fo->restrict_pointed_type)
5942 && !bitmap_bit_p (handled_struct_type,
5943 TYPE_UID (fo->restrict_pointed_type)))
5945 varinfo_t rvi;
5946 tree heapvar = build_fake_var_decl (fo->restrict_pointed_type);
5947 DECL_EXTERNAL (heapvar) = 1;
5948 if (var_can_have_subvars (heapvar))
5949 bitmap_set_bit (handled_struct_type,
5950 TYPE_UID (fo->restrict_pointed_type));
5951 rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
5952 true, handled_struct_type);
5953 if (var_can_have_subvars (heapvar))
5954 bitmap_clear_bit (handled_struct_type,
5955 TYPE_UID (fo->restrict_pointed_type));
5956 rvi->is_restrict_var = 1;
5957 insert_vi_for_tree (heapvar, rvi);
5958 make_constraint_from (newvi, rvi->id);
5959 make_param_constraints (rvi);
/* Pre-allocate the varinfo for the next field and link it into the
   sub-variable chain so vi_next above can advance to it.  */
5961 if (i + 1 < fieldstack.length ())
5963 varinfo_t tem = new_var_info (decl, name, false);
5964 newvi->next = tem->id;
5965 tem->head = vi->id;
5969 return vi;
/* Create varinfo structures (including field sub-variables) for DECL
   named NAME, register them for DECL and, for global VAR_DECLs, add the
   initial points-to constraints.  Returns the id of the head varinfo.  */
5972 static unsigned int
5973 create_variable_info_for (tree decl, const char *name, bool add_id)
5975 varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, false, NULL);
5976 unsigned int id = vi->id;
5978 insert_vi_for_tree (decl, vi);
/* Only VAR_DECLs get initial constraints here; other decls are done.  */
5980 if (TREE_CODE (decl) != VAR_DECL)
5981 return id;
5983 /* Create initial constraints for globals. */
5984 for (; vi; vi = vi_next (vi))
5986 if (!vi->may_have_pointers
5987 || !vi->is_global_var)
5988 continue;
5990 /* Mark global restrict qualified pointers. */
5991 if ((POINTER_TYPE_P (TREE_TYPE (decl))
5992 && TYPE_RESTRICT (TREE_TYPE (decl)))
5993 || vi->only_restrict_pointers)
5995 varinfo_t rvi
5996 = make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT",
5997 true);
5998 /* ??? For now exclude reads from globals as restrict sources
5999 if those are not (indirectly) from incoming parameters. */
6000 rvi->is_restrict_var = false;
6001 continue;
6004 /* In non-IPA mode the initializer from nonlocal is all we need. */
6005 if (!in_ipa_mode
6006 || DECL_HARD_REGISTER (decl))
6007 make_copy_constraint (vi, nonlocal_id);
6009 /* In IPA mode parse the initializer and generate proper constraints
6010 for it. */
6011 else
6013 varpool_node *vnode = varpool_node::get (decl);
6015 /* For escaped variables initialize them from nonlocal. */
6016 if (!vnode->all_refs_explicit_p ())
6017 make_copy_constraint (vi, nonlocal_id);
6019 /* If this is a global variable with an initializer and we are in
6020 IPA mode generate constraints for it. */
/* Each varpool reference corresponds to an address taken in the
   initializer; add VI = &REF constraints for all of them.  */
6021 ipa_ref *ref;
6022 for (unsigned idx = 0; vnode->iterate_reference (idx, ref); ++idx)
6024 auto_vec<ce_s> rhsc;
6025 struct constraint_expr lhs, *rhsp;
6026 unsigned i;
6027 get_constraint_for_address_of (ref->referred->decl, &rhsc);
6028 lhs.var = vi->id;
6029 lhs.offset = 0;
6030 lhs.type = SCALAR;
6031 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6032 process_constraint (new_constraint (lhs, *rhsp));
6033 /* If this is a variable that escapes from the unit
6034 the initializer escapes as well. */
6035 if (!vnode->all_refs_explicit_p ())
6037 lhs.var = escaped_id;
6038 lhs.offset = 0;
6039 lhs.type = SCALAR;
6040 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6041 process_constraint (new_constraint (lhs, *rhsp));
6047 return id;
6050 /* Print out the points-to solution for VAR to FILE. */
6052 static void
6053 dump_solution_for_var (FILE *file, unsigned int var)
6055 varinfo_t vi = get_varinfo (var);
6056 unsigned int i;
6057 bitmap_iterator bi;
6059 /* Dump the solution for unified vars anyway, this avoids difficulties
6060 in scanning dumps in the testsuite. */
6061 fprintf (file, "%s = { ", vi->name);
6062 vi = get_varinfo (find (var));
6063 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6064 fprintf (file, "%s ", get_varinfo (i)->name);
6065 fprintf (file, "}");
6067 /* But note when the variable was unified. */
6068 if (vi->id != var)
6069 fprintf (file, " same as %s", vi->name);
6071 fprintf (file, "\n");
6074 /* Print the points-to solution for VAR to stderr. */
6076 DEBUG_FUNCTION void
6077 debug_solution_for_var (unsigned int var)
6079 dump_solution_for_var (stderr, var);
6082 /* Register the constraints for function parameter related VI. */
6084 static void
6085 make_param_constraints (varinfo_t vi)
6087 for (; vi; vi = vi_next (vi))
6089 if (vi->only_restrict_pointers)
6091 else if (vi->may_have_pointers)
6092 make_constraint_from (vi, nonlocal_id);
6094 if (vi->is_full_var)
6095 break;
6099 /* Create varinfo structures for all of the variables in the
6100 function for intraprocedural mode. */
6102 static void
6103 intra_create_variable_infos (struct function *fn)
6105 tree t;
6106 bitmap handled_struct_type = NULL;
6108 /* For each incoming pointer argument arg, create the constraint ARG
6109 = NONLOCAL or a dummy variable if it is a restrict qualified
6110 passed-by-reference argument. */
6111 for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
6113 if (handled_struct_type == NULL)
6114 handled_struct_type = BITMAP_ALLOC (NULL);
6116 varinfo_t p
6117 = create_variable_info_for_1 (t, alias_get_name (t), false, true,
6118 handled_struct_type);
6119 insert_vi_for_tree (t, p);
6121 make_param_constraints (p);
6124 if (handled_struct_type != NULL)
6125 BITMAP_FREE (handled_struct_type);
6127 /* Add a constraint for a result decl that is passed by reference. */
6128 if (DECL_RESULT (fn->decl)
6129 && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
6131 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));
6133 for (p = result_vi; p; p = vi_next (p))
6134 make_constraint_from (p, nonlocal_id);
6137 /* Add a constraint for the incoming static chain parameter. */
6138 if (fn->static_chain_decl != NULL_TREE)
6140 varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);
6142 for (p = chain_vi; p; p = vi_next (p))
6143 make_constraint_from (p, nonlocal_id);
6147 /* Structure used to put solution bitmaps in a hashtable so they can
6148 be shared among variables with the same points-to set. */
6150 typedef struct shared_bitmap_info
/* The shared points-to bitmap itself.  */
6152 bitmap pt_vars;
/* Hash of PT_VARS, computed once when the entry is created so lookups
   need not rehash the bitmap.  */
6153 hashval_t hashcode;
6154 } *shared_bitmap_info_t;
6155 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
6157 /* Shared_bitmap hashtable helpers. */
6159 struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
/* Return the precomputed hash of a shared_bitmap_info.  */
6161 static inline hashval_t hash (const shared_bitmap_info *);
/* Compare two entries by bitmap contents, not by pointer identity.  */
6162 static inline bool equal (const shared_bitmap_info *,
6163 const shared_bitmap_info *);
6166 /* Hash function for a shared_bitmap_info_t */
6168 inline hashval_t
6169 shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
6171 return bi->hashcode;
6174 /* Equality function for two shared_bitmap_info_t's. */
6176 inline bool
6177 shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
6178 const shared_bitmap_info *sbi2)
6180 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
6183 /* Shared_bitmap hashtable. */
/* Maps a points-to bitmap's contents to a canonical shared bitmap so
   identical solutions are stored only once.  */
6185 static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
6187 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
6188 existing instance if there is one, NULL otherwise. */
6190 static bitmap
6191 shared_bitmap_lookup (bitmap pt_vars)
6193 shared_bitmap_info **slot;
6194 struct shared_bitmap_info sbi;
6196 sbi.pt_vars = pt_vars;
6197 sbi.hashcode = bitmap_hash (pt_vars);
6199 slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
6200 if (!slot)
6201 return NULL;
6202 else
6203 return (*slot)->pt_vars;
6207 /* Add a bitmap to the shared bitmap hashtable. */
6209 static void
6210 shared_bitmap_add (bitmap pt_vars)
6212 shared_bitmap_info **slot;
6213 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
6215 sbi->pt_vars = pt_vars;
6216 sbi->hashcode = bitmap_hash (pt_vars);
6218 slot = shared_bitmap_table->find_slot (sbi, INSERT);
6219 gcc_assert (!*slot);
6220 *slot = sbi;
6224 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
6226 static void
6227 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
6228 tree fndecl)
6230 unsigned int i;
6231 bitmap_iterator bi;
6232 varinfo_t escaped_vi = get_varinfo (find (escaped_id));
6233 bool everything_escaped
6234 = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);
6236 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
6238 varinfo_t vi = get_varinfo (i);
6240 /* The only artificial variables that are allowed in a may-alias
6241 set are heap variables. */
6242 if (vi->is_artificial_var && !vi->is_heap_var)
6243 continue;
6245 if (everything_escaped
6246 || (escaped_vi->solution
6247 && bitmap_bit_p (escaped_vi->solution, i)))
6249 pt->vars_contains_escaped = true;
6250 pt->vars_contains_escaped_heap = vi->is_heap_var;
6253 if (TREE_CODE (vi->decl) == VAR_DECL
6254 || TREE_CODE (vi->decl) == PARM_DECL
6255 || TREE_CODE (vi->decl) == RESULT_DECL)
6257 /* If we are in IPA mode we will not recompute points-to
6258 sets after inlining so make sure they stay valid. */
6259 if (in_ipa_mode
6260 && !DECL_PT_UID_SET_P (vi->decl))
6261 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
6263 /* Add the decl to the points-to set. Note that the points-to
6264 set contains global variables. */
6265 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
6266 if (vi->is_global_var
6267 /* In IPA mode the escaped_heap trick doesn't work as
6268 ESCAPED is escaped from the unit but
6269 pt_solution_includes_global needs to answer true for
6270 all variables not automatic within a function.
6271 For the same reason is_global_var is not the
6272 correct flag to track - local variables from other
6273 functions also need to be considered global.
6274 Conveniently all HEAP vars are not put in function
6275 scope. */
6276 || (in_ipa_mode
6277 && fndecl
6278 && ! auto_var_in_fn_p (vi->decl, fndecl)))
6279 pt->vars_contains_nonlocal = true;
6282 else if (TREE_CODE (vi->decl) == FUNCTION_DECL
6283 || TREE_CODE (vi->decl) == LABEL_DECL)
6285 /* Nothing should read/write from/to code so we can
6286 save bits by not including them in the points-to bitmaps.
6287 Still mark the points-to set as containing global memory
6288 to make code-patching possible - see PR70128. */
6289 pt->vars_contains_nonlocal = true;
6295 /* Compute the points-to solution *PT for the variable VI. */
/* The result is cached in FINAL_SOLUTIONS, keyed on the representative
   varinfo, and returned by value.  FNDECL is passed through to
   set_uids_in_ptset for the IPA locality check.  */
6297 static struct pt_solution
6298 find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
6300 unsigned int i;
6301 bitmap_iterator bi;
6302 bitmap finished_solution;
6303 bitmap result;
6304 varinfo_t vi;
6305 struct pt_solution *pt;
6307 /* This variable may have been collapsed, let's get the real
6308 variable. */
6309 vi = get_varinfo (find (orig_vi->id));
6311 /* See if we have already computed the solution and return it. */
6312 pt_solution **slot = &final_solutions->get_or_insert (vi);
6313 if (*slot != NULL)
6314 return **slot;
/* Allocate and zero-initialize a fresh cached solution.  */
6316 *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
6317 memset (pt, 0, sizeof (struct pt_solution));
6319 /* Translate artificial variables into SSA_NAME_PTR_INFO
6320 attributes. */
6321 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6323 varinfo_t vi = get_varinfo (i);
6325 if (vi->is_artificial_var)
6327 if (vi->id == nothing_id)
6328 pt->null = 1;
6329 else if (vi->id == escaped_id)
6331 if (in_ipa_mode)
6332 pt->ipa_escaped = 1;
6333 else
6334 pt->escaped = 1;
6335 /* Expand some special vars of ESCAPED in-place here. */
6336 varinfo_t evi = get_varinfo (find (escaped_id))
6337 if (bitmap_bit_p (evi->solution, nonlocal_id))
6338 pt->nonlocal = 1;
6340 else if (vi->id == nonlocal_id)
6341 pt->nonlocal = 1;
6342 else if (vi->is_heap_var)
6343 /* We represent heapvars in the points-to set properly. */
6345 else if (vi->id == string_id)
6346 /* Nobody cares - STRING_CSTs are read-only entities. */
6348 else if (vi->id == anything_id
6349 || vi->id == integer_id)
6350 pt->anything = 1;
6354 /* Instead of doing extra work, simply do not create
6355 elaborate points-to information for pt_anything pointers. */
6356 if (pt->anything)
6357 return *pt;
6359 /* Share the final set of variables when possible. */
6360 finished_solution = BITMAP_GGC_ALLOC ();
6361 stats.points_to_sets_created++;
6363 set_uids_in_ptset (finished_solution, vi->solution, pt, fndecl);
/* If an identical set already exists reuse it and drop our copy;
   otherwise register ours as the canonical shared bitmap.  */
6364 result = shared_bitmap_lookup (finished_solution);
6365 if (!result)
6367 shared_bitmap_add (finished_solution);
6368 pt->vars = finished_solution;
6370 else
6372 pt->vars = result;
6373 bitmap_clear (finished_solution);
6376 return *pt;
6379 /* Given a pointer variable P, fill in its points-to set. */
6381 static void
6382 find_what_p_points_to (tree fndecl, tree p)
6384 struct ptr_info_def *pi;
6385 tree lookup_p = p;
6386 varinfo_t vi;
6388 /* For parameters, get at the points-to set for the actual parm
6389 decl. */
6390 if (TREE_CODE (p) == SSA_NAME
6391 && SSA_NAME_IS_DEFAULT_DEF (p)
6392 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
6393 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
6394 lookup_p = SSA_NAME_VAR (p);
6396 vi = lookup_vi_for_tree (lookup_p);
6397 if (!vi)
6398 return;
6400 pi = get_ptr_info (p);
6401 pi->pt = find_what_var_points_to (fndecl, vi);
6405 /* Query statistics for points-to solutions. */
6407 static struct {
/* Number of pt_solution_includes queries answered "may alias".  */
6408 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
/* Number of pt_solution_includes queries answered "no alias".  */
6409 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
/* Number of pt_solutions_intersect queries answered "may alias".  */
6410 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
/* Number of pt_solutions_intersect queries answered "no alias".  */
6411 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
6412 } pta_stats;
/* Dump the PTA query statistics accumulated in pta_stats to stream S.
   For each query kind, prints the number of disambiguations (no-alias
   answers) and the total number of queries.  */
6414 void
6415 dump_pta_stats (FILE *s)
6417 fprintf (s, "\nPTA query stats:\n");
6418 fprintf (s, " pt_solution_includes: "
6419 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6420 HOST_WIDE_INT_PRINT_DEC" queries\n",
6421 pta_stats.pt_solution_includes_no_alias,
6422 pta_stats.pt_solution_includes_no_alias
6423 + pta_stats.pt_solution_includes_may_alias);
6424 fprintf (s, " pt_solutions_intersect: "
6425 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6426 HOST_WIDE_INT_PRINT_DEC" queries\n",
6427 pta_stats.pt_solutions_intersect_no_alias,
6428 pta_stats.pt_solutions_intersect_no_alias
6429 + pta_stats.pt_solutions_intersect_may_alias);
6433 /* Reset the points-to solution *PT to a conservative default
6434 (point to anything). */
6436 void
6437 pt_solution_reset (struct pt_solution *pt)
6439 memset (pt, 0, sizeof (struct pt_solution));
6440 pt->anything = true;
6443 /* Set the points-to solution *PT to point only to the variables
6444 in VARS. VARS_CONTAINS_GLOBAL specifies whether that contains
6445 global variables and VARS_CONTAINS_RESTRICT specifies whether
6446 it contains restrict tag variables. */
6448 void
6449 pt_solution_set (struct pt_solution *pt, bitmap vars,
6450 bool vars_contains_nonlocal)
6452 memset (pt, 0, sizeof (struct pt_solution));
6453 pt->vars = vars;
6454 pt->vars_contains_nonlocal = vars_contains_nonlocal;
6455 pt->vars_contains_escaped
6456 = (cfun->gimple_df->escaped.anything
6457 || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
6460 /* Set the points-to solution *PT to point only to the variable VAR. */
6462 void
6463 pt_solution_set_var (struct pt_solution *pt, tree var)
6465 memset (pt, 0, sizeof (struct pt_solution));
6466 pt->vars = BITMAP_GGC_ALLOC ();
6467 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6468 pt->vars_contains_nonlocal = is_global_var (var);
6469 pt->vars_contains_escaped
6470 = (cfun->gimple_df->escaped.anything
6471 || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
6474 /* Computes the union of the points-to solutions *DEST and *SRC and
6475 stores the result in *DEST. This changes the points-to bitmap
6476 of *DEST and thus may not be used if that might be shared.
6477 The points-to bitmap of *SRC and *DEST will not be shared after
6478 this function if they were not before. */
6480 static void
6481 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6483 dest->anything |= src->anything;
6484 if (dest->anything)
6486 pt_solution_reset (dest);
6487 return;
6490 dest->nonlocal |= src->nonlocal;
6491 dest->escaped |= src->escaped;
6492 dest->ipa_escaped |= src->ipa_escaped;
6493 dest->null |= src->null;
6494 dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
6495 dest->vars_contains_escaped |= src->vars_contains_escaped;
6496 dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
6497 if (!src->vars)
6498 return;
6500 if (!dest->vars)
6501 dest->vars = BITMAP_GGC_ALLOC ();
6502 bitmap_ior_into (dest->vars, src->vars);
6505 /* Return true if the points-to solution *PT is empty. */
6507 bool
6508 pt_solution_empty_p (struct pt_solution *pt)
6510 if (pt->anything
6511 || pt->nonlocal)
6512 return false;
6514 if (pt->vars
6515 && !bitmap_empty_p (pt->vars))
6516 return false;
6518 /* If the solution includes ESCAPED, check if that is empty. */
6519 if (pt->escaped
6520 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6521 return false;
6523 /* If the solution includes ESCAPED, check if that is empty. */
6524 if (pt->ipa_escaped
6525 && !pt_solution_empty_p (&ipa_escaped_pt))
6526 return false;
6528 return true;
6531 /* Return true if the points-to solution *PT only point to a single var, and
6532 return the var uid in *UID. */
6534 bool
6535 pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
6537 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6538 || pt->null || pt->vars == NULL
6539 || !bitmap_single_bit_set_p (pt->vars))
6540 return false;
6542 *uid = bitmap_first_set_bit (pt->vars);
6543 return true;
6546 /* Return true if the points-to solution *PT includes global memory. */
6548 bool
6549 pt_solution_includes_global (struct pt_solution *pt)
6551 if (pt->anything
6552 || pt->nonlocal
6553 || pt->vars_contains_nonlocal
6554 /* The following is a hack to make the malloc escape hack work.
6555 In reality we'd need different sets for escaped-through-return
6556 and escaped-to-callees and passes would need to be updated. */
6557 || pt->vars_contains_escaped_heap)
6558 return true;
6560 /* 'escaped' is also a placeholder so we have to look into it. */
6561 if (pt->escaped)
6562 return pt_solution_includes_global (&cfun->gimple_df->escaped);
6564 if (pt->ipa_escaped)
6565 return pt_solution_includes_global (&ipa_escaped_pt);
6567 return false;
6570 /* Return true if the points-to solution *PT includes the variable
6571 declaration DECL. */
6573 static bool
6574 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
6576 if (pt->anything)
6577 return true;
6579 if (pt->nonlocal
6580 && is_global_var (decl))
6581 return true;
6583 if (pt->vars
6584 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
6585 return true;
6587 /* If the solution includes ESCAPED, check it. */
6588 if (pt->escaped
6589 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
6590 return true;
6592 /* If the solution includes ESCAPED, check it. */
6593 if (pt->ipa_escaped
6594 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
6595 return true;
6597 return false;
6600 bool
6601 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6603 bool res = pt_solution_includes_1 (pt, decl);
6604 if (res)
6605 ++pta_stats.pt_solution_includes_may_alias;
6606 else
6607 ++pta_stats.pt_solution_includes_no_alias;
6608 return res;
6611 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
6612 intersection. */
/* Worker for pt_solutions_intersect.  Neither solution is modified;
   recursion only descends into the global IPA escaped solution.  */
6614 static bool
6615 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
/* ANYTHING intersects everything.  */
6617 if (pt1->anything || pt2->anything)
6618 return true;
6620 /* If either points to unknown global memory and the other points to
6621 any global memory they alias. */
6622 if ((pt1->nonlocal
6623 && (pt2->nonlocal
6624 || pt2->vars_contains_nonlocal))
6625 || (pt2->nonlocal
6626 && pt1->vars_contains_nonlocal))
6627 return true;
6629 /* If either points to all escaped memory and the other points to
6630 any escaped memory they alias. */
6631 if ((pt1->escaped
6632 && (pt2->escaped
6633 || pt2->vars_contains_escaped))
6634 || (pt2->escaped
6635 && pt1->vars_contains_escaped))
6636 return true;
6638 /* Check the escaped solution if required.
6639 ??? Do we need to check the local against the IPA escaped sets? */
6640 if ((pt1->ipa_escaped || pt2->ipa_escaped)
6641 && !pt_solution_empty_p (&ipa_escaped_pt))
6643 /* If both point to escaped memory and that solution
6644 is not empty they alias. */
6645 if (pt1->ipa_escaped && pt2->ipa_escaped)
6646 return true;
6648 /* If either points to escaped memory see if the escaped solution
6649 intersects with the other. */
6650 if ((pt1->ipa_escaped
6651 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
6652 || (pt2->ipa_escaped
6653 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
6654 return true;
6657 /* Now both pointers alias if their points-to solution intersects. */
6658 return (pt1->vars
6659 && pt2->vars
6660 && bitmap_intersect_p (pt1->vars, pt2->vars));
6663 bool
6664 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6666 bool res = pt_solutions_intersect_1 (pt1, pt2);
6667 if (res)
6668 ++pta_stats.pt_solutions_intersect_may_alias;
6669 else
6670 ++pta_stats.pt_solutions_intersect_no_alias;
6671 return res;
6675 /* Dump points-to information to OUTFILE. */
6677 static void
6678 dump_sa_points_to_info (FILE *outfile)
6680 unsigned int i;
6682 fprintf (outfile, "\nPoints-to sets\n\n");
6684 if (dump_flags & TDF_STATS)
6686 fprintf (outfile, "Stats:\n");
6687 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6688 fprintf (outfile, "Non-pointer vars: %d\n",
6689 stats.nonpointer_vars);
6690 fprintf (outfile, "Statically unified vars: %d\n",
6691 stats.unified_vars_static);
6692 fprintf (outfile, "Dynamically unified vars: %d\n",
6693 stats.unified_vars_dynamic);
6694 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6695 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6696 fprintf (outfile, "Number of implicit edges: %d\n",
6697 stats.num_implicit_edges);
6700 for (i = 1; i < varmap.length (); i++)
6702 varinfo_t vi = get_varinfo (i);
6703 if (!vi->may_have_pointers)
6704 continue;
6705 dump_solution_for_var (outfile, i);
6710 /* Debug points-to information to stderr. */
6712 DEBUG_FUNCTION void
6713 debug_sa_points_to_info (void)
6715 dump_sa_points_to_info (stderr);
6719 /* Initialize the always-existing constraint variables for NULL
6720 ANYTHING, READONLY, and INTEGER */
6722 static void
6723 init_base_vars (void)
6725 struct constraint_expr lhs, rhs;
6726 varinfo_t var_anything;
6727 varinfo_t var_nothing;
6728 varinfo_t var_string;
6729 varinfo_t var_escaped;
6730 varinfo_t var_nonlocal;
6731 varinfo_t var_storedanything;
6732 varinfo_t var_integer;
6734 /* Variable ID zero is reserved and should be NULL. */
6735 varmap.safe_push (NULL);
6737 /* Create the NULL variable, used to represent that a variable points
6738 to NULL. */
6739 var_nothing = new_var_info (NULL_TREE, "NULL", false);
6740 gcc_assert (var_nothing->id == nothing_id);
6741 var_nothing->is_artificial_var = 1;
6742 var_nothing->offset = 0;
6743 var_nothing->size = ~0;
6744 var_nothing->fullsize = ~0;
6745 var_nothing->is_special_var = 1;
6746 var_nothing->may_have_pointers = 0;
6747 var_nothing->is_global_var = 0;
6749 /* Create the ANYTHING variable, used to represent that a variable
6750 points to some unknown piece of memory. */
6751 var_anything = new_var_info (NULL_TREE, "ANYTHING", false);
6752 gcc_assert (var_anything->id == anything_id);
6753 var_anything->is_artificial_var = 1;
6754 var_anything->size = ~0;
6755 var_anything->offset = 0;
6756 var_anything->fullsize = ~0;
6757 var_anything->is_special_var = 1;
6759 /* Anything points to anything. This makes deref constraints just
6760 work in the presence of linked list and other p = *p type loops,
6761 by saying that *ANYTHING = ANYTHING. */
6762 lhs.type = SCALAR;
6763 lhs.var = anything_id;
6764 lhs.offset = 0;
6765 rhs.type = ADDRESSOF;
6766 rhs.var = anything_id;
6767 rhs.offset = 0;
6769 /* This specifically does not use process_constraint because
6770 process_constraint ignores all anything = anything constraints, since all
6771 but this one are redundant. */
6772 constraints.safe_push (new_constraint (lhs, rhs));
6774 /* Create the STRING variable, used to represent that a variable
6775 points to a string literal. String literals don't contain
6776 pointers so STRING doesn't point to anything. */
6777 var_string = new_var_info (NULL_TREE, "STRING", false);
6778 gcc_assert (var_string->id == string_id);
6779 var_string->is_artificial_var = 1;
6780 var_string->offset = 0;
6781 var_string->size = ~0;
6782 var_string->fullsize = ~0;
6783 var_string->is_special_var = 1;
6784 var_string->may_have_pointers = 0;
6786 /* Create the ESCAPED variable, used to represent the set of escaped
6787 memory. */
6788 var_escaped = new_var_info (NULL_TREE, "ESCAPED", false);
6789 gcc_assert (var_escaped->id == escaped_id);
6790 var_escaped->is_artificial_var = 1;
6791 var_escaped->offset = 0;
6792 var_escaped->size = ~0;
6793 var_escaped->fullsize = ~0;
6794 var_escaped->is_special_var = 0;
6796 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6797 memory. */
6798 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL", false);
6799 gcc_assert (var_nonlocal->id == nonlocal_id);
6800 var_nonlocal->is_artificial_var = 1;
6801 var_nonlocal->offset = 0;
6802 var_nonlocal->size = ~0;
6803 var_nonlocal->fullsize = ~0;
6804 var_nonlocal->is_special_var = 1;
6806 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6807 lhs.type = SCALAR;
6808 lhs.var = escaped_id;
6809 lhs.offset = 0;
6810 rhs.type = DEREF;
6811 rhs.var = escaped_id;
6812 rhs.offset = 0;
6813 process_constraint (new_constraint (lhs, rhs));
6815 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6816 whole variable escapes. */
6817 lhs.type = SCALAR;
6818 lhs.var = escaped_id;
6819 lhs.offset = 0;
6820 rhs.type = SCALAR;
6821 rhs.var = escaped_id;
6822 rhs.offset = UNKNOWN_OFFSET;
6823 process_constraint (new_constraint (lhs, rhs));
6825 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6826 everything pointed to by escaped points to what global memory can
6827 point to. */
6828 lhs.type = DEREF;
6829 lhs.var = escaped_id;
6830 lhs.offset = 0;
6831 rhs.type = SCALAR;
6832 rhs.var = nonlocal_id;
6833 rhs.offset = 0;
6834 process_constraint (new_constraint (lhs, rhs));
6836 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6837 global memory may point to global memory and escaped memory. */
6838 lhs.type = SCALAR;
6839 lhs.var = nonlocal_id;
6840 lhs.offset = 0;
6841 rhs.type = ADDRESSOF;
6842 rhs.var = nonlocal_id;
6843 rhs.offset = 0;
6844 process_constraint (new_constraint (lhs, rhs));
6845 rhs.type = ADDRESSOF;
6846 rhs.var = escaped_id;
6847 rhs.offset = 0;
6848 process_constraint (new_constraint (lhs, rhs));
6850 /* Create the STOREDANYTHING variable, used to represent the set of
6851 variables stored to *ANYTHING. */
6852 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING", false);
6853 gcc_assert (var_storedanything->id == storedanything_id);
6854 var_storedanything->is_artificial_var = 1;
6855 var_storedanything->offset = 0;
6856 var_storedanything->size = ~0;
6857 var_storedanything->fullsize = ~0;
6858 var_storedanything->is_special_var = 0;
6860 /* Create the INTEGER variable, used to represent that a variable points
6861 to what an INTEGER "points to". */
6862 var_integer = new_var_info (NULL_TREE, "INTEGER", false);
6863 gcc_assert (var_integer->id == integer_id);
6864 var_integer->is_artificial_var = 1;
6865 var_integer->size = ~0;
6866 var_integer->fullsize = ~0;
6867 var_integer->offset = 0;
6868 var_integer->is_special_var = 1;
6870 /* INTEGER = ANYTHING, because we don't know where a dereference of
6871 a random integer will point to. */
6872 lhs.type = SCALAR;
6873 lhs.var = integer_id;
6874 lhs.offset = 0;
6875 rhs.type = ADDRESSOF;
6876 rhs.var = anything_id;
6877 rhs.offset = 0;
6878 process_constraint (new_constraint (lhs, rhs));
6881 /* Initialize things necessary to perform PTA */
6883 static void
6884 init_alias_vars (void)
6886 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
6888 bitmap_obstack_initialize (&pta_obstack);
6889 bitmap_obstack_initialize (&oldpta_obstack);
6890 bitmap_obstack_initialize (&predbitmap_obstack);
6892 constraints.create (8);
6893 varmap.create (8);
6894 vi_for_tree = new hash_map<tree, varinfo_t>;
6895 call_stmt_vars = new hash_map<gimple *, varinfo_t>;
6897 memset (&stats, 0, sizeof (stats));
6898 shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
6899 init_base_vars ();
6901 gcc_obstack_init (&fake_var_decl_obstack);
6903 final_solutions = new hash_map<varinfo_t, pt_solution *>;
6904 gcc_obstack_init (&final_solutions_obstack);
6907 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6908 predecessor edges. */
6910 static void
6911 remove_preds_and_fake_succs (constraint_graph_t graph)
6913 unsigned int i;
6915 /* Clear the implicit ref and address nodes from the successor
6916 lists. */
6917 for (i = 1; i < FIRST_REF_NODE; i++)
6919 if (graph->succs[i])
6920 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6921 FIRST_REF_NODE * 2);
6924 /* Free the successor list for the non-ref nodes. */
6925 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
6927 if (graph->succs[i])
6928 BITMAP_FREE (graph->succs[i]);
6931 /* Now reallocate the size of the successor list as, and blow away
6932 the predecessor bitmaps. */
6933 graph->size = varmap.length ();
6934 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6936 free (graph->implicit_preds);
6937 graph->implicit_preds = NULL;
6938 free (graph->preds);
6939 graph->preds = NULL;
6940 bitmap_obstack_release (&predbitmap_obstack);
6943 /* Solve the constraint set. */
6945 static void
6946 solve_constraints (void)
6948 struct scc_info *si;
6950 if (dump_file)
6951 fprintf (dump_file,
6952 "\nCollapsing static cycles and doing variable "
6953 "substitution\n");
6955 init_graph (varmap.length () * 2);
6957 if (dump_file)
6958 fprintf (dump_file, "Building predecessor graph\n");
6959 build_pred_graph ();
6961 if (dump_file)
6962 fprintf (dump_file, "Detecting pointer and location "
6963 "equivalences\n");
6964 si = perform_var_substitution (graph);
6966 if (dump_file)
6967 fprintf (dump_file, "Rewriting constraints and unifying "
6968 "variables\n");
6969 rewrite_constraints (graph, si);
6971 build_succ_graph ();
6973 free_var_substitution_info (si);
6975 /* Attach complex constraints to graph nodes. */
6976 move_complex_constraints (graph);
6978 if (dump_file)
6979 fprintf (dump_file, "Uniting pointer but not location equivalent "
6980 "variables\n");
6981 unite_pointer_equivalences (graph);
6983 if (dump_file)
6984 fprintf (dump_file, "Finding indirect cycles\n");
6985 find_indirect_cycles (graph);
6987 /* Implicit nodes and predecessors are no longer necessary at this
6988 point. */
6989 remove_preds_and_fake_succs (graph);
6991 if (dump_file && (dump_flags & TDF_GRAPH))
6993 fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
6994 "in dot format:\n");
6995 dump_constraint_graph (dump_file);
6996 fprintf (dump_file, "\n\n");
6999 if (dump_file)
7000 fprintf (dump_file, "Solving graph\n");
7002 solve_graph (graph);
7004 if (dump_file && (dump_flags & TDF_GRAPH))
7006 fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
7007 "in dot format:\n");
7008 dump_constraint_graph (dump_file);
7009 fprintf (dump_file, "\n\n");
7012 if (dump_file)
7013 dump_sa_points_to_info (dump_file);
7016 /* Create points-to sets for the current function. See the comments
7017 at the start of the file for an algorithmic overview. */
7019 static void
7020 compute_points_to_sets (void)
7022 basic_block bb;
7023 unsigned i;
7024 varinfo_t vi;
7026 timevar_push (TV_TREE_PTA);
7028 init_alias_vars ();
7030 intra_create_variable_infos (cfun);
7032 /* Now walk all statements and build the constraint set. */
7033 FOR_EACH_BB_FN (bb, cfun)
7035 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7036 gsi_next (&gsi))
7038 gphi *phi = gsi.phi ();
7040 if (! virtual_operand_p (gimple_phi_result (phi)))
7041 find_func_aliases (cfun, phi);
7044 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
7045 gsi_next (&gsi))
7047 gimple *stmt = gsi_stmt (gsi);
7049 find_func_aliases (cfun, stmt);
7053 if (dump_file)
7055 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
7056 dump_constraints (dump_file, 0);
7059 /* From the constraints compute the points-to sets. */
7060 solve_constraints ();
7062 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
7063 cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl,
7064 get_varinfo (escaped_id));
7066 /* Make sure the ESCAPED solution (which is used as placeholder in
7067 other solutions) does not reference itself. This simplifies
7068 points-to solution queries. */
7069 cfun->gimple_df->escaped.escaped = 0;
7071 /* Compute the points-to sets for pointer SSA_NAMEs. */
7072 for (i = 0; i < num_ssa_names; ++i)
7074 tree ptr = ssa_name (i);
7075 if (ptr
7076 && POINTER_TYPE_P (TREE_TYPE (ptr)))
7077 find_what_p_points_to (cfun->decl, ptr);
7080 /* Compute the call-used/clobbered sets. */
7081 FOR_EACH_BB_FN (bb, cfun)
7083 gimple_stmt_iterator gsi;
7085 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7087 gcall *stmt;
7088 struct pt_solution *pt;
7090 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
7091 if (!stmt)
7092 continue;
7094 pt = gimple_call_use_set (stmt);
7095 if (gimple_call_flags (stmt) & ECF_CONST)
7096 memset (pt, 0, sizeof (struct pt_solution));
7097 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7099 *pt = find_what_var_points_to (cfun->decl, vi);
7100 /* Escaped (and thus nonlocal) variables are always
7101 implicitly used by calls. */
7102 /* ??? ESCAPED can be empty even though NONLOCAL
7103 always escaped. */
7104 pt->nonlocal = 1;
7105 pt->escaped = 1;
7107 else
7109 /* If there is nothing special about this call then
7110 we have made everything that is used also escape. */
7111 *pt = cfun->gimple_df->escaped;
7112 pt->nonlocal = 1;
7115 pt = gimple_call_clobber_set (stmt);
7116 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7117 memset (pt, 0, sizeof (struct pt_solution));
7118 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7120 *pt = find_what_var_points_to (cfun->decl, vi);
7121 /* Escaped (and thus nonlocal) variables are always
7122 implicitly clobbered by calls. */
7123 /* ??? ESCAPED can be empty even though NONLOCAL
7124 always escaped. */
7125 pt->nonlocal = 1;
7126 pt->escaped = 1;
7128 else
7130 /* If there is nothing special about this call then
7131 we have made everything that is used also escape. */
7132 *pt = cfun->gimple_df->escaped;
7133 pt->nonlocal = 1;
7138 timevar_pop (TV_TREE_PTA);
7142 /* Delete created points-to sets. */
7144 static void
7145 delete_points_to_sets (void)
7147 unsigned int i;
7149 delete shared_bitmap_table;
7150 shared_bitmap_table = NULL;
7151 if (dump_file && (dump_flags & TDF_STATS))
7152 fprintf (dump_file, "Points to sets created:%d\n",
7153 stats.points_to_sets_created);
7155 delete vi_for_tree;
7156 delete call_stmt_vars;
7157 bitmap_obstack_release (&pta_obstack);
7158 constraints.release ();
7160 for (i = 0; i < graph->size; i++)
7161 graph->complex[i].release ();
7162 free (graph->complex);
7164 free (graph->rep);
7165 free (graph->succs);
7166 free (graph->pe);
7167 free (graph->pe_rep);
7168 free (graph->indirect_cycles);
7169 free (graph);
7171 varmap.release ();
7172 variable_info_pool.release ();
7173 constraint_pool.release ();
7175 obstack_free (&fake_var_decl_obstack, NULL);
7177 delete final_solutions;
7178 obstack_free (&final_solutions_obstack, NULL);
7181 struct vls_data
7183 unsigned short clique;
7184 bitmap rvars;
7187 /* Mark "other" loads and stores as belonging to CLIQUE and with
7188 base zero. */
7190 static bool
7191 visit_loadstore (gimple *, tree base, tree ref, void *data)
7193 unsigned short clique = ((vls_data *) data)->clique;
7194 bitmap rvars = ((vls_data *) data)->rvars;
7195 if (TREE_CODE (base) == MEM_REF
7196 || TREE_CODE (base) == TARGET_MEM_REF)
7198 tree ptr = TREE_OPERAND (base, 0);
7199 if (TREE_CODE (ptr) == SSA_NAME
7200 && ! SSA_NAME_IS_DEFAULT_DEF (ptr))
7202 /* We need to make sure 'ptr' doesn't include any of
7203 the restrict tags we added bases for in its points-to set. */
7204 varinfo_t vi = lookup_vi_for_tree (ptr);
7205 if (! vi)
7206 return false;
7208 vi = get_varinfo (find (vi->id));
7209 if (bitmap_intersect_p (rvars, vi->solution))
7210 return false;
7213 /* Do not overwrite existing cliques (that includes clique, base
7214 pairs we just set). */
7215 if (MR_DEPENDENCE_CLIQUE (base) == 0)
7217 MR_DEPENDENCE_CLIQUE (base) = clique;
7218 MR_DEPENDENCE_BASE (base) = 0;
7222 /* For plain decl accesses see whether they are accesses to globals
7223 and rewrite them to MEM_REFs with { clique, 0 }. */
7224 if (TREE_CODE (base) == VAR_DECL
7225 && is_global_var (base)
7226 /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
7227 ops callback. */
7228 && base != ref)
7230 tree *basep = &ref;
7231 while (handled_component_p (*basep))
7232 basep = &TREE_OPERAND (*basep, 0);
7233 gcc_assert (TREE_CODE (*basep) == VAR_DECL);
7234 tree ptr = build_fold_addr_expr (*basep);
7235 tree zero = build_int_cst (TREE_TYPE (ptr), 0);
7236 *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
7237 MR_DEPENDENCE_CLIQUE (*basep) = clique;
7238 MR_DEPENDENCE_BASE (*basep) = 0;
7241 return false;
7244 /* If REF is a MEM_REF then assign a clique, base pair to it, updating
7245 CLIQUE, *RESTRICT_VAR and LAST_RUID. Return whether dependence info
7246 was assigned to REF. */
7248 static bool
7249 maybe_set_dependence_info (tree ref, tree ptr,
7250 unsigned short &clique, varinfo_t restrict_var,
7251 unsigned short &last_ruid)
7253 while (handled_component_p (ref))
7254 ref = TREE_OPERAND (ref, 0);
7255 if ((TREE_CODE (ref) == MEM_REF
7256 || TREE_CODE (ref) == TARGET_MEM_REF)
7257 && TREE_OPERAND (ref, 0) == ptr)
7259 /* Do not overwrite existing cliques. This avoids overwriting dependence
7260 info inlined from a function with restrict parameters inlined
7261 into a function with restrict parameters. This usually means we
7262 prefer to be precise in innermost loops. */
7263 if (MR_DEPENDENCE_CLIQUE (ref) == 0)
7265 if (clique == 0)
7266 clique = ++cfun->last_clique;
7267 if (restrict_var->ruid == 0)
7268 restrict_var->ruid = ++last_ruid;
7269 MR_DEPENDENCE_CLIQUE (ref) = clique;
7270 MR_DEPENDENCE_BASE (ref) = restrict_var->ruid;
7271 return true;
7274 return false;
7277 /* Compute the set of independend memory references based on restrict
7278 tags and their conservative propagation to the points-to sets. */
7280 static void
7281 compute_dependence_clique (void)
7283 unsigned short clique = 0;
7284 unsigned short last_ruid = 0;
7285 bitmap rvars = BITMAP_ALLOC (NULL);
7286 for (unsigned i = 0; i < num_ssa_names; ++i)
7288 tree ptr = ssa_name (i);
7289 if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
7290 continue;
7292 /* Avoid all this when ptr is not dereferenced? */
7293 tree p = ptr;
7294 if (SSA_NAME_IS_DEFAULT_DEF (ptr)
7295 && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
7296 || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
7297 p = SSA_NAME_VAR (ptr);
7298 varinfo_t vi = lookup_vi_for_tree (p);
7299 if (!vi)
7300 continue;
7301 vi = get_varinfo (find (vi->id));
7302 bitmap_iterator bi;
7303 unsigned j;
7304 varinfo_t restrict_var = NULL;
7305 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
7307 varinfo_t oi = get_varinfo (j);
7308 if (oi->is_restrict_var)
7310 if (restrict_var)
7312 if (dump_file && (dump_flags & TDF_DETAILS))
7314 fprintf (dump_file, "found restrict pointed-to "
7315 "for ");
7316 print_generic_expr (dump_file, ptr, 0);
7317 fprintf (dump_file, " but not exclusively\n");
7319 restrict_var = NULL;
7320 break;
7322 restrict_var = oi;
7324 /* NULL is the only other valid points-to entry. */
7325 else if (oi->id != nothing_id)
7327 restrict_var = NULL;
7328 break;
7331 /* Ok, found that ptr must(!) point to a single(!) restrict
7332 variable. */
7333 /* ??? PTA isn't really a proper propagation engine to compute
7334 this property.
7335 ??? We could handle merging of two restricts by unifying them. */
7336 if (restrict_var)
7338 /* Now look at possible dereferences of ptr. */
7339 imm_use_iterator ui;
7340 gimple *use_stmt;
7341 bool used = false;
7342 FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
7344 /* ??? Calls and asms. */
7345 if (!gimple_assign_single_p (use_stmt))
7346 continue;
7347 used |= maybe_set_dependence_info (gimple_assign_lhs (use_stmt),
7348 ptr, clique, restrict_var,
7349 last_ruid);
7350 used |= maybe_set_dependence_info (gimple_assign_rhs1 (use_stmt),
7351 ptr, clique, restrict_var,
7352 last_ruid);
7354 if (used)
7355 bitmap_set_bit (rvars, restrict_var->id);
7359 if (clique != 0)
7361 /* Assign the BASE id zero to all accesses not based on a restrict
7362 pointer. That way they get disambiguated against restrict
7363 accesses but not against each other. */
7364 /* ??? For restricts derived from globals (thus not incoming
7365 parameters) we can't restrict scoping properly thus the following
7366 is too aggressive there. For now we have excluded those globals from
7367 getting into the MR_DEPENDENCE machinery. */
7368 vls_data data = { clique, rvars };
7369 basic_block bb;
7370 FOR_EACH_BB_FN (bb, cfun)
7371 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7372 !gsi_end_p (gsi); gsi_next (&gsi))
7374 gimple *stmt = gsi_stmt (gsi);
7375 walk_stmt_load_store_ops (stmt, &data,
7376 visit_loadstore, visit_loadstore);
7380 BITMAP_FREE (rvars);
7383 /* Compute points-to information for every SSA_NAME pointer in the
7384 current function and compute the transitive closure of escaped
7385 variables to re-initialize the call-clobber states of local variables. */
7387 unsigned int
7388 compute_may_aliases (void)
7390 if (cfun->gimple_df->ipa_pta)
7392 if (dump_file)
7394 fprintf (dump_file, "\nNot re-computing points-to information "
7395 "because IPA points-to information is available.\n\n");
7397 /* But still dump what we have remaining it. */
7398 dump_alias_info (dump_file);
7401 return 0;
7404 /* For each pointer P_i, determine the sets of variables that P_i may
7405 point-to. Compute the reachability set of escaped and call-used
7406 variables. */
7407 compute_points_to_sets ();
7409 /* Debugging dumps. */
7410 if (dump_file)
7411 dump_alias_info (dump_file);
7413 /* Compute restrict-based memory disambiguations. */
7414 compute_dependence_clique ();
7416 /* Deallocate memory used by aliasing data structures and the internal
7417 points-to solution. */
7418 delete_points_to_sets ();
7420 gcc_assert (!need_ssa_update_p (cfun));
7422 return 0;
7425 /* A dummy pass to cause points-to information to be computed via
7426 TODO_rebuild_alias. */
7428 namespace {
7430 const pass_data pass_data_build_alias =
7432 GIMPLE_PASS, /* type */
7433 "alias", /* name */
7434 OPTGROUP_NONE, /* optinfo_flags */
7435 TV_NONE, /* tv_id */
7436 ( PROP_cfg | PROP_ssa ), /* properties_required */
7437 0, /* properties_provided */
7438 0, /* properties_destroyed */
7439 0, /* todo_flags_start */
7440 TODO_rebuild_alias, /* todo_flags_finish */
7443 class pass_build_alias : public gimple_opt_pass
7445 public:
7446 pass_build_alias (gcc::context *ctxt)
7447 : gimple_opt_pass (pass_data_build_alias, ctxt)
7450 /* opt_pass methods: */
7451 virtual bool gate (function *) { return flag_tree_pta; }
7453 }; // class pass_build_alias
7455 } // anon namespace
7457 gimple_opt_pass *
7458 make_pass_build_alias (gcc::context *ctxt)
7460 return new pass_build_alias (ctxt);
7463 /* A dummy pass to cause points-to information to be computed via
7464 TODO_rebuild_alias. */
7466 namespace {
7468 const pass_data pass_data_build_ealias =
7470 GIMPLE_PASS, /* type */
7471 "ealias", /* name */
7472 OPTGROUP_NONE, /* optinfo_flags */
7473 TV_NONE, /* tv_id */
7474 ( PROP_cfg | PROP_ssa ), /* properties_required */
7475 0, /* properties_provided */
7476 0, /* properties_destroyed */
7477 0, /* todo_flags_start */
7478 TODO_rebuild_alias, /* todo_flags_finish */
7481 class pass_build_ealias : public gimple_opt_pass
7483 public:
7484 pass_build_ealias (gcc::context *ctxt)
7485 : gimple_opt_pass (pass_data_build_ealias, ctxt)
7488 /* opt_pass methods: */
7489 virtual bool gate (function *) { return flag_tree_pta; }
7491 }; // class pass_build_ealias
7493 } // anon namespace
7495 gimple_opt_pass *
7496 make_pass_build_ealias (gcc::context *ctxt)
7498 return new pass_build_ealias (ctxt);
7502 /* IPA PTA solutions for ESCAPED. */
7503 struct pt_solution ipa_escaped_pt
7504 = { true, false, false, false, false, false, false, false, NULL };
7506 /* Associate node with varinfo DATA. Worker for
7507 cgraph_for_symbol_thunks_and_aliases. */
7508 static bool
7509 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
7511 if ((node->alias || node->thunk.thunk_p)
7512 && node->analyzed)
7513 insert_vi_for_tree (node->decl, (varinfo_t)data);
7514 return false;
7517 /* Dump varinfo VI to FILE. */
7519 static void
7520 dump_varinfo (FILE *file, varinfo_t vi)
7522 if (vi == NULL)
7523 return;
7525 fprintf (file, "%u: %s\n", vi->id, vi->name);
7527 const char *sep = " ";
7528 if (vi->is_artificial_var)
7529 fprintf (file, "%sartificial", sep);
7530 if (vi->is_special_var)
7531 fprintf (file, "%sspecial", sep);
7532 if (vi->is_unknown_size_var)
7533 fprintf (file, "%sunknown-size", sep);
7534 if (vi->is_full_var)
7535 fprintf (file, "%sfull", sep);
7536 if (vi->is_heap_var)
7537 fprintf (file, "%sheap", sep);
7538 if (vi->may_have_pointers)
7539 fprintf (file, "%smay-have-pointers", sep);
7540 if (vi->only_restrict_pointers)
7541 fprintf (file, "%sonly-restrict-pointers", sep);
7542 if (vi->is_restrict_var)
7543 fprintf (file, "%sis-restrict-var", sep);
7544 if (vi->is_global_var)
7545 fprintf (file, "%sglobal", sep);
7546 if (vi->is_ipa_escape_point)
7547 fprintf (file, "%sipa-escape-point", sep);
7548 if (vi->is_fn_info)
7549 fprintf (file, "%sfn-info", sep);
7550 if (vi->ruid)
7551 fprintf (file, "%srestrict-uid:%u", sep, vi->ruid);
7552 if (vi->next)
7553 fprintf (file, "%snext:%u", sep, vi->next);
7554 if (vi->head != vi->id)
7555 fprintf (file, "%shead:%u", sep, vi->head);
7556 if (vi->offset)
7557 fprintf (file, "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset);
7558 if (vi->size != ~(unsigned HOST_WIDE_INT)0)
7559 fprintf (file, "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size);
7560 if (vi->fullsize != ~(unsigned HOST_WIDE_INT)0
7561 && vi->fullsize != vi->size)
7562 fprintf (file, "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep,
7563 vi->fullsize);
7564 fprintf (file, "\n");
7566 if (vi->solution && !bitmap_empty_p (vi->solution))
7568 bitmap_iterator bi;
7569 unsigned i;
7570 fprintf (file, " solution: {");
7571 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
7572 fprintf (file, " %u", i);
7573 fprintf (file, " }\n");
7576 if (vi->oldsolution && !bitmap_empty_p (vi->oldsolution)
7577 && !bitmap_equal_p (vi->solution, vi->oldsolution))
7579 bitmap_iterator bi;
7580 unsigned i;
7581 fprintf (file, " oldsolution: {");
7582 EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi)
7583 fprintf (file, " %u", i);
7584 fprintf (file, " }\n");
7588 /* Dump varinfo VI to stderr. */
7590 DEBUG_FUNCTION void
7591 debug_varinfo (varinfo_t vi)
7593 dump_varinfo (stderr, vi);
7596 /* Dump varmap to FILE. */
7598 static void
7599 dump_varmap (FILE *file)
7601 if (varmap.length () == 0)
7602 return;
7604 fprintf (file, "variables:\n");
7606 for (unsigned int i = 0; i < varmap.length (); ++i)
7608 varinfo_t vi = get_varinfo (i);
7609 dump_varinfo (file, vi);
7612 fprintf (file, "\n");
7615 /* Dump varmap to stderr. */
7617 DEBUG_FUNCTION void
7618 debug_varmap (void)
7620 dump_varmap (stderr);
7623 /* Compute whether node is refered to non-locally. Worker for
7624 cgraph_for_symbol_thunks_and_aliases. */
7625 static bool
7626 refered_from_nonlocal_fn (struct cgraph_node *node, void *data)
7628 bool *nonlocal_p = (bool *)data;
7629 *nonlocal_p |= (node->used_from_other_partition
7630 || node->externally_visible
7631 || node->force_output);
7632 return false;
7635 /* Same for varpool nodes. */
7636 static bool
7637 refered_from_nonlocal_var (struct varpool_node *node, void *data)
7639 bool *nonlocal_p = (bool *)data;
7640 *nonlocal_p |= (node->used_from_other_partition
7641 || node->externally_visible
7642 || node->force_output);
7643 return false;
7646 /* Execute the driver for IPA PTA. */
7647 static unsigned int
7648 ipa_pta_execute (void)
7650 struct cgraph_node *node;
7651 varpool_node *var;
7652 unsigned int from = 0;
7654 in_ipa_mode = 1;
7656 init_alias_vars ();
7658 if (dump_file && (dump_flags & TDF_DETAILS))
7660 symtab_node::dump_table (dump_file);
7661 fprintf (dump_file, "\n");
7664 if (dump_file)
7666 fprintf (dump_file, "Generating generic constraints\n\n");
7667 dump_constraints (dump_file, from);
7668 fprintf (dump_file, "\n");
7669 from = constraints.length ();
7672 /* Build the constraints. */
7673 FOR_EACH_DEFINED_FUNCTION (node)
7675 varinfo_t vi;
7676 /* Nodes without a body are not interesting. Especially do not
7677 visit clones at this point for now - we get duplicate decls
7678 there for inline clones at least. */
7679 if (!node->has_gimple_body_p () || node->global.inlined_to)
7680 continue;
7681 node->get_body ();
7683 gcc_assert (!node->clone_of);
7685 /* For externally visible or attribute used annotated functions use
7686 local constraints for their arguments.
7687 For local functions we see all callers and thus do not need initial
7688 constraints for parameters. */
7689 bool nonlocal_p = (node->used_from_other_partition
7690 || node->externally_visible
7691 || node->force_output);
7692 node->call_for_symbol_thunks_and_aliases (refered_from_nonlocal_fn,
7693 &nonlocal_p, true);
7695 vi = create_function_info_for (node->decl,
7696 alias_get_name (node->decl), false,
7697 nonlocal_p);
7698 if (dump_file
7699 && from != constraints.length ())
7701 fprintf (dump_file,
7702 "Generating intial constraints for %s", node->name ());
7703 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
7704 fprintf (dump_file, " (%s)",
7705 IDENTIFIER_POINTER
7706 (DECL_ASSEMBLER_NAME (node->decl)));
7707 fprintf (dump_file, "\n\n");
7708 dump_constraints (dump_file, from);
7709 fprintf (dump_file, "\n");
7711 from = constraints.length ();
7714 node->call_for_symbol_thunks_and_aliases
7715 (associate_varinfo_to_alias, vi, true);
7718 /* Create constraints for global variables and their initializers. */
7719 FOR_EACH_VARIABLE (var)
7721 if (var->alias && var->analyzed)
7722 continue;
7724 varinfo_t vi = get_vi_for_tree (var->decl);
7726 /* For the purpose of IPA PTA unit-local globals are not
7727 escape points. */
7728 bool nonlocal_p = (var->used_from_other_partition
7729 || var->externally_visible
7730 || var->force_output);
7731 var->call_for_symbol_and_aliases (refered_from_nonlocal_var,
7732 &nonlocal_p, true);
7733 if (nonlocal_p)
7734 vi->is_ipa_escape_point = true;
7737 if (dump_file
7738 && from != constraints.length ())
7740 fprintf (dump_file,
7741 "Generating constraints for global initializers\n\n");
7742 dump_constraints (dump_file, from);
7743 fprintf (dump_file, "\n");
7744 from = constraints.length ();
7747 FOR_EACH_DEFINED_FUNCTION (node)
7749 struct function *func;
7750 basic_block bb;
7752 /* Nodes without a body are not interesting. */
7753 if (!node->has_gimple_body_p () || node->clone_of)
7754 continue;
7756 if (dump_file)
7758 fprintf (dump_file,
7759 "Generating constraints for %s", node->name ());
7760 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
7761 fprintf (dump_file, " (%s)",
7762 IDENTIFIER_POINTER
7763 (DECL_ASSEMBLER_NAME (node->decl)));
7764 fprintf (dump_file, "\n");
7767 func = DECL_STRUCT_FUNCTION (node->decl);
7768 gcc_assert (cfun == NULL);
7770 /* Build constriants for the function body. */
7771 FOR_EACH_BB_FN (bb, func)
7773 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7774 gsi_next (&gsi))
7776 gphi *phi = gsi.phi ();
7778 if (! virtual_operand_p (gimple_phi_result (phi)))
7779 find_func_aliases (func, phi);
7782 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
7783 gsi_next (&gsi))
7785 gimple *stmt = gsi_stmt (gsi);
7787 find_func_aliases (func, stmt);
7788 find_func_clobbers (func, stmt);
7792 if (dump_file)
7794 fprintf (dump_file, "\n");
7795 dump_constraints (dump_file, from);
7796 fprintf (dump_file, "\n");
7797 from = constraints.length ();
7801 /* From the constraints compute the points-to sets. */
7802 solve_constraints ();
7804 /* Compute the global points-to sets for ESCAPED.
7805 ??? Note that the computed escape set is not correct
7806 for the whole unit as we fail to consider graph edges to
7807 externally visible functions. */
7808 ipa_escaped_pt = find_what_var_points_to (NULL, get_varinfo (escaped_id));
7810 /* Make sure the ESCAPED solution (which is used as placeholder in
7811 other solutions) does not reference itself. This simplifies
7812 points-to solution queries. */
7813 ipa_escaped_pt.ipa_escaped = 0;
7815 /* Assign the points-to sets to the SSA names in the unit. */
7816 FOR_EACH_DEFINED_FUNCTION (node)
7818 tree ptr;
7819 struct function *fn;
7820 unsigned i;
7821 basic_block bb;
7823 /* Nodes without a body are not interesting. */
7824 if (!node->has_gimple_body_p () || node->clone_of)
7825 continue;
7827 fn = DECL_STRUCT_FUNCTION (node->decl);
7829 /* Compute the points-to sets for pointer SSA_NAMEs. */
7830 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
7832 if (ptr
7833 && POINTER_TYPE_P (TREE_TYPE (ptr)))
7834 find_what_p_points_to (node->decl, ptr);
7837 /* Compute the call-use and call-clobber sets for indirect calls
7838 and calls to external functions. */
7839 FOR_EACH_BB_FN (bb, fn)
7841 gimple_stmt_iterator gsi;
7843 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7845 gcall *stmt;
7846 struct pt_solution *pt;
7847 varinfo_t vi, fi;
7848 tree decl;
7850 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
7851 if (!stmt)
7852 continue;
7854 /* Handle direct calls to functions with body. */
7855 decl = gimple_call_fndecl (stmt);
7858 tree called_decl = NULL_TREE;
7859 if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
7860 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
7861 else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
7862 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
7864 if (called_decl != NULL_TREE
7865 && !fndecl_maybe_in_other_partition (called_decl))
7866 decl = called_decl;
7869 if (decl
7870 && (fi = lookup_vi_for_tree (decl))
7871 && fi->is_fn_info)
7873 *gimple_call_clobber_set (stmt)
7874 = find_what_var_points_to
7875 (node->decl, first_vi_for_offset (fi, fi_clobbers));
7876 *gimple_call_use_set (stmt)
7877 = find_what_var_points_to
7878 (node->decl, first_vi_for_offset (fi, fi_uses));
7880 /* Handle direct calls to external functions. */
7881 else if (decl)
7883 pt = gimple_call_use_set (stmt);
7884 if (gimple_call_flags (stmt) & ECF_CONST)
7885 memset (pt, 0, sizeof (struct pt_solution));
7886 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7888 *pt = find_what_var_points_to (node->decl, vi);
7889 /* Escaped (and thus nonlocal) variables are always
7890 implicitly used by calls. */
7891 /* ??? ESCAPED can be empty even though NONLOCAL
7892 always escaped. */
7893 pt->nonlocal = 1;
7894 pt->ipa_escaped = 1;
7896 else
7898 /* If there is nothing special about this call then
7899 we have made everything that is used also escape. */
7900 *pt = ipa_escaped_pt;
7901 pt->nonlocal = 1;
7904 pt = gimple_call_clobber_set (stmt);
7905 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7906 memset (pt, 0, sizeof (struct pt_solution));
7907 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7909 *pt = find_what_var_points_to (node->decl, vi);
7910 /* Escaped (and thus nonlocal) variables are always
7911 implicitly clobbered by calls. */
7912 /* ??? ESCAPED can be empty even though NONLOCAL
7913 always escaped. */
7914 pt->nonlocal = 1;
7915 pt->ipa_escaped = 1;
7917 else
7919 /* If there is nothing special about this call then
7920 we have made everything that is used also escape. */
7921 *pt = ipa_escaped_pt;
7922 pt->nonlocal = 1;
7925 /* Handle indirect calls. */
7926 else if (!decl
7927 && (fi = get_fi_for_callee (stmt)))
7929 /* We need to accumulate all clobbers/uses of all possible
7930 callees. */
7931 fi = get_varinfo (find (fi->id));
7932 /* If we cannot constrain the set of functions we'll end up
7933 calling we end up using/clobbering everything. */
7934 if (bitmap_bit_p (fi->solution, anything_id)
7935 || bitmap_bit_p (fi->solution, nonlocal_id)
7936 || bitmap_bit_p (fi->solution, escaped_id))
7938 pt_solution_reset (gimple_call_clobber_set (stmt));
7939 pt_solution_reset (gimple_call_use_set (stmt));
7941 else
7943 bitmap_iterator bi;
7944 unsigned i;
7945 struct pt_solution *uses, *clobbers;
7947 uses = gimple_call_use_set (stmt);
7948 clobbers = gimple_call_clobber_set (stmt);
7949 memset (uses, 0, sizeof (struct pt_solution));
7950 memset (clobbers, 0, sizeof (struct pt_solution));
7951 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
7953 struct pt_solution sol;
7955 vi = get_varinfo (i);
7956 if (!vi->is_fn_info)
7958 /* ??? We could be more precise here? */
7959 uses->nonlocal = 1;
7960 uses->ipa_escaped = 1;
7961 clobbers->nonlocal = 1;
7962 clobbers->ipa_escaped = 1;
7963 continue;
7966 if (!uses->anything)
7968 sol = find_what_var_points_to
7969 (node->decl,
7970 first_vi_for_offset (vi, fi_uses));
7971 pt_solution_ior_into (uses, &sol);
7973 if (!clobbers->anything)
7975 sol = find_what_var_points_to
7976 (node->decl,
7977 first_vi_for_offset (vi, fi_clobbers));
7978 pt_solution_ior_into (clobbers, &sol);
7986 fn->gimple_df->ipa_pta = true;
7988 /* We have to re-set the final-solution cache after each function
7989 because what is a "global" is dependent on function context. */
7990 final_solutions->empty ();
7991 obstack_free (&final_solutions_obstack, NULL);
7992 gcc_obstack_init (&final_solutions_obstack);
7995 delete_points_to_sets ();
7997 in_ipa_mode = 0;
7999 return 0;
8002 namespace {
8004 const pass_data pass_data_ipa_pta =
8006 SIMPLE_IPA_PASS, /* type */
8007 "pta", /* name */
8008 OPTGROUP_NONE, /* optinfo_flags */
8009 TV_IPA_PTA, /* tv_id */
8010 0, /* properties_required */
8011 0, /* properties_provided */
8012 0, /* properties_destroyed */
8013 0, /* todo_flags_start */
8014 0, /* todo_flags_finish */
8017 class pass_ipa_pta : public simple_ipa_opt_pass
8019 public:
8020 pass_ipa_pta (gcc::context *ctxt)
8021 : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
8024 /* opt_pass methods: */
8025 virtual bool gate (function *)
8027 return (optimize
8028 && flag_ipa_pta
8029 /* Don't bother doing anything if the program has errors. */
8030 && !seen_error ());
8033 opt_pass * clone () { return new pass_ipa_pta (m_ctxt); }
8035 virtual unsigned int execute (function *) { return ipa_pta_execute (); }
8037 }; // class pass_ipa_pta
8039 } // anon namespace
8041 simple_ipa_opt_pass *
8042 make_pass_ipa_pta (gcc::context *ctxt)
8044 return new pass_ipa_pta (ctxt);