2016-08-24 Michael Collison <michael.collison@linaro.org>
[official-gcc.git] / gcc / tree-ssa-structalias.c
blobfd96c3ab7971d00f38de95f9be75ee36324392eb
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2016 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "tree-pass.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "diagnostic-core.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "stmt.h"
37 #include "gimple-iterator.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "params.h"
41 #include "gimple-walk.h"
43 /* The idea behind this analyzer is to generate set constraints from the
44 program, then solve the resulting constraints in order to generate the
45 points-to sets.
47 Set constraints are a way of modeling program analysis problems that
48 involve sets. They consist of an inclusion constraint language,
49 describing the variables (each variable is a set) and operations that
50 are involved on the variables, and a set of rules that derive facts
51 from these operations. To solve a system of set constraints, you derive
52 all possible facts under the rules, which gives you the correct sets
53 as a consequence.
55 See "Efficient Field-sensitive pointer analysis for C" by "David
56 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
57 http://citeseer.ist.psu.edu/pearce04efficient.html
59 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
60 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
61 http://citeseer.ist.psu.edu/heintze01ultrafast.html
63 There are three types of real constraint expressions, DEREF,
64 ADDRESSOF, and SCALAR. Each constraint expression consists
65 of a constraint type, a variable, and an offset.
67 SCALAR is a constraint expression type used to represent x, whether
68 it appears on the LHS or the RHS of a statement.
69 DEREF is a constraint expression type used to represent *x, whether
70 it appears on the LHS or the RHS of a statement.
71 ADDRESSOF is a constraint expression used to represent &x, whether
72 it appears on the LHS or the RHS of a statement.
74 Each pointer variable in the program is assigned an integer id, and
75 each field of a structure variable is assigned an integer id as well.
77 Structure variables are linked to their list of fields through a "next
78 field" in each variable that points to the next field in offset
79 order.
80 Each variable for a structure field has
82 1. "size", that tells the size in bits of that field.
83 2. "fullsize, that tells the size in bits of the entire structure.
84 3. "offset", that tells the offset in bits from the beginning of the
85 structure to this field.
87 Thus,
88 struct f
90 int a;
91 int b;
92 } foo;
93 int *bar;
95 looks like
97 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
98 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
99 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
102 In order to solve the system of set constraints, the following is
103 done:
105 1. Each constraint variable x has a solution set associated with it,
106 Sol(x).
108 2. Constraints are separated into direct, copy, and complex.
109 Direct constraints are ADDRESSOF constraints that require no extra
110 processing, such as P = &Q
111 Copy constraints are those of the form P = Q.
112 Complex constraints are all the constraints involving dereferences
113 and offsets (including offsetted copies).
115 3. All direct constraints of the form P = &Q are processed, such
116 that Q is added to Sol(P)
118 4. All complex constraints for a given constraint variable are stored in a
119 linked list attached to that variable's node.
121 5. A directed graph is built out of the copy constraints. Each
122 constraint variable is a node in the graph, and an edge from
123 Q to P is added for each copy constraint of the form P = Q
125 6. The graph is then walked, and solution sets are
126 propagated along the copy edges, such that an edge from Q to P
127 causes Sol(P) <- Sol(P) union Sol(Q).
129 7. As we visit each node, all complex constraints associated with
130 that node are processed by adding appropriate copy edges to the graph, or the
131 appropriate variables to the solution set.
133 8. The process of walking the graph is iterated until no solution
134 sets change.
136 Prior to walking the graph in steps 6 and 7, We perform static
137 cycle elimination on the constraint graph, as well
138 as off-line variable substitution.
140 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
141 on and turned into anything), but isn't. You can just see what offset
142 inside the pointed-to struct it's going to access.
144 TODO: Constant bounded arrays can be handled as if they were structs of the
145 same number of elements.
147 TODO: Modeling heap and incoming pointers becomes much better if we
148 add fields to them as we discover them, which we could do.
150 TODO: We could handle unions, but to be honest, it's probably not
151 worth the pain or slowdown. */
153 /* IPA-PTA optimizations possible.
155 When the indirect function called is ANYTHING we can add disambiguation
156 based on the function signatures (or simply the parameter count which
157 is the varinfo size). We also do not need to consider functions that
158 do not have their address taken.
160 The is_global_var bit which marks escape points is overly conservative
161 in IPA mode. Split it to is_escape_point and is_global_var - only
162 externally visible globals are escape points in IPA mode.
163 There is now is_ipa_escape_point but this is only used in a few
164 selected places.
166 The way we introduce DECL_PT_UID to avoid fixing up all points-to
167 sets in the translation unit when we copy a DECL during inlining
168 pessimizes precision. The advantage is that the DECL_PT_UID keeps
169 compile-time and memory usage overhead low - the points-to sets
170 do not grow or get unshared as they would during a fixup phase.
171 An alternative solution is to delay IPA PTA until after all
172 inlining transformations have been applied.
174 The way we propagate clobber/use information isn't optimized.
175 It should use a new complex constraint that properly filters
176 out local variables of the callee (though that would make
177 the sets invalid after inlining). OTOH we might as well
178 admit defeat to WHOPR and simply do all the clobber/use analysis
179 and propagation after PTA finished but before we threw away
180 points-to information for memory variables. WHOPR and PTA
181 do not play along well anyway - the whole constraint solving
182 would need to be done in WPA phase and it will be very interesting
183 to apply the results to local SSA names during LTRANS phase.
185 We probably should compute a per-function unit-ESCAPE solution
186 propagating it simply like the clobber / uses solutions. The
187 solution can go alongside the non-IPA escaped solution and be
188 used to query which vars escape the unit through a function.
189 This is also required to make the escaped-HEAP trick work in IPA mode.
191 We never put function decls in points-to sets so we do not
192 keep the set of called functions for indirect calls.
194 And probably more. */
196 static bool use_field_sensitive = true;
197 static int in_ipa_mode = 0;
199 /* Used for predecessor bitmaps. */
200 static bitmap_obstack predbitmap_obstack;
202 /* Used for points-to sets. */
203 static bitmap_obstack pta_obstack;
205 /* Used for oldsolution members of variables. */
206 static bitmap_obstack oldpta_obstack;
208 /* Used for per-solver-iteration bitmaps. */
209 static bitmap_obstack iteration_obstack;
211 static unsigned int create_variable_info_for (tree, const char *, bool);
212 typedef struct constraint_graph *constraint_graph_t;
213 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
215 struct constraint;
216 typedef struct constraint *constraint_t;
219 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
220 if (a) \
221 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
/* Counters collected while building and solving the constraint
   system; dumped for statistics.  */
static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;
235 struct variable_info
237 /* ID of this variable */
238 unsigned int id;
240 /* True if this is a variable created by the constraint analysis, such as
241 heap variables and constraints we had to break up. */
242 unsigned int is_artificial_var : 1;
244 /* True if this is a special variable whose solution set should not be
245 changed. */
246 unsigned int is_special_var : 1;
248 /* True for variables whose size is not known or variable. */
249 unsigned int is_unknown_size_var : 1;
251 /* True for (sub-)fields that represent a whole variable. */
252 unsigned int is_full_var : 1;
254 /* True if this is a heap variable. */
255 unsigned int is_heap_var : 1;
257 /* True if this field may contain pointers. */
258 unsigned int may_have_pointers : 1;
260 /* True if this field has only restrict qualified pointers. */
261 unsigned int only_restrict_pointers : 1;
263 /* True if this represents a heap var created for a restrict qualified
264 pointer. */
265 unsigned int is_restrict_var : 1;
267 /* True if this represents a global variable. */
268 unsigned int is_global_var : 1;
270 /* True if this represents a module escape point for IPA analysis. */
271 unsigned int is_ipa_escape_point : 1;
273 /* True if this represents a IPA function info. */
274 unsigned int is_fn_info : 1;
276 /* ??? Store somewhere better. */
277 unsigned short ruid;
279 /* The ID of the variable for the next field in this structure
280 or zero for the last field in this structure. */
281 unsigned next;
283 /* The ID of the variable for the first field in this structure. */
284 unsigned head;
286 /* Offset of this variable, in bits, from the base variable */
287 unsigned HOST_WIDE_INT offset;
289 /* Size of the variable, in bits. */
290 unsigned HOST_WIDE_INT size;
292 /* Full size of the base variable, in bits. */
293 unsigned HOST_WIDE_INT fullsize;
295 /* Name of this variable */
296 const char *name;
298 /* Tree that this variable is associated with. */
299 tree decl;
301 /* Points-to set for this variable. */
302 bitmap solution;
304 /* Old points-to set for this variable. */
305 bitmap oldsolution;
307 typedef struct variable_info *varinfo_t;
309 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
310 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
311 unsigned HOST_WIDE_INT);
312 static varinfo_t lookup_vi_for_tree (tree);
313 static inline bool type_can_have_subvars (const_tree);
314 static void make_param_constraints (varinfo_t);
316 /* Pool of variable info structures. */
317 static object_allocator<variable_info> variable_info_pool
318 ("Variable info pool");
320 /* Map varinfo to final pt_solution. */
321 static hash_map<varinfo_t, pt_solution *> *final_solutions;
322 struct obstack final_solutions_obstack;
324 /* Table of variable info structures for constraint variables.
325 Indexed directly by variable info id. */
326 static vec<varinfo_t> varmap;
328 /* Return the varmap element N */
330 static inline varinfo_t
331 get_varinfo (unsigned int n)
333 return varmap[n];
336 /* Return the next variable in the list of sub-variables of VI
337 or NULL if VI is the last sub-variable. */
339 static inline varinfo_t
340 vi_next (varinfo_t vi)
342 return get_varinfo (vi->next);
/* Static IDs for the special variables.  Variable ID zero is unused
   and used as terminator for the sub-variable chain.  */
enum { nothing_id = 1, anything_id = 2, string_id = 3,
       escaped_id = 4, nonlocal_id = 5,
       storedanything_id = 6, integer_id = 7 };
351 /* Return a new variable info structure consisting for a variable
352 named NAME, and using constraint graph node NODE. Append it
353 to the vector of variable info structures. */
355 static varinfo_t
356 new_var_info (tree t, const char *name, bool add_id)
358 unsigned index = varmap.length ();
359 varinfo_t ret = variable_info_pool.allocate ();
361 if (dump_file && add_id)
363 char *tempname = xasprintf ("%s(%d)", name, index);
364 name = ggc_strdup (tempname);
365 free (tempname);
368 ret->id = index;
369 ret->name = name;
370 ret->decl = t;
371 /* Vars without decl are artificial and do not have sub-variables. */
372 ret->is_artificial_var = (t == NULL_TREE);
373 ret->is_special_var = false;
374 ret->is_unknown_size_var = false;
375 ret->is_full_var = (t == NULL_TREE);
376 ret->is_heap_var = false;
377 ret->may_have_pointers = true;
378 ret->only_restrict_pointers = false;
379 ret->is_restrict_var = false;
380 ret->ruid = 0;
381 ret->is_global_var = (t == NULL_TREE);
382 ret->is_ipa_escape_point = false;
383 ret->is_fn_info = false;
384 if (t && DECL_P (t))
385 ret->is_global_var = (is_global_var (t)
386 /* We have to treat even local register variables
387 as escape points. */
388 || (TREE_CODE (t) == VAR_DECL
389 && DECL_HARD_REGISTER (t)));
390 ret->solution = BITMAP_ALLOC (&pta_obstack);
391 ret->oldsolution = NULL;
392 ret->next = 0;
393 ret->head = ret->id;
395 stats.total_vars++;
397 varmap.safe_push (ret);
399 return ret;
402 /* A map mapping call statements to per-stmt variables for uses
403 and clobbers specific to the call. */
404 static hash_map<gimple *, varinfo_t> *call_stmt_vars;
406 /* Lookup or create the variable for the call statement CALL. */
408 static varinfo_t
409 get_call_vi (gcall *call)
411 varinfo_t vi, vi2;
413 bool existed;
414 varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
415 if (existed)
416 return *slot_p;
418 vi = new_var_info (NULL_TREE, "CALLUSED", true);
419 vi->offset = 0;
420 vi->size = 1;
421 vi->fullsize = 2;
422 vi->is_full_var = true;
424 vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED", true);
425 vi2->offset = 1;
426 vi2->size = 1;
427 vi2->fullsize = 2;
428 vi2->is_full_var = true;
430 vi->next = vi2->id;
432 *slot_p = vi;
433 return vi;
436 /* Lookup the variable for the call statement CALL representing
437 the uses. Returns NULL if there is nothing special about this call. */
439 static varinfo_t
440 lookup_call_use_vi (gcall *call)
442 varinfo_t *slot_p = call_stmt_vars->get (call);
443 if (slot_p)
444 return *slot_p;
446 return NULL;
449 /* Lookup the variable for the call statement CALL representing
450 the clobbers. Returns NULL if there is nothing special about this call. */
452 static varinfo_t
453 lookup_call_clobber_vi (gcall *call)
455 varinfo_t uses = lookup_call_use_vi (call);
456 if (!uses)
457 return NULL;
459 return vi_next (uses);
462 /* Lookup or create the variable for the call statement CALL representing
463 the uses. */
465 static varinfo_t
466 get_call_use_vi (gcall *call)
468 return get_call_vi (call);
471 /* Lookup or create the variable for the call statement CALL representing
472 the clobbers. */
474 static varinfo_t ATTRIBUTE_UNUSED
475 get_call_clobber_vi (gcall *call)
477 return vi_next (get_call_vi (call));
481 enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};
483 /* An expression that appears in a constraint. */
485 struct constraint_expr
487 /* Constraint type. */
488 constraint_expr_type type;
490 /* Variable we are referring to in the constraint. */
491 unsigned int var;
493 /* Offset, in bits, of this constraint from the beginning of
494 variables it ends up referring to.
496 IOW, in a deref constraint, we would deref, get the result set,
497 then add OFFSET to each member. */
498 HOST_WIDE_INT offset;
501 /* Use 0x8000... as special unknown offset. */
502 #define UNKNOWN_OFFSET HOST_WIDE_INT_MIN
504 typedef struct constraint_expr ce_s;
505 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
506 static void get_constraint_for (tree, vec<ce_s> *);
507 static void get_constraint_for_rhs (tree, vec<ce_s> *);
508 static void do_deref (vec<ce_s> *);
510 /* Our set constraints are made up of two constraint expressions, one
511 LHS, and one RHS.
513 As described in the introduction, our set constraints each represent an
514 operation between set valued variables.
516 struct constraint
518 struct constraint_expr lhs;
519 struct constraint_expr rhs;
522 /* List of constraints that we use to build the constraint graph from. */
524 static vec<constraint_t> constraints;
525 static object_allocator<constraint> constraint_pool ("Constraint pool");
527 /* The constraint graph is represented as an array of bitmaps
528 containing successor nodes. */
530 struct constraint_graph
532 /* Size of this graph, which may be different than the number of
533 nodes in the variable map. */
534 unsigned int size;
536 /* Explicit successors of each node. */
537 bitmap *succs;
539 /* Implicit predecessors of each node (Used for variable
540 substitution). */
541 bitmap *implicit_preds;
543 /* Explicit predecessors of each node (Used for variable substitution). */
544 bitmap *preds;
546 /* Indirect cycle representatives, or -1 if the node has no indirect
547 cycles. */
548 int *indirect_cycles;
550 /* Representative node for a node. rep[a] == a unless the node has
551 been unified. */
552 unsigned int *rep;
554 /* Equivalence class representative for a label. This is used for
555 variable substitution. */
556 int *eq_rep;
558 /* Pointer equivalence label for a node. All nodes with the same
559 pointer equivalence label can be unified together at some point
560 (either during constraint optimization or after the constraint
561 graph is built). */
562 unsigned int *pe;
564 /* Pointer equivalence representative for a label. This is used to
565 handle nodes that are pointer equivalent but not location
566 equivalent. We can unite these once the addressof constraints
567 are transformed into initial points-to sets. */
568 int *pe_rep;
570 /* Pointer equivalence label for each node, used during variable
571 substitution. */
572 unsigned int *pointer_label;
574 /* Location equivalence label for each node, used during location
575 equivalence finding. */
576 unsigned int *loc_label;
578 /* Pointed-by set for each node, used during location equivalence
579 finding. This is pointed-by rather than pointed-to, because it
580 is constructed using the predecessor graph. */
581 bitmap *pointed_by;
583 /* Points to sets for pointer equivalence. This is *not* the actual
584 points-to sets for nodes. */
585 bitmap *points_to;
587 /* Bitmap of nodes where the bit is set if the node is a direct
588 node. Used for variable substitution. */
589 sbitmap direct_nodes;
591 /* Bitmap of nodes where the bit is set if the node is address
592 taken. Used for variable substitution. */
593 bitmap address_taken;
595 /* Vector of complex constraints for each graph node. Complex
596 constraints are those involving dereferences or offsets that are
597 not 0. */
598 vec<constraint_t> *complex;
601 static constraint_graph_t graph;
603 /* During variable substitution and the offline version of indirect
604 cycle finding, we create nodes to represent dereferences and
605 address taken constraints. These represent where these start and
606 end. */
607 #define FIRST_REF_NODE (varmap).length ()
608 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
610 /* Return the representative node for NODE, if NODE has been unioned
611 with another NODE.
612 This function performs path compression along the way to finding
613 the representative. */
615 static unsigned int
616 find (unsigned int node)
618 gcc_checking_assert (node < graph->size);
619 if (graph->rep[node] != node)
620 return graph->rep[node] = find (graph->rep[node]);
621 return node;
624 /* Union the TO and FROM nodes to the TO nodes.
625 Note that at some point in the future, we may want to do
626 union-by-rank, in which case we are going to have to return the
627 node we unified to. */
629 static bool
630 unite (unsigned int to, unsigned int from)
632 gcc_checking_assert (to < graph->size && from < graph->size);
633 if (to != from && graph->rep[from] != to)
635 graph->rep[from] = to;
636 return true;
638 return false;
641 /* Create a new constraint consisting of LHS and RHS expressions. */
643 static constraint_t
644 new_constraint (const struct constraint_expr lhs,
645 const struct constraint_expr rhs)
647 constraint_t ret = constraint_pool.allocate ();
648 ret->lhs = lhs;
649 ret->rhs = rhs;
650 return ret;
653 /* Print out constraint C to FILE. */
655 static void
656 dump_constraint (FILE *file, constraint_t c)
658 if (c->lhs.type == ADDRESSOF)
659 fprintf (file, "&");
660 else if (c->lhs.type == DEREF)
661 fprintf (file, "*");
662 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
663 if (c->lhs.offset == UNKNOWN_OFFSET)
664 fprintf (file, " + UNKNOWN");
665 else if (c->lhs.offset != 0)
666 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
667 fprintf (file, " = ");
668 if (c->rhs.type == ADDRESSOF)
669 fprintf (file, "&");
670 else if (c->rhs.type == DEREF)
671 fprintf (file, "*");
672 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
673 if (c->rhs.offset == UNKNOWN_OFFSET)
674 fprintf (file, " + UNKNOWN");
675 else if (c->rhs.offset != 0)
676 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
680 void debug_constraint (constraint_t);
681 void debug_constraints (void);
682 void debug_constraint_graph (void);
683 void debug_solution_for_var (unsigned int);
684 void debug_sa_points_to_info (void);
685 void debug_varinfo (varinfo_t);
686 void debug_varmap (void);
688 /* Print out constraint C to stderr. */
690 DEBUG_FUNCTION void
691 debug_constraint (constraint_t c)
693 dump_constraint (stderr, c);
694 fprintf (stderr, "\n");
697 /* Print out all constraints to FILE */
699 static void
700 dump_constraints (FILE *file, int from)
702 int i;
703 constraint_t c;
704 for (i = from; constraints.iterate (i, &c); i++)
705 if (c)
707 dump_constraint (file, c);
708 fprintf (file, "\n");
712 /* Print out all constraints to stderr. */
714 DEBUG_FUNCTION void
715 debug_constraints (void)
717 dump_constraints (stderr, 0);
720 /* Print the constraint graph in dot format. */
722 static void
723 dump_constraint_graph (FILE *file)
725 unsigned int i;
727 /* Only print the graph if it has already been initialized: */
728 if (!graph)
729 return;
731 /* Prints the header of the dot file: */
732 fprintf (file, "strict digraph {\n");
733 fprintf (file, " node [\n shape = box\n ]\n");
734 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
735 fprintf (file, "\n // List of nodes and complex constraints in "
736 "the constraint graph:\n");
738 /* The next lines print the nodes in the graph together with the
739 complex constraints attached to them. */
740 for (i = 1; i < graph->size; i++)
742 if (i == FIRST_REF_NODE)
743 continue;
744 if (find (i) != i)
745 continue;
746 if (i < FIRST_REF_NODE)
747 fprintf (file, "\"%s\"", get_varinfo (i)->name);
748 else
749 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
750 if (graph->complex[i].exists ())
752 unsigned j;
753 constraint_t c;
754 fprintf (file, " [label=\"\\N\\n");
755 for (j = 0; graph->complex[i].iterate (j, &c); ++j)
757 dump_constraint (file, c);
758 fprintf (file, "\\l");
760 fprintf (file, "\"]");
762 fprintf (file, ";\n");
765 /* Go over the edges. */
766 fprintf (file, "\n // Edges in the constraint graph:\n");
767 for (i = 1; i < graph->size; i++)
769 unsigned j;
770 bitmap_iterator bi;
771 if (find (i) != i)
772 continue;
773 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
775 unsigned to = find (j);
776 if (i == to)
777 continue;
778 if (i < FIRST_REF_NODE)
779 fprintf (file, "\"%s\"", get_varinfo (i)->name);
780 else
781 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
782 fprintf (file, " -> ");
783 if (to < FIRST_REF_NODE)
784 fprintf (file, "\"%s\"", get_varinfo (to)->name);
785 else
786 fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
787 fprintf (file, ";\n");
791 /* Prints the tail of the dot file. */
792 fprintf (file, "}\n");
795 /* Print out the constraint graph to stderr. */
797 DEBUG_FUNCTION void
798 debug_constraint_graph (void)
800 dump_constraint_graph (stderr);
803 /* SOLVER FUNCTIONS
805 The solver is a simple worklist solver, that works on the following
806 algorithm:
808 sbitmap changed_nodes = all zeroes;
809 changed_count = 0;
810 For each node that is not already collapsed:
811 changed_count++;
812 set bit in changed nodes
814 while (changed_count > 0)
816 compute topological ordering for constraint graph
818 find and collapse cycles in the constraint graph (updating
819 changed if necessary)
821 for each node (n) in the graph in topological order:
822 changed_count--;
824 Process each complex constraint associated with the node,
825 updating changed if necessary.
827 For each outgoing edge from n, propagate the solution from n to
828 the destination of the edge, updating changed as necessary.
830 } */
832 /* Return true if two constraint expressions A and B are equal. */
834 static bool
835 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
837 return a.type == b.type && a.var == b.var && a.offset == b.offset;
840 /* Return true if constraint expression A is less than constraint expression
841 B. This is just arbitrary, but consistent, in order to give them an
842 ordering. */
844 static bool
845 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
847 if (a.type == b.type)
849 if (a.var == b.var)
850 return a.offset < b.offset;
851 else
852 return a.var < b.var;
854 else
855 return a.type < b.type;
858 /* Return true if constraint A is less than constraint B. This is just
859 arbitrary, but consistent, in order to give them an ordering. */
861 static bool
862 constraint_less (const constraint_t &a, const constraint_t &b)
864 if (constraint_expr_less (a->lhs, b->lhs))
865 return true;
866 else if (constraint_expr_less (b->lhs, a->lhs))
867 return false;
868 else
869 return constraint_expr_less (a->rhs, b->rhs);
872 /* Return true if two constraints A and B are equal. */
874 static bool
875 constraint_equal (struct constraint a, struct constraint b)
877 return constraint_expr_equal (a.lhs, b.lhs)
878 && constraint_expr_equal (a.rhs, b.rhs);
882 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
884 static constraint_t
885 constraint_vec_find (vec<constraint_t> vec,
886 struct constraint lookfor)
888 unsigned int place;
889 constraint_t found;
891 if (!vec.exists ())
892 return NULL;
894 place = vec.lower_bound (&lookfor, constraint_less);
895 if (place >= vec.length ())
896 return NULL;
897 found = vec[place];
898 if (!constraint_equal (*found, lookfor))
899 return NULL;
900 return found;
903 /* Union two constraint vectors, TO and FROM. Put the result in TO.
904 Returns true of TO set is changed. */
906 static bool
907 constraint_set_union (vec<constraint_t> *to,
908 vec<constraint_t> *from)
910 int i;
911 constraint_t c;
912 bool any_change = false;
914 FOR_EACH_VEC_ELT (*from, i, c)
916 if (constraint_vec_find (*to, *c) == NULL)
918 unsigned int place = to->lower_bound (c, constraint_less);
919 to->safe_insert (place, c);
920 any_change = true;
923 return any_change;
926 /* Expands the solution in SET to all sub-fields of variables included. */
928 static bitmap
929 solution_set_expand (bitmap set, bitmap *expanded)
931 bitmap_iterator bi;
932 unsigned j;
934 if (*expanded)
935 return *expanded;
937 *expanded = BITMAP_ALLOC (&iteration_obstack);
939 /* In a first pass expand to the head of the variables we need to
940 add all sub-fields off. This avoids quadratic behavior. */
941 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
943 varinfo_t v = get_varinfo (j);
944 if (v->is_artificial_var
945 || v->is_full_var)
946 continue;
947 bitmap_set_bit (*expanded, v->head);
950 /* In the second pass now expand all head variables with subfields. */
951 EXECUTE_IF_SET_IN_BITMAP (*expanded, 0, j, bi)
953 varinfo_t v = get_varinfo (j);
954 if (v->head != j)
955 continue;
956 for (v = vi_next (v); v != NULL; v = vi_next (v))
957 bitmap_set_bit (*expanded, v->id);
960 /* And finally set the rest of the bits from SET. */
961 bitmap_ior_into (*expanded, set);
963 return *expanded;
966 /* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
967 process. */
969 static bool
970 set_union_with_increment (bitmap to, bitmap delta, HOST_WIDE_INT inc,
971 bitmap *expanded_delta)
973 bool changed = false;
974 bitmap_iterator bi;
975 unsigned int i;
977 /* If the solution of DELTA contains anything it is good enough to transfer
978 this to TO. */
979 if (bitmap_bit_p (delta, anything_id))
980 return bitmap_set_bit (to, anything_id);
982 /* If the offset is unknown we have to expand the solution to
983 all subfields. */
984 if (inc == UNKNOWN_OFFSET)
986 delta = solution_set_expand (delta, expanded_delta);
987 changed |= bitmap_ior_into (to, delta);
988 return changed;
991 /* For non-zero offset union the offsetted solution into the destination. */
992 EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
994 varinfo_t vi = get_varinfo (i);
996 /* If this is a variable with just one field just set its bit
997 in the result. */
998 if (vi->is_artificial_var
999 || vi->is_unknown_size_var
1000 || vi->is_full_var)
1001 changed |= bitmap_set_bit (to, i);
1002 else
1004 HOST_WIDE_INT fieldoffset = vi->offset + inc;
1005 unsigned HOST_WIDE_INT size = vi->size;
1007 /* If the offset makes the pointer point to before the
1008 variable use offset zero for the field lookup. */
1009 if (fieldoffset < 0)
1010 vi = get_varinfo (vi->head);
1011 else
1012 vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
1016 changed |= bitmap_set_bit (to, vi->id);
1017 if (vi->is_full_var
1018 || vi->next == 0)
1019 break;
1021 /* We have to include all fields that overlap the current field
1022 shifted by inc. */
1023 vi = vi_next (vi);
1025 while (vi->offset < fieldoffset + size);
1029 return changed;
1032 /* Insert constraint C into the list of complex constraints for graph
1033 node VAR. */
1035 static void
1036 insert_into_complex (constraint_graph_t graph,
1037 unsigned int var, constraint_t c)
1039 vec<constraint_t> complex = graph->complex[var];
1040 unsigned int place = complex.lower_bound (c, constraint_less);
1042 /* Only insert constraints that do not already exist. */
1043 if (place >= complex.length ()
1044 || !constraint_equal (*c, *complex[place]))
1045 graph->complex[var].safe_insert (place, c);
1049 /* Condense two variable nodes into a single variable node, by moving
1050 all associated info from FROM to TO. Returns true if TO node's
1051 constraint set changes after the merge. */
1053 static bool
1054 merge_node_constraints (constraint_graph_t graph, unsigned int to,
1055 unsigned int from)
1057 unsigned int i;
1058 constraint_t c;
1059 bool any_change = false;
1061 gcc_checking_assert (find (from) == to);
1063 /* Move all complex constraints from src node into to node */
1064 FOR_EACH_VEC_ELT (graph->complex[from], i, c)
1066 /* In complex constraints for node FROM, we may have either
1067 a = *FROM, and *FROM = a, or an offseted constraint which are
1068 always added to the rhs node's constraints. */
1070 if (c->rhs.type == DEREF)
1071 c->rhs.var = to;
1072 else if (c->lhs.type == DEREF)
1073 c->lhs.var = to;
1074 else
1075 c->rhs.var = to;
1078 any_change = constraint_set_union (&graph->complex[to],
1079 &graph->complex[from]);
1080 graph->complex[from].release ();
1081 return any_change;
1085 /* Remove edges involving NODE from GRAPH. */
1087 static void
1088 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1090 if (graph->succs[node])
1091 BITMAP_FREE (graph->succs[node]);
1094 /* Merge GRAPH nodes FROM and TO into node TO. */
1096 static void
1097 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1098 unsigned int from)
1100 if (graph->indirect_cycles[from] != -1)
1102 /* If we have indirect cycles with the from node, and we have
1103 none on the to node, the to node has indirect cycles from the
1104 from node now that they are unified.
1105 If indirect cycles exist on both, unify the nodes that they
1106 are in a cycle with, since we know they are in a cycle with
1107 each other. */
1108 if (graph->indirect_cycles[to] == -1)
1109 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1112 /* Merge all the successor edges. */
1113 if (graph->succs[from])
1115 if (!graph->succs[to])
1116 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1117 bitmap_ior_into (graph->succs[to],
1118 graph->succs[from]);
1121 clear_edges_for_node (graph, from);
1125 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1126 it doesn't exist in the graph already. */
1128 static void
1129 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1130 unsigned int from)
1132 if (to == from)
1133 return;
1135 if (!graph->implicit_preds[to])
1136 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1138 if (bitmap_set_bit (graph->implicit_preds[to], from))
1139 stats.num_implicit_edges++;
1142 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1143 it doesn't exist in the graph already.
1144 Return false if the edge already existed, true otherwise. */
1146 static void
1147 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1148 unsigned int from)
1150 if (!graph->preds[to])
1151 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1152 bitmap_set_bit (graph->preds[to], from);
1155 /* Add a graph edge to GRAPH, going from FROM to TO if
1156 it doesn't exist in the graph already.
1157 Return false if the edge already existed, true otherwise. */
1159 static bool
1160 add_graph_edge (constraint_graph_t graph, unsigned int to,
1161 unsigned int from)
1163 if (to == from)
1165 return false;
1167 else
1169 bool r = false;
1171 if (!graph->succs[from])
1172 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1173 if (bitmap_set_bit (graph->succs[from], to))
1175 r = true;
1176 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1177 stats.num_edges++;
1179 return r;
1184 /* Initialize the constraint graph structure to contain SIZE nodes. */
1186 static void
1187 init_graph (unsigned int size)
1189 unsigned int j;
1191 graph = XCNEW (struct constraint_graph);
1192 graph->size = size;
1193 graph->succs = XCNEWVEC (bitmap, graph->size);
1194 graph->indirect_cycles = XNEWVEC (int, graph->size);
1195 graph->rep = XNEWVEC (unsigned int, graph->size);
1196 /* ??? Macros do not support template types with multiple arguments,
1197 so we use a typedef to work around it. */
1198 typedef vec<constraint_t> vec_constraint_t_heap;
1199 graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1200 graph->pe = XCNEWVEC (unsigned int, graph->size);
1201 graph->pe_rep = XNEWVEC (int, graph->size);
1203 for (j = 0; j < graph->size; j++)
1205 graph->rep[j] = j;
1206 graph->pe_rep[j] = -1;
1207 graph->indirect_cycles[j] = -1;
1211 /* Build the constraint graph, adding only predecessor edges right now. */
1213 static void
1214 build_pred_graph (void)
1216 int i;
1217 constraint_t c;
1218 unsigned int j;
1220 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1221 graph->preds = XCNEWVEC (bitmap, graph->size);
1222 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1223 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1224 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1225 graph->points_to = XCNEWVEC (bitmap, graph->size);
1226 graph->eq_rep = XNEWVEC (int, graph->size);
1227 graph->direct_nodes = sbitmap_alloc (graph->size);
1228 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1229 bitmap_clear (graph->direct_nodes);
1231 for (j = 1; j < FIRST_REF_NODE; j++)
1233 if (!get_varinfo (j)->is_special_var)
1234 bitmap_set_bit (graph->direct_nodes, j);
1237 for (j = 0; j < graph->size; j++)
1238 graph->eq_rep[j] = -1;
1240 for (j = 0; j < varmap.length (); j++)
1241 graph->indirect_cycles[j] = -1;
1243 FOR_EACH_VEC_ELT (constraints, i, c)
1245 struct constraint_expr lhs = c->lhs;
1246 struct constraint_expr rhs = c->rhs;
1247 unsigned int lhsvar = lhs.var;
1248 unsigned int rhsvar = rhs.var;
1250 if (lhs.type == DEREF)
1252 /* *x = y. */
1253 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1254 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1256 else if (rhs.type == DEREF)
1258 /* x = *y */
1259 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1260 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1261 else
1262 bitmap_clear_bit (graph->direct_nodes, lhsvar);
1264 else if (rhs.type == ADDRESSOF)
1266 varinfo_t v;
1268 /* x = &y */
1269 if (graph->points_to[lhsvar] == NULL)
1270 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1271 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1273 if (graph->pointed_by[rhsvar] == NULL)
1274 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1275 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1277 /* Implicitly, *x = y */
1278 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1280 /* All related variables are no longer direct nodes. */
1281 bitmap_clear_bit (graph->direct_nodes, rhsvar);
1282 v = get_varinfo (rhsvar);
1283 if (!v->is_full_var)
1285 v = get_varinfo (v->head);
1288 bitmap_clear_bit (graph->direct_nodes, v->id);
1289 v = vi_next (v);
1291 while (v != NULL);
1293 bitmap_set_bit (graph->address_taken, rhsvar);
1295 else if (lhsvar > anything_id
1296 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1298 /* x = y */
1299 add_pred_graph_edge (graph, lhsvar, rhsvar);
1300 /* Implicitly, *x = *y */
1301 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1302 FIRST_REF_NODE + rhsvar);
1304 else if (lhs.offset != 0 || rhs.offset != 0)
1306 if (rhs.offset != 0)
1307 bitmap_clear_bit (graph->direct_nodes, lhs.var);
1308 else if (lhs.offset != 0)
1309 bitmap_clear_bit (graph->direct_nodes, rhs.var);
1314 /* Build the constraint graph, adding successor edges. */
1316 static void
1317 build_succ_graph (void)
1319 unsigned i, t;
1320 constraint_t c;
1322 FOR_EACH_VEC_ELT (constraints, i, c)
1324 struct constraint_expr lhs;
1325 struct constraint_expr rhs;
1326 unsigned int lhsvar;
1327 unsigned int rhsvar;
1329 if (!c)
1330 continue;
1332 lhs = c->lhs;
1333 rhs = c->rhs;
1334 lhsvar = find (lhs.var);
1335 rhsvar = find (rhs.var);
1337 if (lhs.type == DEREF)
1339 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1340 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1342 else if (rhs.type == DEREF)
1344 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1345 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1347 else if (rhs.type == ADDRESSOF)
1349 /* x = &y */
1350 gcc_checking_assert (find (rhs.var) == rhs.var);
1351 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1353 else if (lhsvar > anything_id
1354 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1356 add_graph_edge (graph, lhsvar, rhsvar);
1360 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1361 receive pointers. */
1362 t = find (storedanything_id);
1363 for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1365 if (!bitmap_bit_p (graph->direct_nodes, i)
1366 && get_varinfo (i)->may_have_pointers)
1367 add_graph_edge (graph, find (i), t);
1370 /* Everything stored to ANYTHING also potentially escapes. */
1371 add_graph_edge (graph, find (escaped_id), t);
1375 /* Changed variables on the last iteration. */
1376 static bitmap changed;
1378 /* Strongly Connected Component visitation info. */
1380 struct scc_info
1382 scc_info (size_t size);
1383 ~scc_info ();
1385 auto_sbitmap visited;
1386 auto_sbitmap deleted;
1387 unsigned int *dfs;
1388 unsigned int *node_mapping;
1389 int current_index;
1390 auto_vec<unsigned> scc_stack;
1394 /* Recursive routine to find strongly connected components in GRAPH.
1395 SI is the SCC info to store the information in, and N is the id of current
1396 graph node we are processing.
1398 This is Tarjan's strongly connected component finding algorithm, as
1399 modified by Nuutila to keep only non-root nodes on the stack.
1400 The algorithm can be found in "On finding the strongly connected
1401 connected components in a directed graph" by Esko Nuutila and Eljas
1402 Soisalon-Soininen, in Information Processing Letters volume 49,
1403 number 1, pages 9-14. */
1405 static void
1406 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1408 unsigned int i;
1409 bitmap_iterator bi;
1410 unsigned int my_dfs;
1412 bitmap_set_bit (si->visited, n);
1413 si->dfs[n] = si->current_index ++;
1414 my_dfs = si->dfs[n];
1416 /* Visit all the successors. */
1417 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1419 unsigned int w;
1421 if (i > LAST_REF_NODE)
1422 break;
1424 w = find (i);
1425 if (bitmap_bit_p (si->deleted, w))
1426 continue;
1428 if (!bitmap_bit_p (si->visited, w))
1429 scc_visit (graph, si, w);
1431 unsigned int t = find (w);
1432 gcc_checking_assert (find (n) == n);
1433 if (si->dfs[t] < si->dfs[n])
1434 si->dfs[n] = si->dfs[t];
1437 /* See if any components have been identified. */
1438 if (si->dfs[n] == my_dfs)
1440 if (si->scc_stack.length () > 0
1441 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1443 bitmap scc = BITMAP_ALLOC (NULL);
1444 unsigned int lowest_node;
1445 bitmap_iterator bi;
1447 bitmap_set_bit (scc, n);
1449 while (si->scc_stack.length () != 0
1450 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1452 unsigned int w = si->scc_stack.pop ();
1454 bitmap_set_bit (scc, w);
1457 lowest_node = bitmap_first_set_bit (scc);
1458 gcc_assert (lowest_node < FIRST_REF_NODE);
1460 /* Collapse the SCC nodes into a single node, and mark the
1461 indirect cycles. */
1462 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1464 if (i < FIRST_REF_NODE)
1466 if (unite (lowest_node, i))
1467 unify_nodes (graph, lowest_node, i, false);
1469 else
1471 unite (lowest_node, i);
1472 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1476 bitmap_set_bit (si->deleted, n);
1478 else
1479 si->scc_stack.safe_push (n);
1482 /* Unify node FROM into node TO, updating the changed count if
1483 necessary when UPDATE_CHANGED is true. */
1485 static void
1486 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1487 bool update_changed)
1489 gcc_checking_assert (to != from && find (to) == to);
1491 if (dump_file && (dump_flags & TDF_DETAILS))
1492 fprintf (dump_file, "Unifying %s to %s\n",
1493 get_varinfo (from)->name,
1494 get_varinfo (to)->name);
1496 if (update_changed)
1497 stats.unified_vars_dynamic++;
1498 else
1499 stats.unified_vars_static++;
1501 merge_graph_nodes (graph, to, from);
1502 if (merge_node_constraints (graph, to, from))
1504 if (update_changed)
1505 bitmap_set_bit (changed, to);
1508 /* Mark TO as changed if FROM was changed. If TO was already marked
1509 as changed, decrease the changed count. */
1511 if (update_changed
1512 && bitmap_clear_bit (changed, from))
1513 bitmap_set_bit (changed, to);
1514 varinfo_t fromvi = get_varinfo (from);
1515 if (fromvi->solution)
1517 /* If the solution changes because of the merging, we need to mark
1518 the variable as changed. */
1519 varinfo_t tovi = get_varinfo (to);
1520 if (bitmap_ior_into (tovi->solution, fromvi->solution))
1522 if (update_changed)
1523 bitmap_set_bit (changed, to);
1526 BITMAP_FREE (fromvi->solution);
1527 if (fromvi->oldsolution)
1528 BITMAP_FREE (fromvi->oldsolution);
1530 if (stats.iterations > 0
1531 && tovi->oldsolution)
1532 BITMAP_FREE (tovi->oldsolution);
1534 if (graph->succs[to])
1535 bitmap_clear_bit (graph->succs[to], to);
1538 /* Information needed to compute the topological ordering of a graph. */
1540 struct topo_info
1542 /* sbitmap of visited nodes. */
1543 sbitmap visited;
1544 /* Array that stores the topological order of the graph, *in
1545 reverse*. */
1546 vec<unsigned> topo_order;
1550 /* Initialize and return a topological info structure. */
1552 static struct topo_info *
1553 init_topo_info (void)
1555 size_t size = graph->size;
1556 struct topo_info *ti = XNEW (struct topo_info);
1557 ti->visited = sbitmap_alloc (size);
1558 bitmap_clear (ti->visited);
1559 ti->topo_order.create (1);
1560 return ti;
1564 /* Free the topological sort info pointed to by TI. */
1566 static void
1567 free_topo_info (struct topo_info *ti)
1569 sbitmap_free (ti->visited);
1570 ti->topo_order.release ();
1571 free (ti);
1574 /* Visit the graph in topological order, and store the order in the
1575 topo_info structure. */
1577 static void
1578 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1579 unsigned int n)
1581 bitmap_iterator bi;
1582 unsigned int j;
1584 bitmap_set_bit (ti->visited, n);
1586 if (graph->succs[n])
1587 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1589 if (!bitmap_bit_p (ti->visited, j))
1590 topo_visit (graph, ti, j);
1593 ti->topo_order.safe_push (n);
1596 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1597 starting solution for y. */
1599 static void
1600 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1601 bitmap delta, bitmap *expanded_delta)
1603 unsigned int lhs = c->lhs.var;
1604 bool flag = false;
1605 bitmap sol = get_varinfo (lhs)->solution;
1606 unsigned int j;
1607 bitmap_iterator bi;
1608 HOST_WIDE_INT roffset = c->rhs.offset;
1610 /* Our IL does not allow this. */
1611 gcc_checking_assert (c->lhs.offset == 0);
1613 /* If the solution of Y contains anything it is good enough to transfer
1614 this to the LHS. */
1615 if (bitmap_bit_p (delta, anything_id))
1617 flag |= bitmap_set_bit (sol, anything_id);
1618 goto done;
1621 /* If we do not know at with offset the rhs is dereferenced compute
1622 the reachability set of DELTA, conservatively assuming it is
1623 dereferenced at all valid offsets. */
1624 if (roffset == UNKNOWN_OFFSET)
1626 delta = solution_set_expand (delta, expanded_delta);
1627 /* No further offset processing is necessary. */
1628 roffset = 0;
1631 /* For each variable j in delta (Sol(y)), add
1632 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1633 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1635 varinfo_t v = get_varinfo (j);
1636 HOST_WIDE_INT fieldoffset = v->offset + roffset;
1637 unsigned HOST_WIDE_INT size = v->size;
1638 unsigned int t;
1640 if (v->is_full_var)
1642 else if (roffset != 0)
1644 if (fieldoffset < 0)
1645 v = get_varinfo (v->head);
1646 else
1647 v = first_or_preceding_vi_for_offset (v, fieldoffset);
1650 /* We have to include all fields that overlap the current field
1651 shifted by roffset. */
1654 t = find (v->id);
1656 /* Adding edges from the special vars is pointless.
1657 They don't have sets that can change. */
1658 if (get_varinfo (t)->is_special_var)
1659 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1660 /* Merging the solution from ESCAPED needlessly increases
1661 the set. Use ESCAPED as representative instead. */
1662 else if (v->id == escaped_id)
1663 flag |= bitmap_set_bit (sol, escaped_id);
1664 else if (v->may_have_pointers
1665 && add_graph_edge (graph, lhs, t))
1666 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1668 if (v->is_full_var
1669 || v->next == 0)
1670 break;
1672 v = vi_next (v);
1674 while (v->offset < fieldoffset + size);
1677 done:
1678 /* If the LHS solution changed, mark the var as changed. */
1679 if (flag)
1681 get_varinfo (lhs)->solution = sol;
1682 bitmap_set_bit (changed, lhs);
1686 /* Process a constraint C that represents *(x + off) = y using DELTA
1687 as the starting solution for x. */
1689 static void
1690 do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
1692 unsigned int rhs = c->rhs.var;
1693 bitmap sol = get_varinfo (rhs)->solution;
1694 unsigned int j;
1695 bitmap_iterator bi;
1696 HOST_WIDE_INT loff = c->lhs.offset;
1697 bool escaped_p = false;
1699 /* Our IL does not allow this. */
1700 gcc_checking_assert (c->rhs.offset == 0);
1702 /* If the solution of y contains ANYTHING simply use the ANYTHING
1703 solution. This avoids needlessly increasing the points-to sets. */
1704 if (bitmap_bit_p (sol, anything_id))
1705 sol = get_varinfo (find (anything_id))->solution;
1707 /* If the solution for x contains ANYTHING we have to merge the
1708 solution of y into all pointer variables which we do via
1709 STOREDANYTHING. */
1710 if (bitmap_bit_p (delta, anything_id))
1712 unsigned t = find (storedanything_id);
1713 if (add_graph_edge (graph, t, rhs))
1715 if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1716 bitmap_set_bit (changed, t);
1718 return;
1721 /* If we do not know at with offset the rhs is dereferenced compute
1722 the reachability set of DELTA, conservatively assuming it is
1723 dereferenced at all valid offsets. */
1724 if (loff == UNKNOWN_OFFSET)
1726 delta = solution_set_expand (delta, expanded_delta);
1727 loff = 0;
1730 /* For each member j of delta (Sol(x)), add an edge from y to j and
1731 union Sol(y) into Sol(j) */
1732 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1734 varinfo_t v = get_varinfo (j);
1735 unsigned int t;
1736 HOST_WIDE_INT fieldoffset = v->offset + loff;
1737 unsigned HOST_WIDE_INT size = v->size;
1739 if (v->is_full_var)
1741 else if (loff != 0)
1743 if (fieldoffset < 0)
1744 v = get_varinfo (v->head);
1745 else
1746 v = first_or_preceding_vi_for_offset (v, fieldoffset);
1749 /* We have to include all fields that overlap the current field
1750 shifted by loff. */
1753 if (v->may_have_pointers)
1755 /* If v is a global variable then this is an escape point. */
1756 if (v->is_global_var
1757 && !escaped_p)
1759 t = find (escaped_id);
1760 if (add_graph_edge (graph, t, rhs)
1761 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1762 bitmap_set_bit (changed, t);
1763 /* Enough to let rhs escape once. */
1764 escaped_p = true;
1767 if (v->is_special_var)
1768 break;
1770 t = find (v->id);
1771 if (add_graph_edge (graph, t, rhs)
1772 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1773 bitmap_set_bit (changed, t);
1776 if (v->is_full_var
1777 || v->next == 0)
1778 break;
1780 v = vi_next (v);
1782 while (v->offset < fieldoffset + size);
1786 /* Handle a non-simple (simple meaning requires no iteration),
1787 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1789 static void
1790 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
1791 bitmap *expanded_delta)
1793 if (c->lhs.type == DEREF)
1795 if (c->rhs.type == ADDRESSOF)
1797 gcc_unreachable ();
1799 else
1801 /* *x = y */
1802 do_ds_constraint (c, delta, expanded_delta);
1805 else if (c->rhs.type == DEREF)
1807 /* x = *y */
1808 if (!(get_varinfo (c->lhs.var)->is_special_var))
1809 do_sd_constraint (graph, c, delta, expanded_delta);
1811 else
1813 bitmap tmp;
1814 bool flag = false;
1816 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
1817 && c->rhs.offset != 0 && c->lhs.offset == 0);
1818 tmp = get_varinfo (c->lhs.var)->solution;
1820 flag = set_union_with_increment (tmp, delta, c->rhs.offset,
1821 expanded_delta);
1823 if (flag)
1824 bitmap_set_bit (changed, c->lhs.var);
1828 /* Initialize and return a new SCC info structure. */
1830 scc_info::scc_info (size_t size) :
1831 visited (size), deleted (size), current_index (0), scc_stack (1)
1833 bitmap_clear (visited);
1834 bitmap_clear (deleted);
1835 node_mapping = XNEWVEC (unsigned int, size);
1836 dfs = XCNEWVEC (unsigned int, size);
1838 for (size_t i = 0; i < size; i++)
1839 node_mapping[i] = i;
1842 /* Free an SCC info structure pointed to by SI */
1844 scc_info::~scc_info ()
1846 free (node_mapping);
1847 free (dfs);
1851 /* Find indirect cycles in GRAPH that occur, using strongly connected
1852 components, and note them in the indirect cycles map.
1854 This technique comes from Ben Hardekopf and Calvin Lin,
1855 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1856 Lines of Code", submitted to PLDI 2007. */
1858 static void
1859 find_indirect_cycles (constraint_graph_t graph)
1861 unsigned int i;
1862 unsigned int size = graph->size;
1863 scc_info si (size);
1865 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1866 if (!bitmap_bit_p (si.visited, i) && find (i) == i)
1867 scc_visit (graph, &si, i);
1870 /* Compute a topological ordering for GRAPH, and store the result in the
1871 topo_info structure TI. */
1873 static void
1874 compute_topo_order (constraint_graph_t graph,
1875 struct topo_info *ti)
1877 unsigned int i;
1878 unsigned int size = graph->size;
1880 for (i = 0; i != size; ++i)
1881 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1882 topo_visit (graph, ti, i);
1885 /* Structure used to for hash value numbering of pointer equivalence
1886 classes. */
1888 typedef struct equiv_class_label
1890 hashval_t hashcode;
1891 unsigned int equivalence_class;
1892 bitmap labels;
1893 } *equiv_class_label_t;
1894 typedef const struct equiv_class_label *const_equiv_class_label_t;
1896 /* Equiv_class_label hashtable helpers. */
1898 struct equiv_class_hasher : free_ptr_hash <equiv_class_label>
1900 static inline hashval_t hash (const equiv_class_label *);
1901 static inline bool equal (const equiv_class_label *,
1902 const equiv_class_label *);
1905 /* Hash function for a equiv_class_label_t */
1907 inline hashval_t
1908 equiv_class_hasher::hash (const equiv_class_label *ecl)
1910 return ecl->hashcode;
1913 /* Equality function for two equiv_class_label_t's. */
1915 inline bool
1916 equiv_class_hasher::equal (const equiv_class_label *eql1,
1917 const equiv_class_label *eql2)
1919 return (eql1->hashcode == eql2->hashcode
1920 && bitmap_equal_p (eql1->labels, eql2->labels));
1923 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1924 classes. */
1925 static hash_table<equiv_class_hasher> *pointer_equiv_class_table;
1927 /* A hashtable for mapping a bitmap of labels->location equivalence
1928 classes. */
1929 static hash_table<equiv_class_hasher> *location_equiv_class_table;
1931 /* Lookup a equivalence class in TABLE by the bitmap of LABELS with
1932 hash HAS it contains. Sets *REF_LABELS to the bitmap LABELS
1933 is equivalent to. */
1935 static equiv_class_label *
1936 equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
1937 bitmap labels)
1939 equiv_class_label **slot;
1940 equiv_class_label ecl;
1942 ecl.labels = labels;
1943 ecl.hashcode = bitmap_hash (labels);
1944 slot = table->find_slot (&ecl, INSERT);
1945 if (!*slot)
1947 *slot = XNEW (struct equiv_class_label);
1948 (*slot)->labels = labels;
1949 (*slot)->hashcode = ecl.hashcode;
1950 (*slot)->equivalence_class = 0;
1953 return *slot;
1956 /* Perform offline variable substitution.
1958 This is a worst case quadratic time way of identifying variables
1959 that must have equivalent points-to sets, including those caused by
1960 static cycles, and single entry subgraphs, in the constraint graph.
1962 The technique is described in "Exploiting Pointer and Location
1963 Equivalence to Optimize Pointer Analysis. In the 14th International
1964 Static Analysis Symposium (SAS), August 2007." It is known as the
1965 "HU" algorithm, and is equivalent to value numbering the collapsed
1966 constraint graph including evaluating unions.
1968 The general method of finding equivalence classes is as follows:
1969 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1970 Initialize all non-REF nodes to be direct nodes.
1971 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1972 variable}
1973 For each constraint containing the dereference, we also do the same
1974 thing.
1976 We then compute SCC's in the graph and unify nodes in the same SCC,
1977 including pts sets.
1979 For each non-collapsed node x:
1980 Visit all unvisited explicit incoming edges.
   Ignoring all non-pointers, set pts(x) = Union of pts(y) for each y
   with an edge y->x.
1983 Lookup the equivalence class for pts(x).
1984 If we found one, equivalence_class(x) = found class.
1985 Otherwise, equivalence_class(x) = new class, and new_class is
1986 added to the lookup table.
1988 All direct nodes with the same equivalence class can be replaced
1989 with a single representative node.
1990 All unlabeled nodes (label == 0) are not pointers and all edges
1991 involving them can be eliminated.
1992 We perform these optimizations during rewrite_constraints
1994 In addition to pointer equivalence class finding, we also perform
1995 location equivalence class finding. This is the set of variables
1996 that always appear together in points-to sets. We use this to
1997 compress the size of the points-to sets. */
/* Current maximum pointer equivalence class id.  */
static int pointer_equiv_class;

/* Current maximum location equivalence class id.  */
static int location_equiv_class;
2005 /* Recursive routine to find strongly connected components in GRAPH,
2006 and label it's nodes with DFS numbers. */
2008 static void
2009 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2011 unsigned int i;
2012 bitmap_iterator bi;
2013 unsigned int my_dfs;
2015 gcc_checking_assert (si->node_mapping[n] == n);
2016 bitmap_set_bit (si->visited, n);
2017 si->dfs[n] = si->current_index ++;
2018 my_dfs = si->dfs[n];
2020 /* Visit all the successors. */
2021 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2023 unsigned int w = si->node_mapping[i];
2025 if (bitmap_bit_p (si->deleted, w))
2026 continue;
2028 if (!bitmap_bit_p (si->visited, w))
2029 condense_visit (graph, si, w);
2031 unsigned int t = si->node_mapping[w];
2032 gcc_checking_assert (si->node_mapping[n] == n);
2033 if (si->dfs[t] < si->dfs[n])
2034 si->dfs[n] = si->dfs[t];
2037 /* Visit all the implicit predecessors. */
2038 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2040 unsigned int w = si->node_mapping[i];
2042 if (bitmap_bit_p (si->deleted, w))
2043 continue;
2045 if (!bitmap_bit_p (si->visited, w))
2046 condense_visit (graph, si, w);
2048 unsigned int t = si->node_mapping[w];
2049 gcc_assert (si->node_mapping[n] == n);
2050 if (si->dfs[t] < si->dfs[n])
2051 si->dfs[n] = si->dfs[t];
2054 /* See if any components have been identified. */
2055 if (si->dfs[n] == my_dfs)
2057 while (si->scc_stack.length () != 0
2058 && si->dfs[si->scc_stack.last ()] >= my_dfs)
2060 unsigned int w = si->scc_stack.pop ();
2061 si->node_mapping[w] = n;
2063 if (!bitmap_bit_p (graph->direct_nodes, w))
2064 bitmap_clear_bit (graph->direct_nodes, n);
2066 /* Unify our nodes. */
2067 if (graph->preds[w])
2069 if (!graph->preds[n])
2070 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2071 bitmap_ior_into (graph->preds[n], graph->preds[w]);
2073 if (graph->implicit_preds[w])
2075 if (!graph->implicit_preds[n])
2076 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2077 bitmap_ior_into (graph->implicit_preds[n],
2078 graph->implicit_preds[w]);
2080 if (graph->points_to[w])
2082 if (!graph->points_to[n])
2083 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2084 bitmap_ior_into (graph->points_to[n],
2085 graph->points_to[w]);
2088 bitmap_set_bit (si->deleted, n);
2090 else
2091 si->scc_stack.safe_push (n);
2094 /* Label pointer equivalences.
2096 This performs a value numbering of the constraint graph to
2097 discover which variables will always have the same points-to sets
2098 under the current set of constraints.
2100 The way it value numbers is to store the set of points-to bits
2101 generated by the constraints and graph edges. This is just used as a
2102 hash and equality comparison. The *actual set of points-to bits* is
2103 completely irrelevant, in that we don't care about being able to
2104 extract them later.
2106 The equality values (currently bitmaps) just have to satisfy a few
2107 constraints, the main ones being:
2108 1. The combining operation must be order independent.
2109 2. The end result of a given set of operations must be unique iff the
2110 combination of input values is unique
2111 3. Hashable. */
2113 static void
2114 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2116 unsigned int i, first_pred;
2117 bitmap_iterator bi;
2119 bitmap_set_bit (si->visited, n);
2121 /* Label and union our incoming edges's points to sets. */
2122 first_pred = -1U;
2123 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2125 unsigned int w = si->node_mapping[i];
2126 if (!bitmap_bit_p (si->visited, w))
2127 label_visit (graph, si, w);
2129 /* Skip unused edges */
2130 if (w == n || graph->pointer_label[w] == 0)
2131 continue;
2133 if (graph->points_to[w])
2135 if (!graph->points_to[n])
2137 if (first_pred == -1U)
2138 first_pred = w;
2139 else
2141 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2142 bitmap_ior (graph->points_to[n],
2143 graph->points_to[first_pred],
2144 graph->points_to[w]);
2147 else
2148 bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
2152 /* Indirect nodes get fresh variables and a new pointer equiv class. */
2153 if (!bitmap_bit_p (graph->direct_nodes, n))
2155 if (!graph->points_to[n])
2157 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2158 if (first_pred != -1U)
2159 bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
2161 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2162 graph->pointer_label[n] = pointer_equiv_class++;
2163 equiv_class_label_t ecl;
2164 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2165 graph->points_to[n]);
2166 ecl->equivalence_class = graph->pointer_label[n];
2167 return;
2170 /* If there was only a single non-empty predecessor the pointer equiv
2171 class is the same. */
2172 if (!graph->points_to[n])
2174 if (first_pred != -1U)
2176 graph->pointer_label[n] = graph->pointer_label[first_pred];
2177 graph->points_to[n] = graph->points_to[first_pred];
2179 return;
2182 if (!bitmap_empty_p (graph->points_to[n]))
2184 equiv_class_label_t ecl;
2185 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2186 graph->points_to[n]);
2187 if (ecl->equivalence_class == 0)
2188 ecl->equivalence_class = pointer_equiv_class++;
2189 else
2191 BITMAP_FREE (graph->points_to[n]);
2192 graph->points_to[n] = ecl->labels;
2194 graph->pointer_label[n] = ecl->equivalence_class;
/* Print the pred graph in dot format.  Nodes below FIRST_REF_NODE are
   printed as "name"; the corresponding dereference nodes are printed
   as "*name".  Only SCC representatives (si->node_mapping[i] == i)
   are emitted.  */

static void
dump_pred_graph (struct scc_info *si, FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      /* Skip nodes that have been collapsed into their SCC leader.  */
      if (si->node_mapping[i] != i)
	continue;
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      /* Annotate the node with its points-to set, if any.  */
      if (graph->points_to[i]
	  && !bitmap_empty_p (graph->points_to[i]))
	{
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
	  else
	    fprintf (file, "[label=\"*%s = {",
		     get_varinfo (i - FIRST_REF_NODE)->name);
	  unsigned j;
	  bitmap_iterator bi;
	  EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
	    fprintf (file, " %d", j);
	  fprintf (file, " }\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (si->node_mapping[i] != i)
	continue;
      /* Each predecessor edge is printed as "pred -> node", with the
	 predecessor mapped to its SCC leader.  */
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
	{
	  unsigned from = si->node_mapping[j];
	  if (from < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (from)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
/* Perform offline variable substitution, discovering equivalence
   classes, and eliminating non-pointer variables.

   Returns the scc_info holding the node mapping; the caller is
   responsible for releasing it via free_var_substitution_info.  */

static struct scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  scc_info *si = new scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  /* Equivalence-class labels start at 1; label 0 is reserved to mean
     "non-pointer" (see the elimination loop at the bottom).  */
  pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
  location_equiv_class_table
    = new hash_table<equiv_class_hasher> (511);
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      condense_visit (graph, si, si->node_mapping[i]);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
	       "in dot format:\n");
      dump_pred_graph (si, dump_file);
      fprintf (dump_file, "\n\n");
    }

  bitmap_clear (si->visited);
  /* Label the nodes for pointer equivalences.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      bitmap pointed_by;
      bitmap_iterator bi;
      unsigned int j;

      if (!graph->pointed_by[i])
	continue;
      pointed_by = BITMAP_ALLOC (&iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	{
	  bitmap_set_bit (pointed_by,
			  graph->pointer_label[si->node_mapping[j]]);
	}
      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = location_equiv_class++;
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Found location equivalence for node %s\n",
		     get_varinfo (i)->name);
	  /* The table already owns an equal bitmap; ours is redundant.  */
	  BITMAP_FREE (pointed_by);
	}
      graph->loc_label[i] = ecl->equivalence_class;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 1; i < FIRST_REF_NODE; i++)
      {
	unsigned j = si->node_mapping[i];
	if (j != i)
	  {
	    fprintf (dump_file, "%s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "Direct" : "Indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file, " mapped to SCC leader node id %d ", j);
	    if (j < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
	    else
	      fprintf (dump_file, "\"*%s\"\n",
		       get_varinfo (j - FIRST_REF_NODE)->name);
	  }
	else
	  {
	    fprintf (dump_file,
		     "Equivalence classes for %s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "direct" : "indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file,
		     ": pointer %d, location %d\n",
		     graph->pointer_label[i], graph->loc_label[i]);
	  }
      }

  /* Quickly eliminate our non-pointer variables.  A pointer label of
     zero means the node was never labeled as pointing to anything.  */

  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      if (graph->pointer_label[node] == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }

  return si;
}
2406 /* Free information that was only necessary for variable
2407 substitution. */
2409 static void
2410 free_var_substitution_info (struct scc_info *si)
2412 delete si;
2413 free (graph->pointer_label);
2414 free (graph->loc_label);
2415 free (graph->pointed_by);
2416 free (graph->points_to);
2417 free (graph->eq_rep);
2418 sbitmap_free (graph->direct_nodes);
2419 delete pointer_equiv_class_table;
2420 pointer_equiv_class_table = NULL;
2421 delete location_equiv_class_table;
2422 location_equiv_class_table = NULL;
2423 bitmap_obstack_release (&iteration_obstack);
2426 /* Return an existing node that is equivalent to NODE, which has
2427 equivalence class LABEL, if one exists. Return NODE otherwise. */
2429 static unsigned int
2430 find_equivalent_node (constraint_graph_t graph,
2431 unsigned int node, unsigned int label)
2433 /* If the address version of this variable is unused, we can
2434 substitute it for anything else with the same label.
2435 Otherwise, we know the pointers are equivalent, but not the
2436 locations, and we can unite them later. */
2438 if (!bitmap_bit_p (graph->address_taken, node))
2440 gcc_checking_assert (label < graph->size);
2442 if (graph->eq_rep[label] != -1)
2444 /* Unify the two variables since we know they are equivalent. */
2445 if (unite (graph->eq_rep[label], node))
2446 unify_nodes (graph, graph->eq_rep[label], node, false);
2447 return graph->eq_rep[label];
2449 else
2451 graph->eq_rep[label] = node;
2452 graph->pe_rep[label] = node;
2455 else
2457 gcc_checking_assert (label < graph->size);
2458 graph->pe[node] = label;
2459 if (graph->pe_rep[label] == -1)
2460 graph->pe_rep[label] = node;
2463 return node;
2466 /* Unite pointer equivalent but not location equivalent nodes in
2467 GRAPH. This may only be performed once variable substitution is
2468 finished. */
2470 static void
2471 unite_pointer_equivalences (constraint_graph_t graph)
2473 unsigned int i;
2475 /* Go through the pointer equivalences and unite them to their
2476 representative, if they aren't already. */
2477 for (i = 1; i < FIRST_REF_NODE; i++)
2479 unsigned int label = graph->pe[i];
2480 if (label)
2482 int label_rep = graph->pe_rep[label];
2484 if (label_rep == -1)
2485 continue;
2487 label_rep = find (label_rep);
2488 if (label_rep >= 0 && unite (label_rep, find (i)))
2489 unify_nodes (graph, label_rep, i, false);
2494 /* Move complex constraints to the GRAPH nodes they belong to. */
2496 static void
2497 move_complex_constraints (constraint_graph_t graph)
2499 int i;
2500 constraint_t c;
2502 FOR_EACH_VEC_ELT (constraints, i, c)
2504 if (c)
2506 struct constraint_expr lhs = c->lhs;
2507 struct constraint_expr rhs = c->rhs;
2509 if (lhs.type == DEREF)
2511 insert_into_complex (graph, lhs.var, c);
2513 else if (rhs.type == DEREF)
2515 if (!(get_varinfo (lhs.var)->is_special_var))
2516 insert_into_complex (graph, rhs.var, c);
2518 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2519 && (lhs.offset != 0 || rhs.offset != 0))
2521 insert_into_complex (graph, rhs.var, c);
2528 /* Optimize and rewrite complex constraints while performing
2529 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2530 result of perform_variable_substitution. */
2532 static void
2533 rewrite_constraints (constraint_graph_t graph,
2534 struct scc_info *si)
2536 int i;
2537 constraint_t c;
2539 if (flag_checking)
2541 for (unsigned int j = 0; j < graph->size; j++)
2542 gcc_assert (find (j) == j);
2545 FOR_EACH_VEC_ELT (constraints, i, c)
2547 struct constraint_expr lhs = c->lhs;
2548 struct constraint_expr rhs = c->rhs;
2549 unsigned int lhsvar = find (lhs.var);
2550 unsigned int rhsvar = find (rhs.var);
2551 unsigned int lhsnode, rhsnode;
2552 unsigned int lhslabel, rhslabel;
2554 lhsnode = si->node_mapping[lhsvar];
2555 rhsnode = si->node_mapping[rhsvar];
2556 lhslabel = graph->pointer_label[lhsnode];
2557 rhslabel = graph->pointer_label[rhsnode];
2559 /* See if it is really a non-pointer variable, and if so, ignore
2560 the constraint. */
2561 if (lhslabel == 0)
2563 if (dump_file && (dump_flags & TDF_DETAILS))
2566 fprintf (dump_file, "%s is a non-pointer variable,"
2567 "ignoring constraint:",
2568 get_varinfo (lhs.var)->name);
2569 dump_constraint (dump_file, c);
2570 fprintf (dump_file, "\n");
2572 constraints[i] = NULL;
2573 continue;
2576 if (rhslabel == 0)
2578 if (dump_file && (dump_flags & TDF_DETAILS))
2581 fprintf (dump_file, "%s is a non-pointer variable,"
2582 "ignoring constraint:",
2583 get_varinfo (rhs.var)->name);
2584 dump_constraint (dump_file, c);
2585 fprintf (dump_file, "\n");
2587 constraints[i] = NULL;
2588 continue;
2591 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2592 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2593 c->lhs.var = lhsvar;
2594 c->rhs.var = rhsvar;
/* Eliminate indirect cycles involving NODE.  Return true if NODE was
   part of an SCC, false otherwise.  */

static bool
eliminate_indirect_cycles (unsigned int node)
{
  if (graph->indirect_cycles[node] != -1
      && !bitmap_empty_p (get_varinfo (node)->solution))
    {
      unsigned int i;
      auto_vec<unsigned> queue;
      int queuepos;
      /* The representative everything in the cycle is merged into.  */
      unsigned int to = find (graph->indirect_cycles[node]);
      bitmap_iterator bi;

      /* We can't touch the solution set and call unify_nodes
	 at the same time, because unify_nodes is going to do
	 bitmap unions into it.  So first record which nodes to
	 unify in QUEUE, then unify them in a second pass.  */

      EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
	{
	  /* Only representatives other than TO itself need merging.  */
	  if (find (i) == i && i != to)
	    {
	      if (unite (to, i))
		queue.safe_push (i);
	    }
	}

      for (queuepos = 0;
	   queue.iterate (queuepos, &i);
	   queuepos++)
	{
	  unify_nodes (graph, to, i, true);
	}
      return true;
    }
  return false;
}
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changed.  This corresponds to steps 6-8 in the solving list given above.  */

static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = graph->size;
  unsigned int i;
  bitmap pts;

  changed = BITMAP_ALLOC (NULL);

  /* Mark all initial non-collapsed nodes as changed.  */
  for (i = 1; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || graph->complex[i].length () > 0))
	bitmap_set_bit (changed, i);
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  while (!bitmap_empty_p (changed))
    {
      unsigned int i;
      struct topo_info *ti = init_topo_info ();
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      /* Process nodes in (reverse) topological order of the copy
	 edges to converge faster.  */
      compute_topo_order (graph, ti);

      while (ti->topo_order.length () != 0)
	{
	  i = ti->topo_order.pop ();

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (i) && find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (bitmap_clear_bit (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      vec<constraint_t> complex = graph->complex[i];
	      varinfo_t vi = get_varinfo (i);
	      bool solution_empty;

	      /* Compute the changed set of solution bits.  If anything
	         is in the solution just propagate that.  */
	      if (bitmap_bit_p (vi->solution, anything_id))
		{
		  /* If anything is also in the old solution there is
		     nothing to do.
		     ??? But we shouldn't ended up with "changed" set ...  */
		  if (vi->oldsolution
		      && bitmap_bit_p (vi->oldsolution, anything_id))
		    continue;
		  bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
		}
	      else if (vi->oldsolution)
		/* PTS is the delta: bits new since the last visit.  */
		bitmap_and_compl (pts, vi->solution, vi->oldsolution);
	      else
		bitmap_copy (pts, vi->solution);

	      /* Nothing new — no work to do for this node.  */
	      if (bitmap_empty_p (pts))
		continue;

	      /* Remember what we have already propagated.  */
	      if (vi->oldsolution)
		bitmap_ior_into (vi->oldsolution, pts);
	      else
		{
		  vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
		  bitmap_copy (vi->oldsolution, pts);
		}

	      solution = vi->solution;
	      solution_empty = bitmap_empty_p (solution);

	      /* Process the complex constraints */
	      bitmap expanded_pts = NULL;
	      FOR_EACH_VEC_ELT (complex, j, c)
		{
		  /* XXX: This is going to unsort the constraints in
		     some cases, which will occasionally add duplicate
		     constraints during unification.  This does not
		     affect correctness.  */
		  c->lhs.var = find (c->lhs.var);
		  c->rhs.var = find (c->rhs.var);

		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, pts, &expanded_pts);
		}
	      BITMAP_FREE (expanded_pts);

	      solution_empty = bitmap_empty_p (solution);

	      if (!solution_empty)
		{
		  bitmap_iterator bi;
		  unsigned eff_escaped_id = find (escaped_id);

		  /* Propagate solution to all successors.  */
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      bitmap tmp;
		      bool flag;

		      unsigned int to = find (j);
		      tmp = get_varinfo (to)->solution;
		      flag = false;

		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			continue;

		      /* If we propagate from ESCAPED use ESCAPED as
		         placeholder.  */
		      if (i == eff_escaped_id)
			flag = bitmap_set_bit (tmp, escaped_id);
		      else
			flag = bitmap_ior_into (tmp, pts);

		      if (flag)
			bitmap_set_bit (changed, to);
		    }
		}
	    }
	}
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  BITMAP_FREE (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
/* Map from trees to variable infos.  Maintained by insert_vi_for_tree
   and queried by lookup_vi_for_tree / get_vi_for_tree below.  */
static hash_map<tree, varinfo_t> *vi_for_tree;
2800 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2802 static void
2803 insert_vi_for_tree (tree t, varinfo_t vi)
2805 gcc_assert (vi);
2806 gcc_assert (!vi_for_tree->put (t, vi));
2809 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2810 exist in the map, return NULL, otherwise, return the varinfo we found. */
2812 static varinfo_t
2813 lookup_vi_for_tree (tree t)
2815 varinfo_t *slot = vi_for_tree->get (t);
2816 if (slot == NULL)
2817 return NULL;
2819 return *slot;
2822 /* Return a printable name for DECL */
2824 static const char *
2825 alias_get_name (tree decl)
2827 const char *res = NULL;
2828 char *temp;
2829 int num_printed = 0;
2831 if (!dump_file)
2832 return "NULL";
2834 if (TREE_CODE (decl) == SSA_NAME)
2836 res = get_name (decl);
2837 if (res)
2838 num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
2839 else
2840 num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
2841 if (num_printed > 0)
2843 res = ggc_strdup (temp);
2844 free (temp);
2847 else if (DECL_P (decl))
2849 if (DECL_ASSEMBLER_NAME_SET_P (decl))
2850 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2851 else
2853 res = get_name (decl);
2854 if (!res)
2856 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2857 if (num_printed > 0)
2859 res = ggc_strdup (temp);
2860 free (temp);
2865 if (res != NULL)
2866 return res;
2868 return "NULL";
2871 /* Find the variable id for tree T in the map.
2872 If T doesn't exist in the map, create an entry for it and return it. */
2874 static varinfo_t
2875 get_vi_for_tree (tree t)
2877 varinfo_t *slot = vi_for_tree->get (t);
2878 if (slot == NULL)
2880 unsigned int id = create_variable_info_for (t, alias_get_name (t), false);
2881 return get_varinfo (id);
2884 return *slot;
2887 /* Get a scalar constraint expression for a new temporary variable. */
2889 static struct constraint_expr
2890 new_scalar_tmp_constraint_exp (const char *name, bool add_id)
2892 struct constraint_expr tmp;
2893 varinfo_t vi;
2895 vi = new_var_info (NULL_TREE, name, add_id);
2896 vi->offset = 0;
2897 vi->size = -1;
2898 vi->fullsize = -1;
2899 vi->is_full_var = 1;
2901 tmp.var = vi->id;
2902 tmp.type = SCALAR;
2903 tmp.offset = 0;
2905 return tmp;
/* Get a constraint expression vector from an SSA_VAR_P node.
   If address_p is true, the result will be taken its address of.  */

static void
get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
{
  struct constraint_expr cexpr;
  varinfo_t vi;

  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
  gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (t) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (t)
      && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
    {
      /* Recurse once on the underlying decl.  */
      get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
      return;
    }

  /* For global variables resort to the alias target.  */
  if (TREE_CODE (t) == VAR_DECL
      && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      varpool_node *node = varpool_node::get (t);
      if (node && node->alias && node->analyzed)
	{
	  node = node->ultimate_alias_target ();
	  /* Canonicalize the PT uid of all aliases to the ultimate target.
	     ??? Hopefully the set of aliases can't change in a way that
	     changes the ultimate alias target.  */
	  gcc_assert ((! DECL_PT_UID_SET_P (node->decl)
		       || DECL_PT_UID (node->decl) == DECL_UID (node->decl))
		      && (! DECL_PT_UID_SET_P (t)
			  || DECL_PT_UID (t) == DECL_UID (node->decl)));
	  DECL_PT_UID (t) = DECL_UID (node->decl);
	  t = node->decl;
	}
    }

  vi = get_vi_for_tree (t);
  cexpr.var = vi->id;
  cexpr.type = SCALAR;
  cexpr.offset = 0;

  /* If we are not taking the address of the constraint expr, add all
     sub-fields of the variable as well.  */
  if (!address_p
      && !vi->is_full_var)
    {
      /* Push one SCALAR expression per sub-field, starting at VI.  */
      for (; vi; vi = vi_next (vi))
	{
	  cexpr.var = vi->id;
	  results->safe_push (cexpr);
	}
      return;
    }

  results->safe_push (cexpr);
}
/* Process constraint T, performing various simplifications and then
   adding it to our list of overall constraints.  May recurse to split
   a constraint into simpler ones via a fresh temporary.  */

static void
process_constraint (constraint_t t)
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  gcc_assert (rhs.var < varmap.length ());
  gcc_assert (lhs.var < varmap.length ());

  /* If we didn't get any useful constraint from the lhs we get
     &ANYTHING as fallback from get_constraint_for.  Deal with
     it here by turning it into *ANYTHING.  */
  if (lhs.type == ADDRESSOF
      && lhs.var == anything_id)
    lhs.type = DEREF;

  /* ADDRESSOF on the lhs is invalid.  */
  gcc_assert (lhs.type != ADDRESSOF);

  /* We shouldn't add constraints from things that cannot have pointers.
     It's not completely trivial to avoid in the callers, so do it here.  */
  if (rhs.type != ADDRESSOF
      && !get_varinfo (rhs.var)->may_have_pointers)
    return;

  /* Likewise adding to the solution of a non-pointer var isn't useful.  */
  if (!get_varinfo (lhs.var)->may_have_pointers)
    return;

  /* This can happen in our IR with things like n->a = *p */
  if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
    {
      /* Split into tmp = *rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp", true);
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
    {
      /* Split into tmp = &rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp", true);
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else
    {
      /* Simple enough to store directly.  ADDRESSOF with an offset
	 never reaches here.  */
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
      constraints.safe_push (t);
    }
}
3029 /* Return the position, in bits, of FIELD_DECL from the beginning of its
3030 structure. */
3032 static HOST_WIDE_INT
3033 bitpos_of_field (const tree fdecl)
3035 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3036 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3037 return -1;
3039 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3040 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
/* Get constraint expressions for offsetting PTR by OFFSET.  Stores the
   resulting constraint expressions in *RESULTS.  */

static void
get_constraint_for_ptr_offset (tree ptr, tree offset,
			       vec<ce_s> *results)
{
  struct constraint_expr c;
  unsigned int j, n;
  HOST_WIDE_INT rhsoffset;

  /* If we do not do field-sensitive PTA adding offsets to pointers
     does not change the points-to solution.  */
  if (!use_field_sensitive)
    {
      get_constraint_for_rhs (ptr, results);
      return;
    }

  /* If the offset is not a non-negative integer constant that fits
     in a HOST_WIDE_INT, we have to fall back to a conservative
     solution which includes all sub-fields of all pointed-to
     variables of ptr.  */
  if (offset == NULL_TREE
      || TREE_CODE (offset) != INTEGER_CST)
    rhsoffset = UNKNOWN_OFFSET;
  else
    {
      /* Sign-extend the offset.  */
      offset_int soffset = offset_int::from (offset, SIGNED);
      if (!wi::fits_shwi_p (soffset))
	rhsoffset = UNKNOWN_OFFSET;
      else
	{
	  /* Make sure the bit-offset also fits.  The division-based
	     check detects overflow of the multiplication.  */
	  HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
	  rhsoffset = rhsunitoffset * BITS_PER_UNIT;
	  if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
	    rhsoffset = UNKNOWN_OFFSET;
	}
    }

  get_constraint_for_rhs (ptr, results);
  if (rhsoffset == 0)
    return;

  /* As we are eventually appending to the solution do not use
     vec::iterate here.  */
  n = results->length ();
  for (j = 0; j < n; j++)
    {
      varinfo_t curr;
      c = (*results)[j];
      curr = get_varinfo (c.var);

      if (c.type == ADDRESSOF
	  /* If this varinfo represents a full variable just use it.  */
	  && curr->is_full_var)
	;
      else if (c.type == ADDRESSOF
	       /* If we do not know the offset add all subfields.  */
	       && rhsoffset == UNKNOWN_OFFSET)
	{
	  /* Walk the full sub-field chain starting at the head of
	     CURR's variable and add each field once.  */
	  varinfo_t temp = get_varinfo (curr->head);
	  do
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      if (c2.var != c.var)
		results->safe_push (c2);
	      temp = vi_next (temp);
	    }
	  while (temp);
	}
      else if (c.type == ADDRESSOF)
	{
	  varinfo_t temp;
	  unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;

	  /* If curr->offset + rhsoffset is less than zero adjust it.  */
	  if (rhsoffset < 0
	      && curr->offset < offset)
	    offset = 0;

	  /* We have to include all fields that overlap the current
	     field shifted by rhsoffset.  And we include at least
	     the last or the first field of the variable to represent
	     reachability of off-bound addresses, in particular &object + 1,
	     conservatively correct.  */
	  temp = first_or_preceding_vi_for_offset (curr, offset);
	  c.var = temp->id;
	  c.offset = 0;
	  temp = vi_next (temp);
	  while (temp
		 && temp->offset < offset + curr->size)
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      results->safe_push (c2);
	      temp = vi_next (temp);
	    }
	}
      else if (c.type == SCALAR)
	{
	  /* For scalars the offset is applied symbolically and resolved
	     at constraint-solving time.  */
	  gcc_assert (c.offset == 0);
	  c.offset = rhsoffset;
	}
      else
	/* We shouldn't get any DEREFs here.  */
	gcc_unreachable ();

      (*results)[j] = c;
    }
}
3164 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3165 If address_p is true the result will be taken its address of.
3166 If lhs_p is true then the constraint expression is assumed to be used
3167 as the lhs. */
3169 static void
3170 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3171 bool address_p, bool lhs_p)
3173 tree orig_t = t;
3174 HOST_WIDE_INT bitsize = -1;
3175 HOST_WIDE_INT bitmaxsize = -1;
3176 HOST_WIDE_INT bitpos;
3177 bool reverse;
3178 tree forzero;
3180 /* Some people like to do cute things like take the address of
3181 &0->a.b */
3182 forzero = t;
3183 while (handled_component_p (forzero)
3184 || INDIRECT_REF_P (forzero)
3185 || TREE_CODE (forzero) == MEM_REF)
3186 forzero = TREE_OPERAND (forzero, 0);
3188 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3190 struct constraint_expr temp;
3192 temp.offset = 0;
3193 temp.var = integer_id;
3194 temp.type = SCALAR;
3195 results->safe_push (temp);
3196 return;
3199 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse);
3201 /* We can end up here for component references on a
3202 VIEW_CONVERT_EXPR <>(&foobar) or things like a
3203 BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>. So for
3204 symbolic constants simply give up. */
3205 if (TREE_CODE (t) == ADDR_EXPR)
3207 constraint_expr result;
3208 result.type = SCALAR;
3209 result.var = anything_id;
3210 result.offset = 0;
3211 results->safe_push (result);
3212 return;
3215 /* Pretend to take the address of the base, we'll take care of
3216 adding the required subset of sub-fields below. */
3217 get_constraint_for_1 (t, results, true, lhs_p);
3218 gcc_assert (results->length () == 1);
3219 struct constraint_expr &result = results->last ();
3221 if (result.type == SCALAR
3222 && get_varinfo (result.var)->is_full_var)
3223 /* For single-field vars do not bother about the offset. */
3224 result.offset = 0;
3225 else if (result.type == SCALAR)
3227 /* In languages like C, you can access one past the end of an
3228 array. You aren't allowed to dereference it, so we can
3229 ignore this constraint. When we handle pointer subtraction,
3230 we may have to do something cute here. */
3232 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
3233 && bitmaxsize != 0)
3235 /* It's also not true that the constraint will actually start at the
3236 right offset, it may start in some padding. We only care about
3237 setting the constraint to the first actual field it touches, so
3238 walk to find it. */
3239 struct constraint_expr cexpr = result;
3240 varinfo_t curr;
3241 results->pop ();
3242 cexpr.offset = 0;
3243 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3245 if (ranges_overlap_p (curr->offset, curr->size,
3246 bitpos, bitmaxsize))
3248 cexpr.var = curr->id;
3249 results->safe_push (cexpr);
3250 if (address_p)
3251 break;
3254 /* If we are going to take the address of this field then
3255 to be able to compute reachability correctly add at least
3256 the last field of the variable. */
3257 if (address_p && results->length () == 0)
3259 curr = get_varinfo (cexpr.var);
3260 while (curr->next != 0)
3261 curr = vi_next (curr);
3262 cexpr.var = curr->id;
3263 results->safe_push (cexpr);
3265 else if (results->length () == 0)
3266 /* Assert that we found *some* field there. The user couldn't be
3267 accessing *only* padding. */
3268 /* Still the user could access one past the end of an array
3269 embedded in a struct resulting in accessing *only* padding. */
3270 /* Or accessing only padding via type-punning to a type
3271 that has a filed just in padding space. */
3273 cexpr.type = SCALAR;
3274 cexpr.var = anything_id;
3275 cexpr.offset = 0;
3276 results->safe_push (cexpr);
3279 else if (bitmaxsize == 0)
3281 if (dump_file && (dump_flags & TDF_DETAILS))
3282 fprintf (dump_file, "Access to zero-sized part of variable,"
3283 "ignoring\n");
3285 else
3286 if (dump_file && (dump_flags & TDF_DETAILS))
3287 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3289 else if (result.type == DEREF)
3291 /* If we do not know exactly where the access goes say so. Note
3292 that only for non-structure accesses we know that we access
3293 at most one subfiled of any variable. */
3294 if (bitpos == -1
3295 || bitsize != bitmaxsize
3296 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3297 || result.offset == UNKNOWN_OFFSET)
3298 result.offset = UNKNOWN_OFFSET;
3299 else
3300 result.offset += bitpos;
3302 else if (result.type == ADDRESSOF)
3304 /* We can end up here for component references on constants like
3305 VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i]. */
3306 result.type = SCALAR;
3307 result.var = anything_id;
3308 result.offset = 0;
3310 else
3311 gcc_unreachable ();
3315 /* Dereference the constraint expression CONS, and return the result.
3316 DEREF (ADDRESSOF) = SCALAR
3317 DEREF (SCALAR) = DEREF
3318 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3319 This is needed so that we can handle dereferencing DEREF constraints. */
3321 static void
3322 do_deref (vec<ce_s> *constraints)
3324 struct constraint_expr *c;
3325 unsigned int i = 0;
3327 FOR_EACH_VEC_ELT (*constraints, i, c)
3329 if (c->type == SCALAR)
3330 c->type = DEREF;
3331 else if (c->type == ADDRESSOF)
3332 c->type = SCALAR;
3333 else if (c->type == DEREF)
3335 struct constraint_expr tmplhs;
3336 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp", true);
3337 process_constraint (new_constraint (tmplhs, *c));
3338 c->var = tmplhs.var;
3340 else
3341 gcc_unreachable ();
3345 /* Given a tree T, return the constraint expression for taking the
3346 address of it. */
3348 static void
3349 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3351 struct constraint_expr *c;
3352 unsigned int i;
3354 get_constraint_for_1 (t, results, true, true);
3356 FOR_EACH_VEC_ELT (*results, i, c)
3358 if (c->type == DEREF)
3359 c->type = SCALAR;
3360 else
3361 c->type = ADDRESSOF;
3365 /* Given a tree T, return the constraint expression for it. */
/* ADDRESS_P is true when the caller will take the address of the
   resulting constraints; LHS_P is true when T appears on the lhs of
   an assignment.  Constraint expressions are appended to *RESULTS.  */
3367 static void
3368 get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
3369 bool lhs_p)
3371 struct constraint_expr temp;
3373 /* x = integer is all glommed to a single variable, which doesn't
3374 point to anything by itself. That is, of course, unless it is an
3375 integer constant being treated as a pointer, in which case, we
3376 will return that this is really the addressof anything. This
3377 happens below, since it will fall into the default case. The only
3378 case we know something about an integer treated like a pointer is
3379 when it is the NULL pointer, and then we just say it points to
3380 NULL.
3382 Do not do that if -fno-delete-null-pointer-checks though, because
3383 in that case *NULL does not fail, so it _should_ alias *anything.
3384 It is not worth adding a new option or renaming the existing one,
3385 since this case is relatively obscure. */
3386 if ((TREE_CODE (t) == INTEGER_CST
3387 && integer_zerop (t))
3388 /* The only valid CONSTRUCTORs in gimple with pointer typed
3389 elements are zero-initializer. But in IPA mode we also
3390 process global initializers, so verify at least. */
3391 || (TREE_CODE (t) == CONSTRUCTOR
3392 && CONSTRUCTOR_NELTS (t) == 0))
3394 if (flag_delete_null_pointer_checks)
3395 temp.var = nothing_id;
3396 else
3397 temp.var = nonlocal_id;
3398 temp.type = ADDRESSOF;
3399 temp.offset = 0;
3400 results->safe_push (temp);
3401 return;
3404 /* String constants are read-only, ideally we'd have a CONST_DECL
3405 for those. */
3406 if (TREE_CODE (t) == STRING_CST)
3408 temp.var = string_id;
3409 temp.type = SCALAR;
3410 temp.offset = 0;
3411 results->safe_push (temp);
3412 return;
3415 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3417 case tcc_expression:
3419 switch (TREE_CODE (t))
3421 case ADDR_EXPR:
3422 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3423 return;
3424 default:;
3426 break;
3428 case tcc_reference:
3430 switch (TREE_CODE (t))
3432 case MEM_REF:
3434 struct constraint_expr cs;
3435 varinfo_t vi, curr;
/* Build the pointed-to constraint from the base pointer plus the
   constant offset operand, then dereference it.  */
3436 get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3437 TREE_OPERAND (t, 1), results);
3438 do_deref (results);
3440 /* If we are not taking the address then make sure to process
3441 all subvariables we might access. */
3442 if (address_p)
3443 return;
3445 cs = results->last ();
3446 if (cs.type == DEREF
3447 && type_can_have_subvars (TREE_TYPE (t)))
3449 /* For dereferences this means we have to defer it
3450 to solving time. */
3451 results->last ().offset = UNKNOWN_OFFSET;
3452 return;
3454 if (cs.type != SCALAR)
3455 return;
3457 vi = get_varinfo (cs.var);
3458 curr = vi_next (vi);
3459 if (!vi->is_full_var
3460 && curr)
/* The access may cover several sub-variables of a decomposed decl;
   push every field whose offset falls inside the accessed size.  */
3462 unsigned HOST_WIDE_INT size;
3463 if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
3464 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
3465 else
3466 size = -1;
3467 for (; curr; curr = vi_next (curr))
3469 if (curr->offset - vi->offset < size)
3471 cs.var = curr->id;
3472 results->safe_push (cs);
3474 else
3475 break;
3478 return;
3480 case ARRAY_REF:
3481 case ARRAY_RANGE_REF:
3482 case COMPONENT_REF:
3483 case IMAGPART_EXPR:
3484 case REALPART_EXPR:
3485 case BIT_FIELD_REF:
3486 get_constraint_for_component_ref (t, results, address_p, lhs_p);
3487 return;
3488 case VIEW_CONVERT_EXPR:
/* VIEW_CONVERT_EXPR does not change the pointed-to object; recurse
   on the operand unchanged.  */
3489 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
3490 lhs_p);
3491 return;
3492 /* We are missing handling for TARGET_MEM_REF here. */
3493 default:;
3495 break;
3497 case tcc_exceptional:
3499 switch (TREE_CODE (t))
3501 case SSA_NAME:
3503 get_constraint_for_ssa_var (t, results, address_p);
3504 return;
3506 case CONSTRUCTOR:
/* Non-empty constructor: union the constraints of all elements.  */
3508 unsigned int i;
3509 tree val;
3510 auto_vec<ce_s> tmp;
3511 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3513 struct constraint_expr *rhsp;
3514 unsigned j;
3515 get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3516 FOR_EACH_VEC_ELT (tmp, j, rhsp)
3517 results->safe_push (*rhsp);
3518 tmp.truncate (0);
3520 /* We do not know whether the constructor was complete,
3521 so technically we have to add &NOTHING or &ANYTHING
3522 like we do for an empty constructor as well. */
3523 return;
3525 default:;
3527 break;
3529 case tcc_declaration:
3531 get_constraint_for_ssa_var (t, results, address_p);
3532 return;
3534 case tcc_constant:
3536 /* We cannot refer to automatic variables through constants. */
3537 temp.type = ADDRESSOF;
3538 temp.var = nonlocal_id;
3539 temp.offset = 0;
3540 results->safe_push (temp);
3541 return;
3543 default:;
3546 /* The default fallback is a constraint from anything. */
3547 temp.type = ADDRESSOF;
3548 temp.var = anything_id;
3549 temp.offset = 0;
3550 results->safe_push (temp);
3553 /* Given a gimple tree T, return the constraint expression vector for it. */
3555 static void
3556 get_constraint_for (tree t, vec<ce_s> *results)
3558 gcc_assert (results->length () == 0);
3560 get_constraint_for_1 (t, results, false, true);
3563 /* Given a gimple tree T, return the constraint expression vector for it
3564 to be used as the rhs of a constraint. */
3566 static void
3567 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3569 gcc_assert (results->length () == 0);
3571 get_constraint_for_1 (t, results, false, false);
3575 /* Efficiently generates constraints from all entries in *RHSC to all
3576 entries in *LHSC. */
3578 static void
3579 process_all_all_constraints (vec<ce_s> lhsc,
3580 vec<ce_s> rhsc)
3582 struct constraint_expr *lhsp, *rhsp;
3583 unsigned i, j;
3585 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3587 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3588 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3589 process_constraint (new_constraint (*lhsp, *rhsp));
3591 else
3593 struct constraint_expr tmp;
3594 tmp = new_scalar_tmp_constraint_exp ("allalltmp", true);
3595 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3596 process_constraint (new_constraint (tmp, *rhsp));
3597 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3598 process_constraint (new_constraint (*lhsp, tmp));
3602 /* Handle aggregate copies by expanding into copies of the respective
3603 fields of the structures. */
/* LHSOP and RHSOP are the gimple lhs/rhs operands of the copy.  */
3605 static void
3606 do_structure_copy (tree lhsop, tree rhsop)
3608 struct constraint_expr *lhsp, *rhsp;
3609 auto_vec<ce_s> lhsc;
3610 auto_vec<ce_s> rhsc;
3611 unsigned j;
3613 get_constraint_for (lhsop, &lhsc);
3614 get_constraint_for_rhs (rhsop, &rhsc);
3615 lhsp = &lhsc[0];
3616 rhsp = &rhsc[0];
/* If either side is an indirect access (or the lhs is ANYTHING),
   we cannot match fields pairwise; fall back to an all-to-all copy
   with unknown offsets.  */
3617 if (lhsp->type == DEREF
3618 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3619 || rhsp->type == DEREF)
3621 if (lhsp->type == DEREF)
3623 gcc_assert (lhsc.length () == 1)
3624 lhsp->offset = UNKNOWN_OFFSET;
3626 if (rhsp->type == DEREF)
3628 gcc_assert (rhsc.length () == 1);
3629 rhsp->offset = UNKNOWN_OFFSET;
3631 process_all_all_constraints (lhsc, rhsc);
/* Otherwise both sides decomposed into per-field constraints; walk
   the lhs fields and advance through the rhs fields in lock-step,
   emitting a copy for each overlapping pair.  */
3633 else if (lhsp->type == SCALAR
3634 && (rhsp->type == SCALAR
3635 || rhsp->type == ADDRESSOF))
3637 HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
3638 HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
3639 bool reverse;
3640 unsigned k = 0;
3641 get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize,
3642 &reverse);
3643 get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize,
3644 &reverse);
3645 for (j = 0; lhsc.iterate (j, &lhsp);)
3647 varinfo_t lhsv, rhsv;
3648 rhsp = &rhsc[k];
3649 lhsv = get_varinfo (lhsp->var);
3650 rhsv = get_varinfo (rhsp->var);
3651 if (lhsv->may_have_pointers
3652 && (lhsv->is_full_var
3653 || rhsv->is_full_var
3654 || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3655 rhsv->offset + lhsoffset, rhsv->size)))
3656 process_constraint (new_constraint (*lhsp, *rhsp));
/* Advance the rhs cursor when the current lhs field extends past the
   current rhs field; otherwise move on to the next lhs field.  */
3657 if (!rhsv->is_full_var
3658 && (lhsv->is_full_var
3659 || (lhsv->offset + rhsoffset + lhsv->size
3660 > rhsv->offset + lhsoffset + rhsv->size)))
3662 ++k;
3663 if (k >= rhsc.length ())
3664 break;
3666 else
3667 ++j;
3670 else
3671 gcc_unreachable ();
3674 /* Create constraints ID = { rhsc }. */
3676 static void
3677 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3679 struct constraint_expr *c;
3680 struct constraint_expr includes;
3681 unsigned int j;
3683 includes.var = id;
3684 includes.offset = 0;
3685 includes.type = SCALAR;
3687 FOR_EACH_VEC_ELT (rhsc, j, c)
3688 process_constraint (new_constraint (includes, *c));
3691 /* Create a constraint ID = OP. */
3693 static void
3694 make_constraint_to (unsigned id, tree op)
3696 auto_vec<ce_s> rhsc;
3697 get_constraint_for_rhs (op, &rhsc);
3698 make_constraints_to (id, rhsc);
3701 /* Create a constraint ID = &FROM. */
3703 static void
3704 make_constraint_from (varinfo_t vi, int from)
3706 struct constraint_expr lhs, rhs;
3708 lhs.var = vi->id;
3709 lhs.offset = 0;
3710 lhs.type = SCALAR;
3712 rhs.var = from;
3713 rhs.offset = 0;
3714 rhs.type = ADDRESSOF;
3715 process_constraint (new_constraint (lhs, rhs));
3718 /* Create a constraint ID = FROM. */
3720 static void
3721 make_copy_constraint (varinfo_t vi, int from)
3723 struct constraint_expr lhs, rhs;
3725 lhs.var = vi->id;
3726 lhs.offset = 0;
3727 lhs.type = SCALAR;
3729 rhs.var = from;
3730 rhs.offset = 0;
3731 rhs.type = SCALAR;
3732 process_constraint (new_constraint (lhs, rhs));
3735 /* Make constraints necessary to make OP escape. */
3737 static void
3738 make_escape_constraint (tree op)
3740 make_constraint_to (escaped_id, op);
3743 /* Add constraints to that the solution of VI is transitively closed. */
3745 static void
3746 make_transitive_closure_constraints (varinfo_t vi)
3748 struct constraint_expr lhs, rhs;
3750 /* VAR = *VAR; */
3751 lhs.type = SCALAR;
3752 lhs.var = vi->id;
3753 lhs.offset = 0;
3754 rhs.type = DEREF;
3755 rhs.var = vi->id;
3756 rhs.offset = UNKNOWN_OFFSET;
3757 process_constraint (new_constraint (lhs, rhs));
3760 /* Temporary storage for fake var decls. */
3761 struct obstack fake_var_decl_obstack;
3763 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3765 static tree
3766 build_fake_var_decl (tree type)
3768 tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
3769 memset (decl, 0, sizeof (struct tree_var_decl));
3770 TREE_SET_CODE (decl, VAR_DECL);
3771 TREE_TYPE (decl) = type;
3772 DECL_UID (decl) = allocate_decl_uid ();
3773 SET_DECL_PT_UID (decl, -1);
3774 layout_decl (decl, 0);
3775 return decl;
3778 /* Create a new artificial heap variable with NAME.
3779 Return the created variable. */
3781 static varinfo_t
3782 make_heapvar (const char *name, bool add_id)
3784 varinfo_t vi;
3785 tree heapvar;
3787 heapvar = build_fake_var_decl (ptr_type_node);
3788 DECL_EXTERNAL (heapvar) = 1;
3790 vi = new_var_info (heapvar, name, add_id);
3791 vi->is_artificial_var = true;
3792 vi->is_heap_var = true;
3793 vi->is_unknown_size_var = true;
3794 vi->offset = 0;
3795 vi->fullsize = ~0;
3796 vi->size = ~0;
3797 vi->is_full_var = true;
3798 insert_vi_for_tree (heapvar, vi);
3800 return vi;
3803 /* Create a new artificial heap variable with NAME and make a
3804 constraint from it to LHS. Set flags according to a tag used
3805 for tracking restrict pointers. */
3807 static varinfo_t
3808 make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id)
3810 varinfo_t vi = make_heapvar (name, add_id);
3811 vi->is_restrict_var = 1;
3812 vi->is_global_var = 1;
3813 vi->may_have_pointers = 1;
3814 make_constraint_from (lhs, vi->id);
3815 return vi;
3818 /* Create a new artificial heap variable with NAME and make a
3819 constraint from it to LHS. Set flags according to a tag used
3820 for tracking restrict pointers and make the artificial heap
3821 point to global memory. */
3823 static varinfo_t
3824 make_constraint_from_global_restrict (varinfo_t lhs, const char *name,
3825 bool add_id)
3827 varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id);
3828 make_copy_constraint (vi, nonlocal_id);
3829 return vi;
3832 /* In IPA mode there are varinfos for different aspects of reach
3833 function designator. One for the points-to set of the return
3834 value, one for the variables that are clobbered by the function,
3835 one for its uses and one for each parameter (including a single
3836 glob for remaining variadic arguments). */
3838 enum { fi_clobbers = 1, fi_uses = 2,
3839 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3841 /* Get a constraint for the requested part of a function designator FI
3842 when operating in IPA mode. */
3844 static struct constraint_expr
3845 get_function_part_constraint (varinfo_t fi, unsigned part)
3847 struct constraint_expr c;
3849 gcc_assert (in_ipa_mode);
3851 if (fi->id == anything_id)
3853 /* ??? We probably should have a ANYFN special variable. */
3854 c.var = anything_id;
3855 c.offset = 0;
3856 c.type = SCALAR;
3858 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3860 varinfo_t ai = first_vi_for_offset (fi, part);
3861 if (ai)
3862 c.var = ai->id;
3863 else
3864 c.var = anything_id;
3865 c.offset = 0;
3866 c.type = SCALAR;
3868 else
3870 c.var = fi->id;
3871 c.offset = part;
3872 c.type = DEREF;
3875 return c;
3878 /* For non-IPA mode, generate constraints necessary for a call on the
3879 RHS. */
/* STMT is the call; constraint expressions describing what the call
   may return are appended to *RESULTS.  */
3881 static void
3882 handle_rhs_call (gcall *stmt, vec<ce_s> *results)
3884 struct constraint_expr rhsc;
3885 unsigned i;
3886 bool returns_uses = false;
/* Classify each argument by its EAF_* flags and emit the weakest
   sufficient constraints.  */
3888 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3890 tree arg = gimple_call_arg (stmt, i);
3891 int flags = gimple_call_arg_flags (stmt, i);
3893 /* If the argument is not used we can ignore it. */
3894 if (flags & EAF_UNUSED)
3895 continue;
3897 /* As we compute ESCAPED context-insensitive we do not gain
3898 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3899 set. The argument would still get clobbered through the
3900 escape solution. */
3901 if ((flags & EAF_NOCLOBBER)
3902 && (flags & EAF_NOESCAPE))
/* Argument is only read and does not escape: it only feeds the
   call-use solution.  Without EAF_DIRECT the whole reachable
   closure is used, not just the argument itself.  */
3904 varinfo_t uses = get_call_use_vi (stmt);
3905 if (!(flags & EAF_DIRECT))
3907 varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
3908 make_constraint_to (tem->id, arg);
3909 make_transitive_closure_constraints (tem);
3910 make_copy_constraint (uses, tem->id);
3912 else
3913 make_constraint_to (uses->id, arg);
3914 returns_uses = true;
3916 else if (flags & EAF_NOESCAPE)
/* Argument may be clobbered but does not escape: feed both the
   call-use and call-clobber solutions.  */
3918 struct constraint_expr lhs, rhs;
3919 varinfo_t uses = get_call_use_vi (stmt);
3920 varinfo_t clobbers = get_call_clobber_vi (stmt);
3921 varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
3922 make_constraint_to (tem->id, arg);
3923 if (!(flags & EAF_DIRECT))
3924 make_transitive_closure_constraints (tem);
3925 make_copy_constraint (uses, tem->id);
3926 make_copy_constraint (clobbers, tem->id);
3927 /* Add *tem = nonlocal, do not add *tem = callused as
3928 EAF_NOESCAPE parameters do not escape to other parameters
3929 and all other uses appear in NONLOCAL as well. */
3930 lhs.type = DEREF;
3931 lhs.var = tem->id;
3932 lhs.offset = 0;
3933 rhs.type = SCALAR;
3934 rhs.var = nonlocal_id;
3935 rhs.offset = 0;
3936 process_constraint (new_constraint (lhs, rhs));
3937 returns_uses = true;
3939 else
3940 make_escape_constraint (arg);
3943 /* If we added to the calls uses solution make sure we account for
3944 pointers to it to be returned. */
3945 if (returns_uses)
3947 rhsc.var = get_call_use_vi (stmt)->id;
3948 rhsc.offset = 0;
3949 rhsc.type = SCALAR;
3950 results->safe_push (rhsc);
3953 /* The static chain escapes as well. */
3954 if (gimple_call_chain (stmt))
3955 make_escape_constraint (gimple_call_chain (stmt));
3957 /* And if we applied NRV the address of the return slot escapes as well. */
3958 if (gimple_call_return_slot_opt_p (stmt)
3959 && gimple_call_lhs (stmt) != NULL_TREE
3960 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3962 auto_vec<ce_s> tmpc;
3963 struct constraint_expr lhsc, *c;
3964 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
3965 lhsc.var = escaped_id;
3966 lhsc.offset = 0;
3967 lhsc.type = SCALAR;
3968 FOR_EACH_VEC_ELT (tmpc, i, c)
3969 process_constraint (new_constraint (lhsc, *c));
3972 /* Regular functions return nonlocal memory. */
3973 rhsc.var = nonlocal_id;
3974 rhsc.offset = 0;
3975 rhsc.type = SCALAR;
3976 results->safe_push (rhsc);
3979 /* For non-IPA mode, generate constraints necessary for a call
3980 that returns a pointer and assigns it to LHS. This simply makes
3981 the LHS point to global and escaped variables. */
/* FLAGS are the call's ERF_* return flags; RHSC holds the caller's
   precomputed rhs constraints (may be overridden below); FNDECL is
   the called decl or NULL_TREE.  */
3983 static void
3984 handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> rhsc,
3985 tree fndecl)
3987 auto_vec<ce_s> lhsc;
3989 get_constraint_for (lhs, &lhsc);
3990 /* If the store is to a global decl make sure to
3991 add proper escape constraints. */
3992 lhs = get_base_address (lhs);
3993 if (lhs
3994 && DECL_P (lhs)
3995 && is_global_var (lhs))
3997 struct constraint_expr tmpc;
3998 tmpc.var = escaped_id;
3999 tmpc.offset = 0;
4000 tmpc.type = SCALAR;
4001 lhsc.safe_push (tmpc);
4004 /* If the call returns an argument unmodified override the rhs
4005 constraints. */
4006 if (flags & ERF_RETURNS_ARG
4007 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
4009 tree arg;
4010 rhsc.create (0);
4011 arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
4012 get_constraint_for (arg, &rhsc);
4013 process_all_all_constraints (lhsc, rhsc);
4014 rhsc.release ();
4016 else if (flags & ERF_NOALIAS)
/* The call returns fresh storage: model it with a new heap
   variable instead of the caller-provided rhs constraints.  */
4018 varinfo_t vi;
4019 struct constraint_expr tmpc;
4020 rhsc.create (0);
4021 vi = make_heapvar ("HEAP", true);
4022 /* We are marking allocated storage local, we deal with it becoming
4023 global by escaping and setting of vars_contains_escaped_heap. */
4024 DECL_EXTERNAL (vi->decl) = 0;
4025 vi->is_global_var = 0;
4026 /* If this is not a real malloc call assume the memory was
4027 initialized and thus may point to global memory. All
4028 builtin functions with the malloc attribute behave in a sane way. */
4029 if (!fndecl
4030 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
4031 make_constraint_from (vi, nonlocal_id);
4032 tmpc.var = vi->id;
4033 tmpc.offset = 0;
4034 tmpc.type = ADDRESSOF;
4035 rhsc.safe_push (tmpc);
4036 process_all_all_constraints (lhsc, rhsc);
4037 rhsc.release ();
4039 else
4040 process_all_all_constraints (lhsc, rhsc);
4043 /* For non-IPA mode, generate constraints necessary for a call of a
4044 const function that returns a pointer in the statement STMT. */
4046 static void
4047 handle_const_call (gcall *stmt, vec<ce_s> *results)
4049 struct constraint_expr rhsc;
4050 unsigned int k;
4052 /* Treat nested const functions the same as pure functions as far
4053 as the static chain is concerned. */
4054 if (gimple_call_chain (stmt))
4056 varinfo_t uses = get_call_use_vi (stmt);
4057 make_transitive_closure_constraints (uses);
4058 make_constraint_to (uses->id, gimple_call_chain (stmt));
4059 rhsc.var = uses->id;
4060 rhsc.offset = 0;
4061 rhsc.type = SCALAR;
4062 results->safe_push (rhsc);
4065 /* May return arguments. */
4066 for (k = 0; k < gimple_call_num_args (stmt); ++k)
4068 tree arg = gimple_call_arg (stmt, k);
4069 auto_vec<ce_s> argc;
4070 unsigned i;
4071 struct constraint_expr *argp;
4072 get_constraint_for_rhs (arg, &argc);
4073 FOR_EACH_VEC_ELT (argc, i, argp)
4074 results->safe_push (*argp);
4077 /* May return addresses of globals. */
4078 rhsc.var = nonlocal_id;
4079 rhsc.offset = 0;
4080 rhsc.type = ADDRESSOF;
4081 results->safe_push (rhsc);
4084 /* For non-IPA mode, generate constraints necessary for a call to a
4085 pure function in statement STMT. */
4087 static void
4088 handle_pure_call (gcall *stmt, vec<ce_s> *results)
4090 struct constraint_expr rhsc;
4091 unsigned i;
4092 varinfo_t uses = NULL;
4094 /* Memory reached from pointer arguments is call-used. */
4095 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4097 tree arg = gimple_call_arg (stmt, i);
4098 if (!uses)
4100 uses = get_call_use_vi (stmt);
4101 make_transitive_closure_constraints (uses);
4103 make_constraint_to (uses->id, arg);
4106 /* The static chain is used as well. */
4107 if (gimple_call_chain (stmt))
4109 if (!uses)
4111 uses = get_call_use_vi (stmt);
4112 make_transitive_closure_constraints (uses);
4114 make_constraint_to (uses->id, gimple_call_chain (stmt));
4117 /* Pure functions may return call-used and nonlocal memory. */
4118 if (uses)
4120 rhsc.var = uses->id;
4121 rhsc.offset = 0;
4122 rhsc.type = SCALAR;
4123 results->safe_push (rhsc);
4125 rhsc.var = nonlocal_id;
4126 rhsc.offset = 0;
4127 rhsc.type = SCALAR;
4128 results->safe_push (rhsc);
4132 /* Return the varinfo for the callee of CALL. */
4134 static varinfo_t
4135 get_fi_for_callee (gcall *call)
4137 tree decl, fn = gimple_call_fn (call);
4139 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4140 fn = OBJ_TYPE_REF_EXPR (fn);
4142 /* If we can directly resolve the function being called, do so.
4143 Otherwise, it must be some sort of indirect expression that
4144 we should still be able to handle. */
4145 decl = gimple_call_addr_fndecl (fn);
4146 if (decl)
4147 return get_vi_for_tree (decl);
4149 /* If the function is anything other than a SSA name pointer we have no
4150 clue and should be getting ANYFN (well, ANYTHING for now). */
4151 if (!fn || TREE_CODE (fn) != SSA_NAME)
4152 return get_varinfo (anything_id);
4154 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4155 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4156 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4157 fn = SSA_NAME_VAR (fn);
4159 return get_vi_for_tree (fn);
4162 /* Create constraints for assigning call argument ARG to the incoming parameter
4163 INDEX of function FI. */
4165 static void
4166 find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg)
4168 struct constraint_expr lhs;
4169 lhs = get_function_part_constraint (fi, fi_parm_base + index);
4171 auto_vec<ce_s, 2> rhsc;
4172 get_constraint_for_rhs (arg, &rhsc);
4174 unsigned j;
4175 struct constraint_expr *rhsp;
4176 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4177 process_constraint (new_constraint (lhs, *rhsp));
4180 /* Return true if FNDECL may be part of another lto partition. */
4182 static bool
4183 fndecl_maybe_in_other_partition (tree fndecl)
4185 cgraph_node *fn_node = cgraph_node::get (fndecl);
4186 if (fn_node == NULL)
4187 return true;
4189 return fn_node->in_other_partition;
4192 /* Create constraints for the builtin call T. Return true if the call
4193 was handled, otherwise false. */
/* FN is the function containing T; a false return means the caller
   must fall through to the generic call handling.  */
4195 static bool
4196 find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
4198 tree fndecl = gimple_call_fndecl (t)
4199 auto_vec<ce_s, 2> lhsc;
4200 auto_vec<ce_s, 4> rhsc;
4201 varinfo_t fi;
4203 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4204 /* ??? All builtins that are handled here need to be handled
4205 in the alias-oracle query functions explicitly! */
4206 switch (DECL_FUNCTION_CODE (fndecl))
4208 /* All the following functions return a pointer to the same object
4209 as their first argument points to. The functions do not add
4210 to the ESCAPED solution. The functions make the first argument
4211 pointed to memory point to what the second argument pointed to
4212 memory points to. */
4213 case BUILT_IN_STRCPY:
4214 case BUILT_IN_STRNCPY:
4215 case BUILT_IN_BCOPY:
4216 case BUILT_IN_MEMCPY:
4217 case BUILT_IN_MEMMOVE:
4218 case BUILT_IN_MEMPCPY:
4219 case BUILT_IN_STPCPY:
4220 case BUILT_IN_STPNCPY:
4221 case BUILT_IN_STRCAT:
4222 case BUILT_IN_STRNCAT:
4223 case BUILT_IN_STRCPY_CHK:
4224 case BUILT_IN_STRNCPY_CHK:
4225 case BUILT_IN_MEMCPY_CHK:
4226 case BUILT_IN_MEMMOVE_CHK:
4227 case BUILT_IN_MEMPCPY_CHK:
4228 case BUILT_IN_STPCPY_CHK:
4229 case BUILT_IN_STPNCPY_CHK:
4230 case BUILT_IN_STRCAT_CHK:
4231 case BUILT_IN_STRNCAT_CHK:
4232 case BUILT_IN_TM_MEMCPY:
4233 case BUILT_IN_TM_MEMMOVE:
4235 tree res = gimple_call_lhs (t);
/* bcopy has (src, dest) argument order; everything else (dest, src).  */
4236 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4237 == BUILT_IN_BCOPY ? 1 : 0));
4238 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4239 == BUILT_IN_BCOPY ? 0 : 1));
4240 if (res != NULL_TREE)
/* The result aliases dest (for the *p*cpy variants possibly at an
   offset, hence the ptr_offset form).  */
4242 get_constraint_for (res, &lhsc);
4243 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4244 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4245 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4246 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4247 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4248 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4249 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4250 else
4251 get_constraint_for (dest, &rhsc);
4252 process_all_all_constraints (lhsc, rhsc);
4253 lhsc.truncate (0);
4254 rhsc.truncate (0);
/* *dest = *src for the copied pointers.  */
4256 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4257 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4258 do_deref (&lhsc);
4259 do_deref (&rhsc);
4260 process_all_all_constraints (lhsc, rhsc);
4261 return true;
4263 case BUILT_IN_MEMSET:
4264 case BUILT_IN_MEMSET_CHK:
4265 case BUILT_IN_TM_MEMSET:
4267 tree res = gimple_call_lhs (t);
4268 tree dest = gimple_call_arg (t, 0);
4269 unsigned i;
4270 ce_s *lhsp;
4271 struct constraint_expr ac;
4272 if (res != NULL_TREE)
4274 get_constraint_for (res, &lhsc);
4275 get_constraint_for (dest, &rhsc);
4276 process_all_all_constraints (lhsc, rhsc);
4277 lhsc.truncate (0);
4279 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4280 do_deref (&lhsc);
/* memset with zero stores NULL pointers (when NULL-pointer checks
   may be deleted); any other fill byte is an integer pattern.  */
4281 if (flag_delete_null_pointer_checks
4282 && integer_zerop (gimple_call_arg (t, 1)))
4284 ac.type = ADDRESSOF;
4285 ac.var = nothing_id;
4287 else
4289 ac.type = SCALAR;
4290 ac.var = integer_id;
4292 ac.offset = 0;
4293 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4294 process_constraint (new_constraint (*lhsp, ac));
4295 return true;
4297 case BUILT_IN_POSIX_MEMALIGN:
4299 tree ptrptr = gimple_call_arg (t, 0);
4300 get_constraint_for (ptrptr, &lhsc);
4301 do_deref (&lhsc);
4302 varinfo_t vi = make_heapvar ("HEAP", true);
4303 /* We are marking allocated storage local, we deal with it becoming
4304 global by escaping and setting of vars_contains_escaped_heap. */
4305 DECL_EXTERNAL (vi->decl) = 0;
4306 vi->is_global_var = 0;
4307 struct constraint_expr tmpc;
4308 tmpc.var = vi->id;
4309 tmpc.offset = 0;
4310 tmpc.type = ADDRESSOF;
4311 rhsc.safe_push (tmpc);
4312 process_all_all_constraints (lhsc, rhsc);
4313 return true;
4315 case BUILT_IN_ASSUME_ALIGNED:
4317 tree res = gimple_call_lhs (t);
4318 tree dest = gimple_call_arg (t, 0);
4319 if (res != NULL_TREE)
4321 get_constraint_for (res, &lhsc);
4322 get_constraint_for (dest, &rhsc);
4323 process_all_all_constraints (lhsc, rhsc);
4325 return true;
4327 /* All the following functions do not return pointers, do not
4328 modify the points-to sets of memory reachable from their
4329 arguments and do not add to the ESCAPED solution. */
4330 case BUILT_IN_SINCOS:
4331 case BUILT_IN_SINCOSF:
4332 case BUILT_IN_SINCOSL:
4333 case BUILT_IN_FREXP:
4334 case BUILT_IN_FREXPF:
4335 case BUILT_IN_FREXPL:
4336 case BUILT_IN_GAMMA_R:
4337 case BUILT_IN_GAMMAF_R:
4338 case BUILT_IN_GAMMAL_R:
4339 case BUILT_IN_LGAMMA_R:
4340 case BUILT_IN_LGAMMAF_R:
4341 case BUILT_IN_LGAMMAL_R:
4342 case BUILT_IN_MODF:
4343 case BUILT_IN_MODFF:
4344 case BUILT_IN_MODFL:
4345 case BUILT_IN_REMQUO:
4346 case BUILT_IN_REMQUOF:
4347 case BUILT_IN_REMQUOL:
4348 case BUILT_IN_FREE:
4349 return true;
4350 case BUILT_IN_STRDUP:
4351 case BUILT_IN_STRNDUP:
4352 case BUILT_IN_REALLOC:
4353 if (gimple_call_lhs (t))
/* Model the result as fresh heap storage whose pointed-to memory
   copies what the source argument's pointed-to memory holds.  */
4355 handle_lhs_call (t, gimple_call_lhs (t),
4356 gimple_call_return_flags (t) | ERF_NOALIAS,
4357 vNULL, fndecl);
4358 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4359 NULL_TREE, &lhsc);
4360 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4361 NULL_TREE, &rhsc);
4362 do_deref (&lhsc);
4363 do_deref (&rhsc);
4364 process_all_all_constraints (lhsc, rhsc);
4365 lhsc.truncate (0);
4366 rhsc.truncate (0);
4367 /* For realloc the resulting pointer can be equal to the
4368 argument as well. But only doing this wouldn't be
4369 correct because with ptr == 0 realloc behaves like malloc. */
4370 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
4372 get_constraint_for (gimple_call_lhs (t), &lhsc);
4373 get_constraint_for (gimple_call_arg (t, 0), &rhsc);
4374 process_all_all_constraints (lhsc, rhsc);
4376 return true;
4378 break;
4379 /* String / character search functions return a pointer into the
4380 source string or NULL. */
4381 case BUILT_IN_INDEX:
4382 case BUILT_IN_STRCHR:
4383 case BUILT_IN_STRRCHR:
4384 case BUILT_IN_MEMCHR:
4385 case BUILT_IN_STRSTR:
4386 case BUILT_IN_STRPBRK:
4387 if (gimple_call_lhs (t))
4389 tree src = gimple_call_arg (t, 0);
4390 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4391 constraint_expr nul;
4392 nul.var = nothing_id;
4393 nul.offset = 0;
4394 nul.type = ADDRESSOF;
4395 rhsc.safe_push (nul);
4396 get_constraint_for (gimple_call_lhs (t), &lhsc);
4397 process_all_all_constraints (lhsc, rhsc);
4399 return true;
4400 /* Trampolines are special - they set up passing the static
4401 frame. */
4402 case BUILT_IN_INIT_TRAMPOLINE:
4404 tree tramp = gimple_call_arg (t, 0);
4405 tree nfunc = gimple_call_arg (t, 1);
4406 tree frame = gimple_call_arg (t, 2);
4407 unsigned i;
4408 struct constraint_expr lhs, *rhsp;
4409 if (in_ipa_mode)
/* In IPA mode, bind the frame to the nested function's static
   chain part and make the trampoline storage point to it.  */
4411 varinfo_t nfi = NULL;
4412 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4413 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4414 if (nfi)
4416 lhs = get_function_part_constraint (nfi, fi_static_chain);
4417 get_constraint_for (frame, &rhsc);
4418 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4419 process_constraint (new_constraint (lhs, *rhsp));
4420 rhsc.truncate (0);
4422 /* Make the frame point to the function for
4423 the trampoline adjustment call. */
4424 get_constraint_for (tramp, &lhsc);
4425 do_deref (&lhsc);
4426 get_constraint_for (nfunc, &rhsc);
4427 process_all_all_constraints (lhsc, rhsc);
4429 return true;
4432 /* Else fallthru to generic handling which will let
4433 the frame escape. */
4434 break;
4436 case BUILT_IN_ADJUST_TRAMPOLINE:
4438 tree tramp = gimple_call_arg (t, 0);
4439 tree res = gimple_call_lhs (t);
4440 if (in_ipa_mode && res)
4442 get_constraint_for (res, &lhsc);
4443 get_constraint_for (tramp, &rhsc);
4444 do_deref (&rhsc);
4445 process_all_all_constraints (lhsc, rhsc);
4447 return true;
4449 CASE_BUILT_IN_TM_STORE (1):
4450 CASE_BUILT_IN_TM_STORE (2):
4451 CASE_BUILT_IN_TM_STORE (4):
4452 CASE_BUILT_IN_TM_STORE (8):
4453 CASE_BUILT_IN_TM_STORE (FLOAT):
4454 CASE_BUILT_IN_TM_STORE (DOUBLE):
4455 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4456 CASE_BUILT_IN_TM_STORE (M64):
4457 CASE_BUILT_IN_TM_STORE (M128):
4458 CASE_BUILT_IN_TM_STORE (M256):
/* Transactional store: *addr = src.  */
4460 tree addr = gimple_call_arg (t, 0);
4461 tree src = gimple_call_arg (t, 1);
4463 get_constraint_for (addr, &lhsc);
4464 do_deref (&lhsc);
4465 get_constraint_for (src, &rhsc);
4466 process_all_all_constraints (lhsc, rhsc);
4467 return true;
4469 CASE_BUILT_IN_TM_LOAD (1):
4470 CASE_BUILT_IN_TM_LOAD (2):
4471 CASE_BUILT_IN_TM_LOAD (4):
4472 CASE_BUILT_IN_TM_LOAD (8):
4473 CASE_BUILT_IN_TM_LOAD (FLOAT):
4474 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4475 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4476 CASE_BUILT_IN_TM_LOAD (M64):
4477 CASE_BUILT_IN_TM_LOAD (M128):
4478 CASE_BUILT_IN_TM_LOAD (M256):
/* Transactional load: dest = *addr.  */
4480 tree dest = gimple_call_lhs (t);
4481 tree addr = gimple_call_arg (t, 0);
4483 get_constraint_for (dest, &lhsc);
4484 get_constraint_for (addr, &rhsc);
4485 do_deref (&rhsc);
4486 process_all_all_constraints (lhsc, rhsc);
4487 return true;
4489 /* Variadic argument handling needs to be handled in IPA
4490 mode as well. */
4491 case BUILT_IN_VA_START:
4493 tree valist = gimple_call_arg (t, 0);
4494 struct constraint_expr rhs, *lhsp;
4495 unsigned i;
4496 get_constraint_for_ptr_offset (valist, NULL_TREE, &lhsc);
4497 do_deref (&lhsc);
4498 /* The va_list gets access to pointers in variadic
4499 arguments. Which we know in the case of IPA analysis
4500 and otherwise are just all nonlocal variables. */
4501 if (in_ipa_mode)
4503 fi = lookup_vi_for_tree (fn->decl);
4504 rhs = get_function_part_constraint (fi, ~0);
4505 rhs.type = ADDRESSOF;
4507 else
4509 rhs.var = nonlocal_id;
4510 rhs.type = ADDRESSOF;
4511 rhs.offset = 0;
4513 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4514 process_constraint (new_constraint (*lhsp, rhs));
4515 /* va_list is clobbered. */
4516 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4517 return true;
4519 /* va_end doesn't have any effect that matters. */
4520 case BUILT_IN_VA_END:
4521 return true;
4522 /* Alternate return. Simply give up for now. */
4523 case BUILT_IN_RETURN:
4525 fi = NULL;
4526 if (!in_ipa_mode
4527 || !(fi = get_vi_for_tree (fn->decl)))
4528 make_constraint_from (get_varinfo (escaped_id), anything_id);
4529 else if (in_ipa_mode
4530 && fi != NULL)
4532 struct constraint_expr lhs, rhs;
4533 lhs = get_function_part_constraint (fi, fi_result);
4534 rhs.var = anything_id;
4535 rhs.offset = 0;
4536 rhs.type = SCALAR;
4537 process_constraint (new_constraint (lhs, rhs));
4539 return true;
4541 case BUILT_IN_GOMP_PARALLEL:
4542 case BUILT_IN_GOACC_PARALLEL:
4544 if (in_ipa_mode)
4546 unsigned int fnpos, argpos;
4547 switch (DECL_FUNCTION_CODE (fndecl))
4549 case BUILT_IN_GOMP_PARALLEL:
4550 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
4551 fnpos = 0;
4552 argpos = 1;
4553 break;
4554 case BUILT_IN_GOACC_PARALLEL:
4555 /* __builtin_GOACC_parallel (device, fn, mapnum, hostaddrs,
4556 sizes, kinds, ...). */
4557 fnpos = 1;
4558 argpos = 3;
4559 break;
4560 default:
4561 gcc_unreachable ();
4564 tree fnarg = gimple_call_arg (t, fnpos);
4565 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
4566 tree fndecl = TREE_OPERAND (fnarg, 0);
4567 if (fndecl_maybe_in_other_partition (fndecl))
4568 /* Fallthru to general call handling. */
4569 break;
4571 tree arg = gimple_call_arg (t, argpos);
/* Model the outlined region call as passing ARG as the first
   parameter of the outlined function.  */
4573 varinfo_t fi = get_vi_for_tree (fndecl);
4574 find_func_aliases_for_call_arg (fi, 0, arg);
4575 return true;
4577 /* Else fallthru to generic call handling. */
4578 break;
4580 /* printf-style functions may have hooks to set pointers to
4581 point to somewhere into the generated string. Leave them
4582 for a later exercise... */
4583 default:
4584 /* Fallthru to general call handling. */;
4587 return false;
4590 /* Create constraints for the call T. */
4592 static void
4593 find_func_aliases_for_call (struct function *fn, gcall *t)
4595 tree fndecl = gimple_call_fndecl (t);
4596 varinfo_t fi;
/* Builtins with dedicated modeling are handled first; if the helper
   generated all needed constraints there is nothing left to do. */
4598 if (fndecl != NULL_TREE
4599 && DECL_BUILT_IN (fndecl)
4600 && find_func_aliases_for_builtin_call (fn, t))
4601 return;
4603 fi = get_fi_for_callee (t);
/* Non-IPA mode, or a direct call to a function we have no function
   info for (e.g. an external declaration): model the call through its
   ECF flags only. */
4604 if (!in_ipa_mode
4605 || (fndecl && !fi->is_fn_info))
4607 auto_vec<ce_s, 16> rhsc;
4608 int flags = gimple_call_flags (t);
4610 /* Const functions can return their arguments and addresses
4611 of global memory but not of escaped memory. */
4612 if (flags & (ECF_CONST|ECF_NOVOPS))
4614 if (gimple_call_lhs (t))
4615 handle_const_call (t, &rhsc);
4617 /* Pure functions can return addresses in and of memory
4618 reachable from their arguments, but they are not an escape
4619 point for reachable memory of their arguments. */
4620 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4621 handle_pure_call (t, &rhsc);
4622 else
4623 handle_rhs_call (t, &rhsc);
4624 if (gimple_call_lhs (t))
4625 handle_lhs_call (t, gimple_call_lhs (t),
4626 gimple_call_return_flags (t), rhsc, fndecl);
/* IPA mode with function info available: wire arguments, return value
   and static chain through the callee's function-info sub-variables. */
4628 else
4630 auto_vec<ce_s, 2> rhsc;
4631 tree lhsop;
4632 unsigned j;
4634 /* Assign all the passed arguments to the appropriate incoming
4635 parameters of the function. */
4636 for (j = 0; j < gimple_call_num_args (t); j++)
4638 tree arg = gimple_call_arg (t, j);
4639 find_func_aliases_for_call_arg (fi, j, arg);
4642 /* If we are returning a value, assign it to the result. */
4643 lhsop = gimple_call_lhs (t);
4644 if (lhsop)
4646 auto_vec<ce_s, 2> lhsc;
4647 struct constraint_expr rhs;
4648 struct constraint_expr *lhsp;
4649 bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (t));
4651 get_constraint_for (lhsop, &lhsc);
4652 rhs = get_function_part_constraint (fi, fi_result);
/* For an aggregate return the result slot holds a pointer to the
   value, so dereference it once. */
4653 if (aggr_p)
4655 auto_vec<ce_s, 2> tem;
4656 tem.quick_push (rhs);
4657 do_deref (&tem);
4658 gcc_checking_assert (tem.length () == 1);
4659 rhs = tem[0];
4661 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4662 process_constraint (new_constraint (*lhsp, rhs));
4664 /* If we pass the result decl by reference, honor that. */
4665 if (aggr_p)
4667 struct constraint_expr lhs;
4668 struct constraint_expr *rhsp;
4670 get_constraint_for_address_of (lhsop, &rhsc);
4671 lhs = get_function_part_constraint (fi, fi_result);
4672 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4673 process_constraint (new_constraint (lhs, *rhsp));
4674 rhsc.truncate (0);
4678 /* If we use a static chain, pass it along. */
4679 if (gimple_call_chain (t))
4681 struct constraint_expr lhs;
4682 struct constraint_expr *rhsp;
4684 get_constraint_for (gimple_call_chain (t), &rhsc);
4685 lhs = get_function_part_constraint (fi, fi_static_chain);
4686 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4687 process_constraint (new_constraint (lhs, *rhsp));
4692 /* Walk statement T setting up aliasing constraints according to the
4693 references found in T. This function is the main part of the
4694 constraint builder. AI points to auxiliary alias information used
4695 when building alias sets and computing alias grouping heuristics. */
4697 static void
4698 find_func_aliases (struct function *fn, gimple *origt)
4700 gimple *t = origt;
4701 auto_vec<ce_s, 16> lhsc;
4702 auto_vec<ce_s, 16> rhsc;
4703 struct constraint_expr *c;
4704 varinfo_t fi;
4706 /* Now build constraints expressions. */
4707 if (gimple_code (t) == GIMPLE_PHI)
4709 size_t i;
4710 unsigned int j;
4712 /* For a phi node, assign all the arguments to
4713 the result. */
4714 get_constraint_for (gimple_phi_result (t), &lhsc);
4715 for (i = 0; i < gimple_phi_num_args (t); i++)
4717 tree strippedrhs = PHI_ARG_DEF (t, i);
4719 STRIP_NOPS (strippedrhs);
4720 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
4722 FOR_EACH_VEC_ELT (lhsc, j, c)
4724 struct constraint_expr *c2;
/* Drain rhsc so the next phi argument starts from an empty vector. */
4725 while (rhsc.length () > 0)
4727 c2 = &rhsc.last ();
4728 process_constraint (new_constraint (*c, *c2));
4729 rhsc.pop ();
4734 /* In IPA mode, we need to generate constraints to pass call
4735 arguments through their calls. There are two cases,
4736 either a GIMPLE_CALL returning a value, or just a plain
4737 GIMPLE_CALL when we are not.
4739 In non-ipa mode, we need to generate constraints for each
4740 pointer passed by address. */
4741 else if (is_gimple_call (t))
4742 find_func_aliases_for_call (fn, as_a <gcall *> (t));
4744 /* Otherwise, just a regular assignment statement. Only care about
4745 operations with pointer result, others are dealt with as escape
4746 points if they have pointer operands. */
4747 else if (is_gimple_assign (t))
4749 /* Otherwise, just a regular assignment statement. */
4750 tree lhsop = gimple_assign_lhs (t);
4751 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4753 if (rhsop && TREE_CLOBBER_P (rhsop))
4754 /* Ignore clobbers, they don't actually store anything into
4755 the LHS. */
4757 else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4758 do_structure_copy (lhsop, rhsop);
4759 else
4761 enum tree_code code = gimple_assign_rhs_code (t);
4763 get_constraint_for (lhsop, &lhsc);
4765 if (code == POINTER_PLUS_EXPR)
4766 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4767 gimple_assign_rhs2 (t), &rhsc);
4768 else if (code == BIT_AND_EXPR
4769 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
4771 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4772 the pointer. Handle it by offsetting it by UNKNOWN. */
4773 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4774 NULL_TREE, &rhsc);
/* Conversions keeping (or producing) pointer-ness, and plain copies,
   simply propagate the RHS constraints. */
4776 else if ((CONVERT_EXPR_CODE_P (code)
4777 && !(POINTER_TYPE_P (gimple_expr_type (t))
4778 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4779 || gimple_assign_single_p (t))
4780 get_constraint_for_rhs (rhsop, &rhsc);
4781 else if (code == COND_EXPR)
4783 /* The result is a merge of both COND_EXPR arms. */
4784 auto_vec<ce_s, 2> tmp;
4785 struct constraint_expr *rhsp;
4786 unsigned i;
4787 get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
4788 get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
4789 FOR_EACH_VEC_ELT (tmp, i, rhsp)
4790 rhsc.safe_push (*rhsp);
4792 else if (truth_value_p (code))
4793 /* Truth value results are not pointer (parts). Or at least
4794 very unreasonable obfuscation of a part. */
4796 else
4798 /* All other operations are merges. */
4799 auto_vec<ce_s, 4> tmp;
4800 struct constraint_expr *rhsp;
4801 unsigned i, j;
4802 get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
4803 for (i = 2; i < gimple_num_ops (t); ++i)
4805 get_constraint_for_rhs (gimple_op (t, i), &tmp);
4806 FOR_EACH_VEC_ELT (tmp, j, rhsp)
4807 rhsc.safe_push (*rhsp);
4808 tmp.truncate (0);
4811 process_all_all_constraints (lhsc, rhsc);
4813 /* If there is a store to a global variable the rhs escapes. */
4814 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4815 && DECL_P (lhsop))
4817 varinfo_t vi = get_vi_for_tree (lhsop);
4818 if ((! in_ipa_mode && vi->is_global_var)
4819 || vi->is_ipa_escape_point)
4820 make_escape_constraint (rhsop);
4823 /* Handle escapes through return. */
4824 else if (gimple_code (t) == GIMPLE_RETURN
4825 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
4827 greturn *return_stmt = as_a <greturn *> (t);
4828 fi = NULL;
/* Without IPA function info the returned value escapes; with it, the
   value flows into the function's result sub-variable instead. */
4829 if (!in_ipa_mode
4830 || !(fi = get_vi_for_tree (fn->decl)))
4831 make_escape_constraint (gimple_return_retval (return_stmt));
4832 else if (in_ipa_mode)
4834 struct constraint_expr lhs ;
4835 struct constraint_expr *rhsp;
4836 unsigned i;
4838 lhs = get_function_part_constraint (fi, fi_result);
4839 get_constraint_for_rhs (gimple_return_retval (return_stmt), &rhsc);
4840 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4841 process_constraint (new_constraint (lhs, *rhsp));
4844 /* Handle asms conservatively by adding escape constraints to everything. */
4845 else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
4847 unsigned i, noutputs;
4848 const char **oconstraints;
4849 const char *constraint;
4850 bool allows_mem, allows_reg, is_inout;
4852 noutputs = gimple_asm_noutputs (asm_stmt);
4853 oconstraints = XALLOCAVEC (const char *, noutputs);
4855 for (i = 0; i < noutputs; ++i)
4857 tree link = gimple_asm_output_op (asm_stmt, i);
4858 tree op = TREE_VALUE (link);
4860 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4861 oconstraints[i] = constraint;
4862 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4863 &allows_reg, &is_inout);
4865 /* A memory constraint makes the address of the operand escape. */
4866 if (!allows_reg && allows_mem)
4867 make_escape_constraint (build_fold_addr_expr (op));
4869 /* The asm may read global memory, so outputs may point to
4870 any global memory. */
4871 if (op)
4873 auto_vec<ce_s, 2> lhsc;
4874 struct constraint_expr rhsc, *lhsp;
4875 unsigned j;
4876 get_constraint_for (op, &lhsc);
4877 rhsc.var = nonlocal_id;
4878 rhsc.offset = 0;
4879 rhsc.type = SCALAR;
4880 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4881 process_constraint (new_constraint (*lhsp, rhsc));
4884 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4886 tree link = gimple_asm_input_op (asm_stmt, i);
4887 tree op = TREE_VALUE (link);
4889 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4891 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
4892 &allows_mem, &allows_reg);
4894 /* A memory constraint makes the address of the operand escape. */
4895 if (!allows_reg && allows_mem)
4896 make_escape_constraint (build_fold_addr_expr (op));
4897 /* Strictly we'd only need the constraint to ESCAPED if
4898 the asm clobbers memory, otherwise using something
4899 along the lines of per-call clobbers/uses would be enough. */
4900 else if (op)
4901 make_escape_constraint (op);
4907 /* Create a constraint adding to the clobber set of FI the memory
4908 pointed to by PTR. */
4910 static void
4911 process_ipa_clobber (varinfo_t fi, tree ptr)
4913 vec<ce_s> ptrc = vNULL;
4914 struct constraint_expr *c, lhs;
4915 unsigned i;
4916 get_constraint_for_rhs (ptr, &ptrc);
4917 lhs = get_function_part_constraint (fi, fi_clobbers);
4918 FOR_EACH_VEC_ELT (ptrc, i, c)
4919 process_constraint (new_constraint (lhs, *c));
4920 ptrc.release ();
4923 /* Walk statement T setting up clobber and use constraints according to the
4924 references found in T. This function is a main part of the
4925 IPA constraint builder. */
4927 static void
4928 find_func_clobbers (struct function *fn, gimple *origt)
4930 gimple *t = origt;
4931 auto_vec<ce_s, 16> lhsc;
4932 auto_vec<ce_s, 16> rhsc;
4933 varinfo_t fi;
4935 /* Add constraints for clobbered/used in IPA mode.
4936 We are not interested in what automatic variables are clobbered
4937 or used as we only use the information in the caller to which
4938 they do not escape. */
4939 gcc_assert (in_ipa_mode);
4941 /* If the stmt refers to memory in any way it better had a VUSE. */
4942 if (gimple_vuse (t) == NULL_TREE)
4943 return;
4945 /* We'd better have function information for the current function. */
4946 fi = lookup_vi_for_tree (fn->decl)
4947 gcc_assert (fi != NULL);
4949 /* Account for stores in assignments and calls. */
4950 if (gimple_vdef (t) != NULL_TREE
4951 && gimple_has_lhs (t))
4953 tree lhs = gimple_get_lhs (t);
4954 tree tem = lhs;
/* Strip component references to find the base object of the store. */
4955 while (handled_component_p (tem))
4956 tem = TREE_OPERAND (tem, 0);
/* Only record clobbers of memory not local to this function. */
4957 if ((DECL_P (tem)
4958 && !auto_var_in_fn_p (tem, fn->decl))
4959 || INDIRECT_REF_P (tem)
4960 || (TREE_CODE (tem) == MEM_REF
4961 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4962 && auto_var_in_fn_p
4963 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
4965 struct constraint_expr lhsc, *rhsp;
4966 unsigned i;
4967 lhsc = get_function_part_constraint (fi, fi_clobbers);
4968 get_constraint_for_address_of (lhs, &rhsc);
4969 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4970 process_constraint (new_constraint (lhsc, *rhsp));
4971 rhsc.truncate (0);
4975 /* Account for uses in assigments and returns. */
4976 if (gimple_assign_single_p (t)
4977 || (gimple_code (t) == GIMPLE_RETURN
4978 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
4980 tree rhs = (gimple_assign_single_p (t)
4981 ? gimple_assign_rhs1 (t)
4982 : gimple_return_retval (as_a <greturn *> (t)));
4983 tree tem = rhs;
4984 while (handled_component_p (tem))
4985 tem = TREE_OPERAND (tem, 0);
/* Mirror image of the store case above: record uses of non-local
   memory in the function's fi_uses sub-variable. */
4986 if ((DECL_P (tem)
4987 && !auto_var_in_fn_p (tem, fn->decl))
4988 || INDIRECT_REF_P (tem)
4989 || (TREE_CODE (tem) == MEM_REF
4990 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4991 && auto_var_in_fn_p
4992 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
4994 struct constraint_expr lhs, *rhsp;
4995 unsigned i;
4996 lhs = get_function_part_constraint (fi, fi_uses);
4997 get_constraint_for_address_of (rhs, &rhsc);
4998 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4999 process_constraint (new_constraint (lhs, *rhsp));
5000 rhsc.truncate (0);
5004 if (gcall *call_stmt = dyn_cast <gcall *> (t))
5006 varinfo_t cfi = NULL;
5007 tree decl = gimple_call_fndecl (t);
5008 struct constraint_expr lhs, rhs;
5009 unsigned i, j;
5011 /* For builtins we do not have separate function info. For those
5012 we do not generate escapes for we have to generate clobbers/uses. */
5013 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
5014 switch (DECL_FUNCTION_CODE (decl))
5016 /* The following functions use and clobber memory pointed to
5017 by their arguments. */
5018 case BUILT_IN_STRCPY:
5019 case BUILT_IN_STRNCPY:
5020 case BUILT_IN_BCOPY:
5021 case BUILT_IN_MEMCPY:
5022 case BUILT_IN_MEMMOVE:
5023 case BUILT_IN_MEMPCPY:
5024 case BUILT_IN_STPCPY:
5025 case BUILT_IN_STPNCPY:
5026 case BUILT_IN_STRCAT:
5027 case BUILT_IN_STRNCAT:
5028 case BUILT_IN_STRCPY_CHK:
5029 case BUILT_IN_STRNCPY_CHK:
5030 case BUILT_IN_MEMCPY_CHK:
5031 case BUILT_IN_MEMMOVE_CHK:
5032 case BUILT_IN_MEMPCPY_CHK:
5033 case BUILT_IN_STPCPY_CHK:
5034 case BUILT_IN_STPNCPY_CHK:
5035 case BUILT_IN_STRCAT_CHK:
5036 case BUILT_IN_STRNCAT_CHK:
/* bcopy takes (src, dest) while all the others take (dest, src). */
5038 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5039 == BUILT_IN_BCOPY ? 1 : 0));
5040 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5041 == BUILT_IN_BCOPY ? 0 : 1));
5042 unsigned i;
5043 struct constraint_expr *rhsp, *lhsp;
5044 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5045 lhs = get_function_part_constraint (fi, fi_clobbers);
5046 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5047 process_constraint (new_constraint (lhs, *lhsp));
5048 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
5049 lhs = get_function_part_constraint (fi, fi_uses);
5050 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5051 process_constraint (new_constraint (lhs, *rhsp));
5052 return;
5054 /* The following function clobbers memory pointed to by
5055 its argument. */
5056 case BUILT_IN_MEMSET:
5057 case BUILT_IN_MEMSET_CHK:
5058 case BUILT_IN_POSIX_MEMALIGN:
5060 tree dest = gimple_call_arg (t, 0);
5061 unsigned i;
5062 ce_s *lhsp;
5063 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5064 lhs = get_function_part_constraint (fi, fi_clobbers);
5065 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5066 process_constraint (new_constraint (lhs, *lhsp));
5067 return;
5069 /* The following functions clobber their second and third
5070 arguments. */
5071 case BUILT_IN_SINCOS:
5072 case BUILT_IN_SINCOSF:
5073 case BUILT_IN_SINCOSL:
5075 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5076 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5077 return;
5079 /* The following functions clobber their second argument. */
5080 case BUILT_IN_FREXP:
5081 case BUILT_IN_FREXPF:
5082 case BUILT_IN_FREXPL:
5083 case BUILT_IN_LGAMMA_R:
5084 case BUILT_IN_LGAMMAF_R:
5085 case BUILT_IN_LGAMMAL_R:
5086 case BUILT_IN_GAMMA_R:
5087 case BUILT_IN_GAMMAF_R:
5088 case BUILT_IN_GAMMAL_R:
5089 case BUILT_IN_MODF:
5090 case BUILT_IN_MODFF:
5091 case BUILT_IN_MODFL:
5093 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5094 return;
5096 /* The following functions clobber their third argument. */
5097 case BUILT_IN_REMQUO:
5098 case BUILT_IN_REMQUOF:
5099 case BUILT_IN_REMQUOL:
5101 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5102 return;
5104 /* The following functions neither read nor clobber memory. */
5105 case BUILT_IN_ASSUME_ALIGNED:
5106 case BUILT_IN_FREE:
5107 return;
5108 /* Trampolines are of no interest to us. */
5109 case BUILT_IN_INIT_TRAMPOLINE:
5110 case BUILT_IN_ADJUST_TRAMPOLINE:
5111 return;
5112 case BUILT_IN_VA_START:
5113 case BUILT_IN_VA_END:
5114 return;
5115 case BUILT_IN_GOMP_PARALLEL:
5116 case BUILT_IN_GOACC_PARALLEL:
5118 unsigned int fnpos, argpos;
5119 unsigned int implicit_use_args[2];
5120 unsigned int num_implicit_use_args = 0;
5121 switch (DECL_FUNCTION_CODE (decl))
5123 case BUILT_IN_GOMP_PARALLEL:
5124 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
5125 fnpos = 0;
5126 argpos = 1;
5127 break;
5128 case BUILT_IN_GOACC_PARALLEL:
5129 /* __builtin_GOACC_parallel (device, fn, mapnum, hostaddrs,
5130 sizes, kinds, ...). */
5131 fnpos = 1;
5132 argpos = 3;
5133 implicit_use_args[num_implicit_use_args++] = 4;
5134 implicit_use_args[num_implicit_use_args++] = 5;
5135 break;
5136 default:
5137 gcc_unreachable ();
5140 tree fnarg = gimple_call_arg (t, fnpos);
5141 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
5142 tree fndecl = TREE_OPERAND (fnarg, 0);
5143 if (fndecl_maybe_in_other_partition (fndecl))
5144 /* Fallthru to general call handling. */
5145 break;
5147 varinfo_t cfi = get_vi_for_tree (fndecl);
5149 tree arg = gimple_call_arg (t, argpos);
5151 /* Parameter passed by value is used. */
5152 lhs = get_function_part_constraint (fi, fi_uses);
5153 struct constraint_expr *rhsp;
5154 get_constraint_for (arg, &rhsc);
5155 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5156 process_constraint (new_constraint (lhs, *rhsp));
5157 rhsc.truncate (0);
5159 /* Handle parameters used by the call, but not used in cfi, as
5160 implicitly used by cfi. */
5161 lhs = get_function_part_constraint (cfi, fi_uses);
5162 for (unsigned i = 0; i < num_implicit_use_args; ++i)
5164 tree arg = gimple_call_arg (t, implicit_use_args[i]);
5165 get_constraint_for (arg, &rhsc);
5166 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5167 process_constraint (new_constraint (lhs, *rhsp));
5168 rhsc.truncate (0);
5171 /* The caller clobbers what the callee does. */
5172 lhs = get_function_part_constraint (fi, fi_clobbers);
5173 rhs = get_function_part_constraint (cfi, fi_clobbers);
5174 process_constraint (new_constraint (lhs, rhs));
5176 /* The caller uses what the callee does. */
5177 lhs = get_function_part_constraint (fi, fi_uses);
5178 rhs = get_function_part_constraint (cfi, fi_uses);
5179 process_constraint (new_constraint (lhs, rhs));
5181 return;
5183 /* printf-style functions may have hooks to set pointers to
5184 point to somewhere into the generated string. Leave them
5185 for a later exercise... */
5186 default:
5187 /* Fallthru to general call handling. */;
5190 /* Parameters passed by value are used. */
5191 lhs = get_function_part_constraint (fi, fi_uses);
5192 for (i = 0; i < gimple_call_num_args (t); i++)
5194 struct constraint_expr *rhsp;
5195 tree arg = gimple_call_arg (t, i);
/* SSA names and invariants are not memory; nothing is used through
   them by the call itself. */
5197 if (TREE_CODE (arg) == SSA_NAME
5198 || is_gimple_min_invariant (arg))
5199 continue;
5201 get_constraint_for_address_of (arg, &rhsc);
5202 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5203 process_constraint (new_constraint (lhs, *rhsp));
5204 rhsc.truncate (0);
5207 /* Build constraints for propagating clobbers/uses along the
5208 callgraph edges. */
5209 cfi = get_fi_for_callee (call_stmt);
5210 if (cfi->id == anything_id)
5212 if (gimple_vdef (t))
5213 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5214 anything_id);
5215 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5216 anything_id);
5217 return;
5220 /* For callees without function info (that's external functions),
5221 ESCAPED is clobbered and used. */
5222 if (gimple_call_fndecl (t)
5223 && !cfi->is_fn_info)
5225 varinfo_t vi;
5227 if (gimple_vdef (t))
5228 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5229 escaped_id);
5230 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
5232 /* Also honor the call statement use/clobber info. */
5233 if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
5234 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5235 vi->id);
5236 if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
5237 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
5238 vi->id);
5239 return;
5242 /* Otherwise the caller clobbers and uses what the callee does.
5243 ??? This should use a new complex constraint that filters
5244 local variables of the callee. */
5245 if (gimple_vdef (t))
5247 lhs = get_function_part_constraint (fi, fi_clobbers);
5248 rhs = get_function_part_constraint (cfi, fi_clobbers);
5249 process_constraint (new_constraint (lhs, rhs));
5251 lhs = get_function_part_constraint (fi, fi_uses);
5252 rhs = get_function_part_constraint (cfi, fi_uses);
5253 process_constraint (new_constraint (lhs, rhs));
5255 else if (gimple_code (t) == GIMPLE_ASM)
5257 /* ??? Ick. We can do better. */
5258 if (gimple_vdef (t))
5259 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5260 anything_id);
5261 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5262 anything_id);
5267 /* Find the first varinfo in the same variable as START that overlaps with
5268 OFFSET. Return NULL if we can't find one. */
5270 static varinfo_t
5271 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5273 /* If the offset is outside of the variable, bail out. */
5274 if (offset >= start->fullsize)
5275 return NULL;
5277 /* If we cannot reach offset from start, lookup the first field
5278 and start from there. */
5279 if (start->offset > offset)
5280 start = get_varinfo (start->head);
5282 while (start)
5284 /* We may not find a variable in the field list with the actual
5285 offset when we have glommed a structure to a variable.
5286 In that case, however, offset should still be within the size
5287 of the variable. */
5288 if (offset >= start->offset
5289 && (offset - start->offset) < start->size)
5290 return start;
5292 start = vi_next (start);
5295 return NULL;
5298 /* Find the first varinfo in the same variable as START that overlaps with
5299 OFFSET. If there is no such varinfo the varinfo directly preceding
5300 OFFSET is returned. */
5302 static varinfo_t
5303 first_or_preceding_vi_for_offset (varinfo_t start,
5304 unsigned HOST_WIDE_INT offset)
5306 /* If we cannot reach offset from start, lookup the first field
5307 and start from there. */
5308 if (start->offset > offset)
5309 start = get_varinfo (start->head);
5311 /* We may not find a variable in the field list with the actual
5312 offset when we have glommed a structure to a variable.
5313 In that case, however, offset should still be within the size
5314 of the variable.
5315 If we got beyond the offset we look for return the field
5316 directly preceding offset which may be the last field. */
5317 while (start->next
5318 && offset >= start->offset
5319 && !((offset - start->offset) < start->size))
5320 start = vi_next (start);
5322 return start;
5326 /* This structure is used during pushing fields onto the fieldstack
5327 to track the offset of the field, since bitpos_of_field gives it
5328 relative to its immediate containing type, and we want it relative
5329 to the ultimate containing object. */
5331 struct fieldoff
5333 /* Offset from the base of the base containing object to this field. */
5334 HOST_WIDE_INT offset;
5336 /* Size, in bits, of the field. */
5337 unsigned HOST_WIDE_INT size;
/* True if DECL_SIZE of the field is unknown or does not fit a uhwi;
   in that case SIZE is set to -1. */
5339 unsigned has_unknown_size : 1;
/* True if the field's type requires it to contain pointers. */
5341 unsigned must_have_pointers : 1;
/* True if the field may contain pointers. */
5343 unsigned may_have_pointers : 1;
/* True if the field is a restrict-qualified pointer. */
5345 unsigned only_restrict_pointers : 1;
/* For a restrict-qualified pointer field, the pointed-to type. */
5347 tree restrict_pointed_type;
5349 typedef struct fieldoff fieldoff_s;
5352 /* qsort comparison function for two fieldoff's PA and PB */
5354 static int
5355 fieldoff_compare (const void *pa, const void *pb)
5357 const fieldoff_s *foa = (const fieldoff_s *)pa;
5358 const fieldoff_s *fob = (const fieldoff_s *)pb;
5359 unsigned HOST_WIDE_INT foasize, fobsize;
5361 if (foa->offset < fob->offset)
5362 return -1;
5363 else if (foa->offset > fob->offset)
5364 return 1;
5366 foasize = foa->size;
5367 fobsize = fob->size;
5368 if (foasize < fobsize)
5369 return -1;
5370 else if (foasize > fobsize)
5371 return 1;
5372 return 0;
5375 /* Sort a fieldstack according to the field offset and sizes. */
5376 static void
/* Sorts FIELDSTACK in place using fieldoff_compare (offset, then size). */
5377 sort_fieldstack (vec<fieldoff_s> fieldstack)
5379 fieldstack.qsort (fieldoff_compare);
5382 /* Return true if T is a type that can have subvars. */
5384 static inline bool
5385 type_can_have_subvars (const_tree t)
5387 /* Aggregates without overlapping fields can have subvars.
   Unions and QUAL_UNION_TYPEs have overlapping members, so only
   RECORD_TYPE qualifies here. */
5388 return TREE_CODE (t) == RECORD_TYPE;
5391 /* Return true if V is a tree that we can have subvars for.
5392 Normally, this is any aggregate type. Also complex
5393 types which are not gimple registers can have subvars. */
5395 static inline bool
5396 var_can_have_subvars (const_tree v)
5398 /* Volatile variables should never have subvars. */
5399 if (TREE_THIS_VOLATILE (v))
5400 return false;
5402 /* Non decls or memory tags can never have subvars. */
5403 if (!DECL_P (v))
5404 return false;
5406 return type_can_have_subvars (TREE_TYPE (v));
5409 /* Return true if T is a type that does contain pointers. */
5411 static bool
5412 type_must_have_pointers (tree type)
5414 if (POINTER_TYPE_P (type))
5415 return true;
5417 if (TREE_CODE (type) == ARRAY_TYPE)
5418 return type_must_have_pointers (TREE_TYPE (type));
5420 /* A function or method can have pointers as arguments, so track
5421 those separately. */
5422 if (TREE_CODE (type) == FUNCTION_TYPE
5423 || TREE_CODE (type) == METHOD_TYPE)
5424 return true;
5426 return false;
5429 static bool
/* Return true if the type of field T must contain pointers. */
5430 field_must_have_pointers (tree t)
5432 return type_must_have_pointers (TREE_TYPE (t));
5435 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5436 the fields of TYPE onto fieldstack, recording their offsets along
5437 the way.
5439 OFFSET is used to keep track of the offset in this entire
5440 structure, rather than just the immediately containing structure.
5441 Returns false if the caller is supposed to handle the field we
5442 recursed for. */
5444 static bool
5445 push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
5446 HOST_WIDE_INT offset)
5448 tree field;
5449 bool empty_p = true;
5451 if (TREE_CODE (type) != RECORD_TYPE)
5452 return false;
5454 /* If the vector of fields is growing too big, bail out early.
5455 Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
5456 sure this fails. */
5457 if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5458 return false;
5460 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5461 if (TREE_CODE (field) == FIELD_DECL)
5463 bool push = false;
5464 HOST_WIDE_INT foff = bitpos_of_field (field);
5465 tree field_type = TREE_TYPE (field);
/* Fields we cannot (or should not) split further are pushed as a
   single entry; otherwise recurse into the field's record type. */
5467 if (!var_can_have_subvars (field)
5468 || TREE_CODE (field_type) == QUAL_UNION_TYPE
5469 || TREE_CODE (field_type) == UNION_TYPE)
5470 push = true;
5471 else if (!push_fields_onto_fieldstack
5472 (field_type, fieldstack, offset + foff)
5473 && (DECL_SIZE (field)
5474 && !integer_zerop (DECL_SIZE (field))))
5475 /* Empty structures may have actual size, like in C++. So
5476 see if we didn't push any subfields and the size is
5477 nonzero, push the field onto the stack. */
5478 push = true;
5480 if (push)
5482 fieldoff_s *pair = NULL;
5483 bool has_unknown_size = false;
5484 bool must_have_pointers_p;
5486 if (!fieldstack->is_empty ())
5487 pair = &fieldstack->last ();
5489 /* If there isn't anything at offset zero, create sth. */
5490 if (!pair
5491 && offset + foff != 0)
5493 fieldoff_s e
5494 = {0, offset + foff, false, false, false, false, NULL_TREE};
5495 pair = fieldstack->safe_push (e);
5498 if (!DECL_SIZE (field)
5499 || !tree_fits_uhwi_p (DECL_SIZE (field)))
5500 has_unknown_size = true;
5502 /* If adjacent fields do not contain pointers merge them. */
5503 must_have_pointers_p = field_must_have_pointers (field);
5504 if (pair
5505 && !has_unknown_size
5506 && !must_have_pointers_p
5507 && !pair->must_have_pointers
5508 && !pair->has_unknown_size
5509 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5511 pair->size += tree_to_uhwi (DECL_SIZE (field));
5513 else
5515 fieldoff_s e;
5516 e.offset = offset + foff;
5517 e.has_unknown_size = has_unknown_size;
5518 if (!has_unknown_size)
5519 e.size = tree_to_uhwi (DECL_SIZE (field));
5520 else
5521 e.size = -1;
5522 e.must_have_pointers = must_have_pointers_p;
5523 e.may_have_pointers = true;
5524 e.only_restrict_pointers
5525 = (!has_unknown_size
5526 && POINTER_TYPE_P (field_type)
5527 && TYPE_RESTRICT (field_type));
5528 if (e.only_restrict_pointers)
5529 e.restrict_pointed_type = TREE_TYPE (field_type);
5530 fieldstack->safe_push (e);
/* A field was handled (pushed here or via recursion), so TYPE is
   not empty. */
5534 empty_p = false;
5537 return !empty_p;
5540 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5541 if it is a varargs function. */
5543 static unsigned int
5544 count_num_arguments (tree decl, bool *is_varargs)
5546 unsigned int num = 0;
5547 tree t;
5549 /* Capture named arguments for K&R functions. They do not
5550 have a prototype and thus no TYPE_ARG_TYPES. */
5551 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5552 ++num;
5554 /* Check if the function has variadic arguments. */
5555 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5556 if (TREE_VALUE (t) == void_type_node)
5557 break;
5558 if (!t)
5559 *is_varargs = true;
5561 return num;
/* Creation function node for DECL, using NAME, and return the index
   of the variable we've created for the function.  If NONLOCAL_P, create
   initial constraints.

   A function info is a chain of sub-variables laid out at fixed
   offsets: clobbers (fi_clobbers), uses (fi_uses), the static chain
   (fi_static_chain), the result (fi_result) and one variable per
   parameter starting at fi_parm_base, plus one extra representative
   for varargs.  The chain is linked via ->next in increasing offset
   order; the gcc_asserts below verify that ordering invariant.  */

static varinfo_t
create_function_info_for (tree decl, const char *name, bool add_id,
			  bool nonlocal_p)
{
  struct function *fn = DECL_STRUCT_FUNCTION (decl);
  varinfo_t vi, prev_vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;
  unsigned int num_args = count_num_arguments (decl, &is_varargs);

  /* Create the variable info.  */

  vi = new_var_info (decl, name, add_id);
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = fi_parm_base + num_args;
  vi->is_fn_info = 1;
  vi->may_have_pointers = false;
  /* For varargs the number of sub-variables is unbounded.  */
  if (is_varargs)
    vi->fullsize = ~0;
  insert_vi_for_tree (vi->decl, vi);

  prev_vi = vi;

  /* Create a variable for things the function clobbers and one for
     things the function uses.  */
    {
      varinfo_t clobbervi, usevi;
      const char *newname;
      char *tempname;

      tempname = xasprintf ("%s.clobber", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      clobbervi = new_var_info (NULL, newname, false);
      clobbervi->offset = fi_clobbers;
      clobbervi->size = 1;
      clobbervi->fullsize = vi->fullsize;
      clobbervi->is_full_var = true;
      clobbervi->is_global_var = false;

      gcc_assert (prev_vi->offset < clobbervi->offset);
      prev_vi->next = clobbervi->id;
      prev_vi = clobbervi;

      tempname = xasprintf ("%s.use", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      usevi = new_var_info (NULL, newname, false);
      usevi->offset = fi_uses;
      usevi->size = 1;
      usevi->fullsize = vi->fullsize;
      usevi->is_full_var = true;
      usevi->is_global_var = false;

      gcc_assert (prev_vi->offset < usevi->offset);
      prev_vi->next = usevi->id;
      prev_vi = usevi;
    }

  /* And one for the static chain.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t chainvi;
      const char *newname;
      char *tempname;

      tempname = xasprintf ("%s.chain", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      chainvi = new_var_info (fn->static_chain_decl, newname, false);
      chainvi->offset = fi_static_chain;
      chainvi->size = 1;
      chainvi->fullsize = vi->fullsize;
      chainvi->is_full_var = true;
      chainvi->is_global_var = false;

      insert_vi_for_tree (fn->static_chain_decl, chainvi);

      /* On function entry the chain may point to anything nonlocal.  */
      if (nonlocal_p
	  && chainvi->may_have_pointers)
	make_constraint_from (chainvi, nonlocal_id);

      gcc_assert (prev_vi->offset < chainvi->offset);
      prev_vi->next = chainvi->id;
      prev_vi = chainvi;
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      tree resultdecl = decl;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      tempname = xasprintf ("%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newname, false);
      resultvi->offset = fi_result;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->is_full_var = true;
      if (DECL_RESULT (decl))
	resultvi->may_have_pointers = true;

      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);

      /* A by-reference result is an incoming pointer and may point
	 to anything nonlocal.  */
      if (nonlocal_p
	  && DECL_RESULT (decl)
	  && DECL_BY_REFERENCE (DECL_RESULT (decl)))
	make_constraint_from (resultvi, nonlocal_id);

      gcc_assert (prev_vi->offset < resultvi->offset);
      prev_vi->next = resultvi->id;
      prev_vi = resultvi;
    }

  /* We also need to make function return values escape.  Nothing
     escapes by returning from main though.  */
  if (nonlocal_p
      && !MAIN_NAME_P (DECL_NAME (decl)))
    {
      varinfo_t fi, rvi;
      fi = lookup_vi_for_tree (decl);
      rvi = first_vi_for_offset (fi, fi_result);
      if (rvi && rvi->offset == fi_result)
	make_copy_constraint (get_varinfo (escaped_id), rvi->id);
    }

  /* Set up variables for each argument.  */
  arg = DECL_ARGUMENTS (decl);
  for (i = 0; i < num_args; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree argdecl = decl;

      /* ARG can run out before NUM_ARGS for unprototyped functions;
	 fall back to DECL itself as the associated decl then.  */
      if (arg)
	argdecl = arg;

      tempname = xasprintf ("%s.arg%d", name, i);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newname, false);
      argvi->offset = fi_parm_base + i;
      argvi->size = 1;
      argvi->is_full_var = true;
      argvi->fullsize = vi->fullsize;
      if (arg)
	argvi->may_have_pointers = true;

      if (arg)
	insert_vi_for_tree (arg, argvi);

      if (nonlocal_p
	  && argvi->may_have_pointers)
	make_constraint_from (argvi, nonlocal_id);

      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
      prev_vi = argvi;
      if (arg)
	arg = DECL_CHAIN (arg);
    }

  /* Add one representative for all further args.  */
  if (is_varargs)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree decl;

      tempname = xasprintf ("%s.varargs", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      /* We need sth that can be pointed to for va_start.  */
      decl = build_fake_var_decl (ptr_type_node);

      argvi = new_var_info (decl, newname, false);
      argvi->offset = fi_parm_base + num_args;
      argvi->size = ~0;
      argvi->is_full_var = true;
      argvi->is_heap_var = true;
      argvi->fullsize = vi->fullsize;

      if (nonlocal_p
	  && argvi->may_have_pointers)
	make_constraint_from (argvi, nonlocal_id);

      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
      prev_vi = argvi;
    }

  return vi;
}
5782 /* Return true if FIELDSTACK contains fields that overlap.
5783 FIELDSTACK is assumed to be sorted by offset. */
5785 static bool
5786 check_for_overlaps (vec<fieldoff_s> fieldstack)
5788 fieldoff_s *fo = NULL;
5789 unsigned int i;
5790 HOST_WIDE_INT lastoffset = -1;
5792 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5794 if (fo->offset == lastoffset)
5795 return true;
5796 lastoffset = fo->offset;
5798 return false;
/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
   This will also create any varinfo structures necessary for fields
   of DECL.  DECL is a function parameter if HANDLE_PARAM is set.
   HANDLED_STRUCT_TYPE is used to register struct types reached by following
   restrict pointers.  This is needed to prevent infinite recursion.

   Returns the head varinfo; when the decl was split into fields the
   sub-variables are chained behind it via ->next.  */

static varinfo_t
create_variable_info_for_1 (tree decl, const char *name, bool add_id,
			    bool handle_param, bitmap handled_struct_type)
{
  varinfo_t vi, newvi;
  tree decl_type = TREE_TYPE (decl);
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
  auto_vec<fieldoff_s> fieldstack;
  fieldoff_s *fo;
  unsigned int i;

  /* Variable-sized or unknown-size decls get a single conservative
     variable covering everything.  */
  if (!declsize
      || !tree_fits_uhwi_p (declsize))
    {
      vi = new_var_info (decl, name, add_id);
      vi->offset = 0;
      vi->size = ~0;
      vi->fullsize = ~0;
      vi->is_unknown_size_var = true;
      vi->is_full_var = true;
      vi->may_have_pointers = true;
      return vi;
    }

  /* Collect field information.  */
  if (use_field_sensitive
      && var_can_have_subvars (decl)
      /* ??? Force us to not use subfields for globals in IPA mode.
	 Else we'd have to parse arbitrary initializers.  */
      && !(in_ipa_mode
	   && is_global_var (decl)))
    {
      fieldoff_s *fo = NULL;
      bool notokay = false;
      unsigned int i;

      push_fields_onto_fieldstack (decl_type, &fieldstack, 0);

      for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
	if (fo->has_unknown_size
	    || fo->offset < 0)
	  {
	    notokay = true;
	    break;
	  }

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}

      if (notokay)
	fieldstack.release ();
    }

  /* If we didn't end up collecting sub-variables create a full
     variable for the decl.  */
  if (fieldstack.length () == 0
      || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    {
      vi = new_var_info (decl, name, add_id);
      vi->offset = 0;
      vi->may_have_pointers = true;
      vi->fullsize = tree_to_uhwi (declsize);
      vi->size = vi->fullsize;
      vi->is_full_var = true;
      if (POINTER_TYPE_P (decl_type)
	  && TYPE_RESTRICT (decl_type))
	vi->only_restrict_pointers = 1;
      /* For a restrict-qualified pointer parameter build a dummy
	 "heap" decl it can exclusively point to, recursing into the
	 pointed-to type.  HANDLED_STRUCT_TYPE caps the recursion.  */
      if (vi->only_restrict_pointers
	  && !type_contains_placeholder_p (TREE_TYPE (decl_type))
	  && handle_param
	  && !bitmap_bit_p (handled_struct_type,
			    TYPE_UID (TREE_TYPE (decl_type))))
	{
	  varinfo_t rvi;
	  tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type));
	  DECL_EXTERNAL (heapvar) = 1;
	  if (var_can_have_subvars (heapvar))
	    bitmap_set_bit (handled_struct_type,
			    TYPE_UID (TREE_TYPE (decl_type)));
	  rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
					    true, handled_struct_type);
	  if (var_can_have_subvars (heapvar))
	    bitmap_clear_bit (handled_struct_type,
			      TYPE_UID (TREE_TYPE (decl_type)));
	  rvi->is_restrict_var = 1;
	  insert_vi_for_tree (heapvar, rvi);
	  make_constraint_from (vi, rvi->id);
	  make_param_constraints (rvi);
	}
      fieldstack.release ();
      return vi;
    }

  /* Otherwise create one sub-variable per collected field, chained
     in fieldstack (offset-sorted) order.  */
  vi = new_var_info (decl, name, add_id);
  vi->fullsize = tree_to_uhwi (declsize);
  if (fieldstack.length () == 1)
    vi->is_full_var = true;
  for (i = 0, newvi = vi;
       fieldstack.iterate (i, &fo);
       ++i, newvi = vi_next (newvi))
    {
      const char *newname = NULL;
      char *tempname;

      if (dump_file)
	{
	  if (fieldstack.length () != 1)
	    {
	      tempname
		= xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
			     "+" HOST_WIDE_INT_PRINT_DEC, name,
			     fo->offset, fo->size);
	      newname = ggc_strdup (tempname);
	      free (tempname);
	    }
	}
      else
	newname = "NULL";

      if (newname)
	newvi->name = newname;
      newvi->offset = fo->offset;
      newvi->size = fo->size;
      newvi->fullsize = vi->fullsize;
      newvi->may_have_pointers = fo->may_have_pointers;
      newvi->only_restrict_pointers = fo->only_restrict_pointers;
      /* Same restrict-pointer handling as in the single-variable case
	 above, but per field.  */
      if (handle_param
	  && newvi->only_restrict_pointers
	  && !type_contains_placeholder_p (fo->restrict_pointed_type)
	  && !bitmap_bit_p (handled_struct_type,
			    TYPE_UID (fo->restrict_pointed_type)))
	{
	  varinfo_t rvi;
	  tree heapvar = build_fake_var_decl (fo->restrict_pointed_type);
	  DECL_EXTERNAL (heapvar) = 1;
	  if (var_can_have_subvars (heapvar))
	    bitmap_set_bit (handled_struct_type,
			    TYPE_UID (fo->restrict_pointed_type));
	  rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
					    true, handled_struct_type);
	  if (var_can_have_subvars (heapvar))
	    bitmap_clear_bit (handled_struct_type,
			      TYPE_UID (fo->restrict_pointed_type));
	  rvi->is_restrict_var = 1;
	  insert_vi_for_tree (heapvar, rvi);
	  make_constraint_from (newvi, rvi->id);
	  make_param_constraints (rvi);
	}
      if (i + 1 < fieldstack.length ())
	{
	  varinfo_t tem = new_var_info (decl, name, false);
	  newvi->next = tem->id;
	  tem->head = vi->id;
	}
    }

  return vi;
}
/* Create the variable info for DECL with name NAME and add initial
   constraints for global variables.  Returns the id of the (head)
   varinfo created.  */

static unsigned int
create_variable_info_for (tree decl, const char *name, bool add_id)
{
  varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, false, NULL);
  unsigned int id = vi->id;

  insert_vi_for_tree (decl, vi);

  /* Only variables get the global-initializer treatment below.  */
  if (TREE_CODE (decl) != VAR_DECL)
    return id;

  /* Create initial constraints for globals.  */
  for (; vi; vi = vi_next (vi))
    {
      if (!vi->may_have_pointers
	  || !vi->is_global_var)
	continue;

      /* Mark global restrict qualified pointers.  */
      if ((POINTER_TYPE_P (TREE_TYPE (decl))
	   && TYPE_RESTRICT (TREE_TYPE (decl)))
	  || vi->only_restrict_pointers)
	{
	  varinfo_t rvi
	    = make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT",
						    true);
	  /* ??? For now exclude reads from globals as restrict sources
	     if those are not (indirectly) from incoming parameters.  */
	  rvi->is_restrict_var = false;
	  continue;
	}

      /* In non-IPA mode the initializer from nonlocal is all we need.  */
      if (!in_ipa_mode
	  || DECL_HARD_REGISTER (decl))
	make_copy_constraint (vi, nonlocal_id);

      /* In IPA mode parse the initializer and generate proper constraints
	 for it.  */
      else
	{
	  varpool_node *vnode = varpool_node::get (decl);

	  /* For escaped variables initialize them from nonlocal.  */
	  if (!vnode->all_refs_explicit_p ())
	    make_copy_constraint (vi, nonlocal_id);

	  /* If this is a global variable with an initializer and we are in
	     IPA mode generate constraints for it.  */
	  ipa_ref *ref;
	  for (unsigned idx = 0; vnode->iterate_reference (idx, ref); ++idx)
	    {
	      auto_vec<ce_s> rhsc;
	      struct constraint_expr lhs, *rhsp;
	      unsigned i;
	      get_constraint_for_address_of (ref->referred->decl, &rhsc);
	      lhs.var = vi->id;
	      lhs.offset = 0;
	      lhs.type = SCALAR;
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      /* If this is a variable that escapes from the unit
		 the initializer escapes as well.  */
	      if (!vnode->all_refs_explicit_p ())
		{
		  lhs.var = escaped_id;
		  lhs.offset = 0;
		  lhs.type = SCALAR;
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		}
	    }
	}
    }

  return id;
}
6055 /* Print out the points-to solution for VAR to FILE. */
6057 static void
6058 dump_solution_for_var (FILE *file, unsigned int var)
6060 varinfo_t vi = get_varinfo (var);
6061 unsigned int i;
6062 bitmap_iterator bi;
6064 /* Dump the solution for unified vars anyway, this avoids difficulties
6065 in scanning dumps in the testsuite. */
6066 fprintf (file, "%s = { ", vi->name);
6067 vi = get_varinfo (find (var));
6068 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6069 fprintf (file, "%s ", get_varinfo (i)->name);
6070 fprintf (file, "}");
6072 /* But note when the variable was unified. */
6073 if (vi->id != var)
6074 fprintf (file, " same as %s", vi->name);
6076 fprintf (file, "\n");
/* Print the points-to solution for VAR to stderr.  Debugger
   convenience wrapper around dump_solution_for_var.  */

DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stderr, var);
}
6087 /* Register the constraints for function parameter related VI. */
6089 static void
6090 make_param_constraints (varinfo_t vi)
6092 for (; vi; vi = vi_next (vi))
6094 if (vi->only_restrict_pointers)
6096 else if (vi->may_have_pointers)
6097 make_constraint_from (vi, nonlocal_id);
6099 if (vi->is_full_var)
6100 break;
/* Create varinfo structures for all of the variables in the
   function for intraprocedural mode.  */

static void
intra_create_variable_infos (struct function *fn)
{
  tree t;
  /* Allocated lazily below; only needed when the function has
     parameters with restrict-qualified pointer members.  */
  bitmap handled_struct_type = NULL;

  /* For each incoming pointer argument arg, create the constraint ARG
     = NONLOCAL or a dummy variable if it is a restrict qualified
     passed-by-reference argument.  */
  for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
    {
      if (handled_struct_type == NULL)
	handled_struct_type = BITMAP_ALLOC (NULL);

      varinfo_t p
	= create_variable_info_for_1 (t, alias_get_name (t), false, true,
				      handled_struct_type);
      insert_vi_for_tree (t, p);

      make_param_constraints (p);
    }

  if (handled_struct_type != NULL)
    BITMAP_FREE (handled_struct_type);

  /* Add a constraint for a result decl that is passed by reference.  */
  if (DECL_RESULT (fn->decl)
      && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
    {
      varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));

      for (p = result_vi; p; p = vi_next (p))
	make_constraint_from (p, nonlocal_id);
    }

  /* Add a constraint for the incoming static chain parameter.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);

      for (p = chain_vi; p; p = vi_next (p))
	make_constraint_from (p, nonlocal_id);
    }
}
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  bitmap pt_vars;
  /* Cached bitmap_hash of PT_VARS, computed once at insert/lookup.  */
  hashval_t hashcode;
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;

/* Shared_bitmap hashtable helpers.  */

struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
{
  static inline hashval_t hash (const shared_bitmap_info *);
  static inline bool equal (const shared_bitmap_info *,
			    const shared_bitmap_info *);
};

/* Hash function for a shared_bitmap_info_t.  */

inline hashval_t
shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
{
  return bi->hashcode;
}

/* Equality function for two shared_bitmap_info_t's.  */

inline bool
shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
			     const shared_bitmap_info *sbi2)
{
  return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
}

/* Shared_bitmap hashtable.  */

static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
6192 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
6193 existing instance if there is one, NULL otherwise. */
6195 static bitmap
6196 shared_bitmap_lookup (bitmap pt_vars)
6198 shared_bitmap_info **slot;
6199 struct shared_bitmap_info sbi;
6201 sbi.pt_vars = pt_vars;
6202 sbi.hashcode = bitmap_hash (pt_vars);
6204 slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
6205 if (!slot)
6206 return NULL;
6207 else
6208 return (*slot)->pt_vars;
6212 /* Add a bitmap to the shared bitmap hashtable. */
6214 static void
6215 shared_bitmap_add (bitmap pt_vars)
6217 shared_bitmap_info **slot;
6218 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
6220 sbi->pt_vars = pt_vars;
6221 sbi->hashcode = bitmap_hash (pt_vars);
6223 slot = shared_bitmap_table->find_slot (sbi, INSERT);
6224 gcc_assert (!*slot);
6225 *slot = sbi;
6229 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
6231 static void
6232 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
6233 tree fndecl)
6235 unsigned int i;
6236 bitmap_iterator bi;
6237 varinfo_t escaped_vi = get_varinfo (find (escaped_id));
6238 bool everything_escaped
6239 = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);
6241 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
6243 varinfo_t vi = get_varinfo (i);
6245 /* The only artificial variables that are allowed in a may-alias
6246 set are heap variables. */
6247 if (vi->is_artificial_var && !vi->is_heap_var)
6248 continue;
6250 if (everything_escaped
6251 || (escaped_vi->solution
6252 && bitmap_bit_p (escaped_vi->solution, i)))
6254 pt->vars_contains_escaped = true;
6255 pt->vars_contains_escaped_heap = vi->is_heap_var;
6258 if (vi->is_restrict_var)
6259 pt->vars_contains_restrict = true;
6261 if (TREE_CODE (vi->decl) == VAR_DECL
6262 || TREE_CODE (vi->decl) == PARM_DECL
6263 || TREE_CODE (vi->decl) == RESULT_DECL)
6265 /* If we are in IPA mode we will not recompute points-to
6266 sets after inlining so make sure they stay valid. */
6267 if (in_ipa_mode
6268 && !DECL_PT_UID_SET_P (vi->decl))
6269 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
6271 /* Add the decl to the points-to set. Note that the points-to
6272 set contains global variables. */
6273 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
6274 if (vi->is_global_var
6275 /* In IPA mode the escaped_heap trick doesn't work as
6276 ESCAPED is escaped from the unit but
6277 pt_solution_includes_global needs to answer true for
6278 all variables not automatic within a function.
6279 For the same reason is_global_var is not the
6280 correct flag to track - local variables from other
6281 functions also need to be considered global.
6282 Conveniently all HEAP vars are not put in function
6283 scope. */
6284 || (in_ipa_mode
6285 && fndecl
6286 && ! auto_var_in_fn_p (vi->decl, fndecl)))
6287 pt->vars_contains_nonlocal = true;
6290 else if (TREE_CODE (vi->decl) == FUNCTION_DECL
6291 || TREE_CODE (vi->decl) == LABEL_DECL)
6293 /* Nothing should read/write from/to code so we can
6294 save bits by not including them in the points-to bitmaps.
6295 Still mark the points-to set as containing global memory
6296 to make code-patching possible - see PR70128. */
6297 pt->vars_contains_nonlocal = true;
/* Compute the points-to solution *PT for the variable VI.  Solutions
   are cached in FINAL_SOLUTIONS; repeated queries for variables that
   were unified to the same representative return the cached copy.  */

static struct pt_solution
find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;
  struct pt_solution *pt;

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (find (orig_vi->id));

  /* See if we have already computed the solution and return it.  */
  pt_solution **slot = &final_solutions->get_or_insert (vi);
  if (*slot != NULL)
    return **slot;

  *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
  memset (pt, 0, sizeof (struct pt_solution));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	      /* Expand some special vars of ESCAPED in-place here.  */
	      varinfo_t evi = get_varinfo (find (escaped_id));
	      if (bitmap_bit_p (evi->solution, nonlocal_id))
		pt->nonlocal = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->is_heap_var)
	    /* We represent heapvars in the points-to set properly.  */
	    ;
	  else if (vi->id == string_id)
	    /* Nobody cares - STRING_CSTs are read-only entities.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything)
    return *pt;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (finished_solution, vi->solution, pt, fndecl);
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      pt->vars = result;
      /* An equal bitmap already exists; reuse it and drop ours.  */
      bitmap_clear (finished_solution);
    }

  return *pt;
}
6387 /* Given a pointer variable P, fill in its points-to set. */
6389 static void
6390 find_what_p_points_to (tree fndecl, tree p)
6392 struct ptr_info_def *pi;
6393 tree lookup_p = p;
6394 varinfo_t vi;
6396 /* For parameters, get at the points-to set for the actual parm
6397 decl. */
6398 if (TREE_CODE (p) == SSA_NAME
6399 && SSA_NAME_IS_DEFAULT_DEF (p)
6400 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
6401 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
6402 lookup_p = SSA_NAME_VAR (p);
6404 vi = lookup_vi_for_tree (lookup_p);
6405 if (!vi)
6406 return;
6408 pi = get_ptr_info (p);
6409 pi->pt = find_what_var_points_to (fndecl, vi);
/* Query statistics for points-to solutions.  Counters are bumped by
   pt_solution_includes and pt_solutions_intersect and dumped by
   dump_pta_stats.  */

static struct {
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
/* Dump points-to query statistics (see pta_stats) to stream S.  */

void
dump_pta_stats (FILE *s)
{
  fprintf (s, "\nPTA query stats:\n");
  fprintf (s, " pt_solution_includes: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   pta_stats.pt_solution_includes_no_alias,
	   pta_stats.pt_solution_includes_no_alias
	   + pta_stats.pt_solution_includes_may_alias);
  fprintf (s, " pt_solutions_intersect: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   pta_stats.pt_solutions_intersect_no_alias,
	   pta_stats.pt_solutions_intersect_no_alias
	   + pta_stats.pt_solutions_intersect_may_alias);
}
6441 /* Reset the points-to solution *PT to a conservative default
6442 (point to anything). */
6444 void
6445 pt_solution_reset (struct pt_solution *pt)
6447 memset (pt, 0, sizeof (struct pt_solution));
6448 pt->anything = true;
6451 /* Set the points-to solution *PT to point only to the variables
6452 in VARS. VARS_CONTAINS_GLOBAL specifies whether that contains
6453 global variables and VARS_CONTAINS_RESTRICT specifies whether
6454 it contains restrict tag variables. */
6456 void
6457 pt_solution_set (struct pt_solution *pt, bitmap vars,
6458 bool vars_contains_nonlocal)
6460 memset (pt, 0, sizeof (struct pt_solution));
6461 pt->vars = vars;
6462 pt->vars_contains_nonlocal = vars_contains_nonlocal;
6463 pt->vars_contains_escaped
6464 = (cfun->gimple_df->escaped.anything
6465 || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
6468 /* Set the points-to solution *PT to point only to the variable VAR. */
6470 void
6471 pt_solution_set_var (struct pt_solution *pt, tree var)
6473 memset (pt, 0, sizeof (struct pt_solution));
6474 pt->vars = BITMAP_GGC_ALLOC ();
6475 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6476 pt->vars_contains_nonlocal = is_global_var (var);
6477 pt->vars_contains_escaped
6478 = (cfun->gimple_df->escaped.anything
6479 || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
6482 /* Computes the union of the points-to solutions *DEST and *SRC and
6483 stores the result in *DEST. This changes the points-to bitmap
6484 of *DEST and thus may not be used if that might be shared.
6485 The points-to bitmap of *SRC and *DEST will not be shared after
6486 this function if they were not before. */
6488 static void
6489 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6491 dest->anything |= src->anything;
6492 if (dest->anything)
6494 pt_solution_reset (dest);
6495 return;
6498 dest->nonlocal |= src->nonlocal;
6499 dest->escaped |= src->escaped;
6500 dest->ipa_escaped |= src->ipa_escaped;
6501 dest->null |= src->null;
6502 dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
6503 dest->vars_contains_escaped |= src->vars_contains_escaped;
6504 dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
6505 if (!src->vars)
6506 return;
6508 if (!dest->vars)
6509 dest->vars = BITMAP_GGC_ALLOC ();
6510 bitmap_ior_into (dest->vars, src->vars);
6513 /* Return true if the points-to solution *PT is empty. */
6515 bool
6516 pt_solution_empty_p (struct pt_solution *pt)
6518 if (pt->anything
6519 || pt->nonlocal)
6520 return false;
6522 if (pt->vars
6523 && !bitmap_empty_p (pt->vars))
6524 return false;
6526 /* If the solution includes ESCAPED, check if that is empty. */
6527 if (pt->escaped
6528 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6529 return false;
6531 /* If the solution includes ESCAPED, check if that is empty. */
6532 if (pt->ipa_escaped
6533 && !pt_solution_empty_p (&ipa_escaped_pt))
6534 return false;
6536 return true;
6539 /* Return true if the points-to solution *PT only point to a single var, and
6540 return the var uid in *UID. */
6542 bool
6543 pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
6545 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6546 || pt->null || pt->vars == NULL
6547 || !bitmap_single_bit_set_p (pt->vars))
6548 return false;
6550 *uid = bitmap_first_set_bit (pt->vars);
6551 return true;
6554 /* Return true if the points-to solution *PT includes global memory. */
6556 bool
6557 pt_solution_includes_global (struct pt_solution *pt)
6559 if (pt->anything
6560 || pt->nonlocal
6561 || pt->vars_contains_nonlocal
6562 /* The following is a hack to make the malloc escape hack work.
6563 In reality we'd need different sets for escaped-through-return
6564 and escaped-to-callees and passes would need to be updated. */
6565 || pt->vars_contains_escaped_heap)
6566 return true;
6568 /* 'escaped' is also a placeholder so we have to look into it. */
6569 if (pt->escaped)
6570 return pt_solution_includes_global (&cfun->gimple_df->escaped);
6572 if (pt->ipa_escaped)
6573 return pt_solution_includes_global (&ipa_escaped_pt);
6575 return false;
6578 /* Return true if the points-to solution *PT includes the variable
6579 declaration DECL. */
6581 static bool
6582 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
6584 if (pt->anything)
6585 return true;
6587 if (pt->nonlocal
6588 && is_global_var (decl))
6589 return true;
6591 if (pt->vars
6592 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
6593 return true;
6595 /* If the solution includes ESCAPED, check it. */
6596 if (pt->escaped
6597 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
6598 return true;
6600 /* If the solution includes ESCAPED, check it. */
6601 if (pt->ipa_escaped
6602 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
6603 return true;
6605 return false;
6608 bool
6609 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6611 bool res = pt_solution_includes_1 (pt, decl);
6612 if (res)
6613 ++pta_stats.pt_solution_includes_may_alias;
6614 else
6615 ++pta_stats.pt_solution_includes_no_alias;
6616 return res;
/* Return true if both points-to solutions PT1 and PT2 have a non-empty
   intersection.  Worker for pt_solutions_intersect, which adds
   statistics bookkeeping.  */

static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias.  */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_nonlocal))
      || (pt2->nonlocal
	  && pt1->vars_contains_nonlocal))
    return true;

  /* If either points to all escaped memory and the other points to
     any escaped memory they alias.  */
  if ((pt1->escaped
       && (pt2->escaped
	   || pt2->vars_contains_escaped))
      || (pt2->escaped
	  && pt1->vars_contains_escaped))
    return true;

  /* Check the escaped solution if required.
     ??? Do we need to check the local against the IPA escaped sets?  */
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
      && !pt_solution_empty_p (&ipa_escaped_pt))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->ipa_escaped && pt2->ipa_escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->ipa_escaped
	   && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
	  || (pt2->ipa_escaped
	      && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects.  */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}
6671 bool
6672 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6674 bool res = pt_solutions_intersect_1 (pt1, pt2);
6675 if (res)
6676 ++pta_stats.pt_solutions_intersect_may_alias;
6677 else
6678 ++pta_stats.pt_solutions_intersect_no_alias;
6679 return res;
/* Dump points-to information to OUTFILE.  With TDF_STATS in
   dump_flags the solver statistics are printed first; then one
   points-to set per variable that may contain pointers.  */

static void
dump_sa_points_to_info (FILE *outfile)
{
  unsigned int i;

  fprintf (outfile, "\nPoints-to sets\n\n");

  if (dump_flags & TDF_STATS)
    {
      fprintf (outfile, "Stats:\n");
      fprintf (outfile, "Total vars: %d\n", stats.total_vars);
      fprintf (outfile, "Non-pointer vars: %d\n",
	       stats.nonpointer_vars);
      fprintf (outfile, "Statically unified vars: %d\n",
	       stats.unified_vars_static);
      fprintf (outfile, "Dynamically unified vars: %d\n",
	       stats.unified_vars_dynamic);
      fprintf (outfile, "Iterations: %d\n", stats.iterations);
      fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
      fprintf (outfile, "Number of implicit edges: %d\n",
	       stats.num_implicit_edges);
    }

  /* Index 0 is the reserved NULL slot in varmap, so start at 1.  */
  for (i = 1; i < varmap.length (); i++)
    {
      varinfo_t vi = get_varinfo (i);
      if (!vi->may_have_pointers)
	continue;
      dump_solution_for_var (outfile, i);
    }
}
/* Debug points-to information to stderr.  Debugger convenience
   wrapper around dump_sa_points_to_info.  */

DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
6727 /* Initialize the always-existing constraint variables NULL, ANYTHING,
6728    STRING, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER, together with
     the generic constraints that relate them.  Their ids must match the
     corresponding *_id constants, which the asserts below check.  */
6730 static void
6731 init_base_vars (void)
6733 struct constraint_expr lhs, rhs;
6734 varinfo_t var_anything;
6735 varinfo_t var_nothing;
6736 varinfo_t var_string;
6737 varinfo_t var_escaped;
6738 varinfo_t var_nonlocal;
6739 varinfo_t var_storedanything;
6740 varinfo_t var_integer;
6742 /* Variable ID zero is reserved and should be NULL. */
6743 varmap.safe_push (NULL);
6745 /* Create the NULL variable, used to represent that a variable points
6746 to NULL. */
6747 var_nothing = new_var_info (NULL_TREE, "NULL", false);
6748 gcc_assert (var_nothing->id == nothing_id);
6749 var_nothing->is_artificial_var = 1;
6750 var_nothing->offset = 0;
6751 var_nothing->size = ~0;
6752 var_nothing->fullsize = ~0;
6753 var_nothing->is_special_var = 1;
6754 var_nothing->may_have_pointers = 0;
6755 var_nothing->is_global_var = 0;
6757 /* Create the ANYTHING variable, used to represent that a variable
6758 points to some unknown piece of memory. */
6759 var_anything = new_var_info (NULL_TREE, "ANYTHING", false);
6760 gcc_assert (var_anything->id == anything_id);
6761 var_anything->is_artificial_var = 1;
6762 var_anything->size = ~0;
6763 var_anything->offset = 0;
6764 var_anything->fullsize = ~0;
6765 var_anything->is_special_var = 1;
6767 /* Anything points to anything. This makes deref constraints just
6768 work in the presence of linked list and other p = *p type loops,
6769 by saying that *ANYTHING = ANYTHING. */
6770 lhs.type = SCALAR;
6771 lhs.var = anything_id;
6772 lhs.offset = 0;
6773 rhs.type = ADDRESSOF;
6774 rhs.var = anything_id;
6775 rhs.offset = 0;
6777 /* This specifically does not use process_constraint because
6778 process_constraint ignores all anything = anything constraints, since all
6779 but this one are redundant. */
6780 constraints.safe_push (new_constraint (lhs, rhs));
6782 /* Create the STRING variable, used to represent that a variable
6783 points to a string literal. String literals don't contain
6784 pointers so STRING doesn't point to anything. */
6785 var_string = new_var_info (NULL_TREE, "STRING", false);
6786 gcc_assert (var_string->id == string_id);
6787 var_string->is_artificial_var = 1;
6788 var_string->offset = 0;
6789 var_string->size = ~0;
6790 var_string->fullsize = ~0;
6791 var_string->is_special_var = 1;
6792 var_string->may_have_pointers = 0;
6794 /* Create the ESCAPED variable, used to represent the set of escaped
6795 memory. */
6796 var_escaped = new_var_info (NULL_TREE, "ESCAPED", false);
6797 gcc_assert (var_escaped->id == escaped_id);
6798 var_escaped->is_artificial_var = 1;
6799 var_escaped->offset = 0;
6800 var_escaped->size = ~0;
6801 var_escaped->fullsize = ~0;
     /* Unlike the other artificial vars, ESCAPED gets a real solution
        computed for it, so it is not marked special.  */
6802 var_escaped->is_special_var = 0;
6804 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6805 memory. */
6806 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL", false);
6807 gcc_assert (var_nonlocal->id == nonlocal_id);
6808 var_nonlocal->is_artificial_var = 1;
6809 var_nonlocal->offset = 0;
6810 var_nonlocal->size = ~0;
6811 var_nonlocal->fullsize = ~0;
6812 var_nonlocal->is_special_var = 1;
6814 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6815 lhs.type = SCALAR;
6816 lhs.var = escaped_id;
6817 lhs.offset = 0;
6818 rhs.type = DEREF;
6819 rhs.var = escaped_id;
6820 rhs.offset = 0;
6821 process_constraint (new_constraint (lhs, rhs));
6823 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6824 whole variable escapes. */
6825 lhs.type = SCALAR;
6826 lhs.var = escaped_id;
6827 lhs.offset = 0;
6828 rhs.type = SCALAR;
6829 rhs.var = escaped_id;
6830 rhs.offset = UNKNOWN_OFFSET;
6831 process_constraint (new_constraint (lhs, rhs));
6833 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6834 everything pointed to by escaped points to what global memory can
6835 point to. */
6836 lhs.type = DEREF;
6837 lhs.var = escaped_id;
6838 lhs.offset = 0;
6839 rhs.type = SCALAR;
6840 rhs.var = nonlocal_id;
6841 rhs.offset = 0;
6842 process_constraint (new_constraint (lhs, rhs));
6844 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6845 global memory may point to global memory and escaped memory. */
6846 lhs.type = SCALAR;
6847 lhs.var = nonlocal_id;
6848 lhs.offset = 0;
6849 rhs.type = ADDRESSOF;
6850 rhs.var = nonlocal_id;
6851 rhs.offset = 0;
6852 process_constraint (new_constraint (lhs, rhs));
6853 rhs.type = ADDRESSOF;
6854 rhs.var = escaped_id;
6855 rhs.offset = 0;
6856 process_constraint (new_constraint (lhs, rhs));
6858 /* Create the STOREDANYTHING variable, used to represent the set of
6859 variables stored to *ANYTHING. */
6860 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING", false);
6861 gcc_assert (var_storedanything->id == storedanything_id);
6862 var_storedanything->is_artificial_var = 1;
6863 var_storedanything->offset = 0;
6864 var_storedanything->size = ~0;
6865 var_storedanything->fullsize = ~0;
     /* Like ESCAPED, STOREDANYTHING accumulates a real solution.  */
6866 var_storedanything->is_special_var = 0;
6868 /* Create the INTEGER variable, used to represent that a variable points
6869 to what an INTEGER "points to". */
6870 var_integer = new_var_info (NULL_TREE, "INTEGER", false);
6871 gcc_assert (var_integer->id == integer_id);
6872 var_integer->is_artificial_var = 1;
6873 var_integer->size = ~0;
6874 var_integer->fullsize = ~0;
6875 var_integer->offset = 0;
6876 var_integer->is_special_var = 1;
6878 /* INTEGER = ANYTHING, because we don't know where a dereference of
6879 a random integer will point to. */
6880 lhs.type = SCALAR;
6881 lhs.var = integer_id;
6882 lhs.offset = 0;
6883 rhs.type = ADDRESSOF;
6884 rhs.var = anything_id;
6885 rhs.offset = 0;
6886 process_constraint (new_constraint (lhs, rhs));
6889 /* Initialize things necessary to perform PTA: obstacks, the
     constraint and variable vectors, the tree -> varinfo maps, and the
     base constraint variables (via init_base_vars).  Shared by the
     intraprocedural and the IPA driver.  */
6891 static void
6892 init_alias_vars (void)
     /* Field-sensitive analysis is effectively disabled when the param
        allows at most one field per variable.  */
6894 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
6896 bitmap_obstack_initialize (&pta_obstack);
6897 bitmap_obstack_initialize (&oldpta_obstack);
6898 bitmap_obstack_initialize (&predbitmap_obstack);
6900 constraints.create (8);
6901 varmap.create (8);
6902 vi_for_tree = new hash_map<tree, varinfo_t>;
6903 call_stmt_vars = new hash_map<gimple *, varinfo_t>;
6905 memset (&stats, 0, sizeof (stats));
     /* 511 is just the initial size hint for the hash table.  */
6906 shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
6907 init_base_vars ();
6909 gcc_obstack_init (&fake_var_decl_obstack);
6911 final_solutions = new hash_map<varinfo_t, pt_solution *>;
6912 gcc_obstack_init (&final_solutions_obstack);
6915 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6916    predecessor edges.  Called once variable substitution and indirect
     cycle detection are done, to shrink the graph down to what the
     solver actually needs.  */
6918 static void
6919 remove_preds_and_fake_succs (constraint_graph_t graph)
6921 unsigned int i;
6923 /* Clear the implicit ref and address nodes from the successor
6924 lists. */
6925 for (i = 1; i < FIRST_REF_NODE; i++)
6927 if (graph->succs[i])
6928 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6929 FIRST_REF_NODE * 2);
6932 /* Free the successor list for the non-ref nodes. */
6933 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
6935 if (graph->succs[i])
6936 BITMAP_FREE (graph->succs[i]);
6939 /* Now reallocate the size of the successor list as, and blow away
6940 the predecessor bitmaps. */
6941 graph->size = varmap.length ();
6942 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6944 free (graph->implicit_preds);
6945 graph->implicit_preds = NULL;
6946 free (graph->preds);
6947 graph->preds = NULL;
6948 bitmap_obstack_release (&predbitmap_obstack);
6951 /* Solve the constraint set.  Pipeline: build the predecessor graph,
     perform offline variable substitution, rewrite constraints, build
     the successor graph, unite pointer-equivalent nodes, detect
     indirect cycles, then run the propagation solver.  Dumps
     intermediate state to DUMP_FILE when enabled.  */
6953 static void
6954 solve_constraints (void)
6956 struct scc_info *si;
6958 if (dump_file)
6959 fprintf (dump_file,
6960 "\nCollapsing static cycles and doing variable "
6961 "substitution\n");
     /* Twice the number of variables: the upper half holds the
        implicit REF nodes used during predecessor-graph construction.  */
6963 init_graph (varmap.length () * 2);
6965 if (dump_file)
6966 fprintf (dump_file, "Building predecessor graph\n");
6967 build_pred_graph ();
6969 if (dump_file)
6970 fprintf (dump_file, "Detecting pointer and location "
6971 "equivalences\n");
6972 si = perform_var_substitution (graph);
6974 if (dump_file)
6975 fprintf (dump_file, "Rewriting constraints and unifying "
6976 "variables\n");
6977 rewrite_constraints (graph, si);
6979 build_succ_graph ();
6981 free_var_substitution_info (si);
6983 /* Attach complex constraints to graph nodes. */
6984 move_complex_constraints (graph);
6986 if (dump_file)
6987 fprintf (dump_file, "Uniting pointer but not location equivalent "
6988 "variables\n");
6989 unite_pointer_equivalences (graph);
6991 if (dump_file)
6992 fprintf (dump_file, "Finding indirect cycles\n");
6993 find_indirect_cycles (graph);
6995 /* Implicit nodes and predecessors are no longer necessary at this
6996 point. */
6997 remove_preds_and_fake_succs (graph);
6999 if (dump_file && (dump_flags & TDF_GRAPH))
7001 fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
7002 "in dot format:\n");
7003 dump_constraint_graph (dump_file);
7004 fprintf (dump_file, "\n\n");
7007 if (dump_file)
7008 fprintf (dump_file, "Solving graph\n");
7010 solve_graph (graph);
7012 if (dump_file && (dump_flags & TDF_GRAPH))
7014 fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
7015 "in dot format:\n");
7016 dump_constraint_graph (dump_file);
7017 fprintf (dump_file, "\n\n");
7020 if (dump_file)
7021 dump_sa_points_to_info (dump_file);
7024 /* Create points-to sets for the current function. See the comments
7025    at the start of the file for an algorithmic overview.
     Walks every PHI and statement to generate constraints, solves
     them, then materializes the ESCAPED set, per-SSA-name points-to
     sets, and the call-use/call-clobber sets of each call.  */
7027 static void
7028 compute_points_to_sets (void)
7030 basic_block bb;
7031 unsigned i;
7032 varinfo_t vi;
7034 timevar_push (TV_TREE_PTA);
7036 init_alias_vars ();
7038 intra_create_variable_infos (cfun);
7040 /* Now walk all statements and build the constraint set. */
7041 FOR_EACH_BB_FN (bb, cfun)
7043 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7044 gsi_next (&gsi))
7046 gphi *phi = gsi.phi ();
     /* Virtual operand PHIs carry no pointer values.  */
7048 if (! virtual_operand_p (gimple_phi_result (phi)))
7049 find_func_aliases (cfun, phi);
7052 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
7053 gsi_next (&gsi))
7055 gimple *stmt = gsi_stmt (gsi);
7057 find_func_aliases (cfun, stmt);
7061 if (dump_file)
7063 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
7064 dump_constraints (dump_file, 0);
7067 /* From the constraints compute the points-to sets. */
7068 solve_constraints ();
7070 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
7071 cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl,
7072 get_varinfo (escaped_id));
7074 /* Make sure the ESCAPED solution (which is used as placeholder in
7075 other solutions) does not reference itself. This simplifies
7076 points-to solution queries. */
7077 cfun->gimple_df->escaped.escaped = 0;
7079 /* Compute the points-to sets for pointer SSA_NAMEs. */
7080 for (i = 0; i < num_ssa_names; ++i)
7082 tree ptr = ssa_name (i);
7083 if (ptr
7084 && POINTER_TYPE_P (TREE_TYPE (ptr)))
7085 find_what_p_points_to (cfun->decl, ptr);
7088 /* Compute the call-used/clobbered sets. */
7089 FOR_EACH_BB_FN (bb, cfun)
7091 gimple_stmt_iterator gsi;
7093 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7095 gcall *stmt;
7096 struct pt_solution *pt;
7098 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
7099 if (!stmt)
7100 continue;
     /* Const calls read no memory at all, hence an empty use set.  */
7102 pt = gimple_call_use_set (stmt);
7103 if (gimple_call_flags (stmt) & ECF_CONST)
7104 memset (pt, 0, sizeof (struct pt_solution));
7105 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7107 *pt = find_what_var_points_to (cfun->decl, vi);
7108 /* Escaped (and thus nonlocal) variables are always
7109 implicitly used by calls. */
7110 /* ??? ESCAPED can be empty even though NONLOCAL
7111 always escaped. */
7112 pt->nonlocal = 1;
7113 pt->escaped = 1;
7115 else
7117 /* If there is nothing special about this call then
7118 we have made everything that is used also escape. */
7119 *pt = cfun->gimple_df->escaped;
7120 pt->nonlocal = 1;
     /* Pure and novops calls additionally clobber nothing.  */
7123 pt = gimple_call_clobber_set (stmt);
7124 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7125 memset (pt, 0, sizeof (struct pt_solution));
7126 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7128 *pt = find_what_var_points_to (cfun->decl, vi);
7129 /* Escaped (and thus nonlocal) variables are always
7130 implicitly clobbered by calls. */
7131 /* ??? ESCAPED can be empty even though NONLOCAL
7132 always escaped. */
7133 pt->nonlocal = 1;
7134 pt->escaped = 1;
7136 else
7138 /* If there is nothing special about this call then
7139 we have made everything that is used also escape. */
7140 *pt = cfun->gimple_df->escaped;
7141 pt->nonlocal = 1;
7146 timevar_pop (TV_TREE_PTA);
7150 /* Delete created points-to sets.  Releases every data structure set
     up by init_alias_vars / solve_constraints: hash tables, the
     constraint graph, the varmap, allocation pools and obstacks.  The
     computed pt_solutions stored in the IL survive, since they live in
     GC memory (except for the bitmaps in final_solutions_obstack,
     which are shared and freed here).  */
7152 static void
7153 delete_points_to_sets (void)
7155 unsigned int i;
7157 delete shared_bitmap_table;
7158 shared_bitmap_table = NULL;
7159 if (dump_file && (dump_flags & TDF_STATS))
7160 fprintf (dump_file, "Points to sets created:%d\n",
7161 stats.points_to_sets_created);
7163 delete vi_for_tree;
7164 delete call_stmt_vars;
7165 bitmap_obstack_release (&pta_obstack);
7166 constraints.release ();
7168 for (i = 0; i < graph->size; i++)
7169 graph->complex[i].release ();
7170 free (graph->complex);
7172 free (graph->rep);
7173 free (graph->succs);
7174 free (graph->pe);
7175 free (graph->pe_rep);
7176 free (graph->indirect_cycles);
7177 free (graph);
7179 varmap.release ();
7180 variable_info_pool.release ();
7181 constraint_pool.release ();
7183 obstack_free (&fake_var_decl_obstack, NULL);
7185 delete final_solutions;
7186 obstack_free (&final_solutions_obstack, NULL);
/* Callback payload for visit_loadstore, passed through
   walk_stmt_load_store_ops in compute_dependence_clique.  */
7189 struct vls_data
7191 unsigned short clique;  /* The dependence clique of this function.  */
7192 bitmap rvars;  /* Ids of the restrict tags we created bases for.  */
7195 /* Mark "other" loads and stores as belonging to CLIQUE and with
7196    base zero.  Callback for walk_stmt_load_store_ops; DATA is a
     vls_data.  Always returns false so the walk continues over all
     operands.  */
7198 static bool
7199 visit_loadstore (gimple *, tree base, tree ref, void *data)
7201 unsigned short clique = ((vls_data *) data)->clique;
7202 bitmap rvars = ((vls_data *) data)->rvars;
7203 if (TREE_CODE (base) == MEM_REF
7204 || TREE_CODE (base) == TARGET_MEM_REF)
7206 tree ptr = TREE_OPERAND (base, 0);
7207 if (TREE_CODE (ptr) == SSA_NAME
7208 && ! SSA_NAME_IS_DEFAULT_DEF (ptr))
7210 /* We need to make sure 'ptr' doesn't include any of
7211 the restrict tags we added bases for in its points-to set. */
7212 varinfo_t vi = lookup_vi_for_tree (ptr);
7213 if (! vi)
7214 return false;
7216 vi = get_varinfo (find (vi->id));
7217 if (bitmap_intersect_p (rvars, vi->solution))
7218 return false;
7221 /* Do not overwrite existing cliques (that includes clique, base
7222 pairs we just set). */
7223 if (MR_DEPENDENCE_CLIQUE (base) == 0)
7225 MR_DEPENDENCE_CLIQUE (base) = clique;
7226 MR_DEPENDENCE_BASE (base) = 0;
7230 /* For plain decl accesses see whether they are accesses to globals
7231 and rewrite them to MEM_REFs with { clique, 0 }. */
7232 if (TREE_CODE (base) == VAR_DECL
7233 && is_global_var (base)
7234 /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
7235 ops callback. */
7236 && base != ref)
7238 tree *basep = &ref;
7239 while (handled_component_p (*basep))
7240 basep = &TREE_OPERAND (*basep, 0);
7241 gcc_assert (TREE_CODE (*basep) == VAR_DECL);
7242 tree ptr = build_fold_addr_expr (*basep);
7243 tree zero = build_int_cst (TREE_TYPE (ptr), 0);
     /* Replace the innermost decl with &decl + 0 so the access can
        carry clique/base dependence info.  */
7244 *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
7245 MR_DEPENDENCE_CLIQUE (*basep) = clique;
7246 MR_DEPENDENCE_BASE (*basep) = 0;
7249 return false;
7252 /* If REF is a MEM_REF then assign a clique, base pair to it, updating
7253    CLIQUE, RESTRICT_VAR's ruid and LAST_RUID as needed.  Return whether
7254    dependence info was assigned to REF.  */
7256 static bool
7257 maybe_set_dependence_info (tree ref, tree ptr,
7258 unsigned short &clique, varinfo_t restrict_var,
7259 unsigned short &last_ruid)
     /* Strip component refs down to the base access.  */
7261 while (handled_component_p (ref))
7262 ref = TREE_OPERAND (ref, 0);
7263 if ((TREE_CODE (ref) == MEM_REF
7264 || TREE_CODE (ref) == TARGET_MEM_REF)
7265 && TREE_OPERAND (ref, 0) == ptr)
7267 /* Do not overwrite existing cliques. This avoids overwriting dependence
7268 info inlined from a function with restrict parameters inlined
7269 into a function with restrict parameters. This usually means we
7270 prefer to be precise in innermost loops. */
7271 if (MR_DEPENDENCE_CLIQUE (ref) == 0)
     /* Lazily allocate the function-wide clique and the per-restrict
        base id on first use.  */
7273 if (clique == 0)
7274 clique = ++cfun->last_clique;
7275 if (restrict_var->ruid == 0)
7276 restrict_var->ruid = ++last_ruid;
7277 MR_DEPENDENCE_CLIQUE (ref) = clique;
7278 MR_DEPENDENCE_BASE (ref) = restrict_var->ruid;
7279 return true;
7282 return false;
7285 /* Compute the set of independent memory references based on restrict
7286    tags and their conservative propagation to the points-to sets.
     For each SSA pointer that provably points to exactly one restrict
     tag (plus possibly NULL), tag its dereferences with a
     clique/base pair; all remaining accesses get base zero so they
     disambiguate against restrict accesses but not each other.  */
7288 static void
7289 compute_dependence_clique (void)
7291 unsigned short clique = 0;
7292 unsigned short last_ruid = 0;
7293 bitmap rvars = BITMAP_ALLOC (NULL);
7294 for (unsigned i = 0; i < num_ssa_names; ++i)
7296 tree ptr = ssa_name (i);
7297 if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
7298 continue;
7300 /* Avoid all this when ptr is not dereferenced? */
7301 tree p = ptr;
     /* Default defs of parameters/results are represented by the
        underlying decl in the solver.  */
7302 if (SSA_NAME_IS_DEFAULT_DEF (ptr)
7303 && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
7304 || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
7305 p = SSA_NAME_VAR (ptr);
7306 varinfo_t vi = lookup_vi_for_tree (p);
7307 if (!vi)
7308 continue;
7309 vi = get_varinfo (find (vi->id));
7310 bitmap_iterator bi;
7311 unsigned j;
7312 varinfo_t restrict_var = NULL;
     /* Scan the points-to solution: accept only a single restrict tag
        (NULL is the only other permissible member).  */
7313 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
7315 varinfo_t oi = get_varinfo (j);
7316 if (oi->is_restrict_var)
7318 if (restrict_var)
7320 if (dump_file && (dump_flags & TDF_DETAILS))
7322 fprintf (dump_file, "found restrict pointed-to "
7323 "for ");
7324 print_generic_expr (dump_file, ptr, 0);
7325 fprintf (dump_file, " but not exclusively\n");
7327 restrict_var = NULL;
7328 break;
7330 restrict_var = oi;
7332 /* NULL is the only other valid points-to entry. */
7333 else if (oi->id != nothing_id)
7335 restrict_var = NULL;
7336 break;
7339 /* Ok, found that ptr must(!) point to a single(!) restrict
7340 variable. */
7341 /* ??? PTA isn't really a proper propagation engine to compute
7342 this property.
7343 ??? We could handle merging of two restricts by unifying them. */
7344 if (restrict_var)
7346 /* Now look at possible dereferences of ptr. */
7347 imm_use_iterator ui;
7348 gimple *use_stmt;
7349 bool used = false;
7350 FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
7352 /* ??? Calls and asms. */
7353 if (!gimple_assign_single_p (use_stmt))
7354 continue;
7355 used |= maybe_set_dependence_info (gimple_assign_lhs (use_stmt),
7356 ptr, clique, restrict_var,
7357 last_ruid);
7358 used |= maybe_set_dependence_info (gimple_assign_rhs1 (use_stmt),
7359 ptr, clique, restrict_var,
7360 last_ruid);
7362 if (used)
7363 bitmap_set_bit (rvars, restrict_var->id);
7367 if (clique != 0)
7369 /* Assign the BASE id zero to all accesses not based on a restrict
7370 pointer. That way they get disambiguated against restrict
7371 accesses but not against each other. */
7372 /* ??? For restricts derived from globals (thus not incoming
7373 parameters) we can't restrict scoping properly thus the following
7374 is too aggressive there. For now we have excluded those globals from
7375 getting into the MR_DEPENDENCE machinery. */
7376 vls_data data = { clique, rvars };
7377 basic_block bb;
7378 FOR_EACH_BB_FN (bb, cfun)
7379 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7380 !gsi_end_p (gsi); gsi_next (&gsi))
7382 gimple *stmt = gsi_stmt (gsi);
7383 walk_stmt_load_store_ops (stmt, &data,
7384 visit_loadstore, visit_loadstore);
7388 BITMAP_FREE (rvars);
7391 /* Compute points-to information for every SSA_NAME pointer in the
7392    current function and compute the transitive closure of escaped
7393    variables to re-initialize the call-clobber states of local variables.
     Returns 0 (no additional TODO flags).  */
7395 unsigned int
7396 compute_may_aliases (void)
     /* IPA PTA results are strictly better than a local recomputation,
        so keep them if present.  */
7398 if (cfun->gimple_df->ipa_pta)
7400 if (dump_file)
7402 fprintf (dump_file, "\nNot re-computing points-to information "
7403 "because IPA points-to information is available.\n\n");
7405 /* But still dump the alias information we already have.  */
7406 dump_alias_info (dump_file);
7409 return 0;
7412 /* For each pointer P_i, determine the sets of variables that P_i may
7413 point-to. Compute the reachability set of escaped and call-used
7414 variables. */
7415 compute_points_to_sets ();
7417 /* Debugging dumps. */
7418 if (dump_file)
7419 dump_alias_info (dump_file);
7421 /* Compute restrict-based memory disambiguations. */
7422 compute_dependence_clique ();
7424 /* Deallocate memory used by aliasing data structures and the internal
7425 points-to solution. */
7426 delete_points_to_sets ();
7428 gcc_assert (!need_ssa_update_p (cfun));
7430 return 0;
7433 /* A dummy pass to cause points-to information to be computed via
7434    TODO_rebuild_alias.  The pass itself performs no work; all the
     effect comes from the todo flag processed by the pass manager.  */
7436 namespace {
7438 const pass_data pass_data_build_alias =
7440 GIMPLE_PASS, /* type */
7441 "alias", /* name */
7442 OPTGROUP_NONE, /* optinfo_flags */
7443 TV_NONE, /* tv_id */
7444 ( PROP_cfg | PROP_ssa ), /* properties_required */
7445 0, /* properties_provided */
7446 0, /* properties_destroyed */
7447 0, /* todo_flags_start */
7448 TODO_rebuild_alias, /* todo_flags_finish */
7451 class pass_build_alias : public gimple_opt_pass
7453 public:
7454 pass_build_alias (gcc::context *ctxt)
7455 : gimple_opt_pass (pass_data_build_alias, ctxt)
7458 /* opt_pass methods: */
     /* Only run when tree points-to analysis is enabled.  */
7459 virtual bool gate (function *) { return flag_tree_pta; }
7461 }; // class pass_build_alias
7463 } // anon namespace
/* Factory used by the pass manager to instantiate the pass.  */
7465 gimple_opt_pass *
7466 make_pass_build_alias (gcc::context *ctxt)
7468 return new pass_build_alias (ctxt);
7471 /* A dummy pass to cause points-to information to be computed via
7472    TODO_rebuild_alias.  Early-pipeline twin of pass_build_alias;
     identical except for the dump-file name "ealias".  */
7474 namespace {
7476 const pass_data pass_data_build_ealias =
7478 GIMPLE_PASS, /* type */
7479 "ealias", /* name */
7480 OPTGROUP_NONE, /* optinfo_flags */
7481 TV_NONE, /* tv_id */
7482 ( PROP_cfg | PROP_ssa ), /* properties_required */
7483 0, /* properties_provided */
7484 0, /* properties_destroyed */
7485 0, /* todo_flags_start */
7486 TODO_rebuild_alias, /* todo_flags_finish */
7489 class pass_build_ealias : public gimple_opt_pass
7491 public:
7492 pass_build_ealias (gcc::context *ctxt)
7493 : gimple_opt_pass (pass_data_build_ealias, ctxt)
7496 /* opt_pass methods: */
     /* Only run when tree points-to analysis is enabled.  */
7497 virtual bool gate (function *) { return flag_tree_pta; }
7499 }; // class pass_build_ealias
7501 } // anon namespace
/* Factory used by the pass manager to instantiate the pass.  */
7503 gimple_opt_pass *
7504 make_pass_build_ealias (gcc::context *ctxt)
7506 return new pass_build_ealias (ctxt);
7510 /* IPA PTA solutions for ESCAPED.  Recomputed by ipa_pta_execute;
     the first initializer appears to set the conservative
     anything flag until then — NOTE(review): confirm field order
     against struct pt_solution's declaration.  */
7511 struct pt_solution ipa_escaped_pt
7512 = { true, false, false, false, false, false, false, false, false, NULL };
7514 /* Associate node with varinfo DATA. Worker for
7515    cgraph_for_symbol_thunks_and_aliases.  Only analyzed aliases and
     thunks are mapped; returns false to continue the traversal.  */
7516 static bool
7517 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
7519 if ((node->alias || node->thunk.thunk_p)
7520 && node->analyzed)
7521 insert_vi_for_tree (node->decl, (varinfo_t)data);
7522 return false;
7525 /* Dump varinfo VI to FILE.  Prints the id, name, all set flags,
     the field-chain links and sizes, and the (old)solution bitmaps.
     Silently ignores a NULL VI.  */
7527 static void
7528 dump_varinfo (FILE *file, varinfo_t vi)
7530 if (vi == NULL)
7531 return;
7533 fprintf (file, "%u: %s\n", vi->id, vi->name);
7535 const char *sep = " ";
7536 if (vi->is_artificial_var)
7537 fprintf (file, "%sartificial", sep);
7538 if (vi->is_special_var)
7539 fprintf (file, "%sspecial", sep);
7540 if (vi->is_unknown_size_var)
7541 fprintf (file, "%sunknown-size", sep);
7542 if (vi->is_full_var)
7543 fprintf (file, "%sfull", sep);
7544 if (vi->is_heap_var)
7545 fprintf (file, "%sheap", sep);
7546 if (vi->may_have_pointers)
7547 fprintf (file, "%smay-have-pointers", sep);
7548 if (vi->only_restrict_pointers)
7549 fprintf (file, "%sonly-restrict-pointers", sep);
7550 if (vi->is_restrict_var)
7551 fprintf (file, "%sis-restrict-var", sep);
7552 if (vi->is_global_var)
7553 fprintf (file, "%sglobal", sep);
7554 if (vi->is_ipa_escape_point)
7555 fprintf (file, "%sipa-escape-point", sep);
7556 if (vi->is_fn_info)
7557 fprintf (file, "%sfn-info", sep);
7558 if (vi->ruid)
7559 fprintf (file, "%srestrict-uid:%u", sep, vi->ruid);
7560 if (vi->next)
7561 fprintf (file, "%snext:%u", sep, vi->next);
7562 if (vi->head != vi->id)
7563 fprintf (file, "%shead:%u", sep, vi->head);
7564 if (vi->offset)
7565 fprintf (file, "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset);
     /* ~0 size/fullsize means "unknown"; omit those to keep output short.  */
7566 if (vi->size != ~(unsigned HOST_WIDE_INT)0)
7567 fprintf (file, "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size);
7568 if (vi->fullsize != ~(unsigned HOST_WIDE_INT)0
7569 && vi->fullsize != vi->size)
7570 fprintf (file, "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep,
7571 vi->fullsize);
7572 fprintf (file, "\n");
7574 if (vi->solution && !bitmap_empty_p (vi->solution))
7576 bitmap_iterator bi;
7577 unsigned i;
7578 fprintf (file, " solution: {");
7579 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
7580 fprintf (file, " %u", i);
7581 fprintf (file, " }\n");
     /* Only print the old solution when it differs; identical sets add
        no information.  */
7584 if (vi->oldsolution && !bitmap_empty_p (vi->oldsolution)
7585 && !bitmap_equal_p (vi->solution, vi->oldsolution))
7587 bitmap_iterator bi;
7588 unsigned i;
7589 fprintf (file, " oldsolution: {");
7590 EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi)
7591 fprintf (file, " %u", i);
7592 fprintf (file, " }\n");
7596 /* Dump varinfo VI to stderr.  Debugger convenience wrapper.  */
7598 DEBUG_FUNCTION void
7599 debug_varinfo (varinfo_t vi)
7601 dump_varinfo (stderr, vi);
7604 /* Dump varmap to FILE: every varinfo in order, including the
     reserved NULL slot 0 (dump_varinfo ignores it).  */
7606 static void
7607 dump_varmap (FILE *file)
7609 if (varmap.length () == 0)
7610 return;
7612 fprintf (file, "variables:\n");
7614 for (unsigned int i = 0; i < varmap.length (); ++i)
7616 varinfo_t vi = get_varinfo (i);
7617 dump_varinfo (file, vi);
7620 fprintf (file, "\n");
7623 /* Dump varmap to stderr.  Debugger convenience wrapper.  */
7625 DEBUG_FUNCTION void
7626 debug_varmap (void)
7628 dump_varmap (stderr);
7631 /* Compute whether node is referred to non-locally.  Worker for
7632    cgraph_for_symbol_thunks_and_aliases; DATA points to a bool that
     is OR-ed with the result.  Returns false to continue traversal.  */
7633 static bool
7634 refered_from_nonlocal_fn (struct cgraph_node *node, void *data)
7636 bool *nonlocal_p = (bool *)data;
7637 *nonlocal_p |= (node->used_from_other_partition
7638 || node->externally_visible
7639 || node->force_output);
7640 return false;
7643 /* Same as refered_from_nonlocal_fn, but for varpool nodes.  */
7644 static bool
7645 refered_from_nonlocal_var (struct varpool_node *node, void *data)
7647 bool *nonlocal_p = (bool *)data;
7648 *nonlocal_p |= (node->used_from_other_partition
7649 || node->externally_visible
7650 || node->force_output);
7651 return false;
7654 /* Execute the driver for IPA PTA. */
7655 static unsigned int
7656 ipa_pta_execute (void)
7658 struct cgraph_node *node;
7659 varpool_node *var;
7660 unsigned int from = 0;
7662 in_ipa_mode = 1;
7664 init_alias_vars ();
7666 if (dump_file && (dump_flags & TDF_DETAILS))
7668 symtab_node::dump_table (dump_file);
7669 fprintf (dump_file, "\n");
7672 if (dump_file)
7674 fprintf (dump_file, "Generating generic constraints\n\n");
7675 dump_constraints (dump_file, from);
7676 fprintf (dump_file, "\n");
7677 from = constraints.length ();
7680 /* Build the constraints. */
7681 FOR_EACH_DEFINED_FUNCTION (node)
7683 varinfo_t vi;
7684 /* Nodes without a body are not interesting. Especially do not
7685 visit clones at this point for now - we get duplicate decls
7686 there for inline clones at least. */
7687 if (!node->has_gimple_body_p () || node->global.inlined_to)
7688 continue;
7689 node->get_body ();
7691 gcc_assert (!node->clone_of);
7693 /* For externally visible or attribute used annotated functions use
7694 local constraints for their arguments.
7695 For local functions we see all callers and thus do not need initial
7696 constraints for parameters. */
7697 bool nonlocal_p = (node->used_from_other_partition
7698 || node->externally_visible
7699 || node->force_output);
7700 node->call_for_symbol_thunks_and_aliases (refered_from_nonlocal_fn,
7701 &nonlocal_p, true);
7703 vi = create_function_info_for (node->decl,
7704 alias_get_name (node->decl), false,
7705 nonlocal_p);
7706 if (dump_file
7707 && from != constraints.length ())
7709 fprintf (dump_file,
7710 "Generating intial constraints for %s", node->name ());
7711 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
7712 fprintf (dump_file, " (%s)",
7713 IDENTIFIER_POINTER
7714 (DECL_ASSEMBLER_NAME (node->decl)));
7715 fprintf (dump_file, "\n\n");
7716 dump_constraints (dump_file, from);
7717 fprintf (dump_file, "\n");
7719 from = constraints.length ();
7722 node->call_for_symbol_thunks_and_aliases
7723 (associate_varinfo_to_alias, vi, true);
7726 /* Create constraints for global variables and their initializers. */
7727 FOR_EACH_VARIABLE (var)
7729 if (var->alias && var->analyzed)
7730 continue;
7732 varinfo_t vi = get_vi_for_tree (var->decl);
7734 /* For the purpose of IPA PTA unit-local globals are not
7735 escape points. */
7736 bool nonlocal_p = (var->used_from_other_partition
7737 || var->externally_visible
7738 || var->force_output);
7739 var->call_for_symbol_and_aliases (refered_from_nonlocal_var,
7740 &nonlocal_p, true);
7741 if (nonlocal_p)
7742 vi->is_ipa_escape_point = true;
7745 if (dump_file
7746 && from != constraints.length ())
7748 fprintf (dump_file,
7749 "Generating constraints for global initializers\n\n");
7750 dump_constraints (dump_file, from);
7751 fprintf (dump_file, "\n");
7752 from = constraints.length ();
7755 FOR_EACH_DEFINED_FUNCTION (node)
7757 struct function *func;
7758 basic_block bb;
7760 /* Nodes without a body are not interesting. */
7761 if (!node->has_gimple_body_p () || node->clone_of)
7762 continue;
7764 if (dump_file)
7766 fprintf (dump_file,
7767 "Generating constraints for %s", node->name ());
7768 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
7769 fprintf (dump_file, " (%s)",
7770 IDENTIFIER_POINTER
7771 (DECL_ASSEMBLER_NAME (node->decl)));
7772 fprintf (dump_file, "\n");
7775 func = DECL_STRUCT_FUNCTION (node->decl);
7776 gcc_assert (cfun == NULL);
7778 /* Build constriants for the function body. */
7779 FOR_EACH_BB_FN (bb, func)
7781 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7782 gsi_next (&gsi))
7784 gphi *phi = gsi.phi ();
7786 if (! virtual_operand_p (gimple_phi_result (phi)))
7787 find_func_aliases (func, phi);
7790 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
7791 gsi_next (&gsi))
7793 gimple *stmt = gsi_stmt (gsi);
7795 find_func_aliases (func, stmt);
7796 find_func_clobbers (func, stmt);
7800 if (dump_file)
7802 fprintf (dump_file, "\n");
7803 dump_constraints (dump_file, from);
7804 fprintf (dump_file, "\n");
7805 from = constraints.length ();
7809 /* From the constraints compute the points-to sets. */
7810 solve_constraints ();
7812 /* Compute the global points-to sets for ESCAPED.
7813 ??? Note that the computed escape set is not correct
7814 for the whole unit as we fail to consider graph edges to
7815 externally visible functions. */
7816 ipa_escaped_pt = find_what_var_points_to (NULL, get_varinfo (escaped_id));
7818 /* Make sure the ESCAPED solution (which is used as placeholder in
7819 other solutions) does not reference itself. This simplifies
7820 points-to solution queries. */
7821 ipa_escaped_pt.ipa_escaped = 0;
7823 /* Assign the points-to sets to the SSA names in the unit. */
7824 FOR_EACH_DEFINED_FUNCTION (node)
7826 tree ptr;
7827 struct function *fn;
7828 unsigned i;
7829 basic_block bb;
7831 /* Nodes without a body are not interesting. */
7832 if (!node->has_gimple_body_p () || node->clone_of)
7833 continue;
7835 fn = DECL_STRUCT_FUNCTION (node->decl);
7837 /* Compute the points-to sets for pointer SSA_NAMEs. */
7838 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
7840 if (ptr
7841 && POINTER_TYPE_P (TREE_TYPE (ptr)))
7842 find_what_p_points_to (node->decl, ptr);
7845 /* Compute the call-use and call-clobber sets for indirect calls
7846 and calls to external functions. */
7847 FOR_EACH_BB_FN (bb, fn)
7849 gimple_stmt_iterator gsi;
7851 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7853 gcall *stmt;
7854 struct pt_solution *pt;
7855 varinfo_t vi, fi;
7856 tree decl;
7858 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
7859 if (!stmt)
7860 continue;
7862 /* Handle direct calls to functions with body. */
7863 decl = gimple_call_fndecl (stmt);
7866 tree called_decl = NULL_TREE;
7867 if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
7868 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
7869 else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
7870 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
7872 if (called_decl != NULL_TREE
7873 && !fndecl_maybe_in_other_partition (called_decl))
7874 decl = called_decl;
7877 if (decl
7878 && (fi = lookup_vi_for_tree (decl))
7879 && fi->is_fn_info)
7881 *gimple_call_clobber_set (stmt)
7882 = find_what_var_points_to
7883 (node->decl, first_vi_for_offset (fi, fi_clobbers));
7884 *gimple_call_use_set (stmt)
7885 = find_what_var_points_to
7886 (node->decl, first_vi_for_offset (fi, fi_uses));
7888 /* Handle direct calls to external functions. */
7889 else if (decl)
7891 pt = gimple_call_use_set (stmt);
7892 if (gimple_call_flags (stmt) & ECF_CONST)
7893 memset (pt, 0, sizeof (struct pt_solution));
7894 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7896 *pt = find_what_var_points_to (node->decl, vi);
7897 /* Escaped (and thus nonlocal) variables are always
7898 implicitly used by calls. */
7899 /* ??? ESCAPED can be empty even though NONLOCAL
7900 always escaped. */
7901 pt->nonlocal = 1;
7902 pt->ipa_escaped = 1;
7904 else
7906 /* If there is nothing special about this call then
7907 we have made everything that is used also escape. */
7908 *pt = ipa_escaped_pt;
7909 pt->nonlocal = 1;
7912 pt = gimple_call_clobber_set (stmt);
7913 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7914 memset (pt, 0, sizeof (struct pt_solution));
7915 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7917 *pt = find_what_var_points_to (node->decl, vi);
7918 /* Escaped (and thus nonlocal) variables are always
7919 implicitly clobbered by calls. */
7920 /* ??? ESCAPED can be empty even though NONLOCAL
7921 always escaped. */
7922 pt->nonlocal = 1;
7923 pt->ipa_escaped = 1;
7925 else
7927 /* If there is nothing special about this call then
7928 we have made everything that is used also escape. */
7929 *pt = ipa_escaped_pt;
7930 pt->nonlocal = 1;
7933 /* Handle indirect calls. */
7934 else if (!decl
7935 && (fi = get_fi_for_callee (stmt)))
7937 /* We need to accumulate all clobbers/uses of all possible
7938 callees. */
7939 fi = get_varinfo (find (fi->id));
7940 /* If we cannot constrain the set of functions we'll end up
7941 calling we end up using/clobbering everything. */
7942 if (bitmap_bit_p (fi->solution, anything_id)
7943 || bitmap_bit_p (fi->solution, nonlocal_id)
7944 || bitmap_bit_p (fi->solution, escaped_id))
7946 pt_solution_reset (gimple_call_clobber_set (stmt));
7947 pt_solution_reset (gimple_call_use_set (stmt));
7949 else
7951 bitmap_iterator bi;
7952 unsigned i;
7953 struct pt_solution *uses, *clobbers;
7955 uses = gimple_call_use_set (stmt);
7956 clobbers = gimple_call_clobber_set (stmt);
7957 memset (uses, 0, sizeof (struct pt_solution));
7958 memset (clobbers, 0, sizeof (struct pt_solution));
7959 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
7961 struct pt_solution sol;
7963 vi = get_varinfo (i);
7964 if (!vi->is_fn_info)
7966 /* ??? We could be more precise here? */
7967 uses->nonlocal = 1;
7968 uses->ipa_escaped = 1;
7969 clobbers->nonlocal = 1;
7970 clobbers->ipa_escaped = 1;
7971 continue;
7974 if (!uses->anything)
7976 sol = find_what_var_points_to
7977 (node->decl,
7978 first_vi_for_offset (vi, fi_uses));
7979 pt_solution_ior_into (uses, &sol);
7981 if (!clobbers->anything)
7983 sol = find_what_var_points_to
7984 (node->decl,
7985 first_vi_for_offset (vi, fi_clobbers));
7986 pt_solution_ior_into (clobbers, &sol);
7994 fn->gimple_df->ipa_pta = true;
7996 /* We have to re-set the final-solution cache after each function
7997 because what is a "global" is dependent on function context. */
7998 final_solutions->empty ();
7999 obstack_free (&final_solutions_obstack, NULL);
8000 gcc_obstack_init (&final_solutions_obstack);
8003 delete_points_to_sets ();
8005 in_ipa_mode = 0;
8007 return 0;
8010 namespace {
8012 const pass_data pass_data_ipa_pta =
8014 SIMPLE_IPA_PASS, /* type */
8015 "pta", /* name */
8016 OPTGROUP_NONE, /* optinfo_flags */
8017 TV_IPA_PTA, /* tv_id */
8018 0, /* properties_required */
8019 0, /* properties_provided */
8020 0, /* properties_destroyed */
8021 0, /* todo_flags_start */
8022 0, /* todo_flags_finish */
8025 class pass_ipa_pta : public simple_ipa_opt_pass
8027 public:
8028 pass_ipa_pta (gcc::context *ctxt)
8029 : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
8032 /* opt_pass methods: */
8033 virtual bool gate (function *)
8035 return (optimize
8036 && flag_ipa_pta
8037 /* Don't bother doing anything if the program has errors. */
8038 && !seen_error ());
8041 opt_pass * clone () { return new pass_ipa_pta (m_ctxt); }
8043 virtual unsigned int execute (function *) { return ipa_pta_execute (); }
8045 }; // class pass_ipa_pta
8047 } // anon namespace
/* Factory function used by the pass manager (see passes.def) to
   instantiate the IPA points-to analysis pass.  Caller owns the
   returned pass object.  */
8049 simple_ipa_opt_pass *
8050 make_pass_ipa_pta (gcc::context *ctxt)
8052 return new pass_ipa_pta (ctxt);