stor-layout.c (layout_type): Move setting complex MODE to layout_type...
[official-gcc.git] / gcc / tree-ssa-structalias.c
blob5e3c7d094b58a4296eae879a9c43a5b373a717ee
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2016 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "tree-pass.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "diagnostic-core.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "stmt.h"
37 #include "gimple-iterator.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "params.h"
41 #include "gimple-walk.h"
43 /* The idea behind this analyzer is to generate set constraints from the
44 program, then solve the resulting constraints in order to generate the
45 points-to sets.
47 Set constraints are a way of modeling program analysis problems that
48 involve sets. They consist of an inclusion constraint language,
49 describing the variables (each variable is a set) and operations that
50 are involved on the variables, and a set of rules that derive facts
51 from these operations. To solve a system of set constraints, you derive
52 all possible facts under the rules, which gives you the correct sets
53 as a consequence.
55 See "Efficient Field-sensitive pointer analysis for C" by "David
56 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
57 http://citeseer.ist.psu.edu/pearce04efficient.html
59 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
60 of C Code in a Second" by ""Nevin Heintze and Olivier Tardieu" at
61 http://citeseer.ist.psu.edu/heintze01ultrafast.html
63 There are three types of real constraint expressions, DEREF,
64 ADDRESSOF, and SCALAR. Each constraint expression consists
65 of a constraint type, a variable, and an offset.
67 SCALAR is a constraint expression type used to represent x, whether
68 it appears on the LHS or the RHS of a statement.
69 DEREF is a constraint expression type used to represent *x, whether
70 it appears on the LHS or the RHS of a statement.
71 ADDRESSOF is a constraint expression used to represent &x, whether
72 it appears on the LHS or the RHS of a statement.
74 Each pointer variable in the program is assigned an integer id, and
75 each field of a structure variable is assigned an integer id as well.
77 Structure variables are linked to their list of fields through a "next
78 field" in each variable that points to the next field in offset
79 order.
80 Each variable for a structure field has
82 1. "size", that tells the size in bits of that field.
83 2. "fullsize, that tells the size in bits of the entire structure.
84 3. "offset", that tells the offset in bits from the beginning of the
85 structure to this field.
87 Thus,
88 struct f
90 int a;
91 int b;
92 } foo;
93 int *bar;
95 looks like
97 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
98 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
99 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
102 In order to solve the system of set constraints, the following is
103 done:
105 1. Each constraint variable x has a solution set associated with it,
106 Sol(x).
108 2. Constraints are separated into direct, copy, and complex.
109 Direct constraints are ADDRESSOF constraints that require no extra
110 processing, such as P = &Q
111 Copy constraints are those of the form P = Q.
112 Complex constraints are all the constraints involving dereferences
113 and offsets (including offsetted copies).
115 3. All direct constraints of the form P = &Q are processed, such
116 that Q is added to Sol(P)
118 4. All complex constraints for a given constraint variable are stored in a
119 linked list attached to that variable's node.
121 5. A directed graph is built out of the copy constraints. Each
122 constraint variable is a node in the graph, and an edge from
123 Q to P is added for each copy constraint of the form P = Q
125 6. The graph is then walked, and solution sets are
126 propagated along the copy edges, such that an edge from Q to P
127 causes Sol(P) <- Sol(P) union Sol(Q).
129 7. As we visit each node, all complex constraints associated with
130 that node are processed by adding appropriate copy edges to the graph, or the
131 appropriate variables to the solution set.
133 8. The process of walking the graph is iterated until no solution
134 sets change.
136 Prior to walking the graph in steps 6 and 7, We perform static
137 cycle elimination on the constraint graph, as well
138 as off-line variable substitution.
140 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
141 on and turned into anything), but isn't. You can just see what offset
142 inside the pointed-to struct it's going to access.
144 TODO: Constant bounded arrays can be handled as if they were structs of the
145 same number of elements.
147 TODO: Modeling heap and incoming pointers becomes much better if we
148 add fields to them as we discover them, which we could do.
150 TODO: We could handle unions, but to be honest, it's probably not
151 worth the pain or slowdown. */
153 /* IPA-PTA optimizations possible.
155 When the indirect function called is ANYTHING we can add disambiguation
156 based on the function signatures (or simply the parameter count which
157 is the varinfo size). We also do not need to consider functions that
158 do not have their address taken.
160 The is_global_var bit which marks escape points is overly conservative
161 in IPA mode. Split it to is_escape_point and is_global_var - only
162 externally visible globals are escape points in IPA mode.
163 There is now is_ipa_escape_point but this is only used in a few
164 selected places.
166 The way we introduce DECL_PT_UID to avoid fixing up all points-to
167 sets in the translation unit when we copy a DECL during inlining
168 pessimizes precision. The advantage is that the DECL_PT_UID keeps
169 compile-time and memory usage overhead low - the points-to sets
170 do not grow or get unshared as they would during a fixup phase.
171 An alternative solution is to delay IPA PTA until after all
172 inlining transformations have been applied.
174 The way we propagate clobber/use information isn't optimized.
175 It should use a new complex constraint that properly filters
176 out local variables of the callee (though that would make
177 the sets invalid after inlining). OTOH we might as well
178 admit defeat to WHOPR and simply do all the clobber/use analysis
179 and propagation after PTA finished but before we threw away
180 points-to information for memory variables. WHOPR and PTA
181 do not play along well anyway - the whole constraint solving
182 would need to be done in WPA phase and it will be very interesting
183 to apply the results to local SSA names during LTRANS phase.
185 We probably should compute a per-function unit-ESCAPE solution
186 propagating it simply like the clobber / uses solutions. The
187 solution can go alongside the non-IPA espaced solution and be
188 used to query which vars escape the unit through a function.
189 This is also required to make the escaped-HEAP trick work in IPA mode.
191 We never put function decls in points-to sets so we do not
192 keep the set of called functions for indirect calls.
194 And probably more. */
196 static bool use_field_sensitive = true;
197 static int in_ipa_mode = 0;
199 /* Used for predecessor bitmaps. */
200 static bitmap_obstack predbitmap_obstack;
202 /* Used for points-to sets. */
203 static bitmap_obstack pta_obstack;
205 /* Used for oldsolution members of variables. */
206 static bitmap_obstack oldpta_obstack;
208 /* Used for per-solver-iteration bitmaps. */
209 static bitmap_obstack iteration_obstack;
211 static unsigned int create_variable_info_for (tree, const char *, bool);
212 typedef struct constraint_graph *constraint_graph_t;
213 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
215 struct constraint;
216 typedef struct constraint *constraint_t;
219 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
220 if (a) \
221 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
223 static struct constraint_stats
225 unsigned int total_vars;
226 unsigned int nonpointer_vars;
227 unsigned int unified_vars_static;
228 unsigned int unified_vars_dynamic;
229 unsigned int iterations;
230 unsigned int num_edges;
231 unsigned int num_implicit_edges;
232 unsigned int points_to_sets_created;
233 } stats;
235 struct variable_info
237 /* ID of this variable */
238 unsigned int id;
240 /* True if this is a variable created by the constraint analysis, such as
241 heap variables and constraints we had to break up. */
242 unsigned int is_artificial_var : 1;
244 /* True if this is a special variable whose solution set should not be
245 changed. */
246 unsigned int is_special_var : 1;
248 /* True for variables whose size is not known or variable. */
249 unsigned int is_unknown_size_var : 1;
251 /* True for (sub-)fields that represent a whole variable. */
252 unsigned int is_full_var : 1;
254 /* True if this is a heap variable. */
255 unsigned int is_heap_var : 1;
257 /* True if this field may contain pointers. */
258 unsigned int may_have_pointers : 1;
260 /* True if this field has only restrict qualified pointers. */
261 unsigned int only_restrict_pointers : 1;
263 /* True if this represents a heap var created for a restrict qualified
264 pointer. */
265 unsigned int is_restrict_var : 1;
267 /* True if this represents a global variable. */
268 unsigned int is_global_var : 1;
270 /* True if this represents a module escape point for IPA analysis. */
271 unsigned int is_ipa_escape_point : 1;
273 /* True if this represents a IPA function info. */
274 unsigned int is_fn_info : 1;
276 /* ??? Store somewhere better. */
277 unsigned short ruid;
279 /* The ID of the variable for the next field in this structure
280 or zero for the last field in this structure. */
281 unsigned next;
283 /* The ID of the variable for the first field in this structure. */
284 unsigned head;
286 /* Offset of this variable, in bits, from the base variable */
287 unsigned HOST_WIDE_INT offset;
289 /* Size of the variable, in bits. */
290 unsigned HOST_WIDE_INT size;
292 /* Full size of the base variable, in bits. */
293 unsigned HOST_WIDE_INT fullsize;
295 /* Name of this variable */
296 const char *name;
298 /* Tree that this variable is associated with. */
299 tree decl;
301 /* Points-to set for this variable. */
302 bitmap solution;
304 /* Old points-to set for this variable. */
305 bitmap oldsolution;
307 typedef struct variable_info *varinfo_t;
309 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
310 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
311 unsigned HOST_WIDE_INT);
312 static varinfo_t lookup_vi_for_tree (tree);
313 static inline bool type_can_have_subvars (const_tree);
314 static void make_param_constraints (varinfo_t);
316 /* Pool of variable info structures. */
317 static object_allocator<variable_info> variable_info_pool
318 ("Variable info pool");
320 /* Map varinfo to final pt_solution. */
321 static hash_map<varinfo_t, pt_solution *> *final_solutions;
322 struct obstack final_solutions_obstack;
324 /* Table of variable info structures for constraint variables.
325 Indexed directly by variable info id. */
326 static vec<varinfo_t> varmap;
328 /* Return the varmap element N */
330 static inline varinfo_t
331 get_varinfo (unsigned int n)
333 return varmap[n];
336 /* Return the next variable in the list of sub-variables of VI
337 or NULL if VI is the last sub-variable. */
339 static inline varinfo_t
340 vi_next (varinfo_t vi)
342 return get_varinfo (vi->next);
345 /* Static IDs for the special variables. Variable ID zero is unused
346 and used as terminator for the sub-variable chain. */
347 enum { nothing_id = 1, anything_id = 2, string_id = 3,
348 escaped_id = 4, nonlocal_id = 5,
349 storedanything_id = 6, integer_id = 7 };
351 /* Return a new variable info structure consisting for a variable
352 named NAME, and using constraint graph node NODE. Append it
353 to the vector of variable info structures. */
355 static varinfo_t
356 new_var_info (tree t, const char *name, bool add_id)
358 unsigned index = varmap.length ();
359 varinfo_t ret = variable_info_pool.allocate ();
361 if (dump_file && add_id)
363 char *tempname = xasprintf ("%s(%d)", name, index);
364 name = ggc_strdup (tempname);
365 free (tempname);
368 ret->id = index;
369 ret->name = name;
370 ret->decl = t;
371 /* Vars without decl are artificial and do not have sub-variables. */
372 ret->is_artificial_var = (t == NULL_TREE);
373 ret->is_special_var = false;
374 ret->is_unknown_size_var = false;
375 ret->is_full_var = (t == NULL_TREE);
376 ret->is_heap_var = false;
377 ret->may_have_pointers = true;
378 ret->only_restrict_pointers = false;
379 ret->is_restrict_var = false;
380 ret->ruid = 0;
381 ret->is_global_var = (t == NULL_TREE);
382 ret->is_ipa_escape_point = false;
383 ret->is_fn_info = false;
384 if (t && DECL_P (t))
385 ret->is_global_var = (is_global_var (t)
386 /* We have to treat even local register variables
387 as escape points. */
388 || (TREE_CODE (t) == VAR_DECL
389 && DECL_HARD_REGISTER (t)));
390 ret->solution = BITMAP_ALLOC (&pta_obstack);
391 ret->oldsolution = NULL;
392 ret->next = 0;
393 ret->head = ret->id;
395 stats.total_vars++;
397 varmap.safe_push (ret);
399 return ret;
402 /* A map mapping call statements to per-stmt variables for uses
403 and clobbers specific to the call. */
404 static hash_map<gimple *, varinfo_t> *call_stmt_vars;
406 /* Lookup or create the variable for the call statement CALL. */
408 static varinfo_t
409 get_call_vi (gcall *call)
411 varinfo_t vi, vi2;
413 bool existed;
414 varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
415 if (existed)
416 return *slot_p;
418 vi = new_var_info (NULL_TREE, "CALLUSED", true);
419 vi->offset = 0;
420 vi->size = 1;
421 vi->fullsize = 2;
422 vi->is_full_var = true;
424 vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED", true);
425 vi2->offset = 1;
426 vi2->size = 1;
427 vi2->fullsize = 2;
428 vi2->is_full_var = true;
430 vi->next = vi2->id;
432 *slot_p = vi;
433 return vi;
436 /* Lookup the variable for the call statement CALL representing
437 the uses. Returns NULL if there is nothing special about this call. */
439 static varinfo_t
440 lookup_call_use_vi (gcall *call)
442 varinfo_t *slot_p = call_stmt_vars->get (call);
443 if (slot_p)
444 return *slot_p;
446 return NULL;
449 /* Lookup the variable for the call statement CALL representing
450 the clobbers. Returns NULL if there is nothing special about this call. */
452 static varinfo_t
453 lookup_call_clobber_vi (gcall *call)
455 varinfo_t uses = lookup_call_use_vi (call);
456 if (!uses)
457 return NULL;
459 return vi_next (uses);
462 /* Lookup or create the variable for the call statement CALL representing
463 the uses. */
465 static varinfo_t
466 get_call_use_vi (gcall *call)
468 return get_call_vi (call);
471 /* Lookup or create the variable for the call statement CALL representing
472 the clobbers. */
474 static varinfo_t ATTRIBUTE_UNUSED
475 get_call_clobber_vi (gcall *call)
477 return vi_next (get_call_vi (call));
481 enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};
483 /* An expression that appears in a constraint. */
485 struct constraint_expr
487 /* Constraint type. */
488 constraint_expr_type type;
490 /* Variable we are referring to in the constraint. */
491 unsigned int var;
493 /* Offset, in bits, of this constraint from the beginning of
494 variables it ends up referring to.
496 IOW, in a deref constraint, we would deref, get the result set,
497 then add OFFSET to each member. */
498 HOST_WIDE_INT offset;
501 /* Use 0x8000... as special unknown offset. */
502 #define UNKNOWN_OFFSET HOST_WIDE_INT_MIN
504 typedef struct constraint_expr ce_s;
505 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
506 static void get_constraint_for (tree, vec<ce_s> *);
507 static void get_constraint_for_rhs (tree, vec<ce_s> *);
508 static void do_deref (vec<ce_s> *);
510 /* Our set constraints are made up of two constraint expressions, one
511 LHS, and one RHS.
513 As described in the introduction, our set constraints each represent an
514 operation between set valued variables.
516 struct constraint
518 struct constraint_expr lhs;
519 struct constraint_expr rhs;
522 /* List of constraints that we use to build the constraint graph from. */
524 static vec<constraint_t> constraints;
525 static object_allocator<constraint> constraint_pool ("Constraint pool");
527 /* The constraint graph is represented as an array of bitmaps
528 containing successor nodes. */
530 struct constraint_graph
532 /* Size of this graph, which may be different than the number of
533 nodes in the variable map. */
534 unsigned int size;
536 /* Explicit successors of each node. */
537 bitmap *succs;
539 /* Implicit predecessors of each node (Used for variable
540 substitution). */
541 bitmap *implicit_preds;
543 /* Explicit predecessors of each node (Used for variable substitution). */
544 bitmap *preds;
546 /* Indirect cycle representatives, or -1 if the node has no indirect
547 cycles. */
548 int *indirect_cycles;
550 /* Representative node for a node. rep[a] == a unless the node has
551 been unified. */
552 unsigned int *rep;
554 /* Equivalence class representative for a label. This is used for
555 variable substitution. */
556 int *eq_rep;
558 /* Pointer equivalence label for a node. All nodes with the same
559 pointer equivalence label can be unified together at some point
560 (either during constraint optimization or after the constraint
561 graph is built). */
562 unsigned int *pe;
564 /* Pointer equivalence representative for a label. This is used to
565 handle nodes that are pointer equivalent but not location
566 equivalent. We can unite these once the addressof constraints
567 are transformed into initial points-to sets. */
568 int *pe_rep;
570 /* Pointer equivalence label for each node, used during variable
571 substitution. */
572 unsigned int *pointer_label;
574 /* Location equivalence label for each node, used during location
575 equivalence finding. */
576 unsigned int *loc_label;
578 /* Pointed-by set for each node, used during location equivalence
579 finding. This is pointed-by rather than pointed-to, because it
580 is constructed using the predecessor graph. */
581 bitmap *pointed_by;
583 /* Points to sets for pointer equivalence. This is *not* the actual
584 points-to sets for nodes. */
585 bitmap *points_to;
587 /* Bitmap of nodes where the bit is set if the node is a direct
588 node. Used for variable substitution. */
589 sbitmap direct_nodes;
591 /* Bitmap of nodes where the bit is set if the node is address
592 taken. Used for variable substitution. */
593 bitmap address_taken;
595 /* Vector of complex constraints for each graph node. Complex
596 constraints are those involving dereferences or offsets that are
597 not 0. */
598 vec<constraint_t> *complex;
601 static constraint_graph_t graph;
603 /* During variable substitution and the offline version of indirect
604 cycle finding, we create nodes to represent dereferences and
605 address taken constraints. These represent where these start and
606 end. */
607 #define FIRST_REF_NODE (varmap).length ()
608 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
610 /* Return the representative node for NODE, if NODE has been unioned
611 with another NODE.
612 This function performs path compression along the way to finding
613 the representative. */
615 static unsigned int
616 find (unsigned int node)
618 gcc_checking_assert (node < graph->size);
619 if (graph->rep[node] != node)
620 return graph->rep[node] = find (graph->rep[node]);
621 return node;
624 /* Union the TO and FROM nodes to the TO nodes.
625 Note that at some point in the future, we may want to do
626 union-by-rank, in which case we are going to have to return the
627 node we unified to. */
629 static bool
630 unite (unsigned int to, unsigned int from)
632 gcc_checking_assert (to < graph->size && from < graph->size);
633 if (to != from && graph->rep[from] != to)
635 graph->rep[from] = to;
636 return true;
638 return false;
641 /* Create a new constraint consisting of LHS and RHS expressions. */
643 static constraint_t
644 new_constraint (const struct constraint_expr lhs,
645 const struct constraint_expr rhs)
647 constraint_t ret = constraint_pool.allocate ();
648 ret->lhs = lhs;
649 ret->rhs = rhs;
650 return ret;
653 /* Print out constraint C to FILE. */
655 static void
656 dump_constraint (FILE *file, constraint_t c)
658 if (c->lhs.type == ADDRESSOF)
659 fprintf (file, "&");
660 else if (c->lhs.type == DEREF)
661 fprintf (file, "*");
662 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
663 if (c->lhs.offset == UNKNOWN_OFFSET)
664 fprintf (file, " + UNKNOWN");
665 else if (c->lhs.offset != 0)
666 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
667 fprintf (file, " = ");
668 if (c->rhs.type == ADDRESSOF)
669 fprintf (file, "&");
670 else if (c->rhs.type == DEREF)
671 fprintf (file, "*");
672 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
673 if (c->rhs.offset == UNKNOWN_OFFSET)
674 fprintf (file, " + UNKNOWN");
675 else if (c->rhs.offset != 0)
676 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
680 void debug_constraint (constraint_t);
681 void debug_constraints (void);
682 void debug_constraint_graph (void);
683 void debug_solution_for_var (unsigned int);
684 void debug_sa_points_to_info (void);
685 void debug_varinfo (varinfo_t);
686 void debug_varmap (void);
688 /* Print out constraint C to stderr. */
690 DEBUG_FUNCTION void
691 debug_constraint (constraint_t c)
693 dump_constraint (stderr, c);
694 fprintf (stderr, "\n");
697 /* Print out all constraints to FILE */
699 static void
700 dump_constraints (FILE *file, int from)
702 int i;
703 constraint_t c;
704 for (i = from; constraints.iterate (i, &c); i++)
705 if (c)
707 dump_constraint (file, c);
708 fprintf (file, "\n");
712 /* Print out all constraints to stderr. */
714 DEBUG_FUNCTION void
715 debug_constraints (void)
717 dump_constraints (stderr, 0);
720 /* Print the constraint graph in dot format. */
722 static void
723 dump_constraint_graph (FILE *file)
725 unsigned int i;
727 /* Only print the graph if it has already been initialized: */
728 if (!graph)
729 return;
731 /* Prints the header of the dot file: */
732 fprintf (file, "strict digraph {\n");
733 fprintf (file, " node [\n shape = box\n ]\n");
734 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
735 fprintf (file, "\n // List of nodes and complex constraints in "
736 "the constraint graph:\n");
738 /* The next lines print the nodes in the graph together with the
739 complex constraints attached to them. */
740 for (i = 1; i < graph->size; i++)
742 if (i == FIRST_REF_NODE)
743 continue;
744 if (find (i) != i)
745 continue;
746 if (i < FIRST_REF_NODE)
747 fprintf (file, "\"%s\"", get_varinfo (i)->name);
748 else
749 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
750 if (graph->complex[i].exists ())
752 unsigned j;
753 constraint_t c;
754 fprintf (file, " [label=\"\\N\\n");
755 for (j = 0; graph->complex[i].iterate (j, &c); ++j)
757 dump_constraint (file, c);
758 fprintf (file, "\\l");
760 fprintf (file, "\"]");
762 fprintf (file, ";\n");
765 /* Go over the edges. */
766 fprintf (file, "\n // Edges in the constraint graph:\n");
767 for (i = 1; i < graph->size; i++)
769 unsigned j;
770 bitmap_iterator bi;
771 if (find (i) != i)
772 continue;
773 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
775 unsigned to = find (j);
776 if (i == to)
777 continue;
778 if (i < FIRST_REF_NODE)
779 fprintf (file, "\"%s\"", get_varinfo (i)->name);
780 else
781 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
782 fprintf (file, " -> ");
783 if (to < FIRST_REF_NODE)
784 fprintf (file, "\"%s\"", get_varinfo (to)->name);
785 else
786 fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
787 fprintf (file, ";\n");
791 /* Prints the tail of the dot file. */
792 fprintf (file, "}\n");
795 /* Print out the constraint graph to stderr. */
797 DEBUG_FUNCTION void
798 debug_constraint_graph (void)
800 dump_constraint_graph (stderr);
803 /* SOLVER FUNCTIONS
805 The solver is a simple worklist solver, that works on the following
806 algorithm:
808 sbitmap changed_nodes = all zeroes;
809 changed_count = 0;
810 For each node that is not already collapsed:
811 changed_count++;
812 set bit in changed nodes
814 while (changed_count > 0)
816 compute topological ordering for constraint graph
818 find and collapse cycles in the constraint graph (updating
819 changed if necessary)
821 for each node (n) in the graph in topological order:
822 changed_count--;
824 Process each complex constraint associated with the node,
825 updating changed if necessary.
827 For each outgoing edge from n, propagate the solution from n to
828 the destination of the edge, updating changed as necessary.
830 } */
832 /* Return true if two constraint expressions A and B are equal. */
834 static bool
835 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
837 return a.type == b.type && a.var == b.var && a.offset == b.offset;
840 /* Return true if constraint expression A is less than constraint expression
841 B. This is just arbitrary, but consistent, in order to give them an
842 ordering. */
844 static bool
845 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
847 if (a.type == b.type)
849 if (a.var == b.var)
850 return a.offset < b.offset;
851 else
852 return a.var < b.var;
854 else
855 return a.type < b.type;
858 /* Return true if constraint A is less than constraint B. This is just
859 arbitrary, but consistent, in order to give them an ordering. */
861 static bool
862 constraint_less (const constraint_t &a, const constraint_t &b)
864 if (constraint_expr_less (a->lhs, b->lhs))
865 return true;
866 else if (constraint_expr_less (b->lhs, a->lhs))
867 return false;
868 else
869 return constraint_expr_less (a->rhs, b->rhs);
872 /* Return true if two constraints A and B are equal. */
874 static bool
875 constraint_equal (struct constraint a, struct constraint b)
877 return constraint_expr_equal (a.lhs, b.lhs)
878 && constraint_expr_equal (a.rhs, b.rhs);
882 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
884 static constraint_t
885 constraint_vec_find (vec<constraint_t> vec,
886 struct constraint lookfor)
888 unsigned int place;
889 constraint_t found;
891 if (!vec.exists ())
892 return NULL;
894 place = vec.lower_bound (&lookfor, constraint_less);
895 if (place >= vec.length ())
896 return NULL;
897 found = vec[place];
898 if (!constraint_equal (*found, lookfor))
899 return NULL;
900 return found;
903 /* Union two constraint vectors, TO and FROM. Put the result in TO.
904 Returns true of TO set is changed. */
906 static bool
907 constraint_set_union (vec<constraint_t> *to,
908 vec<constraint_t> *from)
910 int i;
911 constraint_t c;
912 bool any_change = false;
914 FOR_EACH_VEC_ELT (*from, i, c)
916 if (constraint_vec_find (*to, *c) == NULL)
918 unsigned int place = to->lower_bound (c, constraint_less);
919 to->safe_insert (place, c);
920 any_change = true;
923 return any_change;
926 /* Expands the solution in SET to all sub-fields of variables included. */
928 static bitmap
929 solution_set_expand (bitmap set, bitmap *expanded)
931 bitmap_iterator bi;
932 unsigned j;
934 if (*expanded)
935 return *expanded;
937 *expanded = BITMAP_ALLOC (&iteration_obstack);
939 /* In a first pass expand to the head of the variables we need to
940 add all sub-fields off. This avoids quadratic behavior. */
941 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
943 varinfo_t v = get_varinfo (j);
944 if (v->is_artificial_var
945 || v->is_full_var)
946 continue;
947 bitmap_set_bit (*expanded, v->head);
950 /* In the second pass now expand all head variables with subfields. */
951 EXECUTE_IF_SET_IN_BITMAP (*expanded, 0, j, bi)
953 varinfo_t v = get_varinfo (j);
954 if (v->head != j)
955 continue;
956 for (v = vi_next (v); v != NULL; v = vi_next (v))
957 bitmap_set_bit (*expanded, v->id);
960 /* And finally set the rest of the bits from SET. */
961 bitmap_ior_into (*expanded, set);
963 return *expanded;
966 /* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
967 process. */
969 static bool
970 set_union_with_increment (bitmap to, bitmap delta, HOST_WIDE_INT inc,
971 bitmap *expanded_delta)
973 bool changed = false;
974 bitmap_iterator bi;
975 unsigned int i;
977 /* If the solution of DELTA contains anything it is good enough to transfer
978 this to TO. */
979 if (bitmap_bit_p (delta, anything_id))
980 return bitmap_set_bit (to, anything_id);
982 /* If the offset is unknown we have to expand the solution to
983 all subfields. */
984 if (inc == UNKNOWN_OFFSET)
986 delta = solution_set_expand (delta, expanded_delta);
987 changed |= bitmap_ior_into (to, delta);
988 return changed;
991 /* For non-zero offset union the offsetted solution into the destination. */
992 EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
994 varinfo_t vi = get_varinfo (i);
996 /* If this is a variable with just one field just set its bit
997 in the result. */
998 if (vi->is_artificial_var
999 || vi->is_unknown_size_var
1000 || vi->is_full_var)
1001 changed |= bitmap_set_bit (to, i);
1002 else
1004 HOST_WIDE_INT fieldoffset = vi->offset + inc;
1005 unsigned HOST_WIDE_INT size = vi->size;
1007 /* If the offset makes the pointer point to before the
1008 variable use offset zero for the field lookup. */
1009 if (fieldoffset < 0)
1010 vi = get_varinfo (vi->head);
1011 else
1012 vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
1016 changed |= bitmap_set_bit (to, vi->id);
1017 if (vi->is_full_var
1018 || vi->next == 0)
1019 break;
1021 /* We have to include all fields that overlap the current field
1022 shifted by inc. */
1023 vi = vi_next (vi);
1025 while (vi->offset < fieldoffset + size);
1029 return changed;
1032 /* Insert constraint C into the list of complex constraints for graph
1033 node VAR. */
1035 static void
1036 insert_into_complex (constraint_graph_t graph,
1037 unsigned int var, constraint_t c)
1039 vec<constraint_t> complex = graph->complex[var];
1040 unsigned int place = complex.lower_bound (c, constraint_less);
1042 /* Only insert constraints that do not already exist. */
1043 if (place >= complex.length ()
1044 || !constraint_equal (*c, *complex[place]))
1045 graph->complex[var].safe_insert (place, c);
1049 /* Condense two variable nodes into a single variable node, by moving
1050 all associated info from FROM to TO. Returns true if TO node's
1051 constraint set changes after the merge. */
1053 static bool
1054 merge_node_constraints (constraint_graph_t graph, unsigned int to,
1055 unsigned int from)
1057 unsigned int i;
1058 constraint_t c;
1059 bool any_change = false;
1061 gcc_checking_assert (find (from) == to);
1063 /* Move all complex constraints from src node into to node */
1064 FOR_EACH_VEC_ELT (graph->complex[from], i, c)
1066 /* In complex constraints for node FROM, we may have either
1067 a = *FROM, and *FROM = a, or an offseted constraint which are
1068 always added to the rhs node's constraints. */
1070 if (c->rhs.type == DEREF)
1071 c->rhs.var = to;
1072 else if (c->lhs.type == DEREF)
1073 c->lhs.var = to;
1074 else
1075 c->rhs.var = to;
1078 any_change = constraint_set_union (&graph->complex[to],
1079 &graph->complex[from]);
1080 graph->complex[from].release ();
1081 return any_change;
1085 /* Remove edges involving NODE from GRAPH. */
1087 static void
1088 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1090 if (graph->succs[node])
1091 BITMAP_FREE (graph->succs[node]);
1094 /* Merge GRAPH nodes FROM and TO into node TO. */
1096 static void
1097 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1098 unsigned int from)
1100 if (graph->indirect_cycles[from] != -1)
1102 /* If we have indirect cycles with the from node, and we have
1103 none on the to node, the to node has indirect cycles from the
1104 from node now that they are unified.
1105 If indirect cycles exist on both, unify the nodes that they
1106 are in a cycle with, since we know they are in a cycle with
1107 each other. */
1108 if (graph->indirect_cycles[to] == -1)
1109 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1112 /* Merge all the successor edges. */
1113 if (graph->succs[from])
1115 if (!graph->succs[to])
1116 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1117 bitmap_ior_into (graph->succs[to],
1118 graph->succs[from]);
1121 clear_edges_for_node (graph, from);
1125 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1126 it doesn't exist in the graph already. */
1128 static void
1129 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1130 unsigned int from)
1132 if (to == from)
1133 return;
1135 if (!graph->implicit_preds[to])
1136 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1138 if (bitmap_set_bit (graph->implicit_preds[to], from))
1139 stats.num_implicit_edges++;
1142 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1143 it doesn't exist in the graph already.
1144 Return false if the edge already existed, true otherwise. */
1146 static void
1147 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1148 unsigned int from)
1150 if (!graph->preds[to])
1151 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1152 bitmap_set_bit (graph->preds[to], from);
1155 /* Add a graph edge to GRAPH, going from FROM to TO if
1156 it doesn't exist in the graph already.
1157 Return false if the edge already existed, true otherwise. */
1159 static bool
1160 add_graph_edge (constraint_graph_t graph, unsigned int to,
1161 unsigned int from)
1163 if (to == from)
1165 return false;
1167 else
1169 bool r = false;
1171 if (!graph->succs[from])
1172 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1173 if (bitmap_set_bit (graph->succs[from], to))
1175 r = true;
1176 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1177 stats.num_edges++;
1179 return r;
1184 /* Initialize the constraint graph structure to contain SIZE nodes. */
1186 static void
1187 init_graph (unsigned int size)
1189 unsigned int j;
1191 graph = XCNEW (struct constraint_graph);
1192 graph->size = size;
1193 graph->succs = XCNEWVEC (bitmap, graph->size);
1194 graph->indirect_cycles = XNEWVEC (int, graph->size);
1195 graph->rep = XNEWVEC (unsigned int, graph->size);
1196 /* ??? Macros do not support template types with multiple arguments,
1197 so we use a typedef to work around it. */
1198 typedef vec<constraint_t> vec_constraint_t_heap;
1199 graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1200 graph->pe = XCNEWVEC (unsigned int, graph->size);
1201 graph->pe_rep = XNEWVEC (int, graph->size);
1203 for (j = 0; j < graph->size; j++)
1205 graph->rep[j] = j;
1206 graph->pe_rep[j] = -1;
1207 graph->indirect_cycles[j] = -1;
1211 /* Build the constraint graph, adding only predecessor edges right now. */
1213 static void
1214 build_pred_graph (void)
1216 int i;
1217 constraint_t c;
1218 unsigned int j;
1220 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1221 graph->preds = XCNEWVEC (bitmap, graph->size);
1222 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1223 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1224 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1225 graph->points_to = XCNEWVEC (bitmap, graph->size);
1226 graph->eq_rep = XNEWVEC (int, graph->size);
1227 graph->direct_nodes = sbitmap_alloc (graph->size);
1228 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1229 bitmap_clear (graph->direct_nodes);
1231 for (j = 1; j < FIRST_REF_NODE; j++)
1233 if (!get_varinfo (j)->is_special_var)
1234 bitmap_set_bit (graph->direct_nodes, j);
1237 for (j = 0; j < graph->size; j++)
1238 graph->eq_rep[j] = -1;
1240 for (j = 0; j < varmap.length (); j++)
1241 graph->indirect_cycles[j] = -1;
1243 FOR_EACH_VEC_ELT (constraints, i, c)
1245 struct constraint_expr lhs = c->lhs;
1246 struct constraint_expr rhs = c->rhs;
1247 unsigned int lhsvar = lhs.var;
1248 unsigned int rhsvar = rhs.var;
1250 if (lhs.type == DEREF)
1252 /* *x = y. */
1253 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1254 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1256 else if (rhs.type == DEREF)
1258 /* x = *y */
1259 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1260 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1261 else
1262 bitmap_clear_bit (graph->direct_nodes, lhsvar);
1264 else if (rhs.type == ADDRESSOF)
1266 varinfo_t v;
1268 /* x = &y */
1269 if (graph->points_to[lhsvar] == NULL)
1270 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1271 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1273 if (graph->pointed_by[rhsvar] == NULL)
1274 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1275 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1277 /* Implicitly, *x = y */
1278 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1280 /* All related variables are no longer direct nodes. */
1281 bitmap_clear_bit (graph->direct_nodes, rhsvar);
1282 v = get_varinfo (rhsvar);
1283 if (!v->is_full_var)
1285 v = get_varinfo (v->head);
1288 bitmap_clear_bit (graph->direct_nodes, v->id);
1289 v = vi_next (v);
1291 while (v != NULL);
1293 bitmap_set_bit (graph->address_taken, rhsvar);
1295 else if (lhsvar > anything_id
1296 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1298 /* x = y */
1299 add_pred_graph_edge (graph, lhsvar, rhsvar);
1300 /* Implicitly, *x = *y */
1301 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1302 FIRST_REF_NODE + rhsvar);
1304 else if (lhs.offset != 0 || rhs.offset != 0)
1306 if (rhs.offset != 0)
1307 bitmap_clear_bit (graph->direct_nodes, lhs.var);
1308 else if (lhs.offset != 0)
1309 bitmap_clear_bit (graph->direct_nodes, rhs.var);
1314 /* Build the constraint graph, adding successor edges. */
1316 static void
1317 build_succ_graph (void)
1319 unsigned i, t;
1320 constraint_t c;
1322 FOR_EACH_VEC_ELT (constraints, i, c)
1324 struct constraint_expr lhs;
1325 struct constraint_expr rhs;
1326 unsigned int lhsvar;
1327 unsigned int rhsvar;
1329 if (!c)
1330 continue;
1332 lhs = c->lhs;
1333 rhs = c->rhs;
1334 lhsvar = find (lhs.var);
1335 rhsvar = find (rhs.var);
1337 if (lhs.type == DEREF)
1339 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1340 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1342 else if (rhs.type == DEREF)
1344 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1345 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1347 else if (rhs.type == ADDRESSOF)
1349 /* x = &y */
1350 gcc_checking_assert (find (rhs.var) == rhs.var);
1351 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1353 else if (lhsvar > anything_id
1354 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1356 add_graph_edge (graph, lhsvar, rhsvar);
1360 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1361 receive pointers. */
1362 t = find (storedanything_id);
1363 for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1365 if (!bitmap_bit_p (graph->direct_nodes, i)
1366 && get_varinfo (i)->may_have_pointers)
1367 add_graph_edge (graph, find (i), t);
1370 /* Everything stored to ANYTHING also potentially escapes. */
1371 add_graph_edge (graph, find (escaped_id), t);
1375 /* Changed variables on the last iteration. */
1376 static bitmap changed;
1378 /* Strongly Connected Component visitation info. */
1380 struct scc_info
1382 sbitmap visited;
1383 sbitmap deleted;
1384 unsigned int *dfs;
1385 unsigned int *node_mapping;
1386 int current_index;
1387 vec<unsigned> scc_stack;
1391 /* Recursive routine to find strongly connected components in GRAPH.
1392 SI is the SCC info to store the information in, and N is the id of current
1393 graph node we are processing.
1395 This is Tarjan's strongly connected component finding algorithm, as
1396 modified by Nuutila to keep only non-root nodes on the stack.
1397 The algorithm can be found in "On finding the strongly connected
1398 connected components in a directed graph" by Esko Nuutila and Eljas
1399 Soisalon-Soininen, in Information Processing Letters volume 49,
1400 number 1, pages 9-14. */
1402 static void
1403 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1405 unsigned int i;
1406 bitmap_iterator bi;
1407 unsigned int my_dfs;
1409 bitmap_set_bit (si->visited, n);
1410 si->dfs[n] = si->current_index ++;
1411 my_dfs = si->dfs[n];
1413 /* Visit all the successors. */
1414 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1416 unsigned int w;
1418 if (i > LAST_REF_NODE)
1419 break;
1421 w = find (i);
1422 if (bitmap_bit_p (si->deleted, w))
1423 continue;
1425 if (!bitmap_bit_p (si->visited, w))
1426 scc_visit (graph, si, w);
1428 unsigned int t = find (w);
1429 gcc_checking_assert (find (n) == n);
1430 if (si->dfs[t] < si->dfs[n])
1431 si->dfs[n] = si->dfs[t];
1434 /* See if any components have been identified. */
1435 if (si->dfs[n] == my_dfs)
1437 if (si->scc_stack.length () > 0
1438 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1440 bitmap scc = BITMAP_ALLOC (NULL);
1441 unsigned int lowest_node;
1442 bitmap_iterator bi;
1444 bitmap_set_bit (scc, n);
1446 while (si->scc_stack.length () != 0
1447 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1449 unsigned int w = si->scc_stack.pop ();
1451 bitmap_set_bit (scc, w);
1454 lowest_node = bitmap_first_set_bit (scc);
1455 gcc_assert (lowest_node < FIRST_REF_NODE);
1457 /* Collapse the SCC nodes into a single node, and mark the
1458 indirect cycles. */
1459 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1461 if (i < FIRST_REF_NODE)
1463 if (unite (lowest_node, i))
1464 unify_nodes (graph, lowest_node, i, false);
1466 else
1468 unite (lowest_node, i);
1469 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1473 bitmap_set_bit (si->deleted, n);
1475 else
1476 si->scc_stack.safe_push (n);
1479 /* Unify node FROM into node TO, updating the changed count if
1480 necessary when UPDATE_CHANGED is true. */
1482 static void
1483 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1484 bool update_changed)
1486 gcc_checking_assert (to != from && find (to) == to);
1488 if (dump_file && (dump_flags & TDF_DETAILS))
1489 fprintf (dump_file, "Unifying %s to %s\n",
1490 get_varinfo (from)->name,
1491 get_varinfo (to)->name);
1493 if (update_changed)
1494 stats.unified_vars_dynamic++;
1495 else
1496 stats.unified_vars_static++;
1498 merge_graph_nodes (graph, to, from);
1499 if (merge_node_constraints (graph, to, from))
1501 if (update_changed)
1502 bitmap_set_bit (changed, to);
1505 /* Mark TO as changed if FROM was changed. If TO was already marked
1506 as changed, decrease the changed count. */
1508 if (update_changed
1509 && bitmap_clear_bit (changed, from))
1510 bitmap_set_bit (changed, to);
1511 varinfo_t fromvi = get_varinfo (from);
1512 if (fromvi->solution)
1514 /* If the solution changes because of the merging, we need to mark
1515 the variable as changed. */
1516 varinfo_t tovi = get_varinfo (to);
1517 if (bitmap_ior_into (tovi->solution, fromvi->solution))
1519 if (update_changed)
1520 bitmap_set_bit (changed, to);
1523 BITMAP_FREE (fromvi->solution);
1524 if (fromvi->oldsolution)
1525 BITMAP_FREE (fromvi->oldsolution);
1527 if (stats.iterations > 0
1528 && tovi->oldsolution)
1529 BITMAP_FREE (tovi->oldsolution);
1531 if (graph->succs[to])
1532 bitmap_clear_bit (graph->succs[to], to);
1535 /* Information needed to compute the topological ordering of a graph. */
1537 struct topo_info
1539 /* sbitmap of visited nodes. */
1540 sbitmap visited;
1541 /* Array that stores the topological order of the graph, *in
1542 reverse*. */
1543 vec<unsigned> topo_order;
1547 /* Initialize and return a topological info structure. */
1549 static struct topo_info *
1550 init_topo_info (void)
1552 size_t size = graph->size;
1553 struct topo_info *ti = XNEW (struct topo_info);
1554 ti->visited = sbitmap_alloc (size);
1555 bitmap_clear (ti->visited);
1556 ti->topo_order.create (1);
1557 return ti;
1561 /* Free the topological sort info pointed to by TI. */
1563 static void
1564 free_topo_info (struct topo_info *ti)
1566 sbitmap_free (ti->visited);
1567 ti->topo_order.release ();
1568 free (ti);
1571 /* Visit the graph in topological order, and store the order in the
1572 topo_info structure. */
1574 static void
1575 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1576 unsigned int n)
1578 bitmap_iterator bi;
1579 unsigned int j;
1581 bitmap_set_bit (ti->visited, n);
1583 if (graph->succs[n])
1584 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1586 if (!bitmap_bit_p (ti->visited, j))
1587 topo_visit (graph, ti, j);
1590 ti->topo_order.safe_push (n);
1593 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1594 starting solution for y. */
1596 static void
1597 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1598 bitmap delta, bitmap *expanded_delta)
1600 unsigned int lhs = c->lhs.var;
1601 bool flag = false;
1602 bitmap sol = get_varinfo (lhs)->solution;
1603 unsigned int j;
1604 bitmap_iterator bi;
1605 HOST_WIDE_INT roffset = c->rhs.offset;
1607 /* Our IL does not allow this. */
1608 gcc_checking_assert (c->lhs.offset == 0);
1610 /* If the solution of Y contains anything it is good enough to transfer
1611 this to the LHS. */
1612 if (bitmap_bit_p (delta, anything_id))
1614 flag |= bitmap_set_bit (sol, anything_id);
1615 goto done;
1618 /* If we do not know at with offset the rhs is dereferenced compute
1619 the reachability set of DELTA, conservatively assuming it is
1620 dereferenced at all valid offsets. */
1621 if (roffset == UNKNOWN_OFFSET)
1623 delta = solution_set_expand (delta, expanded_delta);
1624 /* No further offset processing is necessary. */
1625 roffset = 0;
1628 /* For each variable j in delta (Sol(y)), add
1629 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1630 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1632 varinfo_t v = get_varinfo (j);
1633 HOST_WIDE_INT fieldoffset = v->offset + roffset;
1634 unsigned HOST_WIDE_INT size = v->size;
1635 unsigned int t;
1637 if (v->is_full_var)
1639 else if (roffset != 0)
1641 if (fieldoffset < 0)
1642 v = get_varinfo (v->head);
1643 else
1644 v = first_or_preceding_vi_for_offset (v, fieldoffset);
1647 /* We have to include all fields that overlap the current field
1648 shifted by roffset. */
1651 t = find (v->id);
1653 /* Adding edges from the special vars is pointless.
1654 They don't have sets that can change. */
1655 if (get_varinfo (t)->is_special_var)
1656 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1657 /* Merging the solution from ESCAPED needlessly increases
1658 the set. Use ESCAPED as representative instead. */
1659 else if (v->id == escaped_id)
1660 flag |= bitmap_set_bit (sol, escaped_id);
1661 else if (v->may_have_pointers
1662 && add_graph_edge (graph, lhs, t))
1663 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1665 if (v->is_full_var
1666 || v->next == 0)
1667 break;
1669 v = vi_next (v);
1671 while (v->offset < fieldoffset + size);
1674 done:
1675 /* If the LHS solution changed, mark the var as changed. */
1676 if (flag)
1678 get_varinfo (lhs)->solution = sol;
1679 bitmap_set_bit (changed, lhs);
1683 /* Process a constraint C that represents *(x + off) = y using DELTA
1684 as the starting solution for x. */
1686 static void
1687 do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
1689 unsigned int rhs = c->rhs.var;
1690 bitmap sol = get_varinfo (rhs)->solution;
1691 unsigned int j;
1692 bitmap_iterator bi;
1693 HOST_WIDE_INT loff = c->lhs.offset;
1694 bool escaped_p = false;
1696 /* Our IL does not allow this. */
1697 gcc_checking_assert (c->rhs.offset == 0);
1699 /* If the solution of y contains ANYTHING simply use the ANYTHING
1700 solution. This avoids needlessly increasing the points-to sets. */
1701 if (bitmap_bit_p (sol, anything_id))
1702 sol = get_varinfo (find (anything_id))->solution;
1704 /* If the solution for x contains ANYTHING we have to merge the
1705 solution of y into all pointer variables which we do via
1706 STOREDANYTHING. */
1707 if (bitmap_bit_p (delta, anything_id))
1709 unsigned t = find (storedanything_id);
1710 if (add_graph_edge (graph, t, rhs))
1712 if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1713 bitmap_set_bit (changed, t);
1715 return;
1718 /* If we do not know at with offset the rhs is dereferenced compute
1719 the reachability set of DELTA, conservatively assuming it is
1720 dereferenced at all valid offsets. */
1721 if (loff == UNKNOWN_OFFSET)
1723 delta = solution_set_expand (delta, expanded_delta);
1724 loff = 0;
1727 /* For each member j of delta (Sol(x)), add an edge from y to j and
1728 union Sol(y) into Sol(j) */
1729 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1731 varinfo_t v = get_varinfo (j);
1732 unsigned int t;
1733 HOST_WIDE_INT fieldoffset = v->offset + loff;
1734 unsigned HOST_WIDE_INT size = v->size;
1736 if (v->is_full_var)
1738 else if (loff != 0)
1740 if (fieldoffset < 0)
1741 v = get_varinfo (v->head);
1742 else
1743 v = first_or_preceding_vi_for_offset (v, fieldoffset);
1746 /* We have to include all fields that overlap the current field
1747 shifted by loff. */
1750 if (v->may_have_pointers)
1752 /* If v is a global variable then this is an escape point. */
1753 if (v->is_global_var
1754 && !escaped_p)
1756 t = find (escaped_id);
1757 if (add_graph_edge (graph, t, rhs)
1758 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1759 bitmap_set_bit (changed, t);
1760 /* Enough to let rhs escape once. */
1761 escaped_p = true;
1764 if (v->is_special_var)
1765 break;
1767 t = find (v->id);
1768 if (add_graph_edge (graph, t, rhs)
1769 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1770 bitmap_set_bit (changed, t);
1773 if (v->is_full_var
1774 || v->next == 0)
1775 break;
1777 v = vi_next (v);
1779 while (v->offset < fieldoffset + size);
1783 /* Handle a non-simple (simple meaning requires no iteration),
1784 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1786 static void
1787 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
1788 bitmap *expanded_delta)
1790 if (c->lhs.type == DEREF)
1792 if (c->rhs.type == ADDRESSOF)
1794 gcc_unreachable ();
1796 else
1798 /* *x = y */
1799 do_ds_constraint (c, delta, expanded_delta);
1802 else if (c->rhs.type == DEREF)
1804 /* x = *y */
1805 if (!(get_varinfo (c->lhs.var)->is_special_var))
1806 do_sd_constraint (graph, c, delta, expanded_delta);
1808 else
1810 bitmap tmp;
1811 bool flag = false;
1813 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
1814 && c->rhs.offset != 0 && c->lhs.offset == 0);
1815 tmp = get_varinfo (c->lhs.var)->solution;
1817 flag = set_union_with_increment (tmp, delta, c->rhs.offset,
1818 expanded_delta);
1820 if (flag)
1821 bitmap_set_bit (changed, c->lhs.var);
1825 /* Initialize and return a new SCC info structure. */
1827 static struct scc_info *
1828 init_scc_info (size_t size)
1830 struct scc_info *si = XNEW (struct scc_info);
1831 size_t i;
1833 si->current_index = 0;
1834 si->visited = sbitmap_alloc (size);
1835 bitmap_clear (si->visited);
1836 si->deleted = sbitmap_alloc (size);
1837 bitmap_clear (si->deleted);
1838 si->node_mapping = XNEWVEC (unsigned int, size);
1839 si->dfs = XCNEWVEC (unsigned int, size);
1841 for (i = 0; i < size; i++)
1842 si->node_mapping[i] = i;
1844 si->scc_stack.create (1);
1845 return si;
1848 /* Free an SCC info structure pointed to by SI */
1850 static void
1851 free_scc_info (struct scc_info *si)
1853 sbitmap_free (si->visited);
1854 sbitmap_free (si->deleted);
1855 free (si->node_mapping);
1856 free (si->dfs);
1857 si->scc_stack.release ();
1858 free (si);
1862 /* Find indirect cycles in GRAPH that occur, using strongly connected
1863 components, and note them in the indirect cycles map.
1865 This technique comes from Ben Hardekopf and Calvin Lin,
1866 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1867 Lines of Code", submitted to PLDI 2007. */
1869 static void
1870 find_indirect_cycles (constraint_graph_t graph)
1872 unsigned int i;
1873 unsigned int size = graph->size;
1874 struct scc_info *si = init_scc_info (size);
1876 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1877 if (!bitmap_bit_p (si->visited, i) && find (i) == i)
1878 scc_visit (graph, si, i);
1880 free_scc_info (si);
1883 /* Compute a topological ordering for GRAPH, and store the result in the
1884 topo_info structure TI. */
1886 static void
1887 compute_topo_order (constraint_graph_t graph,
1888 struct topo_info *ti)
1890 unsigned int i;
1891 unsigned int size = graph->size;
1893 for (i = 0; i != size; ++i)
1894 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1895 topo_visit (graph, ti, i);
1898 /* Structure used to for hash value numbering of pointer equivalence
1899 classes. */
1901 typedef struct equiv_class_label
1903 hashval_t hashcode;
1904 unsigned int equivalence_class;
1905 bitmap labels;
1906 } *equiv_class_label_t;
1907 typedef const struct equiv_class_label *const_equiv_class_label_t;
1909 /* Equiv_class_label hashtable helpers. */
1911 struct equiv_class_hasher : free_ptr_hash <equiv_class_label>
1913 static inline hashval_t hash (const equiv_class_label *);
1914 static inline bool equal (const equiv_class_label *,
1915 const equiv_class_label *);
1918 /* Hash function for a equiv_class_label_t */
1920 inline hashval_t
1921 equiv_class_hasher::hash (const equiv_class_label *ecl)
1923 return ecl->hashcode;
1926 /* Equality function for two equiv_class_label_t's. */
1928 inline bool
1929 equiv_class_hasher::equal (const equiv_class_label *eql1,
1930 const equiv_class_label *eql2)
1932 return (eql1->hashcode == eql2->hashcode
1933 && bitmap_equal_p (eql1->labels, eql2->labels));
1936 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1937 classes. */
1938 static hash_table<equiv_class_hasher> *pointer_equiv_class_table;
1940 /* A hashtable for mapping a bitmap of labels->location equivalence
1941 classes. */
1942 static hash_table<equiv_class_hasher> *location_equiv_class_table;
1944 /* Lookup a equivalence class in TABLE by the bitmap of LABELS with
1945 hash HAS it contains. Sets *REF_LABELS to the bitmap LABELS
1946 is equivalent to. */
1948 static equiv_class_label *
1949 equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
1950 bitmap labels)
1952 equiv_class_label **slot;
1953 equiv_class_label ecl;
1955 ecl.labels = labels;
1956 ecl.hashcode = bitmap_hash (labels);
1957 slot = table->find_slot (&ecl, INSERT);
1958 if (!*slot)
1960 *slot = XNEW (struct equiv_class_label);
1961 (*slot)->labels = labels;
1962 (*slot)->hashcode = ecl.hashcode;
1963 (*slot)->equivalence_class = 0;
1966 return *slot;
1969 /* Perform offline variable substitution.
1971 This is a worst case quadratic time way of identifying variables
1972 that must have equivalent points-to sets, including those caused by
1973 static cycles, and single entry subgraphs, in the constraint graph.
1975 The technique is described in "Exploiting Pointer and Location
1976 Equivalence to Optimize Pointer Analysis. In the 14th International
1977 Static Analysis Symposium (SAS), August 2007." It is known as the
1978 "HU" algorithm, and is equivalent to value numbering the collapsed
1979 constraint graph including evaluating unions.
1981 The general method of finding equivalence classes is as follows:
1982 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1983 Initialize all non-REF nodes to be direct nodes.
1984 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1985 variable}
1986 For each constraint containing the dereference, we also do the same
1987 thing.
1989 We then compute SCC's in the graph and unify nodes in the same SCC,
1990 including pts sets.
1992 For each non-collapsed node x:
1993 Visit all unvisited explicit incoming edges.
1994 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1995 where y->x.
1996 Lookup the equivalence class for pts(x).
1997 If we found one, equivalence_class(x) = found class.
1998 Otherwise, equivalence_class(x) = new class, and new_class is
1999 added to the lookup table.
2001 All direct nodes with the same equivalence class can be replaced
2002 with a single representative node.
2003 All unlabeled nodes (label == 0) are not pointers and all edges
2004 involving them can be eliminated.
2005 We perform these optimizations during rewrite_constraints
2007 In addition to pointer equivalence class finding, we also perform
2008 location equivalence class finding. This is the set of variables
2009 that always appear together in points-to sets. We use this to
2010 compress the size of the points-to sets. */
2012 /* Current maximum pointer equivalence class id. */
2013 static int pointer_equiv_class;
2015 /* Current maximum location equivalence class id. */
2016 static int location_equiv_class;
2018 /* Recursive routine to find strongly connected components in GRAPH,
2019 and label it's nodes with DFS numbers. */
2021 static void
2022 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2024 unsigned int i;
2025 bitmap_iterator bi;
2026 unsigned int my_dfs;
2028 gcc_checking_assert (si->node_mapping[n] == n);
2029 bitmap_set_bit (si->visited, n);
2030 si->dfs[n] = si->current_index ++;
2031 my_dfs = si->dfs[n];
2033 /* Visit all the successors. */
2034 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2036 unsigned int w = si->node_mapping[i];
2038 if (bitmap_bit_p (si->deleted, w))
2039 continue;
2041 if (!bitmap_bit_p (si->visited, w))
2042 condense_visit (graph, si, w);
2044 unsigned int t = si->node_mapping[w];
2045 gcc_checking_assert (si->node_mapping[n] == n);
2046 if (si->dfs[t] < si->dfs[n])
2047 si->dfs[n] = si->dfs[t];
2050 /* Visit all the implicit predecessors. */
2051 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2053 unsigned int w = si->node_mapping[i];
2055 if (bitmap_bit_p (si->deleted, w))
2056 continue;
2058 if (!bitmap_bit_p (si->visited, w))
2059 condense_visit (graph, si, w);
2061 unsigned int t = si->node_mapping[w];
2062 gcc_assert (si->node_mapping[n] == n);
2063 if (si->dfs[t] < si->dfs[n])
2064 si->dfs[n] = si->dfs[t];
2067 /* See if any components have been identified. */
2068 if (si->dfs[n] == my_dfs)
2070 while (si->scc_stack.length () != 0
2071 && si->dfs[si->scc_stack.last ()] >= my_dfs)
2073 unsigned int w = si->scc_stack.pop ();
2074 si->node_mapping[w] = n;
2076 if (!bitmap_bit_p (graph->direct_nodes, w))
2077 bitmap_clear_bit (graph->direct_nodes, n);
2079 /* Unify our nodes. */
2080 if (graph->preds[w])
2082 if (!graph->preds[n])
2083 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2084 bitmap_ior_into (graph->preds[n], graph->preds[w]);
2086 if (graph->implicit_preds[w])
2088 if (!graph->implicit_preds[n])
2089 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2090 bitmap_ior_into (graph->implicit_preds[n],
2091 graph->implicit_preds[w]);
2093 if (graph->points_to[w])
2095 if (!graph->points_to[n])
2096 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2097 bitmap_ior_into (graph->points_to[n],
2098 graph->points_to[w]);
2101 bitmap_set_bit (si->deleted, n);
2103 else
2104 si->scc_stack.safe_push (n);
2107 /* Label pointer equivalences.
2109 This performs a value numbering of the constraint graph to
2110 discover which variables will always have the same points-to sets
2111 under the current set of constraints.
2113 The way it value numbers is to store the set of points-to bits
2114 generated by the constraints and graph edges. This is just used as a
2115 hash and equality comparison. The *actual set of points-to bits* is
2116 completely irrelevant, in that we don't care about being able to
2117 extract them later.
2119 The equality values (currently bitmaps) just have to satisfy a few
2120 constraints, the main ones being:
2121 1. The combining operation must be order independent.
2122 2. The end result of a given set of operations must be unique iff the
2123 combination of input values is unique
2124 3. Hashable. */
2126 static void
2127 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2129 unsigned int i, first_pred;
2130 bitmap_iterator bi;
2132 bitmap_set_bit (si->visited, n);
2134 /* Label and union our incoming edges's points to sets. */
2135 first_pred = -1U;
2136 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2138 unsigned int w = si->node_mapping[i];
2139 if (!bitmap_bit_p (si->visited, w))
2140 label_visit (graph, si, w);
2142 /* Skip unused edges */
2143 if (w == n || graph->pointer_label[w] == 0)
2144 continue;
2146 if (graph->points_to[w])
2148 if (!graph->points_to[n])
2150 if (first_pred == -1U)
2151 first_pred = w;
2152 else
2154 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2155 bitmap_ior (graph->points_to[n],
2156 graph->points_to[first_pred],
2157 graph->points_to[w]);
2160 else
2161 bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
2165 /* Indirect nodes get fresh variables and a new pointer equiv class. */
2166 if (!bitmap_bit_p (graph->direct_nodes, n))
2168 if (!graph->points_to[n])
2170 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2171 if (first_pred != -1U)
2172 bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
2174 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2175 graph->pointer_label[n] = pointer_equiv_class++;
2176 equiv_class_label_t ecl;
2177 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2178 graph->points_to[n]);
2179 ecl->equivalence_class = graph->pointer_label[n];
2180 return;
2183 /* If there was only a single non-empty predecessor the pointer equiv
2184 class is the same. */
2185 if (!graph->points_to[n])
2187 if (first_pred != -1U)
2189 graph->pointer_label[n] = graph->pointer_label[first_pred];
2190 graph->points_to[n] = graph->points_to[first_pred];
2192 return;
2195 if (!bitmap_empty_p (graph->points_to[n]))
2197 equiv_class_label_t ecl;
2198 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2199 graph->points_to[n]);
2200 if (ecl->equivalence_class == 0)
2201 ecl->equivalence_class = pointer_equiv_class++;
2202 else
2204 BITMAP_FREE (graph->points_to[n]);
2205 graph->points_to[n] = ecl->labels;
2207 graph->pointer_label[n] = ecl->equivalence_class;
2211 /* Print the pred graph in dot format. */
2213 static void
2214 dump_pred_graph (struct scc_info *si, FILE *file)
2216 unsigned int i;
2218 /* Only print the graph if it has already been initialized: */
2219 if (!graph)
2220 return;
2222 /* Prints the header of the dot file: */
2223 fprintf (file, "strict digraph {\n");
2224 fprintf (file, " node [\n shape = box\n ]\n");
2225 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
2226 fprintf (file, "\n // List of nodes and complex constraints in "
2227 "the constraint graph:\n");
2229 /* The next lines print the nodes in the graph together with the
2230 complex constraints attached to them. */
2231 for (i = 1; i < graph->size; i++)
2233 if (i == FIRST_REF_NODE)
2234 continue;
2235 if (si->node_mapping[i] != i)
2236 continue;
2237 if (i < FIRST_REF_NODE)
2238 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2239 else
2240 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2241 if (graph->points_to[i]
2242 && !bitmap_empty_p (graph->points_to[i]))
2244 if (i < FIRST_REF_NODE)
2245 fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2246 else
2247 fprintf (file, "[label=\"*%s = {",
2248 get_varinfo (i - FIRST_REF_NODE)->name);
2249 unsigned j;
2250 bitmap_iterator bi;
2251 EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2252 fprintf (file, " %d", j);
2253 fprintf (file, " }\"]");
2255 fprintf (file, ";\n");
2258 /* Go over the edges. */
2259 fprintf (file, "\n // Edges in the constraint graph:\n");
2260 for (i = 1; i < graph->size; i++)
2262 unsigned j;
2263 bitmap_iterator bi;
2264 if (si->node_mapping[i] != i)
2265 continue;
2266 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2268 unsigned from = si->node_mapping[j];
2269 if (from < FIRST_REF_NODE)
2270 fprintf (file, "\"%s\"", get_varinfo (from)->name);
2271 else
2272 fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2273 fprintf (file, " -> ");
2274 if (i < FIRST_REF_NODE)
2275 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2276 else
2277 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2278 fprintf (file, ";\n");
2282 /* Prints the tail of the dot file. */
2283 fprintf (file, "}\n");
2286 /* Perform offline variable substitution, discovering equivalence
2287 classes, and eliminating non-pointer variables. */
2289 static struct scc_info *
2290 perform_var_substitution (constraint_graph_t graph)
2292 unsigned int i;
2293 unsigned int size = graph->size;
2294 struct scc_info *si = init_scc_info (size);
2296 bitmap_obstack_initialize (&iteration_obstack);
2297 pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
2298 location_equiv_class_table
2299 = new hash_table<equiv_class_hasher> (511);
2300 pointer_equiv_class = 1;
2301 location_equiv_class = 1;
2303 /* Condense the nodes, which means to find SCC's, count incoming
2304 predecessors, and unite nodes in SCC's. */
2305 for (i = 1; i < FIRST_REF_NODE; i++)
2306 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2307 condense_visit (graph, si, si->node_mapping[i]);
2309 if (dump_file && (dump_flags & TDF_GRAPH))
2311 fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
2312 "in dot format:\n");
2313 dump_pred_graph (si, dump_file);
2314 fprintf (dump_file, "\n\n");
2317 bitmap_clear (si->visited);
2318 /* Actually the label the nodes for pointer equivalences */
2319 for (i = 1; i < FIRST_REF_NODE; i++)
2320 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2321 label_visit (graph, si, si->node_mapping[i]);
2323 /* Calculate location equivalence labels. */
2324 for (i = 1; i < FIRST_REF_NODE; i++)
2326 bitmap pointed_by;
2327 bitmap_iterator bi;
2328 unsigned int j;
2330 if (!graph->pointed_by[i])
2331 continue;
2332 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2334 /* Translate the pointed-by mapping for pointer equivalence
2335 labels. */
2336 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2338 bitmap_set_bit (pointed_by,
2339 graph->pointer_label[si->node_mapping[j]]);
2341 /* The original pointed_by is now dead. */
2342 BITMAP_FREE (graph->pointed_by[i]);
2344 /* Look up the location equivalence label if one exists, or make
2345 one otherwise. */
2346 equiv_class_label_t ecl;
2347 ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
2348 if (ecl->equivalence_class == 0)
2349 ecl->equivalence_class = location_equiv_class++;
2350 else
2352 if (dump_file && (dump_flags & TDF_DETAILS))
2353 fprintf (dump_file, "Found location equivalence for node %s\n",
2354 get_varinfo (i)->name);
2355 BITMAP_FREE (pointed_by);
2357 graph->loc_label[i] = ecl->equivalence_class;
2361 if (dump_file && (dump_flags & TDF_DETAILS))
2362 for (i = 1; i < FIRST_REF_NODE; i++)
2364 unsigned j = si->node_mapping[i];
2365 if (j != i)
2367 fprintf (dump_file, "%s node id %d ",
2368 bitmap_bit_p (graph->direct_nodes, i)
2369 ? "Direct" : "Indirect", i);
2370 if (i < FIRST_REF_NODE)
2371 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2372 else
2373 fprintf (dump_file, "\"*%s\"",
2374 get_varinfo (i - FIRST_REF_NODE)->name);
2375 fprintf (dump_file, " mapped to SCC leader node id %d ", j);
2376 if (j < FIRST_REF_NODE)
2377 fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
2378 else
2379 fprintf (dump_file, "\"*%s\"\n",
2380 get_varinfo (j - FIRST_REF_NODE)->name);
2382 else
2384 fprintf (dump_file,
2385 "Equivalence classes for %s node id %d ",
2386 bitmap_bit_p (graph->direct_nodes, i)
2387 ? "direct" : "indirect", i);
2388 if (i < FIRST_REF_NODE)
2389 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2390 else
2391 fprintf (dump_file, "\"*%s\"",
2392 get_varinfo (i - FIRST_REF_NODE)->name);
2393 fprintf (dump_file,
2394 ": pointer %d, location %d\n",
2395 graph->pointer_label[i], graph->loc_label[i]);
2399 /* Quickly eliminate our non-pointer variables. */
2401 for (i = 1; i < FIRST_REF_NODE; i++)
2403 unsigned int node = si->node_mapping[i];
2405 if (graph->pointer_label[node] == 0)
2407 if (dump_file && (dump_flags & TDF_DETAILS))
2408 fprintf (dump_file,
2409 "%s is a non-pointer variable, eliminating edges.\n",
2410 get_varinfo (node)->name);
2411 stats.nonpointer_vars++;
2412 clear_edges_for_node (graph, node);
2416 return si;
2419 /* Free information that was only necessary for variable
2420 substitution. */
2422 static void
2423 free_var_substitution_info (struct scc_info *si)
2425 free_scc_info (si);
2426 free (graph->pointer_label);
2427 free (graph->loc_label);
2428 free (graph->pointed_by);
2429 free (graph->points_to);
2430 free (graph->eq_rep);
2431 sbitmap_free (graph->direct_nodes);
2432 delete pointer_equiv_class_table;
2433 pointer_equiv_class_table = NULL;
2434 delete location_equiv_class_table;
2435 location_equiv_class_table = NULL;
2436 bitmap_obstack_release (&iteration_obstack);
2439 /* Return an existing node that is equivalent to NODE, which has
2440 equivalence class LABEL, if one exists. Return NODE otherwise. */
2442 static unsigned int
2443 find_equivalent_node (constraint_graph_t graph,
2444 unsigned int node, unsigned int label)
2446 /* If the address version of this variable is unused, we can
2447 substitute it for anything else with the same label.
2448 Otherwise, we know the pointers are equivalent, but not the
2449 locations, and we can unite them later. */
2451 if (!bitmap_bit_p (graph->address_taken, node))
2453 gcc_checking_assert (label < graph->size);
2455 if (graph->eq_rep[label] != -1)
2457 /* Unify the two variables since we know they are equivalent. */
2458 if (unite (graph->eq_rep[label], node))
2459 unify_nodes (graph, graph->eq_rep[label], node, false);
2460 return graph->eq_rep[label];
2462 else
2464 graph->eq_rep[label] = node;
2465 graph->pe_rep[label] = node;
2468 else
2470 gcc_checking_assert (label < graph->size);
2471 graph->pe[node] = label;
2472 if (graph->pe_rep[label] == -1)
2473 graph->pe_rep[label] = node;
2476 return node;
2479 /* Unite pointer equivalent but not location equivalent nodes in
2480 GRAPH. This may only be performed once variable substitution is
2481 finished. */
2483 static void
2484 unite_pointer_equivalences (constraint_graph_t graph)
2486 unsigned int i;
2488 /* Go through the pointer equivalences and unite them to their
2489 representative, if they aren't already. */
2490 for (i = 1; i < FIRST_REF_NODE; i++)
2492 unsigned int label = graph->pe[i];
2493 if (label)
2495 int label_rep = graph->pe_rep[label];
2497 if (label_rep == -1)
2498 continue;
2500 label_rep = find (label_rep);
2501 if (label_rep >= 0 && unite (label_rep, find (i)))
2502 unify_nodes (graph, label_rep, i, false);
2507 /* Move complex constraints to the GRAPH nodes they belong to. */
2509 static void
2510 move_complex_constraints (constraint_graph_t graph)
2512 int i;
2513 constraint_t c;
2515 FOR_EACH_VEC_ELT (constraints, i, c)
2517 if (c)
2519 struct constraint_expr lhs = c->lhs;
2520 struct constraint_expr rhs = c->rhs;
2522 if (lhs.type == DEREF)
2524 insert_into_complex (graph, lhs.var, c);
2526 else if (rhs.type == DEREF)
2528 if (!(get_varinfo (lhs.var)->is_special_var))
2529 insert_into_complex (graph, rhs.var, c);
2531 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2532 && (lhs.offset != 0 || rhs.offset != 0))
2534 insert_into_complex (graph, rhs.var, c);
2541 /* Optimize and rewrite complex constraints while performing
2542 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2543 result of perform_variable_substitution. */
2545 static void
2546 rewrite_constraints (constraint_graph_t graph,
2547 struct scc_info *si)
2549 int i;
2550 constraint_t c;
2552 if (flag_checking)
2554 for (unsigned int j = 0; j < graph->size; j++)
2555 gcc_assert (find (j) == j);
2558 FOR_EACH_VEC_ELT (constraints, i, c)
2560 struct constraint_expr lhs = c->lhs;
2561 struct constraint_expr rhs = c->rhs;
2562 unsigned int lhsvar = find (lhs.var);
2563 unsigned int rhsvar = find (rhs.var);
2564 unsigned int lhsnode, rhsnode;
2565 unsigned int lhslabel, rhslabel;
2567 lhsnode = si->node_mapping[lhsvar];
2568 rhsnode = si->node_mapping[rhsvar];
2569 lhslabel = graph->pointer_label[lhsnode];
2570 rhslabel = graph->pointer_label[rhsnode];
2572 /* See if it is really a non-pointer variable, and if so, ignore
2573 the constraint. */
2574 if (lhslabel == 0)
2576 if (dump_file && (dump_flags & TDF_DETAILS))
2579 fprintf (dump_file, "%s is a non-pointer variable,"
2580 "ignoring constraint:",
2581 get_varinfo (lhs.var)->name);
2582 dump_constraint (dump_file, c);
2583 fprintf (dump_file, "\n");
2585 constraints[i] = NULL;
2586 continue;
2589 if (rhslabel == 0)
2591 if (dump_file && (dump_flags & TDF_DETAILS))
2594 fprintf (dump_file, "%s is a non-pointer variable,"
2595 "ignoring constraint:",
2596 get_varinfo (rhs.var)->name);
2597 dump_constraint (dump_file, c);
2598 fprintf (dump_file, "\n");
2600 constraints[i] = NULL;
2601 continue;
2604 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2605 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2606 c->lhs.var = lhsvar;
2607 c->rhs.var = rhsvar;
2611 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2612 part of an SCC, false otherwise. */
2614 static bool
2615 eliminate_indirect_cycles (unsigned int node)
2617 if (graph->indirect_cycles[node] != -1
2618 && !bitmap_empty_p (get_varinfo (node)->solution))
2620 unsigned int i;
2621 auto_vec<unsigned> queue;
2622 int queuepos;
2623 unsigned int to = find (graph->indirect_cycles[node]);
2624 bitmap_iterator bi;
2626 /* We can't touch the solution set and call unify_nodes
2627 at the same time, because unify_nodes is going to do
2628 bitmap unions into it. */
2630 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2632 if (find (i) == i && i != to)
2634 if (unite (to, i))
2635 queue.safe_push (i);
2639 for (queuepos = 0;
2640 queue.iterate (queuepos, &i);
2641 queuepos++)
2643 unify_nodes (graph, to, i, true);
2645 return true;
2647 return false;
2650 /* Solve the constraint graph GRAPH using our worklist solver.
2651 This is based on the PW* family of solvers from the "Efficient Field
2652 Sensitive Pointer Analysis for C" paper.
2653 It works by iterating over all the graph nodes, processing the complex
2654 constraints and propagating the copy constraints, until everything stops
2655 changed. This corresponds to steps 6-8 in the solving list given above. */
2657 static void
2658 solve_graph (constraint_graph_t graph)
2660 unsigned int size = graph->size;
2661 unsigned int i;
2662 bitmap pts;
2664 changed = BITMAP_ALLOC (NULL);
2666 /* Mark all initial non-collapsed nodes as changed. */
2667 for (i = 1; i < size; i++)
2669 varinfo_t ivi = get_varinfo (i);
2670 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2671 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2672 || graph->complex[i].length () > 0))
2673 bitmap_set_bit (changed, i);
2676 /* Allocate a bitmap to be used to store the changed bits. */
2677 pts = BITMAP_ALLOC (&pta_obstack);
2679 while (!bitmap_empty_p (changed))
2681 unsigned int i;
2682 struct topo_info *ti = init_topo_info ();
2683 stats.iterations++;
2685 bitmap_obstack_initialize (&iteration_obstack);
2687 compute_topo_order (graph, ti);
2689 while (ti->topo_order.length () != 0)
2692 i = ti->topo_order.pop ();
2694 /* If this variable is not a representative, skip it. */
2695 if (find (i) != i)
2696 continue;
2698 /* In certain indirect cycle cases, we may merge this
2699 variable to another. */
2700 if (eliminate_indirect_cycles (i) && find (i) != i)
2701 continue;
2703 /* If the node has changed, we need to process the
2704 complex constraints and outgoing edges again. */
2705 if (bitmap_clear_bit (changed, i))
2707 unsigned int j;
2708 constraint_t c;
2709 bitmap solution;
2710 vec<constraint_t> complex = graph->complex[i];
2711 varinfo_t vi = get_varinfo (i);
2712 bool solution_empty;
2714 /* Compute the changed set of solution bits. If anything
2715 is in the solution just propagate that. */
2716 if (bitmap_bit_p (vi->solution, anything_id))
2718 /* If anything is also in the old solution there is
2719 nothing to do.
2720 ??? But we shouldn't ended up with "changed" set ... */
2721 if (vi->oldsolution
2722 && bitmap_bit_p (vi->oldsolution, anything_id))
2723 continue;
2724 bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
2726 else if (vi->oldsolution)
2727 bitmap_and_compl (pts, vi->solution, vi->oldsolution);
2728 else
2729 bitmap_copy (pts, vi->solution);
2731 if (bitmap_empty_p (pts))
2732 continue;
2734 if (vi->oldsolution)
2735 bitmap_ior_into (vi->oldsolution, pts);
2736 else
2738 vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
2739 bitmap_copy (vi->oldsolution, pts);
2742 solution = vi->solution;
2743 solution_empty = bitmap_empty_p (solution);
2745 /* Process the complex constraints */
2746 bitmap expanded_pts = NULL;
2747 FOR_EACH_VEC_ELT (complex, j, c)
2749 /* XXX: This is going to unsort the constraints in
2750 some cases, which will occasionally add duplicate
2751 constraints during unification. This does not
2752 affect correctness. */
2753 c->lhs.var = find (c->lhs.var);
2754 c->rhs.var = find (c->rhs.var);
2756 /* The only complex constraint that can change our
2757 solution to non-empty, given an empty solution,
2758 is a constraint where the lhs side is receiving
2759 some set from elsewhere. */
2760 if (!solution_empty || c->lhs.type != DEREF)
2761 do_complex_constraint (graph, c, pts, &expanded_pts);
2763 BITMAP_FREE (expanded_pts);
2765 solution_empty = bitmap_empty_p (solution);
2767 if (!solution_empty)
2769 bitmap_iterator bi;
2770 unsigned eff_escaped_id = find (escaped_id);
2772 /* Propagate solution to all successors. */
2773 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2774 0, j, bi)
2776 bitmap tmp;
2777 bool flag;
2779 unsigned int to = find (j);
2780 tmp = get_varinfo (to)->solution;
2781 flag = false;
2783 /* Don't try to propagate to ourselves. */
2784 if (to == i)
2785 continue;
2787 /* If we propagate from ESCAPED use ESCAPED as
2788 placeholder. */
2789 if (i == eff_escaped_id)
2790 flag = bitmap_set_bit (tmp, escaped_id);
2791 else
2792 flag = bitmap_ior_into (tmp, pts);
2794 if (flag)
2795 bitmap_set_bit (changed, to);
2800 free_topo_info (ti);
2801 bitmap_obstack_release (&iteration_obstack);
2804 BITMAP_FREE (pts);
2805 BITMAP_FREE (changed);
2806 bitmap_obstack_release (&oldpta_obstack);
2809 /* Map from trees to variable infos. */
2810 static hash_map<tree, varinfo_t> *vi_for_tree;
2813 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2815 static void
2816 insert_vi_for_tree (tree t, varinfo_t vi)
2818 gcc_assert (vi);
2819 gcc_assert (!vi_for_tree->put (t, vi));
2822 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2823 exist in the map, return NULL, otherwise, return the varinfo we found. */
2825 static varinfo_t
2826 lookup_vi_for_tree (tree t)
2828 varinfo_t *slot = vi_for_tree->get (t);
2829 if (slot == NULL)
2830 return NULL;
2832 return *slot;
2835 /* Return a printable name for DECL */
2837 static const char *
2838 alias_get_name (tree decl)
2840 const char *res = NULL;
2841 char *temp;
2842 int num_printed = 0;
2844 if (!dump_file)
2845 return "NULL";
2847 if (TREE_CODE (decl) == SSA_NAME)
2849 res = get_name (decl);
2850 if (res)
2851 num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
2852 else
2853 num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
2854 if (num_printed > 0)
2856 res = ggc_strdup (temp);
2857 free (temp);
2860 else if (DECL_P (decl))
2862 if (DECL_ASSEMBLER_NAME_SET_P (decl))
2863 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2864 else
2866 res = get_name (decl);
2867 if (!res)
2869 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2870 if (num_printed > 0)
2872 res = ggc_strdup (temp);
2873 free (temp);
2878 if (res != NULL)
2879 return res;
2881 return "NULL";
2884 /* Find the variable id for tree T in the map.
2885 If T doesn't exist in the map, create an entry for it and return it. */
2887 static varinfo_t
2888 get_vi_for_tree (tree t)
2890 varinfo_t *slot = vi_for_tree->get (t);
2891 if (slot == NULL)
2893 unsigned int id = create_variable_info_for (t, alias_get_name (t), false);
2894 return get_varinfo (id);
2897 return *slot;
2900 /* Get a scalar constraint expression for a new temporary variable. */
2902 static struct constraint_expr
2903 new_scalar_tmp_constraint_exp (const char *name, bool add_id)
2905 struct constraint_expr tmp;
2906 varinfo_t vi;
2908 vi = new_var_info (NULL_TREE, name, add_id);
2909 vi->offset = 0;
2910 vi->size = -1;
2911 vi->fullsize = -1;
2912 vi->is_full_var = 1;
2914 tmp.var = vi->id;
2915 tmp.type = SCALAR;
2916 tmp.offset = 0;
2918 return tmp;
2921 /* Get a constraint expression vector from an SSA_VAR_P node.
2922 If address_p is true, the result will be taken its address of. */
2924 static void
2925 get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
2927 struct constraint_expr cexpr;
2928 varinfo_t vi;
2930 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2931 gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
2933 /* For parameters, get at the points-to set for the actual parm
2934 decl. */
2935 if (TREE_CODE (t) == SSA_NAME
2936 && SSA_NAME_IS_DEFAULT_DEF (t)
2937 && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2938 || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
2940 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2941 return;
2944 /* For global variables resort to the alias target. */
2945 if (TREE_CODE (t) == VAR_DECL
2946 && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
2948 varpool_node *node = varpool_node::get (t);
2949 if (node && node->alias && node->analyzed)
2951 node = node->ultimate_alias_target ();
2952 /* Canonicalize the PT uid of all aliases to the ultimate target.
2953 ??? Hopefully the set of aliases can't change in a way that
2954 changes the ultimate alias target. */
2955 gcc_assert ((! DECL_PT_UID_SET_P (node->decl)
2956 || DECL_PT_UID (node->decl) == DECL_UID (node->decl))
2957 && (! DECL_PT_UID_SET_P (t)
2958 || DECL_PT_UID (t) == DECL_UID (node->decl)));
2959 DECL_PT_UID (t) = DECL_UID (node->decl);
2960 t = node->decl;
2964 vi = get_vi_for_tree (t);
2965 cexpr.var = vi->id;
2966 cexpr.type = SCALAR;
2967 cexpr.offset = 0;
2969 /* If we are not taking the address of the constraint expr, add all
2970 sub-fiels of the variable as well. */
2971 if (!address_p
2972 && !vi->is_full_var)
2974 for (; vi; vi = vi_next (vi))
2976 cexpr.var = vi->id;
2977 results->safe_push (cexpr);
2979 return;
2982 results->safe_push (cexpr);
2985 /* Process constraint T, performing various simplifications and then
2986 adding it to our list of overall constraints. */
2988 static void
2989 process_constraint (constraint_t t)
2991 struct constraint_expr rhs = t->rhs;
2992 struct constraint_expr lhs = t->lhs;
2994 gcc_assert (rhs.var < varmap.length ());
2995 gcc_assert (lhs.var < varmap.length ());
2997 /* If we didn't get any useful constraint from the lhs we get
2998 &ANYTHING as fallback from get_constraint_for. Deal with
2999 it here by turning it into *ANYTHING. */
3000 if (lhs.type == ADDRESSOF
3001 && lhs.var == anything_id)
3002 lhs.type = DEREF;
3004 /* ADDRESSOF on the lhs is invalid. */
3005 gcc_assert (lhs.type != ADDRESSOF);
3007 /* We shouldn't add constraints from things that cannot have pointers.
3008 It's not completely trivial to avoid in the callers, so do it here. */
3009 if (rhs.type != ADDRESSOF
3010 && !get_varinfo (rhs.var)->may_have_pointers)
3011 return;
3013 /* Likewise adding to the solution of a non-pointer var isn't useful. */
3014 if (!get_varinfo (lhs.var)->may_have_pointers)
3015 return;
3017 /* This can happen in our IR with things like n->a = *p */
3018 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
3020 /* Split into tmp = *rhs, *lhs = tmp */
3021 struct constraint_expr tmplhs;
3022 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp", true);
3023 process_constraint (new_constraint (tmplhs, rhs));
3024 process_constraint (new_constraint (lhs, tmplhs));
3026 else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
3028 /* Split into tmp = &rhs, *lhs = tmp */
3029 struct constraint_expr tmplhs;
3030 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp", true);
3031 process_constraint (new_constraint (tmplhs, rhs));
3032 process_constraint (new_constraint (lhs, tmplhs));
3034 else
3036 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
3037 constraints.safe_push (t);
3042 /* Return the position, in bits, of FIELD_DECL from the beginning of its
3043 structure. */
3045 static HOST_WIDE_INT
3046 bitpos_of_field (const tree fdecl)
3048 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3049 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3050 return -1;
3052 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3053 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
3057 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
3058 resulting constraint expressions in *RESULTS. */
3060 static void
3061 get_constraint_for_ptr_offset (tree ptr, tree offset,
3062 vec<ce_s> *results)
3064 struct constraint_expr c;
3065 unsigned int j, n;
3066 HOST_WIDE_INT rhsoffset;
3068 /* If we do not do field-sensitive PTA adding offsets to pointers
3069 does not change the points-to solution. */
3070 if (!use_field_sensitive)
3072 get_constraint_for_rhs (ptr, results);
3073 return;
3076 /* If the offset is not a non-negative integer constant that fits
3077 in a HOST_WIDE_INT, we have to fall back to a conservative
3078 solution which includes all sub-fields of all pointed-to
3079 variables of ptr. */
3080 if (offset == NULL_TREE
3081 || TREE_CODE (offset) != INTEGER_CST)
3082 rhsoffset = UNKNOWN_OFFSET;
3083 else
3085 /* Sign-extend the offset. */
3086 offset_int soffset = offset_int::from (offset, SIGNED);
3087 if (!wi::fits_shwi_p (soffset))
3088 rhsoffset = UNKNOWN_OFFSET;
3089 else
3091 /* Make sure the bit-offset also fits. */
3092 HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
3093 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
3094 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3095 rhsoffset = UNKNOWN_OFFSET;
3099 get_constraint_for_rhs (ptr, results);
3100 if (rhsoffset == 0)
3101 return;
3103 /* As we are eventually appending to the solution do not use
3104 vec::iterate here. */
3105 n = results->length ();
3106 for (j = 0; j < n; j++)
3108 varinfo_t curr;
3109 c = (*results)[j];
3110 curr = get_varinfo (c.var);
3112 if (c.type == ADDRESSOF
3113 /* If this varinfo represents a full variable just use it. */
3114 && curr->is_full_var)
3116 else if (c.type == ADDRESSOF
3117 /* If we do not know the offset add all subfields. */
3118 && rhsoffset == UNKNOWN_OFFSET)
3120 varinfo_t temp = get_varinfo (curr->head);
3123 struct constraint_expr c2;
3124 c2.var = temp->id;
3125 c2.type = ADDRESSOF;
3126 c2.offset = 0;
3127 if (c2.var != c.var)
3128 results->safe_push (c2);
3129 temp = vi_next (temp);
3131 while (temp);
3133 else if (c.type == ADDRESSOF)
3135 varinfo_t temp;
3136 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3138 /* If curr->offset + rhsoffset is less than zero adjust it. */
3139 if (rhsoffset < 0
3140 && curr->offset < offset)
3141 offset = 0;
3143 /* We have to include all fields that overlap the current
3144 field shifted by rhsoffset. And we include at least
3145 the last or the first field of the variable to represent
3146 reachability of off-bound addresses, in particular &object + 1,
3147 conservatively correct. */
3148 temp = first_or_preceding_vi_for_offset (curr, offset);
3149 c.var = temp->id;
3150 c.offset = 0;
3151 temp = vi_next (temp);
3152 while (temp
3153 && temp->offset < offset + curr->size)
3155 struct constraint_expr c2;
3156 c2.var = temp->id;
3157 c2.type = ADDRESSOF;
3158 c2.offset = 0;
3159 results->safe_push (c2);
3160 temp = vi_next (temp);
3163 else if (c.type == SCALAR)
3165 gcc_assert (c.offset == 0);
3166 c.offset = rhsoffset;
3168 else
3169 /* We shouldn't get any DEREFs here. */
3170 gcc_unreachable ();
3172 (*results)[j] = c;
3177 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3178 If address_p is true the result will be taken its address of.
3179 If lhs_p is true then the constraint expression is assumed to be used
3180 as the lhs. */
3182 static void
3183 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3184 bool address_p, bool lhs_p)
3186 tree orig_t = t;
3187 HOST_WIDE_INT bitsize = -1;
3188 HOST_WIDE_INT bitmaxsize = -1;
3189 HOST_WIDE_INT bitpos;
3190 bool reverse;
3191 tree forzero;
3193 /* Some people like to do cute things like take the address of
3194 &0->a.b */
3195 forzero = t;
3196 while (handled_component_p (forzero)
3197 || INDIRECT_REF_P (forzero)
3198 || TREE_CODE (forzero) == MEM_REF)
3199 forzero = TREE_OPERAND (forzero, 0);
3201 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3203 struct constraint_expr temp;
3205 temp.offset = 0;
3206 temp.var = integer_id;
3207 temp.type = SCALAR;
3208 results->safe_push (temp);
3209 return;
3212 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse);
3214 /* Pretend to take the address of the base, we'll take care of
3215 adding the required subset of sub-fields below. */
3216 get_constraint_for_1 (t, results, true, lhs_p);
3217 gcc_assert (results->length () == 1);
3218 struct constraint_expr &result = results->last ();
3220 if (result.type == SCALAR
3221 && get_varinfo (result.var)->is_full_var)
3222 /* For single-field vars do not bother about the offset. */
3223 result.offset = 0;
3224 else if (result.type == SCALAR)
3226 /* In languages like C, you can access one past the end of an
3227 array. You aren't allowed to dereference it, so we can
3228 ignore this constraint. When we handle pointer subtraction,
3229 we may have to do something cute here. */
3231 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
3232 && bitmaxsize != 0)
3234 /* It's also not true that the constraint will actually start at the
3235 right offset, it may start in some padding. We only care about
3236 setting the constraint to the first actual field it touches, so
3237 walk to find it. */
3238 struct constraint_expr cexpr = result;
3239 varinfo_t curr;
3240 results->pop ();
3241 cexpr.offset = 0;
3242 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3244 if (ranges_overlap_p (curr->offset, curr->size,
3245 bitpos, bitmaxsize))
3247 cexpr.var = curr->id;
3248 results->safe_push (cexpr);
3249 if (address_p)
3250 break;
3253 /* If we are going to take the address of this field then
3254 to be able to compute reachability correctly add at least
3255 the last field of the variable. */
3256 if (address_p && results->length () == 0)
3258 curr = get_varinfo (cexpr.var);
3259 while (curr->next != 0)
3260 curr = vi_next (curr);
3261 cexpr.var = curr->id;
3262 results->safe_push (cexpr);
3264 else if (results->length () == 0)
3265 /* Assert that we found *some* field there. The user couldn't be
3266 accessing *only* padding. */
3267 /* Still the user could access one past the end of an array
3268 embedded in a struct resulting in accessing *only* padding. */
3269 /* Or accessing only padding via type-punning to a type
3270 that has a filed just in padding space. */
3272 cexpr.type = SCALAR;
3273 cexpr.var = anything_id;
3274 cexpr.offset = 0;
3275 results->safe_push (cexpr);
3278 else if (bitmaxsize == 0)
3280 if (dump_file && (dump_flags & TDF_DETAILS))
3281 fprintf (dump_file, "Access to zero-sized part of variable,"
3282 "ignoring\n");
3284 else
3285 if (dump_file && (dump_flags & TDF_DETAILS))
3286 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3288 else if (result.type == DEREF)
3290 /* If we do not know exactly where the access goes say so. Note
3291 that only for non-structure accesses we know that we access
3292 at most one subfiled of any variable. */
3293 if (bitpos == -1
3294 || bitsize != bitmaxsize
3295 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3296 || result.offset == UNKNOWN_OFFSET)
3297 result.offset = UNKNOWN_OFFSET;
3298 else
3299 result.offset += bitpos;
3301 else if (result.type == ADDRESSOF)
3303 /* We can end up here for component references on a
3304 VIEW_CONVERT_EXPR <>(&foobar). */
3305 result.type = SCALAR;
3306 result.var = anything_id;
3307 result.offset = 0;
3309 else
3310 gcc_unreachable ();
3314 /* Dereference the constraint expression CONS, and return the result.
3315 DEREF (ADDRESSOF) = SCALAR
3316 DEREF (SCALAR) = DEREF
3317 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3318 This is needed so that we can handle dereferencing DEREF constraints. */
3320 static void
3321 do_deref (vec<ce_s> *constraints)
3323 struct constraint_expr *c;
3324 unsigned int i = 0;
3326 FOR_EACH_VEC_ELT (*constraints, i, c)
3328 if (c->type == SCALAR)
3329 c->type = DEREF;
3330 else if (c->type == ADDRESSOF)
3331 c->type = SCALAR;
3332 else if (c->type == DEREF)
3334 struct constraint_expr tmplhs;
3335 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp", true);
3336 process_constraint (new_constraint (tmplhs, *c));
3337 c->var = tmplhs.var;
3339 else
3340 gcc_unreachable ();
3344 /* Given a tree T, return the constraint expression for taking the
3345 address of it. */
3347 static void
3348 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3350 struct constraint_expr *c;
3351 unsigned int i;
3353 get_constraint_for_1 (t, results, true, true);
3355 FOR_EACH_VEC_ELT (*results, i, c)
3357 if (c->type == DEREF)
3358 c->type = SCALAR;
3359 else
3360 c->type = ADDRESSOF;
3364 /* Given a tree T, return the constraint expression for it. */
3366 static void
3367 get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
3368 bool lhs_p)
3370 struct constraint_expr temp;
3372 /* x = integer is all glommed to a single variable, which doesn't
3373 point to anything by itself. That is, of course, unless it is an
3374 integer constant being treated as a pointer, in which case, we
3375 will return that this is really the addressof anything. This
3376 happens below, since it will fall into the default case. The only
3377 case we know something about an integer treated like a pointer is
3378 when it is the NULL pointer, and then we just say it points to
3379 NULL.
3381 Do not do that if -fno-delete-null-pointer-checks though, because
3382 in that case *NULL does not fail, so it _should_ alias *anything.
3383 It is not worth adding a new option or renaming the existing one,
3384 since this case is relatively obscure. */
3385 if ((TREE_CODE (t) == INTEGER_CST
3386 && integer_zerop (t))
3387 /* The only valid CONSTRUCTORs in gimple with pointer typed
3388 elements are zero-initializer. But in IPA mode we also
3389 process global initializers, so verify at least. */
3390 || (TREE_CODE (t) == CONSTRUCTOR
3391 && CONSTRUCTOR_NELTS (t) == 0))
3393 if (flag_delete_null_pointer_checks)
3394 temp.var = nothing_id;
3395 else
3396 temp.var = nonlocal_id;
3397 temp.type = ADDRESSOF;
3398 temp.offset = 0;
3399 results->safe_push (temp);
3400 return;
3403 /* String constants are read-only, ideally we'd have a CONST_DECL
3404 for those. */
3405 if (TREE_CODE (t) == STRING_CST)
3407 temp.var = string_id;
3408 temp.type = SCALAR;
3409 temp.offset = 0;
3410 results->safe_push (temp);
3411 return;
3414 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3416 case tcc_expression:
3418 switch (TREE_CODE (t))
3420 case ADDR_EXPR:
3421 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3422 return;
3423 default:;
3425 break;
3427 case tcc_reference:
3429 switch (TREE_CODE (t))
3431 case MEM_REF:
3433 struct constraint_expr cs;
3434 varinfo_t vi, curr;
3435 get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3436 TREE_OPERAND (t, 1), results);
3437 do_deref (results);
3439 /* If we are not taking the address then make sure to process
3440 all subvariables we might access. */
3441 if (address_p)
3442 return;
3444 cs = results->last ();
3445 if (cs.type == DEREF
3446 && type_can_have_subvars (TREE_TYPE (t)))
3448 /* For dereferences this means we have to defer it
3449 to solving time. */
3450 results->last ().offset = UNKNOWN_OFFSET;
3451 return;
3453 if (cs.type != SCALAR)
3454 return;
3456 vi = get_varinfo (cs.var);
3457 curr = vi_next (vi);
3458 if (!vi->is_full_var
3459 && curr)
3461 unsigned HOST_WIDE_INT size;
3462 if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
3463 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
3464 else
3465 size = -1;
3466 for (; curr; curr = vi_next (curr))
3468 if (curr->offset - vi->offset < size)
3470 cs.var = curr->id;
3471 results->safe_push (cs);
3473 else
3474 break;
3477 return;
3479 case ARRAY_REF:
3480 case ARRAY_RANGE_REF:
3481 case COMPONENT_REF:
3482 case IMAGPART_EXPR:
3483 case REALPART_EXPR:
3484 case BIT_FIELD_REF:
3485 get_constraint_for_component_ref (t, results, address_p, lhs_p);
3486 return;
3487 case VIEW_CONVERT_EXPR:
3488 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
3489 lhs_p);
3490 return;
3491 /* We are missing handling for TARGET_MEM_REF here. */
3492 default:;
3494 break;
3496 case tcc_exceptional:
3498 switch (TREE_CODE (t))
3500 case SSA_NAME:
3502 get_constraint_for_ssa_var (t, results, address_p);
3503 return;
3505 case CONSTRUCTOR:
3507 unsigned int i;
3508 tree val;
3509 auto_vec<ce_s> tmp;
3510 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3512 struct constraint_expr *rhsp;
3513 unsigned j;
3514 get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3515 FOR_EACH_VEC_ELT (tmp, j, rhsp)
3516 results->safe_push (*rhsp);
3517 tmp.truncate (0);
3519 /* We do not know whether the constructor was complete,
3520 so technically we have to add &NOTHING or &ANYTHING
3521 like we do for an empty constructor as well. */
3522 return;
3524 default:;
3526 break;
3528 case tcc_declaration:
3530 get_constraint_for_ssa_var (t, results, address_p);
3531 return;
3533 case tcc_constant:
3535 /* We cannot refer to automatic variables through constants. */
3536 temp.type = ADDRESSOF;
3537 temp.var = nonlocal_id;
3538 temp.offset = 0;
3539 results->safe_push (temp);
3540 return;
3542 default:;
3545 /* The default fallback is a constraint from anything. */
3546 temp.type = ADDRESSOF;
3547 temp.var = anything_id;
3548 temp.offset = 0;
3549 results->safe_push (temp);
3552 /* Given a gimple tree T, return the constraint expression vector for it. */
3554 static void
3555 get_constraint_for (tree t, vec<ce_s> *results)
3557 gcc_assert (results->length () == 0);
3559 get_constraint_for_1 (t, results, false, true);
3562 /* Given a gimple tree T, return the constraint expression vector for it
3563 to be used as the rhs of a constraint. */
3565 static void
3566 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3568 gcc_assert (results->length () == 0);
3570 get_constraint_for_1 (t, results, false, false);
3574 /* Efficiently generates constraints from all entries in *RHSC to all
3575 entries in *LHSC. */
3577 static void
3578 process_all_all_constraints (vec<ce_s> lhsc,
3579 vec<ce_s> rhsc)
3581 struct constraint_expr *lhsp, *rhsp;
3582 unsigned i, j;
3584 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3586 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3587 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3588 process_constraint (new_constraint (*lhsp, *rhsp));
3590 else
3592 struct constraint_expr tmp;
3593 tmp = new_scalar_tmp_constraint_exp ("allalltmp", true);
3594 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3595 process_constraint (new_constraint (tmp, *rhsp));
3596 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3597 process_constraint (new_constraint (*lhsp, tmp));
3601 /* Handle aggregate copies by expanding into copies of the respective
3602 fields of the structures. */
3604 static void
3605 do_structure_copy (tree lhsop, tree rhsop)
3607 struct constraint_expr *lhsp, *rhsp;
3608 auto_vec<ce_s> lhsc;
3609 auto_vec<ce_s> rhsc;
3610 unsigned j;
3612 get_constraint_for (lhsop, &lhsc);
3613 get_constraint_for_rhs (rhsop, &rhsc);
3614 lhsp = &lhsc[0];
3615 rhsp = &rhsc[0];
3616 if (lhsp->type == DEREF
3617 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3618 || rhsp->type == DEREF)
3620 if (lhsp->type == DEREF)
3622 gcc_assert (lhsc.length () == 1);
3623 lhsp->offset = UNKNOWN_OFFSET;
3625 if (rhsp->type == DEREF)
3627 gcc_assert (rhsc.length () == 1);
3628 rhsp->offset = UNKNOWN_OFFSET;
3630 process_all_all_constraints (lhsc, rhsc);
3632 else if (lhsp->type == SCALAR
3633 && (rhsp->type == SCALAR
3634 || rhsp->type == ADDRESSOF))
3636 HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
3637 HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
3638 bool reverse;
3639 unsigned k = 0;
3640 get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize,
3641 &reverse);
3642 get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize,
3643 &reverse);
3644 for (j = 0; lhsc.iterate (j, &lhsp);)
3646 varinfo_t lhsv, rhsv;
3647 rhsp = &rhsc[k];
3648 lhsv = get_varinfo (lhsp->var);
3649 rhsv = get_varinfo (rhsp->var);
3650 if (lhsv->may_have_pointers
3651 && (lhsv->is_full_var
3652 || rhsv->is_full_var
3653 || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3654 rhsv->offset + lhsoffset, rhsv->size)))
3655 process_constraint (new_constraint (*lhsp, *rhsp));
3656 if (!rhsv->is_full_var
3657 && (lhsv->is_full_var
3658 || (lhsv->offset + rhsoffset + lhsv->size
3659 > rhsv->offset + lhsoffset + rhsv->size)))
3661 ++k;
3662 if (k >= rhsc.length ())
3663 break;
3665 else
3666 ++j;
3669 else
3670 gcc_unreachable ();
3673 /* Create constraints ID = { rhsc }. */
3675 static void
3676 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3678 struct constraint_expr *c;
3679 struct constraint_expr includes;
3680 unsigned int j;
3682 includes.var = id;
3683 includes.offset = 0;
3684 includes.type = SCALAR;
3686 FOR_EACH_VEC_ELT (rhsc, j, c)
3687 process_constraint (new_constraint (includes, *c));
3690 /* Create a constraint ID = OP. */
3692 static void
3693 make_constraint_to (unsigned id, tree op)
3695 auto_vec<ce_s> rhsc;
3696 get_constraint_for_rhs (op, &rhsc);
3697 make_constraints_to (id, rhsc);
3700 /* Create a constraint ID = &FROM. */
3702 static void
3703 make_constraint_from (varinfo_t vi, int from)
3705 struct constraint_expr lhs, rhs;
3707 lhs.var = vi->id;
3708 lhs.offset = 0;
3709 lhs.type = SCALAR;
3711 rhs.var = from;
3712 rhs.offset = 0;
3713 rhs.type = ADDRESSOF;
3714 process_constraint (new_constraint (lhs, rhs));
3717 /* Create a constraint ID = FROM. */
3719 static void
3720 make_copy_constraint (varinfo_t vi, int from)
3722 struct constraint_expr lhs, rhs;
3724 lhs.var = vi->id;
3725 lhs.offset = 0;
3726 lhs.type = SCALAR;
3728 rhs.var = from;
3729 rhs.offset = 0;
3730 rhs.type = SCALAR;
3731 process_constraint (new_constraint (lhs, rhs));
3734 /* Make constraints necessary to make OP escape. */
3736 static void
3737 make_escape_constraint (tree op)
3739 make_constraint_to (escaped_id, op);
3742 /* Add constraints to that the solution of VI is transitively closed. */
3744 static void
3745 make_transitive_closure_constraints (varinfo_t vi)
3747 struct constraint_expr lhs, rhs;
3749 /* VAR = *VAR; */
3750 lhs.type = SCALAR;
3751 lhs.var = vi->id;
3752 lhs.offset = 0;
3753 rhs.type = DEREF;
3754 rhs.var = vi->id;
3755 rhs.offset = UNKNOWN_OFFSET;
3756 process_constraint (new_constraint (lhs, rhs));
3759 /* Temporary storage for fake var decls. */
3760 struct obstack fake_var_decl_obstack;
3762 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3764 static tree
3765 build_fake_var_decl (tree type)
3767 tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
3768 memset (decl, 0, sizeof (struct tree_var_decl));
3769 TREE_SET_CODE (decl, VAR_DECL);
3770 TREE_TYPE (decl) = type;
3771 DECL_UID (decl) = allocate_decl_uid ();
3772 SET_DECL_PT_UID (decl, -1);
3773 layout_decl (decl, 0);
3774 return decl;
3777 /* Create a new artificial heap variable with NAME.
3778 Return the created variable. */
3780 static varinfo_t
3781 make_heapvar (const char *name, bool add_id)
3783 varinfo_t vi;
3784 tree heapvar;
3786 heapvar = build_fake_var_decl (ptr_type_node);
3787 DECL_EXTERNAL (heapvar) = 1;
3789 vi = new_var_info (heapvar, name, add_id);
3790 vi->is_artificial_var = true;
3791 vi->is_heap_var = true;
3792 vi->is_unknown_size_var = true;
3793 vi->offset = 0;
3794 vi->fullsize = ~0;
3795 vi->size = ~0;
3796 vi->is_full_var = true;
3797 insert_vi_for_tree (heapvar, vi);
3799 return vi;
3802 /* Create a new artificial heap variable with NAME and make a
3803 constraint from it to LHS. Set flags according to a tag used
3804 for tracking restrict pointers. */
3806 static varinfo_t
3807 make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id)
3809 varinfo_t vi = make_heapvar (name, add_id);
3810 vi->is_restrict_var = 1;
3811 vi->is_global_var = 1;
3812 vi->may_have_pointers = 1;
3813 make_constraint_from (lhs, vi->id);
3814 return vi;
3817 /* Create a new artificial heap variable with NAME and make a
3818 constraint from it to LHS. Set flags according to a tag used
3819 for tracking restrict pointers and make the artificial heap
3820 point to global memory. */
3822 static varinfo_t
3823 make_constraint_from_global_restrict (varinfo_t lhs, const char *name,
3824 bool add_id)
3826 varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id);
3827 make_copy_constraint (vi, nonlocal_id);
3828 return vi;
3831 /* In IPA mode there are varinfos for different aspects of reach
3832 function designator. One for the points-to set of the return
3833 value, one for the variables that are clobbered by the function,
3834 one for its uses and one for each parameter (including a single
3835 glob for remaining variadic arguments). */
3837 enum { fi_clobbers = 1, fi_uses = 2,
3838 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3840 /* Get a constraint for the requested part of a function designator FI
3841 when operating in IPA mode. */
3843 static struct constraint_expr
3844 get_function_part_constraint (varinfo_t fi, unsigned part)
3846 struct constraint_expr c;
3848 gcc_assert (in_ipa_mode);
3850 if (fi->id == anything_id)
3852 /* ??? We probably should have a ANYFN special variable. */
3853 c.var = anything_id;
3854 c.offset = 0;
3855 c.type = SCALAR;
3857 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3859 varinfo_t ai = first_vi_for_offset (fi, part);
3860 if (ai)
3861 c.var = ai->id;
3862 else
3863 c.var = anything_id;
3864 c.offset = 0;
3865 c.type = SCALAR;
3867 else
3869 c.var = fi->id;
3870 c.offset = part;
3871 c.type = DEREF;
3874 return c;
3877 /* For non-IPA mode, generate constraints necessary for a call on the
3878 RHS. */
3880 static void
3881 handle_rhs_call (gcall *stmt, vec<ce_s> *results)
3883 struct constraint_expr rhsc;
3884 unsigned i;
3885 bool returns_uses = false;
3887 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3889 tree arg = gimple_call_arg (stmt, i);
3890 int flags = gimple_call_arg_flags (stmt, i);
3892 /* If the argument is not used we can ignore it. */
3893 if (flags & EAF_UNUSED)
3894 continue;
3896 /* As we compute ESCAPED context-insensitive we do not gain
3897 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3898 set. The argument would still get clobbered through the
3899 escape solution. */
3900 if ((flags & EAF_NOCLOBBER)
3901 && (flags & EAF_NOESCAPE))
3903 varinfo_t uses = get_call_use_vi (stmt);
3904 if (!(flags & EAF_DIRECT))
3906 varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
3907 make_constraint_to (tem->id, arg);
3908 make_transitive_closure_constraints (tem);
3909 make_copy_constraint (uses, tem->id);
3911 else
3912 make_constraint_to (uses->id, arg);
3913 returns_uses = true;
3915 else if (flags & EAF_NOESCAPE)
3917 struct constraint_expr lhs, rhs;
3918 varinfo_t uses = get_call_use_vi (stmt);
3919 varinfo_t clobbers = get_call_clobber_vi (stmt);
3920 varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
3921 make_constraint_to (tem->id, arg);
3922 if (!(flags & EAF_DIRECT))
3923 make_transitive_closure_constraints (tem);
3924 make_copy_constraint (uses, tem->id);
3925 make_copy_constraint (clobbers, tem->id);
3926 /* Add *tem = nonlocal, do not add *tem = callused as
3927 EAF_NOESCAPE parameters do not escape to other parameters
3928 and all other uses appear in NONLOCAL as well. */
3929 lhs.type = DEREF;
3930 lhs.var = tem->id;
3931 lhs.offset = 0;
3932 rhs.type = SCALAR;
3933 rhs.var = nonlocal_id;
3934 rhs.offset = 0;
3935 process_constraint (new_constraint (lhs, rhs));
3936 returns_uses = true;
3938 else
3939 make_escape_constraint (arg);
3942 /* If we added to the calls uses solution make sure we account for
3943 pointers to it to be returned. */
3944 if (returns_uses)
3946 rhsc.var = get_call_use_vi (stmt)->id;
3947 rhsc.offset = 0;
3948 rhsc.type = SCALAR;
3949 results->safe_push (rhsc);
3952 /* The static chain escapes as well. */
3953 if (gimple_call_chain (stmt))
3954 make_escape_constraint (gimple_call_chain (stmt));
3956 /* And if we applied NRV the address of the return slot escapes as well. */
3957 if (gimple_call_return_slot_opt_p (stmt)
3958 && gimple_call_lhs (stmt) != NULL_TREE
3959 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3961 auto_vec<ce_s> tmpc;
3962 struct constraint_expr lhsc, *c;
3963 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
3964 lhsc.var = escaped_id;
3965 lhsc.offset = 0;
3966 lhsc.type = SCALAR;
3967 FOR_EACH_VEC_ELT (tmpc, i, c)
3968 process_constraint (new_constraint (lhsc, *c));
3971 /* Regular functions return nonlocal memory. */
3972 rhsc.var = nonlocal_id;
3973 rhsc.offset = 0;
3974 rhsc.type = SCALAR;
3975 results->safe_push (rhsc);
3978 /* For non-IPA mode, generate constraints necessary for a call
3979 that returns a pointer and assigns it to LHS. This simply makes
3980 the LHS point to global and escaped variables. */
3982 static void
3983 handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> rhsc,
3984 tree fndecl)
3986 auto_vec<ce_s> lhsc;
3988 get_constraint_for (lhs, &lhsc);
3989 /* If the store is to a global decl make sure to
3990 add proper escape constraints. */
3991 lhs = get_base_address (lhs);
3992 if (lhs
3993 && DECL_P (lhs)
3994 && is_global_var (lhs))
3996 struct constraint_expr tmpc;
3997 tmpc.var = escaped_id;
3998 tmpc.offset = 0;
3999 tmpc.type = SCALAR;
4000 lhsc.safe_push (tmpc);
4003 /* If the call returns an argument unmodified override the rhs
4004 constraints. */
4005 if (flags & ERF_RETURNS_ARG
4006 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
4008 tree arg;
4009 rhsc.create (0);
4010 arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
4011 get_constraint_for (arg, &rhsc);
4012 process_all_all_constraints (lhsc, rhsc);
4013 rhsc.release ();
4015 else if (flags & ERF_NOALIAS)
4017 varinfo_t vi;
4018 struct constraint_expr tmpc;
4019 rhsc.create (0);
4020 vi = make_heapvar ("HEAP", true);
4021 /* We are marking allocated storage local, we deal with it becoming
4022 global by escaping and setting of vars_contains_escaped_heap. */
4023 DECL_EXTERNAL (vi->decl) = 0;
4024 vi->is_global_var = 0;
4025 /* If this is not a real malloc call assume the memory was
4026 initialized and thus may point to global memory. All
4027 builtin functions with the malloc attribute behave in a sane way. */
4028 if (!fndecl
4029 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
4030 make_constraint_from (vi, nonlocal_id);
4031 tmpc.var = vi->id;
4032 tmpc.offset = 0;
4033 tmpc.type = ADDRESSOF;
4034 rhsc.safe_push (tmpc);
4035 process_all_all_constraints (lhsc, rhsc);
4036 rhsc.release ();
4038 else
4039 process_all_all_constraints (lhsc, rhsc);
4042 /* For non-IPA mode, generate constraints necessary for a call of a
4043 const function that returns a pointer in the statement STMT. */
4045 static void
4046 handle_const_call (gcall *stmt, vec<ce_s> *results)
4048 struct constraint_expr rhsc;
4049 unsigned int k;
4051 /* Treat nested const functions the same as pure functions as far
4052 as the static chain is concerned. */
4053 if (gimple_call_chain (stmt))
4055 varinfo_t uses = get_call_use_vi (stmt);
4056 make_transitive_closure_constraints (uses);
4057 make_constraint_to (uses->id, gimple_call_chain (stmt));
4058 rhsc.var = uses->id;
4059 rhsc.offset = 0;
4060 rhsc.type = SCALAR;
4061 results->safe_push (rhsc);
4064 /* May return arguments. */
4065 for (k = 0; k < gimple_call_num_args (stmt); ++k)
4067 tree arg = gimple_call_arg (stmt, k);
4068 auto_vec<ce_s> argc;
4069 unsigned i;
4070 struct constraint_expr *argp;
4071 get_constraint_for_rhs (arg, &argc);
4072 FOR_EACH_VEC_ELT (argc, i, argp)
4073 results->safe_push (*argp);
4076 /* May return addresses of globals. */
4077 rhsc.var = nonlocal_id;
4078 rhsc.offset = 0;
4079 rhsc.type = ADDRESSOF;
4080 results->safe_push (rhsc);
4083 /* For non-IPA mode, generate constraints necessary for a call to a
4084 pure function in statement STMT. */
4086 static void
4087 handle_pure_call (gcall *stmt, vec<ce_s> *results)
4089 struct constraint_expr rhsc;
4090 unsigned i;
4091 varinfo_t uses = NULL;
4093 /* Memory reached from pointer arguments is call-used. */
4094 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4096 tree arg = gimple_call_arg (stmt, i);
4097 if (!uses)
4099 uses = get_call_use_vi (stmt);
4100 make_transitive_closure_constraints (uses);
4102 make_constraint_to (uses->id, arg);
4105 /* The static chain is used as well. */
4106 if (gimple_call_chain (stmt))
4108 if (!uses)
4110 uses = get_call_use_vi (stmt);
4111 make_transitive_closure_constraints (uses);
4113 make_constraint_to (uses->id, gimple_call_chain (stmt));
4116 /* Pure functions may return call-used and nonlocal memory. */
4117 if (uses)
4119 rhsc.var = uses->id;
4120 rhsc.offset = 0;
4121 rhsc.type = SCALAR;
4122 results->safe_push (rhsc);
4124 rhsc.var = nonlocal_id;
4125 rhsc.offset = 0;
4126 rhsc.type = SCALAR;
4127 results->safe_push (rhsc);
4131 /* Return the varinfo for the callee of CALL. */
4133 static varinfo_t
4134 get_fi_for_callee (gcall *call)
4136 tree decl, fn = gimple_call_fn (call);
4138 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4139 fn = OBJ_TYPE_REF_EXPR (fn);
4141 /* If we can directly resolve the function being called, do so.
4142 Otherwise, it must be some sort of indirect expression that
4143 we should still be able to handle. */
4144 decl = gimple_call_addr_fndecl (fn);
4145 if (decl)
4146 return get_vi_for_tree (decl);
4148 /* If the function is anything other than a SSA name pointer we have no
4149 clue and should be getting ANYFN (well, ANYTHING for now). */
4150 if (!fn || TREE_CODE (fn) != SSA_NAME)
4151 return get_varinfo (anything_id);
4153 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4154 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4155 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4156 fn = SSA_NAME_VAR (fn);
4158 return get_vi_for_tree (fn);
4161 /* Create constraints for assigning call argument ARG to the incoming parameter
4162 INDEX of function FI. */
4164 static void
4165 find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg)
4167 struct constraint_expr lhs;
4168 lhs = get_function_part_constraint (fi, fi_parm_base + index);
4170 auto_vec<ce_s, 2> rhsc;
4171 get_constraint_for_rhs (arg, &rhsc);
4173 unsigned j;
4174 struct constraint_expr *rhsp;
4175 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4176 process_constraint (new_constraint (lhs, *rhsp));
4179 /* Return true if FNDECL may be part of another lto partition. */
4181 static bool
4182 fndecl_maybe_in_other_partition (tree fndecl)
4184 cgraph_node *fn_node = cgraph_node::get (fndecl);
4185 if (fn_node == NULL)
4186 return true;
4188 return fn_node->in_other_partition;
4191 /* Create constraints for the builtin call T. Return true if the call
4192 was handled, otherwise false. */
4194 static bool
4195 find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
4197 tree fndecl = gimple_call_fndecl (t);
4198 auto_vec<ce_s, 2> lhsc;
4199 auto_vec<ce_s, 4> rhsc;
4200 varinfo_t fi;
4202 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4203 /* ??? All builtins that are handled here need to be handled
4204 in the alias-oracle query functions explicitly! */
4205 switch (DECL_FUNCTION_CODE (fndecl))
4207 /* All the following functions return a pointer to the same object
4208 as their first argument points to. The functions do not add
4209 to the ESCAPED solution. The functions make the first argument
4210 pointed to memory point to what the second argument pointed to
4211 memory points to. */
4212 case BUILT_IN_STRCPY:
4213 case BUILT_IN_STRNCPY:
4214 case BUILT_IN_BCOPY:
4215 case BUILT_IN_MEMCPY:
4216 case BUILT_IN_MEMMOVE:
4217 case BUILT_IN_MEMPCPY:
4218 case BUILT_IN_STPCPY:
4219 case BUILT_IN_STPNCPY:
4220 case BUILT_IN_STRCAT:
4221 case BUILT_IN_STRNCAT:
4222 case BUILT_IN_STRCPY_CHK:
4223 case BUILT_IN_STRNCPY_CHK:
4224 case BUILT_IN_MEMCPY_CHK:
4225 case BUILT_IN_MEMMOVE_CHK:
4226 case BUILT_IN_MEMPCPY_CHK:
4227 case BUILT_IN_STPCPY_CHK:
4228 case BUILT_IN_STPNCPY_CHK:
4229 case BUILT_IN_STRCAT_CHK:
4230 case BUILT_IN_STRNCAT_CHK:
4231 case BUILT_IN_TM_MEMCPY:
4232 case BUILT_IN_TM_MEMMOVE:
4234 tree res = gimple_call_lhs (t);
4235 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4236 == BUILT_IN_BCOPY ? 1 : 0));
4237 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4238 == BUILT_IN_BCOPY ? 0 : 1));
4239 if (res != NULL_TREE)
4241 get_constraint_for (res, &lhsc);
4242 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4243 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4244 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4245 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4246 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4247 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4248 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4249 else
4250 get_constraint_for (dest, &rhsc);
4251 process_all_all_constraints (lhsc, rhsc);
4252 lhsc.truncate (0);
4253 rhsc.truncate (0);
4255 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4256 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4257 do_deref (&lhsc);
4258 do_deref (&rhsc);
4259 process_all_all_constraints (lhsc, rhsc);
4260 return true;
4262 case BUILT_IN_MEMSET:
4263 case BUILT_IN_MEMSET_CHK:
4264 case BUILT_IN_TM_MEMSET:
4266 tree res = gimple_call_lhs (t);
4267 tree dest = gimple_call_arg (t, 0);
4268 unsigned i;
4269 ce_s *lhsp;
4270 struct constraint_expr ac;
4271 if (res != NULL_TREE)
4273 get_constraint_for (res, &lhsc);
4274 get_constraint_for (dest, &rhsc);
4275 process_all_all_constraints (lhsc, rhsc);
4276 lhsc.truncate (0);
4278 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4279 do_deref (&lhsc);
4280 if (flag_delete_null_pointer_checks
4281 && integer_zerop (gimple_call_arg (t, 1)))
4283 ac.type = ADDRESSOF;
4284 ac.var = nothing_id;
4286 else
4288 ac.type = SCALAR;
4289 ac.var = integer_id;
4291 ac.offset = 0;
4292 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4293 process_constraint (new_constraint (*lhsp, ac));
4294 return true;
4296 case BUILT_IN_POSIX_MEMALIGN:
4298 tree ptrptr = gimple_call_arg (t, 0);
4299 get_constraint_for (ptrptr, &lhsc);
4300 do_deref (&lhsc);
4301 varinfo_t vi = make_heapvar ("HEAP", true);
4302 /* We are marking allocated storage local, we deal with it becoming
4303 global by escaping and setting of vars_contains_escaped_heap. */
4304 DECL_EXTERNAL (vi->decl) = 0;
4305 vi->is_global_var = 0;
4306 struct constraint_expr tmpc;
4307 tmpc.var = vi->id;
4308 tmpc.offset = 0;
4309 tmpc.type = ADDRESSOF;
4310 rhsc.safe_push (tmpc);
4311 process_all_all_constraints (lhsc, rhsc);
4312 return true;
4314 case BUILT_IN_ASSUME_ALIGNED:
4316 tree res = gimple_call_lhs (t);
4317 tree dest = gimple_call_arg (t, 0);
4318 if (res != NULL_TREE)
4320 get_constraint_for (res, &lhsc);
4321 get_constraint_for (dest, &rhsc);
4322 process_all_all_constraints (lhsc, rhsc);
4324 return true;
4326 /* All the following functions do not return pointers, do not
4327 modify the points-to sets of memory reachable from their
4328 arguments and do not add to the ESCAPED solution. */
4329 case BUILT_IN_SINCOS:
4330 case BUILT_IN_SINCOSF:
4331 case BUILT_IN_SINCOSL:
4332 case BUILT_IN_FREXP:
4333 case BUILT_IN_FREXPF:
4334 case BUILT_IN_FREXPL:
4335 case BUILT_IN_GAMMA_R:
4336 case BUILT_IN_GAMMAF_R:
4337 case BUILT_IN_GAMMAL_R:
4338 case BUILT_IN_LGAMMA_R:
4339 case BUILT_IN_LGAMMAF_R:
4340 case BUILT_IN_LGAMMAL_R:
4341 case BUILT_IN_MODF:
4342 case BUILT_IN_MODFF:
4343 case BUILT_IN_MODFL:
4344 case BUILT_IN_REMQUO:
4345 case BUILT_IN_REMQUOF:
4346 case BUILT_IN_REMQUOL:
4347 case BUILT_IN_FREE:
4348 return true;
4349 case BUILT_IN_STRDUP:
4350 case BUILT_IN_STRNDUP:
4351 case BUILT_IN_REALLOC:
4352 if (gimple_call_lhs (t))
4354 handle_lhs_call (t, gimple_call_lhs (t),
4355 gimple_call_return_flags (t) | ERF_NOALIAS,
4356 vNULL, fndecl);
4357 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4358 NULL_TREE, &lhsc);
4359 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4360 NULL_TREE, &rhsc);
4361 do_deref (&lhsc);
4362 do_deref (&rhsc);
4363 process_all_all_constraints (lhsc, rhsc);
4364 lhsc.truncate (0);
4365 rhsc.truncate (0);
4366 /* For realloc the resulting pointer can be equal to the
4367 argument as well. But only doing this wouldn't be
4368 correct because with ptr == 0 realloc behaves like malloc. */
4369 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
4371 get_constraint_for (gimple_call_lhs (t), &lhsc);
4372 get_constraint_for (gimple_call_arg (t, 0), &rhsc);
4373 process_all_all_constraints (lhsc, rhsc);
4375 return true;
4377 break;
4378 /* String / character search functions return a pointer into the
4379 source string or NULL. */
4380 case BUILT_IN_INDEX:
4381 case BUILT_IN_STRCHR:
4382 case BUILT_IN_STRRCHR:
4383 case BUILT_IN_MEMCHR:
4384 case BUILT_IN_STRSTR:
4385 case BUILT_IN_STRPBRK:
4386 if (gimple_call_lhs (t))
4388 tree src = gimple_call_arg (t, 0);
4389 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4390 constraint_expr nul;
4391 nul.var = nothing_id;
4392 nul.offset = 0;
4393 nul.type = ADDRESSOF;
4394 rhsc.safe_push (nul);
4395 get_constraint_for (gimple_call_lhs (t), &lhsc);
4396 process_all_all_constraints (lhsc, rhsc);
4398 return true;
4399 /* Trampolines are special - they set up passing the static
4400 frame. */
4401 case BUILT_IN_INIT_TRAMPOLINE:
4403 tree tramp = gimple_call_arg (t, 0);
4404 tree nfunc = gimple_call_arg (t, 1);
4405 tree frame = gimple_call_arg (t, 2);
4406 unsigned i;
4407 struct constraint_expr lhs, *rhsp;
4408 if (in_ipa_mode)
4410 varinfo_t nfi = NULL;
4411 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4412 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4413 if (nfi)
4415 lhs = get_function_part_constraint (nfi, fi_static_chain);
4416 get_constraint_for (frame, &rhsc);
4417 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4418 process_constraint (new_constraint (lhs, *rhsp));
4419 rhsc.truncate (0);
4421 /* Make the frame point to the function for
4422 the trampoline adjustment call. */
4423 get_constraint_for (tramp, &lhsc);
4424 do_deref (&lhsc);
4425 get_constraint_for (nfunc, &rhsc);
4426 process_all_all_constraints (lhsc, rhsc);
4428 return true;
4431 /* Else fallthru to generic handling which will let
4432 the frame escape. */
4433 break;
4435 case BUILT_IN_ADJUST_TRAMPOLINE:
4437 tree tramp = gimple_call_arg (t, 0);
4438 tree res = gimple_call_lhs (t);
4439 if (in_ipa_mode && res)
4441 get_constraint_for (res, &lhsc);
4442 get_constraint_for (tramp, &rhsc);
4443 do_deref (&rhsc);
4444 process_all_all_constraints (lhsc, rhsc);
4446 return true;
4448 CASE_BUILT_IN_TM_STORE (1):
4449 CASE_BUILT_IN_TM_STORE (2):
4450 CASE_BUILT_IN_TM_STORE (4):
4451 CASE_BUILT_IN_TM_STORE (8):
4452 CASE_BUILT_IN_TM_STORE (FLOAT):
4453 CASE_BUILT_IN_TM_STORE (DOUBLE):
4454 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4455 CASE_BUILT_IN_TM_STORE (M64):
4456 CASE_BUILT_IN_TM_STORE (M128):
4457 CASE_BUILT_IN_TM_STORE (M256):
4459 tree addr = gimple_call_arg (t, 0);
4460 tree src = gimple_call_arg (t, 1);
4462 get_constraint_for (addr, &lhsc);
4463 do_deref (&lhsc);
4464 get_constraint_for (src, &rhsc);
4465 process_all_all_constraints (lhsc, rhsc);
4466 return true;
4468 CASE_BUILT_IN_TM_LOAD (1):
4469 CASE_BUILT_IN_TM_LOAD (2):
4470 CASE_BUILT_IN_TM_LOAD (4):
4471 CASE_BUILT_IN_TM_LOAD (8):
4472 CASE_BUILT_IN_TM_LOAD (FLOAT):
4473 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4474 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4475 CASE_BUILT_IN_TM_LOAD (M64):
4476 CASE_BUILT_IN_TM_LOAD (M128):
4477 CASE_BUILT_IN_TM_LOAD (M256):
4479 tree dest = gimple_call_lhs (t);
4480 tree addr = gimple_call_arg (t, 0);
4482 get_constraint_for (dest, &lhsc);
4483 get_constraint_for (addr, &rhsc);
4484 do_deref (&rhsc);
4485 process_all_all_constraints (lhsc, rhsc);
4486 return true;
4488 /* Variadic argument handling needs to be handled in IPA
4489 mode as well. */
4490 case BUILT_IN_VA_START:
4492 tree valist = gimple_call_arg (t, 0);
4493 struct constraint_expr rhs, *lhsp;
4494 unsigned i;
4495 get_constraint_for_ptr_offset (valist, NULL_TREE, &lhsc);
4496 do_deref (&lhsc);
4497 /* The va_list gets access to pointers in variadic
4498 arguments. Which we know in the case of IPA analysis
4499 and otherwise are just all nonlocal variables. */
4500 if (in_ipa_mode)
4502 fi = lookup_vi_for_tree (fn->decl);
4503 rhs = get_function_part_constraint (fi, ~0);
4504 rhs.type = ADDRESSOF;
4506 else
4508 rhs.var = nonlocal_id;
4509 rhs.type = ADDRESSOF;
4510 rhs.offset = 0;
4512 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4513 process_constraint (new_constraint (*lhsp, rhs));
4514 /* va_list is clobbered. */
4515 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4516 return true;
4518 /* va_end doesn't have any effect that matters. */
4519 case BUILT_IN_VA_END:
4520 return true;
4521 /* Alternate return. Simply give up for now. */
4522 case BUILT_IN_RETURN:
4524 fi = NULL;
4525 if (!in_ipa_mode
4526 || !(fi = get_vi_for_tree (fn->decl)))
4527 make_constraint_from (get_varinfo (escaped_id), anything_id);
4528 else if (in_ipa_mode
4529 && fi != NULL)
4531 struct constraint_expr lhs, rhs;
4532 lhs = get_function_part_constraint (fi, fi_result);
4533 rhs.var = anything_id;
4534 rhs.offset = 0;
4535 rhs.type = SCALAR;
4536 process_constraint (new_constraint (lhs, rhs));
4538 return true;
4540 case BUILT_IN_GOMP_PARALLEL:
4541 case BUILT_IN_GOACC_PARALLEL:
4543 if (in_ipa_mode)
4545 unsigned int fnpos, argpos;
4546 switch (DECL_FUNCTION_CODE (fndecl))
4548 case BUILT_IN_GOMP_PARALLEL:
4549 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
4550 fnpos = 0;
4551 argpos = 1;
4552 break;
4553 case BUILT_IN_GOACC_PARALLEL:
4554 /* __builtin_GOACC_parallel (device, fn, mapnum, hostaddrs,
4555 sizes, kinds, ...). */
4556 fnpos = 1;
4557 argpos = 3;
4558 break;
4559 default:
4560 gcc_unreachable ();
4563 tree fnarg = gimple_call_arg (t, fnpos);
4564 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
4565 tree fndecl = TREE_OPERAND (fnarg, 0);
4566 if (fndecl_maybe_in_other_partition (fndecl))
4567 /* Fallthru to general call handling. */
4568 break;
4570 tree arg = gimple_call_arg (t, argpos);
4572 varinfo_t fi = get_vi_for_tree (fndecl);
4573 find_func_aliases_for_call_arg (fi, 0, arg);
4574 return true;
4576 /* Else fallthru to generic call handling. */
4577 break;
4579 /* printf-style functions may have hooks to set pointers to
4580 point to somewhere into the generated string. Leave them
4581 for a later exercise... */
4582 default:
4583 /* Fallthru to general call handling. */;
4586 return false;
4589 /* Create constraints for the call T. */
4591 static void
4592 find_func_aliases_for_call (struct function *fn, gcall *t)
4594 tree fndecl = gimple_call_fndecl (t);
4595 varinfo_t fi;
4597 if (fndecl != NULL_TREE
4598 && DECL_BUILT_IN (fndecl)
4599 && find_func_aliases_for_builtin_call (fn, t))
4600 return;
4602 fi = get_fi_for_callee (t);
4603 if (!in_ipa_mode
4604 || (fndecl && !fi->is_fn_info))
4606 auto_vec<ce_s, 16> rhsc;
4607 int flags = gimple_call_flags (t);
4609 /* Const functions can return their arguments and addresses
4610 of global memory but not of escaped memory. */
4611 if (flags & (ECF_CONST|ECF_NOVOPS))
4613 if (gimple_call_lhs (t))
4614 handle_const_call (t, &rhsc);
4616 /* Pure functions can return addresses in and of memory
4617 reachable from their arguments, but they are not an escape
4618 point for reachable memory of their arguments. */
4619 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4620 handle_pure_call (t, &rhsc);
4621 else
4622 handle_rhs_call (t, &rhsc);
4623 if (gimple_call_lhs (t))
4624 handle_lhs_call (t, gimple_call_lhs (t),
4625 gimple_call_return_flags (t), rhsc, fndecl);
4627 else
4629 auto_vec<ce_s, 2> rhsc;
4630 tree lhsop;
4631 unsigned j;
4633 /* Assign all the passed arguments to the appropriate incoming
4634 parameters of the function. */
4635 for (j = 0; j < gimple_call_num_args (t); j++)
4637 tree arg = gimple_call_arg (t, j);
4638 find_func_aliases_for_call_arg (fi, j, arg);
4641 /* If we are returning a value, assign it to the result. */
4642 lhsop = gimple_call_lhs (t);
4643 if (lhsop)
4645 auto_vec<ce_s, 2> lhsc;
4646 struct constraint_expr rhs;
4647 struct constraint_expr *lhsp;
4648 bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (t));
4650 get_constraint_for (lhsop, &lhsc);
4651 rhs = get_function_part_constraint (fi, fi_result);
4652 if (aggr_p)
4654 auto_vec<ce_s, 2> tem;
4655 tem.quick_push (rhs);
4656 do_deref (&tem);
4657 gcc_checking_assert (tem.length () == 1);
4658 rhs = tem[0];
4660 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4661 process_constraint (new_constraint (*lhsp, rhs));
4663 /* If we pass the result decl by reference, honor that. */
4664 if (aggr_p)
4666 struct constraint_expr lhs;
4667 struct constraint_expr *rhsp;
4669 get_constraint_for_address_of (lhsop, &rhsc);
4670 lhs = get_function_part_constraint (fi, fi_result);
4671 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4672 process_constraint (new_constraint (lhs, *rhsp));
4673 rhsc.truncate (0);
4677 /* If we use a static chain, pass it along. */
4678 if (gimple_call_chain (t))
4680 struct constraint_expr lhs;
4681 struct constraint_expr *rhsp;
4683 get_constraint_for (gimple_call_chain (t), &rhsc);
4684 lhs = get_function_part_constraint (fi, fi_static_chain);
4685 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4686 process_constraint (new_constraint (lhs, *rhsp));
4691 /* Walk statement T setting up aliasing constraints according to the
4692 references found in T. This function is the main part of the
4693 constraint builder. AI points to auxiliary alias information used
4694 when building alias sets and computing alias grouping heuristics. */
4696 static void
4697 find_func_aliases (struct function *fn, gimple *origt)
4699 gimple *t = origt;
4700 auto_vec<ce_s, 16> lhsc;
4701 auto_vec<ce_s, 16> rhsc;
4702 struct constraint_expr *c;
4703 varinfo_t fi;
4705 /* Now build constraints expressions. */
4706 if (gimple_code (t) == GIMPLE_PHI)
4708 size_t i;
4709 unsigned int j;
4711 /* For a phi node, assign all the arguments to
4712 the result. */
4713 get_constraint_for (gimple_phi_result (t), &lhsc);
4714 for (i = 0; i < gimple_phi_num_args (t); i++)
4716 tree strippedrhs = PHI_ARG_DEF (t, i);
4718 STRIP_NOPS (strippedrhs);
4719 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
4721 FOR_EACH_VEC_ELT (lhsc, j, c)
4723 struct constraint_expr *c2;
4724 while (rhsc.length () > 0)
4726 c2 = &rhsc.last ();
4727 process_constraint (new_constraint (*c, *c2));
4728 rhsc.pop ();
4733 /* In IPA mode, we need to generate constraints to pass call
4734 arguments through their calls. There are two cases,
4735 either a GIMPLE_CALL returning a value, or just a plain
4736 GIMPLE_CALL when we are not.
4738 In non-ipa mode, we need to generate constraints for each
4739 pointer passed by address. */
4740 else if (is_gimple_call (t))
4741 find_func_aliases_for_call (fn, as_a <gcall *> (t));
4743 /* Otherwise, just a regular assignment statement. Only care about
4744 operations with pointer result, others are dealt with as escape
4745 points if they have pointer operands. */
4746 else if (is_gimple_assign (t))
4748 /* Otherwise, just a regular assignment statement. */
4749 tree lhsop = gimple_assign_lhs (t);
4750 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4752 if (rhsop && TREE_CLOBBER_P (rhsop))
4753 /* Ignore clobbers, they don't actually store anything into
4754 the LHS. */
4756 else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4757 do_structure_copy (lhsop, rhsop);
4758 else
4760 enum tree_code code = gimple_assign_rhs_code (t);
4762 get_constraint_for (lhsop, &lhsc);
4764 if (code == POINTER_PLUS_EXPR)
4765 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4766 gimple_assign_rhs2 (t), &rhsc);
4767 else if (code == BIT_AND_EXPR
4768 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
4770 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4771 the pointer. Handle it by offsetting it by UNKNOWN. */
4772 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4773 NULL_TREE, &rhsc);
4775 else if ((CONVERT_EXPR_CODE_P (code)
4776 && !(POINTER_TYPE_P (gimple_expr_type (t))
4777 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4778 || gimple_assign_single_p (t))
4779 get_constraint_for_rhs (rhsop, &rhsc);
4780 else if (code == COND_EXPR)
4782 /* The result is a merge of both COND_EXPR arms. */
4783 auto_vec<ce_s, 2> tmp;
4784 struct constraint_expr *rhsp;
4785 unsigned i;
4786 get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
4787 get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
4788 FOR_EACH_VEC_ELT (tmp, i, rhsp)
4789 rhsc.safe_push (*rhsp);
4791 else if (truth_value_p (code))
4792 /* Truth value results are not pointer (parts). Or at least
4793 very unreasonable obfuscation of a part. */
4795 else
4797 /* All other operations are merges. */
4798 auto_vec<ce_s, 4> tmp;
4799 struct constraint_expr *rhsp;
4800 unsigned i, j;
4801 get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
4802 for (i = 2; i < gimple_num_ops (t); ++i)
4804 get_constraint_for_rhs (gimple_op (t, i), &tmp);
4805 FOR_EACH_VEC_ELT (tmp, j, rhsp)
4806 rhsc.safe_push (*rhsp);
4807 tmp.truncate (0);
4810 process_all_all_constraints (lhsc, rhsc);
4812 /* If there is a store to a global variable the rhs escapes. */
4813 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4814 && DECL_P (lhsop))
4816 varinfo_t vi = get_vi_for_tree (lhsop);
4817 if ((! in_ipa_mode && vi->is_global_var)
4818 || vi->is_ipa_escape_point)
4819 make_escape_constraint (rhsop);
4822 /* Handle escapes through return. */
4823 else if (gimple_code (t) == GIMPLE_RETURN
4824 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
4826 greturn *return_stmt = as_a <greturn *> (t);
4827 fi = NULL;
4828 if (!in_ipa_mode
4829 || !(fi = get_vi_for_tree (fn->decl)))
4830 make_escape_constraint (gimple_return_retval (return_stmt));
4831 else if (in_ipa_mode)
4833 struct constraint_expr lhs ;
4834 struct constraint_expr *rhsp;
4835 unsigned i;
4837 lhs = get_function_part_constraint (fi, fi_result);
4838 get_constraint_for_rhs (gimple_return_retval (return_stmt), &rhsc);
4839 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4840 process_constraint (new_constraint (lhs, *rhsp));
4843 /* Handle asms conservatively by adding escape constraints to everything. */
4844 else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
4846 unsigned i, noutputs;
4847 const char **oconstraints;
4848 const char *constraint;
4849 bool allows_mem, allows_reg, is_inout;
4851 noutputs = gimple_asm_noutputs (asm_stmt);
4852 oconstraints = XALLOCAVEC (const char *, noutputs);
4854 for (i = 0; i < noutputs; ++i)
4856 tree link = gimple_asm_output_op (asm_stmt, i);
4857 tree op = TREE_VALUE (link);
4859 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4860 oconstraints[i] = constraint;
4861 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4862 &allows_reg, &is_inout);
4864 /* A memory constraint makes the address of the operand escape. */
4865 if (!allows_reg && allows_mem)
4866 make_escape_constraint (build_fold_addr_expr (op));
4868 /* The asm may read global memory, so outputs may point to
4869 any global memory. */
4870 if (op)
4872 auto_vec<ce_s, 2> lhsc;
4873 struct constraint_expr rhsc, *lhsp;
4874 unsigned j;
4875 get_constraint_for (op, &lhsc);
4876 rhsc.var = nonlocal_id;
4877 rhsc.offset = 0;
4878 rhsc.type = SCALAR;
4879 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4880 process_constraint (new_constraint (*lhsp, rhsc));
4883 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4885 tree link = gimple_asm_input_op (asm_stmt, i);
4886 tree op = TREE_VALUE (link);
4888 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4890 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
4891 &allows_mem, &allows_reg);
4893 /* A memory constraint makes the address of the operand escape. */
4894 if (!allows_reg && allows_mem)
4895 make_escape_constraint (build_fold_addr_expr (op));
4896 /* Strictly we'd only need the constraint to ESCAPED if
4897 the asm clobbers memory, otherwise using something
4898 along the lines of per-call clobbers/uses would be enough. */
4899 else if (op)
4900 make_escape_constraint (op);
4906 /* Create a constraint adding to the clobber set of FI the memory
4907 pointed to by PTR. */
4909 static void
4910 process_ipa_clobber (varinfo_t fi, tree ptr)
4912 vec<ce_s> ptrc = vNULL;
4913 struct constraint_expr *c, lhs;
4914 unsigned i;
4915 get_constraint_for_rhs (ptr, &ptrc);
4916 lhs = get_function_part_constraint (fi, fi_clobbers);
4917 FOR_EACH_VEC_ELT (ptrc, i, c)
4918 process_constraint (new_constraint (lhs, *c));
4919 ptrc.release ();
4922 /* Walk statement T setting up clobber and use constraints according to the
4923 references found in T. This function is a main part of the
4924 IPA constraint builder. */
4926 static void
4927 find_func_clobbers (struct function *fn, gimple *origt)
4929 gimple *t = origt;
4930 auto_vec<ce_s, 16> lhsc;
4931 auto_vec<ce_s, 16> rhsc;
4932 varinfo_t fi;
4934 /* Add constraints for clobbered/used in IPA mode.
4935 We are not interested in what automatic variables are clobbered
4936 or used as we only use the information in the caller to which
4937 they do not escape. */
4938 gcc_assert (in_ipa_mode);
4940 /* If the stmt refers to memory in any way it better had a VUSE. */
4941 if (gimple_vuse (t) == NULL_TREE)
4942 return;
4944 /* We'd better have function information for the current function. */
4945 fi = lookup_vi_for_tree (fn->decl);
4946 gcc_assert (fi != NULL);
4948 /* Account for stores in assignments and calls. */
4949 if (gimple_vdef (t) != NULL_TREE
4950 && gimple_has_lhs (t))
4952 tree lhs = gimple_get_lhs (t);
4953 tree tem = lhs;
4954 while (handled_component_p (tem))
4955 tem = TREE_OPERAND (tem, 0);
4956 if ((DECL_P (tem)
4957 && !auto_var_in_fn_p (tem, fn->decl))
4958 || INDIRECT_REF_P (tem)
4959 || (TREE_CODE (tem) == MEM_REF
4960 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4961 && auto_var_in_fn_p
4962 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
4964 struct constraint_expr lhsc, *rhsp;
4965 unsigned i;
4966 lhsc = get_function_part_constraint (fi, fi_clobbers);
4967 get_constraint_for_address_of (lhs, &rhsc);
4968 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4969 process_constraint (new_constraint (lhsc, *rhsp));
4970 rhsc.truncate (0);
4974 /* Account for uses in assigments and returns. */
4975 if (gimple_assign_single_p (t)
4976 || (gimple_code (t) == GIMPLE_RETURN
4977 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
4979 tree rhs = (gimple_assign_single_p (t)
4980 ? gimple_assign_rhs1 (t)
4981 : gimple_return_retval (as_a <greturn *> (t)));
4982 tree tem = rhs;
4983 while (handled_component_p (tem))
4984 tem = TREE_OPERAND (tem, 0);
4985 if ((DECL_P (tem)
4986 && !auto_var_in_fn_p (tem, fn->decl))
4987 || INDIRECT_REF_P (tem)
4988 || (TREE_CODE (tem) == MEM_REF
4989 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4990 && auto_var_in_fn_p
4991 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
4993 struct constraint_expr lhs, *rhsp;
4994 unsigned i;
4995 lhs = get_function_part_constraint (fi, fi_uses);
4996 get_constraint_for_address_of (rhs, &rhsc);
4997 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4998 process_constraint (new_constraint (lhs, *rhsp));
4999 rhsc.truncate (0);
5003 if (gcall *call_stmt = dyn_cast <gcall *> (t))
5005 varinfo_t cfi = NULL;
5006 tree decl = gimple_call_fndecl (t);
5007 struct constraint_expr lhs, rhs;
5008 unsigned i, j;
5010 /* For builtins we do not have separate function info. For those
5011 we do not generate escapes for we have to generate clobbers/uses. */
5012 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
5013 switch (DECL_FUNCTION_CODE (decl))
5015 /* The following functions use and clobber memory pointed to
5016 by their arguments. */
5017 case BUILT_IN_STRCPY:
5018 case BUILT_IN_STRNCPY:
5019 case BUILT_IN_BCOPY:
5020 case BUILT_IN_MEMCPY:
5021 case BUILT_IN_MEMMOVE:
5022 case BUILT_IN_MEMPCPY:
5023 case BUILT_IN_STPCPY:
5024 case BUILT_IN_STPNCPY:
5025 case BUILT_IN_STRCAT:
5026 case BUILT_IN_STRNCAT:
5027 case BUILT_IN_STRCPY_CHK:
5028 case BUILT_IN_STRNCPY_CHK:
5029 case BUILT_IN_MEMCPY_CHK:
5030 case BUILT_IN_MEMMOVE_CHK:
5031 case BUILT_IN_MEMPCPY_CHK:
5032 case BUILT_IN_STPCPY_CHK:
5033 case BUILT_IN_STPNCPY_CHK:
5034 case BUILT_IN_STRCAT_CHK:
5035 case BUILT_IN_STRNCAT_CHK:
5037 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5038 == BUILT_IN_BCOPY ? 1 : 0));
5039 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5040 == BUILT_IN_BCOPY ? 0 : 1));
5041 unsigned i;
5042 struct constraint_expr *rhsp, *lhsp;
5043 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5044 lhs = get_function_part_constraint (fi, fi_clobbers);
5045 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5046 process_constraint (new_constraint (lhs, *lhsp));
5047 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
5048 lhs = get_function_part_constraint (fi, fi_uses);
5049 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5050 process_constraint (new_constraint (lhs, *rhsp));
5051 return;
5053 /* The following function clobbers memory pointed to by
5054 its argument. */
5055 case BUILT_IN_MEMSET:
5056 case BUILT_IN_MEMSET_CHK:
5057 case BUILT_IN_POSIX_MEMALIGN:
5059 tree dest = gimple_call_arg (t, 0);
5060 unsigned i;
5061 ce_s *lhsp;
5062 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5063 lhs = get_function_part_constraint (fi, fi_clobbers);
5064 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5065 process_constraint (new_constraint (lhs, *lhsp));
5066 return;
5068 /* The following functions clobber their second and third
5069 arguments. */
5070 case BUILT_IN_SINCOS:
5071 case BUILT_IN_SINCOSF:
5072 case BUILT_IN_SINCOSL:
5074 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5075 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5076 return;
5078 /* The following functions clobber their second argument. */
5079 case BUILT_IN_FREXP:
5080 case BUILT_IN_FREXPF:
5081 case BUILT_IN_FREXPL:
5082 case BUILT_IN_LGAMMA_R:
5083 case BUILT_IN_LGAMMAF_R:
5084 case BUILT_IN_LGAMMAL_R:
5085 case BUILT_IN_GAMMA_R:
5086 case BUILT_IN_GAMMAF_R:
5087 case BUILT_IN_GAMMAL_R:
5088 case BUILT_IN_MODF:
5089 case BUILT_IN_MODFF:
5090 case BUILT_IN_MODFL:
5092 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5093 return;
5095 /* The following functions clobber their third argument. */
5096 case BUILT_IN_REMQUO:
5097 case BUILT_IN_REMQUOF:
5098 case BUILT_IN_REMQUOL:
5100 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5101 return;
5103 /* The following functions neither read nor clobber memory. */
5104 case BUILT_IN_ASSUME_ALIGNED:
5105 case BUILT_IN_FREE:
5106 return;
5107 /* Trampolines are of no interest to us. */
5108 case BUILT_IN_INIT_TRAMPOLINE:
5109 case BUILT_IN_ADJUST_TRAMPOLINE:
5110 return;
5111 case BUILT_IN_VA_START:
5112 case BUILT_IN_VA_END:
5113 return;
5114 case BUILT_IN_GOMP_PARALLEL:
5115 case BUILT_IN_GOACC_PARALLEL:
5117 unsigned int fnpos, argpos;
5118 unsigned int implicit_use_args[2];
5119 unsigned int num_implicit_use_args = 0;
5120 switch (DECL_FUNCTION_CODE (decl))
5122 case BUILT_IN_GOMP_PARALLEL:
5123 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
5124 fnpos = 0;
5125 argpos = 1;
5126 break;
5127 case BUILT_IN_GOACC_PARALLEL:
5128 /* __builtin_GOACC_parallel (device, fn, mapnum, hostaddrs,
5129 sizes, kinds, ...). */
5130 fnpos = 1;
5131 argpos = 3;
5132 implicit_use_args[num_implicit_use_args++] = 4;
5133 implicit_use_args[num_implicit_use_args++] = 5;
5134 break;
5135 default:
5136 gcc_unreachable ();
5139 tree fnarg = gimple_call_arg (t, fnpos);
5140 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
5141 tree fndecl = TREE_OPERAND (fnarg, 0);
5142 if (fndecl_maybe_in_other_partition (fndecl))
5143 /* Fallthru to general call handling. */
5144 break;
5146 varinfo_t cfi = get_vi_for_tree (fndecl);
5148 tree arg = gimple_call_arg (t, argpos);
5150 /* Parameter passed by value is used. */
5151 lhs = get_function_part_constraint (fi, fi_uses);
5152 struct constraint_expr *rhsp;
5153 get_constraint_for (arg, &rhsc);
5154 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5155 process_constraint (new_constraint (lhs, *rhsp));
5156 rhsc.truncate (0);
5158 /* Handle parameters used by the call, but not used in cfi, as
5159 implicitly used by cfi. */
5160 lhs = get_function_part_constraint (cfi, fi_uses);
5161 for (unsigned i = 0; i < num_implicit_use_args; ++i)
5163 tree arg = gimple_call_arg (t, implicit_use_args[i]);
5164 get_constraint_for (arg, &rhsc);
5165 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5166 process_constraint (new_constraint (lhs, *rhsp));
5167 rhsc.truncate (0);
5170 /* The caller clobbers what the callee does. */
5171 lhs = get_function_part_constraint (fi, fi_clobbers);
5172 rhs = get_function_part_constraint (cfi, fi_clobbers);
5173 process_constraint (new_constraint (lhs, rhs));
5175 /* The caller uses what the callee does. */
5176 lhs = get_function_part_constraint (fi, fi_uses);
5177 rhs = get_function_part_constraint (cfi, fi_uses);
5178 process_constraint (new_constraint (lhs, rhs));
5180 return;
5182 /* printf-style functions may have hooks to set pointers to
5183 point to somewhere into the generated string. Leave them
5184 for a later exercise... */
5185 default:
5186 /* Fallthru to general call handling. */;
5189 /* Parameters passed by value are used. */
5190 lhs = get_function_part_constraint (fi, fi_uses);
5191 for (i = 0; i < gimple_call_num_args (t); i++)
5193 struct constraint_expr *rhsp;
5194 tree arg = gimple_call_arg (t, i);
5196 if (TREE_CODE (arg) == SSA_NAME
5197 || is_gimple_min_invariant (arg))
5198 continue;
5200 get_constraint_for_address_of (arg, &rhsc);
5201 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5202 process_constraint (new_constraint (lhs, *rhsp));
5203 rhsc.truncate (0);
5206 /* Build constraints for propagating clobbers/uses along the
5207 callgraph edges. */
5208 cfi = get_fi_for_callee (call_stmt);
5209 if (cfi->id == anything_id)
5211 if (gimple_vdef (t))
5212 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5213 anything_id);
5214 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5215 anything_id);
5216 return;
5219 /* For callees without function info (that's external functions),
5220 ESCAPED is clobbered and used. */
5221 if (gimple_call_fndecl (t)
5222 && !cfi->is_fn_info)
5224 varinfo_t vi;
5226 if (gimple_vdef (t))
5227 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5228 escaped_id);
5229 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
5231 /* Also honor the call statement use/clobber info. */
5232 if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
5233 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5234 vi->id);
5235 if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
5236 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
5237 vi->id);
5238 return;
5241 /* Otherwise the caller clobbers and uses what the callee does.
5242 ??? This should use a new complex constraint that filters
5243 local variables of the callee. */
5244 if (gimple_vdef (t))
5246 lhs = get_function_part_constraint (fi, fi_clobbers);
5247 rhs = get_function_part_constraint (cfi, fi_clobbers);
5248 process_constraint (new_constraint (lhs, rhs));
5250 lhs = get_function_part_constraint (fi, fi_uses);
5251 rhs = get_function_part_constraint (cfi, fi_uses);
5252 process_constraint (new_constraint (lhs, rhs));
5254 else if (gimple_code (t) == GIMPLE_ASM)
5256 /* ??? Ick. We can do better. */
5257 if (gimple_vdef (t))
5258 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5259 anything_id);
5260 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5261 anything_id);
5266 /* Find the first varinfo in the same variable as START that overlaps with
5267 OFFSET. Return NULL if we can't find one. */
5269 static varinfo_t
5270 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5272 /* If the offset is outside of the variable, bail out. */
5273 if (offset >= start->fullsize)
5274 return NULL;
5276 /* If we cannot reach offset from start, lookup the first field
5277 and start from there. */
5278 if (start->offset > offset)
5279 start = get_varinfo (start->head);
5281 while (start)
5283 /* We may not find a variable in the field list with the actual
5284 offset when we have glommed a structure to a variable.
5285 In that case, however, offset should still be within the size
5286 of the variable. */
5287 if (offset >= start->offset
5288 && (offset - start->offset) < start->size)
5289 return start;
5291 start = vi_next (start);
5294 return NULL;
5297 /* Find the first varinfo in the same variable as START that overlaps with
5298 OFFSET. If there is no such varinfo the varinfo directly preceding
5299 OFFSET is returned. */
5301 static varinfo_t
5302 first_or_preceding_vi_for_offset (varinfo_t start,
5303 unsigned HOST_WIDE_INT offset)
5305 /* If we cannot reach offset from start, lookup the first field
5306 and start from there. */
5307 if (start->offset > offset)
5308 start = get_varinfo (start->head);
5310 /* We may not find a variable in the field list with the actual
5311 offset when we have glommed a structure to a variable.
5312 In that case, however, offset should still be within the size
5313 of the variable.
5314 If we got beyond the offset we look for return the field
5315 directly preceding offset which may be the last field. */
5316 while (start->next
5317 && offset >= start->offset
5318 && !((offset - start->offset) < start->size))
5319 start = vi_next (start);
5321 return start;
5325 /* This structure is used during pushing fields onto the fieldstack
5326 to track the offset of the field, since bitpos_of_field gives it
5327 relative to its immediate containing type, and we want it relative
5328 to the ultimate containing object. */
5330 struct fieldoff
5332 /* Offset from the base of the base containing object to this field. */
5333 HOST_WIDE_INT offset;
5335 /* Size, in bits, of the field. */
5336 unsigned HOST_WIDE_INT size;
5338 unsigned has_unknown_size : 1;
5340 unsigned must_have_pointers : 1;
5342 unsigned may_have_pointers : 1;
5344 unsigned only_restrict_pointers : 1;
5346 tree restrict_pointed_type;
5348 typedef struct fieldoff fieldoff_s;
5351 /* qsort comparison function for two fieldoff's PA and PB */
5353 static int
5354 fieldoff_compare (const void *pa, const void *pb)
5356 const fieldoff_s *foa = (const fieldoff_s *)pa;
5357 const fieldoff_s *fob = (const fieldoff_s *)pb;
5358 unsigned HOST_WIDE_INT foasize, fobsize;
5360 if (foa->offset < fob->offset)
5361 return -1;
5362 else if (foa->offset > fob->offset)
5363 return 1;
5365 foasize = foa->size;
5366 fobsize = fob->size;
5367 if (foasize < fobsize)
5368 return -1;
5369 else if (foasize > fobsize)
5370 return 1;
5371 return 0;
5374 /* Sort a fieldstack according to the field offset and sizes. */
5375 static void
5376 sort_fieldstack (vec<fieldoff_s> fieldstack)
5378 fieldstack.qsort (fieldoff_compare);
5381 /* Return true if T is a type that can have subvars. */
5383 static inline bool
5384 type_can_have_subvars (const_tree t)
5386 /* Aggregates without overlapping fields can have subvars. */
5387 return TREE_CODE (t) == RECORD_TYPE;
5390 /* Return true if V is a tree that we can have subvars for.
5391 Normally, this is any aggregate type. Also complex
5392 types which are not gimple registers can have subvars. */
5394 static inline bool
5395 var_can_have_subvars (const_tree v)
5397 /* Volatile variables should never have subvars. */
5398 if (TREE_THIS_VOLATILE (v))
5399 return false;
5401 /* Non decls or memory tags can never have subvars. */
5402 if (!DECL_P (v))
5403 return false;
5405 return type_can_have_subvars (TREE_TYPE (v));
5408 /* Return true if T is a type that does contain pointers. */
5410 static bool
5411 type_must_have_pointers (tree type)
5413 if (POINTER_TYPE_P (type))
5414 return true;
5416 if (TREE_CODE (type) == ARRAY_TYPE)
5417 return type_must_have_pointers (TREE_TYPE (type));
5419 /* A function or method can have pointers as arguments, so track
5420 those separately. */
5421 if (TREE_CODE (type) == FUNCTION_TYPE
5422 || TREE_CODE (type) == METHOD_TYPE)
5423 return true;
5425 return false;
5428 static bool
5429 field_must_have_pointers (tree t)
5431 return type_must_have_pointers (TREE_TYPE (t));
5434 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5435 the fields of TYPE onto fieldstack, recording their offsets along
5436 the way.
5438 OFFSET is used to keep track of the offset in this entire
5439 structure, rather than just the immediately containing structure.
5440 Returns false if the caller is supposed to handle the field we
5441 recursed for. */
5443 static bool
5444 push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
5445 HOST_WIDE_INT offset)
5447 tree field;
5448 bool empty_p = true;
5450 if (TREE_CODE (type) != RECORD_TYPE)
5451 return false;
5453 /* If the vector of fields is growing too big, bail out early.
5454 Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
5455 sure this fails. */
5456 if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5457 return false;
5459 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5460 if (TREE_CODE (field) == FIELD_DECL)
5462 bool push = false;
5463 HOST_WIDE_INT foff = bitpos_of_field (field);
5464 tree field_type = TREE_TYPE (field);
5466 if (!var_can_have_subvars (field)
5467 || TREE_CODE (field_type) == QUAL_UNION_TYPE
5468 || TREE_CODE (field_type) == UNION_TYPE)
5469 push = true;
5470 else if (!push_fields_onto_fieldstack
5471 (field_type, fieldstack, offset + foff)
5472 && (DECL_SIZE (field)
5473 && !integer_zerop (DECL_SIZE (field))))
5474 /* Empty structures may have actual size, like in C++. So
5475 see if we didn't push any subfields and the size is
5476 nonzero, push the field onto the stack. */
5477 push = true;
5479 if (push)
5481 fieldoff_s *pair = NULL;
5482 bool has_unknown_size = false;
5483 bool must_have_pointers_p;
5485 if (!fieldstack->is_empty ())
5486 pair = &fieldstack->last ();
5488 /* If there isn't anything at offset zero, create sth. */
5489 if (!pair
5490 && offset + foff != 0)
5492 fieldoff_s e
5493 = {0, offset + foff, false, false, false, false, NULL_TREE};
5494 pair = fieldstack->safe_push (e);
5497 if (!DECL_SIZE (field)
5498 || !tree_fits_uhwi_p (DECL_SIZE (field)))
5499 has_unknown_size = true;
5501 /* If adjacent fields do not contain pointers merge them. */
5502 must_have_pointers_p = field_must_have_pointers (field);
5503 if (pair
5504 && !has_unknown_size
5505 && !must_have_pointers_p
5506 && !pair->must_have_pointers
5507 && !pair->has_unknown_size
5508 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5510 pair->size += tree_to_uhwi (DECL_SIZE (field));
5512 else
5514 fieldoff_s e;
5515 e.offset = offset + foff;
5516 e.has_unknown_size = has_unknown_size;
5517 if (!has_unknown_size)
5518 e.size = tree_to_uhwi (DECL_SIZE (field));
5519 else
5520 e.size = -1;
5521 e.must_have_pointers = must_have_pointers_p;
5522 e.may_have_pointers = true;
5523 e.only_restrict_pointers
5524 = (!has_unknown_size
5525 && POINTER_TYPE_P (field_type)
5526 && TYPE_RESTRICT (field_type));
5527 if (e.only_restrict_pointers)
5528 e.restrict_pointed_type = TREE_TYPE (field_type);
5529 fieldstack->safe_push (e);
5533 empty_p = false;
5536 return !empty_p;
5539 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5540 if it is a varargs function. */
5542 static unsigned int
5543 count_num_arguments (tree decl, bool *is_varargs)
5545 unsigned int num = 0;
5546 tree t;
5548 /* Capture named arguments for K&R functions. They do not
5549 have a prototype and thus no TYPE_ARG_TYPES. */
5550 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5551 ++num;
5553 /* Check if the function has variadic arguments. */
5554 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5555 if (TREE_VALUE (t) == void_type_node)
5556 break;
5557 if (!t)
5558 *is_varargs = true;
5560 return num;
5563 /* Creation function node for DECL, using NAME, and return the index
5564 of the variable we've created for the function. If NONLOCAL_p, create
5565 initial constraints. */
5567 static varinfo_t
5568 create_function_info_for (tree decl, const char *name, bool add_id,
5569 bool nonlocal_p)
5571 struct function *fn = DECL_STRUCT_FUNCTION (decl);
5572 varinfo_t vi, prev_vi;
5573 tree arg;
5574 unsigned int i;
5575 bool is_varargs = false;
5576 unsigned int num_args = count_num_arguments (decl, &is_varargs);
5578 /* Create the variable info. */
5580 vi = new_var_info (decl, name, add_id);
5581 vi->offset = 0;
5582 vi->size = 1;
5583 vi->fullsize = fi_parm_base + num_args;
5584 vi->is_fn_info = 1;
5585 vi->may_have_pointers = false;
5586 if (is_varargs)
5587 vi->fullsize = ~0;
5588 insert_vi_for_tree (vi->decl, vi);
5590 prev_vi = vi;
5592 /* Create a variable for things the function clobbers and one for
5593 things the function uses. */
5595 varinfo_t clobbervi, usevi;
5596 const char *newname;
5597 char *tempname;
5599 tempname = xasprintf ("%s.clobber", name);
5600 newname = ggc_strdup (tempname);
5601 free (tempname);
5603 clobbervi = new_var_info (NULL, newname, false);
5604 clobbervi->offset = fi_clobbers;
5605 clobbervi->size = 1;
5606 clobbervi->fullsize = vi->fullsize;
5607 clobbervi->is_full_var = true;
5608 clobbervi->is_global_var = false;
5610 gcc_assert (prev_vi->offset < clobbervi->offset);
5611 prev_vi->next = clobbervi->id;
5612 prev_vi = clobbervi;
5614 tempname = xasprintf ("%s.use", name);
5615 newname = ggc_strdup (tempname);
5616 free (tempname);
5618 usevi = new_var_info (NULL, newname, false);
5619 usevi->offset = fi_uses;
5620 usevi->size = 1;
5621 usevi->fullsize = vi->fullsize;
5622 usevi->is_full_var = true;
5623 usevi->is_global_var = false;
5625 gcc_assert (prev_vi->offset < usevi->offset);
5626 prev_vi->next = usevi->id;
5627 prev_vi = usevi;
5630 /* And one for the static chain. */
5631 if (fn->static_chain_decl != NULL_TREE)
5633 varinfo_t chainvi;
5634 const char *newname;
5635 char *tempname;
5637 tempname = xasprintf ("%s.chain", name);
5638 newname = ggc_strdup (tempname);
5639 free (tempname);
5641 chainvi = new_var_info (fn->static_chain_decl, newname, false);
5642 chainvi->offset = fi_static_chain;
5643 chainvi->size = 1;
5644 chainvi->fullsize = vi->fullsize;
5645 chainvi->is_full_var = true;
5646 chainvi->is_global_var = false;
5648 insert_vi_for_tree (fn->static_chain_decl, chainvi);
5650 if (nonlocal_p
5651 && chainvi->may_have_pointers)
5652 make_constraint_from (chainvi, nonlocal_id);
5654 gcc_assert (prev_vi->offset < chainvi->offset);
5655 prev_vi->next = chainvi->id;
5656 prev_vi = chainvi;
5659 /* Create a variable for the return var. */
5660 if (DECL_RESULT (decl) != NULL
5661 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
5663 varinfo_t resultvi;
5664 const char *newname;
5665 char *tempname;
5666 tree resultdecl = decl;
5668 if (DECL_RESULT (decl))
5669 resultdecl = DECL_RESULT (decl);
5671 tempname = xasprintf ("%s.result", name);
5672 newname = ggc_strdup (tempname);
5673 free (tempname);
5675 resultvi = new_var_info (resultdecl, newname, false);
5676 resultvi->offset = fi_result;
5677 resultvi->size = 1;
5678 resultvi->fullsize = vi->fullsize;
5679 resultvi->is_full_var = true;
5680 if (DECL_RESULT (decl))
5681 resultvi->may_have_pointers = true;
5683 if (DECL_RESULT (decl))
5684 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
5686 if (nonlocal_p
5687 && DECL_RESULT (decl)
5688 && DECL_BY_REFERENCE (DECL_RESULT (decl)))
5689 make_constraint_from (resultvi, nonlocal_id);
5691 gcc_assert (prev_vi->offset < resultvi->offset);
5692 prev_vi->next = resultvi->id;
5693 prev_vi = resultvi;
5696 /* We also need to make function return values escape. Nothing
5697 escapes by returning from main though. */
5698 if (nonlocal_p
5699 && !MAIN_NAME_P (DECL_NAME (decl)))
5701 varinfo_t fi, rvi;
5702 fi = lookup_vi_for_tree (decl);
5703 rvi = first_vi_for_offset (fi, fi_result);
5704 if (rvi && rvi->offset == fi_result)
5705 make_copy_constraint (get_varinfo (escaped_id), rvi->id);
5708 /* Set up variables for each argument. */
5709 arg = DECL_ARGUMENTS (decl);
5710 for (i = 0; i < num_args; i++)
5712 varinfo_t argvi;
5713 const char *newname;
5714 char *tempname;
5715 tree argdecl = decl;
5717 if (arg)
5718 argdecl = arg;
5720 tempname = xasprintf ("%s.arg%d", name, i);
5721 newname = ggc_strdup (tempname);
5722 free (tempname);
5724 argvi = new_var_info (argdecl, newname, false);
5725 argvi->offset = fi_parm_base + i;
5726 argvi->size = 1;
5727 argvi->is_full_var = true;
5728 argvi->fullsize = vi->fullsize;
5729 if (arg)
5730 argvi->may_have_pointers = true;
5732 if (arg)
5733 insert_vi_for_tree (arg, argvi);
5735 if (nonlocal_p
5736 && argvi->may_have_pointers)
5737 make_constraint_from (argvi, nonlocal_id);
5739 gcc_assert (prev_vi->offset < argvi->offset);
5740 prev_vi->next = argvi->id;
5741 prev_vi = argvi;
5742 if (arg)
5743 arg = DECL_CHAIN (arg);
5746 /* Add one representative for all further args. */
5747 if (is_varargs)
5749 varinfo_t argvi;
5750 const char *newname;
5751 char *tempname;
5752 tree decl;
5754 tempname = xasprintf ("%s.varargs", name);
5755 newname = ggc_strdup (tempname);
5756 free (tempname);
5758 /* We need sth that can be pointed to for va_start. */
5759 decl = build_fake_var_decl (ptr_type_node);
5761 argvi = new_var_info (decl, newname, false);
5762 argvi->offset = fi_parm_base + num_args;
5763 argvi->size = ~0;
5764 argvi->is_full_var = true;
5765 argvi->is_heap_var = true;
5766 argvi->fullsize = vi->fullsize;
5768 if (nonlocal_p
5769 && argvi->may_have_pointers)
5770 make_constraint_from (argvi, nonlocal_id);
5772 gcc_assert (prev_vi->offset < argvi->offset);
5773 prev_vi->next = argvi->id;
5774 prev_vi = argvi;
5777 return vi;
5781 /* Return true if FIELDSTACK contains fields that overlap.
5782 FIELDSTACK is assumed to be sorted by offset. */
5784 static bool
5785 check_for_overlaps (vec<fieldoff_s> fieldstack)
5787 fieldoff_s *fo = NULL;
5788 unsigned int i;
5789 HOST_WIDE_INT lastoffset = -1;
5791 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5793 if (fo->offset == lastoffset)
5794 return true;
5795 lastoffset = fo->offset;
5797 return false;
5800 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5801 This will also create any varinfo structures necessary for fields
5802 of DECL. DECL is a function parameter if HANDLE_PARAM is set.
5803 HANDLED_STRUCT_TYPE is used to register struct types reached by following
5804 restrict pointers. This is needed to prevent infinite recursion. */
5806 static varinfo_t
5807 create_variable_info_for_1 (tree decl, const char *name, bool add_id,
5808 bool handle_param, bitmap handled_struct_type)
5810 varinfo_t vi, newvi;
5811 tree decl_type = TREE_TYPE (decl);
5812 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5813 auto_vec<fieldoff_s> fieldstack;
5814 fieldoff_s *fo;
5815 unsigned int i;
5817 if (!declsize
5818 || !tree_fits_uhwi_p (declsize))
5820 vi = new_var_info (decl, name, add_id);
5821 vi->offset = 0;
5822 vi->size = ~0;
5823 vi->fullsize = ~0;
5824 vi->is_unknown_size_var = true;
5825 vi->is_full_var = true;
5826 vi->may_have_pointers = true;
5827 return vi;
5830 /* Collect field information. */
5831 if (use_field_sensitive
5832 && var_can_have_subvars (decl)
5833 /* ??? Force us to not use subfields for globals in IPA mode.
5834 Else we'd have to parse arbitrary initializers. */
5835 && !(in_ipa_mode
5836 && is_global_var (decl)))
5838 fieldoff_s *fo = NULL;
5839 bool notokay = false;
5840 unsigned int i;
5842 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
5844 for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
5845 if (fo->has_unknown_size
5846 || fo->offset < 0)
5848 notokay = true;
5849 break;
5852 /* We can't sort them if we have a field with a variable sized type,
5853 which will make notokay = true. In that case, we are going to return
5854 without creating varinfos for the fields anyway, so sorting them is a
5855 waste to boot. */
5856 if (!notokay)
5858 sort_fieldstack (fieldstack);
5859 /* Due to some C++ FE issues, like PR 22488, we might end up
5860 what appear to be overlapping fields even though they,
5861 in reality, do not overlap. Until the C++ FE is fixed,
5862 we will simply disable field-sensitivity for these cases. */
5863 notokay = check_for_overlaps (fieldstack);
5866 if (notokay)
5867 fieldstack.release ();
5870 /* If we didn't end up collecting sub-variables create a full
5871 variable for the decl. */
5872 if (fieldstack.length () == 0
5873 || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5875 vi = new_var_info (decl, name, add_id);
5876 vi->offset = 0;
5877 vi->may_have_pointers = true;
5878 vi->fullsize = tree_to_uhwi (declsize);
5879 vi->size = vi->fullsize;
5880 vi->is_full_var = true;
5881 if (POINTER_TYPE_P (decl_type)
5882 && TYPE_RESTRICT (decl_type))
5883 vi->only_restrict_pointers = 1;
5884 if (vi->only_restrict_pointers
5885 && !type_contains_placeholder_p (TREE_TYPE (decl_type))
5886 && handle_param
5887 && !bitmap_bit_p (handled_struct_type,
5888 TYPE_UID (TREE_TYPE (decl_type))))
5890 varinfo_t rvi;
5891 tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type));
5892 DECL_EXTERNAL (heapvar) = 1;
5893 if (var_can_have_subvars (heapvar))
5894 bitmap_set_bit (handled_struct_type,
5895 TYPE_UID (TREE_TYPE (decl_type)));
5896 rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
5897 true, handled_struct_type);
5898 if (var_can_have_subvars (heapvar))
5899 bitmap_clear_bit (handled_struct_type,
5900 TYPE_UID (TREE_TYPE (decl_type)));
5901 rvi->is_restrict_var = 1;
5902 insert_vi_for_tree (heapvar, rvi);
5903 make_constraint_from (vi, rvi->id);
5904 make_param_constraints (rvi);
5906 fieldstack.release ();
5907 return vi;
5910 vi = new_var_info (decl, name, add_id);
5911 vi->fullsize = tree_to_uhwi (declsize);
5912 if (fieldstack.length () == 1)
5913 vi->is_full_var = true;
5914 for (i = 0, newvi = vi;
5915 fieldstack.iterate (i, &fo);
5916 ++i, newvi = vi_next (newvi))
5918 const char *newname = NULL;
5919 char *tempname;
5921 if (dump_file)
5923 if (fieldstack.length () != 1)
5925 tempname
5926 = xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
5927 "+" HOST_WIDE_INT_PRINT_DEC, name,
5928 fo->offset, fo->size);
5929 newname = ggc_strdup (tempname);
5930 free (tempname);
5933 else
5934 newname = "NULL";
5936 if (newname)
5937 newvi->name = newname;
5938 newvi->offset = fo->offset;
5939 newvi->size = fo->size;
5940 newvi->fullsize = vi->fullsize;
5941 newvi->may_have_pointers = fo->may_have_pointers;
5942 newvi->only_restrict_pointers = fo->only_restrict_pointers;
5943 if (handle_param
5944 && newvi->only_restrict_pointers
5945 && !type_contains_placeholder_p (fo->restrict_pointed_type)
5946 && !bitmap_bit_p (handled_struct_type,
5947 TYPE_UID (fo->restrict_pointed_type)))
5949 varinfo_t rvi;
5950 tree heapvar = build_fake_var_decl (fo->restrict_pointed_type);
5951 DECL_EXTERNAL (heapvar) = 1;
5952 if (var_can_have_subvars (heapvar))
5953 bitmap_set_bit (handled_struct_type,
5954 TYPE_UID (fo->restrict_pointed_type));
5955 rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
5956 true, handled_struct_type);
5957 if (var_can_have_subvars (heapvar))
5958 bitmap_clear_bit (handled_struct_type,
5959 TYPE_UID (fo->restrict_pointed_type));
5960 rvi->is_restrict_var = 1;
5961 insert_vi_for_tree (heapvar, rvi);
5962 make_constraint_from (newvi, rvi->id);
5963 make_param_constraints (rvi);
5965 if (i + 1 < fieldstack.length ())
5967 varinfo_t tem = new_var_info (decl, name, false);
5968 newvi->next = tem->id;
5969 tem->head = vi->id;
5973 return vi;
5976 static unsigned int
5977 create_variable_info_for (tree decl, const char *name, bool add_id)
5979 varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, false, NULL);
5980 unsigned int id = vi->id;
5982 insert_vi_for_tree (decl, vi);
5984 if (TREE_CODE (decl) != VAR_DECL)
5985 return id;
5987 /* Create initial constraints for globals. */
5988 for (; vi; vi = vi_next (vi))
5990 if (!vi->may_have_pointers
5991 || !vi->is_global_var)
5992 continue;
5994 /* Mark global restrict qualified pointers. */
5995 if ((POINTER_TYPE_P (TREE_TYPE (decl))
5996 && TYPE_RESTRICT (TREE_TYPE (decl)))
5997 || vi->only_restrict_pointers)
5999 varinfo_t rvi
6000 = make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT",
6001 true);
6002 /* ??? For now exclude reads from globals as restrict sources
6003 if those are not (indirectly) from incoming parameters. */
6004 rvi->is_restrict_var = false;
6005 continue;
6008 /* In non-IPA mode the initializer from nonlocal is all we need. */
6009 if (!in_ipa_mode
6010 || DECL_HARD_REGISTER (decl))
6011 make_copy_constraint (vi, nonlocal_id);
6013 /* In IPA mode parse the initializer and generate proper constraints
6014 for it. */
6015 else
6017 varpool_node *vnode = varpool_node::get (decl);
6019 /* For escaped variables initialize them from nonlocal. */
6020 if (!vnode->all_refs_explicit_p ())
6021 make_copy_constraint (vi, nonlocal_id);
6023 /* If this is a global variable with an initializer and we are in
6024 IPA mode generate constraints for it. */
6025 ipa_ref *ref;
6026 for (unsigned idx = 0; vnode->iterate_reference (idx, ref); ++idx)
6028 auto_vec<ce_s> rhsc;
6029 struct constraint_expr lhs, *rhsp;
6030 unsigned i;
6031 get_constraint_for_address_of (ref->referred->decl, &rhsc);
6032 lhs.var = vi->id;
6033 lhs.offset = 0;
6034 lhs.type = SCALAR;
6035 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6036 process_constraint (new_constraint (lhs, *rhsp));
6037 /* If this is a variable that escapes from the unit
6038 the initializer escapes as well. */
6039 if (!vnode->all_refs_explicit_p ())
6041 lhs.var = escaped_id;
6042 lhs.offset = 0;
6043 lhs.type = SCALAR;
6044 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6045 process_constraint (new_constraint (lhs, *rhsp));
6051 return id;
6054 /* Print out the points-to solution for VAR to FILE. */
6056 static void
6057 dump_solution_for_var (FILE *file, unsigned int var)
6059 varinfo_t vi = get_varinfo (var);
6060 unsigned int i;
6061 bitmap_iterator bi;
6063 /* Dump the solution for unified vars anyway, this avoids difficulties
6064 in scanning dumps in the testsuite. */
6065 fprintf (file, "%s = { ", vi->name);
6066 vi = get_varinfo (find (var));
6067 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6068 fprintf (file, "%s ", get_varinfo (i)->name);
6069 fprintf (file, "}");
6071 /* But note when the variable was unified. */
6072 if (vi->id != var)
6073 fprintf (file, " same as %s", vi->name);
6075 fprintf (file, "\n");
6078 /* Print the points-to solution for VAR to stderr. */
6080 DEBUG_FUNCTION void
6081 debug_solution_for_var (unsigned int var)
6083 dump_solution_for_var (stderr, var);
6086 /* Register the constraints for function parameter related VI. */
6088 static void
6089 make_param_constraints (varinfo_t vi)
6091 for (; vi; vi = vi_next (vi))
6093 if (vi->only_restrict_pointers)
6095 else if (vi->may_have_pointers)
6096 make_constraint_from (vi, nonlocal_id);
6098 if (vi->is_full_var)
6099 break;
6103 /* Create varinfo structures for all of the variables in the
6104 function for intraprocedural mode. */
6106 static void
6107 intra_create_variable_infos (struct function *fn)
6109 tree t;
6110 bitmap handled_struct_type = NULL;
6112 /* For each incoming pointer argument arg, create the constraint ARG
6113 = NONLOCAL or a dummy variable if it is a restrict qualified
6114 passed-by-reference argument. */
6115 for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
6117 if (handled_struct_type == NULL)
6118 handled_struct_type = BITMAP_ALLOC (NULL);
6120 varinfo_t p
6121 = create_variable_info_for_1 (t, alias_get_name (t), false, true,
6122 handled_struct_type);
6123 insert_vi_for_tree (t, p);
6125 make_param_constraints (p);
6128 if (handled_struct_type != NULL)
6129 BITMAP_FREE (handled_struct_type);
6131 /* Add a constraint for a result decl that is passed by reference. */
6132 if (DECL_RESULT (fn->decl)
6133 && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
6135 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));
6137 for (p = result_vi; p; p = vi_next (p))
6138 make_constraint_from (p, nonlocal_id);
6141 /* Add a constraint for the incoming static chain parameter. */
6142 if (fn->static_chain_decl != NULL_TREE)
6144 varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);
6146 for (p = chain_vi; p; p = vi_next (p))
6147 make_constraint_from (p, nonlocal_id);
6151 /* Structure used to put solution bitmaps in a hashtable so they can
6152 be shared among variables with the same points-to set. */
6154 typedef struct shared_bitmap_info
6156 bitmap pt_vars;
6157 hashval_t hashcode;
6158 } *shared_bitmap_info_t;
6159 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
6161 /* Shared_bitmap hashtable helpers. */
6163 struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
6165 static inline hashval_t hash (const shared_bitmap_info *);
6166 static inline bool equal (const shared_bitmap_info *,
6167 const shared_bitmap_info *);
6170 /* Hash function for a shared_bitmap_info_t */
6172 inline hashval_t
6173 shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
6175 return bi->hashcode;
6178 /* Equality function for two shared_bitmap_info_t's. */
6180 inline bool
6181 shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
6182 const shared_bitmap_info *sbi2)
6184 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
6187 /* Shared_bitmap hashtable. */
6189 static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
6191 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
6192 existing instance if there is one, NULL otherwise. */
6194 static bitmap
6195 shared_bitmap_lookup (bitmap pt_vars)
6197 shared_bitmap_info **slot;
6198 struct shared_bitmap_info sbi;
6200 sbi.pt_vars = pt_vars;
6201 sbi.hashcode = bitmap_hash (pt_vars);
6203 slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
6204 if (!slot)
6205 return NULL;
6206 else
6207 return (*slot)->pt_vars;
6211 /* Add a bitmap to the shared bitmap hashtable. */
6213 static void
6214 shared_bitmap_add (bitmap pt_vars)
6216 shared_bitmap_info **slot;
6217 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
6219 sbi->pt_vars = pt_vars;
6220 sbi->hashcode = bitmap_hash (pt_vars);
6222 slot = shared_bitmap_table->find_slot (sbi, INSERT);
6223 gcc_assert (!*slot);
6224 *slot = sbi;
6228 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
6230 static void
6231 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
6232 tree fndecl)
6234 unsigned int i;
6235 bitmap_iterator bi;
6236 varinfo_t escaped_vi = get_varinfo (find (escaped_id));
6237 bool everything_escaped
6238 = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);
6240 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
6242 varinfo_t vi = get_varinfo (i);
6244 /* The only artificial variables that are allowed in a may-alias
6245 set are heap variables. */
6246 if (vi->is_artificial_var && !vi->is_heap_var)
6247 continue;
6249 if (everything_escaped
6250 || (escaped_vi->solution
6251 && bitmap_bit_p (escaped_vi->solution, i)))
6253 pt->vars_contains_escaped = true;
6254 pt->vars_contains_escaped_heap = vi->is_heap_var;
6257 if (vi->is_restrict_var)
6258 pt->vars_contains_restrict = true;
6260 if (TREE_CODE (vi->decl) == VAR_DECL
6261 || TREE_CODE (vi->decl) == PARM_DECL
6262 || TREE_CODE (vi->decl) == RESULT_DECL)
6264 /* If we are in IPA mode we will not recompute points-to
6265 sets after inlining so make sure they stay valid. */
6266 if (in_ipa_mode
6267 && !DECL_PT_UID_SET_P (vi->decl))
6268 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
6270 /* Add the decl to the points-to set. Note that the points-to
6271 set contains global variables. */
6272 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
6273 if (vi->is_global_var
6274 /* In IPA mode the escaped_heap trick doesn't work as
6275 ESCAPED is escaped from the unit but
6276 pt_solution_includes_global needs to answer true for
6277 all variables not automatic within a function.
6278 For the same reason is_global_var is not the
6279 correct flag to track - local variables from other
6280 functions also need to be considered global.
6281 Conveniently all HEAP vars are not put in function
6282 scope. */
6283 || (in_ipa_mode
6284 && fndecl
6285 && ! auto_var_in_fn_p (vi->decl, fndecl)))
6286 pt->vars_contains_nonlocal = true;
6289 else if (TREE_CODE (vi->decl) == FUNCTION_DECL
6290 || TREE_CODE (vi->decl) == LABEL_DECL)
6292 /* Nothing should read/write from/to code so we can
6293 save bits by not including them in the points-to bitmaps.
6294 Still mark the points-to set as containing global memory
6295 to make code-patching possible - see PR70128. */
6296 pt->vars_contains_nonlocal = true;
6302 /* Compute the points-to solution *PT for the variable VI. */
6304 static struct pt_solution
6305 find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
6307 unsigned int i;
6308 bitmap_iterator bi;
6309 bitmap finished_solution;
6310 bitmap result;
6311 varinfo_t vi;
6312 struct pt_solution *pt;
6314 /* This variable may have been collapsed, let's get the real
6315 variable. */
6316 vi = get_varinfo (find (orig_vi->id));
6318 /* See if we have already computed the solution and return it. */
6319 pt_solution **slot = &final_solutions->get_or_insert (vi);
6320 if (*slot != NULL)
6321 return **slot;
6323 *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
6324 memset (pt, 0, sizeof (struct pt_solution));
6326 /* Translate artificial variables into SSA_NAME_PTR_INFO
6327 attributes. */
6328 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6330 varinfo_t vi = get_varinfo (i);
6332 if (vi->is_artificial_var)
6334 if (vi->id == nothing_id)
6335 pt->null = 1;
6336 else if (vi->id == escaped_id)
6338 if (in_ipa_mode)
6339 pt->ipa_escaped = 1;
6340 else
6341 pt->escaped = 1;
6342 /* Expand some special vars of ESCAPED in-place here. */
6343 varinfo_t evi = get_varinfo (find (escaped_id));
6344 if (bitmap_bit_p (evi->solution, nonlocal_id))
6345 pt->nonlocal = 1;
6347 else if (vi->id == nonlocal_id)
6348 pt->nonlocal = 1;
6349 else if (vi->is_heap_var)
6350 /* We represent heapvars in the points-to set properly. */
6352 else if (vi->id == string_id)
6353 /* Nobody cares - STRING_CSTs are read-only entities. */
6355 else if (vi->id == anything_id
6356 || vi->id == integer_id)
6357 pt->anything = 1;
6361 /* Instead of doing extra work, simply do not create
6362 elaborate points-to information for pt_anything pointers. */
6363 if (pt->anything)
6364 return *pt;
6366 /* Share the final set of variables when possible. */
6367 finished_solution = BITMAP_GGC_ALLOC ();
6368 stats.points_to_sets_created++;
6370 set_uids_in_ptset (finished_solution, vi->solution, pt, fndecl);
6371 result = shared_bitmap_lookup (finished_solution);
6372 if (!result)
6374 shared_bitmap_add (finished_solution);
6375 pt->vars = finished_solution;
6377 else
6379 pt->vars = result;
6380 bitmap_clear (finished_solution);
6383 return *pt;
6386 /* Given a pointer variable P, fill in its points-to set. */
6388 static void
6389 find_what_p_points_to (tree fndecl, tree p)
6391 struct ptr_info_def *pi;
6392 tree lookup_p = p;
6393 varinfo_t vi;
6395 /* For parameters, get at the points-to set for the actual parm
6396 decl. */
6397 if (TREE_CODE (p) == SSA_NAME
6398 && SSA_NAME_IS_DEFAULT_DEF (p)
6399 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
6400 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
6401 lookup_p = SSA_NAME_VAR (p);
6403 vi = lookup_vi_for_tree (lookup_p);
6404 if (!vi)
6405 return;
6407 pi = get_ptr_info (p);
6408 pi->pt = find_what_var_points_to (fndecl, vi);
6412 /* Query statistics for points-to solutions. */
6414 static struct {
6415 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
6416 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
6417 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
6418 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
6419 } pta_stats;
6421 void
6422 dump_pta_stats (FILE *s)
6424 fprintf (s, "\nPTA query stats:\n");
6425 fprintf (s, " pt_solution_includes: "
6426 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6427 HOST_WIDE_INT_PRINT_DEC" queries\n",
6428 pta_stats.pt_solution_includes_no_alias,
6429 pta_stats.pt_solution_includes_no_alias
6430 + pta_stats.pt_solution_includes_may_alias);
6431 fprintf (s, " pt_solutions_intersect: "
6432 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6433 HOST_WIDE_INT_PRINT_DEC" queries\n",
6434 pta_stats.pt_solutions_intersect_no_alias,
6435 pta_stats.pt_solutions_intersect_no_alias
6436 + pta_stats.pt_solutions_intersect_may_alias);
6440 /* Reset the points-to solution *PT to a conservative default
6441 (point to anything). */
6443 void
6444 pt_solution_reset (struct pt_solution *pt)
6446 memset (pt, 0, sizeof (struct pt_solution));
6447 pt->anything = true;
6450 /* Set the points-to solution *PT to point only to the variables
6451 in VARS. VARS_CONTAINS_GLOBAL specifies whether that contains
6452 global variables and VARS_CONTAINS_RESTRICT specifies whether
6453 it contains restrict tag variables. */
6455 void
6456 pt_solution_set (struct pt_solution *pt, bitmap vars,
6457 bool vars_contains_nonlocal)
6459 memset (pt, 0, sizeof (struct pt_solution));
6460 pt->vars = vars;
6461 pt->vars_contains_nonlocal = vars_contains_nonlocal;
6462 pt->vars_contains_escaped
6463 = (cfun->gimple_df->escaped.anything
6464 || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
6467 /* Set the points-to solution *PT to point only to the variable VAR. */
6469 void
6470 pt_solution_set_var (struct pt_solution *pt, tree var)
6472 memset (pt, 0, sizeof (struct pt_solution));
6473 pt->vars = BITMAP_GGC_ALLOC ();
6474 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6475 pt->vars_contains_nonlocal = is_global_var (var);
6476 pt->vars_contains_escaped
6477 = (cfun->gimple_df->escaped.anything
6478 || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
6481 /* Computes the union of the points-to solutions *DEST and *SRC and
6482 stores the result in *DEST. This changes the points-to bitmap
6483 of *DEST and thus may not be used if that might be shared.
6484 The points-to bitmap of *SRC and *DEST will not be shared after
6485 this function if they were not before. */
6487 static void
6488 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6490 dest->anything |= src->anything;
6491 if (dest->anything)
6493 pt_solution_reset (dest);
6494 return;
6497 dest->nonlocal |= src->nonlocal;
6498 dest->escaped |= src->escaped;
6499 dest->ipa_escaped |= src->ipa_escaped;
6500 dest->null |= src->null;
6501 dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
6502 dest->vars_contains_escaped |= src->vars_contains_escaped;
6503 dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
6504 if (!src->vars)
6505 return;
6507 if (!dest->vars)
6508 dest->vars = BITMAP_GGC_ALLOC ();
6509 bitmap_ior_into (dest->vars, src->vars);
6512 /* Return true if the points-to solution *PT is empty. */
6514 bool
6515 pt_solution_empty_p (struct pt_solution *pt)
6517 if (pt->anything
6518 || pt->nonlocal)
6519 return false;
6521 if (pt->vars
6522 && !bitmap_empty_p (pt->vars))
6523 return false;
6525 /* If the solution includes ESCAPED, check if that is empty. */
6526 if (pt->escaped
6527 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6528 return false;
6530 /* If the solution includes ESCAPED, check if that is empty. */
6531 if (pt->ipa_escaped
6532 && !pt_solution_empty_p (&ipa_escaped_pt))
6533 return false;
6535 return true;
6538 /* Return true if the points-to solution *PT only point to a single var, and
6539 return the var uid in *UID. */
6541 bool
6542 pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
6544 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6545 || pt->null || pt->vars == NULL
6546 || !bitmap_single_bit_set_p (pt->vars))
6547 return false;
6549 *uid = bitmap_first_set_bit (pt->vars);
6550 return true;
6553 /* Return true if the points-to solution *PT includes global memory. */
6555 bool
6556 pt_solution_includes_global (struct pt_solution *pt)
6558 if (pt->anything
6559 || pt->nonlocal
6560 || pt->vars_contains_nonlocal
6561 /* The following is a hack to make the malloc escape hack work.
6562 In reality we'd need different sets for escaped-through-return
6563 and escaped-to-callees and passes would need to be updated. */
6564 || pt->vars_contains_escaped_heap)
6565 return true;
6567 /* 'escaped' is also a placeholder so we have to look into it. */
6568 if (pt->escaped)
6569 return pt_solution_includes_global (&cfun->gimple_df->escaped);
6571 if (pt->ipa_escaped)
6572 return pt_solution_includes_global (&ipa_escaped_pt);
6574 return false;
6577 /* Return true if the points-to solution *PT includes the variable
6578 declaration DECL. */
6580 static bool
6581 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
6583 if (pt->anything)
6584 return true;
6586 if (pt->nonlocal
6587 && is_global_var (decl))
6588 return true;
6590 if (pt->vars
6591 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
6592 return true;
6594 /* If the solution includes ESCAPED, check it. */
6595 if (pt->escaped
6596 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
6597 return true;
6599 /* If the solution includes ESCAPED, check it. */
6600 if (pt->ipa_escaped
6601 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
6602 return true;
6604 return false;
6607 bool
6608 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6610 bool res = pt_solution_includes_1 (pt, decl);
6611 if (res)
6612 ++pta_stats.pt_solution_includes_may_alias;
6613 else
6614 ++pta_stats.pt_solution_includes_no_alias;
6615 return res;
6618 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
6619 intersection. */
6621 static bool
6622 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
6624 if (pt1->anything || pt2->anything)
6625 return true;
6627 /* If either points to unknown global memory and the other points to
6628 any global memory they alias. */
6629 if ((pt1->nonlocal
6630 && (pt2->nonlocal
6631 || pt2->vars_contains_nonlocal))
6632 || (pt2->nonlocal
6633 && pt1->vars_contains_nonlocal))
6634 return true;
6636 /* If either points to all escaped memory and the other points to
6637 any escaped memory they alias. */
6638 if ((pt1->escaped
6639 && (pt2->escaped
6640 || pt2->vars_contains_escaped))
6641 || (pt2->escaped
6642 && pt1->vars_contains_escaped))
6643 return true;
6645 /* Check the escaped solution if required.
6646 ??? Do we need to check the local against the IPA escaped sets? */
6647 if ((pt1->ipa_escaped || pt2->ipa_escaped)
6648 && !pt_solution_empty_p (&ipa_escaped_pt))
6650 /* If both point to escaped memory and that solution
6651 is not empty they alias. */
6652 if (pt1->ipa_escaped && pt2->ipa_escaped)
6653 return true;
6655 /* If either points to escaped memory see if the escaped solution
6656 intersects with the other. */
6657 if ((pt1->ipa_escaped
6658 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
6659 || (pt2->ipa_escaped
6660 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
6661 return true;
6664 /* Now both pointers alias if their points-to solution intersects. */
6665 return (pt1->vars
6666 && pt2->vars
6667 && bitmap_intersect_p (pt1->vars, pt2->vars));
6670 bool
6671 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6673 bool res = pt_solutions_intersect_1 (pt1, pt2);
6674 if (res)
6675 ++pta_stats.pt_solutions_intersect_may_alias;
6676 else
6677 ++pta_stats.pt_solutions_intersect_no_alias;
6678 return res;
6682 /* Dump points-to information to OUTFILE. */
6684 static void
6685 dump_sa_points_to_info (FILE *outfile)
6687 unsigned int i;
6689 fprintf (outfile, "\nPoints-to sets\n\n");
6691 if (dump_flags & TDF_STATS)
6693 fprintf (outfile, "Stats:\n");
6694 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6695 fprintf (outfile, "Non-pointer vars: %d\n",
6696 stats.nonpointer_vars);
6697 fprintf (outfile, "Statically unified vars: %d\n",
6698 stats.unified_vars_static);
6699 fprintf (outfile, "Dynamically unified vars: %d\n",
6700 stats.unified_vars_dynamic);
6701 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6702 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6703 fprintf (outfile, "Number of implicit edges: %d\n",
6704 stats.num_implicit_edges);
6707 for (i = 1; i < varmap.length (); i++)
6709 varinfo_t vi = get_varinfo (i);
6710 if (!vi->may_have_pointers)
6711 continue;
6712 dump_solution_for_var (outfile, i);
6717 /* Debug points-to information to stderr. */
6719 DEBUG_FUNCTION void
6720 debug_sa_points_to_info (void)
6722 dump_sa_points_to_info (stderr);
6726 /* Initialize the always-existing constraint variables for NULL
6727 ANYTHING, READONLY, and INTEGER */
6729 static void
6730 init_base_vars (void)
6732 struct constraint_expr lhs, rhs;
6733 varinfo_t var_anything;
6734 varinfo_t var_nothing;
6735 varinfo_t var_string;
6736 varinfo_t var_escaped;
6737 varinfo_t var_nonlocal;
6738 varinfo_t var_storedanything;
6739 varinfo_t var_integer;
6741 /* Variable ID zero is reserved and should be NULL. */
6742 varmap.safe_push (NULL);
6744 /* Create the NULL variable, used to represent that a variable points
6745 to NULL. */
6746 var_nothing = new_var_info (NULL_TREE, "NULL", false);
6747 gcc_assert (var_nothing->id == nothing_id);
6748 var_nothing->is_artificial_var = 1;
6749 var_nothing->offset = 0;
6750 var_nothing->size = ~0;
6751 var_nothing->fullsize = ~0;
6752 var_nothing->is_special_var = 1;
6753 var_nothing->may_have_pointers = 0;
6754 var_nothing->is_global_var = 0;
6756 /* Create the ANYTHING variable, used to represent that a variable
6757 points to some unknown piece of memory. */
6758 var_anything = new_var_info (NULL_TREE, "ANYTHING", false);
6759 gcc_assert (var_anything->id == anything_id);
6760 var_anything->is_artificial_var = 1;
6761 var_anything->size = ~0;
6762 var_anything->offset = 0;
6763 var_anything->fullsize = ~0;
6764 var_anything->is_special_var = 1;
6766 /* Anything points to anything. This makes deref constraints just
6767 work in the presence of linked list and other p = *p type loops,
6768 by saying that *ANYTHING = ANYTHING. */
6769 lhs.type = SCALAR;
6770 lhs.var = anything_id;
6771 lhs.offset = 0;
6772 rhs.type = ADDRESSOF;
6773 rhs.var = anything_id;
6774 rhs.offset = 0;
6776 /* This specifically does not use process_constraint because
6777 process_constraint ignores all anything = anything constraints, since all
6778 but this one are redundant. */
6779 constraints.safe_push (new_constraint (lhs, rhs));
6781 /* Create the STRING variable, used to represent that a variable
6782 points to a string literal. String literals don't contain
6783 pointers so STRING doesn't point to anything. */
6784 var_string = new_var_info (NULL_TREE, "STRING", false);
6785 gcc_assert (var_string->id == string_id);
6786 var_string->is_artificial_var = 1;
6787 var_string->offset = 0;
6788 var_string->size = ~0;
6789 var_string->fullsize = ~0;
6790 var_string->is_special_var = 1;
6791 var_string->may_have_pointers = 0;
6793 /* Create the ESCAPED variable, used to represent the set of escaped
6794 memory. */
6795 var_escaped = new_var_info (NULL_TREE, "ESCAPED", false);
6796 gcc_assert (var_escaped->id == escaped_id);
6797 var_escaped->is_artificial_var = 1;
6798 var_escaped->offset = 0;
6799 var_escaped->size = ~0;
6800 var_escaped->fullsize = ~0;
6801 var_escaped->is_special_var = 0;
6803 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6804 memory. */
6805 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL", false);
6806 gcc_assert (var_nonlocal->id == nonlocal_id);
6807 var_nonlocal->is_artificial_var = 1;
6808 var_nonlocal->offset = 0;
6809 var_nonlocal->size = ~0;
6810 var_nonlocal->fullsize = ~0;
6811 var_nonlocal->is_special_var = 1;
6813 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6814 lhs.type = SCALAR;
6815 lhs.var = escaped_id;
6816 lhs.offset = 0;
6817 rhs.type = DEREF;
6818 rhs.var = escaped_id;
6819 rhs.offset = 0;
6820 process_constraint (new_constraint (lhs, rhs));
6822 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6823 whole variable escapes. */
6824 lhs.type = SCALAR;
6825 lhs.var = escaped_id;
6826 lhs.offset = 0;
6827 rhs.type = SCALAR;
6828 rhs.var = escaped_id;
6829 rhs.offset = UNKNOWN_OFFSET;
6830 process_constraint (new_constraint (lhs, rhs));
6832 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6833 everything pointed to by escaped points to what global memory can
6834 point to. */
6835 lhs.type = DEREF;
6836 lhs.var = escaped_id;
6837 lhs.offset = 0;
6838 rhs.type = SCALAR;
6839 rhs.var = nonlocal_id;
6840 rhs.offset = 0;
6841 process_constraint (new_constraint (lhs, rhs));
6843 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6844 global memory may point to global memory and escaped memory. */
6845 lhs.type = SCALAR;
6846 lhs.var = nonlocal_id;
6847 lhs.offset = 0;
6848 rhs.type = ADDRESSOF;
6849 rhs.var = nonlocal_id;
6850 rhs.offset = 0;
6851 process_constraint (new_constraint (lhs, rhs));
6852 rhs.type = ADDRESSOF;
6853 rhs.var = escaped_id;
6854 rhs.offset = 0;
6855 process_constraint (new_constraint (lhs, rhs));
6857 /* Create the STOREDANYTHING variable, used to represent the set of
6858 variables stored to *ANYTHING. */
6859 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING", false);
6860 gcc_assert (var_storedanything->id == storedanything_id);
6861 var_storedanything->is_artificial_var = 1;
6862 var_storedanything->offset = 0;
6863 var_storedanything->size = ~0;
6864 var_storedanything->fullsize = ~0;
6865 var_storedanything->is_special_var = 0;
6867 /* Create the INTEGER variable, used to represent that a variable points
6868 to what an INTEGER "points to". */
6869 var_integer = new_var_info (NULL_TREE, "INTEGER", false);
6870 gcc_assert (var_integer->id == integer_id);
6871 var_integer->is_artificial_var = 1;
6872 var_integer->size = ~0;
6873 var_integer->fullsize = ~0;
6874 var_integer->offset = 0;
6875 var_integer->is_special_var = 1;
6877 /* INTEGER = ANYTHING, because we don't know where a dereference of
6878 a random integer will point to. */
6879 lhs.type = SCALAR;
6880 lhs.var = integer_id;
6881 lhs.offset = 0;
6882 rhs.type = ADDRESSOF;
6883 rhs.var = anything_id;
6884 rhs.offset = 0;
6885 process_constraint (new_constraint (lhs, rhs));
6888 /* Initialize things necessary to perform PTA */
6890 static void
6891 init_alias_vars (void)
6893 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
6895 bitmap_obstack_initialize (&pta_obstack);
6896 bitmap_obstack_initialize (&oldpta_obstack);
6897 bitmap_obstack_initialize (&predbitmap_obstack);
6899 constraints.create (8);
6900 varmap.create (8);
6901 vi_for_tree = new hash_map<tree, varinfo_t>;
6902 call_stmt_vars = new hash_map<gimple *, varinfo_t>;
6904 memset (&stats, 0, sizeof (stats));
6905 shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
6906 init_base_vars ();
6908 gcc_obstack_init (&fake_var_decl_obstack);
6910 final_solutions = new hash_map<varinfo_t, pt_solution *>;
6911 gcc_obstack_init (&final_solutions_obstack);
6914 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6915 predecessor edges. */
6917 static void
6918 remove_preds_and_fake_succs (constraint_graph_t graph)
6920 unsigned int i;
6922 /* Clear the implicit ref and address nodes from the successor
6923 lists. */
6924 for (i = 1; i < FIRST_REF_NODE; i++)
6926 if (graph->succs[i])
6927 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6928 FIRST_REF_NODE * 2);
6931 /* Free the successor list for the non-ref nodes. */
6932 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
6934 if (graph->succs[i])
6935 BITMAP_FREE (graph->succs[i]);
6938 /* Now reallocate the size of the successor list as, and blow away
6939 the predecessor bitmaps. */
6940 graph->size = varmap.length ();
6941 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6943 free (graph->implicit_preds);
6944 graph->implicit_preds = NULL;
6945 free (graph->preds);
6946 graph->preds = NULL;
6947 bitmap_obstack_release (&predbitmap_obstack);
6950 /* Solve the constraint set. */
6952 static void
6953 solve_constraints (void)
6955 struct scc_info *si;
6957 if (dump_file)
6958 fprintf (dump_file,
6959 "\nCollapsing static cycles and doing variable "
6960 "substitution\n");
6962 init_graph (varmap.length () * 2);
6964 if (dump_file)
6965 fprintf (dump_file, "Building predecessor graph\n");
6966 build_pred_graph ();
6968 if (dump_file)
6969 fprintf (dump_file, "Detecting pointer and location "
6970 "equivalences\n");
6971 si = perform_var_substitution (graph);
6973 if (dump_file)
6974 fprintf (dump_file, "Rewriting constraints and unifying "
6975 "variables\n");
6976 rewrite_constraints (graph, si);
6978 build_succ_graph ();
6980 free_var_substitution_info (si);
6982 /* Attach complex constraints to graph nodes. */
6983 move_complex_constraints (graph);
6985 if (dump_file)
6986 fprintf (dump_file, "Uniting pointer but not location equivalent "
6987 "variables\n");
6988 unite_pointer_equivalences (graph);
6990 if (dump_file)
6991 fprintf (dump_file, "Finding indirect cycles\n");
6992 find_indirect_cycles (graph);
6994 /* Implicit nodes and predecessors are no longer necessary at this
6995 point. */
6996 remove_preds_and_fake_succs (graph);
6998 if (dump_file && (dump_flags & TDF_GRAPH))
7000 fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
7001 "in dot format:\n");
7002 dump_constraint_graph (dump_file);
7003 fprintf (dump_file, "\n\n");
7006 if (dump_file)
7007 fprintf (dump_file, "Solving graph\n");
7009 solve_graph (graph);
7011 if (dump_file && (dump_flags & TDF_GRAPH))
7013 fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
7014 "in dot format:\n");
7015 dump_constraint_graph (dump_file);
7016 fprintf (dump_file, "\n\n");
7019 if (dump_file)
7020 dump_sa_points_to_info (dump_file);
7023 /* Create points-to sets for the current function. See the comments
7024 at the start of the file for an algorithmic overview. */
7026 static void
7027 compute_points_to_sets (void)
7029 basic_block bb;
7030 unsigned i;
7031 varinfo_t vi;
7033 timevar_push (TV_TREE_PTA);
7035 init_alias_vars ();
7037 intra_create_variable_infos (cfun);
7039 /* Now walk all statements and build the constraint set. */
7040 FOR_EACH_BB_FN (bb, cfun)
7042 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7043 gsi_next (&gsi))
7045 gphi *phi = gsi.phi ();
7047 if (! virtual_operand_p (gimple_phi_result (phi)))
7048 find_func_aliases (cfun, phi);
7051 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
7052 gsi_next (&gsi))
7054 gimple *stmt = gsi_stmt (gsi);
7056 find_func_aliases (cfun, stmt);
7060 if (dump_file)
7062 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
7063 dump_constraints (dump_file, 0);
7066 /* From the constraints compute the points-to sets. */
7067 solve_constraints ();
7069 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
7070 cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl,
7071 get_varinfo (escaped_id));
7073 /* Make sure the ESCAPED solution (which is used as placeholder in
7074 other solutions) does not reference itself. This simplifies
7075 points-to solution queries. */
7076 cfun->gimple_df->escaped.escaped = 0;
7078 /* Compute the points-to sets for pointer SSA_NAMEs. */
7079 for (i = 0; i < num_ssa_names; ++i)
7081 tree ptr = ssa_name (i);
7082 if (ptr
7083 && POINTER_TYPE_P (TREE_TYPE (ptr)))
7084 find_what_p_points_to (cfun->decl, ptr);
7087 /* Compute the call-used/clobbered sets. */
7088 FOR_EACH_BB_FN (bb, cfun)
7090 gimple_stmt_iterator gsi;
7092 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7094 gcall *stmt;
7095 struct pt_solution *pt;
7097 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
7098 if (!stmt)
7099 continue;
7101 pt = gimple_call_use_set (stmt);
7102 if (gimple_call_flags (stmt) & ECF_CONST)
7103 memset (pt, 0, sizeof (struct pt_solution));
7104 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7106 *pt = find_what_var_points_to (cfun->decl, vi);
7107 /* Escaped (and thus nonlocal) variables are always
7108 implicitly used by calls. */
7109 /* ??? ESCAPED can be empty even though NONLOCAL
7110 always escaped. */
7111 pt->nonlocal = 1;
7112 pt->escaped = 1;
7114 else
7116 /* If there is nothing special about this call then
7117 we have made everything that is used also escape. */
7118 *pt = cfun->gimple_df->escaped;
7119 pt->nonlocal = 1;
7122 pt = gimple_call_clobber_set (stmt);
7123 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7124 memset (pt, 0, sizeof (struct pt_solution));
7125 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7127 *pt = find_what_var_points_to (cfun->decl, vi);
7128 /* Escaped (and thus nonlocal) variables are always
7129 implicitly clobbered by calls. */
7130 /* ??? ESCAPED can be empty even though NONLOCAL
7131 always escaped. */
7132 pt->nonlocal = 1;
7133 pt->escaped = 1;
7135 else
7137 /* If there is nothing special about this call then
7138 we have made everything that is used also escape. */
7139 *pt = cfun->gimple_df->escaped;
7140 pt->nonlocal = 1;
7145 timevar_pop (TV_TREE_PTA);
7149 /* Delete created points-to sets. */
7151 static void
7152 delete_points_to_sets (void)
7154 unsigned int i;
7156 delete shared_bitmap_table;
7157 shared_bitmap_table = NULL;
7158 if (dump_file && (dump_flags & TDF_STATS))
7159 fprintf (dump_file, "Points to sets created:%d\n",
7160 stats.points_to_sets_created);
7162 delete vi_for_tree;
7163 delete call_stmt_vars;
7164 bitmap_obstack_release (&pta_obstack);
7165 constraints.release ();
7167 for (i = 0; i < graph->size; i++)
7168 graph->complex[i].release ();
7169 free (graph->complex);
7171 free (graph->rep);
7172 free (graph->succs);
7173 free (graph->pe);
7174 free (graph->pe_rep);
7175 free (graph->indirect_cycles);
7176 free (graph);
7178 varmap.release ();
7179 variable_info_pool.release ();
7180 constraint_pool.release ();
7182 obstack_free (&fake_var_decl_obstack, NULL);
7184 delete final_solutions;
7185 obstack_free (&final_solutions_obstack, NULL);
7188 struct vls_data
7190 unsigned short clique;
7191 bitmap rvars;
7194 /* Mark "other" loads and stores as belonging to CLIQUE and with
7195 base zero. */
7197 static bool
7198 visit_loadstore (gimple *, tree base, tree ref, void *data)
7200 unsigned short clique = ((vls_data *) data)->clique;
7201 bitmap rvars = ((vls_data *) data)->rvars;
7202 if (TREE_CODE (base) == MEM_REF
7203 || TREE_CODE (base) == TARGET_MEM_REF)
7205 tree ptr = TREE_OPERAND (base, 0);
7206 if (TREE_CODE (ptr) == SSA_NAME
7207 && ! SSA_NAME_IS_DEFAULT_DEF (ptr))
7209 /* We need to make sure 'ptr' doesn't include any of
7210 the restrict tags we added bases for in its points-to set. */
7211 varinfo_t vi = lookup_vi_for_tree (ptr);
7212 if (! vi)
7213 return false;
7215 vi = get_varinfo (find (vi->id));
7216 if (bitmap_intersect_p (rvars, vi->solution))
7217 return false;
7220 /* Do not overwrite existing cliques (that includes clique, base
7221 pairs we just set). */
7222 if (MR_DEPENDENCE_CLIQUE (base) == 0)
7224 MR_DEPENDENCE_CLIQUE (base) = clique;
7225 MR_DEPENDENCE_BASE (base) = 0;
7229 /* For plain decl accesses see whether they are accesses to globals
7230 and rewrite them to MEM_REFs with { clique, 0 }. */
7231 if (TREE_CODE (base) == VAR_DECL
7232 && is_global_var (base)
7233 /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
7234 ops callback. */
7235 && base != ref)
7237 tree *basep = &ref;
7238 while (handled_component_p (*basep))
7239 basep = &TREE_OPERAND (*basep, 0);
7240 gcc_assert (TREE_CODE (*basep) == VAR_DECL);
7241 tree ptr = build_fold_addr_expr (*basep);
7242 tree zero = build_int_cst (TREE_TYPE (ptr), 0);
7243 *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
7244 MR_DEPENDENCE_CLIQUE (*basep) = clique;
7245 MR_DEPENDENCE_BASE (*basep) = 0;
7248 return false;
7251 /* If REF is a MEM_REF then assign a clique, base pair to it, updating
7252 CLIQUE, *RESTRICT_VAR and LAST_RUID. Return whether dependence info
7253 was assigned to REF. */
7255 static bool
7256 maybe_set_dependence_info (tree ref, tree ptr,
7257 unsigned short &clique, varinfo_t restrict_var,
7258 unsigned short &last_ruid)
7260 while (handled_component_p (ref))
7261 ref = TREE_OPERAND (ref, 0);
7262 if ((TREE_CODE (ref) == MEM_REF
7263 || TREE_CODE (ref) == TARGET_MEM_REF)
7264 && TREE_OPERAND (ref, 0) == ptr)
7266 /* Do not overwrite existing cliques. This avoids overwriting dependence
7267 info inlined from a function with restrict parameters inlined
7268 into a function with restrict parameters. This usually means we
7269 prefer to be precise in innermost loops. */
7270 if (MR_DEPENDENCE_CLIQUE (ref) == 0)
7272 if (clique == 0)
7273 clique = ++cfun->last_clique;
7274 if (restrict_var->ruid == 0)
7275 restrict_var->ruid = ++last_ruid;
7276 MR_DEPENDENCE_CLIQUE (ref) = clique;
7277 MR_DEPENDENCE_BASE (ref) = restrict_var->ruid;
7278 return true;
7281 return false;
7284 /* Compute the set of independend memory references based on restrict
7285 tags and their conservative propagation to the points-to sets. */
7287 static void
7288 compute_dependence_clique (void)
7290 unsigned short clique = 0;
7291 unsigned short last_ruid = 0;
7292 bitmap rvars = BITMAP_ALLOC (NULL);
7293 for (unsigned i = 0; i < num_ssa_names; ++i)
7295 tree ptr = ssa_name (i);
7296 if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
7297 continue;
7299 /* Avoid all this when ptr is not dereferenced? */
7300 tree p = ptr;
7301 if (SSA_NAME_IS_DEFAULT_DEF (ptr)
7302 && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
7303 || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
7304 p = SSA_NAME_VAR (ptr);
7305 varinfo_t vi = lookup_vi_for_tree (p);
7306 if (!vi)
7307 continue;
7308 vi = get_varinfo (find (vi->id));
7309 bitmap_iterator bi;
7310 unsigned j;
7311 varinfo_t restrict_var = NULL;
7312 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
7314 varinfo_t oi = get_varinfo (j);
7315 if (oi->is_restrict_var)
7317 if (restrict_var)
7319 if (dump_file && (dump_flags & TDF_DETAILS))
7321 fprintf (dump_file, "found restrict pointed-to "
7322 "for ");
7323 print_generic_expr (dump_file, ptr, 0);
7324 fprintf (dump_file, " but not exclusively\n");
7326 restrict_var = NULL;
7327 break;
7329 restrict_var = oi;
7331 /* NULL is the only other valid points-to entry. */
7332 else if (oi->id != nothing_id)
7334 restrict_var = NULL;
7335 break;
7338 /* Ok, found that ptr must(!) point to a single(!) restrict
7339 variable. */
7340 /* ??? PTA isn't really a proper propagation engine to compute
7341 this property.
7342 ??? We could handle merging of two restricts by unifying them. */
7343 if (restrict_var)
7345 /* Now look at possible dereferences of ptr. */
7346 imm_use_iterator ui;
7347 gimple *use_stmt;
7348 bool used = false;
7349 FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
7351 /* ??? Calls and asms. */
7352 if (!gimple_assign_single_p (use_stmt))
7353 continue;
7354 used |= maybe_set_dependence_info (gimple_assign_lhs (use_stmt),
7355 ptr, clique, restrict_var,
7356 last_ruid);
7357 used |= maybe_set_dependence_info (gimple_assign_rhs1 (use_stmt),
7358 ptr, clique, restrict_var,
7359 last_ruid);
7361 if (used)
7362 bitmap_set_bit (rvars, restrict_var->id);
7366 if (clique != 0)
7368 /* Assign the BASE id zero to all accesses not based on a restrict
7369 pointer. That way they get disambiguated against restrict
7370 accesses but not against each other. */
7371 /* ??? For restricts derived from globals (thus not incoming
7372 parameters) we can't restrict scoping properly thus the following
7373 is too aggressive there. For now we have excluded those globals from
7374 getting into the MR_DEPENDENCE machinery. */
7375 vls_data data = { clique, rvars };
7376 basic_block bb;
7377 FOR_EACH_BB_FN (bb, cfun)
7378 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7379 !gsi_end_p (gsi); gsi_next (&gsi))
7381 gimple *stmt = gsi_stmt (gsi);
7382 walk_stmt_load_store_ops (stmt, &data,
7383 visit_loadstore, visit_loadstore);
7387 BITMAP_FREE (rvars);
7390 /* Compute points-to information for every SSA_NAME pointer in the
7391 current function and compute the transitive closure of escaped
7392 variables to re-initialize the call-clobber states of local variables. */
7394 unsigned int
7395 compute_may_aliases (void)
7397 if (cfun->gimple_df->ipa_pta)
7399 if (dump_file)
7401 fprintf (dump_file, "\nNot re-computing points-to information "
7402 "because IPA points-to information is available.\n\n");
7404 /* But still dump what we have remaining it. */
7405 dump_alias_info (dump_file);
7408 return 0;
7411 /* For each pointer P_i, determine the sets of variables that P_i may
7412 point-to. Compute the reachability set of escaped and call-used
7413 variables. */
7414 compute_points_to_sets ();
7416 /* Debugging dumps. */
7417 if (dump_file)
7418 dump_alias_info (dump_file);
7420 /* Compute restrict-based memory disambiguations. */
7421 compute_dependence_clique ();
7423 /* Deallocate memory used by aliasing data structures and the internal
7424 points-to solution. */
7425 delete_points_to_sets ();
7427 gcc_assert (!need_ssa_update_p (cfun));
7429 return 0;
7432 /* A dummy pass to cause points-to information to be computed via
7433 TODO_rebuild_alias. */
7435 namespace {
7437 const pass_data pass_data_build_alias =
7439 GIMPLE_PASS, /* type */
7440 "alias", /* name */
7441 OPTGROUP_NONE, /* optinfo_flags */
7442 TV_NONE, /* tv_id */
7443 ( PROP_cfg | PROP_ssa ), /* properties_required */
7444 0, /* properties_provided */
7445 0, /* properties_destroyed */
7446 0, /* todo_flags_start */
7447 TODO_rebuild_alias, /* todo_flags_finish */
7450 class pass_build_alias : public gimple_opt_pass
7452 public:
7453 pass_build_alias (gcc::context *ctxt)
7454 : gimple_opt_pass (pass_data_build_alias, ctxt)
7457 /* opt_pass methods: */
7458 virtual bool gate (function *) { return flag_tree_pta; }
7460 }; // class pass_build_alias
7462 } // anon namespace
7464 gimple_opt_pass *
7465 make_pass_build_alias (gcc::context *ctxt)
7467 return new pass_build_alias (ctxt);
7470 /* A dummy pass to cause points-to information to be computed via
7471 TODO_rebuild_alias. */
7473 namespace {
7475 const pass_data pass_data_build_ealias =
7477 GIMPLE_PASS, /* type */
7478 "ealias", /* name */
7479 OPTGROUP_NONE, /* optinfo_flags */
7480 TV_NONE, /* tv_id */
7481 ( PROP_cfg | PROP_ssa ), /* properties_required */
7482 0, /* properties_provided */
7483 0, /* properties_destroyed */
7484 0, /* todo_flags_start */
7485 TODO_rebuild_alias, /* todo_flags_finish */
7488 class pass_build_ealias : public gimple_opt_pass
7490 public:
7491 pass_build_ealias (gcc::context *ctxt)
7492 : gimple_opt_pass (pass_data_build_ealias, ctxt)
7495 /* opt_pass methods: */
7496 virtual bool gate (function *) { return flag_tree_pta; }
7498 }; // class pass_build_ealias
7500 } // anon namespace
7502 gimple_opt_pass *
7503 make_pass_build_ealias (gcc::context *ctxt)
7505 return new pass_build_ealias (ctxt);
7509 /* IPA PTA solutions for ESCAPED. */
7510 struct pt_solution ipa_escaped_pt
7511 = { true, false, false, false, false, false, false, false, false, NULL };
7513 /* Associate node with varinfo DATA. Worker for
7514 cgraph_for_symbol_thunks_and_aliases. */
7515 static bool
7516 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
7518 if ((node->alias || node->thunk.thunk_p)
7519 && node->analyzed)
7520 insert_vi_for_tree (node->decl, (varinfo_t)data);
7521 return false;
7524 /* Dump varinfo VI to FILE. */
7526 static void
7527 dump_varinfo (FILE *file, varinfo_t vi)
7529 if (vi == NULL)
7530 return;
7532 fprintf (file, "%u: %s\n", vi->id, vi->name);
7534 const char *sep = " ";
7535 if (vi->is_artificial_var)
7536 fprintf (file, "%sartificial", sep);
7537 if (vi->is_special_var)
7538 fprintf (file, "%sspecial", sep);
7539 if (vi->is_unknown_size_var)
7540 fprintf (file, "%sunknown-size", sep);
7541 if (vi->is_full_var)
7542 fprintf (file, "%sfull", sep);
7543 if (vi->is_heap_var)
7544 fprintf (file, "%sheap", sep);
7545 if (vi->may_have_pointers)
7546 fprintf (file, "%smay-have-pointers", sep);
7547 if (vi->only_restrict_pointers)
7548 fprintf (file, "%sonly-restrict-pointers", sep);
7549 if (vi->is_restrict_var)
7550 fprintf (file, "%sis-restrict-var", sep);
7551 if (vi->is_global_var)
7552 fprintf (file, "%sglobal", sep);
7553 if (vi->is_ipa_escape_point)
7554 fprintf (file, "%sipa-escape-point", sep);
7555 if (vi->is_fn_info)
7556 fprintf (file, "%sfn-info", sep);
7557 if (vi->ruid)
7558 fprintf (file, "%srestrict-uid:%u", sep, vi->ruid);
7559 if (vi->next)
7560 fprintf (file, "%snext:%u", sep, vi->next);
7561 if (vi->head != vi->id)
7562 fprintf (file, "%shead:%u", sep, vi->head);
7563 if (vi->offset)
7564 fprintf (file, "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset);
7565 if (vi->size != ~(unsigned HOST_WIDE_INT)0)
7566 fprintf (file, "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size);
7567 if (vi->fullsize != ~(unsigned HOST_WIDE_INT)0
7568 && vi->fullsize != vi->size)
7569 fprintf (file, "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep,
7570 vi->fullsize);
7571 fprintf (file, "\n");
7573 if (vi->solution && !bitmap_empty_p (vi->solution))
7575 bitmap_iterator bi;
7576 unsigned i;
7577 fprintf (file, " solution: {");
7578 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
7579 fprintf (file, " %u", i);
7580 fprintf (file, " }\n");
7583 if (vi->oldsolution && !bitmap_empty_p (vi->oldsolution)
7584 && !bitmap_equal_p (vi->solution, vi->oldsolution))
7586 bitmap_iterator bi;
7587 unsigned i;
7588 fprintf (file, " oldsolution: {");
7589 EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi)
7590 fprintf (file, " %u", i);
7591 fprintf (file, " }\n");
7595 /* Dump varinfo VI to stderr. */
7597 DEBUG_FUNCTION void
7598 debug_varinfo (varinfo_t vi)
7600 dump_varinfo (stderr, vi);
7603 /* Dump varmap to FILE. */
7605 static void
7606 dump_varmap (FILE *file)
7608 if (varmap.length () == 0)
7609 return;
7611 fprintf (file, "variables:\n");
7613 for (unsigned int i = 0; i < varmap.length (); ++i)
7615 varinfo_t vi = get_varinfo (i);
7616 dump_varinfo (file, vi);
7619 fprintf (file, "\n");
7622 /* Dump varmap to stderr. */
7624 DEBUG_FUNCTION void
7625 debug_varmap (void)
7627 dump_varmap (stderr);
7630 /* Compute whether node is refered to non-locally. Worker for
7631 cgraph_for_symbol_thunks_and_aliases. */
7632 static bool
7633 refered_from_nonlocal_fn (struct cgraph_node *node, void *data)
7635 bool *nonlocal_p = (bool *)data;
7636 *nonlocal_p |= (node->used_from_other_partition
7637 || node->externally_visible
7638 || node->force_output);
7639 return false;
7642 /* Same for varpool nodes. */
7643 static bool
7644 refered_from_nonlocal_var (struct varpool_node *node, void *data)
7646 bool *nonlocal_p = (bool *)data;
7647 *nonlocal_p |= (node->used_from_other_partition
7648 || node->externally_visible
7649 || node->force_output);
7650 return false;
7653 /* Execute the driver for IPA PTA. */
7654 static unsigned int
7655 ipa_pta_execute (void)
7657 struct cgraph_node *node;
7658 varpool_node *var;
7659 unsigned int from = 0;
7661 in_ipa_mode = 1;
7663 init_alias_vars ();
7665 if (dump_file && (dump_flags & TDF_DETAILS))
7667 symtab_node::dump_table (dump_file);
7668 fprintf (dump_file, "\n");
7671 if (dump_file)
7673 fprintf (dump_file, "Generating generic constraints\n\n");
7674 dump_constraints (dump_file, from);
7675 fprintf (dump_file, "\n");
7676 from = constraints.length ();
7679 /* Build the constraints. */
7680 FOR_EACH_DEFINED_FUNCTION (node)
7682 varinfo_t vi;
7683 /* Nodes without a body are not interesting. Especially do not
7684 visit clones at this point for now - we get duplicate decls
7685 there for inline clones at least. */
7686 if (!node->has_gimple_body_p () || node->global.inlined_to)
7687 continue;
7688 node->get_body ();
7690 gcc_assert (!node->clone_of);
7692 /* For externally visible or attribute used annotated functions use
7693 local constraints for their arguments.
7694 For local functions we see all callers and thus do not need initial
7695 constraints for parameters. */
7696 bool nonlocal_p = (node->used_from_other_partition
7697 || node->externally_visible
7698 || node->force_output);
7699 node->call_for_symbol_thunks_and_aliases (refered_from_nonlocal_fn,
7700 &nonlocal_p, true);
7702 vi = create_function_info_for (node->decl,
7703 alias_get_name (node->decl), false,
7704 nonlocal_p);
7705 if (dump_file
7706 && from != constraints.length ())
7708 fprintf (dump_file,
7709 "Generating intial constraints for %s", node->name ());
7710 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
7711 fprintf (dump_file, " (%s)",
7712 IDENTIFIER_POINTER
7713 (DECL_ASSEMBLER_NAME (node->decl)));
7714 fprintf (dump_file, "\n\n");
7715 dump_constraints (dump_file, from);
7716 fprintf (dump_file, "\n");
7718 from = constraints.length ();
7721 node->call_for_symbol_thunks_and_aliases
7722 (associate_varinfo_to_alias, vi, true);
7725 /* Create constraints for global variables and their initializers. */
7726 FOR_EACH_VARIABLE (var)
7728 if (var->alias && var->analyzed)
7729 continue;
7731 varinfo_t vi = get_vi_for_tree (var->decl);
7733 /* For the purpose of IPA PTA unit-local globals are not
7734 escape points. */
7735 bool nonlocal_p = (var->used_from_other_partition
7736 || var->externally_visible
7737 || var->force_output);
7738 var->call_for_symbol_and_aliases (refered_from_nonlocal_var,
7739 &nonlocal_p, true);
7740 if (nonlocal_p)
7741 vi->is_ipa_escape_point = true;
7744 if (dump_file
7745 && from != constraints.length ())
7747 fprintf (dump_file,
7748 "Generating constraints for global initializers\n\n");
7749 dump_constraints (dump_file, from);
7750 fprintf (dump_file, "\n");
7751 from = constraints.length ();
7754 FOR_EACH_DEFINED_FUNCTION (node)
7756 struct function *func;
7757 basic_block bb;
7759 /* Nodes without a body are not interesting. */
7760 if (!node->has_gimple_body_p () || node->clone_of)
7761 continue;
7763 if (dump_file)
7765 fprintf (dump_file,
7766 "Generating constraints for %s", node->name ());
7767 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
7768 fprintf (dump_file, " (%s)",
7769 IDENTIFIER_POINTER
7770 (DECL_ASSEMBLER_NAME (node->decl)));
7771 fprintf (dump_file, "\n");
7774 func = DECL_STRUCT_FUNCTION (node->decl);
7775 gcc_assert (cfun == NULL);
7777 /* Build constriants for the function body. */
7778 FOR_EACH_BB_FN (bb, func)
7780 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7781 gsi_next (&gsi))
7783 gphi *phi = gsi.phi ();
7785 if (! virtual_operand_p (gimple_phi_result (phi)))
7786 find_func_aliases (func, phi);
7789 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
7790 gsi_next (&gsi))
7792 gimple *stmt = gsi_stmt (gsi);
7794 find_func_aliases (func, stmt);
7795 find_func_clobbers (func, stmt);
7799 if (dump_file)
7801 fprintf (dump_file, "\n");
7802 dump_constraints (dump_file, from);
7803 fprintf (dump_file, "\n");
7804 from = constraints.length ();
7808 /* From the constraints compute the points-to sets. */
7809 solve_constraints ();
7811 /* Compute the global points-to sets for ESCAPED.
7812 ??? Note that the computed escape set is not correct
7813 for the whole unit as we fail to consider graph edges to
7814 externally visible functions. */
7815 ipa_escaped_pt = find_what_var_points_to (NULL, get_varinfo (escaped_id));
7817 /* Make sure the ESCAPED solution (which is used as placeholder in
7818 other solutions) does not reference itself. This simplifies
7819 points-to solution queries. */
7820 ipa_escaped_pt.ipa_escaped = 0;
7822 /* Assign the points-to sets to the SSA names in the unit. */
7823 FOR_EACH_DEFINED_FUNCTION (node)
7825 tree ptr;
7826 struct function *fn;
7827 unsigned i;
7828 basic_block bb;
7830 /* Nodes without a body are not interesting. */
7831 if (!node->has_gimple_body_p () || node->clone_of)
7832 continue;
7834 fn = DECL_STRUCT_FUNCTION (node->decl);
7836 /* Compute the points-to sets for pointer SSA_NAMEs. */
7837 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
7839 if (ptr
7840 && POINTER_TYPE_P (TREE_TYPE (ptr)))
7841 find_what_p_points_to (node->decl, ptr);
7844 /* Compute the call-use and call-clobber sets for indirect calls
7845 and calls to external functions. */
7846 FOR_EACH_BB_FN (bb, fn)
7848 gimple_stmt_iterator gsi;
7850 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7852 gcall *stmt;
7853 struct pt_solution *pt;
7854 varinfo_t vi, fi;
7855 tree decl;
7857 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
7858 if (!stmt)
7859 continue;
7861 /* Handle direct calls to functions with body. */
7862 decl = gimple_call_fndecl (stmt);
7865 tree called_decl = NULL_TREE;
7866 if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
7867 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
7868 else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
7869 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
7871 if (called_decl != NULL_TREE
7872 && !fndecl_maybe_in_other_partition (called_decl))
7873 decl = called_decl;
7876 if (decl
7877 && (fi = lookup_vi_for_tree (decl))
7878 && fi->is_fn_info)
7880 *gimple_call_clobber_set (stmt)
7881 = find_what_var_points_to
7882 (node->decl, first_vi_for_offset (fi, fi_clobbers));
7883 *gimple_call_use_set (stmt)
7884 = find_what_var_points_to
7885 (node->decl, first_vi_for_offset (fi, fi_uses));
7887 /* Handle direct calls to external functions. */
7888 else if (decl)
7890 pt = gimple_call_use_set (stmt);
7891 if (gimple_call_flags (stmt) & ECF_CONST)
7892 memset (pt, 0, sizeof (struct pt_solution));
7893 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7895 *pt = find_what_var_points_to (node->decl, vi);
7896 /* Escaped (and thus nonlocal) variables are always
7897 implicitly used by calls. */
7898 /* ??? ESCAPED can be empty even though NONLOCAL
7899 always escaped. */
7900 pt->nonlocal = 1;
7901 pt->ipa_escaped = 1;
7903 else
7905 /* If there is nothing special about this call then
7906 we have made everything that is used also escape. */
7907 *pt = ipa_escaped_pt;
7908 pt->nonlocal = 1;
7911 pt = gimple_call_clobber_set (stmt);
7912 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7913 memset (pt, 0, sizeof (struct pt_solution));
7914 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7916 *pt = find_what_var_points_to (node->decl, vi);
7917 /* Escaped (and thus nonlocal) variables are always
7918 implicitly clobbered by calls. */
7919 /* ??? ESCAPED can be empty even though NONLOCAL
7920 always escaped. */
7921 pt->nonlocal = 1;
7922 pt->ipa_escaped = 1;
7924 else
7926 /* If there is nothing special about this call then
7927 we have made everything that is used also escape. */
7928 *pt = ipa_escaped_pt;
7929 pt->nonlocal = 1;
7932 /* Handle indirect calls. */
7933 else if (!decl
7934 && (fi = get_fi_for_callee (stmt)))
7936 /* We need to accumulate all clobbers/uses of all possible
7937 callees. */
7938 fi = get_varinfo (find (fi->id));
7939 /* If we cannot constrain the set of functions we'll end up
7940 calling we end up using/clobbering everything. */
7941 if (bitmap_bit_p (fi->solution, anything_id)
7942 || bitmap_bit_p (fi->solution, nonlocal_id)
7943 || bitmap_bit_p (fi->solution, escaped_id))
7945 pt_solution_reset (gimple_call_clobber_set (stmt));
7946 pt_solution_reset (gimple_call_use_set (stmt));
7948 else
7950 bitmap_iterator bi;
7951 unsigned i;
7952 struct pt_solution *uses, *clobbers;
7954 uses = gimple_call_use_set (stmt);
7955 clobbers = gimple_call_clobber_set (stmt);
7956 memset (uses, 0, sizeof (struct pt_solution));
7957 memset (clobbers, 0, sizeof (struct pt_solution));
7958 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
7960 struct pt_solution sol;
7962 vi = get_varinfo (i);
7963 if (!vi->is_fn_info)
7965 /* ??? We could be more precise here? */
7966 uses->nonlocal = 1;
7967 uses->ipa_escaped = 1;
7968 clobbers->nonlocal = 1;
7969 clobbers->ipa_escaped = 1;
7970 continue;
7973 if (!uses->anything)
7975 sol = find_what_var_points_to
7976 (node->decl,
7977 first_vi_for_offset (vi, fi_uses));
7978 pt_solution_ior_into (uses, &sol);
7980 if (!clobbers->anything)
7982 sol = find_what_var_points_to
7983 (node->decl,
7984 first_vi_for_offset (vi, fi_clobbers));
7985 pt_solution_ior_into (clobbers, &sol);
7993 fn->gimple_df->ipa_pta = true;
7995 /* We have to re-set the final-solution cache after each function
7996 because what is a "global" is dependent on function context. */
7997 final_solutions->empty ();
7998 obstack_free (&final_solutions_obstack, NULL);
7999 gcc_obstack_init (&final_solutions_obstack);
8002 delete_points_to_sets ();
8004 in_ipa_mode = 0;
8006 return 0;
8009 namespace {
8011 const pass_data pass_data_ipa_pta =
8013 SIMPLE_IPA_PASS, /* type */
8014 "pta", /* name */
8015 OPTGROUP_NONE, /* optinfo_flags */
8016 TV_IPA_PTA, /* tv_id */
8017 0, /* properties_required */
8018 0, /* properties_provided */
8019 0, /* properties_destroyed */
8020 0, /* todo_flags_start */
8021 0, /* todo_flags_finish */
8024 class pass_ipa_pta : public simple_ipa_opt_pass
8026 public:
8027 pass_ipa_pta (gcc::context *ctxt)
8028 : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
8031 /* opt_pass methods: */
8032 virtual bool gate (function *)
8034 return (optimize
8035 && flag_ipa_pta
8036 /* Don't bother doing anything if the program has errors. */
8037 && !seen_error ());
8040 opt_pass * clone () { return new pass_ipa_pta (m_ctxt); }
8042 virtual unsigned int execute (function *) { return ipa_pta_execute (); }
8044 }; // class pass_ipa_pta
8046 } // anon namespace
8048 simple_ipa_opt_pass *
8049 make_pass_ipa_pta (gcc::context *ctxt)
8051 return new pass_ipa_pta (ctxt);