/* Tree based points-to analysis
   Copyright (C) 2005-2016 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dberlin@dberlin.org>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "stmt.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "params.h"
#include "gimple-walk.h"
/* The idea behind this analyzer is to generate set constraints from the
   program, then solve the resulting constraints in order to generate the
   points-to sets.

   Set constraints are a way of modeling program analysis problems that
   involve sets.  They consist of an inclusion constraint language,
   describing the variables (each variable is a set) and operations that
   are involved on the variables, and a set of rules that derive facts
   from these operations.  To solve a system of set constraints, you derive
   all possible facts under the rules, which gives you the correct sets
   as a consequence.

   See "Efficient Field-sensitive pointer analysis for C" by David
   J. Pearce, Paul H. J. Kelly and Chris Hankin, at
   http://citeseer.ist.psu.edu/pearce04efficient.html

   Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
   of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
   http://citeseer.ist.psu.edu/heintze01ultrafast.html

   There are three types of real constraint expressions, DEREF,
   ADDRESSOF, and SCALAR.  Each constraint expression consists
   of a constraint type, a variable, and an offset.

   SCALAR is a constraint expression type used to represent x, whether
   it appears on the LHS or the RHS of a statement.
   DEREF is a constraint expression type used to represent *x, whether
   it appears on the LHS or the RHS of a statement.
   ADDRESSOF is a constraint expression used to represent &x, whether
   it appears on the LHS or the RHS of a statement.
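
   For illustration (a sketch of the mapping, not actual output of this
   pass), each assignment below generates one constraint pairing an LHS
   and an RHS constraint expression of the listed types:

     p = &a;   ->   p = &a     SCALAR = ADDRESSOF
     p = q;    ->   p = q      SCALAR = SCALAR
     p = *q;   ->   p = *q     SCALAR = DEREF
     *p = q;   ->   *p = q     DEREF = SCALAR
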
   Each pointer variable in the program is assigned an integer id, and
   each field of a structure variable is assigned an integer id as well.

   Structure variables are linked to their list of fields through a "next
   field" in each variable that points to the next field in offset
   order.
   Each variable for a structure field has

     1. "size", that tells the size in bits of that field.
     2. "fullsize", that tells the size in bits of the entire structure.
     3. "offset", that tells the offset in bits from the beginning of the
        structure to this field.

   Thus,
   struct f
   {
     int a;
     int b;
   } foo;
   int *bar;

   looks like

   foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
   foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
   bar -> id 3, size 32, offset 0, fullsize 32, next NULL
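
   For illustration (a hypothetical constraint, not derived from the
   declarations above): with this layout, an offsetted copy constraint
   p = q + 32 adds, for each member of Sol(q), the field lying 32 bits
   further into the same variable; so if Sol(q) contains foo.a, then
   foo.b is added to Sol(p).
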
   In order to solve the system of set constraints, the following is
   done:

   1. Each constraint variable x has a solution set associated with it,
   Sol(x).

   2. Constraints are separated into direct, copy, and complex.
   Direct constraints are ADDRESSOF constraints that require no extra
   processing, such as P = &Q.
   Copy constraints are those of the form P = Q.
   Complex constraints are all the constraints involving dereferences
   and offsets (including offsetted copies).

   3. All direct constraints of the form P = &Q are processed, such
   that Q is added to Sol(P).

   4. All complex constraints for a given constraint variable are stored in a
   linked list attached to that variable's node.

   5. A directed graph is built out of the copy constraints.  Each
   constraint variable is a node in the graph, and an edge from
   Q to P is added for each copy constraint of the form P = Q.

   6. The graph is then walked, and solution sets are
   propagated along the copy edges, such that an edge from Q to P
   causes Sol(P) <- Sol(P) union Sol(Q).

   7. As we visit each node, all complex constraints associated with
   that node are processed by adding appropriate copy edges to the graph,
   or the appropriate variables to the solution set.

   8. The process of walking the graph is iterated until no solution
   sets change.

   Prior to walking the graph in steps 6 and 7, we perform static
   cycle elimination on the constraint graph, as well
   as off-line variable substitution.
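
   For example (an illustrative walk-through, not actual solver output),
   the fragment

     p = &a;   q = p;   r = *q;

   yields the direct constraint p = &a, the copy constraint q = p and
   the complex constraint r = *q.  Step 3 seeds Sol(p) = {a}; step 5
   adds the copy edge p -> q; propagation in step 6 gives Sol(q) = {a};
   processing r = *q in step 7 then adds a copy edge a -> r, so the
   next iteration makes Sol(r) = Sol(a).
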
   TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
   on and turned into anything), but isn't.  You can just see what offset
   inside the pointed-to struct it's going to access.

   TODO: Constant bounded arrays can be handled as if they were structs of the
   same number of elements.

   TODO: Modeling heap and incoming pointers becomes much better if we
   add fields to them as we discover them, which we could do.

   TODO: We could handle unions, but to be honest, it's probably not
   worth the pain or slowdown.  */
/* IPA-PTA optimizations possible.

   When the indirect function called is ANYTHING we can add disambiguation
   based on the function signatures (or simply the parameter count which
   is the varinfo size).  We also do not need to consider functions that
   do not have their address taken.

   The is_global_var bit which marks escape points is overly conservative
   in IPA mode.  Split it to is_escape_point and is_global_var - only
   externally visible globals are escape points in IPA mode.
   There is now is_ipa_escape_point but this is only used in a few
   selected places.

   The way we introduce DECL_PT_UID to avoid fixing up all points-to
   sets in the translation unit when we copy a DECL during inlining
   pessimizes precision.  The advantage is that the DECL_PT_UID keeps
   compile-time and memory usage overhead low - the points-to sets
   do not grow or get unshared as they would during a fixup phase.
   An alternative solution is to delay IPA PTA until after all
   inlining transformations have been applied.

   The way we propagate clobber/use information isn't optimized.
   It should use a new complex constraint that properly filters
   out local variables of the callee (though that would make
   the sets invalid after inlining).  OTOH we might as well
   admit defeat to WHOPR and simply do all the clobber/use analysis
   and propagation after PTA finished but before we threw away
   points-to information for memory variables.  WHOPR and PTA
   do not play along well anyway - the whole constraint solving
   would need to be done in WPA phase and it will be very interesting
   to apply the results to local SSA names during LTRANS phase.

   We probably should compute a per-function unit-ESCAPE solution
   propagating it simply like the clobber / uses solutions.  The
   solution can go alongside the non-IPA escaped solution and be
   used to query which vars escape the unit through a function.
   This is also required to make the escaped-HEAP trick work in IPA mode.

   We never put function decls in points-to sets so we do not
   keep the set of called functions for indirect calls.

   And probably more.  */
196 static bool use_field_sensitive = true;
197 static int in_ipa_mode = 0;
199 /* Used for predecessor bitmaps. */
200 static bitmap_obstack predbitmap_obstack;
202 /* Used for points-to sets. */
203 static bitmap_obstack pta_obstack;
205 /* Used for oldsolution members of variables. */
206 static bitmap_obstack oldpta_obstack;
208 /* Used for per-solver-iteration bitmaps. */
209 static bitmap_obstack iteration_obstack;
211 static unsigned int create_variable_info_for (tree, const char *, bool);
212 typedef struct constraint_graph *constraint_graph_t;
213 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
215 struct constraint;
216 typedef struct constraint *constraint_t;
219 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
220 if (a) \
221 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
223 static struct constraint_stats
225 unsigned int total_vars;
226 unsigned int nonpointer_vars;
227 unsigned int unified_vars_static;
228 unsigned int unified_vars_dynamic;
229 unsigned int iterations;
230 unsigned int num_edges;
231 unsigned int num_implicit_edges;
232 unsigned int points_to_sets_created;
233 } stats;
235 struct variable_info
237 /* ID of this variable */
238 unsigned int id;
240 /* True if this is a variable created by the constraint analysis, such as
241 heap variables and constraints we had to break up. */
242 unsigned int is_artificial_var : 1;
244 /* True if this is a special variable whose solution set should not be
245 changed. */
246 unsigned int is_special_var : 1;
248 /* True for variables whose size is not known or variable. */
249 unsigned int is_unknown_size_var : 1;
251 /* True for (sub-)fields that represent a whole variable. */
252 unsigned int is_full_var : 1;
254 /* True if this is a heap variable. */
255 unsigned int is_heap_var : 1;
257 /* True if this field may contain pointers. */
258 unsigned int may_have_pointers : 1;
260 /* True if this field has only restrict qualified pointers. */
261 unsigned int only_restrict_pointers : 1;
263 /* True if this represents a heap var created for a restrict qualified
264 pointer. */
265 unsigned int is_restrict_var : 1;
267 /* True if this represents a global variable. */
268 unsigned int is_global_var : 1;
270 /* True if this represents a module escape point for IPA analysis. */
271 unsigned int is_ipa_escape_point : 1;
  /* True if this represents an IPA function info.  */
274 unsigned int is_fn_info : 1;
276 /* ??? Store somewhere better. */
277 unsigned short ruid;
279 /* The ID of the variable for the next field in this structure
280 or zero for the last field in this structure. */
281 unsigned next;
283 /* The ID of the variable for the first field in this structure. */
284 unsigned head;
286 /* Offset of this variable, in bits, from the base variable */
287 unsigned HOST_WIDE_INT offset;
289 /* Size of the variable, in bits. */
290 unsigned HOST_WIDE_INT size;
292 /* Full size of the base variable, in bits. */
293 unsigned HOST_WIDE_INT fullsize;
295 /* Name of this variable */
296 const char *name;
298 /* Tree that this variable is associated with. */
299 tree decl;
301 /* Points-to set for this variable. */
302 bitmap solution;
304 /* Old points-to set for this variable. */
305 bitmap oldsolution;
307 typedef struct variable_info *varinfo_t;
309 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
310 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
311 unsigned HOST_WIDE_INT);
312 static varinfo_t lookup_vi_for_tree (tree);
313 static inline bool type_can_have_subvars (const_tree);
314 static void make_param_constraints (varinfo_t);
316 /* Pool of variable info structures. */
317 static object_allocator<variable_info> variable_info_pool
318 ("Variable info pool");
320 /* Map varinfo to final pt_solution. */
321 static hash_map<varinfo_t, pt_solution *> *final_solutions;
322 struct obstack final_solutions_obstack;
324 /* Table of variable info structures for constraint variables.
325 Indexed directly by variable info id. */
326 static vec<varinfo_t> varmap;
328 /* Return the varmap element N */
330 static inline varinfo_t
331 get_varinfo (unsigned int n)
333 return varmap[n];
336 /* Return the next variable in the list of sub-variables of VI
337 or NULL if VI is the last sub-variable. */
339 static inline varinfo_t
340 vi_next (varinfo_t vi)
342 return get_varinfo (vi->next);
/* Static IDs for the special variables.  Variable ID zero is not used
   for any variable and serves as the terminator for the sub-variable
   chain.  */
347 enum { nothing_id = 1, anything_id = 2, string_id = 3,
348 escaped_id = 4, nonlocal_id = 5,
349 storedanything_id = 6, integer_id = 7 };
/* Return a new variable info structure for a variable named NAME,
   associated with tree T.  Append it to the vector of variable info
   structures.  */
355 static varinfo_t
356 new_var_info (tree t, const char *name, bool add_id)
358 unsigned index = varmap.length ();
359 varinfo_t ret = variable_info_pool.allocate ();
361 if (dump_file && add_id)
363 char *tempname = xasprintf ("%s(%d)", name, index);
364 name = ggc_strdup (tempname);
365 free (tempname);
368 ret->id = index;
369 ret->name = name;
370 ret->decl = t;
371 /* Vars without decl are artificial and do not have sub-variables. */
372 ret->is_artificial_var = (t == NULL_TREE);
373 ret->is_special_var = false;
374 ret->is_unknown_size_var = false;
375 ret->is_full_var = (t == NULL_TREE);
376 ret->is_heap_var = false;
377 ret->may_have_pointers = true;
378 ret->only_restrict_pointers = false;
379 ret->is_restrict_var = false;
380 ret->ruid = 0;
381 ret->is_global_var = (t == NULL_TREE);
382 ret->is_ipa_escape_point = false;
383 ret->is_fn_info = false;
384 if (t && DECL_P (t))
385 ret->is_global_var = (is_global_var (t)
386 /* We have to treat even local register variables
387 as escape points. */
388 || (TREE_CODE (t) == VAR_DECL
389 && DECL_HARD_REGISTER (t)));
390 ret->solution = BITMAP_ALLOC (&pta_obstack);
391 ret->oldsolution = NULL;
392 ret->next = 0;
393 ret->head = ret->id;
395 stats.total_vars++;
397 varmap.safe_push (ret);
399 return ret;
402 /* A map mapping call statements to per-stmt variables for uses
403 and clobbers specific to the call. */
404 static hash_map<gimple *, varinfo_t> *call_stmt_vars;
406 /* Lookup or create the variable for the call statement CALL. */
408 static varinfo_t
409 get_call_vi (gcall *call)
411 varinfo_t vi, vi2;
413 bool existed;
414 varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
415 if (existed)
416 return *slot_p;
418 vi = new_var_info (NULL_TREE, "CALLUSED", true);
419 vi->offset = 0;
420 vi->size = 1;
421 vi->fullsize = 2;
422 vi->is_full_var = true;
424 vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED", true);
425 vi2->offset = 1;
426 vi2->size = 1;
427 vi2->fullsize = 2;
428 vi2->is_full_var = true;
430 vi->next = vi2->id;
432 *slot_p = vi;
433 return vi;
436 /* Lookup the variable for the call statement CALL representing
437 the uses. Returns NULL if there is nothing special about this call. */
439 static varinfo_t
440 lookup_call_use_vi (gcall *call)
442 varinfo_t *slot_p = call_stmt_vars->get (call);
443 if (slot_p)
444 return *slot_p;
446 return NULL;
449 /* Lookup the variable for the call statement CALL representing
450 the clobbers. Returns NULL if there is nothing special about this call. */
452 static varinfo_t
453 lookup_call_clobber_vi (gcall *call)
455 varinfo_t uses = lookup_call_use_vi (call);
456 if (!uses)
457 return NULL;
459 return vi_next (uses);
462 /* Lookup or create the variable for the call statement CALL representing
463 the uses. */
465 static varinfo_t
466 get_call_use_vi (gcall *call)
468 return get_call_vi (call);
471 /* Lookup or create the variable for the call statement CALL representing
472 the clobbers. */
474 static varinfo_t ATTRIBUTE_UNUSED
475 get_call_clobber_vi (gcall *call)
477 return vi_next (get_call_vi (call));
481 enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};
483 /* An expression that appears in a constraint. */
485 struct constraint_expr
487 /* Constraint type. */
488 constraint_expr_type type;
490 /* Variable we are referring to in the constraint. */
491 unsigned int var;
493 /* Offset, in bits, of this constraint from the beginning of
494 variables it ends up referring to.
496 IOW, in a deref constraint, we would deref, get the result set,
497 then add OFFSET to each member. */
498 HOST_WIDE_INT offset;
501 /* Use 0x8000... as special unknown offset. */
502 #define UNKNOWN_OFFSET HOST_WIDE_INT_MIN
504 typedef struct constraint_expr ce_s;
505 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
506 static void get_constraint_for (tree, vec<ce_s> *);
507 static void get_constraint_for_rhs (tree, vec<ce_s> *);
508 static void do_deref (vec<ce_s> *);
/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/

struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};
522 /* List of constraints that we use to build the constraint graph from. */
524 static vec<constraint_t> constraints;
525 static object_allocator<constraint> constraint_pool ("Constraint pool");
527 /* The constraint graph is represented as an array of bitmaps
528 containing successor nodes. */
530 struct constraint_graph
532 /* Size of this graph, which may be different than the number of
533 nodes in the variable map. */
534 unsigned int size;
536 /* Explicit successors of each node. */
537 bitmap *succs;
539 /* Implicit predecessors of each node (Used for variable
540 substitution). */
541 bitmap *implicit_preds;
543 /* Explicit predecessors of each node (Used for variable substitution). */
544 bitmap *preds;
546 /* Indirect cycle representatives, or -1 if the node has no indirect
547 cycles. */
548 int *indirect_cycles;
550 /* Representative node for a node. rep[a] == a unless the node has
551 been unified. */
552 unsigned int *rep;
554 /* Equivalence class representative for a label. This is used for
555 variable substitution. */
556 int *eq_rep;
558 /* Pointer equivalence label for a node. All nodes with the same
559 pointer equivalence label can be unified together at some point
560 (either during constraint optimization or after the constraint
561 graph is built). */
562 unsigned int *pe;
564 /* Pointer equivalence representative for a label. This is used to
565 handle nodes that are pointer equivalent but not location
566 equivalent. We can unite these once the addressof constraints
567 are transformed into initial points-to sets. */
568 int *pe_rep;
570 /* Pointer equivalence label for each node, used during variable
571 substitution. */
572 unsigned int *pointer_label;
574 /* Location equivalence label for each node, used during location
575 equivalence finding. */
576 unsigned int *loc_label;
578 /* Pointed-by set for each node, used during location equivalence
579 finding. This is pointed-by rather than pointed-to, because it
580 is constructed using the predecessor graph. */
581 bitmap *pointed_by;
583 /* Points to sets for pointer equivalence. This is *not* the actual
584 points-to sets for nodes. */
585 bitmap *points_to;
587 /* Bitmap of nodes where the bit is set if the node is a direct
588 node. Used for variable substitution. */
589 sbitmap direct_nodes;
591 /* Bitmap of nodes where the bit is set if the node is address
592 taken. Used for variable substitution. */
593 bitmap address_taken;
595 /* Vector of complex constraints for each graph node. Complex
596 constraints are those involving dereferences or offsets that are
597 not 0. */
598 vec<constraint_t> *complex;
601 static constraint_graph_t graph;
603 /* During variable substitution and the offline version of indirect
604 cycle finding, we create nodes to represent dereferences and
605 address taken constraints. These represent where these start and
606 end. */
607 #define FIRST_REF_NODE (varmap).length ()
608 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
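/* For illustration with hypothetical numbers: if varmap.length () is 100,
   then variable id 5 is graph node 5, the node representing *5 during
   these offline phases is FIRST_REF_NODE + 5 == 105, and LAST_REF_NODE
   is 199.  */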
610 /* Return the representative node for NODE, if NODE has been unioned
611 with another NODE.
612 This function performs path compression along the way to finding
613 the representative. */
615 static unsigned int
616 find (unsigned int node)
618 gcc_checking_assert (node < graph->size);
619 if (graph->rep[node] != node)
620 return graph->rep[node] = find (graph->rep[node]);
621 return node;
/* Union the TO and FROM nodes into the TO node.
   Note that at some point in the future, we may want to do
   union-by-rank, in which case we are going to have to return the
   node we unified to.  */
629 static bool
630 unite (unsigned int to, unsigned int from)
632 gcc_checking_assert (to < graph->size && from < graph->size);
633 if (to != from && graph->rep[from] != to)
635 graph->rep[from] = to;
636 return true;
638 return false;
641 /* Create a new constraint consisting of LHS and RHS expressions. */
643 static constraint_t
644 new_constraint (const struct constraint_expr lhs,
645 const struct constraint_expr rhs)
647 constraint_t ret = constraint_pool.allocate ();
648 ret->lhs = lhs;
649 ret->rhs = rhs;
650 return ret;
653 /* Print out constraint C to FILE. */
655 static void
656 dump_constraint (FILE *file, constraint_t c)
658 if (c->lhs.type == ADDRESSOF)
659 fprintf (file, "&");
660 else if (c->lhs.type == DEREF)
661 fprintf (file, "*");
662 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
663 if (c->lhs.offset == UNKNOWN_OFFSET)
664 fprintf (file, " + UNKNOWN");
665 else if (c->lhs.offset != 0)
666 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
667 fprintf (file, " = ");
668 if (c->rhs.type == ADDRESSOF)
669 fprintf (file, "&");
670 else if (c->rhs.type == DEREF)
671 fprintf (file, "*");
672 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
673 if (c->rhs.offset == UNKNOWN_OFFSET)
674 fprintf (file, " + UNKNOWN");
675 else if (c->rhs.offset != 0)
676 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
680 void debug_constraint (constraint_t);
681 void debug_constraints (void);
682 void debug_constraint_graph (void);
683 void debug_solution_for_var (unsigned int);
684 void debug_sa_points_to_info (void);
685 void debug_varinfo (varinfo_t);
686 void debug_varmap (void);
688 /* Print out constraint C to stderr. */
690 DEBUG_FUNCTION void
691 debug_constraint (constraint_t c)
693 dump_constraint (stderr, c);
694 fprintf (stderr, "\n");
697 /* Print out all constraints to FILE */
699 static void
700 dump_constraints (FILE *file, int from)
702 int i;
703 constraint_t c;
704 for (i = from; constraints.iterate (i, &c); i++)
705 if (c)
707 dump_constraint (file, c);
708 fprintf (file, "\n");
712 /* Print out all constraints to stderr. */
714 DEBUG_FUNCTION void
715 debug_constraints (void)
717 dump_constraints (stderr, 0);
720 /* Print the constraint graph in dot format. */
722 static void
723 dump_constraint_graph (FILE *file)
725 unsigned int i;
727 /* Only print the graph if it has already been initialized: */
728 if (!graph)
729 return;
731 /* Prints the header of the dot file: */
732 fprintf (file, "strict digraph {\n");
733 fprintf (file, " node [\n shape = box\n ]\n");
734 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
735 fprintf (file, "\n // List of nodes and complex constraints in "
736 "the constraint graph:\n");
738 /* The next lines print the nodes in the graph together with the
739 complex constraints attached to them. */
740 for (i = 1; i < graph->size; i++)
742 if (i == FIRST_REF_NODE)
743 continue;
744 if (find (i) != i)
745 continue;
746 if (i < FIRST_REF_NODE)
747 fprintf (file, "\"%s\"", get_varinfo (i)->name);
748 else
749 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
750 if (graph->complex[i].exists ())
752 unsigned j;
753 constraint_t c;
754 fprintf (file, " [label=\"\\N\\n");
755 for (j = 0; graph->complex[i].iterate (j, &c); ++j)
757 dump_constraint (file, c);
758 fprintf (file, "\\l");
760 fprintf (file, "\"]");
762 fprintf (file, ";\n");
765 /* Go over the edges. */
766 fprintf (file, "\n // Edges in the constraint graph:\n");
767 for (i = 1; i < graph->size; i++)
769 unsigned j;
770 bitmap_iterator bi;
771 if (find (i) != i)
772 continue;
773 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
775 unsigned to = find (j);
776 if (i == to)
777 continue;
778 if (i < FIRST_REF_NODE)
779 fprintf (file, "\"%s\"", get_varinfo (i)->name);
780 else
781 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
782 fprintf (file, " -> ");
783 if (to < FIRST_REF_NODE)
784 fprintf (file, "\"%s\"", get_varinfo (to)->name);
785 else
786 fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
787 fprintf (file, ";\n");
791 /* Prints the tail of the dot file. */
792 fprintf (file, "}\n");
795 /* Print out the constraint graph to stderr. */
797 DEBUG_FUNCTION void
798 debug_constraint_graph (void)
800 dump_constraint_graph (stderr);
/* SOLVER FUNCTIONS

   The solver is a simple worklist solver that works on the following
   algorithm:

   sbitmap changed_nodes = all zeroes;
   changed_count = 0;
   For each node that is not already collapsed:
       changed_count++;
       set bit in changed nodes

   while (changed_count > 0)
   {
     compute topological ordering for constraint graph

     find and collapse cycles in the constraint graph (updating
     changed if necessary)

     for each node (n) in the graph in topological order:
       changed_count--;

       Process each complex constraint associated with the node,
       updating changed if necessary.

       For each outgoing edge from n, propagate the solution from n to
       the destination of the edge, updating changed as necessary.

   } */
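
/* An illustrative, self-contained sketch of the propagation part of the
   algorithm above (hypothetical helper, not used by this pass):
   solutions are plain unsigned bitmasks, succ[n * nnodes + m] != 0
   records a copy edge n -> m, and nodes are revisited until no
   solution grows.  Cycle collapsing, topological ordering and complex
   constraints are omitted for brevity.

     static void
     sketch_propagate_copy_edges (unsigned *sol, const unsigned char *succ,
                                  unsigned nnodes)
     {
       int changed = 1;
       while (changed)
         {
           changed = 0;
           for (unsigned n = 0; n < nnodes; n++)
             for (unsigned m = 0; m < nnodes; m++)
               if (succ[n * nnodes + m] && (sol[m] | sol[n]) != sol[m])
                 {
                   sol[m] |= sol[n];
                   changed = 1;
                 }
         }
     }  */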
832 /* Return true if two constraint expressions A and B are equal. */
834 static bool
835 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
837 return a.type == b.type && a.var == b.var && a.offset == b.offset;
840 /* Return true if constraint expression A is less than constraint expression
841 B. This is just arbitrary, but consistent, in order to give them an
842 ordering. */
844 static bool
845 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
847 if (a.type == b.type)
849 if (a.var == b.var)
850 return a.offset < b.offset;
851 else
852 return a.var < b.var;
854 else
855 return a.type < b.type;
858 /* Return true if constraint A is less than constraint B. This is just
859 arbitrary, but consistent, in order to give them an ordering. */
861 static bool
862 constraint_less (const constraint_t &a, const constraint_t &b)
864 if (constraint_expr_less (a->lhs, b->lhs))
865 return true;
866 else if (constraint_expr_less (b->lhs, a->lhs))
867 return false;
868 else
869 return constraint_expr_less (a->rhs, b->rhs);
872 /* Return true if two constraints A and B are equal. */
874 static bool
875 constraint_equal (struct constraint a, struct constraint b)
877 return constraint_expr_equal (a.lhs, b.lhs)
878 && constraint_expr_equal (a.rhs, b.rhs);
882 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
884 static constraint_t
885 constraint_vec_find (vec<constraint_t> vec,
886 struct constraint lookfor)
888 unsigned int place;
889 constraint_t found;
891 if (!vec.exists ())
892 return NULL;
894 place = vec.lower_bound (&lookfor, constraint_less);
895 if (place >= vec.length ())
896 return NULL;
897 found = vec[place];
898 if (!constraint_equal (*found, lookfor))
899 return NULL;
900 return found;
/* Union two constraint vectors, TO and FROM.  Put the result in TO.
   Returns true if the TO set is changed.  */
906 static bool
907 constraint_set_union (vec<constraint_t> *to,
908 vec<constraint_t> *from)
910 int i;
911 constraint_t c;
912 bool any_change = false;
914 FOR_EACH_VEC_ELT (*from, i, c)
916 if (constraint_vec_find (*to, *c) == NULL)
918 unsigned int place = to->lower_bound (c, constraint_less);
919 to->safe_insert (place, c);
920 any_change = true;
923 return any_change;
926 /* Expands the solution in SET to all sub-fields of variables included. */
928 static bitmap
929 solution_set_expand (bitmap set, bitmap *expanded)
931 bitmap_iterator bi;
932 unsigned j;
934 if (*expanded)
935 return *expanded;
937 *expanded = BITMAP_ALLOC (&iteration_obstack);
  /* In a first pass expand to the head of the variables we need to
     add all sub-fields of.  This avoids quadratic behavior.  */
941 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
943 varinfo_t v = get_varinfo (j);
944 if (v->is_artificial_var
945 || v->is_full_var)
946 continue;
947 bitmap_set_bit (*expanded, v->head);
950 /* In the second pass now expand all head variables with subfields. */
951 EXECUTE_IF_SET_IN_BITMAP (*expanded, 0, j, bi)
953 varinfo_t v = get_varinfo (j);
954 if (v->head != j)
955 continue;
956 for (v = vi_next (v); v != NULL; v = vi_next (v))
957 bitmap_set_bit (*expanded, v->id);
960 /* And finally set the rest of the bits from SET. */
961 bitmap_ior_into (*expanded, set);
963 return *expanded;
966 /* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
967 process. */
969 static bool
970 set_union_with_increment (bitmap to, bitmap delta, HOST_WIDE_INT inc,
971 bitmap *expanded_delta)
973 bool changed = false;
974 bitmap_iterator bi;
975 unsigned int i;
977 /* If the solution of DELTA contains anything it is good enough to transfer
978 this to TO. */
979 if (bitmap_bit_p (delta, anything_id))
980 return bitmap_set_bit (to, anything_id);
982 /* If the offset is unknown we have to expand the solution to
983 all subfields. */
984 if (inc == UNKNOWN_OFFSET)
986 delta = solution_set_expand (delta, expanded_delta);
987 changed |= bitmap_ior_into (to, delta);
988 return changed;
991 /* For non-zero offset union the offsetted solution into the destination. */
992 EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
994 varinfo_t vi = get_varinfo (i);
996 /* If this is a variable with just one field just set its bit
997 in the result. */
998 if (vi->is_artificial_var
999 || vi->is_unknown_size_var
1000 || vi->is_full_var)
1001 changed |= bitmap_set_bit (to, i);
1002 else
1004 HOST_WIDE_INT fieldoffset = vi->offset + inc;
1005 unsigned HOST_WIDE_INT size = vi->size;
1007 /* If the offset makes the pointer point to before the
1008 variable use offset zero for the field lookup. */
1009 if (fieldoffset < 0)
1010 vi = get_varinfo (vi->head);
1011 else
1012 vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
1016 changed |= bitmap_set_bit (to, vi->id);
1017 if (vi->is_full_var
1018 || vi->next == 0)
1019 break;
1021 /* We have to include all fields that overlap the current field
1022 shifted by inc. */
1023 vi = vi_next (vi);
1025 while (vi->offset < fieldoffset + size);
1029 return changed;
1032 /* Insert constraint C into the list of complex constraints for graph
1033 node VAR. */
1035 static void
1036 insert_into_complex (constraint_graph_t graph,
1037 unsigned int var, constraint_t c)
1039 vec<constraint_t> complex = graph->complex[var];
1040 unsigned int place = complex.lower_bound (c, constraint_less);
1042 /* Only insert constraints that do not already exist. */
1043 if (place >= complex.length ()
1044 || !constraint_equal (*c, *complex[place]))
1045 graph->complex[var].safe_insert (place, c);
1049 /* Condense two variable nodes into a single variable node, by moving
1050 all associated info from FROM to TO. Returns true if TO node's
1051 constraint set changes after the merge. */
1053 static bool
1054 merge_node_constraints (constraint_graph_t graph, unsigned int to,
1055 unsigned int from)
1057 unsigned int i;
1058 constraint_t c;
1059 bool any_change = false;
1061 gcc_checking_assert (find (from) == to);
1063 /* Move all complex constraints from src node into to node */
1064 FOR_EACH_VEC_ELT (graph->complex[from], i, c)
      /* In complex constraints for node FROM, we may have either
         a = *FROM or *FROM = a, or an offsetted constraint which is
         always added to the rhs node's constraints.  */
1070 if (c->rhs.type == DEREF)
1071 c->rhs.var = to;
1072 else if (c->lhs.type == DEREF)
1073 c->lhs.var = to;
1074 else
1075 c->rhs.var = to;
1078 any_change = constraint_set_union (&graph->complex[to],
1079 &graph->complex[from]);
1080 graph->complex[from].release ();
1081 return any_change;
1085 /* Remove edges involving NODE from GRAPH. */
1087 static void
1088 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1090 if (graph->succs[node])
1091 BITMAP_FREE (graph->succs[node]);
1094 /* Merge GRAPH nodes FROM and TO into node TO. */
1096 static void
1097 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1098 unsigned int from)
1100 if (graph->indirect_cycles[from] != -1)
1102 /* If we have indirect cycles with the from node, and we have
1103 none on the to node, the to node has indirect cycles from the
1104 from node now that they are unified.
1105 If indirect cycles exist on both, unify the nodes that they
1106 are in a cycle with, since we know they are in a cycle with
1107 each other. */
1108 if (graph->indirect_cycles[to] == -1)
1109 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1112 /* Merge all the successor edges. */
1113 if (graph->succs[from])
1115 if (!graph->succs[to])
1116 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1117 bitmap_ior_into (graph->succs[to],
1118 graph->succs[from]);
1121 clear_edges_for_node (graph, from);
/* Add an implicit predecessor edge to GRAPH, going from TO to FROM, if
   it doesn't exist in the graph already.  */
1128 static void
1129 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1130 unsigned int from)
1132 if (to == from)
1133 return;
1135 if (!graph->implicit_preds[to])
1136 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1138 if (bitmap_set_bit (graph->implicit_preds[to], from))
1139 stats.num_implicit_edges++;
/* Add a predecessor graph edge to GRAPH, going from TO to FROM, if
   it doesn't exist in the graph already.  */
1146 static void
1147 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1148 unsigned int from)
1150 if (!graph->preds[to])
1151 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1152 bitmap_set_bit (graph->preds[to], from);
1155 /* Add a graph edge to GRAPH, going from FROM to TO if
1156 it doesn't exist in the graph already.
1157 Return false if the edge already existed, true otherwise. */
1159 static bool
1160 add_graph_edge (constraint_graph_t graph, unsigned int to,
1161 unsigned int from)
1163 if (to == from)
1165 return false;
1167 else
1169 bool r = false;
1171 if (!graph->succs[from])
1172 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1173 if (bitmap_set_bit (graph->succs[from], to))
1175 r = true;
1176 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1177 stats.num_edges++;
1179 return r;
1184 /* Initialize the constraint graph structure to contain SIZE nodes. */
1186 static void
1187 init_graph (unsigned int size)
1189 unsigned int j;
1191 graph = XCNEW (struct constraint_graph);
1192 graph->size = size;
1193 graph->succs = XCNEWVEC (bitmap, graph->size);
1194 graph->indirect_cycles = XNEWVEC (int, graph->size);
1195 graph->rep = XNEWVEC (unsigned int, graph->size);
1196 /* ??? Macros do not support template types with multiple arguments,
1197 so we use a typedef to work around it. */
1198 typedef vec<constraint_t> vec_constraint_t_heap;
1199 graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1200 graph->pe = XCNEWVEC (unsigned int, graph->size);
1201 graph->pe_rep = XNEWVEC (int, graph->size);
1203 for (j = 0; j < graph->size; j++)
1205 graph->rep[j] = j;
1206 graph->pe_rep[j] = -1;
1207 graph->indirect_cycles[j] = -1;
1211 /* Build the constraint graph, adding only predecessor edges right now. */
1213 static void
1214 build_pred_graph (void)
1216 int i;
1217 constraint_t c;
1218 unsigned int j;
1220 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1221 graph->preds = XCNEWVEC (bitmap, graph->size);
1222 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1223 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1224 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1225 graph->points_to = XCNEWVEC (bitmap, graph->size);
1226 graph->eq_rep = XNEWVEC (int, graph->size);
1227 graph->direct_nodes = sbitmap_alloc (graph->size);
1228 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1229 bitmap_clear (graph->direct_nodes);
1231 for (j = 1; j < FIRST_REF_NODE; j++)
1233 if (!get_varinfo (j)->is_special_var)
1234 bitmap_set_bit (graph->direct_nodes, j);
1237 for (j = 0; j < graph->size; j++)
1238 graph->eq_rep[j] = -1;
1240 for (j = 0; j < varmap.length (); j++)
1241 graph->indirect_cycles[j] = -1;
1243 FOR_EACH_VEC_ELT (constraints, i, c)
1245 struct constraint_expr lhs = c->lhs;
1246 struct constraint_expr rhs = c->rhs;
1247 unsigned int lhsvar = lhs.var;
1248 unsigned int rhsvar = rhs.var;
1250 if (lhs.type == DEREF)
1252 /* *x = y. */
1253 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1254 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1256 else if (rhs.type == DEREF)
1258 /* x = *y */
1259 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1260 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1261 else
1262 bitmap_clear_bit (graph->direct_nodes, lhsvar);
1264 else if (rhs.type == ADDRESSOF)
1266 varinfo_t v;
1268 /* x = &y */
1269 if (graph->points_to[lhsvar] == NULL)
1270 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1271 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1273 if (graph->pointed_by[rhsvar] == NULL)
1274 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1275 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1277 /* Implicitly, *x = y */
1278 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1280 /* All related variables are no longer direct nodes. */
1281 bitmap_clear_bit (graph->direct_nodes, rhsvar);
1282 v = get_varinfo (rhsvar);
1283 if (!v->is_full_var)
1285 v = get_varinfo (v->head);
1288 bitmap_clear_bit (graph->direct_nodes, v->id);
1289 v = vi_next (v);
1291 while (v != NULL);
1293 bitmap_set_bit (graph->address_taken, rhsvar);
1295 else if (lhsvar > anything_id
1296 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1298 /* x = y */
1299 add_pred_graph_edge (graph, lhsvar, rhsvar);
1300 /* Implicitly, *x = *y */
1301 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1302 FIRST_REF_NODE + rhsvar);
1304 else if (lhs.offset != 0 || rhs.offset != 0)
1306 if (rhs.offset != 0)
1307 bitmap_clear_bit (graph->direct_nodes, lhs.var);
1308 else if (lhs.offset != 0)
1309 bitmap_clear_bit (graph->direct_nodes, rhs.var);
1314 /* Build the constraint graph, adding successor edges. */
1316 static void
1317 build_succ_graph (void)
1319 unsigned i, t;
1320 constraint_t c;
1322 FOR_EACH_VEC_ELT (constraints, i, c)
1324 struct constraint_expr lhs;
1325 struct constraint_expr rhs;
1326 unsigned int lhsvar;
1327 unsigned int rhsvar;
1329 if (!c)
1330 continue;
1332 lhs = c->lhs;
1333 rhs = c->rhs;
1334 lhsvar = find (lhs.var);
1335 rhsvar = find (rhs.var);
1337 if (lhs.type == DEREF)
1339 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1340 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1342 else if (rhs.type == DEREF)
1344 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1345 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1347 else if (rhs.type == ADDRESSOF)
1349 /* x = &y */
1350 gcc_checking_assert (find (rhs.var) == rhs.var);
1351 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1353 else if (lhsvar > anything_id
1354 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1356 add_graph_edge (graph, lhsvar, rhsvar);
1360 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1361 receive pointers. */
1362 t = find (storedanything_id);
1363 for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1365 if (!bitmap_bit_p (graph->direct_nodes, i)
1366 && get_varinfo (i)->may_have_pointers)
1367 add_graph_edge (graph, find (i), t);
1370 /* Everything stored to ANYTHING also potentially escapes. */
1371 add_graph_edge (graph, find (escaped_id), t);
1375 /* Changed variables on the last iteration. */
1376 static bitmap changed;
1378 /* Strongly Connected Component visitation info. */
1380 struct scc_info
1382 scc_info (size_t size);
1383 ~scc_info ();
1385 auto_sbitmap visited;
1386 auto_sbitmap deleted;
1387 unsigned int *dfs;
1388 unsigned int *node_mapping;
1389 int current_index;
1390 auto_vec<unsigned> scc_stack;
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of the
   current graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */
1405 static void
1406 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1408 unsigned int i;
1409 bitmap_iterator bi;
1410 unsigned int my_dfs;
1412 bitmap_set_bit (si->visited, n);
1413 si->dfs[n] = si->current_index ++;
1414 my_dfs = si->dfs[n];
1416 /* Visit all the successors. */
1417 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1419 unsigned int w;
1421 if (i > LAST_REF_NODE)
1422 break;
1424 w = find (i);
1425 if (bitmap_bit_p (si->deleted, w))
1426 continue;
1428 if (!bitmap_bit_p (si->visited, w))
1429 scc_visit (graph, si, w);
1431 unsigned int t = find (w);
1432 gcc_checking_assert (find (n) == n);
1433 if (si->dfs[t] < si->dfs[n])
1434 si->dfs[n] = si->dfs[t];
1437 /* See if any components have been identified. */
1438 if (si->dfs[n] == my_dfs)
1440 if (si->scc_stack.length () > 0
1441 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1443 bitmap scc = BITMAP_ALLOC (NULL);
1444 unsigned int lowest_node;
1445 bitmap_iterator bi;
1447 bitmap_set_bit (scc, n);
1449 while (si->scc_stack.length () != 0
1450 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1452 unsigned int w = si->scc_stack.pop ();
1454 bitmap_set_bit (scc, w);
1457 lowest_node = bitmap_first_set_bit (scc);
1458 gcc_assert (lowest_node < FIRST_REF_NODE);
1460 /* Collapse the SCC nodes into a single node, and mark the
1461 indirect cycles. */
1462 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1464 if (i < FIRST_REF_NODE)
1466 if (unite (lowest_node, i))
1467 unify_nodes (graph, lowest_node, i, false);
1469 else
1471 unite (lowest_node, i);
1472 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1476 bitmap_set_bit (si->deleted, n);
1478 else
1479 si->scc_stack.safe_push (n);
1482 /* Unify node FROM into node TO, updating the changed count if
1483 necessary when UPDATE_CHANGED is true. */
1485 static void
1486 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1487 bool update_changed)
1489 gcc_checking_assert (to != from && find (to) == to);
1491 if (dump_file && (dump_flags & TDF_DETAILS))
1492 fprintf (dump_file, "Unifying %s to %s\n",
1493 get_varinfo (from)->name,
1494 get_varinfo (to)->name);
1496 if (update_changed)
1497 stats.unified_vars_dynamic++;
1498 else
1499 stats.unified_vars_static++;
1501 merge_graph_nodes (graph, to, from);
1502 if (merge_node_constraints (graph, to, from))
1504 if (update_changed)
1505 bitmap_set_bit (changed, to);
1508 /* Mark TO as changed if FROM was changed. If TO was already marked
1509 as changed, decrease the changed count. */
1511 if (update_changed
1512 && bitmap_clear_bit (changed, from))
1513 bitmap_set_bit (changed, to);
1514 varinfo_t fromvi = get_varinfo (from);
1515 if (fromvi->solution)
1517 /* If the solution changes because of the merging, we need to mark
1518 the variable as changed. */
1519 varinfo_t tovi = get_varinfo (to);
1520 if (bitmap_ior_into (tovi->solution, fromvi->solution))
1522 if (update_changed)
1523 bitmap_set_bit (changed, to);
1526 BITMAP_FREE (fromvi->solution);
1527 if (fromvi->oldsolution)
1528 BITMAP_FREE (fromvi->oldsolution);
1530 if (stats.iterations > 0
1531 && tovi->oldsolution)
1532 BITMAP_FREE (tovi->oldsolution);
1534 if (graph->succs[to])
1535 bitmap_clear_bit (graph->succs[to], to);
1538 /* Information needed to compute the topological ordering of a graph. */
1540 struct topo_info
1542 /* sbitmap of visited nodes. */
1543 sbitmap visited;
1544 /* Array that stores the topological order of the graph, *in
1545 reverse*. */
1546 vec<unsigned> topo_order;
1550 /* Initialize and return a topological info structure. */
1552 static struct topo_info *
1553 init_topo_info (void)
1555 size_t size = graph->size;
1556 struct topo_info *ti = XNEW (struct topo_info);
1557 ti->visited = sbitmap_alloc (size);
1558 bitmap_clear (ti->visited);
1559 ti->topo_order.create (1);
1560 return ti;
1564 /* Free the topological sort info pointed to by TI. */
1566 static void
1567 free_topo_info (struct topo_info *ti)
1569 sbitmap_free (ti->visited);
1570 ti->topo_order.release ();
1571 free (ti);
1574 /* Visit the graph in topological order, and store the order in the
1575 topo_info structure. */
1577 static void
1578 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1579 unsigned int n)
1581 bitmap_iterator bi;
1582 unsigned int j;
1584 bitmap_set_bit (ti->visited, n);
1586 if (graph->succs[n])
1587 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1589 if (!bitmap_bit_p (ti->visited, j))
1590 topo_visit (graph, ti, j);
1593 ti->topo_order.safe_push (n);
1596 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1597 starting solution for y. */
1599 static void
1600 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1601 bitmap delta, bitmap *expanded_delta)
1603 unsigned int lhs = c->lhs.var;
1604 bool flag = false;
1605 bitmap sol = get_varinfo (lhs)->solution;
1606 unsigned int j;
1607 bitmap_iterator bi;
1608 HOST_WIDE_INT roffset = c->rhs.offset;
1610 /* Our IL does not allow this. */
1611 gcc_checking_assert (c->lhs.offset == 0);
1613 /* If the solution of Y contains anything it is good enough to transfer
1614 this to the LHS. */
1615 if (bitmap_bit_p (delta, anything_id))
1617 flag |= bitmap_set_bit (sol, anything_id);
1618 goto done;
  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
1624 if (roffset == UNKNOWN_OFFSET)
1626 delta = solution_set_expand (delta, expanded_delta);
1627 /* No further offset processing is necessary. */
1628 roffset = 0;
1631 /* For each variable j in delta (Sol(y)), add
1632 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1633 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1635 varinfo_t v = get_varinfo (j);
1636 HOST_WIDE_INT fieldoffset = v->offset + roffset;
1637 unsigned HOST_WIDE_INT size = v->size;
1638 unsigned int t;
1640 if (v->is_full_var)
1642 else if (roffset != 0)
1644 if (fieldoffset < 0)
1645 v = get_varinfo (v->head);
1646 else
1647 v = first_or_preceding_vi_for_offset (v, fieldoffset);
1650 /* We have to include all fields that overlap the current field
1651 shifted by roffset. */
1654 t = find (v->id);
1656 /* Adding edges from the special vars is pointless.
1657 They don't have sets that can change. */
1658 if (get_varinfo (t)->is_special_var)
1659 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1660 /* Merging the solution from ESCAPED needlessly increases
1661 the set. Use ESCAPED as representative instead. */
1662 else if (v->id == escaped_id)
1663 flag |= bitmap_set_bit (sol, escaped_id);
1664 else if (v->may_have_pointers
1665 && add_graph_edge (graph, lhs, t))
1666 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1668 if (v->is_full_var
1669 || v->next == 0)
1670 break;
1672 v = vi_next (v);
1674 while (v->offset < fieldoffset + size);
1677 done:
1678 /* If the LHS solution changed, mark the var as changed. */
1679 if (flag)
1681 get_varinfo (lhs)->solution = sol;
1682 bitmap_set_bit (changed, lhs);
1686 /* Process a constraint C that represents *(x + off) = y using DELTA
1687 as the starting solution for x. */
1689 static void
1690 do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
1692 unsigned int rhs = c->rhs.var;
1693 bitmap sol = get_varinfo (rhs)->solution;
1694 unsigned int j;
1695 bitmap_iterator bi;
1696 HOST_WIDE_INT loff = c->lhs.offset;
1697 bool escaped_p = false;
1699 /* Our IL does not allow this. */
1700 gcc_checking_assert (c->rhs.offset == 0);
1702 /* If the solution of y contains ANYTHING simply use the ANYTHING
1703 solution. This avoids needlessly increasing the points-to sets. */
1704 if (bitmap_bit_p (sol, anything_id))
1705 sol = get_varinfo (find (anything_id))->solution;
1707 /* If the solution for x contains ANYTHING we have to merge the
1708 solution of y into all pointer variables which we do via
1709 STOREDANYTHING. */
1710 if (bitmap_bit_p (delta, anything_id))
1712 unsigned t = find (storedanything_id);
1713 if (add_graph_edge (graph, t, rhs))
1715 if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1716 bitmap_set_bit (changed, t);
1718 return;
  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
1724 if (loff == UNKNOWN_OFFSET)
1726 delta = solution_set_expand (delta, expanded_delta);
1727 loff = 0;
1730 /* For each member j of delta (Sol(x)), add an edge from y to j and
1731 union Sol(y) into Sol(j) */
1732 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1734 varinfo_t v = get_varinfo (j);
1735 unsigned int t;
1736 HOST_WIDE_INT fieldoffset = v->offset + loff;
1737 unsigned HOST_WIDE_INT size = v->size;
1739 if (v->is_full_var)
1741 else if (loff != 0)
1743 if (fieldoffset < 0)
1744 v = get_varinfo (v->head);
1745 else
1746 v = first_or_preceding_vi_for_offset (v, fieldoffset);
1749 /* We have to include all fields that overlap the current field
1750 shifted by loff. */
1753 if (v->may_have_pointers)
1755 /* If v is a global variable then this is an escape point. */
1756 if (v->is_global_var
1757 && !escaped_p)
1759 t = find (escaped_id);
1760 if (add_graph_edge (graph, t, rhs)
1761 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1762 bitmap_set_bit (changed, t);
1763 /* Enough to let rhs escape once. */
1764 escaped_p = true;
1767 if (v->is_special_var)
1768 break;
1770 t = find (v->id);
1771 if (add_graph_edge (graph, t, rhs)
1772 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1773 bitmap_set_bit (changed, t);
1776 if (v->is_full_var
1777 || v->next == 0)
1778 break;
1780 v = vi_next (v);
1782 while (v->offset < fieldoffset + size);
/* Handle a non-simple (simple meaning it requires no iteration)
   constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets
   involved).  */
1789 static void
1790 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
1791 bitmap *expanded_delta)
1793 if (c->lhs.type == DEREF)
1795 if (c->rhs.type == ADDRESSOF)
1797 gcc_unreachable ();
1799 else
1801 /* *x = y */
1802 do_ds_constraint (c, delta, expanded_delta);
1805 else if (c->rhs.type == DEREF)
1807 /* x = *y */
1808 if (!(get_varinfo (c->lhs.var)->is_special_var))
1809 do_sd_constraint (graph, c, delta, expanded_delta);
1811 else
1813 bitmap tmp;
1814 bool flag = false;
1816 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
1817 && c->rhs.offset != 0 && c->lhs.offset == 0);
1818 tmp = get_varinfo (c->lhs.var)->solution;
1820 flag = set_union_with_increment (tmp, delta, c->rhs.offset,
1821 expanded_delta);
1823 if (flag)
1824 bitmap_set_bit (changed, c->lhs.var);
1828 /* Initialize and return a new SCC info structure. */
1830 scc_info::scc_info (size_t size) :
1831 visited (size), deleted (size), current_index (0), scc_stack (1)
1833 bitmap_clear (visited);
1834 bitmap_clear (deleted);
1835 node_mapping = XNEWVEC (unsigned int, size);
1836 dfs = XCNEWVEC (unsigned int, size);
1838 for (size_t i = 0; i < size; i++)
1839 node_mapping[i] = i;
1842 /* Free an SCC info structure pointed to by SI */
1844 scc_info::~scc_info ()
1846 free (node_mapping);
1847 free (dfs);
1851 /* Find indirect cycles in GRAPH that occur, using strongly connected
1852 components, and note them in the indirect cycles map.
1854 This technique comes from Ben Hardekopf and Calvin Lin,
1855 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1856 Lines of Code", submitted to PLDI 2007. */
1858 static void
1859 find_indirect_cycles (constraint_graph_t graph)
1861 unsigned int i;
1862 unsigned int size = graph->size;
1863 scc_info si (size);
1865 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1866 if (!bitmap_bit_p (si.visited, i) && find (i) == i)
1867 scc_visit (graph, &si, i);
1870 /* Compute a topological ordering for GRAPH, and store the result in the
1871 topo_info structure TI. */
1873 static void
1874 compute_topo_order (constraint_graph_t graph,
1875 struct topo_info *ti)
1877 unsigned int i;
1878 unsigned int size = graph->size;
1880 for (i = 0; i != size; ++i)
1881 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1882 topo_visit (graph, ti, i);
/* Structure used for hash value numbering of pointer equivalence
   classes.  */
1888 typedef struct equiv_class_label
1890 hashval_t hashcode;
1891 unsigned int equivalence_class;
1892 bitmap labels;
1893 } *equiv_class_label_t;
1894 typedef const struct equiv_class_label *const_equiv_class_label_t;
1896 /* Equiv_class_label hashtable helpers. */
1898 struct equiv_class_hasher : free_ptr_hash <equiv_class_label>
1900 static inline hashval_t hash (const equiv_class_label *);
1901 static inline bool equal (const equiv_class_label *,
1902 const equiv_class_label *);
/* Hash function for an equiv_class_label_t.  */
1907 inline hashval_t
1908 equiv_class_hasher::hash (const equiv_class_label *ecl)
1910 return ecl->hashcode;
1913 /* Equality function for two equiv_class_label_t's. */
1915 inline bool
1916 equiv_class_hasher::equal (const equiv_class_label *eql1,
1917 const equiv_class_label *eql2)
1919 return (eql1->hashcode == eql2->hashcode
1920 && bitmap_equal_p (eql1->labels, eql2->labels));
1923 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1924 classes. */
1925 static hash_table<equiv_class_hasher> *pointer_equiv_class_table;
1927 /* A hashtable for mapping a bitmap of labels->location equivalence
1928 classes. */
1929 static hash_table<equiv_class_hasher> *location_equiv_class_table;
/* Look up an equivalence class in TABLE by the bitmap of LABELS it
   contains, inserting a new entry with an unassigned equivalence
   class if none exists yet.  Returns the entry for LABELS.  */
1935 static equiv_class_label *
1936 equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
1937 bitmap labels)
1939 equiv_class_label **slot;
1940 equiv_class_label ecl;
1942 ecl.labels = labels;
1943 ecl.hashcode = bitmap_hash (labels);
1944 slot = table->find_slot (&ecl, INSERT);
1945 if (!*slot)
1947 *slot = XNEW (struct equiv_class_label);
1948 (*slot)->labels = labels;
1949 (*slot)->hashcode = ecl.hashcode;
1950 (*slot)->equivalence_class = 0;
1953 return *slot;
1956 /* Perform offline variable substitution.
1958 This is a worst case quadratic time way of identifying variables
1959 that must have equivalent points-to sets, including those caused by
1960 static cycles, and single entry subgraphs, in the constraint graph.
1962 The technique is described in "Exploiting Pointer and Location
1963 Equivalence to Optimize Pointer Analysis. In the 14th International
1964 Static Analysis Symposium (SAS), August 2007." It is known as the
1965 "HU" algorithm, and is equivalent to value numbering the collapsed
1966 constraint graph including evaluating unions.
1968 The general method of finding equivalence classes is as follows:
1969 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1970 Initialize all non-REF nodes to be direct nodes.
1971 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1972 variable}
1973 For each constraint containing the dereference, we also do the same
1974 thing.
1976 We then compute SCC's in the graph and unify nodes in the same SCC,
1977 including pts sets.
1979 For each non-collapsed node x:
1980 Visit all unvisited explicit incoming edges.
1981 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1982 where y->x.
1983 Lookup the equivalence class for pts(x).
1984 If we found one, equivalence_class(x) = found class.
1985 Otherwise, equivalence_class(x) = new class, and new_class is
1986 added to the lookup table.
1988 All direct nodes with the same equivalence class can be replaced
1989 with a single representative node.
1990 All unlabeled nodes (label == 0) are not pointers and all edges
1991 involving them can be eliminated.
1992 We perform these optimizations during rewrite_constraints.
1994 In addition to pointer equivalence class finding, we also perform
1995 location equivalence class finding. This is the set of variables
1996 that always appear together in points-to sets. We use this to
1997 compress the size of the points-to sets. */
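/* A minimal illustration (made-up variables, not from any test case):
   given the constraints

     a = &x
     b = a
     c = a
     d = b

   there are no cycles to collapse, and value numbering gives b, c and d
   the same pointer equivalence label as a, since each of them receives
   exactly a's points-to bits and nothing else.  All four direct nodes
   can thus be represented by a single node before solving, while a
   variable that never receives any pointer bits keeps label 0 and has
   its edges removed. */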
1999 /* Current maximum pointer equivalence class id. */
2000 static int pointer_equiv_class;
2002 /* Current maximum location equivalence class id. */
2003 static int location_equiv_class;
2005 /* Recursive routine to find strongly connected components in GRAPH,
2006 and label its nodes with DFS numbers. */
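/* This follows the usual lowlink/stack SCC scheme: si->dfs[n] doubles
   as the lowlink value, nodes are pushed on si->scc_stack, and when
   si->dfs[n] is still n's own DFS number after all (implicit)
   predecessor edges have been visited, everything above n on the stack
   is mapped onto n and the pred and points-to bitmaps are merged
   into n. */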
2008 static void
2009 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2011 unsigned int i;
2012 bitmap_iterator bi;
2013 unsigned int my_dfs;
2015 gcc_checking_assert (si->node_mapping[n] == n);
2016 bitmap_set_bit (si->visited, n);
2017 si->dfs[n] = si->current_index ++;
2018 my_dfs = si->dfs[n];
2020 /* Visit all the successors. */
2021 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2023 unsigned int w = si->node_mapping[i];
2025 if (bitmap_bit_p (si->deleted, w))
2026 continue;
2028 if (!bitmap_bit_p (si->visited, w))
2029 condense_visit (graph, si, w);
2031 unsigned int t = si->node_mapping[w];
2032 gcc_checking_assert (si->node_mapping[n] == n);
2033 if (si->dfs[t] < si->dfs[n])
2034 si->dfs[n] = si->dfs[t];
2037 /* Visit all the implicit predecessors. */
2038 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2040 unsigned int w = si->node_mapping[i];
2042 if (bitmap_bit_p (si->deleted, w))
2043 continue;
2045 if (!bitmap_bit_p (si->visited, w))
2046 condense_visit (graph, si, w);
2048 unsigned int t = si->node_mapping[w];
2049 gcc_assert (si->node_mapping[n] == n);
2050 if (si->dfs[t] < si->dfs[n])
2051 si->dfs[n] = si->dfs[t];
2054 /* See if any components have been identified. */
2055 if (si->dfs[n] == my_dfs)
2057 while (si->scc_stack.length () != 0
2058 && si->dfs[si->scc_stack.last ()] >= my_dfs)
2060 unsigned int w = si->scc_stack.pop ();
2061 si->node_mapping[w] = n;
2063 if (!bitmap_bit_p (graph->direct_nodes, w))
2064 bitmap_clear_bit (graph->direct_nodes, n);
2066 /* Unify our nodes. */
2067 if (graph->preds[w])
2069 if (!graph->preds[n])
2070 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2071 bitmap_ior_into (graph->preds[n], graph->preds[w]);
2073 if (graph->implicit_preds[w])
2075 if (!graph->implicit_preds[n])
2076 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2077 bitmap_ior_into (graph->implicit_preds[n],
2078 graph->implicit_preds[w]);
2080 if (graph->points_to[w])
2082 if (!graph->points_to[n])
2083 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2084 bitmap_ior_into (graph->points_to[n],
2085 graph->points_to[w]);
2088 bitmap_set_bit (si->deleted, n);
2090 else
2091 si->scc_stack.safe_push (n);
2094 /* Label pointer equivalences.
2096 This performs a value numbering of the constraint graph to
2097 discover which variables will always have the same points-to sets
2098 under the current set of constraints.
2100 The way it value numbers is to store the set of points-to bits
2101 generated by the constraints and graph edges. This is just used as a
2102 hash and equality comparison. The *actual set of points-to bits* is
2103 completely irrelevant, in that we don't care about being able to
2104 extract them later.
2106 The equality values (currently bitmaps) just have to satisfy a few
2107 constraints, the main ones being:
2108 1. The combining operation must be order independent.
2109 2. The end result of a given set of operations must be unique iff the
2110 combination of input values is unique.
2111 3. Hashable. */
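/* Stated differently (an informal justification, not additional
   machinery): bitmap union is commutative and associative, so the
   order in which incoming edges are processed cannot change the final
   set (1); address-taken sources and dereferences each contribute a
   distinct fresh bit, so two nodes end up with equal bitmaps only when
   they were fed the same combination of inputs (2); and the bitmaps
   are hashed with bitmap_hash and compared with bitmap_equal_p by the
   equivalence class table (3). */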
2113 static void
2114 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2116 unsigned int i, first_pred;
2117 bitmap_iterator bi;
2119 bitmap_set_bit (si->visited, n);
2121 /* Label and union our incoming edges' points-to sets. */
2122 first_pred = -1U;
2123 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2125 unsigned int w = si->node_mapping[i];
2126 if (!bitmap_bit_p (si->visited, w))
2127 label_visit (graph, si, w);
2129 /* Skip unused edges */
2130 if (w == n || graph->pointer_label[w] == 0)
2131 continue;
2133 if (graph->points_to[w])
2135 if (!graph->points_to[n])
2137 if (first_pred == -1U)
2138 first_pred = w;
2139 else
2141 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2142 bitmap_ior (graph->points_to[n],
2143 graph->points_to[first_pred],
2144 graph->points_to[w]);
2147 else
2148 bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
2152 /* Indirect nodes get fresh variables and a new pointer equiv class. */
2153 if (!bitmap_bit_p (graph->direct_nodes, n))
2155 if (!graph->points_to[n])
2157 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2158 if (first_pred != -1U)
2159 bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
2161 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2162 graph->pointer_label[n] = pointer_equiv_class++;
2163 equiv_class_label_t ecl;
2164 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2165 graph->points_to[n]);
2166 ecl->equivalence_class = graph->pointer_label[n];
2167 return;
2170 /* If there was only a single non-empty predecessor the pointer equiv
2171 class is the same. */
2172 if (!graph->points_to[n])
2174 if (first_pred != -1U)
2176 graph->pointer_label[n] = graph->pointer_label[first_pred];
2177 graph->points_to[n] = graph->points_to[first_pred];
2179 return;
2182 if (!bitmap_empty_p (graph->points_to[n]))
2184 equiv_class_label_t ecl;
2185 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2186 graph->points_to[n]);
2187 if (ecl->equivalence_class == 0)
2188 ecl->equivalence_class = pointer_equiv_class++;
2189 else
2191 BITMAP_FREE (graph->points_to[n]);
2192 graph->points_to[n] = ecl->labels;
2194 graph->pointer_label[n] = ecl->equivalence_class;
2198 /* Print the pred graph in dot format. */
2200 static void
2201 dump_pred_graph (struct scc_info *si, FILE *file)
2203 unsigned int i;
2205 /* Only print the graph if it has already been initialized: */
2206 if (!graph)
2207 return;
2209 /* Prints the header of the dot file: */
2210 fprintf (file, "strict digraph {\n");
2211 fprintf (file, " node [\n shape = box\n ]\n");
2212 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
2213 fprintf (file, "\n // List of nodes and complex constraints in "
2214 "the constraint graph:\n");
2216 /* The next lines print the nodes in the graph together with the
2217 complex constraints attached to them. */
2218 for (i = 1; i < graph->size; i++)
2220 if (i == FIRST_REF_NODE)
2221 continue;
2222 if (si->node_mapping[i] != i)
2223 continue;
2224 if (i < FIRST_REF_NODE)
2225 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2226 else
2227 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2228 if (graph->points_to[i]
2229 && !bitmap_empty_p (graph->points_to[i]))
2231 if (i < FIRST_REF_NODE)
2232 fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2233 else
2234 fprintf (file, "[label=\"*%s = {",
2235 get_varinfo (i - FIRST_REF_NODE)->name);
2236 unsigned j;
2237 bitmap_iterator bi;
2238 EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2239 fprintf (file, " %d", j);
2240 fprintf (file, " }\"]");
2242 fprintf (file, ";\n");
2245 /* Go over the edges. */
2246 fprintf (file, "\n // Edges in the constraint graph:\n");
2247 for (i = 1; i < graph->size; i++)
2249 unsigned j;
2250 bitmap_iterator bi;
2251 if (si->node_mapping[i] != i)
2252 continue;
2253 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2255 unsigned from = si->node_mapping[j];
2256 if (from < FIRST_REF_NODE)
2257 fprintf (file, "\"%s\"", get_varinfo (from)->name);
2258 else
2259 fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2260 fprintf (file, " -> ");
2261 if (i < FIRST_REF_NODE)
2262 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2263 else
2264 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2265 fprintf (file, ";\n");
2269 /* Prints the tail of the dot file. */
2270 fprintf (file, "}\n");
2273 /* Perform offline variable substitution, discovering equivalence
2274 classes, and eliminating non-pointer variables. */
2276 static struct scc_info *
2277 perform_var_substitution (constraint_graph_t graph)
2279 unsigned int i;
2280 unsigned int size = graph->size;
2281 scc_info *si = new scc_info (size);
2283 bitmap_obstack_initialize (&iteration_obstack);
2284 pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
2285 location_equiv_class_table
2286 = new hash_table<equiv_class_hasher> (511);
2287 pointer_equiv_class = 1;
2288 location_equiv_class = 1;
2290 /* Condense the nodes, which means to find SCC's, count incoming
2291 predecessors, and unite nodes in SCC's. */
2292 for (i = 1; i < FIRST_REF_NODE; i++)
2293 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2294 condense_visit (graph, si, si->node_mapping[i]);
2296 if (dump_file && (dump_flags & TDF_GRAPH))
2298 fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
2299 "in dot format:\n");
2300 dump_pred_graph (si, dump_file);
2301 fprintf (dump_file, "\n\n");
2304 bitmap_clear (si->visited);
2305 /* Actually label the nodes for pointer equivalences. */
2306 for (i = 1; i < FIRST_REF_NODE; i++)
2307 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2308 label_visit (graph, si, si->node_mapping[i]);
2310 /* Calculate location equivalence labels. */
2311 for (i = 1; i < FIRST_REF_NODE; i++)
2313 bitmap pointed_by;
2314 bitmap_iterator bi;
2315 unsigned int j;
2317 if (!graph->pointed_by[i])
2318 continue;
2319 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2321 /* Translate the pointed-by mapping for pointer equivalence
2322 labels. */
2323 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2325 bitmap_set_bit (pointed_by,
2326 graph->pointer_label[si->node_mapping[j]]);
2328 /* The original pointed_by is now dead. */
2329 BITMAP_FREE (graph->pointed_by[i]);
2331 /* Look up the location equivalence label if one exists, or make
2332 one otherwise. */
2333 equiv_class_label_t ecl;
2334 ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
2335 if (ecl->equivalence_class == 0)
2336 ecl->equivalence_class = location_equiv_class++;
2337 else
2339 if (dump_file && (dump_flags & TDF_DETAILS))
2340 fprintf (dump_file, "Found location equivalence for node %s\n",
2341 get_varinfo (i)->name);
2342 BITMAP_FREE (pointed_by);
2344 graph->loc_label[i] = ecl->equivalence_class;
2348 if (dump_file && (dump_flags & TDF_DETAILS))
2349 for (i = 1; i < FIRST_REF_NODE; i++)
2351 unsigned j = si->node_mapping[i];
2352 if (j != i)
2354 fprintf (dump_file, "%s node id %d ",
2355 bitmap_bit_p (graph->direct_nodes, i)
2356 ? "Direct" : "Indirect", i);
2357 if (i < FIRST_REF_NODE)
2358 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2359 else
2360 fprintf (dump_file, "\"*%s\"",
2361 get_varinfo (i - FIRST_REF_NODE)->name);
2362 fprintf (dump_file, " mapped to SCC leader node id %d ", j);
2363 if (j < FIRST_REF_NODE)
2364 fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
2365 else
2366 fprintf (dump_file, "\"*%s\"\n",
2367 get_varinfo (j - FIRST_REF_NODE)->name);
2369 else
2371 fprintf (dump_file,
2372 "Equivalence classes for %s node id %d ",
2373 bitmap_bit_p (graph->direct_nodes, i)
2374 ? "direct" : "indirect", i);
2375 if (i < FIRST_REF_NODE)
2376 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2377 else
2378 fprintf (dump_file, "\"*%s\"",
2379 get_varinfo (i - FIRST_REF_NODE)->name);
2380 fprintf (dump_file,
2381 ": pointer %d, location %d\n",
2382 graph->pointer_label[i], graph->loc_label[i]);
2386 /* Quickly eliminate our non-pointer variables. */
2388 for (i = 1; i < FIRST_REF_NODE; i++)
2390 unsigned int node = si->node_mapping[i];
2392 if (graph->pointer_label[node] == 0)
2394 if (dump_file && (dump_flags & TDF_DETAILS))
2395 fprintf (dump_file,
2396 "%s is a non-pointer variable, eliminating edges.\n",
2397 get_varinfo (node)->name);
2398 stats.nonpointer_vars++;
2399 clear_edges_for_node (graph, node);
2403 return si;
2406 /* Free information that was only necessary for variable
2407 substitution. */
2409 static void
2410 free_var_substitution_info (struct scc_info *si)
2412 delete si;
2413 free (graph->pointer_label);
2414 free (graph->loc_label);
2415 free (graph->pointed_by);
2416 free (graph->points_to);
2417 free (graph->eq_rep);
2418 sbitmap_free (graph->direct_nodes);
2419 delete pointer_equiv_class_table;
2420 pointer_equiv_class_table = NULL;
2421 delete location_equiv_class_table;
2422 location_equiv_class_table = NULL;
2423 bitmap_obstack_release (&iteration_obstack);
2426 /* Return an existing node that is equivalent to NODE, which has
2427 equivalence class LABEL, if one exists. Return NODE otherwise. */
2429 static unsigned int
2430 find_equivalent_node (constraint_graph_t graph,
2431 unsigned int node, unsigned int label)
2433 /* If the address version of this variable is unused, we can
2434 substitute it for anything else with the same label.
2435 Otherwise, we know the pointers are equivalent, but not the
2436 locations, and we can unite them later. */
2438 if (!bitmap_bit_p (graph->address_taken, node))
2440 gcc_checking_assert (label < graph->size);
2442 if (graph->eq_rep[label] != -1)
2444 /* Unify the two variables since we know they are equivalent. */
2445 if (unite (graph->eq_rep[label], node))
2446 unify_nodes (graph, graph->eq_rep[label], node, false);
2447 return graph->eq_rep[label];
2449 else
2451 graph->eq_rep[label] = node;
2452 graph->pe_rep[label] = node;
2455 else
2457 gcc_checking_assert (label < graph->size);
2458 graph->pe[node] = label;
2459 if (graph->pe_rep[label] == -1)
2460 graph->pe_rep[label] = node;
2463 return node;
2466 /* Unite pointer equivalent but not location equivalent nodes in
2467 GRAPH. This may only be performed once variable substitution is
2468 finished. */
2470 static void
2471 unite_pointer_equivalences (constraint_graph_t graph)
2473 unsigned int i;
2475 /* Go through the pointer equivalences and unite them to their
2476 representative, if they aren't already. */
2477 for (i = 1; i < FIRST_REF_NODE; i++)
2479 unsigned int label = graph->pe[i];
2480 if (label)
2482 int label_rep = graph->pe_rep[label];
2484 if (label_rep == -1)
2485 continue;
2487 label_rep = find (label_rep);
2488 if (label_rep >= 0 && unite (label_rep, find (i)))
2489 unify_nodes (graph, label_rep, i, false);
2494 /* Move complex constraints to the GRAPH nodes they belong to. */
2496 static void
2497 move_complex_constraints (constraint_graph_t graph)
2499 int i;
2500 constraint_t c;
2502 FOR_EACH_VEC_ELT (constraints, i, c)
2504 if (c)
2506 struct constraint_expr lhs = c->lhs;
2507 struct constraint_expr rhs = c->rhs;
2509 if (lhs.type == DEREF)
2511 insert_into_complex (graph, lhs.var, c);
2513 else if (rhs.type == DEREF)
2515 if (!(get_varinfo (lhs.var)->is_special_var))
2516 insert_into_complex (graph, rhs.var, c);
2518 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2519 && (lhs.offset != 0 || rhs.offset != 0))
2521 insert_into_complex (graph, rhs.var, c);
2528 /* Optimize and rewrite complex constraints while collapsing
2529 equivalent nodes. SI is the SCC_INFO that is the
2530 result of perform_var_substitution. */
2532 static void
2533 rewrite_constraints (constraint_graph_t graph,
2534 struct scc_info *si)
2536 int i;
2537 constraint_t c;
2539 if (flag_checking)
2541 for (unsigned int j = 0; j < graph->size; j++)
2542 gcc_assert (find (j) == j);
2545 FOR_EACH_VEC_ELT (constraints, i, c)
2547 struct constraint_expr lhs = c->lhs;
2548 struct constraint_expr rhs = c->rhs;
2549 unsigned int lhsvar = find (lhs.var);
2550 unsigned int rhsvar = find (rhs.var);
2551 unsigned int lhsnode, rhsnode;
2552 unsigned int lhslabel, rhslabel;
2554 lhsnode = si->node_mapping[lhsvar];
2555 rhsnode = si->node_mapping[rhsvar];
2556 lhslabel = graph->pointer_label[lhsnode];
2557 rhslabel = graph->pointer_label[rhsnode];
2559 /* See if it is really a non-pointer variable, and if so, ignore
2560 the constraint. */
2561 if (lhslabel == 0)
2563 if (dump_file && (dump_flags & TDF_DETAILS))
2566 fprintf (dump_file, "%s is a non-pointer variable, "
2567 "ignoring constraint:",
2568 get_varinfo (lhs.var)->name);
2569 dump_constraint (dump_file, c);
2570 fprintf (dump_file, "\n");
2572 constraints[i] = NULL;
2573 continue;
2576 if (rhslabel == 0)
2578 if (dump_file && (dump_flags & TDF_DETAILS))
2581 fprintf (dump_file, "%s is a non-pointer variable, "
2582 "ignoring constraint:",
2583 get_varinfo (rhs.var)->name);
2584 dump_constraint (dump_file, c);
2585 fprintf (dump_file, "\n");
2587 constraints[i] = NULL;
2588 continue;
2591 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2592 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2593 c->lhs.var = lhsvar;
2594 c->rhs.var = rhsvar;
2598 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2599 part of an SCC, false otherwise. */
2601 static bool
2602 eliminate_indirect_cycles (unsigned int node)
2604 if (graph->indirect_cycles[node] != -1
2605 && !bitmap_empty_p (get_varinfo (node)->solution))
2607 unsigned int i;
2608 auto_vec<unsigned> queue;
2609 int queuepos;
2610 unsigned int to = find (graph->indirect_cycles[node]);
2611 bitmap_iterator bi;
2613 /* We can't touch the solution set and call unify_nodes
2614 at the same time, because unify_nodes is going to do
2615 bitmap unions into it. */
2617 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2619 if (find (i) == i && i != to)
2621 if (unite (to, i))
2622 queue.safe_push (i);
2626 for (queuepos = 0;
2627 queue.iterate (queuepos, &i);
2628 queuepos++)
2630 unify_nodes (graph, to, i, true);
2632 return true;
2634 return false;
2637 /* Solve the constraint graph GRAPH using our worklist solver.
2638 This is based on the PW* family of solvers from the "Efficient Field
2639 Sensitive Pointer Analysis for C" paper.
2640 It works by iterating over all the graph nodes, processing the complex
2641 constraints and propagating the copy constraints, until everything stops
2642 changing. This corresponds to steps 6-8 in the solving list given above. */
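/* One iteration in miniature (illustrative state, not a real trace):
   suppose node n is its own representative, its solution gained the
   new bits {x} since the last visit, it has the complex constraint
   *n = m and a copy edge to k.  The body below first re-runs the
   complex constraint against the delta {x}, then ORs {x} into k's
   solution and, if that changed anything, puts k back on the changed
   set so a later topological pass revisits it. */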
2644 static void
2645 solve_graph (constraint_graph_t graph)
2647 unsigned int size = graph->size;
2648 unsigned int i;
2649 bitmap pts;
2651 changed = BITMAP_ALLOC (NULL);
2653 /* Mark all initial non-collapsed nodes as changed. */
2654 for (i = 1; i < size; i++)
2656 varinfo_t ivi = get_varinfo (i);
2657 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2658 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2659 || graph->complex[i].length () > 0))
2660 bitmap_set_bit (changed, i);
2663 /* Allocate a bitmap to be used to store the changed bits. */
2664 pts = BITMAP_ALLOC (&pta_obstack);
2666 while (!bitmap_empty_p (changed))
2668 unsigned int i;
2669 struct topo_info *ti = init_topo_info ();
2670 stats.iterations++;
2672 bitmap_obstack_initialize (&iteration_obstack);
2674 compute_topo_order (graph, ti);
2676 while (ti->topo_order.length () != 0)
2679 i = ti->topo_order.pop ();
2681 /* If this variable is not a representative, skip it. */
2682 if (find (i) != i)
2683 continue;
2685 /* In certain indirect cycle cases, we may merge this
2686 variable to another. */
2687 if (eliminate_indirect_cycles (i) && find (i) != i)
2688 continue;
2690 /* If the node has changed, we need to process the
2691 complex constraints and outgoing edges again. */
2692 if (bitmap_clear_bit (changed, i))
2694 unsigned int j;
2695 constraint_t c;
2696 bitmap solution;
2697 vec<constraint_t> complex = graph->complex[i];
2698 varinfo_t vi = get_varinfo (i);
2699 bool solution_empty;
2701 /* Compute the changed set of solution bits. If anything
2702 is in the solution just propagate that. */
2703 if (bitmap_bit_p (vi->solution, anything_id))
2705 /* If anything is also in the old solution there is
2706 nothing to do.
2707 ??? But we shouldn't have ended up with "changed" set ... */
2708 if (vi->oldsolution
2709 && bitmap_bit_p (vi->oldsolution, anything_id))
2710 continue;
2711 bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
2713 else if (vi->oldsolution)
2714 bitmap_and_compl (pts, vi->solution, vi->oldsolution);
2715 else
2716 bitmap_copy (pts, vi->solution);
2718 if (bitmap_empty_p (pts))
2719 continue;
2721 if (vi->oldsolution)
2722 bitmap_ior_into (vi->oldsolution, pts);
2723 else
2725 vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
2726 bitmap_copy (vi->oldsolution, pts);
2729 solution = vi->solution;
2730 solution_empty = bitmap_empty_p (solution);
2732 /* Process the complex constraints */
2733 bitmap expanded_pts = NULL;
2734 FOR_EACH_VEC_ELT (complex, j, c)
2736 /* XXX: This is going to unsort the constraints in
2737 some cases, which will occasionally add duplicate
2738 constraints during unification. This does not
2739 affect correctness. */
2740 c->lhs.var = find (c->lhs.var);
2741 c->rhs.var = find (c->rhs.var);
2743 /* The only complex constraint that can change our
2744 solution to non-empty, given an empty solution,
2745 is a constraint where the lhs side is receiving
2746 some set from elsewhere. */
2747 if (!solution_empty || c->lhs.type != DEREF)
2748 do_complex_constraint (graph, c, pts, &expanded_pts);
2750 BITMAP_FREE (expanded_pts);
2752 solution_empty = bitmap_empty_p (solution);
2754 if (!solution_empty)
2756 bitmap_iterator bi;
2757 unsigned eff_escaped_id = find (escaped_id);
2759 /* Propagate solution to all successors. */
2760 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2761 0, j, bi)
2763 bitmap tmp;
2764 bool flag;
2766 unsigned int to = find (j);
2767 tmp = get_varinfo (to)->solution;
2768 flag = false;
2770 /* Don't try to propagate to ourselves. */
2771 if (to == i)
2772 continue;
2774 /* If we propagate from ESCAPED use ESCAPED as
2775 placeholder. */
2776 if (i == eff_escaped_id)
2777 flag = bitmap_set_bit (tmp, escaped_id);
2778 else
2779 flag = bitmap_ior_into (tmp, pts);
2781 if (flag)
2782 bitmap_set_bit (changed, to);
2787 free_topo_info (ti);
2788 bitmap_obstack_release (&iteration_obstack);
2791 BITMAP_FREE (pts);
2792 BITMAP_FREE (changed);
2793 bitmap_obstack_release (&oldpta_obstack);
2796 /* Map from trees to variable infos. */
2797 static hash_map<tree, varinfo_t> *vi_for_tree;
2800 /* Insert VI as the variable info for tree T in the vi_for_tree map. */
2802 static void
2803 insert_vi_for_tree (tree t, varinfo_t vi)
2805 gcc_assert (vi);
2806 gcc_assert (!vi_for_tree->put (t, vi));
2809 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2810 exist in the map, return NULL, otherwise, return the varinfo we found. */
2812 static varinfo_t
2813 lookup_vi_for_tree (tree t)
2815 varinfo_t *slot = vi_for_tree->get (t);
2816 if (slot == NULL)
2817 return NULL;
2819 return *slot;
2822 /* Return a printable name for DECL */
2824 static const char *
2825 alias_get_name (tree decl)
2827 const char *res = NULL;
2828 char *temp;
2829 int num_printed = 0;
2831 if (!dump_file)
2832 return "NULL";
2834 if (TREE_CODE (decl) == SSA_NAME)
2836 res = get_name (decl);
2837 if (res)
2838 num_printed = asprintf (&temp, "%s_%u", res, SSA_NAME_VERSION (decl));
2839 else
2840 num_printed = asprintf (&temp, "_%u", SSA_NAME_VERSION (decl));
2841 if (num_printed > 0)
2843 res = ggc_strdup (temp);
2844 free (temp);
2847 else if (DECL_P (decl))
2849 if (DECL_ASSEMBLER_NAME_SET_P (decl))
2850 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2851 else
2853 res = get_name (decl);
2854 if (!res)
2856 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2857 if (num_printed > 0)
2859 res = ggc_strdup (temp);
2860 free (temp);
2865 if (res != NULL)
2866 return res;
2868 return "NULL";
2871 /* Find the variable id for tree T in the map.
2872 If T doesn't exist in the map, create an entry for it and return it. */
2874 static varinfo_t
2875 get_vi_for_tree (tree t)
2877 varinfo_t *slot = vi_for_tree->get (t);
2878 if (slot == NULL)
2880 unsigned int id = create_variable_info_for (t, alias_get_name (t), false);
2881 return get_varinfo (id);
2884 return *slot;
2887 /* Get a scalar constraint expression for a new temporary variable. */
2889 static struct constraint_expr
2890 new_scalar_tmp_constraint_exp (const char *name, bool add_id)
2892 struct constraint_expr tmp;
2893 varinfo_t vi;
2895 vi = new_var_info (NULL_TREE, name, add_id);
2896 vi->offset = 0;
2897 vi->size = -1;
2898 vi->fullsize = -1;
2899 vi->is_full_var = 1;
2901 tmp.var = vi->id;
2902 tmp.type = SCALAR;
2903 tmp.offset = 0;
2905 return tmp;
2908 /* Get a constraint expression vector from an SSA_VAR_P node.
2909 If address_p is true, the result will have its address taken. */
2911 static void
2912 get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
2914 struct constraint_expr cexpr;
2915 varinfo_t vi;
2917 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2918 gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
2920 /* For parameters, get at the points-to set for the actual parm
2921 decl. */
2922 if (TREE_CODE (t) == SSA_NAME
2923 && SSA_NAME_IS_DEFAULT_DEF (t)
2924 && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2925 || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
2927 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2928 return;
2931 /* For global variables resort to the alias target. */
2932 if (TREE_CODE (t) == VAR_DECL
2933 && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
2935 varpool_node *node = varpool_node::get (t);
2936 if (node && node->alias && node->analyzed)
2938 node = node->ultimate_alias_target ();
2939 /* Canonicalize the PT uid of all aliases to the ultimate target.
2940 ??? Hopefully the set of aliases can't change in a way that
2941 changes the ultimate alias target. */
2942 gcc_assert ((! DECL_PT_UID_SET_P (node->decl)
2943 || DECL_PT_UID (node->decl) == DECL_UID (node->decl))
2944 && (! DECL_PT_UID_SET_P (t)
2945 || DECL_PT_UID (t) == DECL_UID (node->decl)));
2946 DECL_PT_UID (t) = DECL_UID (node->decl);
2947 t = node->decl;
2951 vi = get_vi_for_tree (t);
2952 cexpr.var = vi->id;
2953 cexpr.type = SCALAR;
2954 cexpr.offset = 0;
2956 /* If we are not taking the address of the constraint expr, add all
2957 sub-fields of the variable as well. */
2958 if (!address_p
2959 && !vi->is_full_var)
2961 for (; vi; vi = vi_next (vi))
2963 cexpr.var = vi->id;
2964 results->safe_push (cexpr);
2966 return;
2969 results->safe_push (cexpr);
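/* For example (made-up declaration), if T is a variable of type
   struct { int *x; int *y; } and address_p is false, the loop above
   pushes one SCALAR constraint expression per sub-field, so both
   fields end up in RESULTS; with address_p true only the variable
   itself is pushed and the callers deal with field reachability. */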
2972 /* Process constraint T, performing various simplifications and then
2973 adding it to our list of overall constraints. */
2975 static void
2976 process_constraint (constraint_t t)
2978 struct constraint_expr rhs = t->rhs;
2979 struct constraint_expr lhs = t->lhs;
2981 gcc_assert (rhs.var < varmap.length ());
2982 gcc_assert (lhs.var < varmap.length ());
2984 /* If we didn't get any useful constraint from the lhs we get
2985 &ANYTHING as fallback from get_constraint_for. Deal with
2986 it here by turning it into *ANYTHING. */
2987 if (lhs.type == ADDRESSOF
2988 && lhs.var == anything_id)
2989 lhs.type = DEREF;
2991 /* ADDRESSOF on the lhs is invalid. */
2992 gcc_assert (lhs.type != ADDRESSOF);
2994 /* We shouldn't add constraints from things that cannot have pointers.
2995 It's not completely trivial to avoid in the callers, so do it here. */
2996 if (rhs.type != ADDRESSOF
2997 && !get_varinfo (rhs.var)->may_have_pointers)
2998 return;
3000 /* Likewise adding to the solution of a non-pointer var isn't useful. */
3001 if (!get_varinfo (lhs.var)->may_have_pointers)
3002 return;
3004 /* This can happen in our IR with things like n->a = *p */
3005 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
3007 /* Split into tmp = *rhs, *lhs = tmp */
3008 struct constraint_expr tmplhs;
3009 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp", true);
3010 process_constraint (new_constraint (tmplhs, rhs));
3011 process_constraint (new_constraint (lhs, tmplhs));
3013 else if ((rhs.type != SCALAR || rhs.offset != 0) && lhs.type == DEREF)
3015 /* Split into tmp = &rhs, *lhs = tmp */
3016 struct constraint_expr tmplhs;
3017 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp", true);
3018 process_constraint (new_constraint (tmplhs, rhs));
3019 process_constraint (new_constraint (lhs, tmplhs));
3021 else
3023 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
3024 constraints.safe_push (t);
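/* As a concrete example of the splitting above, a statement like
   n->a = *p arrives as the double-dereference constraint *n = *p and
   is rewritten into

     doubledereftmp = *p
     *n = doubledereftmp

   so that every constraint that is finally pushed contains at most one
   dereference. */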
3029 /* Return the position, in bits, of FIELD_DECL from the beginning of its
3030 structure. */
3032 static HOST_WIDE_INT
3033 bitpos_of_field (const tree fdecl)
3035 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3036 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3037 return -1;
3039 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3040 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
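/* Illustration only: for struct { int a; int b; } the second field
   starts 32 bits into the structure (assuming a 32-bit int), so
   whatever split stor-layout chose between DECL_FIELD_OFFSET (in
   bytes) and DECL_FIELD_BIT_OFFSET (in bits), the sum computed here
   is 32. */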
3044 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
3045 resulting constraint expressions in *RESULTS. */
3047 static void
3048 get_constraint_for_ptr_offset (tree ptr, tree offset,
3049 vec<ce_s> *results)
3051 struct constraint_expr c;
3052 unsigned int j, n;
3053 HOST_WIDE_INT rhsoffset;
3055 /* If we do not do field-sensitive PTA adding offsets to pointers
3056 does not change the points-to solution. */
3057 if (!use_field_sensitive)
3059 get_constraint_for_rhs (ptr, results);
3060 return;
3063 /* If the offset is not a non-negative integer constant that fits
3064 in a HOST_WIDE_INT, we have to fall back to a conservative
3065 solution which includes all sub-fields of all pointed-to
3066 variables of ptr. */
3067 if (offset == NULL_TREE
3068 || TREE_CODE (offset) != INTEGER_CST)
3069 rhsoffset = UNKNOWN_OFFSET;
3070 else
3072 /* Sign-extend the offset. */
3073 offset_int soffset = offset_int::from (offset, SIGNED);
3074 if (!wi::fits_shwi_p (soffset))
3075 rhsoffset = UNKNOWN_OFFSET;
3076 else
3078 /* Make sure the bit-offset also fits. */
3079 HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
3080 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
3081 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3082 rhsoffset = UNKNOWN_OFFSET;
3086 get_constraint_for_rhs (ptr, results);
3087 if (rhsoffset == 0)
3088 return;
3090 /* As we are eventually appending to the solution do not use
3091 vec::iterate here. */
3092 n = results->length ();
3093 for (j = 0; j < n; j++)
3095 varinfo_t curr;
3096 c = (*results)[j];
3097 curr = get_varinfo (c.var);
3099 if (c.type == ADDRESSOF
3100 /* If this varinfo represents a full variable just use it. */
3101 && curr->is_full_var)
3103 else if (c.type == ADDRESSOF
3104 /* If we do not know the offset add all subfields. */
3105 && rhsoffset == UNKNOWN_OFFSET)
3107 varinfo_t temp = get_varinfo (curr->head);
3110 struct constraint_expr c2;
3111 c2.var = temp->id;
3112 c2.type = ADDRESSOF;
3113 c2.offset = 0;
3114 if (c2.var != c.var)
3115 results->safe_push (c2);
3116 temp = vi_next (temp);
3118 while (temp);
3120 else if (c.type == ADDRESSOF)
3122 varinfo_t temp;
3123 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3125 /* If curr->offset + rhsoffset is less than zero adjust it. */
3126 if (rhsoffset < 0
3127 && curr->offset < offset)
3128 offset = 0;
3130 /* We have to include all fields that overlap the current
3131 field shifted by rhsoffset. And we include at least
3132 the last or the first field of the variable to represent
3133 reachability of off-bound addresses, in particular &object + 1,
3134 so as to stay conservatively correct. */
3135 temp = first_or_preceding_vi_for_offset (curr, offset);
3136 c.var = temp->id;
3137 c.offset = 0;
3138 temp = vi_next (temp);
3139 while (temp
3140 && temp->offset < offset + curr->size)
3142 struct constraint_expr c2;
3143 c2.var = temp->id;
3144 c2.type = ADDRESSOF;
3145 c2.offset = 0;
3146 results->safe_push (c2);
3147 temp = vi_next (temp);
3150 else if (c.type == SCALAR)
3152 gcc_assert (c.offset == 0);
3153 c.offset = rhsoffset;
3155 else
3156 /* We shouldn't get any DEREFs here. */
3157 gcc_unreachable ();
3159 (*results)[j] = c;
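/* Sketch of the cases above (struct and field names are illustrative):
   with struct S { int *f1; int *f2; } s, offsetting the ADDRESSOF
   constraint for &s by a known constant shifts it and re-expands it to
   every field the shifted access may overlap, e.g. to s.f2 once the
   offset reaches f2's position; a SCALAR operand such as an SSA
   pointer only gets its constraint offset adjusted, deferring the
   expansion to solving time; and a non-constant offset becomes
   UNKNOWN_OFFSET, for which all sub-fields of an ADDRESSOF target are
   added conservatively. */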
3164 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3165 If address_p is true the result will have its address taken.
3166 If lhs_p is true then the constraint expression is assumed to be used
3167 as the lhs. */
3169 static void
3170 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3171 bool address_p, bool lhs_p)
3173 tree orig_t = t;
3174 HOST_WIDE_INT bitsize = -1;
3175 HOST_WIDE_INT bitmaxsize = -1;
3176 HOST_WIDE_INT bitpos;
3177 bool reverse;
3178 tree forzero;
3180 /* Some people like to do cute things like take the address of
3181 &0->a.b */
3182 forzero = t;
3183 while (handled_component_p (forzero)
3184 || INDIRECT_REF_P (forzero)
3185 || TREE_CODE (forzero) == MEM_REF)
3186 forzero = TREE_OPERAND (forzero, 0);
3188 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3190 struct constraint_expr temp;
3192 temp.offset = 0;
3193 temp.var = integer_id;
3194 temp.type = SCALAR;
3195 results->safe_push (temp);
3196 return;
3199 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse);
3201 /* We can end up here for component references on a
3202 VIEW_CONVERT_EXPR <>(&foobar) or things like a
3203 BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>. So for
3204 symbolic constants simply give up. */
3205 if (TREE_CODE (t) == ADDR_EXPR)
3207 constraint_expr result;
3208 result.type = SCALAR;
3209 result.var = anything_id;
3210 result.offset = 0;
3211 results->safe_push (result);
3212 return;
3215 /* Pretend to take the address of the base, we'll take care of
3216 adding the required subset of sub-fields below. */
3217 get_constraint_for_1 (t, results, true, lhs_p);
3218 gcc_assert (results->length () == 1);
3219 struct constraint_expr &result = results->last ();
3221 if (result.type == SCALAR
3222 && get_varinfo (result.var)->is_full_var)
3223 /* For single-field vars do not bother about the offset. */
3224 result.offset = 0;
3225 else if (result.type == SCALAR)
3227 /* In languages like C, you can access one past the end of an
3228 array. You aren't allowed to dereference it, so we can
3229 ignore this constraint. When we handle pointer subtraction,
3230 we may have to do something cute here. */
3232 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
3233 && bitmaxsize != 0)
3235 /* It's also not true that the constraint will actually start at the
3236 right offset, it may start in some padding. We only care about
3237 setting the constraint to the first actual field it touches, so
3238 walk to find it. */
3239 struct constraint_expr cexpr = result;
3240 varinfo_t curr;
3241 results->pop ();
3242 cexpr.offset = 0;
3243 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3245 if (ranges_overlap_p (curr->offset, curr->size,
3246 bitpos, bitmaxsize))
3248 cexpr.var = curr->id;
3249 results->safe_push (cexpr);
3250 if (address_p)
3251 break;
3254 /* If we are going to take the address of this field then
3255 to be able to compute reachability correctly add at least
3256 the last field of the variable. */
3257 if (address_p && results->length () == 0)
3259 curr = get_varinfo (cexpr.var);
3260 while (curr->next != 0)
3261 curr = vi_next (curr);
3262 cexpr.var = curr->id;
3263 results->safe_push (cexpr);
3265 else if (results->length () == 0)
3266 /* Assert that we found *some* field there. The user couldn't be
3267 accessing *only* padding. */
3268 /* Still the user could access one past the end of an array
3269 embedded in a struct resulting in accessing *only* padding. */
3270 /* Or accessing only padding via type-punning to a type
3271 that has a field just in padding space. */
3273 cexpr.type = SCALAR;
3274 cexpr.var = anything_id;
3275 cexpr.offset = 0;
3276 results->safe_push (cexpr);
3279 else if (bitmaxsize == 0)
3281 if (dump_file && (dump_flags & TDF_DETAILS))
3282 fprintf (dump_file, "Access to zero-sized part of variable, "
3283 "ignoring\n");
3285 else
3286 if (dump_file && (dump_flags & TDF_DETAILS))
3287 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3289 else if (result.type == DEREF)
3291 /* If we do not know exactly where the access goes say so. Note
3292 that only for non-structure accesses do we know that we access
3293 at most one subfield of any variable. */
3294 if (bitpos == -1
3295 || bitsize != bitmaxsize
3296 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3297 || result.offset == UNKNOWN_OFFSET)
3298 result.offset = UNKNOWN_OFFSET;
3299 else
3300 result.offset += bitpos;
3302 else if (result.type == ADDRESSOF)
3304 /* We can end up here for component references on constants like
3305 VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i]. */
3306 result.type = SCALAR;
3307 result.var = anything_id;
3308 result.offset = 0;
3310 else
3311 gcc_unreachable ();
3315 /* Dereference the constraint expression CONS, and return the result.
3316 DEREF (ADDRESSOF) = SCALAR
3317 DEREF (SCALAR) = DEREF
3318 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3319 This is needed so that we can handle dereferencing DEREF constraints. */
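/* E.g. the right-hand side of x = *p reaches this point as the vector
   { SCALAR p } and leaves as { DEREF p }; for *&a the vector
   { ADDRESSOF a } simply becomes { SCALAR a }; and dereferencing a
   constraint that is already a DEREF is routed through a fresh
   "dereftmp" variable as described above. */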
3321 static void
3322 do_deref (vec<ce_s> *constraints)
3324 struct constraint_expr *c;
3325 unsigned int i = 0;
3327 FOR_EACH_VEC_ELT (*constraints, i, c)
3329 if (c->type == SCALAR)
3330 c->type = DEREF;
3331 else if (c->type == ADDRESSOF)
3332 c->type = SCALAR;
3333 else if (c->type == DEREF)
3335 struct constraint_expr tmplhs;
3336 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp", true);
3337 process_constraint (new_constraint (tmplhs, *c));
3338 c->var = tmplhs.var;
3340 else
3341 gcc_unreachable ();
3345 /* Given a tree T, return the constraint expression for taking the
3346 address of it. */
3348 static void
3349 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3351 struct constraint_expr *c;
3352 unsigned int i;
3354 get_constraint_for_1 (t, results, true, true);
3356 FOR_EACH_VEC_ELT (*results, i, c)
3358 if (c->type == DEREF)
3359 c->type = SCALAR;
3360 else
3361 c->type = ADDRESSOF;
3365 /* Given a tree T, return the constraint expression for it. */
3367 static void
3368 get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
3369 bool lhs_p)
3371 struct constraint_expr temp;
3373 /* x = integer is all glommed to a single variable, which doesn't
3374 point to anything by itself. That is, of course, unless it is an
3375 integer constant being treated as a pointer, in which case, we
3376 will return that this is really the addressof anything. This
3377 happens below, since it will fall into the default case. The only
3378 case we know something about an integer treated like a pointer is
3379 when it is the NULL pointer, and then we just say it points to
3380 NULL.
3382 Do not do that if -fno-delete-null-pointer-checks though, because
3383 in that case *NULL does not fail, so it _should_ alias *anything.
3384 It is not worth adding a new option or renaming the existing one,
3385 since this case is relatively obscure. */
3386 if ((TREE_CODE (t) == INTEGER_CST
3387 && integer_zerop (t))
3388 /* The only valid CONSTRUCTORs in gimple with pointer typed
3389 elements are zero-initializers. But in IPA mode we also
3390 process global initializers, so verify at least. */
3391 || (TREE_CODE (t) == CONSTRUCTOR
3392 && CONSTRUCTOR_NELTS (t) == 0))
3394 if (flag_delete_null_pointer_checks)
3395 temp.var = nothing_id;
3396 else
3397 temp.var = nonlocal_id;
3398 temp.type = ADDRESSOF;
3399 temp.offset = 0;
3400 results->safe_push (temp);
3401 return;
3404 /* String constants are read-only, ideally we'd have a CONST_DECL
3405 for those. */
3406 if (TREE_CODE (t) == STRING_CST)
3408 temp.var = string_id;
3409 temp.type = SCALAR;
3410 temp.offset = 0;
3411 results->safe_push (temp);
3412 return;
3415 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3417 case tcc_expression:
3419 switch (TREE_CODE (t))
3421 case ADDR_EXPR:
3422 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3423 return;
3424 default:;
3426 break;
3428 case tcc_reference:
3430 switch (TREE_CODE (t))
3432 case MEM_REF:
3434 struct constraint_expr cs;
3435 varinfo_t vi, curr;
3436 get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3437 TREE_OPERAND (t, 1), results);
3438 do_deref (results);
3440 /* If we are not taking the address then make sure to process
3441 all subvariables we might access. */
3442 if (address_p)
3443 return;
3445 cs = results->last ();
3446 if (cs.type == DEREF
3447 && type_can_have_subvars (TREE_TYPE (t)))
3449 /* For dereferences this means we have to defer it
3450 to solving time. */
3451 results->last ().offset = UNKNOWN_OFFSET;
3452 return;
3454 if (cs.type != SCALAR)
3455 return;
3457 vi = get_varinfo (cs.var);
3458 curr = vi_next (vi);
3459 if (!vi->is_full_var
3460 && curr)
3462 unsigned HOST_WIDE_INT size;
3463 if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
3464 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
3465 else
3466 size = -1;
3467 for (; curr; curr = vi_next (curr))
3469 if (curr->offset - vi->offset < size)
3471 cs.var = curr->id;
3472 results->safe_push (cs);
3474 else
3475 break;
3478 return;
3480 case ARRAY_REF:
3481 case ARRAY_RANGE_REF:
3482 case COMPONENT_REF:
3483 case IMAGPART_EXPR:
3484 case REALPART_EXPR:
3485 case BIT_FIELD_REF:
3486 get_constraint_for_component_ref (t, results, address_p, lhs_p);
3487 return;
3488 case VIEW_CONVERT_EXPR:
3489 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
3490 lhs_p);
3491 return;
3492 /* We are missing handling for TARGET_MEM_REF here. */
3493 default:;
3495 break;
3497 case tcc_exceptional:
3499 switch (TREE_CODE (t))
3501 case SSA_NAME:
3503 get_constraint_for_ssa_var (t, results, address_p);
3504 return;
3506 case CONSTRUCTOR:
3508 unsigned int i;
3509 tree val;
3510 auto_vec<ce_s> tmp;
3511 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3513 struct constraint_expr *rhsp;
3514 unsigned j;
3515 get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3516 FOR_EACH_VEC_ELT (tmp, j, rhsp)
3517 results->safe_push (*rhsp);
3518 tmp.truncate (0);
3520 /* We do not know whether the constructor was complete,
3521 so technically we have to add &NOTHING or &ANYTHING
3522 like we do for an empty constructor as well. */
3523 return;
3525 default:;
3527 break;
3529 case tcc_declaration:
3531 get_constraint_for_ssa_var (t, results, address_p);
3532 return;
3534 case tcc_constant:
3536 /* We cannot refer to automatic variables through constants. */
3537 temp.type = ADDRESSOF;
3538 temp.var = nonlocal_id;
3539 temp.offset = 0;
3540 results->safe_push (temp);
3541 return;
3543 default:;
3546 /* The default fallback is a constraint from anything. */
3547 temp.type = ADDRESSOF;
3548 temp.var = anything_id;
3549 temp.offset = 0;
3550 results->safe_push (temp);
3553 /* Given a gimple tree T, return the constraint expression vector for it. */
3555 static void
3556 get_constraint_for (tree t, vec<ce_s> *results)
3558 gcc_assert (results->length () == 0);
3560 get_constraint_for_1 (t, results, false, true);
3563 /* Given a gimple tree T, return the constraint expression vector for it
3564 to be used as the rhs of a constraint. */
3566 static void
3567 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3569 gcc_assert (results->length () == 0);
3571 get_constraint_for_1 (t, results, false, false);
3575 /* Efficiently generates constraints from all entries in *RHSC to all
3576 entries in *LHSC. */
3578 static void
3579 process_all_all_constraints (vec<ce_s> lhsc,
3580 vec<ce_s> rhsc)
3582 struct constraint_expr *lhsp, *rhsp;
3583 unsigned i, j;
3585 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3587 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3588 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3589 process_constraint (new_constraint (*lhsp, *rhsp));
3591 else
3593 struct constraint_expr tmp;
3594 tmp = new_scalar_tmp_constraint_exp ("allalltmp", true);
3595 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3596 process_constraint (new_constraint (tmp, *rhsp));
3597 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3598 process_constraint (new_constraint (*lhsp, tmp));
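/* E.g. with lhsc = {a, b, c} and rhsc = {x, y, z} the direct expansion
   would emit nine constraints; going through the "allalltmp" variable
   emits six (tmp = x, tmp = y, tmp = z, then a = tmp, b = tmp,
   c = tmp), i.e. lhsc.length () + rhsc.length () instead of their
   product. */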
3602 /* Handle aggregate copies by expanding into copies of the respective
3603 fields of the structures. */
3605 static void
3606 do_structure_copy (tree lhsop, tree rhsop)
3608 struct constraint_expr *lhsp, *rhsp;
3609 auto_vec<ce_s> lhsc;
3610 auto_vec<ce_s> rhsc;
3611 unsigned j;
3613 get_constraint_for (lhsop, &lhsc);
3614 get_constraint_for_rhs (rhsop, &rhsc);
3615 lhsp = &lhsc[0];
3616 rhsp = &rhsc[0];
3617 if (lhsp->type == DEREF
3618 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3619 || rhsp->type == DEREF)
3621 if (lhsp->type == DEREF)
3623 gcc_assert (lhsc.length () == 1);
3624 lhsp->offset = UNKNOWN_OFFSET;
3626 if (rhsp->type == DEREF)
3628 gcc_assert (rhsc.length () == 1);
3629 rhsp->offset = UNKNOWN_OFFSET;
3631 process_all_all_constraints (lhsc, rhsc);
3633 else if (lhsp->type == SCALAR
3634 && (rhsp->type == SCALAR
3635 || rhsp->type == ADDRESSOF))
3637 HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
3638 HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
3639 bool reverse;
3640 unsigned k = 0;
3641 get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize,
3642 &reverse);
3643 get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize,
3644 &reverse);
3645 for (j = 0; lhsc.iterate (j, &lhsp);)
3647 varinfo_t lhsv, rhsv;
3648 rhsp = &rhsc[k];
3649 lhsv = get_varinfo (lhsp->var);
3650 rhsv = get_varinfo (rhsp->var);
3651 if (lhsv->may_have_pointers
3652 && (lhsv->is_full_var
3653 || rhsv->is_full_var
3654 || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3655 rhsv->offset + lhsoffset, rhsv->size)))
3656 process_constraint (new_constraint (*lhsp, *rhsp));
3657 if (!rhsv->is_full_var
3658 && (lhsv->is_full_var
3659 || (lhsv->offset + rhsoffset + lhsv->size
3660 > rhsv->offset + lhsoffset + rhsv->size)))
3662 ++k;
3663 if (k >= rhsc.length ())
3664 break;
3666 else
3667 ++j;
3670 else
3671 gcc_unreachable ();
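/* Illustrative example: for an aggregate assignment a = b with

     struct T { int *p; float f; int *q; } a, b;

   the SCALAR/SCALAR path walks the two field lists in lock-step and
   emits the field-wise copies a.p = b.p and a.q = b.q, skipping the
   float member because it cannot contain pointers. */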
3674 /* Create constraints ID = { rhsc }. */
3676 static void
3677 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3679 struct constraint_expr *c;
3680 struct constraint_expr includes;
3681 unsigned int j;
3683 includes.var = id;
3684 includes.offset = 0;
3685 includes.type = SCALAR;
3687 FOR_EACH_VEC_ELT (rhsc, j, c)
3688 process_constraint (new_constraint (includes, *c));
3691 /* Create a constraint ID = OP. */
3693 static void
3694 make_constraint_to (unsigned id, tree op)
3696 auto_vec<ce_s> rhsc;
3697 get_constraint_for_rhs (op, &rhsc);
3698 make_constraints_to (id, rhsc);
3701 /* Create a constraint ID = &FROM. */
3703 static void
3704 make_constraint_from (varinfo_t vi, int from)
3706 struct constraint_expr lhs, rhs;
3708 lhs.var = vi->id;
3709 lhs.offset = 0;
3710 lhs.type = SCALAR;
3712 rhs.var = from;
3713 rhs.offset = 0;
3714 rhs.type = ADDRESSOF;
3715 process_constraint (new_constraint (lhs, rhs));
3718 /* Create a constraint ID = FROM. */
3720 static void
3721 make_copy_constraint (varinfo_t vi, int from)
3723 struct constraint_expr lhs, rhs;
3725 lhs.var = vi->id;
3726 lhs.offset = 0;
3727 lhs.type = SCALAR;
3729 rhs.var = from;
3730 rhs.offset = 0;
3731 rhs.type = SCALAR;
3732 process_constraint (new_constraint (lhs, rhs));
3735 /* Make constraints necessary to make OP escape. */
3737 static void
3738 make_escape_constraint (tree op)
3740 make_constraint_to (escaped_id, op);
3743 /* Add constraints so that the solution of VI is transitively closed. */
3745 static void
3746 make_transitive_closure_constraints (varinfo_t vi)
3748 struct constraint_expr lhs, rhs;
3750 /* VAR = *(VAR + UNKNOWN); */
3751 lhs.type = SCALAR;
3752 lhs.var = vi->id;
3753 lhs.offset = 0;
3754 rhs.type = DEREF;
3755 rhs.var = vi->id;
3756 rhs.offset = UNKNOWN_OFFSET;
3757 process_constraint (new_constraint (lhs, rhs));
3760 /* Add constraints so that the solution of VI has all subvariables added. */
3762 static void
3763 make_any_offset_constraints (varinfo_t vi)
3765 struct constraint_expr lhs, rhs;
3767 /* VAR = VAR + UNKNOWN; */
3768 lhs.type = SCALAR;
3769 lhs.var = vi->id;
3770 lhs.offset = 0;
3771 rhs.type = SCALAR;
3772 rhs.var = vi->id;
3773 rhs.offset = UNKNOWN_OFFSET;
3774 process_constraint (new_constraint (lhs, rhs));
3777 /* Temporary storage for fake var decls. */
3778 struct obstack fake_var_decl_obstack;
3780 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3782 static tree
3783 build_fake_var_decl (tree type)
3785 tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
3786 memset (decl, 0, sizeof (struct tree_var_decl));
3787 TREE_SET_CODE (decl, VAR_DECL);
3788 TREE_TYPE (decl) = type;
3789 DECL_UID (decl) = allocate_decl_uid ();
3790 SET_DECL_PT_UID (decl, -1);
3791 layout_decl (decl, 0);
3792 return decl;
3795 /* Create a new artificial heap variable with NAME.
3796 Return the created variable. */
3798 static varinfo_t
3799 make_heapvar (const char *name, bool add_id)
3801 varinfo_t vi;
3802 tree heapvar;
3804 heapvar = build_fake_var_decl (ptr_type_node);
3805 DECL_EXTERNAL (heapvar) = 1;
3807 vi = new_var_info (heapvar, name, add_id);
3808 vi->is_artificial_var = true;
3809 vi->is_heap_var = true;
3810 vi->is_unknown_size_var = true;
3811 vi->offset = 0;
3812 vi->fullsize = ~0;
3813 vi->size = ~0;
3814 vi->is_full_var = true;
3815 insert_vi_for_tree (heapvar, vi);
3817 return vi;
3820 /* Create a new artificial heap variable with NAME and make a
3821 constraint from it to LHS. Set flags according to a tag used
3822 for tracking restrict pointers. */
3824 static varinfo_t
3825 make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id)
3827 varinfo_t vi = make_heapvar (name, add_id);
3828 vi->is_restrict_var = 1;
3829 vi->is_global_var = 1;
3830 vi->may_have_pointers = 1;
3831 make_constraint_from (lhs, vi->id);
3832 return vi;
3835 /* Create a new artificial heap variable with NAME and make a
3836 constraint from it to LHS. Set flags according to a tag used
3837 for tracking restrict pointers and make the artificial heap
3838 point to global memory. */
3840 static varinfo_t
3841 make_constraint_from_global_restrict (varinfo_t lhs, const char *name,
3842 bool add_id)
3844 varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id);
3845 make_copy_constraint (vi, nonlocal_id);
3846 return vi;
3849 /* In IPA mode there are varinfos for different aspects of each
3850 function designator. One for the points-to set of the return
3851 value, one for the variables that are clobbered by the function,
3852 one for its uses and one for each parameter (including a single
3853 glob for remaining variadic arguments). */
3855 enum { fi_clobbers = 1, fi_uses = 2,
3856 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
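/* So, for instance, the clobber set of a function lives at offset
   fi_clobbers within its varinfo and the parameters occupy the offsets
   from fi_parm_base onwards; get_function_part_constraint below
   resolves such an offset with first_vi_for_offset for a known
   FUNCTION_DECL and otherwise falls back to a DEREF at that offset. */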
3858 /* Get a constraint for the requested part of a function designator FI
3859 when operating in IPA mode. */
3861 static struct constraint_expr
3862 get_function_part_constraint (varinfo_t fi, unsigned part)
3864 struct constraint_expr c;
3866 gcc_assert (in_ipa_mode);
3868 if (fi->id == anything_id)
3870 /* ??? We probably should have an ANYFN special variable. */
3871 c.var = anything_id;
3872 c.offset = 0;
3873 c.type = SCALAR;
3875 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3877 varinfo_t ai = first_vi_for_offset (fi, part);
3878 if (ai)
3879 c.var = ai->id;
3880 else
3881 c.var = anything_id;
3882 c.offset = 0;
3883 c.type = SCALAR;
3885 else
3887 c.var = fi->id;
3888 c.offset = part;
3889 c.type = DEREF;
3892 return c;
3895 /* For non-IPA mode, generate constraints necessary for a call on the
3896 RHS. */
3898 static void
3899 handle_rhs_call (gcall *stmt, vec<ce_s> *results)
3901 struct constraint_expr rhsc;
3902 unsigned i;
3903 bool returns_uses = false;
3905 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3907 tree arg = gimple_call_arg (stmt, i);
3908 int flags = gimple_call_arg_flags (stmt, i);
3910 /* If the argument is not used we can ignore it. */
3911 if (flags & EAF_UNUSED)
3912 continue;
3914 /* As we compute ESCAPED context-insensitively we do not gain
3915 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3916 set. The argument would still get clobbered through the
3917 escape solution. */
3918 if ((flags & EAF_NOCLOBBER)
3919 && (flags & EAF_NOESCAPE))
3921 varinfo_t uses = get_call_use_vi (stmt);
3922 varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
3923 make_constraint_to (tem->id, arg);
3924 make_any_offset_constraints (tem);
3925 if (!(flags & EAF_DIRECT))
3926 make_transitive_closure_constraints (tem);
3927 make_copy_constraint (uses, tem->id);
3928 returns_uses = true;
3930 else if (flags & EAF_NOESCAPE)
3932 struct constraint_expr lhs, rhs;
3933 varinfo_t uses = get_call_use_vi (stmt);
3934 varinfo_t clobbers = get_call_clobber_vi (stmt);
3935 varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
3936 make_constraint_to (tem->id, arg);
3937 make_any_offset_constraints (tem);
3938 if (!(flags & EAF_DIRECT))
3939 make_transitive_closure_constraints (tem);
3940 make_copy_constraint (uses, tem->id);
3941 make_copy_constraint (clobbers, tem->id);
3942 /* Add *tem = nonlocal, do not add *tem = callused as
3943 EAF_NOESCAPE parameters do not escape to other parameters
3944 and all other uses appear in NONLOCAL as well. */
3945 lhs.type = DEREF;
3946 lhs.var = tem->id;
3947 lhs.offset = 0;
3948 rhs.type = SCALAR;
3949 rhs.var = nonlocal_id;
3950 rhs.offset = 0;
3951 process_constraint (new_constraint (lhs, rhs));
3952 returns_uses = true;
3954 else
3955 make_escape_constraint (arg);
3958 /* If we added to the call's uses solution make sure we account for
3959 pointers to it being returned. */
3960 if (returns_uses)
3962 rhsc.var = get_call_use_vi (stmt)->id;
3963 rhsc.offset = UNKNOWN_OFFSET;
3964 rhsc.type = SCALAR;
3965 results->safe_push (rhsc);
3968 /* The static chain escapes as well. */
3969 if (gimple_call_chain (stmt))
3970 make_escape_constraint (gimple_call_chain (stmt));
3972 /* And if we applied NRV the address of the return slot escapes as well. */
3973 if (gimple_call_return_slot_opt_p (stmt)
3974 && gimple_call_lhs (stmt) != NULL_TREE
3975 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3977 auto_vec<ce_s> tmpc;
3978 struct constraint_expr lhsc, *c;
3979 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
3980 lhsc.var = escaped_id;
3981 lhsc.offset = 0;
3982 lhsc.type = SCALAR;
3983 FOR_EACH_VEC_ELT (tmpc, i, c)
3984 process_constraint (new_constraint (lhsc, *c));
3987 /* Regular functions return nonlocal memory. */
3988 rhsc.var = nonlocal_id;
3989 rhsc.offset = 0;
3990 rhsc.type = SCALAR;
3991 results->safe_push (rhsc);
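/* Putting it together for a plain call foo (p) with no argument flags:
   the loop above ends up in make_escape_constraint, i.e. ESCAPED = p,
   the static chain (if any) escapes likewise, and RESULTS receives
   NONLOCAL so that a pointer returned by foo is treated as pointing to
   global memory. */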
3994 /* For non-IPA mode, generate constraints necessary for a call
3995 that returns a pointer and assigns it to LHS. This simply makes
3996 the LHS point to global and escaped variables. */
3998 static void
3999 handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> rhsc,
4000 tree fndecl)
4002 auto_vec<ce_s> lhsc;
4004 get_constraint_for (lhs, &lhsc);
4005 /* If the store is to a global decl make sure to
4006 add proper escape constraints. */
4007 lhs = get_base_address (lhs);
4008 if (lhs
4009 && DECL_P (lhs)
4010 && is_global_var (lhs))
4012 struct constraint_expr tmpc;
4013 tmpc.var = escaped_id;
4014 tmpc.offset = 0;
4015 tmpc.type = SCALAR;
4016 lhsc.safe_push (tmpc);
4019 /* If the call returns an argument unmodified override the rhs
4020 constraints. */
4021 if (flags & ERF_RETURNS_ARG
4022 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
4024 tree arg;
4025 rhsc.create (0);
4026 arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
4027 get_constraint_for (arg, &rhsc);
4028 process_all_all_constraints (lhsc, rhsc);
4029 rhsc.release ();
4031 else if (flags & ERF_NOALIAS)
4033 varinfo_t vi;
4034 struct constraint_expr tmpc;
4035 rhsc.create (0);
4036 vi = make_heapvar ("HEAP", true);
4037 /* We are marking allocated storage local, we deal with it becoming
4038 global by escaping and setting of vars_contains_escaped_heap. */
4039 DECL_EXTERNAL (vi->decl) = 0;
4040 vi->is_global_var = 0;
4041 /* If this is not a real malloc call assume the memory was
4042 initialized and thus may point to global memory. All
4043 builtin functions with the malloc attribute behave in a sane way. */
4044 if (!fndecl
4045 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
4046 make_constraint_from (vi, nonlocal_id);
4047 tmpc.var = vi->id;
4048 tmpc.offset = 0;
4049 tmpc.type = ADDRESSOF;
4050 rhsc.safe_push (tmpc);
4051 process_all_all_constraints (lhsc, rhsc);
4052 rhsc.release ();
4054 else
4055 process_all_all_constraints (lhsc, rhsc);
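/* Illustrative example (not part of the original file): for a
   statement

     p = foo (n);

   where foo is known to return a pointer to fresh storage
   (ERF_NOALIAS), the constraints generated above are roughly

     p = &HEAP
     HEAP = NONLOCAL   (only when foo is not a normal builtin
                        allocator, since the storage may then
                        already contain pointers to global memory)

   If instead foo returns one of its arguments unmodified
   (ERF_RETURNS_ARG), the RHS is replaced by that argument, giving
   p = arg.  And if p had been a global decl, ESCAPED would receive
   the same right-hand sides, modeling that the stored value
   escapes.  */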
4058 /* For non-IPA mode, generate constraints necessary for a call of a
4059 const function that returns a pointer in the statement STMT. */
4061 static void
4062 handle_const_call (gcall *stmt, vec<ce_s> *results)
4064 struct constraint_expr rhsc;
4065 unsigned int k;
4067 /* Treat nested const functions the same as pure functions as far
4068 as the static chain is concerned. */
4069 if (gimple_call_chain (stmt))
4071 varinfo_t uses = get_call_use_vi (stmt);
4072 make_any_offset_constraints (uses);
4073 make_transitive_closure_constraints (uses);
4074 make_constraint_to (uses->id, gimple_call_chain (stmt));
4075 rhsc.var = uses->id;
4076 rhsc.offset = 0;
4077 rhsc.type = SCALAR;
4078 results->safe_push (rhsc);
4081 /* May return offsetted arguments. */
4082 varinfo_t tem = NULL;
4083 if (gimple_call_num_args (stmt) != 0)
4084 tem = new_var_info (NULL_TREE, "callarg", true);
4085 for (k = 0; k < gimple_call_num_args (stmt); ++k)
4087 tree arg = gimple_call_arg (stmt, k);
4088 auto_vec<ce_s> argc;
4089 get_constraint_for_rhs (arg, &argc);
4090 make_constraints_to (tem->id, argc);
4092 if (tem)
4094 ce_s ce;
4095 ce.type = SCALAR;
4096 ce.var = tem->id;
4097 ce.offset = UNKNOWN_OFFSET;
4098 results->safe_push (ce);
4101 /* May return addresses of globals. */
4102 rhsc.var = nonlocal_id;
4103 rhsc.offset = 0;
4104 rhsc.type = ADDRESSOF;
4105 results->safe_push (rhsc);
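/* Illustrative example (not part of the original file): for

     q = cfoo (a, b);

   with cfoo declared const, the temporary "callarg" variable created
   above collects what the arguments point to (callarg = a,
   callarg = b), and the result may point anywhere into that memory
   as well as to global memory:

     q = callarg + UNKNOWN
     q = &NONLOCAL

   A static chain, if present, is routed through the per-call use
   variable instead.  */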
4108 /* For non-IPA mode, generate constraints necessary for a call to a
4109 pure function in statement STMT. */
4111 static void
4112 handle_pure_call (gcall *stmt, vec<ce_s> *results)
4114 struct constraint_expr rhsc;
4115 unsigned i;
4116 varinfo_t uses = NULL;
4118 /* Memory reached from pointer arguments is call-used. */
4119 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4121 tree arg = gimple_call_arg (stmt, i);
4122 if (!uses)
4124 uses = get_call_use_vi (stmt);
4125 make_any_offset_constraints (uses);
4126 make_transitive_closure_constraints (uses);
4128 make_constraint_to (uses->id, arg);
4131 /* The static chain is used as well. */
4132 if (gimple_call_chain (stmt))
4134 if (!uses)
4136 uses = get_call_use_vi (stmt);
4137 make_any_offset_constraints (uses);
4138 make_transitive_closure_constraints (uses);
4140 make_constraint_to (uses->id, gimple_call_chain (stmt));
4143 /* Pure functions may return call-used and nonlocal memory. */
4144 if (uses)
4146 rhsc.var = uses->id;
4147 rhsc.offset = 0;
4148 rhsc.type = SCALAR;
4149 results->safe_push (rhsc);
4151 rhsc.var = nonlocal_id;
4152 rhsc.offset = 0;
4153 rhsc.type = SCALAR;
4154 results->safe_push (rhsc);
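/* Illustrative example (not part of the original file): for

     q = pfoo (p);

   with pfoo declared pure, everything reachable from p is collected
   in the per-call use variable (closed transitively), and the result
   may point into that memory or into global memory, roughly

     CALLUSED = p
     q = CALLUSED
     q = NONLOCAL  */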
4158 /* Return the varinfo for the callee of CALL. */
4160 static varinfo_t
4161 get_fi_for_callee (gcall *call)
4163 tree decl, fn = gimple_call_fn (call);
4165 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4166 fn = OBJ_TYPE_REF_EXPR (fn);
4168 /* If we can directly resolve the function being called, do so.
4169 Otherwise, it must be some sort of indirect expression that
4170 we should still be able to handle. */
4171 decl = gimple_call_addr_fndecl (fn);
4172 if (decl)
4173 return get_vi_for_tree (decl);
4175 /* If the function is anything other than an SSA name pointer we have no
4176 clue and should be getting ANYFN (well, ANYTHING for now). */
4177 if (!fn || TREE_CODE (fn) != SSA_NAME)
4178 return get_varinfo (anything_id);
4180 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4181 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4182 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4183 fn = SSA_NAME_VAR (fn);
4185 return get_vi_for_tree (fn);
4188 /* Create constraints for assigning call argument ARG to the incoming parameter
4189 INDEX of function FI. */
4191 static void
4192 find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg)
4194 struct constraint_expr lhs;
4195 lhs = get_function_part_constraint (fi, fi_parm_base + index);
4197 auto_vec<ce_s, 2> rhsc;
4198 get_constraint_for_rhs (arg, &rhsc);
4200 unsigned j;
4201 struct constraint_expr *rhsp;
4202 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4203 process_constraint (new_constraint (lhs, *rhsp));
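/* Illustrative example (not part of the original file): in IPA mode a
   call

     foo (x, &y);

   is modeled by binding the actual arguments to the incoming
   parameter sub-variables of foo's function info:

     foo.arg0 = x
     foo.arg1 = &y  */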
4206 /* Return true if FNDECL may be part of another LTO partition. */
4208 static bool
4209 fndecl_maybe_in_other_partition (tree fndecl)
4211 cgraph_node *fn_node = cgraph_node::get (fndecl);
4212 if (fn_node == NULL)
4213 return true;
4215 return fn_node->in_other_partition;
4218 /* Create constraints for the builtin call T. Return true if the call
4219 was handled, otherwise false. */
4221 static bool
4222 find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
4224 tree fndecl = gimple_call_fndecl (t);
4225 auto_vec<ce_s, 2> lhsc;
4226 auto_vec<ce_s, 4> rhsc;
4227 varinfo_t fi;
4229 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4230 /* ??? All builtins that are handled here need to be handled
4231 in the alias-oracle query functions explicitly! */
4232 switch (DECL_FUNCTION_CODE (fndecl))
4234 /* All the following functions return a pointer to the same object
4235 as their first argument points to. The functions do not add
4236 to the ESCAPED solution. They make the memory pointed to by the
4237 first argument point to whatever the memory pointed to by the
4238 second argument points to. */
4239 case BUILT_IN_STRCPY:
4240 case BUILT_IN_STRNCPY:
4241 case BUILT_IN_BCOPY:
4242 case BUILT_IN_MEMCPY:
4243 case BUILT_IN_MEMMOVE:
4244 case BUILT_IN_MEMPCPY:
4245 case BUILT_IN_STPCPY:
4246 case BUILT_IN_STPNCPY:
4247 case BUILT_IN_STRCAT:
4248 case BUILT_IN_STRNCAT:
4249 case BUILT_IN_STRCPY_CHK:
4250 case BUILT_IN_STRNCPY_CHK:
4251 case BUILT_IN_MEMCPY_CHK:
4252 case BUILT_IN_MEMMOVE_CHK:
4253 case BUILT_IN_MEMPCPY_CHK:
4254 case BUILT_IN_STPCPY_CHK:
4255 case BUILT_IN_STPNCPY_CHK:
4256 case BUILT_IN_STRCAT_CHK:
4257 case BUILT_IN_STRNCAT_CHK:
4258 case BUILT_IN_TM_MEMCPY:
4259 case BUILT_IN_TM_MEMMOVE:
4261 tree res = gimple_call_lhs (t);
4262 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4263 == BUILT_IN_BCOPY ? 1 : 0));
4264 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4265 == BUILT_IN_BCOPY ? 0 : 1));
4266 if (res != NULL_TREE)
4268 get_constraint_for (res, &lhsc);
4269 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4270 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4271 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4272 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4273 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4274 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4275 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4276 else
4277 get_constraint_for (dest, &rhsc);
4278 process_all_all_constraints (lhsc, rhsc);
4279 lhsc.truncate (0);
4280 rhsc.truncate (0);
4282 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4283 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4284 do_deref (&lhsc);
4285 do_deref (&rhsc);
4286 process_all_all_constraints (lhsc, rhsc);
4287 return true;
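/* Illustrative example (not part of the original file): for

     r = memcpy (d, s, n);

   the constraints generated above are roughly

     r = d
     *d = *s

   i.e. the result points to what d points to, and the memory d
   points to receives whatever the memory s points to may point to.
   For the mempcpy/stpcpy-style variants r = d is replaced by an
   offsetted copy since the result may point anywhere into the
   destination object.  */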
4289 case BUILT_IN_MEMSET:
4290 case BUILT_IN_MEMSET_CHK:
4291 case BUILT_IN_TM_MEMSET:
4293 tree res = gimple_call_lhs (t);
4294 tree dest = gimple_call_arg (t, 0);
4295 unsigned i;
4296 ce_s *lhsp;
4297 struct constraint_expr ac;
4298 if (res != NULL_TREE)
4300 get_constraint_for (res, &lhsc);
4301 get_constraint_for (dest, &rhsc);
4302 process_all_all_constraints (lhsc, rhsc);
4303 lhsc.truncate (0);
4305 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4306 do_deref (&lhsc);
4307 if (flag_delete_null_pointer_checks
4308 && integer_zerop (gimple_call_arg (t, 1)))
4310 ac.type = ADDRESSOF;
4311 ac.var = nothing_id;
4313 else
4315 ac.type = SCALAR;
4316 ac.var = integer_id;
4318 ac.offset = 0;
4319 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4320 process_constraint (new_constraint (*lhsp, ac));
4321 return true;
4323 case BUILT_IN_POSIX_MEMALIGN:
4325 tree ptrptr = gimple_call_arg (t, 0);
4326 get_constraint_for (ptrptr, &lhsc);
4327 do_deref (&lhsc);
4328 varinfo_t vi = make_heapvar ("HEAP", true);
4329 /* We are marking allocated storage local; we deal with it becoming
4330 global through escaping and the setting of vars_contains_escaped_heap. */
4331 DECL_EXTERNAL (vi->decl) = 0;
4332 vi->is_global_var = 0;
4333 struct constraint_expr tmpc;
4334 tmpc.var = vi->id;
4335 tmpc.offset = 0;
4336 tmpc.type = ADDRESSOF;
4337 rhsc.safe_push (tmpc);
4338 process_all_all_constraints (lhsc, rhsc);
4339 return true;
4341 case BUILT_IN_ASSUME_ALIGNED:
4343 tree res = gimple_call_lhs (t);
4344 tree dest = gimple_call_arg (t, 0);
4345 if (res != NULL_TREE)
4347 get_constraint_for (res, &lhsc);
4348 get_constraint_for (dest, &rhsc);
4349 process_all_all_constraints (lhsc, rhsc);
4351 return true;
4353 /* The following functions do not return pointers, do not
4354 modify the points-to sets of memory reachable from their
4355 arguments, and do not add to the ESCAPED solution. */
4356 case BUILT_IN_SINCOS:
4357 case BUILT_IN_SINCOSF:
4358 case BUILT_IN_SINCOSL:
4359 case BUILT_IN_FREXP:
4360 case BUILT_IN_FREXPF:
4361 case BUILT_IN_FREXPL:
4362 case BUILT_IN_GAMMA_R:
4363 case BUILT_IN_GAMMAF_R:
4364 case BUILT_IN_GAMMAL_R:
4365 case BUILT_IN_LGAMMA_R:
4366 case BUILT_IN_LGAMMAF_R:
4367 case BUILT_IN_LGAMMAL_R:
4368 case BUILT_IN_MODF:
4369 case BUILT_IN_MODFF:
4370 case BUILT_IN_MODFL:
4371 case BUILT_IN_REMQUO:
4372 case BUILT_IN_REMQUOF:
4373 case BUILT_IN_REMQUOL:
4374 case BUILT_IN_FREE:
4375 return true;
4376 case BUILT_IN_STRDUP:
4377 case BUILT_IN_STRNDUP:
4378 case BUILT_IN_REALLOC:
4379 if (gimple_call_lhs (t))
4381 handle_lhs_call (t, gimple_call_lhs (t),
4382 gimple_call_return_flags (t) | ERF_NOALIAS,
4383 vNULL, fndecl);
4384 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4385 NULL_TREE, &lhsc);
4386 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4387 NULL_TREE, &rhsc);
4388 do_deref (&lhsc);
4389 do_deref (&rhsc);
4390 process_all_all_constraints (lhsc, rhsc);
4391 lhsc.truncate (0);
4392 rhsc.truncate (0);
4393 /* For realloc the resulting pointer can be equal to the
4394 argument as well. But only doing this wouldn't be
4395 correct because with ptr == 0 realloc behaves like malloc. */
4396 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
4398 get_constraint_for (gimple_call_lhs (t), &lhsc);
4399 get_constraint_for (gimple_call_arg (t, 0), &rhsc);
4400 process_all_all_constraints (lhsc, rhsc);
4402 return true;
4404 break;
4405 /* String / character search functions return a pointer into the
4406 source string or NULL. */
4407 case BUILT_IN_INDEX:
4408 case BUILT_IN_STRCHR:
4409 case BUILT_IN_STRRCHR:
4410 case BUILT_IN_MEMCHR:
4411 case BUILT_IN_STRSTR:
4412 case BUILT_IN_STRPBRK:
4413 if (gimple_call_lhs (t))
4415 tree src = gimple_call_arg (t, 0);
4416 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4417 constraint_expr nul;
4418 nul.var = nothing_id;
4419 nul.offset = 0;
4420 nul.type = ADDRESSOF;
4421 rhsc.safe_push (nul);
4422 get_constraint_for (gimple_call_lhs (t), &lhsc);
4423 process_all_all_constraints (lhsc, rhsc);
4425 return true;
4426 /* Trampolines are special - they set up passing the static
4427 frame. */
4428 case BUILT_IN_INIT_TRAMPOLINE:
4430 tree tramp = gimple_call_arg (t, 0);
4431 tree nfunc = gimple_call_arg (t, 1);
4432 tree frame = gimple_call_arg (t, 2);
4433 unsigned i;
4434 struct constraint_expr lhs, *rhsp;
4435 if (in_ipa_mode)
4437 varinfo_t nfi = NULL;
4438 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4439 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4440 if (nfi)
4442 lhs = get_function_part_constraint (nfi, fi_static_chain);
4443 get_constraint_for (frame, &rhsc);
4444 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4445 process_constraint (new_constraint (lhs, *rhsp));
4446 rhsc.truncate (0);
4448 /* Make the frame point to the function for
4449 the trampoline adjustment call. */
4450 get_constraint_for (tramp, &lhsc);
4451 do_deref (&lhsc);
4452 get_constraint_for (nfunc, &rhsc);
4453 process_all_all_constraints (lhsc, rhsc);
4455 return true;
4458 /* Else fallthru to generic handling which will let
4459 the frame escape. */
4460 break;
4462 case BUILT_IN_ADJUST_TRAMPOLINE:
4464 tree tramp = gimple_call_arg (t, 0);
4465 tree res = gimple_call_lhs (t);
4466 if (in_ipa_mode && res)
4468 get_constraint_for (res, &lhsc);
4469 get_constraint_for (tramp, &rhsc);
4470 do_deref (&rhsc);
4471 process_all_all_constraints (lhsc, rhsc);
4473 return true;
4475 CASE_BUILT_IN_TM_STORE (1):
4476 CASE_BUILT_IN_TM_STORE (2):
4477 CASE_BUILT_IN_TM_STORE (4):
4478 CASE_BUILT_IN_TM_STORE (8):
4479 CASE_BUILT_IN_TM_STORE (FLOAT):
4480 CASE_BUILT_IN_TM_STORE (DOUBLE):
4481 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4482 CASE_BUILT_IN_TM_STORE (M64):
4483 CASE_BUILT_IN_TM_STORE (M128):
4484 CASE_BUILT_IN_TM_STORE (M256):
4486 tree addr = gimple_call_arg (t, 0);
4487 tree src = gimple_call_arg (t, 1);
4489 get_constraint_for (addr, &lhsc);
4490 do_deref (&lhsc);
4491 get_constraint_for (src, &rhsc);
4492 process_all_all_constraints (lhsc, rhsc);
4493 return true;
4495 CASE_BUILT_IN_TM_LOAD (1):
4496 CASE_BUILT_IN_TM_LOAD (2):
4497 CASE_BUILT_IN_TM_LOAD (4):
4498 CASE_BUILT_IN_TM_LOAD (8):
4499 CASE_BUILT_IN_TM_LOAD (FLOAT):
4500 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4501 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4502 CASE_BUILT_IN_TM_LOAD (M64):
4503 CASE_BUILT_IN_TM_LOAD (M128):
4504 CASE_BUILT_IN_TM_LOAD (M256):
4506 tree dest = gimple_call_lhs (t);
4507 tree addr = gimple_call_arg (t, 0);
4509 get_constraint_for (dest, &lhsc);
4510 get_constraint_for (addr, &rhsc);
4511 do_deref (&rhsc);
4512 process_all_all_constraints (lhsc, rhsc);
4513 return true;
4515 /* Variadic argument handling needs to be handled in IPA
4516 mode as well. */
4517 case BUILT_IN_VA_START:
4519 tree valist = gimple_call_arg (t, 0);
4520 struct constraint_expr rhs, *lhsp;
4521 unsigned i;
4522 get_constraint_for_ptr_offset (valist, NULL_TREE, &lhsc);
4523 do_deref (&lhsc);
4524 /* The va_list gets access to pointers in variadic
4525 arguments, which we know in the case of IPA analysis
4526 and which otherwise are just all nonlocal variables. */
4527 if (in_ipa_mode)
4529 fi = lookup_vi_for_tree (fn->decl);
4530 rhs = get_function_part_constraint (fi, ~0);
4531 rhs.type = ADDRESSOF;
4533 else
4535 rhs.var = nonlocal_id;
4536 rhs.type = ADDRESSOF;
4537 rhs.offset = 0;
4539 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4540 process_constraint (new_constraint (*lhsp, rhs));
4541 /* va_list is clobbered. */
4542 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4543 return true;
4545 /* va_end doesn't have any effect that matters. */
4546 case BUILT_IN_VA_END:
4547 return true;
4548 /* Alternate return. Simply give up for now. */
4549 case BUILT_IN_RETURN:
4551 fi = NULL;
4552 if (!in_ipa_mode
4553 || !(fi = get_vi_for_tree (fn->decl)))
4554 make_constraint_from (get_varinfo (escaped_id), anything_id);
4555 else if (in_ipa_mode
4556 && fi != NULL)
4558 struct constraint_expr lhs, rhs;
4559 lhs = get_function_part_constraint (fi, fi_result);
4560 rhs.var = anything_id;
4561 rhs.offset = 0;
4562 rhs.type = SCALAR;
4563 process_constraint (new_constraint (lhs, rhs));
4565 return true;
4567 case BUILT_IN_GOMP_PARALLEL:
4568 case BUILT_IN_GOACC_PARALLEL:
4570 if (in_ipa_mode)
4572 unsigned int fnpos, argpos;
4573 switch (DECL_FUNCTION_CODE (fndecl))
4575 case BUILT_IN_GOMP_PARALLEL:
4576 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
4577 fnpos = 0;
4578 argpos = 1;
4579 break;
4580 case BUILT_IN_GOACC_PARALLEL:
4581 /* __builtin_GOACC_parallel (device, fn, mapnum, hostaddrs,
4582 sizes, kinds, ...). */
4583 fnpos = 1;
4584 argpos = 3;
4585 break;
4586 default:
4587 gcc_unreachable ();
4590 tree fnarg = gimple_call_arg (t, fnpos);
4591 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
4592 tree fndecl = TREE_OPERAND (fnarg, 0);
4593 if (fndecl_maybe_in_other_partition (fndecl))
4594 /* Fallthru to general call handling. */
4595 break;
4597 tree arg = gimple_call_arg (t, argpos);
4599 varinfo_t fi = get_vi_for_tree (fndecl);
4600 find_func_aliases_for_call_arg (fi, 0, arg);
4601 return true;
4603 /* Else fallthru to generic call handling. */
4604 break;
4606 /* printf-style functions may have hooks to set pointers to
4607 point to somewhere into the generated string. Leave them
4608 for a later exercise... */
4609 default:
4610 /* Fallthru to general call handling. */;
4613 return false;
4616 /* Create constraints for the call T. */
4618 static void
4619 find_func_aliases_for_call (struct function *fn, gcall *t)
4621 tree fndecl = gimple_call_fndecl (t);
4622 varinfo_t fi;
4624 if (fndecl != NULL_TREE
4625 && DECL_BUILT_IN (fndecl)
4626 && find_func_aliases_for_builtin_call (fn, t))
4627 return;
4629 fi = get_fi_for_callee (t);
4630 if (!in_ipa_mode
4631 || (fndecl && !fi->is_fn_info))
4633 auto_vec<ce_s, 16> rhsc;
4634 int flags = gimple_call_flags (t);
4636 /* Const functions can return their arguments and addresses
4637 of global memory but not of escaped memory. */
4638 if (flags & (ECF_CONST|ECF_NOVOPS))
4640 if (gimple_call_lhs (t))
4641 handle_const_call (t, &rhsc);
4643 /* Pure functions can return addresses in and of memory
4644 reachable from their arguments, but they are not an escape
4645 point for reachable memory of their arguments. */
4646 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4647 handle_pure_call (t, &rhsc);
4648 else
4649 handle_rhs_call (t, &rhsc);
4650 if (gimple_call_lhs (t))
4651 handle_lhs_call (t, gimple_call_lhs (t),
4652 gimple_call_return_flags (t), rhsc, fndecl);
4654 else
4656 auto_vec<ce_s, 2> rhsc;
4657 tree lhsop;
4658 unsigned j;
4660 /* Assign all the passed arguments to the appropriate incoming
4661 parameters of the function. */
4662 for (j = 0; j < gimple_call_num_args (t); j++)
4664 tree arg = gimple_call_arg (t, j);
4665 find_func_aliases_for_call_arg (fi, j, arg);
4668 /* If we are returning a value, assign it to the result. */
4669 lhsop = gimple_call_lhs (t);
4670 if (lhsop)
4672 auto_vec<ce_s, 2> lhsc;
4673 struct constraint_expr rhs;
4674 struct constraint_expr *lhsp;
4675 bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (t));
4677 get_constraint_for (lhsop, &lhsc);
4678 rhs = get_function_part_constraint (fi, fi_result);
4679 if (aggr_p)
4681 auto_vec<ce_s, 2> tem;
4682 tem.quick_push (rhs);
4683 do_deref (&tem);
4684 gcc_checking_assert (tem.length () == 1);
4685 rhs = tem[0];
4687 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4688 process_constraint (new_constraint (*lhsp, rhs));
4690 /* If we pass the result decl by reference, honor that. */
4691 if (aggr_p)
4693 struct constraint_expr lhs;
4694 struct constraint_expr *rhsp;
4696 get_constraint_for_address_of (lhsop, &rhsc);
4697 lhs = get_function_part_constraint (fi, fi_result);
4698 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4699 process_constraint (new_constraint (lhs, *rhsp));
4700 rhsc.truncate (0);
4704 /* If we use a static chain, pass it along. */
4705 if (gimple_call_chain (t))
4707 struct constraint_expr lhs;
4708 struct constraint_expr *rhsp;
4710 get_constraint_for (gimple_call_chain (t), &rhsc);
4711 lhs = get_function_part_constraint (fi, fi_static_chain);
4712 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4713 process_constraint (new_constraint (lhs, *rhsp));
4718 /* Walk statement T setting up aliasing constraints according to the
4719 references found in T. This function is the main part of the
4720 constraint builder. */
4723 static void
4724 find_func_aliases (struct function *fn, gimple *origt)
4726 gimple *t = origt;
4727 auto_vec<ce_s, 16> lhsc;
4728 auto_vec<ce_s, 16> rhsc;
4729 struct constraint_expr *c;
4730 varinfo_t fi;
4732 /* Now build constraints expressions. */
4733 if (gimple_code (t) == GIMPLE_PHI)
4735 size_t i;
4736 unsigned int j;
4738 /* For a phi node, assign all the arguments to
4739 the result. */
4740 get_constraint_for (gimple_phi_result (t), &lhsc);
4741 for (i = 0; i < gimple_phi_num_args (t); i++)
4743 tree strippedrhs = PHI_ARG_DEF (t, i);
4745 STRIP_NOPS (strippedrhs);
4746 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
4748 FOR_EACH_VEC_ELT (lhsc, j, c)
4750 struct constraint_expr *c2;
4751 while (rhsc.length () > 0)
4753 c2 = &rhsc.last ();
4754 process_constraint (new_constraint (*c, *c2));
4755 rhsc.pop ();
4760 /* In IPA mode, we need to generate constraints to pass call
4761 arguments through their calls. There are two cases:
4762 either a GIMPLE_CALL returning a value, or just a plain
4763 GIMPLE_CALL when it does not.
4765 In non-ipa mode, we need to generate constraints for each
4766 pointer passed by address. */
4767 else if (is_gimple_call (t))
4768 find_func_aliases_for_call (fn, as_a <gcall *> (t));
4770 /* Otherwise, just a regular assignment statement. Only care about
4771 operations with pointer result, others are dealt with as escape
4772 points if they have pointer operands. */
4773 else if (is_gimple_assign (t))
4775 /* Otherwise, just a regular assignment statement. */
4776 tree lhsop = gimple_assign_lhs (t);
4777 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4779 if (rhsop && TREE_CLOBBER_P (rhsop))
4780 /* Ignore clobbers, they don't actually store anything into
4781 the LHS. */
4783 else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4784 do_structure_copy (lhsop, rhsop);
4785 else
4787 enum tree_code code = gimple_assign_rhs_code (t);
4789 get_constraint_for (lhsop, &lhsc);
4791 if (code == POINTER_PLUS_EXPR)
4792 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4793 gimple_assign_rhs2 (t), &rhsc);
4794 else if (code == BIT_AND_EXPR
4795 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
4797 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4798 the pointer. Handle it by offsetting it by UNKNOWN. */
4799 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4800 NULL_TREE, &rhsc);
4802 else if ((CONVERT_EXPR_CODE_P (code)
4803 && !(POINTER_TYPE_P (gimple_expr_type (t))
4804 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4805 || gimple_assign_single_p (t))
4806 get_constraint_for_rhs (rhsop, &rhsc);
4807 else if (code == COND_EXPR)
4809 /* The result is a merge of both COND_EXPR arms. */
4810 auto_vec<ce_s, 2> tmp;
4811 struct constraint_expr *rhsp;
4812 unsigned i;
4813 get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
4814 get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
4815 FOR_EACH_VEC_ELT (tmp, i, rhsp)
4816 rhsc.safe_push (*rhsp);
4818 else if (truth_value_p (code))
4819 /* Truth value results are not pointer (parts). Or at least
4820 a very unreasonable obfuscation of one. */
4822 else
4824 /* All other operations are merges. */
4825 auto_vec<ce_s, 4> tmp;
4826 struct constraint_expr *rhsp;
4827 unsigned i, j;
4828 get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
4829 for (i = 2; i < gimple_num_ops (t); ++i)
4831 get_constraint_for_rhs (gimple_op (t, i), &tmp);
4832 FOR_EACH_VEC_ELT (tmp, j, rhsp)
4833 rhsc.safe_push (*rhsp);
4834 tmp.truncate (0);
4837 process_all_all_constraints (lhsc, rhsc);
4839 /* If there is a store to a global variable the rhs escapes. */
4840 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4841 && DECL_P (lhsop))
4843 varinfo_t vi = get_vi_for_tree (lhsop);
4844 if ((! in_ipa_mode && vi->is_global_var)
4845 || vi->is_ipa_escape_point)
4846 make_escape_constraint (rhsop);
4849 /* Handle escapes through return. */
4850 else if (gimple_code (t) == GIMPLE_RETURN
4851 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
4853 greturn *return_stmt = as_a <greturn *> (t);
4854 fi = NULL;
4855 if (!in_ipa_mode
4856 || !(fi = get_vi_for_tree (fn->decl)))
4857 make_escape_constraint (gimple_return_retval (return_stmt));
4858 else if (in_ipa_mode)
4860 struct constraint_expr lhs;
4861 struct constraint_expr *rhsp;
4862 unsigned i;
4864 lhs = get_function_part_constraint (fi, fi_result);
4865 get_constraint_for_rhs (gimple_return_retval (return_stmt), &rhsc);
4866 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4867 process_constraint (new_constraint (lhs, *rhsp));
4870 /* Handle asms conservatively by adding escape constraints to everything. */
4871 else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
4873 unsigned i, noutputs;
4874 const char **oconstraints;
4875 const char *constraint;
4876 bool allows_mem, allows_reg, is_inout;
4878 noutputs = gimple_asm_noutputs (asm_stmt);
4879 oconstraints = XALLOCAVEC (const char *, noutputs);
4881 for (i = 0; i < noutputs; ++i)
4883 tree link = gimple_asm_output_op (asm_stmt, i);
4884 tree op = TREE_VALUE (link);
4886 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4887 oconstraints[i] = constraint;
4888 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4889 &allows_reg, &is_inout);
4891 /* A memory constraint makes the address of the operand escape. */
4892 if (!allows_reg && allows_mem)
4893 make_escape_constraint (build_fold_addr_expr (op));
4895 /* The asm may read global memory, so outputs may point to
4896 any global memory. */
4897 if (op)
4899 auto_vec<ce_s, 2> lhsc;
4900 struct constraint_expr rhsc, *lhsp;
4901 unsigned j;
4902 get_constraint_for (op, &lhsc);
4903 rhsc.var = nonlocal_id;
4904 rhsc.offset = 0;
4905 rhsc.type = SCALAR;
4906 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4907 process_constraint (new_constraint (*lhsp, rhsc));
4910 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4912 tree link = gimple_asm_input_op (asm_stmt, i);
4913 tree op = TREE_VALUE (link);
4915 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4917 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
4918 &allows_mem, &allows_reg);
4920 /* A memory constraint makes the address of the operand escape. */
4921 if (!allows_reg && allows_mem)
4922 make_escape_constraint (build_fold_addr_expr (op));
4923 /* Strictly we'd only need the constraint to ESCAPED if
4924 the asm clobbers memory, otherwise using something
4925 along the lines of per-call clobbers/uses would be enough. */
4926 else if (op)
4927 make_escape_constraint (op);
4933 /* Create a constraint adding to the clobber set of FI the memory
4934 pointed to by PTR. */
4936 static void
4937 process_ipa_clobber (varinfo_t fi, tree ptr)
4939 vec<ce_s> ptrc = vNULL;
4940 struct constraint_expr *c, lhs;
4941 unsigned i;
4942 get_constraint_for_rhs (ptr, &ptrc);
4943 lhs = get_function_part_constraint (fi, fi_clobbers);
4944 FOR_EACH_VEC_ELT (ptrc, i, c)
4945 process_constraint (new_constraint (lhs, *c));
4946 ptrc.release ();
4949 /* Walk statement T setting up clobber and use constraints according to the
4950 references found in T. This function is a main part of the
4951 IPA constraint builder. */
4953 static void
4954 find_func_clobbers (struct function *fn, gimple *origt)
4956 gimple *t = origt;
4957 auto_vec<ce_s, 16> lhsc;
4958 auto_vec<ce_s, 16> rhsc;
4959 varinfo_t fi;
4961 /* Add constraints for clobbered/used in IPA mode.
4962 We are not interested in what automatic variables are clobbered
4963 or used, as we only use the information in the caller, to which
4964 they do not escape.
4965 gcc_assert (in_ipa_mode);
4967 /* If the stmt refers to memory in any way it had better have a VUSE. */
4968 if (gimple_vuse (t) == NULL_TREE)
4969 return;
4971 /* We'd better have function information for the current function. */
4972 fi = lookup_vi_for_tree (fn->decl);
4973 gcc_assert (fi != NULL);
4975 /* Account for stores in assignments and calls. */
4976 if (gimple_vdef (t) != NULL_TREE
4977 && gimple_has_lhs (t))
4979 tree lhs = gimple_get_lhs (t);
4980 tree tem = lhs;
4981 while (handled_component_p (tem))
4982 tem = TREE_OPERAND (tem, 0);
4983 if ((DECL_P (tem)
4984 && !auto_var_in_fn_p (tem, fn->decl))
4985 || INDIRECT_REF_P (tem)
4986 || (TREE_CODE (tem) == MEM_REF
4987 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
4988 && auto_var_in_fn_p
4989 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
4991 struct constraint_expr lhsc, *rhsp;
4992 unsigned i;
4993 lhsc = get_function_part_constraint (fi, fi_clobbers);
4994 get_constraint_for_address_of (lhs, &rhsc);
4995 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4996 process_constraint (new_constraint (lhsc, *rhsp));
4997 rhsc.truncate (0);
5001 /* Account for uses in assignments and returns. */
5002 if (gimple_assign_single_p (t)
5003 || (gimple_code (t) == GIMPLE_RETURN
5004 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
5006 tree rhs = (gimple_assign_single_p (t)
5007 ? gimple_assign_rhs1 (t)
5008 : gimple_return_retval (as_a <greturn *> (t)));
5009 tree tem = rhs;
5010 while (handled_component_p (tem))
5011 tem = TREE_OPERAND (tem, 0);
5012 if ((DECL_P (tem)
5013 && !auto_var_in_fn_p (tem, fn->decl))
5014 || INDIRECT_REF_P (tem)
5015 || (TREE_CODE (tem) == MEM_REF
5016 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5017 && auto_var_in_fn_p
5018 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5020 struct constraint_expr lhs, *rhsp;
5021 unsigned i;
5022 lhs = get_function_part_constraint (fi, fi_uses);
5023 get_constraint_for_address_of (rhs, &rhsc);
5024 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5025 process_constraint (new_constraint (lhs, *rhsp));
5026 rhsc.truncate (0);
5030 if (gcall *call_stmt = dyn_cast <gcall *> (t))
5032 varinfo_t cfi = NULL;
5033 tree decl = gimple_call_fndecl (t);
5034 struct constraint_expr lhs, rhs;
5035 unsigned i, j;
5037 /* For builtins we do not have separate function info. For those
5038 we do not generate escapes for, we have to generate clobbers/uses. */
5039 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
5040 switch (DECL_FUNCTION_CODE (decl))
5042 /* The following functions use and clobber memory pointed to
5043 by their arguments. */
5044 case BUILT_IN_STRCPY:
5045 case BUILT_IN_STRNCPY:
5046 case BUILT_IN_BCOPY:
5047 case BUILT_IN_MEMCPY:
5048 case BUILT_IN_MEMMOVE:
5049 case BUILT_IN_MEMPCPY:
5050 case BUILT_IN_STPCPY:
5051 case BUILT_IN_STPNCPY:
5052 case BUILT_IN_STRCAT:
5053 case BUILT_IN_STRNCAT:
5054 case BUILT_IN_STRCPY_CHK:
5055 case BUILT_IN_STRNCPY_CHK:
5056 case BUILT_IN_MEMCPY_CHK:
5057 case BUILT_IN_MEMMOVE_CHK:
5058 case BUILT_IN_MEMPCPY_CHK:
5059 case BUILT_IN_STPCPY_CHK:
5060 case BUILT_IN_STPNCPY_CHK:
5061 case BUILT_IN_STRCAT_CHK:
5062 case BUILT_IN_STRNCAT_CHK:
5064 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5065 == BUILT_IN_BCOPY ? 1 : 0));
5066 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5067 == BUILT_IN_BCOPY ? 0 : 1));
5068 unsigned i;
5069 struct constraint_expr *rhsp, *lhsp;
5070 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5071 lhs = get_function_part_constraint (fi, fi_clobbers);
5072 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5073 process_constraint (new_constraint (lhs, *lhsp));
5074 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
5075 lhs = get_function_part_constraint (fi, fi_uses);
5076 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5077 process_constraint (new_constraint (lhs, *rhsp));
5078 return;
5080 /* The following functions clobber memory pointed to by
5081 their first argument. */
5082 case BUILT_IN_MEMSET:
5083 case BUILT_IN_MEMSET_CHK:
5084 case BUILT_IN_POSIX_MEMALIGN:
5086 tree dest = gimple_call_arg (t, 0);
5087 unsigned i;
5088 ce_s *lhsp;
5089 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5090 lhs = get_function_part_constraint (fi, fi_clobbers);
5091 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5092 process_constraint (new_constraint (lhs, *lhsp));
5093 return;
5095 /* The following functions clobber their second and third
5096 arguments. */
5097 case BUILT_IN_SINCOS:
5098 case BUILT_IN_SINCOSF:
5099 case BUILT_IN_SINCOSL:
5101 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5102 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5103 return;
5105 /* The following functions clobber their second argument. */
5106 case BUILT_IN_FREXP:
5107 case BUILT_IN_FREXPF:
5108 case BUILT_IN_FREXPL:
5109 case BUILT_IN_LGAMMA_R:
5110 case BUILT_IN_LGAMMAF_R:
5111 case BUILT_IN_LGAMMAL_R:
5112 case BUILT_IN_GAMMA_R:
5113 case BUILT_IN_GAMMAF_R:
5114 case BUILT_IN_GAMMAL_R:
5115 case BUILT_IN_MODF:
5116 case BUILT_IN_MODFF:
5117 case BUILT_IN_MODFL:
5119 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5120 return;
5122 /* The following functions clobber their third argument. */
5123 case BUILT_IN_REMQUO:
5124 case BUILT_IN_REMQUOF:
5125 case BUILT_IN_REMQUOL:
5127 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5128 return;
5130 /* The following functions neither read nor clobber memory. */
5131 case BUILT_IN_ASSUME_ALIGNED:
5132 case BUILT_IN_FREE:
5133 return;
5134 /* Trampolines are of no interest to us. */
5135 case BUILT_IN_INIT_TRAMPOLINE:
5136 case BUILT_IN_ADJUST_TRAMPOLINE:
5137 return;
5138 case BUILT_IN_VA_START:
5139 case BUILT_IN_VA_END:
5140 return;
5141 case BUILT_IN_GOMP_PARALLEL:
5142 case BUILT_IN_GOACC_PARALLEL:
5144 unsigned int fnpos, argpos;
5145 unsigned int implicit_use_args[2];
5146 unsigned int num_implicit_use_args = 0;
5147 switch (DECL_FUNCTION_CODE (decl))
5149 case BUILT_IN_GOMP_PARALLEL:
5150 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
5151 fnpos = 0;
5152 argpos = 1;
5153 break;
5154 case BUILT_IN_GOACC_PARALLEL:
5155 /* __builtin_GOACC_parallel (device, fn, mapnum, hostaddrs,
5156 sizes, kinds, ...). */
5157 fnpos = 1;
5158 argpos = 3;
5159 implicit_use_args[num_implicit_use_args++] = 4;
5160 implicit_use_args[num_implicit_use_args++] = 5;
5161 break;
5162 default:
5163 gcc_unreachable ();
5166 tree fnarg = gimple_call_arg (t, fnpos);
5167 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
5168 tree fndecl = TREE_OPERAND (fnarg, 0);
5169 if (fndecl_maybe_in_other_partition (fndecl))
5170 /* Fallthru to general call handling. */
5171 break;
5173 varinfo_t cfi = get_vi_for_tree (fndecl);
5175 tree arg = gimple_call_arg (t, argpos);
5177 /* Parameter passed by value is used. */
5178 lhs = get_function_part_constraint (fi, fi_uses);
5179 struct constraint_expr *rhsp;
5180 get_constraint_for (arg, &rhsc);
5181 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5182 process_constraint (new_constraint (lhs, *rhsp));
5183 rhsc.truncate (0);
5185 /* Handle parameters used by the call, but not used in cfi, as
5186 implicitly used by cfi. */
5187 lhs = get_function_part_constraint (cfi, fi_uses);
5188 for (unsigned i = 0; i < num_implicit_use_args; ++i)
5190 tree arg = gimple_call_arg (t, implicit_use_args[i]);
5191 get_constraint_for (arg, &rhsc);
5192 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5193 process_constraint (new_constraint (lhs, *rhsp));
5194 rhsc.truncate (0);
5197 /* The caller clobbers what the callee does. */
5198 lhs = get_function_part_constraint (fi, fi_clobbers);
5199 rhs = get_function_part_constraint (cfi, fi_clobbers);
5200 process_constraint (new_constraint (lhs, rhs));
5202 /* The caller uses what the callee does. */
5203 lhs = get_function_part_constraint (fi, fi_uses);
5204 rhs = get_function_part_constraint (cfi, fi_uses);
5205 process_constraint (new_constraint (lhs, rhs));
5207 return;
5209 /* printf-style functions may have hooks to set pointers to
5210 point to somewhere into the generated string. Leave them
5211 for a later exercise... */
5212 default:
5213 /* Fallthru to general call handling. */;
5216 /* Parameters passed by value are used. */
5217 lhs = get_function_part_constraint (fi, fi_uses);
5218 for (i = 0; i < gimple_call_num_args (t); i++)
5220 struct constraint_expr *rhsp;
5221 tree arg = gimple_call_arg (t, i);
5223 if (TREE_CODE (arg) == SSA_NAME
5224 || is_gimple_min_invariant (arg))
5225 continue;
5227 get_constraint_for_address_of (arg, &rhsc);
5228 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5229 process_constraint (new_constraint (lhs, *rhsp));
5230 rhsc.truncate (0);
5233 /* Build constraints for propagating clobbers/uses along the
5234 callgraph edges. */
5235 cfi = get_fi_for_callee (call_stmt);
5236 if (cfi->id == anything_id)
5238 if (gimple_vdef (t))
5239 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5240 anything_id);
5241 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5242 anything_id);
5243 return;
5246 /* For callees without function info (that's external functions),
5247 ESCAPED is clobbered and used. */
5248 if (gimple_call_fndecl (t)
5249 && !cfi->is_fn_info)
5251 varinfo_t vi;
5253 if (gimple_vdef (t))
5254 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5255 escaped_id);
5256 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
5258 /* Also honor the call statement use/clobber info. */
5259 if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
5260 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5261 vi->id);
5262 if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
5263 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
5264 vi->id);
5265 return;
5268 /* Otherwise the caller clobbers and uses what the callee does.
5269 ??? This should use a new complex constraint that filters
5270 local variables of the callee. */
5271 if (gimple_vdef (t))
5273 lhs = get_function_part_constraint (fi, fi_clobbers);
5274 rhs = get_function_part_constraint (cfi, fi_clobbers);
5275 process_constraint (new_constraint (lhs, rhs));
5277 lhs = get_function_part_constraint (fi, fi_uses);
5278 rhs = get_function_part_constraint (cfi, fi_uses);
5279 process_constraint (new_constraint (lhs, rhs));
5281 else if (gimple_code (t) == GIMPLE_ASM)
5283 /* ??? Ick. We can do better. */
5284 if (gimple_vdef (t))
5285 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5286 anything_id);
5287 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5288 anything_id);
5293 /* Find the first varinfo in the same variable as START that overlaps with
5294 OFFSET. Return NULL if we can't find one. */
5296 static varinfo_t
5297 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5299 /* If the offset is outside of the variable, bail out. */
5300 if (offset >= start->fullsize)
5301 return NULL;
5303 /* If we cannot reach offset from start, lookup the first field
5304 and start from there. */
5305 if (start->offset > offset)
5306 start = get_varinfo (start->head);
5308 while (start)
5310 /* We may not find a variable in the field list with the actual
5311 offset when we have glommed a structure to a variable.
5312 In that case, however, offset should still be within the size
5313 of the variable. */
5314 if (offset >= start->offset
5315 && (offset - start->offset) < start->size)
5316 return start;
5318 start = vi_next (start);
5321 return NULL;
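/* Illustrative standalone sketch (not part of the original file): the
   same overlap test on a simplified singly linked field list.  The
   type and names below are invented for the example and do not use
   the real varinfo representation.  */

struct example_subfield
{
  unsigned long offset;			/* Bit offset from the object start.  */
  unsigned long size;			/* Size in bits.  */
  struct example_subfield *next;	/* Next field in offset order.  */
};

/* Return the first field in the list starting at START that overlaps
   OFFSET, or NULL if no field covers it.  */

static struct example_subfield *
example_field_for_offset (struct example_subfield *start,
			  unsigned long offset)
{
  for (; start; start = start->next)
    if (offset >= start->offset
	&& offset - start->offset < start->size)
      return start;
  return NULL;
}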
5324 /* Find the first varinfo in the same variable as START that overlaps with
5325 OFFSET. If there is no such varinfo the varinfo directly preceding
5326 OFFSET is returned. */
5328 static varinfo_t
5329 first_or_preceding_vi_for_offset (varinfo_t start,
5330 unsigned HOST_WIDE_INT offset)
5332 /* If we cannot reach offset from start, lookup the first field
5333 and start from there. */
5334 if (start->offset > offset)
5335 start = get_varinfo (start->head);
5337 /* We may not find a variable in the field list with the actual
5338 offset when we have glommed a structure to a variable.
5339 In that case, however, offset should still be within the size
5340 of the variable.
5341 If we got beyond the offset we look for, return the field
5342 directly preceding offset, which may be the last field. */
5343 while (start->next
5344 && offset >= start->offset
5345 && !((offset - start->offset) < start->size))
5346 start = vi_next (start);
5348 return start;
5352 /* This structure is used during pushing fields onto the fieldstack
5353 to track the offset of the field, since bitpos_of_field gives it
5354 relative to its immediate containing type, and we want it relative
5355 to the ultimate containing object. */
5357 struct fieldoff
5359 /* Offset from the base of the base containing object to this field. */
5360 HOST_WIDE_INT offset;
5362 /* Size, in bits, of the field. */
5363 unsigned HOST_WIDE_INT size;
5365 unsigned has_unknown_size : 1;
5367 unsigned must_have_pointers : 1;
5369 unsigned may_have_pointers : 1;
5371 unsigned only_restrict_pointers : 1;
5373 tree restrict_pointed_type;
5375 typedef struct fieldoff fieldoff_s;
5378 /* qsort comparison function for two fieldoffs PA and PB. */
5380 static int
5381 fieldoff_compare (const void *pa, const void *pb)
5383 const fieldoff_s *foa = (const fieldoff_s *)pa;
5384 const fieldoff_s *fob = (const fieldoff_s *)pb;
5385 unsigned HOST_WIDE_INT foasize, fobsize;
5387 if (foa->offset < fob->offset)
5388 return -1;
5389 else if (foa->offset > fob->offset)
5390 return 1;
5392 foasize = foa->size;
5393 fobsize = fob->size;
5394 if (foasize < fobsize)
5395 return -1;
5396 else if (foasize > fobsize)
5397 return 1;
5398 return 0;
5401 /* Sort a fieldstack according to the field offset and sizes. */
5402 static void
5403 sort_fieldstack (vec<fieldoff_s> fieldstack)
5405 fieldstack.qsort (fieldoff_compare);
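/* Illustrative standalone sketch (not part of the original file): the
   same offset-then-size ordering applied to a simplified field record
   using the C library qsort directly.  The type and names are invented
   for the example.  */

#include <stdlib.h>	/* For qsort; harmless if already included.  */

struct example_fieldoff
{
  long offset;			/* Bit offset within the containing object.  */
  unsigned long size;		/* Size in bits.  */
};

static int
example_fieldoff_compare (const void *pa, const void *pb)
{
  const struct example_fieldoff *a = (const struct example_fieldoff *) pa;
  const struct example_fieldoff *b = (const struct example_fieldoff *) pb;
  if (a->offset != b->offset)
    return a->offset < b->offset ? -1 : 1;
  if (a->size != b->size)
    return a->size < b->size ? -1 : 1;
  return 0;
}

/* Example use:
     qsort (fields, n, sizeof (struct example_fieldoff),
	    example_fieldoff_compare);  */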
5408 /* Return true if T is a type that can have subvars. */
5410 static inline bool
5411 type_can_have_subvars (const_tree t)
5413 /* Aggregates without overlapping fields can have subvars. */
5414 return TREE_CODE (t) == RECORD_TYPE;
5417 /* Return true if V is a tree that we can have subvars for.
5418 Normally, this is any aggregate type. Also complex
5419 types which are not gimple registers can have subvars. */
5421 static inline bool
5422 var_can_have_subvars (const_tree v)
5424 /* Volatile variables should never have subvars. */
5425 if (TREE_THIS_VOLATILE (v))
5426 return false;
5428 /* Non decls or memory tags can never have subvars. */
5429 if (!DECL_P (v))
5430 return false;
5432 return type_can_have_subvars (TREE_TYPE (v));
5435 /* Return true if T is a type that must contain pointers. */
5437 static bool
5438 type_must_have_pointers (tree type)
5440 if (POINTER_TYPE_P (type))
5441 return true;
5443 if (TREE_CODE (type) == ARRAY_TYPE)
5444 return type_must_have_pointers (TREE_TYPE (type));
5446 /* A function or method can have pointers as arguments, so track
5447 those separately. */
5448 if (TREE_CODE (type) == FUNCTION_TYPE
5449 || TREE_CODE (type) == METHOD_TYPE)
5450 return true;
5452 return false;
5455 static bool
5456 field_must_have_pointers (tree t)
5458 return type_must_have_pointers (TREE_TYPE (t));
5461 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5462 the fields of TYPE onto fieldstack, recording their offsets along
5463 the way.
5465 OFFSET is used to keep track of the offset in this entire
5466 structure, rather than just the immediately containing structure.
5467 Returns false if the caller is supposed to handle the field we
5468 recursed for. */
5470 static bool
5471 push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
5472 HOST_WIDE_INT offset)
5474 tree field;
5475 bool empty_p = true;
5477 if (TREE_CODE (type) != RECORD_TYPE)
5478 return false;
5480 /* If the vector of fields is growing too big, bail out early.
5481 Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
5482 sure this fails. */
5483 if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5484 return false;
5486 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5487 if (TREE_CODE (field) == FIELD_DECL)
5489 bool push = false;
5490 HOST_WIDE_INT foff = bitpos_of_field (field);
5491 tree field_type = TREE_TYPE (field);
5493 if (!var_can_have_subvars (field)
5494 || TREE_CODE (field_type) == QUAL_UNION_TYPE
5495 || TREE_CODE (field_type) == UNION_TYPE)
5496 push = true;
5497 else if (!push_fields_onto_fieldstack
5498 (field_type, fieldstack, offset + foff)
5499 && (DECL_SIZE (field)
5500 && !integer_zerop (DECL_SIZE (field))))
5501 /* Empty structures may have actual size, like in C++. So
5502 if we didn't push any subfields and the size is
5503 nonzero, push the field itself onto the stack. */
5504 push = true;
5506 if (push)
5508 fieldoff_s *pair = NULL;
5509 bool has_unknown_size = false;
5510 bool must_have_pointers_p;
5512 if (!fieldstack->is_empty ())
5513 pair = &fieldstack->last ();
5515 /* If there isn't anything at offset zero, create something. */
5516 if (!pair
5517 && offset + foff != 0)
5519 fieldoff_s e
5520 = {0, offset + foff, false, false, false, false, NULL_TREE};
5521 pair = fieldstack->safe_push (e);
5524 if (!DECL_SIZE (field)
5525 || !tree_fits_uhwi_p (DECL_SIZE (field)))
5526 has_unknown_size = true;
5528 /* If adjacent fields do not contain pointers merge them. */
5529 must_have_pointers_p = field_must_have_pointers (field);
5530 if (pair
5531 && !has_unknown_size
5532 && !must_have_pointers_p
5533 && !pair->must_have_pointers
5534 && !pair->has_unknown_size
5535 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5537 pair->size += tree_to_uhwi (DECL_SIZE (field));
5539 else
5541 fieldoff_s e;
5542 e.offset = offset + foff;
5543 e.has_unknown_size = has_unknown_size;
5544 if (!has_unknown_size)
5545 e.size = tree_to_uhwi (DECL_SIZE (field));
5546 else
5547 e.size = -1;
5548 e.must_have_pointers = must_have_pointers_p;
5549 e.may_have_pointers = true;
5550 e.only_restrict_pointers
5551 = (!has_unknown_size
5552 && POINTER_TYPE_P (field_type)
5553 && TYPE_RESTRICT (field_type));
5554 if (e.only_restrict_pointers)
5555 e.restrict_pointed_type = TREE_TYPE (field_type);
5556 fieldstack->safe_push (e);
5560 empty_p = false;
5563 return !empty_p;
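/* Illustrative example (not part of the original file): on a typical
   LP64 target the declaration

     struct S { int a; int b; char *p; } s;

   yields a fieldstack of two entries after the merging above: the
   pointer-free pair a/b collapses into a single record at offset 0
   of size 64 bits, followed by a record for p at offset 64 of size
   64 bits with must_have_pointers set.  */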
5566 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5567 if it is a varargs function. */
5569 static unsigned int
5570 count_num_arguments (tree decl, bool *is_varargs)
5572 unsigned int num = 0;
5573 tree t;
5575 /* Capture named arguments for K&R functions. They do not
5576 have a prototype and thus no TYPE_ARG_TYPES. */
5577 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5578 ++num;
5580 /* Check if the function has variadic arguments. */
5581 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5582 if (TREE_VALUE (t) == void_type_node)
5583 break;
5584 if (!t)
5585 *is_varargs = true;
5587 return num;
5590 /* Create the function info node for DECL, using NAME, and return the
5591 varinfo we've created for the function. If NONLOCAL_P, create
5592 initial constraints. */
5594 static varinfo_t
5595 create_function_info_for (tree decl, const char *name, bool add_id,
5596 bool nonlocal_p)
5598 struct function *fn = DECL_STRUCT_FUNCTION (decl);
5599 varinfo_t vi, prev_vi;
5600 tree arg;
5601 unsigned int i;
5602 bool is_varargs = false;
5603 unsigned int num_args = count_num_arguments (decl, &is_varargs);
5605 /* Create the variable info. */
5607 vi = new_var_info (decl, name, add_id);
5608 vi->offset = 0;
5609 vi->size = 1;
5610 vi->fullsize = fi_parm_base + num_args;
5611 vi->is_fn_info = 1;
5612 vi->may_have_pointers = false;
5613 if (is_varargs)
5614 vi->fullsize = ~0;
5615 insert_vi_for_tree (vi->decl, vi);
5617 prev_vi = vi;
5619 /* Create a variable for things the function clobbers and one for
5620 things the function uses. */
5622 varinfo_t clobbervi, usevi;
5623 const char *newname;
5624 char *tempname;
5626 tempname = xasprintf ("%s.clobber", name);
5627 newname = ggc_strdup (tempname);
5628 free (tempname);
5630 clobbervi = new_var_info (NULL, newname, false);
5631 clobbervi->offset = fi_clobbers;
5632 clobbervi->size = 1;
5633 clobbervi->fullsize = vi->fullsize;
5634 clobbervi->is_full_var = true;
5635 clobbervi->is_global_var = false;
5637 gcc_assert (prev_vi->offset < clobbervi->offset);
5638 prev_vi->next = clobbervi->id;
5639 prev_vi = clobbervi;
5641 tempname = xasprintf ("%s.use", name);
5642 newname = ggc_strdup (tempname);
5643 free (tempname);
5645 usevi = new_var_info (NULL, newname, false);
5646 usevi->offset = fi_uses;
5647 usevi->size = 1;
5648 usevi->fullsize = vi->fullsize;
5649 usevi->is_full_var = true;
5650 usevi->is_global_var = false;
5652 gcc_assert (prev_vi->offset < usevi->offset);
5653 prev_vi->next = usevi->id;
5654 prev_vi = usevi;
5657 /* And one for the static chain. */
5658 if (fn->static_chain_decl != NULL_TREE)
5660 varinfo_t chainvi;
5661 const char *newname;
5662 char *tempname;
5664 tempname = xasprintf ("%s.chain", name);
5665 newname = ggc_strdup (tempname);
5666 free (tempname);
5668 chainvi = new_var_info (fn->static_chain_decl, newname, false);
5669 chainvi->offset = fi_static_chain;
5670 chainvi->size = 1;
5671 chainvi->fullsize = vi->fullsize;
5672 chainvi->is_full_var = true;
5673 chainvi->is_global_var = false;
5675 insert_vi_for_tree (fn->static_chain_decl, chainvi);
5677 if (nonlocal_p
5678 && chainvi->may_have_pointers)
5679 make_constraint_from (chainvi, nonlocal_id);
5681 gcc_assert (prev_vi->offset < chainvi->offset);
5682 prev_vi->next = chainvi->id;
5683 prev_vi = chainvi;
5686 /* Create a variable for the return var. */
5687 if (DECL_RESULT (decl) != NULL
5688 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
5690 varinfo_t resultvi;
5691 const char *newname;
5692 char *tempname;
5693 tree resultdecl = decl;
5695 if (DECL_RESULT (decl))
5696 resultdecl = DECL_RESULT (decl);
5698 tempname = xasprintf ("%s.result", name);
5699 newname = ggc_strdup (tempname);
5700 free (tempname);
5702 resultvi = new_var_info (resultdecl, newname, false);
5703 resultvi->offset = fi_result;
5704 resultvi->size = 1;
5705 resultvi->fullsize = vi->fullsize;
5706 resultvi->is_full_var = true;
5707 if (DECL_RESULT (decl))
5708 resultvi->may_have_pointers = true;
5710 if (DECL_RESULT (decl))
5711 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
5713 if (nonlocal_p
5714 && DECL_RESULT (decl)
5715 && DECL_BY_REFERENCE (DECL_RESULT (decl)))
5716 make_constraint_from (resultvi, nonlocal_id);
5718 gcc_assert (prev_vi->offset < resultvi->offset);
5719 prev_vi->next = resultvi->id;
5720 prev_vi = resultvi;
5723 /* We also need to make function return values escape. Nothing
5724 escapes by returning from main though. */
5725 if (nonlocal_p
5726 && !MAIN_NAME_P (DECL_NAME (decl)))
5728 varinfo_t fi, rvi;
5729 fi = lookup_vi_for_tree (decl);
5730 rvi = first_vi_for_offset (fi, fi_result);
5731 if (rvi && rvi->offset == fi_result)
5732 make_copy_constraint (get_varinfo (escaped_id), rvi->id);
5735 /* Set up variables for each argument. */
5736 arg = DECL_ARGUMENTS (decl);
5737 for (i = 0; i < num_args; i++)
5739 varinfo_t argvi;
5740 const char *newname;
5741 char *tempname;
5742 tree argdecl = decl;
5744 if (arg)
5745 argdecl = arg;
5747 tempname = xasprintf ("%s.arg%d", name, i);
5748 newname = ggc_strdup (tempname);
5749 free (tempname);
5751 argvi = new_var_info (argdecl, newname, false);
5752 argvi->offset = fi_parm_base + i;
5753 argvi->size = 1;
5754 argvi->is_full_var = true;
5755 argvi->fullsize = vi->fullsize;
5756 if (arg)
5757 argvi->may_have_pointers = true;
5759 if (arg)
5760 insert_vi_for_tree (arg, argvi);
5762 if (nonlocal_p
5763 && argvi->may_have_pointers)
5764 make_constraint_from (argvi, nonlocal_id);
5766 gcc_assert (prev_vi->offset < argvi->offset);
5767 prev_vi->next = argvi->id;
5768 prev_vi = argvi;
5769 if (arg)
5770 arg = DECL_CHAIN (arg);
5773 /* Add one representative for all further args. */
5774 if (is_varargs)
5776 varinfo_t argvi;
5777 const char *newname;
5778 char *tempname;
5779 tree decl;
5781 tempname = xasprintf ("%s.varargs", name);
5782 newname = ggc_strdup (tempname);
5783 free (tempname);
5785 /* We need something that can be pointed to for va_start. */
5786 decl = build_fake_var_decl (ptr_type_node);
5788 argvi = new_var_info (decl, newname, false);
5789 argvi->offset = fi_parm_base + num_args;
5790 argvi->size = ~0;
5791 argvi->is_full_var = true;
5792 argvi->is_heap_var = true;
5793 argvi->fullsize = vi->fullsize;
5795 if (nonlocal_p
5796 && argvi->may_have_pointers)
5797 make_constraint_from (argvi, nonlocal_id);
5799 gcc_assert (prev_vi->offset < argvi->offset);
5800 prev_vi->next = argvi->id;
5801 prev_vi = argvi;
5804 return vi;
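/* Illustrative example (not part of the original file): for

     int *foo (int *p, int *q) { ... }

   the function info built above is a chain of sub-variables

     foo		the function itself
     foo.clobber	at offset fi_clobbers
     foo.use		at offset fi_uses
     foo.result		at offset fi_result
     foo.arg0		at offset fi_parm_base
     foo.arg1		at offset fi_parm_base + 1

   with a foo.chain sub-variable inserted for nested functions and a
   trailing foo.varargs representative for variadic ones.  */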
5808 /* Return true if FIELDSTACK contains fields that overlap.
5809 FIELDSTACK is assumed to be sorted by offset. */
5811 static bool
5812 check_for_overlaps (vec<fieldoff_s> fieldstack)
5814 fieldoff_s *fo = NULL;
5815 unsigned int i;
5816 HOST_WIDE_INT lastoffset = -1;
5818 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5820 if (fo->offset == lastoffset)
5821 return true;
5822 lastoffset = fo->offset;
5824 return false;
5827 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5828 This will also create any varinfo structures necessary for fields
5829 of DECL. DECL is a function parameter if HANDLE_PARAM is set.
5830 HANDLED_STRUCT_TYPE is used to register struct types reached by following
5831 restrict pointers. This is needed to prevent infinite recursion. */
5833 static varinfo_t
5834 create_variable_info_for_1 (tree decl, const char *name, bool add_id,
5835 bool handle_param, bitmap handled_struct_type)
5837 varinfo_t vi, newvi;
5838 tree decl_type = TREE_TYPE (decl);
5839 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5840 auto_vec<fieldoff_s> fieldstack;
5841 fieldoff_s *fo;
5842 unsigned int i;
5844 if (!declsize
5845 || !tree_fits_uhwi_p (declsize))
5847 vi = new_var_info (decl, name, add_id);
5848 vi->offset = 0;
5849 vi->size = ~0;
5850 vi->fullsize = ~0;
5851 vi->is_unknown_size_var = true;
5852 vi->is_full_var = true;
5853 vi->may_have_pointers = true;
5854 return vi;
5857 /* Collect field information. */
5858 if (use_field_sensitive
5859 && var_can_have_subvars (decl)
5860 /* ??? Force us to not use subfields for globals in IPA mode.
5861 Else we'd have to parse arbitrary initializers. */
5862 && !(in_ipa_mode
5863 && is_global_var (decl)))
5865 fieldoff_s *fo = NULL;
5866 bool notokay = false;
5867 unsigned int i;
5869 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
5871 for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
5872 if (fo->has_unknown_size
5873 || fo->offset < 0)
5875 notokay = true;
5876 break;
5879 /* We can't sort them if we have a field with a variable sized type,
5880 which will make notokay = true. In that case, we are going to return
5881 without creating varinfos for the fields anyway, so sorting them is a
5882 waste to boot. */
5883 if (!notokay)
5885 sort_fieldstack (fieldstack);
5886 /* Due to some C++ FE issues, like PR 22488, we might end up with
5887 what appear to be overlapping fields even though they,
5888 in reality, do not overlap. Until the C++ FE is fixed,
5889 we will simply disable field-sensitivity for these cases. */
5890 notokay = check_for_overlaps (fieldstack);
5893 if (notokay)
5894 fieldstack.release ();
5897 /* If we didn't end up collecting sub-variables create a full
5898 variable for the decl. */
5899 if (fieldstack.length () == 0
5900 || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5902 vi = new_var_info (decl, name, add_id);
5903 vi->offset = 0;
5904 vi->may_have_pointers = true;
5905 vi->fullsize = tree_to_uhwi (declsize);
5906 vi->size = vi->fullsize;
5907 vi->is_full_var = true;
5908 if (POINTER_TYPE_P (decl_type)
5909 && TYPE_RESTRICT (decl_type))
5910 vi->only_restrict_pointers = 1;
5911 if (vi->only_restrict_pointers
5912 && !type_contains_placeholder_p (TREE_TYPE (decl_type))
5913 && handle_param
5914 && !bitmap_bit_p (handled_struct_type,
5915 TYPE_UID (TREE_TYPE (decl_type))))
5917 varinfo_t rvi;
5918 tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type));
5919 DECL_EXTERNAL (heapvar) = 1;
5920 if (var_can_have_subvars (heapvar))
5921 bitmap_set_bit (handled_struct_type,
5922 TYPE_UID (TREE_TYPE (decl_type)));
5923 rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
5924 true, handled_struct_type);
5925 if (var_can_have_subvars (heapvar))
5926 bitmap_clear_bit (handled_struct_type,
5927 TYPE_UID (TREE_TYPE (decl_type)));
5928 rvi->is_restrict_var = 1;
5929 insert_vi_for_tree (heapvar, rvi);
5930 make_constraint_from (vi, rvi->id);
5931 make_param_constraints (rvi);
5933 fieldstack.release ();
5934 return vi;
5937 vi = new_var_info (decl, name, add_id);
5938 vi->fullsize = tree_to_uhwi (declsize);
5939 if (fieldstack.length () == 1)
5940 vi->is_full_var = true;
5941 for (i = 0, newvi = vi;
5942 fieldstack.iterate (i, &fo);
5943 ++i, newvi = vi_next (newvi))
5945 const char *newname = NULL;
5946 char *tempname;
5948 if (dump_file)
5950 if (fieldstack.length () != 1)
5952 tempname
5953 = xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
5954 "+" HOST_WIDE_INT_PRINT_DEC, name,
5955 fo->offset, fo->size);
5956 newname = ggc_strdup (tempname);
5957 free (tempname);
5960 else
5961 newname = "NULL";
5963 if (newname)
5964 newvi->name = newname;
5965 newvi->offset = fo->offset;
5966 newvi->size = fo->size;
5967 newvi->fullsize = vi->fullsize;
5968 newvi->may_have_pointers = fo->may_have_pointers;
5969 newvi->only_restrict_pointers = fo->only_restrict_pointers;
5970 if (handle_param
5971 && newvi->only_restrict_pointers
5972 && !type_contains_placeholder_p (fo->restrict_pointed_type)
5973 && !bitmap_bit_p (handled_struct_type,
5974 TYPE_UID (fo->restrict_pointed_type)))
5976 varinfo_t rvi;
5977 tree heapvar = build_fake_var_decl (fo->restrict_pointed_type);
5978 DECL_EXTERNAL (heapvar) = 1;
5979 if (var_can_have_subvars (heapvar))
5980 bitmap_set_bit (handled_struct_type,
5981 TYPE_UID (fo->restrict_pointed_type));
5982 rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
5983 true, handled_struct_type);
5984 if (var_can_have_subvars (heapvar))
5985 bitmap_clear_bit (handled_struct_type,
5986 TYPE_UID (fo->restrict_pointed_type));
5987 rvi->is_restrict_var = 1;
5988 insert_vi_for_tree (heapvar, rvi);
5989 make_constraint_from (newvi, rvi->id);
5990 make_param_constraints (rvi);
5992 if (i + 1 < fieldstack.length ())
5994 varinfo_t tem = new_var_info (decl, name, false);
5995 newvi->next = tem->id;
5996 tem->head = vi->id;
6000 return vi;
6003 static unsigned int
6004 create_variable_info_for (tree decl, const char *name, bool add_id)
6006 varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, false, NULL);
6007 unsigned int id = vi->id;
6009 insert_vi_for_tree (decl, vi);
6011 if (TREE_CODE (decl) != VAR_DECL)
6012 return id;
6014 /* Create initial constraints for globals. */
6015 for (; vi; vi = vi_next (vi))
6017 if (!vi->may_have_pointers
6018 || !vi->is_global_var)
6019 continue;
6021 /* Mark global restrict qualified pointers. */
6022 if ((POINTER_TYPE_P (TREE_TYPE (decl))
6023 && TYPE_RESTRICT (TREE_TYPE (decl)))
6024 || vi->only_restrict_pointers)
6026 varinfo_t rvi
6027 = make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT",
6028 true);
6029 /* ??? For now exclude reads from globals as restrict sources
6030 if those are not (indirectly) from incoming parameters. */
6031 rvi->is_restrict_var = false;
6032 continue;
6035 /* In non-IPA mode the initializer from nonlocal is all we need. */
6036 if (!in_ipa_mode
6037 || DECL_HARD_REGISTER (decl))
6038 make_copy_constraint (vi, nonlocal_id);
6040 /* In IPA mode parse the initializer and generate proper constraints
6041 for it. */
6042 else
6044 varpool_node *vnode = varpool_node::get (decl);
6046 /* For escaped variables initialize them from nonlocal. */
6047 if (!vnode->all_refs_explicit_p ())
6048 make_copy_constraint (vi, nonlocal_id);
6050 /* If this is a global variable with an initializer and we are in
6051 IPA mode, generate constraints for it. */
6052 ipa_ref *ref;
6053 for (unsigned idx = 0; vnode->iterate_reference (idx, ref); ++idx)
6055 auto_vec<ce_s> rhsc;
6056 struct constraint_expr lhs, *rhsp;
6057 unsigned i;
6058 get_constraint_for_address_of (ref->referred->decl, &rhsc);
6059 lhs.var = vi->id;
6060 lhs.offset = 0;
6061 lhs.type = SCALAR;
6062 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6063 process_constraint (new_constraint (lhs, *rhsp));
6064 /* If this is a variable that escapes from the unit
6065 the initializer escapes as well. */
6066 if (!vnode->all_refs_explicit_p ())
6068 lhs.var = escaped_id;
6069 lhs.offset = 0;
6070 lhs.type = SCALAR;
6071 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6072 process_constraint (new_constraint (lhs, *rhsp));
6078 return id;
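/* As an illustration (a sketch, not taken from actual dumps): in IPA mode a
   global definition such as

     int g;
     int *gp = &g;

   makes the reference walk above emit the constraint "gp = &g" for the
   initializer, and, if GP is visible outside the unit, additionally
   "ESCAPED = &g" because the initializer escapes together with GP.  In
   non-IPA mode GP instead simply receives a copy constraint from NONLOCAL.  */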
6081 /* Print out the points-to solution for VAR to FILE. */
6083 static void
6084 dump_solution_for_var (FILE *file, unsigned int var)
6086 varinfo_t vi = get_varinfo (var);
6087 unsigned int i;
6088 bitmap_iterator bi;
6090 /* Dump the solution for unified vars anyway; this avoids difficulties
6091 in scanning dumps in the testsuite. */
6092 fprintf (file, "%s = { ", vi->name);
6093 vi = get_varinfo (find (var));
6094 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6095 fprintf (file, "%s ", get_varinfo (i)->name);
6096 fprintf (file, "}");
6098 /* But note when the variable was unified. */
6099 if (vi->id != var)
6100 fprintf (file, " same as %s", vi->name);
6102 fprintf (file, "\n");
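/* For instance (illustrative output only), a pointer P whose solution
   contains NONLOCAL and a local variable a is dumped as

     p = { NONLOCAL a }

   and, if P was unified with another variable during solving, the text
   " same as <leader>" is appended before the newline.  */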
6105 /* Print the points-to solution for VAR to stderr. */
6107 DEBUG_FUNCTION void
6108 debug_solution_for_var (unsigned int var)
6110 dump_solution_for_var (stderr, var);
6113 /* Register the constraints for the function-parameter-related varinfo VI and its sub-fields. */
6115 static void
6116 make_param_constraints (varinfo_t vi)
6118 for (; vi; vi = vi_next (vi))
6120 if (vi->only_restrict_pointers)
6121 ;
6122 else if (vi->may_have_pointers)
6123 make_constraint_from (vi, nonlocal_id);
6125 if (vi->is_full_var)
6126 break;
6130 /* Create varinfo structures for all of the variables in the
6131 function for intraprocedural mode. */
6133 static void
6134 intra_create_variable_infos (struct function *fn)
6136 tree t;
6137 bitmap handled_struct_type = NULL;
6139 /* For each incoming pointer argument arg, create the constraint ARG
6140 = NONLOCAL or a dummy variable if it is a restrict qualified
6141 passed-by-reference argument. */
6142 for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
6144 if (handled_struct_type == NULL)
6145 handled_struct_type = BITMAP_ALLOC (NULL);
6147 varinfo_t p
6148 = create_variable_info_for_1 (t, alias_get_name (t), false, true,
6149 handled_struct_type);
6150 insert_vi_for_tree (t, p);
6152 make_param_constraints (p);
6155 if (handled_struct_type != NULL)
6156 BITMAP_FREE (handled_struct_type);
6158 /* Add a constraint for a result decl that is passed by reference. */
6159 if (DECL_RESULT (fn->decl)
6160 && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
6162 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));
6164 for (p = result_vi; p; p = vi_next (p))
6165 make_constraint_from (p, nonlocal_id);
6168 /* Add a constraint for the incoming static chain parameter. */
6169 if (fn->static_chain_decl != NULL_TREE)
6171 varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);
6173 for (p = chain_vi; p; p = vi_next (p))
6174 make_constraint_from (p, nonlocal_id);
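/* A sketch of the net effect (illustrative, not taken from actual dumps):
   for a function such as

     int f (int *p, int * restrict q)
     {
       return *p + *q;
     }

   the plain pointer parameter P gets the constraint "p = &NONLOCAL", while
   the restrict-qualified Q is instead pointed at a fresh fake heap variable,
   roughly "q = &PARM_NOALIAS", so that dereferences of Q are known not to
   alias other incoming memory.  */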
6178 /* Structure used to put solution bitmaps in a hashtable so they can
6179 be shared among variables with the same points-to set. */
6181 typedef struct shared_bitmap_info
6183 bitmap pt_vars;
6184 hashval_t hashcode;
6185 } *shared_bitmap_info_t;
6186 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
6188 /* Shared_bitmap hashtable helpers. */
6190 struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
6192 static inline hashval_t hash (const shared_bitmap_info *);
6193 static inline bool equal (const shared_bitmap_info *,
6194 const shared_bitmap_info *);
6197 /* Hash function for a shared_bitmap_info_t */
6199 inline hashval_t
6200 shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
6202 return bi->hashcode;
6205 /* Equality function for two shared_bitmap_info_t's. */
6207 inline bool
6208 shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
6209 const shared_bitmap_info *sbi2)
6211 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
6214 /* Shared_bitmap hashtable. */
6216 static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
6218 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
6219 existing instance if there is one, NULL otherwise. */
6221 static bitmap
6222 shared_bitmap_lookup (bitmap pt_vars)
6224 shared_bitmap_info **slot;
6225 struct shared_bitmap_info sbi;
6227 sbi.pt_vars = pt_vars;
6228 sbi.hashcode = bitmap_hash (pt_vars);
6230 slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
6231 if (!slot)
6232 return NULL;
6233 else
6234 return (*slot)->pt_vars;
6238 /* Add a bitmap to the shared bitmap hashtable. */
6240 static void
6241 shared_bitmap_add (bitmap pt_vars)
6243 shared_bitmap_info **slot;
6244 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
6246 sbi->pt_vars = pt_vars;
6247 sbi->hashcode = bitmap_hash (pt_vars);
6249 slot = shared_bitmap_table->find_slot (sbi, INSERT);
6250 gcc_assert (!*slot);
6251 *slot = sbi;
6255 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
6257 static void
6258 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
6259 tree fndecl)
6261 unsigned int i;
6262 bitmap_iterator bi;
6263 varinfo_t escaped_vi = get_varinfo (find (escaped_id));
6264 bool everything_escaped
6265 = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);
6267 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
6269 varinfo_t vi = get_varinfo (i);
6271 /* The only artificial variables that are allowed in a may-alias
6272 set are heap variables. */
6273 if (vi->is_artificial_var && !vi->is_heap_var)
6274 continue;
6276 if (everything_escaped
6277 || (escaped_vi->solution
6278 && bitmap_bit_p (escaped_vi->solution, i)))
6280 pt->vars_contains_escaped = true;
6281 pt->vars_contains_escaped_heap = vi->is_heap_var;
6284 if (vi->is_restrict_var)
6285 pt->vars_contains_restrict = true;
6287 if (TREE_CODE (vi->decl) == VAR_DECL
6288 || TREE_CODE (vi->decl) == PARM_DECL
6289 || TREE_CODE (vi->decl) == RESULT_DECL)
6291 /* If we are in IPA mode we will not recompute points-to
6292 sets after inlining so make sure they stay valid. */
6293 if (in_ipa_mode
6294 && !DECL_PT_UID_SET_P (vi->decl))
6295 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
6297 /* Add the decl to the points-to set. Note that the points-to
6298 set contains global variables. */
6299 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
6300 if (vi->is_global_var
6301 /* In IPA mode the escaped_heap trick doesn't work as
6302 ESCAPED is escaped from the unit but
6303 pt_solution_includes_global needs to answer true for
6304 all variables not automatic within a function.
6305 For the same reason is_global_var is not the
6306 correct flag to track - local variables from other
6307 functions also need to be considered global.
6308 Conveniently all HEAP vars are not put in function
6309 scope. */
6310 || (in_ipa_mode
6311 && fndecl
6312 && ! auto_var_in_fn_p (vi->decl, fndecl)))
6313 pt->vars_contains_nonlocal = true;
6316 else if (TREE_CODE (vi->decl) == FUNCTION_DECL
6317 || TREE_CODE (vi->decl) == LABEL_DECL)
6319 /* Nothing should read/write from/to code so we can
6320 save bits by not including them in the points-to bitmaps.
6321 Still mark the points-to set as containing global memory
6322 to make code-patching possible - see PR70128. */
6323 pt->vars_contains_nonlocal = true;
6329 /* Compute and return the points-to solution for the variable ORIG_VI in the context of FNDECL. */
6331 static struct pt_solution
6332 find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
6334 unsigned int i;
6335 bitmap_iterator bi;
6336 bitmap finished_solution;
6337 bitmap result;
6338 varinfo_t vi;
6339 struct pt_solution *pt;
6341 /* This variable may have been collapsed, let's get the real
6342 variable. */
6343 vi = get_varinfo (find (orig_vi->id));
6345 /* See if we have already computed the solution and return it. */
6346 pt_solution **slot = &final_solutions->get_or_insert (vi);
6347 if (*slot != NULL)
6348 return **slot;
6350 *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
6351 memset (pt, 0, sizeof (struct pt_solution));
6353 /* Translate artificial variables into SSA_NAME_PTR_INFO
6354 attributes. */
6355 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6357 varinfo_t vi = get_varinfo (i);
6359 if (vi->is_artificial_var)
6361 if (vi->id == nothing_id)
6362 pt->null = 1;
6363 else if (vi->id == escaped_id)
6365 if (in_ipa_mode)
6366 pt->ipa_escaped = 1;
6367 else
6368 pt->escaped = 1;
6369 /* Expand some special vars of ESCAPED in-place here. */
6370 varinfo_t evi = get_varinfo (find (escaped_id));
6371 if (bitmap_bit_p (evi->solution, nonlocal_id))
6372 pt->nonlocal = 1;
6374 else if (vi->id == nonlocal_id)
6375 pt->nonlocal = 1;
6376 else if (vi->is_heap_var)
6377 /* We represent heapvars in the points-to set properly. */
6379 else if (vi->id == string_id)
6380 /* Nobody cares - STRING_CSTs are read-only entities. */
6382 else if (vi->id == anything_id
6383 || vi->id == integer_id)
6384 pt->anything = 1;
6388 /* Instead of doing extra work, simply do not create
6389 elaborate points-to information for pt_anything pointers. */
6390 if (pt->anything)
6391 return *pt;
6393 /* Share the final set of variables when possible. */
6394 finished_solution = BITMAP_GGC_ALLOC ();
6395 stats.points_to_sets_created++;
6397 set_uids_in_ptset (finished_solution, vi->solution, pt, fndecl);
6398 result = shared_bitmap_lookup (finished_solution);
6399 if (!result)
6401 shared_bitmap_add (finished_solution);
6402 pt->vars = finished_solution;
6404 else
6406 pt->vars = result;
6407 bitmap_clear (finished_solution);
6410 return *pt;
6413 /* Given a pointer variable P, fill in its points-to set. */
6415 static void
6416 find_what_p_points_to (tree fndecl, tree p)
6418 struct ptr_info_def *pi;
6419 tree lookup_p = p;
6420 varinfo_t vi;
6422 /* For parameters, get at the points-to set for the actual parm
6423 decl. */
6424 if (TREE_CODE (p) == SSA_NAME
6425 && SSA_NAME_IS_DEFAULT_DEF (p)
6426 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
6427 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
6428 lookup_p = SSA_NAME_VAR (p);
6430 vi = lookup_vi_for_tree (lookup_p);
6431 if (!vi)
6432 return;
6434 pi = get_ptr_info (p);
6435 pi->pt = find_what_var_points_to (fndecl, vi);
6439 /* Query statistics for points-to solutions. */
6441 static struct {
6442 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
6443 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
6444 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
6445 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
6446 } pta_stats;
6448 void
6449 dump_pta_stats (FILE *s)
6451 fprintf (s, "\nPTA query stats:\n");
6452 fprintf (s, " pt_solution_includes: "
6453 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6454 HOST_WIDE_INT_PRINT_DEC" queries\n",
6455 pta_stats.pt_solution_includes_no_alias,
6456 pta_stats.pt_solution_includes_no_alias
6457 + pta_stats.pt_solution_includes_may_alias);
6458 fprintf (s, " pt_solutions_intersect: "
6459 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6460 HOST_WIDE_INT_PRINT_DEC" queries\n",
6461 pta_stats.pt_solutions_intersect_no_alias,
6462 pta_stats.pt_solutions_intersect_no_alias
6463 + pta_stats.pt_solutions_intersect_may_alias);
6467 /* Reset the points-to solution *PT to a conservative default
6468 (point to anything). */
6470 void
6471 pt_solution_reset (struct pt_solution *pt)
6473 memset (pt, 0, sizeof (struct pt_solution));
6474 pt->anything = true;
6477 /* Set the points-to solution *PT to point only to the variables
6478 in VARS. VARS_CONTAINS_NONLOCAL specifies whether VARS contains
6479 global variables. */
6482 void
6483 pt_solution_set (struct pt_solution *pt, bitmap vars,
6484 bool vars_contains_nonlocal)
6486 memset (pt, 0, sizeof (struct pt_solution));
6487 pt->vars = vars;
6488 pt->vars_contains_nonlocal = vars_contains_nonlocal;
6489 pt->vars_contains_escaped
6490 = (cfun->gimple_df->escaped.anything
6491 || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
6494 /* Set the points-to solution *PT to point only to the variable VAR. */
6496 void
6497 pt_solution_set_var (struct pt_solution *pt, tree var)
6499 memset (pt, 0, sizeof (struct pt_solution));
6500 pt->vars = BITMAP_GGC_ALLOC ();
6501 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6502 pt->vars_contains_nonlocal = is_global_var (var);
6503 pt->vars_contains_escaped
6504 = (cfun->gimple_df->escaped.anything
6505 || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
6508 /* Computes the union of the points-to solutions *DEST and *SRC and
6509 stores the result in *DEST. This changes the points-to bitmap
6510 of *DEST and thus may not be used if that might be shared.
6511 The points-to bitmap of *SRC and *DEST will not be shared after
6512 this function if they were not before. */
6514 static void
6515 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6517 dest->anything |= src->anything;
6518 if (dest->anything)
6520 pt_solution_reset (dest);
6521 return;
6524 dest->nonlocal |= src->nonlocal;
6525 dest->escaped |= src->escaped;
6526 dest->ipa_escaped |= src->ipa_escaped;
6527 dest->null |= src->null;
6528 dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
6529 dest->vars_contains_escaped |= src->vars_contains_escaped;
6530 dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
6531 if (!src->vars)
6532 return;
6534 if (!dest->vars)
6535 dest->vars = BITMAP_GGC_ALLOC ();
6536 bitmap_ior_into (dest->vars, src->vars);
6539 /* Return true if the points-to solution *PT is empty. */
6541 bool
6542 pt_solution_empty_p (struct pt_solution *pt)
6544 if (pt->anything
6545 || pt->nonlocal)
6546 return false;
6548 if (pt->vars
6549 && !bitmap_empty_p (pt->vars))
6550 return false;
6552 /* If the solution includes ESCAPED, check if that is empty. */
6553 if (pt->escaped
6554 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6555 return false;
6557 /* Likewise for the IPA-wide ESCAPED solution. */
6558 if (pt->ipa_escaped
6559 && !pt_solution_empty_p (&ipa_escaped_pt))
6560 return false;
6562 return true;
6565 /* Return true if the points-to solution *PT points to exactly one
6566 variable and store that variable's uid in *UID. */
6568 bool
6569 pt_solution_singleton_p (struct pt_solution *pt, unsigned *uid)
6571 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6572 || pt->null || pt->vars == NULL
6573 || !bitmap_single_bit_set_p (pt->vars))
6574 return false;
6576 *uid = bitmap_first_set_bit (pt->vars);
6577 return true;
6580 /* Return true if the points-to solution *PT includes global memory. */
6582 bool
6583 pt_solution_includes_global (struct pt_solution *pt)
6585 if (pt->anything
6586 || pt->nonlocal
6587 || pt->vars_contains_nonlocal
6588 /* The following is a hack to make the malloc escape hack work.
6589 In reality we'd need different sets for escaped-through-return
6590 and escaped-to-callees and passes would need to be updated. */
6591 || pt->vars_contains_escaped_heap)
6592 return true;
6594 /* 'escaped' is also a placeholder so we have to look into it. */
6595 if (pt->escaped)
6596 return pt_solution_includes_global (&cfun->gimple_df->escaped);
6598 if (pt->ipa_escaped)
6599 return pt_solution_includes_global (&ipa_escaped_pt);
6601 return false;
6604 /* Return true if the points-to solution *PT includes the variable
6605 declaration DECL. */
6607 static bool
6608 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
6610 if (pt->anything)
6611 return true;
6613 if (pt->nonlocal
6614 && is_global_var (decl))
6615 return true;
6617 if (pt->vars
6618 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
6619 return true;
6621 /* If the solution includes ESCAPED, check it. */
6622 if (pt->escaped
6623 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
6624 return true;
6626 /* Likewise for the IPA-wide ESCAPED solution. */
6627 if (pt->ipa_escaped
6628 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
6629 return true;
6631 return false;
6634 bool
6635 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6637 bool res = pt_solution_includes_1 (pt, decl);
6638 if (res)
6639 ++pta_stats.pt_solution_includes_may_alias;
6640 else
6641 ++pta_stats.pt_solution_includes_no_alias;
6642 return res;
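/* Usage sketch (illustrative; PTR and DECL stand for a pointer SSA name and
   a variable declaration): a client that wants to know whether an indirect
   access through PTR can touch DECL typically does

     struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
     if (pi && !pt_solution_includes (&pi->pt, decl))
       ;  /* The access cannot touch DECL.  */

   which is essentially how the alias oracle in tree-ssa-alias.c consumes
   the points-to solutions computed here.  */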
6645 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
6646 intersection. */
6648 static bool
6649 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
6651 if (pt1->anything || pt2->anything)
6652 return true;
6654 /* If either points to unknown global memory and the other points to
6655 any global memory they alias. */
6656 if ((pt1->nonlocal
6657 && (pt2->nonlocal
6658 || pt2->vars_contains_nonlocal))
6659 || (pt2->nonlocal
6660 && pt1->vars_contains_nonlocal))
6661 return true;
6663 /* If either points to all escaped memory and the other points to
6664 any escaped memory they alias. */
6665 if ((pt1->escaped
6666 && (pt2->escaped
6667 || pt2->vars_contains_escaped))
6668 || (pt2->escaped
6669 && pt1->vars_contains_escaped))
6670 return true;
6672 /* Check the escaped solution if required.
6673 ??? Do we need to check the local against the IPA escaped sets? */
6674 if ((pt1->ipa_escaped || pt2->ipa_escaped)
6675 && !pt_solution_empty_p (&ipa_escaped_pt))
6677 /* If both point to escaped memory and that solution
6678 is not empty they alias. */
6679 if (pt1->ipa_escaped && pt2->ipa_escaped)
6680 return true;
6682 /* If either points to escaped memory see if the escaped solution
6683 intersects with the other. */
6684 if ((pt1->ipa_escaped
6685 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
6686 || (pt2->ipa_escaped
6687 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
6688 return true;
6691 /* Now both pointers alias if their points-to solutions intersect. */
6692 return (pt1->vars
6693 && pt2->vars
6694 && bitmap_intersect_p (pt1->vars, pt2->vars));
6697 bool
6698 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6700 bool res = pt_solutions_intersect_1 (pt1, pt2);
6701 if (res)
6702 ++pta_stats.pt_solutions_intersect_may_alias;
6703 else
6704 ++pta_stats.pt_solutions_intersect_no_alias;
6705 return res;
6709 /* Dump points-to information to OUTFILE. */
6711 static void
6712 dump_sa_points_to_info (FILE *outfile)
6714 unsigned int i;
6716 fprintf (outfile, "\nPoints-to sets\n\n");
6718 if (dump_flags & TDF_STATS)
6720 fprintf (outfile, "Stats:\n");
6721 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6722 fprintf (outfile, "Non-pointer vars: %d\n",
6723 stats.nonpointer_vars);
6724 fprintf (outfile, "Statically unified vars: %d\n",
6725 stats.unified_vars_static);
6726 fprintf (outfile, "Dynamically unified vars: %d\n",
6727 stats.unified_vars_dynamic);
6728 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6729 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6730 fprintf (outfile, "Number of implicit edges: %d\n",
6731 stats.num_implicit_edges);
6734 for (i = 1; i < varmap.length (); i++)
6736 varinfo_t vi = get_varinfo (i);
6737 if (!vi->may_have_pointers)
6738 continue;
6739 dump_solution_for_var (outfile, i);
6744 /* Debug points-to information to stderr. */
6746 DEBUG_FUNCTION void
6747 debug_sa_points_to_info (void)
6749 dump_sa_points_to_info (stderr);
6753 /* Initialize the always-existing constraint variables NULL, ANYTHING,
6754 STRING, ESCAPED, NONLOCAL, STOREDANYTHING, and INTEGER. */
6756 static void
6757 init_base_vars (void)
6759 struct constraint_expr lhs, rhs;
6760 varinfo_t var_anything;
6761 varinfo_t var_nothing;
6762 varinfo_t var_string;
6763 varinfo_t var_escaped;
6764 varinfo_t var_nonlocal;
6765 varinfo_t var_storedanything;
6766 varinfo_t var_integer;
6768 /* Variable ID zero is reserved and should be NULL. */
6769 varmap.safe_push (NULL);
6771 /* Create the NULL variable, used to represent that a variable points
6772 to NULL. */
6773 var_nothing = new_var_info (NULL_TREE, "NULL", false);
6774 gcc_assert (var_nothing->id == nothing_id);
6775 var_nothing->is_artificial_var = 1;
6776 var_nothing->offset = 0;
6777 var_nothing->size = ~0;
6778 var_nothing->fullsize = ~0;
6779 var_nothing->is_special_var = 1;
6780 var_nothing->may_have_pointers = 0;
6781 var_nothing->is_global_var = 0;
6783 /* Create the ANYTHING variable, used to represent that a variable
6784 points to some unknown piece of memory. */
6785 var_anything = new_var_info (NULL_TREE, "ANYTHING", false);
6786 gcc_assert (var_anything->id == anything_id);
6787 var_anything->is_artificial_var = 1;
6788 var_anything->size = ~0;
6789 var_anything->offset = 0;
6790 var_anything->fullsize = ~0;
6791 var_anything->is_special_var = 1;
6793 /* Anything points to anything. This makes deref constraints just
6794 work in the presence of linked lists and other p = *p style loops,
6795 by saying that *ANYTHING = ANYTHING. */
6796 lhs.type = SCALAR;
6797 lhs.var = anything_id;
6798 lhs.offset = 0;
6799 rhs.type = ADDRESSOF;
6800 rhs.var = anything_id;
6801 rhs.offset = 0;
6803 /* This specifically does not use process_constraint because
6804 process_constraint ignores all anything = anything constraints, since all
6805 but this one are redundant. */
6806 constraints.safe_push (new_constraint (lhs, rhs));
6808 /* Create the STRING variable, used to represent that a variable
6809 points to a string literal. String literals don't contain
6810 pointers so STRING doesn't point to anything. */
6811 var_string = new_var_info (NULL_TREE, "STRING", false);
6812 gcc_assert (var_string->id == string_id);
6813 var_string->is_artificial_var = 1;
6814 var_string->offset = 0;
6815 var_string->size = ~0;
6816 var_string->fullsize = ~0;
6817 var_string->is_special_var = 1;
6818 var_string->may_have_pointers = 0;
6820 /* Create the ESCAPED variable, used to represent the set of escaped
6821 memory. */
6822 var_escaped = new_var_info (NULL_TREE, "ESCAPED", false);
6823 gcc_assert (var_escaped->id == escaped_id);
6824 var_escaped->is_artificial_var = 1;
6825 var_escaped->offset = 0;
6826 var_escaped->size = ~0;
6827 var_escaped->fullsize = ~0;
6828 var_escaped->is_special_var = 0;
6830 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6831 memory. */
6832 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL", false);
6833 gcc_assert (var_nonlocal->id == nonlocal_id);
6834 var_nonlocal->is_artificial_var = 1;
6835 var_nonlocal->offset = 0;
6836 var_nonlocal->size = ~0;
6837 var_nonlocal->fullsize = ~0;
6838 var_nonlocal->is_special_var = 1;
6840 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6841 lhs.type = SCALAR;
6842 lhs.var = escaped_id;
6843 lhs.offset = 0;
6844 rhs.type = DEREF;
6845 rhs.var = escaped_id;
6846 rhs.offset = 0;
6847 process_constraint (new_constraint (lhs, rhs));
6849 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6850 whole variable escapes. */
6851 lhs.type = SCALAR;
6852 lhs.var = escaped_id;
6853 lhs.offset = 0;
6854 rhs.type = SCALAR;
6855 rhs.var = escaped_id;
6856 rhs.offset = UNKNOWN_OFFSET;
6857 process_constraint (new_constraint (lhs, rhs));
6859 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6860 everything pointed to by escaped points to what global memory can
6861 point to. */
6862 lhs.type = DEREF;
6863 lhs.var = escaped_id;
6864 lhs.offset = 0;
6865 rhs.type = SCALAR;
6866 rhs.var = nonlocal_id;
6867 rhs.offset = 0;
6868 process_constraint (new_constraint (lhs, rhs));
6870 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6871 global memory may point to global memory and escaped memory. */
6872 lhs.type = SCALAR;
6873 lhs.var = nonlocal_id;
6874 lhs.offset = 0;
6875 rhs.type = ADDRESSOF;
6876 rhs.var = nonlocal_id;
6877 rhs.offset = 0;
6878 process_constraint (new_constraint (lhs, rhs));
6879 rhs.type = ADDRESSOF;
6880 rhs.var = escaped_id;
6881 rhs.offset = 0;
6882 process_constraint (new_constraint (lhs, rhs));
6884 /* Create the STOREDANYTHING variable, used to represent the set of
6885 variables stored to *ANYTHING. */
6886 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING", false);
6887 gcc_assert (var_storedanything->id == storedanything_id);
6888 var_storedanything->is_artificial_var = 1;
6889 var_storedanything->offset = 0;
6890 var_storedanything->size = ~0;
6891 var_storedanything->fullsize = ~0;
6892 var_storedanything->is_special_var = 0;
6894 /* Create the INTEGER variable, used to represent that a variable points
6895 to what an INTEGER "points to". */
6896 var_integer = new_var_info (NULL_TREE, "INTEGER", false);
6897 gcc_assert (var_integer->id == integer_id);
6898 var_integer->is_artificial_var = 1;
6899 var_integer->size = ~0;
6900 var_integer->fullsize = ~0;
6901 var_integer->offset = 0;
6902 var_integer->is_special_var = 1;
6904 /* INTEGER = ANYTHING, because we don't know where a dereference of
6905 a random integer will point to. */
6906 lhs.type = SCALAR;
6907 lhs.var = integer_id;
6908 lhs.offset = 0;
6909 rhs.type = ADDRESSOF;
6910 rhs.var = anything_id;
6911 rhs.offset = 0;
6912 process_constraint (new_constraint (lhs, rhs));
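/* After init_base_vars the constraint dump therefore starts with roughly

     ANYTHING = &ANYTHING
     ESCAPED = *ESCAPED
     ESCAPED = ESCAPED + UNKNOWN
     *ESCAPED = NONLOCAL
     NONLOCAL = &NONLOCAL
     NONLOCAL = &ESCAPED
     INTEGER = &ANYTHING

   (illustrative; the exact spelling depends on dump_constraint).  */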
6915 /* Initialize things necessary to perform PTA */
6917 static void
6918 init_alias_vars (void)
6920 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
6922 bitmap_obstack_initialize (&pta_obstack);
6923 bitmap_obstack_initialize (&oldpta_obstack);
6924 bitmap_obstack_initialize (&predbitmap_obstack);
6926 constraints.create (8);
6927 varmap.create (8);
6928 vi_for_tree = new hash_map<tree, varinfo_t>;
6929 call_stmt_vars = new hash_map<gimple *, varinfo_t>;
6931 memset (&stats, 0, sizeof (stats));
6932 shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
6933 init_base_vars ();
6935 gcc_obstack_init (&fake_var_decl_obstack);
6937 final_solutions = new hash_map<varinfo_t, pt_solution *>;
6938 gcc_obstack_init (&final_solutions_obstack);
6941 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
6942 predecessor edges. */
6944 static void
6945 remove_preds_and_fake_succs (constraint_graph_t graph)
6947 unsigned int i;
6949 /* Clear the implicit ref and address nodes from the successor
6950 lists. */
6951 for (i = 1; i < FIRST_REF_NODE; i++)
6953 if (graph->succs[i])
6954 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
6955 FIRST_REF_NODE * 2);
6958 /* Free the successor list for the non-ref nodes. */
6959 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
6961 if (graph->succs[i])
6962 BITMAP_FREE (graph->succs[i]);
6965 /* Now resize the successor list to the current number of variables
6966 and blow away the predecessor bitmaps. */
6967 graph->size = varmap.length ();
6968 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
6970 free (graph->implicit_preds);
6971 graph->implicit_preds = NULL;
6972 free (graph->preds);
6973 graph->preds = NULL;
6974 bitmap_obstack_release (&predbitmap_obstack);
6977 /* Solve the constraint set. */
6979 static void
6980 solve_constraints (void)
6982 struct scc_info *si;
6984 if (dump_file)
6985 fprintf (dump_file,
6986 "\nCollapsing static cycles and doing variable "
6987 "substitution\n");
6989 init_graph (varmap.length () * 2);
6991 if (dump_file)
6992 fprintf (dump_file, "Building predecessor graph\n");
6993 build_pred_graph ();
6995 if (dump_file)
6996 fprintf (dump_file, "Detecting pointer and location "
6997 "equivalences\n");
6998 si = perform_var_substitution (graph);
7000 if (dump_file)
7001 fprintf (dump_file, "Rewriting constraints and unifying "
7002 "variables\n");
7003 rewrite_constraints (graph, si);
7005 build_succ_graph ();
7007 free_var_substitution_info (si);
7009 /* Attach complex constraints to graph nodes. */
7010 move_complex_constraints (graph);
7012 if (dump_file)
7013 fprintf (dump_file, "Uniting pointer but not location equivalent "
7014 "variables\n");
7015 unite_pointer_equivalences (graph);
7017 if (dump_file)
7018 fprintf (dump_file, "Finding indirect cycles\n");
7019 find_indirect_cycles (graph);
7021 /* Implicit nodes and predecessors are no longer necessary at this
7022 point. */
7023 remove_preds_and_fake_succs (graph);
7025 if (dump_file && (dump_flags & TDF_GRAPH))
7027 fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
7028 "in dot format:\n");
7029 dump_constraint_graph (dump_file);
7030 fprintf (dump_file, "\n\n");
7033 if (dump_file)
7034 fprintf (dump_file, "Solving graph\n");
7036 solve_graph (graph);
7038 if (dump_file && (dump_flags & TDF_GRAPH))
7040 fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
7041 "in dot format:\n");
7042 dump_constraint_graph (dump_file);
7043 fprintf (dump_file, "\n\n");
7046 if (dump_file)
7047 dump_sa_points_to_info (dump_file);
7050 /* Create points-to sets for the current function. See the comments
7051 at the start of the file for an algorithmic overview. */
7053 static void
7054 compute_points_to_sets (void)
7056 basic_block bb;
7057 varinfo_t vi;
7059 timevar_push (TV_TREE_PTA);
7061 init_alias_vars ();
7063 intra_create_variable_infos (cfun);
7065 /* Now walk all statements and build the constraint set. */
7066 FOR_EACH_BB_FN (bb, cfun)
7068 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7069 gsi_next (&gsi))
7071 gphi *phi = gsi.phi ();
7073 if (! virtual_operand_p (gimple_phi_result (phi)))
7074 find_func_aliases (cfun, phi);
7077 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
7078 gsi_next (&gsi))
7080 gimple *stmt = gsi_stmt (gsi);
7082 find_func_aliases (cfun, stmt);
7086 if (dump_file)
7088 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
7089 dump_constraints (dump_file, 0);
7092 /* From the constraints compute the points-to sets. */
7093 solve_constraints ();
7095 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
7096 cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl,
7097 get_varinfo (escaped_id));
7099 /* Make sure the ESCAPED solution (which is used as placeholder in
7100 other solutions) does not reference itself. This simplifies
7101 points-to solution queries. */
7102 cfun->gimple_df->escaped.escaped = 0;
7104 /* Compute the points-to sets for pointer SSA_NAMEs. */
7105 unsigned i;
7106 tree ptr;
7108 FOR_EACH_SSA_NAME (i, ptr, cfun)
7110 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
7111 find_what_p_points_to (cfun->decl, ptr);
7114 /* Compute the call-used/clobbered sets. */
7115 FOR_EACH_BB_FN (bb, cfun)
7117 gimple_stmt_iterator gsi;
7119 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7121 gcall *stmt;
7122 struct pt_solution *pt;
7124 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
7125 if (!stmt)
7126 continue;
7128 pt = gimple_call_use_set (stmt);
7129 if (gimple_call_flags (stmt) & ECF_CONST)
7130 memset (pt, 0, sizeof (struct pt_solution));
7131 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7133 *pt = find_what_var_points_to (cfun->decl, vi);
7134 /* Escaped (and thus nonlocal) variables are always
7135 implicitly used by calls. */
7136 /* ??? ESCAPED can be empty even though NONLOCAL
7137 always escaped. */
7138 pt->nonlocal = 1;
7139 pt->escaped = 1;
7141 else
7143 /* If there is nothing special about this call then
7144 we have made everything that is used also escape. */
7145 *pt = cfun->gimple_df->escaped;
7146 pt->nonlocal = 1;
7149 pt = gimple_call_clobber_set (stmt);
7150 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7151 memset (pt, 0, sizeof (struct pt_solution));
7152 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7154 *pt = find_what_var_points_to (cfun->decl, vi);
7155 /* Escaped (and thus nonlocal) variables are always
7156 implicitly clobbered by calls. */
7157 /* ??? ESCAPED can be empty even though NONLOCAL
7158 always escaped. */
7159 pt->nonlocal = 1;
7160 pt->escaped = 1;
7162 else
7164 /* If there is nothing special about this call then
7165 we have made everything that is used also escape. */
7166 *pt = cfun->gimple_df->escaped;
7167 pt->nonlocal = 1;
7172 timevar_pop (TV_TREE_PTA);
7176 /* Delete created points-to sets. */
7178 static void
7179 delete_points_to_sets (void)
7181 unsigned int i;
7183 delete shared_bitmap_table;
7184 shared_bitmap_table = NULL;
7185 if (dump_file && (dump_flags & TDF_STATS))
7186 fprintf (dump_file, "Points to sets created:%d\n",
7187 stats.points_to_sets_created);
7189 delete vi_for_tree;
7190 delete call_stmt_vars;
7191 bitmap_obstack_release (&pta_obstack);
7192 constraints.release ();
7194 for (i = 0; i < graph->size; i++)
7195 graph->complex[i].release ();
7196 free (graph->complex);
7198 free (graph->rep);
7199 free (graph->succs);
7200 free (graph->pe);
7201 free (graph->pe_rep);
7202 free (graph->indirect_cycles);
7203 free (graph);
7205 varmap.release ();
7206 variable_info_pool.release ();
7207 constraint_pool.release ();
7209 obstack_free (&fake_var_decl_obstack, NULL);
7211 delete final_solutions;
7212 obstack_free (&final_solutions_obstack, NULL);
7215 struct vls_data
7217 unsigned short clique;
7218 bitmap rvars;
7221 /* Mark "other" loads and stores as belonging to CLIQUE and with
7222 base zero. */
7224 static bool
7225 visit_loadstore (gimple *, tree base, tree ref, void *data)
7227 unsigned short clique = ((vls_data *) data)->clique;
7228 bitmap rvars = ((vls_data *) data)->rvars;
7229 if (TREE_CODE (base) == MEM_REF
7230 || TREE_CODE (base) == TARGET_MEM_REF)
7232 tree ptr = TREE_OPERAND (base, 0);
7233 if (TREE_CODE (ptr) == SSA_NAME
7234 && ! SSA_NAME_IS_DEFAULT_DEF (ptr))
7236 /* We need to make sure the points-to set of 'ptr' does not include
7237 any of the restrict tags we added bases for. */
7238 varinfo_t vi = lookup_vi_for_tree (ptr);
7239 if (! vi)
7240 return false;
7242 vi = get_varinfo (find (vi->id));
7243 if (bitmap_intersect_p (rvars, vi->solution))
7244 return false;
7247 /* Do not overwrite existing cliques (that includes clique, base
7248 pairs we just set). */
7249 if (MR_DEPENDENCE_CLIQUE (base) == 0)
7251 MR_DEPENDENCE_CLIQUE (base) = clique;
7252 MR_DEPENDENCE_BASE (base) = 0;
7256 /* For plain decl accesses see whether they are accesses to globals
7257 and rewrite them to MEM_REFs with { clique, 0 }. */
7258 if (TREE_CODE (base) == VAR_DECL
7259 && is_global_var (base)
7260 /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
7261 ops callback. */
7262 && base != ref)
7264 tree *basep = &ref;
7265 while (handled_component_p (*basep))
7266 basep = &TREE_OPERAND (*basep, 0);
7267 gcc_assert (TREE_CODE (*basep) == VAR_DECL);
7268 tree ptr = build_fold_addr_expr (*basep);
7269 tree zero = build_int_cst (TREE_TYPE (ptr), 0);
7270 *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
7271 MR_DEPENDENCE_CLIQUE (*basep) = clique;
7272 MR_DEPENDENCE_BASE (*basep) = 0;
7275 return false;
7278 /* If REF is a MEM_REF then assign a clique, base pair to it, updating
7279 CLIQUE, *RESTRICT_VAR and LAST_RUID. Return whether dependence info
7280 was assigned to REF. */
7282 static bool
7283 maybe_set_dependence_info (tree ref, tree ptr,
7284 unsigned short &clique, varinfo_t restrict_var,
7285 unsigned short &last_ruid)
7287 while (handled_component_p (ref))
7288 ref = TREE_OPERAND (ref, 0);
7289 if ((TREE_CODE (ref) == MEM_REF
7290 || TREE_CODE (ref) == TARGET_MEM_REF)
7291 && TREE_OPERAND (ref, 0) == ptr)
7293 /* Do not overwrite existing cliques. This avoids clobbering dependence
7294 info from a function with restrict parameters that was inlined into
7295 another function with restrict parameters. This usually means we
7296 prefer to be precise in innermost loops. */
7297 if (MR_DEPENDENCE_CLIQUE (ref) == 0)
7299 if (clique == 0)
7300 clique = ++cfun->last_clique;
7301 if (restrict_var->ruid == 0)
7302 restrict_var->ruid = ++last_ruid;
7303 MR_DEPENDENCE_CLIQUE (ref) = clique;
7304 MR_DEPENDENCE_BASE (ref) = restrict_var->ruid;
7305 return true;
7308 return false;
7311 /* Compute the set of independent memory references based on restrict
7312 tags and their conservative propagation to the points-to sets. */
7314 static void
7315 compute_dependence_clique (void)
7317 unsigned short clique = 0;
7318 unsigned short last_ruid = 0;
7319 bitmap rvars = BITMAP_ALLOC (NULL);
7320 for (unsigned i = 0; i < num_ssa_names; ++i)
7322 tree ptr = ssa_name (i);
7323 if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
7324 continue;
7326 /* Avoid all this when ptr is not dereferenced? */
7327 tree p = ptr;
7328 if (SSA_NAME_IS_DEFAULT_DEF (ptr)
7329 && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
7330 || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
7331 p = SSA_NAME_VAR (ptr);
7332 varinfo_t vi = lookup_vi_for_tree (p);
7333 if (!vi)
7334 continue;
7335 vi = get_varinfo (find (vi->id));
7336 bitmap_iterator bi;
7337 unsigned j;
7338 varinfo_t restrict_var = NULL;
7339 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
7341 varinfo_t oi = get_varinfo (j);
7342 if (oi->is_restrict_var)
7344 if (restrict_var)
7346 if (dump_file && (dump_flags & TDF_DETAILS))
7348 fprintf (dump_file, "found restrict pointed-to "
7349 "for ");
7350 print_generic_expr (dump_file, ptr, 0);
7351 fprintf (dump_file, " but not exclusively\n");
7353 restrict_var = NULL;
7354 break;
7356 restrict_var = oi;
7358 /* NULL is the only other valid points-to entry. */
7359 else if (oi->id != nothing_id)
7361 restrict_var = NULL;
7362 break;
7365 /* Ok, found that ptr must(!) point to a single(!) restrict
7366 variable. */
7367 /* ??? PTA isn't really a proper propagation engine to compute
7368 this property.
7369 ??? We could handle merging of two restricts by unifying them. */
7370 if (restrict_var)
7372 /* Now look at possible dereferences of ptr. */
7373 imm_use_iterator ui;
7374 gimple *use_stmt;
7375 bool used = false;
7376 FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
7378 /* ??? Calls and asms. */
7379 if (!gimple_assign_single_p (use_stmt))
7380 continue;
7381 used |= maybe_set_dependence_info (gimple_assign_lhs (use_stmt),
7382 ptr, clique, restrict_var,
7383 last_ruid);
7384 used |= maybe_set_dependence_info (gimple_assign_rhs1 (use_stmt),
7385 ptr, clique, restrict_var,
7386 last_ruid);
7388 if (used)
7389 bitmap_set_bit (rvars, restrict_var->id);
7393 if (clique != 0)
7395 /* Assign the BASE id zero to all accesses not based on a restrict
7396 pointer. That way they get disambiguated against restrict
7397 accesses but not against each other. */
7398 /* ??? For restricts derived from globals (thus not incoming
7399 parameters) we can't restrict scoping properly thus the following
7400 is too aggressive there. For now we have excluded those globals from
7401 getting into the MR_DEPENDENCE machinery. */
7402 vls_data data = { clique, rvars };
7403 basic_block bb;
7404 FOR_EACH_BB_FN (bb, cfun)
7405 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7406 !gsi_end_p (gsi); gsi_next (&gsi))
7408 gimple *stmt = gsi_stmt (gsi);
7409 walk_stmt_load_store_ops (stmt, &data,
7410 visit_loadstore, visit_loadstore);
7414 BITMAP_FREE (rvars);
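/* Illustrative example (a sketch, not taken from actual dumps): for

     void scale (float * restrict dst, float * restrict src, int n)
     {
       for (int i = 0; i < n; i++)
         dst[i] = src[i] * 2.0f;
     }

   each parameter points to exactly one restrict tag, so the MEM_REFs based
   on DST and SRC receive the same nonzero MR_DEPENDENCE_CLIQUE but different
   MR_DEPENDENCE_BASE values and can be disambiguated against each other,
   while unrelated accesses get base zero (see visit_loadstore) and are only
   disambiguated against the restrict-based accesses, not against each
   other.  */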
7417 /* Compute points-to information for every SSA_NAME pointer in the
7418 current function and compute the transitive closure of escaped
7419 variables to re-initialize the call-clobber states of local variables. */
7421 unsigned int
7422 compute_may_aliases (void)
7424 if (cfun->gimple_df->ipa_pta)
7426 if (dump_file)
7428 fprintf (dump_file, "\nNot re-computing points-to information "
7429 "because IPA points-to information is available.\n\n");
7431 /* But still dump what we do have. */
7432 dump_alias_info (dump_file);
7435 return 0;
7438 /* For each pointer P_i, determine the sets of variables that P_i may
7439 point-to. Compute the reachability set of escaped and call-used
7440 variables. */
7441 compute_points_to_sets ();
7443 /* Debugging dumps. */
7444 if (dump_file)
7445 dump_alias_info (dump_file);
7447 /* Compute restrict-based memory disambiguations. */
7448 compute_dependence_clique ();
7450 /* Deallocate memory used by aliasing data structures and the internal
7451 points-to solution. */
7452 delete_points_to_sets ();
7454 gcc_assert (!need_ssa_update_p (cfun));
7456 return 0;
7459 /* A dummy pass to cause points-to information to be computed via
7460 TODO_rebuild_alias. */
7462 namespace {
7464 const pass_data pass_data_build_alias =
7466 GIMPLE_PASS, /* type */
7467 "alias", /* name */
7468 OPTGROUP_NONE, /* optinfo_flags */
7469 TV_NONE, /* tv_id */
7470 ( PROP_cfg | PROP_ssa ), /* properties_required */
7471 0, /* properties_provided */
7472 0, /* properties_destroyed */
7473 0, /* todo_flags_start */
7474 TODO_rebuild_alias, /* todo_flags_finish */
7477 class pass_build_alias : public gimple_opt_pass
7479 public:
7480 pass_build_alias (gcc::context *ctxt)
7481 : gimple_opt_pass (pass_data_build_alias, ctxt)
7484 /* opt_pass methods: */
7485 virtual bool gate (function *) { return flag_tree_pta; }
7487 }; // class pass_build_alias
7489 } // anon namespace
7491 gimple_opt_pass *
7492 make_pass_build_alias (gcc::context *ctxt)
7494 return new pass_build_alias (ctxt);
7497 /* A dummy pass to cause points-to information to be computed via
7498 TODO_rebuild_alias. */
7500 namespace {
7502 const pass_data pass_data_build_ealias =
7504 GIMPLE_PASS, /* type */
7505 "ealias", /* name */
7506 OPTGROUP_NONE, /* optinfo_flags */
7507 TV_NONE, /* tv_id */
7508 ( PROP_cfg | PROP_ssa ), /* properties_required */
7509 0, /* properties_provided */
7510 0, /* properties_destroyed */
7511 0, /* todo_flags_start */
7512 TODO_rebuild_alias, /* todo_flags_finish */
7515 class pass_build_ealias : public gimple_opt_pass
7517 public:
7518 pass_build_ealias (gcc::context *ctxt)
7519 : gimple_opt_pass (pass_data_build_ealias, ctxt)
7522 /* opt_pass methods: */
7523 virtual bool gate (function *) { return flag_tree_pta; }
7525 }; // class pass_build_ealias
7527 } // anon namespace
7529 gimple_opt_pass *
7530 make_pass_build_ealias (gcc::context *ctxt)
7532 return new pass_build_ealias (ctxt);
7536 /* IPA PTA solution for ESCAPED. */
7537 struct pt_solution ipa_escaped_pt
7538 = { true, false, false, false, false, false, false, false, false, NULL };
7540 /* Associate node with varinfo DATA. Worker for
7541 cgraph_for_symbol_thunks_and_aliases. */
7542 static bool
7543 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
7545 if ((node->alias || node->thunk.thunk_p)
7546 && node->analyzed)
7547 insert_vi_for_tree (node->decl, (varinfo_t)data);
7548 return false;
7551 /* Dump varinfo VI to FILE. */
7553 static void
7554 dump_varinfo (FILE *file, varinfo_t vi)
7556 if (vi == NULL)
7557 return;
7559 fprintf (file, "%u: %s\n", vi->id, vi->name);
7561 const char *sep = " ";
7562 if (vi->is_artificial_var)
7563 fprintf (file, "%sartificial", sep);
7564 if (vi->is_special_var)
7565 fprintf (file, "%sspecial", sep);
7566 if (vi->is_unknown_size_var)
7567 fprintf (file, "%sunknown-size", sep);
7568 if (vi->is_full_var)
7569 fprintf (file, "%sfull", sep);
7570 if (vi->is_heap_var)
7571 fprintf (file, "%sheap", sep);
7572 if (vi->may_have_pointers)
7573 fprintf (file, "%smay-have-pointers", sep);
7574 if (vi->only_restrict_pointers)
7575 fprintf (file, "%sonly-restrict-pointers", sep);
7576 if (vi->is_restrict_var)
7577 fprintf (file, "%sis-restrict-var", sep);
7578 if (vi->is_global_var)
7579 fprintf (file, "%sglobal", sep);
7580 if (vi->is_ipa_escape_point)
7581 fprintf (file, "%sipa-escape-point", sep);
7582 if (vi->is_fn_info)
7583 fprintf (file, "%sfn-info", sep);
7584 if (vi->ruid)
7585 fprintf (file, "%srestrict-uid:%u", sep, vi->ruid);
7586 if (vi->next)
7587 fprintf (file, "%snext:%u", sep, vi->next);
7588 if (vi->head != vi->id)
7589 fprintf (file, "%shead:%u", sep, vi->head);
7590 if (vi->offset)
7591 fprintf (file, "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset);
7592 if (vi->size != ~(unsigned HOST_WIDE_INT)0)
7593 fprintf (file, "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size);
7594 if (vi->fullsize != ~(unsigned HOST_WIDE_INT)0
7595 && vi->fullsize != vi->size)
7596 fprintf (file, "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep,
7597 vi->fullsize);
7598 fprintf (file, "\n");
7600 if (vi->solution && !bitmap_empty_p (vi->solution))
7602 bitmap_iterator bi;
7603 unsigned i;
7604 fprintf (file, " solution: {");
7605 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
7606 fprintf (file, " %u", i);
7607 fprintf (file, " }\n");
7610 if (vi->oldsolution && !bitmap_empty_p (vi->oldsolution)
7611 && !bitmap_equal_p (vi->solution, vi->oldsolution))
7613 bitmap_iterator bi;
7614 unsigned i;
7615 fprintf (file, " oldsolution: {");
7616 EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi)
7617 fprintf (file, " %u", i);
7618 fprintf (file, " }\n");
7622 /* Dump varinfo VI to stderr. */
7624 DEBUG_FUNCTION void
7625 debug_varinfo (varinfo_t vi)
7627 dump_varinfo (stderr, vi);
7630 /* Dump varmap to FILE. */
7632 static void
7633 dump_varmap (FILE *file)
7635 if (varmap.length () == 0)
7636 return;
7638 fprintf (file, "variables:\n");
7640 for (unsigned int i = 0; i < varmap.length (); ++i)
7642 varinfo_t vi = get_varinfo (i);
7643 dump_varinfo (file, vi);
7646 fprintf (file, "\n");
7649 /* Dump varmap to stderr. */
7651 DEBUG_FUNCTION void
7652 debug_varmap (void)
7654 dump_varmap (stderr);
7657 /* Compute whether NODE is referred to non-locally. Worker for
7658 cgraph_for_symbol_thunks_and_aliases. */
7659 static bool
7660 refered_from_nonlocal_fn (struct cgraph_node *node, void *data)
7662 bool *nonlocal_p = (bool *)data;
7663 *nonlocal_p |= (node->used_from_other_partition
7664 || node->externally_visible
7665 || node->force_output);
7666 return false;
7669 /* Same for varpool nodes. */
7670 static bool
7671 refered_from_nonlocal_var (struct varpool_node *node, void *data)
7673 bool *nonlocal_p = (bool *)data;
7674 *nonlocal_p |= (node->used_from_other_partition
7675 || node->externally_visible
7676 || node->force_output);
7677 return false;
7680 /* Execute the driver for IPA PTA. */
7681 static unsigned int
7682 ipa_pta_execute (void)
7684 struct cgraph_node *node;
7685 varpool_node *var;
7686 unsigned int from = 0;
7688 in_ipa_mode = 1;
7690 init_alias_vars ();
7692 if (dump_file && (dump_flags & TDF_DETAILS))
7694 symtab_node::dump_table (dump_file);
7695 fprintf (dump_file, "\n");
7698 if (dump_file)
7700 fprintf (dump_file, "Generating generic constraints\n\n");
7701 dump_constraints (dump_file, from);
7702 fprintf (dump_file, "\n");
7703 from = constraints.length ();
7706 /* Build the constraints. */
7707 FOR_EACH_DEFINED_FUNCTION (node)
7709 varinfo_t vi;
7710 /* Nodes without a body are not interesting. Especially do not
7711 visit clones at this point for now - we get duplicate decls
7712 there for inline clones at least. */
7713 if (!node->has_gimple_body_p () || node->global.inlined_to)
7714 continue;
7715 node->get_body ();
7717 gcc_assert (!node->clone_of);
7719 /* For externally visible or attribute used annotated functions use
7720 local constraints for their arguments.
7721 For local functions we see all callers and thus do not need initial
7722 constraints for parameters. */
7723 bool nonlocal_p = (node->used_from_other_partition
7724 || node->externally_visible
7725 || node->force_output);
7726 node->call_for_symbol_thunks_and_aliases (refered_from_nonlocal_fn,
7727 &nonlocal_p, true);
7729 vi = create_function_info_for (node->decl,
7730 alias_get_name (node->decl), false,
7731 nonlocal_p);
7732 if (dump_file
7733 && from != constraints.length ())
7735 fprintf (dump_file,
7736 "Generating initial constraints for %s", node->name ());
7737 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
7738 fprintf (dump_file, " (%s)",
7739 IDENTIFIER_POINTER
7740 (DECL_ASSEMBLER_NAME (node->decl)));
7741 fprintf (dump_file, "\n\n");
7742 dump_constraints (dump_file, from);
7743 fprintf (dump_file, "\n");
7745 from = constraints.length ();
7748 node->call_for_symbol_thunks_and_aliases
7749 (associate_varinfo_to_alias, vi, true);
7752 /* Create constraints for global variables and their initializers. */
7753 FOR_EACH_VARIABLE (var)
7755 if (var->alias && var->analyzed)
7756 continue;
7758 varinfo_t vi = get_vi_for_tree (var->decl);
7760 /* For the purpose of IPA PTA, unit-local globals are not
7761 escape points. */
7762 bool nonlocal_p = (var->used_from_other_partition
7763 || var->externally_visible
7764 || var->force_output);
7765 var->call_for_symbol_and_aliases (refered_from_nonlocal_var,
7766 &nonlocal_p, true);
7767 if (nonlocal_p)
7768 vi->is_ipa_escape_point = true;
7771 if (dump_file
7772 && from != constraints.length ())
7774 fprintf (dump_file,
7775 "Generating constraints for global initializers\n\n");
7776 dump_constraints (dump_file, from);
7777 fprintf (dump_file, "\n");
7778 from = constraints.length ();
7781 FOR_EACH_DEFINED_FUNCTION (node)
7783 struct function *func;
7784 basic_block bb;
7786 /* Nodes without a body are not interesting. */
7787 if (!node->has_gimple_body_p () || node->clone_of)
7788 continue;
7790 if (dump_file)
7792 fprintf (dump_file,
7793 "Generating constraints for %s", node->name ());
7794 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
7795 fprintf (dump_file, " (%s)",
7796 IDENTIFIER_POINTER
7797 (DECL_ASSEMBLER_NAME (node->decl)));
7798 fprintf (dump_file, "\n");
7801 func = DECL_STRUCT_FUNCTION (node->decl);
7802 gcc_assert (cfun == NULL);
7804 /* Build constraints for the function body. */
7805 FOR_EACH_BB_FN (bb, func)
7807 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7808 gsi_next (&gsi))
7810 gphi *phi = gsi.phi ();
7812 if (! virtual_operand_p (gimple_phi_result (phi)))
7813 find_func_aliases (func, phi);
7816 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
7817 gsi_next (&gsi))
7819 gimple *stmt = gsi_stmt (gsi);
7821 find_func_aliases (func, stmt);
7822 find_func_clobbers (func, stmt);
7826 if (dump_file)
7828 fprintf (dump_file, "\n");
7829 dump_constraints (dump_file, from);
7830 fprintf (dump_file, "\n");
7831 from = constraints.length ();
7835 /* From the constraints compute the points-to sets. */
7836 solve_constraints ();
7838 /* Compute the global points-to sets for ESCAPED.
7839 ??? Note that the computed escape set is not correct
7840 for the whole unit as we fail to consider graph edges to
7841 externally visible functions. */
7842 ipa_escaped_pt = find_what_var_points_to (NULL, get_varinfo (escaped_id));
7844 /* Make sure the ESCAPED solution (which is used as placeholder in
7845 other solutions) does not reference itself. This simplifies
7846 points-to solution queries. */
7847 ipa_escaped_pt.ipa_escaped = 0;
7849 /* Assign the points-to sets to the SSA names in the unit. */
7850 FOR_EACH_DEFINED_FUNCTION (node)
7852 tree ptr;
7853 struct function *fn;
7854 unsigned i;
7855 basic_block bb;
7857 /* Nodes without a body are not interesting. */
7858 if (!node->has_gimple_body_p () || node->clone_of)
7859 continue;
7861 fn = DECL_STRUCT_FUNCTION (node->decl);
7863 /* Compute the points-to sets for pointer SSA_NAMEs. */
7864 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
7866 if (ptr
7867 && POINTER_TYPE_P (TREE_TYPE (ptr)))
7868 find_what_p_points_to (node->decl, ptr);
7871 /* Compute the call-use and call-clobber sets for indirect calls
7872 and calls to external functions. */
7873 FOR_EACH_BB_FN (bb, fn)
7875 gimple_stmt_iterator gsi;
7877 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7879 gcall *stmt;
7880 struct pt_solution *pt;
7881 varinfo_t vi, fi;
7882 tree decl;
7884 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
7885 if (!stmt)
7886 continue;
7888 /* Handle direct calls to functions with body. */
7889 decl = gimple_call_fndecl (stmt);
7892 tree called_decl = NULL_TREE;
7893 if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
7894 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
7895 else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
7896 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
7898 if (called_decl != NULL_TREE
7899 && !fndecl_maybe_in_other_partition (called_decl))
7900 decl = called_decl;
7903 if (decl
7904 && (fi = lookup_vi_for_tree (decl))
7905 && fi->is_fn_info)
7907 *gimple_call_clobber_set (stmt)
7908 = find_what_var_points_to
7909 (node->decl, first_vi_for_offset (fi, fi_clobbers));
7910 *gimple_call_use_set (stmt)
7911 = find_what_var_points_to
7912 (node->decl, first_vi_for_offset (fi, fi_uses));
7914 /* Handle direct calls to external functions. */
7915 else if (decl)
7917 pt = gimple_call_use_set (stmt);
7918 if (gimple_call_flags (stmt) & ECF_CONST)
7919 memset (pt, 0, sizeof (struct pt_solution));
7920 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7922 *pt = find_what_var_points_to (node->decl, vi);
7923 /* Escaped (and thus nonlocal) variables are always
7924 implicitly used by calls. */
7925 /* ??? ESCAPED can be empty even though NONLOCAL
7926 always escaped. */
7927 pt->nonlocal = 1;
7928 pt->ipa_escaped = 1;
7930 else
7932 /* If there is nothing special about this call then
7933 we have made everything that is used also escape. */
7934 *pt = ipa_escaped_pt;
7935 pt->nonlocal = 1;
7938 pt = gimple_call_clobber_set (stmt);
7939 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7940 memset (pt, 0, sizeof (struct pt_solution));
7941 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7943 *pt = find_what_var_points_to (node->decl, vi);
7944 /* Escaped (and thus nonlocal) variables are always
7945 implicitly clobbered by calls. */
7946 /* ??? ESCAPED can be empty even though NONLOCAL
7947 always escaped. */
7948 pt->nonlocal = 1;
7949 pt->ipa_escaped = 1;
7951 else
7953 /* If there is nothing special about this call then
7954 we have made everything that is used also escape. */
7955 *pt = ipa_escaped_pt;
7956 pt->nonlocal = 1;
7959 /* Handle indirect calls. */
7960 else if (!decl
7961 && (fi = get_fi_for_callee (stmt)))
7963 /* We need to accumulate all clobbers/uses of all possible
7964 callees. */
7965 fi = get_varinfo (find (fi->id));
7966 /* If we cannot constrain the set of functions we'll end up
7967 calling, we end up using/clobbering everything. */
7968 if (bitmap_bit_p (fi->solution, anything_id)
7969 || bitmap_bit_p (fi->solution, nonlocal_id)
7970 || bitmap_bit_p (fi->solution, escaped_id))
7972 pt_solution_reset (gimple_call_clobber_set (stmt));
7973 pt_solution_reset (gimple_call_use_set (stmt));
7975 else
7977 bitmap_iterator bi;
7978 unsigned i;
7979 struct pt_solution *uses, *clobbers;
7981 uses = gimple_call_use_set (stmt);
7982 clobbers = gimple_call_clobber_set (stmt);
7983 memset (uses, 0, sizeof (struct pt_solution));
7984 memset (clobbers, 0, sizeof (struct pt_solution));
7985 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
7987 struct pt_solution sol;
7989 vi = get_varinfo (i);
7990 if (!vi->is_fn_info)
7992 /* ??? We could be more precise here? */
7993 uses->nonlocal = 1;
7994 uses->ipa_escaped = 1;
7995 clobbers->nonlocal = 1;
7996 clobbers->ipa_escaped = 1;
7997 continue;
8000 if (!uses->anything)
8002 sol = find_what_var_points_to
8003 (node->decl,
8004 first_vi_for_offset (vi, fi_uses));
8005 pt_solution_ior_into (uses, &sol);
8007 if (!clobbers->anything)
8009 sol = find_what_var_points_to
8010 (node->decl,
8011 first_vi_for_offset (vi, fi_clobbers));
8012 pt_solution_ior_into (clobbers, &sol);
8020 fn->gimple_df->ipa_pta = true;
8022 /* We have to re-set the final-solution cache after each function
8023 because what is a "global" is dependent on function context. */
8024 final_solutions->empty ();
8025 obstack_free (&final_solutions_obstack, NULL);
8026 gcc_obstack_init (&final_solutions_obstack);
8029 delete_points_to_sets ();
8031 in_ipa_mode = 0;
8033 return 0;
8036 namespace {
8038 const pass_data pass_data_ipa_pta =
8040 SIMPLE_IPA_PASS, /* type */
8041 "pta", /* name */
8042 OPTGROUP_NONE, /* optinfo_flags */
8043 TV_IPA_PTA, /* tv_id */
8044 0, /* properties_required */
8045 0, /* properties_provided */
8046 0, /* properties_destroyed */
8047 0, /* todo_flags_start */
8048 0, /* todo_flags_finish */
8051 class pass_ipa_pta : public simple_ipa_opt_pass
8053 public:
8054 pass_ipa_pta (gcc::context *ctxt)
8055 : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
8058 /* opt_pass methods: */
8059 virtual bool gate (function *)
8061 return (optimize
8062 && flag_ipa_pta
8063 /* Don't bother doing anything if the program has errors. */
8064 && !seen_error ());
8067 opt_pass * clone () { return new pass_ipa_pta (m_ctxt); }
8069 virtual unsigned int execute (function *) { return ipa_pta_execute (); }
8071 }; // class pass_ipa_pta
8073 } // anon namespace
8075 simple_ipa_opt_pass *
8076 make_pass_ipa_pta (gcc::context *ctxt)
8078 return new pass_ipa_pta (ctxt);