[official-gcc.git] / gcc / tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2017 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "tree-pass.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "diagnostic-core.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "stmt.h"
37 #include "gimple-iterator.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "params.h"
41 #include "gimple-walk.h"
42 #include "varasm.h"
45 /* The idea behind this analyzer is to generate set constraints from the
46 program, then solve the resulting constraints in order to generate the
47 points-to sets.
49 Set constraints are a way of modeling program analysis problems that
50 involve sets. They consist of an inclusion constraint language,
51 describing the variables (each variable is a set) and operations that
52 are performed on the variables, and a set of rules that derive facts
53 from these operations. To solve a system of set constraints, you derive
54 all possible facts under the rules, which gives you the correct sets
55 as a consequence.
57 See "Efficient Field-sensitive pointer analysis for C" by David
58 J. Pearce, Paul H. J. Kelly and Chris Hankin, at
59 http://citeseer.ist.psu.edu/pearce04efficient.html
61 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
62 of C Code in a Second" by Nevin Heintze and Olivier Tardieu, at
63 http://citeseer.ist.psu.edu/heintze01ultrafast.html
65 There are three types of real constraint expressions, DEREF,
66 ADDRESSOF, and SCALAR. Each constraint expression consists
67 of a constraint type, a variable, and an offset.
69 SCALAR is a constraint expression type used to represent x, whether
70 it appears on the LHS or the RHS of a statement.
71 DEREF is a constraint expression type used to represent *x, whether
72 it appears on the LHS or the RHS of a statement.
73 ADDRESSOF is a constraint expression used to represent &x, whether
74 it appears on the LHS or the RHS of a statement.
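As an illustrative sketch (not literal output of this pass), the
statement x = *y would be modeled by a constraint whose LHS expression
is {type = SCALAR, var = x, offset = 0} and whose RHS expression is
{type = DEREF, var = y, offset = 0}, while x = &y would instead use an
RHS expression of {type = ADDRESSOF, var = y, offset = 0}.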
76 Each pointer variable in the program is assigned an integer id, and
77 each field of a structure variable is assigned an integer id as well.
79 Structure variables are linked to their list of fields through a "next
80 field" in each variable that points to the next field in offset
81 order.
82 Each variable for a structure field has
84 1. "size", that tells the size in bits of that field.
85 2. "fullsize", that tells the size in bits of the entire structure.
86 3. "offset", that tells the offset in bits from the beginning of the
87 structure to this field.
89 Thus,
90 struct f
92 int a;
93 int b;
94 } foo;
95 int *bar;
97 looks like
99 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
100 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
101 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
104 In order to solve the system of set constraints, the following is
105 done:
107 1. Each constraint variable x has a solution set associated with it,
108 Sol(x).
110 2. Constraints are separated into direct, copy, and complex.
111 Direct constraints are ADDRESSOF constraints that require no extra
112 processing, such as P = &Q
113 Copy constraints are those of the form P = Q.
114 Complex constraints are all the constraints involving dereferences
115 and offsets (including offsetted copies).
117 3. All direct constraints of the form P = &Q are processed, such
118 that Q is added to Sol(P)
120 4. All complex constraints for a given constraint variable are stored in a
121 linked list attached to that variable's node.
123 5. A directed graph is built out of the copy constraints. Each
124 constraint variable is a node in the graph, and an edge from
125 Q to P is added for each copy constraint of the form P = Q
127 6. The graph is then walked, and solution sets are
128 propagated along the copy edges, such that an edge from Q to P
129 causes Sol(P) <- Sol(P) union Sol(Q).
131 7. As we visit each node, all complex constraints associated with
132 that node are processed by adding appropriate copy edges to the graph, or the
133 appropriate variables to the solution set.
135 8. The process of walking the graph is iterated until no solution
136 sets change.
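As a small worked example (illustrative only; the pass also introduces
temporaries and special variables not shown here), the fragment

  p = &a;  q = p;  t = &b;  *q = t;

yields the constraints p = &a, q = p, t = &b and *q = t.  Step 3 puts
a into Sol(p) and b into Sol(t); the copy edge from p to q added in
step 5 propagates Sol(p) into Sol(q) in step 6; and processing the
complex constraint *q = t in step 7 unions Sol(t) into Sol(j) for each
j in Sol(q), so Sol(a) ends up containing b.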
138 Prior to walking the graph in steps 6 and 7, we perform static
139 cycle elimination on the constraint graph, as well
140 as off-line variable substitution.
142 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
143 on and turned into anything), but isn't. You can just see what offset
144 inside the pointed-to struct it's going to access.
146 TODO: Constant bounded arrays can be handled as if they were structs of the
147 same number of elements.
149 TODO: Modeling heap and incoming pointers becomes much better if we
150 add fields to them as we discover them, which we could do.
152 TODO: We could handle unions, but to be honest, it's probably not
153 worth the pain or slowdown. */
155 /* IPA-PTA optimizations possible.
157 When the function called indirectly is ANYTHING we can add disambiguation
158 based on the function signatures (or simply the parameter count which
159 is the varinfo size). We also do not need to consider functions that
160 do not have their address taken.
162 The is_global_var bit which marks escape points is overly conservative
163 in IPA mode. Split it into is_escape_point and is_global_var - only
164 externally visible globals are escape points in IPA mode.
165 There is now is_ipa_escape_point but this is only used in a few
166 selected places.
168 The way we introduce DECL_PT_UID to avoid fixing up all points-to
169 sets in the translation unit when we copy a DECL during inlining
170 pessimizes precision. The advantage is that the DECL_PT_UID keeps
171 compile-time and memory usage overhead low - the points-to sets
172 do not grow or get unshared as they would during a fixup phase.
173 An alternative solution is to delay IPA PTA until after all
174 inlining transformations have been applied.
176 The way we propagate clobber/use information isn't optimized.
177 It should use a new complex constraint that properly filters
178 out local variables of the callee (though that would make
179 the sets invalid after inlining). OTOH we might as well
180 admit defeat to WHOPR and simply do all the clobber/use analysis
181 and propagation after PTA finished but before we threw away
182 points-to information for memory variables. WHOPR and PTA
183 do not play along well anyway - the whole constraint solving
184 would need to be done in WPA phase and it will be very interesting
185 to apply the results to local SSA names during LTRANS phase.
187 We probably should compute a per-function unit-ESCAPE solution
188 propagating it simply like the clobber / uses solutions. The
189 solution can go alongside the non-IPA escaped solution and be
190 used to query which vars escape the unit through a function.
191 This is also required to make the escaped-HEAP trick work in IPA mode.
193 We never put function decls in points-to sets so we do not
194 keep the set of called functions for indirect calls.
196 And probably more. */
198 static bool use_field_sensitive = true;
199 static int in_ipa_mode = 0;
201 /* Used for predecessor bitmaps. */
202 static bitmap_obstack predbitmap_obstack;
204 /* Used for points-to sets. */
205 static bitmap_obstack pta_obstack;
207 /* Used for oldsolution members of variables. */
208 static bitmap_obstack oldpta_obstack;
210 /* Used for per-solver-iteration bitmaps. */
211 static bitmap_obstack iteration_obstack;
213 static unsigned int create_variable_info_for (tree, const char *, bool);
214 typedef struct constraint_graph *constraint_graph_t;
215 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
217 struct constraint;
218 typedef struct constraint *constraint_t;
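/* Iterate like EXECUTE_IF_SET_IN_BITMAP, but do nothing at all when the
   bitmap A is NULL. */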
221 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
222 if (a) \
223 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
225 static struct constraint_stats
227 unsigned int total_vars;
228 unsigned int nonpointer_vars;
229 unsigned int unified_vars_static;
230 unsigned int unified_vars_dynamic;
231 unsigned int iterations;
232 unsigned int num_edges;
233 unsigned int num_implicit_edges;
234 unsigned int points_to_sets_created;
235 } stats;
237 struct variable_info
239 /* ID of this variable */
240 unsigned int id;
242 /* True if this is a variable created by the constraint analysis, such as
243 heap variables and constraints we had to break up. */
244 unsigned int is_artificial_var : 1;
246 /* True if this is a special variable whose solution set should not be
247 changed. */
248 unsigned int is_special_var : 1;
250 /* True for variables whose size is not known or variable. */
251 unsigned int is_unknown_size_var : 1;
253 /* True for (sub-)fields that represent a whole variable. */
254 unsigned int is_full_var : 1;
256 /* True if this is a heap variable. */
257 unsigned int is_heap_var : 1;
259 /* True if this field may contain pointers. */
260 unsigned int may_have_pointers : 1;
262 /* True if this field has only restrict qualified pointers. */
263 unsigned int only_restrict_pointers : 1;
265 /* True if this represents a heap var created for a restrict qualified
266 pointer. */
267 unsigned int is_restrict_var : 1;
269 /* True if this represents a global variable. */
270 unsigned int is_global_var : 1;
272 /* True if this represents a module escape point for IPA analysis. */
273 unsigned int is_ipa_escape_point : 1;
275 /* True if this represents an IPA function info. */
276 unsigned int is_fn_info : 1;
278 /* ??? Store somewhere better. */
279 unsigned short ruid;
281 /* The ID of the variable for the next field in this structure
282 or zero for the last field in this structure. */
283 unsigned next;
285 /* The ID of the variable for the first field in this structure. */
286 unsigned head;
288 /* Offset of this variable, in bits, from the base variable */
289 unsigned HOST_WIDE_INT offset;
291 /* Size of the variable, in bits. */
292 unsigned HOST_WIDE_INT size;
294 /* Full size of the base variable, in bits. */
295 unsigned HOST_WIDE_INT fullsize;
297 /* Name of this variable */
298 const char *name;
300 /* Tree that this variable is associated with. */
301 tree decl;
303 /* Points-to set for this variable. */
304 bitmap solution;
306 /* Old points-to set for this variable. */
307 bitmap oldsolution;
309 typedef struct variable_info *varinfo_t;
311 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
312 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
313 unsigned HOST_WIDE_INT);
314 static varinfo_t lookup_vi_for_tree (tree);
315 static inline bool type_can_have_subvars (const_tree);
316 static void make_param_constraints (varinfo_t);
318 /* Pool of variable info structures. */
319 static object_allocator<variable_info> variable_info_pool
320 ("Variable info pool");
322 /* Map varinfo to final pt_solution. */
323 static hash_map<varinfo_t, pt_solution *> *final_solutions;
324 struct obstack final_solutions_obstack;
326 /* Table of variable info structures for constraint variables.
327 Indexed directly by variable info id. */
328 static vec<varinfo_t> varmap;
330 /* Return the varmap element N */
332 static inline varinfo_t
333 get_varinfo (unsigned int n)
335 return varmap[n];
338 /* Return the next variable in the list of sub-variables of VI
339 or NULL if VI is the last sub-variable. */
341 static inline varinfo_t
342 vi_next (varinfo_t vi)
344 return get_varinfo (vi->next);
347 /* Static IDs for the special variables. Variable ID zero is never used
348 for a real variable; it serves as terminator for the sub-variable chain. */
349 enum { nothing_id = 1, anything_id = 2, string_id = 3,
350 escaped_id = 4, nonlocal_id = 5,
351 storedanything_id = 6, integer_id = 7 };
353 /* Return a new variable info structure for a variable named NAME and
354 associated with tree T (NULL_TREE for artificial variables). Append
355 it to the vector of variable info structures. */
357 static varinfo_t
358 new_var_info (tree t, const char *name, bool add_id)
360 unsigned index = varmap.length ();
361 varinfo_t ret = variable_info_pool.allocate ();
363 if (dump_file && add_id)
365 char *tempname = xasprintf ("%s(%d)", name, index);
366 name = ggc_strdup (tempname);
367 free (tempname);
370 ret->id = index;
371 ret->name = name;
372 ret->decl = t;
373 /* Vars without decl are artificial and do not have sub-variables. */
374 ret->is_artificial_var = (t == NULL_TREE);
375 ret->is_special_var = false;
376 ret->is_unknown_size_var = false;
377 ret->is_full_var = (t == NULL_TREE);
378 ret->is_heap_var = false;
379 ret->may_have_pointers = true;
380 ret->only_restrict_pointers = false;
381 ret->is_restrict_var = false;
382 ret->ruid = 0;
383 ret->is_global_var = (t == NULL_TREE);
384 ret->is_ipa_escape_point = false;
385 ret->is_fn_info = false;
386 if (t && DECL_P (t))
387 ret->is_global_var = (is_global_var (t)
388 /* We have to treat even local register variables
389 as escape points. */
390 || (VAR_P (t) && DECL_HARD_REGISTER (t)));
391 ret->solution = BITMAP_ALLOC (&pta_obstack);
392 ret->oldsolution = NULL;
393 ret->next = 0;
394 ret->head = ret->id;
396 stats.total_vars++;
398 varmap.safe_push (ret);
400 return ret;
403 /* A map mapping call statements to per-stmt variables for uses
404 and clobbers specific to the call. */
405 static hash_map<gimple *, varinfo_t> *call_stmt_vars;
407 /* Lookup or create the variable for the call statement CALL. */
409 static varinfo_t
410 get_call_vi (gcall *call)
412 varinfo_t vi, vi2;
414 bool existed;
415 varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
416 if (existed)
417 return *slot_p;
419 vi = new_var_info (NULL_TREE, "CALLUSED", true);
420 vi->offset = 0;
421 vi->size = 1;
422 vi->fullsize = 2;
423 vi->is_full_var = true;
425 vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED", true);
426 vi2->offset = 1;
427 vi2->size = 1;
428 vi2->fullsize = 2;
429 vi2->is_full_var = true;
431 vi->next = vi2->id;
433 *slot_p = vi;
434 return vi;
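/* Note that get_call_vi lays the "CALLUSED" and "CALLCLOBBERED" varinfos
   for a call out as two sub-fields of one artificial variable (offsets 0
   and 1, chained through ->next), which is why the clobber varinfo is
   always reachable as vi_next of the use varinfo below. */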
437 /* Lookup the variable for the call statement CALL representing
438 the uses. Returns NULL if there is nothing special about this call. */
440 static varinfo_t
441 lookup_call_use_vi (gcall *call)
443 varinfo_t *slot_p = call_stmt_vars->get (call);
444 if (slot_p)
445 return *slot_p;
447 return NULL;
450 /* Lookup the variable for the call statement CALL representing
451 the clobbers. Returns NULL if there is nothing special about this call. */
453 static varinfo_t
454 lookup_call_clobber_vi (gcall *call)
456 varinfo_t uses = lookup_call_use_vi (call);
457 if (!uses)
458 return NULL;
460 return vi_next (uses);
463 /* Lookup or create the variable for the call statement CALL representing
464 the uses. */
466 static varinfo_t
467 get_call_use_vi (gcall *call)
469 return get_call_vi (call);
472 /* Lookup or create the variable for the call statement CALL representing
473 the clobbers. */
475 static varinfo_t ATTRIBUTE_UNUSED
476 get_call_clobber_vi (gcall *call)
478 return vi_next (get_call_vi (call));
482 enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};
484 /* An expression that appears in a constraint. */
486 struct constraint_expr
488 /* Constraint type. */
489 constraint_expr_type type;
491 /* Variable we are referring to in the constraint. */
492 unsigned int var;
494 /* Offset, in bits, of this constraint from the beginning of
495 variables it ends up referring to.
497 IOW, in a deref constraint, we would deref, get the result set,
498 then add OFFSET to each member. */
499 HOST_WIDE_INT offset;
502 /* Use 0x8000... as special unknown offset. */
503 #define UNKNOWN_OFFSET HOST_WIDE_INT_MIN
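/* A constraint expression with offset UNKNOWN_OFFSET is handled
   conservatively: the solution it applies to is first expanded to all
   sub-fields of the variables it contains (see solution_set_expand
   below). */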
505 typedef struct constraint_expr ce_s;
506 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
507 static void get_constraint_for (tree, vec<ce_s> *);
508 static void get_constraint_for_rhs (tree, vec<ce_s> *);
509 static void do_deref (vec<ce_s> *);
511 /* Our set constraints are made up of two constraint expressions, one
512 LHS, and one RHS.
514 As described in the introduction, our set constraints each represent an
515 operation between set valued variables.
517 struct constraint
519 struct constraint_expr lhs;
520 struct constraint_expr rhs;
523 /* List of constraints that we use to build the constraint graph from. */
525 static vec<constraint_t> constraints;
526 static object_allocator<constraint> constraint_pool ("Constraint pool");
528 /* The constraint graph is represented as an array of bitmaps
529 containing successor nodes. */
531 struct constraint_graph
533 /* Size of this graph, which may be different than the number of
534 nodes in the variable map. */
535 unsigned int size;
537 /* Explicit successors of each node. */
538 bitmap *succs;
540 /* Implicit predecessors of each node (Used for variable
541 substitution). */
542 bitmap *implicit_preds;
544 /* Explicit predecessors of each node (Used for variable substitution). */
545 bitmap *preds;
547 /* Indirect cycle representatives, or -1 if the node has no indirect
548 cycles. */
549 int *indirect_cycles;
551 /* Representative node for a node. rep[a] == a unless the node has
552 been unified. */
553 unsigned int *rep;
555 /* Equivalence class representative for a label. This is used for
556 variable substitution. */
557 int *eq_rep;
559 /* Pointer equivalence label for a node. All nodes with the same
560 pointer equivalence label can be unified together at some point
561 (either during constraint optimization or after the constraint
562 graph is built). */
563 unsigned int *pe;
565 /* Pointer equivalence representative for a label. This is used to
566 handle nodes that are pointer equivalent but not location
567 equivalent. We can unite these once the addressof constraints
568 are transformed into initial points-to sets. */
569 int *pe_rep;
571 /* Pointer equivalence label for each node, used during variable
572 substitution. */
573 unsigned int *pointer_label;
575 /* Location equivalence label for each node, used during location
576 equivalence finding. */
577 unsigned int *loc_label;
579 /* Pointed-by set for each node, used during location equivalence
580 finding. This is pointed-by rather than pointed-to, because it
581 is constructed using the predecessor graph. */
582 bitmap *pointed_by;
584 /* Points to sets for pointer equivalence. This is *not* the actual
585 points-to sets for nodes. */
586 bitmap *points_to;
588 /* Bitmap of nodes where the bit is set if the node is a direct
589 node. Used for variable substitution. */
590 sbitmap direct_nodes;
592 /* Bitmap of nodes where the bit is set if the node is address
593 taken. Used for variable substitution. */
594 bitmap address_taken;
596 /* Vector of complex constraints for each graph node. Complex
597 constraints are those involving dereferences or offsets that are
598 not 0. */
599 vec<constraint_t> *complex;
602 static constraint_graph_t graph;
604 /* During variable substitution and the offline version of indirect
605 cycle finding, we create extra nodes to represent dereferences and
606 address-taken constraints. FIRST_REF_NODE and LAST_REF_NODE delimit
607 the range of ids used for these reference nodes. */
608 #define FIRST_REF_NODE (varmap).length ()
609 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
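/* For a constraint variable with id i, its dereference is represented by
   the node FIRST_REF_NODE + i, so the ids in [FIRST_REF_NODE, LAST_REF_NODE]
   denote these artificial reference nodes. */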
611 /* Return the representative node for NODE, if NODE has been unioned
612 with another NODE.
613 This function performs path compression along the way to finding
614 the representative. */
616 static unsigned int
617 find (unsigned int node)
619 gcc_checking_assert (node < graph->size);
620 if (graph->rep[node] != node)
621 return graph->rep[node] = find (graph->rep[node]);
622 return node;
625 /* Union the TO and FROM nodes into the TO node.
626 Note that at some point in the future, we may want to do
627 union-by-rank, in which case we are going to have to return the
628 node we unified to. */
630 static bool
631 unite (unsigned int to, unsigned int from)
633 gcc_checking_assert (to < graph->size && from < graph->size);
634 if (to != from && graph->rep[from] != to)
636 graph->rep[from] = to;
637 return true;
639 return false;
642 /* Create a new constraint consisting of LHS and RHS expressions. */
644 static constraint_t
645 new_constraint (const struct constraint_expr lhs,
646 const struct constraint_expr rhs)
648 constraint_t ret = constraint_pool.allocate ();
649 ret->lhs = lhs;
650 ret->rhs = rhs;
651 return ret;
654 /* Print out constraint C to FILE. */
656 static void
657 dump_constraint (FILE *file, constraint_t c)
659 if (c->lhs.type == ADDRESSOF)
660 fprintf (file, "&");
661 else if (c->lhs.type == DEREF)
662 fprintf (file, "*");
663 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
664 if (c->lhs.offset == UNKNOWN_OFFSET)
665 fprintf (file, " + UNKNOWN");
666 else if (c->lhs.offset != 0)
667 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
668 fprintf (file, " = ");
669 if (c->rhs.type == ADDRESSOF)
670 fprintf (file, "&");
671 else if (c->rhs.type == DEREF)
672 fprintf (file, "*");
673 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
674 if (c->rhs.offset == UNKNOWN_OFFSET)
675 fprintf (file, " + UNKNOWN");
676 else if (c->rhs.offset != 0)
677 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
681 void debug_constraint (constraint_t);
682 void debug_constraints (void);
683 void debug_constraint_graph (void);
684 void debug_solution_for_var (unsigned int);
685 void debug_sa_points_to_info (void);
686 void debug_varinfo (varinfo_t);
687 void debug_varmap (void);
689 /* Print out constraint C to stderr. */
691 DEBUG_FUNCTION void
692 debug_constraint (constraint_t c)
694 dump_constraint (stderr, c);
695 fprintf (stderr, "\n");
698 /* Print out all constraints to FILE */
700 static void
701 dump_constraints (FILE *file, int from)
703 int i;
704 constraint_t c;
705 for (i = from; constraints.iterate (i, &c); i++)
706 if (c)
708 dump_constraint (file, c);
709 fprintf (file, "\n");
713 /* Print out all constraints to stderr. */
715 DEBUG_FUNCTION void
716 debug_constraints (void)
718 dump_constraints (stderr, 0);
721 /* Print the constraint graph in dot format. */
723 static void
724 dump_constraint_graph (FILE *file)
726 unsigned int i;
728 /* Only print the graph if it has already been initialized: */
729 if (!graph)
730 return;
732 /* Prints the header of the dot file: */
733 fprintf (file, "strict digraph {\n");
734 fprintf (file, " node [\n shape = box\n ]\n");
735 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
736 fprintf (file, "\n // List of nodes and complex constraints in "
737 "the constraint graph:\n");
739 /* The next lines print the nodes in the graph together with the
740 complex constraints attached to them. */
741 for (i = 1; i < graph->size; i++)
743 if (i == FIRST_REF_NODE)
744 continue;
745 if (find (i) != i)
746 continue;
747 if (i < FIRST_REF_NODE)
748 fprintf (file, "\"%s\"", get_varinfo (i)->name);
749 else
750 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
751 if (graph->complex[i].exists ())
753 unsigned j;
754 constraint_t c;
755 fprintf (file, " [label=\"\\N\\n");
756 for (j = 0; graph->complex[i].iterate (j, &c); ++j)
758 dump_constraint (file, c);
759 fprintf (file, "\\l");
761 fprintf (file, "\"]");
763 fprintf (file, ";\n");
766 /* Go over the edges. */
767 fprintf (file, "\n // Edges in the constraint graph:\n");
768 for (i = 1; i < graph->size; i++)
770 unsigned j;
771 bitmap_iterator bi;
772 if (find (i) != i)
773 continue;
774 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
776 unsigned to = find (j);
777 if (i == to)
778 continue;
779 if (i < FIRST_REF_NODE)
780 fprintf (file, "\"%s\"", get_varinfo (i)->name);
781 else
782 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
783 fprintf (file, " -> ");
784 if (to < FIRST_REF_NODE)
785 fprintf (file, "\"%s\"", get_varinfo (to)->name);
786 else
787 fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
788 fprintf (file, ";\n");
792 /* Prints the tail of the dot file. */
793 fprintf (file, "}\n");
796 /* Print out the constraint graph to stderr. */
798 DEBUG_FUNCTION void
799 debug_constraint_graph (void)
801 dump_constraint_graph (stderr);
804 /* SOLVER FUNCTIONS
806 The solver is a simple worklist solver that works on the following
807 algorithm:
809 sbitmap changed_nodes = all zeroes;
810 changed_count = 0;
811 For each node that is not already collapsed:
812 changed_count++;
813 set bit in changed nodes
815 while (changed_count > 0)
817 compute topological ordering for constraint graph
819 find and collapse cycles in the constraint graph (updating
820 changed if necessary)
822 for each node (n) in the graph in topological order:
823 changed_count--;
825 Process each complex constraint associated with the node,
826 updating changed if necessary.
828 For each outgoing edge from n, propagate the solution from n to
829 the destination of the edge, updating changed as necessary.
831 } */
833 /* Return true if two constraint expressions A and B are equal. */
835 static bool
836 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
838 return a.type == b.type && a.var == b.var && a.offset == b.offset;
841 /* Return true if constraint expression A is less than constraint expression
842 B. This is just arbitrary, but consistent, in order to give them an
843 ordering. */
845 static bool
846 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
848 if (a.type == b.type)
850 if (a.var == b.var)
851 return a.offset < b.offset;
852 else
853 return a.var < b.var;
855 else
856 return a.type < b.type;
859 /* Return true if constraint A is less than constraint B. This is just
860 arbitrary, but consistent, in order to give them an ordering. */
862 static bool
863 constraint_less (const constraint_t &a, const constraint_t &b)
865 if (constraint_expr_less (a->lhs, b->lhs))
866 return true;
867 else if (constraint_expr_less (b->lhs, a->lhs))
868 return false;
869 else
870 return constraint_expr_less (a->rhs, b->rhs);
873 /* Return true if two constraints A and B are equal. */
875 static bool
876 constraint_equal (struct constraint a, struct constraint b)
878 return constraint_expr_equal (a.lhs, b.lhs)
879 && constraint_expr_equal (a.rhs, b.rhs);
883 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
885 static constraint_t
886 constraint_vec_find (vec<constraint_t> vec,
887 struct constraint lookfor)
889 unsigned int place;
890 constraint_t found;
892 if (!vec.exists ())
893 return NULL;
895 place = vec.lower_bound (&lookfor, constraint_less);
896 if (place >= vec.length ())
897 return NULL;
898 found = vec[place];
899 if (!constraint_equal (*found, lookfor))
900 return NULL;
901 return found;
904 /* Union two constraint vectors, TO and FROM. Put the result in TO.
905 Returns true if the TO set is changed. */
907 static bool
908 constraint_set_union (vec<constraint_t> *to,
909 vec<constraint_t> *from)
911 int i;
912 constraint_t c;
913 bool any_change = false;
915 FOR_EACH_VEC_ELT (*from, i, c)
917 if (constraint_vec_find (*to, *c) == NULL)
919 unsigned int place = to->lower_bound (c, constraint_less);
920 to->safe_insert (place, c);
921 any_change = true;
924 return any_change;
927 /* Expand the solution in SET to include all sub-fields of the variables it contains. */
929 static bitmap
930 solution_set_expand (bitmap set, bitmap *expanded)
932 bitmap_iterator bi;
933 unsigned j;
935 if (*expanded)
936 return *expanded;
938 *expanded = BITMAP_ALLOC (&iteration_obstack);
940 /* In a first pass expand to the head of the variables we need to
941 add all sub-fields of. This avoids quadratic behavior. */
942 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
944 varinfo_t v = get_varinfo (j);
945 if (v->is_artificial_var
946 || v->is_full_var)
947 continue;
948 bitmap_set_bit (*expanded, v->head);
951 /* In the second pass now expand all head variables with subfields. */
952 EXECUTE_IF_SET_IN_BITMAP (*expanded, 0, j, bi)
954 varinfo_t v = get_varinfo (j);
955 if (v->head != j)
956 continue;
957 for (v = vi_next (v); v != NULL; v = vi_next (v))
958 bitmap_set_bit (*expanded, v->id);
961 /* And finally set the rest of the bits from SET. */
962 bitmap_ior_into (*expanded, set);
964 return *expanded;
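/* As an illustration of solution_set_expand (reusing struct foo from the
   introductory comment): if SET contains only the sub-field foo.b, the
   expanded bitmap contains both foo.a and foo.b, i.e. all sub-fields of
   foo, in addition to everything already in SET. */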
967 /* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
968 process. */
970 static bool
971 set_union_with_increment (bitmap to, bitmap delta, HOST_WIDE_INT inc,
972 bitmap *expanded_delta)
974 bool changed = false;
975 bitmap_iterator bi;
976 unsigned int i;
978 /* If the solution of DELTA contains ANYTHING it is good enough to transfer
979 this to TO. */
980 if (bitmap_bit_p (delta, anything_id))
981 return bitmap_set_bit (to, anything_id);
983 /* If the offset is unknown we have to expand the solution to
984 all subfields. */
985 if (inc == UNKNOWN_OFFSET)
987 delta = solution_set_expand (delta, expanded_delta);
988 changed |= bitmap_ior_into (to, delta);
989 return changed;
992 /* For non-zero offset union the offsetted solution into the destination. */
993 EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
995 varinfo_t vi = get_varinfo (i);
997 /* If this is a variable with just one field just set its bit
998 in the result. */
999 if (vi->is_artificial_var
1000 || vi->is_unknown_size_var
1001 || vi->is_full_var)
1002 changed |= bitmap_set_bit (to, i);
1003 else
1005 HOST_WIDE_INT fieldoffset = vi->offset + inc;
1006 unsigned HOST_WIDE_INT size = vi->size;
1008 /* If the offset makes the pointer point before the
1009 variable, use offset zero for the field lookup. */
1010 if (fieldoffset < 0)
1011 vi = get_varinfo (vi->head);
1012 else
1013 vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
1017 changed |= bitmap_set_bit (to, vi->id);
1018 if (vi->is_full_var
1019 || vi->next == 0)
1020 break;
1022 /* We have to include all fields that overlap the current field
1023 shifted by inc. */
1024 vi = vi_next (vi);
1026 while (vi->offset < fieldoffset + size);
1030 return changed;
1033 /* Insert constraint C into the list of complex constraints for graph
1034 node VAR. */
1036 static void
1037 insert_into_complex (constraint_graph_t graph,
1038 unsigned int var, constraint_t c)
1040 vec<constraint_t> complex = graph->complex[var];
1041 unsigned int place = complex.lower_bound (c, constraint_less);
1043 /* Only insert constraints that do not already exist. */
1044 if (place >= complex.length ()
1045 || !constraint_equal (*c, *complex[place]))
1046 graph->complex[var].safe_insert (place, c);
1050 /* Condense two variable nodes into a single variable node, by moving
1051 all associated info from FROM to TO. Returns true if TO node's
1052 constraint set changes after the merge. */
1054 static bool
1055 merge_node_constraints (constraint_graph_t graph, unsigned int to,
1056 unsigned int from)
1058 unsigned int i;
1059 constraint_t c;
1060 bool any_change = false;
1062 gcc_checking_assert (find (from) == to);
1064 /* Move all complex constraints from the FROM node into the TO node. */
1065 FOR_EACH_VEC_ELT (graph->complex[from], i, c)
1067 /* In complex constraints for node FROM, we may have either
1068 a = *FROM or *FROM = a, or an offsetted constraint, which is
1069 always added to the rhs node's constraints. */
1071 if (c->rhs.type == DEREF)
1072 c->rhs.var = to;
1073 else if (c->lhs.type == DEREF)
1074 c->lhs.var = to;
1075 else
1076 c->rhs.var = to;
1079 any_change = constraint_set_union (&graph->complex[to],
1080 &graph->complex[from]);
1081 graph->complex[from].release ();
1082 return any_change;
1086 /* Remove edges involving NODE from GRAPH. */
1088 static void
1089 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1091 if (graph->succs[node])
1092 BITMAP_FREE (graph->succs[node]);
1095 /* Merge GRAPH nodes FROM and TO into node TO. */
1097 static void
1098 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1099 unsigned int from)
1101 if (graph->indirect_cycles[from] != -1)
1103 /* If we have indirect cycles with the from node, and we have
1104 none on the to node, the to node has indirect cycles from the
1105 from node now that they are unified.
1106 If indirect cycles exist on both, unify the nodes that they
1107 are in a cycle with, since we know they are in a cycle with
1108 each other. */
1109 if (graph->indirect_cycles[to] == -1)
1110 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1113 /* Merge all the successor edges. */
1114 if (graph->succs[from])
1116 if (!graph->succs[to])
1117 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1118 bitmap_ior_into (graph->succs[to],
1119 graph->succs[from]);
1122 clear_edges_for_node (graph, from);
1126 /* Add an implicit predecessor edge to GRAPH, recording FROM as an
1127 implicit predecessor of TO, if it doesn't exist in the graph already. */
1129 static void
1130 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1131 unsigned int from)
1133 if (to == from)
1134 return;
1136 if (!graph->implicit_preds[to])
1137 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1139 if (bitmap_set_bit (graph->implicit_preds[to], from))
1140 stats.num_implicit_edges++;
1143 /* Add a predecessor graph edge to GRAPH, recording FROM as a
1144 predecessor of TO, if it doesn't exist in the graph already. */
1147 static void
1148 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1149 unsigned int from)
1151 if (!graph->preds[to])
1152 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1153 bitmap_set_bit (graph->preds[to], from);
1156 /* Add a graph edge to GRAPH, going from FROM to TO if
1157 it doesn't exist in the graph already.
1158 Return false if the edge already existed, true otherwise. */
1160 static bool
1161 add_graph_edge (constraint_graph_t graph, unsigned int to,
1162 unsigned int from)
1164 if (to == from)
1166 return false;
1168 else
1170 bool r = false;
1172 if (!graph->succs[from])
1173 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1174 if (bitmap_set_bit (graph->succs[from], to))
1176 r = true;
1177 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1178 stats.num_edges++;
1180 return r;
1185 /* Initialize the constraint graph structure to contain SIZE nodes. */
1187 static void
1188 init_graph (unsigned int size)
1190 unsigned int j;
1192 graph = XCNEW (struct constraint_graph);
1193 graph->size = size;
1194 graph->succs = XCNEWVEC (bitmap, graph->size);
1195 graph->indirect_cycles = XNEWVEC (int, graph->size);
1196 graph->rep = XNEWVEC (unsigned int, graph->size);
1197 /* ??? Macros do not support template types with multiple arguments,
1198 so we use a typedef to work around it. */
1199 typedef vec<constraint_t> vec_constraint_t_heap;
1200 graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1201 graph->pe = XCNEWVEC (unsigned int, graph->size);
1202 graph->pe_rep = XNEWVEC (int, graph->size);
1204 for (j = 0; j < graph->size; j++)
1206 graph->rep[j] = j;
1207 graph->pe_rep[j] = -1;
1208 graph->indirect_cycles[j] = -1;
1212 /* Build the constraint graph, adding only predecessor edges right now. */
1214 static void
1215 build_pred_graph (void)
1217 int i;
1218 constraint_t c;
1219 unsigned int j;
1221 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1222 graph->preds = XCNEWVEC (bitmap, graph->size);
1223 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1224 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1225 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1226 graph->points_to = XCNEWVEC (bitmap, graph->size);
1227 graph->eq_rep = XNEWVEC (int, graph->size);
1228 graph->direct_nodes = sbitmap_alloc (graph->size);
1229 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1230 bitmap_clear (graph->direct_nodes);
1232 for (j = 1; j < FIRST_REF_NODE; j++)
1234 if (!get_varinfo (j)->is_special_var)
1235 bitmap_set_bit (graph->direct_nodes, j);
1238 for (j = 0; j < graph->size; j++)
1239 graph->eq_rep[j] = -1;
1241 for (j = 0; j < varmap.length (); j++)
1242 graph->indirect_cycles[j] = -1;
1244 FOR_EACH_VEC_ELT (constraints, i, c)
1246 struct constraint_expr lhs = c->lhs;
1247 struct constraint_expr rhs = c->rhs;
1248 unsigned int lhsvar = lhs.var;
1249 unsigned int rhsvar = rhs.var;
1251 if (lhs.type == DEREF)
1253 /* *x = y. */
1254 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1255 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1257 else if (rhs.type == DEREF)
1259 /* x = *y */
1260 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1261 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1262 else
1263 bitmap_clear_bit (graph->direct_nodes, lhsvar);
1265 else if (rhs.type == ADDRESSOF)
1267 varinfo_t v;
1269 /* x = &y */
1270 if (graph->points_to[lhsvar] == NULL)
1271 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1272 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1274 if (graph->pointed_by[rhsvar] == NULL)
1275 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1276 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1278 /* Implicitly, *x = y */
1279 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1281 /* All related variables are no longer direct nodes. */
1282 bitmap_clear_bit (graph->direct_nodes, rhsvar);
1283 v = get_varinfo (rhsvar);
1284 if (!v->is_full_var)
1286 v = get_varinfo (v->head);
1289 bitmap_clear_bit (graph->direct_nodes, v->id);
1290 v = vi_next (v);
1292 while (v != NULL);
1294 bitmap_set_bit (graph->address_taken, rhsvar);
1296 else if (lhsvar > anything_id
1297 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1299 /* x = y */
1300 add_pred_graph_edge (graph, lhsvar, rhsvar);
1301 /* Implicitly, *x = *y */
1302 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1303 FIRST_REF_NODE + rhsvar);
1305 else if (lhs.offset != 0 || rhs.offset != 0)
1307 if (rhs.offset != 0)
1308 bitmap_clear_bit (graph->direct_nodes, lhs.var);
1309 else if (lhs.offset != 0)
1310 bitmap_clear_bit (graph->direct_nodes, rhs.var);
1315 /* Build the constraint graph, adding successor edges. */
1317 static void
1318 build_succ_graph (void)
1320 unsigned i, t;
1321 constraint_t c;
1323 FOR_EACH_VEC_ELT (constraints, i, c)
1325 struct constraint_expr lhs;
1326 struct constraint_expr rhs;
1327 unsigned int lhsvar;
1328 unsigned int rhsvar;
1330 if (!c)
1331 continue;
1333 lhs = c->lhs;
1334 rhs = c->rhs;
1335 lhsvar = find (lhs.var);
1336 rhsvar = find (rhs.var);
1338 if (lhs.type == DEREF)
1340 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1341 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1343 else if (rhs.type == DEREF)
1345 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1346 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1348 else if (rhs.type == ADDRESSOF)
1350 /* x = &y */
1351 gcc_checking_assert (find (rhs.var) == rhs.var);
1352 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1354 else if (lhsvar > anything_id
1355 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1357 add_graph_edge (graph, lhsvar, rhsvar);
1361 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1362 receive pointers. */
1363 t = find (storedanything_id);
1364 for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1366 if (!bitmap_bit_p (graph->direct_nodes, i)
1367 && get_varinfo (i)->may_have_pointers)
1368 add_graph_edge (graph, find (i), t);
1371 /* Everything stored to ANYTHING also potentially escapes. */
1372 add_graph_edge (graph, find (escaped_id), t);
1376 /* Changed variables on the last iteration. */
1377 static bitmap changed;
1379 /* Strongly Connected Component visitation info. */
1381 struct scc_info
1383 scc_info (size_t size);
1384 ~scc_info ();
1386 auto_sbitmap visited;
1387 auto_sbitmap deleted;
1388 unsigned int *dfs;
1389 unsigned int *node_mapping;
1390 int current_index;
1391 auto_vec<unsigned> scc_stack;
1395 /* Recursive routine to find strongly connected components in GRAPH.
1396 SI is the SCC info to store the information in, and N is the id of current
1397 graph node we are processing.
1399 This is Tarjan's strongly connected component finding algorithm, as
1400 modified by Nuutila to keep only non-root nodes on the stack.
1401 The algorithm can be found in "On finding the strongly connected
1402 components in a directed graph" by Esko Nuutila and Eljas
1403 Soisalon-Soininen, in Information Processing Letters volume 49,
1404 number 1, pages 9-14. */
1406 static void
1407 scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
1409 unsigned int i;
1410 bitmap_iterator bi;
1411 unsigned int my_dfs;
1413 bitmap_set_bit (si->visited, n);
1414 si->dfs[n] = si->current_index ++;
1415 my_dfs = si->dfs[n];
1417 /* Visit all the successors. */
1418 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1420 unsigned int w;
1422 if (i > LAST_REF_NODE)
1423 break;
1425 w = find (i);
1426 if (bitmap_bit_p (si->deleted, w))
1427 continue;
1429 if (!bitmap_bit_p (si->visited, w))
1430 scc_visit (graph, si, w);
1432 unsigned int t = find (w);
1433 gcc_checking_assert (find (n) == n);
1434 if (si->dfs[t] < si->dfs[n])
1435 si->dfs[n] = si->dfs[t];
1438 /* See if any components have been identified. */
1439 if (si->dfs[n] == my_dfs)
1441 if (si->scc_stack.length () > 0
1442 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1444 bitmap scc = BITMAP_ALLOC (NULL);
1445 unsigned int lowest_node;
1446 bitmap_iterator bi;
1448 bitmap_set_bit (scc, n);
1450 while (si->scc_stack.length () != 0
1451 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1453 unsigned int w = si->scc_stack.pop ();
1455 bitmap_set_bit (scc, w);
1458 lowest_node = bitmap_first_set_bit (scc);
1459 gcc_assert (lowest_node < FIRST_REF_NODE);
1461 /* Collapse the SCC nodes into a single node, and mark the
1462 indirect cycles. */
1463 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1465 if (i < FIRST_REF_NODE)
1467 if (unite (lowest_node, i))
1468 unify_nodes (graph, lowest_node, i, false);
1470 else
1472 unite (lowest_node, i);
1473 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1477 bitmap_set_bit (si->deleted, n);
1479 else
1480 si->scc_stack.safe_push (n);
1483 /* Unify node FROM into node TO, updating the changed count if
1484 necessary when UPDATE_CHANGED is true. */
1486 static void
1487 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1488 bool update_changed)
1490 gcc_checking_assert (to != from && find (to) == to);
1492 if (dump_file && (dump_flags & TDF_DETAILS))
1493 fprintf (dump_file, "Unifying %s to %s\n",
1494 get_varinfo (from)->name,
1495 get_varinfo (to)->name);
1497 if (update_changed)
1498 stats.unified_vars_dynamic++;
1499 else
1500 stats.unified_vars_static++;
1502 merge_graph_nodes (graph, to, from);
1503 if (merge_node_constraints (graph, to, from))
1505 if (update_changed)
1506 bitmap_set_bit (changed, to);
1509 /* Mark TO as changed if FROM was changed. FROM's changed bit is
1510 cleared since TO now represents it. */
1512 if (update_changed
1513 && bitmap_clear_bit (changed, from))
1514 bitmap_set_bit (changed, to);
1515 varinfo_t fromvi = get_varinfo (from);
1516 if (fromvi->solution)
1518 /* If the solution changes because of the merging, we need to mark
1519 the variable as changed. */
1520 varinfo_t tovi = get_varinfo (to);
1521 if (bitmap_ior_into (tovi->solution, fromvi->solution))
1523 if (update_changed)
1524 bitmap_set_bit (changed, to);
1527 BITMAP_FREE (fromvi->solution);
1528 if (fromvi->oldsolution)
1529 BITMAP_FREE (fromvi->oldsolution);
1531 if (stats.iterations > 0
1532 && tovi->oldsolution)
1533 BITMAP_FREE (tovi->oldsolution);
1535 if (graph->succs[to])
1536 bitmap_clear_bit (graph->succs[to], to);
1539 /* Information needed to compute the topological ordering of a graph. */
1541 struct topo_info
1543 /* sbitmap of visited nodes. */
1544 sbitmap visited;
1545 /* Array that stores the topological order of the graph, *in
1546 reverse*. */
1547 vec<unsigned> topo_order;
1551 /* Initialize and return a topological info structure. */
1553 static struct topo_info *
1554 init_topo_info (void)
1556 size_t size = graph->size;
1557 struct topo_info *ti = XNEW (struct topo_info);
1558 ti->visited = sbitmap_alloc (size);
1559 bitmap_clear (ti->visited);
1560 ti->topo_order.create (1);
1561 return ti;
1565 /* Free the topological sort info pointed to by TI. */
1567 static void
1568 free_topo_info (struct topo_info *ti)
1570 sbitmap_free (ti->visited);
1571 ti->topo_order.release ();
1572 free (ti);
1575 /* Visit the graph in topological order, and store the order in the
1576 topo_info structure. */
1578 static void
1579 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1580 unsigned int n)
1582 bitmap_iterator bi;
1583 unsigned int j;
1585 bitmap_set_bit (ti->visited, n);
1587 if (graph->succs[n])
1588 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1590 if (!bitmap_bit_p (ti->visited, j))
1591 topo_visit (graph, ti, j);
1594 ti->topo_order.safe_push (n);
1597 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1598 starting solution for y. */
1600 static void
1601 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1602 bitmap delta, bitmap *expanded_delta)
1604 unsigned int lhs = c->lhs.var;
1605 bool flag = false;
1606 bitmap sol = get_varinfo (lhs)->solution;
1607 unsigned int j;
1608 bitmap_iterator bi;
1609 HOST_WIDE_INT roffset = c->rhs.offset;
1611 /* Our IL does not allow this. */
1612 gcc_checking_assert (c->lhs.offset == 0);
1614 /* If the solution of Y contains ANYTHING it is good enough to transfer
1615 this to the LHS. */
1616 if (bitmap_bit_p (delta, anything_id))
1618 flag |= bitmap_set_bit (sol, anything_id);
1619 goto done;
1622 /* If we do not know at which offset the rhs is dereferenced, compute
1623 the reachability set of DELTA, conservatively assuming it is
1624 dereferenced at all valid offsets. */
1625 if (roffset == UNKNOWN_OFFSET)
1627 delta = solution_set_expand (delta, expanded_delta);
1628 /* No further offset processing is necessary. */
1629 roffset = 0;
1632 /* For each variable j in delta (Sol(y)), add
1633 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1634 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1636 varinfo_t v = get_varinfo (j);
1637 HOST_WIDE_INT fieldoffset = v->offset + roffset;
1638 unsigned HOST_WIDE_INT size = v->size;
1639 unsigned int t;
1641 if (v->is_full_var)
1643 else if (roffset != 0)
1645 if (fieldoffset < 0)
1646 v = get_varinfo (v->head);
1647 else
1648 v = first_or_preceding_vi_for_offset (v, fieldoffset);
1651 /* We have to include all fields that overlap the current field
1652 shifted by roffset. */
1655 t = find (v->id);
1657 /* Adding edges from the special vars is pointless.
1658 They don't have sets that can change. */
1659 if (get_varinfo (t)->is_special_var)
1660 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1661 /* Merging the solution from ESCAPED needlessly increases
1662 the set. Use ESCAPED as representative instead. */
1663 else if (v->id == escaped_id)
1664 flag |= bitmap_set_bit (sol, escaped_id);
1665 else if (v->may_have_pointers
1666 && add_graph_edge (graph, lhs, t))
1667 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1669 if (v->is_full_var
1670 || v->next == 0)
1671 break;
1673 v = vi_next (v);
1675 while (v->offset < fieldoffset + size);
1678 done:
1679 /* If the LHS solution changed, mark the var as changed. */
1680 if (flag)
1682 get_varinfo (lhs)->solution = sol;
1683 bitmap_set_bit (changed, lhs);
1687 /* Process a constraint C that represents *(x + off) = y using DELTA
1688 as the starting solution for x. */
1690 static void
1691 do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
1693 unsigned int rhs = c->rhs.var;
1694 bitmap sol = get_varinfo (rhs)->solution;
1695 unsigned int j;
1696 bitmap_iterator bi;
1697 HOST_WIDE_INT loff = c->lhs.offset;
1698 bool escaped_p = false;
1700 /* Our IL does not allow this. */
1701 gcc_checking_assert (c->rhs.offset == 0);
1703 /* If the solution of y contains ANYTHING simply use the ANYTHING
1704 solution. This avoids needlessly increasing the points-to sets. */
1705 if (bitmap_bit_p (sol, anything_id))
1706 sol = get_varinfo (find (anything_id))->solution;
1708 /* If the solution for x contains ANYTHING we have to merge the
1709 solution of y into all pointer variables which we do via
1710 STOREDANYTHING. */
1711 if (bitmap_bit_p (delta, anything_id))
1713 unsigned t = find (storedanything_id);
1714 if (add_graph_edge (graph, t, rhs))
1716 if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1717 bitmap_set_bit (changed, t);
1719 return;
1722 /* If we do not know at which offset the lhs is dereferenced, compute
1723 the reachability set of DELTA, conservatively assuming it is
1724 dereferenced at all valid offsets. */
1725 if (loff == UNKNOWN_OFFSET)
1727 delta = solution_set_expand (delta, expanded_delta);
1728 loff = 0;
1731 /* For each member j of delta (Sol(x)), add an edge from y to j and
1732 union Sol(y) into Sol(j) */
1733 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1735 varinfo_t v = get_varinfo (j);
1736 unsigned int t;
1737 HOST_WIDE_INT fieldoffset = v->offset + loff;
1738 unsigned HOST_WIDE_INT size = v->size;
1740 if (v->is_full_var)
1742 else if (loff != 0)
1744 if (fieldoffset < 0)
1745 v = get_varinfo (v->head);
1746 else
1747 v = first_or_preceding_vi_for_offset (v, fieldoffset);
1750 /* We have to include all fields that overlap the current field
1751 shifted by loff. */
1754 if (v->may_have_pointers)
1756 /* If v is a global variable then this is an escape point. */
1757 if (v->is_global_var
1758 && !escaped_p)
1760 t = find (escaped_id);
1761 if (add_graph_edge (graph, t, rhs)
1762 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1763 bitmap_set_bit (changed, t);
1764 /* Enough to let rhs escape once. */
1765 escaped_p = true;
1768 if (v->is_special_var)
1769 break;
1771 t = find (v->id);
1772 if (add_graph_edge (graph, t, rhs)
1773 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1774 bitmap_set_bit (changed, t);
1777 if (v->is_full_var
1778 || v->next == 0)
1779 break;
1781 v = vi_next (v);
1783 while (v->offset < fieldoffset + size);
1787 /* Handle a non-simple constraint (simple meaning it requires no iteration),
1788 IE *x = &y, x = *y, *x = y, and x = y with offsets involved. */
1790 static void
1791 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
1792 bitmap *expanded_delta)
1794 if (c->lhs.type == DEREF)
1796 if (c->rhs.type == ADDRESSOF)
1798 gcc_unreachable ();
1800 else
1802 /* *x = y */
1803 do_ds_constraint (c, delta, expanded_delta);
1806 else if (c->rhs.type == DEREF)
1808 /* x = *y */
1809 if (!(get_varinfo (c->lhs.var)->is_special_var))
1810 do_sd_constraint (graph, c, delta, expanded_delta);
1812 else
1814 bitmap tmp;
1815 bool flag = false;
1817 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
1818 && c->rhs.offset != 0 && c->lhs.offset == 0);
1819 tmp = get_varinfo (c->lhs.var)->solution;
1821 flag = set_union_with_increment (tmp, delta, c->rhs.offset,
1822 expanded_delta);
1824 if (flag)
1825 bitmap_set_bit (changed, c->lhs.var);
1829 /* Construct a new SCC info structure of size SIZE. */
1831 scc_info::scc_info (size_t size) :
1832 visited (size), deleted (size), current_index (0), scc_stack (1)
1834 bitmap_clear (visited);
1835 bitmap_clear (deleted);
1836 node_mapping = XNEWVEC (unsigned int, size);
1837 dfs = XCNEWVEC (unsigned int, size);
1839 for (size_t i = 0; i < size; i++)
1840 node_mapping[i] = i;
1843 /* Free an SCC info structure. */
1845 scc_info::~scc_info ()
1847 free (node_mapping);
1848 free (dfs);
1852 /* Find indirect cycles in GRAPH that occur, using strongly connected
1853 components, and note them in the indirect cycles map.
1855 This technique comes from Ben Hardekopf and Calvin Lin,
1856 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1857 Lines of Code", submitted to PLDI 2007. */
1859 static void
1860 find_indirect_cycles (constraint_graph_t graph)
1862 unsigned int i;
1863 unsigned int size = graph->size;
1864 scc_info si (size);
1866 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1867 if (!bitmap_bit_p (si.visited, i) && find (i) == i)
1868 scc_visit (graph, &si, i);
1871 /* Compute a topological ordering for GRAPH, and store the result in the
1872 topo_info structure TI. */
1874 static void
1875 compute_topo_order (constraint_graph_t graph,
1876 struct topo_info *ti)
1878 unsigned int i;
1879 unsigned int size = graph->size;
1881 for (i = 0; i != size; ++i)
1882 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1883 topo_visit (graph, ti, i);
1886 /* Structure used for hash value numbering of pointer equivalence
1887 classes. */
1889 typedef struct equiv_class_label
1891 hashval_t hashcode;
1892 unsigned int equivalence_class;
1893 bitmap labels;
1894 } *equiv_class_label_t;
1895 typedef const struct equiv_class_label *const_equiv_class_label_t;
1897 /* Equiv_class_label hashtable helpers. */
1899 struct equiv_class_hasher : free_ptr_hash <equiv_class_label>
1901 static inline hashval_t hash (const equiv_class_label *);
1902 static inline bool equal (const equiv_class_label *,
1903 const equiv_class_label *);
1906 /* Hash function for an equiv_class_label_t. */
1908 inline hashval_t
1909 equiv_class_hasher::hash (const equiv_class_label *ecl)
1911 return ecl->hashcode;
1914 /* Equality function for two equiv_class_label_t's. */
1916 inline bool
1917 equiv_class_hasher::equal (const equiv_class_label *eql1,
1918 const equiv_class_label *eql2)
1920 return (eql1->hashcode == eql2->hashcode
1921 && bitmap_equal_p (eql1->labels, eql2->labels));
1924 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1925 classes. */
1926 static hash_table<equiv_class_hasher> *pointer_equiv_class_table;
1928 /* A hashtable for mapping a bitmap of labels->location equivalence
1929 classes. */
1930 static hash_table<equiv_class_hasher> *location_equiv_class_table;
1932 /* Look up the equivalence class for the bitmap of LABELS in TABLE,
1933 inserting a new entry with equivalence class zero if none exists
1934 yet. Returns the equiv_class_label for LABELS. */
1936 static equiv_class_label *
1937 equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
1938 bitmap labels)
1940 equiv_class_label **slot;
1941 equiv_class_label ecl;
1943 ecl.labels = labels;
1944 ecl.hashcode = bitmap_hash (labels);
1945 slot = table->find_slot (&ecl, INSERT);
1946 if (!*slot)
1948 *slot = XNEW (struct equiv_class_label);
1949 (*slot)->labels = labels;
1950 (*slot)->hashcode = ecl.hashcode;
1951 (*slot)->equivalence_class = 0;
1954 return *slot;
1957 /* Perform offline variable substitution.
1959 This is a worst case quadratic time way of identifying variables
1960 that must have equivalent points-to sets, including those caused by
1961 static cycles, and single entry subgraphs, in the constraint graph.
1963 The technique is described in "Exploiting Pointer and Location
1964 Equivalence to Optimize Pointer Analysis", in the 14th International
1965 Static Analysis Symposium (SAS), August 2007. It is known as the
1966 "HU" algorithm, and is equivalent to value numbering the collapsed
1967 constraint graph including evaluating unions.
1969 The general method of finding equivalence classes is as follows:
1970 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1971 Initialize all non-REF nodes to be direct nodes.
1972 For each constraint a = a U {b}, we set pts(a) = pts(a) U {fresh
1973 variable}.
1974 For each constraint containing the dereference, we also do the same
1975 thing.
1977 We then compute SCC's in the graph and unify nodes in the same SCC,
1978 including pts sets.
1980 For each non-collapsed node x:
1981 Visit all unvisited explicit incoming edges.
1982 Ignoring all non-pointers, set pts(x) = Union of pts(y) for y
1983 where y->x.
1984 Lookup the equivalence class for pts(x).
1985 If we found one, equivalence_class(x) = found class.
1986 Otherwise, equivalence_class(x) = new class, and new_class is
1987 added to the lookup table.
1989 All direct nodes with the same equivalence class can be replaced
1990 with a single representative node.
1991 All unlabeled nodes (label == 0) are not pointers and all edges
1992 involving them can be eliminated.
1993 We perform these optimizations during rewrite_constraints.
1995 In addition to pointer equivalence class finding, we also perform
1996 location equivalence class finding. This is the set of variables
1997 that always appear together in points-to sets. We use this to
1998 compress the size of the points-to sets. */
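/* As a small example of the pointer equivalences this discovers: given
   only the constraints

     a = &x
     b = a
     c = a

   with the addresses of a, b and c never taken, b and c each have a as
   their single non-empty predecessor, so label_visit hands them the same
   pointer equivalence label as a and rewrite_constraints collapses all
   three onto one representative node before solving.  */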
2000 /* Current maximum pointer equivalence class id. */
2001 static int pointer_equiv_class;
2003 /* Current maximum location equivalence class id. */
2004 static int location_equiv_class;
2006 /* Recursive routine to find strongly connected components in GRAPH,
2007 and label its nodes with DFS numbers. */
2009 static void
2010 condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2012 unsigned int i;
2013 bitmap_iterator bi;
2014 unsigned int my_dfs;
2016 gcc_checking_assert (si->node_mapping[n] == n);
2017 bitmap_set_bit (si->visited, n);
2018 si->dfs[n] = si->current_index ++;
2019 my_dfs = si->dfs[n];
2021 /* Visit all the successors. */
2022 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2024 unsigned int w = si->node_mapping[i];
2026 if (bitmap_bit_p (si->deleted, w))
2027 continue;
2029 if (!bitmap_bit_p (si->visited, w))
2030 condense_visit (graph, si, w);
2032 unsigned int t = si->node_mapping[w];
2033 gcc_checking_assert (si->node_mapping[n] == n);
2034 if (si->dfs[t] < si->dfs[n])
2035 si->dfs[n] = si->dfs[t];
2038 /* Visit all the implicit predecessors. */
2039 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2041 unsigned int w = si->node_mapping[i];
2043 if (bitmap_bit_p (si->deleted, w))
2044 continue;
2046 if (!bitmap_bit_p (si->visited, w))
2047 condense_visit (graph, si, w);
2049 unsigned int t = si->node_mapping[w];
2050 gcc_assert (si->node_mapping[n] == n);
2051 if (si->dfs[t] < si->dfs[n])
2052 si->dfs[n] = si->dfs[t];
2055 /* See if any components have been identified. */
2056 if (si->dfs[n] == my_dfs)
2058 while (si->scc_stack.length () != 0
2059 && si->dfs[si->scc_stack.last ()] >= my_dfs)
2061 unsigned int w = si->scc_stack.pop ();
2062 si->node_mapping[w] = n;
2064 if (!bitmap_bit_p (graph->direct_nodes, w))
2065 bitmap_clear_bit (graph->direct_nodes, n);
2067 /* Unify our nodes. */
2068 if (graph->preds[w])
2070 if (!graph->preds[n])
2071 graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2072 bitmap_ior_into (graph->preds[n], graph->preds[w]);
2074 if (graph->implicit_preds[w])
2076 if (!graph->implicit_preds[n])
2077 graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
2078 bitmap_ior_into (graph->implicit_preds[n],
2079 graph->implicit_preds[w]);
2081 if (graph->points_to[w])
2083 if (!graph->points_to[n])
2084 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2085 bitmap_ior_into (graph->points_to[n],
2086 graph->points_to[w]);
2089 bitmap_set_bit (si->deleted, n);
2091 else
2092 si->scc_stack.safe_push (n);
2095 /* Label pointer equivalences.
2097 This performs a value numbering of the constraint graph to
2098 discover which variables will always have the same points-to sets
2099 under the current set of constraints.
2101 The way it value numbers is to store the set of points-to bits
2102 generated by the constraints and graph edges. This is just used as a
2103 hash and equality comparison. The *actual set of points-to bits* is
2104 completely irrelevant, in that we don't care about being able to
2105 extract them later.
2107 The equality values (currently bitmaps) just have to satisfy a few
2108 constraints, the main ones being:
2109 1. The combining operation must be order independent.
2110 2. The end result of a given set of operations must be unique iff the
2111 combination of input values is unique
2112 3. Hashable. */
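/* Concretely: if node x has incoming edges from a and b while node y has
   incoming edges from b and a, both unions yield bit-for-bit the same
   bitmap, so the hash table lookup assigns x and y the same equivalence
   class even though the unions happened in different orders.  The bits
   themselves are never read back as a real points-to set.  */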
2114 static void
2115 label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
2117 unsigned int i, first_pred;
2118 bitmap_iterator bi;
2120 bitmap_set_bit (si->visited, n);
2122 /* Label and union our incoming edges' points-to sets. */
2123 first_pred = -1U;
2124 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2126 unsigned int w = si->node_mapping[i];
2127 if (!bitmap_bit_p (si->visited, w))
2128 label_visit (graph, si, w);
2130 /* Skip unused edges */
2131 if (w == n || graph->pointer_label[w] == 0)
2132 continue;
2134 if (graph->points_to[w])
2136 if (!graph->points_to[n])
2138 if (first_pred == -1U)
2139 first_pred = w;
2140 else
2142 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2143 bitmap_ior (graph->points_to[n],
2144 graph->points_to[first_pred],
2145 graph->points_to[w]);
2148 else
2149 bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
2153 /* Indirect nodes get fresh variables and a new pointer equiv class. */
2154 if (!bitmap_bit_p (graph->direct_nodes, n))
2156 if (!graph->points_to[n])
2158 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2159 if (first_pred != -1U)
2160 bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
2162 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2163 graph->pointer_label[n] = pointer_equiv_class++;
2164 equiv_class_label_t ecl;
2165 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2166 graph->points_to[n]);
2167 ecl->equivalence_class = graph->pointer_label[n];
2168 return;
2171 /* If there was only a single non-empty predecessor the pointer equiv
2172 class is the same. */
2173 if (!graph->points_to[n])
2175 if (first_pred != -1U)
2177 graph->pointer_label[n] = graph->pointer_label[first_pred];
2178 graph->points_to[n] = graph->points_to[first_pred];
2180 return;
2183 if (!bitmap_empty_p (graph->points_to[n]))
2185 equiv_class_label_t ecl;
2186 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2187 graph->points_to[n]);
2188 if (ecl->equivalence_class == 0)
2189 ecl->equivalence_class = pointer_equiv_class++;
2190 else
2192 BITMAP_FREE (graph->points_to[n]);
2193 graph->points_to[n] = ecl->labels;
2195 graph->pointer_label[n] = ecl->equivalence_class;
2199 /* Print the pred graph in dot format. */
2201 static void
2202 dump_pred_graph (struct scc_info *si, FILE *file)
2204 unsigned int i;
2206 /* Only print the graph if it has already been initialized: */
2207 if (!graph)
2208 return;
2210 /* Prints the header of the dot file: */
2211 fprintf (file, "strict digraph {\n");
2212 fprintf (file, " node [\n shape = box\n ]\n");
2213 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
2214 fprintf (file, "\n // List of nodes and complex constraints in "
2215 "the constraint graph:\n");
2217 /* The next lines print the nodes in the graph together with the
2218 complex constraints attached to them. */
2219 for (i = 1; i < graph->size; i++)
2221 if (i == FIRST_REF_NODE)
2222 continue;
2223 if (si->node_mapping[i] != i)
2224 continue;
2225 if (i < FIRST_REF_NODE)
2226 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2227 else
2228 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2229 if (graph->points_to[i]
2230 && !bitmap_empty_p (graph->points_to[i]))
2232 if (i < FIRST_REF_NODE)
2233 fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2234 else
2235 fprintf (file, "[label=\"*%s = {",
2236 get_varinfo (i - FIRST_REF_NODE)->name);
2237 unsigned j;
2238 bitmap_iterator bi;
2239 EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2240 fprintf (file, " %d", j);
2241 fprintf (file, " }\"]");
2243 fprintf (file, ";\n");
2246 /* Go over the edges. */
2247 fprintf (file, "\n // Edges in the constraint graph:\n");
2248 for (i = 1; i < graph->size; i++)
2250 unsigned j;
2251 bitmap_iterator bi;
2252 if (si->node_mapping[i] != i)
2253 continue;
2254 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2256 unsigned from = si->node_mapping[j];
2257 if (from < FIRST_REF_NODE)
2258 fprintf (file, "\"%s\"", get_varinfo (from)->name);
2259 else
2260 fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2261 fprintf (file, " -> ");
2262 if (i < FIRST_REF_NODE)
2263 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2264 else
2265 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2266 fprintf (file, ";\n");
2270 /* Prints the tail of the dot file. */
2271 fprintf (file, "}\n");
2274 /* Perform offline variable substitution, discovering equivalence
2275 classes, and eliminating non-pointer variables. */
2277 static struct scc_info *
2278 perform_var_substitution (constraint_graph_t graph)
2280 unsigned int i;
2281 unsigned int size = graph->size;
2282 scc_info *si = new scc_info (size);
2284 bitmap_obstack_initialize (&iteration_obstack);
2285 pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
2286 location_equiv_class_table
2287 = new hash_table<equiv_class_hasher> (511);
2288 pointer_equiv_class = 1;
2289 location_equiv_class = 1;
2291 /* Condense the nodes, which means to find SCC's, count incoming
2292 predecessors, and unite nodes in SCC's. */
2293 for (i = 1; i < FIRST_REF_NODE; i++)
2294 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2295 condense_visit (graph, si, si->node_mapping[i]);
2297 if (dump_file && (dump_flags & TDF_GRAPH))
2299 fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
2300 "in dot format:\n");
2301 dump_pred_graph (si, dump_file);
2302 fprintf (dump_file, "\n\n");
2305 bitmap_clear (si->visited);
2306 /* Actually label the nodes for pointer equivalences. */
2307 for (i = 1; i < FIRST_REF_NODE; i++)
2308 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2309 label_visit (graph, si, si->node_mapping[i]);
2311 /* Calculate location equivalence labels. */
2312 for (i = 1; i < FIRST_REF_NODE; i++)
2314 bitmap pointed_by;
2315 bitmap_iterator bi;
2316 unsigned int j;
2318 if (!graph->pointed_by[i])
2319 continue;
2320 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2322 /* Translate the pointed-by mapping for pointer equivalence
2323 labels. */
2324 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2326 bitmap_set_bit (pointed_by,
2327 graph->pointer_label[si->node_mapping[j]]);
2329 /* The original pointed_by is now dead. */
2330 BITMAP_FREE (graph->pointed_by[i]);
2332 /* Look up the location equivalence label if one exists, or make
2333 one otherwise. */
2334 equiv_class_label_t ecl;
2335 ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
2336 if (ecl->equivalence_class == 0)
2337 ecl->equivalence_class = location_equiv_class++;
2338 else
2340 if (dump_file && (dump_flags & TDF_DETAILS))
2341 fprintf (dump_file, "Found location equivalence for node %s\n",
2342 get_varinfo (i)->name);
2343 BITMAP_FREE (pointed_by);
2345 graph->loc_label[i] = ecl->equivalence_class;
2349 if (dump_file && (dump_flags & TDF_DETAILS))
2350 for (i = 1; i < FIRST_REF_NODE; i++)
2352 unsigned j = si->node_mapping[i];
2353 if (j != i)
2355 fprintf (dump_file, "%s node id %d ",
2356 bitmap_bit_p (graph->direct_nodes, i)
2357 ? "Direct" : "Indirect", i);
2358 if (i < FIRST_REF_NODE)
2359 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2360 else
2361 fprintf (dump_file, "\"*%s\"",
2362 get_varinfo (i - FIRST_REF_NODE)->name);
2363 fprintf (dump_file, " mapped to SCC leader node id %d ", j);
2364 if (j < FIRST_REF_NODE)
2365 fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
2366 else
2367 fprintf (dump_file, "\"*%s\"\n",
2368 get_varinfo (j - FIRST_REF_NODE)->name);
2370 else
2372 fprintf (dump_file,
2373 "Equivalence classes for %s node id %d ",
2374 bitmap_bit_p (graph->direct_nodes, i)
2375 ? "direct" : "indirect", i);
2376 if (i < FIRST_REF_NODE)
2377 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2378 else
2379 fprintf (dump_file, "\"*%s\"",
2380 get_varinfo (i - FIRST_REF_NODE)->name);
2381 fprintf (dump_file,
2382 ": pointer %d, location %d\n",
2383 graph->pointer_label[i], graph->loc_label[i]);
2387 /* Quickly eliminate our non-pointer variables. */
2389 for (i = 1; i < FIRST_REF_NODE; i++)
2391 unsigned int node = si->node_mapping[i];
2393 if (graph->pointer_label[node] == 0)
2395 if (dump_file && (dump_flags & TDF_DETAILS))
2396 fprintf (dump_file,
2397 "%s is a non-pointer variable, eliminating edges.\n",
2398 get_varinfo (node)->name);
2399 stats.nonpointer_vars++;
2400 clear_edges_for_node (graph, node);
2404 return si;
2407 /* Free information that was only necessary for variable
2408 substitution. */
2410 static void
2411 free_var_substitution_info (struct scc_info *si)
2413 delete si;
2414 free (graph->pointer_label);
2415 free (graph->loc_label);
2416 free (graph->pointed_by);
2417 free (graph->points_to);
2418 free (graph->eq_rep);
2419 sbitmap_free (graph->direct_nodes);
2420 delete pointer_equiv_class_table;
2421 pointer_equiv_class_table = NULL;
2422 delete location_equiv_class_table;
2423 location_equiv_class_table = NULL;
2424 bitmap_obstack_release (&iteration_obstack);
2427 /* Return an existing node that is equivalent to NODE, which has
2428 equivalence class LABEL, if one exists. Return NODE otherwise. */
2430 static unsigned int
2431 find_equivalent_node (constraint_graph_t graph,
2432 unsigned int node, unsigned int label)
2434 /* If the address version of this variable is unused, we can
2435 substitute it for anything else with the same label.
2436 Otherwise, we know the pointers are equivalent, but not the
2437 locations, and we can unite them later. */
2439 if (!bitmap_bit_p (graph->address_taken, node))
2441 gcc_checking_assert (label < graph->size);
2443 if (graph->eq_rep[label] != -1)
2445 /* Unify the two variables since we know they are equivalent. */
2446 if (unite (graph->eq_rep[label], node))
2447 unify_nodes (graph, graph->eq_rep[label], node, false);
2448 return graph->eq_rep[label];
2450 else
2452 graph->eq_rep[label] = node;
2453 graph->pe_rep[label] = node;
2456 else
2458 gcc_checking_assert (label < graph->size);
2459 graph->pe[node] = label;
2460 if (graph->pe_rep[label] == -1)
2461 graph->pe_rep[label] = node;
2464 return node;
2467 /* Unite pointer equivalent but not location equivalent nodes in
2468 GRAPH. This may only be performed once variable substitution is
2469 finished. */
2471 static void
2472 unite_pointer_equivalences (constraint_graph_t graph)
2474 unsigned int i;
2476 /* Go through the pointer equivalences and unite them to their
2477 representative, if they aren't already. */
2478 for (i = 1; i < FIRST_REF_NODE; i++)
2480 unsigned int label = graph->pe[i];
2481 if (label)
2483 int label_rep = graph->pe_rep[label];
2485 if (label_rep == -1)
2486 continue;
2488 label_rep = find (label_rep);
2489 if (label_rep >= 0 && unite (label_rep, find (i)))
2490 unify_nodes (graph, label_rep, i, false);
2495 /* Move complex constraints to the GRAPH nodes they belong to. */
2497 static void
2498 move_complex_constraints (constraint_graph_t graph)
2500 int i;
2501 constraint_t c;
2503 FOR_EACH_VEC_ELT (constraints, i, c)
2505 if (c)
2507 struct constraint_expr lhs = c->lhs;
2508 struct constraint_expr rhs = c->rhs;
2510 if (lhs.type == DEREF)
2512 insert_into_complex (graph, lhs.var, c);
2514 else if (rhs.type == DEREF)
2516 if (!(get_varinfo (lhs.var)->is_special_var))
2517 insert_into_complex (graph, rhs.var, c);
2519 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2520 && (lhs.offset != 0 || rhs.offset != 0))
2522 insert_into_complex (graph, rhs.var, c);
2529 /* Optimize and rewrite complex constraints while performing
2530 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2531 result of perform_variable_substitution. */
2533 static void
2534 rewrite_constraints (constraint_graph_t graph,
2535 struct scc_info *si)
2537 int i;
2538 constraint_t c;
2540 if (flag_checking)
2542 for (unsigned int j = 0; j < graph->size; j++)
2543 gcc_assert (find (j) == j);
2546 FOR_EACH_VEC_ELT (constraints, i, c)
2548 struct constraint_expr lhs = c->lhs;
2549 struct constraint_expr rhs = c->rhs;
2550 unsigned int lhsvar = find (lhs.var);
2551 unsigned int rhsvar = find (rhs.var);
2552 unsigned int lhsnode, rhsnode;
2553 unsigned int lhslabel, rhslabel;
2555 lhsnode = si->node_mapping[lhsvar];
2556 rhsnode = si->node_mapping[rhsvar];
2557 lhslabel = graph->pointer_label[lhsnode];
2558 rhslabel = graph->pointer_label[rhsnode];
2560 /* See if it is really a non-pointer variable, and if so, ignore
2561 the constraint. */
2562 if (lhslabel == 0)
2564 if (dump_file && (dump_flags & TDF_DETAILS))
2567 fprintf (dump_file, "%s is a non-pointer variable, "
2568 "ignoring constraint:",
2569 get_varinfo (lhs.var)->name);
2570 dump_constraint (dump_file, c);
2571 fprintf (dump_file, "\n");
2573 constraints[i] = NULL;
2574 continue;
2577 if (rhslabel == 0)
2579 if (dump_file && (dump_flags & TDF_DETAILS))
2582 fprintf (dump_file, "%s is a non-pointer variable, "
2583 "ignoring constraint:",
2584 get_varinfo (rhs.var)->name);
2585 dump_constraint (dump_file, c);
2586 fprintf (dump_file, "\n");
2588 constraints[i] = NULL;
2589 continue;
2592 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2593 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2594 c->lhs.var = lhsvar;
2595 c->rhs.var = rhsvar;
2599 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2600 part of an SCC, false otherwise. */
2602 static bool
2603 eliminate_indirect_cycles (unsigned int node)
2605 if (graph->indirect_cycles[node] != -1
2606 && !bitmap_empty_p (get_varinfo (node)->solution))
2608 unsigned int i;
2609 auto_vec<unsigned> queue;
2610 int queuepos;
2611 unsigned int to = find (graph->indirect_cycles[node]);
2612 bitmap_iterator bi;
2614 /* We can't touch the solution set and call unify_nodes
2615 at the same time, because unify_nodes is going to do
2616 bitmap unions into it. */
2618 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2620 if (find (i) == i && i != to)
2622 if (unite (to, i))
2623 queue.safe_push (i);
2627 for (queuepos = 0;
2628 queue.iterate (queuepos, &i);
2629 queuepos++)
2631 unify_nodes (graph, to, i, true);
2633 return true;
2635 return false;
2638 /* Solve the constraint graph GRAPH using our worklist solver.
2639 This is based on the PW* family of solvers from the "Efficient Field
2640 Sensitive Pointer Analysis for C" paper.
2641 It works by iterating over all the graph nodes, processing the complex
2642 constraints and propagating the copy constraints, until everything stops
2643 changing. This corresponds to steps 6-8 in the solving list given above. */
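/* For instance, for a copy edge from node i to node j the loop below
   computes the bits that are new in the solution of i since its last
   visit, iors that delta into the solution of j and re-queues j on the
   changed worklist if anything was added; the complex constraints
   attached to i are re-evaluated against the same delta.  */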
2645 static void
2646 solve_graph (constraint_graph_t graph)
2648 unsigned int size = graph->size;
2649 unsigned int i;
2650 bitmap pts;
2652 changed = BITMAP_ALLOC (NULL);
2654 /* Mark all initial non-collapsed nodes as changed. */
2655 for (i = 1; i < size; i++)
2657 varinfo_t ivi = get_varinfo (i);
2658 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2659 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2660 || graph->complex[i].length () > 0))
2661 bitmap_set_bit (changed, i);
2664 /* Allocate a bitmap to be used to store the changed bits. */
2665 pts = BITMAP_ALLOC (&pta_obstack);
2667 while (!bitmap_empty_p (changed))
2669 unsigned int i;
2670 struct topo_info *ti = init_topo_info ();
2671 stats.iterations++;
2673 bitmap_obstack_initialize (&iteration_obstack);
2675 compute_topo_order (graph, ti);
2677 while (ti->topo_order.length () != 0)
2680 i = ti->topo_order.pop ();
2682 /* If this variable is not a representative, skip it. */
2683 if (find (i) != i)
2684 continue;
2686 /* In certain indirect cycle cases, we may merge this
2687 variable to another. */
2688 if (eliminate_indirect_cycles (i) && find (i) != i)
2689 continue;
2691 /* If the node has changed, we need to process the
2692 complex constraints and outgoing edges again. */
2693 if (bitmap_clear_bit (changed, i))
2695 unsigned int j;
2696 constraint_t c;
2697 bitmap solution;
2698 vec<constraint_t> complex = graph->complex[i];
2699 varinfo_t vi = get_varinfo (i);
2700 bool solution_empty;
2702 /* Compute the changed set of solution bits. If anything
2703 is in the solution just propagate that. */
2704 if (bitmap_bit_p (vi->solution, anything_id))
2706 /* If anything is also in the old solution there is
2707 nothing to do.
2708 ??? But we shouldn't have ended up with "changed" set ... */
2709 if (vi->oldsolution
2710 && bitmap_bit_p (vi->oldsolution, anything_id))
2711 continue;
2712 bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
2714 else if (vi->oldsolution)
2715 bitmap_and_compl (pts, vi->solution, vi->oldsolution);
2716 else
2717 bitmap_copy (pts, vi->solution);
2719 if (bitmap_empty_p (pts))
2720 continue;
2722 if (vi->oldsolution)
2723 bitmap_ior_into (vi->oldsolution, pts);
2724 else
2726 vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
2727 bitmap_copy (vi->oldsolution, pts);
2730 solution = vi->solution;
2731 solution_empty = bitmap_empty_p (solution);
2733 /* Process the complex constraints */
2734 bitmap expanded_pts = NULL;
2735 FOR_EACH_VEC_ELT (complex, j, c)
2737 /* XXX: This is going to unsort the constraints in
2738 some cases, which will occasionally add duplicate
2739 constraints during unification. This does not
2740 affect correctness. */
2741 c->lhs.var = find (c->lhs.var);
2742 c->rhs.var = find (c->rhs.var);
2744 /* The only complex constraint that can change our
2745 solution to non-empty, given an empty solution,
2746 is a constraint where the lhs side is receiving
2747 some set from elsewhere. */
2748 if (!solution_empty || c->lhs.type != DEREF)
2749 do_complex_constraint (graph, c, pts, &expanded_pts);
2751 BITMAP_FREE (expanded_pts);
2753 solution_empty = bitmap_empty_p (solution);
2755 if (!solution_empty)
2757 bitmap_iterator bi;
2758 unsigned eff_escaped_id = find (escaped_id);
2760 /* Propagate solution to all successors. */
2761 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2762 0, j, bi)
2764 bitmap tmp;
2765 bool flag;
2767 unsigned int to = find (j);
2768 tmp = get_varinfo (to)->solution;
2769 flag = false;
2771 /* Don't try to propagate to ourselves. */
2772 if (to == i)
2773 continue;
2775 /* If we propagate from ESCAPED use ESCAPED as
2776 placeholder. */
2777 if (i == eff_escaped_id)
2778 flag = bitmap_set_bit (tmp, escaped_id);
2779 else
2780 flag = bitmap_ior_into (tmp, pts);
2782 if (flag)
2783 bitmap_set_bit (changed, to);
2788 free_topo_info (ti);
2789 bitmap_obstack_release (&iteration_obstack);
2792 BITMAP_FREE (pts);
2793 BITMAP_FREE (changed);
2794 bitmap_obstack_release (&oldpta_obstack);
2797 /* Map from trees to variable infos. */
2798 static hash_map<tree, varinfo_t> *vi_for_tree;
2801 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2803 static void
2804 insert_vi_for_tree (tree t, varinfo_t vi)
2806 gcc_assert (vi);
2807 gcc_assert (!vi_for_tree->put (t, vi));
2810 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2811 exist in the map, return NULL, otherwise, return the varinfo we found. */
2813 static varinfo_t
2814 lookup_vi_for_tree (tree t)
2816 varinfo_t *slot = vi_for_tree->get (t);
2817 if (slot == NULL)
2818 return NULL;
2820 return *slot;
2823 /* Return a printable name for DECL */
2825 static const char *
2826 alias_get_name (tree decl)
2828 const char *res = NULL;
2829 char *temp;
2831 if (!dump_file)
2832 return "NULL";
2834 if (TREE_CODE (decl) == SSA_NAME)
2836 res = get_name (decl);
2837 if (res)
2838 temp = xasprintf ("%s_%u", res, SSA_NAME_VERSION (decl));
2839 else
2840 temp = xasprintf ("_%u", SSA_NAME_VERSION (decl));
2841 res = ggc_strdup (temp);
2842 free (temp);
2844 else if (DECL_P (decl))
2846 if (DECL_ASSEMBLER_NAME_SET_P (decl))
2847 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2848 else
2850 res = get_name (decl);
2851 if (!res)
2853 temp = xasprintf ("D.%u", DECL_UID (decl));
2854 res = ggc_strdup (temp);
2855 free (temp);
2859 if (res != NULL)
2860 return res;
2862 return "NULL";
2865 /* Find the variable id for tree T in the map.
2866 If T doesn't exist in the map, create an entry for it and return it. */
2868 static varinfo_t
2869 get_vi_for_tree (tree t)
2871 varinfo_t *slot = vi_for_tree->get (t);
2872 if (slot == NULL)
2874 unsigned int id = create_variable_info_for (t, alias_get_name (t), false);
2875 return get_varinfo (id);
2878 return *slot;
2881 /* Get a scalar constraint expression for a new temporary variable. */
2883 static struct constraint_expr
2884 new_scalar_tmp_constraint_exp (const char *name, bool add_id)
2886 struct constraint_expr tmp;
2887 varinfo_t vi;
2889 vi = new_var_info (NULL_TREE, name, add_id);
2890 vi->offset = 0;
2891 vi->size = -1;
2892 vi->fullsize = -1;
2893 vi->is_full_var = 1;
2895 tmp.var = vi->id;
2896 tmp.type = SCALAR;
2897 tmp.offset = 0;
2899 return tmp;
2902 /* Get a constraint expression vector from an SSA_VAR_P node.
2903 If address_p is true, the result will have its address taken. */
2905 static void
2906 get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
2908 struct constraint_expr cexpr;
2909 varinfo_t vi;
2911 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2912 gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
2914 /* For parameters, get at the points-to set for the actual parm
2915 decl. */
2916 if (TREE_CODE (t) == SSA_NAME
2917 && SSA_NAME_IS_DEFAULT_DEF (t)
2918 && (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2919 || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL))
2921 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2922 return;
2925 /* For global variables resort to the alias target. */
2926 if (VAR_P (t) && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
2928 varpool_node *node = varpool_node::get (t);
2929 if (node && node->alias && node->analyzed)
2931 node = node->ultimate_alias_target ();
2932 /* Canonicalize the PT uid of all aliases to the ultimate target.
2933 ??? Hopefully the set of aliases can't change in a way that
2934 changes the ultimate alias target. */
2935 gcc_assert ((! DECL_PT_UID_SET_P (node->decl)
2936 || DECL_PT_UID (node->decl) == DECL_UID (node->decl))
2937 && (! DECL_PT_UID_SET_P (t)
2938 || DECL_PT_UID (t) == DECL_UID (node->decl)));
2939 DECL_PT_UID (t) = DECL_UID (node->decl);
2940 t = node->decl;
2943 /* If this decl may bind to NULL, note that. */
2944 if (address_p
2945 && (! node || ! node->nonzero_address ()))
2947 cexpr.var = nothing_id;
2948 cexpr.type = SCALAR;
2949 cexpr.offset = 0;
2950 results->safe_push (cexpr);
2954 vi = get_vi_for_tree (t);
2955 cexpr.var = vi->id;
2956 cexpr.type = SCALAR;
2957 cexpr.offset = 0;
2959 /* If we are not taking the address of the constraint expr, add all
2960 sub-fields of the variable as well. */
2961 if (!address_p
2962 && !vi->is_full_var)
2964 for (; vi; vi = vi_next (vi))
2966 cexpr.var = vi->id;
2967 results->safe_push (cexpr);
2969 return;
2972 results->safe_push (cexpr);
2975 /* Process constraint T, performing various simplifications and then
2976 adding it to our list of overall constraints. */
2978 static void
2979 process_constraint (constraint_t t)
2981 struct constraint_expr rhs = t->rhs;
2982 struct constraint_expr lhs = t->lhs;
2984 gcc_assert (rhs.var < varmap.length ());
2985 gcc_assert (lhs.var < varmap.length ());
2987 /* If we didn't get any useful constraint from the lhs we get
2988 &ANYTHING as fallback from get_constraint_for. Deal with
2989 it here by turning it into *ANYTHING. */
2990 if (lhs.type == ADDRESSOF
2991 && lhs.var == anything_id)
2992 lhs.type = DEREF;
2994 /* ADDRESSOF on the lhs is invalid. */
2995 gcc_assert (lhs.type != ADDRESSOF);
2997 /* We shouldn't add constraints from things that cannot have pointers.
2998 It's not completely trivial to avoid in the callers, so do it here. */
2999 if (rhs.type != ADDRESSOF
3000 && !get_varinfo (rhs.var)->may_have_pointers)
3001 return;
3003 /* Likewise adding to the solution of a non-pointer var isn't useful. */
3004 if (!get_varinfo (lhs.var)->may_have_pointers)
3005 return;
3007 /* This can happen in our IR with things like n->a = *p */
3008 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
3010 /* Split into tmp = *rhs, *lhs = tmp */
3011 struct constraint_expr tmplhs;
3012 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp", true);
3013 process_constraint (new_constraint (tmplhs, rhs));
3014 process_constraint (new_constraint (lhs, tmplhs));
3016 else if ((rhs.type != SCALAR || rhs.offset != 0) && lhs.type == DEREF)
3018 /* Split into tmp = &rhs, *lhs = tmp */
3019 struct constraint_expr tmplhs;
3020 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp", true);
3021 process_constraint (new_constraint (tmplhs, rhs));
3022 process_constraint (new_constraint (lhs, tmplhs));
3024 else
3026 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
3027 constraints.safe_push (t);
3032 /* Return the position, in bits, of FIELD_DECL from the beginning of its
3033 structure. */
3035 static HOST_WIDE_INT
3036 bitpos_of_field (const tree fdecl)
3038 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3039 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3040 return -1;
3042 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3043 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
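/* For example, for the second member of struct { int a; int b; } this
   returns bit position 32 on a target with 32-bit int and 8-bit units,
   however the position happens to be split between DECL_FIELD_OFFSET
   and DECL_FIELD_BIT_OFFSET.  */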
3047 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
3048 resulting constraint expressions in *RESULTS. */
3050 static void
3051 get_constraint_for_ptr_offset (tree ptr, tree offset,
3052 vec<ce_s> *results)
3054 struct constraint_expr c;
3055 unsigned int j, n;
3056 HOST_WIDE_INT rhsoffset;
3058 /* If we do not do field-sensitive PTA adding offsets to pointers
3059 does not change the points-to solution. */
3060 if (!use_field_sensitive)
3062 get_constraint_for_rhs (ptr, results);
3063 return;
3066 /* If the offset is not an integer constant that, sign-extended, fits
3067 in a HOST_WIDE_INT, we have to fall back to a conservative
3068 solution which includes all sub-fields of all pointed-to
3069 variables of ptr. */
3070 if (offset == NULL_TREE
3071 || TREE_CODE (offset) != INTEGER_CST)
3072 rhsoffset = UNKNOWN_OFFSET;
3073 else
3075 /* Sign-extend the offset. */
3076 offset_int soffset = offset_int::from (offset, SIGNED);
3077 if (!wi::fits_shwi_p (soffset))
3078 rhsoffset = UNKNOWN_OFFSET;
3079 else
3081 /* Make sure the bit-offset also fits. */
3082 HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
3083 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
3084 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3085 rhsoffset = UNKNOWN_OFFSET;
3089 get_constraint_for_rhs (ptr, results);
3090 if (rhsoffset == 0)
3091 return;
3093 /* As we are eventually appending to the solution do not use
3094 vec::iterate here. */
3095 n = results->length ();
3096 for (j = 0; j < n; j++)
3098 varinfo_t curr;
3099 c = (*results)[j];
3100 curr = get_varinfo (c.var);
3102 if (c.type == ADDRESSOF
3103 /* If this varinfo represents a full variable just use it. */
3104 && curr->is_full_var)
3106 else if (c.type == ADDRESSOF
3107 /* If we do not know the offset add all subfields. */
3108 && rhsoffset == UNKNOWN_OFFSET)
3110 varinfo_t temp = get_varinfo (curr->head);
3113 struct constraint_expr c2;
3114 c2.var = temp->id;
3115 c2.type = ADDRESSOF;
3116 c2.offset = 0;
3117 if (c2.var != c.var)
3118 results->safe_push (c2);
3119 temp = vi_next (temp);
3121 while (temp);
3123 else if (c.type == ADDRESSOF)
3125 varinfo_t temp;
3126 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3128 /* If curr->offset + rhsoffset is less than zero adjust it. */
3129 if (rhsoffset < 0
3130 && curr->offset < offset)
3131 offset = 0;
3133 /* We have to include all fields that overlap the current
3134 field shifted by rhsoffset. And we include at least
3135 the last or the first field of the variable so that
3136 reachability of off-bound addresses, in particular &object + 1,
3137 remains conservatively correct. */
3138 temp = first_or_preceding_vi_for_offset (curr, offset);
3139 c.var = temp->id;
3140 c.offset = 0;
3141 temp = vi_next (temp);
3142 while (temp
3143 && temp->offset < offset + curr->size)
3145 struct constraint_expr c2;
3146 c2.var = temp->id;
3147 c2.type = ADDRESSOF;
3148 c2.offset = 0;
3149 results->safe_push (c2);
3150 temp = vi_next (temp);
3153 else if (c.type == SCALAR)
3155 gcc_assert (c.offset == 0);
3156 c.offset = rhsoffset;
3158 else
3159 /* We shouldn't get any DEREFs here. */
3160 gcc_unreachable ();
3162 (*results)[j] = c;
3167 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3168 If address_p is true the result will have its address taken.
3169 If lhs_p is true then the constraint expression is assumed to be used
3170 as the lhs. */
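/* For example, for an access to s.b with struct { int a; int b; } s and
   a 32-bit int, get_ref_base_and_extent returns s with bitpos 32, and
   for a SCALAR result the loop over the subvariables of s below replaces
   the constraint by the one for the field varinfo overlapping bit 32,
   i.e. the varinfo created for b.  */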
3172 static void
3173 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3174 bool address_p, bool lhs_p)
3176 tree orig_t = t;
3177 HOST_WIDE_INT bitsize = -1;
3178 HOST_WIDE_INT bitmaxsize = -1;
3179 HOST_WIDE_INT bitpos;
3180 bool reverse;
3181 tree forzero;
3183 /* Some people like to do cute things like take the address of
3184 &0->a.b */
3185 forzero = t;
3186 while (handled_component_p (forzero)
3187 || INDIRECT_REF_P (forzero)
3188 || TREE_CODE (forzero) == MEM_REF)
3189 forzero = TREE_OPERAND (forzero, 0);
3191 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3193 struct constraint_expr temp;
3195 temp.offset = 0;
3196 temp.var = integer_id;
3197 temp.type = SCALAR;
3198 results->safe_push (temp);
3199 return;
3202 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse);
3204 /* We can end up here for component references on a
3205 VIEW_CONVERT_EXPR <>(&foobar) or things like a
3206 BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>. So for
3207 symbolic constants simply give up. */
3208 if (TREE_CODE (t) == ADDR_EXPR)
3210 constraint_expr result;
3211 result.type = SCALAR;
3212 result.var = anything_id;
3213 result.offset = 0;
3214 results->safe_push (result);
3215 return;
3218 /* Pretend to take the address of the base, we'll take care of
3219 adding the required subset of sub-fields below. */
3220 get_constraint_for_1 (t, results, true, lhs_p);
3221 /* Strip off nothing_id. */
3222 if (results->length () == 2)
3224 gcc_assert ((*results)[0].var == nothing_id);
3225 results->unordered_remove (0);
3227 gcc_assert (results->length () == 1);
3228 struct constraint_expr &result = results->last ();
3230 if (result.type == SCALAR
3231 && get_varinfo (result.var)->is_full_var)
3232 /* For single-field vars do not bother about the offset. */
3233 result.offset = 0;
3234 else if (result.type == SCALAR)
3236 /* In languages like C, you can access one past the end of an
3237 array. You aren't allowed to dereference it, so we can
3238 ignore this constraint. When we handle pointer subtraction,
3239 we may have to do something cute here. */
3241 if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
3242 && bitmaxsize != 0)
3244 /* It's also not true that the constraint will actually start at the
3245 right offset, it may start in some padding. We only care about
3246 setting the constraint to the first actual field it touches, so
3247 walk to find it. */
3248 struct constraint_expr cexpr = result;
3249 varinfo_t curr;
3250 results->pop ();
3251 cexpr.offset = 0;
3252 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3254 if (ranges_overlap_p (curr->offset, curr->size,
3255 bitpos, bitmaxsize))
3257 cexpr.var = curr->id;
3258 results->safe_push (cexpr);
3259 if (address_p)
3260 break;
3263 /* If we are going to take the address of this field then
3264 to be able to compute reachability correctly add at least
3265 the last field of the variable. */
3266 if (address_p && results->length () == 0)
3268 curr = get_varinfo (cexpr.var);
3269 while (curr->next != 0)
3270 curr = vi_next (curr);
3271 cexpr.var = curr->id;
3272 results->safe_push (cexpr);
3274 else if (results->length () == 0)
3275 /* Assert that we found *some* field there. The user couldn't be
3276 accessing *only* padding. */
3277 /* Still the user could access one past the end of an array
3278 embedded in a struct resulting in accessing *only* padding. */
3279 /* Or accessing only padding via type-punning to a type
3280 that has a field just in padding space. */
3282 cexpr.type = SCALAR;
3283 cexpr.var = anything_id;
3284 cexpr.offset = 0;
3285 results->safe_push (cexpr);
3288 else if (bitmaxsize == 0)
3290 if (dump_file && (dump_flags & TDF_DETAILS))
3291 fprintf (dump_file, "Access to zero-sized part of variable, "
3292 "ignoring\n");
3294 else
3295 if (dump_file && (dump_flags & TDF_DETAILS))
3296 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3298 else if (result.type == DEREF)
3300 /* If we do not know exactly where the access goes say so. Note
3301 that only for non-structure accesses do we know that we access
3302 at most one subfield of any variable. */
3303 if (bitpos == -1
3304 || bitsize != bitmaxsize
3305 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3306 || result.offset == UNKNOWN_OFFSET)
3307 result.offset = UNKNOWN_OFFSET;
3308 else
3309 result.offset += bitpos;
3311 else if (result.type == ADDRESSOF)
3313 /* We can end up here for component references on constants like
3314 VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i]. */
3315 result.type = SCALAR;
3316 result.var = anything_id;
3317 result.offset = 0;
3319 else
3320 gcc_unreachable ();
3324 /* Dereference the constraint expression CONS, and return the result.
3325 DEREF (ADDRESSOF) = SCALAR
3326 DEREF (SCALAR) = DEREF
3327 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3328 This is needed so that we can handle dereferencing DEREF constraints. */
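/* So for a double dereference like **p the constraints for p are first
   rewritten from SCALAR to DEREF (standing for *p); applying do_deref
   again introduces a "dereftmp" temporary with dereftmp = *p and leaves
   a DEREF of dereftmp, which then stands for **p.  */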
3330 static void
3331 do_deref (vec<ce_s> *constraints)
3333 struct constraint_expr *c;
3334 unsigned int i = 0;
3336 FOR_EACH_VEC_ELT (*constraints, i, c)
3338 if (c->type == SCALAR)
3339 c->type = DEREF;
3340 else if (c->type == ADDRESSOF)
3341 c->type = SCALAR;
3342 else if (c->type == DEREF)
3344 struct constraint_expr tmplhs;
3345 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp", true);
3346 process_constraint (new_constraint (tmplhs, *c));
3347 c->var = tmplhs.var;
3349 else
3350 gcc_unreachable ();
3354 /* Given a tree T, return the constraint expression for taking the
3355 address of it. */
3357 static void
3358 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3360 struct constraint_expr *c;
3361 unsigned int i;
3363 get_constraint_for_1 (t, results, true, true);
3365 FOR_EACH_VEC_ELT (*results, i, c)
3367 if (c->type == DEREF)
3368 c->type = SCALAR;
3369 else
3370 c->type = ADDRESSOF;
3374 /* Given a tree T, return the constraint expression for it. */
3376 static void
3377 get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
3378 bool lhs_p)
3380 struct constraint_expr temp;
3382 /* x = integer is all glommed to a single variable, which doesn't
3383 point to anything by itself. That is, of course, unless it is an
3384 integer constant being treated as a pointer, in which case, we
3385 will return that this is really the addressof anything. This
3386 happens below, since it will fall into the default case. The only
3387 case we know something about an integer treated like a pointer is
3388 when it is the NULL pointer, and then we just say it points to
3389 NULL.
3391 Do not do that if -fno-delete-null-pointer-checks though, because
3392 in that case *NULL does not fail, so it _should_ alias *anything.
3393 It is not worth adding a new option or renaming the existing one,
3394 since this case is relatively obscure. */
3395 if ((TREE_CODE (t) == INTEGER_CST
3396 && integer_zerop (t))
3397 /* The only valid CONSTRUCTORs in gimple with pointer typed
3398 elements are zero-initializer. But in IPA mode we also
3399 process global initializers, so verify at least. */
3400 || (TREE_CODE (t) == CONSTRUCTOR
3401 && CONSTRUCTOR_NELTS (t) == 0))
3403 if (flag_delete_null_pointer_checks)
3404 temp.var = nothing_id;
3405 else
3406 temp.var = nonlocal_id;
3407 temp.type = ADDRESSOF;
3408 temp.offset = 0;
3409 results->safe_push (temp);
3410 return;
3413 /* String constants are read-only, ideally we'd have a CONST_DECL
3414 for those. */
3415 if (TREE_CODE (t) == STRING_CST)
3417 temp.var = string_id;
3418 temp.type = SCALAR;
3419 temp.offset = 0;
3420 results->safe_push (temp);
3421 return;
3424 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3426 case tcc_expression:
3428 switch (TREE_CODE (t))
3430 case ADDR_EXPR:
3431 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3432 return;
3433 default:;
3435 break;
3437 case tcc_reference:
3439 switch (TREE_CODE (t))
3441 case MEM_REF:
3443 struct constraint_expr cs;
3444 varinfo_t vi, curr;
3445 get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3446 TREE_OPERAND (t, 1), results);
3447 do_deref (results);
3449 /* If we are not taking the address then make sure to process
3450 all subvariables we might access. */
3451 if (address_p)
3452 return;
3454 cs = results->last ();
3455 if (cs.type == DEREF
3456 && type_can_have_subvars (TREE_TYPE (t)))
3458 /* For dereferences this means we have to defer it
3459 to solving time. */
3460 results->last ().offset = UNKNOWN_OFFSET;
3461 return;
3463 if (cs.type != SCALAR)
3464 return;
3466 vi = get_varinfo (cs.var);
3467 curr = vi_next (vi);
3468 if (!vi->is_full_var
3469 && curr)
3471 unsigned HOST_WIDE_INT size;
3472 if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
3473 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
3474 else
3475 size = -1;
3476 for (; curr; curr = vi_next (curr))
3478 if (curr->offset - vi->offset < size)
3480 cs.var = curr->id;
3481 results->safe_push (cs);
3483 else
3484 break;
3487 return;
3489 case ARRAY_REF:
3490 case ARRAY_RANGE_REF:
3491 case COMPONENT_REF:
3492 case IMAGPART_EXPR:
3493 case REALPART_EXPR:
3494 case BIT_FIELD_REF:
3495 get_constraint_for_component_ref (t, results, address_p, lhs_p);
3496 return;
3497 case VIEW_CONVERT_EXPR:
3498 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
3499 lhs_p);
3500 return;
3501 /* We are missing handling for TARGET_MEM_REF here. */
3502 default:;
3504 break;
3506 case tcc_exceptional:
3508 switch (TREE_CODE (t))
3510 case SSA_NAME:
3512 get_constraint_for_ssa_var (t, results, address_p);
3513 return;
3515 case CONSTRUCTOR:
3517 unsigned int i;
3518 tree val;
3519 auto_vec<ce_s> tmp;
3520 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3522 struct constraint_expr *rhsp;
3523 unsigned j;
3524 get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3525 FOR_EACH_VEC_ELT (tmp, j, rhsp)
3526 results->safe_push (*rhsp);
3527 tmp.truncate (0);
3529 /* We do not know whether the constructor was complete,
3530 so technically we have to add &NOTHING or &ANYTHING
3531 like we do for an empty constructor as well. */
3532 return;
3534 default:;
3536 break;
3538 case tcc_declaration:
3540 get_constraint_for_ssa_var (t, results, address_p);
3541 return;
3543 case tcc_constant:
3545 /* We cannot refer to automatic variables through constants. */
3546 temp.type = ADDRESSOF;
3547 temp.var = nonlocal_id;
3548 temp.offset = 0;
3549 results->safe_push (temp);
3550 return;
3552 default:;
3555 /* The default fallback is a constraint from anything. */
3556 temp.type = ADDRESSOF;
3557 temp.var = anything_id;
3558 temp.offset = 0;
3559 results->safe_push (temp);
3562 /* Given a gimple tree T, return the constraint expression vector for it. */
3564 static void
3565 get_constraint_for (tree t, vec<ce_s> *results)
3567 gcc_assert (results->length () == 0);
3569 get_constraint_for_1 (t, results, false, true);
3572 /* Given a gimple tree T, return the constraint expression vector for it
3573 to be used as the rhs of a constraint. */
3575 static void
3576 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3578 gcc_assert (results->length () == 0);
3580 get_constraint_for_1 (t, results, false, false);
3584 /* Efficiently generates constraints from all entries in *RHSC to all
3585 entries in *LHSC. */
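/* For example, with 3 entries in LHSC and 4 entries in RHSC, routing
   everything through the "allalltmp" temporary emits 4 + 3 copy
   constraints instead of the 12 that the direct cross product would
   produce.  */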
3587 static void
3588 process_all_all_constraints (vec<ce_s> lhsc,
3589 vec<ce_s> rhsc)
3591 struct constraint_expr *lhsp, *rhsp;
3592 unsigned i, j;
3594 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3596 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3597 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3598 process_constraint (new_constraint (*lhsp, *rhsp));
3600 else
3602 struct constraint_expr tmp;
3603 tmp = new_scalar_tmp_constraint_exp ("allalltmp", true);
3604 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3605 process_constraint (new_constraint (tmp, *rhsp));
3606 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3607 process_constraint (new_constraint (*lhsp, tmp));
3611 /* Handle aggregate copies by expanding into copies of the respective
3612 fields of the structures. */
3614 static void
3615 do_structure_copy (tree lhsop, tree rhsop)
3617 struct constraint_expr *lhsp, *rhsp;
3618 auto_vec<ce_s> lhsc;
3619 auto_vec<ce_s> rhsc;
3620 unsigned j;
3622 get_constraint_for (lhsop, &lhsc);
3623 get_constraint_for_rhs (rhsop, &rhsc);
3624 lhsp = &lhsc[0];
3625 rhsp = &rhsc[0];
3626 if (lhsp->type == DEREF
3627 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3628 || rhsp->type == DEREF)
3630 if (lhsp->type == DEREF)
3632 gcc_assert (lhsc.length () == 1);
3633 lhsp->offset = UNKNOWN_OFFSET;
3635 if (rhsp->type == DEREF)
3637 gcc_assert (rhsc.length () == 1);
3638 rhsp->offset = UNKNOWN_OFFSET;
3640 process_all_all_constraints (lhsc, rhsc);
3642 else if (lhsp->type == SCALAR
3643 && (rhsp->type == SCALAR
3644 || rhsp->type == ADDRESSOF))
3646 HOST_WIDE_INT lhssize, lhsmaxsize, lhsoffset;
3647 HOST_WIDE_INT rhssize, rhsmaxsize, rhsoffset;
3648 bool reverse;
3649 unsigned k = 0;
3650 get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize,
3651 &reverse);
3652 get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize,
3653 &reverse);
3654 for (j = 0; lhsc.iterate (j, &lhsp);)
3656 varinfo_t lhsv, rhsv;
3657 rhsp = &rhsc[k];
3658 lhsv = get_varinfo (lhsp->var);
3659 rhsv = get_varinfo (rhsp->var);
3660 if (lhsv->may_have_pointers
3661 && (lhsv->is_full_var
3662 || rhsv->is_full_var
3663 || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3664 rhsv->offset + lhsoffset, rhsv->size)))
3665 process_constraint (new_constraint (*lhsp, *rhsp));
3666 if (!rhsv->is_full_var
3667 && (lhsv->is_full_var
3668 || (lhsv->offset + rhsoffset + lhsv->size
3669 > rhsv->offset + lhsoffset + rhsv->size)))
3671 ++k;
3672 if (k >= rhsc.length ())
3673 break;
3675 else
3676 ++j;
3679 else
3680 gcc_unreachable ();
3683 /* Create constraints ID = { rhsc }. */
3685 static void
3686 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3688 struct constraint_expr *c;
3689 struct constraint_expr includes;
3690 unsigned int j;
3692 includes.var = id;
3693 includes.offset = 0;
3694 includes.type = SCALAR;
3696 FOR_EACH_VEC_ELT (rhsc, j, c)
3697 process_constraint (new_constraint (includes, *c));
3700 /* Create a constraint ID = OP. */
3702 static void
3703 make_constraint_to (unsigned id, tree op)
3705 auto_vec<ce_s> rhsc;
3706 get_constraint_for_rhs (op, &rhsc);
3707 make_constraints_to (id, rhsc);
3710 /* Create a constraint ID = &FROM. */
3712 static void
3713 make_constraint_from (varinfo_t vi, int from)
3715 struct constraint_expr lhs, rhs;
3717 lhs.var = vi->id;
3718 lhs.offset = 0;
3719 lhs.type = SCALAR;
3721 rhs.var = from;
3722 rhs.offset = 0;
3723 rhs.type = ADDRESSOF;
3724 process_constraint (new_constraint (lhs, rhs));
3727 /* Create a constraint ID = FROM. */
3729 static void
3730 make_copy_constraint (varinfo_t vi, int from)
3732 struct constraint_expr lhs, rhs;
3734 lhs.var = vi->id;
3735 lhs.offset = 0;
3736 lhs.type = SCALAR;
3738 rhs.var = from;
3739 rhs.offset = 0;
3740 rhs.type = SCALAR;
3741 process_constraint (new_constraint (lhs, rhs));
3744 /* Make constraints necessary to make OP escape. */
3746 static void
3747 make_escape_constraint (tree op)
3749 make_constraint_to (escaped_id, op);
3752 /* Add constraints such that the solution of VI is transitively closed. */
3754 static void
3755 make_transitive_closure_constraints (varinfo_t vi)
3757 struct constraint_expr lhs, rhs;
3759 /* VAR = *(VAR + UNKNOWN); */
3760 lhs.type = SCALAR;
3761 lhs.var = vi->id;
3762 lhs.offset = 0;
3763 rhs.type = DEREF;
3764 rhs.var = vi->id;
3765 rhs.offset = UNKNOWN_OFFSET;
3766 process_constraint (new_constraint (lhs, rhs));
3769 /* Add constraints such that the solution of VI has all subvariables added. */
3771 static void
3772 make_any_offset_constraints (varinfo_t vi)
3774 struct constraint_expr lhs, rhs;
3776 /* VAR = VAR + UNKNOWN; */
3777 lhs.type = SCALAR;
3778 lhs.var = vi->id;
3779 lhs.offset = 0;
3780 rhs.type = SCALAR;
3781 rhs.var = vi->id;
3782 rhs.offset = UNKNOWN_OFFSET;
3783 process_constraint (new_constraint (lhs, rhs));
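/* handle_rhs_call below combines these two helpers for call arguments:
   the any-offset constraint makes sure all subfields of what the
   argument points to end up in the "callarg" solution, and the
   transitive closure constraint additionally pulls in everything
   reachable through further dereferences when the argument is not
   EAF_DIRECT.  */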
3786 /* Temporary storage for fake var decls. */
3787 struct obstack fake_var_decl_obstack;
3789 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3791 static tree
3792 build_fake_var_decl (tree type)
3794 tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
3795 memset (decl, 0, sizeof (struct tree_var_decl));
3796 TREE_SET_CODE (decl, VAR_DECL);
3797 TREE_TYPE (decl) = type;
3798 DECL_UID (decl) = allocate_decl_uid ();
3799 SET_DECL_PT_UID (decl, -1);
3800 layout_decl (decl, 0);
3801 return decl;
3804 /* Create a new artificial heap variable with NAME.
3805 Return the created variable. */
3807 static varinfo_t
3808 make_heapvar (const char *name, bool add_id)
3810 varinfo_t vi;
3811 tree heapvar;
3813 heapvar = build_fake_var_decl (ptr_type_node);
3814 DECL_EXTERNAL (heapvar) = 1;
3816 vi = new_var_info (heapvar, name, add_id);
3817 vi->is_artificial_var = true;
3818 vi->is_heap_var = true;
3819 vi->is_unknown_size_var = true;
3820 vi->offset = 0;
3821 vi->fullsize = ~0;
3822 vi->size = ~0;
3823 vi->is_full_var = true;
3824 insert_vi_for_tree (heapvar, vi);
3826 return vi;
3829 /* Create a new artificial heap variable with NAME and make a
3830 constraint from it to LHS. Set flags according to a tag used
3831 for tracking restrict pointers. */
3833 static varinfo_t
3834 make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id)
3836 varinfo_t vi = make_heapvar (name, add_id);
3837 vi->is_restrict_var = 1;
3838 vi->is_global_var = 1;
3839 vi->may_have_pointers = 1;
3840 make_constraint_from (lhs, vi->id);
3841 return vi;
3844 /* Create a new artificial heap variable with NAME and make a
3845 constraint from it to LHS. Set flags according to a tag used
3846 for tracking restrict pointers and make the artificial heap
3847 point to global memory. */
3849 static varinfo_t
3850 make_constraint_from_global_restrict (varinfo_t lhs, const char *name,
3851 bool add_id)
3853 varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id);
3854 make_copy_constraint (vi, nonlocal_id);
3855 return vi;
3858 /* In IPA mode there are varinfos for different aspects of each
3859 function designator. One for the points-to set of the return
3860 value, one for the variables that are clobbered by the function,
3861 one for its uses and one for each parameter (including a single
3862 glob for remaining variadic arguments). */
3864 enum { fi_clobbers = 1, fi_uses = 2,
3865 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3867 /* Get a constraint for the requested part of a function designator FI
3868 when operating in IPA mode. */
3870 static struct constraint_expr
3871 get_function_part_constraint (varinfo_t fi, unsigned part)
3873 struct constraint_expr c;
3875 gcc_assert (in_ipa_mode);
3877 if (fi->id == anything_id)
3879 /* ??? We probably should have a ANYFN special variable. */
3880 c.var = anything_id;
3881 c.offset = 0;
3882 c.type = SCALAR;
3884 else if (TREE_CODE (fi->decl) == FUNCTION_DECL)
3886 varinfo_t ai = first_vi_for_offset (fi, part);
3887 if (ai)
3888 c.var = ai->id;
3889 else
3890 c.var = anything_id;
3891 c.offset = 0;
3892 c.type = SCALAR;
3894 else
3896 c.var = fi->id;
3897 c.offset = part;
3898 c.type = DEREF;
3901 return c;
3904 /* For non-IPA mode, generate constraints necessary for a call on the
3905 RHS. */
3907 static void
3908 handle_rhs_call (gcall *stmt, vec<ce_s> *results)
3910 struct constraint_expr rhsc;
3911 unsigned i;
3912 bool returns_uses = false;
3914 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3916 tree arg = gimple_call_arg (stmt, i);
3917 int flags = gimple_call_arg_flags (stmt, i);
3919 /* If the argument is not used we can ignore it. */
3920 if (flags & EAF_UNUSED)
3921 continue;
3923 /* As we compute ESCAPED context-insensitive we do not gain
3924 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3925 set. The argument would still get clobbered through the
3926 escape solution. */
3927 if ((flags & EAF_NOCLOBBER)
3928 && (flags & EAF_NOESCAPE))
3930 varinfo_t uses = get_call_use_vi (stmt);
3931 varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
3932 make_constraint_to (tem->id, arg);
3933 make_any_offset_constraints (tem);
3934 if (!(flags & EAF_DIRECT))
3935 make_transitive_closure_constraints (tem);
3936 make_copy_constraint (uses, tem->id);
3937 returns_uses = true;
3939 else if (flags & EAF_NOESCAPE)
3941 struct constraint_expr lhs, rhs;
3942 varinfo_t uses = get_call_use_vi (stmt);
3943 varinfo_t clobbers = get_call_clobber_vi (stmt);
3944 varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
3945 make_constraint_to (tem->id, arg);
3946 make_any_offset_constraints (tem);
3947 if (!(flags & EAF_DIRECT))
3948 make_transitive_closure_constraints (tem);
3949 make_copy_constraint (uses, tem->id);
3950 make_copy_constraint (clobbers, tem->id);
3951 /* Add *tem = nonlocal, do not add *tem = callused as
3952 EAF_NOESCAPE parameters do not escape to other parameters
3953 and all other uses appear in NONLOCAL as well. */
3954 lhs.type = DEREF;
3955 lhs.var = tem->id;
3956 lhs.offset = 0;
3957 rhs.type = SCALAR;
3958 rhs.var = nonlocal_id;
3959 rhs.offset = 0;
3960 process_constraint (new_constraint (lhs, rhs));
3961 returns_uses = true;
3963 else
3964 make_escape_constraint (arg);
3967 /* If we added to the call's uses solution, make sure we account for
3968 pointers to it being returned. */
3969 if (returns_uses)
3971 rhsc.var = get_call_use_vi (stmt)->id;
3972 rhsc.offset = UNKNOWN_OFFSET;
3973 rhsc.type = SCALAR;
3974 results->safe_push (rhsc);
3977 /* The static chain escapes as well. */
3978 if (gimple_call_chain (stmt))
3979 make_escape_constraint (gimple_call_chain (stmt));
3981 /* And if we applied NRV the address of the return slot escapes as well. */
3982 if (gimple_call_return_slot_opt_p (stmt)
3983 && gimple_call_lhs (stmt) != NULL_TREE
3984 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3986 auto_vec<ce_s> tmpc;
3987 struct constraint_expr lhsc, *c;
3988 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
3989 lhsc.var = escaped_id;
3990 lhsc.offset = 0;
3991 lhsc.type = SCALAR;
3992 FOR_EACH_VEC_ELT (tmpc, i, c)
3993 process_constraint (new_constraint (lhsc, *c));
3996 /* Regular functions return nonlocal memory. */
3997 rhsc.var = nonlocal_id;
3998 rhsc.offset = 0;
3999 rhsc.type = SCALAR;
4000 results->safe_push (rhsc);
4003 /* For non-IPA mode, generate constraints necessary for a call
4004 that returns a pointer and assigns it to LHS. This simply makes
4005 the LHS point to global and escaped variables. */
4007 static void
4008 handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> rhsc,
4009 tree fndecl)
4011 auto_vec<ce_s> lhsc;
4013 get_constraint_for (lhs, &lhsc);
4014 /* If the store is to a global decl make sure to
4015 add proper escape constraints. */
4016 lhs = get_base_address (lhs);
4017 if (lhs
4018 && DECL_P (lhs)
4019 && is_global_var (lhs))
4021 struct constraint_expr tmpc;
4022 tmpc.var = escaped_id;
4023 tmpc.offset = 0;
4024 tmpc.type = SCALAR;
4025 lhsc.safe_push (tmpc);
4028 /* If the call returns an argument unmodified, override the rhs
4029 constraints. */
4030 if (flags & ERF_RETURNS_ARG
4031 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
4033 tree arg;
4034 rhsc.create (0);
4035 arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
4036 get_constraint_for (arg, &rhsc);
4037 process_all_all_constraints (lhsc, rhsc);
4038 rhsc.release ();
4040 else if (flags & ERF_NOALIAS)
4042 varinfo_t vi;
4043 struct constraint_expr tmpc;
4044 rhsc.create (0);
4045 vi = make_heapvar ("HEAP", true);
4046 /* We are marking allocated storage local; we deal with it becoming
4047 global by escaping and setting of vars_contains_escaped_heap. */
4048 DECL_EXTERNAL (vi->decl) = 0;
4049 vi->is_global_var = 0;
4050 /* If this is not a real malloc call assume the memory was
4051 initialized and thus may point to global memory. All
4052 builtin functions with the malloc attribute behave in a sane way. */
4053 if (!fndecl
4054 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
4055 make_constraint_from (vi, nonlocal_id);
4056 tmpc.var = vi->id;
4057 tmpc.offset = 0;
4058 tmpc.type = ADDRESSOF;
4059 rhsc.safe_push (tmpc);
4060 process_all_all_constraints (lhsc, rhsc);
4061 rhsc.release ();
4063 else
4064 process_all_all_constraints (lhsc, rhsc);
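/* Illustrative sketch (informal; HEAP stands for the heap variable created
   by make_heapvar above): for a malloc-like call with ERF_NOALIAS, e.g.

     p = my_alloc (n);   // not a BUILT_IN_NORMAL allocation function

   handle_lhs_call generates roughly

     p = &HEAP
     HEAP = NONLOCAL     // storage assumed initialized for non-builtins

   while for ERF_RETURNS_ARG it instead copies the returned argument's
   constraints to the LHS, i.e. p = argN.  */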
4067 /* For non-IPA mode, generate constraints necessary for a call of a
4068 const function that returns a pointer in the statement STMT. */
4070 static void
4071 handle_const_call (gcall *stmt, vec<ce_s> *results)
4073 struct constraint_expr rhsc;
4074 unsigned int k;
4075 bool need_uses = false;
4077 /* Treat nested const functions the same as pure functions as far
4078 as the static chain is concerned. */
4079 if (gimple_call_chain (stmt))
4081 varinfo_t uses = get_call_use_vi (stmt);
4082 make_constraint_to (uses->id, gimple_call_chain (stmt));
4083 need_uses = true;
4086 /* And if we applied NRV the address of the return slot escapes as well. */
4087 if (gimple_call_return_slot_opt_p (stmt)
4088 && gimple_call_lhs (stmt) != NULL_TREE
4089 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
4091 varinfo_t uses = get_call_use_vi (stmt);
4092 auto_vec<ce_s> tmpc;
4093 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
4094 make_constraints_to (uses->id, tmpc);
4095 need_uses = true;
4098 if (need_uses)
4100 varinfo_t uses = get_call_use_vi (stmt);
4101 make_any_offset_constraints (uses);
4102 make_transitive_closure_constraints (uses);
4103 rhsc.var = uses->id;
4104 rhsc.offset = 0;
4105 rhsc.type = SCALAR;
4106 results->safe_push (rhsc);
4109 /* May return offsetted arguments. */
4110 varinfo_t tem = NULL;
4111 if (gimple_call_num_args (stmt) != 0)
4112 tem = new_var_info (NULL_TREE, "callarg", true);
4113 for (k = 0; k < gimple_call_num_args (stmt); ++k)
4115 tree arg = gimple_call_arg (stmt, k);
4116 auto_vec<ce_s> argc;
4117 get_constraint_for_rhs (arg, &argc);
4118 make_constraints_to (tem->id, argc);
4120 if (tem)
4122 ce_s ce;
4123 ce.type = SCALAR;
4124 ce.var = tem->id;
4125 ce.offset = UNKNOWN_OFFSET;
4126 results->safe_push (ce);
4129 /* May return addresses of globals. */
4130 rhsc.var = nonlocal_id;
4131 rhsc.offset = 0;
4132 rhsc.type = ADDRESSOF;
4133 results->safe_push (rhsc);
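/* Illustrative sketch (informal; "callarg" is the temporary variable
   created above): for a const call

     p = cfoo (q, r);

   the result constraints pushed here amount to

     p = callarg + UNKNOWN      // may return offsetted arguments
     p = &NONLOCAL              // may return addresses of globals

   with callarg = q and callarg = r collecting the argument solutions.  */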
4136 /* For non-IPA mode, generate constraints necessary for a call to a
4137 pure function in statement STMT. */
4139 static void
4140 handle_pure_call (gcall *stmt, vec<ce_s> *results)
4142 struct constraint_expr rhsc;
4143 unsigned i;
4144 varinfo_t uses = NULL;
4146 /* Memory reached from pointer arguments is call-used. */
4147 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4149 tree arg = gimple_call_arg (stmt, i);
4150 if (!uses)
4152 uses = get_call_use_vi (stmt);
4153 make_any_offset_constraints (uses);
4154 make_transitive_closure_constraints (uses);
4156 make_constraint_to (uses->id, arg);
4159 /* The static chain is used as well. */
4160 if (gimple_call_chain (stmt))
4162 if (!uses)
4164 uses = get_call_use_vi (stmt);
4165 make_any_offset_constraints (uses);
4166 make_transitive_closure_constraints (uses);
4168 make_constraint_to (uses->id, gimple_call_chain (stmt));
4171 /* And if we applied NRV the address of the return slot. */
4172 if (gimple_call_return_slot_opt_p (stmt)
4173 && gimple_call_lhs (stmt) != NULL_TREE
4174 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
4176 if (!uses)
4178 uses = get_call_use_vi (stmt);
4179 make_any_offset_constraints (uses);
4180 make_transitive_closure_constraints (uses);
4182 auto_vec<ce_s> tmpc;
4183 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
4184 make_constraints_to (uses->id, tmpc);
4187 /* Pure functions may return call-used and nonlocal memory. */
4188 if (uses)
4190 rhsc.var = uses->id;
4191 rhsc.offset = 0;
4192 rhsc.type = SCALAR;
4193 results->safe_push (rhsc);
4195 rhsc.var = nonlocal_id;
4196 rhsc.offset = 0;
4197 rhsc.type = SCALAR;
4198 results->safe_push (rhsc);
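/* Illustrative sketch (informal; CALLUSED is shorthand for the per-call
   use variable): for a pure call

     p = pfoo (q);

   memory reachable from q ends up in the per-call use variable, and the
   result constraints amount to

     CALLUSED(call) = q    (closed transitively, at any offset)
     p = CALLUSED(call)
     p = NONLOCAL

   so the returned pointer may point to call-used or nonlocal memory.  */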
4202 /* Return the varinfo for the callee of CALL. */
4204 static varinfo_t
4205 get_fi_for_callee (gcall *call)
4207 tree decl, fn = gimple_call_fn (call);
4209 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4210 fn = OBJ_TYPE_REF_EXPR (fn);
4212 /* If we can directly resolve the function being called, do so.
4213 Otherwise, it must be some sort of indirect expression that
4214 we should still be able to handle. */
4215 decl = gimple_call_addr_fndecl (fn);
4216 if (decl)
4217 return get_vi_for_tree (decl);
4219   /* If the function is anything other than an SSA name pointer we have no
4220      clue and should be getting ANYFN (well, ANYTHING for now).  */
4221 if (!fn || TREE_CODE (fn) != SSA_NAME)
4222 return get_varinfo (anything_id);
4224 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4225 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4226 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4227 fn = SSA_NAME_VAR (fn);
4229 return get_vi_for_tree (fn);
4232 /* Create constraints for assigning call argument ARG to the incoming parameter
4233 INDEX of function FI. */
4235 static void
4236 find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg)
4238 struct constraint_expr lhs;
4239 lhs = get_function_part_constraint (fi, fi_parm_base + index);
4241 auto_vec<ce_s, 2> rhsc;
4242 get_constraint_for_rhs (arg, &rhsc);
4244 unsigned j;
4245 struct constraint_expr *rhsp;
4246 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4247 process_constraint (new_constraint (lhs, *rhsp));
4250 /* Return true if FNDECL may be part of another LTO partition.  */
4252 static bool
4253 fndecl_maybe_in_other_partition (tree fndecl)
4255 cgraph_node *fn_node = cgraph_node::get (fndecl);
4256 if (fn_node == NULL)
4257 return true;
4259 return fn_node->in_other_partition;
4262 /* Create constraints for the builtin call T. Return true if the call
4263 was handled, otherwise false. */
4265 static bool
4266 find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
4268 tree fndecl = gimple_call_fndecl (t);
4269 auto_vec<ce_s, 2> lhsc;
4270 auto_vec<ce_s, 4> rhsc;
4271 varinfo_t fi;
4273 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4274 /* ??? All builtins that are handled here need to be handled
4275 in the alias-oracle query functions explicitly! */
4276 switch (DECL_FUNCTION_CODE (fndecl))
4278 /* All the following functions return a pointer to the same object
4279 as their first argument points to. The functions do not add
4280 to the ESCAPED solution. The functions make the first argument
4281 pointed to memory point to what the second argument pointed to
4282 memory points to. */
4283 case BUILT_IN_STRCPY:
4284 case BUILT_IN_STRNCPY:
4285 case BUILT_IN_BCOPY:
4286 case BUILT_IN_MEMCPY:
4287 case BUILT_IN_MEMMOVE:
4288 case BUILT_IN_MEMPCPY:
4289 case BUILT_IN_STPCPY:
4290 case BUILT_IN_STPNCPY:
4291 case BUILT_IN_STRCAT:
4292 case BUILT_IN_STRNCAT:
4293 case BUILT_IN_STRCPY_CHK:
4294 case BUILT_IN_STRNCPY_CHK:
4295 case BUILT_IN_MEMCPY_CHK:
4296 case BUILT_IN_MEMMOVE_CHK:
4297 case BUILT_IN_MEMPCPY_CHK:
4298 case BUILT_IN_STPCPY_CHK:
4299 case BUILT_IN_STPNCPY_CHK:
4300 case BUILT_IN_STRCAT_CHK:
4301 case BUILT_IN_STRNCAT_CHK:
4302 case BUILT_IN_TM_MEMCPY:
4303 case BUILT_IN_TM_MEMMOVE:
4305 tree res = gimple_call_lhs (t);
4306 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4307 == BUILT_IN_BCOPY ? 1 : 0));
4308 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4309 == BUILT_IN_BCOPY ? 0 : 1));
4310 if (res != NULL_TREE)
4312 get_constraint_for (res, &lhsc);
4313 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4314 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4315 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4316 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4317 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4318 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4319 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4320 else
4321 get_constraint_for (dest, &rhsc);
4322 process_all_all_constraints (lhsc, rhsc);
4323 lhsc.truncate (0);
4324 rhsc.truncate (0);
4326 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4327 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4328 do_deref (&lhsc);
4329 do_deref (&rhsc);
4330 process_all_all_constraints (lhsc, rhsc);
4331 return true;
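      /* Illustrative sketch (informal): for

           r = memcpy (d, s, n);

         the case above generates roughly r = d and *d = *s, i.e. the
         result aliases the destination, and what the destination points
         to afterwards points to what the source's pointed-to memory
         points to.  */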
4333 case BUILT_IN_MEMSET:
4334 case BUILT_IN_MEMSET_CHK:
4335 case BUILT_IN_TM_MEMSET:
4337 tree res = gimple_call_lhs (t);
4338 tree dest = gimple_call_arg (t, 0);
4339 unsigned i;
4340 ce_s *lhsp;
4341 struct constraint_expr ac;
4342 if (res != NULL_TREE)
4344 get_constraint_for (res, &lhsc);
4345 get_constraint_for (dest, &rhsc);
4346 process_all_all_constraints (lhsc, rhsc);
4347 lhsc.truncate (0);
4349 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4350 do_deref (&lhsc);
4351 if (flag_delete_null_pointer_checks
4352 && integer_zerop (gimple_call_arg (t, 1)))
4354 ac.type = ADDRESSOF;
4355 ac.var = nothing_id;
4357 else
4359 ac.type = SCALAR;
4360 ac.var = integer_id;
4362 ac.offset = 0;
4363 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4364 process_constraint (new_constraint (*lhsp, ac));
4365 return true;
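      /* Illustrative sketch (informal): for memset (d, 0, n) with
         -fdelete-null-pointer-checks the case above generates
         *d = &NOTHING, and *d = INTEGER for a nonzero fill value, so the
         cleared memory is known not to point anywhere useful.  */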
4367 case BUILT_IN_POSIX_MEMALIGN:
4369 tree ptrptr = gimple_call_arg (t, 0);
4370 get_constraint_for (ptrptr, &lhsc);
4371 do_deref (&lhsc);
4372 varinfo_t vi = make_heapvar ("HEAP", true);
4373         /* We are marking allocated storage local; we deal with it becoming
4374            global via escaping and the setting of vars_contains_escaped_heap.  */
4375 DECL_EXTERNAL (vi->decl) = 0;
4376 vi->is_global_var = 0;
4377 struct constraint_expr tmpc;
4378 tmpc.var = vi->id;
4379 tmpc.offset = 0;
4380 tmpc.type = ADDRESSOF;
4381 rhsc.safe_push (tmpc);
4382 process_all_all_constraints (lhsc, rhsc);
4383 return true;
4385 case BUILT_IN_ASSUME_ALIGNED:
4387 tree res = gimple_call_lhs (t);
4388 tree dest = gimple_call_arg (t, 0);
4389 if (res != NULL_TREE)
4391 get_constraint_for (res, &lhsc);
4392 get_constraint_for (dest, &rhsc);
4393 process_all_all_constraints (lhsc, rhsc);
4395 return true;
4397       /* None of the following functions return pointers, modify the
4398          points-to sets of memory reachable from their arguments, or
4399          add to the ESCAPED solution.  */
4400 case BUILT_IN_SINCOS:
4401 case BUILT_IN_SINCOSF:
4402 case BUILT_IN_SINCOSL:
4403 case BUILT_IN_FREXP:
4404 case BUILT_IN_FREXPF:
4405 case BUILT_IN_FREXPL:
4406 case BUILT_IN_GAMMA_R:
4407 case BUILT_IN_GAMMAF_R:
4408 case BUILT_IN_GAMMAL_R:
4409 case BUILT_IN_LGAMMA_R:
4410 case BUILT_IN_LGAMMAF_R:
4411 case BUILT_IN_LGAMMAL_R:
4412 case BUILT_IN_MODF:
4413 case BUILT_IN_MODFF:
4414 case BUILT_IN_MODFL:
4415 case BUILT_IN_REMQUO:
4416 case BUILT_IN_REMQUOF:
4417 case BUILT_IN_REMQUOL:
4418 case BUILT_IN_FREE:
4419 return true;
4420 case BUILT_IN_STRDUP:
4421 case BUILT_IN_STRNDUP:
4422 case BUILT_IN_REALLOC:
4423 if (gimple_call_lhs (t))
4425 handle_lhs_call (t, gimple_call_lhs (t),
4426 gimple_call_return_flags (t) | ERF_NOALIAS,
4427 vNULL, fndecl);
4428 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4429 NULL_TREE, &lhsc);
4430 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4431 NULL_TREE, &rhsc);
4432 do_deref (&lhsc);
4433 do_deref (&rhsc);
4434 process_all_all_constraints (lhsc, rhsc);
4435 lhsc.truncate (0);
4436 rhsc.truncate (0);
4437           /* For realloc the resulting pointer can be equal to the
4438              argument as well.  But modeling only that wouldn't be
4439              correct, because with ptr == 0 realloc behaves like malloc.  */
4440 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
4442 get_constraint_for (gimple_call_lhs (t), &lhsc);
4443 get_constraint_for (gimple_call_arg (t, 0), &rhsc);
4444 process_all_all_constraints (lhsc, rhsc);
4446 return true;
4448 break;
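      /* Illustrative sketch (informal; HEAP names the ERF_NOALIAS heap
         variable): for

           q = realloc (p, n);

         the handling above yields q = &HEAP via handle_lhs_call,
         *q = *p for the copied contents, and additionally q = p because
         realloc may return the original block; the extra &HEAP covers
         the p == NULL malloc-like case.  */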
4449 /* String / character search functions return a pointer into the
4450 source string or NULL. */
4451 case BUILT_IN_INDEX:
4452 case BUILT_IN_STRCHR:
4453 case BUILT_IN_STRRCHR:
4454 case BUILT_IN_MEMCHR:
4455 case BUILT_IN_STRSTR:
4456 case BUILT_IN_STRPBRK:
4457 if (gimple_call_lhs (t))
4459 tree src = gimple_call_arg (t, 0);
4460 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4461 constraint_expr nul;
4462 nul.var = nothing_id;
4463 nul.offset = 0;
4464 nul.type = ADDRESSOF;
4465 rhsc.safe_push (nul);
4466 get_constraint_for (gimple_call_lhs (t), &lhsc);
4467 process_all_all_constraints (lhsc, rhsc);
4469 return true;
4470 /* Trampolines are special - they set up passing the static
4471 frame. */
4472 case BUILT_IN_INIT_TRAMPOLINE:
4474 tree tramp = gimple_call_arg (t, 0);
4475 tree nfunc = gimple_call_arg (t, 1);
4476 tree frame = gimple_call_arg (t, 2);
4477 unsigned i;
4478 struct constraint_expr lhs, *rhsp;
4479 if (in_ipa_mode)
4481 varinfo_t nfi = NULL;
4482 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4483 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4484 if (nfi)
4486 lhs = get_function_part_constraint (nfi, fi_static_chain);
4487 get_constraint_for (frame, &rhsc);
4488 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4489 process_constraint (new_constraint (lhs, *rhsp));
4490 rhsc.truncate (0);
4492 /* Make the frame point to the function for
4493 the trampoline adjustment call. */
4494 get_constraint_for (tramp, &lhsc);
4495 do_deref (&lhsc);
4496 get_constraint_for (nfunc, &rhsc);
4497 process_all_all_constraints (lhsc, rhsc);
4499 return true;
4502 /* Else fallthru to generic handling which will let
4503 the frame escape. */
4504 break;
4506 case BUILT_IN_ADJUST_TRAMPOLINE:
4508 tree tramp = gimple_call_arg (t, 0);
4509 tree res = gimple_call_lhs (t);
4510 if (in_ipa_mode && res)
4512 get_constraint_for (res, &lhsc);
4513 get_constraint_for (tramp, &rhsc);
4514 do_deref (&rhsc);
4515 process_all_all_constraints (lhsc, rhsc);
4517 return true;
4519 CASE_BUILT_IN_TM_STORE (1):
4520 CASE_BUILT_IN_TM_STORE (2):
4521 CASE_BUILT_IN_TM_STORE (4):
4522 CASE_BUILT_IN_TM_STORE (8):
4523 CASE_BUILT_IN_TM_STORE (FLOAT):
4524 CASE_BUILT_IN_TM_STORE (DOUBLE):
4525 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4526 CASE_BUILT_IN_TM_STORE (M64):
4527 CASE_BUILT_IN_TM_STORE (M128):
4528 CASE_BUILT_IN_TM_STORE (M256):
4530 tree addr = gimple_call_arg (t, 0);
4531 tree src = gimple_call_arg (t, 1);
4533 get_constraint_for (addr, &lhsc);
4534 do_deref (&lhsc);
4535 get_constraint_for (src, &rhsc);
4536 process_all_all_constraints (lhsc, rhsc);
4537 return true;
4539 CASE_BUILT_IN_TM_LOAD (1):
4540 CASE_BUILT_IN_TM_LOAD (2):
4541 CASE_BUILT_IN_TM_LOAD (4):
4542 CASE_BUILT_IN_TM_LOAD (8):
4543 CASE_BUILT_IN_TM_LOAD (FLOAT):
4544 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4545 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4546 CASE_BUILT_IN_TM_LOAD (M64):
4547 CASE_BUILT_IN_TM_LOAD (M128):
4548 CASE_BUILT_IN_TM_LOAD (M256):
4550 tree dest = gimple_call_lhs (t);
4551 tree addr = gimple_call_arg (t, 0);
4553 get_constraint_for (dest, &lhsc);
4554 get_constraint_for (addr, &rhsc);
4555 do_deref (&rhsc);
4556 process_all_all_constraints (lhsc, rhsc);
4557 return true;
4559 /* Variadic argument handling needs to be handled in IPA
4560 mode as well. */
4561 case BUILT_IN_VA_START:
4563 tree valist = gimple_call_arg (t, 0);
4564 struct constraint_expr rhs, *lhsp;
4565 unsigned i;
4566 get_constraint_for_ptr_offset (valist, NULL_TREE, &lhsc);
4567 do_deref (&lhsc);
4568         /* The va_list gets access to pointers in the variadic
4569            arguments, which we know in the case of IPA analysis
4570            and which otherwise are just all nonlocal variables.  */
4571 if (in_ipa_mode)
4573 fi = lookup_vi_for_tree (fn->decl);
4574 rhs = get_function_part_constraint (fi, ~0);
4575 rhs.type = ADDRESSOF;
4577 else
4579 rhs.var = nonlocal_id;
4580 rhs.type = ADDRESSOF;
4581 rhs.offset = 0;
4583 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4584 process_constraint (new_constraint (*lhsp, rhs));
4585 /* va_list is clobbered. */
4586 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4587 return true;
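      /* Illustrative sketch (informal): for va_start (ap, last) in
         non-IPA mode the case above generates

           *ap = &NONLOCAL      // pointers in "..." are unknown

         and records ap as clobbered by the call, while in IPA mode the
         RHS is instead the address of (any part of) the current
         function's function info, so the va_list can reach the recorded
         variadic arguments.  */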
4589 /* va_end doesn't have any effect that matters. */
4590 case BUILT_IN_VA_END:
4591 return true;
4592 /* Alternate return. Simply give up for now. */
4593 case BUILT_IN_RETURN:
4595 fi = NULL;
4596 if (!in_ipa_mode
4597 || !(fi = get_vi_for_tree (fn->decl)))
4598 make_constraint_from (get_varinfo (escaped_id), anything_id);
4599 else if (in_ipa_mode
4600 && fi != NULL)
4602 struct constraint_expr lhs, rhs;
4603 lhs = get_function_part_constraint (fi, fi_result);
4604 rhs.var = anything_id;
4605 rhs.offset = 0;
4606 rhs.type = SCALAR;
4607 process_constraint (new_constraint (lhs, rhs));
4609 return true;
4611 case BUILT_IN_GOMP_PARALLEL:
4612 case BUILT_IN_GOACC_PARALLEL:
4614 if (in_ipa_mode)
4616 unsigned int fnpos, argpos;
4617 switch (DECL_FUNCTION_CODE (fndecl))
4619 case BUILT_IN_GOMP_PARALLEL:
4620 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
4621 fnpos = 0;
4622 argpos = 1;
4623 break;
4624 case BUILT_IN_GOACC_PARALLEL:
4625 /* __builtin_GOACC_parallel (device, fn, mapnum, hostaddrs,
4626 sizes, kinds, ...). */
4627 fnpos = 1;
4628 argpos = 3;
4629 break;
4630 default:
4631 gcc_unreachable ();
4634 tree fnarg = gimple_call_arg (t, fnpos);
4635 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
4636 tree fndecl = TREE_OPERAND (fnarg, 0);
4637 if (fndecl_maybe_in_other_partition (fndecl))
4638 /* Fallthru to general call handling. */
4639 break;
4641 tree arg = gimple_call_arg (t, argpos);
4643 varinfo_t fi = get_vi_for_tree (fndecl);
4644 find_func_aliases_for_call_arg (fi, 0, arg);
4645 return true;
4647 /* Else fallthru to generic call handling. */
4648 break;
4650 /* printf-style functions may have hooks to set pointers to
4651 point to somewhere into the generated string. Leave them
4652 for a later exercise... */
4653 default:
4654 /* Fallthru to general call handling. */;
4657 return false;
4660 /* Create constraints for the call T. */
4662 static void
4663 find_func_aliases_for_call (struct function *fn, gcall *t)
4665 tree fndecl = gimple_call_fndecl (t);
4666 varinfo_t fi;
4668 if (fndecl != NULL_TREE
4669 && DECL_BUILT_IN (fndecl)
4670 && find_func_aliases_for_builtin_call (fn, t))
4671 return;
4673 fi = get_fi_for_callee (t);
4674 if (!in_ipa_mode
4675 || (fndecl && !fi->is_fn_info))
4677 auto_vec<ce_s, 16> rhsc;
4678 int flags = gimple_call_flags (t);
4680 /* Const functions can return their arguments and addresses
4681 of global memory but not of escaped memory. */
4682 if (flags & (ECF_CONST|ECF_NOVOPS))
4684 if (gimple_call_lhs (t))
4685 handle_const_call (t, &rhsc);
4687 /* Pure functions can return addresses in and of memory
4688 reachable from their arguments, but they are not an escape
4689 point for reachable memory of their arguments. */
4690 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4691 handle_pure_call (t, &rhsc);
4692 else
4693 handle_rhs_call (t, &rhsc);
4694 if (gimple_call_lhs (t))
4695 handle_lhs_call (t, gimple_call_lhs (t),
4696 gimple_call_return_flags (t), rhsc, fndecl);
4698 else
4700 auto_vec<ce_s, 2> rhsc;
4701 tree lhsop;
4702 unsigned j;
4704 /* Assign all the passed arguments to the appropriate incoming
4705 parameters of the function. */
4706 for (j = 0; j < gimple_call_num_args (t); j++)
4708 tree arg = gimple_call_arg (t, j);
4709 find_func_aliases_for_call_arg (fi, j, arg);
4712 /* If we are returning a value, assign it to the result. */
4713 lhsop = gimple_call_lhs (t);
4714 if (lhsop)
4716 auto_vec<ce_s, 2> lhsc;
4717 struct constraint_expr rhs;
4718 struct constraint_expr *lhsp;
4719 bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (t));
4721 get_constraint_for (lhsop, &lhsc);
4722 rhs = get_function_part_constraint (fi, fi_result);
4723 if (aggr_p)
4725 auto_vec<ce_s, 2> tem;
4726 tem.quick_push (rhs);
4727 do_deref (&tem);
4728 gcc_checking_assert (tem.length () == 1);
4729 rhs = tem[0];
4731 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4732 process_constraint (new_constraint (*lhsp, rhs));
4734 /* If we pass the result decl by reference, honor that. */
4735 if (aggr_p)
4737 struct constraint_expr lhs;
4738 struct constraint_expr *rhsp;
4740 get_constraint_for_address_of (lhsop, &rhsc);
4741 lhs = get_function_part_constraint (fi, fi_result);
4742 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4743 process_constraint (new_constraint (lhs, *rhsp));
4744 rhsc.truncate (0);
4748 /* If we use a static chain, pass it along. */
4749 if (gimple_call_chain (t))
4751 struct constraint_expr lhs;
4752 struct constraint_expr *rhsp;
4754 get_constraint_for (gimple_call_chain (t), &rhsc);
4755 lhs = get_function_part_constraint (fi, fi_static_chain);
4756 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4757 process_constraint (new_constraint (lhs, *rhsp));
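/* Illustrative sketch (informal; bar.argN, bar.result and bar.chain denote
   the fi_parm_base + N, fi_result and fi_static_chain parts of bar's
   function info): in IPA mode a direct call

     x = bar (a, b);

   is wired through bar's function info roughly as

     bar.arg0 = a        bar.arg1 = b
     x = bar.result      (dereferenced, and &x added to bar.result, when
                          the result is passed by reference)
     bar.chain = the static chain, if any.  */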
4762 /* Walk statement T setting up aliasing constraints according to the
4763    references found in T.  This function is the main part of the
4764    constraint builder.  FN is the function T belongs to.  */
4767 static void
4768 find_func_aliases (struct function *fn, gimple *origt)
4770 gimple *t = origt;
4771 auto_vec<ce_s, 16> lhsc;
4772 auto_vec<ce_s, 16> rhsc;
4773 struct constraint_expr *c;
4774 varinfo_t fi;
4776 /* Now build constraints expressions. */
4777 if (gimple_code (t) == GIMPLE_PHI)
4779 size_t i;
4780 unsigned int j;
4782 /* For a phi node, assign all the arguments to
4783 the result. */
4784 get_constraint_for (gimple_phi_result (t), &lhsc);
4785 for (i = 0; i < gimple_phi_num_args (t); i++)
4787 tree strippedrhs = PHI_ARG_DEF (t, i);
4789 STRIP_NOPS (strippedrhs);
4790 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
4792 FOR_EACH_VEC_ELT (lhsc, j, c)
4794 struct constraint_expr *c2;
4795 while (rhsc.length () > 0)
4797 c2 = &rhsc.last ();
4798 process_constraint (new_constraint (*c, *c2));
4799 rhsc.pop ();
4804   /* In IPA mode, we need to generate constraints to pass call
4805      arguments through their calls.   There are two cases:
4806      either a GIMPLE_CALL returning a value, or just a plain
4807      GIMPLE_CALL that does not.
4809      In non-IPA mode, we need to generate constraints for each
4810      pointer passed by address.  */
4811 else if (is_gimple_call (t))
4812 find_func_aliases_for_call (fn, as_a <gcall *> (t));
4814 /* Otherwise, just a regular assignment statement. Only care about
4815 operations with pointer result, others are dealt with as escape
4816 points if they have pointer operands. */
4817 else if (is_gimple_assign (t))
4819 /* Otherwise, just a regular assignment statement. */
4820 tree lhsop = gimple_assign_lhs (t);
4821 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4823 if (rhsop && TREE_CLOBBER_P (rhsop))
4824 /* Ignore clobbers, they don't actually store anything into
4825 the LHS. */
4827 else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4828 do_structure_copy (lhsop, rhsop);
4829 else
4831 enum tree_code code = gimple_assign_rhs_code (t);
4833 get_constraint_for (lhsop, &lhsc);
4835 if (code == POINTER_PLUS_EXPR)
4836 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4837 gimple_assign_rhs2 (t), &rhsc);
4838 else if (code == BIT_AND_EXPR
4839 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
4841 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4842 the pointer. Handle it by offsetting it by UNKNOWN. */
4843 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4844 NULL_TREE, &rhsc);
4846 else if ((CONVERT_EXPR_CODE_P (code)
4847 && !(POINTER_TYPE_P (gimple_expr_type (t))
4848 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4849 || gimple_assign_single_p (t))
4850 get_constraint_for_rhs (rhsop, &rhsc);
4851 else if (code == COND_EXPR)
4853 /* The result is a merge of both COND_EXPR arms. */
4854 auto_vec<ce_s, 2> tmp;
4855 struct constraint_expr *rhsp;
4856 unsigned i;
4857 get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
4858 get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
4859 FOR_EACH_VEC_ELT (tmp, i, rhsp)
4860 rhsc.safe_push (*rhsp);
4862 else if (truth_value_p (code))
4863 	/* Truth value results are not pointers (or pointer parts), or at
4864 	   least would be a very unreasonable obfuscation of a part.  */
4866 else
4868 /* All other operations are merges. */
4869 auto_vec<ce_s, 4> tmp;
4870 struct constraint_expr *rhsp;
4871 unsigned i, j;
4872 get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
4873 for (i = 2; i < gimple_num_ops (t); ++i)
4875 get_constraint_for_rhs (gimple_op (t, i), &tmp);
4876 FOR_EACH_VEC_ELT (tmp, j, rhsp)
4877 rhsc.safe_push (*rhsp);
4878 tmp.truncate (0);
4881 process_all_all_constraints (lhsc, rhsc);
4883 /* If there is a store to a global variable the rhs escapes. */
4884 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4885 && DECL_P (lhsop))
4887 varinfo_t vi = get_vi_for_tree (lhsop);
4888 if ((! in_ipa_mode && vi->is_global_var)
4889 || vi->is_ipa_escape_point)
4890 make_escape_constraint (rhsop);
4893 /* Handle escapes through return. */
4894 else if (gimple_code (t) == GIMPLE_RETURN
4895 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
4897 greturn *return_stmt = as_a <greturn *> (t);
4898 fi = NULL;
4899 if (!in_ipa_mode
4900 || !(fi = get_vi_for_tree (fn->decl)))
4901 make_escape_constraint (gimple_return_retval (return_stmt));
4902 else if (in_ipa_mode)
4904       struct constraint_expr lhs;
4905 struct constraint_expr *rhsp;
4906 unsigned i;
4908 lhs = get_function_part_constraint (fi, fi_result);
4909 get_constraint_for_rhs (gimple_return_retval (return_stmt), &rhsc);
4910 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4911 process_constraint (new_constraint (lhs, *rhsp));
4914 /* Handle asms conservatively by adding escape constraints to everything. */
4915 else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
4917 unsigned i, noutputs;
4918 const char **oconstraints;
4919 const char *constraint;
4920 bool allows_mem, allows_reg, is_inout;
4922 noutputs = gimple_asm_noutputs (asm_stmt);
4923 oconstraints = XALLOCAVEC (const char *, noutputs);
4925 for (i = 0; i < noutputs; ++i)
4927 tree link = gimple_asm_output_op (asm_stmt, i);
4928 tree op = TREE_VALUE (link);
4930 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4931 oconstraints[i] = constraint;
4932 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4933 &allows_reg, &is_inout);
4935 /* A memory constraint makes the address of the operand escape. */
4936 if (!allows_reg && allows_mem)
4937 make_escape_constraint (build_fold_addr_expr (op));
4939 /* The asm may read global memory, so outputs may point to
4940 any global memory. */
4941 if (op)
4943 auto_vec<ce_s, 2> lhsc;
4944 struct constraint_expr rhsc, *lhsp;
4945 unsigned j;
4946 get_constraint_for (op, &lhsc);
4947 rhsc.var = nonlocal_id;
4948 rhsc.offset = 0;
4949 rhsc.type = SCALAR;
4950 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4951 process_constraint (new_constraint (*lhsp, rhsc));
4954 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4956 tree link = gimple_asm_input_op (asm_stmt, i);
4957 tree op = TREE_VALUE (link);
4959 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4961 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
4962 &allows_mem, &allows_reg);
4964 /* A memory constraint makes the address of the operand escape. */
4965 if (!allows_reg && allows_mem)
4966 make_escape_constraint (build_fold_addr_expr (op));
4967 /* Strictly we'd only need the constraint to ESCAPED if
4968 the asm clobbers memory, otherwise using something
4969 along the lines of per-call clobbers/uses would be enough. */
4970 else if (op)
4971 make_escape_constraint (op);
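/* Illustrative sketch (informal): for straight-line code like

     p = &a;
     q = p + i;        // POINTER_PLUS_EXPR
     r = b ? p : q;    // COND_EXPR

   find_func_aliases generates roughly p = &a, q = p (with an unknown
   offset if i is not a known constant), and both r = p and r = q, since
   conditional results are merges.  A store to a global variable
   additionally makes the stored RHS escape in non-IPA mode.  */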
4977 /* Create a constraint adding to the clobber set of FI the memory
4978 pointed to by PTR. */
4980 static void
4981 process_ipa_clobber (varinfo_t fi, tree ptr)
4983 vec<ce_s> ptrc = vNULL;
4984 struct constraint_expr *c, lhs;
4985 unsigned i;
4986 get_constraint_for_rhs (ptr, &ptrc);
4987 lhs = get_function_part_constraint (fi, fi_clobbers);
4988 FOR_EACH_VEC_ELT (ptrc, i, c)
4989 process_constraint (new_constraint (lhs, *c));
4990 ptrc.release ();
4993 /* Walk statement T setting up clobber and use constraints according to the
4994 references found in T. This function is a main part of the
4995 IPA constraint builder. */
4997 static void
4998 find_func_clobbers (struct function *fn, gimple *origt)
5000 gimple *t = origt;
5001 auto_vec<ce_s, 16> lhsc;
5002 auto_vec<ce_s, 16> rhsc;
5003 varinfo_t fi;
5005   /* Add constraints for clobbered/used memory in IPA mode.
5006      We are not interested in what automatic variables are clobbered
5007      or used, as we only use the information in the caller, to which
5008      they do not escape.  */
5009 gcc_assert (in_ipa_mode);
5011   /* If the stmt refers to memory in any way, it had better have a VUSE.  */
5012 if (gimple_vuse (t) == NULL_TREE)
5013 return;
5015 /* We'd better have function information for the current function. */
5016 fi = lookup_vi_for_tree (fn->decl);
5017 gcc_assert (fi != NULL);
5019 /* Account for stores in assignments and calls. */
5020 if (gimple_vdef (t) != NULL_TREE
5021 && gimple_has_lhs (t))
5023 tree lhs = gimple_get_lhs (t);
5024 tree tem = lhs;
5025 while (handled_component_p (tem))
5026 tem = TREE_OPERAND (tem, 0);
5027 if ((DECL_P (tem)
5028 && !auto_var_in_fn_p (tem, fn->decl))
5029 || INDIRECT_REF_P (tem)
5030 || (TREE_CODE (tem) == MEM_REF
5031 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5032 && auto_var_in_fn_p
5033 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5035 struct constraint_expr lhsc, *rhsp;
5036 unsigned i;
5037 lhsc = get_function_part_constraint (fi, fi_clobbers);
5038 get_constraint_for_address_of (lhs, &rhsc);
5039 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5040 process_constraint (new_constraint (lhsc, *rhsp));
5041 rhsc.truncate (0);
5045   /* Account for uses in assignments and returns.  */
5046 if (gimple_assign_single_p (t)
5047 || (gimple_code (t) == GIMPLE_RETURN
5048 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
5050 tree rhs = (gimple_assign_single_p (t)
5051 ? gimple_assign_rhs1 (t)
5052 : gimple_return_retval (as_a <greturn *> (t)));
5053 tree tem = rhs;
5054 while (handled_component_p (tem))
5055 tem = TREE_OPERAND (tem, 0);
5056 if ((DECL_P (tem)
5057 && !auto_var_in_fn_p (tem, fn->decl))
5058 || INDIRECT_REF_P (tem)
5059 || (TREE_CODE (tem) == MEM_REF
5060 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5061 && auto_var_in_fn_p
5062 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5064 struct constraint_expr lhs, *rhsp;
5065 unsigned i;
5066 lhs = get_function_part_constraint (fi, fi_uses);
5067 get_constraint_for_address_of (rhs, &rhsc);
5068 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5069 process_constraint (new_constraint (lhs, *rhsp));
5070 rhsc.truncate (0);
5074 if (gcall *call_stmt = dyn_cast <gcall *> (t))
5076 varinfo_t cfi = NULL;
5077 tree decl = gimple_call_fndecl (t);
5078 struct constraint_expr lhs, rhs;
5079 unsigned i, j;
5081       /* For builtins we do not have separate function info.  For those
5082          builtins we do not generate escapes for, we have to generate clobbers/uses.  */
5083 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
5084 switch (DECL_FUNCTION_CODE (decl))
5086 /* The following functions use and clobber memory pointed to
5087 by their arguments. */
5088 case BUILT_IN_STRCPY:
5089 case BUILT_IN_STRNCPY:
5090 case BUILT_IN_BCOPY:
5091 case BUILT_IN_MEMCPY:
5092 case BUILT_IN_MEMMOVE:
5093 case BUILT_IN_MEMPCPY:
5094 case BUILT_IN_STPCPY:
5095 case BUILT_IN_STPNCPY:
5096 case BUILT_IN_STRCAT:
5097 case BUILT_IN_STRNCAT:
5098 case BUILT_IN_STRCPY_CHK:
5099 case BUILT_IN_STRNCPY_CHK:
5100 case BUILT_IN_MEMCPY_CHK:
5101 case BUILT_IN_MEMMOVE_CHK:
5102 case BUILT_IN_MEMPCPY_CHK:
5103 case BUILT_IN_STPCPY_CHK:
5104 case BUILT_IN_STPNCPY_CHK:
5105 case BUILT_IN_STRCAT_CHK:
5106 case BUILT_IN_STRNCAT_CHK:
5108 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5109 == BUILT_IN_BCOPY ? 1 : 0));
5110 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5111 == BUILT_IN_BCOPY ? 0 : 1));
5112 unsigned i;
5113 struct constraint_expr *rhsp, *lhsp;
5114 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5115 lhs = get_function_part_constraint (fi, fi_clobbers);
5116 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5117 process_constraint (new_constraint (lhs, *lhsp));
5118 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
5119 lhs = get_function_part_constraint (fi, fi_uses);
5120 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5121 process_constraint (new_constraint (lhs, *rhsp));
5122 return;
5124 	  /* The following functions clobber memory pointed to by
5125 	     their first argument.  */
5126 case BUILT_IN_MEMSET:
5127 case BUILT_IN_MEMSET_CHK:
5128 case BUILT_IN_POSIX_MEMALIGN:
5130 tree dest = gimple_call_arg (t, 0);
5131 unsigned i;
5132 ce_s *lhsp;
5133 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5134 lhs = get_function_part_constraint (fi, fi_clobbers);
5135 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5136 process_constraint (new_constraint (lhs, *lhsp));
5137 return;
5139 /* The following functions clobber their second and third
5140 arguments. */
5141 case BUILT_IN_SINCOS:
5142 case BUILT_IN_SINCOSF:
5143 case BUILT_IN_SINCOSL:
5145 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5146 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5147 return;
5149 /* The following functions clobber their second argument. */
5150 case BUILT_IN_FREXP:
5151 case BUILT_IN_FREXPF:
5152 case BUILT_IN_FREXPL:
5153 case BUILT_IN_LGAMMA_R:
5154 case BUILT_IN_LGAMMAF_R:
5155 case BUILT_IN_LGAMMAL_R:
5156 case BUILT_IN_GAMMA_R:
5157 case BUILT_IN_GAMMAF_R:
5158 case BUILT_IN_GAMMAL_R:
5159 case BUILT_IN_MODF:
5160 case BUILT_IN_MODFF:
5161 case BUILT_IN_MODFL:
5163 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5164 return;
5166 /* The following functions clobber their third argument. */
5167 case BUILT_IN_REMQUO:
5168 case BUILT_IN_REMQUOF:
5169 case BUILT_IN_REMQUOL:
5171 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5172 return;
5174 /* The following functions neither read nor clobber memory. */
5175 case BUILT_IN_ASSUME_ALIGNED:
5176 case BUILT_IN_FREE:
5177 return;
5178 /* Trampolines are of no interest to us. */
5179 case BUILT_IN_INIT_TRAMPOLINE:
5180 case BUILT_IN_ADJUST_TRAMPOLINE:
5181 return;
5182 case BUILT_IN_VA_START:
5183 case BUILT_IN_VA_END:
5184 return;
5185 case BUILT_IN_GOMP_PARALLEL:
5186 case BUILT_IN_GOACC_PARALLEL:
5188 unsigned int fnpos, argpos;
5189 unsigned int implicit_use_args[2];
5190 unsigned int num_implicit_use_args = 0;
5191 switch (DECL_FUNCTION_CODE (decl))
5193 case BUILT_IN_GOMP_PARALLEL:
5194 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
5195 fnpos = 0;
5196 argpos = 1;
5197 break;
5198 case BUILT_IN_GOACC_PARALLEL:
5199 /* __builtin_GOACC_parallel (device, fn, mapnum, hostaddrs,
5200 sizes, kinds, ...). */
5201 fnpos = 1;
5202 argpos = 3;
5203 implicit_use_args[num_implicit_use_args++] = 4;
5204 implicit_use_args[num_implicit_use_args++] = 5;
5205 break;
5206 default:
5207 gcc_unreachable ();
5210 tree fnarg = gimple_call_arg (t, fnpos);
5211 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
5212 tree fndecl = TREE_OPERAND (fnarg, 0);
5213 if (fndecl_maybe_in_other_partition (fndecl))
5214 /* Fallthru to general call handling. */
5215 break;
5217 varinfo_t cfi = get_vi_for_tree (fndecl);
5219 tree arg = gimple_call_arg (t, argpos);
5221 /* Parameter passed by value is used. */
5222 lhs = get_function_part_constraint (fi, fi_uses);
5223 struct constraint_expr *rhsp;
5224 get_constraint_for (arg, &rhsc);
5225 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5226 process_constraint (new_constraint (lhs, *rhsp));
5227 rhsc.truncate (0);
5229 /* Handle parameters used by the call, but not used in cfi, as
5230 implicitly used by cfi. */
5231 lhs = get_function_part_constraint (cfi, fi_uses);
5232 for (unsigned i = 0; i < num_implicit_use_args; ++i)
5234 tree arg = gimple_call_arg (t, implicit_use_args[i]);
5235 get_constraint_for (arg, &rhsc);
5236 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5237 process_constraint (new_constraint (lhs, *rhsp));
5238 rhsc.truncate (0);
5241 /* The caller clobbers what the callee does. */
5242 lhs = get_function_part_constraint (fi, fi_clobbers);
5243 rhs = get_function_part_constraint (cfi, fi_clobbers);
5244 process_constraint (new_constraint (lhs, rhs));
5246 /* The caller uses what the callee does. */
5247 lhs = get_function_part_constraint (fi, fi_uses);
5248 rhs = get_function_part_constraint (cfi, fi_uses);
5249 process_constraint (new_constraint (lhs, rhs));
5251 return;
5253 /* printf-style functions may have hooks to set pointers to
5254 point to somewhere into the generated string. Leave them
5255 for a later exercise... */
5256 default:
5257 /* Fallthru to general call handling. */;
5260 /* Parameters passed by value are used. */
5261 lhs = get_function_part_constraint (fi, fi_uses);
5262 for (i = 0; i < gimple_call_num_args (t); i++)
5264 struct constraint_expr *rhsp;
5265 tree arg = gimple_call_arg (t, i);
5267 if (TREE_CODE (arg) == SSA_NAME
5268 || is_gimple_min_invariant (arg))
5269 continue;
5271 get_constraint_for_address_of (arg, &rhsc);
5272 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5273 process_constraint (new_constraint (lhs, *rhsp));
5274 rhsc.truncate (0);
5277 /* Build constraints for propagating clobbers/uses along the
5278 callgraph edges. */
5279 cfi = get_fi_for_callee (call_stmt);
5280 if (cfi->id == anything_id)
5282 if (gimple_vdef (t))
5283 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5284 anything_id);
5285 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5286 anything_id);
5287 return;
5290 /* For callees without function info (that's external functions),
5291 ESCAPED is clobbered and used. */
5292 if (gimple_call_fndecl (t)
5293 && !cfi->is_fn_info)
5295 varinfo_t vi;
5297 if (gimple_vdef (t))
5298 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5299 escaped_id);
5300 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
5302 /* Also honor the call statement use/clobber info. */
5303 if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
5304 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5305 vi->id);
5306 if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
5307 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
5308 vi->id);
5309 return;
5312 /* Otherwise the caller clobbers and uses what the callee does.
5313 ??? This should use a new complex constraint that filters
5314 local variables of the callee. */
5315 if (gimple_vdef (t))
5317 lhs = get_function_part_constraint (fi, fi_clobbers);
5318 rhs = get_function_part_constraint (cfi, fi_clobbers);
5319 process_constraint (new_constraint (lhs, rhs));
5321 lhs = get_function_part_constraint (fi, fi_uses);
5322 rhs = get_function_part_constraint (cfi, fi_uses);
5323 process_constraint (new_constraint (lhs, rhs));
5325 else if (gimple_code (t) == GIMPLE_ASM)
5327 /* ??? Ick. We can do better. */
5328 if (gimple_vdef (t))
5329 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5330 anything_id);
5331 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5332 anything_id);
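/* Illustrative sketch (informal; foo.clobbers/foo.uses denote the
   fi_clobbers/fi_uses parts of foo's function info): in IPA mode a
   statement

     glob = *p;

   inside function foo adds &glob to foo.clobbers (a store to a
   non-automatic variable) and the memory referenced through *p to
   foo.uses.  A call bar () further records

     foo.clobbers = bar.clobbers      foo.uses = bar.uses

   so clobber/use information propagates along call-graph edges.  */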
5337 /* Find the first varinfo in the same variable as START that overlaps with
5338 OFFSET. Return NULL if we can't find one. */
5340 static varinfo_t
5341 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5343 /* If the offset is outside of the variable, bail out. */
5344 if (offset >= start->fullsize)
5345 return NULL;
5347 /* If we cannot reach offset from start, lookup the first field
5348 and start from there. */
5349 if (start->offset > offset)
5350 start = get_varinfo (start->head);
5352 while (start)
5354 /* We may not find a variable in the field list with the actual
5355 offset when we have glommed a structure to a variable.
5356 In that case, however, offset should still be within the size
5357 of the variable. */
5358 if (offset >= start->offset
5359 && (offset - start->offset) < start->size)
5360 return start;
5362 start = vi_next (start);
5365 return NULL;
5368 /* Find the first varinfo in the same variable as START that overlaps with
5369 OFFSET. If there is no such varinfo the varinfo directly preceding
5370 OFFSET is returned. */
5372 static varinfo_t
5373 first_or_preceding_vi_for_offset (varinfo_t start,
5374 unsigned HOST_WIDE_INT offset)
5376 /* If we cannot reach offset from start, lookup the first field
5377 and start from there. */
5378 if (start->offset > offset)
5379 start = get_varinfo (start->head);
5381 /* We may not find a variable in the field list with the actual
5382 offset when we have glommed a structure to a variable.
5383 In that case, however, offset should still be within the size
5384 of the variable.
5385      If we get beyond the offset we are looking for, return the field
5386      directly preceding that offset, which may be the last field.  */
5387 while (start->next
5388 && offset >= start->offset
5389 && !((offset - start->offset) < start->size))
5390 start = vi_next (start);
5392 return start;
5396 /* This structure is used during pushing fields onto the fieldstack
5397 to track the offset of the field, since bitpos_of_field gives it
5398 relative to its immediate containing type, and we want it relative
5399 to the ultimate containing object. */
5401 struct fieldoff
5403 /* Offset from the base of the base containing object to this field. */
5404 HOST_WIDE_INT offset;
5406 /* Size, in bits, of the field. */
5407 unsigned HOST_WIDE_INT size;
5409 unsigned has_unknown_size : 1;
5411 unsigned must_have_pointers : 1;
5413 unsigned may_have_pointers : 1;
5415 unsigned only_restrict_pointers : 1;
5417 tree restrict_pointed_type;
5419 typedef struct fieldoff fieldoff_s;
5422 /* qsort comparison function for two fieldoff's PA and PB */
5424 static int
5425 fieldoff_compare (const void *pa, const void *pb)
5427 const fieldoff_s *foa = (const fieldoff_s *)pa;
5428 const fieldoff_s *fob = (const fieldoff_s *)pb;
5429 unsigned HOST_WIDE_INT foasize, fobsize;
5431 if (foa->offset < fob->offset)
5432 return -1;
5433 else if (foa->offset > fob->offset)
5434 return 1;
5436 foasize = foa->size;
5437 fobsize = fob->size;
5438 if (foasize < fobsize)
5439 return -1;
5440 else if (foasize > fobsize)
5441 return 1;
5442 return 0;
5445 /* Sort a fieldstack according to the field offset and sizes. */
5446 static void
5447 sort_fieldstack (vec<fieldoff_s> fieldstack)
5449 fieldstack.qsort (fieldoff_compare);
5452 /* Return true if T is a type that can have subvars. */
5454 static inline bool
5455 type_can_have_subvars (const_tree t)
5457 /* Aggregates without overlapping fields can have subvars. */
5458 return TREE_CODE (t) == RECORD_TYPE;
5461 /* Return true if V is a tree that we can have subvars for.
5462 Normally, this is any aggregate type. Also complex
5463 types which are not gimple registers can have subvars. */
5465 static inline bool
5466 var_can_have_subvars (const_tree v)
5468 /* Volatile variables should never have subvars. */
5469 if (TREE_THIS_VOLATILE (v))
5470 return false;
5472 /* Non decls or memory tags can never have subvars. */
5473 if (!DECL_P (v))
5474 return false;
5476 return type_can_have_subvars (TREE_TYPE (v));
5479 /* Return true if T is a type that must contain pointers.  */
5481 static bool
5482 type_must_have_pointers (tree type)
5484 if (POINTER_TYPE_P (type))
5485 return true;
5487 if (TREE_CODE (type) == ARRAY_TYPE)
5488 return type_must_have_pointers (TREE_TYPE (type));
5490 /* A function or method can have pointers as arguments, so track
5491 those separately. */
5492 if (TREE_CODE (type) == FUNCTION_TYPE
5493 || TREE_CODE (type) == METHOD_TYPE)
5494 return true;
5496 return false;
5499 static bool
5500 field_must_have_pointers (tree t)
5502 return type_must_have_pointers (TREE_TYPE (t));
5505 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5506 the fields of TYPE onto fieldstack, recording their offsets along
5507 the way.
5509 OFFSET is used to keep track of the offset in this entire
5510 structure, rather than just the immediately containing structure.
5511 Returns false if the caller is supposed to handle the field we
5512 recursed for. */
5514 static bool
5515 push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
5516 HOST_WIDE_INT offset)
5518 tree field;
5519 bool empty_p = true;
5521 if (TREE_CODE (type) != RECORD_TYPE)
5522 return false;
5524   /* If the vector of fields is growing too big, bail out early.
5525      Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, so make
5526      sure that check fails.  */
5527 if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5528 return false;
5530 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5531 if (TREE_CODE (field) == FIELD_DECL)
5533 bool push = false;
5534 HOST_WIDE_INT foff = bitpos_of_field (field);
5535 tree field_type = TREE_TYPE (field);
5537 if (!var_can_have_subvars (field)
5538 || TREE_CODE (field_type) == QUAL_UNION_TYPE
5539 || TREE_CODE (field_type) == UNION_TYPE)
5540 push = true;
5541 else if (!push_fields_onto_fieldstack
5542 (field_type, fieldstack, offset + foff)
5543 && (DECL_SIZE (field)
5544 && !integer_zerop (DECL_SIZE (field))))
5545 	  /* Empty structures may have actual size, like in C++.  So
5546 	     if we didn't push any subfields and the size is
5547 	     nonzero, push the field itself onto the stack.  */
5548 push = true;
5550 if (push)
5552 fieldoff_s *pair = NULL;
5553 bool has_unknown_size = false;
5554 bool must_have_pointers_p;
5556 if (!fieldstack->is_empty ())
5557 pair = &fieldstack->last ();
5559 	    /* If there isn't anything at offset zero, create something.  */
5560 if (!pair
5561 && offset + foff != 0)
5563 fieldoff_s e
5564 = {0, offset + foff, false, false, true, false, NULL_TREE};
5565 pair = fieldstack->safe_push (e);
5568 if (!DECL_SIZE (field)
5569 || !tree_fits_uhwi_p (DECL_SIZE (field)))
5570 has_unknown_size = true;
5572 /* If adjacent fields do not contain pointers merge them. */
5573 must_have_pointers_p = field_must_have_pointers (field);
5574 if (pair
5575 && !has_unknown_size
5576 && !must_have_pointers_p
5577 && !pair->must_have_pointers
5578 && !pair->has_unknown_size
5579 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5581 pair->size += tree_to_uhwi (DECL_SIZE (field));
5583 else
5585 fieldoff_s e;
5586 e.offset = offset + foff;
5587 e.has_unknown_size = has_unknown_size;
5588 if (!has_unknown_size)
5589 e.size = tree_to_uhwi (DECL_SIZE (field));
5590 else
5591 e.size = -1;
5592 e.must_have_pointers = must_have_pointers_p;
5593 e.may_have_pointers = true;
5594 e.only_restrict_pointers
5595 = (!has_unknown_size
5596 && POINTER_TYPE_P (field_type)
5597 && TYPE_RESTRICT (field_type));
5598 if (e.only_restrict_pointers)
5599 e.restrict_pointed_type = TREE_TYPE (field_type);
5600 fieldstack->safe_push (e);
5604 empty_p = false;
5607 return !empty_p;
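/* Illustrative sketch (informal): for

     struct S { int a; int b; char *p; };

   push_fields_onto_fieldstack records two entries on most targets: one
   merged non-pointer field covering a and b (adjacent fields without
   pointers are merged above) and one must-have-pointers field for p,
   each with its bit offset and size.  Exact offsets and sizes depend on
   the target ABI.  */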
5610 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5611 if it is a varargs function. */
5613 static unsigned int
5614 count_num_arguments (tree decl, bool *is_varargs)
5616 unsigned int num = 0;
5617 tree t;
5619 /* Capture named arguments for K&R functions. They do not
5620 have a prototype and thus no TYPE_ARG_TYPES. */
5621 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5622 ++num;
5624 /* Check if the function has variadic arguments. */
5625 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5626 if (TREE_VALUE (t) == void_type_node)
5627 break;
5628 if (!t)
5629 *is_varargs = true;
5631 return num;
5634 /* Create function info for DECL, using NAME, and return the varinfo
5635    we created for the function.  If NONLOCAL_P, create
5636    initial constraints.  */
5638 static varinfo_t
5639 create_function_info_for (tree decl, const char *name, bool add_id,
5640 bool nonlocal_p)
5642 struct function *fn = DECL_STRUCT_FUNCTION (decl);
5643 varinfo_t vi, prev_vi;
5644 tree arg;
5645 unsigned int i;
5646 bool is_varargs = false;
5647 unsigned int num_args = count_num_arguments (decl, &is_varargs);
5649 /* Create the variable info. */
5651 vi = new_var_info (decl, name, add_id);
5652 vi->offset = 0;
5653 vi->size = 1;
5654 vi->fullsize = fi_parm_base + num_args;
5655 vi->is_fn_info = 1;
5656 vi->may_have_pointers = false;
5657 if (is_varargs)
5658 vi->fullsize = ~0;
5659 insert_vi_for_tree (vi->decl, vi);
5661 prev_vi = vi;
5663 /* Create a variable for things the function clobbers and one for
5664 things the function uses. */
5666 varinfo_t clobbervi, usevi;
5667 const char *newname;
5668 char *tempname;
5670 tempname = xasprintf ("%s.clobber", name);
5671 newname = ggc_strdup (tempname);
5672 free (tempname);
5674 clobbervi = new_var_info (NULL, newname, false);
5675 clobbervi->offset = fi_clobbers;
5676 clobbervi->size = 1;
5677 clobbervi->fullsize = vi->fullsize;
5678 clobbervi->is_full_var = true;
5679 clobbervi->is_global_var = false;
5681 gcc_assert (prev_vi->offset < clobbervi->offset);
5682 prev_vi->next = clobbervi->id;
5683 prev_vi = clobbervi;
5685 tempname = xasprintf ("%s.use", name);
5686 newname = ggc_strdup (tempname);
5687 free (tempname);
5689 usevi = new_var_info (NULL, newname, false);
5690 usevi->offset = fi_uses;
5691 usevi->size = 1;
5692 usevi->fullsize = vi->fullsize;
5693 usevi->is_full_var = true;
5694 usevi->is_global_var = false;
5696 gcc_assert (prev_vi->offset < usevi->offset);
5697 prev_vi->next = usevi->id;
5698 prev_vi = usevi;
5701 /* And one for the static chain. */
5702 if (fn->static_chain_decl != NULL_TREE)
5704 varinfo_t chainvi;
5705 const char *newname;
5706 char *tempname;
5708 tempname = xasprintf ("%s.chain", name);
5709 newname = ggc_strdup (tempname);
5710 free (tempname);
5712 chainvi = new_var_info (fn->static_chain_decl, newname, false);
5713 chainvi->offset = fi_static_chain;
5714 chainvi->size = 1;
5715 chainvi->fullsize = vi->fullsize;
5716 chainvi->is_full_var = true;
5717 chainvi->is_global_var = false;
5719 insert_vi_for_tree (fn->static_chain_decl, chainvi);
5721 if (nonlocal_p
5722 && chainvi->may_have_pointers)
5723 make_constraint_from (chainvi, nonlocal_id);
5725 gcc_assert (prev_vi->offset < chainvi->offset);
5726 prev_vi->next = chainvi->id;
5727 prev_vi = chainvi;
5730 /* Create a variable for the return var. */
5731 if (DECL_RESULT (decl) != NULL
5732 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
5734 varinfo_t resultvi;
5735 const char *newname;
5736 char *tempname;
5737 tree resultdecl = decl;
5739 if (DECL_RESULT (decl))
5740 resultdecl = DECL_RESULT (decl);
5742 tempname = xasprintf ("%s.result", name);
5743 newname = ggc_strdup (tempname);
5744 free (tempname);
5746 resultvi = new_var_info (resultdecl, newname, false);
5747 resultvi->offset = fi_result;
5748 resultvi->size = 1;
5749 resultvi->fullsize = vi->fullsize;
5750 resultvi->is_full_var = true;
5751 if (DECL_RESULT (decl))
5752 resultvi->may_have_pointers = true;
5754 if (DECL_RESULT (decl))
5755 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
5757 if (nonlocal_p
5758 && DECL_RESULT (decl)
5759 && DECL_BY_REFERENCE (DECL_RESULT (decl)))
5760 make_constraint_from (resultvi, nonlocal_id);
5762 gcc_assert (prev_vi->offset < resultvi->offset);
5763 prev_vi->next = resultvi->id;
5764 prev_vi = resultvi;
5767 /* We also need to make function return values escape. Nothing
5768 escapes by returning from main though. */
5769 if (nonlocal_p
5770 && !MAIN_NAME_P (DECL_NAME (decl)))
5772 varinfo_t fi, rvi;
5773 fi = lookup_vi_for_tree (decl);
5774 rvi = first_vi_for_offset (fi, fi_result);
5775 if (rvi && rvi->offset == fi_result)
5776 make_copy_constraint (get_varinfo (escaped_id), rvi->id);
5779 /* Set up variables for each argument. */
5780 arg = DECL_ARGUMENTS (decl);
5781 for (i = 0; i < num_args; i++)
5783 varinfo_t argvi;
5784 const char *newname;
5785 char *tempname;
5786 tree argdecl = decl;
5788 if (arg)
5789 argdecl = arg;
5791 tempname = xasprintf ("%s.arg%d", name, i);
5792 newname = ggc_strdup (tempname);
5793 free (tempname);
5795 argvi = new_var_info (argdecl, newname, false);
5796 argvi->offset = fi_parm_base + i;
5797 argvi->size = 1;
5798 argvi->is_full_var = true;
5799 argvi->fullsize = vi->fullsize;
5800 if (arg)
5801 argvi->may_have_pointers = true;
5803 if (arg)
5804 insert_vi_for_tree (arg, argvi);
5806 if (nonlocal_p
5807 && argvi->may_have_pointers)
5808 make_constraint_from (argvi, nonlocal_id);
5810 gcc_assert (prev_vi->offset < argvi->offset);
5811 prev_vi->next = argvi->id;
5812 prev_vi = argvi;
5813 if (arg)
5814 arg = DECL_CHAIN (arg);
5817 /* Add one representative for all further args. */
5818 if (is_varargs)
5820 varinfo_t argvi;
5821 const char *newname;
5822 char *tempname;
5823 tree decl;
5825 tempname = xasprintf ("%s.varargs", name);
5826 newname = ggc_strdup (tempname);
5827 free (tempname);
5829       /* We need something that can be pointed to for va_start.  */
5830 decl = build_fake_var_decl (ptr_type_node);
5832 argvi = new_var_info (decl, newname, false);
5833 argvi->offset = fi_parm_base + num_args;
5834 argvi->size = ~0;
5835 argvi->is_full_var = true;
5836 argvi->is_heap_var = true;
5837 argvi->fullsize = vi->fullsize;
5839 if (nonlocal_p
5840 && argvi->may_have_pointers)
5841 make_constraint_from (argvi, nonlocal_id);
5843 gcc_assert (prev_vi->offset < argvi->offset);
5844 prev_vi->next = argvi->id;
5845 prev_vi = argvi;
5848 return vi;
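/* Illustrative sketch (informal): for

     int *foo (int *x, ...) { ... }

   create_function_info_for builds a varinfo chain whose "fields" live at
   the offsets fi_clobbers (foo.clobber), fi_uses (foo.use), fi_result
   (foo.result), fi_parm_base + 0 (foo.arg0) and a trailing foo.varargs
   representative, plus foo.chain when a static chain is used.  These are
   the parts addressed via get_function_part_constraint elsewhere.  */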
5852 /* Return true if FIELDSTACK contains fields that overlap.
5853 FIELDSTACK is assumed to be sorted by offset. */
5855 static bool
5856 check_for_overlaps (vec<fieldoff_s> fieldstack)
5858 fieldoff_s *fo = NULL;
5859 unsigned int i;
5860 HOST_WIDE_INT lastoffset = -1;
5862 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5864 if (fo->offset == lastoffset)
5865 return true;
5866 lastoffset = fo->offset;
5868 return false;
5871 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5872 This will also create any varinfo structures necessary for fields
5873 of DECL. DECL is a function parameter if HANDLE_PARAM is set.
5874 HANDLED_STRUCT_TYPE is used to register struct types reached by following
5875 restrict pointers. This is needed to prevent infinite recursion. */
5877 static varinfo_t
5878 create_variable_info_for_1 (tree decl, const char *name, bool add_id,
5879 bool handle_param, bitmap handled_struct_type)
5881 varinfo_t vi, newvi;
5882 tree decl_type = TREE_TYPE (decl);
5883 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5884 auto_vec<fieldoff_s> fieldstack;
5885 fieldoff_s *fo;
5886 unsigned int i;
5888 if (!declsize
5889 || !tree_fits_uhwi_p (declsize))
5891 vi = new_var_info (decl, name, add_id);
5892 vi->offset = 0;
5893 vi->size = ~0;
5894 vi->fullsize = ~0;
5895 vi->is_unknown_size_var = true;
5896 vi->is_full_var = true;
5897 vi->may_have_pointers = true;
5898 return vi;
5901 /* Collect field information. */
5902 if (use_field_sensitive
5903 && var_can_have_subvars (decl)
5904 /* ??? Force us to not use subfields for globals in IPA mode.
5905 Else we'd have to parse arbitrary initializers. */
5906 && !(in_ipa_mode
5907 && is_global_var (decl)))
5909 fieldoff_s *fo = NULL;
5910 bool notokay = false;
5911 unsigned int i;
5913 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
5915 for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
5916 if (fo->has_unknown_size
5917 || fo->offset < 0)
5919 notokay = true;
5920 break;
5923 /* We can't sort them if we have a field with a variable sized type,
5924 which will make notokay = true. In that case, we are going to return
5925 without creating varinfos for the fields anyway, so sorting them is a
5926 waste to boot. */
5927 if (!notokay)
5929 sort_fieldstack (fieldstack);
5930       /* Due to some C++ FE issues, like PR 22488, we might end up with
5931 	 what appear to be overlapping fields even though they,
5932 	 in reality, do not overlap.  Until the C++ FE is fixed,
5933 	 we will simply disable field-sensitivity for these cases.  */
5934 notokay = check_for_overlaps (fieldstack);
5937 if (notokay)
5938 fieldstack.release ();
5941 /* If we didn't end up collecting sub-variables create a full
5942 variable for the decl. */
5943 if (fieldstack.length () == 0
5944 || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5946 vi = new_var_info (decl, name, add_id);
5947 vi->offset = 0;
5948 vi->may_have_pointers = true;
5949 vi->fullsize = tree_to_uhwi (declsize);
5950 vi->size = vi->fullsize;
5951 vi->is_full_var = true;
5952 if (POINTER_TYPE_P (decl_type)
5953 && TYPE_RESTRICT (decl_type))
5954 vi->only_restrict_pointers = 1;
5955 if (vi->only_restrict_pointers
5956 && !type_contains_placeholder_p (TREE_TYPE (decl_type))
5957 && handle_param
5958 && !bitmap_bit_p (handled_struct_type,
5959 TYPE_UID (TREE_TYPE (decl_type))))
5961 varinfo_t rvi;
5962 tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type));
5963 DECL_EXTERNAL (heapvar) = 1;
5964 if (var_can_have_subvars (heapvar))
5965 bitmap_set_bit (handled_struct_type,
5966 TYPE_UID (TREE_TYPE (decl_type)));
5967 rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
5968 true, handled_struct_type);
5969 if (var_can_have_subvars (heapvar))
5970 bitmap_clear_bit (handled_struct_type,
5971 TYPE_UID (TREE_TYPE (decl_type)));
5972 rvi->is_restrict_var = 1;
5973 insert_vi_for_tree (heapvar, rvi);
5974 make_constraint_from (vi, rvi->id);
5975 make_param_constraints (rvi);
5977 fieldstack.release ();
5978 return vi;
5981 vi = new_var_info (decl, name, add_id);
5982 vi->fullsize = tree_to_uhwi (declsize);
5983 if (fieldstack.length () == 1)
5984 vi->is_full_var = true;
5985 for (i = 0, newvi = vi;
5986 fieldstack.iterate (i, &fo);
5987 ++i, newvi = vi_next (newvi))
5989 const char *newname = NULL;
5990 char *tempname;
5992 if (dump_file)
5994 if (fieldstack.length () != 1)
5996 tempname
5997 = xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
5998 "+" HOST_WIDE_INT_PRINT_DEC, name,
5999 fo->offset, fo->size);
6000 newname = ggc_strdup (tempname);
6001 free (tempname);
6004 else
6005 newname = "NULL";
6007 if (newname)
6008 newvi->name = newname;
6009 newvi->offset = fo->offset;
6010 newvi->size = fo->size;
6011 newvi->fullsize = vi->fullsize;
6012 newvi->may_have_pointers = fo->may_have_pointers;
6013 newvi->only_restrict_pointers = fo->only_restrict_pointers;
6014 if (handle_param
6015 && newvi->only_restrict_pointers
6016 && !type_contains_placeholder_p (fo->restrict_pointed_type)
6017 && !bitmap_bit_p (handled_struct_type,
6018 TYPE_UID (fo->restrict_pointed_type)))
6020 varinfo_t rvi;
6021 tree heapvar = build_fake_var_decl (fo->restrict_pointed_type);
6022 DECL_EXTERNAL (heapvar) = 1;
6023 if (var_can_have_subvars (heapvar))
6024 bitmap_set_bit (handled_struct_type,
6025 TYPE_UID (fo->restrict_pointed_type));
6026 rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
6027 true, handled_struct_type);
6028 if (var_can_have_subvars (heapvar))
6029 bitmap_clear_bit (handled_struct_type,
6030 TYPE_UID (fo->restrict_pointed_type));
6031 rvi->is_restrict_var = 1;
6032 insert_vi_for_tree (heapvar, rvi);
6033 make_constraint_from (newvi, rvi->id);
6034 make_param_constraints (rvi);
6036 if (i + 1 < fieldstack.length ())
6038 varinfo_t tem = new_var_info (decl, name, false);
6039 newvi->next = tem->id;
6040 tem->head = vi->id;
6044 return vi;
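/* An illustrative sketch (hypothetical source, not from this pass): on a
   target with 32-bit int and 64-bit pointers, a declaration like

     struct S { int *p; int i; int *q; } s;

   is split by the code above into a chain of sub-variables whose dump
   names follow the "%s.offset+size" scheme used here, roughly "s.0+64",
   "s.64+32" and "s.128+64".  Exact offsets, padding and names depend on
   the target ABI and on whether dumping is enabled.  */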
6047 static unsigned int
6048 create_variable_info_for (tree decl, const char *name, bool add_id)
6050 varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, false, NULL);
6051 unsigned int id = vi->id;
6053 insert_vi_for_tree (decl, vi);
6055 if (!VAR_P (decl))
6056 return id;
6058 /* Create initial constraints for globals. */
6059 for (; vi; vi = vi_next (vi))
6061 if (!vi->may_have_pointers
6062 || !vi->is_global_var)
6063 continue;
6065 /* Mark global restrict qualified pointers. */
6066 if ((POINTER_TYPE_P (TREE_TYPE (decl))
6067 && TYPE_RESTRICT (TREE_TYPE (decl)))
6068 || vi->only_restrict_pointers)
6070 varinfo_t rvi
6071 = make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT",
6072 true);
6073 /* ??? For now exclude reads from globals as restrict sources
6074 if those are not (indirectly) from incoming parameters. */
6075 rvi->is_restrict_var = false;
6076 continue;
6079 /* In non-IPA mode the initializer from nonlocal is all we need. */
6080 if (!in_ipa_mode
6081 || DECL_HARD_REGISTER (decl))
6082 make_copy_constraint (vi, nonlocal_id);
6084 /* In IPA mode parse the initializer and generate proper constraints
6085 for it. */
6086 else
6088 varpool_node *vnode = varpool_node::get (decl);
6090 /* For escaped variables initialize them from nonlocal. */
6091 if (!vnode->all_refs_explicit_p ())
6092 make_copy_constraint (vi, nonlocal_id);
6094 /* If this is a global variable with an initializer and we are in
6095 IPA mode generate constraints for it. */
6096 ipa_ref *ref;
6097 for (unsigned idx = 0; vnode->iterate_reference (idx, ref); ++idx)
6099 auto_vec<ce_s> rhsc;
6100 struct constraint_expr lhs, *rhsp;
6101 unsigned i;
6102 get_constraint_for_address_of (ref->referred->decl, &rhsc);
6103 lhs.var = vi->id;
6104 lhs.offset = 0;
6105 lhs.type = SCALAR;
6106 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6107 process_constraint (new_constraint (lhs, *rhsp));
6108 /* If this is a variable that escapes from the unit,
6109 the initializer escapes as well. */
6110 if (!vnode->all_refs_explicit_p ())
6112 lhs.var = escaped_id;
6113 lhs.offset = 0;
6114 lhs.type = SCALAR;
6115 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6116 process_constraint (new_constraint (lhs, *rhsp));
6122 return id;
6125 /* Print out the points-to solution for VAR to FILE. */
6127 static void
6128 dump_solution_for_var (FILE *file, unsigned int var)
6130 varinfo_t vi = get_varinfo (var);
6131 unsigned int i;
6132 bitmap_iterator bi;
6134 /* Dump the solution for unified vars anyway; this avoids difficulties
6135 in scanning dumps in the testsuite. */
6136 fprintf (file, "%s = { ", vi->name);
6137 vi = get_varinfo (find (var));
6138 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6139 fprintf (file, "%s ", get_varinfo (i)->name);
6140 fprintf (file, "}");
6142 /* But note when the variable was unified. */
6143 if (vi->id != var)
6144 fprintf (file, " same as %s", vi->name);
6146 fprintf (file, "\n");
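/* With the fprintf calls above, a dump line has the shape

     x = { NULL NONLOCAL y }

   optionally followed by " same as z" when the variable has been unified
   with another one.  (Purely illustrative; the actual variable names come
   from the function being analyzed.)  */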
6149 /* Print the points-to solution for VAR to stderr. */
6151 DEBUG_FUNCTION void
6152 debug_solution_for_var (unsigned int var)
6154 dump_solution_for_var (stderr, var);
6157 /* Register the constraints for function parameter related VI. */
6159 static void
6160 make_param_constraints (varinfo_t vi)
6162 for (; vi; vi = vi_next (vi))
6164 if (vi->only_restrict_pointers)
6166 else if (vi->may_have_pointers)
6167 make_constraint_from (vi, nonlocal_id);
6169 if (vi->is_full_var)
6170 break;
6174 /* Create varinfo structures for all of the variables in the
6175 function for intraprocedural mode. */
6177 static void
6178 intra_create_variable_infos (struct function *fn)
6180 tree t;
6181 bitmap handled_struct_type = NULL;
6183 /* For each incoming pointer argument arg, create the constraint ARG
6184 = NONLOCAL or a dummy variable if it is a restrict qualified
6185 passed-by-reference argument. */
6186 for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
6188 if (handled_struct_type == NULL)
6189 handled_struct_type = BITMAP_ALLOC (NULL);
6191 varinfo_t p
6192 = create_variable_info_for_1 (t, alias_get_name (t), false, true,
6193 handled_struct_type);
6194 insert_vi_for_tree (t, p);
6196 make_param_constraints (p);
6199 if (handled_struct_type != NULL)
6200 BITMAP_FREE (handled_struct_type);
6202 /* Add a constraint for a result decl that is passed by reference. */
6203 if (DECL_RESULT (fn->decl)
6204 && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
6206 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));
6208 for (p = result_vi; p; p = vi_next (p))
6209 make_constraint_from (p, nonlocal_id);
6212 /* Add a constraint for the incoming static chain parameter. */
6213 if (fn->static_chain_decl != NULL_TREE)
6215 varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);
6217 for (p = chain_vi; p; p = vi_next (p))
6218 make_constraint_from (p, nonlocal_id);
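/* An illustrative source-level example (hypothetical, not from this file):
   for

     void f (int *p, int * __restrict q);

   the parameter loop above lets the plain pointer P point to NONLOCAL,
   while the restrict-qualified Q instead gets a fresh PARM_NOALIAS
   variable to point to, so that accesses through Q can later be
   disambiguated from other memory.  */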
6222 /* Structure used to put solution bitmaps in a hashtable so they can
6223 be shared among variables with the same points-to set. */
6225 typedef struct shared_bitmap_info
6227 bitmap pt_vars;
6228 hashval_t hashcode;
6229 } *shared_bitmap_info_t;
6230 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
6232 /* Shared_bitmap hashtable helpers. */
6234 struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
6236 static inline hashval_t hash (const shared_bitmap_info *);
6237 static inline bool equal (const shared_bitmap_info *,
6238 const shared_bitmap_info *);
6241 /* Hash function for a shared_bitmap_info_t */
6243 inline hashval_t
6244 shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
6246 return bi->hashcode;
6249 /* Equality function for two shared_bitmap_info_t's. */
6251 inline bool
6252 shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
6253 const shared_bitmap_info *sbi2)
6255 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
6258 /* Shared_bitmap hashtable. */
6260 static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
6262 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
6263 existing instance if there is one, NULL otherwise. */
6265 static bitmap
6266 shared_bitmap_lookup (bitmap pt_vars)
6268 shared_bitmap_info **slot;
6269 struct shared_bitmap_info sbi;
6271 sbi.pt_vars = pt_vars;
6272 sbi.hashcode = bitmap_hash (pt_vars);
6274 slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
6275 if (!slot)
6276 return NULL;
6277 else
6278 return (*slot)->pt_vars;
6282 /* Add a bitmap to the shared bitmap hashtable. */
6284 static void
6285 shared_bitmap_add (bitmap pt_vars)
6287 shared_bitmap_info **slot;
6288 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
6290 sbi->pt_vars = pt_vars;
6291 sbi->hashcode = bitmap_hash (pt_vars);
6293 slot = shared_bitmap_table->find_slot (sbi, INSERT);
6294 gcc_assert (!*slot);
6295 *slot = sbi;
6299 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
6301 static void
6302 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
6303 tree fndecl)
6305 unsigned int i;
6306 bitmap_iterator bi;
6307 varinfo_t escaped_vi = get_varinfo (find (escaped_id));
6308 bool everything_escaped
6309 = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);
6311 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
6313 varinfo_t vi = get_varinfo (i);
6315 /* The only artificial variables that are allowed in a may-alias
6316 set are heap variables. */
6317 if (vi->is_artificial_var && !vi->is_heap_var)
6318 continue;
6320 if (everything_escaped
6321 || (escaped_vi->solution
6322 && bitmap_bit_p (escaped_vi->solution, i)))
6324 pt->vars_contains_escaped = true;
6325 pt->vars_contains_escaped_heap = vi->is_heap_var;
6328 if (vi->is_restrict_var)
6329 pt->vars_contains_restrict = true;
6331 if (VAR_P (vi->decl)
6332 || TREE_CODE (vi->decl) == PARM_DECL
6333 || TREE_CODE (vi->decl) == RESULT_DECL)
6335 /* If we are in IPA mode we will not recompute points-to
6336 sets after inlining so make sure they stay valid. */
6337 if (in_ipa_mode
6338 && !DECL_PT_UID_SET_P (vi->decl))
6339 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
6341 /* Add the decl to the points-to set. Note that the points-to
6342 set contains global variables. */
6343 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
6344 if (vi->is_global_var
6345 /* In IPA mode the escaped_heap trick doesn't work as
6346 ESCAPED is escaped from the unit but
6347 pt_solution_includes_global needs to answer true for
6348 all variables not automatic within a function.
6349 For the same reason is_global_var is not the
6350 correct flag to track - local variables from other
6351 functions also need to be considered global.
6352 Conveniently all HEAP vars are not put in function
6353 scope. */
6354 || (in_ipa_mode
6355 && fndecl
6356 && ! auto_var_in_fn_p (vi->decl, fndecl)))
6357 pt->vars_contains_nonlocal = true;
6359 /* If we have a variable that is interposable record that fact
6360 for pointer comparison simplification. */
6361 if (VAR_P (vi->decl)
6362 && (TREE_STATIC (vi->decl) || DECL_EXTERNAL (vi->decl))
6363 && ! decl_binds_to_current_def_p (vi->decl))
6364 pt->vars_contains_interposable = true;
6367 else if (TREE_CODE (vi->decl) == FUNCTION_DECL
6368 || TREE_CODE (vi->decl) == LABEL_DECL)
6370 /* Nothing should read/write from/to code so we can
6371 save bits by not including them in the points-to bitmaps.
6372 Still mark the points-to set as containing global memory
6373 to make code-patching possible - see PR70128. */
6374 pt->vars_contains_nonlocal = true;
6380 /* Compute and return the points-to solution for the variable ORIG_VI. */
6382 static struct pt_solution
6383 find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
6385 unsigned int i;
6386 bitmap_iterator bi;
6387 bitmap finished_solution;
6388 bitmap result;
6389 varinfo_t vi;
6390 struct pt_solution *pt;
6392 /* This variable may have been collapsed; let's get the real
6393 variable. */
6394 vi = get_varinfo (find (orig_vi->id));
6396 /* See if we have already computed the solution and return it. */
6397 pt_solution **slot = &final_solutions->get_or_insert (vi);
6398 if (*slot != NULL)
6399 return **slot;
6401 *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
6402 memset (pt, 0, sizeof (struct pt_solution));
6404 /* Translate artificial variables into SSA_NAME_PTR_INFO
6405 attributes. */
6406 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6408 varinfo_t vi = get_varinfo (i);
6410 if (vi->is_artificial_var)
6412 if (vi->id == nothing_id)
6413 pt->null = 1;
6414 else if (vi->id == escaped_id)
6416 if (in_ipa_mode)
6417 pt->ipa_escaped = 1;
6418 else
6419 pt->escaped = 1;
6420 /* Expand some special vars of ESCAPED in-place here. */
6421 varinfo_t evi = get_varinfo (find (escaped_id));
6422 if (bitmap_bit_p (evi->solution, nonlocal_id))
6423 pt->nonlocal = 1;
6425 else if (vi->id == nonlocal_id)
6426 pt->nonlocal = 1;
6427 else if (vi->is_heap_var)
6428 /* We represent heapvars in the points-to set properly. */
6430 else if (vi->id == string_id)
6431 /* Nobody cares - STRING_CSTs are read-only entities. */
6433 else if (vi->id == anything_id
6434 || vi->id == integer_id)
6435 pt->anything = 1;
6439 /* Instead of doing extra work, simply do not create
6440 elaborate points-to information for pt_anything pointers. */
6441 if (pt->anything)
6442 return *pt;
6444 /* Share the final set of variables when possible. */
6445 finished_solution = BITMAP_GGC_ALLOC ();
6446 stats.points_to_sets_created++;
6448 set_uids_in_ptset (finished_solution, vi->solution, pt, fndecl);
6449 result = shared_bitmap_lookup (finished_solution);
6450 if (!result)
6452 shared_bitmap_add (finished_solution);
6453 pt->vars = finished_solution;
6455 else
6457 pt->vars = result;
6458 bitmap_clear (finished_solution);
6461 return *pt;
6464 /* Given a pointer variable P, fill in its points-to set. */
6466 static void
6467 find_what_p_points_to (tree fndecl, tree p)
6469 struct ptr_info_def *pi;
6470 tree lookup_p = p;
6471 varinfo_t vi;
6472 bool nonnull = get_ptr_nonnull (p);
6474 /* For parameters, get at the points-to set for the actual parm
6475 decl. */
6476 if (TREE_CODE (p) == SSA_NAME
6477 && SSA_NAME_IS_DEFAULT_DEF (p)
6478 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
6479 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
6480 lookup_p = SSA_NAME_VAR (p);
6482 vi = lookup_vi_for_tree (lookup_p);
6483 if (!vi)
6484 return;
6486 pi = get_ptr_info (p);
6487 pi->pt = find_what_var_points_to (fndecl, vi);
6488 /* Conservatively set the points-to-NULL flag from PTA to true. */
6489 pi->pt.null = 1;
6490 /* Preserve pointer nonnull computed by VRP. See get_ptr_nonnull
6491 in gcc/tree-ssanames.c for more information. */
6492 if (nonnull)
6493 set_ptr_nonnull (p);
6497 /* Query statistics for points-to solutions. */
6499 static struct {
6500 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
6501 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
6502 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
6503 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
6504 } pta_stats;
6506 void
6507 dump_pta_stats (FILE *s)
6509 fprintf (s, "\nPTA query stats:\n");
6510 fprintf (s, " pt_solution_includes: "
6511 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6512 HOST_WIDE_INT_PRINT_DEC" queries\n",
6513 pta_stats.pt_solution_includes_no_alias,
6514 pta_stats.pt_solution_includes_no_alias
6515 + pta_stats.pt_solution_includes_may_alias);
6516 fprintf (s, " pt_solutions_intersect: "
6517 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6518 HOST_WIDE_INT_PRINT_DEC" queries\n",
6519 pta_stats.pt_solutions_intersect_no_alias,
6520 pta_stats.pt_solutions_intersect_no_alias
6521 + pta_stats.pt_solutions_intersect_may_alias);
6525 /* Reset the points-to solution *PT to a conservative default
6526 (point to anything). */
6528 void
6529 pt_solution_reset (struct pt_solution *pt)
6531 memset (pt, 0, sizeof (struct pt_solution));
6532 pt->anything = true;
6533 pt->null = true;
6536 /* Set the points-to solution *PT to point only to the variables
6537 in VARS. VARS_CONTAINS_NONLOCAL specifies whether VARS
6538 contains global (nonlocal) variables; whether the solution
6539 contains escaped variables is derived from the ESCAPED solution. */
6541 void
6542 pt_solution_set (struct pt_solution *pt, bitmap vars,
6543 bool vars_contains_nonlocal)
6545 memset (pt, 0, sizeof (struct pt_solution));
6546 pt->vars = vars;
6547 pt->vars_contains_nonlocal = vars_contains_nonlocal;
6548 pt->vars_contains_escaped
6549 = (cfun->gimple_df->escaped.anything
6550 || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
6553 /* Set the points-to solution *PT to point only to the variable VAR. */
6555 void
6556 pt_solution_set_var (struct pt_solution *pt, tree var)
6558 memset (pt, 0, sizeof (struct pt_solution));
6559 pt->vars = BITMAP_GGC_ALLOC ();
6560 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6561 pt->vars_contains_nonlocal = is_global_var (var);
6562 pt->vars_contains_escaped
6563 = (cfun->gimple_df->escaped.anything
6564 || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
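/* An illustrative use from a hypothetical caller: to record that an
   access can only touch the declaration VAR and query that later:

     struct pt_solution pt;
     pt_solution_set_var (&pt, var);
     ...
     if (!pt_solution_includes (&pt, other_decl))
       the access cannot touch OTHER_DECL at all.

   VAR and OTHER_DECL stand for DECL trees of the caller.  */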
6567 /* Computes the union of the points-to solutions *DEST and *SRC and
6568 stores the result in *DEST. This changes the points-to bitmap
6569 of *DEST and thus may not be used if that might be shared.
6570 The points-to bitmap of *SRC and *DEST will not be shared after
6571 this function if they were not before. */
6573 static void
6574 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6576 dest->anything |= src->anything;
6577 if (dest->anything)
6579 pt_solution_reset (dest);
6580 return;
6583 dest->nonlocal |= src->nonlocal;
6584 dest->escaped |= src->escaped;
6585 dest->ipa_escaped |= src->ipa_escaped;
6586 dest->null |= src->null;
6587 dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
6588 dest->vars_contains_escaped |= src->vars_contains_escaped;
6589 dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
6590 if (!src->vars)
6591 return;
6593 if (!dest->vars)
6594 dest->vars = BITMAP_GGC_ALLOC ();
6595 bitmap_ior_into (dest->vars, src->vars);
6598 /* Return true if the points-to solution *PT is empty. */
6600 bool
6601 pt_solution_empty_p (struct pt_solution *pt)
6603 if (pt->anything
6604 || pt->nonlocal)
6605 return false;
6607 if (pt->vars
6608 && !bitmap_empty_p (pt->vars))
6609 return false;
6611 /* If the solution includes ESCAPED, check if that is empty. */
6612 if (pt->escaped
6613 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6614 return false;
6616 /* If the solution includes the IPA-mode ESCAPED, check if that is empty. */
6617 if (pt->ipa_escaped
6618 && !pt_solution_empty_p (&ipa_escaped_pt))
6619 return false;
6621 return true;
6624 /* Return true if the points-to solution *PT points only to a single var
6625 (plus possibly NULL), and return that var's uid in *UID. */
6627 bool
6628 pt_solution_singleton_or_null_p (struct pt_solution *pt, unsigned *uid)
6630 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6631 || pt->vars == NULL
6632 || !bitmap_single_bit_set_p (pt->vars))
6633 return false;
6635 *uid = bitmap_first_set_bit (pt->vars);
6636 return true;
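/* An illustrative use from a hypothetical caller, where PI is a
   pointer's ptr_info_def:

     unsigned uid;
     if (pt_solution_singleton_or_null_p (&pi->pt, &uid))
       the pointer can only point to the variable whose DECL_PT_UID
       is UID (or to NULL).  */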
6639 /* Return true if the points-to solution *PT includes global memory. */
6641 bool
6642 pt_solution_includes_global (struct pt_solution *pt)
6644 if (pt->anything
6645 || pt->nonlocal
6646 || pt->vars_contains_nonlocal
6647 /* The following is a hack to make the malloc escape hack work.
6648 In reality we'd need different sets for escaped-through-return
6649 and escaped-to-callees and passes would need to be updated. */
6650 || pt->vars_contains_escaped_heap)
6651 return true;
6653 /* 'escaped' is also a placeholder so we have to look into it. */
6654 if (pt->escaped)
6655 return pt_solution_includes_global (&cfun->gimple_df->escaped);
6657 if (pt->ipa_escaped)
6658 return pt_solution_includes_global (&ipa_escaped_pt);
6660 return false;
6663 /* Return true if the points-to solution *PT includes the variable
6664 declaration DECL. */
6666 static bool
6667 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
6669 if (pt->anything)
6670 return true;
6672 if (pt->nonlocal
6673 && is_global_var (decl))
6674 return true;
6676 if (pt->vars
6677 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
6678 return true;
6680 /* If the solution includes ESCAPED, check it. */
6681 if (pt->escaped
6682 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
6683 return true;
6685 /* If the solution includes the IPA-mode ESCAPED, check it. */
6686 if (pt->ipa_escaped
6687 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
6688 return true;
6690 return false;
6693 bool
6694 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6696 bool res = pt_solution_includes_1 (pt, decl);
6697 if (res)
6698 ++pta_stats.pt_solution_includes_may_alias;
6699 else
6700 ++pta_stats.pt_solution_includes_no_alias;
6701 return res;
6704 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
6705 intersection. */
6707 static bool
6708 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
6710 if (pt1->anything || pt2->anything)
6711 return true;
6713 /* If either points to unknown global memory and the other points to
6714 any global memory they alias. */
6715 if ((pt1->nonlocal
6716 && (pt2->nonlocal
6717 || pt2->vars_contains_nonlocal))
6718 || (pt2->nonlocal
6719 && pt1->vars_contains_nonlocal))
6720 return true;
6722 /* If either points to all escaped memory and the other points to
6723 any escaped memory they alias. */
6724 if ((pt1->escaped
6725 && (pt2->escaped
6726 || pt2->vars_contains_escaped))
6727 || (pt2->escaped
6728 && pt1->vars_contains_escaped))
6729 return true;
6731 /* Check the escaped solution if required.
6732 ??? Do we need to check the local against the IPA escaped sets? */
6733 if ((pt1->ipa_escaped || pt2->ipa_escaped)
6734 && !pt_solution_empty_p (&ipa_escaped_pt))
6736 /* If both point to escaped memory and that solution
6737 is not empty they alias. */
6738 if (pt1->ipa_escaped && pt2->ipa_escaped)
6739 return true;
6741 /* If either points to escaped memory see if the escaped solution
6742 intersects with the other. */
6743 if ((pt1->ipa_escaped
6744 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
6745 || (pt2->ipa_escaped
6746 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
6747 return true;
6750 /* Now both pointers alias if their points-to solution intersects. */
6751 return (pt1->vars
6752 && pt2->vars
6753 && bitmap_intersect_p (pt1->vars, pt2->vars));
6756 bool
6757 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6759 bool res = pt_solutions_intersect_1 (pt1, pt2);
6760 if (res)
6761 ++pta_stats.pt_solutions_intersect_may_alias;
6762 else
6763 ++pta_stats.pt_solutions_intersect_no_alias;
6764 return res;
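/* An illustrative use from a hypothetical caller such as an alias
   oracle: given two pointer SSA names PTR1 and PTR2 with computed
   points-to information, a conservative may-alias query is

     struct ptr_info_def *pi1 = SSA_NAME_PTR_INFO (ptr1);
     struct ptr_info_def *pi2 = SSA_NAME_PTR_INFO (ptr2);
     if (pi1 && pi2
         && !pt_solutions_intersect (&pi1->pt, &pi2->pt))
       the two dereferences cannot access the same memory.  */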
6768 /* Dump points-to information to OUTFILE. */
6770 static void
6771 dump_sa_points_to_info (FILE *outfile)
6773 unsigned int i;
6775 fprintf (outfile, "\nPoints-to sets\n\n");
6777 if (dump_flags & TDF_STATS)
6779 fprintf (outfile, "Stats:\n");
6780 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6781 fprintf (outfile, "Non-pointer vars: %d\n",
6782 stats.nonpointer_vars);
6783 fprintf (outfile, "Statically unified vars: %d\n",
6784 stats.unified_vars_static);
6785 fprintf (outfile, "Dynamically unified vars: %d\n",
6786 stats.unified_vars_dynamic);
6787 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6788 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6789 fprintf (outfile, "Number of implicit edges: %d\n",
6790 stats.num_implicit_edges);
6793 for (i = 1; i < varmap.length (); i++)
6795 varinfo_t vi = get_varinfo (i);
6796 if (!vi->may_have_pointers)
6797 continue;
6798 dump_solution_for_var (outfile, i);
6803 /* Debug points-to information to stderr. */
6805 DEBUG_FUNCTION void
6806 debug_sa_points_to_info (void)
6808 dump_sa_points_to_info (stderr);
6812 /* Initialize the always-existing constraint variables NULL, ANYTHING,
6813 STRING, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER. */
6815 static void
6816 init_base_vars (void)
6818 struct constraint_expr lhs, rhs;
6819 varinfo_t var_anything;
6820 varinfo_t var_nothing;
6821 varinfo_t var_string;
6822 varinfo_t var_escaped;
6823 varinfo_t var_nonlocal;
6824 varinfo_t var_storedanything;
6825 varinfo_t var_integer;
6827 /* Variable ID zero is reserved and should be NULL. */
6828 varmap.safe_push (NULL);
6830 /* Create the NULL variable, used to represent that a variable points
6831 to NULL. */
6832 var_nothing = new_var_info (NULL_TREE, "NULL", false);
6833 gcc_assert (var_nothing->id == nothing_id);
6834 var_nothing->is_artificial_var = 1;
6835 var_nothing->offset = 0;
6836 var_nothing->size = ~0;
6837 var_nothing->fullsize = ~0;
6838 var_nothing->is_special_var = 1;
6839 var_nothing->may_have_pointers = 0;
6840 var_nothing->is_global_var = 0;
6842 /* Create the ANYTHING variable, used to represent that a variable
6843 points to some unknown piece of memory. */
6844 var_anything = new_var_info (NULL_TREE, "ANYTHING", false);
6845 gcc_assert (var_anything->id == anything_id);
6846 var_anything->is_artificial_var = 1;
6847 var_anything->size = ~0;
6848 var_anything->offset = 0;
6849 var_anything->fullsize = ~0;
6850 var_anything->is_special_var = 1;
6852 /* Anything points to anything. This makes deref constraints just
6853 work in the presence of linked list and other p = *p type loops,
6854 by saying that *ANYTHING = ANYTHING. */
6855 lhs.type = SCALAR;
6856 lhs.var = anything_id;
6857 lhs.offset = 0;
6858 rhs.type = ADDRESSOF;
6859 rhs.var = anything_id;
6860 rhs.offset = 0;
6862 /* This specifically does not use process_constraint because
6863 process_constraint ignores all anything = anything constraints, since all
6864 but this one are redundant. */
6865 constraints.safe_push (new_constraint (lhs, rhs));
6867 /* Create the STRING variable, used to represent that a variable
6868 points to a string literal. String literals don't contain
6869 pointers so STRING doesn't point to anything. */
6870 var_string = new_var_info (NULL_TREE, "STRING", false);
6871 gcc_assert (var_string->id == string_id);
6872 var_string->is_artificial_var = 1;
6873 var_string->offset = 0;
6874 var_string->size = ~0;
6875 var_string->fullsize = ~0;
6876 var_string->is_special_var = 1;
6877 var_string->may_have_pointers = 0;
6879 /* Create the ESCAPED variable, used to represent the set of escaped
6880 memory. */
6881 var_escaped = new_var_info (NULL_TREE, "ESCAPED", false);
6882 gcc_assert (var_escaped->id == escaped_id);
6883 var_escaped->is_artificial_var = 1;
6884 var_escaped->offset = 0;
6885 var_escaped->size = ~0;
6886 var_escaped->fullsize = ~0;
6887 var_escaped->is_special_var = 0;
6889 /* Create the NONLOCAL variable, used to represent the set of nonlocal
6890 memory. */
6891 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL", false);
6892 gcc_assert (var_nonlocal->id == nonlocal_id);
6893 var_nonlocal->is_artificial_var = 1;
6894 var_nonlocal->offset = 0;
6895 var_nonlocal->size = ~0;
6896 var_nonlocal->fullsize = ~0;
6897 var_nonlocal->is_special_var = 1;
6899 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
6900 lhs.type = SCALAR;
6901 lhs.var = escaped_id;
6902 lhs.offset = 0;
6903 rhs.type = DEREF;
6904 rhs.var = escaped_id;
6905 rhs.offset = 0;
6906 process_constraint (new_constraint (lhs, rhs));
6908 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
6909 whole variable escapes. */
6910 lhs.type = SCALAR;
6911 lhs.var = escaped_id;
6912 lhs.offset = 0;
6913 rhs.type = SCALAR;
6914 rhs.var = escaped_id;
6915 rhs.offset = UNKNOWN_OFFSET;
6916 process_constraint (new_constraint (lhs, rhs));
6918 /* *ESCAPED = NONLOCAL. This is true because we have to assume
6919 everything pointed to by escaped points to what global memory can
6920 point to. */
6921 lhs.type = DEREF;
6922 lhs.var = escaped_id;
6923 lhs.offset = 0;
6924 rhs.type = SCALAR;
6925 rhs.var = nonlocal_id;
6926 rhs.offset = 0;
6927 process_constraint (new_constraint (lhs, rhs));
6929 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
6930 global memory may point to global memory and escaped memory. */
6931 lhs.type = SCALAR;
6932 lhs.var = nonlocal_id;
6933 lhs.offset = 0;
6934 rhs.type = ADDRESSOF;
6935 rhs.var = nonlocal_id;
6936 rhs.offset = 0;
6937 process_constraint (new_constraint (lhs, rhs));
6938 rhs.type = ADDRESSOF;
6939 rhs.var = escaped_id;
6940 rhs.offset = 0;
6941 process_constraint (new_constraint (lhs, rhs));
6943 /* Create the STOREDANYTHING variable, used to represent the set of
6944 variables stored to *ANYTHING. */
6945 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING", false);
6946 gcc_assert (var_storedanything->id == storedanything_id);
6947 var_storedanything->is_artificial_var = 1;
6948 var_storedanything->offset = 0;
6949 var_storedanything->size = ~0;
6950 var_storedanything->fullsize = ~0;
6951 var_storedanything->is_special_var = 0;
6953 /* Create the INTEGER variable, used to represent that a variable points
6954 to what an INTEGER "points to". */
6955 var_integer = new_var_info (NULL_TREE, "INTEGER", false);
6956 gcc_assert (var_integer->id == integer_id);
6957 var_integer->is_artificial_var = 1;
6958 var_integer->size = ~0;
6959 var_integer->fullsize = ~0;
6960 var_integer->offset = 0;
6961 var_integer->is_special_var = 1;
6963 /* INTEGER = ANYTHING, because we don't know where a dereference of
6964 a random integer will point to. */
6965 lhs.type = SCALAR;
6966 lhs.var = integer_id;
6967 lhs.offset = 0;
6968 rhs.type = ADDRESSOF;
6969 rhs.var = anything_id;
6970 rhs.offset = 0;
6971 process_constraint (new_constraint (lhs, rhs));
6974 /* Initialize things necessary to perform PTA */
6976 static void
6977 init_alias_vars (void)
6979 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
6981 bitmap_obstack_initialize (&pta_obstack);
6982 bitmap_obstack_initialize (&oldpta_obstack);
6983 bitmap_obstack_initialize (&predbitmap_obstack);
6985 constraints.create (8);
6986 varmap.create (8);
6987 vi_for_tree = new hash_map<tree, varinfo_t>;
6988 call_stmt_vars = new hash_map<gimple *, varinfo_t>;
6990 memset (&stats, 0, sizeof (stats));
6991 shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
6992 init_base_vars ();
6994 gcc_obstack_init (&fake_var_decl_obstack);
6996 final_solutions = new hash_map<varinfo_t, pt_solution *>;
6997 gcc_obstack_init (&final_solutions_obstack);
7000 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
7001 predecessor edges. */
7003 static void
7004 remove_preds_and_fake_succs (constraint_graph_t graph)
7006 unsigned int i;
7008 /* Clear the implicit ref and address nodes from the successor
7009 lists. */
7010 for (i = 1; i < FIRST_REF_NODE; i++)
7012 if (graph->succs[i])
7013 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
7014 FIRST_REF_NODE * 2);
7017 /* Free the successor list for the non-ref nodes. */
7018 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
7020 if (graph->succs[i])
7021 BITMAP_FREE (graph->succs[i]);
7024 /* Now resize the successor list to the current number of variables
7025 and blow away the predecessor bitmaps. */
7026 graph->size = varmap.length ();
7027 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
7029 free (graph->implicit_preds);
7030 graph->implicit_preds = NULL;
7031 free (graph->preds);
7032 graph->preds = NULL;
7033 bitmap_obstack_release (&predbitmap_obstack);
7036 /* Solve the constraint set. */
7038 static void
7039 solve_constraints (void)
7041 struct scc_info *si;
7043 if (dump_file)
7044 fprintf (dump_file,
7045 "\nCollapsing static cycles and doing variable "
7046 "substitution\n");
7048 init_graph (varmap.length () * 2);
7050 if (dump_file)
7051 fprintf (dump_file, "Building predecessor graph\n");
7052 build_pred_graph ();
7054 if (dump_file)
7055 fprintf (dump_file, "Detecting pointer and location "
7056 "equivalences\n");
7057 si = perform_var_substitution (graph);
7059 if (dump_file)
7060 fprintf (dump_file, "Rewriting constraints and unifying "
7061 "variables\n");
7062 rewrite_constraints (graph, si);
7064 build_succ_graph ();
7066 free_var_substitution_info (si);
7068 /* Attach complex constraints to graph nodes. */
7069 move_complex_constraints (graph);
7071 if (dump_file)
7072 fprintf (dump_file, "Uniting pointer but not location equivalent "
7073 "variables\n");
7074 unite_pointer_equivalences (graph);
7076 if (dump_file)
7077 fprintf (dump_file, "Finding indirect cycles\n");
7078 find_indirect_cycles (graph);
7080 /* Implicit nodes and predecessors are no longer necessary at this
7081 point. */
7082 remove_preds_and_fake_succs (graph);
7084 if (dump_file && (dump_flags & TDF_GRAPH))
7086 fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
7087 "in dot format:\n");
7088 dump_constraint_graph (dump_file);
7089 fprintf (dump_file, "\n\n");
7092 if (dump_file)
7093 fprintf (dump_file, "Solving graph\n");
7095 solve_graph (graph);
7097 if (dump_file && (dump_flags & TDF_GRAPH))
7099 fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
7100 "in dot format:\n");
7101 dump_constraint_graph (dump_file);
7102 fprintf (dump_file, "\n\n");
7105 if (dump_file)
7106 dump_sa_points_to_info (dump_file);
7109 /* Create points-to sets for the current function. See the comments
7110 at the start of the file for an algorithmic overview. */
7112 static void
7113 compute_points_to_sets (void)
7115 basic_block bb;
7116 varinfo_t vi;
7118 timevar_push (TV_TREE_PTA);
7120 init_alias_vars ();
7122 intra_create_variable_infos (cfun);
7124 /* Now walk all statements and build the constraint set. */
7125 FOR_EACH_BB_FN (bb, cfun)
7127 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7128 gsi_next (&gsi))
7130 gphi *phi = gsi.phi ();
7132 if (! virtual_operand_p (gimple_phi_result (phi)))
7133 find_func_aliases (cfun, phi);
7136 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
7137 gsi_next (&gsi))
7139 gimple *stmt = gsi_stmt (gsi);
7141 find_func_aliases (cfun, stmt);
7145 if (dump_file)
7147 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
7148 dump_constraints (dump_file, 0);
7151 /* From the constraints compute the points-to sets. */
7152 solve_constraints ();
7154 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
7155 cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl,
7156 get_varinfo (escaped_id));
7158 /* Make sure the ESCAPED solution (which is used as placeholder in
7159 other solutions) does not reference itself. This simplifies
7160 points-to solution queries. */
7161 cfun->gimple_df->escaped.escaped = 0;
7163 /* Compute the points-to sets for pointer SSA_NAMEs. */
7164 unsigned i;
7165 tree ptr;
7167 FOR_EACH_SSA_NAME (i, ptr, cfun)
7169 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
7170 find_what_p_points_to (cfun->decl, ptr);
7173 /* Compute the call-used/clobbered sets. */
7174 FOR_EACH_BB_FN (bb, cfun)
7176 gimple_stmt_iterator gsi;
7178 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7180 gcall *stmt;
7181 struct pt_solution *pt;
7183 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
7184 if (!stmt)
7185 continue;
7187 pt = gimple_call_use_set (stmt);
7188 if (gimple_call_flags (stmt) & ECF_CONST)
7189 memset (pt, 0, sizeof (struct pt_solution));
7190 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7192 *pt = find_what_var_points_to (cfun->decl, vi);
7193 /* Escaped (and thus nonlocal) variables are always
7194 implicitly used by calls. */
7195 /* ??? ESCAPED can be empty even though NONLOCAL
7196 always escaped. */
7197 pt->nonlocal = 1;
7198 pt->escaped = 1;
7200 else
7202 /* If there is nothing special about this call then
7203 we have made everything that is used also escape. */
7204 *pt = cfun->gimple_df->escaped;
7205 pt->nonlocal = 1;
7208 pt = gimple_call_clobber_set (stmt);
7209 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7210 memset (pt, 0, sizeof (struct pt_solution));
7211 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7213 *pt = find_what_var_points_to (cfun->decl, vi);
7214 /* Escaped (and thus nonlocal) variables are always
7215 implicitly clobbered by calls. */
7216 /* ??? ESCAPED can be empty even though NONLOCAL
7217 always escaped. */
7218 pt->nonlocal = 1;
7219 pt->escaped = 1;
7221 else
7223 /* If there is nothing special about this call then
7224 we have made everything that is used also escape. */
7225 *pt = cfun->gimple_df->escaped;
7226 pt->nonlocal = 1;
7231 timevar_pop (TV_TREE_PTA);
7235 /* Delete created points-to sets. */
7237 static void
7238 delete_points_to_sets (void)
7240 unsigned int i;
7242 delete shared_bitmap_table;
7243 shared_bitmap_table = NULL;
7244 if (dump_file && (dump_flags & TDF_STATS))
7245 fprintf (dump_file, "Points to sets created:%d\n",
7246 stats.points_to_sets_created);
7248 delete vi_for_tree;
7249 delete call_stmt_vars;
7250 bitmap_obstack_release (&pta_obstack);
7251 constraints.release ();
7253 for (i = 0; i < graph->size; i++)
7254 graph->complex[i].release ();
7255 free (graph->complex);
7257 free (graph->rep);
7258 free (graph->succs);
7259 free (graph->pe);
7260 free (graph->pe_rep);
7261 free (graph->indirect_cycles);
7262 free (graph);
7264 varmap.release ();
7265 variable_info_pool.release ();
7266 constraint_pool.release ();
7268 obstack_free (&fake_var_decl_obstack, NULL);
7270 delete final_solutions;
7271 obstack_free (&final_solutions_obstack, NULL);
7274 struct vls_data
7276 unsigned short clique;
7277 bitmap rvars;
7280 /* Mark "other" loads and stores as belonging to CLIQUE and with
7281 base zero. */
7283 static bool
7284 visit_loadstore (gimple *, tree base, tree ref, void *data)
7286 unsigned short clique = ((vls_data *) data)->clique;
7287 bitmap rvars = ((vls_data *) data)->rvars;
7288 if (TREE_CODE (base) == MEM_REF
7289 || TREE_CODE (base) == TARGET_MEM_REF)
7291 tree ptr = TREE_OPERAND (base, 0);
7292 if (TREE_CODE (ptr) == SSA_NAME)
7294 /* For parameters, get at the points-to set for the actual parm
7295 decl. */
7296 if (SSA_NAME_IS_DEFAULT_DEF (ptr)
7297 && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
7298 || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
7299 ptr = SSA_NAME_VAR (ptr);
7301 /* We need to make sure 'ptr' doesn't include any of
7302 the restrict tags we added bases for in its points-to set. */
7303 varinfo_t vi = lookup_vi_for_tree (ptr);
7304 if (! vi)
7305 return false;
7307 vi = get_varinfo (find (vi->id));
7308 if (bitmap_intersect_p (rvars, vi->solution))
7309 return false;
7312 /* Do not overwrite existing cliques (that includes clique, base
7313 pairs we just set). */
7314 if (MR_DEPENDENCE_CLIQUE (base) == 0)
7316 MR_DEPENDENCE_CLIQUE (base) = clique;
7317 MR_DEPENDENCE_BASE (base) = 0;
7321 /* For plain decl accesses see whether they are accesses to globals
7322 and rewrite them to MEM_REFs with { clique, 0 }. */
7323 if (VAR_P (base)
7324 && is_global_var (base)
7325 /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
7326 ops callback. */
7327 && base != ref)
7329 tree *basep = &ref;
7330 while (handled_component_p (*basep))
7331 basep = &TREE_OPERAND (*basep, 0);
7332 gcc_assert (VAR_P (*basep));
7333 tree ptr = build_fold_addr_expr (*basep);
7334 tree zero = build_int_cst (TREE_TYPE (ptr), 0);
7335 *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
7336 MR_DEPENDENCE_CLIQUE (*basep) = clique;
7337 MR_DEPENDENCE_BASE (*basep) = 0;
7340 return false;
7343 /* If REF is a MEM_REF then assign a clique, base pair to it, updating
7344 CLIQUE, *RESTRICT_VAR and LAST_RUID. Return whether dependence info
7345 was assigned to REF. */
7347 static bool
7348 maybe_set_dependence_info (tree ref, tree ptr,
7349 unsigned short &clique, varinfo_t restrict_var,
7350 unsigned short &last_ruid)
7352 while (handled_component_p (ref))
7353 ref = TREE_OPERAND (ref, 0);
7354 if ((TREE_CODE (ref) == MEM_REF
7355 || TREE_CODE (ref) == TARGET_MEM_REF)
7356 && TREE_OPERAND (ref, 0) == ptr)
7358 /* Do not overwrite existing cliques. This avoids overwriting dependence
7359 info from a function with restrict parameters that was inlined
7360 into another function with restrict parameters. This usually means we
7361 prefer to be precise in innermost loops. */
7362 if (MR_DEPENDENCE_CLIQUE (ref) == 0)
7364 if (clique == 0)
7365 clique = ++cfun->last_clique;
7366 if (restrict_var->ruid == 0)
7367 restrict_var->ruid = ++last_ruid;
7368 MR_DEPENDENCE_CLIQUE (ref) = clique;
7369 MR_DEPENDENCE_BASE (ref) = restrict_var->ruid;
7370 return true;
7373 return false;
7376 /* Compute the set of independent memory references based on restrict
7377 tags and their conservative propagation to the points-to sets. */
7379 static void
7380 compute_dependence_clique (void)
7382 unsigned short clique = 0;
7383 unsigned short last_ruid = 0;
7384 bitmap rvars = BITMAP_ALLOC (NULL);
7385 for (unsigned i = 0; i < num_ssa_names; ++i)
7387 tree ptr = ssa_name (i);
7388 if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
7389 continue;
7391 /* Avoid all this when ptr is not dereferenced? */
7392 tree p = ptr;
7393 if (SSA_NAME_IS_DEFAULT_DEF (ptr)
7394 && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
7395 || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
7396 p = SSA_NAME_VAR (ptr);
7397 varinfo_t vi = lookup_vi_for_tree (p);
7398 if (!vi)
7399 continue;
7400 vi = get_varinfo (find (vi->id));
7401 bitmap_iterator bi;
7402 unsigned j;
7403 varinfo_t restrict_var = NULL;
7404 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
7406 varinfo_t oi = get_varinfo (j);
7407 if (oi->is_restrict_var)
7409 if (restrict_var)
7411 if (dump_file && (dump_flags & TDF_DETAILS))
7413 fprintf (dump_file, "found restrict pointed-to "
7414 "for ");
7415 print_generic_expr (dump_file, ptr, 0);
7416 fprintf (dump_file, " but not exclusively\n");
7418 restrict_var = NULL;
7419 break;
7421 restrict_var = oi;
7423 /* NULL is the only other valid points-to entry. */
7424 else if (oi->id != nothing_id)
7426 restrict_var = NULL;
7427 break;
7430 /* Ok, found that ptr must(!) point to a single(!) restrict
7431 variable. */
7432 /* ??? PTA isn't really a proper propagation engine to compute
7433 this property.
7434 ??? We could handle merging of two restricts by unifying them. */
7435 if (restrict_var)
7437 /* Now look at possible dereferences of ptr. */
7438 imm_use_iterator ui;
7439 gimple *use_stmt;
7440 bool used = false;
7441 FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
7443 /* ??? Calls and asms. */
7444 if (!gimple_assign_single_p (use_stmt))
7445 continue;
7446 used |= maybe_set_dependence_info (gimple_assign_lhs (use_stmt),
7447 ptr, clique, restrict_var,
7448 last_ruid);
7449 used |= maybe_set_dependence_info (gimple_assign_rhs1 (use_stmt),
7450 ptr, clique, restrict_var,
7451 last_ruid);
7453 if (used)
7454 bitmap_set_bit (rvars, restrict_var->id);
7458 if (clique != 0)
7460 /* Assign the BASE id zero to all accesses not based on a restrict
7461 pointer. That way they get disambiguated against restrict
7462 accesses but not against each other. */
7463 /* ??? For restricts derived from globals (thus not incoming
7464 parameters) we can't restrict scoping properly thus the following
7465 is too aggressive there. For now we have excluded those globals from
7466 getting into the MR_DEPENDENCE machinery. */
7467 vls_data data = { clique, rvars };
7468 basic_block bb;
7469 FOR_EACH_BB_FN (bb, cfun)
7470 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7471 !gsi_end_p (gsi); gsi_next (&gsi))
7473 gimple *stmt = gsi_stmt (gsi);
7474 walk_stmt_load_store_ops (stmt, &data,
7475 visit_loadstore, visit_loadstore);
7479 BITMAP_FREE (rvars);
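/* An illustrative source-level example (hypothetical, not from this file):
   in

     void f (int * __restrict p, int * __restrict q)
     {
       *p = *q;
     }

   each restrict parameter points to its own restrict tag, so the code
   above assigns the two dereferences MR_DEPENDENCE_CLIQUE/BASE pairs
   sharing one clique but with different bases, which lets later passes
   disambiguate *p from *q without re-running points-to analysis.  */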
7482 /* Compute points-to information for every SSA_NAME pointer in the
7483 current function and compute the transitive closure of escaped
7484 variables to re-initialize the call-clobber states of local variables. */
7486 unsigned int
7487 compute_may_aliases (void)
7489 if (cfun->gimple_df->ipa_pta)
7491 if (dump_file)
7493 fprintf (dump_file, "\nNot re-computing points-to information "
7494 "because IPA points-to information is available.\n\n");
7496 /* But still dump what we have remaining. */
7497 dump_alias_info (dump_file);
7500 return 0;
7503 /* For each pointer P_i, determine the sets of variables that P_i may
7504 point-to. Compute the reachability set of escaped and call-used
7505 variables. */
7506 compute_points_to_sets ();
7508 /* Debugging dumps. */
7509 if (dump_file)
7510 dump_alias_info (dump_file);
7512 /* Compute restrict-based memory disambiguations. */
7513 compute_dependence_clique ();
7515 /* Deallocate memory used by aliasing data structures and the internal
7516 points-to solution. */
7517 delete_points_to_sets ();
7519 gcc_assert (!need_ssa_update_p (cfun));
7521 return 0;
7524 /* A dummy pass to cause points-to information to be computed via
7525 TODO_rebuild_alias. */
7527 namespace {
7529 const pass_data pass_data_build_alias =
7531 GIMPLE_PASS, /* type */
7532 "alias", /* name */
7533 OPTGROUP_NONE, /* optinfo_flags */
7534 TV_NONE, /* tv_id */
7535 ( PROP_cfg | PROP_ssa ), /* properties_required */
7536 0, /* properties_provided */
7537 0, /* properties_destroyed */
7538 0, /* todo_flags_start */
7539 TODO_rebuild_alias, /* todo_flags_finish */
7542 class pass_build_alias : public gimple_opt_pass
7544 public:
7545 pass_build_alias (gcc::context *ctxt)
7546 : gimple_opt_pass (pass_data_build_alias, ctxt)
7549 /* opt_pass methods: */
7550 virtual bool gate (function *) { return flag_tree_pta; }
7552 }; // class pass_build_alias
7554 } // anon namespace
7556 gimple_opt_pass *
7557 make_pass_build_alias (gcc::context *ctxt)
7559 return new pass_build_alias (ctxt);
7562 /* A dummy pass to cause points-to information to be computed via
7563 TODO_rebuild_alias. */
7565 namespace {
7567 const pass_data pass_data_build_ealias =
7569 GIMPLE_PASS, /* type */
7570 "ealias", /* name */
7571 OPTGROUP_NONE, /* optinfo_flags */
7572 TV_NONE, /* tv_id */
7573 ( PROP_cfg | PROP_ssa ), /* properties_required */
7574 0, /* properties_provided */
7575 0, /* properties_destroyed */
7576 0, /* todo_flags_start */
7577 TODO_rebuild_alias, /* todo_flags_finish */
7580 class pass_build_ealias : public gimple_opt_pass
7582 public:
7583 pass_build_ealias (gcc::context *ctxt)
7584 : gimple_opt_pass (pass_data_build_ealias, ctxt)
7587 /* opt_pass methods: */
7588 virtual bool gate (function *) { return flag_tree_pta; }
7590 }; // class pass_build_ealias
7592 } // anon namespace
7594 gimple_opt_pass *
7595 make_pass_build_ealias (gcc::context *ctxt)
7597 return new pass_build_ealias (ctxt);
7601 /* IPA PTA solutions for ESCAPED. */
7602 struct pt_solution ipa_escaped_pt
7603 = { true, false, false, false, false,
7604 false, false, false, false, false, NULL };
7606 /* Associate node with varinfo DATA. Worker for
7607 cgraph_for_symbol_thunks_and_aliases. */
7608 static bool
7609 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
7611 if ((node->alias
7612 || (node->thunk.thunk_p
7613 && ! node->global.inlined_to))
7614 && node->analyzed)
7615 insert_vi_for_tree (node->decl, (varinfo_t)data);
7616 return false;
7619 /* Dump varinfo VI to FILE. */
7621 static void
7622 dump_varinfo (FILE *file, varinfo_t vi)
7624 if (vi == NULL)
7625 return;
7627 fprintf (file, "%u: %s\n", vi->id, vi->name);
7629 const char *sep = " ";
7630 if (vi->is_artificial_var)
7631 fprintf (file, "%sartificial", sep);
7632 if (vi->is_special_var)
7633 fprintf (file, "%sspecial", sep);
7634 if (vi->is_unknown_size_var)
7635 fprintf (file, "%sunknown-size", sep);
7636 if (vi->is_full_var)
7637 fprintf (file, "%sfull", sep);
7638 if (vi->is_heap_var)
7639 fprintf (file, "%sheap", sep);
7640 if (vi->may_have_pointers)
7641 fprintf (file, "%smay-have-pointers", sep);
7642 if (vi->only_restrict_pointers)
7643 fprintf (file, "%sonly-restrict-pointers", sep);
7644 if (vi->is_restrict_var)
7645 fprintf (file, "%sis-restrict-var", sep);
7646 if (vi->is_global_var)
7647 fprintf (file, "%sglobal", sep);
7648 if (vi->is_ipa_escape_point)
7649 fprintf (file, "%sipa-escape-point", sep);
7650 if (vi->is_fn_info)
7651 fprintf (file, "%sfn-info", sep);
7652 if (vi->ruid)
7653 fprintf (file, "%srestrict-uid:%u", sep, vi->ruid);
7654 if (vi->next)
7655 fprintf (file, "%snext:%u", sep, vi->next);
7656 if (vi->head != vi->id)
7657 fprintf (file, "%shead:%u", sep, vi->head);
7658 if (vi->offset)
7659 fprintf (file, "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset);
7660 if (vi->size != ~(unsigned HOST_WIDE_INT)0)
7661 fprintf (file, "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size);
7662 if (vi->fullsize != ~(unsigned HOST_WIDE_INT)0
7663 && vi->fullsize != vi->size)
7664 fprintf (file, "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep,
7665 vi->fullsize);
7666 fprintf (file, "\n");
7668 if (vi->solution && !bitmap_empty_p (vi->solution))
7670 bitmap_iterator bi;
7671 unsigned i;
7672 fprintf (file, " solution: {");
7673 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
7674 fprintf (file, " %u", i);
7675 fprintf (file, " }\n");
7678 if (vi->oldsolution && !bitmap_empty_p (vi->oldsolution)
7679 && !bitmap_equal_p (vi->solution, vi->oldsolution))
7681 bitmap_iterator bi;
7682 unsigned i;
7683 fprintf (file, " oldsolution: {");
7684 EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi)
7685 fprintf (file, " %u", i);
7686 fprintf (file, " }\n");
7690 /* Dump varinfo VI to stderr. */
7692 DEBUG_FUNCTION void
7693 debug_varinfo (varinfo_t vi)
7695 dump_varinfo (stderr, vi);
7698 /* Dump varmap to FILE. */
7700 static void
7701 dump_varmap (FILE *file)
7703 if (varmap.length () == 0)
7704 return;
7706 fprintf (file, "variables:\n");
7708 for (unsigned int i = 0; i < varmap.length (); ++i)
7710 varinfo_t vi = get_varinfo (i);
7711 dump_varinfo (file, vi);
7714 fprintf (file, "\n");
7717 /* Dump varmap to stderr. */
7719 DEBUG_FUNCTION void
7720 debug_varmap (void)
7722 dump_varmap (stderr);
7725 /* Compute whether NODE is referred to non-locally. Worker for
7726 cgraph_for_symbol_thunks_and_aliases. */
7727 static bool
7728 refered_from_nonlocal_fn (struct cgraph_node *node, void *data)
7730 bool *nonlocal_p = (bool *)data;
7731 *nonlocal_p |= (node->used_from_other_partition
7732 || node->externally_visible
7733 || node->force_output);
7734 return false;
7737 /* Same for varpool nodes. */
7738 static bool
7739 refered_from_nonlocal_var (struct varpool_node *node, void *data)
7741 bool *nonlocal_p = (bool *)data;
7742 *nonlocal_p |= (node->used_from_other_partition
7743 || node->externally_visible
7744 || node->force_output);
7745 return false;
7748 /* Execute the driver for IPA PTA. */
7749 static unsigned int
7750 ipa_pta_execute (void)
7752 struct cgraph_node *node;
7753 varpool_node *var;
7754 unsigned int from = 0;
7756 in_ipa_mode = 1;
7758 init_alias_vars ();
7760 if (dump_file && (dump_flags & TDF_DETAILS))
7762 symtab_node::dump_table (dump_file);
7763 fprintf (dump_file, "\n");
7766 if (dump_file)
7768 fprintf (dump_file, "Generating generic constraints\n\n");
7769 dump_constraints (dump_file, from);
7770 fprintf (dump_file, "\n");
7771 from = constraints.length ();
7774 /* Build the constraints. */
7775 FOR_EACH_DEFINED_FUNCTION (node)
7777 varinfo_t vi;
7778 /* Nodes without a body are not interesting. Especially do not
7779 visit clones at this point for now - we get duplicate decls
7780 there for inline clones at least. */
7781 if (!node->has_gimple_body_p () || node->global.inlined_to)
7782 continue;
7783 node->get_body ();
7785 gcc_assert (!node->clone_of);
7787 /* For externally visible functions and functions annotated with
7788 attribute ((used)), use local constraints for their arguments.
7789 For local functions we see all callers and thus do not need initial
7790 constraints for parameters. */
7791 bool nonlocal_p = (node->used_from_other_partition
7792 || node->externally_visible
7793 || node->force_output);
7794 node->call_for_symbol_thunks_and_aliases (refered_from_nonlocal_fn,
7795 &nonlocal_p, true);
7797 vi = create_function_info_for (node->decl,
7798 alias_get_name (node->decl), false,
7799 nonlocal_p);
7800 if (dump_file
7801 && from != constraints.length ())
7803 fprintf (dump_file,
7804 "Generating intial constraints for %s", node->name ());
7805 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
7806 fprintf (dump_file, " (%s)",
7807 IDENTIFIER_POINTER
7808 (DECL_ASSEMBLER_NAME (node->decl)));
7809 fprintf (dump_file, "\n\n");
7810 dump_constraints (dump_file, from);
7811 fprintf (dump_file, "\n");
7813 from = constraints.length ();
7816 node->call_for_symbol_thunks_and_aliases
7817 (associate_varinfo_to_alias, vi, true);
7820 /* Create constraints for global variables and their initializers. */
7821 FOR_EACH_VARIABLE (var)
7823 if (var->alias && var->analyzed)
7824 continue;
7826 varinfo_t vi = get_vi_for_tree (var->decl);
7828 /* For the purpose of IPA PTA unit-local globals are not
7829 escape points. */
7830 bool nonlocal_p = (var->used_from_other_partition
7831 || var->externally_visible
7832 || var->force_output);
7833 var->call_for_symbol_and_aliases (refered_from_nonlocal_var,
7834 &nonlocal_p, true);
7835 if (nonlocal_p)
7836 vi->is_ipa_escape_point = true;
7839 if (dump_file
7840 && from != constraints.length ())
7842 fprintf (dump_file,
7843 "Generating constraints for global initializers\n\n");
7844 dump_constraints (dump_file, from);
7845 fprintf (dump_file, "\n");
7846 from = constraints.length ();
7849 FOR_EACH_DEFINED_FUNCTION (node)
7851 struct function *func;
7852 basic_block bb;
7854 /* Nodes without a body are not interesting. */
7855 if (!node->has_gimple_body_p () || node->clone_of)
7856 continue;
7858 if (dump_file)
7860 fprintf (dump_file,
7861 "Generating constraints for %s", node->name ());
7862 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
7863 fprintf (dump_file, " (%s)",
7864 IDENTIFIER_POINTER
7865 (DECL_ASSEMBLER_NAME (node->decl)));
7866 fprintf (dump_file, "\n");
7869 func = DECL_STRUCT_FUNCTION (node->decl);
7870 gcc_assert (cfun == NULL);
7872 /* Build constraints for the function body. */
7873 FOR_EACH_BB_FN (bb, func)
7875 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7876 gsi_next (&gsi))
7878 gphi *phi = gsi.phi ();
7880 if (! virtual_operand_p (gimple_phi_result (phi)))
7881 find_func_aliases (func, phi);
7884 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
7885 gsi_next (&gsi))
7887 gimple *stmt = gsi_stmt (gsi);
7889 find_func_aliases (func, stmt);
7890 find_func_clobbers (func, stmt);
7894 if (dump_file)
7896 fprintf (dump_file, "\n");
7897 dump_constraints (dump_file, from);
7898 fprintf (dump_file, "\n");
7899 from = constraints.length ();
7903 /* From the constraints compute the points-to sets. */
7904 solve_constraints ();
7906 /* Compute the global points-to sets for ESCAPED.
7907 ??? Note that the computed escape set is not correct
7908 for the whole unit as we fail to consider graph edges to
7909 externally visible functions. */
7910 ipa_escaped_pt = find_what_var_points_to (NULL, get_varinfo (escaped_id));
7912 /* Make sure the ESCAPED solution (which is used as placeholder in
7913 other solutions) does not reference itself. This simplifies
7914 points-to solution queries. */
7915 ipa_escaped_pt.ipa_escaped = 0;
7917 /* Assign the points-to sets to the SSA names in the unit. */
7918 FOR_EACH_DEFINED_FUNCTION (node)
7920 tree ptr;
7921 struct function *fn;
7922 unsigned i;
7923 basic_block bb;
7925 /* Nodes without a body are not interesting. */
7926 if (!node->has_gimple_body_p () || node->clone_of)
7927 continue;
7929 fn = DECL_STRUCT_FUNCTION (node->decl);
7931 /* Compute the points-to sets for pointer SSA_NAMEs. */
7932 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
7934 if (ptr
7935 && POINTER_TYPE_P (TREE_TYPE (ptr)))
7936 find_what_p_points_to (node->decl, ptr);
7939 /* Compute the call-use and call-clobber sets for indirect calls
7940 and calls to external functions. */
7941 FOR_EACH_BB_FN (bb, fn)
7943 gimple_stmt_iterator gsi;
7945 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7947 gcall *stmt;
7948 struct pt_solution *pt;
7949 varinfo_t vi, fi;
7950 tree decl;
7952 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
7953 if (!stmt)
7954 continue;
7956 /* Handle direct calls to functions with body. */
7957 decl = gimple_call_fndecl (stmt);
7960 tree called_decl = NULL_TREE;
7961 if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
7962 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
7963 else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
7964 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
7966 if (called_decl != NULL_TREE
7967 && !fndecl_maybe_in_other_partition (called_decl))
7968 decl = called_decl;
7971 if (decl
7972 && (fi = lookup_vi_for_tree (decl))
7973 && fi->is_fn_info)
7975 *gimple_call_clobber_set (stmt)
7976 = find_what_var_points_to
7977 (node->decl, first_vi_for_offset (fi, fi_clobbers));
7978 *gimple_call_use_set (stmt)
7979 = find_what_var_points_to
7980 (node->decl, first_vi_for_offset (fi, fi_uses));
7982 /* Handle direct calls to external functions. */
7983 else if (decl)
7985 pt = gimple_call_use_set (stmt);
7986 if (gimple_call_flags (stmt) & ECF_CONST)
7987 memset (pt, 0, sizeof (struct pt_solution));
7988 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7990 *pt = find_what_var_points_to (node->decl, vi);
7991 /* Escaped (and thus nonlocal) variables are always
7992 implicitly used by calls. */
7993 /* ??? ESCAPED can be empty even though NONLOCAL
7994 always escapes.  */
7995 pt->nonlocal = 1;
7996 pt->ipa_escaped = 1;
7998 else
8000 /* If there is nothing special about this call then
8001 we have made everything that is used also escape. */
8002 *pt = ipa_escaped_pt;
8003 pt->nonlocal = 1;
8006 pt = gimple_call_clobber_set (stmt);
8007 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
8008 memset (pt, 0, sizeof (struct pt_solution));
8009 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
8011 *pt = find_what_var_points_to (node->decl, vi);
8012 /* Escaped (and thus nonlocal) variables are always
8013 implicitly clobbered by calls. */
8014 /* ??? ESCAPED can be empty even though NONLOCAL
8015 always escapes.  */
8016 pt->nonlocal = 1;
8017 pt->ipa_escaped = 1;
8019 else
8021 /* If there is nothing special about this call then
8022 we have made everything that is used also escape. */
8023 *pt = ipa_escaped_pt;
8024 pt->nonlocal = 1;
8027 /* Handle indirect calls. */
8028 else if (!decl
8029 && (fi = get_fi_for_callee (stmt)))
8031 /* We need to accumulate all clobbers/uses of all possible
8032 callees. */
8033 fi = get_varinfo (find (fi->id));
8034 /* If we cannot constrain the set of functions we may end up
8035 calling, we end up using/clobbering everything.  */
8036 if (bitmap_bit_p (fi->solution, anything_id)
8037 || bitmap_bit_p (fi->solution, nonlocal_id)
8038 || bitmap_bit_p (fi->solution, escaped_id))
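		      /* pt_solution_reset yields the most conservative
			 answer, a solution with the "anything" flag set,
			 since we cannot bound the set of possible callees
			 here.  */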
8040 pt_solution_reset (gimple_call_clobber_set (stmt));
8041 pt_solution_reset (gimple_call_use_set (stmt));
8043 else
8045 bitmap_iterator bi;
8046 unsigned i;
8047 struct pt_solution *uses, *clobbers;
8049 uses = gimple_call_use_set (stmt);
8050 clobbers = gimple_call_clobber_set (stmt);
8051 memset (uses, 0, sizeof (struct pt_solution));
8052 memset (clobbers, 0, sizeof (struct pt_solution));
8053 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
8055 struct pt_solution sol;
8057 vi = get_varinfo (i);
8058 if (!vi->is_fn_info)
8060 /* ??? We could be more precise here? */
8061 uses->nonlocal = 1;
8062 uses->ipa_escaped = 1;
8063 clobbers->nonlocal = 1;
8064 clobbers->ipa_escaped = 1;
8065 continue;
8068 if (!uses->anything)
8070 sol = find_what_var_points_to
8071 (node->decl,
8072 first_vi_for_offset (vi, fi_uses));
8073 pt_solution_ior_into (uses, &sol);
8075 if (!clobbers->anything)
8077 sol = find_what_var_points_to
8078 (node->decl,
8079 first_vi_for_offset (vi, fi_clobbers));
8080 pt_solution_ior_into (clobbers, &sol);
8088 fn->gimple_df->ipa_pta = true;
8090 /* We have to re-set the final-solution cache after each function
8091 because what counts as a "global" depends on the function context.  */
8092 final_solutions->empty ();
8093 obstack_free (&final_solutions_obstack, NULL);
8094 gcc_obstack_init (&final_solutions_obstack);
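	  /* In IPA mode a variable that is automatic in some other
	     function is effectively nonlocal from the point of view of
	     the function currently being processed, so final solutions
	     cached for one function would be wrong for the next.  */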
8097 delete_points_to_sets ();
8099 in_ipa_mode = 0;
8101 return 0;
8104 namespace {
8106 const pass_data pass_data_ipa_pta =
8108 SIMPLE_IPA_PASS, /* type */
8109 "pta", /* name */
8110 OPTGROUP_NONE, /* optinfo_flags */
8111 TV_IPA_PTA, /* tv_id */
8112 0, /* properties_required */
8113 0, /* properties_provided */
8114 0, /* properties_destroyed */
8115 0, /* todo_flags_start */
8116 0, /* todo_flags_finish */
8119 class pass_ipa_pta : public simple_ipa_opt_pass
8121 public:
8122 pass_ipa_pta (gcc::context *ctxt)
8123 : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
8126 /* opt_pass methods: */
8127 virtual bool gate (function *)
8129 return (optimize
8130 && flag_ipa_pta
8131 /* Don't bother doing anything if the program has errors. */
8132 && !seen_error ());
8135 opt_pass * clone () { return new pass_ipa_pta (m_ctxt); }
8137 virtual unsigned int execute (function *) { return ipa_pta_execute (); }
8139 }; // class pass_ipa_pta
8141 } // anon namespace
8143 simple_ipa_opt_pass *
8144 make_pass_ipa_pta (gcc::context *ctxt)
8146 return new pass_ipa_pta (ctxt);
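/* The pass is instantiated from passes.def (roughly as
     NEXT_PASS (pass_ipa_pta);
   among the late simple IPA passes) and, per the gate above, only runs
   when optimizing with -fipa-pta and no errors have been seen.  */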