arm: avoid indirect sibcalls when IP is live [PR116597]
[official-gcc.git] / gcc / tree-ssa-structalias.cc
bloba32ef1d5cc0c95fe81821620d6a18d128abe0a56
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2024 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "tree-pass.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "diagnostic-core.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "stmt.h"
37 #include "gimple-iterator.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "gimple-walk.h"
41 #include "varasm.h"
42 #include "stringpool.h"
43 #include "attribs.h"
44 #include "tree-ssa.h"
45 #include "tree-cfg.h"
46 #include "gimple-range.h"
47 #include "ipa-modref-tree.h"
48 #include "ipa-modref.h"
49 #include "attr-fnspec.h"
51 /* The idea behind this analyzer is to generate set constraints from the
52 program, then solve the resulting constraints in order to generate the
53 points-to sets.
55 Set constraints are a way of modeling program analysis problems that
56 involve sets. They consist of an inclusion constraint language,
57 describing the variables (each variable is a set) and operations that
58 are involved on the variables, and a set of rules that derive facts
59 from these operations. To solve a system of set constraints, you derive
60 all possible facts under the rules, which gives you the correct sets
61 as a consequence.
63 See "Efficient Field-sensitive pointer analysis for C" by "David
64 J. Pearce and Paul H. J. Kelly and Chris Hankin", at
65 http://citeseer.ist.psu.edu/pearce04efficient.html
67 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
68 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
69 http://citeseer.ist.psu.edu/heintze01ultrafast.html
71 There are three types of real constraint expressions, DEREF,
72 ADDRESSOF, and SCALAR. Each constraint expression consists
73 of a constraint type, a variable, and an offset.
75 SCALAR is a constraint expression type used to represent x, whether
76 it appears on the LHS or the RHS of a statement.
77 DEREF is a constraint expression type used to represent *x, whether
78 it appears on the LHS or the RHS of a statement.
79 ADDRESSOF is a constraint expression used to represent &x, whether
80 it appears on the LHS or the RHS of a statement.
82 Each pointer variable in the program is assigned an integer id, and
83 each field of a structure variable is assigned an integer id as well.
85 Structure variables are linked to their list of fields through a "next
86 field" in each variable that points to the next field in offset
87 order.
88 Each variable for a structure field has
90 1. "size", that tells the size in bits of that field.
91 2. "fullsize", that tells the size in bits of the entire structure.
92 3. "offset", that tells the offset in bits from the beginning of the
93 structure to this field.
95 Thus,
96 struct f
98 int a;
99 int b;
100 } foo;
101 int *bar;
103 looks like
105 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
106 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
107 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
110 In order to solve the system of set constraints, the following is
111 done:
113 1. Each constraint variable x has a solution set associated with it,
114 Sol(x).
116 2. Constraints are separated into direct, copy, and complex.
117 Direct constraints are ADDRESSOF constraints that require no extra
118 processing, such as P = &Q
119 Copy constraints are those of the form P = Q.
120 Complex constraints are all the constraints involving dereferences
121 and offsets (including offsetted copies).
123 3. All direct constraints of the form P = &Q are processed, such
124 that Q is added to Sol(P)
126 4. All complex constraints for a given constraint variable are stored in a
127 linked list attached to that variable's node.
129 5. A directed graph is built out of the copy constraints. Each
130 constraint variable is a node in the graph, and an edge from
131 Q to P is added for each copy constraint of the form P = Q
133 6. The graph is then walked, and solution sets are
134 propagated along the copy edges, such that an edge from Q to P
135 causes Sol(P) <- Sol(P) union Sol(Q).
137 7. As we visit each node, all complex constraints associated with
138 that node are processed by adding appropriate copy edges to the graph, or the
139 appropriate variables to the solution set.
141 8. The process of walking the graph is iterated until no solution
142 sets change.
144 Prior to walking the graph in steps 6 and 7, We perform static
145 cycle elimination on the constraint graph, as well
146 as off-line variable substitution.
148 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
149 on and turned into anything), but isn't. You can just see what offset
150 inside the pointed-to struct it's going to access.
152 TODO: Constant bounded arrays can be handled as if they were structs of the
153 same number of elements.
155 TODO: Modeling heap and incoming pointers becomes much better if we
156 add fields to them as we discover them, which we could do.
158 TODO: We could handle unions, but to be honest, it's probably not
159 worth the pain or slowdown. */
161 /* IPA-PTA optimizations possible.
163 When the indirect function called is ANYTHING we can add disambiguation
164 based on the function signatures (or simply the parameter count which
165 is the varinfo size). We also do not need to consider functions that
166 do not have their address taken.
168 The is_global_var bit which marks escape points is overly conservative
169 in IPA mode. Split it to is_escape_point and is_global_var - only
170 externally visible globals are escape points in IPA mode.
171 There is now is_ipa_escape_point but this is only used in a few
172 selected places.
174 The way we introduce DECL_PT_UID to avoid fixing up all points-to
175 sets in the translation unit when we copy a DECL during inlining
176 pessimizes precision. The advantage is that the DECL_PT_UID keeps
177 compile-time and memory usage overhead low - the points-to sets
178 do not grow or get unshared as they would during a fixup phase.
179 An alternative solution is to delay IPA PTA until after all
180 inlining transformations have been applied.
182 The way we propagate clobber/use information isn't optimized.
183 It should use a new complex constraint that properly filters
184 out local variables of the callee (though that would make
185 the sets invalid after inlining). OTOH we might as well
186 admit defeat to WHOPR and simply do all the clobber/use analysis
187 and propagation after PTA finished but before we threw away
188 points-to information for memory variables. WHOPR and PTA
189 do not play along well anyway - the whole constraint solving
190 would need to be done in WPA phase and it will be very interesting
191 to apply the results to local SSA names during LTRANS phase.
193 We probably should compute a per-function unit-ESCAPE solution
194 propagating it simply like the clobber / uses solutions. The
195 solution can go alongside the non-IPA escaped solution and be
196 used to query which vars escape the unit through a function.
197 This is also required to make the escaped-HEAP trick work in IPA mode.
199 We never put function decls in points-to sets so we do not
200 keep the set of called functions for indirect calls.
202 And probably more. */
204 static bool use_field_sensitive = true;
205 static int in_ipa_mode = 0;
207 /* Used for predecessor bitmaps. */
208 static bitmap_obstack predbitmap_obstack;
210 /* Used for points-to sets. */
211 static bitmap_obstack pta_obstack;
213 /* Used for oldsolution members of variables. */
214 static bitmap_obstack oldpta_obstack;
216 /* Used for per-solver-iteration bitmaps. */
217 static bitmap_obstack iteration_obstack;
219 static unsigned int create_variable_info_for (tree, const char *, bool);
220 typedef struct constraint_graph *constraint_graph_t;
221 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
223 struct constraint;
224 typedef struct constraint *constraint_t;
227 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
228 if (a) \
229 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
231 static struct constraint_stats
233 unsigned int total_vars;
234 unsigned int nonpointer_vars;
235 unsigned int unified_vars_static;
236 unsigned int unified_vars_dynamic;
237 unsigned int iterations;
238 unsigned int num_edges;
239 unsigned int num_implicit_edges;
240 unsigned int num_avoided_edges;
241 unsigned int points_to_sets_created;
242 } stats;
244 struct variable_info
246 /* ID of this variable */
247 unsigned int id;
249 /* True if this is a variable created by the constraint analysis, such as
250 heap variables and constraints we had to break up. */
251 unsigned int is_artificial_var : 1;
253 /* True if this is a special variable whose solution set should not be
254 changed. */
255 unsigned int is_special_var : 1;
257 /* True for variables whose size is not known or variable. */
258 unsigned int is_unknown_size_var : 1;
260 /* True for (sub-)fields that represent a whole variable. */
261 unsigned int is_full_var : 1;
263 /* True if this is a heap variable. */
264 unsigned int is_heap_var : 1;
266 /* True if this is a register variable. */
267 unsigned int is_reg_var : 1;
269 /* True if this field may contain pointers. */
270 unsigned int may_have_pointers : 1;
272 /* True if this field has only restrict qualified pointers. */
273 unsigned int only_restrict_pointers : 1;
275 /* True if this represents a heap var created for a restrict qualified
276 pointer. */
277 unsigned int is_restrict_var : 1;
279 /* True if this represents a global variable. */
280 unsigned int is_global_var : 1;
282 /* True if this represents a module escape point for IPA analysis. */
283 unsigned int is_ipa_escape_point : 1;
285 /* True if this represents a IPA function info. */
286 unsigned int is_fn_info : 1;
288 /* True if this appears as RHS in a ADDRESSOF constraint. */
289 unsigned int address_taken : 1;
291 /* ??? Store somewhere better. */
292 unsigned short ruid;
294 /* The ID of the variable for the next field in this structure
295 or zero for the last field in this structure. */
296 unsigned next;
298 /* The ID of the variable for the first field in this structure. */
299 unsigned head;
301 /* Offset of this variable, in bits, from the base variable */
302 unsigned HOST_WIDE_INT offset;
304 /* Size of the variable, in bits. */
305 unsigned HOST_WIDE_INT size;
307 /* Full size of the base variable, in bits. */
308 unsigned HOST_WIDE_INT fullsize;
310 /* In IPA mode the shadow UID in case the variable needs to be duplicated in
311 the final points-to solution because it reaches its containing
312 function recursively. Zero if none is needed. */
313 unsigned int shadow_var_uid;
315 /* Name of this variable */
316 const char *name;
318 /* Tree that this variable is associated with. */
319 tree decl;
321 /* Points-to set for this variable. */
322 bitmap solution;
324 /* Old points-to set for this variable. */
325 bitmap oldsolution;
327 typedef struct variable_info *varinfo_t;
329 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
330 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
331 unsigned HOST_WIDE_INT);
332 static varinfo_t lookup_vi_for_tree (tree);
333 static inline bool type_can_have_subvars (const_tree);
334 static void make_param_constraints (varinfo_t);
336 /* Pool of variable info structures. */
337 static object_allocator<variable_info> variable_info_pool
338 ("Variable info pool");
340 /* Map varinfo to final pt_solution. */
341 static hash_map<varinfo_t, pt_solution *> *final_solutions;
342 struct obstack final_solutions_obstack;
344 /* Table of variable info structures for constraint variables.
345 Indexed directly by variable info id. */
346 static vec<varinfo_t> varmap;
348 /* Return the varmap element N */
350 static inline varinfo_t
351 get_varinfo (unsigned int n)
353 return varmap[n];
356 /* Return the next variable in the list of sub-variables of VI
357 or NULL if VI is the last sub-variable. */
359 static inline varinfo_t
360 vi_next (varinfo_t vi)
362 return get_varinfo (vi->next);
365 /* Static IDs for the special variables. Variable ID zero is unused
366 and used as terminator for the sub-variable chain. */
367 enum { nothing_id = 1, anything_id = 2, string_id = 3,
368 escaped_id = 4, nonlocal_id = 5, escaped_return_id = 6,
369 storedanything_id = 7, integer_id = 8 };
371 /* Return a new variable info structure consisting for a variable
372 named NAME, and using constraint graph node NODE. Append it
373 to the vector of variable info structures. */
375 static varinfo_t
376 new_var_info (tree t, const char *name, bool add_id)
378 unsigned index = varmap.length ();
379 varinfo_t ret = variable_info_pool.allocate ();
381 if (dump_file && add_id)
383 char *tempname = xasprintf ("%s(%d)", name, index);
384 name = ggc_strdup (tempname);
385 free (tempname);
388 ret->id = index;
389 ret->name = name;
390 ret->decl = t;
391 /* Vars without decl are artificial and do not have sub-variables. */
392 ret->is_artificial_var = (t == NULL_TREE);
393 ret->is_special_var = false;
394 ret->is_unknown_size_var = false;
395 ret->is_full_var = (t == NULL_TREE);
396 ret->is_heap_var = false;
397 ret->may_have_pointers = true;
398 ret->only_restrict_pointers = false;
399 ret->is_restrict_var = false;
400 ret->ruid = 0;
401 ret->is_global_var = (t == NULL_TREE);
402 ret->is_ipa_escape_point = false;
403 ret->is_fn_info = false;
404 ret->address_taken = false;
405 if (t && DECL_P (t))
406 ret->is_global_var = (is_global_var (t)
407 /* We have to treat even local register variables
408 as escape points. */
409 || (VAR_P (t) && DECL_HARD_REGISTER (t)));
410 ret->is_reg_var = (t && TREE_CODE (t) == SSA_NAME);
411 ret->solution = BITMAP_ALLOC (&pta_obstack);
412 ret->oldsolution = NULL;
413 ret->next = 0;
414 ret->shadow_var_uid = 0;
415 ret->head = ret->id;
417 stats.total_vars++;
419 varmap.safe_push (ret);
421 return ret;
424 /* A map mapping call statements to per-stmt variables for uses
425 and clobbers specific to the call. */
426 static hash_map<gimple *, varinfo_t> *call_stmt_vars;
428 /* Lookup or create the variable for the call statement CALL. */
430 static varinfo_t
431 get_call_vi (gcall *call)
433 varinfo_t vi, vi2;
435 bool existed;
436 varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
437 if (existed)
438 return *slot_p;
440 vi = new_var_info (NULL_TREE, "CALLUSED", true);
441 vi->offset = 0;
442 vi->size = 1;
443 vi->fullsize = 2;
444 vi->is_full_var = true;
445 vi->is_reg_var = true;
447 vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED", true);
448 vi2->offset = 1;
449 vi2->size = 1;
450 vi2->fullsize = 2;
451 vi2->is_full_var = true;
452 vi2->is_reg_var = true;
454 vi->next = vi2->id;
456 *slot_p = vi;
457 return vi;
460 /* Lookup the variable for the call statement CALL representing
461 the uses. Returns NULL if there is nothing special about this call. */
463 static varinfo_t
464 lookup_call_use_vi (gcall *call)
466 varinfo_t *slot_p = call_stmt_vars->get (call);
467 if (slot_p)
468 return *slot_p;
470 return NULL;
473 /* Lookup the variable for the call statement CALL representing
474 the clobbers. Returns NULL if there is nothing special about this call. */
476 static varinfo_t
477 lookup_call_clobber_vi (gcall *call)
479 varinfo_t uses = lookup_call_use_vi (call);
480 if (!uses)
481 return NULL;
483 return vi_next (uses);
486 /* Lookup or create the variable for the call statement CALL representing
487 the uses. */
489 static varinfo_t
490 get_call_use_vi (gcall *call)
492 return get_call_vi (call);
495 /* Lookup or create the variable for the call statement CALL representing
496 the clobbers. */
498 static varinfo_t ATTRIBUTE_UNUSED
499 get_call_clobber_vi (gcall *call)
501 return vi_next (get_call_vi (call));
505 enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};
507 /* An expression that appears in a constraint. */
509 struct constraint_expr
511 /* Constraint type. */
512 constraint_expr_type type;
514 /* Variable we are referring to in the constraint. */
515 unsigned int var;
517 /* Offset, in bits, of this constraint from the beginning of
518 variables it ends up referring to.
520 IOW, in a deref constraint, we would deref, get the result set,
521 then add OFFSET to each member. */
522 HOST_WIDE_INT offset;
525 /* Use 0x8000... as special unknown offset. */
526 #define UNKNOWN_OFFSET HOST_WIDE_INT_MIN
528 typedef struct constraint_expr ce_s;
529 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
530 static void get_constraint_for (tree, vec<ce_s> *);
531 static void get_constraint_for_rhs (tree, vec<ce_s> *);
532 static void do_deref (vec<ce_s> *);
534 /* Our set constraints are made up of two constraint expressions, one
535 LHS, and one RHS.
537 As described in the introduction, our set constraints each represent an
538 operation between set valued variables.
540 struct constraint
542 struct constraint_expr lhs;
543 struct constraint_expr rhs;
546 /* List of constraints that we use to build the constraint graph from. */
548 static vec<constraint_t> constraints;
549 static object_allocator<constraint> constraint_pool ("Constraint pool");
551 /* The constraint graph is represented as an array of bitmaps
552 containing successor nodes. */
554 struct constraint_graph
556 /* Size of this graph, which may be different than the number of
557 nodes in the variable map. */
558 unsigned int size;
560 /* Explicit successors of each node. */
561 bitmap *succs;
563 /* Implicit predecessors of each node (Used for variable
564 substitution). */
565 bitmap *implicit_preds;
567 /* Explicit predecessors of each node (Used for variable substitution). */
568 bitmap *preds;
570 /* Indirect cycle representatives, or -1 if the node has no indirect
571 cycles. */
572 int *indirect_cycles;
574 /* Representative node for a node. rep[a] == a unless the node has
575 been unified. */
576 unsigned int *rep;
578 /* Equivalence class representative for a label. This is used for
579 variable substitution. */
580 int *eq_rep;
582 /* Pointer equivalence label for a node. All nodes with the same
583 pointer equivalence label can be unified together at some point
584 (either during constraint optimization or after the constraint
585 graph is built). */
586 unsigned int *pe;
588 /* Pointer equivalence representative for a label. This is used to
589 handle nodes that are pointer equivalent but not location
590 equivalent. We can unite these once the addressof constraints
591 are transformed into initial points-to sets. */
592 int *pe_rep;
594 /* Pointer equivalence label for each node, used during variable
595 substitution. */
596 unsigned int *pointer_label;
598 /* Location equivalence label for each node, used during location
599 equivalence finding. */
600 unsigned int *loc_label;
602 /* Pointed-by set for each node, used during location equivalence
603 finding. This is pointed-by rather than pointed-to, because it
604 is constructed using the predecessor graph. */
605 bitmap *pointed_by;
607 /* Points to sets for pointer equivalence. This is *not* the actual
608 points-to sets for nodes. */
609 bitmap *points_to;
611 /* Bitmap of nodes where the bit is set if the node is a direct
612 node. Used for variable substitution. */
613 sbitmap direct_nodes;
615 /* Bitmap of nodes where the bit is set if the node is address
616 taken. Used for variable substitution. */
617 bitmap address_taken;
619 /* Vector of complex constraints for each graph node. Complex
620 constraints are those involving dereferences or offsets that are
621 not 0. */
622 vec<constraint_t> *complex;
625 static constraint_graph_t graph;
627 /* During variable substitution and the offline version of indirect
628 cycle finding, we create nodes to represent dereferences and
629 address taken constraints. These represent where these start and
630 end. */
631 #define FIRST_REF_NODE (varmap).length ()
632 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
634 /* Return the representative node for NODE, if NODE has been unioned
635 with another NODE.
636 This function performs path compression along the way to finding
637 the representative. */
639 static unsigned int
640 find (unsigned int node)
642 gcc_checking_assert (node < graph->size);
643 if (graph->rep[node] != node)
644 return graph->rep[node] = find (graph->rep[node]);
645 return node;
648 /* Union the TO and FROM nodes to the TO nodes.
649 Note that at some point in the future, we may want to do
650 union-by-rank, in which case we are going to have to return the
651 node we unified to. */
653 static bool
654 unite (unsigned int to, unsigned int from)
656 gcc_checking_assert (to < graph->size && from < graph->size);
657 if (to != from && graph->rep[from] != to)
659 graph->rep[from] = to;
660 return true;
662 return false;
665 /* Create a new constraint consisting of LHS and RHS expressions. */
667 static constraint_t
668 new_constraint (const struct constraint_expr lhs,
669 const struct constraint_expr rhs)
671 constraint_t ret = constraint_pool.allocate ();
672 ret->lhs = lhs;
673 ret->rhs = rhs;
674 return ret;
677 /* Print out constraint C to FILE. */
679 static void
680 dump_constraint (FILE *file, constraint_t c)
682 if (c->lhs.type == ADDRESSOF)
683 fprintf (file, "&");
684 else if (c->lhs.type == DEREF)
685 fprintf (file, "*");
686 if (dump_file)
687 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
688 else
689 fprintf (file, "V%d", c->lhs.var);
690 if (c->lhs.offset == UNKNOWN_OFFSET)
691 fprintf (file, " + UNKNOWN");
692 else if (c->lhs.offset != 0)
693 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
694 fprintf (file, " = ");
695 if (c->rhs.type == ADDRESSOF)
696 fprintf (file, "&");
697 else if (c->rhs.type == DEREF)
698 fprintf (file, "*");
699 if (dump_file)
700 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
701 else
702 fprintf (file, "V%d", c->rhs.var);
703 if (c->rhs.offset == UNKNOWN_OFFSET)
704 fprintf (file, " + UNKNOWN");
705 else if (c->rhs.offset != 0)
706 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
710 void debug_constraint (constraint_t);
711 void debug_constraints (void);
712 void debug_constraint_graph (void);
713 void debug_solution_for_var (unsigned int);
714 void debug_sa_points_to_info (void);
715 void debug_varinfo (varinfo_t);
716 void debug_varmap (void);
718 /* Print out constraint C to stderr. */
720 DEBUG_FUNCTION void
721 debug_constraint (constraint_t c)
723 dump_constraint (stderr, c);
724 fprintf (stderr, "\n");
727 /* Print out all constraints to FILE */
729 static void
730 dump_constraints (FILE *file, int from)
732 int i;
733 constraint_t c;
734 for (i = from; constraints.iterate (i, &c); i++)
735 if (c)
737 dump_constraint (file, c);
738 fprintf (file, "\n");
742 /* Print out all constraints to stderr. */
744 DEBUG_FUNCTION void
745 debug_constraints (void)
747 dump_constraints (stderr, 0);
750 /* Print the constraint graph in dot format. */
752 static void
753 dump_constraint_graph (FILE *file)
755 unsigned int i;
757 /* Only print the graph if it has already been initialized: */
758 if (!graph)
759 return;
761 /* Prints the header of the dot file: */
762 fprintf (file, "strict digraph {\n");
763 fprintf (file, " node [\n shape = box\n ]\n");
764 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
765 fprintf (file, "\n // List of nodes and complex constraints in "
766 "the constraint graph:\n");
768 /* The next lines print the nodes in the graph together with the
769 complex constraints attached to them. */
770 for (i = 1; i < graph->size; i++)
772 if (i == FIRST_REF_NODE)
773 continue;
774 if (find (i) != i)
775 continue;
776 if (i < FIRST_REF_NODE)
777 fprintf (file, "\"%s\"", get_varinfo (i)->name);
778 else
779 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
780 if (graph->complex[i].exists ())
782 unsigned j;
783 constraint_t c;
784 fprintf (file, " [label=\"\\N\\n");
785 for (j = 0; graph->complex[i].iterate (j, &c); ++j)
787 dump_constraint (file, c);
788 fprintf (file, "\\l");
790 fprintf (file, "\"]");
792 fprintf (file, ";\n");
795 /* Go over the edges. */
796 fprintf (file, "\n // Edges in the constraint graph:\n");
797 for (i = 1; i < graph->size; i++)
799 unsigned j;
800 bitmap_iterator bi;
801 if (find (i) != i)
802 continue;
803 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
805 unsigned to = find (j);
806 if (i == to)
807 continue;
808 if (i < FIRST_REF_NODE)
809 fprintf (file, "\"%s\"", get_varinfo (i)->name);
810 else
811 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
812 fprintf (file, " -> ");
813 if (to < FIRST_REF_NODE)
814 fprintf (file, "\"%s\"", get_varinfo (to)->name);
815 else
816 fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
817 fprintf (file, ";\n");
821 /* Prints the tail of the dot file. */
822 fprintf (file, "}\n");
825 /* Print out the constraint graph to stderr. */
827 DEBUG_FUNCTION void
828 debug_constraint_graph (void)
830 dump_constraint_graph (stderr);
833 /* SOLVER FUNCTIONS
835 The solver is a simple worklist solver, that works on the following
836 algorithm:
838 sbitmap changed_nodes = all zeroes;
839 changed_count = 0;
840 For each node that is not already collapsed:
841 changed_count++;
842 set bit in changed nodes
844 while (changed_count > 0)
846 compute topological ordering for constraint graph
848 find and collapse cycles in the constraint graph (updating
849 changed if necessary)
851 for each node (n) in the graph in topological order:
852 changed_count--;
854 Process each complex constraint associated with the node,
855 updating changed if necessary.
857 For each outgoing edge from n, propagate the solution from n to
858 the destination of the edge, updating changed as necessary.
860 } */
862 /* Return true if two constraint expressions A and B are equal. */
864 static bool
865 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
867 return a.type == b.type && a.var == b.var && a.offset == b.offset;
870 /* Return true if constraint expression A is less than constraint expression
871 B. This is just arbitrary, but consistent, in order to give them an
872 ordering. */
874 static bool
875 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
877 if (a.type == b.type)
879 if (a.var == b.var)
880 return a.offset < b.offset;
881 else
882 return a.var < b.var;
884 else
885 return a.type < b.type;
888 /* Return true if constraint A is less than constraint B. This is just
889 arbitrary, but consistent, in order to give them an ordering. */
891 static bool
892 constraint_less (const constraint_t &a, const constraint_t &b)
894 if (constraint_expr_less (a->lhs, b->lhs))
895 return true;
896 else if (constraint_expr_less (b->lhs, a->lhs))
897 return false;
898 else
899 return constraint_expr_less (a->rhs, b->rhs);
902 /* Return true if two constraints A and B are equal. */
904 static bool
905 constraint_equal (const constraint &a, const constraint &b)
907 return constraint_expr_equal (a.lhs, b.lhs)
908 && constraint_expr_equal (a.rhs, b.rhs);
912 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
914 static constraint_t
915 constraint_vec_find (vec<constraint_t> vec,
916 constraint &lookfor)
918 unsigned int place;
919 constraint_t found;
921 if (!vec.exists ())
922 return NULL;
924 place = vec.lower_bound (&lookfor, constraint_less);
925 if (place >= vec.length ())
926 return NULL;
927 found = vec[place];
928 if (!constraint_equal (*found, lookfor))
929 return NULL;
930 return found;
933 /* Union two constraint vectors, TO and FROM. Put the result in TO.
934 Returns true of TO set is changed. */
936 static bool
937 constraint_set_union (vec<constraint_t> *to,
938 vec<constraint_t> *from)
940 int i;
941 constraint_t c;
942 bool any_change = false;
944 FOR_EACH_VEC_ELT (*from, i, c)
946 if (constraint_vec_find (*to, *c) == NULL)
948 unsigned int place = to->lower_bound (c, constraint_less);
949 to->safe_insert (place, c);
950 any_change = true;
953 return any_change;
956 /* Expands the solution in SET to all sub-fields of variables included. */
958 static bitmap
959 solution_set_expand (bitmap set, bitmap *expanded)
961 bitmap_iterator bi;
962 unsigned j;
964 if (*expanded)
965 return *expanded;
967 *expanded = BITMAP_ALLOC (&iteration_obstack);
969 /* In a first pass expand variables, once for each head to avoid
970 quadratic behavior, to include all sub-fields. */
971 unsigned prev_head = 0;
972 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
974 varinfo_t v = get_varinfo (j);
975 if (v->is_artificial_var
976 || v->is_full_var)
977 continue;
978 if (v->head != prev_head)
980 varinfo_t head = get_varinfo (v->head);
981 unsigned num = 1;
982 for (varinfo_t n = vi_next (head); n != NULL; n = vi_next (n))
984 if (n->id != head->id + num)
986 /* Usually sub variables are adjacent but since we
987 create pointed-to restrict representatives there
988 can be gaps as well. */
989 bitmap_set_range (*expanded, head->id, num);
990 head = n;
991 num = 1;
993 else
994 num++;
997 bitmap_set_range (*expanded, head->id, num);
998 prev_head = v->head;
1002 /* And finally set the rest of the bits from SET in an efficient way. */
1003 bitmap_ior_into (*expanded, set);
1005 return *expanded;
1008 /* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
1009 process. */
1011 static bool
1012 set_union_with_increment (bitmap to, bitmap delta, HOST_WIDE_INT inc,
1013 bitmap *expanded_delta)
1015 bool changed = false;
1016 bitmap_iterator bi;
1017 unsigned int i;
1019 /* If the solution of DELTA contains anything it is good enough to transfer
1020 this to TO. */
1021 if (bitmap_bit_p (delta, anything_id))
1022 return bitmap_set_bit (to, anything_id);
1024 /* If the offset is unknown we have to expand the solution to
1025 all subfields. */
1026 if (inc == UNKNOWN_OFFSET)
1028 delta = solution_set_expand (delta, expanded_delta);
1029 changed |= bitmap_ior_into (to, delta);
1030 return changed;
1033 /* For non-zero offset union the offsetted solution into the destination. */
1034 EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
1036 varinfo_t vi = get_varinfo (i);
1038 /* If this is a variable with just one field just set its bit
1039 in the result. */
1040 if (vi->is_artificial_var
1041 || vi->is_unknown_size_var
1042 || vi->is_full_var)
1043 changed |= bitmap_set_bit (to, i);
1044 else
1046 HOST_WIDE_INT fieldoffset = vi->offset + inc;
1047 unsigned HOST_WIDE_INT size = vi->size;
1049 /* If the offset makes the pointer point to before the
1050 variable use offset zero for the field lookup. */
1051 if (fieldoffset < 0)
1052 vi = get_varinfo (vi->head);
1053 else
1054 vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
1058 changed |= bitmap_set_bit (to, vi->id);
1059 if (vi->is_full_var
1060 || vi->next == 0)
1061 break;
1063 /* We have to include all fields that overlap the current field
1064 shifted by inc. */
1065 vi = vi_next (vi);
1067 while (vi->offset < fieldoffset + size);
1071 return changed;
1074 /* Insert constraint C into the list of complex constraints for graph
1075 node VAR. */
1077 static void
1078 insert_into_complex (constraint_graph_t graph,
1079 unsigned int var, constraint_t c)
1081 vec<constraint_t> complex = graph->complex[var];
1082 unsigned int place = complex.lower_bound (c, constraint_less);
1084 /* Only insert constraints that do not already exist. */
1085 if (place >= complex.length ()
1086 || !constraint_equal (*c, *complex[place]))
1087 graph->complex[var].safe_insert (place, c);
1091 /* Condense two variable nodes into a single variable node, by moving
1092 all associated info from FROM to TO. Returns true if TO node's
1093 constraint set changes after the merge. */
1095 static bool
1096 merge_node_constraints (constraint_graph_t graph, unsigned int to,
1097 unsigned int from)
1099 unsigned int i;
1100 constraint_t c;
1101 bool any_change = false;
1103 gcc_checking_assert (find (from) == to);
1105 /* Move all complex constraints from src node into to node */
1106 FOR_EACH_VEC_ELT (graph->complex[from], i, c)
1108 /* In complex constraints for node FROM, we may have either
1109 a = *FROM, and *FROM = a, or an offseted constraint which are
1110 always added to the rhs node's constraints. */
1112 if (c->rhs.type == DEREF)
1113 c->rhs.var = to;
1114 else if (c->lhs.type == DEREF)
1115 c->lhs.var = to;
1116 else
1117 c->rhs.var = to;
1120 any_change = constraint_set_union (&graph->complex[to],
1121 &graph->complex[from]);
1122 graph->complex[from].release ();
1123 return any_change;
1127 /* Remove edges involving NODE from GRAPH. */
1129 static void
1130 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1132 if (graph->succs[node])
1133 BITMAP_FREE (graph->succs[node]);
1136 /* Merge GRAPH nodes FROM and TO into node TO. */
1138 static void
1139 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1140 unsigned int from)
1142 if (graph->indirect_cycles[from] != -1)
1144 /* If we have indirect cycles with the from node, and we have
1145 none on the to node, the to node has indirect cycles from the
1146 from node now that they are unified.
1147 If indirect cycles exist on both, unify the nodes that they
1148 are in a cycle with, since we know they are in a cycle with
1149 each other. */
1150 if (graph->indirect_cycles[to] == -1)
1151 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1154 /* Merge all the successor edges. */
1155 if (graph->succs[from])
1157 if (!graph->succs[to])
1158 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1159 bitmap_ior_into (graph->succs[to],
1160 graph->succs[from]);
1163 clear_edges_for_node (graph, from);
1167 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1168 it doesn't exist in the graph already. */
1170 static void
1171 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1172 unsigned int from)
1174 if (to == from)
1175 return;
1177 if (!graph->implicit_preds[to])
1178 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1180 if (bitmap_set_bit (graph->implicit_preds[to], from))
1181 stats.num_implicit_edges++;
1184 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1185 it doesn't exist in the graph already.
1186 Return false if the edge already existed, true otherwise. */
1188 static void
1189 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1190 unsigned int from)
1192 if (!graph->preds[to])
1193 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1194 bitmap_set_bit (graph->preds[to], from);
1197 /* Add a graph edge to GRAPH, going from FROM to TO if
1198 it doesn't exist in the graph already.
1199 Return false if the edge already existed, true otherwise. */
1201 static bool
1202 add_graph_edge (constraint_graph_t graph, unsigned int to,
1203 unsigned int from)
1205 if (to == from)
1207 return false;
1209 else
1211 bool r = false;
1213 if (!graph->succs[from])
1214 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1216 /* The graph solving process does not avoid "triangles", thus
1217 there can be multiple paths from a node to another involving
1218 intermediate other nodes. That causes extra copying which is
1219 most difficult to avoid when the intermediate node is ESCAPED
1220 because there are no edges added from ESCAPED. Avoid
1221 adding the direct edge FROM -> TO when we have FROM -> ESCAPED
1222 and TO contains ESCAPED.
1223 ??? Note this is only a heuristic, it does not prevent the
1224 situation from occuring. The heuristic helps PR38474 and
1225 PR99912 significantly. */
1226 if (to < FIRST_REF_NODE
1227 && bitmap_bit_p (graph->succs[from], find (escaped_id))
1228 && bitmap_bit_p (get_varinfo (find (to))->solution, escaped_id))
1230 stats.num_avoided_edges++;
1231 return false;
1234 if (bitmap_set_bit (graph->succs[from], to))
1236 r = true;
1237 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1238 stats.num_edges++;
1240 return r;
1245 /* Initialize the constraint graph structure to contain SIZE nodes. */
1247 static void
1248 init_graph (unsigned int size)
1250 unsigned int j;
1252 graph = XCNEW (struct constraint_graph);
1253 graph->size = size;
1254 graph->succs = XCNEWVEC (bitmap, graph->size);
1255 graph->indirect_cycles = XNEWVEC (int, graph->size);
1256 graph->rep = XNEWVEC (unsigned int, graph->size);
1257 /* ??? Macros do not support template types with multiple arguments,
1258 so we use a typedef to work around it. */
1259 typedef vec<constraint_t> vec_constraint_t_heap;
1260 graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1261 graph->pe = XCNEWVEC (unsigned int, graph->size);
1262 graph->pe_rep = XNEWVEC (int, graph->size);
1264 for (j = 0; j < graph->size; j++)
1266 graph->rep[j] = j;
1267 graph->pe_rep[j] = -1;
1268 graph->indirect_cycles[j] = -1;
1272 /* Build the constraint graph, adding only predecessor edges right now. */
1274 static void
1275 build_pred_graph (void)
1277 int i;
1278 constraint_t c;
1279 unsigned int j;
1281 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1282 graph->preds = XCNEWVEC (bitmap, graph->size);
1283 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1284 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1285 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1286 graph->points_to = XCNEWVEC (bitmap, graph->size);
1287 graph->eq_rep = XNEWVEC (int, graph->size);
1288 graph->direct_nodes = sbitmap_alloc (graph->size);
1289 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1290 bitmap_clear (graph->direct_nodes);
1292 for (j = 1; j < FIRST_REF_NODE; j++)
1294 if (!get_varinfo (j)->is_special_var)
1295 bitmap_set_bit (graph->direct_nodes, j);
1298 for (j = 0; j < graph->size; j++)
1299 graph->eq_rep[j] = -1;
1301 for (j = 0; j < varmap.length (); j++)
1302 graph->indirect_cycles[j] = -1;
1304 FOR_EACH_VEC_ELT (constraints, i, c)
1306 struct constraint_expr lhs = c->lhs;
1307 struct constraint_expr rhs = c->rhs;
1308 unsigned int lhsvar = lhs.var;
1309 unsigned int rhsvar = rhs.var;
1311 if (lhs.type == DEREF)
1313 /* *x = y. */
1314 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1316 if (lhs.var == anything_id)
1317 add_pred_graph_edge (graph, storedanything_id, rhsvar);
1318 else
1319 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1322 else if (rhs.type == DEREF)
1324 /* x = *y */
1325 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1326 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1327 else
1328 bitmap_clear_bit (graph->direct_nodes, lhsvar);
1330 else if (rhs.type == ADDRESSOF)
1332 varinfo_t v;
1334 /* x = &y */
1335 if (graph->points_to[lhsvar] == NULL)
1336 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1337 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1339 if (graph->pointed_by[rhsvar] == NULL)
1340 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1341 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1343 /* Implicitly, *x = y */
1344 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1346 /* All related variables are no longer direct nodes. */
1347 bitmap_clear_bit (graph->direct_nodes, rhsvar);
1348 v = get_varinfo (rhsvar);
1349 if (!v->is_full_var)
1351 v = get_varinfo (v->head);
1354 bitmap_clear_bit (graph->direct_nodes, v->id);
1355 v = vi_next (v);
1357 while (v != NULL);
1359 bitmap_set_bit (graph->address_taken, rhsvar);
1361 else if (lhsvar > anything_id
1362 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1364 /* x = y */
1365 add_pred_graph_edge (graph, lhsvar, rhsvar);
1366 /* Implicitly, *x = *y */
1367 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1368 FIRST_REF_NODE + rhsvar);
1370 else if (lhs.offset != 0 || rhs.offset != 0)
1372 if (rhs.offset != 0)
1373 bitmap_clear_bit (graph->direct_nodes, lhs.var);
1374 else if (lhs.offset != 0)
1375 bitmap_clear_bit (graph->direct_nodes, rhs.var);
1380 /* Build the constraint graph, adding successor edges. */
1382 static void
1383 build_succ_graph (void)
1385 unsigned i, t;
1386 constraint_t c;
1388 FOR_EACH_VEC_ELT (constraints, i, c)
1390 struct constraint_expr lhs;
1391 struct constraint_expr rhs;
1392 unsigned int lhsvar;
1393 unsigned int rhsvar;
1395 if (!c)
1396 continue;
1398 lhs = c->lhs;
1399 rhs = c->rhs;
1400 lhsvar = find (lhs.var);
1401 rhsvar = find (rhs.var);
1403 if (lhs.type == DEREF)
1405 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1407 if (lhs.var == anything_id)
1408 add_graph_edge (graph, storedanything_id, rhsvar);
1409 else
1410 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1413 else if (rhs.type == DEREF)
1415 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1416 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1418 else if (rhs.type == ADDRESSOF)
1420 /* x = &y */
1421 gcc_checking_assert (find (rhs.var) == rhs.var);
1422 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1424 else if (lhsvar > anything_id
1425 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1427 add_graph_edge (graph, lhsvar, rhsvar);
1431 /* Add edges from STOREDANYTHING to all nodes that can receive pointers. */
1432 t = find (storedanything_id);
1433 for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1435 if (get_varinfo (i)->may_have_pointers)
1436 add_graph_edge (graph, find (i), t);
1439 /* Everything stored to ANYTHING also potentially escapes. */
1440 add_graph_edge (graph, find (escaped_id), t);
1444 /* Changed variables on the last iteration. */
1445 static bitmap changed;
1447 /* Strongly Connected Component visitation info. */
1449 class scc_info
1451 public:
1452 scc_info (size_t size);
1453 ~scc_info ();
1455 auto_sbitmap visited;
1456 auto_sbitmap deleted;
1457 unsigned int *dfs;
1458 unsigned int *node_mapping;
1459 int current_index;
1460 auto_vec<unsigned> scc_stack;
1464 /* Recursive routine to find strongly connected components in GRAPH.
1465 SI is the SCC info to store the information in, and N is the id of current
1466 graph node we are processing.
1468 This is Tarjan's strongly connected component finding algorithm, as
1469 modified by Nuutila to keep only non-root nodes on the stack.
1470 The algorithm can be found in "On finding the strongly connected
1471 connected components in a directed graph" by Esko Nuutila and Eljas
1472 Soisalon-Soininen, in Information Processing Letters volume 49,
1473 number 1, pages 9-14. */
1475 static void
1476 scc_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
1478 unsigned int i;
1479 bitmap_iterator bi;
1480 unsigned int my_dfs;
1482 bitmap_set_bit (si->visited, n);
1483 si->dfs[n] = si->current_index ++;
1484 my_dfs = si->dfs[n];
1486 /* Visit all the successors. */
1487 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1489 unsigned int w;
1491 if (i > LAST_REF_NODE)
1492 break;
1494 w = find (i);
1495 if (bitmap_bit_p (si->deleted, w))
1496 continue;
1498 if (!bitmap_bit_p (si->visited, w))
1499 scc_visit (graph, si, w);
1501 unsigned int t = find (w);
1502 gcc_checking_assert (find (n) == n);
1503 if (si->dfs[t] < si->dfs[n])
1504 si->dfs[n] = si->dfs[t];
1507 /* See if any components have been identified. */
1508 if (si->dfs[n] == my_dfs)
1510 if (si->scc_stack.length () > 0
1511 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1513 bitmap scc = BITMAP_ALLOC (NULL);
1514 unsigned int lowest_node;
1515 bitmap_iterator bi;
1517 bitmap_set_bit (scc, n);
1519 while (si->scc_stack.length () != 0
1520 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1522 unsigned int w = si->scc_stack.pop ();
1524 bitmap_set_bit (scc, w);
1527 lowest_node = bitmap_first_set_bit (scc);
1528 gcc_assert (lowest_node < FIRST_REF_NODE);
1530 /* Collapse the SCC nodes into a single node, and mark the
1531 indirect cycles. */
1532 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1534 if (i < FIRST_REF_NODE)
1536 if (unite (lowest_node, i))
1537 unify_nodes (graph, lowest_node, i, false);
1539 else
1541 unite (lowest_node, i);
1542 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1545 bitmap_set_bit (si->deleted, lowest_node);
1547 else
1548 bitmap_set_bit (si->deleted, n);
1550 else
1551 si->scc_stack.safe_push (n);
1554 /* Unify node FROM into node TO, updating the changed count if
1555 necessary when UPDATE_CHANGED is true. */
1557 static void
1558 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1559 bool update_changed)
1561 gcc_checking_assert (to != from && find (to) == to);
1563 if (dump_file && (dump_flags & TDF_DETAILS))
1564 fprintf (dump_file, "Unifying %s to %s\n",
1565 get_varinfo (from)->name,
1566 get_varinfo (to)->name);
1568 if (update_changed)
1569 stats.unified_vars_dynamic++;
1570 else
1571 stats.unified_vars_static++;
1573 merge_graph_nodes (graph, to, from);
1574 if (merge_node_constraints (graph, to, from))
1576 if (update_changed)
1577 bitmap_set_bit (changed, to);
1580 /* Mark TO as changed if FROM was changed. If TO was already marked
1581 as changed, decrease the changed count. */
1583 if (update_changed
1584 && bitmap_clear_bit (changed, from))
1585 bitmap_set_bit (changed, to);
1586 varinfo_t fromvi = get_varinfo (from);
1587 if (fromvi->solution)
1589 /* If the solution changes because of the merging, we need to mark
1590 the variable as changed. */
1591 varinfo_t tovi = get_varinfo (to);
1592 if (bitmap_ior_into (tovi->solution, fromvi->solution))
1594 if (update_changed)
1595 bitmap_set_bit (changed, to);
1598 BITMAP_FREE (fromvi->solution);
1599 if (fromvi->oldsolution)
1600 BITMAP_FREE (fromvi->oldsolution);
1602 if (stats.iterations > 0
1603 && tovi->oldsolution)
1604 BITMAP_FREE (tovi->oldsolution);
1606 if (graph->succs[to])
1607 bitmap_clear_bit (graph->succs[to], to);
1610 /* Add a copy edge FROM -> TO, optimizing special cases. Returns TRUE
1611 if the solution of TO changed. */
1613 static bool
1614 solve_add_graph_edge (constraint_graph_t graph, unsigned int to,
1615 unsigned int from)
1617 /* Adding edges from the special vars is pointless.
1618 They don't have sets that can change. */
1619 if (get_varinfo (from)->is_special_var)
1620 return bitmap_ior_into (get_varinfo (to)->solution,
1621 get_varinfo (from)->solution);
1622 /* Merging the solution from ESCAPED needlessly increases
1623 the set. Use ESCAPED as representative instead. */
1624 else if (from == find (escaped_id))
1625 return bitmap_set_bit (get_varinfo (to)->solution, escaped_id);
1626 else if (get_varinfo (from)->may_have_pointers
1627 && add_graph_edge (graph, to, from))
1628 return bitmap_ior_into (get_varinfo (to)->solution,
1629 get_varinfo (from)->solution);
1630 return false;
1633 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1634 starting solution for y. */
1636 static void
1637 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1638 bitmap delta, bitmap *expanded_delta)
1640 unsigned int lhs = c->lhs.var;
1641 bool flag = false;
1642 bitmap sol = get_varinfo (lhs)->solution;
1643 unsigned int j;
1644 bitmap_iterator bi;
1645 HOST_WIDE_INT roffset = c->rhs.offset;
1647 /* Our IL does not allow this. */
1648 gcc_checking_assert (c->lhs.offset == 0);
1650 /* If the solution of Y contains anything it is good enough to transfer
1651 this to the LHS. */
1652 if (bitmap_bit_p (delta, anything_id))
1654 flag |= bitmap_set_bit (sol, anything_id);
1655 goto done;
1658 /* If we do not know at with offset the rhs is dereferenced compute
1659 the reachability set of DELTA, conservatively assuming it is
1660 dereferenced at all valid offsets. */
1661 if (roffset == UNKNOWN_OFFSET)
1663 delta = solution_set_expand (delta, expanded_delta);
1664 /* No further offset processing is necessary. */
1665 roffset = 0;
1668 /* For each variable j in delta (Sol(y)), add
1669 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1670 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1672 varinfo_t v = get_varinfo (j);
1673 HOST_WIDE_INT fieldoffset = v->offset + roffset;
1674 unsigned HOST_WIDE_INT size = v->size;
1675 unsigned int t;
1677 if (v->is_full_var)
1679 else if (roffset != 0)
1681 if (fieldoffset < 0)
1682 v = get_varinfo (v->head);
1683 else
1684 v = first_or_preceding_vi_for_offset (v, fieldoffset);
1687 /* We have to include all fields that overlap the current field
1688 shifted by roffset. */
1691 t = find (v->id);
1693 flag |= solve_add_graph_edge (graph, lhs, t);
1695 if (v->is_full_var
1696 || v->next == 0)
1697 break;
1699 v = vi_next (v);
1701 while (v->offset < fieldoffset + size);
1704 done:
1705 /* If the LHS solution changed, mark the var as changed. */
1706 if (flag)
1707 bitmap_set_bit (changed, lhs);
1710 /* Process a constraint C that represents *(x + off) = y using DELTA
1711 as the starting solution for x. */
1713 static void
1714 do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
1716 unsigned int rhs = c->rhs.var;
1717 bitmap sol = get_varinfo (rhs)->solution;
1718 unsigned int j;
1719 bitmap_iterator bi;
1720 HOST_WIDE_INT loff = c->lhs.offset;
1721 bool escaped_p = false;
1723 /* Our IL does not allow this. */
1724 gcc_checking_assert (c->rhs.offset == 0);
1726 /* If the solution of y contains ANYTHING simply use the ANYTHING
1727 solution. This avoids needlessly increasing the points-to sets. */
1728 if (bitmap_bit_p (sol, anything_id))
1729 sol = get_varinfo (find (anything_id))->solution;
1731 /* If the solution for x contains ANYTHING we have to merge the
1732 solution of y into all pointer variables which we do via
1733 STOREDANYTHING. */
1734 if (bitmap_bit_p (delta, anything_id))
1736 unsigned t = find (storedanything_id);
1737 if (solve_add_graph_edge (graph, t, rhs))
1738 bitmap_set_bit (changed, t);
1739 return;
1742 /* If we do not know at with offset the rhs is dereferenced compute
1743 the reachability set of DELTA, conservatively assuming it is
1744 dereferenced at all valid offsets. */
1745 if (loff == UNKNOWN_OFFSET)
1747 delta = solution_set_expand (delta, expanded_delta);
1748 loff = 0;
1751 /* For each member j of delta (Sol(x)), add an edge from y to j and
1752 union Sol(y) into Sol(j) */
1753 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1755 varinfo_t v = get_varinfo (j);
1756 unsigned int t;
1757 HOST_WIDE_INT fieldoffset = v->offset + loff;
1758 unsigned HOST_WIDE_INT size = v->size;
1760 if (v->is_full_var)
1762 else if (loff != 0)
1764 if (fieldoffset < 0)
1765 v = get_varinfo (v->head);
1766 else
1767 v = first_or_preceding_vi_for_offset (v, fieldoffset);
1770 /* We have to include all fields that overlap the current field
1771 shifted by loff. */
1774 if (v->may_have_pointers)
1776 /* If v is a global variable then this is an escape point. */
1777 if (v->is_global_var
1778 && !escaped_p)
1780 t = find (escaped_id);
1781 if (add_graph_edge (graph, t, rhs)
1782 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1783 bitmap_set_bit (changed, t);
1784 /* Enough to let rhs escape once. */
1785 escaped_p = true;
1788 if (v->is_special_var)
1789 break;
1791 t = find (v->id);
1793 if (solve_add_graph_edge (graph, t, rhs))
1794 bitmap_set_bit (changed, t);
1797 if (v->is_full_var
1798 || v->next == 0)
1799 break;
1801 v = vi_next (v);
1803 while (v->offset < fieldoffset + size);
1807 /* Handle a non-simple (simple meaning requires no iteration),
1808 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1810 static void
1811 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
1812 bitmap *expanded_delta)
1814 if (c->lhs.type == DEREF)
1816 if (c->rhs.type == ADDRESSOF)
1818 gcc_unreachable ();
1820 else
1822 /* *x = y */
1823 do_ds_constraint (c, delta, expanded_delta);
1826 else if (c->rhs.type == DEREF)
1828 /* x = *y */
1829 if (!(get_varinfo (c->lhs.var)->is_special_var))
1830 do_sd_constraint (graph, c, delta, expanded_delta);
1832 else
1834 bitmap tmp;
1835 bool flag = false;
1837 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
1838 && c->rhs.offset != 0 && c->lhs.offset == 0);
1839 tmp = get_varinfo (c->lhs.var)->solution;
1841 flag = set_union_with_increment (tmp, delta, c->rhs.offset,
1842 expanded_delta);
1844 if (flag)
1845 bitmap_set_bit (changed, c->lhs.var);
1849 /* Initialize and return a new SCC info structure. */
1851 scc_info::scc_info (size_t size) :
1852 visited (size), deleted (size), current_index (0), scc_stack (1)
1854 bitmap_clear (visited);
1855 bitmap_clear (deleted);
1856 node_mapping = XNEWVEC (unsigned int, size);
1857 dfs = XCNEWVEC (unsigned int, size);
1859 for (size_t i = 0; i < size; i++)
1860 node_mapping[i] = i;
1863 /* Free an SCC info structure pointed to by SI */
1865 scc_info::~scc_info ()
1867 free (node_mapping);
1868 free (dfs);
1872 /* Find indirect cycles in GRAPH that occur, using strongly connected
1873 components, and note them in the indirect cycles map.
1875 This technique comes from Ben Hardekopf and Calvin Lin,
1876 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1877 Lines of Code", submitted to PLDI 2007. */
1879 static void
1880 find_indirect_cycles (constraint_graph_t graph)
1882 unsigned int i;
1883 unsigned int size = graph->size;
1884 scc_info si (size);
1886 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1887 if (!bitmap_bit_p (si.visited, i) && find (i) == i)
1888 scc_visit (graph, &si, i);
1891 /* Visit the graph in topological order starting at node N, and store the
1892 order in TOPO_ORDER using VISITED to indicate visited nodes. */
1894 static void
1895 topo_visit (constraint_graph_t graph, vec<unsigned> &topo_order,
1896 sbitmap visited, unsigned int n)
1898 bitmap_iterator bi;
1899 unsigned int j;
1901 bitmap_set_bit (visited, n);
1903 if (graph->succs[n])
1904 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1906 unsigned k = find (j);
1907 if (!bitmap_bit_p (visited, k))
1908 topo_visit (graph, topo_order, visited, k);
1911 /* Also consider copy with offset complex constraints as implicit edges. */
1912 for (auto c : graph->complex[n])
1914 /* Constraints are ordered so that SCALAR = SCALAR appear first. */
1915 if (c->lhs.type != SCALAR || c->rhs.type != SCALAR)
1916 break;
1917 gcc_checking_assert (c->rhs.var == n);
1918 unsigned k = find (c->lhs.var);
1919 if (!bitmap_bit_p (visited, k))
1920 topo_visit (graph, topo_order, visited, k);
1923 topo_order.quick_push (n);
1926 /* Compute a topological ordering for GRAPH, and return the result. */
1928 static auto_vec<unsigned>
1929 compute_topo_order (constraint_graph_t graph)
1931 unsigned int i;
1932 unsigned int size = graph->size;
1934 auto_sbitmap visited (size);
1935 bitmap_clear (visited);
1937 /* For the heuristic in add_graph_edge to work optimally make sure to
1938 first visit the connected component of the graph containing
1939 ESCAPED. Do this by extracting the connected component
1940 with ESCAPED and append that to all other components as solve_graph
1941 pops from the order. */
1942 auto_vec<unsigned> tail (size);
1943 topo_visit (graph, tail, visited, find (escaped_id));
1945 auto_vec<unsigned> topo_order (size);
1947 for (i = 0; i != size; ++i)
1948 if (!bitmap_bit_p (visited, i) && find (i) == i)
1949 topo_visit (graph, topo_order, visited, i);
1951 topo_order.splice (tail);
1952 return topo_order;
1955 /* Structure used to for hash value numbering of pointer equivalence
1956 classes. */
1958 typedef struct equiv_class_label
1960 hashval_t hashcode;
1961 unsigned int equivalence_class;
1962 bitmap labels;
1963 } *equiv_class_label_t;
1964 typedef const struct equiv_class_label *const_equiv_class_label_t;
1966 /* Equiv_class_label hashtable helpers. */
1968 struct equiv_class_hasher : nofree_ptr_hash <equiv_class_label>
1970 static inline hashval_t hash (const equiv_class_label *);
1971 static inline bool equal (const equiv_class_label *,
1972 const equiv_class_label *);
1975 /* Hash function for a equiv_class_label_t */
1977 inline hashval_t
1978 equiv_class_hasher::hash (const equiv_class_label *ecl)
1980 return ecl->hashcode;
1983 /* Equality function for two equiv_class_label_t's. */
1985 inline bool
1986 equiv_class_hasher::equal (const equiv_class_label *eql1,
1987 const equiv_class_label *eql2)
1989 return (eql1->hashcode == eql2->hashcode
1990 && bitmap_equal_p (eql1->labels, eql2->labels));
1993 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1994 classes. */
1995 static hash_table<equiv_class_hasher> *pointer_equiv_class_table;
1997 /* A hashtable for mapping a bitmap of labels->location equivalence
1998 classes. */
1999 static hash_table<equiv_class_hasher> *location_equiv_class_table;
2001 struct obstack equiv_class_obstack;
2003 /* Lookup a equivalence class in TABLE by the bitmap of LABELS with
2004 hash HAS it contains. Sets *REF_LABELS to the bitmap LABELS
2005 is equivalent to. */
2007 static equiv_class_label *
2008 equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
2009 bitmap labels)
2011 equiv_class_label **slot;
2012 equiv_class_label ecl;
2014 ecl.labels = labels;
2015 ecl.hashcode = bitmap_hash (labels);
2016 slot = table->find_slot (&ecl, INSERT);
2017 if (!*slot)
2019 *slot = XOBNEW (&equiv_class_obstack, struct equiv_class_label);
2020 (*slot)->labels = labels;
2021 (*slot)->hashcode = ecl.hashcode;
2022 (*slot)->equivalence_class = 0;
2025 return *slot;
2028 /* Perform offline variable substitution.
2030 This is a worst case quadratic time way of identifying variables
2031 that must have equivalent points-to sets, including those caused by
2032 static cycles, and single entry subgraphs, in the constraint graph.
2034 The technique is described in "Exploiting Pointer and Location
2035 Equivalence to Optimize Pointer Analysis. In the 14th International
2036 Static Analysis Symposium (SAS), August 2007." It is known as the
2037 "HU" algorithm, and is equivalent to value numbering the collapsed
2038 constraint graph including evaluating unions.
2040 The general method of finding equivalence classes is as follows:
2041 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
2042 Initialize all non-REF nodes to be direct nodes.
2043 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
2044 variable}
2045 For each constraint containing the dereference, we also do the same
2046 thing.
2048 We then compute SCC's in the graph and unify nodes in the same SCC,
2049 including pts sets.
2051 For each non-collapsed node x:
2052 Visit all unvisited explicit incoming edges.
2053 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
2054 where y->x.
2055 Lookup the equivalence class for pts(x).
2056 If we found one, equivalence_class(x) = found class.
2057 Otherwise, equivalence_class(x) = new class, and new_class is
2058 added to the lookup table.
2060 All direct nodes with the same equivalence class can be replaced
2061 with a single representative node.
2062 All unlabeled nodes (label == 0) are not pointers and all edges
2063 involving them can be eliminated.
2064 We perform these optimizations during rewrite_constraints
2066 In addition to pointer equivalence class finding, we also perform
2067 location equivalence class finding. This is the set of variables
2068 that always appear together in points-to sets. We use this to
2069 compress the size of the points-to sets. */
2071 /* Current maximum pointer equivalence class id. */
2072 static int pointer_equiv_class;
2074 /* Current maximum location equivalence class id. */
2075 static int location_equiv_class;
2077 /* Recursive routine to find strongly connected components in GRAPH,
2078 and label it's nodes with DFS numbers. */
2080 static void
2081 condense_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
2083 unsigned int i;
2084 bitmap_iterator bi;
2085 unsigned int my_dfs;
2087 gcc_checking_assert (si->node_mapping[n] == n);
2088 bitmap_set_bit (si->visited, n);
2089 si->dfs[n] = si->current_index ++;
2090 my_dfs = si->dfs[n];
2092 /* Visit all the successors. */
2093 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2095 unsigned int w = si->node_mapping[i];
2097 if (bitmap_bit_p (si->deleted, w))
2098 continue;
2100 if (!bitmap_bit_p (si->visited, w))
2101 condense_visit (graph, si, w);
2103 unsigned int t = si->node_mapping[w];
2104 gcc_checking_assert (si->node_mapping[n] == n);
2105 if (si->dfs[t] < si->dfs[n])
2106 si->dfs[n] = si->dfs[t];
2109 /* Visit all the implicit predecessors. */
2110 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2112 unsigned int w = si->node_mapping[i];
2114 if (bitmap_bit_p (si->deleted, w))
2115 continue;
2117 if (!bitmap_bit_p (si->visited, w))
2118 condense_visit (graph, si, w);
2120 unsigned int t = si->node_mapping[w];
2121 gcc_assert (si->node_mapping[n] == n);
2122 if (si->dfs[t] < si->dfs[n])
2123 si->dfs[n] = si->dfs[t];
2126 /* See if any components have been identified. */
2127 if (si->dfs[n] == my_dfs)
2129 if (si->scc_stack.length () != 0
2130 && si->dfs[si->scc_stack.last ()] >= my_dfs)
2132 /* Find the first node of the SCC and do non-bitmap work. */
2133 bool direct_p = true;
2134 unsigned first = si->scc_stack.length ();
2137 --first;
2138 unsigned int w = si->scc_stack[first];
2139 si->node_mapping[w] = n;
2140 if (!bitmap_bit_p (graph->direct_nodes, w))
2141 direct_p = false;
2143 while (first > 0
2144 && si->dfs[si->scc_stack[first - 1]] >= my_dfs);
2145 if (!direct_p)
2146 bitmap_clear_bit (graph->direct_nodes, n);
2148 /* Want to reduce to node n, push that first. */
2149 si->scc_stack.reserve (1);
2150 si->scc_stack.quick_push (si->scc_stack[first]);
2151 si->scc_stack[first] = n;
2153 unsigned scc_size = si->scc_stack.length () - first;
2154 unsigned split = scc_size / 2;
2155 unsigned carry = scc_size - split * 2;
2156 while (split > 0)
2158 for (unsigned i = 0; i < split; ++i)
2160 unsigned a = si->scc_stack[first + i];
2161 unsigned b = si->scc_stack[first + split + carry + i];
2163 /* Unify our nodes. */
2164 if (graph->preds[b])
2166 if (!graph->preds[a])
2167 std::swap (graph->preds[a], graph->preds[b]);
2168 else
2169 bitmap_ior_into_and_free (graph->preds[a],
2170 &graph->preds[b]);
2172 if (graph->implicit_preds[b])
2174 if (!graph->implicit_preds[a])
2175 std::swap (graph->implicit_preds[a],
2176 graph->implicit_preds[b]);
2177 else
2178 bitmap_ior_into_and_free (graph->implicit_preds[a],
2179 &graph->implicit_preds[b]);
2181 if (graph->points_to[b])
2183 if (!graph->points_to[a])
2184 std::swap (graph->points_to[a], graph->points_to[b]);
2185 else
2186 bitmap_ior_into_and_free (graph->points_to[a],
2187 &graph->points_to[b]);
2190 unsigned remain = split + carry;
2191 split = remain / 2;
2192 carry = remain - split * 2;
2194 /* Actually pop the SCC. */
2195 si->scc_stack.truncate (first);
2197 bitmap_set_bit (si->deleted, n);
2199 else
2200 si->scc_stack.safe_push (n);
2203 /* Label pointer equivalences.
2205 This performs a value numbering of the constraint graph to
2206 discover which variables will always have the same points-to sets
2207 under the current set of constraints.
2209 The way it value numbers is to store the set of points-to bits
2210 generated by the constraints and graph edges. This is just used as a
2211 hash and equality comparison. The *actual set of points-to bits* is
2212 completely irrelevant, in that we don't care about being able to
2213 extract them later.
2215 The equality values (currently bitmaps) just have to satisfy a few
2216 constraints, the main ones being:
2217 1. The combining operation must be order independent.
2218 2. The end result of a given set of operations must be unique iff the
2219 combination of input values is unique
2220 3. Hashable. */
2222 static void
2223 label_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
2225 unsigned int i, first_pred;
2226 bitmap_iterator bi;
2228 bitmap_set_bit (si->visited, n);
2230 /* Label and union our incoming edges's points to sets. */
2231 first_pred = -1U;
2232 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2234 unsigned int w = si->node_mapping[i];
2235 if (!bitmap_bit_p (si->visited, w))
2236 label_visit (graph, si, w);
2238 /* Skip unused edges */
2239 if (w == n || graph->pointer_label[w] == 0)
2240 continue;
2242 if (graph->points_to[w])
2244 if (!graph->points_to[n])
2246 if (first_pred == -1U)
2247 first_pred = w;
2248 else
2250 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2251 bitmap_ior (graph->points_to[n],
2252 graph->points_to[first_pred],
2253 graph->points_to[w]);
2256 else
2257 bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
2261 /* Indirect nodes get fresh variables and a new pointer equiv class. */
2262 if (!bitmap_bit_p (graph->direct_nodes, n))
2264 if (!graph->points_to[n])
2266 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2267 if (first_pred != -1U)
2268 bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
2270 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2271 graph->pointer_label[n] = pointer_equiv_class++;
2272 equiv_class_label_t ecl;
2273 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2274 graph->points_to[n]);
2275 ecl->equivalence_class = graph->pointer_label[n];
2276 return;
2279 /* If there was only a single non-empty predecessor the pointer equiv
2280 class is the same. */
2281 if (!graph->points_to[n])
2283 if (first_pred != -1U)
2285 graph->pointer_label[n] = graph->pointer_label[first_pred];
2286 graph->points_to[n] = graph->points_to[first_pred];
2288 return;
2291 if (!bitmap_empty_p (graph->points_to[n]))
2293 equiv_class_label_t ecl;
2294 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2295 graph->points_to[n]);
2296 if (ecl->equivalence_class == 0)
2297 ecl->equivalence_class = pointer_equiv_class++;
2298 else
2300 BITMAP_FREE (graph->points_to[n]);
2301 graph->points_to[n] = ecl->labels;
2303 graph->pointer_label[n] = ecl->equivalence_class;
2307 /* Print the pred graph in dot format. */
2309 static void
2310 dump_pred_graph (class scc_info *si, FILE *file)
2312 unsigned int i;
2314 /* Only print the graph if it has already been initialized: */
2315 if (!graph)
2316 return;
2318 /* Prints the header of the dot file: */
2319 fprintf (file, "strict digraph {\n");
2320 fprintf (file, " node [\n shape = box\n ]\n");
2321 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
2322 fprintf (file, "\n // List of nodes and complex constraints in "
2323 "the constraint graph:\n");
2325 /* The next lines print the nodes in the graph together with the
2326 complex constraints attached to them. */
2327 for (i = 1; i < graph->size; i++)
2329 if (i == FIRST_REF_NODE)
2330 continue;
2331 if (si->node_mapping[i] != i)
2332 continue;
2333 if (i < FIRST_REF_NODE)
2334 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2335 else
2336 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2337 if (graph->points_to[i]
2338 && !bitmap_empty_p (graph->points_to[i]))
2340 if (i < FIRST_REF_NODE)
2341 fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2342 else
2343 fprintf (file, "[label=\"*%s = {",
2344 get_varinfo (i - FIRST_REF_NODE)->name);
2345 unsigned j;
2346 bitmap_iterator bi;
2347 EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2348 fprintf (file, " %d", j);
2349 fprintf (file, " }\"]");
2351 fprintf (file, ";\n");
2354 /* Go over the edges. */
2355 fprintf (file, "\n // Edges in the constraint graph:\n");
2356 for (i = 1; i < graph->size; i++)
2358 unsigned j;
2359 bitmap_iterator bi;
2360 if (si->node_mapping[i] != i)
2361 continue;
2362 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2364 unsigned from = si->node_mapping[j];
2365 if (from < FIRST_REF_NODE)
2366 fprintf (file, "\"%s\"", get_varinfo (from)->name);
2367 else
2368 fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2369 fprintf (file, " -> ");
2370 if (i < FIRST_REF_NODE)
2371 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2372 else
2373 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2374 fprintf (file, ";\n");
2378 /* Prints the tail of the dot file. */
2379 fprintf (file, "}\n");
2382 /* Perform offline variable substitution, discovering equivalence
2383 classes, and eliminating non-pointer variables. */
2385 static class scc_info *
2386 perform_var_substitution (constraint_graph_t graph)
2388 unsigned int i;
2389 unsigned int size = graph->size;
2390 scc_info *si = new scc_info (size);
2392 bitmap_obstack_initialize (&iteration_obstack);
2393 gcc_obstack_init (&equiv_class_obstack);
2394 pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
2395 location_equiv_class_table
2396 = new hash_table<equiv_class_hasher> (511);
2397 pointer_equiv_class = 1;
2398 location_equiv_class = 1;
2400 /* Condense the nodes, which means to find SCC's, count incoming
2401 predecessors, and unite nodes in SCC's. */
2402 for (i = 1; i < FIRST_REF_NODE; i++)
2403 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2404 condense_visit (graph, si, si->node_mapping[i]);
2406 if (dump_file && (dump_flags & TDF_GRAPH))
2408 fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
2409 "in dot format:\n");
2410 dump_pred_graph (si, dump_file);
2411 fprintf (dump_file, "\n\n");
2414 bitmap_clear (si->visited);
2415 /* Actually the label the nodes for pointer equivalences */
2416 for (i = 1; i < FIRST_REF_NODE; i++)
2417 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2418 label_visit (graph, si, si->node_mapping[i]);
2420 /* Calculate location equivalence labels. */
2421 for (i = 1; i < FIRST_REF_NODE; i++)
2423 bitmap pointed_by;
2424 bitmap_iterator bi;
2425 unsigned int j;
2427 if (!graph->pointed_by[i])
2428 continue;
2429 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2431 /* Translate the pointed-by mapping for pointer equivalence
2432 labels. */
2433 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2435 bitmap_set_bit (pointed_by,
2436 graph->pointer_label[si->node_mapping[j]]);
2438 /* The original pointed_by is now dead. */
2439 BITMAP_FREE (graph->pointed_by[i]);
2441 /* Look up the location equivalence label if one exists, or make
2442 one otherwise. */
2443 equiv_class_label_t ecl;
2444 ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
2445 if (ecl->equivalence_class == 0)
2446 ecl->equivalence_class = location_equiv_class++;
2447 else
2449 if (dump_file && (dump_flags & TDF_DETAILS))
2450 fprintf (dump_file, "Found location equivalence for node %s\n",
2451 get_varinfo (i)->name);
2452 BITMAP_FREE (pointed_by);
2454 graph->loc_label[i] = ecl->equivalence_class;
2458 if (dump_file && (dump_flags & TDF_DETAILS))
2459 for (i = 1; i < FIRST_REF_NODE; i++)
2461 unsigned j = si->node_mapping[i];
2462 if (j != i)
2464 fprintf (dump_file, "%s node id %d ",
2465 bitmap_bit_p (graph->direct_nodes, i)
2466 ? "Direct" : "Indirect", i);
2467 if (i < FIRST_REF_NODE)
2468 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2469 else
2470 fprintf (dump_file, "\"*%s\"",
2471 get_varinfo (i - FIRST_REF_NODE)->name);
2472 fprintf (dump_file, " mapped to SCC leader node id %d ", j);
2473 if (j < FIRST_REF_NODE)
2474 fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
2475 else
2476 fprintf (dump_file, "\"*%s\"\n",
2477 get_varinfo (j - FIRST_REF_NODE)->name);
2479 else
2481 fprintf (dump_file,
2482 "Equivalence classes for %s node id %d ",
2483 bitmap_bit_p (graph->direct_nodes, i)
2484 ? "direct" : "indirect", i);
2485 if (i < FIRST_REF_NODE)
2486 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2487 else
2488 fprintf (dump_file, "\"*%s\"",
2489 get_varinfo (i - FIRST_REF_NODE)->name);
2490 fprintf (dump_file,
2491 ": pointer %d, location %d\n",
2492 graph->pointer_label[i], graph->loc_label[i]);
2496 /* Quickly eliminate our non-pointer variables. */
2498 for (i = 1; i < FIRST_REF_NODE; i++)
2500 unsigned int node = si->node_mapping[i];
2502 if (graph->pointer_label[node] == 0)
2504 if (dump_file && (dump_flags & TDF_DETAILS))
2505 fprintf (dump_file,
2506 "%s is a non-pointer variable, eliminating edges.\n",
2507 get_varinfo (node)->name);
2508 stats.nonpointer_vars++;
2509 clear_edges_for_node (graph, node);
2513 return si;
2516 /* Free information that was only necessary for variable
2517 substitution. */
2519 static void
2520 free_var_substitution_info (class scc_info *si)
2522 delete si;
2523 free (graph->pointer_label);
2524 free (graph->loc_label);
2525 free (graph->pointed_by);
2526 free (graph->points_to);
2527 free (graph->eq_rep);
2528 sbitmap_free (graph->direct_nodes);
2529 delete pointer_equiv_class_table;
2530 pointer_equiv_class_table = NULL;
2531 delete location_equiv_class_table;
2532 location_equiv_class_table = NULL;
2533 obstack_free (&equiv_class_obstack, NULL);
2534 bitmap_obstack_release (&iteration_obstack);
2537 /* Return an existing node that is equivalent to NODE, which has
2538 equivalence class LABEL, if one exists. Return NODE otherwise. */
2540 static unsigned int
2541 find_equivalent_node (constraint_graph_t graph,
2542 unsigned int node, unsigned int label)
2544 /* If the address version of this variable is unused, we can
2545 substitute it for anything else with the same label.
2546 Otherwise, we know the pointers are equivalent, but not the
2547 locations, and we can unite them later. */
2549 if (!bitmap_bit_p (graph->address_taken, node))
2551 gcc_checking_assert (label < graph->size);
2553 if (graph->eq_rep[label] != -1)
2555 /* Unify the two variables since we know they are equivalent. */
2556 if (unite (graph->eq_rep[label], node))
2557 unify_nodes (graph, graph->eq_rep[label], node, false);
2558 return graph->eq_rep[label];
2560 else
2562 graph->eq_rep[label] = node;
2563 graph->pe_rep[label] = node;
2566 else
2568 gcc_checking_assert (label < graph->size);
2569 graph->pe[node] = label;
2570 if (graph->pe_rep[label] == -1)
2571 graph->pe_rep[label] = node;
2574 return node;
2577 /* Unite pointer equivalent but not location equivalent nodes in
2578 GRAPH. This may only be performed once variable substitution is
2579 finished. */
2581 static void
2582 unite_pointer_equivalences (constraint_graph_t graph)
2584 unsigned int i;
2586 /* Go through the pointer equivalences and unite them to their
2587 representative, if they aren't already. */
2588 for (i = 1; i < FIRST_REF_NODE; i++)
2590 unsigned int label = graph->pe[i];
2591 if (label)
2593 int label_rep = graph->pe_rep[label];
2595 if (label_rep == -1)
2596 continue;
2598 label_rep = find (label_rep);
2599 if (label_rep >= 0 && unite (label_rep, find (i)))
2600 unify_nodes (graph, label_rep, i, false);
2605 /* Move complex constraints to the GRAPH nodes they belong to. */
2607 static void
2608 move_complex_constraints (constraint_graph_t graph)
2610 int i;
2611 constraint_t c;
2613 FOR_EACH_VEC_ELT (constraints, i, c)
2615 if (c)
2617 struct constraint_expr lhs = c->lhs;
2618 struct constraint_expr rhs = c->rhs;
2620 if (lhs.type == DEREF)
2622 insert_into_complex (graph, lhs.var, c);
2624 else if (rhs.type == DEREF)
2626 if (!(get_varinfo (lhs.var)->is_special_var))
2627 insert_into_complex (graph, rhs.var, c);
2629 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2630 && (lhs.offset != 0 || rhs.offset != 0))
2632 insert_into_complex (graph, rhs.var, c);
2639 /* Optimize and rewrite complex constraints while performing
2640 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2641 result of perform_variable_substitution. */
2643 static void
2644 rewrite_constraints (constraint_graph_t graph,
2645 class scc_info *si)
2647 int i;
2648 constraint_t c;
2650 if (flag_checking)
2652 for (unsigned int j = 0; j < graph->size; j++)
2653 gcc_assert (find (j) == j);
2656 FOR_EACH_VEC_ELT (constraints, i, c)
2658 struct constraint_expr lhs = c->lhs;
2659 struct constraint_expr rhs = c->rhs;
2660 unsigned int lhsvar = find (lhs.var);
2661 unsigned int rhsvar = find (rhs.var);
2662 unsigned int lhsnode, rhsnode;
2663 unsigned int lhslabel, rhslabel;
2665 lhsnode = si->node_mapping[lhsvar];
2666 rhsnode = si->node_mapping[rhsvar];
2667 lhslabel = graph->pointer_label[lhsnode];
2668 rhslabel = graph->pointer_label[rhsnode];
2670 /* See if it is really a non-pointer variable, and if so, ignore
2671 the constraint. */
2672 if (lhslabel == 0)
2674 if (dump_file && (dump_flags & TDF_DETAILS))
2677 fprintf (dump_file, "%s is a non-pointer variable, "
2678 "ignoring constraint:",
2679 get_varinfo (lhs.var)->name);
2680 dump_constraint (dump_file, c);
2681 fprintf (dump_file, "\n");
2683 constraints[i] = NULL;
2684 continue;
2687 if (rhslabel == 0)
2689 if (dump_file && (dump_flags & TDF_DETAILS))
2692 fprintf (dump_file, "%s is a non-pointer variable, "
2693 "ignoring constraint:",
2694 get_varinfo (rhs.var)->name);
2695 dump_constraint (dump_file, c);
2696 fprintf (dump_file, "\n");
2698 constraints[i] = NULL;
2699 continue;
2702 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2703 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2704 c->lhs.var = lhsvar;
2705 c->rhs.var = rhsvar;
2709 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2710 part of an SCC, false otherwise. */
2712 static bool
2713 eliminate_indirect_cycles (unsigned int node)
2715 if (graph->indirect_cycles[node] != -1
2716 && !bitmap_empty_p (get_varinfo (node)->solution))
2718 unsigned int i;
2719 auto_vec<unsigned> queue;
2720 int queuepos;
2721 unsigned int to = find (graph->indirect_cycles[node]);
2722 bitmap_iterator bi;
2724 /* We can't touch the solution set and call unify_nodes
2725 at the same time, because unify_nodes is going to do
2726 bitmap unions into it. */
2728 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2730 if (find (i) == i && i != to)
2732 if (unite (to, i))
2733 queue.safe_push (i);
2737 for (queuepos = 0;
2738 queue.iterate (queuepos, &i);
2739 queuepos++)
2741 unify_nodes (graph, to, i, true);
2743 return true;
2745 return false;
2748 /* Solve the constraint graph GRAPH using our worklist solver.
2749 This is based on the PW* family of solvers from the "Efficient Field
2750 Sensitive Pointer Analysis for C" paper.
2751 It works by iterating over all the graph nodes, processing the complex
2752 constraints and propagating the copy constraints, until everything stops
2753 changed. This corresponds to steps 6-8 in the solving list given above. */
2755 static void
2756 solve_graph (constraint_graph_t graph)
2758 unsigned int size = graph->size;
2759 unsigned int i;
2760 bitmap pts;
2762 changed = BITMAP_ALLOC (NULL);
2764 /* Mark all initial non-collapsed nodes as changed. */
2765 for (i = 1; i < size; i++)
2767 varinfo_t ivi = get_varinfo (i);
2768 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2769 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2770 || graph->complex[i].length () > 0))
2771 bitmap_set_bit (changed, i);
2774 /* Allocate a bitmap to be used to store the changed bits. */
2775 pts = BITMAP_ALLOC (&pta_obstack);
2777 while (!bitmap_empty_p (changed))
2779 unsigned int i;
2780 stats.iterations++;
2782 bitmap_obstack_initialize (&iteration_obstack);
2784 auto_vec<unsigned> topo_order = compute_topo_order (graph);
2785 while (topo_order.length () != 0)
2787 i = topo_order.pop ();
2789 /* If this variable is not a representative, skip it. */
2790 if (find (i) != i)
2791 continue;
2793 /* In certain indirect cycle cases, we may merge this
2794 variable to another. */
2795 if (eliminate_indirect_cycles (i) && find (i) != i)
2796 continue;
2798 /* If the node has changed, we need to process the
2799 complex constraints and outgoing edges again. For complex
2800 constraints that modify i itself, like the common group of
2801 callarg = callarg + UNKNOWN;
2802 callarg = *callarg + UNKNOWN;
2803 *callarg = callescape;
2804 make sure to iterate immediately because that maximizes
2805 cache reuse and expands the graph quickest, leading to
2806 better visitation order in the next iteration. */
2807 while (bitmap_clear_bit (changed, i))
2809 bitmap solution;
2810 vec<constraint_t> &complex = graph->complex[i];
2811 varinfo_t vi = get_varinfo (i);
2812 bool solution_empty;
2814 /* Compute the changed set of solution bits. If anything
2815 is in the solution just propagate that. */
2816 if (bitmap_bit_p (vi->solution, anything_id))
2818 /* If anything is also in the old solution there is
2819 nothing to do.
2820 ??? But we shouldn't ended up with "changed" set ... */
2821 if (vi->oldsolution
2822 && bitmap_bit_p (vi->oldsolution, anything_id))
2823 break;
2824 bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
2826 else if (vi->oldsolution)
2827 bitmap_and_compl (pts, vi->solution, vi->oldsolution);
2828 else
2829 bitmap_copy (pts, vi->solution);
2831 if (bitmap_empty_p (pts))
2832 break;
2834 if (vi->oldsolution)
2835 bitmap_ior_into (vi->oldsolution, pts);
2836 else
2838 vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
2839 bitmap_copy (vi->oldsolution, pts);
2842 solution = vi->solution;
2843 solution_empty = bitmap_empty_p (solution);
2845 /* Process the complex constraints */
2846 hash_set<constraint_t> *cvisited = nullptr;
2847 if (flag_checking)
2848 cvisited = new hash_set<constraint_t>;
2849 bitmap expanded_pts = NULL;
2850 for (unsigned j = 0; j < complex.length (); ++j)
2852 constraint_t c = complex[j];
2853 /* At unification time only the directly involved nodes
2854 will get their complex constraints updated. Update
2855 our complex constraints now but keep the constraint
2856 vector sorted and clear of duplicates. Also make
2857 sure to evaluate each prevailing constraint only once. */
2858 unsigned int new_lhs = find (c->lhs.var);
2859 unsigned int new_rhs = find (c->rhs.var);
2860 if (c->lhs.var != new_lhs || c->rhs.var != new_rhs)
2862 constraint tem = *c;
2863 tem.lhs.var = new_lhs;
2864 tem.rhs.var = new_rhs;
2865 unsigned int place
2866 = complex.lower_bound (&tem, constraint_less);
2867 c->lhs.var = new_lhs;
2868 c->rhs.var = new_rhs;
2869 if (place != j)
2871 complex.ordered_remove (j);
2872 if (j < place)
2873 --place;
2874 if (place < complex.length ())
2876 if (constraint_equal (*complex[place], *c))
2878 j--;
2879 continue;
2881 else
2882 complex.safe_insert (place, c);
2884 else
2885 complex.quick_push (c);
2886 if (place > j)
2888 j--;
2889 continue;
2894 /* The only complex constraint that can change our
2895 solution to non-empty, given an empty solution,
2896 is a constraint where the lhs side is receiving
2897 some set from elsewhere. */
2898 if (cvisited && cvisited->add (c))
2899 gcc_unreachable ();
2900 if (!solution_empty || c->lhs.type != DEREF)
2901 do_complex_constraint (graph, c, pts, &expanded_pts);
2903 if (cvisited)
2905 /* When checking, verify the order of constraints is
2906 maintained and each constraint is evaluated exactly
2907 once. */
2908 for (unsigned j = 1; j < complex.length (); ++j)
2909 gcc_assert (constraint_less (complex[j-1], complex[j]));
2910 gcc_assert (cvisited->elements () == complex.length ());
2911 delete cvisited;
2913 BITMAP_FREE (expanded_pts);
2915 solution_empty = bitmap_empty_p (solution);
2917 if (!solution_empty)
2919 bitmap_iterator bi;
2920 unsigned eff_escaped_id = find (escaped_id);
2921 unsigned j;
2923 /* Propagate solution to all successors. */
2924 unsigned to_remove = ~0U;
2925 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2926 0, j, bi)
2928 if (to_remove != ~0U)
2930 bitmap_clear_bit (graph->succs[i], to_remove);
2931 to_remove = ~0U;
2933 unsigned int to = find (j);
2934 if (to != j)
2936 /* Update the succ graph, avoiding duplicate
2937 work. */
2938 to_remove = j;
2939 if (! bitmap_set_bit (graph->succs[i], to))
2940 continue;
2941 /* We eventually end up processing 'to' twice
2942 as it is undefined whether bitmap iteration
2943 iterates over bits set during iteration.
2944 Play safe instead of doing tricks. */
2946 /* Don't try to propagate to ourselves. */
2947 if (to == i)
2949 to_remove = j;
2950 continue;
2952 /* Early node unification can lead to edges from
2953 escaped - remove them. */
2954 if (i == eff_escaped_id)
2956 to_remove = j;
2957 if (bitmap_set_bit (get_varinfo (to)->solution,
2958 escaped_id))
2959 bitmap_set_bit (changed, to);
2960 continue;
2963 if (bitmap_ior_into (get_varinfo (to)->solution, pts))
2964 bitmap_set_bit (changed, to);
2966 if (to_remove != ~0U)
2967 bitmap_clear_bit (graph->succs[i], to_remove);
2971 bitmap_obstack_release (&iteration_obstack);
2974 BITMAP_FREE (pts);
2975 BITMAP_FREE (changed);
2976 bitmap_obstack_release (&oldpta_obstack);
2979 /* Map from trees to variable infos. */
2980 static hash_map<tree, varinfo_t> *vi_for_tree;
2983 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2985 static void
2986 insert_vi_for_tree (tree t, varinfo_t vi)
2988 gcc_assert (vi);
2989 gcc_assert (!vi_for_tree->put (t, vi));
2992 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2993 exist in the map, return NULL, otherwise, return the varinfo we found. */
2995 static varinfo_t
2996 lookup_vi_for_tree (tree t)
2998 varinfo_t *slot = vi_for_tree->get (t);
2999 if (slot == NULL)
3000 return NULL;
3002 return *slot;
3005 /* Return a printable name for DECL */
3007 static const char *
3008 alias_get_name (tree decl)
3010 const char *res = "NULL";
3011 if (dump_file)
3013 char *temp = NULL;
3014 if (TREE_CODE (decl) == SSA_NAME)
3016 res = get_name (decl);
3017 temp = xasprintf ("%s_%u", res ? res : "", SSA_NAME_VERSION (decl));
3019 else if (HAS_DECL_ASSEMBLER_NAME_P (decl)
3020 && DECL_ASSEMBLER_NAME_SET_P (decl))
3021 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl));
3022 else if (DECL_P (decl))
3024 res = get_name (decl);
3025 if (!res)
3026 temp = xasprintf ("D.%u", DECL_UID (decl));
3029 if (temp)
3031 res = ggc_strdup (temp);
3032 free (temp);
3036 return res;
3039 /* Find the variable id for tree T in the map.
3040 If T doesn't exist in the map, create an entry for it and return it. */
3042 static varinfo_t
3043 get_vi_for_tree (tree t)
3045 varinfo_t *slot = vi_for_tree->get (t);
3046 if (slot == NULL)
3048 unsigned int id = create_variable_info_for (t, alias_get_name (t), false);
3049 return get_varinfo (id);
3052 return *slot;
3055 /* Get a scalar constraint expression for a new temporary variable. */
3057 static struct constraint_expr
3058 new_scalar_tmp_constraint_exp (const char *name, bool add_id)
3060 struct constraint_expr tmp;
3061 varinfo_t vi;
3063 vi = new_var_info (NULL_TREE, name, add_id);
3064 vi->offset = 0;
3065 vi->size = -1;
3066 vi->fullsize = -1;
3067 vi->is_full_var = 1;
3068 vi->is_reg_var = 1;
3070 tmp.var = vi->id;
3071 tmp.type = SCALAR;
3072 tmp.offset = 0;
3074 return tmp;
3077 /* Get a constraint expression vector from an SSA_VAR_P node.
3078 If address_p is true, the result will be taken its address of. */
3080 static void
3081 get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
3083 struct constraint_expr cexpr;
3084 varinfo_t vi;
3086 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
3087 gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
3089 if (TREE_CODE (t) == SSA_NAME
3090 && SSA_NAME_IS_DEFAULT_DEF (t))
3092 /* For parameters, get at the points-to set for the actual parm
3093 decl. */
3094 if (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
3095 || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
3097 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
3098 return;
3100 /* For undefined SSA names return nothing. */
3101 else if (!ssa_defined_default_def_p (t))
3103 cexpr.var = nothing_id;
3104 cexpr.type = SCALAR;
3105 cexpr.offset = 0;
3106 results->safe_push (cexpr);
3107 return;
3111 /* For global variables resort to the alias target. */
3112 if (VAR_P (t) && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
3114 varpool_node *node = varpool_node::get (t);
3115 if (node && node->alias && node->analyzed)
3117 node = node->ultimate_alias_target ();
3118 /* Canonicalize the PT uid of all aliases to the ultimate target.
3119 ??? Hopefully the set of aliases can't change in a way that
3120 changes the ultimate alias target. */
3121 gcc_assert ((! DECL_PT_UID_SET_P (node->decl)
3122 || DECL_PT_UID (node->decl) == DECL_UID (node->decl))
3123 && (! DECL_PT_UID_SET_P (t)
3124 || DECL_PT_UID (t) == DECL_UID (node->decl)));
3125 DECL_PT_UID (t) = DECL_UID (node->decl);
3126 t = node->decl;
3129 /* If this is decl may bind to NULL note that. */
3130 if (address_p
3131 && (! node || ! node->nonzero_address ()))
3133 cexpr.var = nothing_id;
3134 cexpr.type = SCALAR;
3135 cexpr.offset = 0;
3136 results->safe_push (cexpr);
3140 vi = get_vi_for_tree (t);
3141 cexpr.var = vi->id;
3142 cexpr.type = SCALAR;
3143 cexpr.offset = 0;
3145 /* If we are not taking the address of the constraint expr, add all
3146 sub-fiels of the variable as well. */
3147 if (!address_p
3148 && !vi->is_full_var)
3150 for (; vi; vi = vi_next (vi))
3152 cexpr.var = vi->id;
3153 results->safe_push (cexpr);
3155 return;
3158 results->safe_push (cexpr);
3161 /* Process constraint T, performing various simplifications and then
3162 adding it to our list of overall constraints. */
3164 static void
3165 process_constraint (constraint_t t)
3167 struct constraint_expr rhs = t->rhs;
3168 struct constraint_expr lhs = t->lhs;
3170 gcc_assert (rhs.var < varmap.length ());
3171 gcc_assert (lhs.var < varmap.length ());
3173 /* If we didn't get any useful constraint from the lhs we get
3174 &ANYTHING as fallback from get_constraint_for. Deal with
3175 it here by turning it into *ANYTHING. */
3176 if (lhs.type == ADDRESSOF
3177 && lhs.var == anything_id)
3178 t->lhs.type = lhs.type = DEREF;
3180 /* ADDRESSOF on the lhs is invalid. */
3181 gcc_assert (lhs.type != ADDRESSOF);
3183 /* We shouldn't add constraints from things that cannot have pointers.
3184 It's not completely trivial to avoid in the callers, so do it here. */
3185 if (rhs.type != ADDRESSOF
3186 && !get_varinfo (rhs.var)->may_have_pointers)
3187 return;
3189 /* Likewise adding to the solution of a non-pointer var isn't useful. */
3190 if (!get_varinfo (lhs.var)->may_have_pointers)
3191 return;
3193 /* This can happen in our IR with things like n->a = *p */
3194 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
3196 /* Split into tmp = *rhs, *lhs = tmp */
3197 struct constraint_expr tmplhs;
3198 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp", true);
3199 process_constraint (new_constraint (tmplhs, rhs));
3200 process_constraint (new_constraint (lhs, tmplhs));
3202 else if ((rhs.type != SCALAR || rhs.offset != 0) && lhs.type == DEREF)
3204 /* Split into tmp = &rhs, *lhs = tmp */
3205 struct constraint_expr tmplhs;
3206 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp", true);
3207 process_constraint (new_constraint (tmplhs, rhs));
3208 process_constraint (new_constraint (lhs, tmplhs));
3210 else
3212 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
3213 if (rhs.type == ADDRESSOF)
3214 get_varinfo (get_varinfo (rhs.var)->head)->address_taken = true;
3215 constraints.safe_push (t);
3220 /* Return the position, in bits, of FIELD_DECL from the beginning of its
3221 structure. */
3223 static HOST_WIDE_INT
3224 bitpos_of_field (const tree fdecl)
3226 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3227 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3228 return -1;
3230 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3231 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
3235 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
3236 resulting constraint expressions in *RESULTS. */
3238 static void
3239 get_constraint_for_ptr_offset (tree ptr, tree offset,
3240 vec<ce_s> *results)
3242 struct constraint_expr c;
3243 unsigned int j, n;
3244 HOST_WIDE_INT rhsoffset;
3246 /* If we do not do field-sensitive PTA adding offsets to pointers
3247 does not change the points-to solution. */
3248 if (!use_field_sensitive)
3250 get_constraint_for_rhs (ptr, results);
3251 return;
3254 /* If the offset is not a non-negative integer constant that fits
3255 in a HOST_WIDE_INT, we have to fall back to a conservative
3256 solution which includes all sub-fields of all pointed-to
3257 variables of ptr. */
3258 if (offset == NULL_TREE
3259 || TREE_CODE (offset) != INTEGER_CST)
3260 rhsoffset = UNKNOWN_OFFSET;
3261 else
3263 /* Sign-extend the offset. */
3264 offset_int soffset = offset_int::from (wi::to_wide (offset), SIGNED);
3265 if (!wi::fits_shwi_p (soffset))
3266 rhsoffset = UNKNOWN_OFFSET;
3267 else
3269 /* Make sure the bit-offset also fits. */
3270 HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
3271 rhsoffset = rhsunitoffset * (unsigned HOST_WIDE_INT) BITS_PER_UNIT;
3272 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3273 rhsoffset = UNKNOWN_OFFSET;
3277 get_constraint_for_rhs (ptr, results);
3278 if (rhsoffset == 0)
3279 return;
3281 /* As we are eventually appending to the solution do not use
3282 vec::iterate here. */
3283 n = results->length ();
3284 for (j = 0; j < n; j++)
3286 varinfo_t curr;
3287 c = (*results)[j];
3288 curr = get_varinfo (c.var);
3290 if (c.type == ADDRESSOF
3291 /* If this varinfo represents a full variable just use it. */
3292 && curr->is_full_var)
3294 else if (c.type == ADDRESSOF
3295 /* If we do not know the offset add all subfields. */
3296 && rhsoffset == UNKNOWN_OFFSET)
3298 varinfo_t temp = get_varinfo (curr->head);
3301 struct constraint_expr c2;
3302 c2.var = temp->id;
3303 c2.type = ADDRESSOF;
3304 c2.offset = 0;
3305 if (c2.var != c.var)
3306 results->safe_push (c2);
3307 temp = vi_next (temp);
3309 while (temp);
3311 else if (c.type == ADDRESSOF)
3313 varinfo_t temp;
3314 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3316 /* If curr->offset + rhsoffset is less than zero adjust it. */
3317 if (rhsoffset < 0
3318 && curr->offset < offset)
3319 offset = 0;
3321 /* We have to include all fields that overlap the current
3322 field shifted by rhsoffset. And we include at least
3323 the last or the first field of the variable to represent
3324 reachability of off-bound addresses, in particular &object + 1,
3325 conservatively correct. */
3326 temp = first_or_preceding_vi_for_offset (curr, offset);
3327 c.var = temp->id;
3328 c.offset = 0;
3329 temp = vi_next (temp);
3330 while (temp
3331 && temp->offset < offset + curr->size)
3333 struct constraint_expr c2;
3334 c2.var = temp->id;
3335 c2.type = ADDRESSOF;
3336 c2.offset = 0;
3337 results->safe_push (c2);
3338 temp = vi_next (temp);
3341 else if (c.type == SCALAR)
3343 gcc_assert (c.offset == 0);
3344 c.offset = rhsoffset;
3346 else
3347 /* We shouldn't get any DEREFs here. */
3348 gcc_unreachable ();
3350 (*results)[j] = c;
3355 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3356 If address_p is true the result will be taken its address of.
3357 If lhs_p is true then the constraint expression is assumed to be used
3358 as the lhs. */
3360 static void
3361 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3362 bool address_p, bool lhs_p)
3364 tree orig_t = t;
3365 poly_int64 bitsize = -1;
3366 poly_int64 bitmaxsize = -1;
3367 poly_int64 bitpos;
3368 bool reverse;
3369 tree forzero;
3371 /* Some people like to do cute things like take the address of
3372 &0->a.b */
3373 forzero = t;
3374 while (handled_component_p (forzero)
3375 || INDIRECT_REF_P (forzero)
3376 || TREE_CODE (forzero) == MEM_REF)
3377 forzero = TREE_OPERAND (forzero, 0);
3379 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3381 struct constraint_expr temp;
3383 temp.offset = 0;
3384 temp.var = integer_id;
3385 temp.type = SCALAR;
3386 results->safe_push (temp);
3387 return;
3390 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse);
3392 /* We can end up here for component references on a
3393 VIEW_CONVERT_EXPR <>(&foobar) or things like a
3394 BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>. So for
3395 symbolic constants simply give up. */
3396 if (TREE_CODE (t) == ADDR_EXPR)
3398 constraint_expr result;
3399 result.type = SCALAR;
3400 result.var = anything_id;
3401 result.offset = 0;
3402 results->safe_push (result);
3403 return;
3406 /* Avoid creating pointer-offset constraints, so handle MEM_REF
3407 offsets directly. Pretend to take the address of the base,
3408 we'll take care of adding the required subset of sub-fields below. */
3409 if (TREE_CODE (t) == MEM_REF
3410 && !integer_zerop (TREE_OPERAND (t, 0)))
3412 poly_offset_int off = mem_ref_offset (t);
3413 off <<= LOG2_BITS_PER_UNIT;
3414 off += bitpos;
3415 poly_int64 off_hwi;
3416 if (off.to_shwi (&off_hwi))
3417 bitpos = off_hwi;
3418 else
3420 bitpos = 0;
3421 bitmaxsize = -1;
3423 get_constraint_for_1 (TREE_OPERAND (t, 0), results, false, lhs_p);
3424 do_deref (results);
3426 else
3427 get_constraint_for_1 (t, results, true, lhs_p);
3429 /* Strip off nothing_id. */
3430 if (results->length () == 2)
3432 gcc_assert ((*results)[0].var == nothing_id);
3433 results->unordered_remove (0);
3435 gcc_assert (results->length () == 1);
3436 struct constraint_expr &result = results->last ();
3438 if (result.type == SCALAR
3439 && get_varinfo (result.var)->is_full_var)
3440 /* For single-field vars do not bother about the offset. */
3441 result.offset = 0;
3442 else if (result.type == SCALAR)
3444 /* In languages like C, you can access one past the end of an
3445 array. You aren't allowed to dereference it, so we can
3446 ignore this constraint. When we handle pointer subtraction,
3447 we may have to do something cute here. */
3449 if (maybe_lt (poly_uint64 (bitpos), get_varinfo (result.var)->fullsize)
3450 && maybe_ne (bitmaxsize, 0))
3452 /* It's also not true that the constraint will actually start at the
3453 right offset, it may start in some padding. We only care about
3454 setting the constraint to the first actual field it touches, so
3455 walk to find it. */
3456 struct constraint_expr cexpr = result;
3457 varinfo_t curr;
3458 results->pop ();
3459 cexpr.offset = 0;
3460 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3462 if (ranges_maybe_overlap_p (poly_int64 (curr->offset),
3463 curr->size, bitpos, bitmaxsize))
3465 cexpr.var = curr->id;
3466 results->safe_push (cexpr);
3467 if (address_p)
3468 break;
3471 /* If we are going to take the address of this field then
3472 to be able to compute reachability correctly add at least
3473 the last field of the variable. */
3474 if (address_p && results->length () == 0)
3476 curr = get_varinfo (cexpr.var);
3477 while (curr->next != 0)
3478 curr = vi_next (curr);
3479 cexpr.var = curr->id;
3480 results->safe_push (cexpr);
3482 else if (results->length () == 0)
3483 /* Assert that we found *some* field there. The user couldn't be
3484 accessing *only* padding. */
3485 /* Still the user could access one past the end of an array
3486 embedded in a struct resulting in accessing *only* padding. */
3487 /* Or accessing only padding via type-punning to a type
3488 that has a filed just in padding space. */
3490 cexpr.type = SCALAR;
3491 cexpr.var = anything_id;
3492 cexpr.offset = 0;
3493 results->safe_push (cexpr);
3496 else if (known_eq (bitmaxsize, 0))
3498 if (dump_file && (dump_flags & TDF_DETAILS))
3499 fprintf (dump_file, "Access to zero-sized part of variable, "
3500 "ignoring\n");
3502 else
3503 if (dump_file && (dump_flags & TDF_DETAILS))
3504 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3506 else if (result.type == DEREF)
3508 /* If we do not know exactly where the access goes say so. Note
3509 that only for non-structure accesses we know that we access
3510 at most one subfiled of any variable. */
3511 HOST_WIDE_INT const_bitpos;
3512 if (!bitpos.is_constant (&const_bitpos)
3513 || const_bitpos == -1
3514 || maybe_ne (bitsize, bitmaxsize)
3515 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3516 || result.offset == UNKNOWN_OFFSET)
3517 result.offset = UNKNOWN_OFFSET;
3518 else
3519 result.offset += const_bitpos;
3521 else if (result.type == ADDRESSOF)
3523 /* We can end up here for component references on constants like
3524 VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i]. */
3525 result.type = SCALAR;
3526 result.var = anything_id;
3527 result.offset = 0;
3529 else
3530 gcc_unreachable ();
3534 /* Dereference the constraint expression CONS, and return the result.
3535 DEREF (ADDRESSOF) = SCALAR
3536 DEREF (SCALAR) = DEREF
3537 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3538 This is needed so that we can handle dereferencing DEREF constraints. */
3540 static void
3541 do_deref (vec<ce_s> *constraints)
3543 struct constraint_expr *c;
3544 unsigned int i = 0;
3546 FOR_EACH_VEC_ELT (*constraints, i, c)
3548 if (c->type == SCALAR)
3549 c->type = DEREF;
3550 else if (c->type == ADDRESSOF)
3551 c->type = SCALAR;
3552 else if (c->type == DEREF)
3554 struct constraint_expr tmplhs;
3555 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp", true);
3556 process_constraint (new_constraint (tmplhs, *c));
3557 c->var = tmplhs.var;
3559 else
3560 gcc_unreachable ();
3564 /* Given a tree T, return the constraint expression for taking the
3565 address of it. */
3567 static void
3568 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3570 struct constraint_expr *c;
3571 unsigned int i;
3573 get_constraint_for_1 (t, results, true, true);
3575 FOR_EACH_VEC_ELT (*results, i, c)
3577 if (c->type == DEREF)
3578 c->type = SCALAR;
3579 else
3580 c->type = ADDRESSOF;
3584 /* Given a tree T, return the constraint expression for it. */
3586 static void
3587 get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
3588 bool lhs_p)
3590 struct constraint_expr temp;
3592 /* x = integer is all glommed to a single variable, which doesn't
3593 point to anything by itself. That is, of course, unless it is an
3594 integer constant being treated as a pointer, in which case, we
3595 will return that this is really the addressof anything. This
3596 happens below, since it will fall into the default case. The only
3597 case we know something about an integer treated like a pointer is
3598 when it is the NULL pointer, and then we just say it points to
3599 NULL.
3601 Do not do that if -fno-delete-null-pointer-checks though, because
3602 in that case *NULL does not fail, so it _should_ alias *anything.
3603 It is not worth adding a new option or renaming the existing one,
3604 since this case is relatively obscure. */
3605 if ((TREE_CODE (t) == INTEGER_CST
3606 && integer_zerop (t))
3607 /* The only valid CONSTRUCTORs in gimple with pointer typed
3608 elements are zero-initializer. But in IPA mode we also
3609 process global initializers, so verify at least. */
3610 || (TREE_CODE (t) == CONSTRUCTOR
3611 && CONSTRUCTOR_NELTS (t) == 0))
3613 if (flag_delete_null_pointer_checks)
3614 temp.var = nothing_id;
3615 else
3616 temp.var = nonlocal_id;
3617 temp.type = ADDRESSOF;
3618 temp.offset = 0;
3619 results->safe_push (temp);
3620 return;
3623 /* String constants are read-only, ideally we'd have a CONST_DECL
3624 for those. */
3625 if (TREE_CODE (t) == STRING_CST)
3627 temp.var = string_id;
3628 temp.type = SCALAR;
3629 temp.offset = 0;
3630 results->safe_push (temp);
3631 return;
3634 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3636 case tcc_expression:
3638 switch (TREE_CODE (t))
3640 case ADDR_EXPR:
3641 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3642 return;
3643 default:;
3645 break;
3647 case tcc_reference:
3649 if (TREE_THIS_VOLATILE (t))
3650 /* Fall back to anything. */
3651 break;
3653 switch (TREE_CODE (t))
3655 case MEM_REF:
3657 struct constraint_expr cs;
3658 varinfo_t vi, curr;
3659 get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3660 TREE_OPERAND (t, 1), results);
3661 do_deref (results);
3663 /* If we are not taking the address then make sure to process
3664 all subvariables we might access. */
3665 if (address_p)
3666 return;
3668 cs = results->last ();
3669 if (cs.type == DEREF
3670 && type_can_have_subvars (TREE_TYPE (t)))
3672 /* For dereferences this means we have to defer it
3673 to solving time. */
3674 results->last ().offset = UNKNOWN_OFFSET;
3675 return;
3677 if (cs.type != SCALAR)
3678 return;
3680 vi = get_varinfo (cs.var);
3681 curr = vi_next (vi);
3682 if (!vi->is_full_var
3683 && curr)
3685 unsigned HOST_WIDE_INT size;
3686 if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
3687 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
3688 else
3689 size = -1;
3690 for (; curr; curr = vi_next (curr))
3692 if (curr->offset - vi->offset < size)
3694 cs.var = curr->id;
3695 results->safe_push (cs);
3697 else
3698 break;
3701 return;
3703 case ARRAY_REF:
3704 case ARRAY_RANGE_REF:
3705 case COMPONENT_REF:
3706 case IMAGPART_EXPR:
3707 case REALPART_EXPR:
3708 case BIT_FIELD_REF:
3709 get_constraint_for_component_ref (t, results, address_p, lhs_p);
3710 return;
3711 case VIEW_CONVERT_EXPR:
3712 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
3713 lhs_p);
3714 return;
3715 /* We are missing handling for TARGET_MEM_REF here. */
3716 default:;
3718 break;
3720 case tcc_exceptional:
3722 switch (TREE_CODE (t))
3724 case SSA_NAME:
3726 get_constraint_for_ssa_var (t, results, address_p);
3727 return;
3729 case CONSTRUCTOR:
3731 unsigned int i;
3732 tree val;
3733 auto_vec<ce_s> tmp;
3734 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3736 struct constraint_expr *rhsp;
3737 unsigned j;
3738 get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3739 FOR_EACH_VEC_ELT (tmp, j, rhsp)
3740 results->safe_push (*rhsp);
3741 tmp.truncate (0);
3743 /* We do not know whether the constructor was complete,
3744 so technically we have to add &NOTHING or &ANYTHING
3745 like we do for an empty constructor as well. */
3746 return;
3748 default:;
3750 break;
3752 case tcc_declaration:
3754 if (VAR_P (t) && TREE_THIS_VOLATILE (t))
3755 /* Fall back to anything. */
3756 break;
3757 get_constraint_for_ssa_var (t, results, address_p);
3758 return;
3760 case tcc_constant:
3762 /* We cannot refer to automatic variables through constants. */
3763 temp.type = ADDRESSOF;
3764 temp.var = nonlocal_id;
3765 temp.offset = 0;
3766 results->safe_push (temp);
3767 return;
3769 default:;
3772 /* The default fallback is a constraint from anything. */
3773 temp.type = ADDRESSOF;
3774 temp.var = anything_id;
3775 temp.offset = 0;
3776 results->safe_push (temp);
3779 /* Given a gimple tree T, return the constraint expression vector for it. */
3781 static void
3782 get_constraint_for (tree t, vec<ce_s> *results)
3784 gcc_assert (results->length () == 0);
3786 get_constraint_for_1 (t, results, false, true);
3789 /* Given a gimple tree T, return the constraint expression vector for it
3790 to be used as the rhs of a constraint. */
3792 static void
3793 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3795 gcc_assert (results->length () == 0);
3797 get_constraint_for_1 (t, results, false, false);
3801 /* Efficiently generates constraints from all entries in *RHSC to all
3802 entries in *LHSC. */
3804 static void
3805 process_all_all_constraints (const vec<ce_s> &lhsc,
3806 const vec<ce_s> &rhsc)
3808 struct constraint_expr *lhsp, *rhsp;
3809 unsigned i, j;
3811 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3813 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3814 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3815 process_constraint (new_constraint (*lhsp, *rhsp));
3817 else
3819 struct constraint_expr tmp;
3820 tmp = new_scalar_tmp_constraint_exp ("allalltmp", true);
3821 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3822 process_constraint (new_constraint (tmp, *rhsp));
3823 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3824 process_constraint (new_constraint (*lhsp, tmp));
3828 /* Handle aggregate copies by expanding into copies of the respective
3829 fields of the structures. */
3831 static void
3832 do_structure_copy (tree lhsop, tree rhsop)
3834 struct constraint_expr *lhsp, *rhsp;
3835 auto_vec<ce_s> lhsc;
3836 auto_vec<ce_s> rhsc;
3837 unsigned j;
3839 get_constraint_for (lhsop, &lhsc);
3840 get_constraint_for_rhs (rhsop, &rhsc);
3841 lhsp = &lhsc[0];
3842 rhsp = &rhsc[0];
3843 if (lhsp->type == DEREF
3844 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3845 || rhsp->type == DEREF)
3847 if (lhsp->type == DEREF)
3849 gcc_assert (lhsc.length () == 1);
3850 lhsp->offset = UNKNOWN_OFFSET;
3852 if (rhsp->type == DEREF)
3854 gcc_assert (rhsc.length () == 1);
3855 rhsp->offset = UNKNOWN_OFFSET;
3857 process_all_all_constraints (lhsc, rhsc);
3859 else if (lhsp->type == SCALAR
3860 && (rhsp->type == SCALAR
3861 || rhsp->type == ADDRESSOF))
3863 HOST_WIDE_INT lhssize, lhsoffset;
3864 HOST_WIDE_INT rhssize, rhsoffset;
3865 bool reverse;
3866 unsigned k = 0;
3867 if (!get_ref_base_and_extent_hwi (lhsop, &lhsoffset, &lhssize, &reverse)
3868 || !get_ref_base_and_extent_hwi (rhsop, &rhsoffset, &rhssize,
3869 &reverse))
3871 process_all_all_constraints (lhsc, rhsc);
3872 return;
3874 for (j = 0; lhsc.iterate (j, &lhsp);)
3876 varinfo_t lhsv, rhsv;
3877 rhsp = &rhsc[k];
3878 lhsv = get_varinfo (lhsp->var);
3879 rhsv = get_varinfo (rhsp->var);
3880 if (lhsv->may_have_pointers
3881 && (lhsv->is_full_var
3882 || rhsv->is_full_var
3883 || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3884 rhsv->offset + lhsoffset, rhsv->size)))
3885 process_constraint (new_constraint (*lhsp, *rhsp));
3886 if (!rhsv->is_full_var
3887 && (lhsv->is_full_var
3888 || (lhsv->offset + rhsoffset + lhsv->size
3889 > rhsv->offset + lhsoffset + rhsv->size)))
3891 ++k;
3892 if (k >= rhsc.length ())
3893 break;
3895 else
3896 ++j;
3899 else
3900 gcc_unreachable ();
3903 /* Create constraints ID = { rhsc }. */
3905 static void
3906 make_constraints_to (unsigned id, const vec<ce_s> &rhsc)
3908 struct constraint_expr *c;
3909 struct constraint_expr includes;
3910 unsigned int j;
3912 includes.var = id;
3913 includes.offset = 0;
3914 includes.type = SCALAR;
3916 FOR_EACH_VEC_ELT (rhsc, j, c)
3917 process_constraint (new_constraint (includes, *c));
3920 /* Create a constraint ID = OP. */
3922 static void
3923 make_constraint_to (unsigned id, tree op)
3925 auto_vec<ce_s> rhsc;
3926 get_constraint_for_rhs (op, &rhsc);
3927 make_constraints_to (id, rhsc);
3930 /* Create a constraint ID = &FROM. */
3932 static void
3933 make_constraint_from (varinfo_t vi, int from)
3935 struct constraint_expr lhs, rhs;
3937 lhs.var = vi->id;
3938 lhs.offset = 0;
3939 lhs.type = SCALAR;
3941 rhs.var = from;
3942 rhs.offset = 0;
3943 rhs.type = ADDRESSOF;
3944 process_constraint (new_constraint (lhs, rhs));
3947 /* Create a constraint ID = FROM. */
3949 static void
3950 make_copy_constraint (varinfo_t vi, int from)
3952 struct constraint_expr lhs, rhs;
3954 lhs.var = vi->id;
3955 lhs.offset = 0;
3956 lhs.type = SCALAR;
3958 rhs.var = from;
3959 rhs.offset = 0;
3960 rhs.type = SCALAR;
3961 process_constraint (new_constraint (lhs, rhs));
3964 /* Make constraints necessary to make OP escape. */
3966 static void
3967 make_escape_constraint (tree op)
3969 make_constraint_to (escaped_id, op);
3972 /* Make constraint necessary to make all indirect references
3973 from VI escape. */
3975 static void
3976 make_indirect_escape_constraint (varinfo_t vi)
3978 struct constraint_expr lhs, rhs;
3979 /* escaped = *(VAR + UNKNOWN); */
3980 lhs.type = SCALAR;
3981 lhs.var = escaped_id;
3982 lhs.offset = 0;
3983 rhs.type = DEREF;
3984 rhs.var = vi->id;
3985 rhs.offset = UNKNOWN_OFFSET;
3986 process_constraint (new_constraint (lhs, rhs));
3989 /* Add constraints to that the solution of VI is transitively closed. */
3991 static void
3992 make_transitive_closure_constraints (varinfo_t vi)
3994 struct constraint_expr lhs, rhs;
3996 /* VAR = *(VAR + UNKNOWN); */
3997 lhs.type = SCALAR;
3998 lhs.var = vi->id;
3999 lhs.offset = 0;
4000 rhs.type = DEREF;
4001 rhs.var = vi->id;
4002 rhs.offset = UNKNOWN_OFFSET;
4003 process_constraint (new_constraint (lhs, rhs));
4006 /* Add constraints to that the solution of VI has all subvariables added. */
4008 static void
4009 make_any_offset_constraints (varinfo_t vi)
4011 struct constraint_expr lhs, rhs;
4013 /* VAR = VAR + UNKNOWN; */
4014 lhs.type = SCALAR;
4015 lhs.var = vi->id;
4016 lhs.offset = 0;
4017 rhs.type = SCALAR;
4018 rhs.var = vi->id;
4019 rhs.offset = UNKNOWN_OFFSET;
4020 process_constraint (new_constraint (lhs, rhs));
4023 /* Temporary storage for fake var decls. */
4024 struct obstack fake_var_decl_obstack;
4026 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
4028 static tree
4029 build_fake_var_decl (tree type)
4031 tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
4032 memset (decl, 0, sizeof (struct tree_var_decl));
4033 TREE_SET_CODE (decl, VAR_DECL);
4034 TREE_TYPE (decl) = type;
4035 DECL_UID (decl) = allocate_decl_uid ();
4036 SET_DECL_PT_UID (decl, -1);
4037 layout_decl (decl, 0);
4038 return decl;
4041 /* Create a new artificial heap variable with NAME.
4042 Return the created variable. */
4044 static varinfo_t
4045 make_heapvar (const char *name, bool add_id)
4047 varinfo_t vi;
4048 tree heapvar;
4050 heapvar = build_fake_var_decl (ptr_type_node);
4051 DECL_EXTERNAL (heapvar) = 1;
4053 vi = new_var_info (heapvar, name, add_id);
4054 vi->is_heap_var = true;
4055 vi->is_unknown_size_var = true;
4056 vi->offset = 0;
4057 vi->fullsize = ~0;
4058 vi->size = ~0;
4059 vi->is_full_var = true;
4060 insert_vi_for_tree (heapvar, vi);
4062 return vi;
4065 /* Create a new artificial heap variable with NAME and make a
4066 constraint from it to LHS. Set flags according to a tag used
4067 for tracking restrict pointers. */
4069 static varinfo_t
4070 make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id)
4072 varinfo_t vi = make_heapvar (name, add_id);
4073 vi->is_restrict_var = 1;
4074 vi->is_global_var = 1;
4075 vi->may_have_pointers = 1;
4076 make_constraint_from (lhs, vi->id);
4077 return vi;
4080 /* Create a new artificial heap variable with NAME and make a
4081 constraint from it to LHS. Set flags according to a tag used
4082 for tracking restrict pointers and make the artificial heap
4083 point to global memory. */
4085 static varinfo_t
4086 make_constraint_from_global_restrict (varinfo_t lhs, const char *name,
4087 bool add_id)
4089 varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id);
4090 make_copy_constraint (vi, nonlocal_id);
4091 return vi;
4094 /* In IPA mode there are varinfos for different aspects of reach
4095 function designator. One for the points-to set of the return
4096 value, one for the variables that are clobbered by the function,
4097 one for its uses and one for each parameter (including a single
4098 glob for remaining variadic arguments). */
4100 enum { fi_clobbers = 1, fi_uses = 2,
4101 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
4103 /* Get a constraint for the requested part of a function designator FI
4104 when operating in IPA mode. */
4106 static struct constraint_expr
4107 get_function_part_constraint (varinfo_t fi, unsigned part)
4109 struct constraint_expr c;
4111 gcc_assert (in_ipa_mode);
4113 if (fi->id == anything_id)
4115 /* ??? We probably should have a ANYFN special variable. */
4116 c.var = anything_id;
4117 c.offset = 0;
4118 c.type = SCALAR;
4120 else if (fi->decl && TREE_CODE (fi->decl) == FUNCTION_DECL)
4122 varinfo_t ai = first_vi_for_offset (fi, part);
4123 if (ai)
4124 c.var = ai->id;
4125 else
4126 c.var = anything_id;
4127 c.offset = 0;
4128 c.type = SCALAR;
4130 else
4132 c.var = fi->id;
4133 c.offset = part;
4134 c.type = DEREF;
4137 return c;
4140 /* Produce constraints for argument ARG of call STMT with eaf flags
4141 FLAGS. RESULTS is array holding constraints for return value.
4142 CALLESCAPE_ID is variable where call loocal escapes are added.
4143 WRITES_GLOVEL_MEMORY is true if callee may write global memory. */
4145 static void
4146 handle_call_arg (gcall *stmt, tree arg, vec<ce_s> *results, int flags,
4147 int callescape_id, bool writes_global_memory)
4149 int relevant_indirect_flags = EAF_NO_INDIRECT_CLOBBER | EAF_NO_INDIRECT_READ
4150 | EAF_NO_INDIRECT_ESCAPE;
4151 int relevant_flags = relevant_indirect_flags
4152 | EAF_NO_DIRECT_CLOBBER
4153 | EAF_NO_DIRECT_READ
4154 | EAF_NO_DIRECT_ESCAPE;
4155 if (gimple_call_lhs (stmt))
4157 relevant_flags |= EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY;
4158 relevant_indirect_flags |= EAF_NOT_RETURNED_INDIRECTLY;
4160 /* If value is never read from it can not be returned indirectly
4161 (except through the escape solution).
4162 For all flags we get these implications right except for
4163 not_returned because we miss return functions in ipa-prop. */
4165 if (flags & EAF_NO_DIRECT_READ)
4166 flags |= EAF_NOT_RETURNED_INDIRECTLY;
4169 /* If the argument is not used we can ignore it.
4170 Similarly argument is invisile for us if it not clobbered, does not
4171 escape, is not read and can not be returned. */
4172 if ((flags & EAF_UNUSED) || ((flags & relevant_flags) == relevant_flags))
4173 return;
4175 /* Produce varinfo for direct accesses to ARG. */
4176 varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
4177 tem->is_reg_var = true;
4178 make_constraint_to (tem->id, arg);
4179 make_any_offset_constraints (tem);
4181 bool callarg_transitive = false;
4183 /* As an compile time optimization if we make no difference between
4184 direct and indirect accesses make arg transitively closed.
4185 This avoids the need to build indir arg and do everything twice. */
4186 if (((flags & EAF_NO_INDIRECT_CLOBBER) != 0)
4187 == ((flags & EAF_NO_DIRECT_CLOBBER) != 0)
4188 && (((flags & EAF_NO_INDIRECT_READ) != 0)
4189 == ((flags & EAF_NO_DIRECT_READ) != 0))
4190 && (((flags & EAF_NO_INDIRECT_ESCAPE) != 0)
4191 == ((flags & EAF_NO_DIRECT_ESCAPE) != 0))
4192 && (((flags & EAF_NOT_RETURNED_INDIRECTLY) != 0)
4193 == ((flags & EAF_NOT_RETURNED_DIRECTLY) != 0)))
4195 make_transitive_closure_constraints (tem);
4196 callarg_transitive = true;
4197 gcc_checking_assert (!(flags & EAF_NO_DIRECT_READ));
4200 /* If necessary, produce varinfo for indirect accesses to ARG. */
4201 varinfo_t indir_tem = NULL;
4202 if (!callarg_transitive
4203 && (flags & relevant_indirect_flags) != relevant_indirect_flags)
4205 struct constraint_expr lhs, rhs;
4206 indir_tem = new_var_info (NULL_TREE, "indircallarg", true);
4207 indir_tem->is_reg_var = true;
4209 /* indir_term = *tem. */
4210 lhs.type = SCALAR;
4211 lhs.var = indir_tem->id;
4212 lhs.offset = 0;
4214 rhs.type = DEREF;
4215 rhs.var = tem->id;
4216 rhs.offset = UNKNOWN_OFFSET;
4217 process_constraint (new_constraint (lhs, rhs));
4219 make_any_offset_constraints (indir_tem);
4221 /* If we do not read indirectly there is no need for transitive closure.
4222 We know there is only one level of indirection. */
4223 if (!(flags & EAF_NO_INDIRECT_READ))
4224 make_transitive_closure_constraints (indir_tem);
4225 gcc_checking_assert (!(flags & EAF_NO_DIRECT_READ));
4228 if (gimple_call_lhs (stmt))
4230 if (!(flags & EAF_NOT_RETURNED_DIRECTLY))
4232 struct constraint_expr cexpr;
4233 cexpr.var = tem->id;
4234 cexpr.type = SCALAR;
4235 cexpr.offset = 0;
4236 results->safe_push (cexpr);
4238 if (!callarg_transitive & !(flags & EAF_NOT_RETURNED_INDIRECTLY))
4240 struct constraint_expr cexpr;
4241 cexpr.var = indir_tem->id;
4242 cexpr.type = SCALAR;
4243 cexpr.offset = 0;
4244 results->safe_push (cexpr);
4248 if (!(flags & EAF_NO_DIRECT_READ))
4250 varinfo_t uses = get_call_use_vi (stmt);
4251 make_copy_constraint (uses, tem->id);
4252 if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_READ))
4253 make_copy_constraint (uses, indir_tem->id);
4255 else
4256 /* To read indirectly we need to read directly. */
4257 gcc_checking_assert (flags & EAF_NO_INDIRECT_READ);
4259 if (!(flags & EAF_NO_DIRECT_CLOBBER))
4261 struct constraint_expr lhs, rhs;
4263 /* *arg = callescape. */
4264 lhs.type = DEREF;
4265 lhs.var = tem->id;
4266 lhs.offset = 0;
4268 rhs.type = SCALAR;
4269 rhs.var = callescape_id;
4270 rhs.offset = 0;
4271 process_constraint (new_constraint (lhs, rhs));
4273 /* callclobbered = arg. */
4274 make_copy_constraint (get_call_clobber_vi (stmt), tem->id);
4276 if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_CLOBBER))
4278 struct constraint_expr lhs, rhs;
4280 /* *indir_arg = callescape. */
4281 lhs.type = DEREF;
4282 lhs.var = indir_tem->id;
4283 lhs.offset = 0;
4285 rhs.type = SCALAR;
4286 rhs.var = callescape_id;
4287 rhs.offset = 0;
4288 process_constraint (new_constraint (lhs, rhs));
4290 /* callclobbered = indir_arg. */
4291 make_copy_constraint (get_call_clobber_vi (stmt), indir_tem->id);
4294 if (!(flags & (EAF_NO_DIRECT_ESCAPE | EAF_NO_INDIRECT_ESCAPE)))
4296 struct constraint_expr lhs, rhs;
4298 /* callescape = arg; */
4299 lhs.var = callescape_id;
4300 lhs.offset = 0;
4301 lhs.type = SCALAR;
4303 rhs.var = tem->id;
4304 rhs.offset = 0;
4305 rhs.type = SCALAR;
4306 process_constraint (new_constraint (lhs, rhs));
4308 if (writes_global_memory)
4309 make_escape_constraint (arg);
4311 else if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_ESCAPE))
4313 struct constraint_expr lhs, rhs;
4315 /* callescape = *(indir_arg + UNKNOWN); */
4316 lhs.var = callescape_id;
4317 lhs.offset = 0;
4318 lhs.type = SCALAR;
4320 rhs.var = indir_tem->id;
4321 rhs.offset = 0;
4322 rhs.type = SCALAR;
4323 process_constraint (new_constraint (lhs, rhs));
4325 if (writes_global_memory)
4326 make_indirect_escape_constraint (tem);
4330 /* Determine global memory access of call STMT and update
4331 WRITES_GLOBAL_MEMORY, READS_GLOBAL_MEMORY and USES_GLOBAL_MEMORY. */
4333 static void
4334 determine_global_memory_access (gcall *stmt,
4335 bool *writes_global_memory,
4336 bool *reads_global_memory,
4337 bool *uses_global_memory)
4339 tree callee;
4340 cgraph_node *node;
4341 modref_summary *summary;
4343 /* We need to detrmine reads to set uses. */
4344 gcc_assert (!uses_global_memory || reads_global_memory);
4346 if ((callee = gimple_call_fndecl (stmt)) != NULL_TREE
4347 && (node = cgraph_node::get (callee)) != NULL
4348 && (summary = get_modref_function_summary (node)))
4350 if (writes_global_memory && *writes_global_memory)
4351 *writes_global_memory = summary->global_memory_written;
4352 if (reads_global_memory && *reads_global_memory)
4353 *reads_global_memory = summary->global_memory_read;
4354 if (reads_global_memory && uses_global_memory
4355 && !summary->calls_interposable
4356 && !*reads_global_memory && node->binds_to_current_def_p ())
4357 *uses_global_memory = false;
4359 if ((writes_global_memory && *writes_global_memory)
4360 || (uses_global_memory && *uses_global_memory)
4361 || (reads_global_memory && *reads_global_memory))
4363 attr_fnspec fnspec = gimple_call_fnspec (stmt);
4364 if (fnspec.known_p ())
4366 if (writes_global_memory
4367 && !fnspec.global_memory_written_p ())
4368 *writes_global_memory = false;
4369 if (reads_global_memory && !fnspec.global_memory_read_p ())
4371 *reads_global_memory = false;
4372 if (uses_global_memory)
4373 *uses_global_memory = false;
4379 /* For non-IPA mode, generate constraints necessary for a call on the
4380 RHS and collect return value constraint to RESULTS to be used later in
4381 handle_lhs_call.
4383 IMPLICIT_EAF_FLAGS are added to each function argument. If
4384 WRITES_GLOBAL_MEMORY is true function is assumed to possibly write to global
4385 memory. Similar for READS_GLOBAL_MEMORY. */
4387 static void
4388 handle_rhs_call (gcall *stmt, vec<ce_s> *results,
4389 int implicit_eaf_flags,
4390 bool writes_global_memory,
4391 bool reads_global_memory)
4393 determine_global_memory_access (stmt, &writes_global_memory,
4394 &reads_global_memory,
4395 NULL);
4397 varinfo_t callescape = new_var_info (NULL_TREE, "callescape", true);
4399 /* If function can use global memory, add it to callescape
4400 and to possible return values. If not we can still use/return addresses
4401 of global symbols. */
4402 struct constraint_expr lhs, rhs;
4404 lhs.type = SCALAR;
4405 lhs.var = callescape->id;
4406 lhs.offset = 0;
4408 rhs.type = reads_global_memory ? SCALAR : ADDRESSOF;
4409 rhs.var = nonlocal_id;
4410 rhs.offset = 0;
4412 process_constraint (new_constraint (lhs, rhs));
4413 results->safe_push (rhs);
4415 varinfo_t uses = get_call_use_vi (stmt);
4416 make_copy_constraint (uses, callescape->id);
4418 for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
4420 tree arg = gimple_call_arg (stmt, i);
4421 int flags = gimple_call_arg_flags (stmt, i);
4422 handle_call_arg (stmt, arg, results,
4423 flags | implicit_eaf_flags,
4424 callescape->id, writes_global_memory);
4427 /* The static chain escapes as well. */
4428 if (gimple_call_chain (stmt))
4429 handle_call_arg (stmt, gimple_call_chain (stmt), results,
4430 implicit_eaf_flags
4431 | gimple_call_static_chain_flags (stmt),
4432 callescape->id, writes_global_memory);
4434 /* And if we applied NRV the address of the return slot escapes as well. */
4435 if (gimple_call_return_slot_opt_p (stmt)
4436 && gimple_call_lhs (stmt) != NULL_TREE
4437 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
4439 int flags = gimple_call_retslot_flags (stmt);
4440 const int relevant_flags = EAF_NO_DIRECT_ESCAPE
4441 | EAF_NOT_RETURNED_DIRECTLY;
4443 if (!(flags & EAF_UNUSED) && (flags & relevant_flags) != relevant_flags)
4445 auto_vec<ce_s> tmpc;
4447 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
4449 if (!(flags & EAF_NO_DIRECT_ESCAPE))
4451 make_constraints_to (callescape->id, tmpc);
4452 if (writes_global_memory)
4453 make_constraints_to (escaped_id, tmpc);
4455 if (!(flags & EAF_NOT_RETURNED_DIRECTLY))
4457 struct constraint_expr *c;
4458 unsigned i;
4459 FOR_EACH_VEC_ELT (tmpc, i, c)
4460 results->safe_push (*c);
4466 /* For non-IPA mode, generate constraints necessary for a call
4467 that returns a pointer and assigns it to LHS. This simply makes
4468 the LHS point to global and escaped variables. */
4470 static void
4471 handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> &rhsc,
4472 tree fndecl)
4474 auto_vec<ce_s> lhsc;
4476 get_constraint_for (lhs, &lhsc);
4477 /* If the store is to a global decl make sure to
4478 add proper escape constraints. */
4479 lhs = get_base_address (lhs);
4480 if (lhs
4481 && DECL_P (lhs)
4482 && is_global_var (lhs))
4484 struct constraint_expr tmpc;
4485 tmpc.var = escaped_id;
4486 tmpc.offset = 0;
4487 tmpc.type = SCALAR;
4488 lhsc.safe_push (tmpc);
4491 /* If the call returns an argument unmodified override the rhs
4492 constraints. */
4493 if (flags & ERF_RETURNS_ARG
4494 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
4496 tree arg;
4497 rhsc.truncate (0);
4498 arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
4499 get_constraint_for (arg, &rhsc);
4500 process_all_all_constraints (lhsc, rhsc);
4501 rhsc.truncate (0);
4503 else if (flags & ERF_NOALIAS)
4505 varinfo_t vi;
4506 struct constraint_expr tmpc;
4507 rhsc.truncate (0);
4508 vi = make_heapvar ("HEAP", true);
4509 /* We are marking allocated storage local, we deal with it becoming
4510 global by escaping and setting of vars_contains_escaped_heap. */
4511 DECL_EXTERNAL (vi->decl) = 0;
4512 vi->is_global_var = 0;
4513 /* If this is not a real malloc call assume the memory was
4514 initialized and thus may point to global memory. All
4515 builtin functions with the malloc attribute behave in a sane way. */
4516 if (!fndecl
4517 || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
4518 make_constraint_from (vi, nonlocal_id);
4519 tmpc.var = vi->id;
4520 tmpc.offset = 0;
4521 tmpc.type = ADDRESSOF;
4522 rhsc.safe_push (tmpc);
4523 process_all_all_constraints (lhsc, rhsc);
4524 rhsc.truncate (0);
4526 else
4527 process_all_all_constraints (lhsc, rhsc);
4531 /* Return the varinfo for the callee of CALL. */
4533 static varinfo_t
4534 get_fi_for_callee (gcall *call)
4536 tree decl, fn = gimple_call_fn (call);
4538 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4539 fn = OBJ_TYPE_REF_EXPR (fn);
4541 /* If we can directly resolve the function being called, do so.
4542 Otherwise, it must be some sort of indirect expression that
4543 we should still be able to handle. */
4544 decl = gimple_call_addr_fndecl (fn);
4545 if (decl)
4546 return get_vi_for_tree (decl);
4548 /* If the function is anything other than a SSA name pointer we have no
4549 clue and should be getting ANYFN (well, ANYTHING for now). */
4550 if (!fn || TREE_CODE (fn) != SSA_NAME)
4551 return get_varinfo (anything_id);
4553 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4554 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4555 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4556 fn = SSA_NAME_VAR (fn);
4558 return get_vi_for_tree (fn);
4561 /* Create constraints for assigning call argument ARG to the incoming parameter
4562 INDEX of function FI. */
4564 static void
4565 find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg)
4567 struct constraint_expr lhs;
4568 lhs = get_function_part_constraint (fi, fi_parm_base + index);
4570 auto_vec<ce_s, 2> rhsc;
4571 get_constraint_for_rhs (arg, &rhsc);
4573 unsigned j;
4574 struct constraint_expr *rhsp;
4575 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4576 process_constraint (new_constraint (lhs, *rhsp));
4579 /* Return true if FNDECL may be part of another lto partition. */
4581 static bool
4582 fndecl_maybe_in_other_partition (tree fndecl)
4584 cgraph_node *fn_node = cgraph_node::get (fndecl);
4585 if (fn_node == NULL)
4586 return true;
4588 return fn_node->in_other_partition;
4591 /* Create constraints for the builtin call T. Return true if the call
4592 was handled, otherwise false. */
4594 static bool
4595 find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
4597 tree fndecl = gimple_call_fndecl (t);
4598 auto_vec<ce_s, 2> lhsc;
4599 auto_vec<ce_s, 4> rhsc;
4600 varinfo_t fi;
4602 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4603 /* ??? All builtins that are handled here need to be handled
4604 in the alias-oracle query functions explicitly! */
4605 switch (DECL_FUNCTION_CODE (fndecl))
4607 /* All the following functions return a pointer to the same object
4608 as their first argument points to. The functions do not add
4609 to the ESCAPED solution. The functions make the first argument
4610 pointed to memory point to what the second argument pointed to
4611 memory points to. */
4612 case BUILT_IN_STRCPY:
4613 case BUILT_IN_STRNCPY:
4614 case BUILT_IN_BCOPY:
4615 case BUILT_IN_MEMCPY:
4616 case BUILT_IN_MEMMOVE:
4617 case BUILT_IN_MEMPCPY:
4618 case BUILT_IN_STPCPY:
4619 case BUILT_IN_STPNCPY:
4620 case BUILT_IN_STRCAT:
4621 case BUILT_IN_STRNCAT:
4622 case BUILT_IN_STRCPY_CHK:
4623 case BUILT_IN_STRNCPY_CHK:
4624 case BUILT_IN_MEMCPY_CHK:
4625 case BUILT_IN_MEMMOVE_CHK:
4626 case BUILT_IN_MEMPCPY_CHK:
4627 case BUILT_IN_STPCPY_CHK:
4628 case BUILT_IN_STPNCPY_CHK:
4629 case BUILT_IN_STRCAT_CHK:
4630 case BUILT_IN_STRNCAT_CHK:
4631 case BUILT_IN_TM_MEMCPY:
4632 case BUILT_IN_TM_MEMMOVE:
4634 tree res = gimple_call_lhs (t);
4635 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4636 == BUILT_IN_BCOPY ? 1 : 0));
4637 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4638 == BUILT_IN_BCOPY ? 0 : 1));
4639 if (res != NULL_TREE)
4641 get_constraint_for (res, &lhsc);
4642 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4643 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4644 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4645 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4646 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4647 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4648 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4649 else
4650 get_constraint_for (dest, &rhsc);
4651 process_all_all_constraints (lhsc, rhsc);
4652 lhsc.truncate (0);
4653 rhsc.truncate (0);
4655 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4656 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4657 do_deref (&lhsc);
4658 do_deref (&rhsc);
4659 process_all_all_constraints (lhsc, rhsc);
4660 return true;
4662 case BUILT_IN_MEMSET:
4663 case BUILT_IN_MEMSET_CHK:
4664 case BUILT_IN_TM_MEMSET:
4666 tree res = gimple_call_lhs (t);
4667 tree dest = gimple_call_arg (t, 0);
4668 unsigned i;
4669 ce_s *lhsp;
4670 struct constraint_expr ac;
4671 if (res != NULL_TREE)
4673 get_constraint_for (res, &lhsc);
4674 get_constraint_for (dest, &rhsc);
4675 process_all_all_constraints (lhsc, rhsc);
4676 lhsc.truncate (0);
4678 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4679 do_deref (&lhsc);
4680 if (flag_delete_null_pointer_checks
4681 && integer_zerop (gimple_call_arg (t, 1)))
4683 ac.type = ADDRESSOF;
4684 ac.var = nothing_id;
4686 else
4688 ac.type = SCALAR;
4689 ac.var = integer_id;
4691 ac.offset = 0;
4692 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4693 process_constraint (new_constraint (*lhsp, ac));
4694 return true;
4696 case BUILT_IN_STACK_SAVE:
4697 case BUILT_IN_STACK_RESTORE:
4698 /* Nothing interesting happens. */
4699 return true;
4700 case BUILT_IN_ALLOCA:
4701 case BUILT_IN_ALLOCA_WITH_ALIGN:
4702 case BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX:
4704 tree ptr = gimple_call_lhs (t);
4705 if (ptr == NULL_TREE)
4706 return true;
4707 get_constraint_for (ptr, &lhsc);
4708 varinfo_t vi = make_heapvar ("HEAP", true);
4709 /* Alloca storage is never global. To exempt it from escaped
4710 handling make it a non-heap var. */
4711 DECL_EXTERNAL (vi->decl) = 0;
4712 vi->is_global_var = 0;
4713 vi->is_heap_var = 0;
4714 struct constraint_expr tmpc;
4715 tmpc.var = vi->id;
4716 tmpc.offset = 0;
4717 tmpc.type = ADDRESSOF;
4718 rhsc.safe_push (tmpc);
4719 process_all_all_constraints (lhsc, rhsc);
4720 return true;
4722 case BUILT_IN_POSIX_MEMALIGN:
4724 tree ptrptr = gimple_call_arg (t, 0);
4725 get_constraint_for (ptrptr, &lhsc);
4726 do_deref (&lhsc);
4727 varinfo_t vi = make_heapvar ("HEAP", true);
4728 /* We are marking allocated storage local, we deal with it becoming
4729 global by escaping and setting of vars_contains_escaped_heap. */
4730 DECL_EXTERNAL (vi->decl) = 0;
4731 vi->is_global_var = 0;
4732 struct constraint_expr tmpc;
4733 tmpc.var = vi->id;
4734 tmpc.offset = 0;
4735 tmpc.type = ADDRESSOF;
4736 rhsc.safe_push (tmpc);
4737 process_all_all_constraints (lhsc, rhsc);
4738 return true;
4740 case BUILT_IN_ASSUME_ALIGNED:
4742 tree res = gimple_call_lhs (t);
4743 tree dest = gimple_call_arg (t, 0);
4744 if (res != NULL_TREE)
4746 get_constraint_for (res, &lhsc);
4747 get_constraint_for (dest, &rhsc);
4748 process_all_all_constraints (lhsc, rhsc);
4750 return true;
4752 /* All the following functions do not return pointers, do not
4753 modify the points-to sets of memory reachable from their
4754 arguments and do not add to the ESCAPED solution. */
4755 case BUILT_IN_SINCOS:
4756 case BUILT_IN_SINCOSF:
4757 case BUILT_IN_SINCOSL:
4758 case BUILT_IN_FREXP:
4759 case BUILT_IN_FREXPF:
4760 case BUILT_IN_FREXPL:
4761 case BUILT_IN_GAMMA_R:
4762 case BUILT_IN_GAMMAF_R:
4763 case BUILT_IN_GAMMAL_R:
4764 case BUILT_IN_LGAMMA_R:
4765 case BUILT_IN_LGAMMAF_R:
4766 case BUILT_IN_LGAMMAL_R:
4767 case BUILT_IN_MODF:
4768 case BUILT_IN_MODFF:
4769 case BUILT_IN_MODFL:
4770 case BUILT_IN_REMQUO:
4771 case BUILT_IN_REMQUOF:
4772 case BUILT_IN_REMQUOL:
4773 case BUILT_IN_FREE:
4774 return true;
4775 case BUILT_IN_STRDUP:
4776 case BUILT_IN_STRNDUP:
4777 case BUILT_IN_REALLOC:
4778 if (gimple_call_lhs (t))
4780 auto_vec<ce_s> rhsc;
4781 handle_lhs_call (t, gimple_call_lhs (t),
4782 gimple_call_return_flags (t) | ERF_NOALIAS,
4783 rhsc, fndecl);
4784 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4785 NULL_TREE, &lhsc);
4786 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4787 NULL_TREE, &rhsc);
4788 do_deref (&lhsc);
4789 do_deref (&rhsc);
4790 process_all_all_constraints (lhsc, rhsc);
4791 lhsc.truncate (0);
4792 rhsc.truncate (0);
4793 /* For realloc the resulting pointer can be equal to the
4794 argument as well. But only doing this wouldn't be
4795 correct because with ptr == 0 realloc behaves like malloc. */
4796 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
4798 get_constraint_for (gimple_call_lhs (t), &lhsc);
4799 get_constraint_for (gimple_call_arg (t, 0), &rhsc);
4800 process_all_all_constraints (lhsc, rhsc);
4802 return true;
4804 break;
4805 /* String / character search functions return a pointer into the
4806 source string or NULL. */
4807 case BUILT_IN_INDEX:
4808 case BUILT_IN_STRCHR:
4809 case BUILT_IN_STRRCHR:
4810 case BUILT_IN_MEMCHR:
4811 case BUILT_IN_STRSTR:
4812 case BUILT_IN_STRPBRK:
4813 if (gimple_call_lhs (t))
4815 tree src = gimple_call_arg (t, 0);
4816 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4817 constraint_expr nul;
4818 nul.var = nothing_id;
4819 nul.offset = 0;
4820 nul.type = ADDRESSOF;
4821 rhsc.safe_push (nul);
4822 get_constraint_for (gimple_call_lhs (t), &lhsc);
4823 process_all_all_constraints (lhsc, rhsc);
4825 return true;
4826 /* Pure functions that return something not based on any object and
4827 that use the memory pointed to by their arguments (but not
4828 transitively). */
4829 case BUILT_IN_STRCMP:
4830 case BUILT_IN_STRCMP_EQ:
4831 case BUILT_IN_STRNCMP:
4832 case BUILT_IN_STRNCMP_EQ:
4833 case BUILT_IN_STRCASECMP:
4834 case BUILT_IN_STRNCASECMP:
4835 case BUILT_IN_MEMCMP:
4836 case BUILT_IN_BCMP:
4837 case BUILT_IN_STRSPN:
4838 case BUILT_IN_STRCSPN:
4840 varinfo_t uses = get_call_use_vi (t);
4841 make_any_offset_constraints (uses);
4842 make_constraint_to (uses->id, gimple_call_arg (t, 0));
4843 make_constraint_to (uses->id, gimple_call_arg (t, 1));
4844 /* No constraints are necessary for the return value. */
4845 return true;
4847 case BUILT_IN_STRLEN:
4849 varinfo_t uses = get_call_use_vi (t);
4850 make_any_offset_constraints (uses);
4851 make_constraint_to (uses->id, gimple_call_arg (t, 0));
4852 /* No constraints are necessary for the return value. */
4853 return true;
4855 case BUILT_IN_OBJECT_SIZE:
4856 case BUILT_IN_CONSTANT_P:
4858 /* No constraints are necessary for the return value or the
4859 arguments. */
4860 return true;
4862 /* Trampolines are special - they set up passing the static
4863 frame. */
4864 case BUILT_IN_INIT_TRAMPOLINE:
4866 tree tramp = gimple_call_arg (t, 0);
4867 tree nfunc = gimple_call_arg (t, 1);
4868 tree frame = gimple_call_arg (t, 2);
4869 unsigned i;
4870 struct constraint_expr lhs, *rhsp;
4871 if (in_ipa_mode)
4873 varinfo_t nfi = NULL;
4874 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4875 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4876 if (nfi)
4878 lhs = get_function_part_constraint (nfi, fi_static_chain);
4879 get_constraint_for (frame, &rhsc);
4880 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4881 process_constraint (new_constraint (lhs, *rhsp));
4882 rhsc.truncate (0);
4884 /* Make the frame point to the function for
4885 the trampoline adjustment call. */
4886 get_constraint_for (tramp, &lhsc);
4887 do_deref (&lhsc);
4888 get_constraint_for (nfunc, &rhsc);
4889 process_all_all_constraints (lhsc, rhsc);
4891 return true;
4894 /* Else fallthru to generic handling which will let
4895 the frame escape. */
4896 break;
4898 case BUILT_IN_ADJUST_TRAMPOLINE:
4900 tree tramp = gimple_call_arg (t, 0);
4901 tree res = gimple_call_lhs (t);
4902 if (in_ipa_mode && res)
4904 get_constraint_for (res, &lhsc);
4905 get_constraint_for (tramp, &rhsc);
4906 do_deref (&rhsc);
4907 process_all_all_constraints (lhsc, rhsc);
4909 return true;
4911 CASE_BUILT_IN_TM_STORE (1):
4912 CASE_BUILT_IN_TM_STORE (2):
4913 CASE_BUILT_IN_TM_STORE (4):
4914 CASE_BUILT_IN_TM_STORE (8):
4915 CASE_BUILT_IN_TM_STORE (FLOAT):
4916 CASE_BUILT_IN_TM_STORE (DOUBLE):
4917 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4918 CASE_BUILT_IN_TM_STORE (M64):
4919 CASE_BUILT_IN_TM_STORE (M128):
4920 CASE_BUILT_IN_TM_STORE (M256):
4922 tree addr = gimple_call_arg (t, 0);
4923 tree src = gimple_call_arg (t, 1);
4925 get_constraint_for (addr, &lhsc);
4926 do_deref (&lhsc);
4927 get_constraint_for (src, &rhsc);
4928 process_all_all_constraints (lhsc, rhsc);
4929 return true;
4931 CASE_BUILT_IN_TM_LOAD (1):
4932 CASE_BUILT_IN_TM_LOAD (2):
4933 CASE_BUILT_IN_TM_LOAD (4):
4934 CASE_BUILT_IN_TM_LOAD (8):
4935 CASE_BUILT_IN_TM_LOAD (FLOAT):
4936 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4937 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4938 CASE_BUILT_IN_TM_LOAD (M64):
4939 CASE_BUILT_IN_TM_LOAD (M128):
4940 CASE_BUILT_IN_TM_LOAD (M256):
4942 tree dest = gimple_call_lhs (t);
4943 tree addr = gimple_call_arg (t, 0);
4945 get_constraint_for (dest, &lhsc);
4946 get_constraint_for (addr, &rhsc);
4947 do_deref (&rhsc);
4948 process_all_all_constraints (lhsc, rhsc);
4949 return true;
4951 /* Variadic argument handling needs to be handled in IPA
4952 mode as well. */
4953 case BUILT_IN_VA_START:
4955 tree valist = gimple_call_arg (t, 0);
4956 struct constraint_expr rhs, *lhsp;
4957 unsigned i;
4958 get_constraint_for_ptr_offset (valist, NULL_TREE, &lhsc);
4959 do_deref (&lhsc);
4960 /* The va_list gets access to pointers in variadic
4961 arguments. Which we know in the case of IPA analysis
4962 and otherwise are just all nonlocal variables. */
4963 if (in_ipa_mode)
4965 fi = lookup_vi_for_tree (fn->decl);
4966 rhs = get_function_part_constraint (fi, ~0);
4967 rhs.type = ADDRESSOF;
4969 else
4971 rhs.var = nonlocal_id;
4972 rhs.type = ADDRESSOF;
4973 rhs.offset = 0;
4975 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4976 process_constraint (new_constraint (*lhsp, rhs));
4977 /* va_list is clobbered. */
4978 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4979 return true;
4981 /* va_end doesn't have any effect that matters. */
4982 case BUILT_IN_VA_END:
4983 return true;
4984 /* Alternate return. Simply give up for now. */
4985 case BUILT_IN_RETURN:
4987 fi = NULL;
4988 if (!in_ipa_mode
4989 || !(fi = get_vi_for_tree (fn->decl)))
4990 make_constraint_from (get_varinfo (escaped_id), anything_id);
4991 else if (in_ipa_mode
4992 && fi != NULL)
4994 struct constraint_expr lhs, rhs;
4995 lhs = get_function_part_constraint (fi, fi_result);
4996 rhs.var = anything_id;
4997 rhs.offset = 0;
4998 rhs.type = SCALAR;
4999 process_constraint (new_constraint (lhs, rhs));
5001 return true;
5003 case BUILT_IN_GOMP_PARALLEL:
5004 case BUILT_IN_GOACC_PARALLEL:
5006 if (in_ipa_mode)
5008 unsigned int fnpos, argpos;
5009 switch (DECL_FUNCTION_CODE (fndecl))
5011 case BUILT_IN_GOMP_PARALLEL:
5012 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
5013 fnpos = 0;
5014 argpos = 1;
5015 break;
5016 case BUILT_IN_GOACC_PARALLEL:
5017 /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
5018 sizes, kinds, ...). */
5019 fnpos = 1;
5020 argpos = 3;
5021 break;
5022 default:
5023 gcc_unreachable ();
5026 tree fnarg = gimple_call_arg (t, fnpos);
5027 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
5028 tree fndecl = TREE_OPERAND (fnarg, 0);
5029 if (fndecl_maybe_in_other_partition (fndecl))
5030 /* Fallthru to general call handling. */
5031 break;
5033 tree arg = gimple_call_arg (t, argpos);
5035 varinfo_t fi = get_vi_for_tree (fndecl);
5036 find_func_aliases_for_call_arg (fi, 0, arg);
5037 return true;
5039 /* Else fallthru to generic call handling. */
5040 break;
5042 /* printf-style functions may have hooks to set pointers to
5043 point to somewhere into the generated string. Leave them
5044 for a later exercise... */
5045 default:
5046 /* Fallthru to general call handling. */;
5049 return false;
5052 /* Create constraints for the call T. */
5054 static void
5055 find_func_aliases_for_call (struct function *fn, gcall *t)
5057 tree fndecl = gimple_call_fndecl (t);
5058 varinfo_t fi;
5060 if (fndecl != NULL_TREE
5061 && fndecl_built_in_p (fndecl)
5062 && find_func_aliases_for_builtin_call (fn, t))
5063 return;
5065 if (gimple_call_internal_p (t, IFN_DEFERRED_INIT))
5066 return;
5068 fi = get_fi_for_callee (t);
5069 if (!in_ipa_mode
5070 || (fi->decl && fndecl && !fi->is_fn_info))
5072 auto_vec<ce_s, 16> rhsc;
5073 int flags = gimple_call_flags (t);
5075 /* Const functions can return their arguments and addresses
5076 of global memory but not of escaped memory. */
5077 if (flags & (ECF_CONST|ECF_NOVOPS))
5079 if (gimple_call_lhs (t))
5080 handle_rhs_call (t, &rhsc, implicit_const_eaf_flags, false, false);
5082 /* Pure functions can return addresses in and of memory
5083 reachable from their arguments, but they are not an escape
5084 point for reachable memory of their arguments. */
5085 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
5086 handle_rhs_call (t, &rhsc, implicit_pure_eaf_flags, false, true);
5087 /* If the call is to a replaceable operator delete and results
5088 from a delete expression as opposed to a direct call to
5089 such operator, then the effects for PTA (in particular
5090 the escaping of the pointer) can be ignored. */
5091 else if (fndecl
5092 && DECL_IS_OPERATOR_DELETE_P (fndecl)
5093 && gimple_call_from_new_or_delete (t))
5095 else
5096 handle_rhs_call (t, &rhsc, 0, true, true);
5097 if (gimple_call_lhs (t))
5098 handle_lhs_call (t, gimple_call_lhs (t),
5099 gimple_call_return_flags (t), rhsc, fndecl);
5101 else
5103 auto_vec<ce_s, 2> rhsc;
5104 tree lhsop;
5105 unsigned j;
5107 /* Assign all the passed arguments to the appropriate incoming
5108 parameters of the function. */
5109 for (j = 0; j < gimple_call_num_args (t); j++)
5111 tree arg = gimple_call_arg (t, j);
5112 find_func_aliases_for_call_arg (fi, j, arg);
5115 /* If we are returning a value, assign it to the result. */
5116 lhsop = gimple_call_lhs (t);
5117 if (lhsop)
5119 auto_vec<ce_s, 2> lhsc;
5120 struct constraint_expr rhs;
5121 struct constraint_expr *lhsp;
5122 bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (t));
5124 get_constraint_for (lhsop, &lhsc);
5125 rhs = get_function_part_constraint (fi, fi_result);
5126 if (aggr_p)
5128 auto_vec<ce_s, 2> tem;
5129 tem.quick_push (rhs);
5130 do_deref (&tem);
5131 gcc_checking_assert (tem.length () == 1);
5132 rhs = tem[0];
5134 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
5135 process_constraint (new_constraint (*lhsp, rhs));
5137 /* If we pass the result decl by reference, honor that. */
5138 if (aggr_p)
5140 struct constraint_expr lhs;
5141 struct constraint_expr *rhsp;
5143 get_constraint_for_address_of (lhsop, &rhsc);
5144 lhs = get_function_part_constraint (fi, fi_result);
5145 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5146 process_constraint (new_constraint (lhs, *rhsp));
5147 rhsc.truncate (0);
5151 /* If we use a static chain, pass it along. */
5152 if (gimple_call_chain (t))
5154 struct constraint_expr lhs;
5155 struct constraint_expr *rhsp;
5157 get_constraint_for (gimple_call_chain (t), &rhsc);
5158 lhs = get_function_part_constraint (fi, fi_static_chain);
5159 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5160 process_constraint (new_constraint (lhs, *rhsp));
5165 /* Walk statement T setting up aliasing constraints according to the
5166 references found in T. This function is the main part of the
5167 constraint builder. AI points to auxiliary alias information used
5168 when building alias sets and computing alias grouping heuristics. */
5170 static void
5171 find_func_aliases (struct function *fn, gimple *origt)
5173 gimple *t = origt;
5174 auto_vec<ce_s, 16> lhsc;
5175 auto_vec<ce_s, 16> rhsc;
5176 varinfo_t fi;
5178 /* Now build constraints expressions. */
5179 if (gimple_code (t) == GIMPLE_PHI)
5181 /* For a phi node, assign all the arguments to
5182 the result. */
5183 get_constraint_for (gimple_phi_result (t), &lhsc);
5184 for (unsigned i = 0; i < gimple_phi_num_args (t); i++)
5186 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
5187 process_all_all_constraints (lhsc, rhsc);
5188 rhsc.truncate (0);
5191 /* In IPA mode, we need to generate constraints to pass call
5192 arguments through their calls. There are two cases,
5193 either a GIMPLE_CALL returning a value, or just a plain
5194 GIMPLE_CALL when we are not.
5196 In non-ipa mode, we need to generate constraints for each
5197 pointer passed by address. */
5198 else if (is_gimple_call (t))
5199 find_func_aliases_for_call (fn, as_a <gcall *> (t));
5201 /* Otherwise, just a regular assignment statement. Only care about
5202 operations with pointer result, others are dealt with as escape
5203 points if they have pointer operands. */
5204 else if (is_gimple_assign (t))
5206 /* Otherwise, just a regular assignment statement. */
5207 tree lhsop = gimple_assign_lhs (t);
5208 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
5210 if (rhsop && TREE_CLOBBER_P (rhsop))
5211 /* Ignore clobbers, they don't actually store anything into
5212 the LHS. */
5214 else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
5215 do_structure_copy (lhsop, rhsop);
5216 else
5218 enum tree_code code = gimple_assign_rhs_code (t);
5220 get_constraint_for (lhsop, &lhsc);
5222 if (code == POINTER_PLUS_EXPR)
5223 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
5224 gimple_assign_rhs2 (t), &rhsc);
5225 else if (code == POINTER_DIFF_EXPR)
5226 /* The result is not a pointer (part). */
5228 else if (code == BIT_AND_EXPR
5229 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
5231 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
5232 the pointer. Handle it by offsetting it by UNKNOWN. */
5233 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
5234 NULL_TREE, &rhsc);
5236 else if (code == TRUNC_DIV_EXPR
5237 || code == CEIL_DIV_EXPR
5238 || code == FLOOR_DIV_EXPR
5239 || code == ROUND_DIV_EXPR
5240 || code == EXACT_DIV_EXPR
5241 || code == TRUNC_MOD_EXPR
5242 || code == CEIL_MOD_EXPR
5243 || code == FLOOR_MOD_EXPR
5244 || code == ROUND_MOD_EXPR)
5245 /* Division and modulo transfer the pointer from the LHS. */
5246 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
5247 NULL_TREE, &rhsc);
5248 else if (CONVERT_EXPR_CODE_P (code)
5249 || gimple_assign_single_p (t))
5250 /* See through conversions, single RHS are handled by
5251 get_constraint_for_rhs. */
5252 get_constraint_for_rhs (rhsop, &rhsc);
5253 else if (code == COND_EXPR)
5255 /* The result is a merge of both COND_EXPR arms. */
5256 auto_vec<ce_s, 2> tmp;
5257 struct constraint_expr *rhsp;
5258 unsigned i;
5259 get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
5260 get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
5261 FOR_EACH_VEC_ELT (tmp, i, rhsp)
5262 rhsc.safe_push (*rhsp);
5264 else if (truth_value_p (code))
5265 /* Truth value results are not pointer (parts). Or at least
5266 very unreasonable obfuscation of a part. */
5268 else
5270 /* All other operations are possibly offsetting merges. */
5271 auto_vec<ce_s, 4> tmp;
5272 struct constraint_expr *rhsp;
5273 unsigned i, j;
5274 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
5275 NULL_TREE, &rhsc);
5276 for (i = 2; i < gimple_num_ops (t); ++i)
5278 get_constraint_for_ptr_offset (gimple_op (t, i),
5279 NULL_TREE, &tmp);
5280 FOR_EACH_VEC_ELT (tmp, j, rhsp)
5281 rhsc.safe_push (*rhsp);
5282 tmp.truncate (0);
5285 process_all_all_constraints (lhsc, rhsc);
5287 /* If there is a store to a global variable the rhs escapes. */
5288 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
5289 && DECL_P (lhsop))
5291 varinfo_t vi = get_vi_for_tree (lhsop);
5292 if ((! in_ipa_mode && vi->is_global_var)
5293 || vi->is_ipa_escape_point)
5294 make_escape_constraint (rhsop);
5297 /* Handle escapes through return. */
5298 else if (gimple_code (t) == GIMPLE_RETURN
5299 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
5301 greturn *return_stmt = as_a <greturn *> (t);
5302 tree retval = gimple_return_retval (return_stmt);
5303 if (!in_ipa_mode)
5304 make_constraint_to (escaped_return_id, retval);
5305 else
5307 struct constraint_expr lhs ;
5308 struct constraint_expr *rhsp;
5309 unsigned i;
5311 fi = lookup_vi_for_tree (fn->decl);
5312 lhs = get_function_part_constraint (fi, fi_result);
5313 get_constraint_for_rhs (retval, &rhsc);
5314 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5315 process_constraint (new_constraint (lhs, *rhsp));
5318 /* Handle asms conservatively by adding escape constraints to everything. */
5319 else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
5321 unsigned i, noutputs;
5322 const char **oconstraints;
5323 const char *constraint;
5324 bool allows_mem, allows_reg, is_inout;
5326 noutputs = gimple_asm_noutputs (asm_stmt);
5327 oconstraints = XALLOCAVEC (const char *, noutputs);
5329 for (i = 0; i < noutputs; ++i)
5331 tree link = gimple_asm_output_op (asm_stmt, i);
5332 tree op = TREE_VALUE (link);
5334 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5335 oconstraints[i] = constraint;
5336 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
5337 &allows_reg, &is_inout);
5339 /* A memory constraint makes the address of the operand escape. */
5340 if (!allows_reg && allows_mem)
5342 auto_vec<ce_s> tmpc;
5343 get_constraint_for_address_of (op, &tmpc);
5344 make_constraints_to (escaped_id, tmpc);
5347 /* The asm may read global memory, so outputs may point to
5348 any global memory. */
5349 if (op)
5351 auto_vec<ce_s, 2> lhsc;
5352 struct constraint_expr rhsc, *lhsp;
5353 unsigned j;
5354 get_constraint_for (op, &lhsc);
5355 rhsc.var = nonlocal_id;
5356 rhsc.offset = 0;
5357 rhsc.type = SCALAR;
5358 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
5359 process_constraint (new_constraint (*lhsp, rhsc));
5362 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5364 tree link = gimple_asm_input_op (asm_stmt, i);
5365 tree op = TREE_VALUE (link);
5367 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5369 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
5370 &allows_mem, &allows_reg);
5372 /* A memory constraint makes the address of the operand escape. */
5373 if (!allows_reg && allows_mem)
5375 auto_vec<ce_s> tmpc;
5376 get_constraint_for_address_of (op, &tmpc);
5377 make_constraints_to (escaped_id, tmpc);
5379 /* Strictly we'd only need the constraint to ESCAPED if
5380 the asm clobbers memory, otherwise using something
5381 along the lines of per-call clobbers/uses would be enough. */
5382 else if (op)
5383 make_escape_constraint (op);
5389 /* Create a constraint adding to the clobber set of FI the memory
5390 pointed to by PTR. */
5392 static void
5393 process_ipa_clobber (varinfo_t fi, tree ptr)
5395 vec<ce_s> ptrc = vNULL;
5396 struct constraint_expr *c, lhs;
5397 unsigned i;
5398 get_constraint_for_rhs (ptr, &ptrc);
5399 lhs = get_function_part_constraint (fi, fi_clobbers);
5400 FOR_EACH_VEC_ELT (ptrc, i, c)
5401 process_constraint (new_constraint (lhs, *c));
5402 ptrc.release ();
5405 /* Walk statement T setting up clobber and use constraints according to the
5406 references found in T. This function is a main part of the
5407 IPA constraint builder. */
5409 static void
5410 find_func_clobbers (struct function *fn, gimple *origt)
5412 gimple *t = origt;
5413 auto_vec<ce_s, 16> lhsc;
5414 auto_vec<ce_s, 16> rhsc;
5415 varinfo_t fi;
5417 /* Add constraints for clobbered/used in IPA mode.
5418 We are not interested in what automatic variables are clobbered
5419 or used as we only use the information in the caller to which
5420 they do not escape. */
5421 gcc_assert (in_ipa_mode);
5423 /* If the stmt refers to memory in any way it better had a VUSE. */
5424 if (gimple_vuse (t) == NULL_TREE)
5425 return;
5427 /* We'd better have function information for the current function. */
5428 fi = lookup_vi_for_tree (fn->decl);
5429 gcc_assert (fi != NULL);
5431 /* Account for stores in assignments and calls. */
5432 if (gimple_vdef (t) != NULL_TREE
5433 && gimple_has_lhs (t))
5435 tree lhs = gimple_get_lhs (t);
5436 tree tem = lhs;
5437 while (handled_component_p (tem))
5438 tem = TREE_OPERAND (tem, 0);
5439 if ((DECL_P (tem)
5440 && !auto_var_in_fn_p (tem, fn->decl))
5441 || INDIRECT_REF_P (tem)
5442 || (TREE_CODE (tem) == MEM_REF
5443 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5444 && auto_var_in_fn_p
5445 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5447 struct constraint_expr lhsc, *rhsp;
5448 unsigned i;
5449 lhsc = get_function_part_constraint (fi, fi_clobbers);
5450 get_constraint_for_address_of (lhs, &rhsc);
5451 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5452 process_constraint (new_constraint (lhsc, *rhsp));
5453 rhsc.truncate (0);
5457 /* Account for uses in assigments and returns. */
5458 if (gimple_assign_single_p (t)
5459 || (gimple_code (t) == GIMPLE_RETURN
5460 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
5462 tree rhs = (gimple_assign_single_p (t)
5463 ? gimple_assign_rhs1 (t)
5464 : gimple_return_retval (as_a <greturn *> (t)));
5465 tree tem = rhs;
5466 while (handled_component_p (tem))
5467 tem = TREE_OPERAND (tem, 0);
5468 if ((DECL_P (tem)
5469 && !auto_var_in_fn_p (tem, fn->decl))
5470 || INDIRECT_REF_P (tem)
5471 || (TREE_CODE (tem) == MEM_REF
5472 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5473 && auto_var_in_fn_p
5474 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5476 struct constraint_expr lhs, *rhsp;
5477 unsigned i;
5478 lhs = get_function_part_constraint (fi, fi_uses);
5479 get_constraint_for_address_of (rhs, &rhsc);
5480 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5481 process_constraint (new_constraint (lhs, *rhsp));
5482 rhsc.truncate (0);
5486 if (gcall *call_stmt = dyn_cast <gcall *> (t))
5488 varinfo_t cfi = NULL;
5489 tree decl = gimple_call_fndecl (t);
5490 struct constraint_expr lhs, rhs;
5491 unsigned i, j;
5493 /* For builtins we do not have separate function info. For those
5494 we do not generate escapes for we have to generate clobbers/uses. */
5495 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
5496 switch (DECL_FUNCTION_CODE (decl))
5498 /* The following functions use and clobber memory pointed to
5499 by their arguments. */
5500 case BUILT_IN_STRCPY:
5501 case BUILT_IN_STRNCPY:
5502 case BUILT_IN_BCOPY:
5503 case BUILT_IN_MEMCPY:
5504 case BUILT_IN_MEMMOVE:
5505 case BUILT_IN_MEMPCPY:
5506 case BUILT_IN_STPCPY:
5507 case BUILT_IN_STPNCPY:
5508 case BUILT_IN_STRCAT:
5509 case BUILT_IN_STRNCAT:
5510 case BUILT_IN_STRCPY_CHK:
5511 case BUILT_IN_STRNCPY_CHK:
5512 case BUILT_IN_MEMCPY_CHK:
5513 case BUILT_IN_MEMMOVE_CHK:
5514 case BUILT_IN_MEMPCPY_CHK:
5515 case BUILT_IN_STPCPY_CHK:
5516 case BUILT_IN_STPNCPY_CHK:
5517 case BUILT_IN_STRCAT_CHK:
5518 case BUILT_IN_STRNCAT_CHK:
5520 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5521 == BUILT_IN_BCOPY ? 1 : 0));
5522 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5523 == BUILT_IN_BCOPY ? 0 : 1));
5524 unsigned i;
5525 struct constraint_expr *rhsp, *lhsp;
5526 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5527 lhs = get_function_part_constraint (fi, fi_clobbers);
5528 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5529 process_constraint (new_constraint (lhs, *lhsp));
5530 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
5531 lhs = get_function_part_constraint (fi, fi_uses);
5532 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5533 process_constraint (new_constraint (lhs, *rhsp));
5534 return;
5536 /* The following function clobbers memory pointed to by
5537 its argument. */
5538 case BUILT_IN_MEMSET:
5539 case BUILT_IN_MEMSET_CHK:
5540 case BUILT_IN_POSIX_MEMALIGN:
5542 tree dest = gimple_call_arg (t, 0);
5543 unsigned i;
5544 ce_s *lhsp;
5545 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5546 lhs = get_function_part_constraint (fi, fi_clobbers);
5547 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5548 process_constraint (new_constraint (lhs, *lhsp));
5549 return;
5551 /* The following functions clobber their second and third
5552 arguments. */
5553 case BUILT_IN_SINCOS:
5554 case BUILT_IN_SINCOSF:
5555 case BUILT_IN_SINCOSL:
5557 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5558 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5559 return;
5561 /* The following functions clobber their second argument. */
5562 case BUILT_IN_FREXP:
5563 case BUILT_IN_FREXPF:
5564 case BUILT_IN_FREXPL:
5565 case BUILT_IN_LGAMMA_R:
5566 case BUILT_IN_LGAMMAF_R:
5567 case BUILT_IN_LGAMMAL_R:
5568 case BUILT_IN_GAMMA_R:
5569 case BUILT_IN_GAMMAF_R:
5570 case BUILT_IN_GAMMAL_R:
5571 case BUILT_IN_MODF:
5572 case BUILT_IN_MODFF:
5573 case BUILT_IN_MODFL:
5575 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5576 return;
5578 /* The following functions clobber their third argument. */
5579 case BUILT_IN_REMQUO:
5580 case BUILT_IN_REMQUOF:
5581 case BUILT_IN_REMQUOL:
5583 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5584 return;
5586 /* The following functions neither read nor clobber memory. */
5587 case BUILT_IN_ASSUME_ALIGNED:
5588 case BUILT_IN_FREE:
5589 return;
5590 /* Trampolines are of no interest to us. */
5591 case BUILT_IN_INIT_TRAMPOLINE:
5592 case BUILT_IN_ADJUST_TRAMPOLINE:
5593 return;
5594 case BUILT_IN_VA_START:
5595 case BUILT_IN_VA_END:
5596 return;
5597 case BUILT_IN_GOMP_PARALLEL:
5598 case BUILT_IN_GOACC_PARALLEL:
5600 unsigned int fnpos, argpos;
5601 unsigned int implicit_use_args[2];
5602 unsigned int num_implicit_use_args = 0;
5603 switch (DECL_FUNCTION_CODE (decl))
5605 case BUILT_IN_GOMP_PARALLEL:
5606 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
5607 fnpos = 0;
5608 argpos = 1;
5609 break;
5610 case BUILT_IN_GOACC_PARALLEL:
5611 /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
5612 sizes, kinds, ...). */
5613 fnpos = 1;
5614 argpos = 3;
5615 implicit_use_args[num_implicit_use_args++] = 4;
5616 implicit_use_args[num_implicit_use_args++] = 5;
5617 break;
5618 default:
5619 gcc_unreachable ();
5622 tree fnarg = gimple_call_arg (t, fnpos);
5623 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
5624 tree fndecl = TREE_OPERAND (fnarg, 0);
5625 if (fndecl_maybe_in_other_partition (fndecl))
5626 /* Fallthru to general call handling. */
5627 break;
5629 varinfo_t cfi = get_vi_for_tree (fndecl);
5631 tree arg = gimple_call_arg (t, argpos);
5633 /* Parameter passed by value is used. */
5634 lhs = get_function_part_constraint (fi, fi_uses);
5635 struct constraint_expr *rhsp;
5636 get_constraint_for (arg, &rhsc);
5637 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5638 process_constraint (new_constraint (lhs, *rhsp));
5639 rhsc.truncate (0);
5641 /* Handle parameters used by the call, but not used in cfi, as
5642 implicitly used by cfi. */
5643 lhs = get_function_part_constraint (cfi, fi_uses);
5644 for (unsigned i = 0; i < num_implicit_use_args; ++i)
5646 tree arg = gimple_call_arg (t, implicit_use_args[i]);
5647 get_constraint_for (arg, &rhsc);
5648 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5649 process_constraint (new_constraint (lhs, *rhsp));
5650 rhsc.truncate (0);
5653 /* The caller clobbers what the callee does. */
5654 lhs = get_function_part_constraint (fi, fi_clobbers);
5655 rhs = get_function_part_constraint (cfi, fi_clobbers);
5656 process_constraint (new_constraint (lhs, rhs));
5658 /* The caller uses what the callee does. */
5659 lhs = get_function_part_constraint (fi, fi_uses);
5660 rhs = get_function_part_constraint (cfi, fi_uses);
5661 process_constraint (new_constraint (lhs, rhs));
5663 return;
5665 /* printf-style functions may have hooks to set pointers to
5666 point to somewhere into the generated string. Leave them
5667 for a later exercise... */
5668 default:
5669 /* Fallthru to general call handling. */;
5672 /* Parameters passed by value are used. */
5673 lhs = get_function_part_constraint (fi, fi_uses);
5674 for (i = 0; i < gimple_call_num_args (t); i++)
5676 struct constraint_expr *rhsp;
5677 tree arg = gimple_call_arg (t, i);
5679 if (TREE_CODE (arg) == SSA_NAME
5680 || is_gimple_min_invariant (arg))
5681 continue;
5683 get_constraint_for_address_of (arg, &rhsc);
5684 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5685 process_constraint (new_constraint (lhs, *rhsp));
5686 rhsc.truncate (0);
5689 /* Build constraints for propagating clobbers/uses along the
5690 callgraph edges. */
5691 cfi = get_fi_for_callee (call_stmt);
5692 if (cfi->id == anything_id)
5694 if (gimple_vdef (t))
5695 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5696 anything_id);
5697 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5698 anything_id);
5699 return;
5702 /* For callees without function info (that's external functions),
5703 ESCAPED is clobbered and used. */
5704 if (cfi->decl
5705 && TREE_CODE (cfi->decl) == FUNCTION_DECL
5706 && !cfi->is_fn_info)
5708 varinfo_t vi;
5710 if (gimple_vdef (t))
5711 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5712 escaped_id);
5713 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
5715 /* Also honor the call statement use/clobber info. */
5716 if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
5717 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5718 vi->id);
5719 if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
5720 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
5721 vi->id);
5722 return;
5725 /* Otherwise the caller clobbers and uses what the callee does.
5726 ??? This should use a new complex constraint that filters
5727 local variables of the callee. */
5728 if (gimple_vdef (t))
5730 lhs = get_function_part_constraint (fi, fi_clobbers);
5731 rhs = get_function_part_constraint (cfi, fi_clobbers);
5732 process_constraint (new_constraint (lhs, rhs));
5734 lhs = get_function_part_constraint (fi, fi_uses);
5735 rhs = get_function_part_constraint (cfi, fi_uses);
5736 process_constraint (new_constraint (lhs, rhs));
5738 else if (gimple_code (t) == GIMPLE_ASM)
5740 /* ??? Ick. We can do better. */
5741 if (gimple_vdef (t))
5742 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5743 anything_id);
5744 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5745 anything_id);
5750 /* Find the first varinfo in the same variable as START that overlaps with
5751 OFFSET. Return NULL if we can't find one. */
5753 static varinfo_t
5754 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5756 /* If the offset is outside of the variable, bail out. */
5757 if (offset >= start->fullsize)
5758 return NULL;
5760 /* If we cannot reach offset from start, lookup the first field
5761 and start from there. */
5762 if (start->offset > offset)
5763 start = get_varinfo (start->head);
5765 while (start)
5767 /* We may not find a variable in the field list with the actual
5768 offset when we have glommed a structure to a variable.
5769 In that case, however, offset should still be within the size
5770 of the variable. */
5771 if (offset >= start->offset
5772 && (offset - start->offset) < start->size)
5773 return start;
5775 start = vi_next (start);
5778 return NULL;
5781 /* Find the first varinfo in the same variable as START that overlaps with
5782 OFFSET. If there is no such varinfo the varinfo directly preceding
5783 OFFSET is returned. */
5785 static varinfo_t
5786 first_or_preceding_vi_for_offset (varinfo_t start,
5787 unsigned HOST_WIDE_INT offset)
5789 /* If we cannot reach offset from start, lookup the first field
5790 and start from there. */
5791 if (start->offset > offset)
5792 start = get_varinfo (start->head);
5794 /* We may not find a variable in the field list with the actual
5795 offset when we have glommed a structure to a variable.
5796 In that case, however, offset should still be within the size
5797 of the variable.
5798 If we got beyond the offset we look for return the field
5799 directly preceding offset which may be the last field. */
5800 while (start->next
5801 && offset >= start->offset
5802 && !((offset - start->offset) < start->size))
5803 start = vi_next (start);
5805 return start;
5809 /* This structure is used during pushing fields onto the fieldstack
5810 to track the offset of the field, since bitpos_of_field gives it
5811 relative to its immediate containing type, and we want it relative
5812 to the ultimate containing object. */
5814 struct fieldoff
5816 /* Offset from the base of the base containing object to this field. */
5817 HOST_WIDE_INT offset;
5819 /* Size, in bits, of the field. */
5820 unsigned HOST_WIDE_INT size;
5822 unsigned has_unknown_size : 1;
5824 unsigned must_have_pointers : 1;
5826 unsigned may_have_pointers : 1;
5828 unsigned only_restrict_pointers : 1;
5830 tree restrict_pointed_type;
5832 typedef struct fieldoff fieldoff_s;
5835 /* qsort comparison function for two fieldoff's PA and PB */
5837 static int
5838 fieldoff_compare (const void *pa, const void *pb)
5840 const fieldoff_s *foa = (const fieldoff_s *)pa;
5841 const fieldoff_s *fob = (const fieldoff_s *)pb;
5842 unsigned HOST_WIDE_INT foasize, fobsize;
5844 if (foa->offset < fob->offset)
5845 return -1;
5846 else if (foa->offset > fob->offset)
5847 return 1;
5849 foasize = foa->size;
5850 fobsize = fob->size;
5851 if (foasize < fobsize)
5852 return -1;
5853 else if (foasize > fobsize)
5854 return 1;
5855 return 0;
5858 /* Sort a fieldstack according to the field offset and sizes. */
5859 static void
5860 sort_fieldstack (vec<fieldoff_s> &fieldstack)
5862 fieldstack.qsort (fieldoff_compare);
5865 /* Return true if T is a type that can have subvars. */
5867 static inline bool
5868 type_can_have_subvars (const_tree t)
5870 /* Aggregates without overlapping fields can have subvars. */
5871 return TREE_CODE (t) == RECORD_TYPE;
5874 /* Return true if V is a tree that we can have subvars for.
5875 Normally, this is any aggregate type. Also complex
5876 types which are not gimple registers can have subvars. */
5878 static inline bool
5879 var_can_have_subvars (const_tree v)
5881 /* Volatile variables should never have subvars. */
5882 if (TREE_THIS_VOLATILE (v))
5883 return false;
5885 /* Non decls or memory tags can never have subvars. */
5886 if (!DECL_P (v))
5887 return false;
5889 return type_can_have_subvars (TREE_TYPE (v));
5892 /* Return true if T is a type that does contain pointers. */
5894 static bool
5895 type_must_have_pointers (tree type)
5897 if (POINTER_TYPE_P (type))
5898 return true;
5900 if (TREE_CODE (type) == ARRAY_TYPE)
5901 return type_must_have_pointers (TREE_TYPE (type));
5903 /* A function or method can have pointers as arguments, so track
5904 those separately. */
5905 if (FUNC_OR_METHOD_TYPE_P (type))
5906 return true;
5908 return false;
5911 static bool
5912 field_must_have_pointers (tree t)
5914 return type_must_have_pointers (TREE_TYPE (t));
5917 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5918 the fields of TYPE onto fieldstack, recording their offsets along
5919 the way.
5921 OFFSET is used to keep track of the offset in this entire
5922 structure, rather than just the immediately containing structure.
5923 Returns false if the caller is supposed to handle the field we
5924 recursed for. */
5926 static bool
5927 push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
5928 HOST_WIDE_INT offset)
5930 tree field;
5931 bool empty_p = true;
5933 if (TREE_CODE (type) != RECORD_TYPE)
5934 return false;
5936 /* If the vector of fields is growing too big, bail out early.
5937 Callers check for vec::length <= param_max_fields_for_field_sensitive, make
5938 sure this fails. */
5939 if (fieldstack->length () > (unsigned)param_max_fields_for_field_sensitive)
5940 return false;
5942 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5943 if (TREE_CODE (field) == FIELD_DECL)
5945 bool push = false;
5946 HOST_WIDE_INT foff = bitpos_of_field (field);
5947 tree field_type = TREE_TYPE (field);
5949 if (!var_can_have_subvars (field)
5950 || TREE_CODE (field_type) == QUAL_UNION_TYPE
5951 || TREE_CODE (field_type) == UNION_TYPE)
5952 push = true;
5953 else if (!push_fields_onto_fieldstack
5954 (field_type, fieldstack, offset + foff)
5955 && (DECL_SIZE (field)
5956 && !integer_zerop (DECL_SIZE (field))))
5957 /* Empty structures may have actual size, like in C++. So
5958 see if we didn't push any subfields and the size is
5959 nonzero, push the field onto the stack. */
5960 push = true;
5962 if (push)
5964 fieldoff_s *pair = NULL;
5965 bool has_unknown_size = false;
5966 bool must_have_pointers_p;
5968 if (!fieldstack->is_empty ())
5969 pair = &fieldstack->last ();
5971 /* If there isn't anything at offset zero, create sth. */
5972 if (!pair
5973 && offset + foff != 0)
5975 fieldoff_s e
5976 = {0, offset + foff, false, false, true, false, NULL_TREE};
5977 pair = fieldstack->safe_push (e);
5980 if (!DECL_SIZE (field)
5981 || !tree_fits_uhwi_p (DECL_SIZE (field)))
5982 has_unknown_size = true;
5984 /* If adjacent fields do not contain pointers merge them. */
5985 must_have_pointers_p = field_must_have_pointers (field);
5986 if (pair
5987 && !has_unknown_size
5988 && !must_have_pointers_p
5989 && !pair->must_have_pointers
5990 && !pair->has_unknown_size
5991 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5993 pair->size += tree_to_uhwi (DECL_SIZE (field));
5995 else
5997 fieldoff_s e;
5998 e.offset = offset + foff;
5999 e.has_unknown_size = has_unknown_size;
6000 if (!has_unknown_size)
6001 e.size = tree_to_uhwi (DECL_SIZE (field));
6002 else
6003 e.size = -1;
6004 e.must_have_pointers = must_have_pointers_p;
6005 e.may_have_pointers = true;
6006 e.only_restrict_pointers
6007 = (!has_unknown_size
6008 && POINTER_TYPE_P (field_type)
6009 && TYPE_RESTRICT (field_type));
6010 if (e.only_restrict_pointers)
6011 e.restrict_pointed_type = TREE_TYPE (field_type);
6012 fieldstack->safe_push (e);
6016 empty_p = false;
6019 return !empty_p;
6022 /* Count the number of arguments DECL has, and set IS_VARARGS to true
6023 if it is a varargs function. */
6025 static unsigned int
6026 count_num_arguments (tree decl, bool *is_varargs)
6028 unsigned int num = 0;
6029 tree t;
6031 /* Capture named arguments for K&R functions. They do not
6032 have a prototype and thus no TYPE_ARG_TYPES. */
6033 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
6034 ++num;
6036 /* Check if the function has variadic arguments. */
6037 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
6038 if (TREE_VALUE (t) == void_type_node)
6039 break;
6040 if (!t)
6041 *is_varargs = true;
6043 return num;
6046 /* Creation function node for DECL, using NAME, and return the index
6047 of the variable we've created for the function. If NONLOCAL_p, create
6048 initial constraints. */
6050 static varinfo_t
6051 create_function_info_for (tree decl, const char *name, bool add_id,
6052 bool nonlocal_p)
6054 struct function *fn = DECL_STRUCT_FUNCTION (decl);
6055 varinfo_t vi, prev_vi;
6056 tree arg;
6057 unsigned int i;
6058 bool is_varargs = false;
6059 unsigned int num_args = count_num_arguments (decl, &is_varargs);
6061 /* Create the variable info. */
6063 vi = new_var_info (decl, name, add_id);
6064 vi->offset = 0;
6065 vi->size = 1;
6066 vi->fullsize = fi_parm_base + num_args;
6067 vi->is_fn_info = 1;
6068 vi->may_have_pointers = false;
6069 if (is_varargs)
6070 vi->fullsize = ~0;
6071 insert_vi_for_tree (vi->decl, vi);
6073 prev_vi = vi;
6075 /* Create a variable for things the function clobbers and one for
6076 things the function uses. */
6078 varinfo_t clobbervi, usevi;
6079 const char *newname;
6080 char *tempname;
6082 tempname = xasprintf ("%s.clobber", name);
6083 newname = ggc_strdup (tempname);
6084 free (tempname);
6086 clobbervi = new_var_info (NULL, newname, false);
6087 clobbervi->offset = fi_clobbers;
6088 clobbervi->size = 1;
6089 clobbervi->fullsize = vi->fullsize;
6090 clobbervi->is_full_var = true;
6091 clobbervi->is_global_var = false;
6092 clobbervi->is_reg_var = true;
6094 gcc_assert (prev_vi->offset < clobbervi->offset);
6095 prev_vi->next = clobbervi->id;
6096 prev_vi = clobbervi;
6098 tempname = xasprintf ("%s.use", name);
6099 newname = ggc_strdup (tempname);
6100 free (tempname);
6102 usevi = new_var_info (NULL, newname, false);
6103 usevi->offset = fi_uses;
6104 usevi->size = 1;
6105 usevi->fullsize = vi->fullsize;
6106 usevi->is_full_var = true;
6107 usevi->is_global_var = false;
6108 usevi->is_reg_var = true;
6110 gcc_assert (prev_vi->offset < usevi->offset);
6111 prev_vi->next = usevi->id;
6112 prev_vi = usevi;
6115 /* And one for the static chain. */
6116 if (fn->static_chain_decl != NULL_TREE)
6118 varinfo_t chainvi;
6119 const char *newname;
6120 char *tempname;
6122 tempname = xasprintf ("%s.chain", name);
6123 newname = ggc_strdup (tempname);
6124 free (tempname);
6126 chainvi = new_var_info (fn->static_chain_decl, newname, false);
6127 chainvi->offset = fi_static_chain;
6128 chainvi->size = 1;
6129 chainvi->fullsize = vi->fullsize;
6130 chainvi->is_full_var = true;
6131 chainvi->is_global_var = false;
6133 insert_vi_for_tree (fn->static_chain_decl, chainvi);
6135 if (nonlocal_p
6136 && chainvi->may_have_pointers)
6137 make_constraint_from (chainvi, nonlocal_id);
6139 gcc_assert (prev_vi->offset < chainvi->offset);
6140 prev_vi->next = chainvi->id;
6141 prev_vi = chainvi;
6144 /* Create a variable for the return var. */
6145 if (DECL_RESULT (decl) != NULL
6146 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
6148 varinfo_t resultvi;
6149 const char *newname;
6150 char *tempname;
6151 tree resultdecl = decl;
6153 if (DECL_RESULT (decl))
6154 resultdecl = DECL_RESULT (decl);
6156 tempname = xasprintf ("%s.result", name);
6157 newname = ggc_strdup (tempname);
6158 free (tempname);
6160 resultvi = new_var_info (resultdecl, newname, false);
6161 resultvi->offset = fi_result;
6162 resultvi->size = 1;
6163 resultvi->fullsize = vi->fullsize;
6164 resultvi->is_full_var = true;
6165 if (DECL_RESULT (decl))
6166 resultvi->may_have_pointers = true;
6168 if (DECL_RESULT (decl))
6169 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
6171 if (nonlocal_p
6172 && DECL_RESULT (decl)
6173 && DECL_BY_REFERENCE (DECL_RESULT (decl)))
6174 make_constraint_from (resultvi, nonlocal_id);
6176 gcc_assert (prev_vi->offset < resultvi->offset);
6177 prev_vi->next = resultvi->id;
6178 prev_vi = resultvi;
6181 /* We also need to make function return values escape. Nothing
6182 escapes by returning from main though. */
6183 if (nonlocal_p
6184 && !MAIN_NAME_P (DECL_NAME (decl)))
6186 varinfo_t fi, rvi;
6187 fi = lookup_vi_for_tree (decl);
6188 rvi = first_vi_for_offset (fi, fi_result);
6189 if (rvi && rvi->offset == fi_result)
6190 make_copy_constraint (get_varinfo (escaped_id), rvi->id);
6193 /* Set up variables for each argument. */
6194 arg = DECL_ARGUMENTS (decl);
6195 for (i = 0; i < num_args; i++)
6197 varinfo_t argvi;
6198 const char *newname;
6199 char *tempname;
6200 tree argdecl = decl;
6202 if (arg)
6203 argdecl = arg;
6205 tempname = xasprintf ("%s.arg%d", name, i);
6206 newname = ggc_strdup (tempname);
6207 free (tempname);
6209 argvi = new_var_info (argdecl, newname, false);
6210 argvi->offset = fi_parm_base + i;
6211 argvi->size = 1;
6212 argvi->is_full_var = true;
6213 argvi->fullsize = vi->fullsize;
6214 if (arg)
6215 argvi->may_have_pointers = true;
6217 if (arg)
6218 insert_vi_for_tree (arg, argvi);
6220 if (nonlocal_p
6221 && argvi->may_have_pointers)
6222 make_constraint_from (argvi, nonlocal_id);
6224 gcc_assert (prev_vi->offset < argvi->offset);
6225 prev_vi->next = argvi->id;
6226 prev_vi = argvi;
6227 if (arg)
6228 arg = DECL_CHAIN (arg);
6231 /* Add one representative for all further args. */
6232 if (is_varargs)
6234 varinfo_t argvi;
6235 const char *newname;
6236 char *tempname;
6237 tree decl;
6239 tempname = xasprintf ("%s.varargs", name);
6240 newname = ggc_strdup (tempname);
6241 free (tempname);
6243 /* We need sth that can be pointed to for va_start. */
6244 decl = build_fake_var_decl (ptr_type_node);
6246 argvi = new_var_info (decl, newname, false);
6247 argvi->offset = fi_parm_base + num_args;
6248 argvi->size = ~0;
6249 argvi->is_full_var = true;
6250 argvi->is_heap_var = true;
6251 argvi->fullsize = vi->fullsize;
6253 if (nonlocal_p
6254 && argvi->may_have_pointers)
6255 make_constraint_from (argvi, nonlocal_id);
6257 gcc_assert (prev_vi->offset < argvi->offset);
6258 prev_vi->next = argvi->id;
6261 return vi;
6265 /* Return true if FIELDSTACK contains fields that overlap.
6266 FIELDSTACK is assumed to be sorted by offset. */
6268 static bool
6269 check_for_overlaps (const vec<fieldoff_s> &fieldstack)
6271 fieldoff_s *fo = NULL;
6272 unsigned int i;
6273 HOST_WIDE_INT lastoffset = -1;
6275 FOR_EACH_VEC_ELT (fieldstack, i, fo)
6277 if (fo->offset == lastoffset)
6278 return true;
6279 lastoffset = fo->offset;
6281 return false;
6284 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
6285 This will also create any varinfo structures necessary for fields
6286 of DECL. DECL is a function parameter if HANDLE_PARAM is set.
6287 HANDLED_STRUCT_TYPE is used to register struct types reached by following
6288 restrict pointers. This is needed to prevent infinite recursion.
6289 If ADD_RESTRICT, pretend that the pointer NAME is restrict even if DECL
6290 does not advertise it. */
6292 static varinfo_t
6293 create_variable_info_for_1 (tree decl, const char *name, bool add_id,
6294 bool handle_param, bitmap handled_struct_type,
6295 bool add_restrict = false)
6297 varinfo_t vi, newvi;
6298 tree decl_type = TREE_TYPE (decl);
6299 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
6300 auto_vec<fieldoff_s> fieldstack;
6301 fieldoff_s *fo;
6302 unsigned int i;
6304 if (!declsize
6305 || !tree_fits_uhwi_p (declsize))
6307 vi = new_var_info (decl, name, add_id);
6308 vi->offset = 0;
6309 vi->size = ~0;
6310 vi->fullsize = ~0;
6311 vi->is_unknown_size_var = true;
6312 vi->is_full_var = true;
6313 vi->may_have_pointers = true;
6314 return vi;
6317 /* Collect field information. */
6318 if (use_field_sensitive
6319 && var_can_have_subvars (decl)
6320 /* ??? Force us to not use subfields for globals in IPA mode.
6321 Else we'd have to parse arbitrary initializers. */
6322 && !(in_ipa_mode
6323 && is_global_var (decl)))
6325 fieldoff_s *fo = NULL;
6326 bool notokay = false;
6327 unsigned int i;
6329 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
6331 for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
6332 if (fo->has_unknown_size
6333 || fo->offset < 0)
6335 notokay = true;
6336 break;
6339 /* We can't sort them if we have a field with a variable sized type,
6340 which will make notokay = true. In that case, we are going to return
6341 without creating varinfos for the fields anyway, so sorting them is a
6342 waste to boot. */
6343 if (!notokay)
6345 sort_fieldstack (fieldstack);
6346 /* Due to some C++ FE issues, like PR 22488, we might end up
6347 what appear to be overlapping fields even though they,
6348 in reality, do not overlap. Until the C++ FE is fixed,
6349 we will simply disable field-sensitivity for these cases. */
6350 notokay = check_for_overlaps (fieldstack);
6353 if (notokay)
6354 fieldstack.release ();
6357 /* If we didn't end up collecting sub-variables create a full
6358 variable for the decl. */
6359 if (fieldstack.length () == 0
6360 || fieldstack.length () > (unsigned)param_max_fields_for_field_sensitive)
6362 vi = new_var_info (decl, name, add_id);
6363 vi->offset = 0;
6364 vi->may_have_pointers = true;
6365 vi->fullsize = tree_to_uhwi (declsize);
6366 vi->size = vi->fullsize;
6367 vi->is_full_var = true;
6368 if (POINTER_TYPE_P (decl_type)
6369 && (TYPE_RESTRICT (decl_type) || add_restrict))
6370 vi->only_restrict_pointers = 1;
6371 if (vi->only_restrict_pointers
6372 && !type_contains_placeholder_p (TREE_TYPE (decl_type))
6373 && handle_param
6374 && !bitmap_bit_p (handled_struct_type,
6375 TYPE_UID (TREE_TYPE (decl_type))))
6377 varinfo_t rvi;
6378 tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type));
6379 DECL_EXTERNAL (heapvar) = 1;
6380 if (var_can_have_subvars (heapvar))
6381 bitmap_set_bit (handled_struct_type,
6382 TYPE_UID (TREE_TYPE (decl_type)));
6383 rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
6384 true, handled_struct_type);
6385 if (var_can_have_subvars (heapvar))
6386 bitmap_clear_bit (handled_struct_type,
6387 TYPE_UID (TREE_TYPE (decl_type)));
6388 rvi->is_restrict_var = 1;
6389 insert_vi_for_tree (heapvar, rvi);
6390 make_constraint_from (vi, rvi->id);
6391 make_param_constraints (rvi);
6393 fieldstack.release ();
6394 return vi;
6397 vi = new_var_info (decl, name, add_id);
6398 vi->fullsize = tree_to_uhwi (declsize);
6399 if (fieldstack.length () == 1)
6400 vi->is_full_var = true;
6401 for (i = 0, newvi = vi;
6402 fieldstack.iterate (i, &fo);
6403 ++i, newvi = vi_next (newvi))
6405 const char *newname = NULL;
6406 char *tempname;
6408 if (dump_file)
6410 if (fieldstack.length () != 1)
6412 tempname
6413 = xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
6414 "+" HOST_WIDE_INT_PRINT_DEC, name,
6415 fo->offset, fo->size);
6416 newname = ggc_strdup (tempname);
6417 free (tempname);
6420 else
6421 newname = "NULL";
6423 if (newname)
6424 newvi->name = newname;
6425 newvi->offset = fo->offset;
6426 newvi->size = fo->size;
6427 newvi->fullsize = vi->fullsize;
6428 newvi->may_have_pointers = fo->may_have_pointers;
6429 newvi->only_restrict_pointers = fo->only_restrict_pointers;
6430 if (handle_param
6431 && newvi->only_restrict_pointers
6432 && !type_contains_placeholder_p (fo->restrict_pointed_type)
6433 && !bitmap_bit_p (handled_struct_type,
6434 TYPE_UID (fo->restrict_pointed_type)))
6436 varinfo_t rvi;
6437 tree heapvar = build_fake_var_decl (fo->restrict_pointed_type);
6438 DECL_EXTERNAL (heapvar) = 1;
6439 if (var_can_have_subvars (heapvar))
6440 bitmap_set_bit (handled_struct_type,
6441 TYPE_UID (fo->restrict_pointed_type));
6442 rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
6443 true, handled_struct_type);
6444 if (var_can_have_subvars (heapvar))
6445 bitmap_clear_bit (handled_struct_type,
6446 TYPE_UID (fo->restrict_pointed_type));
6447 rvi->is_restrict_var = 1;
6448 insert_vi_for_tree (heapvar, rvi);
6449 make_constraint_from (newvi, rvi->id);
6450 make_param_constraints (rvi);
6452 if (i + 1 < fieldstack.length ())
6454 varinfo_t tem = new_var_info (decl, name, false);
6455 newvi->next = tem->id;
6456 tem->head = vi->id;
6460 return vi;
6463 static unsigned int
6464 create_variable_info_for (tree decl, const char *name, bool add_id)
6466 /* First see if we are dealing with an ifunc resolver call and
6467 assiociate that with a call to the resolver function result. */
6468 cgraph_node *node;
6469 if (in_ipa_mode
6470 && TREE_CODE (decl) == FUNCTION_DECL
6471 && (node = cgraph_node::get (decl))
6472 && node->ifunc_resolver)
6474 varinfo_t fi = get_vi_for_tree (node->get_alias_target ()->decl);
6475 constraint_expr rhs
6476 = get_function_part_constraint (fi, fi_result);
6477 fi = new_var_info (NULL_TREE, "ifuncres", true);
6478 fi->is_reg_var = true;
6479 constraint_expr lhs;
6480 lhs.type = SCALAR;
6481 lhs.var = fi->id;
6482 lhs.offset = 0;
6483 process_constraint (new_constraint (lhs, rhs));
6484 insert_vi_for_tree (decl, fi);
6485 return fi->id;
6488 varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, false, NULL);
6489 unsigned int id = vi->id;
6491 insert_vi_for_tree (decl, vi);
6493 if (!VAR_P (decl))
6494 return id;
6496 /* Create initial constraints for globals. */
6497 for (; vi; vi = vi_next (vi))
6499 if (!vi->may_have_pointers
6500 || !vi->is_global_var)
6501 continue;
6503 /* Mark global restrict qualified pointers. */
6504 if ((POINTER_TYPE_P (TREE_TYPE (decl))
6505 && TYPE_RESTRICT (TREE_TYPE (decl)))
6506 || vi->only_restrict_pointers)
6508 varinfo_t rvi
6509 = make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT",
6510 true);
6511 /* ??? For now exclude reads from globals as restrict sources
6512 if those are not (indirectly) from incoming parameters. */
6513 rvi->is_restrict_var = false;
6514 continue;
6517 /* In non-IPA mode the initializer from nonlocal is all we need. */
6518 if (!in_ipa_mode
6519 || DECL_HARD_REGISTER (decl))
6520 make_copy_constraint (vi, nonlocal_id);
6522 /* In IPA mode parse the initializer and generate proper constraints
6523 for it. */
6524 else
6526 varpool_node *vnode = varpool_node::get (decl);
6528 /* For escaped variables initialize them from nonlocal. */
6529 if (!vnode->all_refs_explicit_p ())
6530 make_copy_constraint (vi, nonlocal_id);
6532 /* If this is a global variable with an initializer and we are in
6533 IPA mode generate constraints for it. */
6534 ipa_ref *ref;
6535 for (unsigned idx = 0; vnode->iterate_reference (idx, ref); ++idx)
6537 auto_vec<ce_s> rhsc;
6538 struct constraint_expr lhs, *rhsp;
6539 unsigned i;
6540 get_constraint_for_address_of (ref->referred->decl, &rhsc);
6541 lhs.var = vi->id;
6542 lhs.offset = 0;
6543 lhs.type = SCALAR;
6544 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6545 process_constraint (new_constraint (lhs, *rhsp));
6546 /* If this is a variable that escapes from the unit
6547 the initializer escapes as well. */
6548 if (!vnode->all_refs_explicit_p ())
6550 lhs.var = escaped_id;
6551 lhs.offset = 0;
6552 lhs.type = SCALAR;
6553 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6554 process_constraint (new_constraint (lhs, *rhsp));
6560 return id;
6563 /* Print out the points-to solution for VAR to FILE. */
6565 static void
6566 dump_solution_for_var (FILE *file, unsigned int var)
6568 varinfo_t vi = get_varinfo (var);
6569 unsigned int i;
6570 bitmap_iterator bi;
6572 /* Dump the solution for unified vars anyway, this avoids difficulties
6573 in scanning dumps in the testsuite. */
6574 fprintf (file, "%s = { ", vi->name);
6575 vi = get_varinfo (find (var));
6576 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6577 fprintf (file, "%s ", get_varinfo (i)->name);
6578 fprintf (file, "}");
6580 /* But note when the variable was unified. */
6581 if (vi->id != var)
6582 fprintf (file, " same as %s", vi->name);
6584 fprintf (file, "\n");
6587 /* Print the points-to solution for VAR to stderr. */
6589 DEBUG_FUNCTION void
6590 debug_solution_for_var (unsigned int var)
6592 dump_solution_for_var (stderr, var);
6595 /* Register the constraints for function parameter related VI. */
6597 static void
6598 make_param_constraints (varinfo_t vi)
6600 for (; vi; vi = vi_next (vi))
6602 if (vi->only_restrict_pointers)
6604 else if (vi->may_have_pointers)
6605 make_constraint_from (vi, nonlocal_id);
6607 if (vi->is_full_var)
6608 break;
6612 /* Create varinfo structures for all of the variables in the
6613 function for intraprocedural mode. */
6615 static void
6616 intra_create_variable_infos (struct function *fn)
6618 tree t;
6619 bitmap handled_struct_type = NULL;
6620 bool this_parm_in_ctor = DECL_CXX_CONSTRUCTOR_P (fn->decl);
6622 /* For each incoming pointer argument arg, create the constraint ARG
6623 = NONLOCAL or a dummy variable if it is a restrict qualified
6624 passed-by-reference argument. */
6625 for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
6627 if (handled_struct_type == NULL)
6628 handled_struct_type = BITMAP_ALLOC (NULL);
6630 varinfo_t p
6631 = create_variable_info_for_1 (t, alias_get_name (t), false, true,
6632 handled_struct_type, this_parm_in_ctor);
6633 insert_vi_for_tree (t, p);
6635 make_param_constraints (p);
6637 this_parm_in_ctor = false;
6640 if (handled_struct_type != NULL)
6641 BITMAP_FREE (handled_struct_type);
6643 /* Add a constraint for a result decl that is passed by reference. */
6644 if (DECL_RESULT (fn->decl)
6645 && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
6647 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));
6649 for (p = result_vi; p; p = vi_next (p))
6650 make_constraint_from (p, nonlocal_id);
6653 /* Add a constraint for the incoming static chain parameter. */
6654 if (fn->static_chain_decl != NULL_TREE)
6656 varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);
6658 for (p = chain_vi; p; p = vi_next (p))
6659 make_constraint_from (p, nonlocal_id);
6663 /* Structure used to put solution bitmaps in a hashtable so they can
6664 be shared among variables with the same points-to set. */
6666 typedef struct shared_bitmap_info
6668 bitmap pt_vars;
6669 hashval_t hashcode;
6670 } *shared_bitmap_info_t;
6671 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
6673 /* Shared_bitmap hashtable helpers. */
6675 struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
6677 static inline hashval_t hash (const shared_bitmap_info *);
6678 static inline bool equal (const shared_bitmap_info *,
6679 const shared_bitmap_info *);
6682 /* Hash function for a shared_bitmap_info_t */
6684 inline hashval_t
6685 shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
6687 return bi->hashcode;
6690 /* Equality function for two shared_bitmap_info_t's. */
6692 inline bool
6693 shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
6694 const shared_bitmap_info *sbi2)
6696 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
6699 /* Shared_bitmap hashtable. */
6701 static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
6703 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
6704 existing instance if there is one, NULL otherwise. */
6706 static bitmap
6707 shared_bitmap_lookup (bitmap pt_vars)
6709 shared_bitmap_info **slot;
6710 struct shared_bitmap_info sbi;
6712 sbi.pt_vars = pt_vars;
6713 sbi.hashcode = bitmap_hash (pt_vars);
6715 slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
6716 if (!slot)
6717 return NULL;
6718 else
6719 return (*slot)->pt_vars;
6723 /* Add a bitmap to the shared bitmap hashtable. */
6725 static void
6726 shared_bitmap_add (bitmap pt_vars)
6728 shared_bitmap_info **slot;
6729 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
6731 sbi->pt_vars = pt_vars;
6732 sbi->hashcode = bitmap_hash (pt_vars);
6734 slot = shared_bitmap_table->find_slot (sbi, INSERT);
6735 gcc_assert (!*slot);
6736 *slot = sbi;
6740 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
6742 static void
6743 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
6744 tree fndecl)
6746 unsigned int i;
6747 bitmap_iterator bi;
6748 varinfo_t escaped_vi = get_varinfo (find (escaped_id));
6749 varinfo_t escaped_return_vi = get_varinfo (find (escaped_return_id));
6750 bool everything_escaped
6751 = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);
6753 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
6755 varinfo_t vi = get_varinfo (i);
6757 if (vi->is_artificial_var)
6758 continue;
6760 if (everything_escaped
6761 || (escaped_vi->solution
6762 && bitmap_bit_p (escaped_vi->solution, i)))
6764 pt->vars_contains_escaped = true;
6765 pt->vars_contains_escaped_heap |= vi->is_heap_var;
6767 if (escaped_return_vi->solution
6768 && bitmap_bit_p (escaped_return_vi->solution, i))
6769 pt->vars_contains_escaped_heap |= vi->is_heap_var;
6771 if (vi->is_restrict_var)
6772 pt->vars_contains_restrict = true;
6774 if (VAR_P (vi->decl)
6775 || TREE_CODE (vi->decl) == PARM_DECL
6776 || TREE_CODE (vi->decl) == RESULT_DECL)
6778 /* If we are in IPA mode we will not recompute points-to
6779 sets after inlining so make sure they stay valid. */
6780 if (in_ipa_mode
6781 && !DECL_PT_UID_SET_P (vi->decl))
6782 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
6784 /* Add the decl to the points-to set. Note that the points-to
6785 set contains global variables. */
6786 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
6787 if (vi->is_global_var
6788 /* In IPA mode the escaped_heap trick doesn't work as
6789 ESCAPED is escaped from the unit but
6790 pt_solution_includes_global needs to answer true for
6791 all variables not automatic within a function.
6792 For the same reason is_global_var is not the
6793 correct flag to track - local variables from other
6794 functions also need to be considered global.
6795 Conveniently all HEAP vars are not put in function
6796 scope. */
6797 || (in_ipa_mode
6798 && fndecl
6799 && ! auto_var_in_fn_p (vi->decl, fndecl)))
6800 pt->vars_contains_nonlocal = true;
6802 /* If we have a variable that is interposable record that fact
6803 for pointer comparison simplification. */
6804 if (VAR_P (vi->decl)
6805 && (TREE_STATIC (vi->decl) || DECL_EXTERNAL (vi->decl))
6806 && ! decl_binds_to_current_def_p (vi->decl))
6807 pt->vars_contains_interposable = true;
6809 /* If this is a local variable we can have overlapping lifetime
6810 of different function invocations through recursion duplicate
6811 it with its shadow variable. */
6812 if (in_ipa_mode
6813 && vi->shadow_var_uid != 0)
6815 bitmap_set_bit (into, vi->shadow_var_uid);
6816 pt->vars_contains_nonlocal = true;
6820 else if (TREE_CODE (vi->decl) == FUNCTION_DECL
6821 || TREE_CODE (vi->decl) == LABEL_DECL)
6823 /* Nothing should read/write from/to code so we can
6824 save bits by not including them in the points-to bitmaps.
6825 Still mark the points-to set as containing global memory
6826 to make code-patching possible - see PR70128. */
6827 pt->vars_contains_nonlocal = true;
6833 /* Compute the points-to solution *PT for the variable VI. */
6835 static struct pt_solution
6836 find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
6838 unsigned int i;
6839 bitmap_iterator bi;
6840 bitmap finished_solution;
6841 bitmap result;
6842 varinfo_t vi;
6843 struct pt_solution *pt;
6845 /* This variable may have been collapsed, let's get the real
6846 variable. */
6847 vi = get_varinfo (find (orig_vi->id));
6849 /* See if we have already computed the solution and return it. */
6850 pt_solution **slot = &final_solutions->get_or_insert (vi);
6851 if (*slot != NULL)
6852 return **slot;
6854 *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
6855 memset (pt, 0, sizeof (struct pt_solution));
6857 /* Translate artificial variables into SSA_NAME_PTR_INFO
6858 attributes. */
6859 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6861 varinfo_t vi = get_varinfo (i);
6863 if (vi->is_artificial_var)
6865 if (vi->id == nothing_id)
6866 pt->null = 1;
6867 else if (vi->id == escaped_id)
6869 if (in_ipa_mode)
6870 pt->ipa_escaped = 1;
6871 else
6872 pt->escaped = 1;
6873 /* Expand some special vars of ESCAPED in-place here. */
6874 varinfo_t evi = get_varinfo (find (escaped_id));
6875 if (bitmap_bit_p (evi->solution, nonlocal_id))
6876 pt->nonlocal = 1;
6878 else if (vi->id == nonlocal_id)
6879 pt->nonlocal = 1;
6880 else if (vi->id == string_id)
6881 pt->const_pool = 1;
6882 else if (vi->id == anything_id
6883 || vi->id == integer_id)
6884 pt->anything = 1;
6888 /* Instead of doing extra work, simply do not create
6889 elaborate points-to information for pt_anything pointers. */
6890 if (pt->anything)
6891 return *pt;
6893 /* Share the final set of variables when possible. */
6894 finished_solution = BITMAP_GGC_ALLOC ();
6895 stats.points_to_sets_created++;
6897 set_uids_in_ptset (finished_solution, vi->solution, pt, fndecl);
6898 result = shared_bitmap_lookup (finished_solution);
6899 if (!result)
6901 shared_bitmap_add (finished_solution);
6902 pt->vars = finished_solution;
6904 else
6906 pt->vars = result;
6907 bitmap_clear (finished_solution);
6910 return *pt;
6913 /* Given a pointer variable P, fill in its points-to set. */
6915 static void
6916 find_what_p_points_to (tree fndecl, tree p)
6918 struct ptr_info_def *pi;
6919 tree lookup_p = p;
6920 varinfo_t vi;
6921 prange vr;
6922 get_range_query (DECL_STRUCT_FUNCTION (fndecl))->range_of_expr (vr, p);
6923 bool nonnull = vr.nonzero_p ();
6925 /* For parameters, get at the points-to set for the actual parm
6926 decl. */
6927 if (TREE_CODE (p) == SSA_NAME
6928 && SSA_NAME_IS_DEFAULT_DEF (p)
6929 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
6930 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
6931 lookup_p = SSA_NAME_VAR (p);
6933 vi = lookup_vi_for_tree (lookup_p);
6934 if (!vi)
6935 return;
6937 pi = get_ptr_info (p);
6938 pi->pt = find_what_var_points_to (fndecl, vi);
6939 /* Conservatively set to NULL from PTA (to true). */
6940 pi->pt.null = 1;
6941 /* Preserve pointer nonnull globally computed. */
6942 if (nonnull)
6943 set_ptr_nonnull (p);
6947 /* Query statistics for points-to solutions. */
6949 static struct {
6950 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
6951 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
6952 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
6953 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
6954 } pta_stats;
6956 void
6957 dump_pta_stats (FILE *s)
6959 fprintf (s, "\nPTA query stats:\n");
6960 fprintf (s, " pt_solution_includes: "
6961 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6962 HOST_WIDE_INT_PRINT_DEC" queries\n",
6963 pta_stats.pt_solution_includes_no_alias,
6964 pta_stats.pt_solution_includes_no_alias
6965 + pta_stats.pt_solution_includes_may_alias);
6966 fprintf (s, " pt_solutions_intersect: "
6967 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6968 HOST_WIDE_INT_PRINT_DEC" queries\n",
6969 pta_stats.pt_solutions_intersect_no_alias,
6970 pta_stats.pt_solutions_intersect_no_alias
6971 + pta_stats.pt_solutions_intersect_may_alias);
6975 /* Reset the points-to solution *PT to a conservative default
6976 (point to anything). */
6978 void
6979 pt_solution_reset (struct pt_solution *pt)
6981 memset (pt, 0, sizeof (struct pt_solution));
6982 pt->anything = true;
6983 pt->null = true;
6986 /* Set the points-to solution *PT to point only to the variables
6987 in VARS. VARS_CONTAINS_GLOBAL specifies whether that contains
6988 global variables and VARS_CONTAINS_RESTRICT specifies whether
6989 it contains restrict tag variables. */
6991 void
6992 pt_solution_set (struct pt_solution *pt, bitmap vars,
6993 bool vars_contains_nonlocal)
6995 memset (pt, 0, sizeof (struct pt_solution));
6996 pt->vars = vars;
6997 pt->vars_contains_nonlocal = vars_contains_nonlocal;
6998 pt->vars_contains_escaped
6999 = (cfun->gimple_df->escaped.anything
7000 || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
7003 /* Set the points-to solution *PT to point only to the variable VAR. */
7005 void
7006 pt_solution_set_var (struct pt_solution *pt, tree var)
7008 memset (pt, 0, sizeof (struct pt_solution));
7009 pt->vars = BITMAP_GGC_ALLOC ();
7010 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
7011 pt->vars_contains_nonlocal = is_global_var (var);
7012 pt->vars_contains_escaped
7013 = (cfun->gimple_df->escaped.anything
7014 || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
7017 /* Computes the union of the points-to solutions *DEST and *SRC and
7018 stores the result in *DEST. This changes the points-to bitmap
7019 of *DEST and thus may not be used if that might be shared.
7020 The points-to bitmap of *SRC and *DEST will not be shared after
7021 this function if they were not before. */
7023 static void
7024 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
7026 dest->anything |= src->anything;
7027 if (dest->anything)
7029 pt_solution_reset (dest);
7030 return;
7033 dest->nonlocal |= src->nonlocal;
7034 dest->escaped |= src->escaped;
7035 dest->ipa_escaped |= src->ipa_escaped;
7036 dest->null |= src->null;
7037 dest->const_pool |= src->const_pool ;
7038 dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
7039 dest->vars_contains_escaped |= src->vars_contains_escaped;
7040 dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
7041 if (!src->vars)
7042 return;
7044 if (!dest->vars)
7045 dest->vars = BITMAP_GGC_ALLOC ();
7046 bitmap_ior_into (dest->vars, src->vars);
7049 /* Return true if the points-to solution *PT is empty. */
7051 bool
7052 pt_solution_empty_p (const pt_solution *pt)
7054 if (pt->anything
7055 || pt->nonlocal)
7056 return false;
7058 if (pt->vars
7059 && !bitmap_empty_p (pt->vars))
7060 return false;
7062 /* If the solution includes ESCAPED, check if that is empty. */
7063 if (pt->escaped
7064 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
7065 return false;
7067 /* If the solution includes ESCAPED, check if that is empty. */
7068 if (pt->ipa_escaped
7069 && !pt_solution_empty_p (&ipa_escaped_pt))
7070 return false;
7072 return true;
7075 /* Return true if the points-to solution *PT only point to a single var, and
7076 return the var uid in *UID. */
7078 bool
7079 pt_solution_singleton_or_null_p (struct pt_solution *pt, unsigned *uid)
7081 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
7082 || pt->vars == NULL
7083 || !bitmap_single_bit_set_p (pt->vars))
7084 return false;
7086 *uid = bitmap_first_set_bit (pt->vars);
7087 return true;
7090 /* Return true if the points-to solution *PT includes global memory.
7091 If ESCAPED_LOCAL_P is true then escaped local variables are also
7092 considered global. */
7094 bool
7095 pt_solution_includes_global (struct pt_solution *pt, bool escaped_local_p)
7097 if (pt->anything
7098 || pt->nonlocal
7099 || pt->vars_contains_nonlocal
7100 /* The following is a hack to make the malloc escape hack work.
7101 In reality we'd need different sets for escaped-through-return
7102 and escaped-to-callees and passes would need to be updated. */
7103 || pt->vars_contains_escaped_heap)
7104 return true;
7106 if (escaped_local_p && pt->vars_contains_escaped)
7107 return true;
7109 /* 'escaped' is also a placeholder so we have to look into it. */
7110 if (pt->escaped)
7111 return pt_solution_includes_global (&cfun->gimple_df->escaped,
7112 escaped_local_p);
7114 if (pt->ipa_escaped)
7115 return pt_solution_includes_global (&ipa_escaped_pt,
7116 escaped_local_p);
7118 return false;
7121 /* Return true if the points-to solution *PT includes the variable
7122 declaration DECL. */
7124 static bool
7125 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
7127 if (pt->anything)
7128 return true;
7130 if (pt->nonlocal
7131 && is_global_var (decl))
7132 return true;
7134 if (pt->vars
7135 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
7136 return true;
7138 /* If the solution includes ESCAPED, check it. */
7139 if (pt->escaped
7140 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
7141 return true;
7143 /* If the solution includes ESCAPED, check it. */
7144 if (pt->ipa_escaped
7145 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
7146 return true;
7148 return false;
7151 bool
7152 pt_solution_includes (struct pt_solution *pt, const_tree decl)
7154 bool res = pt_solution_includes_1 (pt, decl);
7155 if (res)
7156 ++pta_stats.pt_solution_includes_may_alias;
7157 else
7158 ++pta_stats.pt_solution_includes_no_alias;
7159 return res;
7162 /* Return true if the points-to solution *PT contains a reference to a
7163 constant pool entry. */
7165 bool
7166 pt_solution_includes_const_pool (struct pt_solution *pt)
7168 return (pt->const_pool
7169 || pt->nonlocal
7170 || (pt->escaped && (!cfun || cfun->gimple_df->escaped.const_pool))
7171 || (pt->ipa_escaped && ipa_escaped_pt.const_pool));
7174 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
7175 intersection. */
7177 static bool
7178 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
7180 if (pt1->anything || pt2->anything)
7181 return true;
7183 /* If either points to unknown global memory and the other points to
7184 any global memory they alias. */
7185 if ((pt1->nonlocal
7186 && (pt2->nonlocal
7187 || pt2->vars_contains_nonlocal))
7188 || (pt2->nonlocal
7189 && pt1->vars_contains_nonlocal))
7190 return true;
7192 /* If either points to all escaped memory and the other points to
7193 any escaped memory they alias. */
7194 if ((pt1->escaped
7195 && (pt2->escaped
7196 || pt2->vars_contains_escaped))
7197 || (pt2->escaped
7198 && pt1->vars_contains_escaped))
7199 return true;
7201 /* Check the escaped solution if required.
7202 ??? Do we need to check the local against the IPA escaped sets? */
7203 if ((pt1->ipa_escaped || pt2->ipa_escaped)
7204 && !pt_solution_empty_p (&ipa_escaped_pt))
7206 /* If both point to escaped memory and that solution
7207 is not empty they alias. */
7208 if (pt1->ipa_escaped && pt2->ipa_escaped)
7209 return true;
7211 /* If either points to escaped memory see if the escaped solution
7212 intersects with the other. */
7213 if ((pt1->ipa_escaped
7214 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
7215 || (pt2->ipa_escaped
7216 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
7217 return true;
7220 /* Now both pointers alias if their points-to solution intersects. */
7221 return (pt1->vars
7222 && pt2->vars
7223 && bitmap_intersect_p (pt1->vars, pt2->vars));
7226 bool
7227 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
7229 bool res = pt_solutions_intersect_1 (pt1, pt2);
7230 if (res)
7231 ++pta_stats.pt_solutions_intersect_may_alias;
7232 else
7233 ++pta_stats.pt_solutions_intersect_no_alias;
7234 return res;
7237 /* Dump stats information to OUTFILE. */
7239 static void
7240 dump_sa_stats (FILE *outfile)
7242 fprintf (outfile, "Points-to Stats:\n");
7243 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
7244 fprintf (outfile, "Non-pointer vars: %d\n",
7245 stats.nonpointer_vars);
7246 fprintf (outfile, "Statically unified vars: %d\n",
7247 stats.unified_vars_static);
7248 fprintf (outfile, "Dynamically unified vars: %d\n",
7249 stats.unified_vars_dynamic);
7250 fprintf (outfile, "Iterations: %d\n", stats.iterations);
7251 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
7252 fprintf (outfile, "Number of implicit edges: %d\n",
7253 stats.num_implicit_edges);
7254 fprintf (outfile, "Number of avoided edges: %d\n",
7255 stats.num_avoided_edges);
7258 /* Dump points-to information to OUTFILE. */
7260 static void
7261 dump_sa_points_to_info (FILE *outfile)
7263 fprintf (outfile, "\nPoints-to sets\n\n");
7265 for (unsigned i = 1; i < varmap.length (); i++)
7267 varinfo_t vi = get_varinfo (i);
7268 if (!vi->may_have_pointers)
7269 continue;
7270 dump_solution_for_var (outfile, i);
7275 /* Debug points-to information to stderr. */
7277 DEBUG_FUNCTION void
7278 debug_sa_points_to_info (void)
7280 dump_sa_points_to_info (stderr);
7284 /* Initialize the always-existing constraint variables for NULL
7285 ANYTHING, READONLY, and INTEGER */
7287 static void
7288 init_base_vars (void)
7290 struct constraint_expr lhs, rhs;
7291 varinfo_t var_anything;
7292 varinfo_t var_nothing;
7293 varinfo_t var_string;
7294 varinfo_t var_escaped;
7295 varinfo_t var_nonlocal;
7296 varinfo_t var_escaped_return;
7297 varinfo_t var_storedanything;
7298 varinfo_t var_integer;
7300 /* Variable ID zero is reserved and should be NULL. */
7301 varmap.safe_push (NULL);
7303 /* Create the NULL variable, used to represent that a variable points
7304 to NULL. */
7305 var_nothing = new_var_info (NULL_TREE, "NULL", false);
7306 gcc_assert (var_nothing->id == nothing_id);
7307 var_nothing->is_artificial_var = 1;
7308 var_nothing->offset = 0;
7309 var_nothing->size = ~0;
7310 var_nothing->fullsize = ~0;
7311 var_nothing->is_special_var = 1;
7312 var_nothing->may_have_pointers = 0;
7313 var_nothing->is_global_var = 0;
7315 /* Create the ANYTHING variable, used to represent that a variable
7316 points to some unknown piece of memory. */
7317 var_anything = new_var_info (NULL_TREE, "ANYTHING", false);
7318 gcc_assert (var_anything->id == anything_id);
7319 var_anything->is_artificial_var = 1;
7320 var_anything->size = ~0;
7321 var_anything->offset = 0;
7322 var_anything->fullsize = ~0;
7323 var_anything->is_special_var = 1;
7325 /* Anything points to anything. This makes deref constraints just
7326 work in the presence of linked list and other p = *p type loops,
7327 by saying that *ANYTHING = ANYTHING. */
7328 lhs.type = SCALAR;
7329 lhs.var = anything_id;
7330 lhs.offset = 0;
7331 rhs.type = ADDRESSOF;
7332 rhs.var = anything_id;
7333 rhs.offset = 0;
7335 /* This specifically does not use process_constraint because
7336 process_constraint ignores all anything = anything constraints, since all
7337 but this one are redundant. */
7338 constraints.safe_push (new_constraint (lhs, rhs));
7340 /* Create the STRING variable, used to represent that a variable
7341 points to a string literal. String literals don't contain
7342 pointers so STRING doesn't point to anything. */
7343 var_string = new_var_info (NULL_TREE, "STRING", false);
7344 gcc_assert (var_string->id == string_id);
7345 var_string->is_artificial_var = 1;
7346 var_string->offset = 0;
7347 var_string->size = ~0;
7348 var_string->fullsize = ~0;
7349 var_string->is_special_var = 1;
7350 var_string->may_have_pointers = 0;
7352 /* Create the ESCAPED variable, used to represent the set of escaped
7353 memory. */
7354 var_escaped = new_var_info (NULL_TREE, "ESCAPED", false);
7355 gcc_assert (var_escaped->id == escaped_id);
7356 var_escaped->is_artificial_var = 1;
7357 var_escaped->offset = 0;
7358 var_escaped->size = ~0;
7359 var_escaped->fullsize = ~0;
7360 var_escaped->is_special_var = 0;
7362 /* Create the NONLOCAL variable, used to represent the set of nonlocal
7363 memory. */
7364 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL", false);
7365 gcc_assert (var_nonlocal->id == nonlocal_id);
7366 var_nonlocal->is_artificial_var = 1;
7367 var_nonlocal->offset = 0;
7368 var_nonlocal->size = ~0;
7369 var_nonlocal->fullsize = ~0;
7370 var_nonlocal->is_special_var = 1;
7372 /* Create the ESCAPED_RETURN variable, used to represent the set of escaped
7373 memory via a regular return stmt. */
7374 var_escaped_return = new_var_info (NULL_TREE, "ESCAPED_RETURN", false);
7375 gcc_assert (var_escaped_return->id == escaped_return_id);
7376 var_escaped_return->is_artificial_var = 1;
7377 var_escaped_return->offset = 0;
7378 var_escaped_return->size = ~0;
7379 var_escaped_return->fullsize = ~0;
7380 var_escaped_return->is_special_var = 0;
7382 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
7383 lhs.type = SCALAR;
7384 lhs.var = escaped_id;
7385 lhs.offset = 0;
7386 rhs.type = DEREF;
7387 rhs.var = escaped_id;
7388 rhs.offset = 0;
7389 process_constraint (new_constraint (lhs, rhs));
7391 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
7392 whole variable escapes. */
7393 lhs.type = SCALAR;
7394 lhs.var = escaped_id;
7395 lhs.offset = 0;
7396 rhs.type = SCALAR;
7397 rhs.var = escaped_id;
7398 rhs.offset = UNKNOWN_OFFSET;
7399 process_constraint (new_constraint (lhs, rhs));
7401 /* *ESCAPED = NONLOCAL. This is true because we have to assume
7402 everything pointed to by escaped points to what global memory can
7403 point to. */
7404 lhs.type = DEREF;
7405 lhs.var = escaped_id;
7406 lhs.offset = 0;
7407 rhs.type = SCALAR;
7408 rhs.var = nonlocal_id;
7409 rhs.offset = 0;
7410 process_constraint (new_constraint (lhs, rhs));
7412 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
7413 global memory may point to global memory and escaped memory. */
7414 lhs.type = SCALAR;
7415 lhs.var = nonlocal_id;
7416 lhs.offset = 0;
7417 rhs.type = ADDRESSOF;
7418 rhs.var = nonlocal_id;
7419 rhs.offset = 0;
7420 process_constraint (new_constraint (lhs, rhs));
7421 rhs.type = ADDRESSOF;
7422 rhs.var = escaped_id;
7423 rhs.offset = 0;
7424 process_constraint (new_constraint (lhs, rhs));
7426 /* Transitively close ESCAPED_RETURN.
7427 ESCAPED_RETURN = ESCAPED_RETURN + UNKNOWN_OFFSET
7428 ESCAPED_RETURN = *ESCAPED_RETURN. */
7429 lhs.type = SCALAR;
7430 lhs.var = escaped_return_id;
7431 lhs.offset = 0;
7432 rhs.type = SCALAR;
7433 rhs.var = escaped_return_id;
7434 rhs.offset = UNKNOWN_OFFSET;
7435 process_constraint (new_constraint (lhs, rhs));
7436 lhs.type = SCALAR;
7437 lhs.var = escaped_return_id;
7438 lhs.offset = 0;
7439 rhs.type = DEREF;
7440 rhs.var = escaped_return_id;
7441 rhs.offset = 0;
7442 process_constraint (new_constraint (lhs, rhs));
7444 /* Create the STOREDANYTHING variable, used to represent the set of
7445 variables stored to *ANYTHING. */
7446 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING", false);
7447 gcc_assert (var_storedanything->id == storedanything_id);
7448 var_storedanything->is_artificial_var = 1;
7449 var_storedanything->offset = 0;
7450 var_storedanything->size = ~0;
7451 var_storedanything->fullsize = ~0;
7452 var_storedanything->is_special_var = 0;
7454 /* Create the INTEGER variable, used to represent that a variable points
7455 to what an INTEGER "points to". */
7456 var_integer = new_var_info (NULL_TREE, "INTEGER", false);
7457 gcc_assert (var_integer->id == integer_id);
7458 var_integer->is_artificial_var = 1;
7459 var_integer->size = ~0;
7460 var_integer->fullsize = ~0;
7461 var_integer->offset = 0;
7462 var_integer->is_special_var = 1;
7464 /* INTEGER = ANYTHING, because we don't know where a dereference of
7465 a random integer will point to. */
7466 lhs.type = SCALAR;
7467 lhs.var = integer_id;
7468 lhs.offset = 0;
7469 rhs.type = ADDRESSOF;
7470 rhs.var = anything_id;
7471 rhs.offset = 0;
7472 process_constraint (new_constraint (lhs, rhs));
7475 /* Initialize things necessary to perform PTA */
7477 static void
7478 init_alias_vars (void)
7480 use_field_sensitive = (param_max_fields_for_field_sensitive > 1);
7482 bitmap_obstack_initialize (&pta_obstack);
7483 bitmap_obstack_initialize (&oldpta_obstack);
7484 bitmap_obstack_initialize (&predbitmap_obstack);
7486 constraints.create (8);
7487 varmap.create (8);
7488 vi_for_tree = new hash_map<tree, varinfo_t>;
7489 call_stmt_vars = new hash_map<gimple *, varinfo_t>;
7491 memset (&stats, 0, sizeof (stats));
7492 shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
7493 init_base_vars ();
7495 gcc_obstack_init (&fake_var_decl_obstack);
7497 final_solutions = new hash_map<varinfo_t, pt_solution *>;
7498 gcc_obstack_init (&final_solutions_obstack);
7501 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
7502 predecessor edges. */
7504 static void
7505 remove_preds_and_fake_succs (constraint_graph_t graph)
7507 unsigned int i;
7509 /* Clear the implicit ref and address nodes from the successor
7510 lists. */
7511 for (i = 1; i < FIRST_REF_NODE; i++)
7513 if (graph->succs[i])
7514 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
7515 FIRST_REF_NODE * 2);
7518 /* Free the successor list for the non-ref nodes. */
7519 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
7521 if (graph->succs[i])
7522 BITMAP_FREE (graph->succs[i]);
7525 /* Now reallocate the size of the successor list as, and blow away
7526 the predecessor bitmaps. */
7527 graph->size = varmap.length ();
7528 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
7530 free (graph->implicit_preds);
7531 graph->implicit_preds = NULL;
7532 free (graph->preds);
7533 graph->preds = NULL;
7534 bitmap_obstack_release (&predbitmap_obstack);
7537 /* Solve the constraint set. */
7539 static void
7540 solve_constraints (void)
7542 class scc_info *si;
7544 /* Sort varinfos so that ones that cannot be pointed to are last.
7545 This makes bitmaps more efficient. */
7546 unsigned int *map = XNEWVEC (unsigned int, varmap.length ());
7547 for (unsigned i = 0; i < integer_id + 1; ++i)
7548 map[i] = i;
7549 /* Start with address-taken vars, followed by not address-taken vars
7550 to move vars never appearing in the points-to solution bitmaps last. */
7551 unsigned j = integer_id + 1;
7552 for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
7553 if (varmap[varmap[i]->head]->address_taken)
7554 map[i] = j++;
7555 for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
7556 if (! varmap[varmap[i]->head]->address_taken)
7557 map[i] = j++;
7558 /* Shuffle varmap according to map. */
7559 for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
7561 while (map[varmap[i]->id] != i)
7562 std::swap (varmap[i], varmap[map[varmap[i]->id]]);
7563 gcc_assert (bitmap_empty_p (varmap[i]->solution));
7564 varmap[i]->id = i;
7565 varmap[i]->next = map[varmap[i]->next];
7566 varmap[i]->head = map[varmap[i]->head];
7568 /* Finally rewrite constraints. */
7569 for (unsigned i = 0; i < constraints.length (); ++i)
7571 constraints[i]->lhs.var = map[constraints[i]->lhs.var];
7572 constraints[i]->rhs.var = map[constraints[i]->rhs.var];
7574 free (map);
7576 if (dump_file)
7577 fprintf (dump_file,
7578 "\nCollapsing static cycles and doing variable "
7579 "substitution\n");
7581 init_graph (varmap.length () * 2);
7583 if (dump_file)
7584 fprintf (dump_file, "Building predecessor graph\n");
7585 build_pred_graph ();
7587 if (dump_file)
7588 fprintf (dump_file, "Detecting pointer and location "
7589 "equivalences\n");
7590 si = perform_var_substitution (graph);
7592 if (dump_file)
7593 fprintf (dump_file, "Rewriting constraints and unifying "
7594 "variables\n");
7595 rewrite_constraints (graph, si);
7597 build_succ_graph ();
7599 free_var_substitution_info (si);
7601 /* Attach complex constraints to graph nodes. */
7602 move_complex_constraints (graph);
7604 if (dump_file)
7605 fprintf (dump_file, "Uniting pointer but not location equivalent "
7606 "variables\n");
7607 unite_pointer_equivalences (graph);
7609 if (dump_file)
7610 fprintf (dump_file, "Finding indirect cycles\n");
7611 find_indirect_cycles (graph);
7613 /* Implicit nodes and predecessors are no longer necessary at this
7614 point. */
7615 remove_preds_and_fake_succs (graph);
7617 if (dump_file && (dump_flags & TDF_GRAPH))
7619 fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
7620 "in dot format:\n");
7621 dump_constraint_graph (dump_file);
7622 fprintf (dump_file, "\n\n");
7625 if (dump_file)
7626 fprintf (dump_file, "Solving graph\n");
7628 solve_graph (graph);
7630 if (dump_file && (dump_flags & TDF_GRAPH))
7632 fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
7633 "in dot format:\n");
7634 dump_constraint_graph (dump_file);
7635 fprintf (dump_file, "\n\n");
7639 /* Create points-to sets for the current function. See the comments
7640 at the start of the file for an algorithmic overview. */
7642 static void
7643 compute_points_to_sets (void)
7645 basic_block bb;
7646 varinfo_t vi;
7648 timevar_push (TV_TREE_PTA);
7650 init_alias_vars ();
7652 intra_create_variable_infos (cfun);
7654 /* Now walk all statements and build the constraint set. */
7655 FOR_EACH_BB_FN (bb, cfun)
7657 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7658 gsi_next (&gsi))
7660 gphi *phi = gsi.phi ();
7662 if (! virtual_operand_p (gimple_phi_result (phi)))
7663 find_func_aliases (cfun, phi);
7666 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
7667 gsi_next (&gsi))
7669 gimple *stmt = gsi_stmt (gsi);
7671 find_func_aliases (cfun, stmt);
7675 if (dump_file && (dump_flags & TDF_DETAILS))
7677 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
7678 dump_constraints (dump_file, 0);
7681 /* From the constraints compute the points-to sets. */
7682 solve_constraints ();
7684 if (dump_file && (dump_flags & TDF_STATS))
7685 dump_sa_stats (dump_file);
7687 if (dump_file && (dump_flags & TDF_DETAILS))
7688 dump_sa_points_to_info (dump_file);
7690 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
7691 cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl,
7692 get_varinfo (escaped_id));
7694 /* Make sure the ESCAPED solution (which is used as placeholder in
7695 other solutions) does not reference itself. This simplifies
7696 points-to solution queries. */
7697 cfun->gimple_df->escaped.escaped = 0;
7699 /* The ESCAPED_RETURN solution is what contains all memory that needs
7700 to be considered global. */
7701 cfun->gimple_df->escaped_return
7702 = find_what_var_points_to (cfun->decl, get_varinfo (escaped_return_id));
7703 cfun->gimple_df->escaped_return.escaped = 1;
7705 /* Compute the points-to sets for pointer SSA_NAMEs. */
7706 unsigned i;
7707 tree ptr;
7709 FOR_EACH_SSA_NAME (i, ptr, cfun)
7711 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
7712 find_what_p_points_to (cfun->decl, ptr);
7715 /* Compute the call-used/clobbered sets. */
7716 FOR_EACH_BB_FN (bb, cfun)
7718 gimple_stmt_iterator gsi;
7720 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7722 gcall *stmt;
7723 struct pt_solution *pt;
7725 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
7726 if (!stmt)
7727 continue;
7729 pt = gimple_call_use_set (stmt);
7730 if (gimple_call_flags (stmt) & ECF_CONST)
7731 memset (pt, 0, sizeof (struct pt_solution));
7732 else
7734 bool uses_global_memory = true;
7735 bool reads_global_memory = true;
7737 determine_global_memory_access (stmt, NULL,
7738 &reads_global_memory,
7739 &uses_global_memory);
7740 if ((vi = lookup_call_use_vi (stmt)) != NULL)
7742 *pt = find_what_var_points_to (cfun->decl, vi);
7743 /* Escaped (and thus nonlocal) variables are always
7744 implicitly used by calls. */
7745 /* ??? ESCAPED can be empty even though NONLOCAL
7746 always escaped. */
7747 if (uses_global_memory)
7749 pt->nonlocal = 1;
7750 pt->escaped = 1;
7753 else if (uses_global_memory)
7755 /* If there is nothing special about this call then
7756 we have made everything that is used also escape. */
7757 *pt = cfun->gimple_df->escaped;
7758 pt->nonlocal = 1;
7760 else
7761 memset (pt, 0, sizeof (struct pt_solution));
7764 pt = gimple_call_clobber_set (stmt);
7765 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7766 memset (pt, 0, sizeof (struct pt_solution));
7767 else
7769 bool writes_global_memory = true;
7771 determine_global_memory_access (stmt, &writes_global_memory,
7772 NULL, NULL);
7774 if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7776 *pt = find_what_var_points_to (cfun->decl, vi);
7777 /* Escaped (and thus nonlocal) variables are always
7778 implicitly clobbered by calls. */
7779 /* ??? ESCAPED can be empty even though NONLOCAL
7780 always escaped. */
7781 if (writes_global_memory)
7783 pt->nonlocal = 1;
7784 pt->escaped = 1;
7787 else if (writes_global_memory)
7789 /* If there is nothing special about this call then
7790 we have made everything that is used also escape. */
7791 *pt = cfun->gimple_df->escaped;
7792 pt->nonlocal = 1;
7794 else
7795 memset (pt, 0, sizeof (struct pt_solution));
7800 timevar_pop (TV_TREE_PTA);
7804 /* Delete created points-to sets. */
7806 static void
7807 delete_points_to_sets (void)
7809 unsigned int i;
7811 delete shared_bitmap_table;
7812 shared_bitmap_table = NULL;
7813 if (dump_file && (dump_flags & TDF_STATS))
7814 fprintf (dump_file, "Points to sets created:%d\n",
7815 stats.points_to_sets_created);
7817 delete vi_for_tree;
7818 delete call_stmt_vars;
7819 bitmap_obstack_release (&pta_obstack);
7820 constraints.release ();
7822 for (i = 0; i < graph->size; i++)
7823 graph->complex[i].release ();
7824 free (graph->complex);
7826 free (graph->rep);
7827 free (graph->succs);
7828 free (graph->pe);
7829 free (graph->pe_rep);
7830 free (graph->indirect_cycles);
7831 free (graph);
7833 varmap.release ();
7834 variable_info_pool.release ();
7835 constraint_pool.release ();
7837 obstack_free (&fake_var_decl_obstack, NULL);
7839 delete final_solutions;
7840 obstack_free (&final_solutions_obstack, NULL);
7843 struct vls_data
7845 unsigned short clique;
7846 bool escaped_p;
7847 bitmap rvars;
7850 /* Mark "other" loads and stores as belonging to CLIQUE and with
7851 base zero. */
7853 static bool
7854 visit_loadstore (gimple *, tree base, tree ref, void *data)
7856 unsigned short clique = ((vls_data *) data)->clique;
7857 bitmap rvars = ((vls_data *) data)->rvars;
7858 bool escaped_p = ((vls_data *) data)->escaped_p;
7859 if (TREE_CODE (base) == MEM_REF
7860 || TREE_CODE (base) == TARGET_MEM_REF)
7862 tree ptr = TREE_OPERAND (base, 0);
7863 if (TREE_CODE (ptr) == SSA_NAME)
7865 /* For parameters, get at the points-to set for the actual parm
7866 decl. */
7867 if (SSA_NAME_IS_DEFAULT_DEF (ptr)
7868 && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
7869 || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
7870 ptr = SSA_NAME_VAR (ptr);
7872 /* We need to make sure 'ptr' doesn't include any of
7873 the restrict tags we added bases for in its points-to set. */
7874 varinfo_t vi = lookup_vi_for_tree (ptr);
7875 if (! vi)
7876 return false;
7878 vi = get_varinfo (find (vi->id));
7879 if (bitmap_intersect_p (rvars, vi->solution)
7880 || (escaped_p && bitmap_bit_p (vi->solution, escaped_id)))
7881 return false;
7884 /* Do not overwrite existing cliques (that includes clique, base
7885 pairs we just set). */
7886 if (MR_DEPENDENCE_CLIQUE (base) == 0)
7888 MR_DEPENDENCE_CLIQUE (base) = clique;
7889 MR_DEPENDENCE_BASE (base) = 0;
7893 /* For plain decl accesses see whether they are accesses to globals
7894 and rewrite them to MEM_REFs with { clique, 0 }. */
7895 if (VAR_P (base)
7896 && is_global_var (base)
7897 /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
7898 ops callback. */
7899 && base != ref)
7901 tree *basep = &ref;
7902 while (handled_component_p (*basep))
7903 basep = &TREE_OPERAND (*basep, 0);
7904 gcc_assert (VAR_P (*basep));
7905 tree ptr = build_fold_addr_expr (*basep);
7906 tree zero = build_int_cst (TREE_TYPE (ptr), 0);
7907 *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
7908 MR_DEPENDENCE_CLIQUE (*basep) = clique;
7909 MR_DEPENDENCE_BASE (*basep) = 0;
7912 return false;
7915 struct msdi_data {
7916 tree ptr;
7917 unsigned short *clique;
7918 unsigned short *last_ruid;
7919 varinfo_t restrict_var;
7922 /* If BASE is a MEM_REF then assign a clique, base pair to it, updating
7923 CLIQUE, *RESTRICT_VAR and LAST_RUID as passed via DATA.
7924 Return whether dependence info was assigned to BASE. */
7926 static bool
7927 maybe_set_dependence_info (gimple *, tree base, tree, void *data)
7929 tree ptr = ((msdi_data *)data)->ptr;
7930 unsigned short &clique = *((msdi_data *)data)->clique;
7931 unsigned short &last_ruid = *((msdi_data *)data)->last_ruid;
7932 varinfo_t restrict_var = ((msdi_data *)data)->restrict_var;
7933 if ((TREE_CODE (base) == MEM_REF
7934 || TREE_CODE (base) == TARGET_MEM_REF)
7935 && TREE_OPERAND (base, 0) == ptr)
7937 /* Do not overwrite existing cliques. This avoids overwriting dependence
7938 info inlined from a function with restrict parameters inlined
7939 into a function with restrict parameters. This usually means we
7940 prefer to be precise in innermost loops. */
7941 if (MR_DEPENDENCE_CLIQUE (base) == 0)
7943 if (clique == 0)
7945 if (cfun->last_clique == 0)
7946 cfun->last_clique = 1;
7947 clique = 1;
7949 if (restrict_var->ruid == 0)
7950 restrict_var->ruid = ++last_ruid;
7951 MR_DEPENDENCE_CLIQUE (base) = clique;
7952 MR_DEPENDENCE_BASE (base) = restrict_var->ruid;
7953 return true;
7956 return false;
7959 /* Clear dependence info for the clique DATA. */
7961 static bool
7962 clear_dependence_clique (gimple *, tree base, tree, void *data)
7964 unsigned short clique = (uintptr_t)data;
7965 if ((TREE_CODE (base) == MEM_REF
7966 || TREE_CODE (base) == TARGET_MEM_REF)
7967 && MR_DEPENDENCE_CLIQUE (base) == clique)
7969 MR_DEPENDENCE_CLIQUE (base) = 0;
7970 MR_DEPENDENCE_BASE (base) = 0;
7973 return false;
7976 /* Compute the set of independend memory references based on restrict
7977 tags and their conservative propagation to the points-to sets. */
7979 static void
7980 compute_dependence_clique (void)
7982 /* First clear the special "local" clique. */
7983 basic_block bb;
7984 if (cfun->last_clique != 0)
7985 FOR_EACH_BB_FN (bb, cfun)
7986 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7987 !gsi_end_p (gsi); gsi_next (&gsi))
7989 gimple *stmt = gsi_stmt (gsi);
7990 walk_stmt_load_store_ops (stmt, (void *)(uintptr_t) 1,
7991 clear_dependence_clique,
7992 clear_dependence_clique);
7995 unsigned short clique = 0;
7996 unsigned short last_ruid = 0;
7997 bitmap rvars = BITMAP_ALLOC (NULL);
7998 bool escaped_p = false;
7999 for (unsigned i = 0; i < num_ssa_names; ++i)
8001 tree ptr = ssa_name (i);
8002 if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
8003 continue;
8005 /* Avoid all this when ptr is not dereferenced? */
8006 tree p = ptr;
8007 if (SSA_NAME_IS_DEFAULT_DEF (ptr)
8008 && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
8009 || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
8010 p = SSA_NAME_VAR (ptr);
8011 varinfo_t vi = lookup_vi_for_tree (p);
8012 if (!vi)
8013 continue;
8014 vi = get_varinfo (find (vi->id));
8015 bitmap_iterator bi;
8016 unsigned j;
8017 varinfo_t restrict_var = NULL;
8018 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8020 varinfo_t oi = get_varinfo (j);
8021 if (oi->head != j)
8022 oi = get_varinfo (oi->head);
8023 if (oi->is_restrict_var)
8025 if (restrict_var
8026 && restrict_var != oi)
8028 if (dump_file && (dump_flags & TDF_DETAILS))
8030 fprintf (dump_file, "found restrict pointed-to "
8031 "for ");
8032 print_generic_expr (dump_file, ptr);
8033 fprintf (dump_file, " but not exclusively\n");
8035 restrict_var = NULL;
8036 break;
8038 restrict_var = oi;
8040 /* NULL is the only other valid points-to entry. */
8041 else if (oi->id != nothing_id)
8043 restrict_var = NULL;
8044 break;
8047 /* Ok, found that ptr must(!) point to a single(!) restrict
8048 variable. */
8049 /* ??? PTA isn't really a proper propagation engine to compute
8050 this property.
8051 ??? We could handle merging of two restricts by unifying them. */
8052 if (restrict_var)
8054 /* Now look at possible dereferences of ptr. */
8055 imm_use_iterator ui;
8056 gimple *use_stmt;
8057 bool used = false;
8058 msdi_data data = { ptr, &clique, &last_ruid, restrict_var };
8059 FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
8060 used |= walk_stmt_load_store_ops (use_stmt, &data,
8061 maybe_set_dependence_info,
8062 maybe_set_dependence_info);
8063 if (used)
8065 /* Add all subvars to the set of restrict pointed-to set. */
8066 for (unsigned sv = restrict_var->head; sv != 0;
8067 sv = get_varinfo (sv)->next)
8068 bitmap_set_bit (rvars, sv);
8069 varinfo_t escaped = get_varinfo (find (escaped_id));
8070 if (bitmap_bit_p (escaped->solution, restrict_var->id))
8071 escaped_p = true;
8076 if (clique != 0)
8078 /* Assign the BASE id zero to all accesses not based on a restrict
8079 pointer. That way they get disambiguated against restrict
8080 accesses but not against each other. */
8081 /* ??? For restricts derived from globals (thus not incoming
8082 parameters) we can't restrict scoping properly thus the following
8083 is too aggressive there. For now we have excluded those globals from
8084 getting into the MR_DEPENDENCE machinery. */
8085 vls_data data = { clique, escaped_p, rvars };
8086 basic_block bb;
8087 FOR_EACH_BB_FN (bb, cfun)
8088 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
8089 !gsi_end_p (gsi); gsi_next (&gsi))
8091 gimple *stmt = gsi_stmt (gsi);
8092 walk_stmt_load_store_ops (stmt, &data,
8093 visit_loadstore, visit_loadstore);
8097 BITMAP_FREE (rvars);
8100 /* Compute points-to information for every SSA_NAME pointer in the
8101 current function and compute the transitive closure of escaped
8102 variables to re-initialize the call-clobber states of local variables. */
8104 unsigned int
8105 compute_may_aliases (void)
8107 if (cfun->gimple_df->ipa_pta)
8109 if (dump_file)
8111 fprintf (dump_file, "\nNot re-computing points-to information "
8112 "because IPA points-to information is available.\n\n");
8114 /* But still dump what we have remaining it. */
8115 if (dump_flags & (TDF_DETAILS|TDF_ALIAS))
8116 dump_alias_info (dump_file);
8119 return 0;
8122 /* For each pointer P_i, determine the sets of variables that P_i may
8123 point-to. Compute the reachability set of escaped and call-used
8124 variables. */
8125 compute_points_to_sets ();
8127 /* Debugging dumps. */
8128 if (dump_file && (dump_flags & (TDF_DETAILS|TDF_ALIAS)))
8129 dump_alias_info (dump_file);
8131 /* Compute restrict-based memory disambiguations. */
8132 compute_dependence_clique ();
8134 /* Deallocate memory used by aliasing data structures and the internal
8135 points-to solution. */
8136 delete_points_to_sets ();
8138 gcc_assert (!need_ssa_update_p (cfun));
8140 return 0;
8143 /* A dummy pass to cause points-to information to be computed via
8144 TODO_rebuild_alias. */
8146 namespace {
8148 const pass_data pass_data_build_alias =
8150 GIMPLE_PASS, /* type */
8151 "alias", /* name */
8152 OPTGROUP_NONE, /* optinfo_flags */
8153 TV_NONE, /* tv_id */
8154 ( PROP_cfg | PROP_ssa ), /* properties_required */
8155 0, /* properties_provided */
8156 0, /* properties_destroyed */
8157 0, /* todo_flags_start */
8158 TODO_rebuild_alias, /* todo_flags_finish */
8161 class pass_build_alias : public gimple_opt_pass
8163 public:
8164 pass_build_alias (gcc::context *ctxt)
8165 : gimple_opt_pass (pass_data_build_alias, ctxt)
8168 /* opt_pass methods: */
8169 bool gate (function *) final override { return flag_tree_pta; }
8171 }; // class pass_build_alias
8173 } // anon namespace
8175 gimple_opt_pass *
8176 make_pass_build_alias (gcc::context *ctxt)
8178 return new pass_build_alias (ctxt);
8181 /* A dummy pass to cause points-to information to be computed via
8182 TODO_rebuild_alias. */
8184 namespace {
8186 const pass_data pass_data_build_ealias =
8188 GIMPLE_PASS, /* type */
8189 "ealias", /* name */
8190 OPTGROUP_NONE, /* optinfo_flags */
8191 TV_NONE, /* tv_id */
8192 ( PROP_cfg | PROP_ssa ), /* properties_required */
8193 0, /* properties_provided */
8194 0, /* properties_destroyed */
8195 0, /* todo_flags_start */
8196 TODO_rebuild_alias, /* todo_flags_finish */
8199 class pass_build_ealias : public gimple_opt_pass
8201 public:
8202 pass_build_ealias (gcc::context *ctxt)
8203 : gimple_opt_pass (pass_data_build_ealias, ctxt)
8206 /* opt_pass methods: */
8207 bool gate (function *) final override { return flag_tree_pta; }
8209 }; // class pass_build_ealias
8211 } // anon namespace
8213 gimple_opt_pass *
8214 make_pass_build_ealias (gcc::context *ctxt)
8216 return new pass_build_ealias (ctxt);
8220 /* IPA PTA solutions for ESCAPED. */
8221 struct pt_solution ipa_escaped_pt
8222 = { true, false, false, false, false, false,
8223 false, false, false, false, false, NULL };
8225 /* Associate node with varinfo DATA. Worker for
8226 cgraph_for_symbol_thunks_and_aliases. */
8227 static bool
8228 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
8230 if ((node->alias
8231 || (node->thunk
8232 && ! node->inlined_to))
8233 && node->analyzed
8234 && !node->ifunc_resolver)
8235 insert_vi_for_tree (node->decl, (varinfo_t)data);
8236 return false;
8239 /* Dump varinfo VI to FILE. */
8241 static void
8242 dump_varinfo (FILE *file, varinfo_t vi)
8244 if (vi == NULL)
8245 return;
8247 fprintf (file, "%u: %s\n", vi->id, vi->name);
8249 const char *sep = " ";
8250 if (vi->is_artificial_var)
8251 fprintf (file, "%sartificial", sep);
8252 if (vi->is_special_var)
8253 fprintf (file, "%sspecial", sep);
8254 if (vi->is_unknown_size_var)
8255 fprintf (file, "%sunknown-size", sep);
8256 if (vi->is_full_var)
8257 fprintf (file, "%sfull", sep);
8258 if (vi->is_heap_var)
8259 fprintf (file, "%sheap", sep);
8260 if (vi->may_have_pointers)
8261 fprintf (file, "%smay-have-pointers", sep);
8262 if (vi->only_restrict_pointers)
8263 fprintf (file, "%sonly-restrict-pointers", sep);
8264 if (vi->is_restrict_var)
8265 fprintf (file, "%sis-restrict-var", sep);
8266 if (vi->is_global_var)
8267 fprintf (file, "%sglobal", sep);
8268 if (vi->is_ipa_escape_point)
8269 fprintf (file, "%sipa-escape-point", sep);
8270 if (vi->is_fn_info)
8271 fprintf (file, "%sfn-info", sep);
8272 if (vi->ruid)
8273 fprintf (file, "%srestrict-uid:%u", sep, vi->ruid);
8274 if (vi->next)
8275 fprintf (file, "%snext:%u", sep, vi->next);
8276 if (vi->head != vi->id)
8277 fprintf (file, "%shead:%u", sep, vi->head);
8278 if (vi->offset)
8279 fprintf (file, "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset);
8280 if (vi->size != ~HOST_WIDE_INT_0U)
8281 fprintf (file, "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size);
8282 if (vi->fullsize != ~HOST_WIDE_INT_0U && vi->fullsize != vi->size)
8283 fprintf (file, "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep,
8284 vi->fullsize);
8285 fprintf (file, "\n");
8287 if (vi->solution && !bitmap_empty_p (vi->solution))
8289 bitmap_iterator bi;
8290 unsigned i;
8291 fprintf (file, " solution: {");
8292 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
8293 fprintf (file, " %u", i);
8294 fprintf (file, " }\n");
8297 if (vi->oldsolution && !bitmap_empty_p (vi->oldsolution)
8298 && !bitmap_equal_p (vi->solution, vi->oldsolution))
8300 bitmap_iterator bi;
8301 unsigned i;
8302 fprintf (file, " oldsolution: {");
8303 EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi)
8304 fprintf (file, " %u", i);
8305 fprintf (file, " }\n");
8309 /* Dump varinfo VI to stderr. */
8311 DEBUG_FUNCTION void
8312 debug_varinfo (varinfo_t vi)
8314 dump_varinfo (stderr, vi);
8317 /* Dump varmap to FILE. */
8319 static void
8320 dump_varmap (FILE *file)
8322 if (varmap.length () == 0)
8323 return;
8325 fprintf (file, "variables:\n");
8327 for (unsigned int i = 0; i < varmap.length (); ++i)
8329 varinfo_t vi = get_varinfo (i);
8330 dump_varinfo (file, vi);
8333 fprintf (file, "\n");
8336 /* Dump varmap to stderr. */
8338 DEBUG_FUNCTION void
8339 debug_varmap (void)
8341 dump_varmap (stderr);
8344 /* Compute whether node is refered to non-locally. Worker for
8345 cgraph_for_symbol_thunks_and_aliases. */
8346 static bool
8347 refered_from_nonlocal_fn (struct cgraph_node *node, void *data)
8349 bool *nonlocal_p = (bool *)data;
8350 *nonlocal_p |= (node->used_from_other_partition
8351 || DECL_EXTERNAL (node->decl)
8352 || TREE_PUBLIC (node->decl)
8353 || node->force_output
8354 || lookup_attribute ("noipa", DECL_ATTRIBUTES (node->decl)));
8355 return false;
8358 /* Same for varpool nodes. */
8359 static bool
8360 refered_from_nonlocal_var (struct varpool_node *node, void *data)
8362 bool *nonlocal_p = (bool *)data;
8363 *nonlocal_p |= (node->used_from_other_partition
8364 || DECL_EXTERNAL (node->decl)
8365 || TREE_PUBLIC (node->decl)
8366 || node->force_output);
8367 return false;
8370 /* Execute the driver for IPA PTA. */
8371 static unsigned int
8372 ipa_pta_execute (void)
8374 struct cgraph_node *node;
8375 varpool_node *var;
8376 unsigned int from = 0;
8378 in_ipa_mode = 1;
8380 init_alias_vars ();
8382 if (dump_file && (dump_flags & TDF_DETAILS))
8384 symtab->dump (dump_file);
8385 fprintf (dump_file, "\n");
8388 if (dump_file && (dump_flags & TDF_DETAILS))
8390 fprintf (dump_file, "Generating generic constraints\n\n");
8391 dump_constraints (dump_file, from);
8392 fprintf (dump_file, "\n");
8393 from = constraints.length ();
8396 /* Build the constraints. */
8397 FOR_EACH_DEFINED_FUNCTION (node)
8399 varinfo_t vi;
8400 /* Nodes without a body in this partition are not interesting.
8401 Especially do not visit clones at this point for now - we
8402 get duplicate decls there for inline clones at least. */
8403 if (!node->has_gimple_body_p ()
8404 || node->in_other_partition
8405 || node->inlined_to)
8406 continue;
8407 node->get_body ();
8409 gcc_assert (!node->clone_of);
8411 /* For externally visible or attribute used annotated functions use
8412 local constraints for their arguments.
8413 For local functions we see all callers and thus do not need initial
8414 constraints for parameters. */
8415 bool nonlocal_p = (node->used_from_other_partition
8416 || DECL_EXTERNAL (node->decl)
8417 || TREE_PUBLIC (node->decl)
8418 || node->force_output
8419 || lookup_attribute ("noipa",
8420 DECL_ATTRIBUTES (node->decl)));
8421 node->call_for_symbol_thunks_and_aliases (refered_from_nonlocal_fn,
8422 &nonlocal_p, true);
8424 vi = create_function_info_for (node->decl,
8425 alias_get_name (node->decl), false,
8426 nonlocal_p);
8427 if (dump_file && (dump_flags & TDF_DETAILS)
8428 && from != constraints.length ())
8430 fprintf (dump_file,
8431 "Generating initial constraints for %s",
8432 node->dump_name ());
8433 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
8434 fprintf (dump_file, " (%s)",
8435 IDENTIFIER_POINTER
8436 (DECL_ASSEMBLER_NAME (node->decl)));
8437 fprintf (dump_file, "\n\n");
8438 dump_constraints (dump_file, from);
8439 fprintf (dump_file, "\n");
8441 from = constraints.length ();
8444 node->call_for_symbol_thunks_and_aliases
8445 (associate_varinfo_to_alias, vi, true);
8448 /* Create constraints for global variables and their initializers. */
8449 FOR_EACH_VARIABLE (var)
8451 if (var->alias && var->analyzed)
8452 continue;
8454 varinfo_t vi = get_vi_for_tree (var->decl);
8456 /* For the purpose of IPA PTA unit-local globals are not
8457 escape points. */
8458 bool nonlocal_p = (DECL_EXTERNAL (var->decl)
8459 || TREE_PUBLIC (var->decl)
8460 || var->used_from_other_partition
8461 || var->force_output);
8462 var->call_for_symbol_and_aliases (refered_from_nonlocal_var,
8463 &nonlocal_p, true);
8464 if (nonlocal_p)
8465 vi->is_ipa_escape_point = true;
8468 if (dump_file && (dump_flags & TDF_DETAILS)
8469 && from != constraints.length ())
8471 fprintf (dump_file,
8472 "Generating constraints for global initializers\n\n");
8473 dump_constraints (dump_file, from);
8474 fprintf (dump_file, "\n");
8475 from = constraints.length ();
8478 FOR_EACH_DEFINED_FUNCTION (node)
8480 struct function *func;
8481 basic_block bb;
8483 /* Nodes without a body in this partition are not interesting. */
8484 if (!node->has_gimple_body_p ()
8485 || node->in_other_partition
8486 || node->clone_of)
8487 continue;
8489 if (dump_file && (dump_flags & TDF_DETAILS))
8491 fprintf (dump_file,
8492 "Generating constraints for %s", node->dump_name ());
8493 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
8494 fprintf (dump_file, " (%s)",
8495 IDENTIFIER_POINTER
8496 (DECL_ASSEMBLER_NAME (node->decl)));
8497 fprintf (dump_file, "\n");
8500 func = DECL_STRUCT_FUNCTION (node->decl);
8501 gcc_assert (cfun == NULL);
8503 /* Build constriants for the function body. */
8504 FOR_EACH_BB_FN (bb, func)
8506 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
8507 gsi_next (&gsi))
8509 gphi *phi = gsi.phi ();
8511 if (! virtual_operand_p (gimple_phi_result (phi)))
8512 find_func_aliases (func, phi);
8515 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
8516 gsi_next (&gsi))
8518 gimple *stmt = gsi_stmt (gsi);
8520 find_func_aliases (func, stmt);
8521 find_func_clobbers (func, stmt);
8525 if (dump_file && (dump_flags & TDF_DETAILS))
8527 fprintf (dump_file, "\n");
8528 dump_constraints (dump_file, from);
8529 fprintf (dump_file, "\n");
8530 from = constraints.length ();
8534 /* From the constraints compute the points-to sets. */
8535 solve_constraints ();
8537 if (dump_file && (dump_flags & TDF_STATS))
8538 dump_sa_stats (dump_file);
8540 if (dump_file && (dump_flags & TDF_DETAILS))
8541 dump_sa_points_to_info (dump_file);
8543 /* Now post-process solutions to handle locals from different
8544 runtime instantiations coming in through recursive invocations. */
8545 unsigned shadow_var_cnt = 0;
8546 for (unsigned i = 1; i < varmap.length (); ++i)
8548 varinfo_t fi = get_varinfo (i);
8549 if (fi->is_fn_info
8550 && fi->decl)
8551 /* Automatic variables pointed to by their containing functions
8552 parameters need this treatment. */
8553 for (varinfo_t ai = first_vi_for_offset (fi, fi_parm_base);
8554 ai; ai = vi_next (ai))
8556 varinfo_t vi = get_varinfo (find (ai->id));
8557 bitmap_iterator bi;
8558 unsigned j;
8559 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8561 varinfo_t pt = get_varinfo (j);
8562 if (pt->shadow_var_uid == 0
8563 && pt->decl
8564 && auto_var_in_fn_p (pt->decl, fi->decl))
8566 pt->shadow_var_uid = allocate_decl_uid ();
8567 shadow_var_cnt++;
8571 /* As well as global variables which are another way of passing
8572 arguments to recursive invocations. */
8573 else if (fi->is_global_var)
8575 for (varinfo_t ai = fi; ai; ai = vi_next (ai))
8577 varinfo_t vi = get_varinfo (find (ai->id));
8578 bitmap_iterator bi;
8579 unsigned j;
8580 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8582 varinfo_t pt = get_varinfo (j);
8583 if (pt->shadow_var_uid == 0
8584 && pt->decl
8585 && auto_var_p (pt->decl))
8587 pt->shadow_var_uid = allocate_decl_uid ();
8588 shadow_var_cnt++;
8594 if (shadow_var_cnt && dump_file && (dump_flags & TDF_DETAILS))
8595 fprintf (dump_file, "Allocated %u shadow variables for locals "
8596 "maybe leaking into recursive invocations of their containing "
8597 "functions\n", shadow_var_cnt);
8599 /* Compute the global points-to sets for ESCAPED.
8600 ??? Note that the computed escape set is not correct
8601 for the whole unit as we fail to consider graph edges to
8602 externally visible functions. */
8603 ipa_escaped_pt = find_what_var_points_to (NULL, get_varinfo (escaped_id));
8605 /* Make sure the ESCAPED solution (which is used as placeholder in
8606 other solutions) does not reference itself. This simplifies
8607 points-to solution queries. */
8608 ipa_escaped_pt.ipa_escaped = 0;
8610 /* Assign the points-to sets to the SSA names in the unit. */
8611 FOR_EACH_DEFINED_FUNCTION (node)
8613 tree ptr;
8614 struct function *fn;
8615 unsigned i;
8616 basic_block bb;
8618 /* Nodes without a body in this partition are not interesting. */
8619 if (!node->has_gimple_body_p ()
8620 || node->in_other_partition
8621 || node->clone_of)
8622 continue;
8624 fn = DECL_STRUCT_FUNCTION (node->decl);
8626 /* Compute the points-to sets for pointer SSA_NAMEs. */
8627 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
8629 if (ptr
8630 && POINTER_TYPE_P (TREE_TYPE (ptr)))
8631 find_what_p_points_to (node->decl, ptr);
8634 /* Compute the call-use and call-clobber sets for indirect calls
8635 and calls to external functions. */
8636 FOR_EACH_BB_FN (bb, fn)
8638 gimple_stmt_iterator gsi;
8640 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
8642 gcall *stmt;
8643 struct pt_solution *pt;
8644 varinfo_t vi, fi;
8645 tree decl;
8647 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
8648 if (!stmt)
8649 continue;
8651 /* Handle direct calls to functions with body. */
8652 decl = gimple_call_fndecl (stmt);
8655 tree called_decl = NULL_TREE;
8656 if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
8657 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
8658 else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
8659 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
8661 if (called_decl != NULL_TREE
8662 && !fndecl_maybe_in_other_partition (called_decl))
8663 decl = called_decl;
8666 if (decl
8667 && (fi = lookup_vi_for_tree (decl))
8668 && fi->is_fn_info)
8670 *gimple_call_clobber_set (stmt)
8671 = find_what_var_points_to
8672 (node->decl, first_vi_for_offset (fi, fi_clobbers));
8673 *gimple_call_use_set (stmt)
8674 = find_what_var_points_to
8675 (node->decl, first_vi_for_offset (fi, fi_uses));
8677 /* Handle direct calls to external functions. */
8678 else if (decl && (!fi || fi->decl))
8680 pt = gimple_call_use_set (stmt);
8681 if (gimple_call_flags (stmt) & ECF_CONST)
8682 memset (pt, 0, sizeof (struct pt_solution));
8683 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
8685 *pt = find_what_var_points_to (node->decl, vi);
8686 /* Escaped (and thus nonlocal) variables are always
8687 implicitly used by calls. */
8688 /* ??? ESCAPED can be empty even though NONLOCAL
8689 always escaped. */
8690 pt->nonlocal = 1;
8691 pt->ipa_escaped = 1;
8693 else
8695 /* If there is nothing special about this call then
8696 we have made everything that is used also escape. */
8697 *pt = ipa_escaped_pt;
8698 pt->nonlocal = 1;
8701 pt = gimple_call_clobber_set (stmt);
8702 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
8703 memset (pt, 0, sizeof (struct pt_solution));
8704 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
8706 *pt = find_what_var_points_to (node->decl, vi);
8707 /* Escaped (and thus nonlocal) variables are always
8708 implicitly clobbered by calls. */
8709 /* ??? ESCAPED can be empty even though NONLOCAL
8710 always escaped. */
8711 pt->nonlocal = 1;
8712 pt->ipa_escaped = 1;
8714 else
8716 /* If there is nothing special about this call then
8717 we have made everything that is used also escape. */
8718 *pt = ipa_escaped_pt;
8719 pt->nonlocal = 1;
8722 /* Handle indirect calls. */
8723 else if ((fi = get_fi_for_callee (stmt)))
8725 /* We need to accumulate all clobbers/uses of all possible
8726 callees. */
8727 fi = get_varinfo (find (fi->id));
8728 /* If we cannot constrain the set of functions we'll end up
8729 calling we end up using/clobbering everything. */
8730 if (bitmap_bit_p (fi->solution, anything_id)
8731 || bitmap_bit_p (fi->solution, nonlocal_id)
8732 || bitmap_bit_p (fi->solution, escaped_id))
8734 pt_solution_reset (gimple_call_clobber_set (stmt));
8735 pt_solution_reset (gimple_call_use_set (stmt));
8737 else
8739 bitmap_iterator bi;
8740 unsigned i;
8741 struct pt_solution *uses, *clobbers;
8743 uses = gimple_call_use_set (stmt);
8744 clobbers = gimple_call_clobber_set (stmt);
8745 memset (uses, 0, sizeof (struct pt_solution));
8746 memset (clobbers, 0, sizeof (struct pt_solution));
8747 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
8749 struct pt_solution sol;
8751 vi = get_varinfo (i);
8752 if (!vi->is_fn_info)
8754 /* ??? We could be more precise here? */
8755 uses->nonlocal = 1;
8756 uses->ipa_escaped = 1;
8757 clobbers->nonlocal = 1;
8758 clobbers->ipa_escaped = 1;
8759 continue;
8762 if (!uses->anything)
8764 sol = find_what_var_points_to
8765 (node->decl,
8766 first_vi_for_offset (vi, fi_uses));
8767 pt_solution_ior_into (uses, &sol);
8769 if (!clobbers->anything)
8771 sol = find_what_var_points_to
8772 (node->decl,
8773 first_vi_for_offset (vi, fi_clobbers));
8774 pt_solution_ior_into (clobbers, &sol);
8779 else
8780 gcc_unreachable ();
8784 fn->gimple_df->ipa_pta = true;
8786 /* We have to re-set the final-solution cache after each function
8787 because what is a "global" is dependent on function context. */
8788 final_solutions->empty ();
8789 obstack_free (&final_solutions_obstack, NULL);
8790 gcc_obstack_init (&final_solutions_obstack);
8793 delete_points_to_sets ();
8795 in_ipa_mode = 0;
8797 return 0;
8800 namespace {
8802 const pass_data pass_data_ipa_pta =
8804 SIMPLE_IPA_PASS, /* type */
8805 "pta", /* name */
8806 OPTGROUP_NONE, /* optinfo_flags */
8807 TV_IPA_PTA, /* tv_id */
8808 0, /* properties_required */
8809 0, /* properties_provided */
8810 0, /* properties_destroyed */
8811 0, /* todo_flags_start */
8812 0, /* todo_flags_finish */
8815 class pass_ipa_pta : public simple_ipa_opt_pass
8817 public:
8818 pass_ipa_pta (gcc::context *ctxt)
8819 : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
8822 /* opt_pass methods: */
8823 bool gate (function *) final override
8825 return (optimize
8826 && flag_ipa_pta
8827 /* Don't bother doing anything if the program has errors. */
8828 && !seen_error ());
8831 opt_pass * clone () final override { return new pass_ipa_pta (m_ctxt); }
8833 unsigned int execute (function *) final override
8835 return ipa_pta_execute ();
8838 }; // class pass_ipa_pta
8840 } // anon namespace
8842 simple_ipa_opt_pass *
8843 make_pass_ipa_pta (gcc::context *ctxt)
8845 return new pass_ipa_pta (ctxt);