Fix gnat.dg/opt39.adb on hppa.
[official-gcc.git] / gcc / tree-ssa-structalias.cc
blobc3c5bce42dfe7464f0854a57087bd4ca20253370
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2023 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "tree-pass.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "diagnostic-core.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "stmt.h"
37 #include "gimple-iterator.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "gimple-walk.h"
41 #include "varasm.h"
42 #include "stringpool.h"
43 #include "attribs.h"
44 #include "tree-ssa.h"
45 #include "tree-cfg.h"
46 #include "gimple-range.h"
47 #include "ipa-modref-tree.h"
48 #include "ipa-modref.h"
49 #include "attr-fnspec.h"
51 /* The idea behind this analyzer is to generate set constraints from the
52 program, then solve the resulting constraints in order to generate the
53 points-to sets.
55 Set constraints are a way of modeling program analysis problems that
56 involve sets. They consist of an inclusion constraint language,
57 describing the variables (each variable is a set) and operations that
58 are involved on the variables, and a set of rules that derive facts
59 from these operations. To solve a system of set constraints, you derive
60 all possible facts under the rules, which gives you the correct sets
61 as a consequence.
63 See "Efficient Field-sensitive pointer analysis for C" by "David
64 J. Pearce and Paul H. J. Kelly and Chris Hankin", at
65 http://citeseer.ist.psu.edu/pearce04efficient.html
67 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
68 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
69 http://citeseer.ist.psu.edu/heintze01ultrafast.html
71 There are three types of real constraint expressions, DEREF,
72 ADDRESSOF, and SCALAR. Each constraint expression consists
73 of a constraint type, a variable, and an offset.
75 SCALAR is a constraint expression type used to represent x, whether
76 it appears on the LHS or the RHS of a statement.
77 DEREF is a constraint expression type used to represent *x, whether
78 it appears on the LHS or the RHS of a statement.
79 ADDRESSOF is a constraint expression used to represent &x, whether
80 it appears on the LHS or the RHS of a statement.
82 Each pointer variable in the program is assigned an integer id, and
83 each field of a structure variable is assigned an integer id as well.
85 Structure variables are linked to their list of fields through a "next
86 field" in each variable that points to the next field in offset
87 order.
88 Each variable for a structure field has
90 1. "size", that tells the size in bits of that field.
91 2. "fullsize", that tells the size in bits of the entire structure.
92 3. "offset", that tells the offset in bits from the beginning of the
93 structure to this field.
95 Thus,
96 struct f
98 int a;
99 int b;
100 } foo;
101 int *bar;
103 looks like
105 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
106 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
107 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
110 In order to solve the system of set constraints, the following is
111 done:
113 1. Each constraint variable x has a solution set associated with it,
114 Sol(x).
116 2. Constraints are separated into direct, copy, and complex.
117 Direct constraints are ADDRESSOF constraints that require no extra
118 processing, such as P = &Q
119 Copy constraints are those of the form P = Q.
120 Complex constraints are all the constraints involving dereferences
121 and offsets (including offsetted copies).
123 3. All direct constraints of the form P = &Q are processed, such
124 that Q is added to Sol(P)
126 4. All complex constraints for a given constraint variable are stored in a
127 linked list attached to that variable's node.
129 5. A directed graph is built out of the copy constraints. Each
130 constraint variable is a node in the graph, and an edge from
131 Q to P is added for each copy constraint of the form P = Q
133 6. The graph is then walked, and solution sets are
134 propagated along the copy edges, such that an edge from Q to P
135 causes Sol(P) <- Sol(P) union Sol(Q).
137 7. As we visit each node, all complex constraints associated with
138 that node are processed by adding appropriate copy edges to the graph, or the
139 appropriate variables to the solution set.
141 8. The process of walking the graph is iterated until no solution
142 sets change.
144 Prior to walking the graph in steps 6 and 7, We perform static
145 cycle elimination on the constraint graph, as well
146 as off-line variable substitution.
148 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
149 on and turned into anything), but isn't. You can just see what offset
150 inside the pointed-to struct it's going to access.
152 TODO: Constant bounded arrays can be handled as if they were structs of the
153 same number of elements.
155 TODO: Modeling heap and incoming pointers becomes much better if we
156 add fields to them as we discover them, which we could do.
158 TODO: We could handle unions, but to be honest, it's probably not
159 worth the pain or slowdown. */
161 /* IPA-PTA optimizations possible.
163 When the indirect function called is ANYTHING we can add disambiguation
164 based on the function signatures (or simply the parameter count which
165 is the varinfo size). We also do not need to consider functions that
166 do not have their address taken.
168 The is_global_var bit which marks escape points is overly conservative
169 in IPA mode. Split it to is_escape_point and is_global_var - only
170 externally visible globals are escape points in IPA mode.
171 There is now is_ipa_escape_point but this is only used in a few
172 selected places.
174 The way we introduce DECL_PT_UID to avoid fixing up all points-to
175 sets in the translation unit when we copy a DECL during inlining
176 pessimizes precision. The advantage is that the DECL_PT_UID keeps
177 compile-time and memory usage overhead low - the points-to sets
178 do not grow or get unshared as they would during a fixup phase.
179 An alternative solution is to delay IPA PTA until after all
180 inlining transformations have been applied.
182 The way we propagate clobber/use information isn't optimized.
183 It should use a new complex constraint that properly filters
184 out local variables of the callee (though that would make
185 the sets invalid after inlining). OTOH we might as well
186 admit defeat to WHOPR and simply do all the clobber/use analysis
187 and propagation after PTA finished but before we threw away
188 points-to information for memory variables. WHOPR and PTA
189 do not play along well anyway - the whole constraint solving
190 would need to be done in WPA phase and it will be very interesting
191 to apply the results to local SSA names during LTRANS phase.
193 We probably should compute a per-function unit-ESCAPE solution
194 propagating it simply like the clobber / uses solutions. The
195 solution can go alongside the non-IPA escaped solution and be
196 used to query which vars escape the unit through a function.
197 This is also required to make the escaped-HEAP trick work in IPA mode.
199 We never put function decls in points-to sets so we do not
200 keep the set of called functions for indirect calls.
202 And probably more. */
204 static bool use_field_sensitive = true;
205 static int in_ipa_mode = 0;
207 /* Used for predecessor bitmaps. */
208 static bitmap_obstack predbitmap_obstack;
210 /* Used for points-to sets. */
211 static bitmap_obstack pta_obstack;
213 /* Used for oldsolution members of variables. */
214 static bitmap_obstack oldpta_obstack;
216 /* Used for per-solver-iteration bitmaps. */
217 static bitmap_obstack iteration_obstack;
219 static unsigned int create_variable_info_for (tree, const char *, bool);
220 typedef struct constraint_graph *constraint_graph_t;
221 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
223 struct constraint;
224 typedef struct constraint *constraint_t;
227 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
228 if (a) \
229 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
231 static struct constraint_stats
233 unsigned int total_vars;
234 unsigned int nonpointer_vars;
235 unsigned int unified_vars_static;
236 unsigned int unified_vars_dynamic;
237 unsigned int iterations;
238 unsigned int num_edges;
239 unsigned int num_implicit_edges;
240 unsigned int points_to_sets_created;
241 } stats;
243 struct variable_info
245 /* ID of this variable */
246 unsigned int id;
248 /* True if this is a variable created by the constraint analysis, such as
249 heap variables and constraints we had to break up. */
250 unsigned int is_artificial_var : 1;
252 /* True if this is a special variable whose solution set should not be
253 changed. */
254 unsigned int is_special_var : 1;
256 /* True for variables whose size is not known or variable. */
257 unsigned int is_unknown_size_var : 1;
259 /* True for (sub-)fields that represent a whole variable. */
260 unsigned int is_full_var : 1;
262 /* True if this is a heap variable. */
263 unsigned int is_heap_var : 1;
265 /* True if this is a register variable. */
266 unsigned int is_reg_var : 1;
268 /* True if this field may contain pointers. */
269 unsigned int may_have_pointers : 1;
271 /* True if this field has only restrict qualified pointers. */
272 unsigned int only_restrict_pointers : 1;
274 /* True if this represents a heap var created for a restrict qualified
275 pointer. */
276 unsigned int is_restrict_var : 1;
278 /* True if this represents a global variable. */
279 unsigned int is_global_var : 1;
281 /* True if this represents a module escape point for IPA analysis. */
282 unsigned int is_ipa_escape_point : 1;
284 /* True if this represents a IPA function info. */
285 unsigned int is_fn_info : 1;
287 /* True if this appears as RHS in a ADDRESSOF constraint. */
288 unsigned int address_taken : 1;
290 /* ??? Store somewhere better. */
291 unsigned short ruid;
293 /* The ID of the variable for the next field in this structure
294 or zero for the last field in this structure. */
295 unsigned next;
297 /* The ID of the variable for the first field in this structure. */
298 unsigned head;
300 /* Offset of this variable, in bits, from the base variable */
301 unsigned HOST_WIDE_INT offset;
303 /* Size of the variable, in bits. */
304 unsigned HOST_WIDE_INT size;
306 /* Full size of the base variable, in bits. */
307 unsigned HOST_WIDE_INT fullsize;
309 /* In IPA mode the shadow UID in case the variable needs to be duplicated in
310 the final points-to solution because it reaches its containing
311 function recursively. Zero if none is needed. */
312 unsigned int shadow_var_uid;
314 /* Name of this variable */
315 const char *name;
317 /* Tree that this variable is associated with. */
318 tree decl;
320 /* Points-to set for this variable. */
321 bitmap solution;
323 /* Old points-to set for this variable. */
324 bitmap oldsolution;
326 typedef struct variable_info *varinfo_t;
328 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
329 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
330 unsigned HOST_WIDE_INT);
331 static varinfo_t lookup_vi_for_tree (tree);
332 static inline bool type_can_have_subvars (const_tree);
333 static void make_param_constraints (varinfo_t);
335 /* Pool of variable info structures. */
336 static object_allocator<variable_info> variable_info_pool
337 ("Variable info pool");
339 /* Map varinfo to final pt_solution. */
340 static hash_map<varinfo_t, pt_solution *> *final_solutions;
341 struct obstack final_solutions_obstack;
343 /* Table of variable info structures for constraint variables.
344 Indexed directly by variable info id. */
345 static vec<varinfo_t> varmap;
347 /* Return the varmap element N */
349 static inline varinfo_t
350 get_varinfo (unsigned int n)
352 return varmap[n];
355 /* Return the next variable in the list of sub-variables of VI
356 or NULL if VI is the last sub-variable. */
358 static inline varinfo_t
359 vi_next (varinfo_t vi)
361 return get_varinfo (vi->next);
364 /* Static IDs for the special variables. Variable ID zero is unused
365 and used as terminator for the sub-variable chain. */
366 enum { nothing_id = 1, anything_id = 2, string_id = 3,
367 escaped_id = 4, nonlocal_id = 5,
368 storedanything_id = 6, integer_id = 7 };
370 /* Return a new variable info structure consisting for a variable
371 named NAME, and using constraint graph node NODE. Append it
372 to the vector of variable info structures. */
374 static varinfo_t
375 new_var_info (tree t, const char *name, bool add_id)
377 unsigned index = varmap.length ();
378 varinfo_t ret = variable_info_pool.allocate ();
380 if (dump_file && add_id)
382 char *tempname = xasprintf ("%s(%d)", name, index);
383 name = ggc_strdup (tempname);
384 free (tempname);
387 ret->id = index;
388 ret->name = name;
389 ret->decl = t;
390 /* Vars without decl are artificial and do not have sub-variables. */
391 ret->is_artificial_var = (t == NULL_TREE);
392 ret->is_special_var = false;
393 ret->is_unknown_size_var = false;
394 ret->is_full_var = (t == NULL_TREE);
395 ret->is_heap_var = false;
396 ret->may_have_pointers = true;
397 ret->only_restrict_pointers = false;
398 ret->is_restrict_var = false;
399 ret->ruid = 0;
400 ret->is_global_var = (t == NULL_TREE);
401 ret->is_ipa_escape_point = false;
402 ret->is_fn_info = false;
403 ret->address_taken = false;
404 if (t && DECL_P (t))
405 ret->is_global_var = (is_global_var (t)
406 /* We have to treat even local register variables
407 as escape points. */
408 || (VAR_P (t) && DECL_HARD_REGISTER (t)));
409 ret->is_reg_var = (t && TREE_CODE (t) == SSA_NAME);
410 ret->solution = BITMAP_ALLOC (&pta_obstack);
411 ret->oldsolution = NULL;
412 ret->next = 0;
413 ret->shadow_var_uid = 0;
414 ret->head = ret->id;
416 stats.total_vars++;
418 varmap.safe_push (ret);
420 return ret;
423 /* A map mapping call statements to per-stmt variables for uses
424 and clobbers specific to the call. */
425 static hash_map<gimple *, varinfo_t> *call_stmt_vars;
427 /* Lookup or create the variable for the call statement CALL. */
429 static varinfo_t
430 get_call_vi (gcall *call)
432 varinfo_t vi, vi2;
434 bool existed;
435 varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
436 if (existed)
437 return *slot_p;
439 vi = new_var_info (NULL_TREE, "CALLUSED", true);
440 vi->offset = 0;
441 vi->size = 1;
442 vi->fullsize = 2;
443 vi->is_full_var = true;
444 vi->is_reg_var = true;
446 vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED", true);
447 vi2->offset = 1;
448 vi2->size = 1;
449 vi2->fullsize = 2;
450 vi2->is_full_var = true;
451 vi2->is_reg_var = true;
453 vi->next = vi2->id;
455 *slot_p = vi;
456 return vi;
459 /* Lookup the variable for the call statement CALL representing
460 the uses. Returns NULL if there is nothing special about this call. */
462 static varinfo_t
463 lookup_call_use_vi (gcall *call)
465 varinfo_t *slot_p = call_stmt_vars->get (call);
466 if (slot_p)
467 return *slot_p;
469 return NULL;
472 /* Lookup the variable for the call statement CALL representing
473 the clobbers. Returns NULL if there is nothing special about this call. */
475 static varinfo_t
476 lookup_call_clobber_vi (gcall *call)
478 varinfo_t uses = lookup_call_use_vi (call);
479 if (!uses)
480 return NULL;
482 return vi_next (uses);
485 /* Lookup or create the variable for the call statement CALL representing
486 the uses. */
488 static varinfo_t
489 get_call_use_vi (gcall *call)
491 return get_call_vi (call);
494 /* Lookup or create the variable for the call statement CALL representing
495 the clobbers. */
497 static varinfo_t ATTRIBUTE_UNUSED
498 get_call_clobber_vi (gcall *call)
500 return vi_next (get_call_vi (call));
504 enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};
506 /* An expression that appears in a constraint. */
508 struct constraint_expr
510 /* Constraint type. */
511 constraint_expr_type type;
513 /* Variable we are referring to in the constraint. */
514 unsigned int var;
516 /* Offset, in bits, of this constraint from the beginning of
517 variables it ends up referring to.
519 IOW, in a deref constraint, we would deref, get the result set,
520 then add OFFSET to each member. */
521 HOST_WIDE_INT offset;
524 /* Use 0x8000... as special unknown offset. */
525 #define UNKNOWN_OFFSET HOST_WIDE_INT_MIN
527 typedef struct constraint_expr ce_s;
528 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
529 static void get_constraint_for (tree, vec<ce_s> *);
530 static void get_constraint_for_rhs (tree, vec<ce_s> *);
531 static void do_deref (vec<ce_s> *);
533 /* Our set constraints are made up of two constraint expressions, one
534 LHS, and one RHS.
536 As described in the introduction, our set constraints each represent an
537 operation between set valued variables.
539 struct constraint
541 struct constraint_expr lhs;
542 struct constraint_expr rhs;
545 /* List of constraints that we use to build the constraint graph from. */
547 static vec<constraint_t> constraints;
548 static object_allocator<constraint> constraint_pool ("Constraint pool");
550 /* The constraint graph is represented as an array of bitmaps
551 containing successor nodes. */
553 struct constraint_graph
555 /* Size of this graph, which may be different than the number of
556 nodes in the variable map. */
557 unsigned int size;
559 /* Explicit successors of each node. */
560 bitmap *succs;
562 /* Implicit predecessors of each node (Used for variable
563 substitution). */
564 bitmap *implicit_preds;
566 /* Explicit predecessors of each node (Used for variable substitution). */
567 bitmap *preds;
569 /* Indirect cycle representatives, or -1 if the node has no indirect
570 cycles. */
571 int *indirect_cycles;
573 /* Representative node for a node. rep[a] == a unless the node has
574 been unified. */
575 unsigned int *rep;
577 /* Equivalence class representative for a label. This is used for
578 variable substitution. */
579 int *eq_rep;
581 /* Pointer equivalence label for a node. All nodes with the same
582 pointer equivalence label can be unified together at some point
583 (either during constraint optimization or after the constraint
584 graph is built). */
585 unsigned int *pe;
587 /* Pointer equivalence representative for a label. This is used to
588 handle nodes that are pointer equivalent but not location
589 equivalent. We can unite these once the addressof constraints
590 are transformed into initial points-to sets. */
591 int *pe_rep;
593 /* Pointer equivalence label for each node, used during variable
594 substitution. */
595 unsigned int *pointer_label;
597 /* Location equivalence label for each node, used during location
598 equivalence finding. */
599 unsigned int *loc_label;
601 /* Pointed-by set for each node, used during location equivalence
602 finding. This is pointed-by rather than pointed-to, because it
603 is constructed using the predecessor graph. */
604 bitmap *pointed_by;
606 /* Points to sets for pointer equivalence. This is *not* the actual
607 points-to sets for nodes. */
608 bitmap *points_to;
610 /* Bitmap of nodes where the bit is set if the node is a direct
611 node. Used for variable substitution. */
612 sbitmap direct_nodes;
614 /* Bitmap of nodes where the bit is set if the node is address
615 taken. Used for variable substitution. */
616 bitmap address_taken;
618 /* Vector of complex constraints for each graph node. Complex
619 constraints are those involving dereferences or offsets that are
620 not 0. */
621 vec<constraint_t> *complex;
624 static constraint_graph_t graph;
626 /* During variable substitution and the offline version of indirect
627 cycle finding, we create nodes to represent dereferences and
628 address taken constraints. These represent where these start and
629 end. */
630 #define FIRST_REF_NODE (varmap).length ()
631 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
633 /* Return the representative node for NODE, if NODE has been unioned
634 with another NODE.
635 This function performs path compression along the way to finding
636 the representative. */
638 static unsigned int
639 find (unsigned int node)
641 gcc_checking_assert (node < graph->size);
642 if (graph->rep[node] != node)
643 return graph->rep[node] = find (graph->rep[node]);
644 return node;
647 /* Union the TO and FROM nodes to the TO nodes.
648 Note that at some point in the future, we may want to do
649 union-by-rank, in which case we are going to have to return the
650 node we unified to. */
652 static bool
653 unite (unsigned int to, unsigned int from)
655 gcc_checking_assert (to < graph->size && from < graph->size);
656 if (to != from && graph->rep[from] != to)
658 graph->rep[from] = to;
659 return true;
661 return false;
664 /* Create a new constraint consisting of LHS and RHS expressions. */
666 static constraint_t
667 new_constraint (const struct constraint_expr lhs,
668 const struct constraint_expr rhs)
670 constraint_t ret = constraint_pool.allocate ();
671 ret->lhs = lhs;
672 ret->rhs = rhs;
673 return ret;
676 /* Print out constraint C to FILE. */
678 static void
679 dump_constraint (FILE *file, constraint_t c)
681 if (c->lhs.type == ADDRESSOF)
682 fprintf (file, "&");
683 else if (c->lhs.type == DEREF)
684 fprintf (file, "*");
685 if (dump_file)
686 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
687 else
688 fprintf (file, "V%d", c->lhs.var);
689 if (c->lhs.offset == UNKNOWN_OFFSET)
690 fprintf (file, " + UNKNOWN");
691 else if (c->lhs.offset != 0)
692 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
693 fprintf (file, " = ");
694 if (c->rhs.type == ADDRESSOF)
695 fprintf (file, "&");
696 else if (c->rhs.type == DEREF)
697 fprintf (file, "*");
698 if (dump_file)
699 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
700 else
701 fprintf (file, "V%d", c->rhs.var);
702 if (c->rhs.offset == UNKNOWN_OFFSET)
703 fprintf (file, " + UNKNOWN");
704 else if (c->rhs.offset != 0)
705 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
709 void debug_constraint (constraint_t);
710 void debug_constraints (void);
711 void debug_constraint_graph (void);
712 void debug_solution_for_var (unsigned int);
713 void debug_sa_points_to_info (void);
714 void debug_varinfo (varinfo_t);
715 void debug_varmap (void);
717 /* Print out constraint C to stderr. */
719 DEBUG_FUNCTION void
720 debug_constraint (constraint_t c)
722 dump_constraint (stderr, c);
723 fprintf (stderr, "\n");
726 /* Print out all constraints to FILE */
728 static void
729 dump_constraints (FILE *file, int from)
731 int i;
732 constraint_t c;
733 for (i = from; constraints.iterate (i, &c); i++)
734 if (c)
736 dump_constraint (file, c);
737 fprintf (file, "\n");
741 /* Print out all constraints to stderr. */
743 DEBUG_FUNCTION void
744 debug_constraints (void)
746 dump_constraints (stderr, 0);
749 /* Print the constraint graph in dot format. */
751 static void
752 dump_constraint_graph (FILE *file)
754 unsigned int i;
756 /* Only print the graph if it has already been initialized: */
757 if (!graph)
758 return;
760 /* Prints the header of the dot file: */
761 fprintf (file, "strict digraph {\n");
762 fprintf (file, " node [\n shape = box\n ]\n");
763 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
764 fprintf (file, "\n // List of nodes and complex constraints in "
765 "the constraint graph:\n");
767 /* The next lines print the nodes in the graph together with the
768 complex constraints attached to them. */
769 for (i = 1; i < graph->size; i++)
771 if (i == FIRST_REF_NODE)
772 continue;
773 if (find (i) != i)
774 continue;
775 if (i < FIRST_REF_NODE)
776 fprintf (file, "\"%s\"", get_varinfo (i)->name);
777 else
778 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
779 if (graph->complex[i].exists ())
781 unsigned j;
782 constraint_t c;
783 fprintf (file, " [label=\"\\N\\n");
784 for (j = 0; graph->complex[i].iterate (j, &c); ++j)
786 dump_constraint (file, c);
787 fprintf (file, "\\l");
789 fprintf (file, "\"]");
791 fprintf (file, ";\n");
794 /* Go over the edges. */
795 fprintf (file, "\n // Edges in the constraint graph:\n");
796 for (i = 1; i < graph->size; i++)
798 unsigned j;
799 bitmap_iterator bi;
800 if (find (i) != i)
801 continue;
802 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
804 unsigned to = find (j);
805 if (i == to)
806 continue;
807 if (i < FIRST_REF_NODE)
808 fprintf (file, "\"%s\"", get_varinfo (i)->name);
809 else
810 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
811 fprintf (file, " -> ");
812 if (to < FIRST_REF_NODE)
813 fprintf (file, "\"%s\"", get_varinfo (to)->name);
814 else
815 fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
816 fprintf (file, ";\n");
820 /* Prints the tail of the dot file. */
821 fprintf (file, "}\n");
824 /* Print out the constraint graph to stderr. */
826 DEBUG_FUNCTION void
827 debug_constraint_graph (void)
829 dump_constraint_graph (stderr);
832 /* SOLVER FUNCTIONS
834 The solver is a simple worklist solver, that works on the following
835 algorithm:
837 sbitmap changed_nodes = all zeroes;
838 changed_count = 0;
839 For each node that is not already collapsed:
840 changed_count++;
841 set bit in changed nodes
843 while (changed_count > 0)
845 compute topological ordering for constraint graph
847 find and collapse cycles in the constraint graph (updating
848 changed if necessary)
850 for each node (n) in the graph in topological order:
851 changed_count--;
853 Process each complex constraint associated with the node,
854 updating changed if necessary.
856 For each outgoing edge from n, propagate the solution from n to
857 the destination of the edge, updating changed as necessary.
859 } */
861 /* Return true if two constraint expressions A and B are equal. */
863 static bool
864 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
866 return a.type == b.type && a.var == b.var && a.offset == b.offset;
869 /* Return true if constraint expression A is less than constraint expression
870 B. This is just arbitrary, but consistent, in order to give them an
871 ordering. */
873 static bool
874 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
876 if (a.type == b.type)
878 if (a.var == b.var)
879 return a.offset < b.offset;
880 else
881 return a.var < b.var;
883 else
884 return a.type < b.type;
887 /* Return true if constraint A is less than constraint B. This is just
888 arbitrary, but consistent, in order to give them an ordering. */
890 static bool
891 constraint_less (const constraint_t &a, const constraint_t &b)
893 if (constraint_expr_less (a->lhs, b->lhs))
894 return true;
895 else if (constraint_expr_less (b->lhs, a->lhs))
896 return false;
897 else
898 return constraint_expr_less (a->rhs, b->rhs);
901 /* Return true if two constraints A and B are equal. */
903 static bool
904 constraint_equal (struct constraint a, struct constraint b)
906 return constraint_expr_equal (a.lhs, b.lhs)
907 && constraint_expr_equal (a.rhs, b.rhs);
911 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
913 static constraint_t
914 constraint_vec_find (vec<constraint_t> vec,
915 struct constraint lookfor)
917 unsigned int place;
918 constraint_t found;
920 if (!vec.exists ())
921 return NULL;
923 place = vec.lower_bound (&lookfor, constraint_less);
924 if (place >= vec.length ())
925 return NULL;
926 found = vec[place];
927 if (!constraint_equal (*found, lookfor))
928 return NULL;
929 return found;
932 /* Union two constraint vectors, TO and FROM. Put the result in TO.
933 Returns true of TO set is changed. */
935 static bool
936 constraint_set_union (vec<constraint_t> *to,
937 vec<constraint_t> *from)
939 int i;
940 constraint_t c;
941 bool any_change = false;
943 FOR_EACH_VEC_ELT (*from, i, c)
945 if (constraint_vec_find (*to, *c) == NULL)
947 unsigned int place = to->lower_bound (c, constraint_less);
948 to->safe_insert (place, c);
949 any_change = true;
952 return any_change;
955 /* Expands the solution in SET to all sub-fields of variables included. */
957 static bitmap
958 solution_set_expand (bitmap set, bitmap *expanded)
960 bitmap_iterator bi;
961 unsigned j;
963 if (*expanded)
964 return *expanded;
966 *expanded = BITMAP_ALLOC (&iteration_obstack);
968 /* In a first pass expand to the head of the variables we need to
969 add all sub-fields off. This avoids quadratic behavior. */
970 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
972 varinfo_t v = get_varinfo (j);
973 if (v->is_artificial_var
974 || v->is_full_var)
975 continue;
976 bitmap_set_bit (*expanded, v->head);
979 /* In the second pass now expand all head variables with subfields. */
980 EXECUTE_IF_SET_IN_BITMAP (*expanded, 0, j, bi)
982 varinfo_t v = get_varinfo (j);
983 if (v->head != j)
984 continue;
985 for (v = vi_next (v); v != NULL; v = vi_next (v))
986 bitmap_set_bit (*expanded, v->id);
989 /* And finally set the rest of the bits from SET. */
990 bitmap_ior_into (*expanded, set);
992 return *expanded;
995 /* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
996 process. */
998 static bool
999 set_union_with_increment (bitmap to, bitmap delta, HOST_WIDE_INT inc,
1000 bitmap *expanded_delta)
1002 bool changed = false;
1003 bitmap_iterator bi;
1004 unsigned int i;
1006 /* If the solution of DELTA contains anything it is good enough to transfer
1007 this to TO. */
1008 if (bitmap_bit_p (delta, anything_id))
1009 return bitmap_set_bit (to, anything_id);
1011 /* If the offset is unknown we have to expand the solution to
1012 all subfields. */
1013 if (inc == UNKNOWN_OFFSET)
1015 delta = solution_set_expand (delta, expanded_delta);
1016 changed |= bitmap_ior_into (to, delta);
1017 return changed;
1020 /* For non-zero offset union the offsetted solution into the destination. */
1021 EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
1023 varinfo_t vi = get_varinfo (i);
1025 /* If this is a variable with just one field just set its bit
1026 in the result. */
1027 if (vi->is_artificial_var
1028 || vi->is_unknown_size_var
1029 || vi->is_full_var)
1030 changed |= bitmap_set_bit (to, i);
1031 else
1033 HOST_WIDE_INT fieldoffset = vi->offset + inc;
1034 unsigned HOST_WIDE_INT size = vi->size;
1036 /* If the offset makes the pointer point to before the
1037 variable use offset zero for the field lookup. */
1038 if (fieldoffset < 0)
1039 vi = get_varinfo (vi->head);
1040 else
1041 vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
1045 changed |= bitmap_set_bit (to, vi->id);
1046 if (vi->is_full_var
1047 || vi->next == 0)
1048 break;
1050 /* We have to include all fields that overlap the current field
1051 shifted by inc. */
1052 vi = vi_next (vi);
1054 while (vi->offset < fieldoffset + size);
1058 return changed;
1061 /* Insert constraint C into the list of complex constraints for graph
1062 node VAR. */
1064 static void
1065 insert_into_complex (constraint_graph_t graph,
1066 unsigned int var, constraint_t c)
1068 vec<constraint_t> complex = graph->complex[var];
1069 unsigned int place = complex.lower_bound (c, constraint_less);
1071 /* Only insert constraints that do not already exist. */
1072 if (place >= complex.length ()
1073 || !constraint_equal (*c, *complex[place]))
1074 graph->complex[var].safe_insert (place, c);
1078 /* Condense two variable nodes into a single variable node, by moving
1079 all associated info from FROM to TO. Returns true if TO node's
1080 constraint set changes after the merge. */
1082 static bool
1083 merge_node_constraints (constraint_graph_t graph, unsigned int to,
1084 unsigned int from)
1086 unsigned int i;
1087 constraint_t c;
1088 bool any_change = false;
1090 gcc_checking_assert (find (from) == to);
1092 /* Move all complex constraints from src node into to node */
1093 FOR_EACH_VEC_ELT (graph->complex[from], i, c)
1095 /* In complex constraints for node FROM, we may have either
1096 a = *FROM, and *FROM = a, or an offseted constraint which are
1097 always added to the rhs node's constraints. */
1099 if (c->rhs.type == DEREF)
1100 c->rhs.var = to;
1101 else if (c->lhs.type == DEREF)
1102 c->lhs.var = to;
1103 else
1104 c->rhs.var = to;
1107 any_change = constraint_set_union (&graph->complex[to],
1108 &graph->complex[from]);
1109 graph->complex[from].release ();
1110 return any_change;
1114 /* Remove edges involving NODE from GRAPH. */
1116 static void
1117 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1119 if (graph->succs[node])
1120 BITMAP_FREE (graph->succs[node]);
1123 /* Merge GRAPH nodes FROM and TO into node TO. */
1125 static void
1126 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1127 unsigned int from)
1129 if (graph->indirect_cycles[from] != -1)
1131 /* If we have indirect cycles with the from node, and we have
1132 none on the to node, the to node has indirect cycles from the
1133 from node now that they are unified.
1134 If indirect cycles exist on both, unify the nodes that they
1135 are in a cycle with, since we know they are in a cycle with
1136 each other. */
1137 if (graph->indirect_cycles[to] == -1)
1138 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1141 /* Merge all the successor edges. */
1142 if (graph->succs[from])
1144 if (!graph->succs[to])
1145 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1146 bitmap_ior_into (graph->succs[to],
1147 graph->succs[from]);
1150 clear_edges_for_node (graph, from);
1154 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1155 it doesn't exist in the graph already. */
1157 static void
1158 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1159 unsigned int from)
1161 if (to == from)
1162 return;
1164 if (!graph->implicit_preds[to])
1165 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1167 if (bitmap_set_bit (graph->implicit_preds[to], from))
1168 stats.num_implicit_edges++;
1171 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1172 it doesn't exist in the graph already.
1173 Return false if the edge already existed, true otherwise. */
1175 static void
1176 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1177 unsigned int from)
1179 if (!graph->preds[to])
1180 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1181 bitmap_set_bit (graph->preds[to], from);
1184 /* Add a graph edge to GRAPH, going from FROM to TO if
1185 it doesn't exist in the graph already.
1186 Return false if the edge already existed, true otherwise. */
1188 static bool
1189 add_graph_edge (constraint_graph_t graph, unsigned int to,
1190 unsigned int from)
1192 if (to == from)
1194 return false;
1196 else
1198 bool r = false;
1200 if (!graph->succs[from])
1201 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1203 /* The graph solving process does not avoid "triangles", thus
1204 there can be multiple paths from a node to another involving
1205 intermediate other nodes. That causes extra copying which is
1206 most difficult to avoid when the intermediate node is ESCAPED
1207 because there are no edges added from ESCAPED. Avoid
1208 adding the direct edge FROM -> TO when we have FROM -> ESCAPED
1209 and TO contains ESCAPED.
1210 ??? Note this is only a heuristic, it does not prevent the
1211 situation from occuring. The heuristic helps PR38474 and
1212 PR99912 significantly. */
1213 if (to < FIRST_REF_NODE
1214 && bitmap_bit_p (graph->succs[from], find (escaped_id))
1215 && bitmap_bit_p (get_varinfo (find (to))->solution, escaped_id))
1216 return false;
1218 if (bitmap_set_bit (graph->succs[from], to))
1220 r = true;
1221 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1222 stats.num_edges++;
1224 return r;
1229 /* Initialize the constraint graph structure to contain SIZE nodes. */
1231 static void
1232 init_graph (unsigned int size)
1234 unsigned int j;
1236 graph = XCNEW (struct constraint_graph);
1237 graph->size = size;
1238 graph->succs = XCNEWVEC (bitmap, graph->size);
1239 graph->indirect_cycles = XNEWVEC (int, graph->size);
1240 graph->rep = XNEWVEC (unsigned int, graph->size);
1241 /* ??? Macros do not support template types with multiple arguments,
1242 so we use a typedef to work around it. */
1243 typedef vec<constraint_t> vec_constraint_t_heap;
1244 graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1245 graph->pe = XCNEWVEC (unsigned int, graph->size);
1246 graph->pe_rep = XNEWVEC (int, graph->size);
1248 for (j = 0; j < graph->size; j++)
1250 graph->rep[j] = j;
1251 graph->pe_rep[j] = -1;
1252 graph->indirect_cycles[j] = -1;
1256 /* Build the constraint graph, adding only predecessor edges right now. */
1258 static void
1259 build_pred_graph (void)
1261 int i;
1262 constraint_t c;
1263 unsigned int j;
1265 graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
1266 graph->preds = XCNEWVEC (bitmap, graph->size);
1267 graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
1268 graph->loc_label = XCNEWVEC (unsigned int, graph->size);
1269 graph->pointed_by = XCNEWVEC (bitmap, graph->size);
1270 graph->points_to = XCNEWVEC (bitmap, graph->size);
1271 graph->eq_rep = XNEWVEC (int, graph->size);
1272 graph->direct_nodes = sbitmap_alloc (graph->size);
1273 graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
1274 bitmap_clear (graph->direct_nodes);
1276 for (j = 1; j < FIRST_REF_NODE; j++)
1278 if (!get_varinfo (j)->is_special_var)
1279 bitmap_set_bit (graph->direct_nodes, j);
1282 for (j = 0; j < graph->size; j++)
1283 graph->eq_rep[j] = -1;
1285 for (j = 0; j < varmap.length (); j++)
1286 graph->indirect_cycles[j] = -1;
1288 FOR_EACH_VEC_ELT (constraints, i, c)
1290 struct constraint_expr lhs = c->lhs;
1291 struct constraint_expr rhs = c->rhs;
1292 unsigned int lhsvar = lhs.var;
1293 unsigned int rhsvar = rhs.var;
1295 if (lhs.type == DEREF)
1297 /* *x = y. */
1298 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1299 add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1301 else if (rhs.type == DEREF)
1303 /* x = *y */
1304 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1305 add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1306 else
1307 bitmap_clear_bit (graph->direct_nodes, lhsvar);
1309 else if (rhs.type == ADDRESSOF)
1311 varinfo_t v;
1313 /* x = &y */
1314 if (graph->points_to[lhsvar] == NULL)
1315 graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1316 bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
1318 if (graph->pointed_by[rhsvar] == NULL)
1319 graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
1320 bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
1322 /* Implicitly, *x = y */
1323 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1325 /* All related variables are no longer direct nodes. */
1326 bitmap_clear_bit (graph->direct_nodes, rhsvar);
1327 v = get_varinfo (rhsvar);
1328 if (!v->is_full_var)
1330 v = get_varinfo (v->head);
1333 bitmap_clear_bit (graph->direct_nodes, v->id);
1334 v = vi_next (v);
1336 while (v != NULL);
1338 bitmap_set_bit (graph->address_taken, rhsvar);
1340 else if (lhsvar > anything_id
1341 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1343 /* x = y */
1344 add_pred_graph_edge (graph, lhsvar, rhsvar);
1345 /* Implicitly, *x = *y */
1346 add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
1347 FIRST_REF_NODE + rhsvar);
1349 else if (lhs.offset != 0 || rhs.offset != 0)
1351 if (rhs.offset != 0)
1352 bitmap_clear_bit (graph->direct_nodes, lhs.var);
1353 else if (lhs.offset != 0)
1354 bitmap_clear_bit (graph->direct_nodes, rhs.var);
1359 /* Build the constraint graph, adding successor edges. */
1361 static void
1362 build_succ_graph (void)
1364 unsigned i, t;
1365 constraint_t c;
1367 FOR_EACH_VEC_ELT (constraints, i, c)
1369 struct constraint_expr lhs;
1370 struct constraint_expr rhs;
1371 unsigned int lhsvar;
1372 unsigned int rhsvar;
1374 if (!c)
1375 continue;
1377 lhs = c->lhs;
1378 rhs = c->rhs;
1379 lhsvar = find (lhs.var);
1380 rhsvar = find (rhs.var);
1382 if (lhs.type == DEREF)
1384 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1385 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1387 else if (rhs.type == DEREF)
1389 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1390 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1392 else if (rhs.type == ADDRESSOF)
1394 /* x = &y */
1395 gcc_checking_assert (find (rhs.var) == rhs.var);
1396 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1398 else if (lhsvar > anything_id
1399 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1401 add_graph_edge (graph, lhsvar, rhsvar);
1405 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1406 receive pointers. */
1407 t = find (storedanything_id);
1408 for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1410 if (!bitmap_bit_p (graph->direct_nodes, i)
1411 && get_varinfo (i)->may_have_pointers)
1412 add_graph_edge (graph, find (i), t);
1415 /* Everything stored to ANYTHING also potentially escapes. */
1416 add_graph_edge (graph, find (escaped_id), t);
1420 /* Changed variables on the last iteration. */
1421 static bitmap changed;
1423 /* Strongly Connected Component visitation info. */
1425 class scc_info
1427 public:
1428 scc_info (size_t size);
1429 ~scc_info ();
1431 auto_sbitmap visited;
1432 auto_sbitmap deleted;
1433 unsigned int *dfs;
1434 unsigned int *node_mapping;
1435 int current_index;
1436 auto_vec<unsigned> scc_stack;
1440 /* Recursive routine to find strongly connected components in GRAPH.
1441 SI is the SCC info to store the information in, and N is the id of current
1442 graph node we are processing.
1444 This is Tarjan's strongly connected component finding algorithm, as
1445 modified by Nuutila to keep only non-root nodes on the stack.
1446 The algorithm can be found in "On finding the strongly connected
1447 connected components in a directed graph" by Esko Nuutila and Eljas
1448 Soisalon-Soininen, in Information Processing Letters volume 49,
1449 number 1, pages 9-14. */
1451 static void
1452 scc_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
1454 unsigned int i;
1455 bitmap_iterator bi;
1456 unsigned int my_dfs;
1458 bitmap_set_bit (si->visited, n);
1459 si->dfs[n] = si->current_index ++;
1460 my_dfs = si->dfs[n];
1462 /* Visit all the successors. */
1463 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
1465 unsigned int w;
1467 if (i > LAST_REF_NODE)
1468 break;
1470 w = find (i);
1471 if (bitmap_bit_p (si->deleted, w))
1472 continue;
1474 if (!bitmap_bit_p (si->visited, w))
1475 scc_visit (graph, si, w);
1477 unsigned int t = find (w);
1478 gcc_checking_assert (find (n) == n);
1479 if (si->dfs[t] < si->dfs[n])
1480 si->dfs[n] = si->dfs[t];
1483 /* See if any components have been identified. */
1484 if (si->dfs[n] == my_dfs)
1486 if (si->scc_stack.length () > 0
1487 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1489 bitmap scc = BITMAP_ALLOC (NULL);
1490 unsigned int lowest_node;
1491 bitmap_iterator bi;
1493 bitmap_set_bit (scc, n);
1495 while (si->scc_stack.length () != 0
1496 && si->dfs[si->scc_stack.last ()] >= my_dfs)
1498 unsigned int w = si->scc_stack.pop ();
1500 bitmap_set_bit (scc, w);
1503 lowest_node = bitmap_first_set_bit (scc);
1504 gcc_assert (lowest_node < FIRST_REF_NODE);
1506 /* Collapse the SCC nodes into a single node, and mark the
1507 indirect cycles. */
1508 EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
1510 if (i < FIRST_REF_NODE)
1512 if (unite (lowest_node, i))
1513 unify_nodes (graph, lowest_node, i, false);
1515 else
1517 unite (lowest_node, i);
1518 graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
1522 bitmap_set_bit (si->deleted, n);
1524 else
1525 si->scc_stack.safe_push (n);
1528 /* Unify node FROM into node TO, updating the changed count if
1529 necessary when UPDATE_CHANGED is true. */
1531 static void
1532 unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
1533 bool update_changed)
1535 gcc_checking_assert (to != from && find (to) == to);
1537 if (dump_file && (dump_flags & TDF_DETAILS))
1538 fprintf (dump_file, "Unifying %s to %s\n",
1539 get_varinfo (from)->name,
1540 get_varinfo (to)->name);
1542 if (update_changed)
1543 stats.unified_vars_dynamic++;
1544 else
1545 stats.unified_vars_static++;
1547 merge_graph_nodes (graph, to, from);
1548 if (merge_node_constraints (graph, to, from))
1550 if (update_changed)
1551 bitmap_set_bit (changed, to);
1554 /* Mark TO as changed if FROM was changed. If TO was already marked
1555 as changed, decrease the changed count. */
1557 if (update_changed
1558 && bitmap_clear_bit (changed, from))
1559 bitmap_set_bit (changed, to);
1560 varinfo_t fromvi = get_varinfo (from);
1561 if (fromvi->solution)
1563 /* If the solution changes because of the merging, we need to mark
1564 the variable as changed. */
1565 varinfo_t tovi = get_varinfo (to);
1566 if (bitmap_ior_into (tovi->solution, fromvi->solution))
1568 if (update_changed)
1569 bitmap_set_bit (changed, to);
1572 BITMAP_FREE (fromvi->solution);
1573 if (fromvi->oldsolution)
1574 BITMAP_FREE (fromvi->oldsolution);
1576 if (stats.iterations > 0
1577 && tovi->oldsolution)
1578 BITMAP_FREE (tovi->oldsolution);
1580 if (graph->succs[to])
1581 bitmap_clear_bit (graph->succs[to], to);
1584 /* Information needed to compute the topological ordering of a graph. */
1586 struct topo_info
1588 /* sbitmap of visited nodes. */
1589 sbitmap visited;
1590 /* Array that stores the topological order of the graph, *in
1591 reverse*. */
1592 vec<unsigned> topo_order;
1596 /* Initialize and return a topological info structure. */
1598 static struct topo_info *
1599 init_topo_info (void)
1601 size_t size = graph->size;
1602 struct topo_info *ti = XNEW (struct topo_info);
1603 ti->visited = sbitmap_alloc (size);
1604 bitmap_clear (ti->visited);
1605 ti->topo_order.create (1);
1606 return ti;
1610 /* Free the topological sort info pointed to by TI. */
1612 static void
1613 free_topo_info (struct topo_info *ti)
1615 sbitmap_free (ti->visited);
1616 ti->topo_order.release ();
1617 free (ti);
1620 /* Visit the graph in topological order, and store the order in the
1621 topo_info structure. */
1623 static void
1624 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1625 unsigned int n)
1627 bitmap_iterator bi;
1628 unsigned int j;
1630 bitmap_set_bit (ti->visited, n);
1632 if (graph->succs[n])
1633 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1635 if (!bitmap_bit_p (ti->visited, j))
1636 topo_visit (graph, ti, j);
1639 ti->topo_order.safe_push (n);
1642 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1643 starting solution for y. */
1645 static void
1646 do_sd_constraint (constraint_graph_t graph, constraint_t c,
1647 bitmap delta, bitmap *expanded_delta)
1649 unsigned int lhs = c->lhs.var;
1650 bool flag = false;
1651 bitmap sol = get_varinfo (lhs)->solution;
1652 unsigned int j;
1653 bitmap_iterator bi;
1654 HOST_WIDE_INT roffset = c->rhs.offset;
1656 /* Our IL does not allow this. */
1657 gcc_checking_assert (c->lhs.offset == 0);
1659 /* If the solution of Y contains anything it is good enough to transfer
1660 this to the LHS. */
1661 if (bitmap_bit_p (delta, anything_id))
1663 flag |= bitmap_set_bit (sol, anything_id);
1664 goto done;
1667 /* If we do not know at with offset the rhs is dereferenced compute
1668 the reachability set of DELTA, conservatively assuming it is
1669 dereferenced at all valid offsets. */
1670 if (roffset == UNKNOWN_OFFSET)
1672 delta = solution_set_expand (delta, expanded_delta);
1673 /* No further offset processing is necessary. */
1674 roffset = 0;
1677 /* For each variable j in delta (Sol(y)), add
1678 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1679 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1681 varinfo_t v = get_varinfo (j);
1682 HOST_WIDE_INT fieldoffset = v->offset + roffset;
1683 unsigned HOST_WIDE_INT size = v->size;
1684 unsigned int t;
1686 if (v->is_full_var)
1688 else if (roffset != 0)
1690 if (fieldoffset < 0)
1691 v = get_varinfo (v->head);
1692 else
1693 v = first_or_preceding_vi_for_offset (v, fieldoffset);
1696 /* We have to include all fields that overlap the current field
1697 shifted by roffset. */
1700 t = find (v->id);
1702 /* Adding edges from the special vars is pointless.
1703 They don't have sets that can change. */
1704 if (get_varinfo (t)->is_special_var)
1705 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1706 /* Merging the solution from ESCAPED needlessly increases
1707 the set. Use ESCAPED as representative instead. */
1708 else if (v->id == escaped_id)
1709 flag |= bitmap_set_bit (sol, escaped_id);
1710 else if (v->may_have_pointers
1711 && add_graph_edge (graph, lhs, t))
1712 flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
1714 if (v->is_full_var
1715 || v->next == 0)
1716 break;
1718 v = vi_next (v);
1720 while (v->offset < fieldoffset + size);
1723 done:
1724 /* If the LHS solution changed, mark the var as changed. */
1725 if (flag)
1727 get_varinfo (lhs)->solution = sol;
1728 bitmap_set_bit (changed, lhs);
1732 /* Process a constraint C that represents *(x + off) = y using DELTA
1733 as the starting solution for x. */
1735 static void
1736 do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
1738 unsigned int rhs = c->rhs.var;
1739 bitmap sol = get_varinfo (rhs)->solution;
1740 unsigned int j;
1741 bitmap_iterator bi;
1742 HOST_WIDE_INT loff = c->lhs.offset;
1743 bool escaped_p = false;
1745 /* Our IL does not allow this. */
1746 gcc_checking_assert (c->rhs.offset == 0);
1748 /* If the solution of y contains ANYTHING simply use the ANYTHING
1749 solution. This avoids needlessly increasing the points-to sets. */
1750 if (bitmap_bit_p (sol, anything_id))
1751 sol = get_varinfo (find (anything_id))->solution;
1753 /* If the solution for x contains ANYTHING we have to merge the
1754 solution of y into all pointer variables which we do via
1755 STOREDANYTHING. */
1756 if (bitmap_bit_p (delta, anything_id))
1758 unsigned t = find (storedanything_id);
1759 if (add_graph_edge (graph, t, rhs))
1761 if (bitmap_ior_into (get_varinfo (t)->solution, sol))
1762 bitmap_set_bit (changed, t);
1764 return;
1767 /* If we do not know at with offset the rhs is dereferenced compute
1768 the reachability set of DELTA, conservatively assuming it is
1769 dereferenced at all valid offsets. */
1770 if (loff == UNKNOWN_OFFSET)
1772 delta = solution_set_expand (delta, expanded_delta);
1773 loff = 0;
1776 /* For each member j of delta (Sol(x)), add an edge from y to j and
1777 union Sol(y) into Sol(j) */
1778 EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
1780 varinfo_t v = get_varinfo (j);
1781 unsigned int t;
1782 HOST_WIDE_INT fieldoffset = v->offset + loff;
1783 unsigned HOST_WIDE_INT size = v->size;
1785 if (v->is_full_var)
1787 else if (loff != 0)
1789 if (fieldoffset < 0)
1790 v = get_varinfo (v->head);
1791 else
1792 v = first_or_preceding_vi_for_offset (v, fieldoffset);
1795 /* We have to include all fields that overlap the current field
1796 shifted by loff. */
1799 if (v->may_have_pointers)
1801 /* If v is a global variable then this is an escape point. */
1802 if (v->is_global_var
1803 && !escaped_p)
1805 t = find (escaped_id);
1806 if (add_graph_edge (graph, t, rhs)
1807 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1808 bitmap_set_bit (changed, t);
1809 /* Enough to let rhs escape once. */
1810 escaped_p = true;
1813 if (v->is_special_var)
1814 break;
1816 t = find (v->id);
1817 if (add_graph_edge (graph, t, rhs)
1818 && bitmap_ior_into (get_varinfo (t)->solution, sol))
1819 bitmap_set_bit (changed, t);
1822 if (v->is_full_var
1823 || v->next == 0)
1824 break;
1826 v = vi_next (v);
1828 while (v->offset < fieldoffset + size);
1832 /* Handle a non-simple (simple meaning requires no iteration),
1833 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1835 static void
1836 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
1837 bitmap *expanded_delta)
1839 if (c->lhs.type == DEREF)
1841 if (c->rhs.type == ADDRESSOF)
1843 gcc_unreachable ();
1845 else
1847 /* *x = y */
1848 do_ds_constraint (c, delta, expanded_delta);
1851 else if (c->rhs.type == DEREF)
1853 /* x = *y */
1854 if (!(get_varinfo (c->lhs.var)->is_special_var))
1855 do_sd_constraint (graph, c, delta, expanded_delta);
1857 else
1859 bitmap tmp;
1860 bool flag = false;
1862 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
1863 && c->rhs.offset != 0 && c->lhs.offset == 0);
1864 tmp = get_varinfo (c->lhs.var)->solution;
1866 flag = set_union_with_increment (tmp, delta, c->rhs.offset,
1867 expanded_delta);
1869 if (flag)
1870 bitmap_set_bit (changed, c->lhs.var);
1874 /* Initialize and return a new SCC info structure. */
1876 scc_info::scc_info (size_t size) :
1877 visited (size), deleted (size), current_index (0), scc_stack (1)
1879 bitmap_clear (visited);
1880 bitmap_clear (deleted);
1881 node_mapping = XNEWVEC (unsigned int, size);
1882 dfs = XCNEWVEC (unsigned int, size);
1884 for (size_t i = 0; i < size; i++)
1885 node_mapping[i] = i;
1888 /* Free an SCC info structure pointed to by SI */
1890 scc_info::~scc_info ()
1892 free (node_mapping);
1893 free (dfs);
1897 /* Find indirect cycles in GRAPH that occur, using strongly connected
1898 components, and note them in the indirect cycles map.
1900 This technique comes from Ben Hardekopf and Calvin Lin,
1901 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1902 Lines of Code", submitted to PLDI 2007. */
1904 static void
1905 find_indirect_cycles (constraint_graph_t graph)
1907 unsigned int i;
1908 unsigned int size = graph->size;
1909 scc_info si (size);
1911 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1912 if (!bitmap_bit_p (si.visited, i) && find (i) == i)
1913 scc_visit (graph, &si, i);
1916 /* Compute a topological ordering for GRAPH, and store the result in the
1917 topo_info structure TI. */
1919 static void
1920 compute_topo_order (constraint_graph_t graph,
1921 struct topo_info *ti)
1923 unsigned int i;
1924 unsigned int size = graph->size;
1926 for (i = 0; i != size; ++i)
1927 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1928 topo_visit (graph, ti, i);
1931 /* Structure used to for hash value numbering of pointer equivalence
1932 classes. */
1934 typedef struct equiv_class_label
1936 hashval_t hashcode;
1937 unsigned int equivalence_class;
1938 bitmap labels;
1939 } *equiv_class_label_t;
1940 typedef const struct equiv_class_label *const_equiv_class_label_t;
1942 /* Equiv_class_label hashtable helpers. */
1944 struct equiv_class_hasher : nofree_ptr_hash <equiv_class_label>
1946 static inline hashval_t hash (const equiv_class_label *);
1947 static inline bool equal (const equiv_class_label *,
1948 const equiv_class_label *);
1951 /* Hash function for a equiv_class_label_t */
1953 inline hashval_t
1954 equiv_class_hasher::hash (const equiv_class_label *ecl)
1956 return ecl->hashcode;
1959 /* Equality function for two equiv_class_label_t's. */
1961 inline bool
1962 equiv_class_hasher::equal (const equiv_class_label *eql1,
1963 const equiv_class_label *eql2)
1965 return (eql1->hashcode == eql2->hashcode
1966 && bitmap_equal_p (eql1->labels, eql2->labels));
1969 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1970 classes. */
1971 static hash_table<equiv_class_hasher> *pointer_equiv_class_table;
1973 /* A hashtable for mapping a bitmap of labels->location equivalence
1974 classes. */
1975 static hash_table<equiv_class_hasher> *location_equiv_class_table;
1977 struct obstack equiv_class_obstack;
1979 /* Lookup a equivalence class in TABLE by the bitmap of LABELS with
1980 hash HAS it contains. Sets *REF_LABELS to the bitmap LABELS
1981 is equivalent to. */
1983 static equiv_class_label *
1984 equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
1985 bitmap labels)
1987 equiv_class_label **slot;
1988 equiv_class_label ecl;
1990 ecl.labels = labels;
1991 ecl.hashcode = bitmap_hash (labels);
1992 slot = table->find_slot (&ecl, INSERT);
1993 if (!*slot)
1995 *slot = XOBNEW (&equiv_class_obstack, struct equiv_class_label);
1996 (*slot)->labels = labels;
1997 (*slot)->hashcode = ecl.hashcode;
1998 (*slot)->equivalence_class = 0;
2001 return *slot;
2004 /* Perform offline variable substitution.
2006 This is a worst case quadratic time way of identifying variables
2007 that must have equivalent points-to sets, including those caused by
2008 static cycles, and single entry subgraphs, in the constraint graph.
2010 The technique is described in "Exploiting Pointer and Location
2011 Equivalence to Optimize Pointer Analysis. In the 14th International
2012 Static Analysis Symposium (SAS), August 2007." It is known as the
2013 "HU" algorithm, and is equivalent to value numbering the collapsed
2014 constraint graph including evaluating unions.
2016 The general method of finding equivalence classes is as follows:
2017 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
2018 Initialize all non-REF nodes to be direct nodes.
2019 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
2020 variable}
2021 For each constraint containing the dereference, we also do the same
2022 thing.
2024 We then compute SCC's in the graph and unify nodes in the same SCC,
2025 including pts sets.
2027 For each non-collapsed node x:
2028 Visit all unvisited explicit incoming edges.
2029 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
2030 where y->x.
2031 Lookup the equivalence class for pts(x).
2032 If we found one, equivalence_class(x) = found class.
2033 Otherwise, equivalence_class(x) = new class, and new_class is
2034 added to the lookup table.
2036 All direct nodes with the same equivalence class can be replaced
2037 with a single representative node.
2038 All unlabeled nodes (label == 0) are not pointers and all edges
2039 involving them can be eliminated.
2040 We perform these optimizations during rewrite_constraints
2042 In addition to pointer equivalence class finding, we also perform
2043 location equivalence class finding. This is the set of variables
2044 that always appear together in points-to sets. We use this to
2045 compress the size of the points-to sets. */
2047 /* Current maximum pointer equivalence class id. */
2048 static int pointer_equiv_class;
2050 /* Current maximum location equivalence class id. */
2051 static int location_equiv_class;
2053 /* Recursive routine to find strongly connected components in GRAPH,
2054 and label it's nodes with DFS numbers. */
2056 static void
2057 condense_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
2059 unsigned int i;
2060 bitmap_iterator bi;
2061 unsigned int my_dfs;
2063 gcc_checking_assert (si->node_mapping[n] == n);
2064 bitmap_set_bit (si->visited, n);
2065 si->dfs[n] = si->current_index ++;
2066 my_dfs = si->dfs[n];
2068 /* Visit all the successors. */
2069 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2071 unsigned int w = si->node_mapping[i];
2073 if (bitmap_bit_p (si->deleted, w))
2074 continue;
2076 if (!bitmap_bit_p (si->visited, w))
2077 condense_visit (graph, si, w);
2079 unsigned int t = si->node_mapping[w];
2080 gcc_checking_assert (si->node_mapping[n] == n);
2081 if (si->dfs[t] < si->dfs[n])
2082 si->dfs[n] = si->dfs[t];
2085 /* Visit all the implicit predecessors. */
2086 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2088 unsigned int w = si->node_mapping[i];
2090 if (bitmap_bit_p (si->deleted, w))
2091 continue;
2093 if (!bitmap_bit_p (si->visited, w))
2094 condense_visit (graph, si, w);
2096 unsigned int t = si->node_mapping[w];
2097 gcc_assert (si->node_mapping[n] == n);
2098 if (si->dfs[t] < si->dfs[n])
2099 si->dfs[n] = si->dfs[t];
2102 /* See if any components have been identified. */
2103 if (si->dfs[n] == my_dfs)
2105 if (si->scc_stack.length () != 0
2106 && si->dfs[si->scc_stack.last ()] >= my_dfs)
2108 /* Find the first node of the SCC and do non-bitmap work. */
2109 bool direct_p = true;
2110 unsigned first = si->scc_stack.length ();
2113 --first;
2114 unsigned int w = si->scc_stack[first];
2115 si->node_mapping[w] = n;
2116 if (!bitmap_bit_p (graph->direct_nodes, w))
2117 direct_p = false;
2119 while (first > 0
2120 && si->dfs[si->scc_stack[first - 1]] >= my_dfs);
2121 if (!direct_p)
2122 bitmap_clear_bit (graph->direct_nodes, n);
2124 /* Want to reduce to node n, push that first. */
2125 si->scc_stack.reserve (1);
2126 si->scc_stack.quick_push (si->scc_stack[first]);
2127 si->scc_stack[first] = n;
2129 unsigned scc_size = si->scc_stack.length () - first;
2130 unsigned split = scc_size / 2;
2131 unsigned carry = scc_size - split * 2;
2132 while (split > 0)
2134 for (unsigned i = 0; i < split; ++i)
2136 unsigned a = si->scc_stack[first + i];
2137 unsigned b = si->scc_stack[first + split + carry + i];
2139 /* Unify our nodes. */
2140 if (graph->preds[b])
2142 if (!graph->preds[a])
2143 std::swap (graph->preds[a], graph->preds[b]);
2144 else
2145 bitmap_ior_into_and_free (graph->preds[a],
2146 &graph->preds[b]);
2148 if (graph->implicit_preds[b])
2150 if (!graph->implicit_preds[a])
2151 std::swap (graph->implicit_preds[a],
2152 graph->implicit_preds[b]);
2153 else
2154 bitmap_ior_into_and_free (graph->implicit_preds[a],
2155 &graph->implicit_preds[b]);
2157 if (graph->points_to[b])
2159 if (!graph->points_to[a])
2160 std::swap (graph->points_to[a], graph->points_to[b]);
2161 else
2162 bitmap_ior_into_and_free (graph->points_to[a],
2163 &graph->points_to[b]);
2166 unsigned remain = split + carry;
2167 split = remain / 2;
2168 carry = remain - split * 2;
2170 /* Actually pop the SCC. */
2171 si->scc_stack.truncate (first);
2173 bitmap_set_bit (si->deleted, n);
2175 else
2176 si->scc_stack.safe_push (n);
2179 /* Label pointer equivalences.
2181 This performs a value numbering of the constraint graph to
2182 discover which variables will always have the same points-to sets
2183 under the current set of constraints.
2185 The way it value numbers is to store the set of points-to bits
2186 generated by the constraints and graph edges. This is just used as a
2187 hash and equality comparison. The *actual set of points-to bits* is
2188 completely irrelevant, in that we don't care about being able to
2189 extract them later.
2191 The equality values (currently bitmaps) just have to satisfy a few
2192 constraints, the main ones being:
2193 1. The combining operation must be order independent.
2194 2. The end result of a given set of operations must be unique iff the
2195 combination of input values is unique
2196 3. Hashable. */
2198 static void
2199 label_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
2201 unsigned int i, first_pred;
2202 bitmap_iterator bi;
2204 bitmap_set_bit (si->visited, n);
2206 /* Label and union our incoming edges's points to sets. */
2207 first_pred = -1U;
2208 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2210 unsigned int w = si->node_mapping[i];
2211 if (!bitmap_bit_p (si->visited, w))
2212 label_visit (graph, si, w);
2214 /* Skip unused edges */
2215 if (w == n || graph->pointer_label[w] == 0)
2216 continue;
2218 if (graph->points_to[w])
2220 if (!graph->points_to[n])
2222 if (first_pred == -1U)
2223 first_pred = w;
2224 else
2226 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2227 bitmap_ior (graph->points_to[n],
2228 graph->points_to[first_pred],
2229 graph->points_to[w]);
2232 else
2233 bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
2237 /* Indirect nodes get fresh variables and a new pointer equiv class. */
2238 if (!bitmap_bit_p (graph->direct_nodes, n))
2240 if (!graph->points_to[n])
2242 graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
2243 if (first_pred != -1U)
2244 bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
2246 bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
2247 graph->pointer_label[n] = pointer_equiv_class++;
2248 equiv_class_label_t ecl;
2249 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2250 graph->points_to[n]);
2251 ecl->equivalence_class = graph->pointer_label[n];
2252 return;
2255 /* If there was only a single non-empty predecessor the pointer equiv
2256 class is the same. */
2257 if (!graph->points_to[n])
2259 if (first_pred != -1U)
2261 graph->pointer_label[n] = graph->pointer_label[first_pred];
2262 graph->points_to[n] = graph->points_to[first_pred];
2264 return;
2267 if (!bitmap_empty_p (graph->points_to[n]))
2269 equiv_class_label_t ecl;
2270 ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
2271 graph->points_to[n]);
2272 if (ecl->equivalence_class == 0)
2273 ecl->equivalence_class = pointer_equiv_class++;
2274 else
2276 BITMAP_FREE (graph->points_to[n]);
2277 graph->points_to[n] = ecl->labels;
2279 graph->pointer_label[n] = ecl->equivalence_class;
2283 /* Print the pred graph in dot format. */
2285 static void
2286 dump_pred_graph (class scc_info *si, FILE *file)
2288 unsigned int i;
2290 /* Only print the graph if it has already been initialized: */
2291 if (!graph)
2292 return;
2294 /* Prints the header of the dot file: */
2295 fprintf (file, "strict digraph {\n");
2296 fprintf (file, " node [\n shape = box\n ]\n");
2297 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
2298 fprintf (file, "\n // List of nodes and complex constraints in "
2299 "the constraint graph:\n");
2301 /* The next lines print the nodes in the graph together with the
2302 complex constraints attached to them. */
2303 for (i = 1; i < graph->size; i++)
2305 if (i == FIRST_REF_NODE)
2306 continue;
2307 if (si->node_mapping[i] != i)
2308 continue;
2309 if (i < FIRST_REF_NODE)
2310 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2311 else
2312 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2313 if (graph->points_to[i]
2314 && !bitmap_empty_p (graph->points_to[i]))
2316 if (i < FIRST_REF_NODE)
2317 fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2318 else
2319 fprintf (file, "[label=\"*%s = {",
2320 get_varinfo (i - FIRST_REF_NODE)->name);
2321 unsigned j;
2322 bitmap_iterator bi;
2323 EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2324 fprintf (file, " %d", j);
2325 fprintf (file, " }\"]");
2327 fprintf (file, ";\n");
2330 /* Go over the edges. */
2331 fprintf (file, "\n // Edges in the constraint graph:\n");
2332 for (i = 1; i < graph->size; i++)
2334 unsigned j;
2335 bitmap_iterator bi;
2336 if (si->node_mapping[i] != i)
2337 continue;
2338 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2340 unsigned from = si->node_mapping[j];
2341 if (from < FIRST_REF_NODE)
2342 fprintf (file, "\"%s\"", get_varinfo (from)->name);
2343 else
2344 fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2345 fprintf (file, " -> ");
2346 if (i < FIRST_REF_NODE)
2347 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2348 else
2349 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2350 fprintf (file, ";\n");
2354 /* Prints the tail of the dot file. */
2355 fprintf (file, "}\n");
2358 /* Perform offline variable substitution, discovering equivalence
2359 classes, and eliminating non-pointer variables. */
2361 static class scc_info *
2362 perform_var_substitution (constraint_graph_t graph)
2364 unsigned int i;
2365 unsigned int size = graph->size;
2366 scc_info *si = new scc_info (size);
2368 bitmap_obstack_initialize (&iteration_obstack);
2369 gcc_obstack_init (&equiv_class_obstack);
2370 pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
2371 location_equiv_class_table
2372 = new hash_table<equiv_class_hasher> (511);
2373 pointer_equiv_class = 1;
2374 location_equiv_class = 1;
2376 /* Condense the nodes, which means to find SCC's, count incoming
2377 predecessors, and unite nodes in SCC's. */
2378 for (i = 1; i < FIRST_REF_NODE; i++)
2379 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2380 condense_visit (graph, si, si->node_mapping[i]);
2382 if (dump_file && (dump_flags & TDF_GRAPH))
2384 fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
2385 "in dot format:\n");
2386 dump_pred_graph (si, dump_file);
2387 fprintf (dump_file, "\n\n");
2390 bitmap_clear (si->visited);
2391 /* Actually the label the nodes for pointer equivalences */
2392 for (i = 1; i < FIRST_REF_NODE; i++)
2393 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2394 label_visit (graph, si, si->node_mapping[i]);
2396 /* Calculate location equivalence labels. */
2397 for (i = 1; i < FIRST_REF_NODE; i++)
2399 bitmap pointed_by;
2400 bitmap_iterator bi;
2401 unsigned int j;
2403 if (!graph->pointed_by[i])
2404 continue;
2405 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2407 /* Translate the pointed-by mapping for pointer equivalence
2408 labels. */
2409 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2411 bitmap_set_bit (pointed_by,
2412 graph->pointer_label[si->node_mapping[j]]);
2414 /* The original pointed_by is now dead. */
2415 BITMAP_FREE (graph->pointed_by[i]);
2417 /* Look up the location equivalence label if one exists, or make
2418 one otherwise. */
2419 equiv_class_label_t ecl;
2420 ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
2421 if (ecl->equivalence_class == 0)
2422 ecl->equivalence_class = location_equiv_class++;
2423 else
2425 if (dump_file && (dump_flags & TDF_DETAILS))
2426 fprintf (dump_file, "Found location equivalence for node %s\n",
2427 get_varinfo (i)->name);
2428 BITMAP_FREE (pointed_by);
2430 graph->loc_label[i] = ecl->equivalence_class;
2434 if (dump_file && (dump_flags & TDF_DETAILS))
2435 for (i = 1; i < FIRST_REF_NODE; i++)
2437 unsigned j = si->node_mapping[i];
2438 if (j != i)
2440 fprintf (dump_file, "%s node id %d ",
2441 bitmap_bit_p (graph->direct_nodes, i)
2442 ? "Direct" : "Indirect", i);
2443 if (i < FIRST_REF_NODE)
2444 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2445 else
2446 fprintf (dump_file, "\"*%s\"",
2447 get_varinfo (i - FIRST_REF_NODE)->name);
2448 fprintf (dump_file, " mapped to SCC leader node id %d ", j);
2449 if (j < FIRST_REF_NODE)
2450 fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
2451 else
2452 fprintf (dump_file, "\"*%s\"\n",
2453 get_varinfo (j - FIRST_REF_NODE)->name);
2455 else
2457 fprintf (dump_file,
2458 "Equivalence classes for %s node id %d ",
2459 bitmap_bit_p (graph->direct_nodes, i)
2460 ? "direct" : "indirect", i);
2461 if (i < FIRST_REF_NODE)
2462 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2463 else
2464 fprintf (dump_file, "\"*%s\"",
2465 get_varinfo (i - FIRST_REF_NODE)->name);
2466 fprintf (dump_file,
2467 ": pointer %d, location %d\n",
2468 graph->pointer_label[i], graph->loc_label[i]);
2472 /* Quickly eliminate our non-pointer variables. */
2474 for (i = 1; i < FIRST_REF_NODE; i++)
2476 unsigned int node = si->node_mapping[i];
2478 if (graph->pointer_label[node] == 0)
2480 if (dump_file && (dump_flags & TDF_DETAILS))
2481 fprintf (dump_file,
2482 "%s is a non-pointer variable, eliminating edges.\n",
2483 get_varinfo (node)->name);
2484 stats.nonpointer_vars++;
2485 clear_edges_for_node (graph, node);
2489 return si;
2492 /* Free information that was only necessary for variable
2493 substitution. */
2495 static void
2496 free_var_substitution_info (class scc_info *si)
2498 delete si;
2499 free (graph->pointer_label);
2500 free (graph->loc_label);
2501 free (graph->pointed_by);
2502 free (graph->points_to);
2503 free (graph->eq_rep);
2504 sbitmap_free (graph->direct_nodes);
2505 delete pointer_equiv_class_table;
2506 pointer_equiv_class_table = NULL;
2507 delete location_equiv_class_table;
2508 location_equiv_class_table = NULL;
2509 obstack_free (&equiv_class_obstack, NULL);
2510 bitmap_obstack_release (&iteration_obstack);
2513 /* Return an existing node that is equivalent to NODE, which has
2514 equivalence class LABEL, if one exists. Return NODE otherwise. */
2516 static unsigned int
2517 find_equivalent_node (constraint_graph_t graph,
2518 unsigned int node, unsigned int label)
2520 /* If the address version of this variable is unused, we can
2521 substitute it for anything else with the same label.
2522 Otherwise, we know the pointers are equivalent, but not the
2523 locations, and we can unite them later. */
2525 if (!bitmap_bit_p (graph->address_taken, node))
2527 gcc_checking_assert (label < graph->size);
2529 if (graph->eq_rep[label] != -1)
2531 /* Unify the two variables since we know they are equivalent. */
2532 if (unite (graph->eq_rep[label], node))
2533 unify_nodes (graph, graph->eq_rep[label], node, false);
2534 return graph->eq_rep[label];
2536 else
2538 graph->eq_rep[label] = node;
2539 graph->pe_rep[label] = node;
2542 else
2544 gcc_checking_assert (label < graph->size);
2545 graph->pe[node] = label;
2546 if (graph->pe_rep[label] == -1)
2547 graph->pe_rep[label] = node;
2550 return node;
2553 /* Unite pointer equivalent but not location equivalent nodes in
2554 GRAPH. This may only be performed once variable substitution is
2555 finished. */
2557 static void
2558 unite_pointer_equivalences (constraint_graph_t graph)
2560 unsigned int i;
2562 /* Go through the pointer equivalences and unite them to their
2563 representative, if they aren't already. */
2564 for (i = 1; i < FIRST_REF_NODE; i++)
2566 unsigned int label = graph->pe[i];
2567 if (label)
2569 int label_rep = graph->pe_rep[label];
2571 if (label_rep == -1)
2572 continue;
2574 label_rep = find (label_rep);
2575 if (label_rep >= 0 && unite (label_rep, find (i)))
2576 unify_nodes (graph, label_rep, i, false);
2581 /* Move complex constraints to the GRAPH nodes they belong to. */
2583 static void
2584 move_complex_constraints (constraint_graph_t graph)
2586 int i;
2587 constraint_t c;
2589 FOR_EACH_VEC_ELT (constraints, i, c)
2591 if (c)
2593 struct constraint_expr lhs = c->lhs;
2594 struct constraint_expr rhs = c->rhs;
2596 if (lhs.type == DEREF)
2598 insert_into_complex (graph, lhs.var, c);
2600 else if (rhs.type == DEREF)
2602 if (!(get_varinfo (lhs.var)->is_special_var))
2603 insert_into_complex (graph, rhs.var, c);
2605 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2606 && (lhs.offset != 0 || rhs.offset != 0))
2608 insert_into_complex (graph, rhs.var, c);
2615 /* Optimize and rewrite complex constraints while performing
2616 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2617 result of perform_variable_substitution. */
2619 static void
2620 rewrite_constraints (constraint_graph_t graph,
2621 class scc_info *si)
2623 int i;
2624 constraint_t c;
2626 if (flag_checking)
2628 for (unsigned int j = 0; j < graph->size; j++)
2629 gcc_assert (find (j) == j);
2632 FOR_EACH_VEC_ELT (constraints, i, c)
2634 struct constraint_expr lhs = c->lhs;
2635 struct constraint_expr rhs = c->rhs;
2636 unsigned int lhsvar = find (lhs.var);
2637 unsigned int rhsvar = find (rhs.var);
2638 unsigned int lhsnode, rhsnode;
2639 unsigned int lhslabel, rhslabel;
2641 lhsnode = si->node_mapping[lhsvar];
2642 rhsnode = si->node_mapping[rhsvar];
2643 lhslabel = graph->pointer_label[lhsnode];
2644 rhslabel = graph->pointer_label[rhsnode];
2646 /* See if it is really a non-pointer variable, and if so, ignore
2647 the constraint. */
2648 if (lhslabel == 0)
2650 if (dump_file && (dump_flags & TDF_DETAILS))
2653 fprintf (dump_file, "%s is a non-pointer variable, "
2654 "ignoring constraint:",
2655 get_varinfo (lhs.var)->name);
2656 dump_constraint (dump_file, c);
2657 fprintf (dump_file, "\n");
2659 constraints[i] = NULL;
2660 continue;
2663 if (rhslabel == 0)
2665 if (dump_file && (dump_flags & TDF_DETAILS))
2668 fprintf (dump_file, "%s is a non-pointer variable, "
2669 "ignoring constraint:",
2670 get_varinfo (rhs.var)->name);
2671 dump_constraint (dump_file, c);
2672 fprintf (dump_file, "\n");
2674 constraints[i] = NULL;
2675 continue;
2678 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2679 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2680 c->lhs.var = lhsvar;
2681 c->rhs.var = rhsvar;
2685 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2686 part of an SCC, false otherwise. */
2688 static bool
2689 eliminate_indirect_cycles (unsigned int node)
2691 if (graph->indirect_cycles[node] != -1
2692 && !bitmap_empty_p (get_varinfo (node)->solution))
2694 unsigned int i;
2695 auto_vec<unsigned> queue;
2696 int queuepos;
2697 unsigned int to = find (graph->indirect_cycles[node]);
2698 bitmap_iterator bi;
2700 /* We can't touch the solution set and call unify_nodes
2701 at the same time, because unify_nodes is going to do
2702 bitmap unions into it. */
2704 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2706 if (find (i) == i && i != to)
2708 if (unite (to, i))
2709 queue.safe_push (i);
2713 for (queuepos = 0;
2714 queue.iterate (queuepos, &i);
2715 queuepos++)
2717 unify_nodes (graph, to, i, true);
2719 return true;
2721 return false;
2724 /* Solve the constraint graph GRAPH using our worklist solver.
2725 This is based on the PW* family of solvers from the "Efficient Field
2726 Sensitive Pointer Analysis for C" paper.
2727 It works by iterating over all the graph nodes, processing the complex
2728 constraints and propagating the copy constraints, until everything stops
2729 changed. This corresponds to steps 6-8 in the solving list given above. */
2731 static void
2732 solve_graph (constraint_graph_t graph)
2734 unsigned int size = graph->size;
2735 unsigned int i;
2736 bitmap pts;
2738 changed = BITMAP_ALLOC (NULL);
2740 /* Mark all initial non-collapsed nodes as changed. */
2741 for (i = 1; i < size; i++)
2743 varinfo_t ivi = get_varinfo (i);
2744 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2745 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2746 || graph->complex[i].length () > 0))
2747 bitmap_set_bit (changed, i);
2750 /* Allocate a bitmap to be used to store the changed bits. */
2751 pts = BITMAP_ALLOC (&pta_obstack);
2753 while (!bitmap_empty_p (changed))
2755 unsigned int i;
2756 struct topo_info *ti = init_topo_info ();
2757 stats.iterations++;
2759 bitmap_obstack_initialize (&iteration_obstack);
2761 compute_topo_order (graph, ti);
2763 while (ti->topo_order.length () != 0)
2766 i = ti->topo_order.pop ();
2768 /* If this variable is not a representative, skip it. */
2769 if (find (i) != i)
2770 continue;
2772 /* In certain indirect cycle cases, we may merge this
2773 variable to another. */
2774 if (eliminate_indirect_cycles (i) && find (i) != i)
2775 continue;
2777 /* If the node has changed, we need to process the
2778 complex constraints and outgoing edges again. For complex
2779 constraints that modify i itself, like the common group of
2780 callarg = callarg + UNKNOWN;
2781 callarg = *callarg + UNKNOWN;
2782 *callarg = callescape;
2783 make sure to iterate immediately because that maximizes
2784 cache reuse and expands the graph quickest, leading to
2785 better visitation order in the next iteration. */
2786 while (bitmap_clear_bit (changed, i))
2788 unsigned int j;
2789 constraint_t c;
2790 bitmap solution;
2791 vec<constraint_t> complex = graph->complex[i];
2792 varinfo_t vi = get_varinfo (i);
2793 bool solution_empty;
2795 /* Compute the changed set of solution bits. If anything
2796 is in the solution just propagate that. */
2797 if (bitmap_bit_p (vi->solution, anything_id))
2799 /* If anything is also in the old solution there is
2800 nothing to do.
2801 ??? But we shouldn't ended up with "changed" set ... */
2802 if (vi->oldsolution
2803 && bitmap_bit_p (vi->oldsolution, anything_id))
2804 break;
2805 bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
2807 else if (vi->oldsolution)
2808 bitmap_and_compl (pts, vi->solution, vi->oldsolution);
2809 else
2810 bitmap_copy (pts, vi->solution);
2812 if (bitmap_empty_p (pts))
2813 break;
2815 if (vi->oldsolution)
2816 bitmap_ior_into (vi->oldsolution, pts);
2817 else
2819 vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
2820 bitmap_copy (vi->oldsolution, pts);
2823 solution = vi->solution;
2824 solution_empty = bitmap_empty_p (solution);
2826 /* Process the complex constraints */
2827 bitmap expanded_pts = NULL;
2828 FOR_EACH_VEC_ELT (complex, j, c)
2830 /* XXX: This is going to unsort the constraints in
2831 some cases, which will occasionally add duplicate
2832 constraints during unification. This does not
2833 affect correctness. */
2834 c->lhs.var = find (c->lhs.var);
2835 c->rhs.var = find (c->rhs.var);
2837 /* The only complex constraint that can change our
2838 solution to non-empty, given an empty solution,
2839 is a constraint where the lhs side is receiving
2840 some set from elsewhere. */
2841 if (!solution_empty || c->lhs.type != DEREF)
2842 do_complex_constraint (graph, c, pts, &expanded_pts);
2844 BITMAP_FREE (expanded_pts);
2846 solution_empty = bitmap_empty_p (solution);
2848 if (!solution_empty)
2850 bitmap_iterator bi;
2851 unsigned eff_escaped_id = find (escaped_id);
2853 /* Propagate solution to all successors. */
2854 unsigned to_remove = ~0U;
2855 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2856 0, j, bi)
2858 if (to_remove != ~0U)
2860 bitmap_clear_bit (graph->succs[i], to_remove);
2861 to_remove = ~0U;
2863 unsigned int to = find (j);
2864 if (to != j)
2866 /* Update the succ graph, avoiding duplicate
2867 work. */
2868 to_remove = j;
2869 if (! bitmap_set_bit (graph->succs[i], to))
2870 continue;
2871 /* We eventually end up processing 'to' twice
2872 as it is undefined whether bitmap iteration
2873 iterates over bits set during iteration.
2874 Play safe instead of doing tricks. */
2876 /* Don't try to propagate to ourselves. */
2877 if (to == i)
2878 continue;
2880 bitmap tmp = get_varinfo (to)->solution;
2881 bool flag = false;
2883 /* If we propagate from ESCAPED use ESCAPED as
2884 placeholder. */
2885 if (i == eff_escaped_id)
2886 flag = bitmap_set_bit (tmp, escaped_id);
2887 else
2888 flag = bitmap_ior_into (tmp, pts);
2890 if (flag)
2891 bitmap_set_bit (changed, to);
2893 if (to_remove != ~0U)
2894 bitmap_clear_bit (graph->succs[i], to_remove);
2898 free_topo_info (ti);
2899 bitmap_obstack_release (&iteration_obstack);
2902 BITMAP_FREE (pts);
2903 BITMAP_FREE (changed);
2904 bitmap_obstack_release (&oldpta_obstack);
2907 /* Map from trees to variable infos. */
2908 static hash_map<tree, varinfo_t> *vi_for_tree;
2911 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2913 static void
2914 insert_vi_for_tree (tree t, varinfo_t vi)
2916 gcc_assert (vi);
2917 gcc_assert (!vi_for_tree->put (t, vi));
2920 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2921 exist in the map, return NULL, otherwise, return the varinfo we found. */
2923 static varinfo_t
2924 lookup_vi_for_tree (tree t)
2926 varinfo_t *slot = vi_for_tree->get (t);
2927 if (slot == NULL)
2928 return NULL;
2930 return *slot;
2933 /* Return a printable name for DECL */
2935 static const char *
2936 alias_get_name (tree decl)
2938 const char *res = "NULL";
2939 if (dump_file)
2941 char *temp = NULL;
2942 if (TREE_CODE (decl) == SSA_NAME)
2944 res = get_name (decl);
2945 temp = xasprintf ("%s_%u", res ? res : "", SSA_NAME_VERSION (decl));
2947 else if (HAS_DECL_ASSEMBLER_NAME_P (decl)
2948 && DECL_ASSEMBLER_NAME_SET_P (decl))
2949 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl));
2950 else if (DECL_P (decl))
2952 res = get_name (decl);
2953 if (!res)
2954 temp = xasprintf ("D.%u", DECL_UID (decl));
2957 if (temp)
2959 res = ggc_strdup (temp);
2960 free (temp);
2964 return res;
2967 /* Find the variable id for tree T in the map.
2968 If T doesn't exist in the map, create an entry for it and return it. */
2970 static varinfo_t
2971 get_vi_for_tree (tree t)
2973 varinfo_t *slot = vi_for_tree->get (t);
2974 if (slot == NULL)
2976 unsigned int id = create_variable_info_for (t, alias_get_name (t), false);
2977 return get_varinfo (id);
2980 return *slot;
2983 /* Get a scalar constraint expression for a new temporary variable. */
2985 static struct constraint_expr
2986 new_scalar_tmp_constraint_exp (const char *name, bool add_id)
2988 struct constraint_expr tmp;
2989 varinfo_t vi;
2991 vi = new_var_info (NULL_TREE, name, add_id);
2992 vi->offset = 0;
2993 vi->size = -1;
2994 vi->fullsize = -1;
2995 vi->is_full_var = 1;
2996 vi->is_reg_var = 1;
2998 tmp.var = vi->id;
2999 tmp.type = SCALAR;
3000 tmp.offset = 0;
3002 return tmp;
3005 /* Get a constraint expression vector from an SSA_VAR_P node.
3006 If address_p is true, the result will be taken its address of. */
3008 static void
3009 get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
3011 struct constraint_expr cexpr;
3012 varinfo_t vi;
3014 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
3015 gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
3017 if (TREE_CODE (t) == SSA_NAME
3018 && SSA_NAME_IS_DEFAULT_DEF (t))
3020 /* For parameters, get at the points-to set for the actual parm
3021 decl. */
3022 if (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
3023 || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
3025 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
3026 return;
3028 /* For undefined SSA names return nothing. */
3029 else if (!ssa_defined_default_def_p (t))
3031 cexpr.var = nothing_id;
3032 cexpr.type = SCALAR;
3033 cexpr.offset = 0;
3034 results->safe_push (cexpr);
3035 return;
3039 /* For global variables resort to the alias target. */
3040 if (VAR_P (t) && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
3042 varpool_node *node = varpool_node::get (t);
3043 if (node && node->alias && node->analyzed)
3045 node = node->ultimate_alias_target ();
3046 /* Canonicalize the PT uid of all aliases to the ultimate target.
3047 ??? Hopefully the set of aliases can't change in a way that
3048 changes the ultimate alias target. */
3049 gcc_assert ((! DECL_PT_UID_SET_P (node->decl)
3050 || DECL_PT_UID (node->decl) == DECL_UID (node->decl))
3051 && (! DECL_PT_UID_SET_P (t)
3052 || DECL_PT_UID (t) == DECL_UID (node->decl)));
3053 DECL_PT_UID (t) = DECL_UID (node->decl);
3054 t = node->decl;
3057 /* If this is decl may bind to NULL note that. */
3058 if (address_p
3059 && (! node || ! node->nonzero_address ()))
3061 cexpr.var = nothing_id;
3062 cexpr.type = SCALAR;
3063 cexpr.offset = 0;
3064 results->safe_push (cexpr);
3068 vi = get_vi_for_tree (t);
3069 cexpr.var = vi->id;
3070 cexpr.type = SCALAR;
3071 cexpr.offset = 0;
3073 /* If we are not taking the address of the constraint expr, add all
3074 sub-fiels of the variable as well. */
3075 if (!address_p
3076 && !vi->is_full_var)
3078 for (; vi; vi = vi_next (vi))
3080 cexpr.var = vi->id;
3081 results->safe_push (cexpr);
3083 return;
3086 results->safe_push (cexpr);
3089 /* Process constraint T, performing various simplifications and then
3090 adding it to our list of overall constraints. */
3092 static void
3093 process_constraint (constraint_t t)
3095 struct constraint_expr rhs = t->rhs;
3096 struct constraint_expr lhs = t->lhs;
3098 gcc_assert (rhs.var < varmap.length ());
3099 gcc_assert (lhs.var < varmap.length ());
3101 /* If we didn't get any useful constraint from the lhs we get
3102 &ANYTHING as fallback from get_constraint_for. Deal with
3103 it here by turning it into *ANYTHING. */
3104 if (lhs.type == ADDRESSOF
3105 && lhs.var == anything_id)
3106 lhs.type = DEREF;
3108 /* ADDRESSOF on the lhs is invalid. */
3109 gcc_assert (lhs.type != ADDRESSOF);
3111 /* We shouldn't add constraints from things that cannot have pointers.
3112 It's not completely trivial to avoid in the callers, so do it here. */
3113 if (rhs.type != ADDRESSOF
3114 && !get_varinfo (rhs.var)->may_have_pointers)
3115 return;
3117 /* Likewise adding to the solution of a non-pointer var isn't useful. */
3118 if (!get_varinfo (lhs.var)->may_have_pointers)
3119 return;
3121 /* This can happen in our IR with things like n->a = *p */
3122 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
3124 /* Split into tmp = *rhs, *lhs = tmp */
3125 struct constraint_expr tmplhs;
3126 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp", true);
3127 process_constraint (new_constraint (tmplhs, rhs));
3128 process_constraint (new_constraint (lhs, tmplhs));
3130 else if ((rhs.type != SCALAR || rhs.offset != 0) && lhs.type == DEREF)
3132 /* Split into tmp = &rhs, *lhs = tmp */
3133 struct constraint_expr tmplhs;
3134 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp", true);
3135 process_constraint (new_constraint (tmplhs, rhs));
3136 process_constraint (new_constraint (lhs, tmplhs));
3138 else
3140 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
3141 if (rhs.type == ADDRESSOF)
3142 get_varinfo (get_varinfo (rhs.var)->head)->address_taken = true;
3143 constraints.safe_push (t);
3148 /* Return the position, in bits, of FIELD_DECL from the beginning of its
3149 structure. */
3151 static HOST_WIDE_INT
3152 bitpos_of_field (const tree fdecl)
3154 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3155 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3156 return -1;
3158 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3159 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
3163 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
3164 resulting constraint expressions in *RESULTS. */
3166 static void
3167 get_constraint_for_ptr_offset (tree ptr, tree offset,
3168 vec<ce_s> *results)
3170 struct constraint_expr c;
3171 unsigned int j, n;
3172 HOST_WIDE_INT rhsoffset;
3174 /* If we do not do field-sensitive PTA adding offsets to pointers
3175 does not change the points-to solution. */
3176 if (!use_field_sensitive)
3178 get_constraint_for_rhs (ptr, results);
3179 return;
3182 /* If the offset is not a non-negative integer constant that fits
3183 in a HOST_WIDE_INT, we have to fall back to a conservative
3184 solution which includes all sub-fields of all pointed-to
3185 variables of ptr. */
3186 if (offset == NULL_TREE
3187 || TREE_CODE (offset) != INTEGER_CST)
3188 rhsoffset = UNKNOWN_OFFSET;
3189 else
3191 /* Sign-extend the offset. */
3192 offset_int soffset = offset_int::from (wi::to_wide (offset), SIGNED);
3193 if (!wi::fits_shwi_p (soffset))
3194 rhsoffset = UNKNOWN_OFFSET;
3195 else
3197 /* Make sure the bit-offset also fits. */
3198 HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
3199 rhsoffset = rhsunitoffset * (unsigned HOST_WIDE_INT) BITS_PER_UNIT;
3200 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3201 rhsoffset = UNKNOWN_OFFSET;
3205 get_constraint_for_rhs (ptr, results);
3206 if (rhsoffset == 0)
3207 return;
3209 /* As we are eventually appending to the solution do not use
3210 vec::iterate here. */
3211 n = results->length ();
3212 for (j = 0; j < n; j++)
3214 varinfo_t curr;
3215 c = (*results)[j];
3216 curr = get_varinfo (c.var);
3218 if (c.type == ADDRESSOF
3219 /* If this varinfo represents a full variable just use it. */
3220 && curr->is_full_var)
3222 else if (c.type == ADDRESSOF
3223 /* If we do not know the offset add all subfields. */
3224 && rhsoffset == UNKNOWN_OFFSET)
3226 varinfo_t temp = get_varinfo (curr->head);
3229 struct constraint_expr c2;
3230 c2.var = temp->id;
3231 c2.type = ADDRESSOF;
3232 c2.offset = 0;
3233 if (c2.var != c.var)
3234 results->safe_push (c2);
3235 temp = vi_next (temp);
3237 while (temp);
3239 else if (c.type == ADDRESSOF)
3241 varinfo_t temp;
3242 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3244 /* If curr->offset + rhsoffset is less than zero adjust it. */
3245 if (rhsoffset < 0
3246 && curr->offset < offset)
3247 offset = 0;
3249 /* We have to include all fields that overlap the current
3250 field shifted by rhsoffset. And we include at least
3251 the last or the first field of the variable to represent
3252 reachability of off-bound addresses, in particular &object + 1,
3253 conservatively correct. */
3254 temp = first_or_preceding_vi_for_offset (curr, offset);
3255 c.var = temp->id;
3256 c.offset = 0;
3257 temp = vi_next (temp);
3258 while (temp
3259 && temp->offset < offset + curr->size)
3261 struct constraint_expr c2;
3262 c2.var = temp->id;
3263 c2.type = ADDRESSOF;
3264 c2.offset = 0;
3265 results->safe_push (c2);
3266 temp = vi_next (temp);
3269 else if (c.type == SCALAR)
3271 gcc_assert (c.offset == 0);
3272 c.offset = rhsoffset;
3274 else
3275 /* We shouldn't get any DEREFs here. */
3276 gcc_unreachable ();
3278 (*results)[j] = c;
3283 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3284 If address_p is true the result will be taken its address of.
3285 If lhs_p is true then the constraint expression is assumed to be used
3286 as the lhs. */
3288 static void
3289 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3290 bool address_p, bool lhs_p)
3292 tree orig_t = t;
3293 poly_int64 bitsize = -1;
3294 poly_int64 bitmaxsize = -1;
3295 poly_int64 bitpos;
3296 bool reverse;
3297 tree forzero;
3299 /* Some people like to do cute things like take the address of
3300 &0->a.b */
3301 forzero = t;
3302 while (handled_component_p (forzero)
3303 || INDIRECT_REF_P (forzero)
3304 || TREE_CODE (forzero) == MEM_REF)
3305 forzero = TREE_OPERAND (forzero, 0);
3307 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3309 struct constraint_expr temp;
3311 temp.offset = 0;
3312 temp.var = integer_id;
3313 temp.type = SCALAR;
3314 results->safe_push (temp);
3315 return;
3318 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse);
3320 /* We can end up here for component references on a
3321 VIEW_CONVERT_EXPR <>(&foobar) or things like a
3322 BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>. So for
3323 symbolic constants simply give up. */
3324 if (TREE_CODE (t) == ADDR_EXPR)
3326 constraint_expr result;
3327 result.type = SCALAR;
3328 result.var = anything_id;
3329 result.offset = 0;
3330 results->safe_push (result);
3331 return;
3334 /* Avoid creating pointer-offset constraints, so handle MEM_REF
3335 offsets directly. Pretend to take the address of the base,
3336 we'll take care of adding the required subset of sub-fields below. */
3337 if (TREE_CODE (t) == MEM_REF
3338 && !integer_zerop (TREE_OPERAND (t, 0)))
3340 poly_offset_int off = mem_ref_offset (t);
3341 off <<= LOG2_BITS_PER_UNIT;
3342 off += bitpos;
3343 poly_int64 off_hwi;
3344 if (off.to_shwi (&off_hwi))
3345 bitpos = off_hwi;
3346 else
3348 bitpos = 0;
3349 bitmaxsize = -1;
3351 get_constraint_for_1 (TREE_OPERAND (t, 0), results, false, lhs_p);
3352 do_deref (results);
3354 else
3355 get_constraint_for_1 (t, results, true, lhs_p);
3357 /* Strip off nothing_id. */
3358 if (results->length () == 2)
3360 gcc_assert ((*results)[0].var == nothing_id);
3361 results->unordered_remove (0);
3363 gcc_assert (results->length () == 1);
3364 struct constraint_expr &result = results->last ();
3366 if (result.type == SCALAR
3367 && get_varinfo (result.var)->is_full_var)
3368 /* For single-field vars do not bother about the offset. */
3369 result.offset = 0;
3370 else if (result.type == SCALAR)
3372 /* In languages like C, you can access one past the end of an
3373 array. You aren't allowed to dereference it, so we can
3374 ignore this constraint. When we handle pointer subtraction,
3375 we may have to do something cute here. */
3377 if (maybe_lt (poly_uint64 (bitpos), get_varinfo (result.var)->fullsize)
3378 && maybe_ne (bitmaxsize, 0))
3380 /* It's also not true that the constraint will actually start at the
3381 right offset, it may start in some padding. We only care about
3382 setting the constraint to the first actual field it touches, so
3383 walk to find it. */
3384 struct constraint_expr cexpr = result;
3385 varinfo_t curr;
3386 results->pop ();
3387 cexpr.offset = 0;
3388 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3390 if (ranges_maybe_overlap_p (poly_int64 (curr->offset),
3391 curr->size, bitpos, bitmaxsize))
3393 cexpr.var = curr->id;
3394 results->safe_push (cexpr);
3395 if (address_p)
3396 break;
3399 /* If we are going to take the address of this field then
3400 to be able to compute reachability correctly add at least
3401 the last field of the variable. */
3402 if (address_p && results->length () == 0)
3404 curr = get_varinfo (cexpr.var);
3405 while (curr->next != 0)
3406 curr = vi_next (curr);
3407 cexpr.var = curr->id;
3408 results->safe_push (cexpr);
3410 else if (results->length () == 0)
3411 /* Assert that we found *some* field there. The user couldn't be
3412 accessing *only* padding. */
3413 /* Still the user could access one past the end of an array
3414 embedded in a struct resulting in accessing *only* padding. */
3415 /* Or accessing only padding via type-punning to a type
3416 that has a filed just in padding space. */
3418 cexpr.type = SCALAR;
3419 cexpr.var = anything_id;
3420 cexpr.offset = 0;
3421 results->safe_push (cexpr);
3424 else if (known_eq (bitmaxsize, 0))
3426 if (dump_file && (dump_flags & TDF_DETAILS))
3427 fprintf (dump_file, "Access to zero-sized part of variable, "
3428 "ignoring\n");
3430 else
3431 if (dump_file && (dump_flags & TDF_DETAILS))
3432 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3434 else if (result.type == DEREF)
3436 /* If we do not know exactly where the access goes say so. Note
3437 that only for non-structure accesses we know that we access
3438 at most one subfiled of any variable. */
3439 HOST_WIDE_INT const_bitpos;
3440 if (!bitpos.is_constant (&const_bitpos)
3441 || const_bitpos == -1
3442 || maybe_ne (bitsize, bitmaxsize)
3443 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3444 || result.offset == UNKNOWN_OFFSET)
3445 result.offset = UNKNOWN_OFFSET;
3446 else
3447 result.offset += const_bitpos;
3449 else if (result.type == ADDRESSOF)
3451 /* We can end up here for component references on constants like
3452 VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i]. */
3453 result.type = SCALAR;
3454 result.var = anything_id;
3455 result.offset = 0;
3457 else
3458 gcc_unreachable ();
3462 /* Dereference the constraint expression CONS, and return the result.
3463 DEREF (ADDRESSOF) = SCALAR
3464 DEREF (SCALAR) = DEREF
3465 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3466 This is needed so that we can handle dereferencing DEREF constraints. */
3468 static void
3469 do_deref (vec<ce_s> *constraints)
3471 struct constraint_expr *c;
3472 unsigned int i = 0;
3474 FOR_EACH_VEC_ELT (*constraints, i, c)
3476 if (c->type == SCALAR)
3477 c->type = DEREF;
3478 else if (c->type == ADDRESSOF)
3479 c->type = SCALAR;
3480 else if (c->type == DEREF)
3482 struct constraint_expr tmplhs;
3483 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp", true);
3484 process_constraint (new_constraint (tmplhs, *c));
3485 c->var = tmplhs.var;
3487 else
3488 gcc_unreachable ();
3492 /* Given a tree T, return the constraint expression for taking the
3493 address of it. */
3495 static void
3496 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3498 struct constraint_expr *c;
3499 unsigned int i;
3501 get_constraint_for_1 (t, results, true, true);
3503 FOR_EACH_VEC_ELT (*results, i, c)
3505 if (c->type == DEREF)
3506 c->type = SCALAR;
3507 else
3508 c->type = ADDRESSOF;
3512 /* Given a tree T, return the constraint expression for it. */
3514 static void
3515 get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
3516 bool lhs_p)
3518 struct constraint_expr temp;
3520 /* x = integer is all glommed to a single variable, which doesn't
3521 point to anything by itself. That is, of course, unless it is an
3522 integer constant being treated as a pointer, in which case, we
3523 will return that this is really the addressof anything. This
3524 happens below, since it will fall into the default case. The only
3525 case we know something about an integer treated like a pointer is
3526 when it is the NULL pointer, and then we just say it points to
3527 NULL.
3529 Do not do that if -fno-delete-null-pointer-checks though, because
3530 in that case *NULL does not fail, so it _should_ alias *anything.
3531 It is not worth adding a new option or renaming the existing one,
3532 since this case is relatively obscure. */
3533 if ((TREE_CODE (t) == INTEGER_CST
3534 && integer_zerop (t))
3535 /* The only valid CONSTRUCTORs in gimple with pointer typed
3536 elements are zero-initializer. But in IPA mode we also
3537 process global initializers, so verify at least. */
3538 || (TREE_CODE (t) == CONSTRUCTOR
3539 && CONSTRUCTOR_NELTS (t) == 0))
3541 if (flag_delete_null_pointer_checks)
3542 temp.var = nothing_id;
3543 else
3544 temp.var = nonlocal_id;
3545 temp.type = ADDRESSOF;
3546 temp.offset = 0;
3547 results->safe_push (temp);
3548 return;
3551 /* String constants are read-only, ideally we'd have a CONST_DECL
3552 for those. */
3553 if (TREE_CODE (t) == STRING_CST)
3555 temp.var = string_id;
3556 temp.type = SCALAR;
3557 temp.offset = 0;
3558 results->safe_push (temp);
3559 return;
3562 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3564 case tcc_expression:
3566 switch (TREE_CODE (t))
3568 case ADDR_EXPR:
3569 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3570 return;
3571 default:;
3573 break;
3575 case tcc_reference:
3577 switch (TREE_CODE (t))
3579 case MEM_REF:
3581 struct constraint_expr cs;
3582 varinfo_t vi, curr;
3583 get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3584 TREE_OPERAND (t, 1), results);
3585 do_deref (results);
3587 /* If we are not taking the address then make sure to process
3588 all subvariables we might access. */
3589 if (address_p)
3590 return;
3592 cs = results->last ();
3593 if (cs.type == DEREF
3594 && type_can_have_subvars (TREE_TYPE (t)))
3596 /* For dereferences this means we have to defer it
3597 to solving time. */
3598 results->last ().offset = UNKNOWN_OFFSET;
3599 return;
3601 if (cs.type != SCALAR)
3602 return;
3604 vi = get_varinfo (cs.var);
3605 curr = vi_next (vi);
3606 if (!vi->is_full_var
3607 && curr)
3609 unsigned HOST_WIDE_INT size;
3610 if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
3611 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
3612 else
3613 size = -1;
3614 for (; curr; curr = vi_next (curr))
3616 if (curr->offset - vi->offset < size)
3618 cs.var = curr->id;
3619 results->safe_push (cs);
3621 else
3622 break;
3625 return;
3627 case ARRAY_REF:
3628 case ARRAY_RANGE_REF:
3629 case COMPONENT_REF:
3630 case IMAGPART_EXPR:
3631 case REALPART_EXPR:
3632 case BIT_FIELD_REF:
3633 get_constraint_for_component_ref (t, results, address_p, lhs_p);
3634 return;
3635 case VIEW_CONVERT_EXPR:
3636 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
3637 lhs_p);
3638 return;
3639 /* We are missing handling for TARGET_MEM_REF here. */
3640 default:;
3642 break;
3644 case tcc_exceptional:
3646 switch (TREE_CODE (t))
3648 case SSA_NAME:
3650 get_constraint_for_ssa_var (t, results, address_p);
3651 return;
3653 case CONSTRUCTOR:
3655 unsigned int i;
3656 tree val;
3657 auto_vec<ce_s> tmp;
3658 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3660 struct constraint_expr *rhsp;
3661 unsigned j;
3662 get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3663 FOR_EACH_VEC_ELT (tmp, j, rhsp)
3664 results->safe_push (*rhsp);
3665 tmp.truncate (0);
3667 /* We do not know whether the constructor was complete,
3668 so technically we have to add &NOTHING or &ANYTHING
3669 like we do for an empty constructor as well. */
3670 return;
3672 default:;
3674 break;
3676 case tcc_declaration:
3678 get_constraint_for_ssa_var (t, results, address_p);
3679 return;
3681 case tcc_constant:
3683 /* We cannot refer to automatic variables through constants. */
3684 temp.type = ADDRESSOF;
3685 temp.var = nonlocal_id;
3686 temp.offset = 0;
3687 results->safe_push (temp);
3688 return;
3690 default:;
3693 /* The default fallback is a constraint from anything. */
3694 temp.type = ADDRESSOF;
3695 temp.var = anything_id;
3696 temp.offset = 0;
3697 results->safe_push (temp);
3700 /* Given a gimple tree T, return the constraint expression vector for it. */
3702 static void
3703 get_constraint_for (tree t, vec<ce_s> *results)
3705 gcc_assert (results->length () == 0);
3707 get_constraint_for_1 (t, results, false, true);
3710 /* Given a gimple tree T, return the constraint expression vector for it
3711 to be used as the rhs of a constraint. */
3713 static void
3714 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3716 gcc_assert (results->length () == 0);
3718 get_constraint_for_1 (t, results, false, false);
3722 /* Efficiently generates constraints from all entries in *RHSC to all
3723 entries in *LHSC. */
3725 static void
3726 process_all_all_constraints (const vec<ce_s> &lhsc,
3727 const vec<ce_s> &rhsc)
3729 struct constraint_expr *lhsp, *rhsp;
3730 unsigned i, j;
3732 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3734 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3735 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3736 process_constraint (new_constraint (*lhsp, *rhsp));
3738 else
3740 struct constraint_expr tmp;
3741 tmp = new_scalar_tmp_constraint_exp ("allalltmp", true);
3742 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3743 process_constraint (new_constraint (tmp, *rhsp));
3744 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3745 process_constraint (new_constraint (*lhsp, tmp));
3749 /* Handle aggregate copies by expanding into copies of the respective
3750 fields of the structures. */
3752 static void
3753 do_structure_copy (tree lhsop, tree rhsop)
3755 struct constraint_expr *lhsp, *rhsp;
3756 auto_vec<ce_s> lhsc;
3757 auto_vec<ce_s> rhsc;
3758 unsigned j;
3760 get_constraint_for (lhsop, &lhsc);
3761 get_constraint_for_rhs (rhsop, &rhsc);
3762 lhsp = &lhsc[0];
3763 rhsp = &rhsc[0];
3764 if (lhsp->type == DEREF
3765 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3766 || rhsp->type == DEREF)
3768 if (lhsp->type == DEREF)
3770 gcc_assert (lhsc.length () == 1);
3771 lhsp->offset = UNKNOWN_OFFSET;
3773 if (rhsp->type == DEREF)
3775 gcc_assert (rhsc.length () == 1);
3776 rhsp->offset = UNKNOWN_OFFSET;
3778 process_all_all_constraints (lhsc, rhsc);
3780 else if (lhsp->type == SCALAR
3781 && (rhsp->type == SCALAR
3782 || rhsp->type == ADDRESSOF))
3784 HOST_WIDE_INT lhssize, lhsoffset;
3785 HOST_WIDE_INT rhssize, rhsoffset;
3786 bool reverse;
3787 unsigned k = 0;
3788 if (!get_ref_base_and_extent_hwi (lhsop, &lhsoffset, &lhssize, &reverse)
3789 || !get_ref_base_and_extent_hwi (rhsop, &rhsoffset, &rhssize,
3790 &reverse))
3792 process_all_all_constraints (lhsc, rhsc);
3793 return;
3795 for (j = 0; lhsc.iterate (j, &lhsp);)
3797 varinfo_t lhsv, rhsv;
3798 rhsp = &rhsc[k];
3799 lhsv = get_varinfo (lhsp->var);
3800 rhsv = get_varinfo (rhsp->var);
3801 if (lhsv->may_have_pointers
3802 && (lhsv->is_full_var
3803 || rhsv->is_full_var
3804 || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3805 rhsv->offset + lhsoffset, rhsv->size)))
3806 process_constraint (new_constraint (*lhsp, *rhsp));
3807 if (!rhsv->is_full_var
3808 && (lhsv->is_full_var
3809 || (lhsv->offset + rhsoffset + lhsv->size
3810 > rhsv->offset + lhsoffset + rhsv->size)))
3812 ++k;
3813 if (k >= rhsc.length ())
3814 break;
3816 else
3817 ++j;
3820 else
3821 gcc_unreachable ();
3824 /* Create constraints ID = { rhsc }. */
3826 static void
3827 make_constraints_to (unsigned id, const vec<ce_s> &rhsc)
3829 struct constraint_expr *c;
3830 struct constraint_expr includes;
3831 unsigned int j;
3833 includes.var = id;
3834 includes.offset = 0;
3835 includes.type = SCALAR;
3837 FOR_EACH_VEC_ELT (rhsc, j, c)
3838 process_constraint (new_constraint (includes, *c));
3841 /* Create a constraint ID = OP. */
3843 static void
3844 make_constraint_to (unsigned id, tree op)
3846 auto_vec<ce_s> rhsc;
3847 get_constraint_for_rhs (op, &rhsc);
3848 make_constraints_to (id, rhsc);
3851 /* Create a constraint ID = &FROM. */
3853 static void
3854 make_constraint_from (varinfo_t vi, int from)
3856 struct constraint_expr lhs, rhs;
3858 lhs.var = vi->id;
3859 lhs.offset = 0;
3860 lhs.type = SCALAR;
3862 rhs.var = from;
3863 rhs.offset = 0;
3864 rhs.type = ADDRESSOF;
3865 process_constraint (new_constraint (lhs, rhs));
3868 /* Create a constraint ID = FROM. */
3870 static void
3871 make_copy_constraint (varinfo_t vi, int from)
3873 struct constraint_expr lhs, rhs;
3875 lhs.var = vi->id;
3876 lhs.offset = 0;
3877 lhs.type = SCALAR;
3879 rhs.var = from;
3880 rhs.offset = 0;
3881 rhs.type = SCALAR;
3882 process_constraint (new_constraint (lhs, rhs));
3885 /* Make constraints necessary to make OP escape. */
3887 static void
3888 make_escape_constraint (tree op)
3890 make_constraint_to (escaped_id, op);
3893 /* Make constraint necessary to make all indirect references
3894 from VI escape. */
3896 static void
3897 make_indirect_escape_constraint (varinfo_t vi)
3899 struct constraint_expr lhs, rhs;
3900 /* escaped = *(VAR + UNKNOWN); */
3901 lhs.type = SCALAR;
3902 lhs.var = escaped_id;
3903 lhs.offset = 0;
3904 rhs.type = DEREF;
3905 rhs.var = vi->id;
3906 rhs.offset = UNKNOWN_OFFSET;
3907 process_constraint (new_constraint (lhs, rhs));
3910 /* Add constraints to that the solution of VI is transitively closed. */
3912 static void
3913 make_transitive_closure_constraints (varinfo_t vi)
3915 struct constraint_expr lhs, rhs;
3917 /* VAR = *(VAR + UNKNOWN); */
3918 lhs.type = SCALAR;
3919 lhs.var = vi->id;
3920 lhs.offset = 0;
3921 rhs.type = DEREF;
3922 rhs.var = vi->id;
3923 rhs.offset = UNKNOWN_OFFSET;
3924 process_constraint (new_constraint (lhs, rhs));
3927 /* Add constraints to that the solution of VI has all subvariables added. */
3929 static void
3930 make_any_offset_constraints (varinfo_t vi)
3932 struct constraint_expr lhs, rhs;
3934 /* VAR = VAR + UNKNOWN; */
3935 lhs.type = SCALAR;
3936 lhs.var = vi->id;
3937 lhs.offset = 0;
3938 rhs.type = SCALAR;
3939 rhs.var = vi->id;
3940 rhs.offset = UNKNOWN_OFFSET;
3941 process_constraint (new_constraint (lhs, rhs));
3944 /* Temporary storage for fake var decls. */
3945 struct obstack fake_var_decl_obstack;
3947 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3949 static tree
3950 build_fake_var_decl (tree type)
3952 tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
3953 memset (decl, 0, sizeof (struct tree_var_decl));
3954 TREE_SET_CODE (decl, VAR_DECL);
3955 TREE_TYPE (decl) = type;
3956 DECL_UID (decl) = allocate_decl_uid ();
3957 SET_DECL_PT_UID (decl, -1);
3958 layout_decl (decl, 0);
3959 return decl;
3962 /* Create a new artificial heap variable with NAME.
3963 Return the created variable. */
3965 static varinfo_t
3966 make_heapvar (const char *name, bool add_id)
3968 varinfo_t vi;
3969 tree heapvar;
3971 heapvar = build_fake_var_decl (ptr_type_node);
3972 DECL_EXTERNAL (heapvar) = 1;
3974 vi = new_var_info (heapvar, name, add_id);
3975 vi->is_heap_var = true;
3976 vi->is_unknown_size_var = true;
3977 vi->offset = 0;
3978 vi->fullsize = ~0;
3979 vi->size = ~0;
3980 vi->is_full_var = true;
3981 insert_vi_for_tree (heapvar, vi);
3983 return vi;
3986 /* Create a new artificial heap variable with NAME and make a
3987 constraint from it to LHS. Set flags according to a tag used
3988 for tracking restrict pointers. */
3990 static varinfo_t
3991 make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id)
3993 varinfo_t vi = make_heapvar (name, add_id);
3994 vi->is_restrict_var = 1;
3995 vi->is_global_var = 1;
3996 vi->may_have_pointers = 1;
3997 make_constraint_from (lhs, vi->id);
3998 return vi;
4001 /* Create a new artificial heap variable with NAME and make a
4002 constraint from it to LHS. Set flags according to a tag used
4003 for tracking restrict pointers and make the artificial heap
4004 point to global memory. */
4006 static varinfo_t
4007 make_constraint_from_global_restrict (varinfo_t lhs, const char *name,
4008 bool add_id)
4010 varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id);
4011 make_copy_constraint (vi, nonlocal_id);
4012 return vi;
4015 /* In IPA mode there are varinfos for different aspects of reach
4016 function designator. One for the points-to set of the return
4017 value, one for the variables that are clobbered by the function,
4018 one for its uses and one for each parameter (including a single
4019 glob for remaining variadic arguments). */
4021 enum { fi_clobbers = 1, fi_uses = 2,
4022 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
4024 /* Get a constraint for the requested part of a function designator FI
4025 when operating in IPA mode. */
4027 static struct constraint_expr
4028 get_function_part_constraint (varinfo_t fi, unsigned part)
4030 struct constraint_expr c;
4032 gcc_assert (in_ipa_mode);
4034 if (fi->id == anything_id)
4036 /* ??? We probably should have a ANYFN special variable. */
4037 c.var = anything_id;
4038 c.offset = 0;
4039 c.type = SCALAR;
4041 else if (fi->decl && TREE_CODE (fi->decl) == FUNCTION_DECL)
4043 varinfo_t ai = first_vi_for_offset (fi, part);
4044 if (ai)
4045 c.var = ai->id;
4046 else
4047 c.var = anything_id;
4048 c.offset = 0;
4049 c.type = SCALAR;
4051 else
4053 c.var = fi->id;
4054 c.offset = part;
4055 c.type = DEREF;
4058 return c;
4061 /* Produce constraints for argument ARG of call STMT with eaf flags
4062 FLAGS. RESULTS is array holding constraints for return value.
4063 CALLESCAPE_ID is variable where call loocal escapes are added.
4064 WRITES_GLOVEL_MEMORY is true if callee may write global memory. */
4066 static void
4067 handle_call_arg (gcall *stmt, tree arg, vec<ce_s> *results, int flags,
4068 int callescape_id, bool writes_global_memory)
4070 int relevant_indirect_flags = EAF_NO_INDIRECT_CLOBBER | EAF_NO_INDIRECT_READ
4071 | EAF_NO_INDIRECT_ESCAPE;
4072 int relevant_flags = relevant_indirect_flags
4073 | EAF_NO_DIRECT_CLOBBER
4074 | EAF_NO_DIRECT_READ
4075 | EAF_NO_DIRECT_ESCAPE;
4076 if (gimple_call_lhs (stmt))
4078 relevant_flags |= EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY;
4079 relevant_indirect_flags |= EAF_NOT_RETURNED_INDIRECTLY;
4081 /* If value is never read from it can not be returned indirectly
4082 (except through the escape solution).
4083 For all flags we get these implications right except for
4084 not_returned because we miss return functions in ipa-prop. */
4086 if (flags & EAF_NO_DIRECT_READ)
4087 flags |= EAF_NOT_RETURNED_INDIRECTLY;
4090 /* If the argument is not used we can ignore it.
4091 Similarly argument is invisile for us if it not clobbered, does not
4092 escape, is not read and can not be returned. */
4093 if ((flags & EAF_UNUSED) || ((flags & relevant_flags) == relevant_flags))
4094 return;
4096 /* Produce varinfo for direct accesses to ARG. */
4097 varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
4098 tem->is_reg_var = true;
4099 make_constraint_to (tem->id, arg);
4100 make_any_offset_constraints (tem);
4102 bool callarg_transitive = false;
4104 /* As an compile time optimization if we make no difference between
4105 direct and indirect accesses make arg transitively closed.
4106 This avoids the need to build indir arg and do everything twice. */
4107 if (((flags & EAF_NO_INDIRECT_CLOBBER) != 0)
4108 == ((flags & EAF_NO_DIRECT_CLOBBER) != 0)
4109 && (((flags & EAF_NO_INDIRECT_READ) != 0)
4110 == ((flags & EAF_NO_DIRECT_READ) != 0))
4111 && (((flags & EAF_NO_INDIRECT_ESCAPE) != 0)
4112 == ((flags & EAF_NO_DIRECT_ESCAPE) != 0))
4113 && (((flags & EAF_NOT_RETURNED_INDIRECTLY) != 0)
4114 == ((flags & EAF_NOT_RETURNED_DIRECTLY) != 0)))
4116 make_transitive_closure_constraints (tem);
4117 callarg_transitive = true;
4118 gcc_checking_assert (!(flags & EAF_NO_DIRECT_READ));
4121 /* If necessary, produce varinfo for indirect accesses to ARG. */
4122 varinfo_t indir_tem = NULL;
4123 if (!callarg_transitive
4124 && (flags & relevant_indirect_flags) != relevant_indirect_flags)
4126 struct constraint_expr lhs, rhs;
4127 indir_tem = new_var_info (NULL_TREE, "indircallarg", true);
4128 indir_tem->is_reg_var = true;
4130 /* indir_term = *tem. */
4131 lhs.type = SCALAR;
4132 lhs.var = indir_tem->id;
4133 lhs.offset = 0;
4135 rhs.type = DEREF;
4136 rhs.var = tem->id;
4137 rhs.offset = UNKNOWN_OFFSET;
4138 process_constraint (new_constraint (lhs, rhs));
4140 make_any_offset_constraints (indir_tem);
4142 /* If we do not read indirectly there is no need for transitive closure.
4143 We know there is only one level of indirection. */
4144 if (!(flags & EAF_NO_INDIRECT_READ))
4145 make_transitive_closure_constraints (indir_tem);
4146 gcc_checking_assert (!(flags & EAF_NO_DIRECT_READ));
4149 if (gimple_call_lhs (stmt))
4151 if (!(flags & EAF_NOT_RETURNED_DIRECTLY))
4153 struct constraint_expr cexpr;
4154 cexpr.var = tem->id;
4155 cexpr.type = SCALAR;
4156 cexpr.offset = 0;
4157 results->safe_push (cexpr);
4159 if (!callarg_transitive & !(flags & EAF_NOT_RETURNED_INDIRECTLY))
4161 struct constraint_expr cexpr;
4162 cexpr.var = indir_tem->id;
4163 cexpr.type = SCALAR;
4164 cexpr.offset = 0;
4165 results->safe_push (cexpr);
4169 if (!(flags & EAF_NO_DIRECT_READ))
4171 varinfo_t uses = get_call_use_vi (stmt);
4172 make_copy_constraint (uses, tem->id);
4173 if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_READ))
4174 make_copy_constraint (uses, indir_tem->id);
4176 else
4177 /* To read indirectly we need to read directly. */
4178 gcc_checking_assert (flags & EAF_NO_INDIRECT_READ);
4180 if (!(flags & EAF_NO_DIRECT_CLOBBER))
4182 struct constraint_expr lhs, rhs;
4184 /* *arg = callescape. */
4185 lhs.type = DEREF;
4186 lhs.var = tem->id;
4187 lhs.offset = 0;
4189 rhs.type = SCALAR;
4190 rhs.var = callescape_id;
4191 rhs.offset = 0;
4192 process_constraint (new_constraint (lhs, rhs));
4194 /* callclobbered = arg. */
4195 make_copy_constraint (get_call_clobber_vi (stmt), tem->id);
4197 if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_CLOBBER))
4199 struct constraint_expr lhs, rhs;
4201 /* *indir_arg = callescape. */
4202 lhs.type = DEREF;
4203 lhs.var = indir_tem->id;
4204 lhs.offset = 0;
4206 rhs.type = SCALAR;
4207 rhs.var = callescape_id;
4208 rhs.offset = 0;
4209 process_constraint (new_constraint (lhs, rhs));
4211 /* callclobbered = indir_arg. */
4212 make_copy_constraint (get_call_clobber_vi (stmt), indir_tem->id);
4215 if (!(flags & (EAF_NO_DIRECT_ESCAPE | EAF_NO_INDIRECT_ESCAPE)))
4217 struct constraint_expr lhs, rhs;
4219 /* callescape = arg; */
4220 lhs.var = callescape_id;
4221 lhs.offset = 0;
4222 lhs.type = SCALAR;
4224 rhs.var = tem->id;
4225 rhs.offset = 0;
4226 rhs.type = SCALAR;
4227 process_constraint (new_constraint (lhs, rhs));
4229 if (writes_global_memory)
4230 make_escape_constraint (arg);
4232 else if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_ESCAPE))
4234 struct constraint_expr lhs, rhs;
4236 /* callescape = *(indir_arg + UNKNOWN); */
4237 lhs.var = callescape_id;
4238 lhs.offset = 0;
4239 lhs.type = SCALAR;
4241 rhs.var = indir_tem->id;
4242 rhs.offset = 0;
4243 rhs.type = SCALAR;
4244 process_constraint (new_constraint (lhs, rhs));
4246 if (writes_global_memory)
4247 make_indirect_escape_constraint (tem);
4251 /* Determine global memory access of call STMT and update
4252 WRITES_GLOBAL_MEMORY, READS_GLOBAL_MEMORY and USES_GLOBAL_MEMORY. */
4254 static void
4255 determine_global_memory_access (gcall *stmt,
4256 bool *writes_global_memory,
4257 bool *reads_global_memory,
4258 bool *uses_global_memory)
4260 tree callee;
4261 cgraph_node *node;
4262 modref_summary *summary;
4264 /* We need to detrmine reads to set uses. */
4265 gcc_assert (!uses_global_memory || reads_global_memory);
4267 if ((callee = gimple_call_fndecl (stmt)) != NULL_TREE
4268 && (node = cgraph_node::get (callee)) != NULL
4269 && (summary = get_modref_function_summary (node)))
4271 if (writes_global_memory && *writes_global_memory)
4272 *writes_global_memory = summary->global_memory_written;
4273 if (reads_global_memory && *reads_global_memory)
4274 *reads_global_memory = summary->global_memory_read;
4275 if (reads_global_memory && uses_global_memory
4276 && !summary->calls_interposable
4277 && !*reads_global_memory && node->binds_to_current_def_p ())
4278 *uses_global_memory = false;
4280 if ((writes_global_memory && *writes_global_memory)
4281 || (uses_global_memory && *uses_global_memory)
4282 || (reads_global_memory && *reads_global_memory))
4284 attr_fnspec fnspec = gimple_call_fnspec (stmt);
4285 if (fnspec.known_p ())
4287 if (writes_global_memory
4288 && !fnspec.global_memory_written_p ())
4289 *writes_global_memory = false;
4290 if (reads_global_memory && !fnspec.global_memory_read_p ())
4292 *reads_global_memory = false;
4293 if (uses_global_memory)
4294 *uses_global_memory = false;
4300 /* For non-IPA mode, generate constraints necessary for a call on the
4301 RHS and collect return value constraint to RESULTS to be used later in
4302 handle_lhs_call.
4304 IMPLICIT_EAF_FLAGS are added to each function argument. If
4305 WRITES_GLOBAL_MEMORY is true function is assumed to possibly write to global
4306 memory. Similar for READS_GLOBAL_MEMORY. */
4308 static void
4309 handle_rhs_call (gcall *stmt, vec<ce_s> *results,
4310 int implicit_eaf_flags,
4311 bool writes_global_memory,
4312 bool reads_global_memory)
4314 determine_global_memory_access (stmt, &writes_global_memory,
4315 &reads_global_memory,
4316 NULL);
4318 varinfo_t callescape = new_var_info (NULL_TREE, "callescape", true);
4320 /* If function can use global memory, add it to callescape
4321 and to possible return values. If not we can still use/return addresses
4322 of global symbols. */
4323 struct constraint_expr lhs, rhs;
4325 lhs.type = SCALAR;
4326 lhs.var = callescape->id;
4327 lhs.offset = 0;
4329 rhs.type = reads_global_memory ? SCALAR : ADDRESSOF;
4330 rhs.var = nonlocal_id;
4331 rhs.offset = 0;
4333 process_constraint (new_constraint (lhs, rhs));
4334 results->safe_push (rhs);
4336 varinfo_t uses = get_call_use_vi (stmt);
4337 make_copy_constraint (uses, callescape->id);
4339 for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
4341 tree arg = gimple_call_arg (stmt, i);
4342 int flags = gimple_call_arg_flags (stmt, i);
4343 handle_call_arg (stmt, arg, results,
4344 flags | implicit_eaf_flags,
4345 callescape->id, writes_global_memory);
4348 /* The static chain escapes as well. */
4349 if (gimple_call_chain (stmt))
4350 handle_call_arg (stmt, gimple_call_chain (stmt), results,
4351 implicit_eaf_flags
4352 | gimple_call_static_chain_flags (stmt),
4353 callescape->id, writes_global_memory);
4355 /* And if we applied NRV the address of the return slot escapes as well. */
4356 if (gimple_call_return_slot_opt_p (stmt)
4357 && gimple_call_lhs (stmt) != NULL_TREE
4358 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
4360 int flags = gimple_call_retslot_flags (stmt);
4361 const int relevant_flags = EAF_NO_DIRECT_ESCAPE
4362 | EAF_NOT_RETURNED_DIRECTLY;
4364 if (!(flags & EAF_UNUSED) && (flags & relevant_flags) != relevant_flags)
4366 auto_vec<ce_s> tmpc;
4368 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
4370 if (!(flags & EAF_NO_DIRECT_ESCAPE))
4372 make_constraints_to (callescape->id, tmpc);
4373 if (writes_global_memory)
4374 make_constraints_to (escaped_id, tmpc);
4376 if (!(flags & EAF_NOT_RETURNED_DIRECTLY))
4378 struct constraint_expr *c;
4379 unsigned i;
4380 FOR_EACH_VEC_ELT (tmpc, i, c)
4381 results->safe_push (*c);
4387 /* For non-IPA mode, generate constraints necessary for a call
4388 that returns a pointer and assigns it to LHS. This simply makes
4389 the LHS point to global and escaped variables. */
4391 static void
4392 handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> &rhsc,
4393 tree fndecl)
4395 auto_vec<ce_s> lhsc;
4397 get_constraint_for (lhs, &lhsc);
4398 /* If the store is to a global decl make sure to
4399 add proper escape constraints. */
4400 lhs = get_base_address (lhs);
4401 if (lhs
4402 && DECL_P (lhs)
4403 && is_global_var (lhs))
4405 struct constraint_expr tmpc;
4406 tmpc.var = escaped_id;
4407 tmpc.offset = 0;
4408 tmpc.type = SCALAR;
4409 lhsc.safe_push (tmpc);
4412 /* If the call returns an argument unmodified override the rhs
4413 constraints. */
4414 if (flags & ERF_RETURNS_ARG
4415 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
4417 tree arg;
4418 rhsc.truncate (0);
4419 arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
4420 get_constraint_for (arg, &rhsc);
4421 process_all_all_constraints (lhsc, rhsc);
4422 rhsc.truncate (0);
4424 else if (flags & ERF_NOALIAS)
4426 varinfo_t vi;
4427 struct constraint_expr tmpc;
4428 rhsc.truncate (0);
4429 vi = make_heapvar ("HEAP", true);
4430 /* We are marking allocated storage local, we deal with it becoming
4431 global by escaping and setting of vars_contains_escaped_heap. */
4432 DECL_EXTERNAL (vi->decl) = 0;
4433 vi->is_global_var = 0;
4434 /* If this is not a real malloc call assume the memory was
4435 initialized and thus may point to global memory. All
4436 builtin functions with the malloc attribute behave in a sane way. */
4437 if (!fndecl
4438 || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
4439 make_constraint_from (vi, nonlocal_id);
4440 tmpc.var = vi->id;
4441 tmpc.offset = 0;
4442 tmpc.type = ADDRESSOF;
4443 rhsc.safe_push (tmpc);
4444 process_all_all_constraints (lhsc, rhsc);
4445 rhsc.truncate (0);
4447 else
4448 process_all_all_constraints (lhsc, rhsc);
4452 /* Return the varinfo for the callee of CALL. */
4454 static varinfo_t
4455 get_fi_for_callee (gcall *call)
4457 tree decl, fn = gimple_call_fn (call);
4459 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4460 fn = OBJ_TYPE_REF_EXPR (fn);
4462 /* If we can directly resolve the function being called, do so.
4463 Otherwise, it must be some sort of indirect expression that
4464 we should still be able to handle. */
4465 decl = gimple_call_addr_fndecl (fn);
4466 if (decl)
4467 return get_vi_for_tree (decl);
4469 /* If the function is anything other than a SSA name pointer we have no
4470 clue and should be getting ANYFN (well, ANYTHING for now). */
4471 if (!fn || TREE_CODE (fn) != SSA_NAME)
4472 return get_varinfo (anything_id);
4474 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4475 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4476 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4477 fn = SSA_NAME_VAR (fn);
4479 return get_vi_for_tree (fn);
4482 /* Create constraints for assigning call argument ARG to the incoming parameter
4483 INDEX of function FI. */
4485 static void
4486 find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg)
4488 struct constraint_expr lhs;
4489 lhs = get_function_part_constraint (fi, fi_parm_base + index);
4491 auto_vec<ce_s, 2> rhsc;
4492 get_constraint_for_rhs (arg, &rhsc);
4494 unsigned j;
4495 struct constraint_expr *rhsp;
4496 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4497 process_constraint (new_constraint (lhs, *rhsp));
4500 /* Return true if FNDECL may be part of another lto partition. */
4502 static bool
4503 fndecl_maybe_in_other_partition (tree fndecl)
4505 cgraph_node *fn_node = cgraph_node::get (fndecl);
4506 if (fn_node == NULL)
4507 return true;
4509 return fn_node->in_other_partition;
4512 /* Create constraints for the builtin call T. Return true if the call
4513 was handled, otherwise false. */
4515 static bool
4516 find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
4518 tree fndecl = gimple_call_fndecl (t);
4519 auto_vec<ce_s, 2> lhsc;
4520 auto_vec<ce_s, 4> rhsc;
4521 varinfo_t fi;
4523 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4524 /* ??? All builtins that are handled here need to be handled
4525 in the alias-oracle query functions explicitly! */
4526 switch (DECL_FUNCTION_CODE (fndecl))
4528 /* All the following functions return a pointer to the same object
4529 as their first argument points to. The functions do not add
4530 to the ESCAPED solution. The functions make the first argument
4531 pointed to memory point to what the second argument pointed to
4532 memory points to. */
4533 case BUILT_IN_STRCPY:
4534 case BUILT_IN_STRNCPY:
4535 case BUILT_IN_BCOPY:
4536 case BUILT_IN_MEMCPY:
4537 case BUILT_IN_MEMMOVE:
4538 case BUILT_IN_MEMPCPY:
4539 case BUILT_IN_STPCPY:
4540 case BUILT_IN_STPNCPY:
4541 case BUILT_IN_STRCAT:
4542 case BUILT_IN_STRNCAT:
4543 case BUILT_IN_STRCPY_CHK:
4544 case BUILT_IN_STRNCPY_CHK:
4545 case BUILT_IN_MEMCPY_CHK:
4546 case BUILT_IN_MEMMOVE_CHK:
4547 case BUILT_IN_MEMPCPY_CHK:
4548 case BUILT_IN_STPCPY_CHK:
4549 case BUILT_IN_STPNCPY_CHK:
4550 case BUILT_IN_STRCAT_CHK:
4551 case BUILT_IN_STRNCAT_CHK:
4552 case BUILT_IN_TM_MEMCPY:
4553 case BUILT_IN_TM_MEMMOVE:
4555 tree res = gimple_call_lhs (t);
4556 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4557 == BUILT_IN_BCOPY ? 1 : 0));
4558 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4559 == BUILT_IN_BCOPY ? 0 : 1));
4560 if (res != NULL_TREE)
4562 get_constraint_for (res, &lhsc);
4563 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4564 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4565 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4566 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4567 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4568 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4569 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4570 else
4571 get_constraint_for (dest, &rhsc);
4572 process_all_all_constraints (lhsc, rhsc);
4573 lhsc.truncate (0);
4574 rhsc.truncate (0);
4576 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4577 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4578 do_deref (&lhsc);
4579 do_deref (&rhsc);
4580 process_all_all_constraints (lhsc, rhsc);
4581 return true;
4583 case BUILT_IN_MEMSET:
4584 case BUILT_IN_MEMSET_CHK:
4585 case BUILT_IN_TM_MEMSET:
4587 tree res = gimple_call_lhs (t);
4588 tree dest = gimple_call_arg (t, 0);
4589 unsigned i;
4590 ce_s *lhsp;
4591 struct constraint_expr ac;
4592 if (res != NULL_TREE)
4594 get_constraint_for (res, &lhsc);
4595 get_constraint_for (dest, &rhsc);
4596 process_all_all_constraints (lhsc, rhsc);
4597 lhsc.truncate (0);
4599 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4600 do_deref (&lhsc);
4601 if (flag_delete_null_pointer_checks
4602 && integer_zerop (gimple_call_arg (t, 1)))
4604 ac.type = ADDRESSOF;
4605 ac.var = nothing_id;
4607 else
4609 ac.type = SCALAR;
4610 ac.var = integer_id;
4612 ac.offset = 0;
4613 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4614 process_constraint (new_constraint (*lhsp, ac));
4615 return true;
4617 case BUILT_IN_STACK_SAVE:
4618 case BUILT_IN_STACK_RESTORE:
4619 /* Nothing interesting happens. */
4620 return true;
4621 case BUILT_IN_ALLOCA:
4622 case BUILT_IN_ALLOCA_WITH_ALIGN:
4623 case BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX:
4625 tree ptr = gimple_call_lhs (t);
4626 if (ptr == NULL_TREE)
4627 return true;
4628 get_constraint_for (ptr, &lhsc);
4629 varinfo_t vi = make_heapvar ("HEAP", true);
4630 /* Alloca storage is never global. To exempt it from escaped
4631 handling make it a non-heap var. */
4632 DECL_EXTERNAL (vi->decl) = 0;
4633 vi->is_global_var = 0;
4634 vi->is_heap_var = 0;
4635 struct constraint_expr tmpc;
4636 tmpc.var = vi->id;
4637 tmpc.offset = 0;
4638 tmpc.type = ADDRESSOF;
4639 rhsc.safe_push (tmpc);
4640 process_all_all_constraints (lhsc, rhsc);
4641 return true;
4643 case BUILT_IN_POSIX_MEMALIGN:
4645 tree ptrptr = gimple_call_arg (t, 0);
4646 get_constraint_for (ptrptr, &lhsc);
4647 do_deref (&lhsc);
4648 varinfo_t vi = make_heapvar ("HEAP", true);
4649 /* We are marking allocated storage local, we deal with it becoming
4650 global by escaping and setting of vars_contains_escaped_heap. */
4651 DECL_EXTERNAL (vi->decl) = 0;
4652 vi->is_global_var = 0;
4653 struct constraint_expr tmpc;
4654 tmpc.var = vi->id;
4655 tmpc.offset = 0;
4656 tmpc.type = ADDRESSOF;
4657 rhsc.safe_push (tmpc);
4658 process_all_all_constraints (lhsc, rhsc);
4659 return true;
4661 case BUILT_IN_ASSUME_ALIGNED:
4663 tree res = gimple_call_lhs (t);
4664 tree dest = gimple_call_arg (t, 0);
4665 if (res != NULL_TREE)
4667 get_constraint_for (res, &lhsc);
4668 get_constraint_for (dest, &rhsc);
4669 process_all_all_constraints (lhsc, rhsc);
4671 return true;
4673 /* All the following functions do not return pointers, do not
4674 modify the points-to sets of memory reachable from their
4675 arguments and do not add to the ESCAPED solution. */
4676 case BUILT_IN_SINCOS:
4677 case BUILT_IN_SINCOSF:
4678 case BUILT_IN_SINCOSL:
4679 case BUILT_IN_FREXP:
4680 case BUILT_IN_FREXPF:
4681 case BUILT_IN_FREXPL:
4682 case BUILT_IN_GAMMA_R:
4683 case BUILT_IN_GAMMAF_R:
4684 case BUILT_IN_GAMMAL_R:
4685 case BUILT_IN_LGAMMA_R:
4686 case BUILT_IN_LGAMMAF_R:
4687 case BUILT_IN_LGAMMAL_R:
4688 case BUILT_IN_MODF:
4689 case BUILT_IN_MODFF:
4690 case BUILT_IN_MODFL:
4691 case BUILT_IN_REMQUO:
4692 case BUILT_IN_REMQUOF:
4693 case BUILT_IN_REMQUOL:
4694 case BUILT_IN_FREE:
4695 return true;
4696 case BUILT_IN_STRDUP:
4697 case BUILT_IN_STRNDUP:
4698 case BUILT_IN_REALLOC:
4699 if (gimple_call_lhs (t))
4701 auto_vec<ce_s> rhsc;
4702 handle_lhs_call (t, gimple_call_lhs (t),
4703 gimple_call_return_flags (t) | ERF_NOALIAS,
4704 rhsc, fndecl);
4705 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4706 NULL_TREE, &lhsc);
4707 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4708 NULL_TREE, &rhsc);
4709 do_deref (&lhsc);
4710 do_deref (&rhsc);
4711 process_all_all_constraints (lhsc, rhsc);
4712 lhsc.truncate (0);
4713 rhsc.truncate (0);
4714 /* For realloc the resulting pointer can be equal to the
4715 argument as well. But only doing this wouldn't be
4716 correct because with ptr == 0 realloc behaves like malloc. */
4717 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
4719 get_constraint_for (gimple_call_lhs (t), &lhsc);
4720 get_constraint_for (gimple_call_arg (t, 0), &rhsc);
4721 process_all_all_constraints (lhsc, rhsc);
4723 return true;
4725 break;
4726 /* String / character search functions return a pointer into the
4727 source string or NULL. */
4728 case BUILT_IN_INDEX:
4729 case BUILT_IN_STRCHR:
4730 case BUILT_IN_STRRCHR:
4731 case BUILT_IN_MEMCHR:
4732 case BUILT_IN_STRSTR:
4733 case BUILT_IN_STRPBRK:
4734 if (gimple_call_lhs (t))
4736 tree src = gimple_call_arg (t, 0);
4737 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4738 constraint_expr nul;
4739 nul.var = nothing_id;
4740 nul.offset = 0;
4741 nul.type = ADDRESSOF;
4742 rhsc.safe_push (nul);
4743 get_constraint_for (gimple_call_lhs (t), &lhsc);
4744 process_all_all_constraints (lhsc, rhsc);
4746 return true;
4747 /* Pure functions that return something not based on any object and
4748 that use the memory pointed to by their arguments (but not
4749 transitively). */
4750 case BUILT_IN_STRCMP:
4751 case BUILT_IN_STRCMP_EQ:
4752 case BUILT_IN_STRNCMP:
4753 case BUILT_IN_STRNCMP_EQ:
4754 case BUILT_IN_STRCASECMP:
4755 case BUILT_IN_STRNCASECMP:
4756 case BUILT_IN_MEMCMP:
4757 case BUILT_IN_BCMP:
4758 case BUILT_IN_STRSPN:
4759 case BUILT_IN_STRCSPN:
4761 varinfo_t uses = get_call_use_vi (t);
4762 make_any_offset_constraints (uses);
4763 make_constraint_to (uses->id, gimple_call_arg (t, 0));
4764 make_constraint_to (uses->id, gimple_call_arg (t, 1));
4765 /* No constraints are necessary for the return value. */
4766 return true;
4768 case BUILT_IN_STRLEN:
4770 varinfo_t uses = get_call_use_vi (t);
4771 make_any_offset_constraints (uses);
4772 make_constraint_to (uses->id, gimple_call_arg (t, 0));
4773 /* No constraints are necessary for the return value. */
4774 return true;
4776 case BUILT_IN_OBJECT_SIZE:
4777 case BUILT_IN_CONSTANT_P:
4779 /* No constraints are necessary for the return value or the
4780 arguments. */
4781 return true;
4783 /* Trampolines are special - they set up passing the static
4784 frame. */
4785 case BUILT_IN_INIT_TRAMPOLINE:
4787 tree tramp = gimple_call_arg (t, 0);
4788 tree nfunc = gimple_call_arg (t, 1);
4789 tree frame = gimple_call_arg (t, 2);
4790 unsigned i;
4791 struct constraint_expr lhs, *rhsp;
4792 if (in_ipa_mode)
4794 varinfo_t nfi = NULL;
4795 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4796 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4797 if (nfi)
4799 lhs = get_function_part_constraint (nfi, fi_static_chain);
4800 get_constraint_for (frame, &rhsc);
4801 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4802 process_constraint (new_constraint (lhs, *rhsp));
4803 rhsc.truncate (0);
4805 /* Make the frame point to the function for
4806 the trampoline adjustment call. */
4807 get_constraint_for (tramp, &lhsc);
4808 do_deref (&lhsc);
4809 get_constraint_for (nfunc, &rhsc);
4810 process_all_all_constraints (lhsc, rhsc);
4812 return true;
4815 /* Else fallthru to generic handling which will let
4816 the frame escape. */
4817 break;
4819 case BUILT_IN_ADJUST_TRAMPOLINE:
4821 tree tramp = gimple_call_arg (t, 0);
4822 tree res = gimple_call_lhs (t);
4823 if (in_ipa_mode && res)
4825 get_constraint_for (res, &lhsc);
4826 get_constraint_for (tramp, &rhsc);
4827 do_deref (&rhsc);
4828 process_all_all_constraints (lhsc, rhsc);
4830 return true;
4832 CASE_BUILT_IN_TM_STORE (1):
4833 CASE_BUILT_IN_TM_STORE (2):
4834 CASE_BUILT_IN_TM_STORE (4):
4835 CASE_BUILT_IN_TM_STORE (8):
4836 CASE_BUILT_IN_TM_STORE (FLOAT):
4837 CASE_BUILT_IN_TM_STORE (DOUBLE):
4838 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4839 CASE_BUILT_IN_TM_STORE (M64):
4840 CASE_BUILT_IN_TM_STORE (M128):
4841 CASE_BUILT_IN_TM_STORE (M256):
4843 tree addr = gimple_call_arg (t, 0);
4844 tree src = gimple_call_arg (t, 1);
4846 get_constraint_for (addr, &lhsc);
4847 do_deref (&lhsc);
4848 get_constraint_for (src, &rhsc);
4849 process_all_all_constraints (lhsc, rhsc);
4850 return true;
4852 CASE_BUILT_IN_TM_LOAD (1):
4853 CASE_BUILT_IN_TM_LOAD (2):
4854 CASE_BUILT_IN_TM_LOAD (4):
4855 CASE_BUILT_IN_TM_LOAD (8):
4856 CASE_BUILT_IN_TM_LOAD (FLOAT):
4857 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4858 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4859 CASE_BUILT_IN_TM_LOAD (M64):
4860 CASE_BUILT_IN_TM_LOAD (M128):
4861 CASE_BUILT_IN_TM_LOAD (M256):
4863 tree dest = gimple_call_lhs (t);
4864 tree addr = gimple_call_arg (t, 0);
4866 get_constraint_for (dest, &lhsc);
4867 get_constraint_for (addr, &rhsc);
4868 do_deref (&rhsc);
4869 process_all_all_constraints (lhsc, rhsc);
4870 return true;
4872 /* Variadic argument handling needs to be handled in IPA
4873 mode as well. */
4874 case BUILT_IN_VA_START:
4876 tree valist = gimple_call_arg (t, 0);
4877 struct constraint_expr rhs, *lhsp;
4878 unsigned i;
4879 get_constraint_for_ptr_offset (valist, NULL_TREE, &lhsc);
4880 do_deref (&lhsc);
4881 /* The va_list gets access to pointers in variadic
4882 arguments. Which we know in the case of IPA analysis
4883 and otherwise are just all nonlocal variables. */
4884 if (in_ipa_mode)
4886 fi = lookup_vi_for_tree (fn->decl);
4887 rhs = get_function_part_constraint (fi, ~0);
4888 rhs.type = ADDRESSOF;
4890 else
4892 rhs.var = nonlocal_id;
4893 rhs.type = ADDRESSOF;
4894 rhs.offset = 0;
4896 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4897 process_constraint (new_constraint (*lhsp, rhs));
4898 /* va_list is clobbered. */
4899 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4900 return true;
4902 /* va_end doesn't have any effect that matters. */
4903 case BUILT_IN_VA_END:
4904 return true;
4905 /* Alternate return. Simply give up for now. */
4906 case BUILT_IN_RETURN:
4908 fi = NULL;
4909 if (!in_ipa_mode
4910 || !(fi = get_vi_for_tree (fn->decl)))
4911 make_constraint_from (get_varinfo (escaped_id), anything_id);
4912 else if (in_ipa_mode
4913 && fi != NULL)
4915 struct constraint_expr lhs, rhs;
4916 lhs = get_function_part_constraint (fi, fi_result);
4917 rhs.var = anything_id;
4918 rhs.offset = 0;
4919 rhs.type = SCALAR;
4920 process_constraint (new_constraint (lhs, rhs));
4922 return true;
4924 case BUILT_IN_GOMP_PARALLEL:
4925 case BUILT_IN_GOACC_PARALLEL:
4927 if (in_ipa_mode)
4929 unsigned int fnpos, argpos;
4930 switch (DECL_FUNCTION_CODE (fndecl))
4932 case BUILT_IN_GOMP_PARALLEL:
4933 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
4934 fnpos = 0;
4935 argpos = 1;
4936 break;
4937 case BUILT_IN_GOACC_PARALLEL:
4938 /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
4939 sizes, kinds, ...). */
4940 fnpos = 1;
4941 argpos = 3;
4942 break;
4943 default:
4944 gcc_unreachable ();
4947 tree fnarg = gimple_call_arg (t, fnpos);
4948 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
4949 tree fndecl = TREE_OPERAND (fnarg, 0);
4950 if (fndecl_maybe_in_other_partition (fndecl))
4951 /* Fallthru to general call handling. */
4952 break;
4954 tree arg = gimple_call_arg (t, argpos);
4956 varinfo_t fi = get_vi_for_tree (fndecl);
4957 find_func_aliases_for_call_arg (fi, 0, arg);
4958 return true;
4960 /* Else fallthru to generic call handling. */
4961 break;
4963 /* printf-style functions may have hooks to set pointers to
4964 point to somewhere into the generated string. Leave them
4965 for a later exercise... */
4966 default:
4967 /* Fallthru to general call handling. */;
4970 return false;
4973 /* Create constraints for the call T. */
4975 static void
4976 find_func_aliases_for_call (struct function *fn, gcall *t)
4978 tree fndecl = gimple_call_fndecl (t);
4979 varinfo_t fi;
4981 if (fndecl != NULL_TREE
4982 && fndecl_built_in_p (fndecl)
4983 && find_func_aliases_for_builtin_call (fn, t))
4984 return;
4986 if (gimple_call_internal_p (t, IFN_DEFERRED_INIT))
4987 return;
4989 fi = get_fi_for_callee (t);
4990 if (!in_ipa_mode
4991 || (fi->decl && fndecl && !fi->is_fn_info))
4993 auto_vec<ce_s, 16> rhsc;
4994 int flags = gimple_call_flags (t);
4996 /* Const functions can return their arguments and addresses
4997 of global memory but not of escaped memory. */
4998 if (flags & (ECF_CONST|ECF_NOVOPS))
5000 if (gimple_call_lhs (t))
5001 handle_rhs_call (t, &rhsc, implicit_const_eaf_flags, false, false);
5003 /* Pure functions can return addresses in and of memory
5004 reachable from their arguments, but they are not an escape
5005 point for reachable memory of their arguments. */
5006 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
5007 handle_rhs_call (t, &rhsc, implicit_pure_eaf_flags, false, true);
5008 /* If the call is to a replaceable operator delete and results
5009 from a delete expression as opposed to a direct call to
5010 such operator, then the effects for PTA (in particular
5011 the escaping of the pointer) can be ignored. */
5012 else if (fndecl
5013 && DECL_IS_OPERATOR_DELETE_P (fndecl)
5014 && gimple_call_from_new_or_delete (t))
5016 else
5017 handle_rhs_call (t, &rhsc, 0, true, true);
5018 if (gimple_call_lhs (t))
5019 handle_lhs_call (t, gimple_call_lhs (t),
5020 gimple_call_return_flags (t), rhsc, fndecl);
5022 else
5024 auto_vec<ce_s, 2> rhsc;
5025 tree lhsop;
5026 unsigned j;
5028 /* Assign all the passed arguments to the appropriate incoming
5029 parameters of the function. */
5030 for (j = 0; j < gimple_call_num_args (t); j++)
5032 tree arg = gimple_call_arg (t, j);
5033 find_func_aliases_for_call_arg (fi, j, arg);
5036 /* If we are returning a value, assign it to the result. */
5037 lhsop = gimple_call_lhs (t);
5038 if (lhsop)
5040 auto_vec<ce_s, 2> lhsc;
5041 struct constraint_expr rhs;
5042 struct constraint_expr *lhsp;
5043 bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (t));
5045 get_constraint_for (lhsop, &lhsc);
5046 rhs = get_function_part_constraint (fi, fi_result);
5047 if (aggr_p)
5049 auto_vec<ce_s, 2> tem;
5050 tem.quick_push (rhs);
5051 do_deref (&tem);
5052 gcc_checking_assert (tem.length () == 1);
5053 rhs = tem[0];
5055 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
5056 process_constraint (new_constraint (*lhsp, rhs));
5058 /* If we pass the result decl by reference, honor that. */
5059 if (aggr_p)
5061 struct constraint_expr lhs;
5062 struct constraint_expr *rhsp;
5064 get_constraint_for_address_of (lhsop, &rhsc);
5065 lhs = get_function_part_constraint (fi, fi_result);
5066 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5067 process_constraint (new_constraint (lhs, *rhsp));
5068 rhsc.truncate (0);
5072 /* If we use a static chain, pass it along. */
5073 if (gimple_call_chain (t))
5075 struct constraint_expr lhs;
5076 struct constraint_expr *rhsp;
5078 get_constraint_for (gimple_call_chain (t), &rhsc);
5079 lhs = get_function_part_constraint (fi, fi_static_chain);
5080 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5081 process_constraint (new_constraint (lhs, *rhsp));
5086 /* Walk statement T setting up aliasing constraints according to the
5087 references found in T. This function is the main part of the
5088 constraint builder. AI points to auxiliary alias information used
5089 when building alias sets and computing alias grouping heuristics. */
5091 static void
5092 find_func_aliases (struct function *fn, gimple *origt)
5094 gimple *t = origt;
5095 auto_vec<ce_s, 16> lhsc;
5096 auto_vec<ce_s, 16> rhsc;
5097 varinfo_t fi;
5099 /* Now build constraints expressions. */
5100 if (gimple_code (t) == GIMPLE_PHI)
5102 /* For a phi node, assign all the arguments to
5103 the result. */
5104 get_constraint_for (gimple_phi_result (t), &lhsc);
5105 for (unsigned i = 0; i < gimple_phi_num_args (t); i++)
5107 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
5108 process_all_all_constraints (lhsc, rhsc);
5109 rhsc.truncate (0);
5112 /* In IPA mode, we need to generate constraints to pass call
5113 arguments through their calls. There are two cases,
5114 either a GIMPLE_CALL returning a value, or just a plain
5115 GIMPLE_CALL when we are not.
5117 In non-ipa mode, we need to generate constraints for each
5118 pointer passed by address. */
5119 else if (is_gimple_call (t))
5120 find_func_aliases_for_call (fn, as_a <gcall *> (t));
5122 /* Otherwise, just a regular assignment statement. Only care about
5123 operations with pointer result, others are dealt with as escape
5124 points if they have pointer operands. */
5125 else if (is_gimple_assign (t))
5127 /* Otherwise, just a regular assignment statement. */
5128 tree lhsop = gimple_assign_lhs (t);
5129 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
5131 if (rhsop && TREE_CLOBBER_P (rhsop))
5132 /* Ignore clobbers, they don't actually store anything into
5133 the LHS. */
5135 else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
5136 do_structure_copy (lhsop, rhsop);
5137 else
5139 enum tree_code code = gimple_assign_rhs_code (t);
5141 get_constraint_for (lhsop, &lhsc);
5143 if (code == POINTER_PLUS_EXPR)
5144 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
5145 gimple_assign_rhs2 (t), &rhsc);
5146 else if (code == POINTER_DIFF_EXPR)
5147 /* The result is not a pointer (part). */
5149 else if (code == BIT_AND_EXPR
5150 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
5152 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
5153 the pointer. Handle it by offsetting it by UNKNOWN. */
5154 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
5155 NULL_TREE, &rhsc);
5157 else if (code == TRUNC_DIV_EXPR
5158 || code == CEIL_DIV_EXPR
5159 || code == FLOOR_DIV_EXPR
5160 || code == ROUND_DIV_EXPR
5161 || code == EXACT_DIV_EXPR
5162 || code == TRUNC_MOD_EXPR
5163 || code == CEIL_MOD_EXPR
5164 || code == FLOOR_MOD_EXPR
5165 || code == ROUND_MOD_EXPR)
5166 /* Division and modulo transfer the pointer from the LHS. */
5167 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
5168 NULL_TREE, &rhsc);
5169 else if (CONVERT_EXPR_CODE_P (code)
5170 || gimple_assign_single_p (t))
5171 /* See through conversions, single RHS are handled by
5172 get_constraint_for_rhs. */
5173 get_constraint_for_rhs (rhsop, &rhsc);
5174 else if (code == COND_EXPR)
5176 /* The result is a merge of both COND_EXPR arms. */
5177 auto_vec<ce_s, 2> tmp;
5178 struct constraint_expr *rhsp;
5179 unsigned i;
5180 get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
5181 get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
5182 FOR_EACH_VEC_ELT (tmp, i, rhsp)
5183 rhsc.safe_push (*rhsp);
5185 else if (truth_value_p (code))
5186 /* Truth value results are not pointer (parts). Or at least
5187 very unreasonable obfuscation of a part. */
5189 else
5191 /* All other operations are possibly offsetting merges. */
5192 auto_vec<ce_s, 4> tmp;
5193 struct constraint_expr *rhsp;
5194 unsigned i, j;
5195 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
5196 NULL_TREE, &rhsc);
5197 for (i = 2; i < gimple_num_ops (t); ++i)
5199 get_constraint_for_ptr_offset (gimple_op (t, i),
5200 NULL_TREE, &tmp);
5201 FOR_EACH_VEC_ELT (tmp, j, rhsp)
5202 rhsc.safe_push (*rhsp);
5203 tmp.truncate (0);
5206 process_all_all_constraints (lhsc, rhsc);
5208 /* If there is a store to a global variable the rhs escapes. */
5209 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
5210 && DECL_P (lhsop))
5212 varinfo_t vi = get_vi_for_tree (lhsop);
5213 if ((! in_ipa_mode && vi->is_global_var)
5214 || vi->is_ipa_escape_point)
5215 make_escape_constraint (rhsop);
5218 /* Handle escapes through return. */
5219 else if (gimple_code (t) == GIMPLE_RETURN
5220 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
5222 greturn *return_stmt = as_a <greturn *> (t);
5223 fi = NULL;
5224 if (!in_ipa_mode
5225 && SSA_VAR_P (gimple_return_retval (return_stmt)))
5227 /* We handle simple returns by post-processing the solutions. */
5230 if (!(fi = get_vi_for_tree (fn->decl)))
5231 make_escape_constraint (gimple_return_retval (return_stmt));
5232 else if (in_ipa_mode)
5234 struct constraint_expr lhs ;
5235 struct constraint_expr *rhsp;
5236 unsigned i;
5238 lhs = get_function_part_constraint (fi, fi_result);
5239 get_constraint_for_rhs (gimple_return_retval (return_stmt), &rhsc);
5240 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5241 process_constraint (new_constraint (lhs, *rhsp));
5244 /* Handle asms conservatively by adding escape constraints to everything. */
5245 else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
5247 unsigned i, noutputs;
5248 const char **oconstraints;
5249 const char *constraint;
5250 bool allows_mem, allows_reg, is_inout;
5252 noutputs = gimple_asm_noutputs (asm_stmt);
5253 oconstraints = XALLOCAVEC (const char *, noutputs);
5255 for (i = 0; i < noutputs; ++i)
5257 tree link = gimple_asm_output_op (asm_stmt, i);
5258 tree op = TREE_VALUE (link);
5260 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5261 oconstraints[i] = constraint;
5262 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
5263 &allows_reg, &is_inout);
5265 /* A memory constraint makes the address of the operand escape. */
5266 if (!allows_reg && allows_mem)
5267 make_escape_constraint (build_fold_addr_expr (op));
5269 /* The asm may read global memory, so outputs may point to
5270 any global memory. */
5271 if (op)
5273 auto_vec<ce_s, 2> lhsc;
5274 struct constraint_expr rhsc, *lhsp;
5275 unsigned j;
5276 get_constraint_for (op, &lhsc);
5277 rhsc.var = nonlocal_id;
5278 rhsc.offset = 0;
5279 rhsc.type = SCALAR;
5280 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
5281 process_constraint (new_constraint (*lhsp, rhsc));
5284 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5286 tree link = gimple_asm_input_op (asm_stmt, i);
5287 tree op = TREE_VALUE (link);
5289 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5291 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
5292 &allows_mem, &allows_reg);
5294 /* A memory constraint makes the address of the operand escape. */
5295 if (!allows_reg && allows_mem)
5296 make_escape_constraint (build_fold_addr_expr (op));
5297 /* Strictly we'd only need the constraint to ESCAPED if
5298 the asm clobbers memory, otherwise using something
5299 along the lines of per-call clobbers/uses would be enough. */
5300 else if (op)
5301 make_escape_constraint (op);
5307 /* Create a constraint adding to the clobber set of FI the memory
5308 pointed to by PTR. */
5310 static void
5311 process_ipa_clobber (varinfo_t fi, tree ptr)
5313 vec<ce_s> ptrc = vNULL;
5314 struct constraint_expr *c, lhs;
5315 unsigned i;
5316 get_constraint_for_rhs (ptr, &ptrc);
5317 lhs = get_function_part_constraint (fi, fi_clobbers);
5318 FOR_EACH_VEC_ELT (ptrc, i, c)
5319 process_constraint (new_constraint (lhs, *c));
5320 ptrc.release ();
5323 /* Walk statement T setting up clobber and use constraints according to the
5324 references found in T. This function is a main part of the
5325 IPA constraint builder. */
5327 static void
5328 find_func_clobbers (struct function *fn, gimple *origt)
5330 gimple *t = origt;
5331 auto_vec<ce_s, 16> lhsc;
5332 auto_vec<ce_s, 16> rhsc;
5333 varinfo_t fi;
5335 /* Add constraints for clobbered/used in IPA mode.
5336 We are not interested in what automatic variables are clobbered
5337 or used as we only use the information in the caller to which
5338 they do not escape. */
5339 gcc_assert (in_ipa_mode);
5341 /* If the stmt refers to memory in any way it better had a VUSE. */
5342 if (gimple_vuse (t) == NULL_TREE)
5343 return;
5345 /* We'd better have function information for the current function. */
5346 fi = lookup_vi_for_tree (fn->decl);
5347 gcc_assert (fi != NULL);
5349 /* Account for stores in assignments and calls. */
5350 if (gimple_vdef (t) != NULL_TREE
5351 && gimple_has_lhs (t))
5353 tree lhs = gimple_get_lhs (t);
5354 tree tem = lhs;
5355 while (handled_component_p (tem))
5356 tem = TREE_OPERAND (tem, 0);
5357 if ((DECL_P (tem)
5358 && !auto_var_in_fn_p (tem, fn->decl))
5359 || INDIRECT_REF_P (tem)
5360 || (TREE_CODE (tem) == MEM_REF
5361 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5362 && auto_var_in_fn_p
5363 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5365 struct constraint_expr lhsc, *rhsp;
5366 unsigned i;
5367 lhsc = get_function_part_constraint (fi, fi_clobbers);
5368 get_constraint_for_address_of (lhs, &rhsc);
5369 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5370 process_constraint (new_constraint (lhsc, *rhsp));
5371 rhsc.truncate (0);
5375 /* Account for uses in assigments and returns. */
5376 if (gimple_assign_single_p (t)
5377 || (gimple_code (t) == GIMPLE_RETURN
5378 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
5380 tree rhs = (gimple_assign_single_p (t)
5381 ? gimple_assign_rhs1 (t)
5382 : gimple_return_retval (as_a <greturn *> (t)));
5383 tree tem = rhs;
5384 while (handled_component_p (tem))
5385 tem = TREE_OPERAND (tem, 0);
5386 if ((DECL_P (tem)
5387 && !auto_var_in_fn_p (tem, fn->decl))
5388 || INDIRECT_REF_P (tem)
5389 || (TREE_CODE (tem) == MEM_REF
5390 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5391 && auto_var_in_fn_p
5392 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5394 struct constraint_expr lhs, *rhsp;
5395 unsigned i;
5396 lhs = get_function_part_constraint (fi, fi_uses);
5397 get_constraint_for_address_of (rhs, &rhsc);
5398 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5399 process_constraint (new_constraint (lhs, *rhsp));
5400 rhsc.truncate (0);
5404 if (gcall *call_stmt = dyn_cast <gcall *> (t))
5406 varinfo_t cfi = NULL;
5407 tree decl = gimple_call_fndecl (t);
5408 struct constraint_expr lhs, rhs;
5409 unsigned i, j;
5411 /* For builtins we do not have separate function info. For those
5412 we do not generate escapes for we have to generate clobbers/uses. */
5413 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
5414 switch (DECL_FUNCTION_CODE (decl))
5416 /* The following functions use and clobber memory pointed to
5417 by their arguments. */
5418 case BUILT_IN_STRCPY:
5419 case BUILT_IN_STRNCPY:
5420 case BUILT_IN_BCOPY:
5421 case BUILT_IN_MEMCPY:
5422 case BUILT_IN_MEMMOVE:
5423 case BUILT_IN_MEMPCPY:
5424 case BUILT_IN_STPCPY:
5425 case BUILT_IN_STPNCPY:
5426 case BUILT_IN_STRCAT:
5427 case BUILT_IN_STRNCAT:
5428 case BUILT_IN_STRCPY_CHK:
5429 case BUILT_IN_STRNCPY_CHK:
5430 case BUILT_IN_MEMCPY_CHK:
5431 case BUILT_IN_MEMMOVE_CHK:
5432 case BUILT_IN_MEMPCPY_CHK:
5433 case BUILT_IN_STPCPY_CHK:
5434 case BUILT_IN_STPNCPY_CHK:
5435 case BUILT_IN_STRCAT_CHK:
5436 case BUILT_IN_STRNCAT_CHK:
5438 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5439 == BUILT_IN_BCOPY ? 1 : 0));
5440 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5441 == BUILT_IN_BCOPY ? 0 : 1));
5442 unsigned i;
5443 struct constraint_expr *rhsp, *lhsp;
5444 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5445 lhs = get_function_part_constraint (fi, fi_clobbers);
5446 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5447 process_constraint (new_constraint (lhs, *lhsp));
5448 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
5449 lhs = get_function_part_constraint (fi, fi_uses);
5450 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5451 process_constraint (new_constraint (lhs, *rhsp));
5452 return;
5454 /* The following function clobbers memory pointed to by
5455 its argument. */
5456 case BUILT_IN_MEMSET:
5457 case BUILT_IN_MEMSET_CHK:
5458 case BUILT_IN_POSIX_MEMALIGN:
5460 tree dest = gimple_call_arg (t, 0);
5461 unsigned i;
5462 ce_s *lhsp;
5463 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5464 lhs = get_function_part_constraint (fi, fi_clobbers);
5465 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5466 process_constraint (new_constraint (lhs, *lhsp));
5467 return;
5469 /* The following functions clobber their second and third
5470 arguments. */
5471 case BUILT_IN_SINCOS:
5472 case BUILT_IN_SINCOSF:
5473 case BUILT_IN_SINCOSL:
5475 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5476 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5477 return;
5479 /* The following functions clobber their second argument. */
5480 case BUILT_IN_FREXP:
5481 case BUILT_IN_FREXPF:
5482 case BUILT_IN_FREXPL:
5483 case BUILT_IN_LGAMMA_R:
5484 case BUILT_IN_LGAMMAF_R:
5485 case BUILT_IN_LGAMMAL_R:
5486 case BUILT_IN_GAMMA_R:
5487 case BUILT_IN_GAMMAF_R:
5488 case BUILT_IN_GAMMAL_R:
5489 case BUILT_IN_MODF:
5490 case BUILT_IN_MODFF:
5491 case BUILT_IN_MODFL:
5493 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5494 return;
5496 /* The following functions clobber their third argument. */
5497 case BUILT_IN_REMQUO:
5498 case BUILT_IN_REMQUOF:
5499 case BUILT_IN_REMQUOL:
5501 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5502 return;
5504 /* The following functions neither read nor clobber memory. */
5505 case BUILT_IN_ASSUME_ALIGNED:
5506 case BUILT_IN_FREE:
5507 return;
5508 /* Trampolines are of no interest to us. */
5509 case BUILT_IN_INIT_TRAMPOLINE:
5510 case BUILT_IN_ADJUST_TRAMPOLINE:
5511 return;
5512 case BUILT_IN_VA_START:
5513 case BUILT_IN_VA_END:
5514 return;
5515 case BUILT_IN_GOMP_PARALLEL:
5516 case BUILT_IN_GOACC_PARALLEL:
5518 unsigned int fnpos, argpos;
5519 unsigned int implicit_use_args[2];
5520 unsigned int num_implicit_use_args = 0;
5521 switch (DECL_FUNCTION_CODE (decl))
5523 case BUILT_IN_GOMP_PARALLEL:
5524 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
5525 fnpos = 0;
5526 argpos = 1;
5527 break;
5528 case BUILT_IN_GOACC_PARALLEL:
5529 /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
5530 sizes, kinds, ...). */
5531 fnpos = 1;
5532 argpos = 3;
5533 implicit_use_args[num_implicit_use_args++] = 4;
5534 implicit_use_args[num_implicit_use_args++] = 5;
5535 break;
5536 default:
5537 gcc_unreachable ();
5540 tree fnarg = gimple_call_arg (t, fnpos);
5541 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
5542 tree fndecl = TREE_OPERAND (fnarg, 0);
5543 if (fndecl_maybe_in_other_partition (fndecl))
5544 /* Fallthru to general call handling. */
5545 break;
5547 varinfo_t cfi = get_vi_for_tree (fndecl);
5549 tree arg = gimple_call_arg (t, argpos);
5551 /* Parameter passed by value is used. */
5552 lhs = get_function_part_constraint (fi, fi_uses);
5553 struct constraint_expr *rhsp;
5554 get_constraint_for (arg, &rhsc);
5555 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5556 process_constraint (new_constraint (lhs, *rhsp));
5557 rhsc.truncate (0);
5559 /* Handle parameters used by the call, but not used in cfi, as
5560 implicitly used by cfi. */
5561 lhs = get_function_part_constraint (cfi, fi_uses);
5562 for (unsigned i = 0; i < num_implicit_use_args; ++i)
5564 tree arg = gimple_call_arg (t, implicit_use_args[i]);
5565 get_constraint_for (arg, &rhsc);
5566 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5567 process_constraint (new_constraint (lhs, *rhsp));
5568 rhsc.truncate (0);
5571 /* The caller clobbers what the callee does. */
5572 lhs = get_function_part_constraint (fi, fi_clobbers);
5573 rhs = get_function_part_constraint (cfi, fi_clobbers);
5574 process_constraint (new_constraint (lhs, rhs));
5576 /* The caller uses what the callee does. */
5577 lhs = get_function_part_constraint (fi, fi_uses);
5578 rhs = get_function_part_constraint (cfi, fi_uses);
5579 process_constraint (new_constraint (lhs, rhs));
5581 return;
5583 /* printf-style functions may have hooks to set pointers to
5584 point to somewhere into the generated string. Leave them
5585 for a later exercise... */
5586 default:
5587 /* Fallthru to general call handling. */;
5590 /* Parameters passed by value are used. */
5591 lhs = get_function_part_constraint (fi, fi_uses);
5592 for (i = 0; i < gimple_call_num_args (t); i++)
5594 struct constraint_expr *rhsp;
5595 tree arg = gimple_call_arg (t, i);
5597 if (TREE_CODE (arg) == SSA_NAME
5598 || is_gimple_min_invariant (arg))
5599 continue;
5601 get_constraint_for_address_of (arg, &rhsc);
5602 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5603 process_constraint (new_constraint (lhs, *rhsp));
5604 rhsc.truncate (0);
5607 /* Build constraints for propagating clobbers/uses along the
5608 callgraph edges. */
5609 cfi = get_fi_for_callee (call_stmt);
5610 if (cfi->id == anything_id)
5612 if (gimple_vdef (t))
5613 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5614 anything_id);
5615 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5616 anything_id);
5617 return;
5620 /* For callees without function info (that's external functions),
5621 ESCAPED is clobbered and used. */
5622 if (cfi->decl
5623 && TREE_CODE (cfi->decl) == FUNCTION_DECL
5624 && !cfi->is_fn_info)
5626 varinfo_t vi;
5628 if (gimple_vdef (t))
5629 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5630 escaped_id);
5631 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
5633 /* Also honor the call statement use/clobber info. */
5634 if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
5635 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5636 vi->id);
5637 if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
5638 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
5639 vi->id);
5640 return;
5643 /* Otherwise the caller clobbers and uses what the callee does.
5644 ??? This should use a new complex constraint that filters
5645 local variables of the callee. */
5646 if (gimple_vdef (t))
5648 lhs = get_function_part_constraint (fi, fi_clobbers);
5649 rhs = get_function_part_constraint (cfi, fi_clobbers);
5650 process_constraint (new_constraint (lhs, rhs));
5652 lhs = get_function_part_constraint (fi, fi_uses);
5653 rhs = get_function_part_constraint (cfi, fi_uses);
5654 process_constraint (new_constraint (lhs, rhs));
5656 else if (gimple_code (t) == GIMPLE_ASM)
5658 /* ??? Ick. We can do better. */
5659 if (gimple_vdef (t))
5660 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5661 anything_id);
5662 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5663 anything_id);
5668 /* Find the first varinfo in the same variable as START that overlaps with
5669 OFFSET. Return NULL if we can't find one. */
5671 static varinfo_t
5672 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5674 /* If the offset is outside of the variable, bail out. */
5675 if (offset >= start->fullsize)
5676 return NULL;
5678 /* If we cannot reach offset from start, lookup the first field
5679 and start from there. */
5680 if (start->offset > offset)
5681 start = get_varinfo (start->head);
5683 while (start)
5685 /* We may not find a variable in the field list with the actual
5686 offset when we have glommed a structure to a variable.
5687 In that case, however, offset should still be within the size
5688 of the variable. */
5689 if (offset >= start->offset
5690 && (offset - start->offset) < start->size)
5691 return start;
5693 start = vi_next (start);
5696 return NULL;
5699 /* Find the first varinfo in the same variable as START that overlaps with
5700 OFFSET. If there is no such varinfo the varinfo directly preceding
5701 OFFSET is returned. */
5703 static varinfo_t
5704 first_or_preceding_vi_for_offset (varinfo_t start,
5705 unsigned HOST_WIDE_INT offset)
5707 /* If we cannot reach offset from start, lookup the first field
5708 and start from there. */
5709 if (start->offset > offset)
5710 start = get_varinfo (start->head);
5712 /* We may not find a variable in the field list with the actual
5713 offset when we have glommed a structure to a variable.
5714 In that case, however, offset should still be within the size
5715 of the variable.
5716 If we got beyond the offset we look for return the field
5717 directly preceding offset which may be the last field. */
5718 while (start->next
5719 && offset >= start->offset
5720 && !((offset - start->offset) < start->size))
5721 start = vi_next (start);
5723 return start;
5727 /* This structure is used during pushing fields onto the fieldstack
5728 to track the offset of the field, since bitpos_of_field gives it
5729 relative to its immediate containing type, and we want it relative
5730 to the ultimate containing object. */
5732 struct fieldoff
5734 /* Offset from the base of the base containing object to this field. */
5735 HOST_WIDE_INT offset;
5737 /* Size, in bits, of the field. */
5738 unsigned HOST_WIDE_INT size;
5740 unsigned has_unknown_size : 1;
5742 unsigned must_have_pointers : 1;
5744 unsigned may_have_pointers : 1;
5746 unsigned only_restrict_pointers : 1;
5748 tree restrict_pointed_type;
5750 typedef struct fieldoff fieldoff_s;
5753 /* qsort comparison function for two fieldoff's PA and PB */
5755 static int
5756 fieldoff_compare (const void *pa, const void *pb)
5758 const fieldoff_s *foa = (const fieldoff_s *)pa;
5759 const fieldoff_s *fob = (const fieldoff_s *)pb;
5760 unsigned HOST_WIDE_INT foasize, fobsize;
5762 if (foa->offset < fob->offset)
5763 return -1;
5764 else if (foa->offset > fob->offset)
5765 return 1;
5767 foasize = foa->size;
5768 fobsize = fob->size;
5769 if (foasize < fobsize)
5770 return -1;
5771 else if (foasize > fobsize)
5772 return 1;
5773 return 0;
5776 /* Sort a fieldstack according to the field offset and sizes. */
5777 static void
5778 sort_fieldstack (vec<fieldoff_s> &fieldstack)
5780 fieldstack.qsort (fieldoff_compare);
5783 /* Return true if T is a type that can have subvars. */
5785 static inline bool
5786 type_can_have_subvars (const_tree t)
5788 /* Aggregates without overlapping fields can have subvars. */
5789 return TREE_CODE (t) == RECORD_TYPE;
5792 /* Return true if V is a tree that we can have subvars for.
5793 Normally, this is any aggregate type. Also complex
5794 types which are not gimple registers can have subvars. */
5796 static inline bool
5797 var_can_have_subvars (const_tree v)
5799 /* Volatile variables should never have subvars. */
5800 if (TREE_THIS_VOLATILE (v))
5801 return false;
5803 /* Non decls or memory tags can never have subvars. */
5804 if (!DECL_P (v))
5805 return false;
5807 return type_can_have_subvars (TREE_TYPE (v));
5810 /* Return true if T is a type that does contain pointers. */
5812 static bool
5813 type_must_have_pointers (tree type)
5815 if (POINTER_TYPE_P (type))
5816 return true;
5818 if (TREE_CODE (type) == ARRAY_TYPE)
5819 return type_must_have_pointers (TREE_TYPE (type));
5821 /* A function or method can have pointers as arguments, so track
5822 those separately. */
5823 if (TREE_CODE (type) == FUNCTION_TYPE
5824 || TREE_CODE (type) == METHOD_TYPE)
5825 return true;
5827 return false;
5830 static bool
5831 field_must_have_pointers (tree t)
5833 return type_must_have_pointers (TREE_TYPE (t));
5836 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5837 the fields of TYPE onto fieldstack, recording their offsets along
5838 the way.
5840 OFFSET is used to keep track of the offset in this entire
5841 structure, rather than just the immediately containing structure.
5842 Returns false if the caller is supposed to handle the field we
5843 recursed for. */
5845 static bool
5846 push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
5847 HOST_WIDE_INT offset)
5849 tree field;
5850 bool empty_p = true;
5852 if (TREE_CODE (type) != RECORD_TYPE)
5853 return false;
5855 /* If the vector of fields is growing too big, bail out early.
5856 Callers check for vec::length <= param_max_fields_for_field_sensitive, make
5857 sure this fails. */
5858 if (fieldstack->length () > (unsigned)param_max_fields_for_field_sensitive)
5859 return false;
5861 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5862 if (TREE_CODE (field) == FIELD_DECL)
5864 bool push = false;
5865 HOST_WIDE_INT foff = bitpos_of_field (field);
5866 tree field_type = TREE_TYPE (field);
5868 if (!var_can_have_subvars (field)
5869 || TREE_CODE (field_type) == QUAL_UNION_TYPE
5870 || TREE_CODE (field_type) == UNION_TYPE)
5871 push = true;
5872 else if (!push_fields_onto_fieldstack
5873 (field_type, fieldstack, offset + foff)
5874 && (DECL_SIZE (field)
5875 && !integer_zerop (DECL_SIZE (field))))
5876 /* Empty structures may have actual size, like in C++. So
5877 see if we didn't push any subfields and the size is
5878 nonzero, push the field onto the stack. */
5879 push = true;
5881 if (push)
5883 fieldoff_s *pair = NULL;
5884 bool has_unknown_size = false;
5885 bool must_have_pointers_p;
5887 if (!fieldstack->is_empty ())
5888 pair = &fieldstack->last ();
5890 /* If there isn't anything at offset zero, create sth. */
5891 if (!pair
5892 && offset + foff != 0)
5894 fieldoff_s e
5895 = {0, offset + foff, false, false, true, false, NULL_TREE};
5896 pair = fieldstack->safe_push (e);
5899 if (!DECL_SIZE (field)
5900 || !tree_fits_uhwi_p (DECL_SIZE (field)))
5901 has_unknown_size = true;
5903 /* If adjacent fields do not contain pointers merge them. */
5904 must_have_pointers_p = field_must_have_pointers (field);
5905 if (pair
5906 && !has_unknown_size
5907 && !must_have_pointers_p
5908 && !pair->must_have_pointers
5909 && !pair->has_unknown_size
5910 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5912 pair->size += tree_to_uhwi (DECL_SIZE (field));
5914 else
5916 fieldoff_s e;
5917 e.offset = offset + foff;
5918 e.has_unknown_size = has_unknown_size;
5919 if (!has_unknown_size)
5920 e.size = tree_to_uhwi (DECL_SIZE (field));
5921 else
5922 e.size = -1;
5923 e.must_have_pointers = must_have_pointers_p;
5924 e.may_have_pointers = true;
5925 e.only_restrict_pointers
5926 = (!has_unknown_size
5927 && POINTER_TYPE_P (field_type)
5928 && TYPE_RESTRICT (field_type));
5929 if (e.only_restrict_pointers)
5930 e.restrict_pointed_type = TREE_TYPE (field_type);
5931 fieldstack->safe_push (e);
5935 empty_p = false;
5938 return !empty_p;
5941 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5942 if it is a varargs function. */
5944 static unsigned int
5945 count_num_arguments (tree decl, bool *is_varargs)
5947 unsigned int num = 0;
5948 tree t;
5950 /* Capture named arguments for K&R functions. They do not
5951 have a prototype and thus no TYPE_ARG_TYPES. */
5952 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5953 ++num;
5955 /* Check if the function has variadic arguments. */
5956 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5957 if (TREE_VALUE (t) == void_type_node)
5958 break;
5959 if (!t)
5960 *is_varargs = true;
5962 return num;
5965 /* Creation function node for DECL, using NAME, and return the index
5966 of the variable we've created for the function. If NONLOCAL_p, create
5967 initial constraints. */
5969 static varinfo_t
5970 create_function_info_for (tree decl, const char *name, bool add_id,
5971 bool nonlocal_p)
5973 struct function *fn = DECL_STRUCT_FUNCTION (decl);
5974 varinfo_t vi, prev_vi;
5975 tree arg;
5976 unsigned int i;
5977 bool is_varargs = false;
5978 unsigned int num_args = count_num_arguments (decl, &is_varargs);
5980 /* Create the variable info. */
5982 vi = new_var_info (decl, name, add_id);
5983 vi->offset = 0;
5984 vi->size = 1;
5985 vi->fullsize = fi_parm_base + num_args;
5986 vi->is_fn_info = 1;
5987 vi->may_have_pointers = false;
5988 if (is_varargs)
5989 vi->fullsize = ~0;
5990 insert_vi_for_tree (vi->decl, vi);
5992 prev_vi = vi;
5994 /* Create a variable for things the function clobbers and one for
5995 things the function uses. */
5997 varinfo_t clobbervi, usevi;
5998 const char *newname;
5999 char *tempname;
6001 tempname = xasprintf ("%s.clobber", name);
6002 newname = ggc_strdup (tempname);
6003 free (tempname);
6005 clobbervi = new_var_info (NULL, newname, false);
6006 clobbervi->offset = fi_clobbers;
6007 clobbervi->size = 1;
6008 clobbervi->fullsize = vi->fullsize;
6009 clobbervi->is_full_var = true;
6010 clobbervi->is_global_var = false;
6011 clobbervi->is_reg_var = true;
6013 gcc_assert (prev_vi->offset < clobbervi->offset);
6014 prev_vi->next = clobbervi->id;
6015 prev_vi = clobbervi;
6017 tempname = xasprintf ("%s.use", name);
6018 newname = ggc_strdup (tempname);
6019 free (tempname);
6021 usevi = new_var_info (NULL, newname, false);
6022 usevi->offset = fi_uses;
6023 usevi->size = 1;
6024 usevi->fullsize = vi->fullsize;
6025 usevi->is_full_var = true;
6026 usevi->is_global_var = false;
6027 usevi->is_reg_var = true;
6029 gcc_assert (prev_vi->offset < usevi->offset);
6030 prev_vi->next = usevi->id;
6031 prev_vi = usevi;
6034 /* And one for the static chain. */
6035 if (fn->static_chain_decl != NULL_TREE)
6037 varinfo_t chainvi;
6038 const char *newname;
6039 char *tempname;
6041 tempname = xasprintf ("%s.chain", name);
6042 newname = ggc_strdup (tempname);
6043 free (tempname);
6045 chainvi = new_var_info (fn->static_chain_decl, newname, false);
6046 chainvi->offset = fi_static_chain;
6047 chainvi->size = 1;
6048 chainvi->fullsize = vi->fullsize;
6049 chainvi->is_full_var = true;
6050 chainvi->is_global_var = false;
6052 insert_vi_for_tree (fn->static_chain_decl, chainvi);
6054 if (nonlocal_p
6055 && chainvi->may_have_pointers)
6056 make_constraint_from (chainvi, nonlocal_id);
6058 gcc_assert (prev_vi->offset < chainvi->offset);
6059 prev_vi->next = chainvi->id;
6060 prev_vi = chainvi;
6063 /* Create a variable for the return var. */
6064 if (DECL_RESULT (decl) != NULL
6065 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
6067 varinfo_t resultvi;
6068 const char *newname;
6069 char *tempname;
6070 tree resultdecl = decl;
6072 if (DECL_RESULT (decl))
6073 resultdecl = DECL_RESULT (decl);
6075 tempname = xasprintf ("%s.result", name);
6076 newname = ggc_strdup (tempname);
6077 free (tempname);
6079 resultvi = new_var_info (resultdecl, newname, false);
6080 resultvi->offset = fi_result;
6081 resultvi->size = 1;
6082 resultvi->fullsize = vi->fullsize;
6083 resultvi->is_full_var = true;
6084 if (DECL_RESULT (decl))
6085 resultvi->may_have_pointers = true;
6087 if (DECL_RESULT (decl))
6088 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
6090 if (nonlocal_p
6091 && DECL_RESULT (decl)
6092 && DECL_BY_REFERENCE (DECL_RESULT (decl)))
6093 make_constraint_from (resultvi, nonlocal_id);
6095 gcc_assert (prev_vi->offset < resultvi->offset);
6096 prev_vi->next = resultvi->id;
6097 prev_vi = resultvi;
6100 /* We also need to make function return values escape. Nothing
6101 escapes by returning from main though. */
6102 if (nonlocal_p
6103 && !MAIN_NAME_P (DECL_NAME (decl)))
6105 varinfo_t fi, rvi;
6106 fi = lookup_vi_for_tree (decl);
6107 rvi = first_vi_for_offset (fi, fi_result);
6108 if (rvi && rvi->offset == fi_result)
6109 make_copy_constraint (get_varinfo (escaped_id), rvi->id);
6112 /* Set up variables for each argument. */
6113 arg = DECL_ARGUMENTS (decl);
6114 for (i = 0; i < num_args; i++)
6116 varinfo_t argvi;
6117 const char *newname;
6118 char *tempname;
6119 tree argdecl = decl;
6121 if (arg)
6122 argdecl = arg;
6124 tempname = xasprintf ("%s.arg%d", name, i);
6125 newname = ggc_strdup (tempname);
6126 free (tempname);
6128 argvi = new_var_info (argdecl, newname, false);
6129 argvi->offset = fi_parm_base + i;
6130 argvi->size = 1;
6131 argvi->is_full_var = true;
6132 argvi->fullsize = vi->fullsize;
6133 if (arg)
6134 argvi->may_have_pointers = true;
6136 if (arg)
6137 insert_vi_for_tree (arg, argvi);
6139 if (nonlocal_p
6140 && argvi->may_have_pointers)
6141 make_constraint_from (argvi, nonlocal_id);
6143 gcc_assert (prev_vi->offset < argvi->offset);
6144 prev_vi->next = argvi->id;
6145 prev_vi = argvi;
6146 if (arg)
6147 arg = DECL_CHAIN (arg);
6150 /* Add one representative for all further args. */
6151 if (is_varargs)
6153 varinfo_t argvi;
6154 const char *newname;
6155 char *tempname;
6156 tree decl;
6158 tempname = xasprintf ("%s.varargs", name);
6159 newname = ggc_strdup (tempname);
6160 free (tempname);
6162 /* We need sth that can be pointed to for va_start. */
6163 decl = build_fake_var_decl (ptr_type_node);
6165 argvi = new_var_info (decl, newname, false);
6166 argvi->offset = fi_parm_base + num_args;
6167 argvi->size = ~0;
6168 argvi->is_full_var = true;
6169 argvi->is_heap_var = true;
6170 argvi->fullsize = vi->fullsize;
6172 if (nonlocal_p
6173 && argvi->may_have_pointers)
6174 make_constraint_from (argvi, nonlocal_id);
6176 gcc_assert (prev_vi->offset < argvi->offset);
6177 prev_vi->next = argvi->id;
6180 return vi;
6184 /* Return true if FIELDSTACK contains fields that overlap.
6185 FIELDSTACK is assumed to be sorted by offset. */
6187 static bool
6188 check_for_overlaps (const vec<fieldoff_s> &fieldstack)
6190 fieldoff_s *fo = NULL;
6191 unsigned int i;
6192 HOST_WIDE_INT lastoffset = -1;
6194 FOR_EACH_VEC_ELT (fieldstack, i, fo)
6196 if (fo->offset == lastoffset)
6197 return true;
6198 lastoffset = fo->offset;
6200 return false;
6203 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
6204 This will also create any varinfo structures necessary for fields
6205 of DECL. DECL is a function parameter if HANDLE_PARAM is set.
6206 HANDLED_STRUCT_TYPE is used to register struct types reached by following
6207 restrict pointers. This is needed to prevent infinite recursion.
6208 If ADD_RESTRICT, pretend that the pointer NAME is restrict even if DECL
6209 does not advertise it. */
6211 static varinfo_t
6212 create_variable_info_for_1 (tree decl, const char *name, bool add_id,
6213 bool handle_param, bitmap handled_struct_type,
6214 bool add_restrict = false)
6216 varinfo_t vi, newvi;
6217 tree decl_type = TREE_TYPE (decl);
6218 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
6219 auto_vec<fieldoff_s> fieldstack;
6220 fieldoff_s *fo;
6221 unsigned int i;
6223 if (!declsize
6224 || !tree_fits_uhwi_p (declsize))
6226 vi = new_var_info (decl, name, add_id);
6227 vi->offset = 0;
6228 vi->size = ~0;
6229 vi->fullsize = ~0;
6230 vi->is_unknown_size_var = true;
6231 vi->is_full_var = true;
6232 vi->may_have_pointers = true;
6233 return vi;
6236 /* Collect field information. */
6237 if (use_field_sensitive
6238 && var_can_have_subvars (decl)
6239 /* ??? Force us to not use subfields for globals in IPA mode.
6240 Else we'd have to parse arbitrary initializers. */
6241 && !(in_ipa_mode
6242 && is_global_var (decl)))
6244 fieldoff_s *fo = NULL;
6245 bool notokay = false;
6246 unsigned int i;
6248 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
6250 for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
6251 if (fo->has_unknown_size
6252 || fo->offset < 0)
6254 notokay = true;
6255 break;
6258 /* We can't sort them if we have a field with a variable sized type,
6259 which will make notokay = true. In that case, we are going to return
6260 without creating varinfos for the fields anyway, so sorting them is a
6261 waste to boot. */
6262 if (!notokay)
6264 sort_fieldstack (fieldstack);
6265 /* Due to some C++ FE issues, like PR 22488, we might end up
6266 what appear to be overlapping fields even though they,
6267 in reality, do not overlap. Until the C++ FE is fixed,
6268 we will simply disable field-sensitivity for these cases. */
6269 notokay = check_for_overlaps (fieldstack);
6272 if (notokay)
6273 fieldstack.release ();
6276 /* If we didn't end up collecting sub-variables create a full
6277 variable for the decl. */
6278 if (fieldstack.length () == 0
6279 || fieldstack.length () > (unsigned)param_max_fields_for_field_sensitive)
6281 vi = new_var_info (decl, name, add_id);
6282 vi->offset = 0;
6283 vi->may_have_pointers = true;
6284 vi->fullsize = tree_to_uhwi (declsize);
6285 vi->size = vi->fullsize;
6286 vi->is_full_var = true;
6287 if (POINTER_TYPE_P (decl_type)
6288 && (TYPE_RESTRICT (decl_type) || add_restrict))
6289 vi->only_restrict_pointers = 1;
6290 if (vi->only_restrict_pointers
6291 && !type_contains_placeholder_p (TREE_TYPE (decl_type))
6292 && handle_param
6293 && !bitmap_bit_p (handled_struct_type,
6294 TYPE_UID (TREE_TYPE (decl_type))))
6296 varinfo_t rvi;
6297 tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type));
6298 DECL_EXTERNAL (heapvar) = 1;
6299 if (var_can_have_subvars (heapvar))
6300 bitmap_set_bit (handled_struct_type,
6301 TYPE_UID (TREE_TYPE (decl_type)));
6302 rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
6303 true, handled_struct_type);
6304 if (var_can_have_subvars (heapvar))
6305 bitmap_clear_bit (handled_struct_type,
6306 TYPE_UID (TREE_TYPE (decl_type)));
6307 rvi->is_restrict_var = 1;
6308 insert_vi_for_tree (heapvar, rvi);
6309 make_constraint_from (vi, rvi->id);
6310 make_param_constraints (rvi);
6312 fieldstack.release ();
6313 return vi;
6316 vi = new_var_info (decl, name, add_id);
6317 vi->fullsize = tree_to_uhwi (declsize);
6318 if (fieldstack.length () == 1)
6319 vi->is_full_var = true;
6320 for (i = 0, newvi = vi;
6321 fieldstack.iterate (i, &fo);
6322 ++i, newvi = vi_next (newvi))
6324 const char *newname = NULL;
6325 char *tempname;
6327 if (dump_file)
6329 if (fieldstack.length () != 1)
6331 tempname
6332 = xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
6333 "+" HOST_WIDE_INT_PRINT_DEC, name,
6334 fo->offset, fo->size);
6335 newname = ggc_strdup (tempname);
6336 free (tempname);
6339 else
6340 newname = "NULL";
6342 if (newname)
6343 newvi->name = newname;
6344 newvi->offset = fo->offset;
6345 newvi->size = fo->size;
6346 newvi->fullsize = vi->fullsize;
6347 newvi->may_have_pointers = fo->may_have_pointers;
6348 newvi->only_restrict_pointers = fo->only_restrict_pointers;
6349 if (handle_param
6350 && newvi->only_restrict_pointers
6351 && !type_contains_placeholder_p (fo->restrict_pointed_type)
6352 && !bitmap_bit_p (handled_struct_type,
6353 TYPE_UID (fo->restrict_pointed_type)))
6355 varinfo_t rvi;
6356 tree heapvar = build_fake_var_decl (fo->restrict_pointed_type);
6357 DECL_EXTERNAL (heapvar) = 1;
6358 if (var_can_have_subvars (heapvar))
6359 bitmap_set_bit (handled_struct_type,
6360 TYPE_UID (fo->restrict_pointed_type));
6361 rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
6362 true, handled_struct_type);
6363 if (var_can_have_subvars (heapvar))
6364 bitmap_clear_bit (handled_struct_type,
6365 TYPE_UID (fo->restrict_pointed_type));
6366 rvi->is_restrict_var = 1;
6367 insert_vi_for_tree (heapvar, rvi);
6368 make_constraint_from (newvi, rvi->id);
6369 make_param_constraints (rvi);
6371 if (i + 1 < fieldstack.length ())
6373 varinfo_t tem = new_var_info (decl, name, false);
6374 newvi->next = tem->id;
6375 tem->head = vi->id;
6379 return vi;
6382 static unsigned int
6383 create_variable_info_for (tree decl, const char *name, bool add_id)
6385 /* First see if we are dealing with an ifunc resolver call and
6386 assiociate that with a call to the resolver function result. */
6387 cgraph_node *node;
6388 if (in_ipa_mode
6389 && TREE_CODE (decl) == FUNCTION_DECL
6390 && (node = cgraph_node::get (decl))
6391 && node->ifunc_resolver)
6393 varinfo_t fi = get_vi_for_tree (node->get_alias_target ()->decl);
6394 constraint_expr rhs
6395 = get_function_part_constraint (fi, fi_result);
6396 fi = new_var_info (NULL_TREE, "ifuncres", true);
6397 fi->is_reg_var = true;
6398 constraint_expr lhs;
6399 lhs.type = SCALAR;
6400 lhs.var = fi->id;
6401 lhs.offset = 0;
6402 process_constraint (new_constraint (lhs, rhs));
6403 insert_vi_for_tree (decl, fi);
6404 return fi->id;
6407 varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, false, NULL);
6408 unsigned int id = vi->id;
6410 insert_vi_for_tree (decl, vi);
6412 if (!VAR_P (decl))
6413 return id;
6415 /* Create initial constraints for globals. */
6416 for (; vi; vi = vi_next (vi))
6418 if (!vi->may_have_pointers
6419 || !vi->is_global_var)
6420 continue;
6422 /* Mark global restrict qualified pointers. */
6423 if ((POINTER_TYPE_P (TREE_TYPE (decl))
6424 && TYPE_RESTRICT (TREE_TYPE (decl)))
6425 || vi->only_restrict_pointers)
6427 varinfo_t rvi
6428 = make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT",
6429 true);
6430 /* ??? For now exclude reads from globals as restrict sources
6431 if those are not (indirectly) from incoming parameters. */
6432 rvi->is_restrict_var = false;
6433 continue;
6436 /* In non-IPA mode the initializer from nonlocal is all we need. */
6437 if (!in_ipa_mode
6438 || DECL_HARD_REGISTER (decl))
6439 make_copy_constraint (vi, nonlocal_id);
6441 /* In IPA mode parse the initializer and generate proper constraints
6442 for it. */
6443 else
6445 varpool_node *vnode = varpool_node::get (decl);
6447 /* For escaped variables initialize them from nonlocal. */
6448 if (!vnode->all_refs_explicit_p ())
6449 make_copy_constraint (vi, nonlocal_id);
6451 /* If this is a global variable with an initializer and we are in
6452 IPA mode generate constraints for it. */
6453 ipa_ref *ref;
6454 for (unsigned idx = 0; vnode->iterate_reference (idx, ref); ++idx)
6456 auto_vec<ce_s> rhsc;
6457 struct constraint_expr lhs, *rhsp;
6458 unsigned i;
6459 get_constraint_for_address_of (ref->referred->decl, &rhsc);
6460 lhs.var = vi->id;
6461 lhs.offset = 0;
6462 lhs.type = SCALAR;
6463 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6464 process_constraint (new_constraint (lhs, *rhsp));
6465 /* If this is a variable that escapes from the unit
6466 the initializer escapes as well. */
6467 if (!vnode->all_refs_explicit_p ())
6469 lhs.var = escaped_id;
6470 lhs.offset = 0;
6471 lhs.type = SCALAR;
6472 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6473 process_constraint (new_constraint (lhs, *rhsp));
6479 return id;
6482 /* Print out the points-to solution for VAR to FILE. */
6484 static void
6485 dump_solution_for_var (FILE *file, unsigned int var)
6487 varinfo_t vi = get_varinfo (var);
6488 unsigned int i;
6489 bitmap_iterator bi;
6491 /* Dump the solution for unified vars anyway, this avoids difficulties
6492 in scanning dumps in the testsuite. */
6493 fprintf (file, "%s = { ", vi->name);
6494 vi = get_varinfo (find (var));
6495 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6496 fprintf (file, "%s ", get_varinfo (i)->name);
6497 fprintf (file, "}");
6499 /* But note when the variable was unified. */
6500 if (vi->id != var)
6501 fprintf (file, " same as %s", vi->name);
6503 fprintf (file, "\n");
6506 /* Print the points-to solution for VAR to stderr. */
6508 DEBUG_FUNCTION void
6509 debug_solution_for_var (unsigned int var)
6511 dump_solution_for_var (stderr, var);
6514 /* Register the constraints for function parameter related VI. */
6516 static void
6517 make_param_constraints (varinfo_t vi)
6519 for (; vi; vi = vi_next (vi))
6521 if (vi->only_restrict_pointers)
6523 else if (vi->may_have_pointers)
6524 make_constraint_from (vi, nonlocal_id);
6526 if (vi->is_full_var)
6527 break;
6531 /* Create varinfo structures for all of the variables in the
6532 function for intraprocedural mode. */
6534 static void
6535 intra_create_variable_infos (struct function *fn)
6537 tree t;
6538 bitmap handled_struct_type = NULL;
6539 bool this_parm_in_ctor = DECL_CXX_CONSTRUCTOR_P (fn->decl);
6541 /* For each incoming pointer argument arg, create the constraint ARG
6542 = NONLOCAL or a dummy variable if it is a restrict qualified
6543 passed-by-reference argument. */
6544 for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
6546 if (handled_struct_type == NULL)
6547 handled_struct_type = BITMAP_ALLOC (NULL);
6549 varinfo_t p
6550 = create_variable_info_for_1 (t, alias_get_name (t), false, true,
6551 handled_struct_type, this_parm_in_ctor);
6552 insert_vi_for_tree (t, p);
6554 make_param_constraints (p);
6556 this_parm_in_ctor = false;
6559 if (handled_struct_type != NULL)
6560 BITMAP_FREE (handled_struct_type);
6562 /* Add a constraint for a result decl that is passed by reference. */
6563 if (DECL_RESULT (fn->decl)
6564 && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
6566 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));
6568 for (p = result_vi; p; p = vi_next (p))
6569 make_constraint_from (p, nonlocal_id);
6572 /* Add a constraint for the incoming static chain parameter. */
6573 if (fn->static_chain_decl != NULL_TREE)
6575 varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);
6577 for (p = chain_vi; p; p = vi_next (p))
6578 make_constraint_from (p, nonlocal_id);
6582 /* Structure used to put solution bitmaps in a hashtable so they can
6583 be shared among variables with the same points-to set. */
6585 typedef struct shared_bitmap_info
6587 bitmap pt_vars;
6588 hashval_t hashcode;
6589 } *shared_bitmap_info_t;
6590 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
6592 /* Shared_bitmap hashtable helpers. */
6594 struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
6596 static inline hashval_t hash (const shared_bitmap_info *);
6597 static inline bool equal (const shared_bitmap_info *,
6598 const shared_bitmap_info *);
6601 /* Hash function for a shared_bitmap_info_t */
6603 inline hashval_t
6604 shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
6606 return bi->hashcode;
6609 /* Equality function for two shared_bitmap_info_t's. */
6611 inline bool
6612 shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
6613 const shared_bitmap_info *sbi2)
6615 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
6618 /* Shared_bitmap hashtable. */
6620 static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
6622 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
6623 existing instance if there is one, NULL otherwise. */
6625 static bitmap
6626 shared_bitmap_lookup (bitmap pt_vars)
6628 shared_bitmap_info **slot;
6629 struct shared_bitmap_info sbi;
6631 sbi.pt_vars = pt_vars;
6632 sbi.hashcode = bitmap_hash (pt_vars);
6634 slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
6635 if (!slot)
6636 return NULL;
6637 else
6638 return (*slot)->pt_vars;
6642 /* Add a bitmap to the shared bitmap hashtable. */
6644 static void
6645 shared_bitmap_add (bitmap pt_vars)
6647 shared_bitmap_info **slot;
6648 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
6650 sbi->pt_vars = pt_vars;
6651 sbi->hashcode = bitmap_hash (pt_vars);
6653 slot = shared_bitmap_table->find_slot (sbi, INSERT);
6654 gcc_assert (!*slot);
6655 *slot = sbi;
6659 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
6661 static void
6662 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
6663 tree fndecl)
6665 unsigned int i;
6666 bitmap_iterator bi;
6667 varinfo_t escaped_vi = get_varinfo (find (escaped_id));
6668 bool everything_escaped
6669 = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);
6671 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
6673 varinfo_t vi = get_varinfo (i);
6675 if (vi->is_artificial_var)
6676 continue;
6678 if (everything_escaped
6679 || (escaped_vi->solution
6680 && bitmap_bit_p (escaped_vi->solution, i)))
6682 pt->vars_contains_escaped = true;
6683 pt->vars_contains_escaped_heap |= vi->is_heap_var;
6686 if (vi->is_restrict_var)
6687 pt->vars_contains_restrict = true;
6689 if (VAR_P (vi->decl)
6690 || TREE_CODE (vi->decl) == PARM_DECL
6691 || TREE_CODE (vi->decl) == RESULT_DECL)
6693 /* If we are in IPA mode we will not recompute points-to
6694 sets after inlining so make sure they stay valid. */
6695 if (in_ipa_mode
6696 && !DECL_PT_UID_SET_P (vi->decl))
6697 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
6699 /* Add the decl to the points-to set. Note that the points-to
6700 set contains global variables. */
6701 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
6702 if (vi->is_global_var
6703 /* In IPA mode the escaped_heap trick doesn't work as
6704 ESCAPED is escaped from the unit but
6705 pt_solution_includes_global needs to answer true for
6706 all variables not automatic within a function.
6707 For the same reason is_global_var is not the
6708 correct flag to track - local variables from other
6709 functions also need to be considered global.
6710 Conveniently all HEAP vars are not put in function
6711 scope. */
6712 || (in_ipa_mode
6713 && fndecl
6714 && ! auto_var_in_fn_p (vi->decl, fndecl)))
6715 pt->vars_contains_nonlocal = true;
6717 /* If we have a variable that is interposable record that fact
6718 for pointer comparison simplification. */
6719 if (VAR_P (vi->decl)
6720 && (TREE_STATIC (vi->decl) || DECL_EXTERNAL (vi->decl))
6721 && ! decl_binds_to_current_def_p (vi->decl))
6722 pt->vars_contains_interposable = true;
6724 /* If this is a local variable we can have overlapping lifetime
6725 of different function invocations through recursion duplicate
6726 it with its shadow variable. */
6727 if (in_ipa_mode
6728 && vi->shadow_var_uid != 0)
6730 bitmap_set_bit (into, vi->shadow_var_uid);
6731 pt->vars_contains_nonlocal = true;
6735 else if (TREE_CODE (vi->decl) == FUNCTION_DECL
6736 || TREE_CODE (vi->decl) == LABEL_DECL)
6738 /* Nothing should read/write from/to code so we can
6739 save bits by not including them in the points-to bitmaps.
6740 Still mark the points-to set as containing global memory
6741 to make code-patching possible - see PR70128. */
6742 pt->vars_contains_nonlocal = true;
6748 /* Compute the points-to solution *PT for the variable VI. */
6750 static struct pt_solution
6751 find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
6753 unsigned int i;
6754 bitmap_iterator bi;
6755 bitmap finished_solution;
6756 bitmap result;
6757 varinfo_t vi;
6758 struct pt_solution *pt;
6760 /* This variable may have been collapsed, let's get the real
6761 variable. */
6762 vi = get_varinfo (find (orig_vi->id));
6764 /* See if we have already computed the solution and return it. */
6765 pt_solution **slot = &final_solutions->get_or_insert (vi);
6766 if (*slot != NULL)
6767 return **slot;
6769 *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
6770 memset (pt, 0, sizeof (struct pt_solution));
6772 /* Translate artificial variables into SSA_NAME_PTR_INFO
6773 attributes. */
6774 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6776 varinfo_t vi = get_varinfo (i);
6778 if (vi->is_artificial_var)
6780 if (vi->id == nothing_id)
6781 pt->null = 1;
6782 else if (vi->id == escaped_id)
6784 if (in_ipa_mode)
6785 pt->ipa_escaped = 1;
6786 else
6787 pt->escaped = 1;
6788 /* Expand some special vars of ESCAPED in-place here. */
6789 varinfo_t evi = get_varinfo (find (escaped_id));
6790 if (bitmap_bit_p (evi->solution, nonlocal_id))
6791 pt->nonlocal = 1;
6793 else if (vi->id == nonlocal_id)
6794 pt->nonlocal = 1;
6795 else if (vi->id == string_id)
6796 /* Nobody cares - STRING_CSTs are read-only entities. */
6798 else if (vi->id == anything_id
6799 || vi->id == integer_id)
6800 pt->anything = 1;
6804 /* Instead of doing extra work, simply do not create
6805 elaborate points-to information for pt_anything pointers. */
6806 if (pt->anything)
6807 return *pt;
6809 /* Share the final set of variables when possible. */
6810 finished_solution = BITMAP_GGC_ALLOC ();
6811 stats.points_to_sets_created++;
6813 set_uids_in_ptset (finished_solution, vi->solution, pt, fndecl);
6814 result = shared_bitmap_lookup (finished_solution);
6815 if (!result)
6817 shared_bitmap_add (finished_solution);
6818 pt->vars = finished_solution;
6820 else
6822 pt->vars = result;
6823 bitmap_clear (finished_solution);
6826 return *pt;
6829 /* Given a pointer variable P, fill in its points-to set. */
6831 static void
6832 find_what_p_points_to (tree fndecl, tree p)
6834 struct ptr_info_def *pi;
6835 tree lookup_p = p;
6836 varinfo_t vi;
6837 value_range vr;
6838 get_range_query (DECL_STRUCT_FUNCTION (fndecl))->range_of_expr (vr, p);
6839 bool nonnull = vr.nonzero_p ();
6841 /* For parameters, get at the points-to set for the actual parm
6842 decl. */
6843 if (TREE_CODE (p) == SSA_NAME
6844 && SSA_NAME_IS_DEFAULT_DEF (p)
6845 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
6846 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
6847 lookup_p = SSA_NAME_VAR (p);
6849 vi = lookup_vi_for_tree (lookup_p);
6850 if (!vi)
6851 return;
6853 pi = get_ptr_info (p);
6854 pi->pt = find_what_var_points_to (fndecl, vi);
6855 /* Conservatively set to NULL from PTA (to true). */
6856 pi->pt.null = 1;
6857 /* Preserve pointer nonnull globally computed. */
6858 if (nonnull)
6859 set_ptr_nonnull (p);
6863 /* Query statistics for points-to solutions. */
6865 static struct {
6866 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
6867 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
6868 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
6869 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
6870 } pta_stats;
6872 void
6873 dump_pta_stats (FILE *s)
6875 fprintf (s, "\nPTA query stats:\n");
6876 fprintf (s, " pt_solution_includes: "
6877 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6878 HOST_WIDE_INT_PRINT_DEC" queries\n",
6879 pta_stats.pt_solution_includes_no_alias,
6880 pta_stats.pt_solution_includes_no_alias
6881 + pta_stats.pt_solution_includes_may_alias);
6882 fprintf (s, " pt_solutions_intersect: "
6883 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6884 HOST_WIDE_INT_PRINT_DEC" queries\n",
6885 pta_stats.pt_solutions_intersect_no_alias,
6886 pta_stats.pt_solutions_intersect_no_alias
6887 + pta_stats.pt_solutions_intersect_may_alias);
6891 /* Reset the points-to solution *PT to a conservative default
6892 (point to anything). */
6894 void
6895 pt_solution_reset (struct pt_solution *pt)
6897 memset (pt, 0, sizeof (struct pt_solution));
6898 pt->anything = true;
6899 pt->null = true;
6902 /* Set the points-to solution *PT to point only to the variables
6903 in VARS. VARS_CONTAINS_GLOBAL specifies whether that contains
6904 global variables and VARS_CONTAINS_RESTRICT specifies whether
6905 it contains restrict tag variables. */
6907 void
6908 pt_solution_set (struct pt_solution *pt, bitmap vars,
6909 bool vars_contains_nonlocal)
6911 memset (pt, 0, sizeof (struct pt_solution));
6912 pt->vars = vars;
6913 pt->vars_contains_nonlocal = vars_contains_nonlocal;
6914 pt->vars_contains_escaped
6915 = (cfun->gimple_df->escaped.anything
6916 || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
6919 /* Set the points-to solution *PT to point only to the variable VAR. */
6921 void
6922 pt_solution_set_var (struct pt_solution *pt, tree var)
6924 memset (pt, 0, sizeof (struct pt_solution));
6925 pt->vars = BITMAP_GGC_ALLOC ();
6926 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6927 pt->vars_contains_nonlocal = is_global_var (var);
6928 pt->vars_contains_escaped
6929 = (cfun->gimple_df->escaped.anything
6930 || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
6933 /* Computes the union of the points-to solutions *DEST and *SRC and
6934 stores the result in *DEST. This changes the points-to bitmap
6935 of *DEST and thus may not be used if that might be shared.
6936 The points-to bitmap of *SRC and *DEST will not be shared after
6937 this function if they were not before. */
6939 static void
6940 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6942 dest->anything |= src->anything;
6943 if (dest->anything)
6945 pt_solution_reset (dest);
6946 return;
6949 dest->nonlocal |= src->nonlocal;
6950 dest->escaped |= src->escaped;
6951 dest->ipa_escaped |= src->ipa_escaped;
6952 dest->null |= src->null;
6953 dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
6954 dest->vars_contains_escaped |= src->vars_contains_escaped;
6955 dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
6956 if (!src->vars)
6957 return;
6959 if (!dest->vars)
6960 dest->vars = BITMAP_GGC_ALLOC ();
6961 bitmap_ior_into (dest->vars, src->vars);
6964 /* Return true if the points-to solution *PT is empty. */
6966 bool
6967 pt_solution_empty_p (const pt_solution *pt)
6969 if (pt->anything
6970 || pt->nonlocal)
6971 return false;
6973 if (pt->vars
6974 && !bitmap_empty_p (pt->vars))
6975 return false;
6977 /* If the solution includes ESCAPED, check if that is empty. */
6978 if (pt->escaped
6979 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6980 return false;
6982 /* If the solution includes ESCAPED, check if that is empty. */
6983 if (pt->ipa_escaped
6984 && !pt_solution_empty_p (&ipa_escaped_pt))
6985 return false;
6987 return true;
6990 /* Return true if the points-to solution *PT only point to a single var, and
6991 return the var uid in *UID. */
6993 bool
6994 pt_solution_singleton_or_null_p (struct pt_solution *pt, unsigned *uid)
6996 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6997 || pt->vars == NULL
6998 || !bitmap_single_bit_set_p (pt->vars))
6999 return false;
7001 *uid = bitmap_first_set_bit (pt->vars);
7002 return true;
7005 /* Return true if the points-to solution *PT includes global memory.
7006 If ESCAPED_LOCAL_P is true then escaped local variables are also
7007 considered global. */
7009 bool
7010 pt_solution_includes_global (struct pt_solution *pt, bool escaped_local_p)
7012 if (pt->anything
7013 || pt->nonlocal
7014 || pt->vars_contains_nonlocal
7015 /* The following is a hack to make the malloc escape hack work.
7016 In reality we'd need different sets for escaped-through-return
7017 and escaped-to-callees and passes would need to be updated. */
7018 || pt->vars_contains_escaped_heap)
7019 return true;
7021 if (escaped_local_p && pt->vars_contains_escaped)
7022 return true;
7024 /* 'escaped' is also a placeholder so we have to look into it. */
7025 if (pt->escaped)
7026 return pt_solution_includes_global (&cfun->gimple_df->escaped,
7027 escaped_local_p);
7029 if (pt->ipa_escaped)
7030 return pt_solution_includes_global (&ipa_escaped_pt,
7031 escaped_local_p);
7033 return false;
7036 /* Return true if the points-to solution *PT includes the variable
7037 declaration DECL. */
7039 static bool
7040 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
7042 if (pt->anything)
7043 return true;
7045 if (pt->nonlocal
7046 && is_global_var (decl))
7047 return true;
7049 if (pt->vars
7050 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
7051 return true;
7053 /* If the solution includes ESCAPED, check it. */
7054 if (pt->escaped
7055 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
7056 return true;
7058 /* If the solution includes ESCAPED, check it. */
7059 if (pt->ipa_escaped
7060 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
7061 return true;
7063 return false;
7066 bool
7067 pt_solution_includes (struct pt_solution *pt, const_tree decl)
7069 bool res = pt_solution_includes_1 (pt, decl);
7070 if (res)
7071 ++pta_stats.pt_solution_includes_may_alias;
7072 else
7073 ++pta_stats.pt_solution_includes_no_alias;
7074 return res;
7077 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
7078 intersection. */
7080 static bool
7081 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
7083 if (pt1->anything || pt2->anything)
7084 return true;
7086 /* If either points to unknown global memory and the other points to
7087 any global memory they alias. */
7088 if ((pt1->nonlocal
7089 && (pt2->nonlocal
7090 || pt2->vars_contains_nonlocal))
7091 || (pt2->nonlocal
7092 && pt1->vars_contains_nonlocal))
7093 return true;
7095 /* If either points to all escaped memory and the other points to
7096 any escaped memory they alias. */
7097 if ((pt1->escaped
7098 && (pt2->escaped
7099 || pt2->vars_contains_escaped))
7100 || (pt2->escaped
7101 && pt1->vars_contains_escaped))
7102 return true;
7104 /* Check the escaped solution if required.
7105 ??? Do we need to check the local against the IPA escaped sets? */
7106 if ((pt1->ipa_escaped || pt2->ipa_escaped)
7107 && !pt_solution_empty_p (&ipa_escaped_pt))
7109 /* If both point to escaped memory and that solution
7110 is not empty they alias. */
7111 if (pt1->ipa_escaped && pt2->ipa_escaped)
7112 return true;
7114 /* If either points to escaped memory see if the escaped solution
7115 intersects with the other. */
7116 if ((pt1->ipa_escaped
7117 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
7118 || (pt2->ipa_escaped
7119 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
7120 return true;
7123 /* Now both pointers alias if their points-to solution intersects. */
7124 return (pt1->vars
7125 && pt2->vars
7126 && bitmap_intersect_p (pt1->vars, pt2->vars));
7129 bool
7130 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
7132 bool res = pt_solutions_intersect_1 (pt1, pt2);
7133 if (res)
7134 ++pta_stats.pt_solutions_intersect_may_alias;
7135 else
7136 ++pta_stats.pt_solutions_intersect_no_alias;
7137 return res;
7141 /* Dump points-to information to OUTFILE. */
7143 static void
7144 dump_sa_points_to_info (FILE *outfile)
7146 unsigned int i;
7148 fprintf (outfile, "\nPoints-to sets\n\n");
7150 if (dump_flags & TDF_STATS)
7152 fprintf (outfile, "Stats:\n");
7153 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
7154 fprintf (outfile, "Non-pointer vars: %d\n",
7155 stats.nonpointer_vars);
7156 fprintf (outfile, "Statically unified vars: %d\n",
7157 stats.unified_vars_static);
7158 fprintf (outfile, "Dynamically unified vars: %d\n",
7159 stats.unified_vars_dynamic);
7160 fprintf (outfile, "Iterations: %d\n", stats.iterations);
7161 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
7162 fprintf (outfile, "Number of implicit edges: %d\n",
7163 stats.num_implicit_edges);
7166 for (i = 1; i < varmap.length (); i++)
7168 varinfo_t vi = get_varinfo (i);
7169 if (!vi->may_have_pointers)
7170 continue;
7171 dump_solution_for_var (outfile, i);
7176 /* Debug points-to information to stderr. */
7178 DEBUG_FUNCTION void
7179 debug_sa_points_to_info (void)
7181 dump_sa_points_to_info (stderr);
7185 /* Initialize the always-existing constraint variables for NULL
7186 ANYTHING, READONLY, and INTEGER */
7188 static void
7189 init_base_vars (void)
7191 struct constraint_expr lhs, rhs;
7192 varinfo_t var_anything;
7193 varinfo_t var_nothing;
7194 varinfo_t var_string;
7195 varinfo_t var_escaped;
7196 varinfo_t var_nonlocal;
7197 varinfo_t var_storedanything;
7198 varinfo_t var_integer;
7200 /* Variable ID zero is reserved and should be NULL. */
7201 varmap.safe_push (NULL);
7203 /* Create the NULL variable, used to represent that a variable points
7204 to NULL. */
7205 var_nothing = new_var_info (NULL_TREE, "NULL", false);
7206 gcc_assert (var_nothing->id == nothing_id);
7207 var_nothing->is_artificial_var = 1;
7208 var_nothing->offset = 0;
7209 var_nothing->size = ~0;
7210 var_nothing->fullsize = ~0;
7211 var_nothing->is_special_var = 1;
7212 var_nothing->may_have_pointers = 0;
7213 var_nothing->is_global_var = 0;
7215 /* Create the ANYTHING variable, used to represent that a variable
7216 points to some unknown piece of memory. */
7217 var_anything = new_var_info (NULL_TREE, "ANYTHING", false);
7218 gcc_assert (var_anything->id == anything_id);
7219 var_anything->is_artificial_var = 1;
7220 var_anything->size = ~0;
7221 var_anything->offset = 0;
7222 var_anything->fullsize = ~0;
7223 var_anything->is_special_var = 1;
7225 /* Anything points to anything. This makes deref constraints just
7226 work in the presence of linked list and other p = *p type loops,
7227 by saying that *ANYTHING = ANYTHING. */
7228 lhs.type = SCALAR;
7229 lhs.var = anything_id;
7230 lhs.offset = 0;
7231 rhs.type = ADDRESSOF;
7232 rhs.var = anything_id;
7233 rhs.offset = 0;
7235 /* This specifically does not use process_constraint because
7236 process_constraint ignores all anything = anything constraints, since all
7237 but this one are redundant. */
7238 constraints.safe_push (new_constraint (lhs, rhs));
7240 /* Create the STRING variable, used to represent that a variable
7241 points to a string literal. String literals don't contain
7242 pointers so STRING doesn't point to anything. */
7243 var_string = new_var_info (NULL_TREE, "STRING", false);
7244 gcc_assert (var_string->id == string_id);
7245 var_string->is_artificial_var = 1;
7246 var_string->offset = 0;
7247 var_string->size = ~0;
7248 var_string->fullsize = ~0;
7249 var_string->is_special_var = 1;
7250 var_string->may_have_pointers = 0;
7252 /* Create the ESCAPED variable, used to represent the set of escaped
7253 memory. */
7254 var_escaped = new_var_info (NULL_TREE, "ESCAPED", false);
7255 gcc_assert (var_escaped->id == escaped_id);
7256 var_escaped->is_artificial_var = 1;
7257 var_escaped->offset = 0;
7258 var_escaped->size = ~0;
7259 var_escaped->fullsize = ~0;
7260 var_escaped->is_special_var = 0;
7262 /* Create the NONLOCAL variable, used to represent the set of nonlocal
7263 memory. */
7264 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL", false);
7265 gcc_assert (var_nonlocal->id == nonlocal_id);
7266 var_nonlocal->is_artificial_var = 1;
7267 var_nonlocal->offset = 0;
7268 var_nonlocal->size = ~0;
7269 var_nonlocal->fullsize = ~0;
7270 var_nonlocal->is_special_var = 1;
7272 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
7273 lhs.type = SCALAR;
7274 lhs.var = escaped_id;
7275 lhs.offset = 0;
7276 rhs.type = DEREF;
7277 rhs.var = escaped_id;
7278 rhs.offset = 0;
7279 process_constraint (new_constraint (lhs, rhs));
7281 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
7282 whole variable escapes. */
7283 lhs.type = SCALAR;
7284 lhs.var = escaped_id;
7285 lhs.offset = 0;
7286 rhs.type = SCALAR;
7287 rhs.var = escaped_id;
7288 rhs.offset = UNKNOWN_OFFSET;
7289 process_constraint (new_constraint (lhs, rhs));
7291 /* *ESCAPED = NONLOCAL. This is true because we have to assume
7292 everything pointed to by escaped points to what global memory can
7293 point to. */
7294 lhs.type = DEREF;
7295 lhs.var = escaped_id;
7296 lhs.offset = 0;
7297 rhs.type = SCALAR;
7298 rhs.var = nonlocal_id;
7299 rhs.offset = 0;
7300 process_constraint (new_constraint (lhs, rhs));
7302 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
7303 global memory may point to global memory and escaped memory. */
7304 lhs.type = SCALAR;
7305 lhs.var = nonlocal_id;
7306 lhs.offset = 0;
7307 rhs.type = ADDRESSOF;
7308 rhs.var = nonlocal_id;
7309 rhs.offset = 0;
7310 process_constraint (new_constraint (lhs, rhs));
7311 rhs.type = ADDRESSOF;
7312 rhs.var = escaped_id;
7313 rhs.offset = 0;
7314 process_constraint (new_constraint (lhs, rhs));
7316 /* Create the STOREDANYTHING variable, used to represent the set of
7317 variables stored to *ANYTHING. */
7318 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING", false);
7319 gcc_assert (var_storedanything->id == storedanything_id);
7320 var_storedanything->is_artificial_var = 1;
7321 var_storedanything->offset = 0;
7322 var_storedanything->size = ~0;
7323 var_storedanything->fullsize = ~0;
7324 var_storedanything->is_special_var = 0;
7326 /* Create the INTEGER variable, used to represent that a variable points
7327 to what an INTEGER "points to". */
7328 var_integer = new_var_info (NULL_TREE, "INTEGER", false);
7329 gcc_assert (var_integer->id == integer_id);
7330 var_integer->is_artificial_var = 1;
7331 var_integer->size = ~0;
7332 var_integer->fullsize = ~0;
7333 var_integer->offset = 0;
7334 var_integer->is_special_var = 1;
7336 /* INTEGER = ANYTHING, because we don't know where a dereference of
7337 a random integer will point to. */
7338 lhs.type = SCALAR;
7339 lhs.var = integer_id;
7340 lhs.offset = 0;
7341 rhs.type = ADDRESSOF;
7342 rhs.var = anything_id;
7343 rhs.offset = 0;
7344 process_constraint (new_constraint (lhs, rhs));
7347 /* Initialize things necessary to perform PTA */
7349 static void
7350 init_alias_vars (void)
7352 use_field_sensitive = (param_max_fields_for_field_sensitive > 1);
7354 bitmap_obstack_initialize (&pta_obstack);
7355 bitmap_obstack_initialize (&oldpta_obstack);
7356 bitmap_obstack_initialize (&predbitmap_obstack);
7358 constraints.create (8);
7359 varmap.create (8);
7360 vi_for_tree = new hash_map<tree, varinfo_t>;
7361 call_stmt_vars = new hash_map<gimple *, varinfo_t>;
7363 memset (&stats, 0, sizeof (stats));
7364 shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
7365 init_base_vars ();
7367 gcc_obstack_init (&fake_var_decl_obstack);
7369 final_solutions = new hash_map<varinfo_t, pt_solution *>;
7370 gcc_obstack_init (&final_solutions_obstack);
7373 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
7374 predecessor edges. */
7376 static void
7377 remove_preds_and_fake_succs (constraint_graph_t graph)
7379 unsigned int i;
7381 /* Clear the implicit ref and address nodes from the successor
7382 lists. */
7383 for (i = 1; i < FIRST_REF_NODE; i++)
7385 if (graph->succs[i])
7386 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
7387 FIRST_REF_NODE * 2);
7390 /* Free the successor list for the non-ref nodes. */
7391 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
7393 if (graph->succs[i])
7394 BITMAP_FREE (graph->succs[i]);
7397 /* Now reallocate the size of the successor list as, and blow away
7398 the predecessor bitmaps. */
7399 graph->size = varmap.length ();
7400 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
7402 free (graph->implicit_preds);
7403 graph->implicit_preds = NULL;
7404 free (graph->preds);
7405 graph->preds = NULL;
7406 bitmap_obstack_release (&predbitmap_obstack);
7409 /* Solve the constraint set. */
7411 static void
7412 solve_constraints (void)
7414 class scc_info *si;
7416 /* Sort varinfos so that ones that cannot be pointed to are last.
7417 This makes bitmaps more efficient. */
7418 unsigned int *map = XNEWVEC (unsigned int, varmap.length ());
7419 for (unsigned i = 0; i < integer_id + 1; ++i)
7420 map[i] = i;
7421 /* Start with address-taken vars, followed by not address-taken vars
7422 to move vars never appearing in the points-to solution bitmaps last. */
7423 unsigned j = integer_id + 1;
7424 for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
7425 if (varmap[varmap[i]->head]->address_taken)
7426 map[i] = j++;
7427 for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
7428 if (! varmap[varmap[i]->head]->address_taken)
7429 map[i] = j++;
7430 /* Shuffle varmap according to map. */
7431 for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
7433 while (map[varmap[i]->id] != i)
7434 std::swap (varmap[i], varmap[map[varmap[i]->id]]);
7435 gcc_assert (bitmap_empty_p (varmap[i]->solution));
7436 varmap[i]->id = i;
7437 varmap[i]->next = map[varmap[i]->next];
7438 varmap[i]->head = map[varmap[i]->head];
7440 /* Finally rewrite constraints. */
7441 for (unsigned i = 0; i < constraints.length (); ++i)
7443 constraints[i]->lhs.var = map[constraints[i]->lhs.var];
7444 constraints[i]->rhs.var = map[constraints[i]->rhs.var];
7446 free (map);
7448 if (dump_file)
7449 fprintf (dump_file,
7450 "\nCollapsing static cycles and doing variable "
7451 "substitution\n");
7453 init_graph (varmap.length () * 2);
7455 if (dump_file)
7456 fprintf (dump_file, "Building predecessor graph\n");
7457 build_pred_graph ();
7459 if (dump_file)
7460 fprintf (dump_file, "Detecting pointer and location "
7461 "equivalences\n");
7462 si = perform_var_substitution (graph);
7464 if (dump_file)
7465 fprintf (dump_file, "Rewriting constraints and unifying "
7466 "variables\n");
7467 rewrite_constraints (graph, si);
7469 build_succ_graph ();
7471 free_var_substitution_info (si);
7473 /* Attach complex constraints to graph nodes. */
7474 move_complex_constraints (graph);
7476 if (dump_file)
7477 fprintf (dump_file, "Uniting pointer but not location equivalent "
7478 "variables\n");
7479 unite_pointer_equivalences (graph);
7481 if (dump_file)
7482 fprintf (dump_file, "Finding indirect cycles\n");
7483 find_indirect_cycles (graph);
7485 /* Implicit nodes and predecessors are no longer necessary at this
7486 point. */
7487 remove_preds_and_fake_succs (graph);
7489 if (dump_file && (dump_flags & TDF_GRAPH))
7491 fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
7492 "in dot format:\n");
7493 dump_constraint_graph (dump_file);
7494 fprintf (dump_file, "\n\n");
7497 if (dump_file)
7498 fprintf (dump_file, "Solving graph\n");
7500 solve_graph (graph);
7502 if (dump_file && (dump_flags & TDF_GRAPH))
7504 fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
7505 "in dot format:\n");
7506 dump_constraint_graph (dump_file);
7507 fprintf (dump_file, "\n\n");
7511 /* Create points-to sets for the current function. See the comments
7512 at the start of the file for an algorithmic overview. */
7514 static void
7515 compute_points_to_sets (void)
7517 basic_block bb;
7518 varinfo_t vi;
7520 timevar_push (TV_TREE_PTA);
7522 init_alias_vars ();
7524 intra_create_variable_infos (cfun);
7526 /* Now walk all statements and build the constraint set. */
7527 FOR_EACH_BB_FN (bb, cfun)
7529 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7530 gsi_next (&gsi))
7532 gphi *phi = gsi.phi ();
7534 if (! virtual_operand_p (gimple_phi_result (phi)))
7535 find_func_aliases (cfun, phi);
7538 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
7539 gsi_next (&gsi))
7541 gimple *stmt = gsi_stmt (gsi);
7543 find_func_aliases (cfun, stmt);
7547 if (dump_file)
7549 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
7550 dump_constraints (dump_file, 0);
7553 /* From the constraints compute the points-to sets. */
7554 solve_constraints ();
7556 /* Post-process solutions for escapes through returns. */
7557 edge_iterator ei;
7558 edge e;
7559 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
7560 if (greturn *ret = safe_dyn_cast <greturn *> (last_stmt (e->src)))
7562 tree val = gimple_return_retval (ret);
7563 /* ??? Easy to handle simple indirections with some work.
7564 Arbitrary references like foo.bar.baz are more difficult
7565 (but conservatively easy enough with just looking at the base).
7566 Mind to fixup find_func_aliases as well. */
7567 if (!val || !SSA_VAR_P (val))
7568 continue;
7569 /* returns happen last in non-IPA so they only influence
7570 the ESCAPED solution and we can filter local variables. */
7571 varinfo_t escaped_vi = get_varinfo (find (escaped_id));
7572 varinfo_t vi = lookup_vi_for_tree (val);
7573 bitmap delta = BITMAP_ALLOC (&pta_obstack);
7574 bitmap_iterator bi;
7575 unsigned i;
7576 for (; vi; vi = vi_next (vi))
7578 varinfo_t part_vi = get_varinfo (find (vi->id));
7579 EXECUTE_IF_AND_COMPL_IN_BITMAP (part_vi->solution,
7580 escaped_vi->solution, 0, i, bi)
7582 varinfo_t pointed_to_vi = get_varinfo (i);
7583 if (pointed_to_vi->is_global_var
7584 /* We delay marking of heap memory as global. */
7585 || pointed_to_vi->is_heap_var)
7586 bitmap_set_bit (delta, i);
7590 /* Now compute the transitive closure. */
7591 bitmap_ior_into (escaped_vi->solution, delta);
7592 bitmap new_delta = BITMAP_ALLOC (&pta_obstack);
7593 while (!bitmap_empty_p (delta))
7595 EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
7597 varinfo_t pointed_to_vi = get_varinfo (i);
7598 pointed_to_vi = get_varinfo (find (pointed_to_vi->id));
7599 unsigned j;
7600 bitmap_iterator bi2;
7601 EXECUTE_IF_AND_COMPL_IN_BITMAP (pointed_to_vi->solution,
7602 escaped_vi->solution,
7603 0, j, bi2)
7605 varinfo_t pointed_to_vi2 = get_varinfo (j);
7606 if (pointed_to_vi2->is_global_var
7607 /* We delay marking of heap memory as global. */
7608 || pointed_to_vi2->is_heap_var)
7609 bitmap_set_bit (new_delta, j);
7612 bitmap_ior_into (escaped_vi->solution, new_delta);
7613 bitmap_clear (delta);
7614 std::swap (delta, new_delta);
7616 BITMAP_FREE (delta);
7617 BITMAP_FREE (new_delta);
7620 if (dump_file)
7621 dump_sa_points_to_info (dump_file);
7623 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
7624 cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl,
7625 get_varinfo (escaped_id));
7627 /* Make sure the ESCAPED solution (which is used as placeholder in
7628 other solutions) does not reference itself. This simplifies
7629 points-to solution queries. */
7630 cfun->gimple_df->escaped.escaped = 0;
7632 /* Compute the points-to sets for pointer SSA_NAMEs. */
7633 unsigned i;
7634 tree ptr;
7636 FOR_EACH_SSA_NAME (i, ptr, cfun)
7638 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
7639 find_what_p_points_to (cfun->decl, ptr);
7642 /* Compute the call-used/clobbered sets. */
7643 FOR_EACH_BB_FN (bb, cfun)
7645 gimple_stmt_iterator gsi;
7647 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7649 gcall *stmt;
7650 struct pt_solution *pt;
7652 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
7653 if (!stmt)
7654 continue;
7656 pt = gimple_call_use_set (stmt);
7657 if (gimple_call_flags (stmt) & ECF_CONST)
7658 memset (pt, 0, sizeof (struct pt_solution));
7659 else
7661 bool uses_global_memory = true;
7662 bool reads_global_memory = true;
7664 determine_global_memory_access (stmt, NULL,
7665 &reads_global_memory,
7666 &uses_global_memory);
7667 if ((vi = lookup_call_use_vi (stmt)) != NULL)
7669 *pt = find_what_var_points_to (cfun->decl, vi);
7670 /* Escaped (and thus nonlocal) variables are always
7671 implicitly used by calls. */
7672 /* ??? ESCAPED can be empty even though NONLOCAL
7673 always escaped. */
7674 if (uses_global_memory)
7676 pt->nonlocal = 1;
7677 pt->escaped = 1;
7680 else if (uses_global_memory)
7682 /* If there is nothing special about this call then
7683 we have made everything that is used also escape. */
7684 *pt = cfun->gimple_df->escaped;
7685 pt->nonlocal = 1;
7687 else
7688 memset (pt, 0, sizeof (struct pt_solution));
7691 pt = gimple_call_clobber_set (stmt);
7692 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7693 memset (pt, 0, sizeof (struct pt_solution));
7694 else
7696 bool writes_global_memory = true;
7698 determine_global_memory_access (stmt, &writes_global_memory,
7699 NULL, NULL);
7701 if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7703 *pt = find_what_var_points_to (cfun->decl, vi);
7704 /* Escaped (and thus nonlocal) variables are always
7705 implicitly clobbered by calls. */
7706 /* ??? ESCAPED can be empty even though NONLOCAL
7707 always escaped. */
7708 if (writes_global_memory)
7710 pt->nonlocal = 1;
7711 pt->escaped = 1;
7714 else if (writes_global_memory)
7716 /* If there is nothing special about this call then
7717 we have made everything that is used also escape. */
7718 *pt = cfun->gimple_df->escaped;
7719 pt->nonlocal = 1;
7721 else
7722 memset (pt, 0, sizeof (struct pt_solution));
7727 timevar_pop (TV_TREE_PTA);
7731 /* Delete created points-to sets. */
7733 static void
7734 delete_points_to_sets (void)
7736 unsigned int i;
7738 delete shared_bitmap_table;
7739 shared_bitmap_table = NULL;
7740 if (dump_file && (dump_flags & TDF_STATS))
7741 fprintf (dump_file, "Points to sets created:%d\n",
7742 stats.points_to_sets_created);
7744 delete vi_for_tree;
7745 delete call_stmt_vars;
7746 bitmap_obstack_release (&pta_obstack);
7747 constraints.release ();
7749 for (i = 0; i < graph->size; i++)
7750 graph->complex[i].release ();
7751 free (graph->complex);
7753 free (graph->rep);
7754 free (graph->succs);
7755 free (graph->pe);
7756 free (graph->pe_rep);
7757 free (graph->indirect_cycles);
7758 free (graph);
7760 varmap.release ();
7761 variable_info_pool.release ();
7762 constraint_pool.release ();
7764 obstack_free (&fake_var_decl_obstack, NULL);
7766 delete final_solutions;
7767 obstack_free (&final_solutions_obstack, NULL);
7770 struct vls_data
7772 unsigned short clique;
7773 bool escaped_p;
7774 bitmap rvars;
7777 /* Mark "other" loads and stores as belonging to CLIQUE and with
7778 base zero. */
7780 static bool
7781 visit_loadstore (gimple *, tree base, tree ref, void *data)
7783 unsigned short clique = ((vls_data *) data)->clique;
7784 bitmap rvars = ((vls_data *) data)->rvars;
7785 bool escaped_p = ((vls_data *) data)->escaped_p;
7786 if (TREE_CODE (base) == MEM_REF
7787 || TREE_CODE (base) == TARGET_MEM_REF)
7789 tree ptr = TREE_OPERAND (base, 0);
7790 if (TREE_CODE (ptr) == SSA_NAME)
7792 /* For parameters, get at the points-to set for the actual parm
7793 decl. */
7794 if (SSA_NAME_IS_DEFAULT_DEF (ptr)
7795 && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
7796 || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
7797 ptr = SSA_NAME_VAR (ptr);
7799 /* We need to make sure 'ptr' doesn't include any of
7800 the restrict tags we added bases for in its points-to set. */
7801 varinfo_t vi = lookup_vi_for_tree (ptr);
7802 if (! vi)
7803 return false;
7805 vi = get_varinfo (find (vi->id));
7806 if (bitmap_intersect_p (rvars, vi->solution)
7807 || (escaped_p && bitmap_bit_p (vi->solution, escaped_id)))
7808 return false;
7811 /* Do not overwrite existing cliques (that includes clique, base
7812 pairs we just set). */
7813 if (MR_DEPENDENCE_CLIQUE (base) == 0)
7815 MR_DEPENDENCE_CLIQUE (base) = clique;
7816 MR_DEPENDENCE_BASE (base) = 0;
7820 /* For plain decl accesses see whether they are accesses to globals
7821 and rewrite them to MEM_REFs with { clique, 0 }. */
7822 if (VAR_P (base)
7823 && is_global_var (base)
7824 /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
7825 ops callback. */
7826 && base != ref)
7828 tree *basep = &ref;
7829 while (handled_component_p (*basep))
7830 basep = &TREE_OPERAND (*basep, 0);
7831 gcc_assert (VAR_P (*basep));
7832 tree ptr = build_fold_addr_expr (*basep);
7833 tree zero = build_int_cst (TREE_TYPE (ptr), 0);
7834 *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
7835 MR_DEPENDENCE_CLIQUE (*basep) = clique;
7836 MR_DEPENDENCE_BASE (*basep) = 0;
7839 return false;
7842 struct msdi_data {
7843 tree ptr;
7844 unsigned short *clique;
7845 unsigned short *last_ruid;
7846 varinfo_t restrict_var;
7849 /* If BASE is a MEM_REF then assign a clique, base pair to it, updating
7850 CLIQUE, *RESTRICT_VAR and LAST_RUID as passed via DATA.
7851 Return whether dependence info was assigned to BASE. */
7853 static bool
7854 maybe_set_dependence_info (gimple *, tree base, tree, void *data)
7856 tree ptr = ((msdi_data *)data)->ptr;
7857 unsigned short &clique = *((msdi_data *)data)->clique;
7858 unsigned short &last_ruid = *((msdi_data *)data)->last_ruid;
7859 varinfo_t restrict_var = ((msdi_data *)data)->restrict_var;
7860 if ((TREE_CODE (base) == MEM_REF
7861 || TREE_CODE (base) == TARGET_MEM_REF)
7862 && TREE_OPERAND (base, 0) == ptr)
7864 /* Do not overwrite existing cliques. This avoids overwriting dependence
7865 info inlined from a function with restrict parameters inlined
7866 into a function with restrict parameters. This usually means we
7867 prefer to be precise in innermost loops. */
7868 if (MR_DEPENDENCE_CLIQUE (base) == 0)
7870 if (clique == 0)
7872 if (cfun->last_clique == 0)
7873 cfun->last_clique = 1;
7874 clique = 1;
7876 if (restrict_var->ruid == 0)
7877 restrict_var->ruid = ++last_ruid;
7878 MR_DEPENDENCE_CLIQUE (base) = clique;
7879 MR_DEPENDENCE_BASE (base) = restrict_var->ruid;
7880 return true;
7883 return false;
7886 /* Clear dependence info for the clique DATA. */
7888 static bool
7889 clear_dependence_clique (gimple *, tree base, tree, void *data)
7891 unsigned short clique = (uintptr_t)data;
7892 if ((TREE_CODE (base) == MEM_REF
7893 || TREE_CODE (base) == TARGET_MEM_REF)
7894 && MR_DEPENDENCE_CLIQUE (base) == clique)
7896 MR_DEPENDENCE_CLIQUE (base) = 0;
7897 MR_DEPENDENCE_BASE (base) = 0;
7900 return false;
7903 /* Compute the set of independend memory references based on restrict
7904 tags and their conservative propagation to the points-to sets. */
7906 static void
7907 compute_dependence_clique (void)
7909 /* First clear the special "local" clique. */
7910 basic_block bb;
7911 if (cfun->last_clique != 0)
7912 FOR_EACH_BB_FN (bb, cfun)
7913 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7914 !gsi_end_p (gsi); gsi_next (&gsi))
7916 gimple *stmt = gsi_stmt (gsi);
7917 walk_stmt_load_store_ops (stmt, (void *)(uintptr_t) 1,
7918 clear_dependence_clique,
7919 clear_dependence_clique);
7922 unsigned short clique = 0;
7923 unsigned short last_ruid = 0;
7924 bitmap rvars = BITMAP_ALLOC (NULL);
7925 bool escaped_p = false;
7926 for (unsigned i = 0; i < num_ssa_names; ++i)
7928 tree ptr = ssa_name (i);
7929 if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
7930 continue;
7932 /* Avoid all this when ptr is not dereferenced? */
7933 tree p = ptr;
7934 if (SSA_NAME_IS_DEFAULT_DEF (ptr)
7935 && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
7936 || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
7937 p = SSA_NAME_VAR (ptr);
7938 varinfo_t vi = lookup_vi_for_tree (p);
7939 if (!vi)
7940 continue;
7941 vi = get_varinfo (find (vi->id));
7942 bitmap_iterator bi;
7943 unsigned j;
7944 varinfo_t restrict_var = NULL;
7945 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
7947 varinfo_t oi = get_varinfo (j);
7948 if (oi->head != j)
7949 oi = get_varinfo (oi->head);
7950 if (oi->is_restrict_var)
7952 if (restrict_var
7953 && restrict_var != oi)
7955 if (dump_file && (dump_flags & TDF_DETAILS))
7957 fprintf (dump_file, "found restrict pointed-to "
7958 "for ");
7959 print_generic_expr (dump_file, ptr);
7960 fprintf (dump_file, " but not exclusively\n");
7962 restrict_var = NULL;
7963 break;
7965 restrict_var = oi;
7967 /* NULL is the only other valid points-to entry. */
7968 else if (oi->id != nothing_id)
7970 restrict_var = NULL;
7971 break;
7974 /* Ok, found that ptr must(!) point to a single(!) restrict
7975 variable. */
7976 /* ??? PTA isn't really a proper propagation engine to compute
7977 this property.
7978 ??? We could handle merging of two restricts by unifying them. */
7979 if (restrict_var)
7981 /* Now look at possible dereferences of ptr. */
7982 imm_use_iterator ui;
7983 gimple *use_stmt;
7984 bool used = false;
7985 msdi_data data = { ptr, &clique, &last_ruid, restrict_var };
7986 FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
7987 used |= walk_stmt_load_store_ops (use_stmt, &data,
7988 maybe_set_dependence_info,
7989 maybe_set_dependence_info);
7990 if (used)
7992 /* Add all subvars to the set of restrict pointed-to set. */
7993 for (unsigned sv = restrict_var->head; sv != 0;
7994 sv = get_varinfo (sv)->next)
7995 bitmap_set_bit (rvars, sv);
7996 varinfo_t escaped = get_varinfo (find (escaped_id));
7997 if (bitmap_bit_p (escaped->solution, restrict_var->id))
7998 escaped_p = true;
8003 if (clique != 0)
8005 /* Assign the BASE id zero to all accesses not based on a restrict
8006 pointer. That way they get disambiguated against restrict
8007 accesses but not against each other. */
8008 /* ??? For restricts derived from globals (thus not incoming
8009 parameters) we can't restrict scoping properly thus the following
8010 is too aggressive there. For now we have excluded those globals from
8011 getting into the MR_DEPENDENCE machinery. */
8012 vls_data data = { clique, escaped_p, rvars };
8013 basic_block bb;
8014 FOR_EACH_BB_FN (bb, cfun)
8015 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
8016 !gsi_end_p (gsi); gsi_next (&gsi))
8018 gimple *stmt = gsi_stmt (gsi);
8019 walk_stmt_load_store_ops (stmt, &data,
8020 visit_loadstore, visit_loadstore);
8024 BITMAP_FREE (rvars);
8027 /* Compute points-to information for every SSA_NAME pointer in the
8028 current function and compute the transitive closure of escaped
8029 variables to re-initialize the call-clobber states of local variables. */
8031 unsigned int
8032 compute_may_aliases (void)
8034 if (cfun->gimple_df->ipa_pta)
8036 if (dump_file)
8038 fprintf (dump_file, "\nNot re-computing points-to information "
8039 "because IPA points-to information is available.\n\n");
8041 /* But still dump what we have remaining it. */
8042 dump_alias_info (dump_file);
8045 return 0;
8048 /* For each pointer P_i, determine the sets of variables that P_i may
8049 point-to. Compute the reachability set of escaped and call-used
8050 variables. */
8051 compute_points_to_sets ();
8053 /* Debugging dumps. */
8054 if (dump_file)
8055 dump_alias_info (dump_file);
8057 /* Compute restrict-based memory disambiguations. */
8058 compute_dependence_clique ();
8060 /* Deallocate memory used by aliasing data structures and the internal
8061 points-to solution. */
8062 delete_points_to_sets ();
8064 gcc_assert (!need_ssa_update_p (cfun));
8066 return 0;
8069 /* A dummy pass to cause points-to information to be computed via
8070 TODO_rebuild_alias. */
8072 namespace {
8074 const pass_data pass_data_build_alias =
8076 GIMPLE_PASS, /* type */
8077 "alias", /* name */
8078 OPTGROUP_NONE, /* optinfo_flags */
8079 TV_NONE, /* tv_id */
8080 ( PROP_cfg | PROP_ssa ), /* properties_required */
8081 0, /* properties_provided */
8082 0, /* properties_destroyed */
8083 0, /* todo_flags_start */
8084 TODO_rebuild_alias, /* todo_flags_finish */
8087 class pass_build_alias : public gimple_opt_pass
8089 public:
8090 pass_build_alias (gcc::context *ctxt)
8091 : gimple_opt_pass (pass_data_build_alias, ctxt)
8094 /* opt_pass methods: */
8095 bool gate (function *) final override { return flag_tree_pta; }
8097 }; // class pass_build_alias
8099 } // anon namespace
8101 gimple_opt_pass *
8102 make_pass_build_alias (gcc::context *ctxt)
8104 return new pass_build_alias (ctxt);
8107 /* A dummy pass to cause points-to information to be computed via
8108 TODO_rebuild_alias. */
8110 namespace {
8112 const pass_data pass_data_build_ealias =
8114 GIMPLE_PASS, /* type */
8115 "ealias", /* name */
8116 OPTGROUP_NONE, /* optinfo_flags */
8117 TV_NONE, /* tv_id */
8118 ( PROP_cfg | PROP_ssa ), /* properties_required */
8119 0, /* properties_provided */
8120 0, /* properties_destroyed */
8121 0, /* todo_flags_start */
8122 TODO_rebuild_alias, /* todo_flags_finish */
8125 class pass_build_ealias : public gimple_opt_pass
8127 public:
8128 pass_build_ealias (gcc::context *ctxt)
8129 : gimple_opt_pass (pass_data_build_ealias, ctxt)
8132 /* opt_pass methods: */
8133 bool gate (function *) final override { return flag_tree_pta; }
8135 }; // class pass_build_ealias
8137 } // anon namespace
8139 gimple_opt_pass *
8140 make_pass_build_ealias (gcc::context *ctxt)
8142 return new pass_build_ealias (ctxt);
8146 /* IPA PTA solutions for ESCAPED. */
8147 struct pt_solution ipa_escaped_pt
8148 = { true, false, false, false, false,
8149 false, false, false, false, false, NULL };
8151 /* Associate node with varinfo DATA. Worker for
8152 cgraph_for_symbol_thunks_and_aliases. */
8153 static bool
8154 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
8156 if ((node->alias
8157 || (node->thunk
8158 && ! node->inlined_to))
8159 && node->analyzed
8160 && !node->ifunc_resolver)
8161 insert_vi_for_tree (node->decl, (varinfo_t)data);
8162 return false;
8165 /* Dump varinfo VI to FILE. */
8167 static void
8168 dump_varinfo (FILE *file, varinfo_t vi)
8170 if (vi == NULL)
8171 return;
8173 fprintf (file, "%u: %s\n", vi->id, vi->name);
8175 const char *sep = " ";
8176 if (vi->is_artificial_var)
8177 fprintf (file, "%sartificial", sep);
8178 if (vi->is_special_var)
8179 fprintf (file, "%sspecial", sep);
8180 if (vi->is_unknown_size_var)
8181 fprintf (file, "%sunknown-size", sep);
8182 if (vi->is_full_var)
8183 fprintf (file, "%sfull", sep);
8184 if (vi->is_heap_var)
8185 fprintf (file, "%sheap", sep);
8186 if (vi->may_have_pointers)
8187 fprintf (file, "%smay-have-pointers", sep);
8188 if (vi->only_restrict_pointers)
8189 fprintf (file, "%sonly-restrict-pointers", sep);
8190 if (vi->is_restrict_var)
8191 fprintf (file, "%sis-restrict-var", sep);
8192 if (vi->is_global_var)
8193 fprintf (file, "%sglobal", sep);
8194 if (vi->is_ipa_escape_point)
8195 fprintf (file, "%sipa-escape-point", sep);
8196 if (vi->is_fn_info)
8197 fprintf (file, "%sfn-info", sep);
8198 if (vi->ruid)
8199 fprintf (file, "%srestrict-uid:%u", sep, vi->ruid);
8200 if (vi->next)
8201 fprintf (file, "%snext:%u", sep, vi->next);
8202 if (vi->head != vi->id)
8203 fprintf (file, "%shead:%u", sep, vi->head);
8204 if (vi->offset)
8205 fprintf (file, "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset);
8206 if (vi->size != ~(unsigned HOST_WIDE_INT)0)
8207 fprintf (file, "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size);
8208 if (vi->fullsize != ~(unsigned HOST_WIDE_INT)0
8209 && vi->fullsize != vi->size)
8210 fprintf (file, "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep,
8211 vi->fullsize);
8212 fprintf (file, "\n");
8214 if (vi->solution && !bitmap_empty_p (vi->solution))
8216 bitmap_iterator bi;
8217 unsigned i;
8218 fprintf (file, " solution: {");
8219 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
8220 fprintf (file, " %u", i);
8221 fprintf (file, " }\n");
8224 if (vi->oldsolution && !bitmap_empty_p (vi->oldsolution)
8225 && !bitmap_equal_p (vi->solution, vi->oldsolution))
8227 bitmap_iterator bi;
8228 unsigned i;
8229 fprintf (file, " oldsolution: {");
8230 EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi)
8231 fprintf (file, " %u", i);
8232 fprintf (file, " }\n");
8236 /* Dump varinfo VI to stderr. */
8238 DEBUG_FUNCTION void
8239 debug_varinfo (varinfo_t vi)
8241 dump_varinfo (stderr, vi);
8244 /* Dump varmap to FILE. */
8246 static void
8247 dump_varmap (FILE *file)
8249 if (varmap.length () == 0)
8250 return;
8252 fprintf (file, "variables:\n");
8254 for (unsigned int i = 0; i < varmap.length (); ++i)
8256 varinfo_t vi = get_varinfo (i);
8257 dump_varinfo (file, vi);
8260 fprintf (file, "\n");
8263 /* Dump varmap to stderr. */
8265 DEBUG_FUNCTION void
8266 debug_varmap (void)
8268 dump_varmap (stderr);
8271 /* Compute whether node is refered to non-locally. Worker for
8272 cgraph_for_symbol_thunks_and_aliases. */
8273 static bool
8274 refered_from_nonlocal_fn (struct cgraph_node *node, void *data)
8276 bool *nonlocal_p = (bool *)data;
8277 *nonlocal_p |= (node->used_from_other_partition
8278 || DECL_EXTERNAL (node->decl)
8279 || TREE_PUBLIC (node->decl)
8280 || node->force_output
8281 || lookup_attribute ("noipa", DECL_ATTRIBUTES (node->decl)));
8282 return false;
8285 /* Same for varpool nodes. */
8286 static bool
8287 refered_from_nonlocal_var (struct varpool_node *node, void *data)
8289 bool *nonlocal_p = (bool *)data;
8290 *nonlocal_p |= (node->used_from_other_partition
8291 || DECL_EXTERNAL (node->decl)
8292 || TREE_PUBLIC (node->decl)
8293 || node->force_output);
8294 return false;
8297 /* Execute the driver for IPA PTA. */
8298 static unsigned int
8299 ipa_pta_execute (void)
8301 struct cgraph_node *node;
8302 varpool_node *var;
8303 unsigned int from = 0;
8305 in_ipa_mode = 1;
8307 init_alias_vars ();
8309 if (dump_file && (dump_flags & TDF_DETAILS))
8311 symtab->dump (dump_file);
8312 fprintf (dump_file, "\n");
8315 if (dump_file)
8317 fprintf (dump_file, "Generating generic constraints\n\n");
8318 dump_constraints (dump_file, from);
8319 fprintf (dump_file, "\n");
8320 from = constraints.length ();
8323 /* Build the constraints. */
8324 FOR_EACH_DEFINED_FUNCTION (node)
8326 varinfo_t vi;
8327 /* Nodes without a body in this partition are not interesting.
8328 Especially do not visit clones at this point for now - we
8329 get duplicate decls there for inline clones at least. */
8330 if (!node->has_gimple_body_p ()
8331 || node->in_other_partition
8332 || node->inlined_to)
8333 continue;
8334 node->get_body ();
8336 gcc_assert (!node->clone_of);
8338 /* For externally visible or attribute used annotated functions use
8339 local constraints for their arguments.
8340 For local functions we see all callers and thus do not need initial
8341 constraints for parameters. */
8342 bool nonlocal_p = (node->used_from_other_partition
8343 || DECL_EXTERNAL (node->decl)
8344 || TREE_PUBLIC (node->decl)
8345 || node->force_output
8346 || lookup_attribute ("noipa",
8347 DECL_ATTRIBUTES (node->decl)));
8348 node->call_for_symbol_thunks_and_aliases (refered_from_nonlocal_fn,
8349 &nonlocal_p, true);
8351 vi = create_function_info_for (node->decl,
8352 alias_get_name (node->decl), false,
8353 nonlocal_p);
8354 if (dump_file
8355 && from != constraints.length ())
8357 fprintf (dump_file,
8358 "Generating initial constraints for %s",
8359 node->dump_name ());
8360 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
8361 fprintf (dump_file, " (%s)",
8362 IDENTIFIER_POINTER
8363 (DECL_ASSEMBLER_NAME (node->decl)));
8364 fprintf (dump_file, "\n\n");
8365 dump_constraints (dump_file, from);
8366 fprintf (dump_file, "\n");
8368 from = constraints.length ();
8371 node->call_for_symbol_thunks_and_aliases
8372 (associate_varinfo_to_alias, vi, true);
8375 /* Create constraints for global variables and their initializers. */
8376 FOR_EACH_VARIABLE (var)
8378 if (var->alias && var->analyzed)
8379 continue;
8381 varinfo_t vi = get_vi_for_tree (var->decl);
8383 /* For the purpose of IPA PTA unit-local globals are not
8384 escape points. */
8385 bool nonlocal_p = (DECL_EXTERNAL (var->decl)
8386 || TREE_PUBLIC (var->decl)
8387 || var->used_from_other_partition
8388 || var->force_output);
8389 var->call_for_symbol_and_aliases (refered_from_nonlocal_var,
8390 &nonlocal_p, true);
8391 if (nonlocal_p)
8392 vi->is_ipa_escape_point = true;
8395 if (dump_file
8396 && from != constraints.length ())
8398 fprintf (dump_file,
8399 "Generating constraints for global initializers\n\n");
8400 dump_constraints (dump_file, from);
8401 fprintf (dump_file, "\n");
8402 from = constraints.length ();
8405 FOR_EACH_DEFINED_FUNCTION (node)
8407 struct function *func;
8408 basic_block bb;
8410 /* Nodes without a body in this partition are not interesting. */
8411 if (!node->has_gimple_body_p ()
8412 || node->in_other_partition
8413 || node->clone_of)
8414 continue;
8416 if (dump_file)
8418 fprintf (dump_file,
8419 "Generating constraints for %s", node->dump_name ());
8420 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
8421 fprintf (dump_file, " (%s)",
8422 IDENTIFIER_POINTER
8423 (DECL_ASSEMBLER_NAME (node->decl)));
8424 fprintf (dump_file, "\n");
8427 func = DECL_STRUCT_FUNCTION (node->decl);
8428 gcc_assert (cfun == NULL);
8430 /* Build constriants for the function body. */
8431 FOR_EACH_BB_FN (bb, func)
8433 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
8434 gsi_next (&gsi))
8436 gphi *phi = gsi.phi ();
8438 if (! virtual_operand_p (gimple_phi_result (phi)))
8439 find_func_aliases (func, phi);
8442 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
8443 gsi_next (&gsi))
8445 gimple *stmt = gsi_stmt (gsi);
8447 find_func_aliases (func, stmt);
8448 find_func_clobbers (func, stmt);
8452 if (dump_file)
8454 fprintf (dump_file, "\n");
8455 dump_constraints (dump_file, from);
8456 fprintf (dump_file, "\n");
8457 from = constraints.length ();
8461 /* From the constraints compute the points-to sets. */
8462 solve_constraints ();
8464 if (dump_file)
8465 dump_sa_points_to_info (dump_file);
8467 /* Now post-process solutions to handle locals from different
8468 runtime instantiations coming in through recursive invocations. */
8469 unsigned shadow_var_cnt = 0;
8470 for (unsigned i = 1; i < varmap.length (); ++i)
8472 varinfo_t fi = get_varinfo (i);
8473 if (fi->is_fn_info
8474 && fi->decl)
8475 /* Automatic variables pointed to by their containing functions
8476 parameters need this treatment. */
8477 for (varinfo_t ai = first_vi_for_offset (fi, fi_parm_base);
8478 ai; ai = vi_next (ai))
8480 varinfo_t vi = get_varinfo (find (ai->id));
8481 bitmap_iterator bi;
8482 unsigned j;
8483 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8485 varinfo_t pt = get_varinfo (j);
8486 if (pt->shadow_var_uid == 0
8487 && pt->decl
8488 && auto_var_in_fn_p (pt->decl, fi->decl))
8490 pt->shadow_var_uid = allocate_decl_uid ();
8491 shadow_var_cnt++;
8495 /* As well as global variables which are another way of passing
8496 arguments to recursive invocations. */
8497 else if (fi->is_global_var)
8499 for (varinfo_t ai = fi; ai; ai = vi_next (ai))
8501 varinfo_t vi = get_varinfo (find (ai->id));
8502 bitmap_iterator bi;
8503 unsigned j;
8504 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8506 varinfo_t pt = get_varinfo (j);
8507 if (pt->shadow_var_uid == 0
8508 && pt->decl
8509 && auto_var_p (pt->decl))
8511 pt->shadow_var_uid = allocate_decl_uid ();
8512 shadow_var_cnt++;
8518 if (shadow_var_cnt && dump_file && (dump_flags & TDF_DETAILS))
8519 fprintf (dump_file, "Allocated %u shadow variables for locals "
8520 "maybe leaking into recursive invocations of their containing "
8521 "functions\n", shadow_var_cnt);
8523 /* Compute the global points-to sets for ESCAPED.
8524 ??? Note that the computed escape set is not correct
8525 for the whole unit as we fail to consider graph edges to
8526 externally visible functions. */
8527 ipa_escaped_pt = find_what_var_points_to (NULL, get_varinfo (escaped_id));
8529 /* Make sure the ESCAPED solution (which is used as placeholder in
8530 other solutions) does not reference itself. This simplifies
8531 points-to solution queries. */
8532 ipa_escaped_pt.ipa_escaped = 0;
8534 /* Assign the points-to sets to the SSA names in the unit. */
8535 FOR_EACH_DEFINED_FUNCTION (node)
8537 tree ptr;
8538 struct function *fn;
8539 unsigned i;
8540 basic_block bb;
8542 /* Nodes without a body in this partition are not interesting. */
8543 if (!node->has_gimple_body_p ()
8544 || node->in_other_partition
8545 || node->clone_of)
8546 continue;
8548 fn = DECL_STRUCT_FUNCTION (node->decl);
8550 /* Compute the points-to sets for pointer SSA_NAMEs. */
8551 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
8553 if (ptr
8554 && POINTER_TYPE_P (TREE_TYPE (ptr)))
8555 find_what_p_points_to (node->decl, ptr);
8558 /* Compute the call-use and call-clobber sets for indirect calls
8559 and calls to external functions. */
8560 FOR_EACH_BB_FN (bb, fn)
8562 gimple_stmt_iterator gsi;
8564 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
8566 gcall *stmt;
8567 struct pt_solution *pt;
8568 varinfo_t vi, fi;
8569 tree decl;
8571 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
8572 if (!stmt)
8573 continue;
8575 /* Handle direct calls to functions with body. */
8576 decl = gimple_call_fndecl (stmt);
8579 tree called_decl = NULL_TREE;
8580 if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
8581 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
8582 else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
8583 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
8585 if (called_decl != NULL_TREE
8586 && !fndecl_maybe_in_other_partition (called_decl))
8587 decl = called_decl;
8590 if (decl
8591 && (fi = lookup_vi_for_tree (decl))
8592 && fi->is_fn_info)
8594 *gimple_call_clobber_set (stmt)
8595 = find_what_var_points_to
8596 (node->decl, first_vi_for_offset (fi, fi_clobbers));
8597 *gimple_call_use_set (stmt)
8598 = find_what_var_points_to
8599 (node->decl, first_vi_for_offset (fi, fi_uses));
8601 /* Handle direct calls to external functions. */
8602 else if (decl && (!fi || fi->decl))
8604 pt = gimple_call_use_set (stmt);
8605 if (gimple_call_flags (stmt) & ECF_CONST)
8606 memset (pt, 0, sizeof (struct pt_solution));
8607 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
8609 *pt = find_what_var_points_to (node->decl, vi);
8610 /* Escaped (and thus nonlocal) variables are always
8611 implicitly used by calls. */
8612 /* ??? ESCAPED can be empty even though NONLOCAL
8613 always escaped. */
8614 pt->nonlocal = 1;
8615 pt->ipa_escaped = 1;
8617 else
8619 /* If there is nothing special about this call then
8620 we have made everything that is used also escape. */
8621 *pt = ipa_escaped_pt;
8622 pt->nonlocal = 1;
8625 pt = gimple_call_clobber_set (stmt);
8626 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
8627 memset (pt, 0, sizeof (struct pt_solution));
8628 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
8630 *pt = find_what_var_points_to (node->decl, vi);
8631 /* Escaped (and thus nonlocal) variables are always
8632 implicitly clobbered by calls. */
8633 /* ??? ESCAPED can be empty even though NONLOCAL
8634 always escaped. */
8635 pt->nonlocal = 1;
8636 pt->ipa_escaped = 1;
8638 else
8640 /* If there is nothing special about this call then
8641 we have made everything that is used also escape. */
8642 *pt = ipa_escaped_pt;
8643 pt->nonlocal = 1;
8646 /* Handle indirect calls. */
8647 else if ((fi = get_fi_for_callee (stmt)))
8649 /* We need to accumulate all clobbers/uses of all possible
8650 callees. */
8651 fi = get_varinfo (find (fi->id));
8652 /* If we cannot constrain the set of functions we'll end up
8653 calling we end up using/clobbering everything. */
8654 if (bitmap_bit_p (fi->solution, anything_id)
8655 || bitmap_bit_p (fi->solution, nonlocal_id)
8656 || bitmap_bit_p (fi->solution, escaped_id))
8658 pt_solution_reset (gimple_call_clobber_set (stmt));
8659 pt_solution_reset (gimple_call_use_set (stmt));
8661 else
8663 bitmap_iterator bi;
8664 unsigned i;
8665 struct pt_solution *uses, *clobbers;
8667 uses = gimple_call_use_set (stmt);
8668 clobbers = gimple_call_clobber_set (stmt);
8669 memset (uses, 0, sizeof (struct pt_solution));
8670 memset (clobbers, 0, sizeof (struct pt_solution));
8671 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
8673 struct pt_solution sol;
8675 vi = get_varinfo (i);
8676 if (!vi->is_fn_info)
8678 /* ??? We could be more precise here? */
8679 uses->nonlocal = 1;
8680 uses->ipa_escaped = 1;
8681 clobbers->nonlocal = 1;
8682 clobbers->ipa_escaped = 1;
8683 continue;
8686 if (!uses->anything)
8688 sol = find_what_var_points_to
8689 (node->decl,
8690 first_vi_for_offset (vi, fi_uses));
8691 pt_solution_ior_into (uses, &sol);
8693 if (!clobbers->anything)
8695 sol = find_what_var_points_to
8696 (node->decl,
8697 first_vi_for_offset (vi, fi_clobbers));
8698 pt_solution_ior_into (clobbers, &sol);
8703 else
8704 gcc_unreachable ();
8708 fn->gimple_df->ipa_pta = true;
8710 /* We have to re-set the final-solution cache after each function
8711 because what is a "global" is dependent on function context. */
8712 final_solutions->empty ();
8713 obstack_free (&final_solutions_obstack, NULL);
8714 gcc_obstack_init (&final_solutions_obstack);
8717 delete_points_to_sets ();
8719 in_ipa_mode = 0;
8721 return 0;
8724 namespace {
8726 const pass_data pass_data_ipa_pta =
8728 SIMPLE_IPA_PASS, /* type */
8729 "pta", /* name */
8730 OPTGROUP_NONE, /* optinfo_flags */
8731 TV_IPA_PTA, /* tv_id */
8732 0, /* properties_required */
8733 0, /* properties_provided */
8734 0, /* properties_destroyed */
8735 0, /* todo_flags_start */
8736 0, /* todo_flags_finish */
8739 class pass_ipa_pta : public simple_ipa_opt_pass
8741 public:
8742 pass_ipa_pta (gcc::context *ctxt)
8743 : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
8746 /* opt_pass methods: */
8747 bool gate (function *) final override
8749 return (optimize
8750 && flag_ipa_pta
8751 /* Don't bother doing anything if the program has errors. */
8752 && !seen_error ());
8755 opt_pass * clone () final override { return new pass_ipa_pta (m_ctxt); }
8757 unsigned int execute (function *) final override
8759 return ipa_pta_execute ();
8762 }; // class pass_ipa_pta
8764 } // anon namespace
8766 simple_ipa_opt_pass *
8767 make_pass_ipa_pta (gcc::context *ctxt)
8769 return new pass_ipa_pta (ctxt);