c, c++: attribute format on a ctor with a vbase [PR101833, PR47634]
[official-gcc.git] / gcc / tree-ssa-structalias.cc
blob581bdcf565243a010ae12b750c196b72ab0cc4e3
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2022 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "tree-pass.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "diagnostic-core.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "stmt.h"
37 #include "gimple-iterator.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "gimple-walk.h"
41 #include "varasm.h"
42 #include "stringpool.h"
43 #include "attribs.h"
44 #include "tree-ssa.h"
45 #include "tree-cfg.h"
46 #include "gimple-range.h"
47 #include "ipa-modref-tree.h"
48 #include "ipa-modref.h"
49 #include "attr-fnspec.h"
51 /* The idea behind this analyzer is to generate set constraints from the
52 program, then solve the resulting constraints in order to generate the
53 points-to sets.
55 Set constraints are a way of modeling program analysis problems that
56 involve sets. They consist of an inclusion constraint language,
57 describing the variables (each variable is a set) and operations that
58 are involved on the variables, and a set of rules that derive facts
59 from these operations. To solve a system of set constraints, you derive
60 all possible facts under the rules, which gives you the correct sets
61 as a consequence.
63 See "Efficient Field-sensitive pointer analysis for C" by "David
64 J. Pearce and Paul H. J. Kelly and Chris Hankin", at
65 http://citeseer.ist.psu.edu/pearce04efficient.html
67 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
68 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
69 http://citeseer.ist.psu.edu/heintze01ultrafast.html
71 There are three types of real constraint expressions, DEREF,
72 ADDRESSOF, and SCALAR. Each constraint expression consists
73 of a constraint type, a variable, and an offset.
75 SCALAR is a constraint expression type used to represent x, whether
76 it appears on the LHS or the RHS of a statement.
77 DEREF is a constraint expression type used to represent *x, whether
78 it appears on the LHS or the RHS of a statement.
79 ADDRESSOF is a constraint expression used to represent &x, whether
80 it appears on the LHS or the RHS of a statement.
82 Each pointer variable in the program is assigned an integer id, and
83 each field of a structure variable is assigned an integer id as well.
85 Structure variables are linked to their list of fields through a "next
86 field" in each variable that points to the next field in offset
87 order.
88 Each variable for a structure field has
90 1. "size", that tells the size in bits of that field.
91 2. "fullsize", that tells the size in bits of the entire structure.
92 3. "offset", that tells the offset in bits from the beginning of the
93 structure to this field.
95 Thus,
96 struct f
98 int a;
99 int b;
100 } foo;
101 int *bar;
103 looks like
105 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
106 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
107 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
110 In order to solve the system of set constraints, the following is
111 done:
113 1. Each constraint variable x has a solution set associated with it,
114 Sol(x).
116 2. Constraints are separated into direct, copy, and complex.
117 Direct constraints are ADDRESSOF constraints that require no extra
118 processing, such as P = &Q
119 Copy constraints are those of the form P = Q.
120 Complex constraints are all the constraints involving dereferences
121 and offsets (including offsetted copies).
123 3. All direct constraints of the form P = &Q are processed, such
124 that Q is added to Sol(P)
126 4. All complex constraints for a given constraint variable are stored in a
127 linked list attached to that variable's node.
129 5. A directed graph is built out of the copy constraints. Each
130 constraint variable is a node in the graph, and an edge from
131 Q to P is added for each copy constraint of the form P = Q
133 6. The graph is then walked, and solution sets are
134 propagated along the copy edges, such that an edge from Q to P
135 causes Sol(P) <- Sol(P) union Sol(Q).
137 7. As we visit each node, all complex constraints associated with
138 that node are processed by adding appropriate copy edges to the graph, or the
139 appropriate variables to the solution set.
141 8. The process of walking the graph is iterated until no solution
142 sets change.
144 Prior to walking the graph in steps 6 and 7, We perform static
145 cycle elimination on the constraint graph, as well
146 as off-line variable substitution.
148 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
149 on and turned into anything), but isn't. You can just see what offset
150 inside the pointed-to struct it's going to access.
152 TODO: Constant bounded arrays can be handled as if they were structs of the
153 same number of elements.
155 TODO: Modeling heap and incoming pointers becomes much better if we
156 add fields to them as we discover them, which we could do.
158 TODO: We could handle unions, but to be honest, it's probably not
159 worth the pain or slowdown. */
161 /* IPA-PTA optimizations possible.
163 When the indirect function called is ANYTHING we can add disambiguation
164 based on the function signatures (or simply the parameter count which
165 is the varinfo size). We also do not need to consider functions that
166 do not have their address taken.
168 The is_global_var bit which marks escape points is overly conservative
169 in IPA mode. Split it to is_escape_point and is_global_var - only
170 externally visible globals are escape points in IPA mode.
171 There is now is_ipa_escape_point but this is only used in a few
172 selected places.
174 The way we introduce DECL_PT_UID to avoid fixing up all points-to
175 sets in the translation unit when we copy a DECL during inlining
176 pessimizes precision. The advantage is that the DECL_PT_UID keeps
177 compile-time and memory usage overhead low - the points-to sets
178 do not grow or get unshared as they would during a fixup phase.
179 An alternative solution is to delay IPA PTA until after all
180 inlining transformations have been applied.
182 The way we propagate clobber/use information isn't optimized.
183 It should use a new complex constraint that properly filters
184 out local variables of the callee (though that would make
185 the sets invalid after inlining). OTOH we might as well
186 admit defeat to WHOPR and simply do all the clobber/use analysis
187 and propagation after PTA finished but before we threw away
188 points-to information for memory variables. WHOPR and PTA
189 do not play along well anyway - the whole constraint solving
190 would need to be done in WPA phase and it will be very interesting
191 to apply the results to local SSA names during LTRANS phase.
193 We probably should compute a per-function unit-ESCAPE solution
194 propagating it simply like the clobber / uses solutions. The
195 solution can go alongside the non-IPA escaped solution and be
196 used to query which vars escape the unit through a function.
197 This is also required to make the escaped-HEAP trick work in IPA mode.
199 We never put function decls in points-to sets so we do not
200 keep the set of called functions for indirect calls.
202 And probably more. */
204 static bool use_field_sensitive = true;
205 static int in_ipa_mode = 0;
207 /* Used for predecessor bitmaps. */
208 static bitmap_obstack predbitmap_obstack;
210 /* Used for points-to sets. */
211 static bitmap_obstack pta_obstack;
213 /* Used for oldsolution members of variables. */
214 static bitmap_obstack oldpta_obstack;
216 /* Used for per-solver-iteration bitmaps. */
217 static bitmap_obstack iteration_obstack;
219 static unsigned int create_variable_info_for (tree, const char *, bool);
220 typedef struct constraint_graph *constraint_graph_t;
221 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
223 struct constraint;
224 typedef struct constraint *constraint_t;
227 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
228 if (a) \
229 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
/* Statistics gathered while building and solving the constraint
   system, for dumping.  */
static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;
243 struct variable_info
245 /* ID of this variable */
246 unsigned int id;
248 /* True if this is a variable created by the constraint analysis, such as
249 heap variables and constraints we had to break up. */
250 unsigned int is_artificial_var : 1;
252 /* True if this is a special variable whose solution set should not be
253 changed. */
254 unsigned int is_special_var : 1;
256 /* True for variables whose size is not known or variable. */
257 unsigned int is_unknown_size_var : 1;
259 /* True for (sub-)fields that represent a whole variable. */
260 unsigned int is_full_var : 1;
262 /* True if this is a heap variable. */
263 unsigned int is_heap_var : 1;
265 /* True if this is a register variable. */
266 unsigned int is_reg_var : 1;
268 /* True if this field may contain pointers. */
269 unsigned int may_have_pointers : 1;
271 /* True if this field has only restrict qualified pointers. */
272 unsigned int only_restrict_pointers : 1;
274 /* True if this represents a heap var created for a restrict qualified
275 pointer. */
276 unsigned int is_restrict_var : 1;
278 /* True if this represents a global variable. */
279 unsigned int is_global_var : 1;
281 /* True if this represents a module escape point for IPA analysis. */
282 unsigned int is_ipa_escape_point : 1;
284 /* True if this represents a IPA function info. */
285 unsigned int is_fn_info : 1;
287 /* True if this appears as RHS in a ADDRESSOF constraint. */
288 unsigned int address_taken : 1;
290 /* ??? Store somewhere better. */
291 unsigned short ruid;
293 /* The ID of the variable for the next field in this structure
294 or zero for the last field in this structure. */
295 unsigned next;
297 /* The ID of the variable for the first field in this structure. */
298 unsigned head;
300 /* Offset of this variable, in bits, from the base variable */
301 unsigned HOST_WIDE_INT offset;
303 /* Size of the variable, in bits. */
304 unsigned HOST_WIDE_INT size;
306 /* Full size of the base variable, in bits. */
307 unsigned HOST_WIDE_INT fullsize;
309 /* In IPA mode the shadow UID in case the variable needs to be duplicated in
310 the final points-to solution because it reaches its containing
311 function recursively. Zero if none is needed. */
312 unsigned int shadow_var_uid;
314 /* Name of this variable */
315 const char *name;
317 /* Tree that this variable is associated with. */
318 tree decl;
320 /* Points-to set for this variable. */
321 bitmap solution;
323 /* Old points-to set for this variable. */
324 bitmap oldsolution;
326 typedef struct variable_info *varinfo_t;
328 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
329 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
330 unsigned HOST_WIDE_INT);
331 static varinfo_t lookup_vi_for_tree (tree);
332 static inline bool type_can_have_subvars (const_tree);
333 static void make_param_constraints (varinfo_t);
335 /* Pool of variable info structures. */
336 static object_allocator<variable_info> variable_info_pool
337 ("Variable info pool");
339 /* Map varinfo to final pt_solution. */
340 static hash_map<varinfo_t, pt_solution *> *final_solutions;
341 struct obstack final_solutions_obstack;
343 /* Table of variable info structures for constraint variables.
344 Indexed directly by variable info id. */
345 static vec<varinfo_t> varmap;
347 /* Return the varmap element N */
349 static inline varinfo_t
350 get_varinfo (unsigned int n)
352 return varmap[n];
355 /* Return the next variable in the list of sub-variables of VI
356 or NULL if VI is the last sub-variable. */
358 static inline varinfo_t
359 vi_next (varinfo_t vi)
361 return get_varinfo (vi->next);
/* Static IDs for the special variables.  Variable ID zero is unused
   and serves as the terminator of the sub-variable chain.  */
enum { nothing_id = 1, anything_id = 2, string_id = 3,
       escaped_id = 4, nonlocal_id = 5,
       storedanything_id = 6, integer_id = 7 };
370 /* Return a new variable info structure consisting for a variable
371 named NAME, and using constraint graph node NODE. Append it
372 to the vector of variable info structures. */
374 static varinfo_t
375 new_var_info (tree t, const char *name, bool add_id)
377 unsigned index = varmap.length ();
378 varinfo_t ret = variable_info_pool.allocate ();
380 if (dump_file && add_id)
382 char *tempname = xasprintf ("%s(%d)", name, index);
383 name = ggc_strdup (tempname);
384 free (tempname);
387 ret->id = index;
388 ret->name = name;
389 ret->decl = t;
390 /* Vars without decl are artificial and do not have sub-variables. */
391 ret->is_artificial_var = (t == NULL_TREE);
392 ret->is_special_var = false;
393 ret->is_unknown_size_var = false;
394 ret->is_full_var = (t == NULL_TREE);
395 ret->is_heap_var = false;
396 ret->may_have_pointers = true;
397 ret->only_restrict_pointers = false;
398 ret->is_restrict_var = false;
399 ret->ruid = 0;
400 ret->is_global_var = (t == NULL_TREE);
401 ret->is_ipa_escape_point = false;
402 ret->is_fn_info = false;
403 ret->address_taken = false;
404 if (t && DECL_P (t))
405 ret->is_global_var = (is_global_var (t)
406 /* We have to treat even local register variables
407 as escape points. */
408 || (VAR_P (t) && DECL_HARD_REGISTER (t)));
409 ret->is_reg_var = (t && TREE_CODE (t) == SSA_NAME);
410 ret->solution = BITMAP_ALLOC (&pta_obstack);
411 ret->oldsolution = NULL;
412 ret->next = 0;
413 ret->shadow_var_uid = 0;
414 ret->head = ret->id;
416 stats.total_vars++;
418 varmap.safe_push (ret);
420 return ret;
423 /* A map mapping call statements to per-stmt variables for uses
424 and clobbers specific to the call. */
425 static hash_map<gimple *, varinfo_t> *call_stmt_vars;
427 /* Lookup or create the variable for the call statement CALL. */
429 static varinfo_t
430 get_call_vi (gcall *call)
432 varinfo_t vi, vi2;
434 bool existed;
435 varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
436 if (existed)
437 return *slot_p;
439 vi = new_var_info (NULL_TREE, "CALLUSED", true);
440 vi->offset = 0;
441 vi->size = 1;
442 vi->fullsize = 2;
443 vi->is_full_var = true;
444 vi->is_reg_var = true;
446 vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED", true);
447 vi2->offset = 1;
448 vi2->size = 1;
449 vi2->fullsize = 2;
450 vi2->is_full_var = true;
451 vi2->is_reg_var = true;
453 vi->next = vi2->id;
455 *slot_p = vi;
456 return vi;
459 /* Lookup the variable for the call statement CALL representing
460 the uses. Returns NULL if there is nothing special about this call. */
462 static varinfo_t
463 lookup_call_use_vi (gcall *call)
465 varinfo_t *slot_p = call_stmt_vars->get (call);
466 if (slot_p)
467 return *slot_p;
469 return NULL;
472 /* Lookup the variable for the call statement CALL representing
473 the clobbers. Returns NULL if there is nothing special about this call. */
475 static varinfo_t
476 lookup_call_clobber_vi (gcall *call)
478 varinfo_t uses = lookup_call_use_vi (call);
479 if (!uses)
480 return NULL;
482 return vi_next (uses);
485 /* Lookup or create the variable for the call statement CALL representing
486 the uses. */
488 static varinfo_t
489 get_call_use_vi (gcall *call)
491 return get_call_vi (call);
494 /* Lookup or create the variable for the call statement CALL representing
495 the clobbers. */
497 static varinfo_t ATTRIBUTE_UNUSED
498 get_call_clobber_vi (gcall *call)
500 return vi_next (get_call_vi (call));
504 enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};
506 /* An expression that appears in a constraint. */
508 struct constraint_expr
510 /* Constraint type. */
511 constraint_expr_type type;
513 /* Variable we are referring to in the constraint. */
514 unsigned int var;
516 /* Offset, in bits, of this constraint from the beginning of
517 variables it ends up referring to.
519 IOW, in a deref constraint, we would deref, get the result set,
520 then add OFFSET to each member. */
521 HOST_WIDE_INT offset;
524 /* Use 0x8000... as special unknown offset. */
525 #define UNKNOWN_OFFSET HOST_WIDE_INT_MIN
527 typedef struct constraint_expr ce_s;
528 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
529 static void get_constraint_for (tree, vec<ce_s> *);
530 static void get_constraint_for_rhs (tree, vec<ce_s> *);
531 static void do_deref (vec<ce_s> *);
533 /* Our set constraints are made up of two constraint expressions, one
534 LHS, and one RHS.
536 As described in the introduction, our set constraints each represent an
537 operation between set valued variables.
539 struct constraint
541 struct constraint_expr lhs;
542 struct constraint_expr rhs;
545 /* List of constraints that we use to build the constraint graph from. */
547 static vec<constraint_t> constraints;
548 static object_allocator<constraint> constraint_pool ("Constraint pool");
550 /* The constraint graph is represented as an array of bitmaps
551 containing successor nodes. */
553 struct constraint_graph
555 /* Size of this graph, which may be different than the number of
556 nodes in the variable map. */
557 unsigned int size;
559 /* Explicit successors of each node. */
560 bitmap *succs;
562 /* Implicit predecessors of each node (Used for variable
563 substitution). */
564 bitmap *implicit_preds;
566 /* Explicit predecessors of each node (Used for variable substitution). */
567 bitmap *preds;
569 /* Indirect cycle representatives, or -1 if the node has no indirect
570 cycles. */
571 int *indirect_cycles;
573 /* Representative node for a node. rep[a] == a unless the node has
574 been unified. */
575 unsigned int *rep;
577 /* Equivalence class representative for a label. This is used for
578 variable substitution. */
579 int *eq_rep;
581 /* Pointer equivalence label for a node. All nodes with the same
582 pointer equivalence label can be unified together at some point
583 (either during constraint optimization or after the constraint
584 graph is built). */
585 unsigned int *pe;
587 /* Pointer equivalence representative for a label. This is used to
588 handle nodes that are pointer equivalent but not location
589 equivalent. We can unite these once the addressof constraints
590 are transformed into initial points-to sets. */
591 int *pe_rep;
593 /* Pointer equivalence label for each node, used during variable
594 substitution. */
595 unsigned int *pointer_label;
597 /* Location equivalence label for each node, used during location
598 equivalence finding. */
599 unsigned int *loc_label;
601 /* Pointed-by set for each node, used during location equivalence
602 finding. This is pointed-by rather than pointed-to, because it
603 is constructed using the predecessor graph. */
604 bitmap *pointed_by;
606 /* Points to sets for pointer equivalence. This is *not* the actual
607 points-to sets for nodes. */
608 bitmap *points_to;
610 /* Bitmap of nodes where the bit is set if the node is a direct
611 node. Used for variable substitution. */
612 sbitmap direct_nodes;
614 /* Bitmap of nodes where the bit is set if the node is address
615 taken. Used for variable substitution. */
616 bitmap address_taken;
618 /* Vector of complex constraints for each graph node. Complex
619 constraints are those involving dereferences or offsets that are
620 not 0. */
621 vec<constraint_t> *complex;
624 static constraint_graph_t graph;
/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  */
#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
633 /* Return the representative node for NODE, if NODE has been unioned
634 with another NODE.
635 This function performs path compression along the way to finding
636 the representative. */
638 static unsigned int
639 find (unsigned int node)
641 gcc_checking_assert (node < graph->size);
642 if (graph->rep[node] != node)
643 return graph->rep[node] = find (graph->rep[node]);
644 return node;
647 /* Union the TO and FROM nodes to the TO nodes.
648 Note that at some point in the future, we may want to do
649 union-by-rank, in which case we are going to have to return the
650 node we unified to. */
652 static bool
653 unite (unsigned int to, unsigned int from)
655 gcc_checking_assert (to < graph->size && from < graph->size);
656 if (to != from && graph->rep[from] != to)
658 graph->rep[from] = to;
659 return true;
661 return false;
664 /* Create a new constraint consisting of LHS and RHS expressions. */
666 static constraint_t
667 new_constraint (const struct constraint_expr lhs,
668 const struct constraint_expr rhs)
670 constraint_t ret = constraint_pool.allocate ();
671 ret->lhs = lhs;
672 ret->rhs = rhs;
673 return ret;
676 /* Print out constraint C to FILE. */
678 static void
679 dump_constraint (FILE *file, constraint_t c)
681 if (c->lhs.type == ADDRESSOF)
682 fprintf (file, "&");
683 else if (c->lhs.type == DEREF)
684 fprintf (file, "*");
685 if (dump_file)
686 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
687 else
688 fprintf (file, "V%d", c->lhs.var);
689 if (c->lhs.offset == UNKNOWN_OFFSET)
690 fprintf (file, " + UNKNOWN");
691 else if (c->lhs.offset != 0)
692 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
693 fprintf (file, " = ");
694 if (c->rhs.type == ADDRESSOF)
695 fprintf (file, "&");
696 else if (c->rhs.type == DEREF)
697 fprintf (file, "*");
698 if (dump_file)
699 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
700 else
701 fprintf (file, "V%d", c->rhs.var);
702 if (c->rhs.offset == UNKNOWN_OFFSET)
703 fprintf (file, " + UNKNOWN");
704 else if (c->rhs.offset != 0)
705 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
709 void debug_constraint (constraint_t);
710 void debug_constraints (void);
711 void debug_constraint_graph (void);
712 void debug_solution_for_var (unsigned int);
713 void debug_sa_points_to_info (void);
714 void debug_varinfo (varinfo_t);
715 void debug_varmap (void);
717 /* Print out constraint C to stderr. */
719 DEBUG_FUNCTION void
720 debug_constraint (constraint_t c)
722 dump_constraint (stderr, c);
723 fprintf (stderr, "\n");
726 /* Print out all constraints to FILE */
728 static void
729 dump_constraints (FILE *file, int from)
731 int i;
732 constraint_t c;
733 for (i = from; constraints.iterate (i, &c); i++)
734 if (c)
736 dump_constraint (file, c);
737 fprintf (file, "\n");
741 /* Print out all constraints to stderr. */
743 DEBUG_FUNCTION void
744 debug_constraints (void)
746 dump_constraints (stderr, 0);
749 /* Print the constraint graph in dot format. */
751 static void
752 dump_constraint_graph (FILE *file)
754 unsigned int i;
756 /* Only print the graph if it has already been initialized: */
757 if (!graph)
758 return;
760 /* Prints the header of the dot file: */
761 fprintf (file, "strict digraph {\n");
762 fprintf (file, " node [\n shape = box\n ]\n");
763 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
764 fprintf (file, "\n // List of nodes and complex constraints in "
765 "the constraint graph:\n");
767 /* The next lines print the nodes in the graph together with the
768 complex constraints attached to them. */
769 for (i = 1; i < graph->size; i++)
771 if (i == FIRST_REF_NODE)
772 continue;
773 if (find (i) != i)
774 continue;
775 if (i < FIRST_REF_NODE)
776 fprintf (file, "\"%s\"", get_varinfo (i)->name);
777 else
778 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
779 if (graph->complex[i].exists ())
781 unsigned j;
782 constraint_t c;
783 fprintf (file, " [label=\"\\N\\n");
784 for (j = 0; graph->complex[i].iterate (j, &c); ++j)
786 dump_constraint (file, c);
787 fprintf (file, "\\l");
789 fprintf (file, "\"]");
791 fprintf (file, ";\n");
794 /* Go over the edges. */
795 fprintf (file, "\n // Edges in the constraint graph:\n");
796 for (i = 1; i < graph->size; i++)
798 unsigned j;
799 bitmap_iterator bi;
800 if (find (i) != i)
801 continue;
802 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
804 unsigned to = find (j);
805 if (i == to)
806 continue;
807 if (i < FIRST_REF_NODE)
808 fprintf (file, "\"%s\"", get_varinfo (i)->name);
809 else
810 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
811 fprintf (file, " -> ");
812 if (to < FIRST_REF_NODE)
813 fprintf (file, "\"%s\"", get_varinfo (to)->name);
814 else
815 fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
816 fprintf (file, ";\n");
820 /* Prints the tail of the dot file. */
821 fprintf (file, "}\n");
824 /* Print out the constraint graph to stderr. */
826 DEBUG_FUNCTION void
827 debug_constraint_graph (void)
829 dump_constraint_graph (stderr);
832 /* SOLVER FUNCTIONS
834 The solver is a simple worklist solver, that works on the following
835 algorithm:
837 sbitmap changed_nodes = all zeroes;
838 changed_count = 0;
839 For each node that is not already collapsed:
840 changed_count++;
841 set bit in changed nodes
843 while (changed_count > 0)
845 compute topological ordering for constraint graph
847 find and collapse cycles in the constraint graph (updating
848 changed if necessary)
850 for each node (n) in the graph in topological order:
851 changed_count--;
853 Process each complex constraint associated with the node,
854 updating changed if necessary.
856 For each outgoing edge from n, propagate the solution from n to
857 the destination of the edge, updating changed as necessary.
859 } */
861 /* Return true if two constraint expressions A and B are equal. */
863 static bool
864 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
866 return a.type == b.type && a.var == b.var && a.offset == b.offset;
869 /* Return true if constraint expression A is less than constraint expression
870 B. This is just arbitrary, but consistent, in order to give them an
871 ordering. */
873 static bool
874 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
876 if (a.type == b.type)
878 if (a.var == b.var)
879 return a.offset < b.offset;
880 else
881 return a.var < b.var;
883 else
884 return a.type < b.type;
887 /* Return true if constraint A is less than constraint B. This is just
888 arbitrary, but consistent, in order to give them an ordering. */
890 static bool
891 constraint_less (const constraint_t &a, const constraint_t &b)
893 if (constraint_expr_less (a->lhs, b->lhs))
894 return true;
895 else if (constraint_expr_less (b->lhs, a->lhs))
896 return false;
897 else
898 return constraint_expr_less (a->rhs, b->rhs);
901 /* Return true if two constraints A and B are equal. */
903 static bool
904 constraint_equal (struct constraint a, struct constraint b)
906 return constraint_expr_equal (a.lhs, b.lhs)
907 && constraint_expr_equal (a.rhs, b.rhs);
911 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
913 static constraint_t
914 constraint_vec_find (vec<constraint_t> vec,
915 struct constraint lookfor)
917 unsigned int place;
918 constraint_t found;
920 if (!vec.exists ())
921 return NULL;
923 place = vec.lower_bound (&lookfor, constraint_less);
924 if (place >= vec.length ())
925 return NULL;
926 found = vec[place];
927 if (!constraint_equal (*found, lookfor))
928 return NULL;
929 return found;
932 /* Union two constraint vectors, TO and FROM. Put the result in TO.
933 Returns true of TO set is changed. */
935 static bool
936 constraint_set_union (vec<constraint_t> *to,
937 vec<constraint_t> *from)
939 int i;
940 constraint_t c;
941 bool any_change = false;
943 FOR_EACH_VEC_ELT (*from, i, c)
945 if (constraint_vec_find (*to, *c) == NULL)
947 unsigned int place = to->lower_bound (c, constraint_less);
948 to->safe_insert (place, c);
949 any_change = true;
952 return any_change;
955 /* Expands the solution in SET to all sub-fields of variables included. */
957 static bitmap
958 solution_set_expand (bitmap set, bitmap *expanded)
960 bitmap_iterator bi;
961 unsigned j;
963 if (*expanded)
964 return *expanded;
966 *expanded = BITMAP_ALLOC (&iteration_obstack);
968 /* In a first pass expand to the head of the variables we need to
969 add all sub-fields off. This avoids quadratic behavior. */
970 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
972 varinfo_t v = get_varinfo (j);
973 if (v->is_artificial_var
974 || v->is_full_var)
975 continue;
976 bitmap_set_bit (*expanded, v->head);
979 /* In the second pass now expand all head variables with subfields. */
980 EXECUTE_IF_SET_IN_BITMAP (*expanded, 0, j, bi)
982 varinfo_t v = get_varinfo (j);
983 if (v->head != j)
984 continue;
985 for (v = vi_next (v); v != NULL; v = vi_next (v))
986 bitmap_set_bit (*expanded, v->id);
989 /* And finally set the rest of the bits from SET. */
990 bitmap_ior_into (*expanded, set);
992 return *expanded;
995 /* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
996 process. */
998 static bool
999 set_union_with_increment (bitmap to, bitmap delta, HOST_WIDE_INT inc,
1000 bitmap *expanded_delta)
1002 bool changed = false;
1003 bitmap_iterator bi;
1004 unsigned int i;
1006 /* If the solution of DELTA contains anything it is good enough to transfer
1007 this to TO. */
1008 if (bitmap_bit_p (delta, anything_id))
1009 return bitmap_set_bit (to, anything_id);
1011 /* If the offset is unknown we have to expand the solution to
1012 all subfields. */
1013 if (inc == UNKNOWN_OFFSET)
1015 delta = solution_set_expand (delta, expanded_delta);
1016 changed |= bitmap_ior_into (to, delta);
1017 return changed;
1020 /* For non-zero offset union the offsetted solution into the destination. */
1021 EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
1023 varinfo_t vi = get_varinfo (i);
1025 /* If this is a variable with just one field just set its bit
1026 in the result. */
1027 if (vi->is_artificial_var
1028 || vi->is_unknown_size_var
1029 || vi->is_full_var)
1030 changed |= bitmap_set_bit (to, i);
1031 else
1033 HOST_WIDE_INT fieldoffset = vi->offset + inc;
1034 unsigned HOST_WIDE_INT size = vi->size;
1036 /* If the offset makes the pointer point to before the
1037 variable use offset zero for the field lookup. */
1038 if (fieldoffset < 0)
1039 vi = get_varinfo (vi->head);
1040 else
1041 vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
1045 changed |= bitmap_set_bit (to, vi->id);
1046 if (vi->is_full_var
1047 || vi->next == 0)
1048 break;
1050 /* We have to include all fields that overlap the current field
1051 shifted by inc. */
1052 vi = vi_next (vi);
1054 while (vi->offset < fieldoffset + size);
1058 return changed;
1061 /* Insert constraint C into the list of complex constraints for graph
1062 node VAR. */
1064 static void
1065 insert_into_complex (constraint_graph_t graph,
1066 unsigned int var, constraint_t c)
1068 vec<constraint_t> complex = graph->complex[var];
1069 unsigned int place = complex.lower_bound (c, constraint_less);
1071 /* Only insert constraints that do not already exist. */
1072 if (place >= complex.length ()
1073 || !constraint_equal (*c, *complex[place]))
1074 graph->complex[var].safe_insert (place, c);
/* Condense two variable nodes into a single variable node, by moving
   all associated info from FROM to TO.  Returns true if TO node's
   constraint set changes after the merge.  */

static bool
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
{
  unsigned int i;
  constraint_t c;
  bool any_change = false;

  gcc_checking_assert (find (from) == to);

  /* Move all complex constraints from src node into to node.  */
  FOR_EACH_VEC_ELT (graph->complex[from], i, c)
    {
      /* In complex constraints for node FROM, we may have either
	 a = *FROM, and *FROM = a, or an offseted constraint which are
	 always added to the rhs node's constraints.  Retarget the
	 FROM side of each constraint to TO.  */

      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else if (c->lhs.type == DEREF)
	c->lhs.var = to;
      else
	c->rhs.var = to;
    }

  /* Merge the retargeted constraints into TO's list and drop FROM's.  */
  any_change = constraint_set_union (&graph->complex[to],
				     &graph->complex[from]);
  graph->complex[from].release ();
  return any_change;
}
1114 /* Remove edges involving NODE from GRAPH. */
1116 static void
1117 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1119 if (graph->succs[node])
1120 BITMAP_FREE (graph->succs[node]);
/* Merge GRAPH nodes FROM and TO into node TO.  */

static void
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
		   unsigned int from)
{
  if (graph->indirect_cycles[from] != -1)
    {
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other.  */
      if (graph->indirect_cycles[to] == -1)
	graph->indirect_cycles[to] = graph->indirect_cycles[from];
    }

  /* Merge all the successor edges.  */
  if (graph->succs[from])
    {
      if (!graph->succs[to])
	graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
      bitmap_ior_into (graph->succs[to],
		       graph->succs[from]);
    }

  /* FROM's edges are now redundant; release them.  */
  clear_edges_for_node (graph, from);
}
1154 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1155 it doesn't exist in the graph already. */
1157 static void
1158 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1159 unsigned int from)
1161 if (to == from)
1162 return;
1164 if (!graph->implicit_preds[to])
1165 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1167 if (bitmap_set_bit (graph->implicit_preds[to], from))
1168 stats.num_implicit_edges++;
/* Add a predecessor graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_set_bit (graph->preds[to], from);
}
/* Add a graph edge to GRAPH, going from FROM to TO if
   it doesn't exist in the graph already.
   Return false if the edge already existed, true otherwise.  */

static bool
add_graph_edge (constraint_graph_t graph, unsigned int to,
		unsigned int from)
{
  if (to == from)
    {
      /* Self edges never change a solution; treat as pre-existing.  */
      return false;
    }
  else
    {
      bool r = false;

      if (!graph->succs[from])
	graph->succs[from] = BITMAP_ALLOC (&pta_obstack);

      /* The graph solving process does not avoid "triangles", thus
	 there can be multiple paths from a node to another involving
	 intermediate other nodes.  That causes extra copying which is
	 most difficult to avoid when the intermediate node is ESCAPED
	 because there are no edges added from ESCAPED.  Avoid
	 adding the direct edge FROM -> TO when we have FROM -> ESCAPED
	 and TO contains ESCAPED.
	 ??? Note this is only a heuristic, it does not prevent the
	 situation from occurring.  The heuristic helps PR38474 and
	 PR99912 significantly.  */
      if (to < FIRST_REF_NODE
	  && bitmap_bit_p (graph->succs[from], find (escaped_id))
	  && bitmap_bit_p (get_varinfo (find (to))->solution, escaped_id))
	return false;

      if (bitmap_set_bit (graph->succs[from], to))
	{
	  r = true;
	  /* Only edges between non-REF nodes are counted.  */
	  if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
	    stats.num_edges++;
	}
      return r;
    }
}
/* Initialize the constraint graph structure to contain SIZE nodes.  */

static void
init_graph (unsigned int size)
{
  unsigned int j;

  graph = XCNEW (struct constraint_graph);
  graph->size = size;
  graph->succs = XCNEWVEC (bitmap, graph->size);
  graph->indirect_cycles = XNEWVEC (int, graph->size);
  graph->rep = XNEWVEC (unsigned int, graph->size);
  /* ??? Macros do not support template types with multiple arguments,
     so we use a typedef to work around it.  */
  typedef vec<constraint_t> vec_constraint_t_heap;
  graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
  graph->pe = XCNEWVEC (unsigned int, graph->size);
  graph->pe_rep = XNEWVEC (int, graph->size);

  /* Initially every node is its own representative and belongs to no
     pointer-equivalence class or indirect cycle.  */
  for (j = 0; j < graph->size; j++)
    {
      graph->rep[j] = j;
      graph->pe_rep[j] = -1;
      graph->indirect_cycles[j] = -1;
    }
}
/* Build the constraint graph, adding only predecessor edges right now.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  /* All non-special variables start out as direct nodes.  */
  for (j = 1; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes.  */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
	  v = get_varinfo (rhsvar);
	  if (!v->is_full_var)
	    {
	      /* Taking the address of any field taints every field of
		 the underlying variable.  */
	      v = get_varinfo (v->head);
	      do
		{
		  bitmap_clear_bit (graph->direct_nodes, v->id);
		  v = vi_next (v);
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Offsetted constraints make the offsetted side indirect.  */
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}
/* Build the constraint graph, adding successor edges.  */

static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      if (!c)
	continue;

      /* Use the current representatives of both sides.  */
      lhs = c->lhs;
      rhs = c->rhs;
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y */
	  gcc_checking_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
	add_graph_edge (graph, find (i), t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, find (escaped_id), t);
}
/* The set of variables whose solution changed on the last solver
   iteration; drives the solver worklist.  */
static bitmap changed;
/* Strongly Connected Component visitation info.  */

class scc_info
{
public:
  scc_info (size_t size);
  ~scc_info ();

  /* Nodes already reached by the DFS.  */
  auto_sbitmap visited;
  /* Nodes whose SCC has been fully processed.  */
  auto_sbitmap deleted;
  /* Per-node DFS discovery/lowlink numbers.  */
  unsigned int *dfs;
  /* Per-node representative mapping used during condensation.  */
  unsigned int *node_mapping;
  /* Next DFS number to hand out.  */
  int current_index;
  /* Nuutila-style stack of non-root nodes.  */
  auto_vec<unsigned> scc_stack;
};
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly
   connected components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	scc_visit (graph, si, w);

      /* Propagate the lowest reachable DFS number up to N.  */
      unsigned int t = find (w);
      gcc_checking_assert (find (n) == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      if (si->scc_stack.length () > 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  /* Pop every stacked node that belongs to this SCC.  */
	  while (si->scc_stack.length () != 0
		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
	    {
	      unsigned int w = si->scc_stack.pop ();

	      bitmap_set_bit (scc, w);
	    }

	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  /* REF nodes record an indirect cycle instead of
		     being unified directly.  */
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  gcc_checking_assert (to != from && find (to) == to);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  merge_graph_nodes (graph, to, from);
  if (merge_node_constraints (graph, to, from))
    {
      if (update_changed)
	bitmap_set_bit (changed, to);
    }

  /* Mark TO as changed if FROM was changed.  If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_clear_bit (changed, from))
    bitmap_set_bit (changed, to);
  varinfo_t fromvi = get_varinfo (from);
  if (fromvi->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      varinfo_t tovi = get_varinfo (to);
      if (bitmap_ior_into (tovi->solution, fromvi->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      /* FROM's sets are dead after the merge.  */
      BITMAP_FREE (fromvi->solution);
      if (fromvi->oldsolution)
	BITMAP_FREE (fromvi->oldsolution);

      /* NOTE(review): freeing TO's oldsolution appears intended to force
	 a full (non-incremental) propagation after unification once the
	 solver is iterating — confirm against the solver loop.  */
      if (stats.iterations > 0
	  && tovi->oldsolution)
	BITMAP_FREE (tovi->oldsolution);
    }
  /* Remove any self edge created by the merge.  */
  if (graph->succs[to])
    bitmap_clear_bit (graph->succs[to], to);
}
/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse*.  */
  vec<unsigned> topo_order;
};
1596 /* Initialize and return a topological info structure. */
1598 static struct topo_info *
1599 init_topo_info (void)
1601 size_t size = graph->size;
1602 struct topo_info *ti = XNEW (struct topo_info);
1603 ti->visited = sbitmap_alloc (size);
1604 bitmap_clear (ti->visited);
1605 ti->topo_order.create (1);
1606 return ti;
1610 /* Free the topological sort info pointed to by TI. */
1612 static void
1613 free_topo_info (struct topo_info *ti)
1615 sbitmap_free (ti->visited);
1616 ti->topo_order.release ();
1617 free (ti);
1620 /* Visit the graph in topological order, and store the order in the
1621 topo_info structure. */
1623 static void
1624 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1625 unsigned int n)
1627 bitmap_iterator bi;
1628 unsigned int j;
1630 bitmap_set_bit (ti->visited, n);
1632 if (graph->succs[n])
1633 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1635 if (!bitmap_bit_p (ti->visited, j))
1636 topo_visit (graph, ti, j);
1639 ti->topo_order.safe_push (n);
/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta, bitmap *expanded_delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned HOST_WIDE_INT size = v->size;
      unsigned int t;

      if (v->is_full_var)
	;
      else if (roffset != 0)
	{
	  /* Point the walk at the field containing the shifted offset,
	     or at offset zero if the shift goes before the variable.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by roffset.  */
      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      bitmap_set_bit (changed, lhs);
    }
}
/* Process a constraint C that represents *(x + off) = y using DELTA
   as the starting solution for x.  */

static void
do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    bitmap_set_bit (changed, t);
	}
      return;
    }

  /* If we do not know at which offset the lhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;
      unsigned HOST_WIDE_INT size = v->size;

      if (v->is_full_var)
	;
      else if (loff != 0)
	{
	  /* Point the walk at the field containing the shifted offset,
	     or at offset zero if the shift goes before the variable.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by loff.  */
      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
		bitmap_set_bit (changed, t);
	    }

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }
}
/* Handle a non-simple (simple meaning requires no iteration),
   constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved).  */

static void
do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
		       bitmap *expanded_delta)
{
  if (c->lhs.type == DEREF)
    {
      if (c->rhs.type == ADDRESSOF)
	{
	  /* *x = &y is rewritten before solving; it cannot occur here.  */
	  gcc_unreachable ();
	}
      else
	{
	  /* *x = y */
	  do_ds_constraint (c, delta, expanded_delta);
	}
    }
  else if (c->rhs.type == DEREF)
    {
      /* x = *y */
      if (!(get_varinfo (c->lhs.var)->is_special_var))
	do_sd_constraint (graph, c, delta, expanded_delta);
    }
  else
    {
      /* x = y + off: union the offsetted DELTA into x's solution.  */
      bitmap tmp;
      bool flag = false;

      gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
			   && c->rhs.offset != 0 && c->lhs.offset == 0);
      tmp = get_varinfo (c->lhs.var)->solution;

      flag = set_union_with_increment (tmp, delta, c->rhs.offset,
				       expanded_delta);

      if (flag)
	bitmap_set_bit (changed, c->lhs.var);
    }
}
/* Initialize a new SCC info structure for SIZE nodes.  */

scc_info::scc_info (size_t size) :
  visited (size), deleted (size), current_index (0), scc_stack (1)
{
  bitmap_clear (visited);
  bitmap_clear (deleted);
  node_mapping = XNEWVEC (unsigned int, size);
  dfs = XCNEWVEC (unsigned int, size);

  /* Initially every node maps to itself.  */
  for (size_t i = 0; i < size; i++)
    node_mapping[i] = i;
}
1888 /* Free an SCC info structure pointed to by SI */
1890 scc_info::~scc_info ()
1892 free (node_mapping);
1893 free (dfs);
1897 /* Find indirect cycles in GRAPH that occur, using strongly connected
1898 components, and note them in the indirect cycles map.
1900 This technique comes from Ben Hardekopf and Calvin Lin,
1901 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1902 Lines of Code", submitted to PLDI 2007. */
1904 static void
1905 find_indirect_cycles (constraint_graph_t graph)
1907 unsigned int i;
1908 unsigned int size = graph->size;
1909 scc_info si (size);
1911 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1912 if (!bitmap_bit_p (si.visited, i) && find (i) == i)
1913 scc_visit (graph, &si, i);
1916 /* Compute a topological ordering for GRAPH, and store the result in the
1917 topo_info structure TI. */
1919 static void
1920 compute_topo_order (constraint_graph_t graph,
1921 struct topo_info *ti)
1923 unsigned int i;
1924 unsigned int size = graph->size;
1926 for (i = 0; i != size; ++i)
1927 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1928 topo_visit (graph, ti, i);
/* Structure used for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
{
  /* Cached hash of LABELS.  */
  hashval_t hashcode;
  /* Assigned equivalence class id; 0 means not yet assigned.  */
  unsigned int equivalence_class;
  /* The set of labels this entry represents.  */
  bitmap labels;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;
/* Equiv_class_label hashtable helpers.  */

struct equiv_class_hasher : nofree_ptr_hash <equiv_class_label>
{
  static inline hashval_t hash (const equiv_class_label *);
  static inline bool equal (const equiv_class_label *,
			    const equiv_class_label *);
};
1951 /* Hash function for a equiv_class_label_t */
1953 inline hashval_t
1954 equiv_class_hasher::hash (const equiv_class_label *ecl)
1956 return ecl->hashcode;
1959 /* Equality function for two equiv_class_label_t's. */
1961 inline bool
1962 equiv_class_hasher::equal (const equiv_class_label *eql1,
1963 const equiv_class_label *eql2)
1965 return (eql1->hashcode == eql2->hashcode
1966 && bitmap_equal_p (eql1->labels, eql2->labels));
/* A hashtable for mapping a bitmap of labels->pointer equivalence
   classes.  */
static hash_table<equiv_class_hasher> *pointer_equiv_class_table;

/* A hashtable for mapping a bitmap of labels->location equivalence
   classes.  */
static hash_table<equiv_class_hasher> *location_equiv_class_table;

/* Obstack holding the equiv_class_label entries of both tables above.  */
struct obstack equiv_class_obstack;
/* Lookup the equivalence class entry for the bitmap LABELS in TABLE.
   If no entry exists yet, insert and return a new one with its
   equivalence class initialized to 0 (meaning "unassigned").  */

static equiv_class_label *
equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
			   bitmap labels)
{
  equiv_class_label **slot;
  equiv_class_label ecl;

  ecl.labels = labels;
  ecl.hashcode = bitmap_hash (labels);
  slot = table->find_slot (&ecl, INSERT);
  if (!*slot)
    {
      /* Entries live on equiv_class_obstack and are never freed
	 individually (nofree_ptr_hash).  */
      *slot = XOBNEW (&equiv_class_obstack, struct equiv_class_label);
      (*slot)->labels = labels;
      (*slot)->hashcode = ecl.hashcode;
      (*slot)->equivalence_class = 0;
    }

  return *slot;
}
2004 /* Perform offline variable substitution.
2006 This is a worst case quadratic time way of identifying variables
2007 that must have equivalent points-to sets, including those caused by
2008 static cycles, and single entry subgraphs, in the constraint graph.
2010 The technique is described in "Exploiting Pointer and Location
2011 Equivalence to Optimize Pointer Analysis. In the 14th International
2012 Static Analysis Symposium (SAS), August 2007." It is known as the
2013 "HU" algorithm, and is equivalent to value numbering the collapsed
2014 constraint graph including evaluating unions.
2016 The general method of finding equivalence classes is as follows:
2017 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
2018 Initialize all non-REF nodes to be direct nodes.
2019 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
2020 variable}
2021 For each constraint containing the dereference, we also do the same
2022 thing.
2024 We then compute SCC's in the graph and unify nodes in the same SCC,
2025 including pts sets.
2027 For each non-collapsed node x:
2028 Visit all unvisited explicit incoming edges.
2029 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
2030 where y->x.
2031 Lookup the equivalence class for pts(x).
2032 If we found one, equivalence_class(x) = found class.
2033 Otherwise, equivalence_class(x) = new class, and new_class is
2034 added to the lookup table.
2036 All direct nodes with the same equivalence class can be replaced
2037 with a single representative node.
2038 All unlabeled nodes (label == 0) are not pointers and all edges
2039 involving them can be eliminated.
2040 We perform these optimizations during rewrite_constraints
2042 In addition to pointer equivalence class finding, we also perform
2043 location equivalence class finding. This is the set of variables
2044 that always appear together in points-to sets. We use this to
2045 compress the size of the points-to sets. */
/* Current maximum pointer equivalence class id.  */
static int pointer_equiv_class;

/* Current maximum location equivalence class id.  */
static int location_equiv_class;
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  */

static void
condense_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  gcc_checking_assert (si->node_mapping[n] == n);
  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      /* Propagate the lowest reachable DFS number up to N.  */
      unsigned int t = si->node_mapping[w];
      gcc_checking_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      unsigned int t = si->node_mapping[w];
      gcc_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      if (si->scc_stack.length () != 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  /* Find the first node of the SCC and do non-bitmap work.  */
	  bool direct_p = true;
	  unsigned first = si->scc_stack.length ();
	  do
	    {
	      --first;
	      unsigned int w = si->scc_stack[first];
	      si->node_mapping[w] = n;
	      if (!bitmap_bit_p (graph->direct_nodes, w))
		direct_p = false;
	    }
	  while (first > 0
		 && si->dfs[si->scc_stack[first - 1]] >= my_dfs);
	  /* An SCC is direct only if all of its members are.  */
	  if (!direct_p)
	    bitmap_clear_bit (graph->direct_nodes, n);

	  /* Want to reduce to node n, push that first.  */
	  si->scc_stack.reserve (1);
	  si->scc_stack.quick_push (si->scc_stack[first]);
	  si->scc_stack[first] = n;

	  /* Merge the SCC members' bitmaps pairwise, halving the
	     number of live sets each round, so each bitmap is merged
	     only O(log scc_size) times.  */
	  unsigned scc_size = si->scc_stack.length () - first;
	  unsigned split = scc_size / 2;
	  unsigned carry = scc_size - split * 2;
	  while (split > 0)
	    {
	      for (unsigned i = 0; i < split; ++i)
		{
		  unsigned a = si->scc_stack[first + i];
		  unsigned b = si->scc_stack[first + split + carry + i];

		  /* Unify our nodes.  */
		  if (graph->preds[b])
		    {
		      if (!graph->preds[a])
			std::swap (graph->preds[a], graph->preds[b]);
		      else
			bitmap_ior_into_and_free (graph->preds[a],
						  &graph->preds[b]);
		    }
		  if (graph->implicit_preds[b])
		    {
		      if (!graph->implicit_preds[a])
			std::swap (graph->implicit_preds[a],
				   graph->implicit_preds[b]);
		      else
			bitmap_ior_into_and_free (graph->implicit_preds[a],
						  &graph->implicit_preds[b]);
		    }
		  if (graph->points_to[b])
		    {
		      if (!graph->points_to[a])
			std::swap (graph->points_to[a], graph->points_to[b]);
		      else
			bitmap_ior_into_and_free (graph->points_to[a],
						  &graph->points_to[b]);
		    }
		}
	      unsigned remain = split + carry;
	      split = remain / 2;
	      carry = remain - split * 2;
	    }
	  /* Actually pop the SCC.  */
	  si->scc_stack.truncate (first);
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}
/* Label pointer equivalences.

   This performs a value numbering of the constraint graph to
   discover which variables will always have the same points-to sets
   under the current set of constraints.

   The way it value numbers is to store the set of points-to bits
   generated by the constraints and graph edges.  This is just used as a
   hash and equality comparison.  The *actual set of points-to bits* is
   completely irrelevant, in that we don't care about being able to
   extract them later.

   The equality values (currently bitmaps) just have to satisfy a few
   constraints, the main ones being:
   1. The combining operation must be order independent.
   2. The end result of a given set of operations must be unique iff the
      combination of input values is unique
   3. Hashable.  */

static void
label_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i, first_pred;
  bitmap_iterator bi;

  bitmap_set_bit (si->visited, n);

  /* Label and union our incoming edges' points to sets.  */
  first_pred = -1U;
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      if (!bitmap_bit_p (si->visited, w))
	label_visit (graph, si, w);

      /* Skip unused edges.  */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	{
	  if (!graph->points_to[n])
	    {
	      /* Delay allocating N's set until a second non-empty
		 predecessor shows up; a single one can be shared.  */
	      if (first_pred == -1U)
		first_pred = w;
	      else
		{
		  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
		  bitmap_ior (graph->points_to[n],
			      graph->points_to[first_pred],
			      graph->points_to[w]);
		}
	    }
	  else
	    bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
	}
    }

  /* Indirect nodes get fresh variables and a new pointer equiv class.  */
  if (!bitmap_bit_p (graph->direct_nodes, n))
    {
      if (!graph->points_to[n])
	{
	  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	  if (first_pred != -1U)
	    bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
	}
      bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
      graph->pointer_label[n] = pointer_equiv_class++;
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      ecl->equivalence_class = graph->pointer_label[n];
      return;
    }

  /* If there was only a single non-empty predecessor the pointer equiv
     class is the same.  */
  if (!graph->points_to[n])
    {
      if (first_pred != -1U)
	{
	  graph->pointer_label[n] = graph->pointer_label[first_pred];
	  graph->points_to[n] = graph->points_to[first_pred];
	}
      return;
    }

  if (!bitmap_empty_p (graph->points_to[n]))
    {
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = pointer_equiv_class++;
      else
	{
	  /* Share the canonical bitmap of the existing class.  */
	  BITMAP_FREE (graph->points_to[n]);
	  graph->points_to[n] = ecl->labels;
	}
      graph->pointer_label[n] = ecl->equivalence_class;
    }
}
2283 /* Print the pred graph in dot format. */
/* SI supplies the SCC node mapping; FILE receives the graphviz output.  */
2285 static void
2286 dump_pred_graph (class scc_info *si, FILE *file)
2288 unsigned int i;
2290 /* Only print the graph if it has already been initialized: */
2291 if (!graph)
2292 return;
2294 /* Prints the header of the dot file: */
2295 fprintf (file, "strict digraph {\n");
2296 fprintf (file, " node [\n shape = box\n ]\n");
2297 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
2298 fprintf (file, "\n // List of nodes and complex constraints in "
2299 "the constraint graph:\n");
2301 /* The next lines print the nodes in the graph together with the
2302 complex constraints attached to them. */
2303 for (i = 1; i < graph->size; i++)
2305 if (i == FIRST_REF_NODE)
2306 continue;
/* Only SCC representatives are printed.  */
2307 if (si->node_mapping[i] != i)
2308 continue;
/* Ids at or above FIRST_REF_NODE denote the dereference "*x" of the
   corresponding variable node.  */
2309 if (i < FIRST_REF_NODE)
2310 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2311 else
2312 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2313 if (graph->points_to[i]
2314 && !bitmap_empty_p (graph->points_to[i]))
2316 if (i < FIRST_REF_NODE)
2317 fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2318 else
2319 fprintf (file, "[label=\"*%s = {",
2320 get_varinfo (i - FIRST_REF_NODE)->name);
2321 unsigned j;
2322 bitmap_iterator bi;
2323 EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2324 fprintf (file, " %d", j);
2325 fprintf (file, " }\"]");
2327 fprintf (file, ";\n");
2330 /* Go over the edges. */
2331 fprintf (file, "\n // Edges in the constraint graph:\n");
2332 for (i = 1; i < graph->size; i++)
2334 unsigned j;
2335 bitmap_iterator bi;
2336 if (si->node_mapping[i] != i)
2337 continue;
2338 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2340 unsigned from = si->node_mapping[j];
2341 if (from < FIRST_REF_NODE)
2342 fprintf (file, "\"%s\"", get_varinfo (from)->name);
2343 else
2344 fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2345 fprintf (file, " -> ");
2346 if (i < FIRST_REF_NODE)
2347 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2348 else
2349 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2350 fprintf (file, ";\n");
2354 /* Prints the tail of the dot file. */
2355 fprintf (file, "}\n");
2358 /* Perform offline variable substitution, discovering equivalence
2359 classes, and eliminating non-pointer variables. */
/* Returns the scc_info holding the node mapping; the caller must later
   release it with free_var_substitution_info.  */
2361 static class scc_info *
2362 perform_var_substitution (constraint_graph_t graph)
2364 unsigned int i;
2365 unsigned int size = graph->size;
2366 scc_info *si = new scc_info (size);
2368 bitmap_obstack_initialize (&iteration_obstack);
2369 gcc_obstack_init (&equiv_class_obstack);
2370 pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
2371 location_equiv_class_table
2372 = new hash_table<equiv_class_hasher> (511);
/* Class 0 is reserved to mean "non-pointer", so real classes start at 1.  */
2373 pointer_equiv_class = 1;
2374 location_equiv_class = 1;
2376 /* Condense the nodes, which means to find SCC's, count incoming
2377 predecessors, and unite nodes in SCC's. */
2378 for (i = 1; i < FIRST_REF_NODE; i++)
2379 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2380 condense_visit (graph, si, si->node_mapping[i]);
2382 if (dump_file && (dump_flags & TDF_GRAPH))
2384 fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
2385 "in dot format:\n");
2386 dump_pred_graph (si, dump_file);
2387 fprintf (dump_file, "\n\n");
2390 bitmap_clear (si->visited);
2391 /* Actually label the nodes for pointer equivalences. */
2392 for (i = 1; i < FIRST_REF_NODE; i++)
2393 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2394 label_visit (graph, si, si->node_mapping[i]);
2396 /* Calculate location equivalence labels. */
2397 for (i = 1; i < FIRST_REF_NODE; i++)
2399 bitmap pointed_by;
2400 bitmap_iterator bi;
2401 unsigned int j;
2403 if (!graph->pointed_by[i])
2404 continue;
2405 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2407 /* Translate the pointed-by mapping for pointer equivalence
2408 labels. */
2409 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2411 bitmap_set_bit (pointed_by,
2412 graph->pointer_label[si->node_mapping[j]]);
2414 /* The original pointed_by is now dead. */
2415 BITMAP_FREE (graph->pointed_by[i]);
2417 /* Look up the location equivalence label if one exists, or make
2418 one otherwise. */
2419 equiv_class_label_t ecl;
2420 ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
2421 if (ecl->equivalence_class == 0)
2422 ecl->equivalence_class = location_equiv_class++;
2423 else
2425 if (dump_file && (dump_flags & TDF_DETAILS))
2426 fprintf (dump_file, "Found location equivalence for node %s\n",
2427 get_varinfo (i)->name);
2428 BITMAP_FREE (pointed_by);
2430 graph->loc_label[i] = ecl->equivalence_class;
/* Dump the SCC leaders and the pointer/location labels per node.  */
2434 if (dump_file && (dump_flags & TDF_DETAILS))
2435 for (i = 1; i < FIRST_REF_NODE; i++)
2437 unsigned j = si->node_mapping[i];
2438 if (j != i)
2440 fprintf (dump_file, "%s node id %d ",
2441 bitmap_bit_p (graph->direct_nodes, i)
2442 ? "Direct" : "Indirect", i);
2443 if (i < FIRST_REF_NODE)
2444 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2445 else
2446 fprintf (dump_file, "\"*%s\"",
2447 get_varinfo (i - FIRST_REF_NODE)->name);
2448 fprintf (dump_file, " mapped to SCC leader node id %d ", j);
2449 if (j < FIRST_REF_NODE)
2450 fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
2451 else
2452 fprintf (dump_file, "\"*%s\"\n",
2453 get_varinfo (j - FIRST_REF_NODE)->name);
2455 else
2457 fprintf (dump_file,
2458 "Equivalence classes for %s node id %d ",
2459 bitmap_bit_p (graph->direct_nodes, i)
2460 ? "direct" : "indirect", i);
2461 if (i < FIRST_REF_NODE)
2462 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2463 else
2464 fprintf (dump_file, "\"*%s\"",
2465 get_varinfo (i - FIRST_REF_NODE)->name);
2466 fprintf (dump_file,
2467 ": pointer %d, location %d\n",
2468 graph->pointer_label[i], graph->loc_label[i]);
2472 /* Quickly eliminate our non-pointer variables. */
2474 for (i = 1; i < FIRST_REF_NODE; i++)
2476 unsigned int node = si->node_mapping[i];
2478 if (graph->pointer_label[node] == 0)
2480 if (dump_file && (dump_flags & TDF_DETAILS))
2481 fprintf (dump_file,
2482 "%s is a non-pointer variable, eliminating edges.\n",
2483 get_varinfo (node)->name);
2484 stats.nonpointer_vars++;
2485 clear_edges_for_node (graph, node);
2489 return si;
2492 /* Free information that was only necessary for variable
2493 substitution. */
/* Releases SI, the per-node substitution arrays on the global GRAPH, the
   equivalence-class tables, and the obstacks backing them.  */
2495 static void
2496 free_var_substitution_info (class scc_info *si)
2498 delete si;
2499 free (graph->pointer_label);
2500 free (graph->loc_label);
2501 free (graph->pointed_by);
2502 free (graph->points_to);
2503 free (graph->eq_rep);
2504 sbitmap_free (graph->direct_nodes);
2505 delete pointer_equiv_class_table;
2506 pointer_equiv_class_table = NULL;
2507 delete location_equiv_class_table;
2508 location_equiv_class_table = NULL;
/* The points_to bitmaps freed above live on these obstacks.  */
2509 obstack_free (&equiv_class_obstack, NULL);
2510 bitmap_obstack_release (&iteration_obstack);
2513 /* Return an existing node that is equivalent to NODE, which has
2514 equivalence class LABEL, if one exists. Return NODE otherwise. */
2516 static unsigned int
2517 find_equivalent_node (constraint_graph_t graph,
2518 unsigned int node, unsigned int label)
2520 /* If the address version of this variable is unused, we can
2521 substitute it for anything else with the same label.
2522 Otherwise, we know the pointers are equivalent, but not the
2523 locations, and we can unite them later. */
2525 if (!bitmap_bit_p (graph->address_taken, node))
2527 gcc_checking_assert (label < graph->size);
2529 if (graph->eq_rep[label] != -1)
2531 /* Unify the two variables since we know they are equivalent. */
2532 if (unite (graph->eq_rep[label], node))
2533 unify_nodes (graph, graph->eq_rep[label], node, false);
2534 return graph->eq_rep[label];
2536 else
/* First node seen with this label becomes its representative.  */
2538 graph->eq_rep[label] = node;
2539 graph->pe_rep[label] = node;
2542 else
/* Address-taken: only record the pointer equivalence; the actual
   uniting happens later in unite_pointer_equivalences.  */
2544 gcc_checking_assert (label < graph->size);
2545 graph->pe[node] = label;
2546 if (graph->pe_rep[label] == -1)
2547 graph->pe_rep[label] = node;
2550 return node;
2553 /* Unite pointer equivalent but not location equivalent nodes in
2554 GRAPH. This may only be performed once variable substitution is
2555 finished. */
2557 static void
2558 unite_pointer_equivalences (constraint_graph_t graph)
2560 unsigned int i;
2562 /* Go through the pointer equivalences and unite them to their
2563 representative, if they aren't already. */
2564 for (i = 1; i < FIRST_REF_NODE; i++)
2566 unsigned int label = graph->pe[i];
/* Label 0 means "non-pointer"; nothing to unite.  */
2567 if (label)
2569 int label_rep = graph->pe_rep[label];
2571 if (label_rep == -1)
2572 continue;
2574 label_rep = find (label_rep);
2575 if (label_rep >= 0 && unite (label_rep, find (i)))
2576 unify_nodes (graph, label_rep, i, false);
2581 /* Move complex constraints to the GRAPH nodes they belong to. */
/* A complex constraint is attached to the node whose solution change
   requires re-evaluating it: the dereferenced lhs for *x = y, otherwise
   the rhs variable.  */
2583 static void
2584 move_complex_constraints (constraint_graph_t graph)
2586 int i;
2587 constraint_t c;
2589 FOR_EACH_VEC_ELT (constraints, i, c)
2591 if (c)
2593 struct constraint_expr lhs = c->lhs;
2594 struct constraint_expr rhs = c->rhs;
2596 if (lhs.type == DEREF)
2598 insert_into_complex (graph, lhs.var, c);
2600 else if (rhs.type == DEREF)
/* Special vars (ANYTHING etc.) never need the deref propagated.  */
2602 if (!(get_varinfo (lhs.var)->is_special_var))
2603 insert_into_complex (graph, rhs.var, c);
2605 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2606 && (lhs.offset != 0 || rhs.offset != 0))
2608 insert_into_complex (graph, rhs.var, c);
2615 /* Optimize and rewrite complex constraints while performing
2616 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2617 result of perform_variable_substitution. */
/* Constraints whose lhs or rhs is a proven non-pointer (label 0) are
   dropped; the remaining ones get their variables replaced by their
   pointer-equivalent representatives.  */
2619 static void
2620 rewrite_constraints (constraint_graph_t graph,
2621 class scc_info *si)
2623 int i;
2624 constraint_t c;
2626 if (flag_checking)
/* Nothing may have been united yet, so find() must be the identity.  */
2628 for (unsigned int j = 0; j < graph->size; j++)
2629 gcc_assert (find (j) == j);
2632 FOR_EACH_VEC_ELT (constraints, i, c)
2634 struct constraint_expr lhs = c->lhs;
2635 struct constraint_expr rhs = c->rhs;
2636 unsigned int lhsvar = find (lhs.var);
2637 unsigned int rhsvar = find (rhs.var);
2638 unsigned int lhsnode, rhsnode;
2639 unsigned int lhslabel, rhslabel;
2641 lhsnode = si->node_mapping[lhsvar];
2642 rhsnode = si->node_mapping[rhsvar];
2643 lhslabel = graph->pointer_label[lhsnode];
2644 rhslabel = graph->pointer_label[rhsnode];
2646 /* See if it is really a non-pointer variable, and if so, ignore
2647 the constraint. */
2648 if (lhslabel == 0)
2650 if (dump_file && (dump_flags & TDF_DETAILS))
2653 fprintf (dump_file, "%s is a non-pointer variable, "
2654 "ignoring constraint:",
2655 get_varinfo (lhs.var)->name);
2656 dump_constraint (dump_file, c);
2657 fprintf (dump_file, "\n");
2659 constraints[i] = NULL;
2660 continue;
2663 if (rhslabel == 0)
2665 if (dump_file && (dump_flags & TDF_DETAILS))
2668 fprintf (dump_file, "%s is a non-pointer variable, "
2669 "ignoring constraint:",
2670 get_varinfo (rhs.var)->name);
2671 dump_constraint (dump_file, c);
2672 fprintf (dump_file, "\n");
2674 constraints[i] = NULL;
2675 continue;
2678 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2679 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2680 c->lhs.var = lhsvar;
2681 c->rhs.var = rhsvar;
2685 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2686 part of an SCC, false otherwise. */
2688 static bool
2689 eliminate_indirect_cycles (unsigned int node)
2691 if (graph->indirect_cycles[node] != -1
2692 && !bitmap_empty_p (get_varinfo (node)->solution))
2694 unsigned int i;
2695 auto_vec<unsigned> queue;
2696 int queuepos;
2697 unsigned int to = find (graph->indirect_cycles[node]);
2698 bitmap_iterator bi;
2700 /* We can't touch the solution set and call unify_nodes
2701 at the same time, because unify_nodes is going to do
2702 bitmap unions into it. */
/* Hence the two phases: first collect the nodes to merge ...  */
2704 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2706 if (find (i) == i && i != to)
2708 if (unite (to, i))
2709 queue.safe_push (i)
2713 for (queuepos = 0;
2714 queue.iterate (queuepos, &i);
2715 queuepos++)
/* ... then actually unify them into TO.  */
2717 unify_nodes (graph, to, i, true);
2719 return true;
2721 return false;
2724 /* Solve the constraint graph GRAPH using our worklist solver.
2725 This is based on the PW* family of solvers from the "Efficient Field
2726 Sensitive Pointer Analysis for C" paper.
2727 It works by iterating over all the graph nodes, processing the complex
2728 constraints and propagating the copy constraints, until everything stops
2729 changing. This corresponds to steps 6-8 in the solving list given above. */
2731 static void
2732 solve_graph (constraint_graph_t graph)
2734 unsigned int size = graph->size;
2735 unsigned int i;
2736 bitmap pts;
2738 changed = BITMAP_ALLOC (NULL);
2740 /* Mark all initial non-collapsed nodes as changed. */
2741 for (i = 1; i < size; i++)
2743 varinfo_t ivi = get_varinfo (i);
2744 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2745 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2746 || graph->complex[i].length () > 0))
2747 bitmap_set_bit (changed, i);
2750 /* Allocate a bitmap to be used to store the changed bits. */
2751 pts = BITMAP_ALLOC (&pta_obstack);
/* Outer loop: one pass per topological ordering; iterate until no node's
   solution grows anymore.  */
2753 while (!bitmap_empty_p (changed))
2755 unsigned int i;
2756 struct topo_info *ti = init_topo_info ();
2757 stats.iterations++;
2759 bitmap_obstack_initialize (&iteration_obstack);
2761 compute_topo_order (graph, ti);
2763 while (ti->topo_order.length () != 0)
2766 i = ti->topo_order.pop ();
2768 /* If this variable is not a representative, skip it. */
2769 if (find (i) != i)
2770 continue;
2772 /* In certain indirect cycle cases, we may merge this
2773 variable to another. */
2774 if (eliminate_indirect_cycles (i) && find (i) != i)
2775 continue;
2777 /* If the node has changed, we need to process the
2778 complex constraints and outgoing edges again. */
2779 if (bitmap_clear_bit (changed, i))
2781 unsigned int j;
2782 constraint_t c;
2783 bitmap solution;
2784 vec<constraint_t> complex = graph->complex[i];
2785 varinfo_t vi = get_varinfo (i);
2786 bool solution_empty;
2788 /* Compute the changed set of solution bits. If anything
2789 is in the solution just propagate that. */
2790 if (bitmap_bit_p (vi->solution, anything_id))
2792 /* If anything is also in the old solution there is
2793 nothing to do.
2794 ??? But we shouldn't have ended up with "changed" set ... */
2795 if (vi->oldsolution
2796 && bitmap_bit_p (vi->oldsolution, anything_id))
2797 continue;
2798 bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
2800 else if (vi->oldsolution)
/* PTS gets only the bits that are new since last time.  */
2801 bitmap_and_compl (pts, vi->solution, vi->oldsolution);
2802 else
2803 bitmap_copy (pts, vi->solution);
2805 if (bitmap_empty_p (pts))
2806 continue;
2808 if (vi->oldsolution)
2809 bitmap_ior_into (vi->oldsolution, pts);
2810 else
2812 vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
2813 bitmap_copy (vi->oldsolution, pts);
2816 solution = vi->solution;
2817 solution_empty = bitmap_empty_p (solution);
2819 /* Process the complex constraints */
2820 bitmap expanded_pts = NULL;
2821 FOR_EACH_VEC_ELT (complex, j, c)
2823 /* XXX: This is going to unsort the constraints in
2824 some cases, which will occasionally add duplicate
2825 constraints during unification. This does not
2826 affect correctness. */
2827 c->lhs.var = find (c->lhs.var);
2828 c->rhs.var = find (c->rhs.var);
2830 /* The only complex constraint that can change our
2831 solution to non-empty, given an empty solution,
2832 is a constraint where the lhs side is receiving
2833 some set from elsewhere. */
2834 if (!solution_empty || c->lhs.type != DEREF)
2835 do_complex_constraint (graph, c, pts, &expanded_pts);
2837 BITMAP_FREE (expanded_pts);
2839 solution_empty = bitmap_empty_p (solution);
2841 if (!solution_empty)
2843 bitmap_iterator bi;
2844 unsigned eff_escaped_id = find (escaped_id);
2846 /* Propagate solution to all successors. */
/* TO_REMOVE defers removal of a merged successor bit until after the
   current iteration step, since clearing bits mid-iteration is unsafe.  */
2847 unsigned to_remove = ~0U;
2848 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2849 0, j, bi)
2851 if (to_remove != ~0U)
2853 bitmap_clear_bit (graph->succs[i], to_remove);
2854 to_remove = ~0U;
2856 unsigned int to = find (j);
2857 if (to != j)
2859 /* Update the succ graph, avoiding duplicate
2860 work. */
2861 to_remove = j;
2862 if (! bitmap_set_bit (graph->succs[i], to))
2863 continue;
2864 /* We eventually end up processing 'to' twice
2865 as it is undefined whether bitmap iteration
2866 iterates over bits set during iteration.
2867 Play safe instead of doing tricks. */
2869 /* Don't try to propagate to ourselves. */
2870 if (to == i)
2871 continue;
2873 bitmap tmp = get_varinfo (to)->solution;
2874 bool flag = false;
2876 /* If we propagate from ESCAPED use ESCAPED as
2877 placeholder. */
2878 if (i == eff_escaped_id)
2879 flag = bitmap_set_bit (tmp, escaped_id);
2880 else
2881 flag = bitmap_ior_into (tmp, pts);
2883 if (flag)
2884 bitmap_set_bit (changed, to);
2886 if (to_remove != ~0U)
2887 bitmap_clear_bit (graph->succs[i], to_remove);
2891 free_topo_info (ti);
2892 bitmap_obstack_release (&iteration_obstack);
2895 BITMAP_FREE (pts);
2896 BITMAP_FREE (changed);
2897 bitmap_obstack_release (&oldpta_obstack);
2900 /* Map from trees to variable infos.  Populated by insert_vi_for_tree /
   get_vi_for_tree and queried by lookup_vi_for_tree. */
2901 static hash_map<tree, varinfo_t> *vi_for_tree;
2904 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2906 static void
2907 insert_vi_for_tree (tree t, varinfo_t vi)
2909 gcc_assert (vi);
2910 gcc_assert (!vi_for_tree->put (t, vi));
2913 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2914 exist in the map, return NULL, otherwise, return the varinfo we found. */
2916 static varinfo_t
2917 lookup_vi_for_tree (tree t)
2919 varinfo_t *slot = vi_for_tree->get (t);
2920 if (slot == NULL)
2921 return NULL;
2923 return *slot;
2926 /* Return a printable name for DECL */
2928 static const char *
2929 alias_get_name (tree decl)
2931 const char *res = "NULL";
2932 if (dump_file)
2934 char *temp = NULL;
2935 if (TREE_CODE (decl) == SSA_NAME)
2937 res = get_name (decl);
2938 temp = xasprintf ("%s_%u", res ? res : "", SSA_NAME_VERSION (decl));
2940 else if (HAS_DECL_ASSEMBLER_NAME_P (decl)
2941 && DECL_ASSEMBLER_NAME_SET_P (decl))
2942 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl));
2943 else if (DECL_P (decl))
2945 res = get_name (decl);
2946 if (!res)
2947 temp = xasprintf ("D.%u", DECL_UID (decl));
2950 if (temp)
2952 res = ggc_strdup (temp);
2953 free (temp);
2957 return res;
2960 /* Find the variable id for tree T in the map.
2961 If T doesn't exist in the map, create an entry for it and return it. */
2963 static varinfo_t
2964 get_vi_for_tree (tree t)
2966 varinfo_t *slot = vi_for_tree->get (t);
2967 if (slot == NULL)
2969 unsigned int id = create_variable_info_for (t, alias_get_name (t), false);
2970 return get_varinfo (id);
2973 return *slot;
2976 /* Get a scalar constraint expression for a new temporary variable. */
2978 static struct constraint_expr
2979 new_scalar_tmp_constraint_exp (const char *name, bool add_id)
2981 struct constraint_expr tmp;
2982 varinfo_t vi;
2984 vi = new_var_info (NULL_TREE, name, add_id);
2985 vi->offset = 0;
2986 vi->size = -1;
2987 vi->fullsize = -1;
2988 vi->is_full_var = 1;
2989 vi->is_reg_var = 1;
2991 tmp.var = vi->id;
2992 tmp.type = SCALAR;
2993 tmp.offset = 0;
2995 return tmp;
2998 /* Get a constraint expression vector from an SSA_VAR_P node.
2999 If address_p is true, the address of the result will be taken. */
3001 static void
3002 get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
3004 struct constraint_expr cexpr;
3005 varinfo_t vi;
3007 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
3008 gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
3010 if (TREE_CODE (t) == SSA_NAME
3011 && SSA_NAME_IS_DEFAULT_DEF (t))
3013 /* For parameters, get at the points-to set for the actual parm
3014 decl. */
3015 if (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
3016 || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
3018 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
3019 return;
3021 /* For undefined SSA names return nothing. */
3022 else if (!ssa_defined_default_def_p (t))
3024 cexpr.var = nothing_id;
3025 cexpr.type = SCALAR;
3026 cexpr.offset = 0;
3027 results->safe_push (cexpr);
3028 return;
3032 /* For global variables resort to the alias target. */
3033 if (VAR_P (t) && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
3035 varpool_node *node = varpool_node::get (t);
3036 if (node && node->alias && node->analyzed)
3038 node = node->ultimate_alias_target ();
3039 /* Canonicalize the PT uid of all aliases to the ultimate target.
3040 ??? Hopefully the set of aliases can't change in a way that
3041 changes the ultimate alias target. */
3042 gcc_assert ((! DECL_PT_UID_SET_P (node->decl)
3043 || DECL_PT_UID (node->decl) == DECL_UID (node->decl))
3044 && (! DECL_PT_UID_SET_P (t)
3045 || DECL_PT_UID (t) == DECL_UID (node->decl)));
3046 DECL_PT_UID (t) = DECL_UID (node->decl);
3047 t = node->decl;
3050 /* If this is decl may bind to NULL note that. */
/* NOTHING is pushed in addition to the variable itself below, so the
   points-to set also contains NULL.  */
3051 if (address_p
3052 && (! node || ! node->nonzero_address ()))
3054 cexpr.var = nothing_id;
3055 cexpr.type = SCALAR;
3056 cexpr.offset = 0;
3057 results->safe_push (cexpr);
3061 vi = get_vi_for_tree (t);
3062 cexpr.var = vi->id;
3063 cexpr.type = SCALAR;
3064 cexpr.offset = 0;
3066 /* If we are not taking the address of the constraint expr, add all
3067 sub-fields of the variable as well. */
3068 if (!address_p
3069 && !vi->is_full_var)
3071 for (; vi; vi = vi_next (vi))
3073 cexpr.var = vi->id;
3074 results->safe_push (cexpr);
3076 return;
3079 results->safe_push (cexpr);
3082 /* Process constraint T, performing various simplifications and then
3083 adding it to our list of overall constraints. */
/* Constraints with a double dereference or a complex rhs under a deref
   are split through a fresh temporary via recursive calls.  */
3085 static void
3086 process_constraint (constraint_t t)
3088 struct constraint_expr rhs = t->rhs;
3089 struct constraint_expr lhs = t->lhs;
3091 gcc_assert (rhs.var < varmap.length ());
3092 gcc_assert (lhs.var < varmap.length ());
3094 /* If we didn't get any useful constraint from the lhs we get
3095 &ANYTHING as fallback from get_constraint_for. Deal with
3096 it here by turning it into *ANYTHING. */
3097 if (lhs.type == ADDRESSOF
3098 && lhs.var == anything_id)
3099 lhs.type = DEREF;
3101 /* ADDRESSOF on the lhs is invalid. */
3102 gcc_assert (lhs.type != ADDRESSOF);
3104 /* We shouldn't add constraints from things that cannot have pointers.
3105 It's not completely trivial to avoid in the callers, so do it here. */
3106 if (rhs.type != ADDRESSOF
3107 && !get_varinfo (rhs.var)->may_have_pointers)
3108 return;
3110 /* Likewise adding to the solution of a non-pointer var isn't useful. */
3111 if (!get_varinfo (lhs.var)->may_have_pointers)
3112 return;
3114 /* This can happen in our IR with things like n->a = *p */
3115 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
3117 /* Split into tmp = *rhs, *lhs = tmp */
3118 struct constraint_expr tmplhs;
3119 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp", true);
3120 process_constraint (new_constraint (tmplhs, rhs));
3121 process_constraint (new_constraint (lhs, tmplhs));
3123 else if ((rhs.type != SCALAR || rhs.offset != 0) && lhs.type == DEREF)
3125 /* Split into tmp = &rhs, *lhs = tmp */
3126 struct constraint_expr tmplhs;
3127 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp", true);
3128 process_constraint (new_constraint (tmplhs, rhs));
3129 process_constraint (new_constraint (lhs, tmplhs));
3131 else
3133 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
/* Note the address-taken bit on the head of the rhs variable so
   offline substitution knows it must not be location-unified.  */
3134 if (rhs.type == ADDRESSOF)
3135 get_varinfo (get_varinfo (rhs.var)->head)->address_taken = true;
3136 constraints.safe_push (t);
3141 /* Return the position, in bits, of FIELD_DECL from the beginning of its
3142 structure. */
3144 static HOST_WIDE_INT
3145 bitpos_of_field (const tree fdecl)
3147 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3148 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3149 return -1;
3151 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3152 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
3156 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
3157 resulting constraint expressions in *RESULTS. */
3159 static void
3160 get_constraint_for_ptr_offset (tree ptr, tree offset,
3161 vec<ce_s> *results)
3163 struct constraint_expr c;
3164 unsigned int j, n;
3165 HOST_WIDE_INT rhsoffset;
3167 /* If we do not do field-sensitive PTA adding offsets to pointers
3168 does not change the points-to solution. */
3169 if (!use_field_sensitive)
3171 get_constraint_for_rhs (ptr, results);
3172 return;
3175 /* If the offset is not a non-negative integer constant that fits
3176 in a HOST_WIDE_INT, we have to fall back to a conservative
3177 solution which includes all sub-fields of all pointed-to
3178 variables of ptr. */
3179 if (offset == NULL_TREE
3180 || TREE_CODE (offset) != INTEGER_CST)
3181 rhsoffset = UNKNOWN_OFFSET;
3182 else
3184 /* Sign-extend the offset. */
3185 offset_int soffset = offset_int::from (wi::to_wide (offset), SIGNED);
3186 if (!wi::fits_shwi_p (soffset))
3187 rhsoffset = UNKNOWN_OFFSET;
3188 else
3190 /* Make sure the bit-offset also fits. */
/* The multiply may wrap; detect that by dividing back.  */
3191 HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
3192 rhsoffset = rhsunitoffset * (unsigned HOST_WIDE_INT) BITS_PER_UNIT;
3193 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3194 rhsoffset = UNKNOWN_OFFSET;
3198 get_constraint_for_rhs (ptr, results);
3199 if (rhsoffset == 0)
3200 return;
3202 /* As we are eventually appending to the solution do not use
3203 vec::iterate here. */
3204 n = results->length ();
3205 for (j = 0; j < n; j++)
3207 varinfo_t curr;
3208 c = (*results)[j];
3209 curr = get_varinfo (c.var);
3211 if (c.type == ADDRESSOF
3212 /* If this varinfo represents a full variable just use it. */
3213 && curr->is_full_var)
/* Nothing to adjust for single-field variables.  */
3215 else if (c.type == ADDRESSOF
3216 /* If we do not know the offset add all subfields. */
3217 && rhsoffset == UNKNOWN_OFFSET)
3219 varinfo_t temp = get_varinfo (curr->head);
3222 struct constraint_expr c2;
3223 c2.var = temp->id;
3224 c2.type = ADDRESSOF;
3225 c2.offset = 0;
3226 if (c2.var != c.var)
3227 results->safe_push (c2);
3228 temp = vi_next (temp);
3230 while (temp);
3232 else if (c.type == ADDRESSOF)
3234 varinfo_t temp;
3235 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3237 /* If curr->offset + rhsoffset is less than zero adjust it. */
3238 if (rhsoffset < 0
3239 && curr->offset < offset)
3240 offset = 0;
3242 /* We have to include all fields that overlap the current
3243 field shifted by rhsoffset. And we include at least
3244 the last or the first field of the variable to represent
3245 reachability of off-bound addresses, in particular &object + 1,
3246 conservatively correct. */
3247 temp = first_or_preceding_vi_for_offset (curr, offset);
3248 c.var = temp->id;
3249 c.offset = 0;
3250 temp = vi_next (temp);
3251 while (temp
3252 && temp->offset < offset + curr->size)
3254 struct constraint_expr c2;
3255 c2.var = temp->id;
3256 c2.type = ADDRESSOF;
3257 c2.offset = 0;
3258 results->safe_push (c2);
3259 temp = vi_next (temp);
3262 else if (c.type == SCALAR)
/* For scalars the offset is handled symbolically by the solver.  */
3264 gcc_assert (c.offset == 0);
3265 c.offset = rhsoffset;
3267 else
3268 /* We shouldn't get any DEREFs here. */
3269 gcc_unreachable ();
3271 (*results)[j] = c;
3276 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3277 If address_p is true the result will be taken its address of.
3278 If lhs_p is true then the constraint expression is assumed to be used
3279 as the lhs. */
3281 static void
3282 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3283 bool address_p, bool lhs_p)
3285 tree orig_t = t;
3286 poly_int64 bitsize = -1;
3287 poly_int64 bitmaxsize = -1;
3288 poly_int64 bitpos;
3289 bool reverse;
3290 tree forzero;
3292 /* Some people like to do cute things like take the address of
3293 &0->a.b */
3294 forzero = t;
3295 while (handled_component_p (forzero)
3296 || INDIRECT_REF_P (forzero)
3297 || TREE_CODE (forzero) == MEM_REF)
3298 forzero = TREE_OPERAND (forzero, 0);
3300 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3302 struct constraint_expr temp;
3304 temp.offset = 0;
3305 temp.var = integer_id;
3306 temp.type = SCALAR;
3307 results->safe_push (temp);
3308 return;
3311 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse);
3313 /* We can end up here for component references on a
3314 VIEW_CONVERT_EXPR <>(&foobar) or things like a
3315 BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>. So for
3316 symbolic constants simply give up. */
3317 if (TREE_CODE (t) == ADDR_EXPR)
3319 constraint_expr result;
3320 result.type = SCALAR;
3321 result.var = anything_id;
3322 result.offset = 0;
3323 results->safe_push (result);
3324 return;
3327 /* Avoid creating pointer-offset constraints, so handle MEM_REF
3328 offsets directly. Pretend to take the address of the base,
3329 we'll take care of adding the required subset of sub-fields below. */
3330 if (TREE_CODE (t) == MEM_REF
3331 && !integer_zerop (TREE_OPERAND (t, 0)))
3333 poly_offset_int off = mem_ref_offset (t);
3334 off <<= LOG2_BITS_PER_UNIT;
3335 off += bitpos;
3336 poly_int64 off_hwi;
3337 if (off.to_shwi (&off_hwi))
3338 bitpos = off_hwi;
3339 else
3341 bitpos = 0;
3342 bitmaxsize = -1;
3344 get_constraint_for_1 (TREE_OPERAND (t, 0), results, false, lhs_p);
3345 do_deref (results);
3347 else
3348 get_constraint_for_1 (t, results, true, lhs_p);
3350 /* Strip off nothing_id. */
3351 if (results->length () == 2)
3353 gcc_assert ((*results)[0].var == nothing_id);
3354 results->unordered_remove (0);
3356 gcc_assert (results->length () == 1);
3357 struct constraint_expr &result = results->last ();
3359 if (result.type == SCALAR
3360 && get_varinfo (result.var)->is_full_var)
3361 /* For single-field vars do not bother about the offset. */
3362 result.offset = 0;
3363 else if (result.type == SCALAR)
3365 /* In languages like C, you can access one past the end of an
3366 array. You aren't allowed to dereference it, so we can
3367 ignore this constraint. When we handle pointer subtraction,
3368 we may have to do something cute here. */
3370 if (maybe_lt (poly_uint64 (bitpos), get_varinfo (result.var)->fullsize)
3371 && maybe_ne (bitmaxsize, 0))
3373 /* It's also not true that the constraint will actually start at the
3374 right offset, it may start in some padding. We only care about
3375 setting the constraint to the first actual field it touches, so
3376 walk to find it. */
3377 struct constraint_expr cexpr = result;
3378 varinfo_t curr;
3379 results->pop ();
3380 cexpr.offset = 0;
3381 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3383 if (ranges_maybe_overlap_p (poly_int64 (curr->offset),
3384 curr->size, bitpos, bitmaxsize))
3386 cexpr.var = curr->id;
3387 results->safe_push (cexpr);
3388 if (address_p)
3389 break;
3392 /* If we are going to take the address of this field then
3393 to be able to compute reachability correctly add at least
3394 the last field of the variable. */
3395 if (address_p && results->length () == 0)
3397 curr = get_varinfo (cexpr.var);
3398 while (curr->next != 0)
3399 curr = vi_next (curr);
3400 cexpr.var = curr->id;
3401 results->safe_push (cexpr);
3403 else if (results->length () == 0)
3404 /* Assert that we found *some* field there. The user couldn't be
3405 accessing *only* padding. */
3406 /* Still the user could access one past the end of an array
3407 embedded in a struct resulting in accessing *only* padding. */
3408 /* Or accessing only padding via type-punning to a type
3409 that has a filed just in padding space. */
3411 cexpr.type = SCALAR;
3412 cexpr.var = anything_id;
3413 cexpr.offset = 0;
3414 results->safe_push (cexpr);
3417 else if (known_eq (bitmaxsize, 0))
3419 if (dump_file && (dump_flags & TDF_DETAILS))
3420 fprintf (dump_file, "Access to zero-sized part of variable, "
3421 "ignoring\n");
3423 else
3424 if (dump_file && (dump_flags & TDF_DETAILS))
3425 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3427 else if (result.type == DEREF)
3429 /* If we do not know exactly where the access goes say so. Note
3430 that only for non-structure accesses we know that we access
3431 at most one subfiled of any variable. */
3432 HOST_WIDE_INT const_bitpos;
3433 if (!bitpos.is_constant (&const_bitpos)
3434 || const_bitpos == -1
3435 || maybe_ne (bitsize, bitmaxsize)
3436 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3437 || result.offset == UNKNOWN_OFFSET)
3438 result.offset = UNKNOWN_OFFSET;
3439 else
3440 result.offset += const_bitpos;
3442 else if (result.type == ADDRESSOF)
3444 /* We can end up here for component references on constants like
3445 VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i]. */
3446 result.type = SCALAR;
3447 result.var = anything_id;
3448 result.offset = 0;
3450 else
3451 gcc_unreachable ();
3455 /* Dereference the constraint expression CONS, and return the result.
3456 DEREF (ADDRESSOF) = SCALAR
3457 DEREF (SCALAR) = DEREF
3458 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3459 This is needed so that we can handle dereferencing DEREF constraints. */
3461 static void
3462 do_deref (vec<ce_s> *constraints)
3464 struct constraint_expr *c;
3465 unsigned int i = 0;
3467 FOR_EACH_VEC_ELT (*constraints, i, c)
3469 if (c->type == SCALAR)
3470 c->type = DEREF;
3471 else if (c->type == ADDRESSOF)
3472 c->type = SCALAR;
3473 else if (c->type == DEREF)
3475 struct constraint_expr tmplhs;
3476 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp", true);
3477 process_constraint (new_constraint (tmplhs, *c));
3478 c->var = tmplhs.var;
3480 else
3481 gcc_unreachable ();
3485 /* Given a tree T, return the constraint expression for taking the
3486 address of it. */
3488 static void
3489 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3491 struct constraint_expr *c;
3492 unsigned int i;
3494 get_constraint_for_1 (t, results, true, true);
3496 FOR_EACH_VEC_ELT (*results, i, c)
3498 if (c->type == DEREF)
3499 c->type = SCALAR;
3500 else
3501 c->type = ADDRESSOF;
3505 /* Given a tree T, return the constraint expression for it. */
3507 static void
3508 get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
3509 bool lhs_p)
3511 struct constraint_expr temp;
3513 /* x = integer is all glommed to a single variable, which doesn't
3514 point to anything by itself. That is, of course, unless it is an
3515 integer constant being treated as a pointer, in which case, we
3516 will return that this is really the addressof anything. This
3517 happens below, since it will fall into the default case. The only
3518 case we know something about an integer treated like a pointer is
3519 when it is the NULL pointer, and then we just say it points to
3520 NULL.
3522 Do not do that if -fno-delete-null-pointer-checks though, because
3523 in that case *NULL does not fail, so it _should_ alias *anything.
3524 It is not worth adding a new option or renaming the existing one,
3525 since this case is relatively obscure. */
3526 if ((TREE_CODE (t) == INTEGER_CST
3527 && integer_zerop (t))
3528 /* The only valid CONSTRUCTORs in gimple with pointer typed
3529 elements are zero-initializer. But in IPA mode we also
3530 process global initializers, so verify at least. */
3531 || (TREE_CODE (t) == CONSTRUCTOR
3532 && CONSTRUCTOR_NELTS (t) == 0))
3534 if (flag_delete_null_pointer_checks)
3535 temp.var = nothing_id;
3536 else
3537 temp.var = nonlocal_id;
3538 temp.type = ADDRESSOF;
3539 temp.offset = 0;
3540 results->safe_push (temp);
3541 return;
3544 /* String constants are read-only, ideally we'd have a CONST_DECL
3545 for those. */
3546 if (TREE_CODE (t) == STRING_CST)
3548 temp.var = string_id;
3549 temp.type = SCALAR;
3550 temp.offset = 0;
3551 results->safe_push (temp);
3552 return;
3555 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3557 case tcc_expression:
3559 switch (TREE_CODE (t))
3561 case ADDR_EXPR:
3562 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3563 return;
3564 default:;
3566 break;
3568 case tcc_reference:
3570 switch (TREE_CODE (t))
3572 case MEM_REF:
3574 struct constraint_expr cs;
3575 varinfo_t vi, curr;
3576 get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3577 TREE_OPERAND (t, 1), results);
3578 do_deref (results);
3580 /* If we are not taking the address then make sure to process
3581 all subvariables we might access. */
3582 if (address_p)
3583 return;
3585 cs = results->last ();
3586 if (cs.type == DEREF
3587 && type_can_have_subvars (TREE_TYPE (t)))
3589 /* For dereferences this means we have to defer it
3590 to solving time. */
3591 results->last ().offset = UNKNOWN_OFFSET;
3592 return;
3594 if (cs.type != SCALAR)
3595 return;
3597 vi = get_varinfo (cs.var);
3598 curr = vi_next (vi);
3599 if (!vi->is_full_var
3600 && curr)
3602 unsigned HOST_WIDE_INT size;
3603 if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
3604 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
3605 else
3606 size = -1;
3607 for (; curr; curr = vi_next (curr))
3609 if (curr->offset - vi->offset < size)
3611 cs.var = curr->id;
3612 results->safe_push (cs);
3614 else
3615 break;
3618 return;
3620 case ARRAY_REF:
3621 case ARRAY_RANGE_REF:
3622 case COMPONENT_REF:
3623 case IMAGPART_EXPR:
3624 case REALPART_EXPR:
3625 case BIT_FIELD_REF:
3626 get_constraint_for_component_ref (t, results, address_p, lhs_p);
3627 return;
3628 case VIEW_CONVERT_EXPR:
3629 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
3630 lhs_p);
3631 return;
3632 /* We are missing handling for TARGET_MEM_REF here. */
3633 default:;
3635 break;
3637 case tcc_exceptional:
3639 switch (TREE_CODE (t))
3641 case SSA_NAME:
3643 get_constraint_for_ssa_var (t, results, address_p);
3644 return;
3646 case CONSTRUCTOR:
3648 unsigned int i;
3649 tree val;
3650 auto_vec<ce_s> tmp;
3651 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3653 struct constraint_expr *rhsp;
3654 unsigned j;
3655 get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3656 FOR_EACH_VEC_ELT (tmp, j, rhsp)
3657 results->safe_push (*rhsp);
3658 tmp.truncate (0);
3660 /* We do not know whether the constructor was complete,
3661 so technically we have to add &NOTHING or &ANYTHING
3662 like we do for an empty constructor as well. */
3663 return;
3665 default:;
3667 break;
3669 case tcc_declaration:
3671 get_constraint_for_ssa_var (t, results, address_p);
3672 return;
3674 case tcc_constant:
3676 /* We cannot refer to automatic variables through constants. */
3677 temp.type = ADDRESSOF;
3678 temp.var = nonlocal_id;
3679 temp.offset = 0;
3680 results->safe_push (temp);
3681 return;
3683 default:;
3686 /* The default fallback is a constraint from anything. */
3687 temp.type = ADDRESSOF;
3688 temp.var = anything_id;
3689 temp.offset = 0;
3690 results->safe_push (temp);
3693 /* Given a gimple tree T, return the constraint expression vector for it. */
3695 static void
3696 get_constraint_for (tree t, vec<ce_s> *results)
3698 gcc_assert (results->length () == 0);
3700 get_constraint_for_1 (t, results, false, true);
3703 /* Given a gimple tree T, return the constraint expression vector for it
3704 to be used as the rhs of a constraint. */
3706 static void
3707 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3709 gcc_assert (results->length () == 0);
3711 get_constraint_for_1 (t, results, false, false);
3715 /* Efficiently generates constraints from all entries in *RHSC to all
3716 entries in *LHSC. */
3718 static void
3719 process_all_all_constraints (const vec<ce_s> &lhsc,
3720 const vec<ce_s> &rhsc)
3722 struct constraint_expr *lhsp, *rhsp;
3723 unsigned i, j;
3725 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3727 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3728 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3729 process_constraint (new_constraint (*lhsp, *rhsp));
3731 else
3733 struct constraint_expr tmp;
3734 tmp = new_scalar_tmp_constraint_exp ("allalltmp", true);
3735 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3736 process_constraint (new_constraint (tmp, *rhsp));
3737 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3738 process_constraint (new_constraint (*lhsp, tmp));
3742 /* Handle aggregate copies by expanding into copies of the respective
3743 fields of the structures. */
3745 static void
3746 do_structure_copy (tree lhsop, tree rhsop)
3748 struct constraint_expr *lhsp, *rhsp;
3749 auto_vec<ce_s> lhsc;
3750 auto_vec<ce_s> rhsc;
3751 unsigned j;
3753 get_constraint_for (lhsop, &lhsc);
3754 get_constraint_for_rhs (rhsop, &rhsc);
3755 lhsp = &lhsc[0];
3756 rhsp = &rhsc[0];
3757 if (lhsp->type == DEREF
3758 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3759 || rhsp->type == DEREF)
3761 if (lhsp->type == DEREF)
3763 gcc_assert (lhsc.length () == 1);
3764 lhsp->offset = UNKNOWN_OFFSET;
3766 if (rhsp->type == DEREF)
3768 gcc_assert (rhsc.length () == 1);
3769 rhsp->offset = UNKNOWN_OFFSET;
3771 process_all_all_constraints (lhsc, rhsc);
3773 else if (lhsp->type == SCALAR
3774 && (rhsp->type == SCALAR
3775 || rhsp->type == ADDRESSOF))
3777 HOST_WIDE_INT lhssize, lhsoffset;
3778 HOST_WIDE_INT rhssize, rhsoffset;
3779 bool reverse;
3780 unsigned k = 0;
3781 if (!get_ref_base_and_extent_hwi (lhsop, &lhsoffset, &lhssize, &reverse)
3782 || !get_ref_base_and_extent_hwi (rhsop, &rhsoffset, &rhssize,
3783 &reverse))
3785 process_all_all_constraints (lhsc, rhsc);
3786 return;
3788 for (j = 0; lhsc.iterate (j, &lhsp);)
3790 varinfo_t lhsv, rhsv;
3791 rhsp = &rhsc[k];
3792 lhsv = get_varinfo (lhsp->var);
3793 rhsv = get_varinfo (rhsp->var);
3794 if (lhsv->may_have_pointers
3795 && (lhsv->is_full_var
3796 || rhsv->is_full_var
3797 || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3798 rhsv->offset + lhsoffset, rhsv->size)))
3799 process_constraint (new_constraint (*lhsp, *rhsp));
3800 if (!rhsv->is_full_var
3801 && (lhsv->is_full_var
3802 || (lhsv->offset + rhsoffset + lhsv->size
3803 > rhsv->offset + lhsoffset + rhsv->size)))
3805 ++k;
3806 if (k >= rhsc.length ())
3807 break;
3809 else
3810 ++j;
3813 else
3814 gcc_unreachable ();
3817 /* Create constraints ID = { rhsc }. */
3819 static void
3820 make_constraints_to (unsigned id, const vec<ce_s> &rhsc)
3822 struct constraint_expr *c;
3823 struct constraint_expr includes;
3824 unsigned int j;
3826 includes.var = id;
3827 includes.offset = 0;
3828 includes.type = SCALAR;
3830 FOR_EACH_VEC_ELT (rhsc, j, c)
3831 process_constraint (new_constraint (includes, *c));
3834 /* Create a constraint ID = OP. */
3836 static void
3837 make_constraint_to (unsigned id, tree op)
3839 auto_vec<ce_s> rhsc;
3840 get_constraint_for_rhs (op, &rhsc);
3841 make_constraints_to (id, rhsc);
3844 /* Create a constraint ID = &FROM. */
3846 static void
3847 make_constraint_from (varinfo_t vi, int from)
3849 struct constraint_expr lhs, rhs;
3851 lhs.var = vi->id;
3852 lhs.offset = 0;
3853 lhs.type = SCALAR;
3855 rhs.var = from;
3856 rhs.offset = 0;
3857 rhs.type = ADDRESSOF;
3858 process_constraint (new_constraint (lhs, rhs));
3861 /* Create a constraint ID = FROM. */
3863 static void
3864 make_copy_constraint (varinfo_t vi, int from)
3866 struct constraint_expr lhs, rhs;
3868 lhs.var = vi->id;
3869 lhs.offset = 0;
3870 lhs.type = SCALAR;
3872 rhs.var = from;
3873 rhs.offset = 0;
3874 rhs.type = SCALAR;
3875 process_constraint (new_constraint (lhs, rhs));
3878 /* Make constraints necessary to make OP escape. */
3880 static void
3881 make_escape_constraint (tree op)
3883 make_constraint_to (escaped_id, op);
3886 /* Make constraint necessary to make all indirect references
3887 from VI escape. */
3889 static void
3890 make_indirect_escape_constraint (varinfo_t vi)
3892 struct constraint_expr lhs, rhs;
3893 /* escaped = *(VAR + UNKNOWN); */
3894 lhs.type = SCALAR;
3895 lhs.var = escaped_id;
3896 lhs.offset = 0;
3897 rhs.type = DEREF;
3898 rhs.var = vi->id;
3899 rhs.offset = UNKNOWN_OFFSET;
3900 process_constraint (new_constraint (lhs, rhs));
3903 /* Add constraints to that the solution of VI is transitively closed. */
3905 static void
3906 make_transitive_closure_constraints (varinfo_t vi)
3908 struct constraint_expr lhs, rhs;
3910 /* VAR = *(VAR + UNKNOWN); */
3911 lhs.type = SCALAR;
3912 lhs.var = vi->id;
3913 lhs.offset = 0;
3914 rhs.type = DEREF;
3915 rhs.var = vi->id;
3916 rhs.offset = UNKNOWN_OFFSET;
3917 process_constraint (new_constraint (lhs, rhs));
3920 /* Add constraints to that the solution of VI has all subvariables added. */
3922 static void
3923 make_any_offset_constraints (varinfo_t vi)
3925 struct constraint_expr lhs, rhs;
3927 /* VAR = VAR + UNKNOWN; */
3928 lhs.type = SCALAR;
3929 lhs.var = vi->id;
3930 lhs.offset = 0;
3931 rhs.type = SCALAR;
3932 rhs.var = vi->id;
3933 rhs.offset = UNKNOWN_OFFSET;
3934 process_constraint (new_constraint (lhs, rhs));
3937 /* Temporary storage for fake var decls. */
3938 struct obstack fake_var_decl_obstack;
3940 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3942 static tree
3943 build_fake_var_decl (tree type)
3945 tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
3946 memset (decl, 0, sizeof (struct tree_var_decl));
3947 TREE_SET_CODE (decl, VAR_DECL);
3948 TREE_TYPE (decl) = type;
3949 DECL_UID (decl) = allocate_decl_uid ();
3950 SET_DECL_PT_UID (decl, -1);
3951 layout_decl (decl, 0);
3952 return decl;
3955 /* Create a new artificial heap variable with NAME.
3956 Return the created variable. */
3958 static varinfo_t
3959 make_heapvar (const char *name, bool add_id)
3961 varinfo_t vi;
3962 tree heapvar;
3964 heapvar = build_fake_var_decl (ptr_type_node);
3965 DECL_EXTERNAL (heapvar) = 1;
3967 vi = new_var_info (heapvar, name, add_id);
3968 vi->is_heap_var = true;
3969 vi->is_unknown_size_var = true;
3970 vi->offset = 0;
3971 vi->fullsize = ~0;
3972 vi->size = ~0;
3973 vi->is_full_var = true;
3974 insert_vi_for_tree (heapvar, vi);
3976 return vi;
3979 /* Create a new artificial heap variable with NAME and make a
3980 constraint from it to LHS. Set flags according to a tag used
3981 for tracking restrict pointers. */
3983 static varinfo_t
3984 make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id)
3986 varinfo_t vi = make_heapvar (name, add_id);
3987 vi->is_restrict_var = 1;
3988 vi->is_global_var = 1;
3989 vi->may_have_pointers = 1;
3990 make_constraint_from (lhs, vi->id);
3991 return vi;
3994 /* Create a new artificial heap variable with NAME and make a
3995 constraint from it to LHS. Set flags according to a tag used
3996 for tracking restrict pointers and make the artificial heap
3997 point to global memory. */
3999 static varinfo_t
4000 make_constraint_from_global_restrict (varinfo_t lhs, const char *name,
4001 bool add_id)
4003 varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id);
4004 make_copy_constraint (vi, nonlocal_id);
4005 return vi;
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  */

enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
4017 /* Get a constraint for the requested part of a function designator FI
4018 when operating in IPA mode. */
4020 static struct constraint_expr
4021 get_function_part_constraint (varinfo_t fi, unsigned part)
4023 struct constraint_expr c;
4025 gcc_assert (in_ipa_mode);
4027 if (fi->id == anything_id)
4029 /* ??? We probably should have a ANYFN special variable. */
4030 c.var = anything_id;
4031 c.offset = 0;
4032 c.type = SCALAR;
4034 else if (fi->decl && TREE_CODE (fi->decl) == FUNCTION_DECL)
4036 varinfo_t ai = first_vi_for_offset (fi, part);
4037 if (ai)
4038 c.var = ai->id;
4039 else
4040 c.var = anything_id;
4041 c.offset = 0;
4042 c.type = SCALAR;
4044 else
4046 c.var = fi->id;
4047 c.offset = part;
4048 c.type = DEREF;
4051 return c;
4054 /* Produce constraints for argument ARG of call STMT with eaf flags
4055 FLAGS. RESULTS is array holding constraints for return value.
4056 CALLESCAPE_ID is variable where call loocal escapes are added.
4057 WRITES_GLOVEL_MEMORY is true if callee may write global memory. */
4059 static void
4060 handle_call_arg (gcall *stmt, tree arg, vec<ce_s> *results, int flags,
4061 int callescape_id, bool writes_global_memory)
4063 int relevant_indirect_flags = EAF_NO_INDIRECT_CLOBBER | EAF_NO_INDIRECT_READ
4064 | EAF_NO_INDIRECT_ESCAPE;
4065 int relevant_flags = relevant_indirect_flags
4066 | EAF_NO_DIRECT_CLOBBER
4067 | EAF_NO_DIRECT_READ
4068 | EAF_NO_DIRECT_ESCAPE;
4069 if (gimple_call_lhs (stmt))
4071 relevant_flags |= EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY;
4072 relevant_indirect_flags |= EAF_NOT_RETURNED_INDIRECTLY;
4074 /* If value is never read from it can not be returned indirectly
4075 (except through the escape solution).
4076 For all flags we get these implications right except for
4077 not_returned because we miss return functions in ipa-prop. */
4079 if (flags & EAF_NO_DIRECT_READ)
4080 flags |= EAF_NOT_RETURNED_INDIRECTLY;
4083 /* If the argument is not used we can ignore it.
4084 Similarly argument is invisile for us if it not clobbered, does not
4085 escape, is not read and can not be returned. */
4086 if ((flags & EAF_UNUSED) || ((flags & relevant_flags) == relevant_flags))
4087 return;
4089 /* Produce varinfo for direct accesses to ARG. */
4090 varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
4091 tem->is_reg_var = true;
4092 make_constraint_to (tem->id, arg);
4093 make_any_offset_constraints (tem);
4095 bool callarg_transitive = false;
4097 /* As an compile time optimization if we make no difference between
4098 direct and indirect accesses make arg transitively closed.
4099 This avoids the need to build indir arg and do everything twice. */
4100 if (((flags & EAF_NO_INDIRECT_CLOBBER) != 0)
4101 == ((flags & EAF_NO_DIRECT_CLOBBER) != 0)
4102 && (((flags & EAF_NO_INDIRECT_READ) != 0)
4103 == ((flags & EAF_NO_DIRECT_READ) != 0))
4104 && (((flags & EAF_NO_INDIRECT_ESCAPE) != 0)
4105 == ((flags & EAF_NO_DIRECT_ESCAPE) != 0))
4106 && (((flags & EAF_NOT_RETURNED_INDIRECTLY) != 0)
4107 == ((flags & EAF_NOT_RETURNED_DIRECTLY) != 0)))
4109 make_transitive_closure_constraints (tem);
4110 callarg_transitive = true;
4111 gcc_checking_assert (!(flags & EAF_NO_DIRECT_READ));
4114 /* If necessary, produce varinfo for indirect accesses to ARG. */
4115 varinfo_t indir_tem = NULL;
4116 if (!callarg_transitive
4117 && (flags & relevant_indirect_flags) != relevant_indirect_flags)
4119 struct constraint_expr lhs, rhs;
4120 indir_tem = new_var_info (NULL_TREE, "indircallarg", true);
4121 indir_tem->is_reg_var = true;
4123 /* indir_term = *tem. */
4124 lhs.type = SCALAR;
4125 lhs.var = indir_tem->id;
4126 lhs.offset = 0;
4128 rhs.type = DEREF;
4129 rhs.var = tem->id;
4130 rhs.offset = UNKNOWN_OFFSET;
4131 process_constraint (new_constraint (lhs, rhs));
4133 make_any_offset_constraints (indir_tem);
4135 /* If we do not read indirectly there is no need for transitive closure.
4136 We know there is only one level of indirection. */
4137 if (!(flags & EAF_NO_INDIRECT_READ))
4138 make_transitive_closure_constraints (indir_tem);
4139 gcc_checking_assert (!(flags & EAF_NO_DIRECT_READ));
4142 if (gimple_call_lhs (stmt))
4144 if (!(flags & EAF_NOT_RETURNED_DIRECTLY))
4146 struct constraint_expr cexpr;
4147 cexpr.var = tem->id;
4148 cexpr.type = SCALAR;
4149 cexpr.offset = 0;
4150 results->safe_push (cexpr);
4152 if (!callarg_transitive & !(flags & EAF_NOT_RETURNED_INDIRECTLY))
4154 struct constraint_expr cexpr;
4155 cexpr.var = indir_tem->id;
4156 cexpr.type = SCALAR;
4157 cexpr.offset = 0;
4158 results->safe_push (cexpr);
4162 if (!(flags & EAF_NO_DIRECT_READ))
4164 varinfo_t uses = get_call_use_vi (stmt);
4165 make_copy_constraint (uses, tem->id);
4166 if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_READ))
4167 make_copy_constraint (uses, indir_tem->id);
4169 else
4170 /* To read indirectly we need to read directly. */
4171 gcc_checking_assert (flags & EAF_NO_INDIRECT_READ);
4173 if (!(flags & EAF_NO_DIRECT_CLOBBER))
4175 struct constraint_expr lhs, rhs;
4177 /* *arg = callescape. */
4178 lhs.type = DEREF;
4179 lhs.var = tem->id;
4180 lhs.offset = 0;
4182 rhs.type = SCALAR;
4183 rhs.var = callescape_id;
4184 rhs.offset = 0;
4185 process_constraint (new_constraint (lhs, rhs));
4187 /* callclobbered = arg. */
4188 make_copy_constraint (get_call_clobber_vi (stmt), tem->id);
4190 if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_CLOBBER))
4192 struct constraint_expr lhs, rhs;
4194 /* *indir_arg = callescape. */
4195 lhs.type = DEREF;
4196 lhs.var = indir_tem->id;
4197 lhs.offset = 0;
4199 rhs.type = SCALAR;
4200 rhs.var = callescape_id;
4201 rhs.offset = 0;
4202 process_constraint (new_constraint (lhs, rhs));
4204 /* callclobbered = indir_arg. */
4205 make_copy_constraint (get_call_clobber_vi (stmt), indir_tem->id);
4208 if (!(flags & (EAF_NO_DIRECT_ESCAPE | EAF_NO_INDIRECT_ESCAPE)))
4210 struct constraint_expr lhs, rhs;
4212 /* callescape = arg; */
4213 lhs.var = callescape_id;
4214 lhs.offset = 0;
4215 lhs.type = SCALAR;
4217 rhs.var = tem->id;
4218 rhs.offset = 0;
4219 rhs.type = SCALAR;
4220 process_constraint (new_constraint (lhs, rhs));
4222 if (writes_global_memory)
4223 make_escape_constraint (arg);
4225 else if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_ESCAPE))
4227 struct constraint_expr lhs, rhs;
4229 /* callescape = *(indir_arg + UNKNOWN); */
4230 lhs.var = callescape_id;
4231 lhs.offset = 0;
4232 lhs.type = SCALAR;
4234 rhs.var = indir_tem->id;
4235 rhs.offset = 0;
4236 rhs.type = SCALAR;
4237 process_constraint (new_constraint (lhs, rhs));
4239 if (writes_global_memory)
4240 make_indirect_escape_constraint (tem);
4244 /* Determine global memory access of call STMT and update
4245 WRITES_GLOBAL_MEMORY, READS_GLOBAL_MEMORY and USES_GLOBAL_MEMORY. */
4247 static void
4248 determine_global_memory_access (gcall *stmt,
4249 bool *writes_global_memory,
4250 bool *reads_global_memory,
4251 bool *uses_global_memory)
4253 tree callee;
4254 cgraph_node *node;
4255 modref_summary *summary;
4257 /* We need to detrmine reads to set uses. */
4258 gcc_assert (!uses_global_memory || reads_global_memory);
4260 if ((callee = gimple_call_fndecl (stmt)) != NULL_TREE
4261 && (node = cgraph_node::get (callee)) != NULL
4262 && (summary = get_modref_function_summary (node)))
4264 if (writes_global_memory && *writes_global_memory)
4265 *writes_global_memory = summary->global_memory_written;
4266 if (reads_global_memory && *reads_global_memory)
4267 *reads_global_memory = summary->global_memory_read;
4268 if (reads_global_memory && uses_global_memory
4269 && !summary->calls_interposable
4270 && !*reads_global_memory && node->binds_to_current_def_p ())
4271 *uses_global_memory = false;
4273 if ((writes_global_memory && *writes_global_memory)
4274 || (uses_global_memory && *uses_global_memory)
4275 || (reads_global_memory && *reads_global_memory))
4277 attr_fnspec fnspec = gimple_call_fnspec (stmt);
4278 if (fnspec.known_p ())
4280 if (writes_global_memory
4281 && !fnspec.global_memory_written_p ())
4282 *writes_global_memory = false;
4283 if (reads_global_memory && !fnspec.global_memory_read_p ())
4285 *reads_global_memory = false;
4286 if (uses_global_memory)
4287 *uses_global_memory = false;
4293 /* For non-IPA mode, generate constraints necessary for a call on the
4294 RHS and collect return value constraint to RESULTS to be used later in
4295 handle_lhs_call.
4297 IMPLICIT_EAF_FLAGS are added to each function argument. If
4298 WRITES_GLOBAL_MEMORY is true function is assumed to possibly write to global
4299 memory. Similar for READS_GLOBAL_MEMORY. */
4301 static void
4302 handle_rhs_call (gcall *stmt, vec<ce_s> *results,
4303 int implicit_eaf_flags,
4304 bool writes_global_memory,
4305 bool reads_global_memory)
4307 determine_global_memory_access (stmt, &writes_global_memory,
4308 &reads_global_memory,
4309 NULL);
4311 varinfo_t callescape = new_var_info (NULL_TREE, "callescape", true);
4313 /* If function can use global memory, add it to callescape
4314 and to possible return values. If not we can still use/return addresses
4315 of global symbols. */
4316 struct constraint_expr lhs, rhs;
4318 lhs.type = SCALAR;
4319 lhs.var = callescape->id;
4320 lhs.offset = 0;
4322 rhs.type = reads_global_memory ? SCALAR : ADDRESSOF;
4323 rhs.var = nonlocal_id;
4324 rhs.offset = 0;
4326 process_constraint (new_constraint (lhs, rhs));
4327 results->safe_push (rhs);
4329 varinfo_t uses = get_call_use_vi (stmt);
4330 make_copy_constraint (uses, callescape->id);
4332 for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
4334 tree arg = gimple_call_arg (stmt, i);
4335 int flags = gimple_call_arg_flags (stmt, i);
4336 handle_call_arg (stmt, arg, results,
4337 flags | implicit_eaf_flags,
4338 callescape->id, writes_global_memory);
4341 /* The static chain escapes as well. */
4342 if (gimple_call_chain (stmt))
4343 handle_call_arg (stmt, gimple_call_chain (stmt), results,
4344 implicit_eaf_flags
4345 | gimple_call_static_chain_flags (stmt),
4346 callescape->id, writes_global_memory);
4348 /* And if we applied NRV the address of the return slot escapes as well. */
4349 if (gimple_call_return_slot_opt_p (stmt)
4350 && gimple_call_lhs (stmt) != NULL_TREE
4351 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
4353 int flags = gimple_call_retslot_flags (stmt);
4354 const int relevant_flags = EAF_NO_DIRECT_ESCAPE
4355 | EAF_NOT_RETURNED_DIRECTLY;
4357 if (!(flags & EAF_UNUSED) && (flags & relevant_flags) != relevant_flags)
4359 auto_vec<ce_s> tmpc;
4361 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
4363 if (!(flags & EAF_NO_DIRECT_ESCAPE))
4365 make_constraints_to (callescape->id, tmpc);
4366 if (writes_global_memory)
4367 make_constraints_to (escaped_id, tmpc);
4369 if (!(flags & EAF_NOT_RETURNED_DIRECTLY))
4371 struct constraint_expr *c;
4372 unsigned i;
4373 FOR_EACH_VEC_ELT (tmpc, i, c)
4374 results->safe_push (*c);
4380 /* For non-IPA mode, generate constraints necessary for a call
4381 that returns a pointer and assigns it to LHS. This simply makes
4382 the LHS point to global and escaped variables. */
4384 static void
4385 handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> &rhsc,
4386 tree fndecl)
4388 auto_vec<ce_s> lhsc;
4390 get_constraint_for (lhs, &lhsc);
4391 /* If the store is to a global decl make sure to
4392 add proper escape constraints. */
4393 lhs = get_base_address (lhs);
4394 if (lhs
4395 && DECL_P (lhs)
4396 && is_global_var (lhs))
4398 struct constraint_expr tmpc;
4399 tmpc.var = escaped_id;
4400 tmpc.offset = 0;
4401 tmpc.type = SCALAR;
4402 lhsc.safe_push (tmpc);
4405 /* If the call returns an argument unmodified override the rhs
4406 constraints. */
4407 if (flags & ERF_RETURNS_ARG
4408 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
4410 tree arg;
4411 rhsc.create (0);
4412 arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
4413 get_constraint_for (arg, &rhsc);
4414 process_all_all_constraints (lhsc, rhsc);
4415 rhsc.release ();
4417 else if (flags & ERF_NOALIAS)
4419 varinfo_t vi;
4420 struct constraint_expr tmpc;
4421 rhsc.create (0);
4422 vi = make_heapvar ("HEAP", true);
4423 /* We are marking allocated storage local, we deal with it becoming
4424 global by escaping and setting of vars_contains_escaped_heap. */
4425 DECL_EXTERNAL (vi->decl) = 0;
4426 vi->is_global_var = 0;
4427 /* If this is not a real malloc call assume the memory was
4428 initialized and thus may point to global memory. All
4429 builtin functions with the malloc attribute behave in a sane way. */
4430 if (!fndecl
4431 || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
4432 make_constraint_from (vi, nonlocal_id);
4433 tmpc.var = vi->id;
4434 tmpc.offset = 0;
4435 tmpc.type = ADDRESSOF;
4436 rhsc.safe_push (tmpc);
4437 process_all_all_constraints (lhsc, rhsc);
4438 rhsc.release ();
4440 else
4441 process_all_all_constraints (lhsc, rhsc);
4445 /* Return the varinfo for the callee of CALL. */
4447 static varinfo_t
4448 get_fi_for_callee (gcall *call)
4450 tree decl, fn = gimple_call_fn (call);
4452 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4453 fn = OBJ_TYPE_REF_EXPR (fn);
4455 /* If we can directly resolve the function being called, do so.
4456 Otherwise, it must be some sort of indirect expression that
4457 we should still be able to handle. */
4458 decl = gimple_call_addr_fndecl (fn);
4459 if (decl)
4460 return get_vi_for_tree (decl);
4462 /* If the function is anything other than a SSA name pointer we have no
4463 clue and should be getting ANYFN (well, ANYTHING for now). */
4464 if (!fn || TREE_CODE (fn) != SSA_NAME)
4465 return get_varinfo (anything_id);
4467 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4468 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4469 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4470 fn = SSA_NAME_VAR (fn);
4472 return get_vi_for_tree (fn);
4475 /* Create constraints for assigning call argument ARG to the incoming parameter
4476 INDEX of function FI. */
4478 static void
4479 find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg)
4481 struct constraint_expr lhs;
4482 lhs = get_function_part_constraint (fi, fi_parm_base + index);
4484 auto_vec<ce_s, 2> rhsc;
4485 get_constraint_for_rhs (arg, &rhsc);
4487 unsigned j;
4488 struct constraint_expr *rhsp;
4489 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4490 process_constraint (new_constraint (lhs, *rhsp));
4493 /* Return true if FNDECL may be part of another lto partition. */
4495 static bool
4496 fndecl_maybe_in_other_partition (tree fndecl)
4498 cgraph_node *fn_node = cgraph_node::get (fndecl);
4499 if (fn_node == NULL)
4500 return true;
4502 return fn_node->in_other_partition;
4505 /* Create constraints for the builtin call T. Return true if the call
4506 was handled, otherwise false. */
/* FN is the function the call statement T appears in; it is only
   consulted for builtins that need surrounding-function context
   (va_start and __builtin_return below).  The callee FNDECL of T is
   known to be a builtin declaration when this is invoked.  */
4508 static bool
4509 find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
4511 tree fndecl = gimple_call_fndecl (t);
4512 auto_vec<ce_s, 2> lhsc;
4513 auto_vec<ce_s, 4> rhsc;
4514 varinfo_t fi;
4516 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4517 /* ??? All builtins that are handled here need to be handled
4518 in the alias-oracle query functions explicitly! */
4519 switch (DECL_FUNCTION_CODE (fndecl))
4521 /* All the following functions return a pointer to the same object
4522 as their first argument points to. The functions do not add
4523 to the ESCAPED solution. The functions make the first argument
4524 pointed to memory point to what the second argument pointed to
4525 memory points to. */
4526 case BUILT_IN_STRCPY:
4527 case BUILT_IN_STRNCPY:
4528 case BUILT_IN_BCOPY:
4529 case BUILT_IN_MEMCPY:
4530 case BUILT_IN_MEMMOVE:
4531 case BUILT_IN_MEMPCPY:
4532 case BUILT_IN_STPCPY:
4533 case BUILT_IN_STPNCPY:
4534 case BUILT_IN_STRCAT:
4535 case BUILT_IN_STRNCAT:
4536 case BUILT_IN_STRCPY_CHK:
4537 case BUILT_IN_STRNCPY_CHK:
4538 case BUILT_IN_MEMCPY_CHK:
4539 case BUILT_IN_MEMMOVE_CHK:
4540 case BUILT_IN_MEMPCPY_CHK:
4541 case BUILT_IN_STPCPY_CHK:
4542 case BUILT_IN_STPNCPY_CHK:
4543 case BUILT_IN_STRCAT_CHK:
4544 case BUILT_IN_STRNCAT_CHK:
4545 case BUILT_IN_TM_MEMCPY:
4546 case BUILT_IN_TM_MEMMOVE:
4548 tree res = gimple_call_lhs (t);
/* bcopy has (src, dest) argument order; all the others (dest, src).  */
4549 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4550 == BUILT_IN_BCOPY ? 1 : 0));
4551 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4552 == BUILT_IN_BCOPY ? 0 : 1));
4553 if (res != NULL_TREE)
/* The mempcpy/stpcpy family return DEST plus an offset, so use the
   offsetted constraint; the rest return DEST itself.  */
4555 get_constraint_for (res, &lhsc);
4556 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4557 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4558 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4559 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4560 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4561 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4562 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4563 else
4564 get_constraint_for (dest, &rhsc);
4565 process_all_all_constraints (lhsc, rhsc);
4566 lhsc.truncate (0);
4567 rhsc.truncate (0);
/* Model the copy of pointed-to contents: *dest = *src.  */
4569 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4570 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4571 do_deref (&lhsc);
4572 do_deref (&rhsc);
4573 process_all_all_constraints (lhsc, rhsc);
4574 return true;
4576 case BUILT_IN_MEMSET:
4577 case BUILT_IN_MEMSET_CHK:
4578 case BUILT_IN_TM_MEMSET:
4580 tree res = gimple_call_lhs (t);
4581 tree dest = gimple_call_arg (t, 0);
4582 unsigned i;
4583 ce_s *lhsp;
4584 struct constraint_expr ac;
4585 if (res != NULL_TREE)
4587 get_constraint_for (res, &lhsc);
4588 get_constraint_for (dest, &rhsc);
4589 process_all_all_constraints (lhsc, rhsc);
4590 lhsc.truncate (0);
4592 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4593 do_deref (&lhsc);
/* Memory filled with literal zero points to nothing (when NULL
   dereference is undefined); otherwise treat it as holding some
   integer value.  */
4594 if (flag_delete_null_pointer_checks
4595 && integer_zerop (gimple_call_arg (t, 1)))
4597 ac.type = ADDRESSOF;
4598 ac.var = nothing_id;
4600 else
4602 ac.type = SCALAR;
4603 ac.var = integer_id;
4605 ac.offset = 0;
4606 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4607 process_constraint (new_constraint (*lhsp, ac));
4608 return true;
4610 case BUILT_IN_STACK_SAVE:
4611 case BUILT_IN_STACK_RESTORE:
4612 /* Nothing interesting happens. */
4613 return true;
4614 case BUILT_IN_ALLOCA:
4615 case BUILT_IN_ALLOCA_WITH_ALIGN:
4616 case BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX:
4618 tree ptr = gimple_call_lhs (t);
4619 if (ptr == NULL_TREE)
4620 return true;
4621 get_constraint_for (ptr, &lhsc);
4622 varinfo_t vi = make_heapvar ("HEAP", true);
4623 /* Alloca storage is never global. To exempt it from escaped
4624 handling make it a non-heap var. */
4625 DECL_EXTERNAL (vi->decl) = 0;
4626 vi->is_global_var = 0;
4627 vi->is_heap_var = 0;
4628 struct constraint_expr tmpc;
4629 tmpc.var = vi->id;
4630 tmpc.offset = 0;
4631 tmpc.type = ADDRESSOF;
4632 rhsc.safe_push (tmpc);
4633 process_all_all_constraints (lhsc, rhsc);
4634 return true;
4636 case BUILT_IN_POSIX_MEMALIGN:
4638 tree ptrptr = gimple_call_arg (t, 0);
4639 get_constraint_for (ptrptr, &lhsc);
4640 do_deref (&lhsc);
4641 varinfo_t vi = make_heapvar ("HEAP", true);
4642 /* We are marking allocated storage local, we deal with it becoming
4643 global by escaping and setting of vars_contains_escaped_heap. */
4644 DECL_EXTERNAL (vi->decl) = 0;
4645 vi->is_global_var = 0;
4646 struct constraint_expr tmpc;
4647 tmpc.var = vi->id;
4648 tmpc.offset = 0;
4649 tmpc.type = ADDRESSOF;
4650 rhsc.safe_push (tmpc);
4651 process_all_all_constraints (lhsc, rhsc);
4652 return true;
4654 case BUILT_IN_ASSUME_ALIGNED:
4656 tree res = gimple_call_lhs (t);
4657 tree dest = gimple_call_arg (t, 0);
4658 if (res != NULL_TREE)
4660 get_constraint_for (res, &lhsc);
4661 get_constraint_for (dest, &rhsc);
4662 process_all_all_constraints (lhsc, rhsc);
4664 return true;
4666 /* All the following functions do not return pointers, do not
4667 modify the points-to sets of memory reachable from their
4668 arguments and do not add to the ESCAPED solution. */
4669 case BUILT_IN_SINCOS:
4670 case BUILT_IN_SINCOSF:
4671 case BUILT_IN_SINCOSL:
4672 case BUILT_IN_FREXP:
4673 case BUILT_IN_FREXPF:
4674 case BUILT_IN_FREXPL:
4675 case BUILT_IN_GAMMA_R:
4676 case BUILT_IN_GAMMAF_R:
4677 case BUILT_IN_GAMMAL_R:
4678 case BUILT_IN_LGAMMA_R:
4679 case BUILT_IN_LGAMMAF_R:
4680 case BUILT_IN_LGAMMAL_R:
4681 case BUILT_IN_MODF:
4682 case BUILT_IN_MODFF:
4683 case BUILT_IN_MODFL:
4684 case BUILT_IN_REMQUO:
4685 case BUILT_IN_REMQUOF:
4686 case BUILT_IN_REMQUOL:
4687 case BUILT_IN_FREE:
4688 return true;
/* Duplication builtins return a fresh object whose contents are
   copied from (part of) the argument string; for realloc the result
   may additionally alias the argument itself.  */
4689 case BUILT_IN_STRDUP:
4690 case BUILT_IN_STRNDUP:
4691 case BUILT_IN_REALLOC:
4692 if (gimple_call_lhs (t))
4694 auto_vec<ce_s> rhsc;
4695 handle_lhs_call (t, gimple_call_lhs (t),
4696 gimple_call_return_flags (t) | ERF_NOALIAS,
4697 rhsc, fndecl);
4698 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4699 NULL_TREE, &lhsc);
4700 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4701 NULL_TREE, &rhsc);
4702 do_deref (&lhsc);
4703 do_deref (&rhsc);
4704 process_all_all_constraints (lhsc, rhsc);
4705 lhsc.truncate (0);
4706 rhsc.truncate (0);
4707 /* For realloc the resulting pointer can be equal to the
4708 argument as well. But only doing this wouldn't be
4709 correct because with ptr == 0 realloc behaves like malloc. */
4710 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
4712 get_constraint_for (gimple_call_lhs (t), &lhsc);
4713 get_constraint_for (gimple_call_arg (t, 0), &rhsc);
4714 process_all_all_constraints (lhsc, rhsc);
4716 return true;
4718 break;
4719 /* String / character search functions return a pointer into the
4720 source string or NULL. */
4721 case BUILT_IN_INDEX:
4722 case BUILT_IN_STRCHR:
4723 case BUILT_IN_STRRCHR:
4724 case BUILT_IN_MEMCHR:
4725 case BUILT_IN_STRSTR:
4726 case BUILT_IN_STRPBRK:
4727 if (gimple_call_lhs (t))
4729 tree src = gimple_call_arg (t, 0);
4730 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4731 constraint_expr nul;
4732 nul.var = nothing_id;
4733 nul.offset = 0;
4734 nul.type = ADDRESSOF;
4735 rhsc.safe_push (nul);
4736 get_constraint_for (gimple_call_lhs (t), &lhsc);
4737 process_all_all_constraints (lhsc, rhsc);
4739 return true;
4740 /* Pure functions that return something not based on any object and
4741 that use the memory pointed to by their arguments (but not
4742 transitively). */
4743 case BUILT_IN_STRCMP:
4744 case BUILT_IN_STRCMP_EQ:
4745 case BUILT_IN_STRNCMP:
4746 case BUILT_IN_STRNCMP_EQ:
4747 case BUILT_IN_STRCASECMP:
4748 case BUILT_IN_STRNCASECMP:
4749 case BUILT_IN_MEMCMP:
4750 case BUILT_IN_BCMP:
4751 case BUILT_IN_STRSPN:
4752 case BUILT_IN_STRCSPN:
4754 varinfo_t uses = get_call_use_vi (t);
4755 make_any_offset_constraints (uses);
4756 make_constraint_to (uses->id, gimple_call_arg (t, 0));
4757 make_constraint_to (uses->id, gimple_call_arg (t, 1));
4758 /* No constraints are necessary for the return value. */
4759 return true;
4761 case BUILT_IN_STRLEN:
4763 varinfo_t uses = get_call_use_vi (t);
4764 make_any_offset_constraints (uses);
4765 make_constraint_to (uses->id, gimple_call_arg (t, 0));
4766 /* No constraints are necessary for the return value. */
4767 return true;
4769 case BUILT_IN_OBJECT_SIZE:
4770 case BUILT_IN_CONSTANT_P:
4772 /* No constraints are necessary for the return value or the
4773 arguments. */
4774 return true;
4776 /* Trampolines are special - they set up passing the static
4777 frame. */
4778 case BUILT_IN_INIT_TRAMPOLINE:
4780 tree tramp = gimple_call_arg (t, 0);
4781 tree nfunc = gimple_call_arg (t, 1);
4782 tree frame = gimple_call_arg (t, 2);
4783 unsigned i;
4784 struct constraint_expr lhs, *rhsp;
4785 if (in_ipa_mode)
4787 varinfo_t nfi = NULL;
4788 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4789 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4790 if (nfi)
4792 lhs = get_function_part_constraint (nfi, fi_static_chain);
4793 get_constraint_for (frame, &rhsc);
4794 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4795 process_constraint (new_constraint (lhs, *rhsp));
4796 rhsc.truncate (0);
4798 /* Make the frame point to the function for
4799 the trampoline adjustment call. */
4800 get_constraint_for (tramp, &lhsc);
4801 do_deref (&lhsc);
4802 get_constraint_for (nfunc, &rhsc);
4803 process_all_all_constraints (lhsc, rhsc);
4805 return true;
4808 /* Else fallthru to generic handling which will let
4809 the frame escape. */
4810 break;
4812 case BUILT_IN_ADJUST_TRAMPOLINE:
4814 tree tramp = gimple_call_arg (t, 0);
4815 tree res = gimple_call_lhs (t);
4816 if (in_ipa_mode && res)
4818 get_constraint_for (res, &lhsc);
4819 get_constraint_for (tramp, &rhsc);
4820 do_deref (&rhsc);
4821 process_all_all_constraints (lhsc, rhsc);
4823 return true;
4825 CASE_BUILT_IN_TM_STORE (1):
4826 CASE_BUILT_IN_TM_STORE (2):
4827 CASE_BUILT_IN_TM_STORE (4):
4828 CASE_BUILT_IN_TM_STORE (8):
4829 CASE_BUILT_IN_TM_STORE (FLOAT):
4830 CASE_BUILT_IN_TM_STORE (DOUBLE):
4831 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4832 CASE_BUILT_IN_TM_STORE (M64):
4833 CASE_BUILT_IN_TM_STORE (M128):
4834 CASE_BUILT_IN_TM_STORE (M256):
4836 tree addr = gimple_call_arg (t, 0);
4837 tree src = gimple_call_arg (t, 1);
/* A TM store is *addr = src for points-to purposes.  */
4839 get_constraint_for (addr, &lhsc);
4840 do_deref (&lhsc);
4841 get_constraint_for (src, &rhsc);
4842 process_all_all_constraints (lhsc, rhsc);
4843 return true;
4845 CASE_BUILT_IN_TM_LOAD (1):
4846 CASE_BUILT_IN_TM_LOAD (2):
4847 CASE_BUILT_IN_TM_LOAD (4):
4848 CASE_BUILT_IN_TM_LOAD (8):
4849 CASE_BUILT_IN_TM_LOAD (FLOAT):
4850 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4851 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4852 CASE_BUILT_IN_TM_LOAD (M64):
4853 CASE_BUILT_IN_TM_LOAD (M128):
4854 CASE_BUILT_IN_TM_LOAD (M256):
4856 tree dest = gimple_call_lhs (t);
4857 tree addr = gimple_call_arg (t, 0);
/* A TM load is dest = *addr for points-to purposes.  */
4859 get_constraint_for (dest, &lhsc);
4860 get_constraint_for (addr, &rhsc);
4861 do_deref (&rhsc);
4862 process_all_all_constraints (lhsc, rhsc);
4863 return true;
4865 /* Variadic argument handling needs to be handled in IPA
4866 mode as well. */
4867 case BUILT_IN_VA_START:
4869 tree valist = gimple_call_arg (t, 0);
4870 struct constraint_expr rhs, *lhsp;
4871 unsigned i;
4872 get_constraint_for_ptr_offset (valist, NULL_TREE, &lhsc);
4873 do_deref (&lhsc);
4874 /* The va_list gets access to pointers in variadic
4875 arguments. Which we know in the case of IPA analysis
4876 and otherwise are just all nonlocal variables. */
4877 if (in_ipa_mode)
4879 fi = lookup_vi_for_tree (fn->decl);
4880 rhs = get_function_part_constraint (fi, ~0);
4881 rhs.type = ADDRESSOF;
4883 else
4885 rhs.var = nonlocal_id;
4886 rhs.type = ADDRESSOF;
4887 rhs.offset = 0;
4889 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4890 process_constraint (new_constraint (*lhsp, rhs));
4891 /* va_list is clobbered. */
4892 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4893 return true;
4895 /* va_end doesn't have any effect that matters. */
4896 case BUILT_IN_VA_END:
4897 return true;
4898 /* Alternate return. Simply give up for now. */
4899 case BUILT_IN_RETURN:
4901 fi = NULL;
4902 if (!in_ipa_mode
4903 || !(fi = get_vi_for_tree (fn->decl)))
4904 make_constraint_from (get_varinfo (escaped_id), anything_id);
4905 else if (in_ipa_mode
4906 && fi != NULL)
4908 struct constraint_expr lhs, rhs;
4909 lhs = get_function_part_constraint (fi, fi_result);
4910 rhs.var = anything_id;
4911 rhs.offset = 0;
4912 rhs.type = SCALAR;
4913 process_constraint (new_constraint (lhs, rhs));
4915 return true;
4917 case BUILT_IN_GOMP_PARALLEL:
4918 case BUILT_IN_GOACC_PARALLEL:
4920 if (in_ipa_mode)
4922 unsigned int fnpos, argpos;
4923 switch (DECL_FUNCTION_CODE (fndecl))
4925 case BUILT_IN_GOMP_PARALLEL:
4926 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
4927 fnpos = 0;
4928 argpos = 1;
4929 break;
4930 case BUILT_IN_GOACC_PARALLEL:
4931 /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
4932 sizes, kinds, ...). */
4933 fnpos = 1;
4934 argpos = 3;
4935 break;
4936 default:
4937 gcc_unreachable ();
4940 tree fnarg = gimple_call_arg (t, fnpos);
4941 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
4942 tree fndecl = TREE_OPERAND (fnarg, 0);
4943 if (fndecl_maybe_in_other_partition (fndecl))
4944 /* Fallthru to general call handling. */
4945 break;
4947 tree arg = gimple_call_arg (t, argpos);
/* Model the outlined region as a direct call passing ARG as its
   single data argument.  */
4949 varinfo_t fi = get_vi_for_tree (fndecl);
4950 find_func_aliases_for_call_arg (fi, 0, arg);
4951 return true;
4953 /* Else fallthru to generic call handling. */
4954 break;
4956 /* printf-style functions may have hooks to set pointers to
4957 point to somewhere into the generated string. Leave them
4958 for a later exercise... */
4959 default:
4960 /* Fallthru to general call handling. */;
/* Builtin not handled above - tell the caller to apply the generic
   call handling instead.  */
4963 return false;
4966 /* Create constraints for the call T. */
/* FN is the function containing T.  Builtin calls are tried first via
   find_func_aliases_for_builtin_call.  Remaining calls are modeled
   either conservatively through handle_rhs_call/handle_lhs_call (when
   not in IPA mode, or when no function info exists for the callee) or
   precisely, by connecting the actual arguments, the return value and
   the static chain to the callee's function-info variables.  */
4968 static void
4969 find_func_aliases_for_call (struct function *fn, gcall *t)
4971 tree fndecl = gimple_call_fndecl (t);
4972 varinfo_t fi;
4974 if (fndecl != NULL_TREE
4975 && fndecl_built_in_p (fndecl)
4976 && find_func_aliases_for_builtin_call (fn, t))
4977 return;
/* Calls to IFN_DEFERRED_INIT contribute no points-to information;
   skip them entirely.  */
4979 if (gimple_call_internal_p (t, IFN_DEFERRED_INIT))
4980 return;
4982 fi = get_fi_for_callee (t);
4983 if (!in_ipa_mode
4984 || (fi->decl && fndecl && !fi->is_fn_info))
4986 auto_vec<ce_s, 16> rhsc;
4987 int flags = gimple_call_flags (t);
4989 /* Const functions can return their arguments and addresses
4990 of global memory but not of escaped memory. */
4991 if (flags & (ECF_CONST|ECF_NOVOPS))
4993 if (gimple_call_lhs (t))
4994 handle_rhs_call (t, &rhsc, implicit_const_eaf_flags, false, false);
4996 /* Pure functions can return addresses in and of memory
4997 reachable from their arguments, but they are not an escape
4998 point for reachable memory of their arguments. */
4999 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
5000 handle_rhs_call (t, &rhsc, implicit_pure_eaf_flags, false, true);
5001 /* If the call is to a replaceable operator delete and results
5002 from a delete expression as opposed to a direct call to
5003 such operator, then the effects for PTA (in particular
5004 the escaping of the pointer) can be ignored. */
5005 else if (fndecl
5006 && DECL_IS_OPERATOR_DELETE_P (fndecl)
5007 && gimple_call_from_new_or_delete (t))
5009 else
5010 handle_rhs_call (t, &rhsc, 0, true, true);
5011 if (gimple_call_lhs (t))
5012 handle_lhs_call (t, gimple_call_lhs (t),
5013 gimple_call_return_flags (t), rhsc, fndecl);
/* IPA mode with usable function info: wire arguments, result and
   static chain directly to the callee's sub-variables.  */
5015 else
5017 auto_vec<ce_s, 2> rhsc;
5018 tree lhsop;
5019 unsigned j;
5021 /* Assign all the passed arguments to the appropriate incoming
5022 parameters of the function. */
5023 for (j = 0; j < gimple_call_num_args (t); j++)
5025 tree arg = gimple_call_arg (t, j);
5026 find_func_aliases_for_call_arg (fi, j, arg);
5029 /* If we are returning a value, assign it to the result. */
5030 lhsop = gimple_call_lhs (t);
5031 if (lhsop)
5033 auto_vec<ce_s, 2> lhsc;
5034 struct constraint_expr rhs;
5035 struct constraint_expr *lhsp;
5036 bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (t));
5038 get_constraint_for (lhsop, &lhsc);
5039 rhs = get_function_part_constraint (fi, fi_result);
/* For a result returned in memory the fi_result slot holds a
   pointer to it - dereference once.  */
5040 if (aggr_p)
5042 auto_vec<ce_s, 2> tem;
5043 tem.quick_push (rhs);
5044 do_deref (&tem);
5045 gcc_checking_assert (tem.length () == 1);
5046 rhs = tem[0];
5048 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
5049 process_constraint (new_constraint (*lhsp, rhs));
5051 /* If we pass the result decl by reference, honor that. */
5052 if (aggr_p)
5054 struct constraint_expr lhs;
5055 struct constraint_expr *rhsp;
5057 get_constraint_for_address_of (lhsop, &rhsc);
5058 lhs = get_function_part_constraint (fi, fi_result);
5059 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5060 process_constraint (new_constraint (lhs, *rhsp));
5061 rhsc.truncate (0);
5065 /* If we use a static chain, pass it along. */
5066 if (gimple_call_chain (t))
5068 struct constraint_expr lhs;
5069 struct constraint_expr *rhsp;
5071 get_constraint_for (gimple_call_chain (t), &rhsc);
5072 lhs = get_function_part_constraint (fi, fi_static_chain);
5073 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5074 process_constraint (new_constraint (lhs, *rhsp));
5079 /* Walk statement T setting up aliasing constraints according to the
5080 references found in T. This function is the main part of the
5081 constraint builder. FN is the function T belongs to; it is used
5082 when processing calls and escapes through function returns. */
5084 static void
5085 find_func_aliases (struct function *fn, gimple *origt)
5087 gimple *t = origt;
5088 auto_vec<ce_s, 16> lhsc;
5089 auto_vec<ce_s, 16> rhsc;
5090 varinfo_t fi;
5092 /* Now build constraints expressions. */
5093 if (gimple_code (t) == GIMPLE_PHI)
5095 /* For a phi node, assign all the arguments to
5096 the result. */
5097 get_constraint_for (gimple_phi_result (t), &lhsc);
5098 for (unsigned i = 0; i < gimple_phi_num_args (t); i++)
5100 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
5101 process_all_all_constraints (lhsc, rhsc);
5102 rhsc.truncate (0);
5105 /* In IPA mode, we need to generate constraints to pass call
5106 arguments through their calls. There are two cases,
5107 either a GIMPLE_CALL returning a value, or just a plain
5108 GIMPLE_CALL when we are not.
5110 In non-ipa mode, we need to generate constraints for each
5111 pointer passed by address. */
5112 else if (is_gimple_call (t))
5113 find_func_aliases_for_call (fn, as_a <gcall *> (t));
5115 /* Otherwise, just a regular assignment statement. Only care about
5116 operations with pointer result, others are dealt with as escape
5117 points if they have pointer operands. */
5118 else if (is_gimple_assign (t))
5120 /* Otherwise, just a regular assignment statement. */
5121 tree lhsop = gimple_assign_lhs (t);
5122 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
5124 if (rhsop && TREE_CLOBBER_P (rhsop))
5125 /* Ignore clobbers, they don't actually store anything into
5126 the LHS. */
5128 else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
5129 do_structure_copy (lhsop, rhsop);
5130 else
5132 enum tree_code code = gimple_assign_rhs_code (t);
5134 get_constraint_for (lhsop, &lhsc);
5136 if (code == POINTER_PLUS_EXPR)
5137 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
5138 gimple_assign_rhs2 (t), &rhsc);
5139 else if (code == POINTER_DIFF_EXPR)
5140 /* The result is not a pointer (part). */
5142 else if (code == BIT_AND_EXPR
5143 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
5145 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
5146 the pointer. Handle it by offsetting it by UNKNOWN. */
5147 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
5148 NULL_TREE, &rhsc);
5150 else if (code == TRUNC_DIV_EXPR
5151 || code == CEIL_DIV_EXPR
5152 || code == FLOOR_DIV_EXPR
5153 || code == ROUND_DIV_EXPR
5154 || code == EXACT_DIV_EXPR
5155 || code == TRUNC_MOD_EXPR
5156 || code == CEIL_MOD_EXPR
5157 || code == FLOOR_MOD_EXPR
5158 || code == ROUND_MOD_EXPR)
5159 /* Division and modulo transfer the pointer from the LHS. */
5160 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
5161 NULL_TREE, &rhsc);
5162 else if (CONVERT_EXPR_CODE_P (code)
5163 || gimple_assign_single_p (t))
5164 /* See through conversions, single RHS are handled by
5165 get_constraint_for_rhs. */
5166 get_constraint_for_rhs (rhsop, &rhsc);
5167 else if (code == COND_EXPR)
5169 /* The result is a merge of both COND_EXPR arms. */
5170 auto_vec<ce_s, 2> tmp;
5171 struct constraint_expr *rhsp;
5172 unsigned i;
5173 get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
5174 get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
5175 FOR_EACH_VEC_ELT (tmp, i, rhsp)
5176 rhsc.safe_push (*rhsp);
5178 else if (truth_value_p (code))
5179 /* Truth value results are not pointer (parts). Or at least
5180 very unreasonable obfuscation of a part. */
5182 else
5184 /* All other operations are possibly offsetting merges. */
5185 auto_vec<ce_s, 4> tmp;
5186 struct constraint_expr *rhsp;
5187 unsigned i, j;
5188 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
5189 NULL_TREE, &rhsc);
5190 for (i = 2; i < gimple_num_ops (t); ++i)
5192 get_constraint_for_ptr_offset (gimple_op (t, i),
5193 NULL_TREE, &tmp);
5194 FOR_EACH_VEC_ELT (tmp, j, rhsp)
5195 rhsc.safe_push (*rhsp);
5196 tmp.truncate (0);
5199 process_all_all_constraints (lhsc, rhsc);
5201 /* If there is a store to a global variable the rhs escapes. */
5202 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
5203 && DECL_P (lhsop))
5205 varinfo_t vi = get_vi_for_tree (lhsop);
5206 if ((! in_ipa_mode && vi->is_global_var)
5207 || vi->is_ipa_escape_point)
5208 make_escape_constraint (rhsop);
5211 /* Handle escapes through return. */
5212 else if (gimple_code (t) == GIMPLE_RETURN
5213 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
5215 greturn *return_stmt = as_a <greturn *> (t);
5216 fi = NULL;
5217 if (!in_ipa_mode
5218 && SSA_VAR_P (gimple_return_retval (return_stmt)))
5220 /* We handle simple returns by post-processing the solutions. */
5223 if (!(fi = get_vi_for_tree (fn->decl)))
5224 make_escape_constraint (gimple_return_retval (return_stmt));
5225 else if (in_ipa_mode)
5227 struct constraint_expr lhs ;
5228 struct constraint_expr *rhsp;
5229 unsigned i;
5231 lhs = get_function_part_constraint (fi, fi_result);
5232 get_constraint_for_rhs (gimple_return_retval (return_stmt), &rhsc);
5233 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5234 process_constraint (new_constraint (lhs, *rhsp));
5237 /* Handle asms conservatively by adding escape constraints to everything. */
5238 else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
5240 unsigned i, noutputs;
5241 const char **oconstraints;
5242 const char *constraint;
5243 bool allows_mem, allows_reg, is_inout;
5245 noutputs = gimple_asm_noutputs (asm_stmt);
5246 oconstraints = XALLOCAVEC (const char *, noutputs);
5248 for (i = 0; i < noutputs; ++i)
5250 tree link = gimple_asm_output_op (asm_stmt, i);
5251 tree op = TREE_VALUE (link);
5253 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5254 oconstraints[i] = constraint;
5255 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
5256 &allows_reg, &is_inout);
5258 /* A memory constraint makes the address of the operand escape. */
5259 if (!allows_reg && allows_mem)
5260 make_escape_constraint (build_fold_addr_expr (op));
5262 /* The asm may read global memory, so outputs may point to
5263 any global memory. */
5264 if (op)
5266 auto_vec<ce_s, 2> lhsc;
5267 struct constraint_expr rhsc, *lhsp;
5268 unsigned j;
5269 get_constraint_for (op, &lhsc);
5270 rhsc.var = nonlocal_id;
5271 rhsc.offset = 0;
5272 rhsc.type = SCALAR;
5273 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
5274 process_constraint (new_constraint (*lhsp, rhsc));
5277 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5279 tree link = gimple_asm_input_op (asm_stmt, i);
5280 tree op = TREE_VALUE (link);
5282 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5284 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
5285 &allows_mem, &allows_reg);
5287 /* A memory constraint makes the address of the operand escape. */
5288 if (!allows_reg && allows_mem)
5289 make_escape_constraint (build_fold_addr_expr (op));
5290 /* Strictly we'd only need the constraint to ESCAPED if
5291 the asm clobbers memory, otherwise using something
5292 along the lines of per-call clobbers/uses would be enough. */
5293 else if (op)
5294 make_escape_constraint (op);
5300 /* Create a constraint adding to the clobber set of FI the memory
5301 pointed to by PTR. */
5303 static void
5304 process_ipa_clobber (varinfo_t fi, tree ptr)
5306 vec<ce_s> ptrc = vNULL;
5307 struct constraint_expr *c, lhs;
5308 unsigned i;
5309 get_constraint_for_rhs (ptr, &ptrc);
5310 lhs = get_function_part_constraint (fi, fi_clobbers);
5311 FOR_EACH_VEC_ELT (ptrc, i, c)
5312 process_constraint (new_constraint (lhs, *c));
5313 ptrc.release ();
5316 /* Walk statement T setting up clobber and use constraints according to the
5317 references found in T. This function is a main part of the
5318 IPA constraint builder. */
5320 static void
5321 find_func_clobbers (struct function *fn, gimple *origt)
5323 gimple *t = origt;
5324 auto_vec<ce_s, 16> lhsc;
5325 auto_vec<ce_s, 16> rhsc;
5326 varinfo_t fi;
5328 /* Add constraints for clobbered/used in IPA mode.
5329 We are not interested in what automatic variables are clobbered
5330 or used as we only use the information in the caller to which
5331 they do not escape. */
5332 gcc_assert (in_ipa_mode);
5334 /* If the stmt refers to memory in any way it better had a VUSE. */
5335 if (gimple_vuse (t) == NULL_TREE)
5336 return;
5338 /* We'd better have function information for the current function. */
5339 fi = lookup_vi_for_tree (fn->decl);
5340 gcc_assert (fi != NULL);
5342 /* Account for stores in assignments and calls. */
5343 if (gimple_vdef (t) != NULL_TREE
5344 && gimple_has_lhs (t))
5346 tree lhs = gimple_get_lhs (t);
5347 tree tem = lhs;
5348 while (handled_component_p (tem))
5349 tem = TREE_OPERAND (tem, 0);
5350 if ((DECL_P (tem)
5351 && !auto_var_in_fn_p (tem, fn->decl))
5352 || INDIRECT_REF_P (tem)
5353 || (TREE_CODE (tem) == MEM_REF
5354 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5355 && auto_var_in_fn_p
5356 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5358 struct constraint_expr lhsc, *rhsp;
5359 unsigned i;
5360 lhsc = get_function_part_constraint (fi, fi_clobbers);
5361 get_constraint_for_address_of (lhs, &rhsc);
5362 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5363 process_constraint (new_constraint (lhsc, *rhsp));
5364 rhsc.truncate (0);
5368 /* Account for uses in assigments and returns. */
5369 if (gimple_assign_single_p (t)
5370 || (gimple_code (t) == GIMPLE_RETURN
5371 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
5373 tree rhs = (gimple_assign_single_p (t)
5374 ? gimple_assign_rhs1 (t)
5375 : gimple_return_retval (as_a <greturn *> (t)));
5376 tree tem = rhs;
5377 while (handled_component_p (tem))
5378 tem = TREE_OPERAND (tem, 0);
5379 if ((DECL_P (tem)
5380 && !auto_var_in_fn_p (tem, fn->decl))
5381 || INDIRECT_REF_P (tem)
5382 || (TREE_CODE (tem) == MEM_REF
5383 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5384 && auto_var_in_fn_p
5385 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5387 struct constraint_expr lhs, *rhsp;
5388 unsigned i;
5389 lhs = get_function_part_constraint (fi, fi_uses);
5390 get_constraint_for_address_of (rhs, &rhsc);
5391 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5392 process_constraint (new_constraint (lhs, *rhsp));
5393 rhsc.truncate (0);
5397 if (gcall *call_stmt = dyn_cast <gcall *> (t))
5399 varinfo_t cfi = NULL;
5400 tree decl = gimple_call_fndecl (t);
5401 struct constraint_expr lhs, rhs;
5402 unsigned i, j;
5404 /* For builtins we do not have separate function info. For those
5405 we do not generate escapes for we have to generate clobbers/uses. */
5406 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
5407 switch (DECL_FUNCTION_CODE (decl))
5409 /* The following functions use and clobber memory pointed to
5410 by their arguments. */
5411 case BUILT_IN_STRCPY:
5412 case BUILT_IN_STRNCPY:
5413 case BUILT_IN_BCOPY:
5414 case BUILT_IN_MEMCPY:
5415 case BUILT_IN_MEMMOVE:
5416 case BUILT_IN_MEMPCPY:
5417 case BUILT_IN_STPCPY:
5418 case BUILT_IN_STPNCPY:
5419 case BUILT_IN_STRCAT:
5420 case BUILT_IN_STRNCAT:
5421 case BUILT_IN_STRCPY_CHK:
5422 case BUILT_IN_STRNCPY_CHK:
5423 case BUILT_IN_MEMCPY_CHK:
5424 case BUILT_IN_MEMMOVE_CHK:
5425 case BUILT_IN_MEMPCPY_CHK:
5426 case BUILT_IN_STPCPY_CHK:
5427 case BUILT_IN_STPNCPY_CHK:
5428 case BUILT_IN_STRCAT_CHK:
5429 case BUILT_IN_STRNCAT_CHK:
5431 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5432 == BUILT_IN_BCOPY ? 1 : 0));
5433 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5434 == BUILT_IN_BCOPY ? 0 : 1));
5435 unsigned i;
5436 struct constraint_expr *rhsp, *lhsp;
5437 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5438 lhs = get_function_part_constraint (fi, fi_clobbers);
5439 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5440 process_constraint (new_constraint (lhs, *lhsp));
5441 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
5442 lhs = get_function_part_constraint (fi, fi_uses);
5443 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5444 process_constraint (new_constraint (lhs, *rhsp));
5445 return;
5447 /* The following function clobbers memory pointed to by
5448 its argument. */
5449 case BUILT_IN_MEMSET:
5450 case BUILT_IN_MEMSET_CHK:
5451 case BUILT_IN_POSIX_MEMALIGN:
5453 tree dest = gimple_call_arg (t, 0);
5454 unsigned i;
5455 ce_s *lhsp;
5456 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5457 lhs = get_function_part_constraint (fi, fi_clobbers);
5458 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5459 process_constraint (new_constraint (lhs, *lhsp));
5460 return;
5462 /* The following functions clobber their second and third
5463 arguments. */
5464 case BUILT_IN_SINCOS:
5465 case BUILT_IN_SINCOSF:
5466 case BUILT_IN_SINCOSL:
5468 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5469 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5470 return;
5472 /* The following functions clobber their second argument. */
5473 case BUILT_IN_FREXP:
5474 case BUILT_IN_FREXPF:
5475 case BUILT_IN_FREXPL:
5476 case BUILT_IN_LGAMMA_R:
5477 case BUILT_IN_LGAMMAF_R:
5478 case BUILT_IN_LGAMMAL_R:
5479 case BUILT_IN_GAMMA_R:
5480 case BUILT_IN_GAMMAF_R:
5481 case BUILT_IN_GAMMAL_R:
5482 case BUILT_IN_MODF:
5483 case BUILT_IN_MODFF:
5484 case BUILT_IN_MODFL:
5486 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5487 return;
5489 /* The following functions clobber their third argument. */
5490 case BUILT_IN_REMQUO:
5491 case BUILT_IN_REMQUOF:
5492 case BUILT_IN_REMQUOL:
5494 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5495 return;
5497 /* The following functions neither read nor clobber memory. */
5498 case BUILT_IN_ASSUME_ALIGNED:
5499 case BUILT_IN_FREE:
5500 return;
5501 /* Trampolines are of no interest to us. */
5502 case BUILT_IN_INIT_TRAMPOLINE:
5503 case BUILT_IN_ADJUST_TRAMPOLINE:
5504 return;
5505 case BUILT_IN_VA_START:
5506 case BUILT_IN_VA_END:
5507 return;
5508 case BUILT_IN_GOMP_PARALLEL:
5509 case BUILT_IN_GOACC_PARALLEL:
5511 unsigned int fnpos, argpos;
5512 unsigned int implicit_use_args[2];
5513 unsigned int num_implicit_use_args = 0;
5514 switch (DECL_FUNCTION_CODE (decl))
5516 case BUILT_IN_GOMP_PARALLEL:
5517 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
5518 fnpos = 0;
5519 argpos = 1;
5520 break;
5521 case BUILT_IN_GOACC_PARALLEL:
5522 /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
5523 sizes, kinds, ...). */
5524 fnpos = 1;
5525 argpos = 3;
5526 implicit_use_args[num_implicit_use_args++] = 4;
5527 implicit_use_args[num_implicit_use_args++] = 5;
5528 break;
5529 default:
5530 gcc_unreachable ();
5533 tree fnarg = gimple_call_arg (t, fnpos);
5534 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
5535 tree fndecl = TREE_OPERAND (fnarg, 0);
5536 if (fndecl_maybe_in_other_partition (fndecl))
5537 /* Fallthru to general call handling. */
5538 break;
5540 varinfo_t cfi = get_vi_for_tree (fndecl);
5542 tree arg = gimple_call_arg (t, argpos);
5544 /* Parameter passed by value is used. */
5545 lhs = get_function_part_constraint (fi, fi_uses);
5546 struct constraint_expr *rhsp;
5547 get_constraint_for (arg, &rhsc);
5548 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5549 process_constraint (new_constraint (lhs, *rhsp));
5550 rhsc.truncate (0);
5552 /* Handle parameters used by the call, but not used in cfi, as
5553 implicitly used by cfi. */
5554 lhs = get_function_part_constraint (cfi, fi_uses);
5555 for (unsigned i = 0; i < num_implicit_use_args; ++i)
5557 tree arg = gimple_call_arg (t, implicit_use_args[i]);
5558 get_constraint_for (arg, &rhsc);
5559 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5560 process_constraint (new_constraint (lhs, *rhsp));
5561 rhsc.truncate (0);
5564 /* The caller clobbers what the callee does. */
5565 lhs = get_function_part_constraint (fi, fi_clobbers);
5566 rhs = get_function_part_constraint (cfi, fi_clobbers);
5567 process_constraint (new_constraint (lhs, rhs));
5569 /* The caller uses what the callee does. */
5570 lhs = get_function_part_constraint (fi, fi_uses);
5571 rhs = get_function_part_constraint (cfi, fi_uses);
5572 process_constraint (new_constraint (lhs, rhs));
5574 return;
5576 /* printf-style functions may have hooks to set pointers to
5577 point to somewhere into the generated string. Leave them
5578 for a later exercise... */
5579 default:
5580 /* Fallthru to general call handling. */;
5583 /* Parameters passed by value are used. */
5584 lhs = get_function_part_constraint (fi, fi_uses);
5585 for (i = 0; i < gimple_call_num_args (t); i++)
5587 struct constraint_expr *rhsp;
5588 tree arg = gimple_call_arg (t, i);
5590 if (TREE_CODE (arg) == SSA_NAME
5591 || is_gimple_min_invariant (arg))
5592 continue;
5594 get_constraint_for_address_of (arg, &rhsc);
5595 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5596 process_constraint (new_constraint (lhs, *rhsp));
5597 rhsc.truncate (0);
5600 /* Build constraints for propagating clobbers/uses along the
5601 callgraph edges. */
5602 cfi = get_fi_for_callee (call_stmt);
5603 if (cfi->id == anything_id)
5605 if (gimple_vdef (t))
5606 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5607 anything_id);
5608 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5609 anything_id);
5610 return;
5613 /* For callees without function info (that's external functions),
5614 ESCAPED is clobbered and used. */
5615 if (cfi->decl
5616 && TREE_CODE (cfi->decl) == FUNCTION_DECL
5617 && !cfi->is_fn_info)
5619 varinfo_t vi;
5621 if (gimple_vdef (t))
5622 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5623 escaped_id);
5624 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
5626 /* Also honor the call statement use/clobber info. */
5627 if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
5628 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5629 vi->id);
5630 if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
5631 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
5632 vi->id);
5633 return;
5636 /* Otherwise the caller clobbers and uses what the callee does.
5637 ??? This should use a new complex constraint that filters
5638 local variables of the callee. */
5639 if (gimple_vdef (t))
5641 lhs = get_function_part_constraint (fi, fi_clobbers);
5642 rhs = get_function_part_constraint (cfi, fi_clobbers);
5643 process_constraint (new_constraint (lhs, rhs));
5645 lhs = get_function_part_constraint (fi, fi_uses);
5646 rhs = get_function_part_constraint (cfi, fi_uses);
5647 process_constraint (new_constraint (lhs, rhs));
5649 else if (gimple_code (t) == GIMPLE_ASM)
5651 /* ??? Ick. We can do better. */
5652 if (gimple_vdef (t))
5653 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5654 anything_id);
5655 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5656 anything_id);
5661 /* Find the first varinfo in the same variable as START that overlaps with
5662 OFFSET. Return NULL if we can't find one. */
5664 static varinfo_t
5665 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5667 /* If the offset is outside of the variable, bail out. */
5668 if (offset >= start->fullsize)
5669 return NULL;
5671 /* If we cannot reach offset from start, lookup the first field
5672 and start from there. */
5673 if (start->offset > offset)
5674 start = get_varinfo (start->head);
5676 while (start)
5678 /* We may not find a variable in the field list with the actual
5679 offset when we have glommed a structure to a variable.
5680 In that case, however, offset should still be within the size
5681 of the variable. */
5682 if (offset >= start->offset
5683 && (offset - start->offset) < start->size)
5684 return start;
5686 start = vi_next (start);
5689 return NULL;
5692 /* Find the first varinfo in the same variable as START that overlaps with
5693 OFFSET. If there is no such varinfo the varinfo directly preceding
5694 OFFSET is returned. */
5696 static varinfo_t
5697 first_or_preceding_vi_for_offset (varinfo_t start,
5698 unsigned HOST_WIDE_INT offset)
5700 /* If we cannot reach offset from start, lookup the first field
5701 and start from there. */
5702 if (start->offset > offset)
5703 start = get_varinfo (start->head);
5705 /* We may not find a variable in the field list with the actual
5706 offset when we have glommed a structure to a variable.
5707 In that case, however, offset should still be within the size
5708 of the variable.
5709 If we got beyond the offset we look for return the field
5710 directly preceding offset which may be the last field. */
5711 while (start->next
5712 && offset >= start->offset
5713 && !((offset - start->offset) < start->size))
5714 start = vi_next (start);
5716 return start;
5720 /* This structure is used during pushing fields onto the fieldstack
5721 to track the offset of the field, since bitpos_of_field gives it
5722 relative to its immediate containing type, and we want it relative
5723 to the ultimate containing object. */
5725 struct fieldoff
5727 /* Offset from the base of the base containing object to this field. */
5728 HOST_WIDE_INT offset;
5730 /* Size, in bits, of the field. */
5731 unsigned HOST_WIDE_INT size;
/* True if DECL_SIZE of the field is absent or does not fit an
   unsigned HOST_WIDE_INT; SIZE is then meaningless (set to -1).  */
5733 unsigned has_unknown_size : 1;
/* True if the field's type is known to contain pointers
   (see field_must_have_pointers).  */
5735 unsigned must_have_pointers : 1;
/* Conservative flag: the field may contain pointers.  */
5737 unsigned may_have_pointers : 1;
/* True if the field is a restrict-qualified pointer.  */
5739 unsigned only_restrict_pointers : 1;
/* For restrict-qualified pointer fields, the pointed-to type;
   NULL_TREE otherwise.  */
5741 tree restrict_pointed_type;
5743 typedef struct fieldoff fieldoff_s;
5746 /* qsort comparison function for two fieldoff's PA and PB */
5748 static int
5749 fieldoff_compare (const void *pa, const void *pb)
5751 const fieldoff_s *foa = (const fieldoff_s *)pa;
5752 const fieldoff_s *fob = (const fieldoff_s *)pb;
5753 unsigned HOST_WIDE_INT foasize, fobsize;
5755 if (foa->offset < fob->offset)
5756 return -1;
5757 else if (foa->offset > fob->offset)
5758 return 1;
5760 foasize = foa->size;
5761 fobsize = fob->size;
5762 if (foasize < fobsize)
5763 return -1;
5764 else if (foasize > fobsize)
5765 return 1;
5766 return 0;
5769 /* Sort a fieldstack according to the field offset and sizes. */
5770 static void
5771 sort_fieldstack (vec<fieldoff_s> &fieldstack)
5773 fieldstack.qsort (fieldoff_compare);
5776 /* Return true if T is a type that can have subvars. */
5778 static inline bool
5779 type_can_have_subvars (const_tree t)
5781 /* Aggregates without overlapping fields can have subvars. */
5782 return TREE_CODE (t) == RECORD_TYPE;
5785 /* Return true if V is a tree that we can have subvars for.
5786 Normally, this is any aggregate type. Also complex
5787 types which are not gimple registers can have subvars. */
5789 static inline bool
5790 var_can_have_subvars (const_tree v)
5792 /* Volatile variables should never have subvars. */
5793 if (TREE_THIS_VOLATILE (v))
5794 return false;
5796 /* Non decls or memory tags can never have subvars. */
5797 if (!DECL_P (v))
5798 return false;
5800 return type_can_have_subvars (TREE_TYPE (v));
5803 /* Return true if T is a type that does contain pointers. */
5805 static bool
5806 type_must_have_pointers (tree type)
5808 if (POINTER_TYPE_P (type))
5809 return true;
5811 if (TREE_CODE (type) == ARRAY_TYPE)
5812 return type_must_have_pointers (TREE_TYPE (type));
5814 /* A function or method can have pointers as arguments, so track
5815 those separately. */
5816 if (TREE_CODE (type) == FUNCTION_TYPE
5817 || TREE_CODE (type) == METHOD_TYPE)
5818 return true;
5820 return false;
5823 static bool
5824 field_must_have_pointers (tree t)
5826 return type_must_have_pointers (TREE_TYPE (t));
5829 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5830 the fields of TYPE onto fieldstack, recording their offsets along
5831 the way.
5833 OFFSET is used to keep track of the offset in this entire
5834 structure, rather than just the immediately containing structure.
5835 Returns false if the caller is supposed to handle the field we
5836 recursed for. */
5838 static bool
5839 push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
5840 HOST_WIDE_INT offset)
5842 tree field;
5843 bool empty_p = true;
5845 if (TREE_CODE (type) != RECORD_TYPE)
5846 return false;
5848 /* If the vector of fields is growing too big, bail out early.
5849 Callers check for vec::length <= param_max_fields_for_field_sensitive, make
5850 sure this fails. */
5851 if (fieldstack->length () > (unsigned)param_max_fields_for_field_sensitive)
5852 return false;
5854 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5855 if (TREE_CODE (field) == FIELD_DECL)
5857 bool push = false;
5858 HOST_WIDE_INT foff = bitpos_of_field (field);
5859 tree field_type = TREE_TYPE (field);
/* Fields we cannot (or should not) decompose further -- unions and
   anything var_can_have_subvars rejects -- are pushed as a whole.  */
5861 if (!var_can_have_subvars (field)
5862 || TREE_CODE (field_type) == QUAL_UNION_TYPE
5863 || TREE_CODE (field_type) == UNION_TYPE)
5864 push = true;
5865 else if (!push_fields_onto_fieldstack
5866 (field_type, fieldstack, offset + foff)
5867 && (DECL_SIZE (field)
5868 && !integer_zerop (DECL_SIZE (field))))
5869 /* Empty structures may have actual size, like in C++. So
5870 see if we didn't push any subfields and the size is
5871 nonzero, push the field onto the stack. */
5872 push = true;
5874 if (push)
5876 fieldoff_s *pair = NULL;
5877 bool has_unknown_size = false;
5878 bool must_have_pointers_p;
5880 if (!fieldstack->is_empty ())
5881 pair = &fieldstack->last ();
5883 /* If there isn't anything at offset zero, create sth. */
5884 if (!pair
5885 && offset + foff != 0)
/* The padding entry covers bits [0, offset + foff) and is marked
   may_have_pointers but not must_have_pointers.  */
5887 fieldoff_s e
5888 = {0, offset + foff, false, false, true, false, NULL_TREE};
5889 pair = fieldstack->safe_push (e);
5892 if (!DECL_SIZE (field)
5893 || !tree_fits_uhwi_p (DECL_SIZE (field)))
5894 has_unknown_size = true;
5896 /* If adjacent fields do not contain pointers merge them. */
5897 must_have_pointers_p = field_must_have_pointers (field);
5898 if (pair
5899 && !has_unknown_size
5900 && !must_have_pointers_p
5901 && !pair->must_have_pointers
5902 && !pair->has_unknown_size
5903 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5905 pair->size += tree_to_uhwi (DECL_SIZE (field));
5907 else
5909 fieldoff_s e;
5910 e.offset = offset + foff;
5911 e.has_unknown_size = has_unknown_size;
5912 if (!has_unknown_size)
5913 e.size = tree_to_uhwi (DECL_SIZE (field));
5914 else
5915 e.size = -1;
5916 e.must_have_pointers = must_have_pointers_p;
5917 e.may_have_pointers = true;
5918 e.only_restrict_pointers
5919 = (!has_unknown_size
5920 && POINTER_TYPE_P (field_type)
5921 && TYPE_RESTRICT (field_type));
5922 if (e.only_restrict_pointers)
5923 e.restrict_pointed_type = TREE_TYPE (field_type);
5924 fieldstack->safe_push (e);
5928 empty_p = false;
/* True iff at least one (sub-)field was pushed or merged.  */
5931 return !empty_p;
5934 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5935 if it is a varargs function. */
5937 static unsigned int
5938 count_num_arguments (tree decl, bool *is_varargs)
5940 unsigned int num = 0;
5941 tree t;
5943 /* Capture named arguments for K&R functions. They do not
5944 have a prototype and thus no TYPE_ARG_TYPES. */
5945 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5946 ++num;
5948 /* Check if the function has variadic arguments. */
5949 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5950 if (TREE_VALUE (t) == void_type_node)
5951 break;
5952 if (!t)
5953 *is_varargs = true;
5955 return num;
5958 /* Creation function node for DECL, using NAME, and return the index
5959 of the variable we've created for the function. If NONLOCAL_p, create
5960 initial constraints. */
/* The function info is laid out as a chain of sub-fields at offsets
   fi_clobbers, fi_uses, fi_static_chain, fi_result and fi_parm_base + i,
   linked via ->next in strictly increasing offset order (asserted below).  */
5962 static varinfo_t
5963 create_function_info_for (tree decl, const char *name, bool add_id,
5964 bool nonlocal_p)
5966 struct function *fn = DECL_STRUCT_FUNCTION (decl);
5967 varinfo_t vi, prev_vi;
5968 tree arg;
5969 unsigned int i;
5970 bool is_varargs = false;
5971 unsigned int num_args = count_num_arguments (decl, &is_varargs);
5973 /* Create the variable info. */
5975 vi = new_var_info (decl, name, add_id);
5976 vi->offset = 0;
5977 vi->size = 1;
5978 vi->fullsize = fi_parm_base + num_args;
5979 vi->is_fn_info = 1;
5980 vi->may_have_pointers = false;
5981 if (is_varargs)
5982 vi->fullsize = ~0;
5983 insert_vi_for_tree (vi->decl, vi);
5985 prev_vi = vi;
5987 /* Create a variable for things the function clobbers and one for
5988 things the function uses. */
5990 varinfo_t clobbervi, usevi;
5991 const char *newname;
5992 char *tempname;
5994 tempname = xasprintf ("%s.clobber", name);
5995 newname = ggc_strdup (tempname);
5996 free (tempname);
5998 clobbervi = new_var_info (NULL, newname, false);
5999 clobbervi->offset = fi_clobbers;
6000 clobbervi->size = 1;
6001 clobbervi->fullsize = vi->fullsize;
6002 clobbervi->is_full_var = true;
6003 clobbervi->is_global_var = false;
6004 clobbervi->is_reg_var = true;
6006 gcc_assert (prev_vi->offset < clobbervi->offset);
6007 prev_vi->next = clobbervi->id;
6008 prev_vi = clobbervi;
6010 tempname = xasprintf ("%s.use", name);
6011 newname = ggc_strdup (tempname);
6012 free (tempname);
6014 usevi = new_var_info (NULL, newname, false);
6015 usevi->offset = fi_uses;
6016 usevi->size = 1;
6017 usevi->fullsize = vi->fullsize;
6018 usevi->is_full_var = true;
6019 usevi->is_global_var = false;
6020 usevi->is_reg_var = true;
6022 gcc_assert (prev_vi->offset < usevi->offset);
6023 prev_vi->next = usevi->id;
6024 prev_vi = usevi;
6027 /* And one for the static chain. */
6028 if (fn->static_chain_decl != NULL_TREE)
6030 varinfo_t chainvi;
6031 const char *newname;
6032 char *tempname;
6034 tempname = xasprintf ("%s.chain", name);
6035 newname = ggc_strdup (tempname);
6036 free (tempname);
6038 chainvi = new_var_info (fn->static_chain_decl, newname, false);
6039 chainvi->offset = fi_static_chain;
6040 chainvi->size = 1;
6041 chainvi->fullsize = vi->fullsize;
6042 chainvi->is_full_var = true;
6043 chainvi->is_global_var = false;
6045 insert_vi_for_tree (fn->static_chain_decl, chainvi);
6047 if (nonlocal_p
6048 && chainvi->may_have_pointers)
6049 make_constraint_from (chainvi, nonlocal_id);
6051 gcc_assert (prev_vi->offset < chainvi->offset);
6052 prev_vi->next = chainvi->id;
6053 prev_vi = chainvi;
6056 /* Create a variable for the return var. */
6057 if (DECL_RESULT (decl) != NULL
6058 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
6060 varinfo_t resultvi;
6061 const char *newname;
6062 char *tempname;
6063 tree resultdecl = decl;
6065 if (DECL_RESULT (decl))
6066 resultdecl = DECL_RESULT (decl);
6068 tempname = xasprintf ("%s.result", name);
6069 newname = ggc_strdup (tempname);
6070 free (tempname);
6072 resultvi = new_var_info (resultdecl, newname, false);
6073 resultvi->offset = fi_result;
6074 resultvi->size = 1;
6075 resultvi->fullsize = vi->fullsize;
6076 resultvi->is_full_var = true;
6077 if (DECL_RESULT (decl))
6078 resultvi->may_have_pointers = true;
6080 if (DECL_RESULT (decl))
6081 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
6083 if (nonlocal_p
6084 && DECL_RESULT (decl)
6085 && DECL_BY_REFERENCE (DECL_RESULT (decl)))
6086 make_constraint_from (resultvi, nonlocal_id);
6088 gcc_assert (prev_vi->offset < resultvi->offset);
6089 prev_vi->next = resultvi->id;
6090 prev_vi = resultvi;
6093 /* We also need to make function return values escape. Nothing
6094 escapes by returning from main though. */
6095 if (nonlocal_p
6096 && !MAIN_NAME_P (DECL_NAME (decl)))
6098 varinfo_t fi, rvi;
6099 fi = lookup_vi_for_tree (decl);
6100 rvi = first_vi_for_offset (fi, fi_result);
6101 if (rvi && rvi->offset == fi_result)
6102 make_copy_constraint (get_varinfo (escaped_id), rvi->id);
6105 /* Set up variables for each argument. */
6106 arg = DECL_ARGUMENTS (decl);
6107 for (i = 0; i < num_args; i++)
6109 varinfo_t argvi;
6110 const char *newname;
6111 char *tempname;
6112 tree argdecl = decl;
/* ARG may run out before NUM_ARGS for unprototyped functions; fall
   back to DECL itself as the placeholder decl in that case.  */
6114 if (arg)
6115 argdecl = arg;
6117 tempname = xasprintf ("%s.arg%d", name, i);
6118 newname = ggc_strdup (tempname);
6119 free (tempname);
6121 argvi = new_var_info (argdecl, newname, false);
6122 argvi->offset = fi_parm_base + i;
6123 argvi->size = 1;
6124 argvi->is_full_var = true;
6125 argvi->fullsize = vi->fullsize;
6126 if (arg)
6127 argvi->may_have_pointers = true;
6129 if (arg)
6130 insert_vi_for_tree (arg, argvi);
6132 if (nonlocal_p
6133 && argvi->may_have_pointers)
6134 make_constraint_from (argvi, nonlocal_id);
6136 gcc_assert (prev_vi->offset < argvi->offset);
6137 prev_vi->next = argvi->id;
6138 prev_vi = argvi;
6139 if (arg)
6140 arg = DECL_CHAIN (arg);
6143 /* Add one representative for all further args. */
6144 if (is_varargs)
6146 varinfo_t argvi;
6147 const char *newname;
6148 char *tempname;
6149 tree decl;
6151 tempname = xasprintf ("%s.varargs", name);
6152 newname = ggc_strdup (tempname);
6153 free (tempname);
6155 /* We need sth that can be pointed to for va_start. */
6156 decl = build_fake_var_decl (ptr_type_node);
6158 argvi = new_var_info (decl, newname, false);
6159 argvi->offset = fi_parm_base + num_args;
6160 argvi->size = ~0;
6161 argvi->is_full_var = true;
6162 argvi->is_heap_var = true;
6163 argvi->fullsize = vi->fullsize;
6165 if (nonlocal_p
6166 && argvi->may_have_pointers)
6167 make_constraint_from (argvi, nonlocal_id);
6169 gcc_assert (prev_vi->offset < argvi->offset);
6170 prev_vi->next = argvi->id;
6173 return vi;
6177 /* Return true if FIELDSTACK contains fields that overlap.
6178 FIELDSTACK is assumed to be sorted by offset. */
6180 static bool
6181 check_for_overlaps (const vec<fieldoff_s> &fieldstack)
6183 fieldoff_s *fo = NULL;
6184 unsigned int i;
6185 HOST_WIDE_INT lastoffset = -1;
6187 FOR_EACH_VEC_ELT (fieldstack, i, fo)
6189 if (fo->offset == lastoffset)
6190 return true;
6191 lastoffset = fo->offset;
6193 return false;
6196 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
6197 This will also create any varinfo structures necessary for fields
6198 of DECL. DECL is a function parameter if HANDLE_PARAM is set.
6199 HANDLED_STRUCT_TYPE is used to register struct types reached by following
6200 restrict pointers. This is needed to prevent infinite recursion.
6201 If ADD_RESTRICT, pretend that the pointer NAME is restrict even if DECL
6202 does not advertise it. */
6204 static varinfo_t
6205 create_variable_info_for_1 (tree decl, const char *name, bool add_id,
6206 bool handle_param, bitmap handled_struct_type,
6207 bool add_restrict = false)
6209 varinfo_t vi, newvi;
6210 tree decl_type = TREE_TYPE (decl);
6211 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
6212 auto_vec<fieldoff_s> fieldstack;
6213 fieldoff_s *fo;
6214 unsigned int i;
/* Variables of unknown or non-constant size get a single maximally
   conservative varinfo.  */
6216 if (!declsize
6217 || !tree_fits_uhwi_p (declsize))
6219 vi = new_var_info (decl, name, add_id);
6220 vi->offset = 0;
6221 vi->size = ~0;
6222 vi->fullsize = ~0;
6223 vi->is_unknown_size_var = true;
6224 vi->is_full_var = true;
6225 vi->may_have_pointers = true;
6226 return vi;
6229 /* Collect field information. */
6230 if (use_field_sensitive
6231 && var_can_have_subvars (decl)
6232 /* ??? Force us to not use subfields for globals in IPA mode.
6233 Else we'd have to parse arbitrary initializers. */
6234 && !(in_ipa_mode
6235 && is_global_var (decl)))
6237 fieldoff_s *fo = NULL;
6238 bool notokay = false;
6239 unsigned int i;
6241 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
6243 for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
6244 if (fo->has_unknown_size
6245 || fo->offset < 0)
6247 notokay = true;
6248 break;
6251 /* We can't sort them if we have a field with a variable sized type,
6252 which will make notokay = true. In that case, we are going to return
6253 without creating varinfos for the fields anyway, so sorting them is a
6254 waste to boot. */
6255 if (!notokay)
6257 sort_fieldstack (fieldstack);
6258 /* Due to some C++ FE issues, like PR 22488, we might end up
6259 what appear to be overlapping fields even though they,
6260 in reality, do not overlap. Until the C++ FE is fixed,
6261 we will simply disable field-sensitivity for these cases. */
6262 notokay = check_for_overlaps (fieldstack);
6265 if (notokay)
6266 fieldstack.release ();
6269 /* If we didn't end up collecting sub-variables create a full
6270 variable for the decl. */
6271 if (fieldstack.length () == 0
6272 || fieldstack.length () > (unsigned)param_max_fields_for_field_sensitive)
6274 vi = new_var_info (decl, name, add_id);
6275 vi->offset = 0;
6276 vi->may_have_pointers = true;
6277 vi->fullsize = tree_to_uhwi (declsize);
6278 vi->size = vi->fullsize;
6279 vi->is_full_var = true;
6280 if (POINTER_TYPE_P (decl_type)
6281 && (TYPE_RESTRICT (decl_type) || add_restrict))
6282 vi->only_restrict_pointers = 1;
/* For restrict-qualified pointer parameters, create a fake heap
   variable the pointer is constrained to point to.  HANDLED_STRUCT_TYPE
   guards the recursion against cyclic struct types.  */
6283 if (vi->only_restrict_pointers
6284 && !type_contains_placeholder_p (TREE_TYPE (decl_type))
6285 && handle_param
6286 && !bitmap_bit_p (handled_struct_type,
6287 TYPE_UID (TREE_TYPE (decl_type))))
6289 varinfo_t rvi;
6290 tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type));
6291 DECL_EXTERNAL (heapvar) = 1;
6292 if (var_can_have_subvars (heapvar))
6293 bitmap_set_bit (handled_struct_type,
6294 TYPE_UID (TREE_TYPE (decl_type)));
6295 rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
6296 true, handled_struct_type);
6297 if (var_can_have_subvars (heapvar))
6298 bitmap_clear_bit (handled_struct_type,
6299 TYPE_UID (TREE_TYPE (decl_type)));
6300 rvi->is_restrict_var = 1;
6301 insert_vi_for_tree (heapvar, rvi);
6302 make_constraint_from (vi, rvi->id);
6303 make_param_constraints (rvi);
6305 fieldstack.release ();
6306 return vi;
/* Otherwise create one varinfo per collected field, chained via
   ->next in fieldstack (offset-sorted) order.  */
6309 vi = new_var_info (decl, name, add_id);
6310 vi->fullsize = tree_to_uhwi (declsize);
6311 if (fieldstack.length () == 1)
6312 vi->is_full_var = true;
6313 for (i = 0, newvi = vi;
6314 fieldstack.iterate (i, &fo);
6315 ++i, newvi = vi_next (newvi))
6317 const char *newname = NULL;
6318 char *tempname;
6320 if (dump_file)
6322 if (fieldstack.length () != 1)
6324 tempname
6325 = xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
6326 "+" HOST_WIDE_INT_PRINT_DEC, name,
6327 fo->offset, fo->size);
6328 newname = ggc_strdup (tempname);
6329 free (tempname);
6332 else
6333 newname = "NULL";
6335 if (newname)
6336 newvi->name = newname;
6337 newvi->offset = fo->offset;
6338 newvi->size = fo->size;
6339 newvi->fullsize = vi->fullsize;
6340 newvi->may_have_pointers = fo->may_have_pointers;
6341 newvi->only_restrict_pointers = fo->only_restrict_pointers;
6342 if (handle_param
6343 && newvi->only_restrict_pointers
6344 && !type_contains_placeholder_p (fo->restrict_pointed_type)
6345 && !bitmap_bit_p (handled_struct_type,
6346 TYPE_UID (fo->restrict_pointed_type)))
6348 varinfo_t rvi;
6349 tree heapvar = build_fake_var_decl (fo->restrict_pointed_type);
6350 DECL_EXTERNAL (heapvar) = 1;
6351 if (var_can_have_subvars (heapvar))
6352 bitmap_set_bit (handled_struct_type,
6353 TYPE_UID (fo->restrict_pointed_type));
6354 rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
6355 true, handled_struct_type);
6356 if (var_can_have_subvars (heapvar))
6357 bitmap_clear_bit (handled_struct_type,
6358 TYPE_UID (fo->restrict_pointed_type));
6359 rvi->is_restrict_var = 1;
6360 insert_vi_for_tree (heapvar, rvi);
6361 make_constraint_from (newvi, rvi->id);
6362 make_param_constraints (rvi);
6364 if (i + 1 < fieldstack.length ())
6366 varinfo_t tem = new_var_info (decl, name, false);
6367 newvi->next = tem->id;
6368 tem->head = vi->id;
6372 return vi;
/* Create the varinfo for DECL (named NAME), register it in the
   tree->varinfo map and emit its initial constraints.  Returns the id
   of the varinfo created.  */
6375 static unsigned int
6376 create_variable_info_for (tree decl, const char *name, bool add_id)
6378 /* First see if we are dealing with an ifunc resolver call and
6379 associate that with a call to the resolver function result. */
6380 cgraph_node *node;
6381 if (in_ipa_mode
6382 && TREE_CODE (decl) == FUNCTION_DECL
6383 && (node = cgraph_node::get (decl))
6384 && node->ifunc_resolver)
6386 varinfo_t fi = get_vi_for_tree (node->get_alias_target ()->decl);
6387 constraint_expr rhs
6388 = get_function_part_constraint (fi, fi_result);
6389 fi = new_var_info (NULL_TREE, "ifuncres", true);
6390 fi->is_reg_var = true;
6391 constraint_expr lhs;
6392 lhs.type = SCALAR;
6393 lhs.var = fi->id;
6394 lhs.offset = 0;
6395 process_constraint (new_constraint (lhs, rhs));
6396 insert_vi_for_tree (decl, fi);
6397 return fi->id;
6400 varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, false, NULL);
6401 unsigned int id = vi->id;
6403 insert_vi_for_tree (decl, vi);
/* Only VAR_DECLs get the global-initializer constraints below.  */
6405 if (!VAR_P (decl))
6406 return id;
6408 /* Create initial constraints for globals. */
6409 for (; vi; vi = vi_next (vi))
6411 if (!vi->may_have_pointers
6412 || !vi->is_global_var)
6413 continue;
6415 /* Mark global restrict qualified pointers. */
6416 if ((POINTER_TYPE_P (TREE_TYPE (decl))
6417 && TYPE_RESTRICT (TREE_TYPE (decl)))
6418 || vi->only_restrict_pointers)
6420 varinfo_t rvi
6421 = make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT",
6422 true);
6423 /* ??? For now exclude reads from globals as restrict sources
6424 if those are not (indirectly) from incoming parameters. */
6425 rvi->is_restrict_var = false;
6426 continue;
6429 /* In non-IPA mode the initializer from nonlocal is all we need. */
6430 if (!in_ipa_mode
6431 || DECL_HARD_REGISTER (decl))
6432 make_copy_constraint (vi, nonlocal_id);
6434 /* In IPA mode parse the initializer and generate proper constraints
6435 for it. */
6436 else
6438 varpool_node *vnode = varpool_node::get (decl);
6440 /* For escaped variables initialize them from nonlocal. */
6441 if (!vnode->all_refs_explicit_p ())
6442 make_copy_constraint (vi, nonlocal_id);
6444 /* If this is a global variable with an initializer and we are in
6445 IPA mode generate constraints for it. */
6446 ipa_ref *ref;
6447 for (unsigned idx = 0; vnode->iterate_reference (idx, ref); ++idx)
6449 auto_vec<ce_s> rhsc;
6450 struct constraint_expr lhs, *rhsp;
6451 unsigned i;
6452 get_constraint_for_address_of (ref->referred->decl, &rhsc);
6453 lhs.var = vi->id;
6454 lhs.offset = 0;
6455 lhs.type = SCALAR;
6456 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6457 process_constraint (new_constraint (lhs, *rhsp));
6458 /* If this is a variable that escapes from the unit
6459 the initializer escapes as well. */
6460 if (!vnode->all_refs_explicit_p ())
6462 lhs.var = escaped_id;
6463 lhs.offset = 0;
6464 lhs.type = SCALAR;
6465 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6466 process_constraint (new_constraint (lhs, *rhsp));
6472 return id;
6475 /* Print out the points-to solution for VAR to FILE. */
6477 static void
6478 dump_solution_for_var (FILE *file, unsigned int var)
6480 varinfo_t vi = get_varinfo (var);
6481 unsigned int i;
6482 bitmap_iterator bi;
6484 /* Dump the solution for unified vars anyway, this avoids difficulties
6485 in scanning dumps in the testsuite. */
6486 fprintf (file, "%s = { ", vi->name);
6487 vi = get_varinfo (find (var));
6488 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6489 fprintf (file, "%s ", get_varinfo (i)->name);
6490 fprintf (file, "}");
6492 /* But note when the variable was unified. */
6493 if (vi->id != var)
6494 fprintf (file, " same as %s", vi->name);
6496 fprintf (file, "\n");
6499 /* Print the points-to solution for VAR to stderr. */
6501 DEBUG_FUNCTION void
6502 debug_solution_for_var (unsigned int var)
6504 dump_solution_for_var (stderr, var);
6507 /* Register the constraints for function parameter related VI. */
6509 static void
6510 make_param_constraints (varinfo_t vi)
6512 for (; vi; vi = vi_next (vi))
6514 if (vi->only_restrict_pointers)
6516 else if (vi->may_have_pointers)
6517 make_constraint_from (vi, nonlocal_id);
6519 if (vi->is_full_var)
6520 break;
6524 /* Create varinfo structures for all of the variables in the
6525 function for intraprocedural mode. */
6527 static void
6528 intra_create_variable_infos (struct function *fn)
6530 tree t;
6531 bitmap handled_struct_type = NULL;
/* The first ("this") parameter of a C++ constructor is treated as
   restrict-qualified (passed as ADD_RESTRICT below); the flag is
   cleared after the first parameter.  */
6532 bool this_parm_in_ctor = DECL_CXX_CONSTRUCTOR_P (fn->decl);
6534 /* For each incoming pointer argument arg, create the constraint ARG
6535 = NONLOCAL or a dummy variable if it is a restrict qualified
6536 passed-by-reference argument. */
6537 for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
/* Lazily allocate the recursion-guard bitmap for restrict handling.  */
6539 if (handled_struct_type == NULL)
6540 handled_struct_type = BITMAP_ALLOC (NULL)
6542 varinfo_t p
6543 = create_variable_info_for_1 (t, alias_get_name (t), false, true,
6544 handled_struct_type, this_parm_in_ctor);
6545 insert_vi_for_tree (t, p);
6547 make_param_constraints (p);
6549 this_parm_in_ctor = false;
6552 if (handled_struct_type != NULL)
6553 BITMAP_FREE (handled_struct_type);
6555 /* Add a constraint for a result decl that is passed by reference. */
6556 if (DECL_RESULT (fn->decl)
6557 && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
6559 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));
6561 for (p = result_vi; p; p = vi_next (p))
6562 make_constraint_from (p, nonlocal_id);
6565 /* Add a constraint for the incoming static chain parameter. */
6566 if (fn->static_chain_decl != NULL_TREE)
6568 varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);
6570 for (p = chain_vi; p; p = vi_next (p))
6571 make_constraint_from (p, nonlocal_id);
6575 /* Structure used to put solution bitmaps in a hashtable so they can
6576 be shared among variables with the same points-to set. */
6578 typedef struct shared_bitmap_info
6580 bitmap pt_vars;
/* Hash of PT_VARS, computed once when the entry is created
   (see shared_bitmap_add).  */
6581 hashval_t hashcode;
6582 } *shared_bitmap_info_t;
6583 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
6585 /* Shared_bitmap hashtable helpers. */
/* Hasher for shared_bitmap_info entries: hashing uses the precomputed
   hashcode, equality compares the bitmap contents.  */
6587 struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
6589 static inline hashval_t hash (const shared_bitmap_info *);
6590 static inline bool equal (const shared_bitmap_info *,
6591 const shared_bitmap_info *);
6594 /* Hash function for a shared_bitmap_info_t */
6596 inline hashval_t
6597 shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
6599 return bi->hashcode;
6602 /* Equality function for two shared_bitmap_info_t's. */
6604 inline bool
6605 shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
6606 const shared_bitmap_info *sbi2)
6608 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
/* Shared_bitmap hashtable, keyed by bitmap contents, used by
   shared_bitmap_lookup/shared_bitmap_add below.  */
6613 static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
6615 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
6616 existing instance if there is one, NULL otherwise. */
6618 static bitmap
6619 shared_bitmap_lookup (bitmap pt_vars)
6621 shared_bitmap_info **slot;
6622 struct shared_bitmap_info sbi;
6624 sbi.pt_vars = pt_vars;
6625 sbi.hashcode = bitmap_hash (pt_vars);
6627 slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
6628 if (!slot)
6629 return NULL;
6630 else
6631 return (*slot)->pt_vars;
6635 /* Add a bitmap to the shared bitmap hashtable. */
6637 static void
6638 shared_bitmap_add (bitmap pt_vars)
6640 shared_bitmap_info **slot;
6641 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
6643 sbi->pt_vars = pt_vars;
6644 sbi->hashcode = bitmap_hash (pt_vars);
6646 slot = shared_bitmap_table->find_slot (sbi, INSERT);
6647 gcc_assert (!*slot);
6648 *slot = sbi;
6652 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
/* As a side effect update the vars_contains_* flags of PT to classify the
   variables in FROM (escaped, restrict, nonlocal, interposable).  FNDECL,
   when non-NULL, is the function the solution belongs to; it is only used
   in IPA mode to treat variables not automatic within FNDECL as
   nonlocal.  */
6654 static void
6655 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
6656 tree fndecl)
6658 unsigned int i;
6659 bitmap_iterator bi;
6660 varinfo_t escaped_vi = get_varinfo (find (escaped_id));
/* If ESCAPED contains ANYTHING then every member of FROM counts as
   escaped without an individual bitmap test.  */
6661 bool everything_escaped
6662 = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);
6664 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
6666 varinfo_t vi = get_varinfo (i);
/* Artificial variables are not entered into points-to bitmaps; they
   are reflected via pt_solution flags by the caller (see
   find_what_var_points_to).  */
6668 if (vi->is_artificial_var)
6669 continue;
6671 if (everything_escaped
6672 || (escaped_vi->solution
6673 && bitmap_bit_p (escaped_vi->solution, i)))
6675 pt->vars_contains_escaped = true;
6676 pt->vars_contains_escaped_heap |= vi->is_heap_var;
6679 if (vi->is_restrict_var)
6680 pt->vars_contains_restrict = true;
6682 if (VAR_P (vi->decl)
6683 || TREE_CODE (vi->decl) == PARM_DECL
6684 || TREE_CODE (vi->decl) == RESULT_DECL)
6686 /* If we are in IPA mode we will not recompute points-to
6687 sets after inlining so make sure they stay valid. */
6688 if (in_ipa_mode
6689 && !DECL_PT_UID_SET_P (vi->decl))
6690 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
6692 /* Add the decl to the points-to set. Note that the points-to
6693 set contains global variables. */
6694 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
6695 if (vi->is_global_var
6696 /* In IPA mode the escaped_heap trick doesn't work as
6697 ESCAPED is escaped from the unit but
6698 pt_solution_includes_global needs to answer true for
6699 all variables not automatic within a function.
6700 For the same reason is_global_var is not the
6701 correct flag to track - local variables from other
6702 functions also need to be considered global.
6703 Conveniently all HEAP vars are not put in function
6704 scope. */
6705 || (in_ipa_mode
6706 && fndecl
6707 && ! auto_var_in_fn_p (vi->decl, fndecl)))
6708 pt->vars_contains_nonlocal = true;
6710 /* If we have a variable that is interposable record that fact
6711 for pointer comparison simplification. */
6712 if (VAR_P (vi->decl)
6713 && (TREE_STATIC (vi->decl) || DECL_EXTERNAL (vi->decl))
6714 && ! decl_binds_to_current_def_p (vi->decl))
6715 pt->vars_contains_interposable = true;
6717 /* If this is a local variable we can have overlapping lifetime
6718 of different function invocations through recursion duplicate
6719 it with its shadow variable. */
6720 if (in_ipa_mode
6721 && vi->shadow_var_uid != 0)
6723 bitmap_set_bit (into, vi->shadow_var_uid);
6724 pt->vars_contains_nonlocal = true;
6728 else if (TREE_CODE (vi->decl) == FUNCTION_DECL
6729 || TREE_CODE (vi->decl) == LABEL_DECL)
6731 /* Nothing should read/write from/to code so we can
6732 save bits by not including them in the points-to bitmaps.
6733 Still mark the points-to set as containing global memory
6734 to make code-patching possible - see PR70128. */
6735 pt->vars_contains_nonlocal = true;
6741 /* Compute the points-to solution *PT for the variable VI. */
/* Returns the solution for ORIG_VI (after union-find collapsing),
   caching it in FINAL_SOLUTIONS.  Artificial solution members are
   translated into pt_solution flags; the remaining variable uids are
   collected into a bitmap that is shared via the shared bitmap table.
   FNDECL is passed through to set_uids_in_ptset for IPA-mode
   classification.  */
6743 static struct pt_solution
6744 find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
6746 unsigned int i;
6747 bitmap_iterator bi;
6748 bitmap finished_solution;
6749 bitmap result;
6750 varinfo_t vi;
6751 struct pt_solution *pt;
6753 /* This variable may have been collapsed, let's get the real
6754 variable. */
6755 vi = get_varinfo (find (orig_vi->id));
6757 /* See if we have already computed the solution and return it. */
6758 pt_solution **slot = &final_solutions->get_or_insert (vi);
6759 if (*slot != NULL)
6760 return **slot;
6762 *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
6763 memset (pt, 0, sizeof (struct pt_solution));
6765 /* Translate artificial variables into SSA_NAME_PTR_INFO
6766 attributes. */
6767 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6769 varinfo_t vi = get_varinfo (i);
6771 if (vi->is_artificial_var)
6773 if (vi->id == nothing_id)
6774 pt->null = 1;
6775 else if (vi->id == escaped_id)
6777 if (in_ipa_mode)
6778 pt->ipa_escaped = 1;
6779 else
6780 pt->escaped = 1;
6781 /* Expand some special vars of ESCAPED in-place here. */
6782 varinfo_t evi = get_varinfo (find (escaped_id));
6783 if (bitmap_bit_p (evi->solution, nonlocal_id))
6784 pt->nonlocal = 1;
6786 else if (vi->id == nonlocal_id)
6787 pt->nonlocal = 1;
6788 else if (vi->id == string_id)
6789 /* Nobody cares - STRING_CSTs are read-only entities. */
6791 else if (vi->id == anything_id
6792 || vi->id == integer_id)
6793 pt->anything = 1;
6797 /* Instead of doing extra work, simply do not create
6798 elaborate points-to information for pt_anything pointers. */
6799 if (pt->anything)
6800 return *pt;
6802 /* Share the final set of variables when possible. */
6803 finished_solution = BITMAP_GGC_ALLOC ();
6804 stats.points_to_sets_created++;
6806 set_uids_in_ptset (finished_solution, vi->solution, pt, fndecl);
6807 result = shared_bitmap_lookup (finished_solution);
6808 if (!result)
6810 shared_bitmap_add (finished_solution);
6811 pt->vars = finished_solution;
6813 else
6815 pt->vars = result;
/* An equal bitmap already exists; use the shared copy and release
   the contents of the one we just built.  */
6816 bitmap_clear (finished_solution);
6819 return *pt;
6822 /* Given a pointer variable P, fill in its points-to set. */
/* FNDECL is the function P belongs to.  The function's range query is
   consulted up front so a globally computed nonnull property survives
   the conservative pt.null = 1 setting below.  */
6824 static void
6825 find_what_p_points_to (tree fndecl, tree p)
6827 struct ptr_info_def *pi;
6828 tree lookup_p = p;
6829 varinfo_t vi;
6830 value_range vr;
6831 get_range_query (DECL_STRUCT_FUNCTION (fndecl))->range_of_expr (vr, p);
6832 bool nonnull = vr.nonzero_p ();
6834 /* For parameters, get at the points-to set for the actual parm
6835 decl. */
6836 if (TREE_CODE (p) == SSA_NAME
6837 && SSA_NAME_IS_DEFAULT_DEF (p)
6838 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
6839 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
6840 lookup_p = SSA_NAME_VAR (p);
6842 vi = lookup_vi_for_tree (lookup_p);
/* No variable info means no constraints were generated for P; leave
   its points-to information untouched.  */
6843 if (!vi)
6844 return;
6846 pi = get_ptr_info (p);
6847 pi->pt = find_what_var_points_to (fndecl, vi);
6848 /* Conservatively set to NULL from PTA (to true). */
6849 pi->pt.null = 1;
6850 /* Preserve pointer nonnull globally computed. */
6851 if (nonnull)
6852 set_ptr_nonnull (p);
6856 /* Query statistics for points-to solutions. */
/* The *_no_alias counters record successful disambiguations, the
   *_may_alias counters record queries that had to answer "may alias" /
   "is included".  Incremented by pt_solution_includes and
   pt_solutions_intersect; printed by dump_pta_stats.  */
6858 static struct {
6859 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
6860 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
6861 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
6862 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
6863 } pta_stats;
6865 void
6866 dump_pta_stats (FILE *s)
6868 fprintf (s, "\nPTA query stats:\n");
6869 fprintf (s, " pt_solution_includes: "
6870 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6871 HOST_WIDE_INT_PRINT_DEC" queries\n",
6872 pta_stats.pt_solution_includes_no_alias,
6873 pta_stats.pt_solution_includes_no_alias
6874 + pta_stats.pt_solution_includes_may_alias);
6875 fprintf (s, " pt_solutions_intersect: "
6876 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6877 HOST_WIDE_INT_PRINT_DEC" queries\n",
6878 pta_stats.pt_solutions_intersect_no_alias,
6879 pta_stats.pt_solutions_intersect_no_alias
6880 + pta_stats.pt_solutions_intersect_may_alias);
6884 /* Reset the points-to solution *PT to a conservative default
6885 (point to anything). */
6887 void
6888 pt_solution_reset (struct pt_solution *pt)
6890 memset (pt, 0, sizeof (struct pt_solution));
6891 pt->anything = true;
6892 pt->null = true;
6895 /* Set the points-to solution *PT to point only to the variables
6896 in VARS. VARS_CONTAINS_GLOBAL specifies whether that contains
6897 global variables and VARS_CONTAINS_RESTRICT specifies whether
6898 it contains restrict tag variables. */
6900 void
6901 pt_solution_set (struct pt_solution *pt, bitmap vars,
6902 bool vars_contains_nonlocal)
6904 memset (pt, 0, sizeof (struct pt_solution));
6905 pt->vars = vars;
6906 pt->vars_contains_nonlocal = vars_contains_nonlocal;
6907 pt->vars_contains_escaped
6908 = (cfun->gimple_df->escaped.anything
6909 || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
6912 /* Set the points-to solution *PT to point only to the variable VAR. */
6914 void
6915 pt_solution_set_var (struct pt_solution *pt, tree var)
6917 memset (pt, 0, sizeof (struct pt_solution));
6918 pt->vars = BITMAP_GGC_ALLOC ();
6919 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6920 pt->vars_contains_nonlocal = is_global_var (var);
6921 pt->vars_contains_escaped
6922 = (cfun->gimple_df->escaped.anything
6923 || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
6926 /* Computes the union of the points-to solutions *DEST and *SRC and
6927 stores the result in *DEST. This changes the points-to bitmap
6928 of *DEST and thus may not be used if that might be shared.
6929 The points-to bitmap of *SRC and *DEST will not be shared after
6930 this function if they were not before. */
6932 static void
6933 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6935 dest->anything |= src->anything;
6936 if (dest->anything)
6938 pt_solution_reset (dest);
6939 return;
6942 dest->nonlocal |= src->nonlocal;
6943 dest->escaped |= src->escaped;
6944 dest->ipa_escaped |= src->ipa_escaped;
6945 dest->null |= src->null;
6946 dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
6947 dest->vars_contains_escaped |= src->vars_contains_escaped;
6948 dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
6949 if (!src->vars)
6950 return;
6952 if (!dest->vars)
6953 dest->vars = BITMAP_GGC_ALLOC ();
6954 bitmap_ior_into (dest->vars, src->vars);
6957 /* Return true if the points-to solution *PT is empty. */
6959 bool
6960 pt_solution_empty_p (const pt_solution *pt)
6962 if (pt->anything
6963 || pt->nonlocal)
6964 return false;
6966 if (pt->vars
6967 && !bitmap_empty_p (pt->vars))
6968 return false;
6970 /* If the solution includes ESCAPED, check if that is empty. */
6971 if (pt->escaped
6972 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6973 return false;
6975 /* If the solution includes ESCAPED, check if that is empty. */
6976 if (pt->ipa_escaped
6977 && !pt_solution_empty_p (&ipa_escaped_pt))
6978 return false;
6980 return true;
6983 /* Return true if the points-to solution *PT only point to a single var, and
6984 return the var uid in *UID. */
6986 bool
6987 pt_solution_singleton_or_null_p (struct pt_solution *pt, unsigned *uid)
6989 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6990 || pt->vars == NULL
6991 || !bitmap_single_bit_set_p (pt->vars))
6992 return false;
6994 *uid = bitmap_first_set_bit (pt->vars);
6995 return true;
6998 /* Return true if the points-to solution *PT includes global memory.
6999 If ESCAPED_LOCAL_P is true then escaped local variables are also
7000 considered global. */
7002 bool
7003 pt_solution_includes_global (struct pt_solution *pt, bool escaped_local_p)
7005 if (pt->anything
7006 || pt->nonlocal
7007 || pt->vars_contains_nonlocal
7008 /* The following is a hack to make the malloc escape hack work.
7009 In reality we'd need different sets for escaped-through-return
7010 and escaped-to-callees and passes would need to be updated. */
7011 || pt->vars_contains_escaped_heap)
7012 return true;
7014 if (escaped_local_p && pt->vars_contains_escaped)
7015 return true;
7017 /* 'escaped' is also a placeholder so we have to look into it. */
7018 if (pt->escaped)
7019 return pt_solution_includes_global (&cfun->gimple_df->escaped,
7020 escaped_local_p);
7022 if (pt->ipa_escaped)
7023 return pt_solution_includes_global (&ipa_escaped_pt,
7024 escaped_local_p);
7026 return false;
7029 /* Return true if the points-to solution *PT includes the variable
7030 declaration DECL. */
7032 static bool
7033 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
7035 if (pt->anything)
7036 return true;
7038 if (pt->nonlocal
7039 && is_global_var (decl))
7040 return true;
7042 if (pt->vars
7043 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
7044 return true;
7046 /* If the solution includes ESCAPED, check it. */
7047 if (pt->escaped
7048 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
7049 return true;
7051 /* If the solution includes ESCAPED, check it. */
7052 if (pt->ipa_escaped
7053 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
7054 return true;
7056 return false;
7059 bool
7060 pt_solution_includes (struct pt_solution *pt, const_tree decl)
7062 bool res = pt_solution_includes_1 (pt, decl);
7063 if (res)
7064 ++pta_stats.pt_solution_includes_may_alias;
7065 else
7066 ++pta_stats.pt_solution_includes_no_alias;
7067 return res;
7070 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
7071 intersection. */
7073 static bool
7074 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
7076 if (pt1->anything || pt2->anything)
7077 return true;
7079 /* If either points to unknown global memory and the other points to
7080 any global memory they alias. */
7081 if ((pt1->nonlocal
7082 && (pt2->nonlocal
7083 || pt2->vars_contains_nonlocal))
7084 || (pt2->nonlocal
7085 && pt1->vars_contains_nonlocal))
7086 return true;
7088 /* If either points to all escaped memory and the other points to
7089 any escaped memory they alias. */
7090 if ((pt1->escaped
7091 && (pt2->escaped
7092 || pt2->vars_contains_escaped))
7093 || (pt2->escaped
7094 && pt1->vars_contains_escaped))
7095 return true;
7097 /* Check the escaped solution if required.
7098 ??? Do we need to check the local against the IPA escaped sets? */
7099 if ((pt1->ipa_escaped || pt2->ipa_escaped)
7100 && !pt_solution_empty_p (&ipa_escaped_pt))
7102 /* If both point to escaped memory and that solution
7103 is not empty they alias. */
7104 if (pt1->ipa_escaped && pt2->ipa_escaped)
7105 return true;
7107 /* If either points to escaped memory see if the escaped solution
7108 intersects with the other. */
7109 if ((pt1->ipa_escaped
7110 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
7111 || (pt2->ipa_escaped
7112 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
7113 return true;
7116 /* Now both pointers alias if their points-to solution intersects. */
7117 return (pt1->vars
7118 && pt2->vars
7119 && bitmap_intersect_p (pt1->vars, pt2->vars));
7122 bool
7123 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
7125 bool res = pt_solutions_intersect_1 (pt1, pt2);
7126 if (res)
7127 ++pta_stats.pt_solutions_intersect_may_alias;
7128 else
7129 ++pta_stats.pt_solutions_intersect_no_alias;
7130 return res;
7134 /* Dump points-to information to OUTFILE. */
7136 static void
7137 dump_sa_points_to_info (FILE *outfile)
7139 unsigned int i;
7141 fprintf (outfile, "\nPoints-to sets\n\n");
7143 if (dump_flags & TDF_STATS)
7145 fprintf (outfile, "Stats:\n");
7146 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
7147 fprintf (outfile, "Non-pointer vars: %d\n",
7148 stats.nonpointer_vars);
7149 fprintf (outfile, "Statically unified vars: %d\n",
7150 stats.unified_vars_static);
7151 fprintf (outfile, "Dynamically unified vars: %d\n",
7152 stats.unified_vars_dynamic);
7153 fprintf (outfile, "Iterations: %d\n", stats.iterations);
7154 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
7155 fprintf (outfile, "Number of implicit edges: %d\n",
7156 stats.num_implicit_edges);
7159 for (i = 1; i < varmap.length (); i++)
7161 varinfo_t vi = get_varinfo (i);
7162 if (!vi->may_have_pointers)
7163 continue;
7164 dump_solution_for_var (outfile, i);
7169 /* Debug points-to information to stderr. */
7171 DEBUG_FUNCTION void
7172 debug_sa_points_to_info (void)
7174 dump_sa_points_to_info (stderr);
7178 /* Initialize the always-existing constraint variables for NULL
7179 ANYTHING, READONLY, and INTEGER */
7181 static void
7182 init_base_vars (void)
7184 struct constraint_expr lhs, rhs;
7185 varinfo_t var_anything;
7186 varinfo_t var_nothing;
7187 varinfo_t var_string;
7188 varinfo_t var_escaped;
7189 varinfo_t var_nonlocal;
7190 varinfo_t var_storedanything;
7191 varinfo_t var_integer;
7193 /* Variable ID zero is reserved and should be NULL. */
7194 varmap.safe_push (NULL);
7196 /* Create the NULL variable, used to represent that a variable points
7197 to NULL. */
7198 var_nothing = new_var_info (NULL_TREE, "NULL", false);
7199 gcc_assert (var_nothing->id == nothing_id);
7200 var_nothing->is_artificial_var = 1;
7201 var_nothing->offset = 0;
7202 var_nothing->size = ~0;
7203 var_nothing->fullsize = ~0;
7204 var_nothing->is_special_var = 1;
7205 var_nothing->may_have_pointers = 0;
7206 var_nothing->is_global_var = 0;
7208 /* Create the ANYTHING variable, used to represent that a variable
7209 points to some unknown piece of memory. */
7210 var_anything = new_var_info (NULL_TREE, "ANYTHING", false);
7211 gcc_assert (var_anything->id == anything_id);
7212 var_anything->is_artificial_var = 1;
7213 var_anything->size = ~0;
7214 var_anything->offset = 0;
7215 var_anything->fullsize = ~0;
7216 var_anything->is_special_var = 1;
7218 /* Anything points to anything. This makes deref constraints just
7219 work in the presence of linked list and other p = *p type loops,
7220 by saying that *ANYTHING = ANYTHING. */
7221 lhs.type = SCALAR;
7222 lhs.var = anything_id;
7223 lhs.offset = 0;
7224 rhs.type = ADDRESSOF;
7225 rhs.var = anything_id;
7226 rhs.offset = 0;
7228 /* This specifically does not use process_constraint because
7229 process_constraint ignores all anything = anything constraints, since all
7230 but this one are redundant. */
7231 constraints.safe_push (new_constraint (lhs, rhs));
7233 /* Create the STRING variable, used to represent that a variable
7234 points to a string literal. String literals don't contain
7235 pointers so STRING doesn't point to anything. */
7236 var_string = new_var_info (NULL_TREE, "STRING", false);
7237 gcc_assert (var_string->id == string_id);
7238 var_string->is_artificial_var = 1;
7239 var_string->offset = 0;
7240 var_string->size = ~0;
7241 var_string->fullsize = ~0;
7242 var_string->is_special_var = 1;
7243 var_string->may_have_pointers = 0;
7245 /* Create the ESCAPED variable, used to represent the set of escaped
7246 memory. */
7247 var_escaped = new_var_info (NULL_TREE, "ESCAPED", false);
7248 gcc_assert (var_escaped->id == escaped_id);
7249 var_escaped->is_artificial_var = 1;
7250 var_escaped->offset = 0;
7251 var_escaped->size = ~0;
7252 var_escaped->fullsize = ~0;
7253 var_escaped->is_special_var = 0;
7255 /* Create the NONLOCAL variable, used to represent the set of nonlocal
7256 memory. */
7257 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL", false);
7258 gcc_assert (var_nonlocal->id == nonlocal_id);
7259 var_nonlocal->is_artificial_var = 1;
7260 var_nonlocal->offset = 0;
7261 var_nonlocal->size = ~0;
7262 var_nonlocal->fullsize = ~0;
7263 var_nonlocal->is_special_var = 1;
7265 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
7266 lhs.type = SCALAR;
7267 lhs.var = escaped_id;
7268 lhs.offset = 0;
7269 rhs.type = DEREF;
7270 rhs.var = escaped_id;
7271 rhs.offset = 0;
7272 process_constraint (new_constraint (lhs, rhs));
7274 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
7275 whole variable escapes. */
7276 lhs.type = SCALAR;
7277 lhs.var = escaped_id;
7278 lhs.offset = 0;
7279 rhs.type = SCALAR;
7280 rhs.var = escaped_id;
7281 rhs.offset = UNKNOWN_OFFSET;
7282 process_constraint (new_constraint (lhs, rhs));
7284 /* *ESCAPED = NONLOCAL. This is true because we have to assume
7285 everything pointed to by escaped points to what global memory can
7286 point to. */
7287 lhs.type = DEREF;
7288 lhs.var = escaped_id;
7289 lhs.offset = 0;
7290 rhs.type = SCALAR;
7291 rhs.var = nonlocal_id;
7292 rhs.offset = 0;
7293 process_constraint (new_constraint (lhs, rhs));
7295 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
7296 global memory may point to global memory and escaped memory. */
7297 lhs.type = SCALAR;
7298 lhs.var = nonlocal_id;
7299 lhs.offset = 0;
7300 rhs.type = ADDRESSOF;
7301 rhs.var = nonlocal_id;
7302 rhs.offset = 0;
7303 process_constraint (new_constraint (lhs, rhs));
7304 rhs.type = ADDRESSOF;
7305 rhs.var = escaped_id;
7306 rhs.offset = 0;
7307 process_constraint (new_constraint (lhs, rhs));
7309 /* Create the STOREDANYTHING variable, used to represent the set of
7310 variables stored to *ANYTHING. */
7311 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING", false);
7312 gcc_assert (var_storedanything->id == storedanything_id);
7313 var_storedanything->is_artificial_var = 1;
7314 var_storedanything->offset = 0;
7315 var_storedanything->size = ~0;
7316 var_storedanything->fullsize = ~0;
7317 var_storedanything->is_special_var = 0;
7319 /* Create the INTEGER variable, used to represent that a variable points
7320 to what an INTEGER "points to". */
7321 var_integer = new_var_info (NULL_TREE, "INTEGER", false);
7322 gcc_assert (var_integer->id == integer_id);
7323 var_integer->is_artificial_var = 1;
7324 var_integer->size = ~0;
7325 var_integer->fullsize = ~0;
7326 var_integer->offset = 0;
7327 var_integer->is_special_var = 1;
7329 /* INTEGER = ANYTHING, because we don't know where a dereference of
7330 a random integer will point to. */
7331 lhs.type = SCALAR;
7332 lhs.var = integer_id;
7333 lhs.offset = 0;
7334 rhs.type = ADDRESSOF;
7335 rhs.var = anything_id;
7336 rhs.offset = 0;
7337 process_constraint (new_constraint (lhs, rhs));
7340 /* Initialize things necessary to perform PTA */
7342 static void
7343 init_alias_vars (void)
7345 use_field_sensitive = (param_max_fields_for_field_sensitive > 1);
7347 bitmap_obstack_initialize (&pta_obstack);
7348 bitmap_obstack_initialize (&oldpta_obstack);
7349 bitmap_obstack_initialize (&predbitmap_obstack);
7351 constraints.create (8);
7352 varmap.create (8);
7353 vi_for_tree = new hash_map<tree, varinfo_t>;
7354 call_stmt_vars = new hash_map<gimple *, varinfo_t>;
7356 memset (&stats, 0, sizeof (stats));
7357 shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
7358 init_base_vars ();
7360 gcc_obstack_init (&fake_var_decl_obstack);
7362 final_solutions = new hash_map<varinfo_t, pt_solution *>;
7363 gcc_obstack_init (&final_solutions_obstack);
7366 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
7367 predecessor edges. */
7369 static void
7370 remove_preds_and_fake_succs (constraint_graph_t graph)
7372 unsigned int i;
7374 /* Clear the implicit ref and address nodes from the successor
7375 lists. */
7376 for (i = 1; i < FIRST_REF_NODE; i++)
7378 if (graph->succs[i])
7379 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
7380 FIRST_REF_NODE * 2);
7383 /* Free the successor list for the non-ref nodes. */
7384 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
7386 if (graph->succs[i])
7387 BITMAP_FREE (graph->succs[i]);
7390 /* Now reallocate the size of the successor list as, and blow away
7391 the predecessor bitmaps. */
7392 graph->size = varmap.length ();
7393 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
7395 free (graph->implicit_preds);
7396 graph->implicit_preds = NULL;
7397 free (graph->preds);
7398 graph->preds = NULL;
7399 bitmap_obstack_release (&predbitmap_obstack);
7402 /* Solve the constraint set. */
7404 static void
7405 solve_constraints (void)
7407 class scc_info *si;
7409 /* Sort varinfos so that ones that cannot be pointed to are last.
7410 This makes bitmaps more efficient. */
7411 unsigned int *map = XNEWVEC (unsigned int, varmap.length ());
/* The special vars (NULL .. INTEGER) keep their fixed ids.  */
7412 for (unsigned i = 0; i < integer_id + 1; ++i)
7413 map[i] = i;
7414 /* Start with address-taken vars, followed by not address-taken vars
7415 to move vars never appearing in the points-to solution bitmaps last. */
7416 unsigned j = integer_id + 1;
7417 for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
7418 if (varmap[varmap[i]->head]->address_taken)
7419 map[i] = j++;
7420 for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
7421 if (! varmap[varmap[i]->head]->address_taken)
7422 map[i] = j++;
7423 /* Shuffle varmap according to map. */
/* Apply the permutation in place by following its cycles: swap until
   the element that belongs at position I has arrived, then fix up
   the id and the next/head links of the settled element.  */
7424 for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
7426 while (map[varmap[i]->id] != i)
7427 std::swap (varmap[i], varmap[map[varmap[i]->id]]);
7428 gcc_assert (bitmap_empty_p (varmap[i]->solution));
7429 varmap[i]->id = i;
7430 varmap[i]->next = map[varmap[i]->next];
7431 varmap[i]->head = map[varmap[i]->head];
7433 /* Finally rewrite constraints. */
7434 for (unsigned i = 0; i < constraints.length (); ++i)
7436 constraints[i]->lhs.var = map[constraints[i]->lhs.var];
7437 constraints[i]->rhs.var = map[constraints[i]->rhs.var];
7439 free (map);
7441 if (dump_file)
7442 fprintf (dump_file,
7443 "\nCollapsing static cycles and doing variable "
7444 "substitution\n");
7446 init_graph (varmap.length () * 2);
7448 if (dump_file)
7449 fprintf (dump_file, "Building predecessor graph\n");
7450 build_pred_graph ();
7452 if (dump_file)
7453 fprintf (dump_file, "Detecting pointer and location "
7454 "equivalences\n");
7455 si = perform_var_substitution (graph);
7457 if (dump_file)
7458 fprintf (dump_file, "Rewriting constraints and unifying "
7459 "variables\n");
7460 rewrite_constraints (graph, si);
7462 build_succ_graph ();
7464 free_var_substitution_info (si);
7466 /* Attach complex constraints to graph nodes. */
7467 move_complex_constraints (graph);
7469 if (dump_file)
7470 fprintf (dump_file, "Uniting pointer but not location equivalent "
7471 "variables\n");
7472 unite_pointer_equivalences (graph);
7474 if (dump_file)
7475 fprintf (dump_file, "Finding indirect cycles\n");
7476 find_indirect_cycles (graph);
7478 /* Implicit nodes and predecessors are no longer necessary at this
7479 point. */
7480 remove_preds_and_fake_succs (graph);
7482 if (dump_file && (dump_flags & TDF_GRAPH))
7484 fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
7485 "in dot format:\n");
7486 dump_constraint_graph (dump_file);
7487 fprintf (dump_file, "\n\n");
7490 if (dump_file)
7491 fprintf (dump_file, "Solving graph\n");
7493 solve_graph (graph);
7495 if (dump_file && (dump_flags & TDF_GRAPH))
7497 fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
7498 "in dot format:\n");
7499 dump_constraint_graph (dump_file);
7500 fprintf (dump_file, "\n\n");
7504 /* Create points-to sets for the current function. See the comments
7505 at the start of the file for an algorithmic overview. */
7507 static void
7508 compute_points_to_sets (void)
7510 basic_block bb;
7511 varinfo_t vi;
7513 timevar_push (TV_TREE_PTA);
7515 init_alias_vars ();
7517 intra_create_variable_infos (cfun);
7519 /* Now walk all statements and build the constraint set. */
7520 FOR_EACH_BB_FN (bb, cfun)
7522 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7523 gsi_next (&gsi))
7525 gphi *phi = gsi.phi ();
/* Virtual operand PHIs carry no pointer values.  */
7527 if (! virtual_operand_p (gimple_phi_result (phi)))
7528 find_func_aliases (cfun, phi);
7531 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
7532 gsi_next (&gsi))
7534 gimple *stmt = gsi_stmt (gsi);
7536 find_func_aliases (cfun, stmt);
7540 if (dump_file)
7542 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
7543 dump_constraints (dump_file, 0);
7546 /* From the constraints compute the points-to sets. */
7547 solve_constraints ();
7549 /* Post-process solutions for escapes through returns. */
/* For each value returned from the function, add everything it points
   to that is global (or heap) but not yet in ESCAPED to ESCAPED, then
   iterate to a transitive-closure fixed point via delta/new_delta.  */
7550 edge_iterator ei;
7551 edge e;
7552 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
7553 if (greturn *ret = safe_dyn_cast <greturn *> (last_stmt (e->src)))
7555 tree val = gimple_return_retval (ret);
7556 /* ??? Easy to handle simple indirections with some work.
7557 Arbitrary references like foo.bar.baz are more difficult
7558 (but conservatively easy enough with just looking at the base).
7559 Mind to fixup find_func_aliases as well. */
7560 if (!val || !SSA_VAR_P (val))
7561 continue;
7562 /* returns happen last in non-IPA so they only influence
7563 the ESCAPED solution and we can filter local variables. */
7564 varinfo_t escaped_vi = get_varinfo (find (escaped_id));
7565 varinfo_t vi = lookup_vi_for_tree (val);
7566 bitmap delta = BITMAP_ALLOC (&pta_obstack);
7567 bitmap_iterator bi;
7568 unsigned i;
/* Seed DELTA from all sub-fields of the returned value.  */
7569 for (; vi; vi = vi_next (vi))
7571 varinfo_t part_vi = get_varinfo (find (vi->id));
7572 EXECUTE_IF_AND_COMPL_IN_BITMAP (part_vi->solution,
7573 escaped_vi->solution, 0, i, bi)
7575 varinfo_t pointed_to_vi = get_varinfo (i);
7576 if (pointed_to_vi->is_global_var
7577 /* We delay marking of heap memory as global. */
7578 || pointed_to_vi->is_heap_var)
7579 bitmap_set_bit (delta, i);
7583 /* Now compute the transitive closure. */
7584 bitmap_ior_into (escaped_vi->solution, delta);
7585 bitmap new_delta = BITMAP_ALLOC (&pta_obstack);
7586 while (!bitmap_empty_p (delta))
7588 EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
7590 varinfo_t pointed_to_vi = get_varinfo (i);
7591 pointed_to_vi = get_varinfo (find (pointed_to_vi->id));
7592 unsigned j;
7593 bitmap_iterator bi2;
7594 EXECUTE_IF_AND_COMPL_IN_BITMAP (pointed_to_vi->solution,
7595 escaped_vi->solution,
7596 0, j, bi2)
7598 varinfo_t pointed_to_vi2 = get_varinfo (j);
7599 if (pointed_to_vi2->is_global_var
7600 /* We delay marking of heap memory as global. */
7601 || pointed_to_vi2->is_heap_var)
7602 bitmap_set_bit (new_delta, j);
7605 bitmap_ior_into (escaped_vi->solution, new_delta);
7606 bitmap_clear (delta);
7607 std::swap (delta, new_delta);
7609 BITMAP_FREE (delta);
7610 BITMAP_FREE (new_delta);
7613 if (dump_file)
7614 dump_sa_points_to_info (dump_file);
7616 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
7617 cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl,
7618 get_varinfo (escaped_id));
7620 /* Make sure the ESCAPED solution (which is used as placeholder in
7621 other solutions) does not reference itself. This simplifies
7622 points-to solution queries. */
7623 cfun->gimple_df->escaped.escaped = 0;
7625 /* Compute the points-to sets for pointer SSA_NAMEs. */
7626 unsigned i;
7627 tree ptr;
7629 FOR_EACH_SSA_NAME (i, ptr, cfun)
7631 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
7632 find_what_p_points_to (cfun->decl, ptr);
7635 /* Compute the call-used/clobbered sets. */
7636 FOR_EACH_BB_FN (bb, cfun)
7638 gimple_stmt_iterator gsi;
7640 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7642 gcall *stmt;
7643 struct pt_solution *pt;
7645 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
7646 if (!stmt)
7647 continue;
/* First fill in the use set of the call.  */
7649 pt = gimple_call_use_set (stmt);
7650 if (gimple_call_flags (stmt) & ECF_CONST)
7651 memset (pt, 0, sizeof (struct pt_solution));
7652 else
7654 bool uses_global_memory = true;
7655 bool reads_global_memory = true;
7657 determine_global_memory_access (stmt, NULL,
7658 &reads_global_memory,
7659 &uses_global_memory);
7660 if ((vi = lookup_call_use_vi (stmt)) != NULL)
7662 *pt = find_what_var_points_to (cfun->decl, vi);
7663 /* Escaped (and thus nonlocal) variables are always
7664 implicitly used by calls. */
7665 /* ??? ESCAPED can be empty even though NONLOCAL
7666 always escaped. */
7667 if (uses_global_memory)
7669 pt->nonlocal = 1;
7670 pt->escaped = 1;
7673 else if (uses_global_memory)
7675 /* If there is nothing special about this call then
7676 we have made everything that is used also escape. */
7677 *pt = cfun->gimple_df->escaped;
7678 pt->nonlocal = 1;
7680 else
7681 memset (pt, 0, sizeof (struct pt_solution));
/* Then the clobber set, analogously but keyed on writes.  */
7684 pt = gimple_call_clobber_set (stmt);
7685 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7686 memset (pt, 0, sizeof (struct pt_solution));
7687 else
7689 bool writes_global_memory = true;
7691 determine_global_memory_access (stmt, &writes_global_memory,
7692 NULL, NULL);
7694 if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7696 *pt = find_what_var_points_to (cfun->decl, vi);
7697 /* Escaped (and thus nonlocal) variables are always
7698 implicitly clobbered by calls. */
7699 /* ??? ESCAPED can be empty even though NONLOCAL
7700 always escaped. */
7701 if (writes_global_memory)
7703 pt->nonlocal = 1;
7704 pt->escaped = 1;
7707 else if (writes_global_memory)
7709 /* If there is nothing special about this call then
7710 we have made everything that is used also escape. */
7711 *pt = cfun->gimple_df->escaped;
7712 pt->nonlocal = 1;
7714 else
7715 memset (pt, 0, sizeof (struct pt_solution));
7720 timevar_pop (TV_TREE_PTA);
/* Delete created points-to sets.  Releases every global data structure
   built by the constraint generation and solving phases; after this the
   internal PTA state is gone and only the per-SSA-name / per-call
   pt_solutions remain.  */

static void
delete_points_to_sets (void)
{
  unsigned int i;

  delete shared_bitmap_table;
  shared_bitmap_table = NULL;
  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "Points to sets created:%d\n",
	     stats.points_to_sets_created);

  delete vi_for_tree;
  delete call_stmt_vars;
  bitmap_obstack_release (&pta_obstack);
  constraints.release ();

  /* The complex constraint vectors are per-node and individually
     allocated, so release each before freeing the array.  */
  for (i = 0; i < graph->size; i++)
    graph->complex[i].release ();
  free (graph->complex);

  free (graph->rep);
  free (graph->succs);
  free (graph->pe);
  free (graph->pe_rep);
  free (graph->indirect_cycles);
  free (graph);

  varmap.release ();
  variable_info_pool.release ();
  constraint_pool.release ();

  obstack_free (&fake_var_decl_obstack, NULL);

  delete final_solutions;
  obstack_free (&final_solutions_obstack, NULL);
}
/* Payload passed to the visit_loadstore callback via
   walk_stmt_load_store_ops.  */

struct vls_data
{
  /* The dependence clique to assign to "other" accesses.  */
  unsigned short clique;
  /* Whether any of the restrict variables escaped.  */
  bool escaped_p;
  /* Bitmap of varinfo ids that are restrict pointed-to.  */
  bitmap rvars;
};
/* Mark "other" loads and stores as belonging to CLIQUE and with
   base zero.  Callback for walk_stmt_load_store_ops; DATA is a
   vls_data.  Always returns false so the whole statement is walked.  */

static bool
visit_loadstore (gimple *, tree base, tree ref, void *data)
{
  unsigned short clique = ((vls_data *) data)->clique;
  bitmap rvars = ((vls_data *) data)->rvars;
  bool escaped_p = ((vls_data *) data)->escaped_p;
  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    {
      tree ptr = TREE_OPERAND (base, 0);
      if (TREE_CODE (ptr) == SSA_NAME)
	{
	  /* For parameters, get at the points-to set for the actual parm
	     decl.  */
	  if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	      && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
		  || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
	    ptr = SSA_NAME_VAR (ptr);

	  /* We need to make sure 'ptr' doesn't include any of
	     the restrict tags we added bases for in its points-to set.  */
	  varinfo_t vi = lookup_vi_for_tree (ptr);
	  if (! vi)
	    return false;

	  vi = get_varinfo (find (vi->id));
	  if (bitmap_intersect_p (rvars, vi->solution)
	      || (escaped_p && bitmap_bit_p (vi->solution, escaped_id)))
	    return false;
	}

      /* Do not overwrite existing cliques (that includes clique, base
	 pairs we just set).  */
      if (MR_DEPENDENCE_CLIQUE (base) == 0)
	{
	  MR_DEPENDENCE_CLIQUE (base) = clique;
	  MR_DEPENDENCE_BASE (base) = 0;
	}
    }

  /* For plain decl accesses see whether they are accesses to globals
     and rewrite them to MEM_REFs with { clique, 0 }.  */
  if (VAR_P (base)
      && is_global_var (base)
      /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
	 ops callback.  */
      && base != ref)
    {
      /* Strip handled components to get at the base decl of REF and
	 replace it in place with a dependence-annotated MEM_REF.  */
      tree *basep = &ref;
      while (handled_component_p (*basep))
	basep = &TREE_OPERAND (*basep, 0);
      gcc_assert (VAR_P (*basep));
      tree ptr = build_fold_addr_expr (*basep);
      tree zero = build_int_cst (TREE_TYPE (ptr), 0);
      *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
      MR_DEPENDENCE_CLIQUE (*basep) = clique;
      MR_DEPENDENCE_BASE (*basep) = 0;
    }

  return false;
}
/* Payload for maybe_set_dependence_info, carried through
   walk_stmt_load_store_ops.  CLIQUE and LAST_RUID are shared state
   updated across calls, hence pointers.  */
struct msdi_data {
  /* The SSA pointer whose dereferences we annotate.  */
  tree ptr;
  /* In-out: current dependence clique (0 until first assignment).  */
  unsigned short *clique;
  /* In-out: last restrict-uid handed out in this clique.  */
  unsigned short *last_ruid;
  /* The single restrict variable PTR points to.  */
  varinfo_t restrict_var;
};
/* If BASE is a MEM_REF then assign a clique, base pair to it, updating
   CLIQUE, *RESTRICT_VAR and LAST_RUID as passed via DATA.
   Return whether dependence info was assigned to BASE.  */

static bool
maybe_set_dependence_info (gimple *, tree base, tree, void *data)
{
  tree ptr = ((msdi_data *)data)->ptr;
  unsigned short &clique = *((msdi_data *)data)->clique;
  unsigned short &last_ruid = *((msdi_data *)data)->last_ruid;
  varinfo_t restrict_var = ((msdi_data *)data)->restrict_var;
  if ((TREE_CODE (base) == MEM_REF
       || TREE_CODE (base) == TARGET_MEM_REF)
      && TREE_OPERAND (base, 0) == ptr)
    {
      /* Do not overwrite existing cliques.  This avoids overwriting dependence
	 info inlined from a function with restrict parameters inlined
	 into a function with restrict parameters.  This usually means we
	 prefer to be precise in innermost loops.  */
      if (MR_DEPENDENCE_CLIQUE (base) == 0)
	{
	  /* Lazily allocate the special "local" clique 1 on first use.  */
	  if (clique == 0)
	    {
	      if (cfun->last_clique == 0)
		cfun->last_clique = 1;
	      clique = 1;
	    }
	  /* Hand out a fresh restrict uid the first time we see this
	     restrict variable.  */
	  if (restrict_var->ruid == 0)
	    restrict_var->ruid = ++last_ruid;
	  MR_DEPENDENCE_CLIQUE (base) = clique;
	  MR_DEPENDENCE_BASE (base) = restrict_var->ruid;
	  return true;
	}
    }
  return false;
}
7879 /* Clear dependence info for the clique DATA. */
7881 static bool
7882 clear_dependence_clique (gimple *, tree base, tree, void *data)
7884 unsigned short clique = (uintptr_t)data;
7885 if ((TREE_CODE (base) == MEM_REF
7886 || TREE_CODE (base) == TARGET_MEM_REF)
7887 && MR_DEPENDENCE_CLIQUE (base) == clique)
7889 MR_DEPENDENCE_CLIQUE (base) = 0;
7890 MR_DEPENDENCE_BASE (base) = 0;
7893 return false;
/* Compute the set of independent memory references based on restrict
   tags and their conservative propagation to the points-to sets.
   Assigns MR_DEPENDENCE_{CLIQUE,BASE} pairs to dereferences of
   pointers that provably point to a single restrict variable.  */

static void
compute_dependence_clique (void)
{
  /* First clear the special "local" clique.  */
  basic_block bb;
  if (cfun->last_clique != 0)
    FOR_EACH_BB_FN (bb, cfun)
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	   !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  walk_stmt_load_store_ops (stmt, (void *)(uintptr_t) 1,
				    clear_dependence_clique,
				    clear_dependence_clique);
	}

  unsigned short clique = 0;
  unsigned short last_ruid = 0;
  bitmap rvars = BITMAP_ALLOC (NULL);
  bool escaped_p = false;
  for (unsigned i = 0; i < num_ssa_names; ++i)
    {
      tree ptr = ssa_name (i);
      if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
	continue;

      /* Avoid all this when ptr is not dereferenced?  */
      tree p = ptr;
      /* For default defs of parameters/results the points-to set lives
	 on the decl, not on the SSA name.  */
      if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	  && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
	      || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
	p = SSA_NAME_VAR (ptr);
      varinfo_t vi = lookup_vi_for_tree (p);
      if (!vi)
	continue;
      vi = get_varinfo (find (vi->id));
      bitmap_iterator bi;
      unsigned j;
      varinfo_t restrict_var = NULL;
      /* Scan the solution; PTR qualifies only if it points to exactly
	 one restrict variable (plus possibly NULL).  */
      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
	{
	  varinfo_t oi = get_varinfo (j);
	  /* Subfields count as their containing variable.  */
	  if (oi->head != j)
	    oi = get_varinfo (oi->head);
	  if (oi->is_restrict_var)
	    {
	      if (restrict_var
		  && restrict_var != oi)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "found restrict pointed-to "
			       "for ");
		      print_generic_expr (dump_file, ptr);
		      fprintf (dump_file, " but not exclusively\n");
		    }
		  restrict_var = NULL;
		  break;
		}
	      restrict_var = oi;
	    }
	  /* NULL is the only other valid points-to entry.  */
	  else if (oi->id != nothing_id)
	    {
	      restrict_var = NULL;
	      break;
	    }
	}
      /* Ok, found that ptr must(!) point to a single(!) restrict
	 variable.  */
      /* ??? PTA isn't really a proper propagation engine to compute
	 this property.
	 ??? We could handle merging of two restricts by unifying them.  */
      if (restrict_var)
	{
	  /* Now look at possible dereferences of ptr.  */
	  imm_use_iterator ui;
	  gimple *use_stmt;
	  bool used = false;
	  msdi_data data = { ptr, &clique, &last_ruid, restrict_var };
	  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
	    used |= walk_stmt_load_store_ops (use_stmt, &data,
					      maybe_set_dependence_info,
					      maybe_set_dependence_info);
	  if (used)
	    {
	      /* Add all subvars to the set of restrict pointed-to set.  */
	      for (unsigned sv = restrict_var->head; sv != 0;
		   sv = get_varinfo (sv)->next)
		bitmap_set_bit (rvars, sv);
	      varinfo_t escaped = get_varinfo (find (escaped_id));
	      if (bitmap_bit_p (escaped->solution, restrict_var->id))
		escaped_p = true;
	    }
	}
    }

  if (clique != 0)
    {
      /* Assign the BASE id zero to all accesses not based on a restrict
	 pointer.  That way they get disambiguated against restrict
	 accesses but not against each other.  */
      /* ??? For restricts derived from globals (thus not incoming
	 parameters) we can't restrict scoping properly thus the following
	 is too aggressive there.  For now we have excluded those globals from
	 getting into the MR_DEPENDENCE machinery.  */
      vls_data data = { clique, escaped_p, rvars };
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
	for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	     !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    gimple *stmt = gsi_stmt (gsi);
	    walk_stmt_load_store_ops (stmt, &data,
				      visit_loadstore, visit_loadstore);
	  }
    }

  BITMAP_FREE (rvars);
}
/* Compute points-to information for every SSA_NAME pointer in the
   current function and compute the transitive closure of escaped
   variables to re-initialize the call-clobber states of local variables.
   Returns 0 (no TODO flags).  */

unsigned int
compute_may_aliases (void)
{
  /* IPA PTA results take precedence; do not clobber them with a
     local recomputation.  */
  if (cfun->gimple_df->ipa_pta)
    {
      if (dump_file)
	{
	  fprintf (dump_file, "\nNot re-computing points-to information "
		   "because IPA points-to information is available.\n\n");

	  /* But still dump what we have remaining of it.  */
	  dump_alias_info (dump_file);
	}

      return 0;
    }

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  Compute the reachability set of escaped and call-used
     variables.  */
  compute_points_to_sets ();

  /* Debugging dumps.  */
  if (dump_file)
    dump_alias_info (dump_file);

  /* Compute restrict-based memory disambiguations.  */
  compute_dependence_clique ();

  /* Deallocate memory used by aliasing data structures and the internal
     points-to solution.  */
  delete_points_to_sets ();

  gcc_assert (!need_ssa_update_p (cfun));

  return 0;
}
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  The pass itself has no execute hook; the work
   happens from the TODO processing.  */

namespace {

const pass_data pass_data_build_alias =
{
  GIMPLE_PASS, /* type */
  "alias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_alias : public gimple_opt_pass
{
public:
  pass_build_alias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_alias, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_tree_pta; }

}; // class pass_build_alias

} // anon namespace

gimple_opt_pass *
make_pass_build_alias (gcc::context *ctxt)
{
  return new pass_build_alias (ctxt);
}
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  Early-optimization variant of pass_build_alias.  */

namespace {

const pass_data pass_data_build_ealias =
{
  GIMPLE_PASS, /* type */
  "ealias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_ealias : public gimple_opt_pass
{
public:
  pass_build_ealias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_ealias, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_tree_pta; }

}; // class pass_build_ealias

} // anon namespace

gimple_opt_pass *
make_pass_build_ealias (gcc::context *ctxt)
{
  return new pass_build_ealias (ctxt);
}
/* IPA PTA solutions for ESCAPED.  Initialized to { anything = true };
   the real solution is filled in by ipa_pta_execute.  */
struct pt_solution ipa_escaped_pt
= { true, false, false, false, false,
    false, false, false, false, false, NULL };
8144 /* Associate node with varinfo DATA. Worker for
8145 cgraph_for_symbol_thunks_and_aliases. */
8146 static bool
8147 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
8149 if ((node->alias
8150 || (node->thunk
8151 && ! node->inlined_to))
8152 && node->analyzed
8153 && !node->ifunc_resolver)
8154 insert_vi_for_tree (node->decl, (varinfo_t)data);
8155 return false;
/* Dump varinfo VI to FILE.  Prints the id/name line, any set flag
   attributes, size/offset info, and the (old)solution bitmaps.
   A NULL VI is silently ignored.  */

static void
dump_varinfo (FILE *file, varinfo_t vi)
{
  if (vi == NULL)
    return;

  fprintf (file, "%u: %s\n", vi->id, vi->name);

  const char *sep = " ";
  if (vi->is_artificial_var)
    fprintf (file, "%sartificial", sep);
  if (vi->is_special_var)
    fprintf (file, "%sspecial", sep);
  if (vi->is_unknown_size_var)
    fprintf (file, "%sunknown-size", sep);
  if (vi->is_full_var)
    fprintf (file, "%sfull", sep);
  if (vi->is_heap_var)
    fprintf (file, "%sheap", sep);
  if (vi->may_have_pointers)
    fprintf (file, "%smay-have-pointers", sep);
  if (vi->only_restrict_pointers)
    fprintf (file, "%sonly-restrict-pointers", sep);
  if (vi->is_restrict_var)
    fprintf (file, "%sis-restrict-var", sep);
  if (vi->is_global_var)
    fprintf (file, "%sglobal", sep);
  if (vi->is_ipa_escape_point)
    fprintf (file, "%sipa-escape-point", sep);
  if (vi->is_fn_info)
    fprintf (file, "%sfn-info", sep);
  if (vi->ruid)
    fprintf (file, "%srestrict-uid:%u", sep, vi->ruid);
  if (vi->next)
    fprintf (file, "%snext:%u", sep, vi->next);
  if (vi->head != vi->id)
    fprintf (file, "%shead:%u", sep, vi->head);
  if (vi->offset)
    fprintf (file, "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset);
  /* ~0 means "unknown"; suppress it rather than print a huge number.  */
  if (vi->size != ~(unsigned HOST_WIDE_INT)0)
    fprintf (file, "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size);
  if (vi->fullsize != ~(unsigned HOST_WIDE_INT)0
      && vi->fullsize != vi->size)
    fprintf (file, "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep,
	     vi->fullsize);
  fprintf (file, "\n");

  if (vi->solution && !bitmap_empty_p (vi->solution))
    {
      bitmap_iterator bi;
      unsigned i;
      fprintf (file, " solution: {");
      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
	fprintf (file, " %u", i);
      fprintf (file, " }\n");
    }

  /* Only show the old solution when it differs; identical bitmaps
     carry no extra information.  */
  if (vi->oldsolution && !bitmap_empty_p (vi->oldsolution)
      && !bitmap_equal_p (vi->solution, vi->oldsolution))
    {
      bitmap_iterator bi;
      unsigned i;
      fprintf (file, " oldsolution: {");
      EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi)
	fprintf (file, " %u", i);
      fprintf (file, " }\n");
    }
}
/* Dump varinfo VI to stderr.  Debugger convenience wrapper around
   dump_varinfo.  */

DEBUG_FUNCTION void
debug_varinfo (varinfo_t vi)
{
  dump_varinfo (stderr, vi);
}
8237 /* Dump varmap to FILE. */
8239 static void
8240 dump_varmap (FILE *file)
8242 if (varmap.length () == 0)
8243 return;
8245 fprintf (file, "variables:\n");
8247 for (unsigned int i = 0; i < varmap.length (); ++i)
8249 varinfo_t vi = get_varinfo (i);
8250 dump_varinfo (file, vi);
8253 fprintf (file, "\n");
/* Dump varmap to stderr.  Debugger convenience wrapper around
   dump_varmap.  */

DEBUG_FUNCTION void
debug_varmap (void)
{
  dump_varmap (stderr);
}
/* Compute whether node is referred to non-locally.  Worker for
   cgraph_for_symbol_thunks_and_aliases.  Ors the result into the
   bool pointed to by DATA; always returns false to keep walking.  */
static bool
refered_from_nonlocal_fn (struct cgraph_node *node, void *data)
{
  bool *nonlocal_p = (bool *)data;
  *nonlocal_p |= (node->used_from_other_partition
		  || DECL_EXTERNAL (node->decl)
		  || TREE_PUBLIC (node->decl)
		  || node->force_output
		  || lookup_attribute ("noipa", DECL_ATTRIBUTES (node->decl)));
  return false;
}
8278 /* Same for varpool nodes. */
8279 static bool
8280 refered_from_nonlocal_var (struct varpool_node *node, void *data)
8282 bool *nonlocal_p = (bool *)data;
8283 *nonlocal_p |= (node->used_from_other_partition
8284 || DECL_EXTERNAL (node->decl)
8285 || TREE_PUBLIC (node->decl)
8286 || node->force_output);
8287 return false;
/* Execute the driver for IPA PTA.  Builds constraints over the whole
   translation unit, solves them, and stores the resulting points-to
   and call-use/clobber solutions on the per-function GIMPLE state.
   Returns 0 (no TODO flags).  */
static unsigned int
ipa_pta_execute (void)
{
  struct cgraph_node *node;
  varpool_node *var;
  unsigned int from = 0;

  in_ipa_mode = 1;

  init_alias_vars ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      symtab->dump (dump_file);
      fprintf (dump_file, "\n");
    }

  if (dump_file)
    {
      fprintf (dump_file, "Generating generic constraints\n\n");
      dump_constraints (dump_file, from);
      fprintf (dump_file, "\n");
      from = constraints.length ();
    }

  /* Build the constraints.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      varinfo_t vi;
      /* Nodes without a body in this partition are not interesting.
	 Especially do not visit clones at this point for now - we
	 get duplicate decls there for inline clones at least.  */
      if (!node->has_gimple_body_p ()
	  || node->in_other_partition
	  || node->inlined_to)
	continue;
      node->get_body ();

      gcc_assert (!node->clone_of);

      /* For externally visible or attribute used annotated functions use
	 local constraints for their arguments.
	 For local functions we see all callers and thus do not need initial
	 constraints for parameters.  */
      bool nonlocal_p = (node->used_from_other_partition
			 || DECL_EXTERNAL (node->decl)
			 || TREE_PUBLIC (node->decl)
			 || node->force_output
			 || lookup_attribute ("noipa",
					      DECL_ATTRIBUTES (node->decl)));
      /* Aliases/thunks of the node may also be referenced non-locally.  */
      node->call_for_symbol_thunks_and_aliases (refered_from_nonlocal_fn,
						&nonlocal_p, true);

      vi = create_function_info_for (node->decl,
				     alias_get_name (node->decl), false,
				     nonlocal_p);
      if (dump_file
	  && from != constraints.length ())
	{
	  fprintf (dump_file,
		   "Generating initial constraints for %s",
		   node->dump_name ());
	  if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
	    fprintf (dump_file, " (%s)",
		     IDENTIFIER_POINTER
		       (DECL_ASSEMBLER_NAME (node->decl)));
	  fprintf (dump_file, "\n\n");
	  dump_constraints (dump_file, from);
	  fprintf (dump_file, "\n");

	  from = constraints.length ();
	}

      node->call_for_symbol_thunks_and_aliases
	(associate_varinfo_to_alias, vi, true);
    }

  /* Create constraints for global variables and their initializers.  */
  FOR_EACH_VARIABLE (var)
    {
      if (var->alias && var->analyzed)
	continue;

      varinfo_t vi = get_vi_for_tree (var->decl);

      /* For the purpose of IPA PTA unit-local globals are not
	 escape points.  */
      bool nonlocal_p = (DECL_EXTERNAL (var->decl)
			 || TREE_PUBLIC (var->decl)
			 || var->used_from_other_partition
			 || var->force_output);
      var->call_for_symbol_and_aliases (refered_from_nonlocal_var,
					&nonlocal_p, true);
      if (nonlocal_p)
	vi->is_ipa_escape_point = true;
    }

  if (dump_file
      && from != constraints.length ())
    {
      fprintf (dump_file,
	       "Generating constraints for global initializers\n\n");
      dump_constraints (dump_file, from);
      fprintf (dump_file, "\n");
      from = constraints.length ();
    }

  FOR_EACH_DEFINED_FUNCTION (node)
    {
      struct function *func;
      basic_block bb;

      /* Nodes without a body in this partition are not interesting.  */
      if (!node->has_gimple_body_p ()
	  || node->in_other_partition
	  || node->clone_of)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file,
		   "Generating constraints for %s", node->dump_name ());
	  if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
	    fprintf (dump_file, " (%s)",
		     IDENTIFIER_POINTER
		       (DECL_ASSEMBLER_NAME (node->decl)));
	  fprintf (dump_file, "\n");
	}

      func = DECL_STRUCT_FUNCTION (node->decl);
      gcc_assert (cfun == NULL);

      /* Build constraints for the function body.  */
      FOR_EACH_BB_FN (bb, func)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *phi = gsi.phi ();

	      if (! virtual_operand_p (gimple_phi_result (phi)))
		find_func_aliases (func, phi);
	    }

	  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple *stmt = gsi_stmt (gsi);

	      find_func_aliases (func, stmt);
	      find_func_clobbers (func, stmt);
	    }
	}

      if (dump_file)
	{
	  fprintf (dump_file, "\n");
	  dump_constraints (dump_file, from);
	  fprintf (dump_file, "\n");
	  from = constraints.length ();
	}
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  /* Now post-process solutions to handle locals from different
     runtime instantiations coming in through recursive invocations.  */
  unsigned shadow_var_cnt = 0;
  for (unsigned i = 1; i < varmap.length (); ++i)
    {
      varinfo_t fi = get_varinfo (i);
      if (fi->is_fn_info
	  && fi->decl)
	/* Automatic variables pointed to by their containing functions
	   parameters need this treatment.  */
	for (varinfo_t ai = first_vi_for_offset (fi, fi_parm_base);
	     ai; ai = vi_next (ai))
	  {
	    varinfo_t vi = get_varinfo (find (ai->id));
	    bitmap_iterator bi;
	    unsigned j;
	    EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
	      {
		varinfo_t pt = get_varinfo (j);
		if (pt->shadow_var_uid == 0
		    && pt->decl
		    && auto_var_in_fn_p (pt->decl, fi->decl))
		  {
		    pt->shadow_var_uid = allocate_decl_uid ();
		    shadow_var_cnt++;
		  }
	      }
	  }
      /* As well as global variables which are another way of passing
	 arguments to recursive invocations.  */
      else if (fi->is_global_var)
	{
	  for (varinfo_t ai = fi; ai; ai = vi_next (ai))
	    {
	      varinfo_t vi = get_varinfo (find (ai->id));
	      bitmap_iterator bi;
	      unsigned j;
	      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
		{
		  varinfo_t pt = get_varinfo (j);
		  if (pt->shadow_var_uid == 0
		      && pt->decl
		      && auto_var_p (pt->decl))
		    {
		      pt->shadow_var_uid = allocate_decl_uid ();
		      shadow_var_cnt++;
		    }
		}
	    }
	}
    }
  if (shadow_var_cnt && dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Allocated %u shadow variables for locals "
	     "maybe leaking into recursive invocations of their containing "
	     "functions\n", shadow_var_cnt);

  /* Compute the global points-to sets for ESCAPED.
     ??? Note that the computed escape set is not correct
     for the whole unit as we fail to consider graph edges to
     externally visible functions.  */
  ipa_escaped_pt = find_what_var_points_to (NULL, get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  ipa_escaped_pt.ipa_escaped = 0;

  /* Assign the points-to sets to the SSA names in the unit.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      tree ptr;
      struct function *fn;
      unsigned i;
      basic_block bb;

      /* Nodes without a body in this partition are not interesting.  */
      if (!node->has_gimple_body_p ()
	  || node->in_other_partition
	  || node->clone_of)
	continue;

      fn = DECL_STRUCT_FUNCTION (node->decl);

      /* Compute the points-to sets for pointer SSA_NAMEs.  */
      FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
	{
	  if (ptr
	      && POINTER_TYPE_P (TREE_TYPE (ptr)))
	    find_what_p_points_to (node->decl, ptr);
	}

      /* Compute the call-use and call-clobber sets for indirect calls
	 and calls to external functions.  */
      FOR_EACH_BB_FN (bb, fn)
	{
	  gimple_stmt_iterator gsi;

	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      gcall *stmt;
	      struct pt_solution *pt;
	      varinfo_t vi, fi;
	      tree decl;

	      stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
	      if (!stmt)
		continue;

	      /* Handle direct calls to functions with body.  */
	      decl = gimple_call_fndecl (stmt);

	      {
		/* OpenMP/OpenACC outlined-region launches name the real
		   callee in an argument; use it when it is in this
		   partition.  */
		tree called_decl = NULL_TREE;
		if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
		  called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
		else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
		  called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);

		if (called_decl != NULL_TREE
		    && !fndecl_maybe_in_other_partition (called_decl))
		  decl = called_decl;
	      }

	      if (decl
		  && (fi = lookup_vi_for_tree (decl))
		  && fi->is_fn_info)
		{
		  *gimple_call_clobber_set (stmt)
		     = find_what_var_points_to
			 (node->decl, first_vi_for_offset (fi, fi_clobbers));
		  *gimple_call_use_set (stmt)
		     = find_what_var_points_to
			 (node->decl, first_vi_for_offset (fi, fi_uses));
		}
	      /* Handle direct calls to external functions.  */
	      else if (decl && (!fi || fi->decl))
		{
		  pt = gimple_call_use_set (stmt);
		  if (gimple_call_flags (stmt) & ECF_CONST)
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (node->decl, vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly used by calls.  */
		      /* ??? ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }

		  pt = gimple_call_clobber_set (stmt);
		  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
		    memset (pt, 0, sizeof (struct pt_solution));
		  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
		    {
		      *pt = find_what_var_points_to (node->decl, vi);
		      /* Escaped (and thus nonlocal) variables are always
			 implicitly clobbered by calls.  */
		      /* ??? ESCAPED can be empty even though NONLOCAL
			 always escaped.  */
		      pt->nonlocal = 1;
		      pt->ipa_escaped = 1;
		    }
		  else
		    {
		      /* If there is nothing special about this call then
			 we have made everything that is used also escape.  */
		      *pt = ipa_escaped_pt;
		      pt->nonlocal = 1;
		    }
		}
	      /* Handle indirect calls.  */
	      else if ((fi = get_fi_for_callee (stmt)))
		{
		  /* We need to accumulate all clobbers/uses of all possible
		     callees.  */
		  fi = get_varinfo (find (fi->id));
		  /* If we cannot constrain the set of functions we'll end up
		     calling we end up using/clobbering everything.  */
		  if (bitmap_bit_p (fi->solution, anything_id)
		      || bitmap_bit_p (fi->solution, nonlocal_id)
		      || bitmap_bit_p (fi->solution, escaped_id))
		    {
		      pt_solution_reset (gimple_call_clobber_set (stmt));
		      pt_solution_reset (gimple_call_use_set (stmt));
		    }
		  else
		    {
		      bitmap_iterator bi;
		      unsigned i;
		      struct pt_solution *uses, *clobbers;

		      uses = gimple_call_use_set (stmt);
		      clobbers = gimple_call_clobber_set (stmt);
		      memset (uses, 0, sizeof (struct pt_solution));
		      memset (clobbers, 0, sizeof (struct pt_solution));
		      EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
			{
			  struct pt_solution sol;

			  vi = get_varinfo (i);
			  if (!vi->is_fn_info)
			    {
			      /* ??? We could be more precise here?  */
			      uses->nonlocal = 1;
			      uses->ipa_escaped = 1;
			      clobbers->nonlocal = 1;
			      clobbers->ipa_escaped = 1;
			      continue;
			    }

			  if (!uses->anything)
			    {
			      sol = find_what_var_points_to
				      (node->decl,
				       first_vi_for_offset (vi, fi_uses));
			      pt_solution_ior_into (uses, &sol);
			    }
			  if (!clobbers->anything)
			    {
			      sol = find_what_var_points_to
				      (node->decl,
				       first_vi_for_offset (vi, fi_clobbers));
			      pt_solution_ior_into (clobbers, &sol);
			    }
			}
		    }
		}
	      else
		gcc_unreachable ();
	    }
	}

      fn->gimple_df->ipa_pta = true;

      /* We have to re-set the final-solution cache after each function
	 because what is a "global" is dependent on function context.  */
      final_solutions->empty ();
      obstack_free (&final_solutions_obstack, NULL);
      gcc_obstack_init (&final_solutions_obstack);
    }

  delete_points_to_sets ();

  in_ipa_mode = 0;

  return 0;
}
/* Pass definition for whole-program (IPA) points-to analysis.  */

namespace {

const pass_data pass_data_ipa_pta =
{
  SIMPLE_IPA_PASS, /* type */
  "pta", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PTA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pta : public simple_ipa_opt_pass
{
public:
  pass_ipa_pta (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (optimize
	      && flag_ipa_pta
	      /* Don't bother doing anything if the program has errors.  */
	      && !seen_error ());
    }

  opt_pass * clone () { return new pass_ipa_pta (m_ctxt); }

  virtual unsigned int execute (function *) { return ipa_pta_execute (); }

}; // class pass_ipa_pta

} // anon namespace

simple_ipa_opt_pass *
make_pass_ipa_pta (gcc::context *ctxt)
{
  return new pass_ipa_pta (ctxt);
}