typeck.c (cp_build_function_call_vec): When mark_used fails unconditionally return...
[official-gcc.git] / gcc / tree-ssa-structalias.c
blob2e2b0e8cb1e87576b03a5ccc62c29403f2f46926
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2019 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "tree-pass.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "diagnostic-core.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "stmt.h"
37 #include "gimple-iterator.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "params.h"
41 #include "gimple-walk.h"
42 #include "varasm.h"
43 #include "stringpool.h"
44 #include "attribs.h"
45 #include "tree-ssa.h"
47 /* The idea behind this analyzer is to generate set constraints from the
48 program, then solve the resulting constraints in order to generate the
49 points-to sets.
51 Set constraints are a way of modeling program analysis problems that
52 involve sets. They consist of an inclusion constraint language,
53 describing the variables (each variable is a set) and operations that
54 are involved on the variables, and a set of rules that derive facts
55 from these operations. To solve a system of set constraints, you derive
56 all possible facts under the rules, which gives you the correct sets
57 as a consequence.
59 See "Efficient Field-sensitive pointer analysis for C" by "David
60 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
61 http://citeseer.ist.psu.edu/pearce04efficient.html
63 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
64 of C Code in a Second" by ""Nevin Heintze and Olivier Tardieu" at
65 http://citeseer.ist.psu.edu/heintze01ultrafast.html
67 There are three types of real constraint expressions, DEREF,
68 ADDRESSOF, and SCALAR. Each constraint expression consists
69 of a constraint type, a variable, and an offset.
71 SCALAR is a constraint expression type used to represent x, whether
72 it appears on the LHS or the RHS of a statement.
73 DEREF is a constraint expression type used to represent *x, whether
74 it appears on the LHS or the RHS of a statement.
75 ADDRESSOF is a constraint expression used to represent &x, whether
76 it appears on the LHS or the RHS of a statement.
78 Each pointer variable in the program is assigned an integer id, and
79 each field of a structure variable is assigned an integer id as well.
81 Structure variables are linked to their list of fields through a "next
82 field" in each variable that points to the next field in offset
83 order.
84 Each variable for a structure field has
86 1. "size", that tells the size in bits of that field.
87 2. "fullsize, that tells the size in bits of the entire structure.
88 3. "offset", that tells the offset in bits from the beginning of the
89 structure to this field.
91 Thus,
92 struct f
94 int a;
95 int b;
96 } foo;
97 int *bar;
99 looks like
101 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
102 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
103 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
106 In order to solve the system of set constraints, the following is
107 done:
109 1. Each constraint variable x has a solution set associated with it,
110 Sol(x).
112 2. Constraints are separated into direct, copy, and complex.
113 Direct constraints are ADDRESSOF constraints that require no extra
114 processing, such as P = &Q
115 Copy constraints are those of the form P = Q.
116 Complex constraints are all the constraints involving dereferences
117 and offsets (including offsetted copies).
119 3. All direct constraints of the form P = &Q are processed, such
120 that Q is added to Sol(P)
122 4. All complex constraints for a given constraint variable are stored in a
123 linked list attached to that variable's node.
125 5. A directed graph is built out of the copy constraints. Each
126 constraint variable is a node in the graph, and an edge from
127 Q to P is added for each copy constraint of the form P = Q
129 6. The graph is then walked, and solution sets are
130 propagated along the copy edges, such that an edge from Q to P
131 causes Sol(P) <- Sol(P) union Sol(Q).
133 7. As we visit each node, all complex constraints associated with
134 that node are processed by adding appropriate copy edges to the graph, or the
135 appropriate variables to the solution set.
137 8. The process of walking the graph is iterated until no solution
138 sets change.
140 Prior to walking the graph in steps 6 and 7, We perform static
141 cycle elimination on the constraint graph, as well
142 as off-line variable substitution.
144 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
145 on and turned into anything), but isn't. You can just see what offset
146 inside the pointed-to struct it's going to access.
148 TODO: Constant bounded arrays can be handled as if they were structs of the
149 same number of elements.
151 TODO: Modeling heap and incoming pointers becomes much better if we
152 add fields to them as we discover them, which we could do.
154 TODO: We could handle unions, but to be honest, it's probably not
155 worth the pain or slowdown. */
157 /* IPA-PTA optimizations possible.
159 When the indirect function called is ANYTHING we can add disambiguation
160 based on the function signatures (or simply the parameter count which
161 is the varinfo size). We also do not need to consider functions that
162 do not have their address taken.
164 The is_global_var bit which marks escape points is overly conservative
165 in IPA mode. Split it to is_escape_point and is_global_var - only
166 externally visible globals are escape points in IPA mode.
167 There is now is_ipa_escape_point but this is only used in a few
168 selected places.
170 The way we introduce DECL_PT_UID to avoid fixing up all points-to
171 sets in the translation unit when we copy a DECL during inlining
172 pessimizes precision. The advantage is that the DECL_PT_UID keeps
173 compile-time and memory usage overhead low - the points-to sets
174 do not grow or get unshared as they would during a fixup phase.
175 An alternative solution is to delay IPA PTA until after all
176 inlining transformations have been applied.
178 The way we propagate clobber/use information isn't optimized.
179 It should use a new complex constraint that properly filters
180 out local variables of the callee (though that would make
181 the sets invalid after inlining). OTOH we might as well
182 admit defeat to WHOPR and simply do all the clobber/use analysis
183 and propagation after PTA finished but before we threw away
184 points-to information for memory variables. WHOPR and PTA
185 do not play along well anyway - the whole constraint solving
186 would need to be done in WPA phase and it will be very interesting
187 to apply the results to local SSA names during LTRANS phase.
189 We probably should compute a per-function unit-ESCAPE solution
190 propagating it simply like the clobber / uses solutions. The
191 solution can go alongside the non-IPA espaced solution and be
192 used to query which vars escape the unit through a function.
193 This is also required to make the escaped-HEAP trick work in IPA mode.
195 We never put function decls in points-to sets so we do not
196 keep the set of called functions for indirect calls.
198 And probably more. */
/* Whether the analysis treats structure fields as separate constraint
   variables (field-sensitive analysis).  */
static bool use_field_sensitive = true;

/* Non-zero when running the interprocedural (IPA) variant of the
   points-to analysis.  */
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps. */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets. */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables. */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps. */
static bitmap_obstack iteration_obstack;

static unsigned int create_variable_info_for (tree, const char *, bool);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

struct constraint;
typedef struct constraint *constraint_t;

/* Like EXECUTE_IF_SET_IN_BITMAP but tolerate bitmap A being NULL,
   in which case the loop body is not entered at all.  */
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
/* Counters collected while building and solving the constraint system,
   reported when dumping is enabled.  */
static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;
/* Per-constraint-variable information.  One of these exists for every
   constraint variable participating in the analysis: program variables,
   individual structure fields, and artificial variables created by the
   solver itself.  */
struct variable_info
{
  /* ID of this variable  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable.  */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var : 1;

  /* True if this is a register variable.  */
  unsigned int is_reg_var : 1;

  /* True if this field may contain pointers.  */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers.  */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a heap var created for a restrict qualified
     pointer.  */
  unsigned int is_restrict_var : 1;

  /* True if this represents a global variable.  */
  unsigned int is_global_var : 1;

  /* True if this represents a module escape point for IPA analysis.  */
  unsigned int is_ipa_escape_point : 1;

  /* True if this represents a IPA function info.  */
  unsigned int is_fn_info : 1;

  /* ??? Store somewhere better.  */
  unsigned short ruid;

  /* The ID of the variable for the next field in this structure
     or zero for the last field in this structure.  */
  unsigned next;

  /* The ID of the variable for the first field in this structure.  */
  unsigned head;

  /* Offset of this variable, in bits, from the base variable  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* In IPA mode the shadow UID in case the variable needs to be duplicated in
     the final points-to solution because it reaches its containing
     function recursively.  Zero if none is needed.  */
  unsigned int shadow_var_uid;

  /* Name of this variable  */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;
static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
						   unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
static inline bool type_can_have_subvars (const_tree);
static void make_param_constraints (varinfo_t);

/* Pool of variable info structures.  */
static object_allocator<variable_info> variable_info_pool
  ("Variable info pool");

/* Map varinfo to final pt_solution.  */
static hash_map<varinfo_t, pt_solution *> *final_solutions;
struct obstack final_solutions_obstack;

/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id.  */
static vec<varinfo_t> varmap;
340 /* Return the varmap element N */
342 static inline varinfo_t
343 get_varinfo (unsigned int n)
345 return varmap[n];
348 /* Return the next variable in the list of sub-variables of VI
349 or NULL if VI is the last sub-variable. */
351 static inline varinfo_t
352 vi_next (varinfo_t vi)
354 return get_varinfo (vi->next);
/* Static IDs for the special variables.  Variable ID zero is unused
   and used as terminator for the sub-variable chain.  */
enum { nothing_id = 1, anything_id = 2, string_id = 3,
       escaped_id = 4, nonlocal_id = 5,
       storedanything_id = 6, integer_id = 7 };
363 /* Return a new variable info structure consisting for a variable
364 named NAME, and using constraint graph node NODE. Append it
365 to the vector of variable info structures. */
367 static varinfo_t
368 new_var_info (tree t, const char *name, bool add_id)
370 unsigned index = varmap.length ();
371 varinfo_t ret = variable_info_pool.allocate ();
373 if (dump_file && add_id)
375 char *tempname = xasprintf ("%s(%d)", name, index);
376 name = ggc_strdup (tempname);
377 free (tempname);
380 ret->id = index;
381 ret->name = name;
382 ret->decl = t;
383 /* Vars without decl are artificial and do not have sub-variables. */
384 ret->is_artificial_var = (t == NULL_TREE);
385 ret->is_special_var = false;
386 ret->is_unknown_size_var = false;
387 ret->is_full_var = (t == NULL_TREE);
388 ret->is_heap_var = false;
389 ret->may_have_pointers = true;
390 ret->only_restrict_pointers = false;
391 ret->is_restrict_var = false;
392 ret->ruid = 0;
393 ret->is_global_var = (t == NULL_TREE);
394 ret->is_ipa_escape_point = false;
395 ret->is_fn_info = false;
396 if (t && DECL_P (t))
397 ret->is_global_var = (is_global_var (t)
398 /* We have to treat even local register variables
399 as escape points. */
400 || (VAR_P (t) && DECL_HARD_REGISTER (t)));
401 ret->is_reg_var = (t && TREE_CODE (t) == SSA_NAME);
402 ret->solution = BITMAP_ALLOC (&pta_obstack);
403 ret->oldsolution = NULL;
404 ret->next = 0;
405 ret->shadow_var_uid = 0;
406 ret->head = ret->id;
408 stats.total_vars++;
410 varmap.safe_push (ret);
412 return ret;
/* A map mapping call statements to per-stmt variables for uses
   and clobbers specific to the call.  */
static hash_map<gimple *, varinfo_t> *call_stmt_vars;
419 /* Lookup or create the variable for the call statement CALL. */
421 static varinfo_t
422 get_call_vi (gcall *call)
424 varinfo_t vi, vi2;
426 bool existed;
427 varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
428 if (existed)
429 return *slot_p;
431 vi = new_var_info (NULL_TREE, "CALLUSED", true);
432 vi->offset = 0;
433 vi->size = 1;
434 vi->fullsize = 2;
435 vi->is_full_var = true;
436 vi->is_reg_var = true;
438 vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED", true);
439 vi2->offset = 1;
440 vi2->size = 1;
441 vi2->fullsize = 2;
442 vi2->is_full_var = true;
443 vi2->is_reg_var = true;
445 vi->next = vi2->id;
447 *slot_p = vi;
448 return vi;
451 /* Lookup the variable for the call statement CALL representing
452 the uses. Returns NULL if there is nothing special about this call. */
454 static varinfo_t
455 lookup_call_use_vi (gcall *call)
457 varinfo_t *slot_p = call_stmt_vars->get (call);
458 if (slot_p)
459 return *slot_p;
461 return NULL;
464 /* Lookup the variable for the call statement CALL representing
465 the clobbers. Returns NULL if there is nothing special about this call. */
467 static varinfo_t
468 lookup_call_clobber_vi (gcall *call)
470 varinfo_t uses = lookup_call_use_vi (call);
471 if (!uses)
472 return NULL;
474 return vi_next (uses);
477 /* Lookup or create the variable for the call statement CALL representing
478 the uses. */
480 static varinfo_t
481 get_call_use_vi (gcall *call)
483 return get_call_vi (call);
486 /* Lookup or create the variable for the call statement CALL representing
487 the clobbers. */
489 static varinfo_t ATTRIBUTE_UNUSED
490 get_call_clobber_vi (gcall *call)
492 return vi_next (get_call_vi (call));
/* The three kinds of constraint expression: the variable itself
   (SCALAR, x), its dereference (DEREF, *x) and its address
   (ADDRESSOF, &x).  */
enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.  */
  HOST_WIDE_INT offset;
};

/* Use 0x8000... as special unknown offset.  */
#define UNKNOWN_OFFSET HOST_WIDE_INT_MIN

typedef struct constraint_expr ce_s;
static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
static void get_constraint_for (tree, vec<ce_s> *);
static void get_constraint_for_rhs (tree, vec<ce_s> *);
static void do_deref (vec<ce_s> *);
/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.  */
struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from.  */

static vec<constraint_t> constraints;
static object_allocator<constraint> constraint_pool ("Constraint pool");
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map.  */
  unsigned int size;

  /* Explicit successors of each node.  */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution).  */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution).  */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified.  */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution.  */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built).  */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets.  */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution.  */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding.  */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph.  */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes.  */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution.  */
  bitmap address_taken;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  */
  vec<constraint_t> *complex;
};

/* The single global constraint graph the solver operates on.  */
static constraint_graph_t graph;

/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  */
#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
625 /* Return the representative node for NODE, if NODE has been unioned
626 with another NODE.
627 This function performs path compression along the way to finding
628 the representative. */
630 static unsigned int
631 find (unsigned int node)
633 gcc_checking_assert (node < graph->size);
634 if (graph->rep[node] != node)
635 return graph->rep[node] = find (graph->rep[node]);
636 return node;
639 /* Union the TO and FROM nodes to the TO nodes.
640 Note that at some point in the future, we may want to do
641 union-by-rank, in which case we are going to have to return the
642 node we unified to. */
644 static bool
645 unite (unsigned int to, unsigned int from)
647 gcc_checking_assert (to < graph->size && from < graph->size);
648 if (to != from && graph->rep[from] != to)
650 graph->rep[from] = to;
651 return true;
653 return false;
656 /* Create a new constraint consisting of LHS and RHS expressions. */
658 static constraint_t
659 new_constraint (const struct constraint_expr lhs,
660 const struct constraint_expr rhs)
662 constraint_t ret = constraint_pool.allocate ();
663 ret->lhs = lhs;
664 ret->rhs = rhs;
665 return ret;
668 /* Print out constraint C to FILE. */
670 static void
671 dump_constraint (FILE *file, constraint_t c)
673 if (c->lhs.type == ADDRESSOF)
674 fprintf (file, "&");
675 else if (c->lhs.type == DEREF)
676 fprintf (file, "*");
677 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
678 if (c->lhs.offset == UNKNOWN_OFFSET)
679 fprintf (file, " + UNKNOWN");
680 else if (c->lhs.offset != 0)
681 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
682 fprintf (file, " = ");
683 if (c->rhs.type == ADDRESSOF)
684 fprintf (file, "&");
685 else if (c->rhs.type == DEREF)
686 fprintf (file, "*");
687 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
688 if (c->rhs.offset == UNKNOWN_OFFSET)
689 fprintf (file, " + UNKNOWN");
690 else if (c->rhs.offset != 0)
691 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
/* Forward declarations of the debug entry points below; these are
   extern (not static) so they can be called from the debugger.  */
void debug_constraint (constraint_t);
void debug_constraints (void);
void debug_constraint_graph (void);
void debug_solution_for_var (unsigned int);
void debug_sa_points_to_info (void);
void debug_varinfo (varinfo_t);
void debug_varmap (void);
703 /* Print out constraint C to stderr. */
705 DEBUG_FUNCTION void
706 debug_constraint (constraint_t c)
708 dump_constraint (stderr, c);
709 fprintf (stderr, "\n");
712 /* Print out all constraints to FILE */
714 static void
715 dump_constraints (FILE *file, int from)
717 int i;
718 constraint_t c;
719 for (i = from; constraints.iterate (i, &c); i++)
720 if (c)
722 dump_constraint (file, c);
723 fprintf (file, "\n");
727 /* Print out all constraints to stderr. */
729 DEBUG_FUNCTION void
730 debug_constraints (void)
732 dump_constraints (stderr, 0);
/* Print the constraint graph in dot format to FILE.  Nodes above
   FIRST_REF_NODE are the artificial dereference nodes and are printed
   as "*name".  Only representative (non-unified) nodes are shown.  */

static void
dump_constraint_graph (FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, " node [\n shape = box\n ]\n");
  fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
  fprintf (file, "\n // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      /* Skip nodes that have been unified into another node.  */
      if (find (i) != i)
	continue;
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->complex[i].exists ())
	{
	  unsigned j;
	  constraint_t c;
	  /* Attach the node's complex constraints as its dot label.  */
	  fprintf (file, " [label=\"\\N\\n");
	  for (j = 0; graph->complex[i].iterate (j, &c); ++j)
	    {
	      dump_constraint (file, c);
	      fprintf (file, "\\l");
	    }
	  fprintf (file, "\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (find (i) != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
	{
	  unsigned to = find (j);
	  /* Self edges after unification are not interesting.  */
	  if (i == to)
	    continue;
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (to < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (to)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
810 /* Print out the constraint graph to stderr. */
812 DEBUG_FUNCTION void
813 debug_constraint_graph (void)
815 dump_constraint_graph (stderr);
818 /* SOLVER FUNCTIONS
820 The solver is a simple worklist solver, that works on the following
821 algorithm:
823 sbitmap changed_nodes = all zeroes;
824 changed_count = 0;
825 For each node that is not already collapsed:
826 changed_count++;
827 set bit in changed nodes
829 while (changed_count > 0)
831 compute topological ordering for constraint graph
833 find and collapse cycles in the constraint graph (updating
834 changed if necessary)
836 for each node (n) in the graph in topological order:
837 changed_count--;
839 Process each complex constraint associated with the node,
840 updating changed if necessary.
842 For each outgoing edge from n, propagate the solution from n to
843 the destination of the edge, updating changed as necessary.
845 } */
847 /* Return true if two constraint expressions A and B are equal. */
849 static bool
850 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
852 return a.type == b.type && a.var == b.var && a.offset == b.offset;
855 /* Return true if constraint expression A is less than constraint expression
856 B. This is just arbitrary, but consistent, in order to give them an
857 ordering. */
859 static bool
860 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
862 if (a.type == b.type)
864 if (a.var == b.var)
865 return a.offset < b.offset;
866 else
867 return a.var < b.var;
869 else
870 return a.type < b.type;
873 /* Return true if constraint A is less than constraint B. This is just
874 arbitrary, but consistent, in order to give them an ordering. */
876 static bool
877 constraint_less (const constraint_t &a, const constraint_t &b)
879 if (constraint_expr_less (a->lhs, b->lhs))
880 return true;
881 else if (constraint_expr_less (b->lhs, a->lhs))
882 return false;
883 else
884 return constraint_expr_less (a->rhs, b->rhs);
887 /* Return true if two constraints A and B are equal. */
889 static bool
890 constraint_equal (struct constraint a, struct constraint b)
892 return constraint_expr_equal (a.lhs, b.lhs)
893 && constraint_expr_equal (a.rhs, b.rhs);
897 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
899 static constraint_t
900 constraint_vec_find (vec<constraint_t> vec,
901 struct constraint lookfor)
903 unsigned int place;
904 constraint_t found;
906 if (!vec.exists ())
907 return NULL;
909 place = vec.lower_bound (&lookfor, constraint_less);
910 if (place >= vec.length ())
911 return NULL;
912 found = vec[place];
913 if (!constraint_equal (*found, lookfor))
914 return NULL;
915 return found;
918 /* Union two constraint vectors, TO and FROM. Put the result in TO.
919 Returns true of TO set is changed. */
921 static bool
922 constraint_set_union (vec<constraint_t> *to,
923 vec<constraint_t> *from)
925 int i;
926 constraint_t c;
927 bool any_change = false;
929 FOR_EACH_VEC_ELT (*from, i, c)
931 if (constraint_vec_find (*to, *c) == NULL)
933 unsigned int place = to->lower_bound (c, constraint_less);
934 to->safe_insert (place, c);
935 any_change = true;
938 return any_change;
941 /* Expands the solution in SET to all sub-fields of variables included. */
943 static bitmap
944 solution_set_expand (bitmap set, bitmap *expanded)
946 bitmap_iterator bi;
947 unsigned j;
949 if (*expanded)
950 return *expanded;
952 *expanded = BITMAP_ALLOC (&iteration_obstack);
954 /* In a first pass expand to the head of the variables we need to
955 add all sub-fields off. This avoids quadratic behavior. */
956 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
958 varinfo_t v = get_varinfo (j);
959 if (v->is_artificial_var
960 || v->is_full_var)
961 continue;
962 bitmap_set_bit (*expanded, v->head);
965 /* In the second pass now expand all head variables with subfields. */
966 EXECUTE_IF_SET_IN_BITMAP (*expanded, 0, j, bi)
968 varinfo_t v = get_varinfo (j);
969 if (v->head != j)
970 continue;
971 for (v = vi_next (v); v != NULL; v = vi_next (v))
972 bitmap_set_bit (*expanded, v->id);
975 /* And finally set the rest of the bits from SET. */
976 bitmap_ior_into (*expanded, set);
978 return *expanded;
/* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
   process.  *EXPANDED_DELTA caches the sub-field expansion of DELTA for
   the UNKNOWN_OFFSET case.  Returns true if TO changed.  */

static bool
set_union_with_increment  (bitmap to, bitmap delta, HOST_WIDE_INT inc,
			   bitmap *expanded_delta)
{
  bool changed = false;
  bitmap_iterator bi;
  unsigned int i;

  /* If the solution of DELTA contains anything it is good enough to transfer
     this to TO.  */
  if (bitmap_bit_p (delta, anything_id))
    return bitmap_set_bit (to, anything_id);

  /* If the offset is unknown we have to expand the solution to
     all subfields.  */
  if (inc == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      changed |= bitmap_ior_into (to, delta);
      return changed;
    }

  /* For non-zero offset union the offsetted solution into the destination.  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* If this is a variable with just one field just set its bit
         in the result.  */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	changed |= bitmap_set_bit (to, i);
      else
	{
	  HOST_WIDE_INT fieldoffset = vi->offset + inc;
	  unsigned HOST_WIDE_INT size = vi->size;

	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.  */
	  if (fieldoffset < 0)
	    vi = get_varinfo (vi->head);
	  else
	    vi = first_or_preceding_vi_for_offset (vi, fieldoffset);

	  do
	    {
	      changed |= bitmap_set_bit (to, vi->id);
	      if (vi->is_full_var
		  || vi->next == 0)
		break;

	      /* We have to include all fields that overlap the current field
	         shifted by inc.  */
	      vi = vi_next (vi);
	    }
	  while (vi->offset < fieldoffset + size);
	}
    }

  return changed;
}
1047 /* Insert constraint C into the list of complex constraints for graph
1048 node VAR. */
1050 static void
1051 insert_into_complex (constraint_graph_t graph,
1052 unsigned int var, constraint_t c)
1054 vec<constraint_t> complex = graph->complex[var];
1055 unsigned int place = complex.lower_bound (c, constraint_less);
1057 /* Only insert constraints that do not already exist. */
1058 if (place >= complex.length ()
1059 || !constraint_equal (*c, *complex[place]))
1060 graph->complex[var].safe_insert (place, c);
/* Condense two variable nodes into a single variable node, by moving
   all associated info from FROM to TO.  Returns true if TO node's
   constraint set changes after the merge.  */

static bool
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
{
  unsigned int i;
  constraint_t c;
  bool any_change = false;

  /* FROM must already have been unified into TO in the union-find
     structure before its constraints are migrated.  */
  gcc_checking_assert (find (from) == to);

  /* Move all complex constraints from src node into to node  */
  FOR_EACH_VEC_ELT (graph->complex[from], i, c)
    {
      /* In complex constraints for node FROM, we may have either
	 a = *FROM, and *FROM = a, or an offseted constraint which are
	 always added to the rhs node's constraints.  */

      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else if (c->lhs.type == DEREF)
	c->lhs.var = to;
      else
	c->rhs.var = to;
    }
  /* Union the (rewritten) constraint lists and drop FROM's copy.  */
  any_change = constraint_set_union (&graph->complex[to],
				     &graph->complex[from]);
  graph->complex[from].release ();
  return any_change;
}
1100 /* Remove edges involving NODE from GRAPH. */
1102 static void
1103 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1105 if (graph->succs[node])
1106 BITMAP_FREE (graph->succs[node]);
/* Merge GRAPH nodes FROM and TO into node TO.  After this, FROM's
   successor edges are folded into TO and FROM has no edges left.  */

static void
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
		   unsigned int from)
{
  if (graph->indirect_cycles[from] != -1)
    {
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other.  */
      if (graph->indirect_cycles[to] == -1)
	graph->indirect_cycles[to] = graph->indirect_cycles[from];
    }

  /* Merge all the successor edges.  */
  if (graph->succs[from])
    {
      if (!graph->succs[to])
	graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
      bitmap_ior_into (graph->succs[to],
		       graph->succs[from]);
    }

  /* FROM's edges are now redundant; release them.  */
  clear_edges_for_node (graph, from);
}
/* Add an indirect graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  Self-edges are ignored.  */

static void
add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
			 unsigned int from)
{
  if (to == from)
    return;

  /* Implicit predecessor bitmaps are allocated lazily.  */
  if (!graph->implicit_preds[to])
    graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);

  /* bitmap_set_bit returns true only when the bit was newly set,
     so the statistic counts distinct edges.  */
  if (bitmap_set_bit (graph->implicit_preds[to], from))
    stats.num_implicit_edges++;
}
1157 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1158 it doesn't exist in the graph already.
1159 Return false if the edge already existed, true otherwise. */
1161 static void
1162 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1163 unsigned int from)
1165 if (!graph->preds[to])
1166 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1167 bitmap_set_bit (graph->preds[to], from);
/* Add a graph edge to GRAPH, going from FROM to TO if
   it doesn't exist in the graph already.
   Return false if the edge already existed, true otherwise.  */

static bool
add_graph_edge (constraint_graph_t graph, unsigned int to,
		unsigned int from)
{
  if (to == from)
    {
      /* Self-edges never change a solution; skip them.  */
      return false;
    }
  else
    {
      bool r = false;

      if (!graph->succs[from])
	graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
      if (bitmap_set_bit (graph->succs[from], to))
	{
	  r = true;
	  /* Only count edges between non-REF nodes in the statistics.  */
	  if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
	    stats.num_edges++;
	}
      return r;
    }
}
/* Initialize the constraint graph structure to contain SIZE nodes.
   rep[] (union-find representatives) starts as the identity map;
   pe_rep[] and indirect_cycles[] start as -1 (none).  */

static void
init_graph (unsigned int size)
{
  unsigned int j;

  graph = XCNEW (struct constraint_graph);
  graph->size = size;
  graph->succs = XCNEWVEC (bitmap, graph->size);
  graph->indirect_cycles = XNEWVEC (int, graph->size);
  graph->rep = XNEWVEC (unsigned int, graph->size);
  /* ??? Macros do not support template types with multiple arguments,
     so we use a typedef to work around it.  */
  typedef vec<constraint_t> vec_constraint_t_heap;
  graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
  graph->pe = XCNEWVEC (unsigned int, graph->size);
  graph->pe_rep = XNEWVEC (int, graph->size);

  for (j = 0; j < graph->size; j++)
    {
      graph->rep[j] = j;
      graph->pe_rep[j] = -1;
      graph->indirect_cycles[j] = -1;
    }
}
/* Build the constraint graph, adding only predecessor edges right now.
   Also classifies nodes as direct/indirect and records address-taken
   variables, which the offline variable substitution relies on.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  /* Node 0 is skipped; all non-special vars start out direct.  */
  for (j = 1; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes.  */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
	  v = get_varinfo (rhsvar);
	  if (!v->is_full_var)
	    {
	      /* Walk every field of the variable, starting at its head.  */
	      v = get_varinfo (v->head);
	      do
		{
		  bitmap_clear_bit (graph->direct_nodes, v->id);
		  v = vi_next (v);
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Offsetted constraints make the involved variable indirect.  */
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}
/* Build the constraint graph, adding successor edges.  Works on the
   union-find representatives (find ()) of the constraint variables.  */

static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      /* Constraints may have been nullified by earlier rewriting.  */
      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);

      if (lhs.type == DEREF)
	{
	  /* *x = y: edge from y to the REF node of x.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y: edge from the REF node of y to x.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y */
	  gcc_checking_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y: plain copy edge.  */
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
	add_graph_edge (graph, find (i), t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, find (escaped_id), t);
}
/* Changed variables on the last iteration.  Worklist for the solver:
   a variable's id is set here whenever its solution bitmap grows.  */
static bitmap changed;
/* Strongly Connected Component visitation info.  */

struct scc_info
{
  scc_info (size_t size);
  ~scc_info ();

  /* Nodes already visited by the DFS.  */
  auto_sbitmap visited;
  /* Nodes whose SCC has been fully processed.  */
  auto_sbitmap deleted;
  /* Per-node DFS discovery numbers.  */
  unsigned int *dfs;
  /* Maps each node to its SCC representative (identity initially).  */
  unsigned int *node_mapping;
  /* Next DFS number to hand out.  */
  int current_index;
  /* Tarjan/Nuutila stack of candidate SCC members.  */
  auto_vec<unsigned> scc_stack;
};
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   connected components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      /* Successor ids beyond LAST_REF_NODE are not graph nodes.  */
      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	scc_visit (graph, si, w);

      /* Propagate the lowest reachable DFS number (lowlink).  */
      unsigned int t = find (w);
      gcc_checking_assert (find (n) == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      if (si->scc_stack.length () > 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  /* Pop every stacked node belonging to this SCC.  */
	  while (si->scc_stack.length () != 0
		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
	    {
	      unsigned int w = si->scc_stack.pop ();

	      bitmap_set_bit (scc, w);
	    }

	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  /* REF nodes record the cycle in indirect_cycles instead
		     of being unified directly.  */
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  Merges edges, complex
   constraints and solution bitmaps, then releases FROM's storage.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  gcc_checking_assert (to != from && find (to) == to);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  merge_graph_nodes (graph, to, from);
  if (merge_node_constraints (graph, to, from))
    {
      if (update_changed)
	bitmap_set_bit (changed, to);
    }

  /* Mark TO as changed if FROM was changed. If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_clear_bit (changed, from))
    bitmap_set_bit (changed, to);
  varinfo_t fromvi = get_varinfo (from);
  if (fromvi->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      varinfo_t tovi = get_varinfo (to);
      if (bitmap_ior_into (tovi->solution, fromvi->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      BITMAP_FREE (fromvi->solution);
      if (fromvi->oldsolution)
	BITMAP_FREE (fromvi->oldsolution);

      /* Discard TO's stale oldsolution once solving has started, so the
	 next iteration recomputes the delta against the merged set.  */
      if (stats.iterations > 0
	  && tovi->oldsolution)
	BITMAP_FREE (tovi->oldsolution);
    }

  /* Unification may have created a self-edge on TO; remove it.  */
  if (graph->succs[to])
    bitmap_clear_bit (graph->succs[to], to);
}
/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse*.  */
  vec<unsigned> topo_order;
};
1565 /* Initialize and return a topological info structure. */
1567 static struct topo_info *
1568 init_topo_info (void)
1570 size_t size = graph->size;
1571 struct topo_info *ti = XNEW (struct topo_info);
1572 ti->visited = sbitmap_alloc (size);
1573 bitmap_clear (ti->visited);
1574 ti->topo_order.create (1);
1575 return ti;
1579 /* Free the topological sort info pointed to by TI. */
1581 static void
1582 free_topo_info (struct topo_info *ti)
1584 sbitmap_free (ti->visited);
1585 ti->topo_order.release ();
1586 free (ti);
/* Visit the graph in topological order starting at node N, and store
   the order in the topo_info structure TI.  Post-order DFS: a node is
   pushed after all its successors, so topo_order is reversed.  */

static void
topo_visit (constraint_graph_t graph, struct topo_info *ti,
	    unsigned int n)
{
  bitmap_iterator bi;
  unsigned int j;

  bitmap_set_bit (ti->visited, n);

  if (graph->succs[n])
    EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
      {
	if (!bitmap_bit_p (ti->visited, j))
	  topo_visit (graph, ti, j);
      }

  ti->topo_order.safe_push (n);
}
/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  *EXPANDED_DELTA caches the expansion of
   DELTA when OFF is unknown.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta, bitmap *expanded_delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at with offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned HOST_WIDE_INT size = v->size;
      unsigned int t;

      if (v->is_full_var)
	;
      else if (roffset != 0)
	{
	  /* Find the field the offsetted pointer lands in; offsets
	     before the variable use the first field.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by roffset.  */
      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      bitmap_set_bit (changed, lhs);
    }
}
/* Process a constraint C that represents *(x + off) = y using DELTA
   as the starting solution for x.  *EXPANDED_DELTA caches the
   expansion of DELTA when OFF is unknown.  */

static void
do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    bitmap_set_bit (changed, t);
	}
      return;
    }

  /* If we do not know at with offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j) */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;
      unsigned HOST_WIDE_INT size = v->size;

      if (v->is_full_var)
	;
      else if (loff != 0)
	{
	  /* Offsets before the variable fall back to the first field.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by loff.  */
      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
		bitmap_set_bit (changed, t);
	    }

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }
}
/* Handle a non-simple (simple meaning requires no iteration),
   constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved).
   Dispatches to do_ds_constraint / do_sd_constraint / offsetted copy.  */

static void
do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
		       bitmap *expanded_delta)
{
  if (c->lhs.type == DEREF)
    {
      if (c->rhs.type == ADDRESSOF)
	{
	  /* *x = &y is rewritten before solving; it cannot occur here.  */
	  gcc_unreachable ();
	}
      else
	{
	  /* *x = y */
	  do_ds_constraint (c, delta, expanded_delta);
	}
    }
  else if (c->rhs.type == DEREF)
    {
      /* x = *y */
      if (!(get_varinfo (c->lhs.var)->is_special_var))
	do_sd_constraint (graph, c, delta, expanded_delta);
    }
  else
    {
      /* x = y + off: union the offsetted delta into x's solution.  */
      bitmap tmp;
      bool flag = false;

      gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
			   && c->rhs.offset != 0 && c->lhs.offset == 0);
      tmp = get_varinfo (c->lhs.var)->solution;

      flag = set_union_with_increment (tmp, delta, c->rhs.offset,
				       expanded_delta);

      if (flag)
	bitmap_set_bit (changed, c->lhs.var);
    }
}
/* Initialize and return a new SCC info structure with SIZE nodes:
   node_mapping starts as the identity, DFS numbers as zero.  */

scc_info::scc_info (size_t size) :
  visited (size), deleted (size), current_index (0), scc_stack (1)
{
  bitmap_clear (visited);
  bitmap_clear (deleted);
  node_mapping = XNEWVEC (unsigned int, size);
  dfs = XCNEWVEC (unsigned int, size);

  for (size_t i = 0; i < size; i++)
    node_mapping[i] = i;
}
/* Free an SCC info structure pointed to by SI */

scc_info::~scc_info ()
{
  free (node_mapping);
  free (dfs);
}
/* Find indirect cycles in GRAPH that occur, using strongly connected
   components, and note them in the indirect cycles map.

   This technique comes from Ben Hardekopf and Calvin Lin,
   "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
   Lines of Code", submitted to PLDI 2007.  */

static void
find_indirect_cycles (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  scc_info si (size);

  /* Only visit union-find representatives; members follow their rep.  */
  for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
    if (!bitmap_bit_p (si.visited, i) && find (i) == i)
      scc_visit (graph, &si, i);
}
/* Compute a topological ordering for GRAPH, and store the result in the
   topo_info structure TI.  Only union-find representatives are visited.  */

static void
compute_topo_order (constraint_graph_t graph,
		    struct topo_info *ti)
{
  unsigned int i;
  unsigned int size = graph->size;

  for (i = 0; i != size; ++i)
    if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
      topo_visit (graph, ti, i);
}
/* Structure used for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
{
  /* Precomputed hash of LABELS, used both for the table and as a
     cheap pre-filter in the equality test.  */
  hashval_t hashcode;
  /* Assigned equivalence class id; 0 means not yet assigned.  */
  unsigned int equivalence_class;
  /* The set of labels this class stands for.  */
  bitmap labels;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;

/* Equiv_class_label hashtable helpers.  */

struct equiv_class_hasher : free_ptr_hash <equiv_class_label>
{
  static inline hashval_t hash (const equiv_class_label *);
  static inline bool equal (const equiv_class_label *,
			    const equiv_class_label *);
};

/* Hash function for a equiv_class_label_t */

inline hashval_t
equiv_class_hasher::hash (const equiv_class_label *ecl)
{
  return ecl->hashcode;
}

/* Equality function for two equiv_class_label_t's.  */

inline bool
equiv_class_hasher::equal (const equiv_class_label *eql1,
			   const equiv_class_label *eql2)
{
  return (eql1->hashcode == eql2->hashcode
	  && bitmap_equal_p (eql1->labels, eql2->labels));
}

/* A hashtable for mapping a bitmap of labels->pointer equivalence
   classes.  */
static hash_table<equiv_class_hasher> *pointer_equiv_class_table;

/* A hashtable for mapping a bitmap of labels->location equivalence
   classes.  */
static hash_table<equiv_class_hasher> *location_equiv_class_table;
/* Look up the equivalence class entry in TABLE for the bitmap of
   LABELS, inserting a fresh entry (equivalence_class == 0) if none
   exists yet.  Returns the (possibly new) entry.  */

static equiv_class_label *
equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
			   bitmap labels)
{
  equiv_class_label **slot;
  equiv_class_label ecl;

  ecl.labels = labels;
  ecl.hashcode = bitmap_hash (labels);
  slot = table->find_slot (&ecl, INSERT);
  if (!*slot)
    {
      /* New entry: class 0 marks "not yet assigned".  */
      *slot = XNEW (struct equiv_class_label);
      (*slot)->labels = labels;
      (*slot)->hashcode = ecl.hashcode;
      (*slot)->equivalence_class = 0;
    }

  return *slot;
}
1971 /* Perform offline variable substitution.
1973 This is a worst case quadratic time way of identifying variables
1974 that must have equivalent points-to sets, including those caused by
1975 static cycles, and single entry subgraphs, in the constraint graph.
1977 The technique is described in "Exploiting Pointer and Location
1978 Equivalence to Optimize Pointer Analysis. In the 14th International
1979 Static Analysis Symposium (SAS), August 2007." It is known as the
1980 "HU" algorithm, and is equivalent to value numbering the collapsed
1981 constraint graph including evaluating unions.
1983 The general method of finding equivalence classes is as follows:
1984 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1985 Initialize all non-REF nodes to be direct nodes.
1986 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1987 variable}
1988 For each constraint containing the dereference, we also do the same
1989 thing.
1991 We then compute SCC's in the graph and unify nodes in the same SCC,
1992 including pts sets.
1994 For each non-collapsed node x:
1995 Visit all unvisited explicit incoming edges.
1996 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1997 where y->x.
1998 Lookup the equivalence class for pts(x).
1999 If we found one, equivalence_class(x) = found class.
2000 Otherwise, equivalence_class(x) = new class, and new_class is
2001 added to the lookup table.
2003 All direct nodes with the same equivalence class can be replaced
2004 with a single representative node.
2005 All unlabeled nodes (label == 0) are not pointers and all edges
2006 involving them can be eliminated.
2007 We perform these optimizations during rewrite_constraints
2009 In addition to pointer equivalence class finding, we also perform
2010 location equivalence class finding. This is the set of variables
2011 that always appear together in points-to sets. We use this to
2012 compress the size of the points-to sets. */
/* Current maximum pointer equivalence class id.  */
static int pointer_equiv_class;

/* Current maximum location equivalence class id.  */
static int location_equiv_class;
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  Works on the predecessor
   graph (explicit and implicit preds) and collapses each SCC onto a
   single representative via si->node_mapping.  */

static void
condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  gcc_checking_assert (si->node_mapping[n] == n);
  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      /* Propagate the lowest reachable DFS number (lowlink).  */
      unsigned int t = si->node_mapping[w];
      gcc_checking_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      unsigned int t = si->node_mapping[w];
      gcc_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      while (si->scc_stack.length () != 0
	     && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  unsigned int w = si->scc_stack.pop ();
	  si->node_mapping[w] = n;

	  /* The representative is direct only if every member is.  */
	  if (!bitmap_bit_p (graph->direct_nodes, w))
	    bitmap_clear_bit (graph->direct_nodes, n);

	  /* Unify our nodes.  */
	  if (graph->preds[w])
	    {
	      if (!graph->preds[n])
		graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->preds[n], graph->preds[w]);
	    }
	  if (graph->implicit_preds[w])
	    {
	      if (!graph->implicit_preds[n])
		graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->implicit_preds[n],
			       graph->implicit_preds[w]);
	    }
	  if (graph->points_to[w])
	    {
	      if (!graph->points_to[n])
		graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->points_to[n],
			       graph->points_to[w]);
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}
/* Label pointer equivalences.

   This performs a value numbering of the constraint graph to
   discover which variables will always have the same points-to sets
   under the current set of constraints.

   The way it value numbers is to store the set of points-to bits
   generated by the constraints and graph edges.  This is just used as a
   hash and equality comparison.  The *actual set of points-to bits* is
   completely irrelevant, in that we don't care about being able to
   extract them later.

   The equality values (currently bitmaps) just have to satisfy a few
   constraints, the main ones being:
   1. The combining operation must be order independent.
   2. The end result of a given set of operations must be unique iff the
      combination of input values is unique
   3. Hashable.  */

static void
label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i, first_pred;
  bitmap_iterator bi;

  bitmap_set_bit (si->visited, n);

  /* Label and union our incoming edges's points to sets.  */
  first_pred = -1U;
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      if (!bitmap_bit_p (si->visited, w))
	label_visit (graph, si, w);

      /* Skip unused edges  */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	{
	  if (!graph->points_to[n])
	    {
	      /* Defer allocating a bitmap until a second non-empty
		 predecessor shows up; a single predecessor can share.  */
	      if (first_pred == -1U)
		first_pred = w;
	      else
		{
		  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
		  bitmap_ior (graph->points_to[n],
			      graph->points_to[first_pred],
			      graph->points_to[w]);
		}
	    }
	  else
	    bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
	}
    }

  /* Indirect nodes get fresh variables and a new pointer equiv class.  */
  if (!bitmap_bit_p (graph->direct_nodes, n))
    {
      if (!graph->points_to[n])
	{
	  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	  if (first_pred != -1U)
	    bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
	}
      /* The fresh variable is the node's own REF id.  */
      bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
      graph->pointer_label[n] = pointer_equiv_class++;
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      ecl->equivalence_class = graph->pointer_label[n];
      return;
    }

  /* If there was only a single non-empty predecessor the pointer equiv
     class is the same.  */
  if (!graph->points_to[n])
    {
      if (first_pred != -1U)
	{
	  graph->pointer_label[n] = graph->pointer_label[first_pred];
	  graph->points_to[n] = graph->points_to[first_pred];
	}
      return;
    }

  if (!bitmap_empty_p (graph->points_to[n]))
    {
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = pointer_equiv_class++;
      else
	{
	  /* Share the canonical bitmap of the existing class.  */
	  BITMAP_FREE (graph->points_to[n]);
	  graph->points_to[n] = ecl->labels;
	}
      graph->pointer_label[n] = ecl->equivalence_class;
    }
}
/* Print the pred graph in dot format to FILE, using SI's node_mapping
   to show only SCC representatives.  */

static void
dump_pred_graph (struct scc_info *si, FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      /* Skip nodes collapsed into another SCC representative.  */
      if (si->node_mapping[i] != i)
	continue;
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->points_to[i]
	  && !bitmap_empty_p (graph->points_to[i]))
	{
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
	  else
	    fprintf (file, "[label=\"*%s = {",
		     get_varinfo (i - FIRST_REF_NODE)->name);
	  unsigned j;
	  bitmap_iterator bi;
	  EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
	    fprintf (file, " %d", j);
	  fprintf (file, " }\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (si->node_mapping[i] != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
	{
	  unsigned from = si->node_mapping[j];
	  if (from < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (from)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
2288 /* Perform offline variable substitution, discovering equivalence
2289 classes, and eliminating non-pointer variables. */
/* Returns the scc_info with the SCC node mapping; the caller is expected
   to release it via free_var_substitution_info.  */
2291 static struct scc_info *
2292 perform_var_substitution (constraint_graph_t graph)
2294 unsigned int i;
2295 unsigned int size = graph->size;
2296 scc_info *si = new scc_info (size);
2298 bitmap_obstack_initialize (&iteration_obstack);
2299 pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
2300 location_equiv_class_table
2301 = new hash_table<equiv_class_hasher> (511);
/* Label 0 is reserved to mean "non-pointer", so real classes start at 1.  */
2302 pointer_equiv_class = 1;
2303 location_equiv_class = 1;
2305 /* Condense the nodes, which means to find SCC's, count incoming
2306 predecessors, and unite nodes in SCC's. */
2307 for (i = 1; i < FIRST_REF_NODE; i++)
2308 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2309 condense_visit (graph, si, si->node_mapping[i]);
2311 if (dump_file && (dump_flags & TDF_GRAPH))
2313 fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
2314 "in dot format:\n");
2315 dump_pred_graph (si, dump_file);
2316 fprintf (dump_file, "\n\n");
/* Reuse the visited bitmap for the second (labeling) DFS pass.  */
2319 bitmap_clear (si->visited);
2320 /* Actually the label the nodes for pointer equivalences */
2321 for (i = 1; i < FIRST_REF_NODE; i++)
2322 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2323 label_visit (graph, si, si->node_mapping[i]);
2325 /* Calculate location equivalence labels. */
2326 for (i = 1; i < FIRST_REF_NODE; i++)
2328 bitmap pointed_by;
2329 bitmap_iterator bi;
2330 unsigned int j;
2332 if (!graph->pointed_by[i])
2333 continue;
2334 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2336 /* Translate the pointed-by mapping for pointer equivalence
2337 labels. */
2338 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2340 bitmap_set_bit (pointed_by,
2341 graph->pointer_label[si->node_mapping[j]]);
2343 /* The original pointed_by is now dead. */
2344 BITMAP_FREE (graph->pointed_by[i]);
2346 /* Look up the location equivalence label if one exists, or make
2347 one otherwise. */
2348 equiv_class_label_t ecl;
2349 ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
2350 if (ecl->equivalence_class == 0)
2351 ecl->equivalence_class = location_equiv_class++;
2352 else
2354 if (dump_file && (dump_flags & TDF_DETAILS))
2355 fprintf (dump_file, "Found location equivalence for node %s\n",
2356 get_varinfo (i)->name);
/* An equal pointed_by set already owns this class; ours is redundant.  */
2357 BITMAP_FREE (pointed_by);
2359 graph->loc_label[i] = ecl->equivalence_class;
/* Dump the SCC mapping and the computed equivalence classes.  */
2363 if (dump_file && (dump_flags & TDF_DETAILS))
2364 for (i = 1; i < FIRST_REF_NODE; i++)
2366 unsigned j = si->node_mapping[i];
2367 if (j != i)
2369 fprintf (dump_file, "%s node id %d ",
2370 bitmap_bit_p (graph->direct_nodes, i)
2371 ? "Direct" : "Indirect", i);
2372 if (i < FIRST_REF_NODE)
2373 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2374 else
2375 fprintf (dump_file, "\"*%s\"",
2376 get_varinfo (i - FIRST_REF_NODE)->name);
2377 fprintf (dump_file, " mapped to SCC leader node id %d ", j);
2378 if (j < FIRST_REF_NODE)
2379 fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
2380 else
2381 fprintf (dump_file, "\"*%s\"\n",
2382 get_varinfo (j - FIRST_REF_NODE)->name);
2384 else
2386 fprintf (dump_file,
2387 "Equivalence classes for %s node id %d ",
2388 bitmap_bit_p (graph->direct_nodes, i)
2389 ? "direct" : "indirect", i);
2390 if (i < FIRST_REF_NODE)
2391 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2392 else
2393 fprintf (dump_file, "\"*%s\"",
2394 get_varinfo (i - FIRST_REF_NODE)->name);
2395 fprintf (dump_file,
2396 ": pointer %d, location %d\n",
2397 graph->pointer_label[i], graph->loc_label[i]);
2401 /* Quickly eliminate our non-pointer variables. */
2403 for (i = 1; i < FIRST_REF_NODE; i++)
2405 unsigned int node = si->node_mapping[i];
2407 if (graph->pointer_label[node] == 0)
2409 if (dump_file && (dump_flags & TDF_DETAILS))
2410 fprintf (dump_file,
2411 "%s is a non-pointer variable, eliminating edges.\n",
2412 get_varinfo (node)->name);
2413 stats.nonpointer_vars++;
2414 clear_edges_for_node (graph, node);
2418 return si;
2421 /* Free information that was only necessary for variable
2422 substitution. */
/* Must be paired with perform_var_substitution: releases the scc_info,
   the per-node substitution arrays, both equivalence-class hash tables
   and the iteration obstack set up there.  */
2424 static void
2425 free_var_substitution_info (struct scc_info *si)
2427 delete si;
2428 free (graph->pointer_label);
2429 free (graph->loc_label);
2430 free (graph->pointed_by);
2431 free (graph->points_to);
2432 free (graph->eq_rep);
2433 sbitmap_free (graph->direct_nodes);
2434 delete pointer_equiv_class_table;
2435 pointer_equiv_class_table = NULL;
2436 delete location_equiv_class_table;
2437 location_equiv_class_table = NULL;
2438 bitmap_obstack_release (&iteration_obstack);
2441 /* Return an existing node that is equivalent to NODE, which has
2442 equivalence class LABEL, if one exists. Return NODE otherwise. */
2444 static unsigned int
2445 find_equivalent_node (constraint_graph_t graph,
2446 unsigned int node, unsigned int label)
2448 /* If the address version of this variable is unused, we can
2449 substitute it for anything else with the same label.
2450 Otherwise, we know the pointers are equivalent, but not the
2451 locations, and we can unite them later. */
2453 if (!bitmap_bit_p (graph->address_taken, node))
2455 gcc_checking_assert (label < graph->size);
2457 if (graph->eq_rep[label] != -1)
2459 /* Unify the two variables since we know they are equivalent. */
2460 if (unite (graph->eq_rep[label], node))
2461 unify_nodes (graph, graph->eq_rep[label], node, false);
2462 return graph->eq_rep[label];
2464 else
2466 graph->eq_rep[label] = node;
2467 graph->pe_rep[label] = node;
2470 else
2472 gcc_checking_assert (label < graph->size);
2473 graph->pe[node] = label;
2474 if (graph->pe_rep[label] == -1)
2475 graph->pe_rep[label] = node;
2478 return node;
2481 /* Unite pointer equivalent but not location equivalent nodes in
2482 GRAPH. This may only be performed once variable substitution is
2483 finished. */
2485 static void
2486 unite_pointer_equivalences (constraint_graph_t graph)
2488 unsigned int i;
2490 /* Go through the pointer equivalences and unite them to their
2491 representative, if they aren't already. */
2492 for (i = 1; i < FIRST_REF_NODE; i++)
2494 unsigned int label = graph->pe[i];
2495 if (label)
2497 int label_rep = graph->pe_rep[label];
2499 if (label_rep == -1)
2500 continue;
2502 label_rep = find (label_rep);
2503 if (label_rep >= 0 && unite (label_rep, find (i)))
2504 unify_nodes (graph, label_rep, i, false);
2509 /* Move complex constraints to the GRAPH nodes they belong to. */
2511 static void
2512 move_complex_constraints (constraint_graph_t graph)
2514 int i;
2515 constraint_t c;
2517 FOR_EACH_VEC_ELT (constraints, i, c)
2519 if (c)
2521 struct constraint_expr lhs = c->lhs;
2522 struct constraint_expr rhs = c->rhs;
2524 if (lhs.type == DEREF)
2526 insert_into_complex (graph, lhs.var, c);
2528 else if (rhs.type == DEREF)
2530 if (!(get_varinfo (lhs.var)->is_special_var))
2531 insert_into_complex (graph, rhs.var, c);
2533 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2534 && (lhs.offset != 0 || rhs.offset != 0))
2536 insert_into_complex (graph, rhs.var, c);
2543 /* Optimize and rewrite complex constraints while performing
2544 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2545 result of perform_variable_substitution. */
/* Constraints whose lhs or rhs has pointer-equivalence label 0 (provably
   a non-pointer) are dropped by NULLing their slot in the constraints
   vector; later consumers must tolerate NULL entries.  */
2547 static void
2548 rewrite_constraints (constraint_graph_t graph,
2549 struct scc_info *si)
2551 int i;
2552 constraint_t c;
2554 if (flag_checking)
2556 for (unsigned int j = 0; j < graph->size; j++)
2557 gcc_assert (find (j) == j);
2560 FOR_EACH_VEC_ELT (constraints, i, c)
2562 struct constraint_expr lhs = c->lhs;
2563 struct constraint_expr rhs = c->rhs;
2564 unsigned int lhsvar = find (lhs.var);
2565 unsigned int rhsvar = find (rhs.var);
2566 unsigned int lhsnode, rhsnode;
2567 unsigned int lhslabel, rhslabel;
2569 lhsnode = si->node_mapping[lhsvar];
2570 rhsnode = si->node_mapping[rhsvar];
2571 lhslabel = graph->pointer_label[lhsnode];
2572 rhslabel = graph->pointer_label[rhsnode];
2574 /* See if it is really a non-pointer variable, and if so, ignore
2575 the constraint. */
2576 if (lhslabel == 0)
2578 if (dump_file && (dump_flags & TDF_DETAILS))
2581 fprintf (dump_file, "%s is a non-pointer variable, "
2582 "ignoring constraint:",
2583 get_varinfo (lhs.var)->name);
2584 dump_constraint (dump_file, c);
2585 fprintf (dump_file, "\n");
2587 constraints[i] = NULL;
2588 continue;
2591 if (rhslabel == 0)
2593 if (dump_file && (dump_flags & TDF_DETAILS))
2596 fprintf (dump_file, "%s is a non-pointer variable, "
2597 "ignoring constraint:",
2598 get_varinfo (rhs.var)->name);
2599 dump_constraint (dump_file, c);
2600 fprintf (dump_file, "\n");
2602 constraints[i] = NULL;
2603 continue;
/* Redirect both sides to the representative of their equivalence class.  */
2606 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2607 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2608 c->lhs.var = lhsvar;
2609 c->rhs.var = rhsvar;
2613 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2614 part of an SCC, false otherwise. */
2616 static bool
2617 eliminate_indirect_cycles (unsigned int node)
2619 if (graph->indirect_cycles[node] != -1
2620 && !bitmap_empty_p (get_varinfo (node)->solution))
2622 unsigned int i;
2623 auto_vec<unsigned> queue;
2624 int queuepos;
2625 unsigned int to = find (graph->indirect_cycles[node]);
2626 bitmap_iterator bi;
2628 /* We can't touch the solution set and call unify_nodes
2629 at the same time, because unify_nodes is going to do
2630 bitmap unions into it. */
2632 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2634 if (find (i) == i && i != to)
2636 if (unite (to, i))
2637 queue.safe_push (i);
2641 for (queuepos = 0;
2642 queue.iterate (queuepos, &i);
2643 queuepos++)
2645 unify_nodes (graph, to, i, true);
2647 return true;
2649 return false;
2652 /* Solve the constraint graph GRAPH using our worklist solver.
2653 This is based on the PW* family of solvers from the "Efficient Field
2654 Sensitive Pointer Analysis for C" paper.
2655 It works by iterating over all the graph nodes, processing the complex
2656 constraints and propagating the copy constraints, until everything stops
2657 changed. This corresponds to steps 6-8 in the solving list given above. */
2659 static void
2660 solve_graph (constraint_graph_t graph)
2662 unsigned int size = graph->size;
2663 unsigned int i;
2664 bitmap pts;
/* CHANGED (file-scope bitmap) records nodes whose solution grew and so
   must be re-processed on the next sweep.  */
2666 changed = BITMAP_ALLOC (NULL);
2668 /* Mark all initial non-collapsed nodes as changed. */
2669 for (i = 1; i < size; i++)
2671 varinfo_t ivi = get_varinfo (i);
2672 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2673 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2674 || graph->complex[i].length () > 0))
2675 bitmap_set_bit (changed, i);
2678 /* Allocate a bitmap to be used to store the changed bits. */
/* PTS holds the delta of a node's solution since its last visit.  */
2679 pts = BITMAP_ALLOC (&pta_obstack);
2681 while (!bitmap_empty_p (changed))
2683 unsigned int i;
2684 struct topo_info *ti = init_topo_info ();
2685 stats.iterations++;
2687 bitmap_obstack_initialize (&iteration_obstack);
2689 compute_topo_order (graph, ti);
2691 while (ti->topo_order.length () != 0)
2694 i = ti->topo_order.pop ();
2696 /* If this variable is not a representative, skip it. */
2697 if (find (i) != i)
2698 continue;
2700 /* In certain indirect cycle cases, we may merge this
2701 variable to another. */
2702 if (eliminate_indirect_cycles (i) && find (i) != i)
2703 continue;
2705 /* If the node has changed, we need to process the
2706 complex constraints and outgoing edges again. */
2707 if (bitmap_clear_bit (changed, i))
2709 unsigned int j;
2710 constraint_t c;
2711 bitmap solution;
2712 vec<constraint_t> complex = graph->complex[i];
2713 varinfo_t vi = get_varinfo (i);
2714 bool solution_empty;
2716 /* Compute the changed set of solution bits. If anything
2717 is in the solution just propagate that. */
2718 if (bitmap_bit_p (vi->solution, anything_id))
2720 /* If anything is also in the old solution there is
2721 nothing to do.
2722 ??? But we shouldn't ended up with "changed" set ... */
2723 if (vi->oldsolution
2724 && bitmap_bit_p (vi->oldsolution, anything_id))
2725 continue;
2726 bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
2728 else if (vi->oldsolution)
2729 bitmap_and_compl (pts, vi->solution, vi->oldsolution);
2730 else
2731 bitmap_copy (pts, vi->solution);
2733 if (bitmap_empty_p (pts))
2734 continue;
/* Remember what we have already propagated so only deltas are
   processed on future visits.  */
2736 if (vi->oldsolution)
2737 bitmap_ior_into (vi->oldsolution, pts);
2738 else
2740 vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
2741 bitmap_copy (vi->oldsolution, pts);
2744 solution = vi->solution;
2745 solution_empty = bitmap_empty_p (solution);
2747 /* Process the complex constraints */
2748 bitmap expanded_pts = NULL;
2749 FOR_EACH_VEC_ELT (complex, j, c)
2751 /* XXX: This is going to unsort the constraints in
2752 some cases, which will occasionally add duplicate
2753 constraints during unification. This does not
2754 affect correctness. */
2755 c->lhs.var = find (c->lhs.var);
2756 c->rhs.var = find (c->rhs.var);
2758 /* The only complex constraint that can change our
2759 solution to non-empty, given an empty solution,
2760 is a constraint where the lhs side is receiving
2761 some set from elsewhere. */
2762 if (!solution_empty || c->lhs.type != DEREF)
2763 do_complex_constraint (graph, c, pts, &expanded_pts);
2765 BITMAP_FREE (expanded_pts);
2767 solution_empty = bitmap_empty_p (solution);
2769 if (!solution_empty)
2771 bitmap_iterator bi;
2772 unsigned eff_escaped_id = find (escaped_id);
2774 /* Propagate solution to all successors. */
/* TO_REMOVE defers clearing a stale successor bit until after the
   iterator has moved past it.  */
2775 unsigned to_remove = ~0U;
2776 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2777 0, j, bi)
2779 if (to_remove != ~0U)
2781 bitmap_clear_bit (graph->succs[i], to_remove);
2782 to_remove = ~0U;
2784 unsigned int to = find (j);
2785 if (to != j)
2787 /* Update the succ graph, avoiding duplicate
2788 work. */
2789 to_remove = j;
2790 if (! bitmap_set_bit (graph->succs[i], to))
2791 continue;
2792 /* We eventually end up processing 'to' twice
2793 as it is undefined whether bitmap iteration
2794 iterates over bits set during iteration.
2795 Play safe instead of doing tricks. */
2797 /* Don't try to propagate to ourselves. */
2798 if (to == i)
2799 continue;
2801 bitmap tmp = get_varinfo (to)->solution;
2802 bool flag = false;
2804 /* If we propagate from ESCAPED use ESCAPED as
2805 placeholder. */
2806 if (i == eff_escaped_id)
2807 flag = bitmap_set_bit (tmp, escaped_id);
2808 else
2809 flag = bitmap_ior_into (tmp, pts);
2811 if (flag)
2812 bitmap_set_bit (changed, to);
2814 if (to_remove != ~0U)
2815 bitmap_clear_bit (graph->succs[i], to_remove);
2819 free_topo_info (ti);
2820 bitmap_obstack_release (&iteration_obstack);
2823 BITMAP_FREE (pts);
2824 BITMAP_FREE (changed);
2825 bitmap_obstack_release (&oldpta_obstack);
2828 /* Map from trees to variable infos. */
2829 static hash_map<tree, varinfo_t> *vi_for_tree;
2832 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
/* Asserting on the result of put makes double-insertion for the same
   tree a hard error; note the insertion itself happens inside
   gcc_assert, which is evaluated in release builds as well.  */
2834 static void
2835 insert_vi_for_tree (tree t, varinfo_t vi)
2837 gcc_assert (vi);
2838 gcc_assert (!vi_for_tree->put (t, vi));
2841 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2842 exist in the map, return NULL, otherwise, return the varinfo we found. */
2844 static varinfo_t
2845 lookup_vi_for_tree (tree t)
2847 varinfo_t *slot = vi_for_tree->get (t);
2848 if (slot == NULL)
2849 return NULL;
2851 return *slot;
2854 /* Return a printable name for DECL */
2856 static const char *
2857 alias_get_name (tree decl)
2859 const char *res = "NULL";
2860 if (dump_file)
2862 char *temp = NULL;
2863 if (TREE_CODE (decl) == SSA_NAME)
2865 res = get_name (decl);
2866 temp = xasprintf ("%s_%u", res ? res : "", SSA_NAME_VERSION (decl));
2868 else if (HAS_DECL_ASSEMBLER_NAME_P (decl)
2869 && DECL_ASSEMBLER_NAME_SET_P (decl))
2870 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl));
2871 else if (DECL_P (decl))
2873 res = get_name (decl);
2874 if (!res)
2875 temp = xasprintf ("D.%u", DECL_UID (decl));
2878 if (temp)
2880 res = ggc_strdup (temp);
2881 free (temp);
2885 return res;
2888 /* Find the variable id for tree T in the map.
2889 If T doesn't exist in the map, create an entry for it and return it. */
2891 static varinfo_t
2892 get_vi_for_tree (tree t)
2894 varinfo_t *slot = vi_for_tree->get (t);
2895 if (slot == NULL)
2897 unsigned int id = create_variable_info_for (t, alias_get_name (t), false);
2898 return get_varinfo (id);
2901 return *slot;
2904 /* Get a scalar constraint expression for a new temporary variable. */
2906 static struct constraint_expr
2907 new_scalar_tmp_constraint_exp (const char *name, bool add_id)
2909 struct constraint_expr tmp;
2910 varinfo_t vi;
2912 vi = new_var_info (NULL_TREE, name, add_id);
2913 vi->offset = 0;
2914 vi->size = -1;
2915 vi->fullsize = -1;
2916 vi->is_full_var = 1;
2917 vi->is_reg_var = 1;
2919 tmp.var = vi->id;
2920 tmp.type = SCALAR;
2921 tmp.offset = 0;
2923 return tmp;
2926 /* Get a constraint expression vector from an SSA_VAR_P node.
2927 If address_p is true, the result will be taken its address of. */
/* T is an SSA_NAME or a DECL; constraint_exprs are appended to RESULTS.
   When ADDRESS_P is true the caller will take the address of the result,
   so sub-fields need not be expanded here.  */
2929 static void
2930 get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
2932 struct constraint_expr cexpr;
2933 varinfo_t vi;
2935 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2936 gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
2938 if (TREE_CODE (t) == SSA_NAME
2939 && SSA_NAME_IS_DEFAULT_DEF (t))
2941 /* For parameters, get at the points-to set for the actual parm
2942 decl. */
2943 if (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2944 || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
2946 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2947 return;
2949 /* For undefined SSA names return nothing. */
2950 else if (!ssa_defined_default_def_p (t))
2952 cexpr.var = nothing_id;
2953 cexpr.type = SCALAR;
2954 cexpr.offset = 0;
2955 results->safe_push (cexpr);
2956 return;
2960 /* For global variables resort to the alias target. */
2961 if (VAR_P (t) && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
2963 varpool_node *node = varpool_node::get (t);
2964 if (node && node->alias && node->analyzed)
2966 node = node->ultimate_alias_target ();
2967 /* Canonicalize the PT uid of all aliases to the ultimate target.
2968 ??? Hopefully the set of aliases can't change in a way that
2969 changes the ultimate alias target. */
2970 gcc_assert ((! DECL_PT_UID_SET_P (node->decl)
2971 || DECL_PT_UID (node->decl) == DECL_UID (node->decl))
2972 && (! DECL_PT_UID_SET_P (t)
2973 || DECL_PT_UID (t) == DECL_UID (node->decl)));
2974 DECL_PT_UID (t) = DECL_UID (node->decl);
2975 t = node->decl;
2978 /* If this is decl may bind to NULL note that. */
/* nothing_id is pushed IN ADDITION to the variable's own constraint
   below, modeling that the address may also be NULL.  */
2979 if (address_p
2980 && (! node || ! node->nonzero_address ()))
2982 cexpr.var = nothing_id;
2983 cexpr.type = SCALAR;
2984 cexpr.offset = 0;
2985 results->safe_push (cexpr);
2989 vi = get_vi_for_tree (t);
2990 cexpr.var = vi->id;
2991 cexpr.type = SCALAR;
2992 cexpr.offset = 0;
2994 /* If we are not taking the address of the constraint expr, add all
2995 sub-fiels of the variable as well. */
2996 if (!address_p
2997 && !vi->is_full_var)
2999 for (; vi; vi = vi_next (vi))
3001 cexpr.var = vi->id;
3002 results->safe_push (cexpr);
3004 return;
3007 results->safe_push (cexpr);
3010 /* Process constraint T, performing various simplifications and then
3011 adding it to our list of overall constraints. */
/* The splitting cases below recurse; the introduced temporaries are plain
   SCALARs with zero offset, so recursion bottoms out in the final else.  */
3013 static void
3014 process_constraint (constraint_t t)
3016 struct constraint_expr rhs = t->rhs;
3017 struct constraint_expr lhs = t->lhs;
3019 gcc_assert (rhs.var < varmap.length ());
3020 gcc_assert (lhs.var < varmap.length ());
3022 /* If we didn't get any useful constraint from the lhs we get
3023 &ANYTHING as fallback from get_constraint_for. Deal with
3024 it here by turning it into *ANYTHING. */
3025 if (lhs.type == ADDRESSOF
3026 && lhs.var == anything_id)
3027 lhs.type = DEREF;
3029 /* ADDRESSOF on the lhs is invalid. */
3030 gcc_assert (lhs.type != ADDRESSOF);
3032 /* We shouldn't add constraints from things that cannot have pointers.
3033 It's not completely trivial to avoid in the callers, so do it here. */
3034 if (rhs.type != ADDRESSOF
3035 && !get_varinfo (rhs.var)->may_have_pointers)
3036 return;
3038 /* Likewise adding to the solution of a non-pointer var isn't useful. */
3039 if (!get_varinfo (lhs.var)->may_have_pointers)
3040 return;
3042 /* This can happen in our IR with things like n->a = *p */
3043 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
3045 /* Split into tmp = *rhs, *lhs = tmp */
3046 struct constraint_expr tmplhs;
3047 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp", true);
3048 process_constraint (new_constraint (tmplhs, rhs));
3049 process_constraint (new_constraint (lhs, tmplhs));
3051 else if ((rhs.type != SCALAR || rhs.offset != 0) && lhs.type == DEREF)
3053 /* Split into tmp = &rhs, *lhs = tmp */
3054 struct constraint_expr tmplhs;
3055 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp", true);
3056 process_constraint (new_constraint (tmplhs, rhs));
3057 process_constraint (new_constraint (lhs, tmplhs));
3059 else
3061 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
3062 constraints.safe_push (t);
3067 /* Return the position, in bits, of FIELD_DECL from the beginning of its
3068 structure. */
3070 static HOST_WIDE_INT
3071 bitpos_of_field (const tree fdecl)
3073 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3074 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3075 return -1;
3077 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3078 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
3082 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
3083 resulting constraint expressions in *RESULTS. */
/* RHSOFFSET below is measured in bits (unit offset * BITS_PER_UNIT).  */
3085 static void
3086 get_constraint_for_ptr_offset (tree ptr, tree offset,
3087 vec<ce_s> *results)
3089 struct constraint_expr c;
3090 unsigned int j, n;
3091 HOST_WIDE_INT rhsoffset;
3093 /* If we do not do field-sensitive PTA adding offsets to pointers
3094 does not change the points-to solution. */
3095 if (!use_field_sensitive)
3097 get_constraint_for_rhs (ptr, results);
3098 return;
3101 /* If the offset is not a non-negative integer constant that fits
3102 in a HOST_WIDE_INT, we have to fall back to a conservative
3103 solution which includes all sub-fields of all pointed-to
3104 variables of ptr. */
3105 if (offset == NULL_TREE
3106 || TREE_CODE (offset) != INTEGER_CST)
3107 rhsoffset = UNKNOWN_OFFSET;
3108 else
3110 /* Sign-extend the offset. */
3111 offset_int soffset = offset_int::from (wi::to_wide (offset), SIGNED);
3112 if (!wi::fits_shwi_p (soffset))
3113 rhsoffset = UNKNOWN_OFFSET;
3114 else
3116 /* Make sure the bit-offset also fits. */
3117 HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
3118 rhsoffset = rhsunitoffset * (unsigned HOST_WIDE_INT) BITS_PER_UNIT;
3119 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3120 rhsoffset = UNKNOWN_OFFSET;
3124 get_constraint_for_rhs (ptr, results);
3125 if (rhsoffset == 0)
3126 return;
3128 /* As we are eventually appending to the solution do not use
3129 vec::iterate here. */
3130 n = results->length ();
3131 for (j = 0; j < n; j++)
3133 varinfo_t curr;
3134 c = (*results)[j];
3135 curr = get_varinfo (c.var);
3137 if (c.type == ADDRESSOF
3138 /* If this varinfo represents a full variable just use it. */
3139 && curr->is_full_var)
3141 else if (c.type == ADDRESSOF
3142 /* If we do not know the offset add all subfields. */
3143 && rhsoffset == UNKNOWN_OFFSET)
3145 varinfo_t temp = get_varinfo (curr->head);
3148 struct constraint_expr c2;
3149 c2.var = temp->id;
3150 c2.type = ADDRESSOF;
3151 c2.offset = 0;
3152 if (c2.var != c.var)
3153 results->safe_push (c2);
3154 temp = vi_next (temp);
3156 while (temp);
3158 else if (c.type == ADDRESSOF)
3160 varinfo_t temp;
3161 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3163 /* If curr->offset + rhsoffset is less than zero adjust it. */
3164 if (rhsoffset < 0
3165 && curr->offset < offset)
3166 offset = 0;
3168 /* We have to include all fields that overlap the current
3169 field shifted by rhsoffset. And we include at least
3170 the last or the first field of the variable to represent
3171 reachability of off-bound addresses, in particular &object + 1,
3172 conservatively correct. */
3173 temp = first_or_preceding_vi_for_offset (curr, offset);
3174 c.var = temp->id;
3175 c.offset = 0;
3176 temp = vi_next (temp);
3177 while (temp
3178 && temp->offset < offset + curr->size)
3180 struct constraint_expr c2;
3181 c2.var = temp->id;
3182 c2.type = ADDRESSOF;
3183 c2.offset = 0;
3184 results->safe_push (c2);
3185 temp = vi_next (temp);
3188 else if (c.type == SCALAR)
3190 gcc_assert (c.offset == 0);
3191 c.offset = rhsoffset;
3193 else
3194 /* We shouldn't get any DEREFs here. */
3195 gcc_unreachable ();
/* Write back the (possibly retargeted) constraint in place.  */
3197 (*results)[j] = c;
3202 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3203 If address_p is true the result will be taken its address of.
3204 If lhs_p is true then the constraint expression is assumed to be used
3205 as the lhs. */
3207 static void
3208 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3209 bool address_p, bool lhs_p)
3211 tree orig_t = t;
3212 poly_int64 bitsize = -1;
3213 poly_int64 bitmaxsize = -1;
3214 poly_int64 bitpos;
3215 bool reverse;
3216 tree forzero;
3218 /* Some people like to do cute things like take the address of
3219 &0->a.b */
/* Peel the reference chain down to its innermost base to detect a
   literal zero base.  */
3220 forzero = t;
3221 while (handled_component_p (forzero)
3222 || INDIRECT_REF_P (forzero)
3223 || TREE_CODE (forzero) == MEM_REF)
3224 forzero = TREE_OPERAND (forzero, 0);
3226 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3228 struct constraint_expr temp;
3230 temp.offset = 0;
3231 temp.var = integer_id;
3232 temp.type = SCALAR;
3233 results->safe_push (temp);
3234 return;
3237 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse);
3239 /* We can end up here for component references on a
3240 VIEW_CONVERT_EXPR <>(&foobar) or things like a
3241 BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>. So for
3242 symbolic constants simply give up. */
3243 if (TREE_CODE (t) == ADDR_EXPR)
3245 constraint_expr result;
3246 result.type = SCALAR;
3247 result.var = anything_id;
3248 result.offset = 0;
3249 results->safe_push (result);
3250 return;
3253 /* Pretend to take the address of the base, we'll take care of
3254 adding the required subset of sub-fields below. */
3255 get_constraint_for_1 (t, results, true, lhs_p);
3256 /* Strip off nothing_id. */
3257 if (results->length () == 2)
3259 gcc_assert ((*results)[0].var == nothing_id);
3260 results->unordered_remove (0);
3262 gcc_assert (results->length () == 1);
3263 struct constraint_expr &result = results->last ();
3265 if (result.type == SCALAR
3266 && get_varinfo (result.var)->is_full_var)
3267 /* For single-field vars do not bother about the offset. */
3268 result.offset = 0;
3269 else if (result.type == SCALAR)
3271 /* In languages like C, you can access one past the end of an
3272 array. You aren't allowed to dereference it, so we can
3273 ignore this constraint. When we handle pointer subtraction,
3274 we may have to do something cute here. */
3276 if (maybe_lt (poly_uint64 (bitpos), get_varinfo (result.var)->fullsize)
3277 && maybe_ne (bitmaxsize, 0))
3279 /* It's also not true that the constraint will actually start at the
3280 right offset, it may start in some padding. We only care about
3281 setting the constraint to the first actual field it touches, so
3282 walk to find it. */
3283 struct constraint_expr cexpr = result;
3284 varinfo_t curr;
3285 results->pop ();
3286 cexpr.offset = 0;
/* Push one constraint per sub-field overlapping the accessed range;
   when taking the address only the first overlapping field is needed.  */
3287 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3289 if (ranges_maybe_overlap_p (poly_int64 (curr->offset),
3290 curr->size, bitpos, bitmaxsize))
3292 cexpr.var = curr->id;
3293 results->safe_push (cexpr);
3294 if (address_p)
3295 break;
3298 /* If we are going to take the address of this field then
3299 to be able to compute reachability correctly add at least
3300 the last field of the variable. */
3301 if (address_p && results->length () == 0)
3303 curr = get_varinfo (cexpr.var);
3304 while (curr->next != 0)
3305 curr = vi_next (curr);
3306 cexpr.var = curr->id;
3307 results->safe_push (cexpr);
3309 else if (results->length () == 0)
3310 /* Assert that we found *some* field there. The user couldn't be
3311 accessing *only* padding. */
3312 /* Still the user could access one past the end of an array
3313 embedded in a struct resulting in accessing *only* padding. */
3314 /* Or accessing only padding via type-punning to a type
3315 that has a filed just in padding space. */
3317 cexpr.type = SCALAR;
3318 cexpr.var = anything_id;
3319 cexpr.offset = 0;
3320 results->safe_push (cexpr);
3323 else if (known_eq (bitmaxsize, 0))
3325 if (dump_file && (dump_flags & TDF_DETAILS))
3326 fprintf (dump_file, "Access to zero-sized part of variable, "
3327 "ignoring\n");
3329 else
3330 if (dump_file && (dump_flags & TDF_DETAILS))
3331 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3333 else if (result.type == DEREF)
3335 /* If we do not know exactly where the access goes say so. Note
3336 that only for non-structure accesses we know that we access
3337 at most one subfiled of any variable. */
3338 HOST_WIDE_INT const_bitpos;
3339 if (!bitpos.is_constant (&const_bitpos)
3340 || const_bitpos == -1
3341 || maybe_ne (bitsize, bitmaxsize)
3342 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3343 || result.offset == UNKNOWN_OFFSET)
3344 result.offset = UNKNOWN_OFFSET;
3345 else
3346 result.offset += const_bitpos;
3348 else if (result.type == ADDRESSOF)
3350 /* We can end up here for component references on constants like
3351 VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i]. */
3352 result.type = SCALAR;
3353 result.var = anything_id;
3354 result.offset = 0;
3356 else
3357 gcc_unreachable ();
3361 /* Dereference the constraint expression CONS, and return the result.
3362 DEREF (ADDRESSOF) = SCALAR
3363 DEREF (SCALAR) = DEREF
3364 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3365 This is needed so that we can handle dereferencing DEREF constraints. */
3367 static void
3368 do_deref (vec<ce_s> *constraints)
3370 struct constraint_expr *c;
3371 unsigned int i = 0;
3373 FOR_EACH_VEC_ELT (*constraints, i, c)
3375 if (c->type == SCALAR)
3376 c->type = DEREF;
3377 else if (c->type == ADDRESSOF)
3378 c->type = SCALAR;
3379 else if (c->type == DEREF)
3381 struct constraint_expr tmplhs;
3382 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp", true);
3383 process_constraint (new_constraint (tmplhs, *c));
3384 c->var = tmplhs.var;
3386 else
3387 gcc_unreachable ();
3391 /* Given a tree T, return the constraint expression for taking the
3392 address of it. */
3394 static void
3395 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3397 struct constraint_expr *c;
3398 unsigned int i;
3400 get_constraint_for_1 (t, results, true, true);
3402 FOR_EACH_VEC_ELT (*results, i, c)
3404 if (c->type == DEREF)
3405 c->type = SCALAR;
3406 else
3407 c->type = ADDRESSOF;
3411 /* Given a tree T, return the constraint expression for it. */
3413 static void
3414 get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
3415 bool lhs_p)
3417 struct constraint_expr temp;
3419 /* x = integer is all glommed to a single variable, which doesn't
3420 point to anything by itself. That is, of course, unless it is an
3421 integer constant being treated as a pointer, in which case, we
3422 will return that this is really the addressof anything. This
3423 happens below, since it will fall into the default case. The only
3424 case we know something about an integer treated like a pointer is
3425 when it is the NULL pointer, and then we just say it points to
3426 NULL.
3428 Do not do that if -fno-delete-null-pointer-checks though, because
3429 in that case *NULL does not fail, so it _should_ alias *anything.
3430 It is not worth adding a new option or renaming the existing one,
3431 since this case is relatively obscure. */
3432 if ((TREE_CODE (t) == INTEGER_CST
3433 && integer_zerop (t))
3434 /* The only valid CONSTRUCTORs in gimple with pointer typed
3435 elements are zero-initializer. But in IPA mode we also
3436 process global initializers, so verify at least. */
3437 || (TREE_CODE (t) == CONSTRUCTOR
3438 && CONSTRUCTOR_NELTS (t) == 0))
3440 if (flag_delete_null_pointer_checks)
3441 temp.var = nothing_id;
3442 else
3443 temp.var = nonlocal_id;
3444 temp.type = ADDRESSOF;
3445 temp.offset = 0;
3446 results->safe_push (temp);
3447 return;
3450 /* String constants are read-only, ideally we'd have a CONST_DECL
3451 for those. */
3452 if (TREE_CODE (t) == STRING_CST)
3454 temp.var = string_id;
3455 temp.type = SCALAR;
3456 temp.offset = 0;
3457 results->safe_push (temp);
3458 return;
3461 switch (TREE_CODE_CLASS (TREE_CODE (t)))
3463 case tcc_expression:
3465 switch (TREE_CODE (t))
3467 case ADDR_EXPR:
3468 get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
3469 return;
3470 default:;
3472 break;
3474 case tcc_reference:
3476 switch (TREE_CODE (t))
3478 case MEM_REF:
3480 struct constraint_expr cs;
3481 varinfo_t vi, curr;
3482 get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
3483 TREE_OPERAND (t, 1), results);
3484 do_deref (results);
3486 /* If we are not taking the address then make sure to process
3487 all subvariables we might access. */
3488 if (address_p)
3489 return;
3491 cs = results->last ();
3492 if (cs.type == DEREF
3493 && type_can_have_subvars (TREE_TYPE (t)))
3495 /* For dereferences this means we have to defer it
3496 to solving time. */
3497 results->last ().offset = UNKNOWN_OFFSET;
3498 return;
3500 if (cs.type != SCALAR)
3501 return;
3503 vi = get_varinfo (cs.var);
3504 curr = vi_next (vi);
3505 if (!vi->is_full_var
3506 && curr)
3508 unsigned HOST_WIDE_INT size;
3509 if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
3510 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
3511 else
3512 size = -1;
3513 for (; curr; curr = vi_next (curr))
3515 if (curr->offset - vi->offset < size)
3517 cs.var = curr->id;
3518 results->safe_push (cs);
3520 else
3521 break;
3524 return;
3526 case ARRAY_REF:
3527 case ARRAY_RANGE_REF:
3528 case COMPONENT_REF:
3529 case IMAGPART_EXPR:
3530 case REALPART_EXPR:
3531 case BIT_FIELD_REF:
3532 get_constraint_for_component_ref (t, results, address_p, lhs_p);
3533 return;
3534 case VIEW_CONVERT_EXPR:
3535 get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
3536 lhs_p);
3537 return;
3538 /* We are missing handling for TARGET_MEM_REF here. */
3539 default:;
3541 break;
3543 case tcc_exceptional:
3545 switch (TREE_CODE (t))
3547 case SSA_NAME:
3549 get_constraint_for_ssa_var (t, results, address_p);
3550 return;
3552 case CONSTRUCTOR:
3554 unsigned int i;
3555 tree val;
3556 auto_vec<ce_s> tmp;
3557 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
3559 struct constraint_expr *rhsp;
3560 unsigned j;
3561 get_constraint_for_1 (val, &tmp, address_p, lhs_p);
3562 FOR_EACH_VEC_ELT (tmp, j, rhsp)
3563 results->safe_push (*rhsp);
3564 tmp.truncate (0);
3566 /* We do not know whether the constructor was complete,
3567 so technically we have to add &NOTHING or &ANYTHING
3568 like we do for an empty constructor as well. */
3569 return;
3571 default:;
3573 break;
3575 case tcc_declaration:
3577 get_constraint_for_ssa_var (t, results, address_p);
3578 return;
3580 case tcc_constant:
3582 /* We cannot refer to automatic variables through constants. */
3583 temp.type = ADDRESSOF;
3584 temp.var = nonlocal_id;
3585 temp.offset = 0;
3586 results->safe_push (temp);
3587 return;
3589 default:;
3592 /* The default fallback is a constraint from anything. */
3593 temp.type = ADDRESSOF;
3594 temp.var = anything_id;
3595 temp.offset = 0;
3596 results->safe_push (temp);
3599 /* Given a gimple tree T, return the constraint expression vector for it. */
3601 static void
3602 get_constraint_for (tree t, vec<ce_s> *results)
3604 gcc_assert (results->length () == 0);
3606 get_constraint_for_1 (t, results, false, true);
3609 /* Given a gimple tree T, return the constraint expression vector for it
3610 to be used as the rhs of a constraint. */
3612 static void
3613 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3615 gcc_assert (results->length () == 0);
3617 get_constraint_for_1 (t, results, false, false);
3621 /* Efficiently generates constraints from all entries in *RHSC to all
3622 entries in *LHSC. */
3624 static void
3625 process_all_all_constraints (vec<ce_s> lhsc,
3626 vec<ce_s> rhsc)
3628 struct constraint_expr *lhsp, *rhsp;
3629 unsigned i, j;
3631 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3633 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3634 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3635 process_constraint (new_constraint (*lhsp, *rhsp));
3637 else
3639 struct constraint_expr tmp;
3640 tmp = new_scalar_tmp_constraint_exp ("allalltmp", true);
3641 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3642 process_constraint (new_constraint (tmp, *rhsp));
3643 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3644 process_constraint (new_constraint (*lhsp, tmp));
3648 /* Handle aggregate copies by expanding into copies of the respective
3649 fields of the structures. */
3651 static void
3652 do_structure_copy (tree lhsop, tree rhsop)
3654 struct constraint_expr *lhsp, *rhsp;
3655 auto_vec<ce_s> lhsc;
3656 auto_vec<ce_s> rhsc;
3657 unsigned j;
3659 get_constraint_for (lhsop, &lhsc);
3660 get_constraint_for_rhs (rhsop, &rhsc);
3661 lhsp = &lhsc[0];
3662 rhsp = &rhsc[0];
3663 if (lhsp->type == DEREF
3664 || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
3665 || rhsp->type == DEREF)
3667 if (lhsp->type == DEREF)
3669 gcc_assert (lhsc.length () == 1);
3670 lhsp->offset = UNKNOWN_OFFSET;
3672 if (rhsp->type == DEREF)
3674 gcc_assert (rhsc.length () == 1);
3675 rhsp->offset = UNKNOWN_OFFSET;
3677 process_all_all_constraints (lhsc, rhsc);
3679 else if (lhsp->type == SCALAR
3680 && (rhsp->type == SCALAR
3681 || rhsp->type == ADDRESSOF))
3683 HOST_WIDE_INT lhssize, lhsoffset;
3684 HOST_WIDE_INT rhssize, rhsoffset;
3685 bool reverse;
3686 unsigned k = 0;
3687 if (!get_ref_base_and_extent_hwi (lhsop, &lhsoffset, &lhssize, &reverse)
3688 || !get_ref_base_and_extent_hwi (rhsop, &rhsoffset, &rhssize,
3689 &reverse))
3691 process_all_all_constraints (lhsc, rhsc);
3692 return;
3694 for (j = 0; lhsc.iterate (j, &lhsp);)
3696 varinfo_t lhsv, rhsv;
3697 rhsp = &rhsc[k];
3698 lhsv = get_varinfo (lhsp->var);
3699 rhsv = get_varinfo (rhsp->var);
3700 if (lhsv->may_have_pointers
3701 && (lhsv->is_full_var
3702 || rhsv->is_full_var
3703 || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
3704 rhsv->offset + lhsoffset, rhsv->size)))
3705 process_constraint (new_constraint (*lhsp, *rhsp));
3706 if (!rhsv->is_full_var
3707 && (lhsv->is_full_var
3708 || (lhsv->offset + rhsoffset + lhsv->size
3709 > rhsv->offset + lhsoffset + rhsv->size)))
3711 ++k;
3712 if (k >= rhsc.length ())
3713 break;
3715 else
3716 ++j;
3719 else
3720 gcc_unreachable ();
3723 /* Create constraints ID = { rhsc }. */
3725 static void
3726 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3728 struct constraint_expr *c;
3729 struct constraint_expr includes;
3730 unsigned int j;
3732 includes.var = id;
3733 includes.offset = 0;
3734 includes.type = SCALAR;
3736 FOR_EACH_VEC_ELT (rhsc, j, c)
3737 process_constraint (new_constraint (includes, *c));
3740 /* Create a constraint ID = OP. */
3742 static void
3743 make_constraint_to (unsigned id, tree op)
3745 auto_vec<ce_s> rhsc;
3746 get_constraint_for_rhs (op, &rhsc);
3747 make_constraints_to (id, rhsc);
3750 /* Create a constraint ID = &FROM. */
3752 static void
3753 make_constraint_from (varinfo_t vi, int from)
3755 struct constraint_expr lhs, rhs;
3757 lhs.var = vi->id;
3758 lhs.offset = 0;
3759 lhs.type = SCALAR;
3761 rhs.var = from;
3762 rhs.offset = 0;
3763 rhs.type = ADDRESSOF;
3764 process_constraint (new_constraint (lhs, rhs));
3767 /* Create a constraint ID = FROM. */
3769 static void
3770 make_copy_constraint (varinfo_t vi, int from)
3772 struct constraint_expr lhs, rhs;
3774 lhs.var = vi->id;
3775 lhs.offset = 0;
3776 lhs.type = SCALAR;
3778 rhs.var = from;
3779 rhs.offset = 0;
3780 rhs.type = SCALAR;
3781 process_constraint (new_constraint (lhs, rhs));
3784 /* Make constraints necessary to make OP escape. */
3786 static void
3787 make_escape_constraint (tree op)
3789 make_constraint_to (escaped_id, op);
3792 /* Add constraints to that the solution of VI is transitively closed. */
3794 static void
3795 make_transitive_closure_constraints (varinfo_t vi)
3797 struct constraint_expr lhs, rhs;
3799 /* VAR = *(VAR + UNKNOWN); */
3800 lhs.type = SCALAR;
3801 lhs.var = vi->id;
3802 lhs.offset = 0;
3803 rhs.type = DEREF;
3804 rhs.var = vi->id;
3805 rhs.offset = UNKNOWN_OFFSET;
3806 process_constraint (new_constraint (lhs, rhs));
3809 /* Add constraints to that the solution of VI has all subvariables added. */
3811 static void
3812 make_any_offset_constraints (varinfo_t vi)
3814 struct constraint_expr lhs, rhs;
3816 /* VAR = VAR + UNKNOWN; */
3817 lhs.type = SCALAR;
3818 lhs.var = vi->id;
3819 lhs.offset = 0;
3820 rhs.type = SCALAR;
3821 rhs.var = vi->id;
3822 rhs.offset = UNKNOWN_OFFSET;
3823 process_constraint (new_constraint (lhs, rhs));
3826 /* Temporary storage for fake var decls. */
3827 struct obstack fake_var_decl_obstack;
3829 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3831 static tree
3832 build_fake_var_decl (tree type)
3834 tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
3835 memset (decl, 0, sizeof (struct tree_var_decl));
3836 TREE_SET_CODE (decl, VAR_DECL);
3837 TREE_TYPE (decl) = type;
3838 DECL_UID (decl) = allocate_decl_uid ();
3839 SET_DECL_PT_UID (decl, -1);
3840 layout_decl (decl, 0);
3841 return decl;
3844 /* Create a new artificial heap variable with NAME.
3845 Return the created variable. */
3847 static varinfo_t
3848 make_heapvar (const char *name, bool add_id)
3850 varinfo_t vi;
3851 tree heapvar;
3853 heapvar = build_fake_var_decl (ptr_type_node);
3854 DECL_EXTERNAL (heapvar) = 1;
3856 vi = new_var_info (heapvar, name, add_id);
3857 vi->is_artificial_var = true;
3858 vi->is_heap_var = true;
3859 vi->is_unknown_size_var = true;
3860 vi->offset = 0;
3861 vi->fullsize = ~0;
3862 vi->size = ~0;
3863 vi->is_full_var = true;
3864 insert_vi_for_tree (heapvar, vi);
3866 return vi;
3869 /* Create a new artificial heap variable with NAME and make a
3870 constraint from it to LHS. Set flags according to a tag used
3871 for tracking restrict pointers. */
3873 static varinfo_t
3874 make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id)
3876 varinfo_t vi = make_heapvar (name, add_id);
3877 vi->is_restrict_var = 1;
3878 vi->is_global_var = 1;
3879 vi->may_have_pointers = 1;
3880 make_constraint_from (lhs, vi->id);
3881 return vi;
3884 /* Create a new artificial heap variable with NAME and make a
3885 constraint from it to LHS. Set flags according to a tag used
3886 for tracking restrict pointers and make the artificial heap
3887 point to global memory. */
3889 static varinfo_t
3890 make_constraint_from_global_restrict (varinfo_t lhs, const char *name,
3891 bool add_id)
3893 varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id);
3894 make_copy_constraint (vi, nonlocal_id);
3895 return vi;
3898 /* In IPA mode there are varinfos for different aspects of reach
3899 function designator. One for the points-to set of the return
3900 value, one for the variables that are clobbered by the function,
3901 one for its uses and one for each parameter (including a single
3902 glob for remaining variadic arguments). */
3904 enum { fi_clobbers = 1, fi_uses = 2,
3905 fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3907 /* Get a constraint for the requested part of a function designator FI
3908 when operating in IPA mode. */
3910 static struct constraint_expr
3911 get_function_part_constraint (varinfo_t fi, unsigned part)
3913 struct constraint_expr c;
3915 gcc_assert (in_ipa_mode);
3917 if (fi->id == anything_id)
3919 /* ??? We probably should have a ANYFN special variable. */
3920 c.var = anything_id;
3921 c.offset = 0;
3922 c.type = SCALAR;
3924 else if (fi->decl && TREE_CODE (fi->decl) == FUNCTION_DECL)
3926 varinfo_t ai = first_vi_for_offset (fi, part);
3927 if (ai)
3928 c.var = ai->id;
3929 else
3930 c.var = anything_id;
3931 c.offset = 0;
3932 c.type = SCALAR;
3934 else
3936 c.var = fi->id;
3937 c.offset = part;
3938 c.type = DEREF;
3941 return c;
3944 /* For non-IPA mode, generate constraints necessary for a call on the
3945 RHS. */
3947 static void
3948 handle_rhs_call (gcall *stmt, vec<ce_s> *results)
3950 struct constraint_expr rhsc;
3951 unsigned i;
3952 bool returns_uses = false;
3954 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3956 tree arg = gimple_call_arg (stmt, i);
3957 int flags = gimple_call_arg_flags (stmt, i);
3959 /* If the argument is not used we can ignore it. */
3960 if (flags & EAF_UNUSED)
3961 continue;
3963 /* As we compute ESCAPED context-insensitive we do not gain
3964 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
3965 set. The argument would still get clobbered through the
3966 escape solution. */
3967 if ((flags & EAF_NOCLOBBER)
3968 && (flags & EAF_NOESCAPE))
3970 varinfo_t uses = get_call_use_vi (stmt);
3971 varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
3972 tem->is_reg_var = true;
3973 make_constraint_to (tem->id, arg);
3974 make_any_offset_constraints (tem);
3975 if (!(flags & EAF_DIRECT))
3976 make_transitive_closure_constraints (tem);
3977 make_copy_constraint (uses, tem->id);
3978 returns_uses = true;
3980 else if (flags & EAF_NOESCAPE)
3982 struct constraint_expr lhs, rhs;
3983 varinfo_t uses = get_call_use_vi (stmt);
3984 varinfo_t clobbers = get_call_clobber_vi (stmt);
3985 varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
3986 tem->is_reg_var = true;
3987 make_constraint_to (tem->id, arg);
3988 make_any_offset_constraints (tem);
3989 if (!(flags & EAF_DIRECT))
3990 make_transitive_closure_constraints (tem);
3991 make_copy_constraint (uses, tem->id);
3992 make_copy_constraint (clobbers, tem->id);
3993 /* Add *tem = nonlocal, do not add *tem = callused as
3994 EAF_NOESCAPE parameters do not escape to other parameters
3995 and all other uses appear in NONLOCAL as well. */
3996 lhs.type = DEREF;
3997 lhs.var = tem->id;
3998 lhs.offset = 0;
3999 rhs.type = SCALAR;
4000 rhs.var = nonlocal_id;
4001 rhs.offset = 0;
4002 process_constraint (new_constraint (lhs, rhs));
4003 returns_uses = true;
4005 else
4006 make_escape_constraint (arg);
4009 /* If we added to the calls uses solution make sure we account for
4010 pointers to it to be returned. */
4011 if (returns_uses)
4013 rhsc.var = get_call_use_vi (stmt)->id;
4014 rhsc.offset = UNKNOWN_OFFSET;
4015 rhsc.type = SCALAR;
4016 results->safe_push (rhsc);
4019 /* The static chain escapes as well. */
4020 if (gimple_call_chain (stmt))
4021 make_escape_constraint (gimple_call_chain (stmt));
4023 /* And if we applied NRV the address of the return slot escapes as well. */
4024 if (gimple_call_return_slot_opt_p (stmt)
4025 && gimple_call_lhs (stmt) != NULL_TREE
4026 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
4028 auto_vec<ce_s> tmpc;
4029 struct constraint_expr lhsc, *c;
4030 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
4031 lhsc.var = escaped_id;
4032 lhsc.offset = 0;
4033 lhsc.type = SCALAR;
4034 FOR_EACH_VEC_ELT (tmpc, i, c)
4035 process_constraint (new_constraint (lhsc, *c));
4038 /* Regular functions return nonlocal memory. */
4039 rhsc.var = nonlocal_id;
4040 rhsc.offset = 0;
4041 rhsc.type = SCALAR;
4042 results->safe_push (rhsc);
4045 /* For non-IPA mode, generate constraints necessary for a call
4046 that returns a pointer and assigns it to LHS. This simply makes
4047 the LHS point to global and escaped variables. */
4049 static void
4050 handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> rhsc,
4051 tree fndecl)
4053 auto_vec<ce_s> lhsc;
4055 get_constraint_for (lhs, &lhsc);
4056 /* If the store is to a global decl make sure to
4057 add proper escape constraints. */
4058 lhs = get_base_address (lhs);
4059 if (lhs
4060 && DECL_P (lhs)
4061 && is_global_var (lhs))
4063 struct constraint_expr tmpc;
4064 tmpc.var = escaped_id;
4065 tmpc.offset = 0;
4066 tmpc.type = SCALAR;
4067 lhsc.safe_push (tmpc);
4070 /* If the call returns an argument unmodified override the rhs
4071 constraints. */
4072 if (flags & ERF_RETURNS_ARG
4073 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
4075 tree arg;
4076 rhsc.create (0);
4077 arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
4078 get_constraint_for (arg, &rhsc);
4079 process_all_all_constraints (lhsc, rhsc);
4080 rhsc.release ();
4082 else if (flags & ERF_NOALIAS)
4084 varinfo_t vi;
4085 struct constraint_expr tmpc;
4086 rhsc.create (0);
4087 vi = make_heapvar ("HEAP", true);
4088 /* We are marking allocated storage local, we deal with it becoming
4089 global by escaping and setting of vars_contains_escaped_heap. */
4090 DECL_EXTERNAL (vi->decl) = 0;
4091 vi->is_global_var = 0;
4092 /* If this is not a real malloc call assume the memory was
4093 initialized and thus may point to global memory. All
4094 builtin functions with the malloc attribute behave in a sane way. */
4095 if (!fndecl
4096 || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
4097 make_constraint_from (vi, nonlocal_id);
4098 tmpc.var = vi->id;
4099 tmpc.offset = 0;
4100 tmpc.type = ADDRESSOF;
4101 rhsc.safe_push (tmpc);
4102 process_all_all_constraints (lhsc, rhsc);
4103 rhsc.release ();
4105 else
4106 process_all_all_constraints (lhsc, rhsc);
4109 /* For non-IPA mode, generate constraints necessary for a call of a
4110 const function that returns a pointer in the statement STMT. */
4112 static void
4113 handle_const_call (gcall *stmt, vec<ce_s> *results)
4115 struct constraint_expr rhsc;
4116 unsigned int k;
4117 bool need_uses = false;
4119 /* Treat nested const functions the same as pure functions as far
4120 as the static chain is concerned. */
4121 if (gimple_call_chain (stmt))
4123 varinfo_t uses = get_call_use_vi (stmt);
4124 make_constraint_to (uses->id, gimple_call_chain (stmt));
4125 need_uses = true;
4128 /* And if we applied NRV the address of the return slot escapes as well. */
4129 if (gimple_call_return_slot_opt_p (stmt)
4130 && gimple_call_lhs (stmt) != NULL_TREE
4131 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
4133 varinfo_t uses = get_call_use_vi (stmt);
4134 auto_vec<ce_s> tmpc;
4135 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
4136 make_constraints_to (uses->id, tmpc);
4137 need_uses = true;
4140 if (need_uses)
4142 varinfo_t uses = get_call_use_vi (stmt);
4143 make_any_offset_constraints (uses);
4144 make_transitive_closure_constraints (uses);
4145 rhsc.var = uses->id;
4146 rhsc.offset = 0;
4147 rhsc.type = SCALAR;
4148 results->safe_push (rhsc);
4151 /* May return offsetted arguments. */
4152 varinfo_t tem = NULL;
4153 if (gimple_call_num_args (stmt) != 0)
4155 tem = new_var_info (NULL_TREE, "callarg", true);
4156 tem->is_reg_var = true;
4158 for (k = 0; k < gimple_call_num_args (stmt); ++k)
4160 tree arg = gimple_call_arg (stmt, k);
4161 auto_vec<ce_s> argc;
4162 get_constraint_for_rhs (arg, &argc);
4163 make_constraints_to (tem->id, argc);
4165 if (tem)
4167 ce_s ce;
4168 ce.type = SCALAR;
4169 ce.var = tem->id;
4170 ce.offset = UNKNOWN_OFFSET;
4171 results->safe_push (ce);
4174 /* May return addresses of globals. */
4175 rhsc.var = nonlocal_id;
4176 rhsc.offset = 0;
4177 rhsc.type = ADDRESSOF;
4178 results->safe_push (rhsc);
4181 /* For non-IPA mode, generate constraints necessary for a call to a
4182 pure function in statement STMT. */
4184 static void
4185 handle_pure_call (gcall *stmt, vec<ce_s> *results)
4187 struct constraint_expr rhsc;
4188 unsigned i;
4189 varinfo_t uses = NULL;
4191 /* Memory reached from pointer arguments is call-used. */
4192 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4194 tree arg = gimple_call_arg (stmt, i);
4195 if (!uses)
4197 uses = get_call_use_vi (stmt);
4198 make_any_offset_constraints (uses);
4199 make_transitive_closure_constraints (uses);
4201 make_constraint_to (uses->id, arg);
4204 /* The static chain is used as well. */
4205 if (gimple_call_chain (stmt))
4207 if (!uses)
4209 uses = get_call_use_vi (stmt);
4210 make_any_offset_constraints (uses);
4211 make_transitive_closure_constraints (uses);
4213 make_constraint_to (uses->id, gimple_call_chain (stmt));
4216 /* And if we applied NRV the address of the return slot. */
4217 if (gimple_call_return_slot_opt_p (stmt)
4218 && gimple_call_lhs (stmt) != NULL_TREE
4219 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
4221 if (!uses)
4223 uses = get_call_use_vi (stmt);
4224 make_any_offset_constraints (uses);
4225 make_transitive_closure_constraints (uses);
4227 auto_vec<ce_s> tmpc;
4228 get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
4229 make_constraints_to (uses->id, tmpc);
4232 /* Pure functions may return call-used and nonlocal memory. */
4233 if (uses)
4235 rhsc.var = uses->id;
4236 rhsc.offset = 0;
4237 rhsc.type = SCALAR;
4238 results->safe_push (rhsc);
4240 rhsc.var = nonlocal_id;
4241 rhsc.offset = 0;
4242 rhsc.type = SCALAR;
4243 results->safe_push (rhsc);
4247 /* Return the varinfo for the callee of CALL. */
4249 static varinfo_t
4250 get_fi_for_callee (gcall *call)
4252 tree decl, fn = gimple_call_fn (call);
4254 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4255 fn = OBJ_TYPE_REF_EXPR (fn);
4257 /* If we can directly resolve the function being called, do so.
4258 Otherwise, it must be some sort of indirect expression that
4259 we should still be able to handle. */
4260 decl = gimple_call_addr_fndecl (fn);
4261 if (decl)
4262 return get_vi_for_tree (decl);
4264 /* If the function is anything other than a SSA name pointer we have no
4265 clue and should be getting ANYFN (well, ANYTHING for now). */
4266 if (!fn || TREE_CODE (fn) != SSA_NAME)
4267 return get_varinfo (anything_id);
4269 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4270 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4271 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4272 fn = SSA_NAME_VAR (fn);
4274 return get_vi_for_tree (fn);
4277 /* Create constraints for assigning call argument ARG to the incoming parameter
4278 INDEX of function FI. */
4280 static void
4281 find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg)
4283 struct constraint_expr lhs;
4284 lhs = get_function_part_constraint (fi, fi_parm_base + index);
4286 auto_vec<ce_s, 2> rhsc;
4287 get_constraint_for_rhs (arg, &rhsc);
4289 unsigned j;
4290 struct constraint_expr *rhsp;
4291 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4292 process_constraint (new_constraint (lhs, *rhsp));
4295 /* Return true if FNDECL may be part of another lto partition. */
4297 static bool
4298 fndecl_maybe_in_other_partition (tree fndecl)
4300 cgraph_node *fn_node = cgraph_node::get (fndecl);
4301 if (fn_node == NULL)
4302 return true;
4304 return fn_node->in_other_partition;
4307 /* Create constraints for the builtin call T. Return true if the call
4308 was handled, otherwise false. */
4310 static bool
4311 find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
4313 tree fndecl = gimple_call_fndecl (t);
4314 auto_vec<ce_s, 2> lhsc;
4315 auto_vec<ce_s, 4> rhsc;
4316 varinfo_t fi;
4318 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4319 /* ??? All builtins that are handled here need to be handled
4320 in the alias-oracle query functions explicitly! */
4321 switch (DECL_FUNCTION_CODE (fndecl))
4323 /* All the following functions return a pointer to the same object
4324 as their first argument points to. The functions do not add
4325 to the ESCAPED solution. The functions make the first argument
4326 pointed to memory point to what the second argument pointed to
4327 memory points to. */
4328 case BUILT_IN_STRCPY:
4329 case BUILT_IN_STRNCPY:
4330 case BUILT_IN_BCOPY:
4331 case BUILT_IN_MEMCPY:
4332 case BUILT_IN_MEMMOVE:
4333 case BUILT_IN_MEMPCPY:
4334 case BUILT_IN_STPCPY:
4335 case BUILT_IN_STPNCPY:
4336 case BUILT_IN_STRCAT:
4337 case BUILT_IN_STRNCAT:
4338 case BUILT_IN_STRCPY_CHK:
4339 case BUILT_IN_STRNCPY_CHK:
4340 case BUILT_IN_MEMCPY_CHK:
4341 case BUILT_IN_MEMMOVE_CHK:
4342 case BUILT_IN_MEMPCPY_CHK:
4343 case BUILT_IN_STPCPY_CHK:
4344 case BUILT_IN_STPNCPY_CHK:
4345 case BUILT_IN_STRCAT_CHK:
4346 case BUILT_IN_STRNCAT_CHK:
4347 case BUILT_IN_TM_MEMCPY:
4348 case BUILT_IN_TM_MEMMOVE:
4350 tree res = gimple_call_lhs (t);
4351 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4352 == BUILT_IN_BCOPY ? 1 : 0));
4353 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4354 == BUILT_IN_BCOPY ? 0 : 1));
4355 if (res != NULL_TREE)
4357 get_constraint_for (res, &lhsc);
4358 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4359 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4360 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4361 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4362 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4363 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4364 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4365 else
4366 get_constraint_for (dest, &rhsc);
4367 process_all_all_constraints (lhsc, rhsc);
4368 lhsc.truncate (0);
4369 rhsc.truncate (0);
4371 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4372 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4373 do_deref (&lhsc);
4374 do_deref (&rhsc);
4375 process_all_all_constraints (lhsc, rhsc);
4376 return true;
4378 case BUILT_IN_MEMSET:
4379 case BUILT_IN_MEMSET_CHK:
4380 case BUILT_IN_TM_MEMSET:
4382 tree res = gimple_call_lhs (t);
4383 tree dest = gimple_call_arg (t, 0);
4384 unsigned i;
4385 ce_s *lhsp;
4386 struct constraint_expr ac;
4387 if (res != NULL_TREE)
4389 get_constraint_for (res, &lhsc);
4390 get_constraint_for (dest, &rhsc);
4391 process_all_all_constraints (lhsc, rhsc);
4392 lhsc.truncate (0);
4394 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4395 do_deref (&lhsc);
4396 if (flag_delete_null_pointer_checks
4397 && integer_zerop (gimple_call_arg (t, 1)))
4399 ac.type = ADDRESSOF;
4400 ac.var = nothing_id;
4402 else
4404 ac.type = SCALAR;
4405 ac.var = integer_id;
4407 ac.offset = 0;
4408 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4409 process_constraint (new_constraint (*lhsp, ac));
4410 return true;
4412 case BUILT_IN_POSIX_MEMALIGN:
4414 tree ptrptr = gimple_call_arg (t, 0);
4415 get_constraint_for (ptrptr, &lhsc);
4416 do_deref (&lhsc);
4417 varinfo_t vi = make_heapvar ("HEAP", true);
4418 /* We are marking allocated storage local, we deal with it becoming
4419 global by escaping and setting of vars_contains_escaped_heap. */
4420 DECL_EXTERNAL (vi->decl) = 0;
4421 vi->is_global_var = 0;
4422 struct constraint_expr tmpc;
4423 tmpc.var = vi->id;
4424 tmpc.offset = 0;
4425 tmpc.type = ADDRESSOF;
4426 rhsc.safe_push (tmpc);
4427 process_all_all_constraints (lhsc, rhsc);
4428 return true;
4430 case BUILT_IN_ASSUME_ALIGNED:
4432 tree res = gimple_call_lhs (t);
4433 tree dest = gimple_call_arg (t, 0);
4434 if (res != NULL_TREE)
4436 get_constraint_for (res, &lhsc);
4437 get_constraint_for (dest, &rhsc);
4438 process_all_all_constraints (lhsc, rhsc);
4440 return true;
4442 /* All the following functions do not return pointers, do not
4443 modify the points-to sets of memory reachable from their
4444 arguments and do not add to the ESCAPED solution. */
4445 case BUILT_IN_SINCOS:
4446 case BUILT_IN_SINCOSF:
4447 case BUILT_IN_SINCOSL:
4448 case BUILT_IN_FREXP:
4449 case BUILT_IN_FREXPF:
4450 case BUILT_IN_FREXPL:
4451 case BUILT_IN_GAMMA_R:
4452 case BUILT_IN_GAMMAF_R:
4453 case BUILT_IN_GAMMAL_R:
4454 case BUILT_IN_LGAMMA_R:
4455 case BUILT_IN_LGAMMAF_R:
4456 case BUILT_IN_LGAMMAL_R:
4457 case BUILT_IN_MODF:
4458 case BUILT_IN_MODFF:
4459 case BUILT_IN_MODFL:
4460 case BUILT_IN_REMQUO:
4461 case BUILT_IN_REMQUOF:
4462 case BUILT_IN_REMQUOL:
4463 case BUILT_IN_FREE:
4464 return true;
4465 case BUILT_IN_STRDUP:
4466 case BUILT_IN_STRNDUP:
4467 case BUILT_IN_REALLOC:
4468 if (gimple_call_lhs (t))
4470 handle_lhs_call (t, gimple_call_lhs (t),
4471 gimple_call_return_flags (t) | ERF_NOALIAS,
4472 vNULL, fndecl);
4473 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4474 NULL_TREE, &lhsc);
4475 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4476 NULL_TREE, &rhsc);
4477 do_deref (&lhsc);
4478 do_deref (&rhsc);
4479 process_all_all_constraints (lhsc, rhsc);
4480 lhsc.truncate (0);
4481 rhsc.truncate (0);
4482 /* For realloc the resulting pointer can be equal to the
4483 argument as well. But only doing this wouldn't be
4484 correct because with ptr == 0 realloc behaves like malloc. */
4485 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
4487 get_constraint_for (gimple_call_lhs (t), &lhsc);
4488 get_constraint_for (gimple_call_arg (t, 0), &rhsc);
4489 process_all_all_constraints (lhsc, rhsc);
4491 return true;
4493 break;
4494 /* String / character search functions return a pointer into the
4495 source string or NULL. */
4496 case BUILT_IN_INDEX:
4497 case BUILT_IN_STRCHR:
4498 case BUILT_IN_STRRCHR:
4499 case BUILT_IN_MEMCHR:
4500 case BUILT_IN_STRSTR:
4501 case BUILT_IN_STRPBRK:
4502 if (gimple_call_lhs (t))
4504 tree src = gimple_call_arg (t, 0);
4505 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4506 constraint_expr nul;
4507 nul.var = nothing_id;
4508 nul.offset = 0;
4509 nul.type = ADDRESSOF;
4510 rhsc.safe_push (nul);
4511 get_constraint_for (gimple_call_lhs (t), &lhsc);
4512 process_all_all_constraints (lhsc, rhsc);
4514 return true;
4515 /* Pure functions that return something not based on any object and
4516 that use the memory pointed to by their arguments (but not
4517 transitively). */
4518 case BUILT_IN_STRCMP:
4519 case BUILT_IN_STRCMP_EQ:
4520 case BUILT_IN_STRNCMP:
4521 case BUILT_IN_STRNCMP_EQ:
4522 case BUILT_IN_STRCASECMP:
4523 case BUILT_IN_STRNCASECMP:
4524 case BUILT_IN_MEMCMP:
4525 case BUILT_IN_BCMP:
4526 case BUILT_IN_STRSPN:
4527 case BUILT_IN_STRCSPN:
4529 varinfo_t uses = get_call_use_vi (t);
4530 make_any_offset_constraints (uses);
4531 make_constraint_to (uses->id, gimple_call_arg (t, 0));
4532 make_constraint_to (uses->id, gimple_call_arg (t, 1));
4533 /* No constraints are necessary for the return value. */
4534 return true;
4536 case BUILT_IN_STRLEN:
4538 varinfo_t uses = get_call_use_vi (t);
4539 make_any_offset_constraints (uses);
4540 make_constraint_to (uses->id, gimple_call_arg (t, 0));
4541 /* No constraints are necessary for the return value. */
4542 return true;
4544 case BUILT_IN_OBJECT_SIZE:
4545 case BUILT_IN_CONSTANT_P:
4547 /* No constraints are necessary for the return value or the
4548 arguments. */
4549 return true;
4551 /* Trampolines are special - they set up passing the static
4552 frame. */
4553 case BUILT_IN_INIT_TRAMPOLINE:
4555 tree tramp = gimple_call_arg (t, 0);
4556 tree nfunc = gimple_call_arg (t, 1);
4557 tree frame = gimple_call_arg (t, 2);
4558 unsigned i;
4559 struct constraint_expr lhs, *rhsp;
4560 if (in_ipa_mode)
4562 varinfo_t nfi = NULL;
4563 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4564 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4565 if (nfi)
4567 lhs = get_function_part_constraint (nfi, fi_static_chain);
4568 get_constraint_for (frame, &rhsc);
4569 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4570 process_constraint (new_constraint (lhs, *rhsp));
4571 rhsc.truncate (0);
4573 /* Make the frame point to the function for
4574 the trampoline adjustment call. */
4575 get_constraint_for (tramp, &lhsc);
4576 do_deref (&lhsc);
4577 get_constraint_for (nfunc, &rhsc);
4578 process_all_all_constraints (lhsc, rhsc);
4580 return true;
4583 /* Else fallthru to generic handling which will let
4584 the frame escape. */
4585 break;
4587 case BUILT_IN_ADJUST_TRAMPOLINE:
4589 tree tramp = gimple_call_arg (t, 0);
4590 tree res = gimple_call_lhs (t);
4591 if (in_ipa_mode && res)
4593 get_constraint_for (res, &lhsc);
4594 get_constraint_for (tramp, &rhsc);
4595 do_deref (&rhsc);
4596 process_all_all_constraints (lhsc, rhsc);
4598 return true;
4600 CASE_BUILT_IN_TM_STORE (1):
4601 CASE_BUILT_IN_TM_STORE (2):
4602 CASE_BUILT_IN_TM_STORE (4):
4603 CASE_BUILT_IN_TM_STORE (8):
4604 CASE_BUILT_IN_TM_STORE (FLOAT):
4605 CASE_BUILT_IN_TM_STORE (DOUBLE):
4606 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4607 CASE_BUILT_IN_TM_STORE (M64):
4608 CASE_BUILT_IN_TM_STORE (M128):
4609 CASE_BUILT_IN_TM_STORE (M256):
4611 tree addr = gimple_call_arg (t, 0);
4612 tree src = gimple_call_arg (t, 1);
4614 get_constraint_for (addr, &lhsc);
4615 do_deref (&lhsc);
4616 get_constraint_for (src, &rhsc);
4617 process_all_all_constraints (lhsc, rhsc);
4618 return true;
4620 CASE_BUILT_IN_TM_LOAD (1):
4621 CASE_BUILT_IN_TM_LOAD (2):
4622 CASE_BUILT_IN_TM_LOAD (4):
4623 CASE_BUILT_IN_TM_LOAD (8):
4624 CASE_BUILT_IN_TM_LOAD (FLOAT):
4625 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4626 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4627 CASE_BUILT_IN_TM_LOAD (M64):
4628 CASE_BUILT_IN_TM_LOAD (M128):
4629 CASE_BUILT_IN_TM_LOAD (M256):
4631 tree dest = gimple_call_lhs (t);
4632 tree addr = gimple_call_arg (t, 0);
4634 get_constraint_for (dest, &lhsc);
4635 get_constraint_for (addr, &rhsc);
4636 do_deref (&rhsc);
4637 process_all_all_constraints (lhsc, rhsc);
4638 return true;
4640 /* Variadic argument handling needs to be handled in IPA
4641 mode as well. */
4642 case BUILT_IN_VA_START:
4644 tree valist = gimple_call_arg (t, 0);
4645 struct constraint_expr rhs, *lhsp;
4646 unsigned i;
4647 get_constraint_for_ptr_offset (valist, NULL_TREE, &lhsc);
4648 do_deref (&lhsc);
4649 /* The va_list gets access to pointers in variadic
4650 arguments. Which we know in the case of IPA analysis
4651 and otherwise are just all nonlocal variables. */
4652 if (in_ipa_mode)
4654 fi = lookup_vi_for_tree (fn->decl);
4655 rhs = get_function_part_constraint (fi, ~0);
4656 rhs.type = ADDRESSOF;
4658 else
4660 rhs.var = nonlocal_id;
4661 rhs.type = ADDRESSOF;
4662 rhs.offset = 0;
4664 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4665 process_constraint (new_constraint (*lhsp, rhs));
4666 /* va_list is clobbered. */
4667 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4668 return true;
4670 /* va_end doesn't have any effect that matters. */
4671 case BUILT_IN_VA_END:
4672 return true;
4673 /* Alternate return. Simply give up for now. */
4674 case BUILT_IN_RETURN:
4676 fi = NULL;
4677 if (!in_ipa_mode
4678 || !(fi = get_vi_for_tree (fn->decl)))
4679 make_constraint_from (get_varinfo (escaped_id), anything_id);
4680 else if (in_ipa_mode
4681 && fi != NULL)
4683 struct constraint_expr lhs, rhs;
4684 lhs = get_function_part_constraint (fi, fi_result);
4685 rhs.var = anything_id;
4686 rhs.offset = 0;
4687 rhs.type = SCALAR;
4688 process_constraint (new_constraint (lhs, rhs));
4690 return true;
4692 case BUILT_IN_GOMP_PARALLEL:
4693 case BUILT_IN_GOACC_PARALLEL:
4695 if (in_ipa_mode)
4697 unsigned int fnpos, argpos;
4698 switch (DECL_FUNCTION_CODE (fndecl))
4700 case BUILT_IN_GOMP_PARALLEL:
4701 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
4702 fnpos = 0;
4703 argpos = 1;
4704 break;
4705 case BUILT_IN_GOACC_PARALLEL:
4706 /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
4707 sizes, kinds, ...). */
4708 fnpos = 1;
4709 argpos = 3;
4710 break;
4711 default:
4712 gcc_unreachable ();
4715 tree fnarg = gimple_call_arg (t, fnpos);
4716 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
4717 tree fndecl = TREE_OPERAND (fnarg, 0);
4718 if (fndecl_maybe_in_other_partition (fndecl))
4719 /* Fallthru to general call handling. */
4720 break;
4722 tree arg = gimple_call_arg (t, argpos);
4724 varinfo_t fi = get_vi_for_tree (fndecl);
4725 find_func_aliases_for_call_arg (fi, 0, arg);
4726 return true;
4728 /* Else fallthru to generic call handling. */
4729 break;
4731 /* printf-style functions may have hooks to set pointers to
4732 point to somewhere into the generated string. Leave them
4733 for a later exercise... */
4734 default:
4735 /* Fallthru to general call handling. */;
4738 return false;
4741 /* Create constraints for the call T. */
/* Build points-to constraints for call statement T in function FN.
   Recognized builtins are dispatched to the dedicated handler first;
   otherwise the call is modeled either through its ECF flags (when no
   per-function info is available) or, in IPA mode, through the callee's
   function-part variables (arguments, result, static chain).  */
4743 static void
4744 find_func_aliases_for_call (struct function *fn, gcall *t)
4746 tree fndecl = gimple_call_fndecl (t);
4747 varinfo_t fi;
/* A builtin handler returning true means it fully modeled the call.  */
4749 if (fndecl != NULL_TREE
4750 && fndecl_built_in_p (fndecl)
4751 && find_func_aliases_for_builtin_call (fn, t))
4752 return;
4754 fi = get_fi_for_callee (t);
/* NOTE(review): fi->decl && fndecl && !fi->is_fn_info selects callees
   with a known decl but no generated function info (e.g. externals);
   those are modeled conservatively via ECF flags like the non-IPA case.  */
4755 if (!in_ipa_mode
4756 || (fi->decl && fndecl && !fi->is_fn_info))
4758 auto_vec<ce_s, 16> rhsc;
4759 int flags = gimple_call_flags (t);
4761 /* Const functions can return their arguments and addresses
4762 of global memory but not of escaped memory. */
4763 if (flags & (ECF_CONST|ECF_NOVOPS))
4765 if (gimple_call_lhs (t))
4766 handle_const_call (t, &rhsc);
4768 /* Pure functions can return addresses in and of memory
4769 reachable from their arguments, but they are not an escape
4770 point for reachable memory of their arguments. */
4771 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4772 handle_pure_call (t, &rhsc);
4773 else
4774 handle_rhs_call (t, &rhsc);
4775 if (gimple_call_lhs (t))
4776 handle_lhs_call (t, gimple_call_lhs (t),
4777 gimple_call_return_flags (t), rhsc, fndecl);
4779 else
4781 auto_vec<ce_s, 2> rhsc;
4782 tree lhsop;
4783 unsigned j;
4785 /* Assign all the passed arguments to the appropriate incoming
4786 parameters of the function. */
4787 for (j = 0; j < gimple_call_num_args (t); j++)
4789 tree arg = gimple_call_arg (t, j);
4790 find_func_aliases_for_call_arg (fi, j, arg);
4793 /* If we are returning a value, assign it to the result. */
4794 lhsop = gimple_call_lhs (t);
4795 if (lhsop)
4797 auto_vec<ce_s, 2> lhsc;
4798 struct constraint_expr rhs;
4799 struct constraint_expr *lhsp;
4800 bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (t));
4802 get_constraint_for (lhsop, &lhsc);
4803 rhs = get_function_part_constraint (fi, fi_result);
/* For an aggregate return the result slot holds a pointer to the
   returned object, so dereference it once before assigning.  */
4804 if (aggr_p)
4806 auto_vec<ce_s, 2> tem;
4807 tem.quick_push (rhs);
4808 do_deref (&tem);
4809 gcc_checking_assert (tem.length () == 1);
4810 rhs = tem[0];
4812 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4813 process_constraint (new_constraint (*lhsp, rhs));
4815 /* If we pass the result decl by reference, honor that. */
4816 if (aggr_p)
4818 struct constraint_expr lhs;
4819 struct constraint_expr *rhsp;
4821 get_constraint_for_address_of (lhsop, &rhsc);
4822 lhs = get_function_part_constraint (fi, fi_result);
4823 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4824 process_constraint (new_constraint (lhs, *rhsp));
4825 rhsc.truncate (0);
4829 /* If we use a static chain, pass it along. */
4830 if (gimple_call_chain (t))
4832 struct constraint_expr lhs;
4833 struct constraint_expr *rhsp;
4835 get_constraint_for (gimple_call_chain (t), &rhsc);
4836 lhs = get_function_part_constraint (fi, fi_static_chain);
4837 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4838 process_constraint (new_constraint (lhs, *rhsp));
4843 /* Walk statement T setting up aliasing constraints according to the
4844 references found in T. This function is the main part of the
4845 constraint builder. AI points to auxiliary alias information used
4846 when building alias sets and computing alias grouping heuristics. */
/* NOTE(review): the sentence about "AI" above is stale — this function
   takes no such parameter; only FN and ORIGT.  */
4848 static void
4849 find_func_aliases (struct function *fn, gimple *origt)
4851 gimple *t = origt;
4852 auto_vec<ce_s, 16> lhsc;
4853 auto_vec<ce_s, 16> rhsc;
4854 varinfo_t fi;
4856 /* Now build constraints expressions. */
4857 if (gimple_code (t) == GIMPLE_PHI)
4859 /* For a phi node, assign all the arguments to
4860 the result. */
4861 get_constraint_for (gimple_phi_result (t), &lhsc);
4862 for (unsigned i = 0; i < gimple_phi_num_args (t); i++)
4864 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
4865 process_all_all_constraints (lhsc, rhsc);
4866 rhsc.truncate (0);
4869 /* In IPA mode, we need to generate constraints to pass call
4870 arguments through their calls. There are two cases,
4871 either a GIMPLE_CALL returning a value, or just a plain
4872 GIMPLE_CALL when we are not.
4874 In non-ipa mode, we need to generate constraints for each
4875 pointer passed by address. */
4876 else if (is_gimple_call (t))
4877 find_func_aliases_for_call (fn, as_a <gcall *> (t));
4879 /* Otherwise, just a regular assignment statement. Only care about
4880 operations with pointer result, others are dealt with as escape
4881 points if they have pointer operands. */
4882 else if (is_gimple_assign (t))
4884 /* Otherwise, just a regular assignment statement. */
4885 tree lhsop = gimple_assign_lhs (t);
4886 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4888 if (rhsop && TREE_CLOBBER_P (rhsop))
4889 /* Ignore clobbers, they don't actually store anything into
4890 the LHS. */
4892 else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4893 do_structure_copy (lhsop, rhsop);
4894 else
4896 enum tree_code code = gimple_assign_rhs_code (t);
4898 get_constraint_for (lhsop, &lhsc);
4900 if (code == POINTER_PLUS_EXPR)
4901 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4902 gimple_assign_rhs2 (t), &rhsc);
4903 else if (code == BIT_AND_EXPR
4904 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
4906 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4907 the pointer. Handle it by offsetting it by UNKNOWN. */
4908 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4909 NULL_TREE, &rhsc);
4911 else if ((CONVERT_EXPR_CODE_P (code)
4912 && !(POINTER_TYPE_P (gimple_expr_type (t))
4913 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4914 || gimple_assign_single_p (t))
4915 get_constraint_for_rhs (rhsop, &rhsc);
4916 else if (code == COND_EXPR)
4918 /* The result is a merge of both COND_EXPR arms. */
4919 auto_vec<ce_s, 2> tmp;
4920 struct constraint_expr *rhsp;
4921 unsigned i;
4922 get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
4923 get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
4924 FOR_EACH_VEC_ELT (tmp, i, rhsp)
4925 rhsc.safe_push (*rhsp);
4927 else if (truth_value_p (code))
4928 /* Truth value results are not pointer (parts). Or at least
4929 very unreasonable obfuscation of a part. */
4931 else
4933 /* All other operations are merges. */
4934 auto_vec<ce_s, 4> tmp;
4935 struct constraint_expr *rhsp;
4936 unsigned i, j;
4937 get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
4938 for (i = 2; i < gimple_num_ops (t); ++i)
4940 get_constraint_for_rhs (gimple_op (t, i), &tmp);
4941 FOR_EACH_VEC_ELT (tmp, j, rhsp)
4942 rhsc.safe_push (*rhsp);
4943 tmp.truncate (0);
4946 process_all_all_constraints (lhsc, rhsc);
4948 /* If there is a store to a global variable the rhs escapes. */
4949 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4950 && DECL_P (lhsop))
4952 varinfo_t vi = get_vi_for_tree (lhsop);
4953 if ((! in_ipa_mode && vi->is_global_var)
4954 || vi->is_ipa_escape_point)
4955 make_escape_constraint (rhsop);
4958 /* Handle escapes through return. */
4959 else if (gimple_code (t) == GIMPLE_RETURN
4960 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
4962 greturn *return_stmt = as_a <greturn *> (t);
4963 fi = NULL;
/* Without function info the returned value simply escapes; with it we
   assign the value to the function's result part instead.  */
4964 if (!in_ipa_mode
4965 || !(fi = get_vi_for_tree (fn->decl)))
4966 make_escape_constraint (gimple_return_retval (return_stmt));
4967 else if (in_ipa_mode)
4969 struct constraint_expr lhs ;
4970 struct constraint_expr *rhsp;
4971 unsigned i;
4973 lhs = get_function_part_constraint (fi, fi_result);
4974 get_constraint_for_rhs (gimple_return_retval (return_stmt), &rhsc);
4975 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4976 process_constraint (new_constraint (lhs, *rhsp));
4979 /* Handle asms conservatively by adding escape constraints to everything. */
4980 else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
4982 unsigned i, noutputs;
4983 const char **oconstraints;
4984 const char *constraint;
4985 bool allows_mem, allows_reg, is_inout;
4987 noutputs = gimple_asm_noutputs (asm_stmt);
4988 oconstraints = XALLOCAVEC (const char *, noutputs);
4990 for (i = 0; i < noutputs; ++i)
4992 tree link = gimple_asm_output_op (asm_stmt, i);
4993 tree op = TREE_VALUE (link);
4995 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4996 oconstraints[i] = constraint;
4997 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4998 &allows_reg, &is_inout);
5000 /* A memory constraint makes the address of the operand escape. */
5001 if (!allows_reg && allows_mem)
5002 make_escape_constraint (build_fold_addr_expr (op));
5004 /* The asm may read global memory, so outputs may point to
5005 any global memory. */
5006 if (op)
5008 auto_vec<ce_s, 2> lhsc;
5009 struct constraint_expr rhsc, *lhsp;
5010 unsigned j;
5011 get_constraint_for (op, &lhsc);
5012 rhsc.var = nonlocal_id;
5013 rhsc.offset = 0;
5014 rhsc.type = SCALAR;
5015 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
5016 process_constraint (new_constraint (*lhsp, rhsc));
5019 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5021 tree link = gimple_asm_input_op (asm_stmt, i);
5022 tree op = TREE_VALUE (link);
5024 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5026 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
5027 &allows_mem, &allows_reg);
5029 /* A memory constraint makes the address of the operand escape. */
5030 if (!allows_reg && allows_mem)
5031 make_escape_constraint (build_fold_addr_expr (op));
5032 /* Strictly we'd only need the constraint to ESCAPED if
5033 the asm clobbers memory, otherwise using something
5034 along the lines of per-call clobbers/uses would be enough. */
5035 else if (op)
5036 make_escape_constraint (op);
5042 /* Create a constraint adding to the clobber set of FI the memory
5043 pointed to by PTR. */
5045 static void
5046 process_ipa_clobber (varinfo_t fi, tree ptr)
5048 vec<ce_s> ptrc = vNULL;
5049 struct constraint_expr *c, lhs;
5050 unsigned i;
5051 get_constraint_for_rhs (ptr, &ptrc);
5052 lhs = get_function_part_constraint (fi, fi_clobbers);
5053 FOR_EACH_VEC_ELT (ptrc, i, c)
5054 process_constraint (new_constraint (lhs, *c));
5055 ptrc.release ();
5058 /* Walk statement T setting up clobber and use constraints according to the
5059 references found in T. This function is a main part of the
5060 IPA constraint builder. */
5062 static void
5063 find_func_clobbers (struct function *fn, gimple *origt)
5065 gimple *t = origt;
5066 auto_vec<ce_s, 16> lhsc;
5067 auto_vec<ce_s, 16> rhsc;
5068 varinfo_t fi;
5070 /* Add constraints for clobbered/used in IPA mode.
5071 We are not interested in what automatic variables are clobbered
5072 or used as we only use the information in the caller to which
5073 they do not escape. */
5074 gcc_assert (in_ipa_mode);
5076 /* If the stmt refers to memory in any way it better had a VUSE. */
5077 if (gimple_vuse (t) == NULL_TREE)
5078 return;
5080 /* We'd better have function information for the current function. */
5081 fi = lookup_vi_for_tree (fn->decl);
5082 gcc_assert (fi != NULL);
5084 /* Account for stores in assignments and calls. */
5085 if (gimple_vdef (t) != NULL_TREE
5086 && gimple_has_lhs (t))
5088 tree lhs = gimple_get_lhs (t);
5089 tree tem = lhs;
/* Strip component references to find the base of the store.  */
5090 while (handled_component_p (tem))
5091 tem = TREE_OPERAND (tem, 0);
/* Only non-local stores matter: a decl that is not an automatic
   variable of FN, or an indirect store not provably through the
   address of such an automatic variable.  */
5092 if ((DECL_P (tem)
5093 && !auto_var_in_fn_p (tem, fn->decl))
5094 || INDIRECT_REF_P (tem)
5095 || (TREE_CODE (tem) == MEM_REF
5096 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5097 && auto_var_in_fn_p
5098 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5100 struct constraint_expr lhsc, *rhsp;
5101 unsigned i;
5102 lhsc = get_function_part_constraint (fi, fi_clobbers);
5103 get_constraint_for_address_of (lhs, &rhsc);
5104 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5105 process_constraint (new_constraint (lhsc, *rhsp));
5106 rhsc.truncate (0);
5110 /* Account for uses in assignments and returns. */
5111 if (gimple_assign_single_p (t)
5112 || (gimple_code (t) == GIMPLE_RETURN
5113 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
5115 tree rhs = (gimple_assign_single_p (t)
5116 ? gimple_assign_rhs1 (t)
5117 : gimple_return_retval (as_a <greturn *> (t)));
5118 tree tem = rhs;
5119 while (handled_component_p (tem))
5120 tem = TREE_OPERAND (tem, 0);
/* Same non-local filter as for stores above, but feeding fi_uses.  */
5121 if ((DECL_P (tem)
5122 && !auto_var_in_fn_p (tem, fn->decl))
5123 || INDIRECT_REF_P (tem)
5124 || (TREE_CODE (tem) == MEM_REF
5125 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5126 && auto_var_in_fn_p
5127 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5129 struct constraint_expr lhs, *rhsp;
5130 unsigned i;
5131 lhs = get_function_part_constraint (fi, fi_uses);
5132 get_constraint_for_address_of (rhs, &rhsc);
5133 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5134 process_constraint (new_constraint (lhs, *rhsp));
5135 rhsc.truncate (0);
5139 if (gcall *call_stmt = dyn_cast <gcall *> (t))
5141 varinfo_t cfi = NULL;
5142 tree decl = gimple_call_fndecl (t);
5143 struct constraint_expr lhs, rhs;
5144 unsigned i, j;
5146 /* For builtins we do not have separate function info. For those
5147 we do not generate escapes for we have to generate clobbers/uses. */
5148 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
5149 switch (DECL_FUNCTION_CODE (decl))
5151 /* The following functions use and clobber memory pointed to
5152 by their arguments. */
5153 case BUILT_IN_STRCPY:
5154 case BUILT_IN_STRNCPY:
5155 case BUILT_IN_BCOPY:
5156 case BUILT_IN_MEMCPY:
5157 case BUILT_IN_MEMMOVE:
5158 case BUILT_IN_MEMPCPY:
5159 case BUILT_IN_STPCPY:
5160 case BUILT_IN_STPNCPY:
5161 case BUILT_IN_STRCAT:
5162 case BUILT_IN_STRNCAT:
5163 case BUILT_IN_STRCPY_CHK:
5164 case BUILT_IN_STRNCPY_CHK:
5165 case BUILT_IN_MEMCPY_CHK:
5166 case BUILT_IN_MEMMOVE_CHK:
5167 case BUILT_IN_MEMPCPY_CHK:
5168 case BUILT_IN_STPCPY_CHK:
5169 case BUILT_IN_STPNCPY_CHK:
5170 case BUILT_IN_STRCAT_CHK:
5171 case BUILT_IN_STRNCAT_CHK:
/* bcopy has (src, dest) argument order, the rest have (dest, src).  */
5173 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5174 == BUILT_IN_BCOPY ? 1 : 0));
5175 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5176 == BUILT_IN_BCOPY ? 0 : 1));
5177 unsigned i;
5178 struct constraint_expr *rhsp, *lhsp;
5179 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5180 lhs = get_function_part_constraint (fi, fi_clobbers);
5181 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5182 process_constraint (new_constraint (lhs, *lhsp));
5183 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
5184 lhs = get_function_part_constraint (fi, fi_uses);
5185 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5186 process_constraint (new_constraint (lhs, *rhsp));
5187 return;
5189 /* The following function clobbers memory pointed to by
5190 its argument. */
5191 case BUILT_IN_MEMSET:
5192 case BUILT_IN_MEMSET_CHK:
5193 case BUILT_IN_POSIX_MEMALIGN:
5195 tree dest = gimple_call_arg (t, 0);
5196 unsigned i;
5197 ce_s *lhsp;
5198 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5199 lhs = get_function_part_constraint (fi, fi_clobbers);
5200 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5201 process_constraint (new_constraint (lhs, *lhsp));
5202 return;
5204 /* The following functions clobber their second and third
5205 arguments. */
5206 case BUILT_IN_SINCOS:
5207 case BUILT_IN_SINCOSF:
5208 case BUILT_IN_SINCOSL:
5210 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5211 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5212 return;
5214 /* The following functions clobber their second argument. */
5215 case BUILT_IN_FREXP:
5216 case BUILT_IN_FREXPF:
5217 case BUILT_IN_FREXPL:
5218 case BUILT_IN_LGAMMA_R:
5219 case BUILT_IN_LGAMMAF_R:
5220 case BUILT_IN_LGAMMAL_R:
5221 case BUILT_IN_GAMMA_R:
5222 case BUILT_IN_GAMMAF_R:
5223 case BUILT_IN_GAMMAL_R:
5224 case BUILT_IN_MODF:
5225 case BUILT_IN_MODFF:
5226 case BUILT_IN_MODFL:
5228 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5229 return;
5231 /* The following functions clobber their third argument. */
5232 case BUILT_IN_REMQUO:
5233 case BUILT_IN_REMQUOF:
5234 case BUILT_IN_REMQUOL:
5236 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5237 return;
5239 /* The following functions neither read nor clobber memory. */
5240 case BUILT_IN_ASSUME_ALIGNED:
5241 case BUILT_IN_FREE:
5242 return;
5243 /* Trampolines are of no interest to us. */
5244 case BUILT_IN_INIT_TRAMPOLINE:
5245 case BUILT_IN_ADJUST_TRAMPOLINE:
5246 return;
5247 case BUILT_IN_VA_START:
5248 case BUILT_IN_VA_END:
5249 return;
5250 case BUILT_IN_GOMP_PARALLEL:
5251 case BUILT_IN_GOACC_PARALLEL:
5253 unsigned int fnpos, argpos;
5254 unsigned int implicit_use_args[2];
5255 unsigned int num_implicit_use_args = 0;
5256 switch (DECL_FUNCTION_CODE (decl))
5258 case BUILT_IN_GOMP_PARALLEL:
5259 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
5260 fnpos = 0;
5261 argpos = 1;
5262 break;
5263 case BUILT_IN_GOACC_PARALLEL:
5264 /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
5265 sizes, kinds, ...). */
5266 fnpos = 1;
5267 argpos = 3;
5268 implicit_use_args[num_implicit_use_args++] = 4;
5269 implicit_use_args[num_implicit_use_args++] = 5;
5270 break;
5271 default:
5272 gcc_unreachable ();
5275 tree fnarg = gimple_call_arg (t, fnpos);
5276 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
5277 tree fndecl = TREE_OPERAND (fnarg, 0);
5278 if (fndecl_maybe_in_other_partition (fndecl))
5279 /* Fallthru to general call handling. */
5280 break;
5282 varinfo_t cfi = get_vi_for_tree (fndecl);
5284 tree arg = gimple_call_arg (t, argpos);
5286 /* Parameter passed by value is used. */
5287 lhs = get_function_part_constraint (fi, fi_uses);
5288 struct constraint_expr *rhsp;
5289 get_constraint_for (arg, &rhsc);
5290 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5291 process_constraint (new_constraint (lhs, *rhsp));
5292 rhsc.truncate (0);
5294 /* Handle parameters used by the call, but not used in cfi, as
5295 implicitly used by cfi. */
5296 lhs = get_function_part_constraint (cfi, fi_uses);
5297 for (unsigned i = 0; i < num_implicit_use_args; ++i)
5299 tree arg = gimple_call_arg (t, implicit_use_args[i]);
5300 get_constraint_for (arg, &rhsc);
5301 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5302 process_constraint (new_constraint (lhs, *rhsp));
5303 rhsc.truncate (0);
5306 /* The caller clobbers what the callee does. */
5307 lhs = get_function_part_constraint (fi, fi_clobbers);
5308 rhs = get_function_part_constraint (cfi, fi_clobbers);
5309 process_constraint (new_constraint (lhs, rhs));
5311 /* The caller uses what the callee does. */
5312 lhs = get_function_part_constraint (fi, fi_uses);
5313 rhs = get_function_part_constraint (cfi, fi_uses);
5314 process_constraint (new_constraint (lhs, rhs));
5316 return;
5318 /* printf-style functions may have hooks to set pointers to
5319 point to somewhere into the generated string. Leave them
5320 for a later exercise... */
5321 default:
5322 /* Fallthru to general call handling. */;
5325 /* Parameters passed by value are used. */
5326 lhs = get_function_part_constraint (fi, fi_uses);
5327 for (i = 0; i < gimple_call_num_args (t); i++)
5329 struct constraint_expr *rhsp;
5330 tree arg = gimple_call_arg (t, i);
/* SSA names and invariants are not memory; only aggregates passed
   by reference on the stack are relevant here.  */
5332 if (TREE_CODE (arg) == SSA_NAME
5333 || is_gimple_min_invariant (arg))
5334 continue;
5336 get_constraint_for_address_of (arg, &rhsc);
5337 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5338 process_constraint (new_constraint (lhs, *rhsp));
5339 rhsc.truncate (0);
5342 /* Build constraints for propagating clobbers/uses along the
5343 callgraph edges. */
5344 cfi = get_fi_for_callee (call_stmt);
/* Unknown callee: it may clobber and use anything.  */
5345 if (cfi->id == anything_id)
5347 if (gimple_vdef (t))
5348 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5349 anything_id);
5350 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5351 anything_id);
5352 return;
5355 /* For callees without function info (that's external functions),
5356 ESCAPED is clobbered and used. */
5357 if (cfi->decl
5358 && TREE_CODE (cfi->decl) == FUNCTION_DECL
5359 && !cfi->is_fn_info)
5361 varinfo_t vi;
5363 if (gimple_vdef (t))
5364 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5365 escaped_id);
5366 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
5368 /* Also honor the call statement use/clobber info. */
5369 if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
5370 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5371 vi->id);
5372 if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
5373 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
5374 vi->id);
5375 return;
5378 /* Otherwise the caller clobbers and uses what the callee does.
5379 ??? This should use a new complex constraint that filters
5380 local variables of the callee. */
5381 if (gimple_vdef (t))
5383 lhs = get_function_part_constraint (fi, fi_clobbers);
5384 rhs = get_function_part_constraint (cfi, fi_clobbers);
5385 process_constraint (new_constraint (lhs, rhs));
5387 lhs = get_function_part_constraint (fi, fi_uses);
5388 rhs = get_function_part_constraint (cfi, fi_uses);
5389 process_constraint (new_constraint (lhs, rhs));
5391 else if (gimple_code (t) == GIMPLE_ASM)
5393 /* ??? Ick. We can do better. */
5394 if (gimple_vdef (t))
5395 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5396 anything_id);
5397 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5398 anything_id);
5403 /* Find the first varinfo in the same variable as START that overlaps with
5404 OFFSET. Return NULL if we can't find one. */
5406 static varinfo_t
5407 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5409 /* If the offset is outside of the variable, bail out. */
5410 if (offset >= start->fullsize)
5411 return NULL;
5413 /* If we cannot reach offset from start, lookup the first field
5414 and start from there. */
5415 if (start->offset > offset)
5416 start = get_varinfo (start->head);
5418 while (start)
5420 /* We may not find a variable in the field list with the actual
5421 offset when we have glommed a structure to a variable.
5422 In that case, however, offset should still be within the size
5423 of the variable. */
5424 if (offset >= start->offset
5425 && (offset - start->offset) < start->size)
5426 return start;
5428 start = vi_next (start);
5431 return NULL;
5434 /* Find the first varinfo in the same variable as START that overlaps with
5435 OFFSET. If there is no such varinfo the varinfo directly preceding
5436 OFFSET is returned. */
5438 static varinfo_t
5439 first_or_preceding_vi_for_offset (varinfo_t start,
5440 unsigned HOST_WIDE_INT offset)
5442 /* If we cannot reach offset from start, lookup the first field
5443 and start from there. */
5444 if (start->offset > offset)
5445 start = get_varinfo (start->head);
5447 /* We may not find a variable in the field list with the actual
5448 offset when we have glommed a structure to a variable.
5449 In that case, however, offset should still be within the size
5450 of the variable.
5451 If we got beyond the offset we look for return the field
5452 directly preceding offset which may be the last field. */
5453 while (start->next
5454 && offset >= start->offset
5455 && !((offset - start->offset) < start->size))
5456 start = vi_next (start);
5458 return start;
5462 /* This structure is used during pushing fields onto the fieldstack
5463 to track the offset of the field, since bitpos_of_field gives it
5464 relative to its immediate containing type, and we want it relative
5465 to the ultimate containing object. */
5467 struct fieldoff
5469 /* Offset from the base of the base containing object to this field. */
5470 HOST_WIDE_INT offset;
5472 /* Size, in bits, of the field. */
5473 unsigned HOST_WIDE_INT size;
/* True when the field has no DECL_SIZE or its size does not fit an
   unsigned HOST_WIDE_INT (see push_fields_onto_fieldstack).  */
5475 unsigned has_unknown_size : 1;
/* True when the field's type is known to contain pointers
   (field_must_have_pointers).  */
5477 unsigned must_have_pointers : 1;
/* True when the field may contain pointers; padding entries created at
   offset zero set this conservatively.  */
5479 unsigned may_have_pointers : 1;
/* NOTE(review): presumably true when every pointer in the field is
   restrict-qualified — the consuming code is outside this view; confirm.  */
5481 unsigned only_restrict_pointers : 1;
/* NOTE(review): type pointed to by the restrict pointer(s) above,
   NULL_TREE otherwise — confirm at use sites.  */
5483 tree restrict_pointed_type;
5485 typedef struct fieldoff fieldoff_s;
5488 /* qsort comparison function for two fieldoff's PA and PB */
5490 static int
5491 fieldoff_compare (const void *pa, const void *pb)
5493 const fieldoff_s *foa = (const fieldoff_s *)pa;
5494 const fieldoff_s *fob = (const fieldoff_s *)pb;
5495 unsigned HOST_WIDE_INT foasize, fobsize;
5497 if (foa->offset < fob->offset)
5498 return -1;
5499 else if (foa->offset > fob->offset)
5500 return 1;
5502 foasize = foa->size;
5503 fobsize = fob->size;
5504 if (foasize < fobsize)
5505 return -1;
5506 else if (foasize > fobsize)
5507 return 1;
5508 return 0;
5511 /* Sort a fieldstack according to the field offset and sizes. */
5512 static void
5513 sort_fieldstack (vec<fieldoff_s> fieldstack)
5515 fieldstack.qsort (fieldoff_compare);
5518 /* Return true if T is a type that can have subvars. */
5520 static inline bool
5521 type_can_have_subvars (const_tree t)
5523 /* Aggregates without overlapping fields can have subvars. */
5524 return TREE_CODE (t) == RECORD_TYPE;
5527 /* Return true if V is a tree that we can have subvars for.
5528 Normally, this is any aggregate type. Also complex
5529 types which are not gimple registers can have subvars. */
5531 static inline bool
5532 var_can_have_subvars (const_tree v)
5534 /* Volatile variables should never have subvars. */
5535 if (TREE_THIS_VOLATILE (v))
5536 return false;
5538 /* Non decls or memory tags can never have subvars. */
5539 if (!DECL_P (v))
5540 return false;
5542 return type_can_have_subvars (TREE_TYPE (v));
5545 /* Return true if T is a type that does contain pointers. */
5547 static bool
5548 type_must_have_pointers (tree type)
5550 if (POINTER_TYPE_P (type))
5551 return true;
5553 if (TREE_CODE (type) == ARRAY_TYPE)
5554 return type_must_have_pointers (TREE_TYPE (type));
5556 /* A function or method can have pointers as arguments, so track
5557 those separately. */
5558 if (TREE_CODE (type) == FUNCTION_TYPE
5559 || TREE_CODE (type) == METHOD_TYPE)
5560 return true;
5562 return false;
5565 static bool
5566 field_must_have_pointers (tree t)
5568 return type_must_have_pointers (TREE_TYPE (t));
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
   the fields of TYPE onto fieldstack, recording their offsets along
   the way.

   OFFSET is used to keep track of the offset in this entire
   structure, rather than just the immediately containing structure.
   Returns false if the caller is supposed to handle the field we
   recursed for.  */

static bool
push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
			     HOST_WIDE_INT offset)
{
  tree field;
  bool empty_p = true;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  /* If the vector of fields is growing too big, bail out early.
     Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
     sure this fails.  */
  if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	HOST_WIDE_INT foff = bitpos_of_field (field);
	tree field_type = TREE_TYPE (field);

	/* Fields that cannot themselves have subvars (including union
	   members, whose fields overlap) are pushed as a single unit;
	   otherwise recurse into the sub-record.  */
	if (!var_can_have_subvars (field)
	    || TREE_CODE (field_type) == QUAL_UNION_TYPE
	    || TREE_CODE (field_type) == UNION_TYPE)
	  push = true;
	else if (!push_fields_onto_fieldstack
		    (field_type, fieldstack, offset + foff)
		 && (DECL_SIZE (field)
		     && !integer_zerop (DECL_SIZE (field))))
	  /* Empty structures may have actual size, like in C++.  So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack.  */
	  push = true;

	if (push)
	  {
	    fieldoff_s *pair = NULL;
	    bool has_unknown_size = false;
	    bool must_have_pointers_p;

	    if (!fieldstack->is_empty ())
	      pair = &fieldstack->last ();

	    /* If there isn't anything at offset zero, create sth.  */
	    if (!pair
		&& offset + foff != 0)
	      {
		fieldoff_s e
		  = {0, offset + foff, false, false, true, false, NULL_TREE};
		pair = fieldstack->safe_push (e);
	      }

	    if (!DECL_SIZE (field)
		|| !tree_fits_uhwi_p (DECL_SIZE (field)))
	      has_unknown_size = true;

	    /* If adjacent fields do not contain pointers merge them.  */
	    must_have_pointers_p = field_must_have_pointers (field);
	    if (pair
		&& !has_unknown_size
		&& !must_have_pointers_p
		&& !pair->must_have_pointers
		&& !pair->has_unknown_size
		&& pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
	      {
		/* Extend the previous entry to cover this field.  */
		pair->size += tree_to_uhwi (DECL_SIZE (field));
	      }
	    else
	      {
		fieldoff_s e;
		e.offset = offset + foff;
		e.has_unknown_size = has_unknown_size;
		if (!has_unknown_size)
		  e.size = tree_to_uhwi (DECL_SIZE (field));
		else
		  e.size = -1;
		e.must_have_pointers = must_have_pointers_p;
		e.may_have_pointers = true;
		e.only_restrict_pointers
		  = (!has_unknown_size
		     && POINTER_TYPE_P (field_type)
		     && TYPE_RESTRICT (field_type));
		if (e.only_restrict_pointers)
		  e.restrict_pointed_type = TREE_TYPE (field_type);
		fieldstack->safe_push (e);
	      }
	  }

	empty_p = false;
      }

  /* Return true iff we pushed (or merged) at least one field, i.e. the
     record was not empty.  */
  return !empty_p;
}
/* Count the number of arguments DECL has, and set IS_VARARGS to true
   if it is a varargs function.  */

static unsigned int
count_num_arguments (tree decl, bool *is_varargs)
{
  unsigned int num = 0;
  tree t;

  /* Capture named arguments for K&R functions.  They do not
     have a prototype and thus no TYPE_ARG_TYPES.  */
  for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
    ++num;

  /* Check if the function has variadic arguments.  A prototyped
     non-variadic function terminates its TYPE_ARG_TYPES list with
     void_type_node; if we run off the end of the list without seeing
     it, the function is variadic.  This also classifies unprototyped
     K&R functions (NULL TYPE_ARG_TYPES) as varargs.  */
  for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
    if (TREE_VALUE (t) == void_type_node)
      break;
  if (!t)
    *is_varargs = true;

  /* NOTE: IS_VARARGS is only ever set, never cleared — the caller is
     expected to initialize it to false.  */
  return num;
}
/* Create the function info for DECL, using NAME, and return the varinfo
   we've created for the function.  If ADD_ID, append a unique id to NAME.
   If NONLOCAL_P, create initial constraints from NONLOCAL for the parts
   of the function reachable from outside (arguments, static chain,
   by-reference result).  The function is modeled as a fake aggregate
   whose sub-fields, linked via the ->next chain in offset order, are
   the clobber, use, static-chain, result and argument slots.  */

static varinfo_t
create_function_info_for (tree decl, const char *name, bool add_id,
			  bool nonlocal_p)
{
  struct function *fn = DECL_STRUCT_FUNCTION (decl);
  varinfo_t vi, prev_vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;
  unsigned int num_args = count_num_arguments (decl, &is_varargs);

  /* Create the variable info.  */

  vi = new_var_info (decl, name, add_id);
  vi->offset = 0;
  vi->size = 1;
  vi->fullsize = fi_parm_base + num_args;
  vi->is_fn_info = 1;
  vi->may_have_pointers = false;
  if (is_varargs)
    /* For varargs the number of argument slots is unbounded.  */
    vi->fullsize = ~0;
  insert_vi_for_tree (vi->decl, vi);

  prev_vi = vi;

  /* Create a variable for things the function clobbers and one for
     things the function uses.  */
    {
      varinfo_t clobbervi, usevi;
      const char *newname;
      char *tempname;

      tempname = xasprintf ("%s.clobber", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      clobbervi = new_var_info (NULL, newname, false);
      clobbervi->offset = fi_clobbers;
      clobbervi->size = 1;
      clobbervi->fullsize = vi->fullsize;
      clobbervi->is_full_var = true;
      clobbervi->is_global_var = false;
      clobbervi->is_reg_var = true;

      gcc_assert (prev_vi->offset < clobbervi->offset);
      prev_vi->next = clobbervi->id;
      prev_vi = clobbervi;

      tempname = xasprintf ("%s.use", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      usevi = new_var_info (NULL, newname, false);
      usevi->offset = fi_uses;
      usevi->size = 1;
      usevi->fullsize = vi->fullsize;
      usevi->is_full_var = true;
      usevi->is_global_var = false;
      usevi->is_reg_var = true;

      gcc_assert (prev_vi->offset < usevi->offset);
      prev_vi->next = usevi->id;
      prev_vi = usevi;
    }

  /* And one for the static chain.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t chainvi;
      const char *newname;
      char *tempname;

      tempname = xasprintf ("%s.chain", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      chainvi = new_var_info (fn->static_chain_decl, newname, false);
      chainvi->offset = fi_static_chain;
      chainvi->size = 1;
      chainvi->fullsize = vi->fullsize;
      chainvi->is_full_var = true;
      chainvi->is_global_var = false;

      insert_vi_for_tree (fn->static_chain_decl, chainvi);

      if (nonlocal_p
	  && chainvi->may_have_pointers)
	make_constraint_from (chainvi, nonlocal_id);

      gcc_assert (prev_vi->offset < chainvi->offset);
      prev_vi->next = chainvi->id;
      prev_vi = chainvi;
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      tree resultdecl = decl;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      tempname = xasprintf ("%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newname, false);
      resultvi->offset = fi_result;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->is_full_var = true;
      if (DECL_RESULT (decl))
	resultvi->may_have_pointers = true;

      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);

      /* A by-reference result is written to through a pointer coming
	 from outside of the function.  */
      if (nonlocal_p
	  && DECL_RESULT (decl)
	  && DECL_BY_REFERENCE (DECL_RESULT (decl)))
	make_constraint_from (resultvi, nonlocal_id);

      gcc_assert (prev_vi->offset < resultvi->offset);
      prev_vi->next = resultvi->id;
      prev_vi = resultvi;
    }

  /* We also need to make function return values escape.  Nothing
     escapes by returning from main though.  */
  if (nonlocal_p
      && !MAIN_NAME_P (DECL_NAME (decl)))
    {
      varinfo_t fi, rvi;
      fi = lookup_vi_for_tree (decl);
      rvi = first_vi_for_offset (fi, fi_result);
      if (rvi && rvi->offset == fi_result)
	make_copy_constraint (get_varinfo (escaped_id), rvi->id);
    }

  /* Set up variables for each argument.  */
  arg = DECL_ARGUMENTS (decl);
  for (i = 0; i < num_args; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree argdecl = decl;

      if (arg)
	argdecl = arg;

      tempname = xasprintf ("%s.arg%d", name, i);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newname, false);
      argvi->offset = fi_parm_base + i;
      argvi->size = 1;
      argvi->is_full_var = true;
      argvi->fullsize = vi->fullsize;
      if (arg)
	argvi->may_have_pointers = true;

      if (arg)
	insert_vi_for_tree (arg, argvi);

      if (nonlocal_p
	  && argvi->may_have_pointers)
	make_constraint_from (argvi, nonlocal_id);

      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
      prev_vi = argvi;
      if (arg)
	arg = DECL_CHAIN (arg);
    }

  /* Add one representative for all further args.  */
  if (is_varargs)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree decl;

      tempname = xasprintf ("%s.varargs", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      /* We need sth that can be pointed to for va_start.  */
      decl = build_fake_var_decl (ptr_type_node);

      argvi = new_var_info (decl, newname, false);
      argvi->offset = fi_parm_base + num_args;
      argvi->size = ~0;
      argvi->is_full_var = true;
      argvi->is_heap_var = true;
      argvi->fullsize = vi->fullsize;

      if (nonlocal_p
	  && argvi->may_have_pointers)
	make_constraint_from (argvi, nonlocal_id);

      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
      prev_vi = argvi;
    }

  return vi;
}
5920 /* Return true if FIELDSTACK contains fields that overlap.
5921 FIELDSTACK is assumed to be sorted by offset. */
5923 static bool
5924 check_for_overlaps (vec<fieldoff_s> fieldstack)
5926 fieldoff_s *fo = NULL;
5927 unsigned int i;
5928 HOST_WIDE_INT lastoffset = -1;
5930 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5932 if (fo->offset == lastoffset)
5933 return true;
5934 lastoffset = fo->offset;
5936 return false;
/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
   This will also create any varinfo structures necessary for fields
   of DECL.  DECL is a function parameter if HANDLE_PARAM is set.
   HANDLED_STRUCT_TYPE is used to register struct types reached by following
   restrict pointers.  This is needed to prevent infinite recursion.
   If ADD_RESTRICT, pretend that the pointer NAME is restrict even if DECL
   does not advertise it.  */

static varinfo_t
create_variable_info_for_1 (tree decl, const char *name, bool add_id,
			    bool handle_param, bitmap handled_struct_type,
			    bool add_restrict = false)
{
  varinfo_t vi, newvi;
  tree decl_type = TREE_TYPE (decl);
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
  auto_vec<fieldoff_s> fieldstack;
  fieldoff_s *fo;
  unsigned int i;

  /* Variables whose size is unknown or does not fit a uhwi get a
     single conservative varinfo covering everything.  */
  if (!declsize
      || !tree_fits_uhwi_p (declsize))
    {
      vi = new_var_info (decl, name, add_id);
      vi->offset = 0;
      vi->size = ~0;
      vi->fullsize = ~0;
      vi->is_unknown_size_var = true;
      vi->is_full_var = true;
      vi->may_have_pointers = true;
      return vi;
    }

  /* Collect field information.  */
  if (use_field_sensitive
      && var_can_have_subvars (decl)
      /* ??? Force us to not use subfields for globals in IPA mode.
	 Else we'd have to parse arbitrary initializers.  */
      && !(in_ipa_mode
	   && is_global_var (decl)))
    {
      fieldoff_s *fo = NULL;
      bool notokay = false;
      unsigned int i;

      push_fields_onto_fieldstack (decl_type, &fieldstack, 0);

      for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
	if (fo->has_unknown_size
	    || fo->offset < 0)
	  {
	    notokay = true;
	    break;
	  }

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}

      if (notokay)
	fieldstack.release ();
    }

  /* If we didn't end up collecting sub-variables create a full
     variable for the decl.  */
  if (fieldstack.length () == 0
      || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
    {
      vi = new_var_info (decl, name, add_id);
      vi->offset = 0;
      vi->may_have_pointers = true;
      vi->fullsize = tree_to_uhwi (declsize);
      vi->size = vi->fullsize;
      vi->is_full_var = true;
      if (POINTER_TYPE_P (decl_type)
	  && (TYPE_RESTRICT (decl_type) || add_restrict))
	vi->only_restrict_pointers = 1;
      /* For a restrict qualified parameter create a dedicated heap
	 variable ("PARM_NOALIAS") representing what it points to;
	 HANDLED_STRUCT_TYPE guards against unbounded recursion through
	 self-referential struct types.  */
      if (vi->only_restrict_pointers
	  && !type_contains_placeholder_p (TREE_TYPE (decl_type))
	  && handle_param
	  && !bitmap_bit_p (handled_struct_type,
			    TYPE_UID (TREE_TYPE (decl_type))))
	{
	  varinfo_t rvi;
	  tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type));
	  DECL_EXTERNAL (heapvar) = 1;
	  if (var_can_have_subvars (heapvar))
	    bitmap_set_bit (handled_struct_type,
			    TYPE_UID (TREE_TYPE (decl_type)));
	  rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
					    true, handled_struct_type);
	  if (var_can_have_subvars (heapvar))
	    bitmap_clear_bit (handled_struct_type,
			      TYPE_UID (TREE_TYPE (decl_type)));
	  rvi->is_restrict_var = 1;
	  insert_vi_for_tree (heapvar, rvi);
	  make_constraint_from (vi, rvi->id);
	  make_param_constraints (rvi);
	}
      fieldstack.release ();
      return vi;
    }

  /* Otherwise create one varinfo per collected field, chained together
     via ->next in fieldstack (offset) order.  */
  vi = new_var_info (decl, name, add_id);
  vi->fullsize = tree_to_uhwi (declsize);
  if (fieldstack.length () == 1)
    vi->is_full_var = true;
  for (i = 0, newvi = vi;
       fieldstack.iterate (i, &fo);
       ++i, newvi = vi_next (newvi))
    {
      const char *newname = NULL;
      char *tempname;

      if (dump_file)
	{
	  if (fieldstack.length () != 1)
	    {
	      tempname
		= xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
			     "+" HOST_WIDE_INT_PRINT_DEC, name,
			     fo->offset, fo->size);
	      newname = ggc_strdup (tempname);
	      free (tempname);
	    }
	}
      else
	newname = "NULL";

      if (newname)
	newvi->name = newname;
      newvi->offset = fo->offset;
      newvi->size = fo->size;
      newvi->fullsize = vi->fullsize;
      newvi->may_have_pointers = fo->may_have_pointers;
      newvi->only_restrict_pointers = fo->only_restrict_pointers;
      /* As above, restrict qualified pointer fields of a parameter get
	 a dedicated PARM_NOALIAS heap variable.  */
      if (handle_param
	  && newvi->only_restrict_pointers
	  && !type_contains_placeholder_p (fo->restrict_pointed_type)
	  && !bitmap_bit_p (handled_struct_type,
			    TYPE_UID (fo->restrict_pointed_type)))
	{
	  varinfo_t rvi;
	  tree heapvar = build_fake_var_decl (fo->restrict_pointed_type);
	  DECL_EXTERNAL (heapvar) = 1;
	  if (var_can_have_subvars (heapvar))
	    bitmap_set_bit (handled_struct_type,
			    TYPE_UID (fo->restrict_pointed_type));
	  rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
					    true, handled_struct_type);
	  if (var_can_have_subvars (heapvar))
	    bitmap_clear_bit (handled_struct_type,
			      TYPE_UID (fo->restrict_pointed_type));
	  rvi->is_restrict_var = 1;
	  insert_vi_for_tree (heapvar, rvi);
	  make_constraint_from (newvi, rvi->id);
	  make_param_constraints (rvi);
	}
      if (i + 1 < fieldstack.length ())
	{
	  varinfo_t tem = new_var_info (decl, name, false);
	  newvi->next = tem->id;
	  tem->head = vi->id;
	}
    }

  return vi;
}
/* Create varinfo(s) for DECL, named NAME, register them in the
   tree-to-varinfo map, and generate initial constraints for globals.
   If ADD_ID, append a unique id to NAME.  Returns the id of the
   created (main) variable.  */

static unsigned int
create_variable_info_for (tree decl, const char *name, bool add_id)
{
  /* First see if we are dealing with an ifunc resolver call and
     associate that with a call to the resolver function result.  */
  cgraph_node *node;
  if (in_ipa_mode
      && TREE_CODE (decl) == FUNCTION_DECL
      && (node = cgraph_node::get (decl))
      && node->ifunc_resolver)
    {
      varinfo_t fi = get_vi_for_tree (node->get_alias_target ()->decl);
      constraint_expr rhs
	= get_function_part_constraint (fi, fi_result);
      fi = new_var_info (NULL_TREE, "ifuncres", true);
      fi->is_reg_var = true;
      constraint_expr lhs;
      lhs.type = SCALAR;
      lhs.var = fi->id;
      lhs.offset = 0;
      process_constraint (new_constraint (lhs, rhs));
      insert_vi_for_tree (decl, fi);
      return fi->id;
    }

  varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, false, NULL);
  unsigned int id = vi->id;

  insert_vi_for_tree (decl, vi);

  if (!VAR_P (decl))
    return id;

  /* Create initial constraints for globals.  Walk all (sub-)fields of
     the decl via the ->next chain.  */
  for (; vi; vi = vi_next (vi))
    {
      if (!vi->may_have_pointers
	  || !vi->is_global_var)
	continue;

      /* Mark global restrict qualified pointers.  */
      if ((POINTER_TYPE_P (TREE_TYPE (decl))
	   && TYPE_RESTRICT (TREE_TYPE (decl)))
	  || vi->only_restrict_pointers)
	{
	  varinfo_t rvi
	    = make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT",
						    true);
	  /* ??? For now exclude reads from globals as restrict sources
	     if those are not (indirectly) from incoming parameters.  */
	  rvi->is_restrict_var = false;
	  continue;
	}

      /* In non-IPA mode the initializer from nonlocal is all we need.  */
      if (!in_ipa_mode
	  || DECL_HARD_REGISTER (decl))
	make_copy_constraint (vi, nonlocal_id);

      /* In IPA mode parse the initializer and generate proper constraints
	 for it.  */
      else
	{
	  varpool_node *vnode = varpool_node::get (decl);

	  /* For escaped variables initialize them from nonlocal.  */
	  if (!vnode->all_refs_explicit_p ())
	    make_copy_constraint (vi, nonlocal_id);

	  /* If this is a global variable with an initializer and we are in
	     IPA mode generate constraints for it.  */
	  ipa_ref *ref;
	  for (unsigned idx = 0; vnode->iterate_reference (idx, ref); ++idx)
	    {
	      auto_vec<ce_s> rhsc;
	      struct constraint_expr lhs, *rhsp;
	      unsigned i;
	      get_constraint_for_address_of (ref->referred->decl, &rhsc);
	      lhs.var = vi->id;
	      lhs.offset = 0;
	      lhs.type = SCALAR;
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      /* If this is a variable that escapes from the unit
		 the initializer escapes as well.  */
	      if (!vnode->all_refs_explicit_p ())
		{
		  lhs.var = escaped_id;
		  lhs.offset = 0;
		  lhs.type = SCALAR;
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		}
	    }
	}
    }

  return id;
}
/* Print out the points-to solution for VAR to FILE.  */

static void
dump_solution_for_var (FILE *file, unsigned int var)
{
  varinfo_t vi = get_varinfo (var);
  unsigned int i;
  bitmap_iterator bi;

  /* Dump the solution for unified vars anyway, this avoids difficulties
     in scanning dumps in the testsuite.  */
  fprintf (file, "%s = { ", vi->name);
  /* FIND maps VAR to the representative of its equivalence class,
     which holds the actual solution bitmap.  */
  vi = get_varinfo (find (var));
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    fprintf (file, "%s ", get_varinfo (i)->name);
  fprintf (file, "}");

  /* But note when the variable was unified.  */
  if (vi->id != var)
    fprintf (file, " same as %s", vi->name);

  fprintf (file, "\n");
}
6242 /* Print the points-to solution for VAR to stderr. */
6244 DEBUG_FUNCTION void
6245 debug_solution_for_var (unsigned int var)
6247 dump_solution_for_var (stderr, var);
/* Register the constraints for function parameter related VI, walking
   the sub-field chain until the full variable is covered.  */

static void
make_param_constraints (varinfo_t vi)
{
  for (; vi; vi = vi_next (vi))
    {
      if (vi->only_restrict_pointers)
	/* Deliberately empty: restrict qualified pointers are handled
	   by the caller via a dedicated PARM_NOALIAS variable (see
	   create_variable_info_for_1), so no NONLOCAL constraint is
	   added here.  */
	;
      else if (vi->may_have_pointers)
	make_constraint_from (vi, nonlocal_id);

      if (vi->is_full_var)
	break;
    }
}
/* Create varinfo structures for all of the variables in the
   function for intraprocedural mode.  */

static void
intra_create_variable_infos (struct function *fn)
{
  tree t;
  bitmap handled_struct_type = NULL;
  /* The first parameter of a C++ constructor (the `this' pointer) is
     treated as restrict qualified.  */
  bool this_parm_in_ctor = DECL_CXX_CONSTRUCTOR_P (fn->decl);

  /* For each incoming pointer argument arg, create the constraint ARG
     = NONLOCAL or a dummy variable if it is a restrict qualified
     passed-by-reference argument.  */
  for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
    {
      if (handled_struct_type == NULL)
	handled_struct_type = BITMAP_ALLOC (NULL);

      varinfo_t p
	= create_variable_info_for_1 (t, alias_get_name (t), false, true,
				      handled_struct_type, this_parm_in_ctor);
      insert_vi_for_tree (t, p);

      make_param_constraints (p);

      /* Only the first parameter is the `this' pointer.  */
      this_parm_in_ctor = false;
    }

  if (handled_struct_type != NULL)
    BITMAP_FREE (handled_struct_type);

  /* Add a constraint for a result decl that is passed by reference.  */
  if (DECL_RESULT (fn->decl)
      && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
    {
      varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));

      for (p = result_vi; p; p = vi_next (p))
	make_constraint_from (p, nonlocal_id);
    }

  /* Add a constraint for the incoming static chain parameter.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);

      for (p = chain_vi; p; p = vi_next (p))
	make_constraint_from (p, nonlocal_id);
    }
}
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  /* The shared points-to bitmap.  */
  bitmap pt_vars;
  /* Cached bitmap_hash of PT_VARS (see shared_bitmap_add).  */
  hashval_t hashcode;
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;

/* Shared_bitmap hashtable helpers.  */

struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
{
  static inline hashval_t hash (const shared_bitmap_info *);
  static inline bool equal (const shared_bitmap_info *,
			    const shared_bitmap_info *);
};
6337 /* Hash function for a shared_bitmap_info_t */
6339 inline hashval_t
6340 shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
6342 return bi->hashcode;
6345 /* Equality function for two shared_bitmap_info_t's. */
6347 inline bool
6348 shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
6349 const shared_bitmap_info *sbi2)
6351 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
6354 /* Shared_bitmap hashtable. */
6356 static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
6358 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
6359 existing instance if there is one, NULL otherwise. */
6361 static bitmap
6362 shared_bitmap_lookup (bitmap pt_vars)
6364 shared_bitmap_info **slot;
6365 struct shared_bitmap_info sbi;
6367 sbi.pt_vars = pt_vars;
6368 sbi.hashcode = bitmap_hash (pt_vars);
6370 slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
6371 if (!slot)
6372 return NULL;
6373 else
6374 return (*slot)->pt_vars;
6378 /* Add a bitmap to the shared bitmap hashtable. */
6380 static void
6381 shared_bitmap_add (bitmap pt_vars)
6383 shared_bitmap_info **slot;
6384 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
6386 sbi->pt_vars = pt_vars;
6387 sbi->hashcode = bitmap_hash (pt_vars);
6389 slot = shared_bitmap_table->find_slot (sbi, INSERT);
6390 gcc_assert (!*slot);
6391 *slot = sbi;
/* Set bits in INTO corresponding to the variable uids in solution set
   FROM, updating the summary flags of *PT along the way.  FNDECL is
   the function the points-to set is computed for (used in IPA mode to
   classify non-automatic variables as nonlocal).  */

static void
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
		   tree fndecl)
{
  unsigned int i;
  bitmap_iterator bi;
  varinfo_t escaped_vi = get_varinfo (find (escaped_id));
  bool everything_escaped
    = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* The only artificial variables that are allowed in a may-alias
	 set are heap variables.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;

      if (everything_escaped
	  || (escaped_vi->solution
	      && bitmap_bit_p (escaped_vi->solution, i)))
	{
	  pt->vars_contains_escaped = true;
	  pt->vars_contains_escaped_heap |= vi->is_heap_var;
	}

      if (vi->is_restrict_var)
	pt->vars_contains_restrict = true;

      if (VAR_P (vi->decl)
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* If we are in IPA mode we will not recompute points-to
	     sets after inlining so make sure they stay valid.  */
	  if (in_ipa_mode
	      && !DECL_PT_UID_SET_P (vi->decl))
	    SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));

	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables.  */
	  bitmap_set_bit (into, DECL_PT_UID (vi->decl));
	  if (vi->is_global_var
	      /* In IPA mode the escaped_heap trick doesn't work as
		 ESCAPED is escaped from the unit but
		 pt_solution_includes_global needs to answer true for
		 all variables not automatic within a function.
		 For the same reason is_global_var is not the
		 correct flag to track - local variables from other
		 functions also need to be considered global.
		 Conveniently all HEAP vars are not put in function
		 scope.  */
	      || (in_ipa_mode
		  && fndecl
		  && ! auto_var_in_fn_p (vi->decl, fndecl)))
	    pt->vars_contains_nonlocal = true;

	  /* If we have a variable that is interposable record that fact
	     for pointer comparison simplification.  */
	  if (VAR_P (vi->decl)
	      && (TREE_STATIC (vi->decl) || DECL_EXTERNAL (vi->decl))
	      && ! decl_binds_to_current_def_p (vi->decl))
	    pt->vars_contains_interposable = true;

	  /* If this is a local variable we can have overlapping lifetime
	     of different function invocations through recursion duplicate
	     it with its shadow variable.  */
	  if (in_ipa_mode
	      && vi->shadow_var_uid != 0)
	    {
	      bitmap_set_bit (into, vi->shadow_var_uid);
	      pt->vars_contains_nonlocal = true;
	    }
	}
      else if (TREE_CODE (vi->decl) == FUNCTION_DECL
	       || TREE_CODE (vi->decl) == LABEL_DECL)
	{
	  /* Nothing should read/write from/to code so we can
	     save bits by not including them in the points-to bitmaps.
	     Still mark the points-to set as containing global memory
	     to make code-patching possible - see PR70128.  */
	  pt->vars_contains_nonlocal = true;
	}
    }
}
/* Compute and return the points-to solution for the variable ORIG_VI
   of function FNDECL.  Results are cached in FINAL_SOLUTIONS and the
   variable bitmaps are shared between solutions where possible.  */

static struct pt_solution
find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;
  struct pt_solution *pt;

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (find (orig_vi->id));

  /* See if we have already computed the solution and return it.  */
  pt_solution **slot = &final_solutions->get_or_insert (vi);
  if (*slot != NULL)
    return **slot;

  *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
  memset (pt, 0, sizeof (struct pt_solution));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	      /* Expand some special vars of ESCAPED in-place here.  */
	      varinfo_t evi = get_varinfo (find (escaped_id));
	      if (bitmap_bit_p (evi->solution, nonlocal_id))
		pt->nonlocal = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->is_heap_var)
	    /* We represent heapvars in the points-to set properly.  */
	    ;
	  else if (vi->id == string_id)
	    /* Nobody cares - STRING_CSTs are read-only entities.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything)
    return *pt;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (finished_solution, vi->solution, pt, fndecl);
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      /* An identical set already exists; reuse it and drop ours.  */
      pt->vars = result;
      bitmap_clear (finished_solution);
    }

  return *pt;
}
/* Given a pointer variable P of function FNDECL, fill in its
   SSA_NAME_PTR_INFO points-to set.  */

static void
find_what_p_points_to (tree fndecl, tree p)
{
  struct ptr_info_def *pi;
  tree lookup_p = p;
  varinfo_t vi;
  bool nonnull = get_ptr_nonnull (p);

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (p) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (p)
      && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
    lookup_p = SSA_NAME_VAR (p);

  vi = lookup_vi_for_tree (lookup_p);
  if (!vi)
    return;

  pi = get_ptr_info (p);
  pi->pt = find_what_var_points_to (fndecl, vi);
  /* Conservatively set to NULL from PTA (to true).  */
  pi->pt.null = 1;
  /* Preserve pointer nonnull computed by VRP.  See get_ptr_nonnull
     in gcc/tree-ssanames.c for more information.  */
  if (nonnull)
    set_ptr_nonnull (p);
}
/* Query statistics for points-to solutions.  The "no_alias" counters
   record disambiguations; the sum of both counters of a pair gives the
   total number of queries (see dump_pta_stats).  */

static struct {
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
6612 void
6613 dump_pta_stats (FILE *s)
6615 fprintf (s, "\nPTA query stats:\n");
6616 fprintf (s, " pt_solution_includes: "
6617 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6618 HOST_WIDE_INT_PRINT_DEC" queries\n",
6619 pta_stats.pt_solution_includes_no_alias,
6620 pta_stats.pt_solution_includes_no_alias
6621 + pta_stats.pt_solution_includes_may_alias);
6622 fprintf (s, " pt_solutions_intersect: "
6623 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6624 HOST_WIDE_INT_PRINT_DEC" queries\n",
6625 pta_stats.pt_solutions_intersect_no_alias,
6626 pta_stats.pt_solutions_intersect_no_alias
6627 + pta_stats.pt_solutions_intersect_may_alias);
6631 /* Reset the points-to solution *PT to a conservative default
6632 (point to anything). */
6634 void
6635 pt_solution_reset (struct pt_solution *pt)
6637 memset (pt, 0, sizeof (struct pt_solution));
6638 pt->anything = true;
6639 pt->null = true;
/* Set the points-to solution *PT to point only to the variables
   in VARS.  VARS_CONTAINS_NONLOCAL specifies whether VARS contains
   global variables.  */

void
pt_solution_set (struct pt_solution *pt, bitmap vars,
		 bool vars_contains_nonlocal)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = vars;
  pt->vars_contains_nonlocal = vars_contains_nonlocal;
  /* The solution contains escaped vars if the current function's
     ESCAPED solution may cover any of VARS.  */
  pt->vars_contains_escaped
    = (cfun->gimple_df->escaped.anything
       || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
}
6659 /* Set the points-to solution *PT to point only to the variable VAR. */
6661 void
6662 pt_solution_set_var (struct pt_solution *pt, tree var)
6664 memset (pt, 0, sizeof (struct pt_solution));
6665 pt->vars = BITMAP_GGC_ALLOC ();
6666 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6667 pt->vars_contains_nonlocal = is_global_var (var);
6668 pt->vars_contains_escaped
6669 = (cfun->gimple_df->escaped.anything
6670 || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
/* Computes the union of the points-to solutions *DEST and *SRC and
   stores the result in *DEST.  This changes the points-to bitmap
   of *DEST and thus may not be used if that might be shared.
   The points-to bitmap of *SRC and *DEST will not be shared after
   this function if they were not before.  */

static void
pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
{
  dest->anything |= src->anything;
  if (dest->anything)
    {
      /* "anything" subsumes everything else; collapse to the
	 conservative default.  */
      pt_solution_reset (dest);
      return;
    }

  dest->nonlocal |= src->nonlocal;
  dest->escaped |= src->escaped;
  dest->ipa_escaped |= src->ipa_escaped;
  dest->null |= src->null;
  dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
  dest->vars_contains_escaped |= src->vars_contains_escaped;
  dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
  if (!src->vars)
    return;

  if (!dest->vars)
    dest->vars = BITMAP_GGC_ALLOC ();
  bitmap_ior_into (dest->vars, src->vars);
}
6704 /* Return true if the points-to solution *PT is empty. */
6706 bool
6707 pt_solution_empty_p (struct pt_solution *pt)
6709 if (pt->anything
6710 || pt->nonlocal)
6711 return false;
6713 if (pt->vars
6714 && !bitmap_empty_p (pt->vars))
6715 return false;
6717 /* If the solution includes ESCAPED, check if that is empty. */
6718 if (pt->escaped
6719 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6720 return false;
6722 /* If the solution includes ESCAPED, check if that is empty. */
6723 if (pt->ipa_escaped
6724 && !pt_solution_empty_p (&ipa_escaped_pt))
6725 return false;
6727 return true;
6730 /* Return true if the points-to solution *PT only point to a single var, and
6731 return the var uid in *UID. */
6733 bool
6734 pt_solution_singleton_or_null_p (struct pt_solution *pt, unsigned *uid)
6736 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6737 || pt->vars == NULL
6738 || !bitmap_single_bit_set_p (pt->vars))
6739 return false;
6741 *uid = bitmap_first_set_bit (pt->vars);
6742 return true;
/* Return true if the points-to solution *PT includes global memory.  */

bool
pt_solution_includes_global (struct pt_solution *pt)
{
  if (pt->anything
      || pt->nonlocal
      || pt->vars_contains_nonlocal
      /* The following is a hack to make the malloc escape hack work.
	 In reality we'd need different sets for escaped-through-return
	 and escaped-to-callees and passes would need to be updated.  */
      || pt->vars_contains_escaped_heap)
    return true;

  /* 'escaped' is also a placeholder so we have to look into it.  */
  if (pt->escaped)
    return pt_solution_includes_global (&cfun->gimple_df->escaped);

  /* Likewise the IPA escaped placeholder.  */
  if (pt->ipa_escaped)
    return pt_solution_includes_global (&ipa_escaped_pt);

  return false;
}
6769 /* Return true if the points-to solution *PT includes the variable
6770 declaration DECL. */
6772 static bool
6773 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
6775 if (pt->anything)
6776 return true;
6778 if (pt->nonlocal
6779 && is_global_var (decl))
6780 return true;
6782 if (pt->vars
6783 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
6784 return true;
6786 /* If the solution includes ESCAPED, check it. */
6787 if (pt->escaped
6788 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
6789 return true;
6791 /* If the solution includes ESCAPED, check it. */
6792 if (pt->ipa_escaped
6793 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
6794 return true;
6796 return false;
6799 bool
6800 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6802 bool res = pt_solution_includes_1 (pt, decl);
6803 if (res)
6804 ++pta_stats.pt_solution_includes_may_alias;
6805 else
6806 ++pta_stats.pt_solution_includes_no_alias;
6807 return res;
/* Return true if both points-to solutions PT1 and PT2 have a non-empty
   intersection.  */

static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias.  */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_nonlocal))
      || (pt2->nonlocal
	  && pt1->vars_contains_nonlocal))
    return true;

  /* If either points to all escaped memory and the other points to
     any escaped memory they alias.  */
  if ((pt1->escaped
       && (pt2->escaped
	   || pt2->vars_contains_escaped))
      || (pt2->escaped
	  && pt1->vars_contains_escaped))
    return true;

  /* Check the escaped solution if required.
     ???  Do we need to check the local against the IPA escaped sets?  */
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
      && !pt_solution_empty_p (&ipa_escaped_pt))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->ipa_escaped && pt2->ipa_escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->ipa_escaped
	   && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
	  || (pt2->ipa_escaped
	      && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects.  */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}
6862 bool
6863 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6865 bool res = pt_solutions_intersect_1 (pt1, pt2);
6866 if (res)
6867 ++pta_stats.pt_solutions_intersect_may_alias;
6868 else
6869 ++pta_stats.pt_solutions_intersect_no_alias;
6870 return res;
/* Dump points-to information to OUTFILE.  */

static void
dump_sa_points_to_info (FILE *outfile)
{
  unsigned int i;

  fprintf (outfile, "\nPoints-to sets\n\n");

  /* Solver statistics are only printed when -fdump-...-stats is on.  */
  if (dump_flags & TDF_STATS)
    {
      fprintf (outfile, "Stats:\n");
      fprintf (outfile, "Total vars: %d\n", stats.total_vars);
      fprintf (outfile, "Non-pointer vars: %d\n",
	       stats.nonpointer_vars);
      fprintf (outfile, "Statically unified vars: %d\n",
	       stats.unified_vars_static);
      fprintf (outfile, "Dynamically unified vars: %d\n",
	       stats.unified_vars_dynamic);
      fprintf (outfile, "Iterations: %d\n", stats.iterations);
      fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
      fprintf (outfile, "Number of implicit edges: %d\n",
	       stats.num_implicit_edges);
    }

  /* Index 0 is the reserved NULL varinfo, so start at 1; vars that
     cannot contain pointers have no interesting solution.  */
  for (i = 1; i < varmap.length (); i++)
    {
      varinfo_t vi = get_varinfo (i);
      if (!vi->may_have_pointers)
	continue;
      dump_solution_for_var (outfile, i);
    }
}
/* Debug points-to information to stderr.  Convenience entry point for
   use from a debugger.  */

DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
/* Initialize the always-existing constraint variables for NULL
   ANYTHING, READONLY, and INTEGER.  The creation order must match
   the fixed ids (nothing_id, anything_id, ...) asserted below.  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;
  varinfo_t var_anything;
  varinfo_t var_nothing;
  varinfo_t var_string;
  varinfo_t var_escaped;
  varinfo_t var_nonlocal;
  varinfo_t var_storedanything;
  varinfo_t var_integer;

  /* Variable ID zero is reserved and should be NULL.  */
  varmap.safe_push (NULL);

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  var_nothing = new_var_info (NULL_TREE, "NULL", false);
  gcc_assert (var_nothing->id == nothing_id);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  var_nothing->may_have_pointers = 0;
  var_nothing->is_global_var = 0;

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  var_anything = new_var_info (NULL_TREE, "ANYTHING", false);
  gcc_assert (var_anything->id == anything_id);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING.  */
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  constraints.safe_push (new_constraint (lhs, rhs));

  /* Create the STRING variable, used to represent that a variable
     points to a string literal.  String literals don't contain
     pointers so STRING doesn't point to anything.  */
  var_string = new_var_info (NULL_TREE, "STRING", false);
  gcc_assert (var_string->id == string_id);
  var_string->is_artificial_var = 1;
  var_string->offset = 0;
  var_string->size = ~0;
  var_string->fullsize = ~0;
  var_string->is_special_var = 1;
  var_string->may_have_pointers = 0;

  /* Create the ESCAPED variable, used to represent the set of escaped
     memory.  */
  var_escaped = new_var_info (NULL_TREE, "ESCAPED", false);
  gcc_assert (var_escaped->id == escaped_id);
  var_escaped->is_artificial_var = 1;
  var_escaped->offset = 0;
  var_escaped->size = ~0;
  var_escaped->fullsize = ~0;
  /* Not special: ESCAPED participates in the solver like a normal var.  */
  var_escaped->is_special_var = 0;

  /* Create the NONLOCAL variable, used to represent the set of nonlocal
     memory.  */
  var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL", false);
  gcc_assert (var_nonlocal->id == nonlocal_id);
  var_nonlocal->is_artificial_var = 1;
  var_nonlocal->offset = 0;
  var_nonlocal->size = ~0;
  var_nonlocal->fullsize = ~0;
  var_nonlocal->is_special_var = 1;

  /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
     whole variable escapes.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = escaped_id;
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (new_constraint (lhs, rhs));

  /* *ESCAPED = NONLOCAL.  This is true because we have to assume
     everything pointed to by escaped points to what global memory can
     point to.  */
  lhs.type = DEREF;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED.  This is true because
     global memory may point to global memory and escaped memory.  */
  lhs.type = SCALAR;
  lhs.var = nonlocal_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
  rhs.type = ADDRESSOF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the STOREDANYTHING variable, used to represent the set of
     variables stored to *ANYTHING.  */
  var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING", false);
  gcc_assert (var_storedanything->id == storedanything_id);
  var_storedanything->is_artificial_var = 1;
  var_storedanything->offset = 0;
  var_storedanything->size = ~0;
  var_storedanything->fullsize = ~0;
  var_storedanything->is_special_var = 0;

  /* Create the INTEGER variable, used to represent that a variable points
     to what an INTEGER "points to".  */
  var_integer = new_var_info (NULL_TREE, "INTEGER", false);
  gcc_assert (var_integer->id == integer_id);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->is_special_var = 1;

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
/* Initialize things necessary to perform PTA: obstacks, the constraint
   and variable vectors, the lookup tables and the artificial base
   variables.  */

static void
init_alias_vars (void)
{
  use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);

  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraints.create (8);
  varmap.create (8);
  vi_for_tree = new hash_map<tree, varinfo_t>;
  call_stmt_vars = new hash_map<gimple *, varinfo_t>;

  memset (&stats, 0, sizeof (stats));
  shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
  /* Must come after the maps are set up; creates the special vars.  */
  init_base_vars ();

  gcc_obstack_init (&fake_var_decl_obstack);

  final_solutions = new hash_map<varinfo_t, pt_solution *>;
  gcc_obstack_init (&final_solutions_obstack);
}
/* Remove the REF and ADDRESS edges from GRAPH, as well as all the
   predecessor edges.  Called once indirect cycles have been found and
   the implicit representation is no longer needed.  */

static void
remove_preds_and_fake_succs (constraint_graph_t graph)
{
  unsigned int i;

  /* Clear the implicit ref and address nodes from the successor
     lists.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      if (graph->succs[i])
	bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
			    FIRST_REF_NODE * 2);
    }

  /* Free the successor list for the non-ref nodes.  */
  for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
    {
      if (graph->succs[i])
	BITMAP_FREE (graph->succs[i]);
    }

  /* Now reallocate the size of the successor list as, and blow away
     the predecessor bitmaps.  */
  graph->size = varmap.length ();
  graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);

  free (graph->implicit_preds);
  graph->implicit_preds = NULL;
  free (graph->preds);
  graph->preds = NULL;
  bitmap_obstack_release (&predbitmap_obstack);
}
/* Solve the constraint set.  First permutes the variables so that
   non-pointed-to register vars come last (for denser solution bitmaps),
   then builds the constraint graph, performs offline variable
   substitution and finally runs the propagation solver.  */

static void
solve_constraints (void)
{
  struct scc_info *si;

  /* Sort varinfos so that ones that cannot be pointed to are last.
     This makes bitmaps more efficient.  */
  unsigned int *map = XNEWVEC (unsigned int, varmap.length ());
  /* The special vars (ids 0 .. integer_id) keep their positions.  */
  for (unsigned i = 0; i < integer_id + 1; ++i)
    map[i] = i;
  /* Start with non-register vars (as possibly address-taken), followed
     by register vars as conservative set of vars never appearing in
     the points-to solution bitmaps.  */
  unsigned j = integer_id + 1;
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    if (! varmap[i]->is_reg_var)
      map[i] = j++;
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    if (varmap[i]->is_reg_var)
      map[i] = j++;
  /* Shuffle varmap according to map.  In-place cycle-following permute;
     also remaps each varinfo's id and its next/head field-chain links.  */
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    {
      while (map[varmap[i]->id] != i)
	std::swap (varmap[i], varmap[map[varmap[i]->id]]);
      gcc_assert (bitmap_empty_p (varmap[i]->solution));
      varmap[i]->id = i;
      varmap[i]->next = map[varmap[i]->next];
      varmap[i]->head = map[varmap[i]->head];
    }
  /* Finally rewrite constraints.  */
  for (unsigned i = 0; i < constraints.length (); ++i)
    {
      constraints[i]->lhs.var = map[constraints[i]->lhs.var];
      constraints[i]->rhs.var = map[constraints[i]->rhs.var];
    }
  free (map);

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  /* Twice the variable count: node i + FIRST_REF_NODE is *i.  */
  init_graph (varmap.length () * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);

  build_succ_graph ();

  free_var_substitution_info (si);

  /* Attach complex constraints to graph nodes.  */
  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  solve_graph (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    dump_sa_points_to_info (dump_file);
}
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  */

static void
compute_points_to_sets (void)
{
  basic_block bb;
  varinfo_t vi;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();

  intra_create_variable_infos (cfun);

  /* Now walk all statements and build the constraint set.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Virtual-operand PHIs carry no points-to information.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();

	  if (! virtual_operand_p (gimple_phi_result (phi)))
	    find_func_aliases (cfun, phi);
	}

      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);

	  find_func_aliases (cfun, stmt);
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file, 0);
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Compute the points-to set for ESCAPED used for call-clobber analysis.  */
  cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl,
						      get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  cfun->gimple_df->escaped.escaped = 0;

  /* Compute the points-to sets for pointer SSA_NAMEs.  */
  unsigned i;
  tree ptr;

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      if (POINTER_TYPE_P (TREE_TYPE (ptr)))
	find_what_p_points_to (cfun->decl, ptr);
    }

  /* Compute the call-used/clobbered sets.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gcall *stmt;
	  struct pt_solution *pt;

	  stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
	  if (!stmt)
	    continue;

	  /* Call-used set: const calls use nothing.  */
	  pt = gimple_call_use_set (stmt);
	  if (gimple_call_flags (stmt) & ECF_CONST)
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (cfun->decl, vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly used by calls.  */
	      /* ???  ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }

	  /* Call-clobbered set: const/pure/novops calls clobber nothing.  */
	  pt = gimple_call_clobber_set (stmt);
	  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
	    memset (pt, 0, sizeof (struct pt_solution));
	  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
	    {
	      *pt = find_what_var_points_to (cfun->decl, vi);
	      /* Escaped (and thus nonlocal) variables are always
		 implicitly clobbered by calls.  */
	      /* ???  ESCAPED can be empty even though NONLOCAL
		 always escaped.  */
	      pt->nonlocal = 1;
	      pt->escaped = 1;
	    }
	  else
	    {
	      /* If there is nothing special about this call then
		 we have made everything that is used also escape.  */
	      *pt = cfun->gimple_df->escaped;
	      pt->nonlocal = 1;
	    }
	}
    }

  timevar_pop (TV_TREE_PTA);
}
/* Delete created points-to sets.  Frees all solver data structures;
   the final per-SSA-name solutions stored in the IL survive.  */

static void
delete_points_to_sets (void)
{
  unsigned int i;

  delete shared_bitmap_table;
  shared_bitmap_table = NULL;
  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "Points to sets created:%d\n",
	     stats.points_to_sets_created);

  delete vi_for_tree;
  delete call_stmt_vars;
  bitmap_obstack_release (&pta_obstack);
  constraints.release ();

  for (i = 0; i < graph->size; i++)
    graph->complex[i].release ();
  free (graph->complex);

  free (graph->rep);
  free (graph->succs);
  free (graph->pe);
  free (graph->pe_rep);
  free (graph->indirect_cycles);
  free (graph);

  varmap.release ();
  variable_info_pool.release ();
  constraint_pool.release ();

  obstack_free (&fake_var_decl_obstack, NULL);

  delete final_solutions;
  obstack_free (&final_solutions_obstack, NULL);
}
/* Callback data for visit_loadstore.  */

struct vls_data
{
  unsigned short clique;	/* Clique id to assign to "other" refs.  */
  bool escaped_p;		/* Whether a restrict var escaped.  */
  bitmap rvars;			/* The restrict pointed-to variables.  */
};
/* Mark "other" loads and stores as belonging to CLIQUE and with
   base zero.  Callback for walk_stmt_load_store_ops; DATA is a
   vls_data.  Always returns false so the walk continues.  */

static bool
visit_loadstore (gimple *, tree base, tree ref, void *data)
{
  unsigned short clique = ((vls_data *) data)->clique;
  bitmap rvars = ((vls_data *) data)->rvars;
  bool escaped_p = ((vls_data *) data)->escaped_p;
  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    {
      tree ptr = TREE_OPERAND (base, 0);
      if (TREE_CODE (ptr) == SSA_NAME)
	{
	  /* For parameters, get at the points-to set for the actual parm
	     decl.  */
	  if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	      && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
		  || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
	    ptr = SSA_NAME_VAR (ptr);

	  /* We need to make sure 'ptr' doesn't include any of
	     the restrict tags we added bases for in its points-to set.  */
	  varinfo_t vi = lookup_vi_for_tree (ptr);
	  if (! vi)
	    return false;

	  vi = get_varinfo (find (vi->id));
	  if (bitmap_intersect_p (rvars, vi->solution)
	      || (escaped_p && bitmap_bit_p (vi->solution, escaped_id)))
	    return false;
	}

      /* Do not overwrite existing cliques (that includes clique, base
	 pairs we just set).  */
      if (MR_DEPENDENCE_CLIQUE (base) == 0)
	{
	  MR_DEPENDENCE_CLIQUE (base) = clique;
	  MR_DEPENDENCE_BASE (base) = 0;
	}
    }

  /* For plain decl accesses see whether they are accesses to globals
     and rewrite them to MEM_REFs with { clique, 0 }.  */
  if (VAR_P (base)
      && is_global_var (base)
      /* ???  We can't rewrite a plain decl with the walk_stmt_load_store
	 ops callback.  */
      && base != ref)
    {
      tree *basep = &ref;
      while (handled_component_p (*basep))
	basep = &TREE_OPERAND (*basep, 0);
      gcc_assert (VAR_P (*basep));
      tree ptr = build_fold_addr_expr (*basep);
      tree zero = build_int_cst (TREE_TYPE (ptr), 0);
      /* Replace the decl base with &decl + 0 so dependence info can
	 be attached.  */
      *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
      MR_DEPENDENCE_CLIQUE (*basep) = clique;
      MR_DEPENDENCE_BASE (*basep) = 0;
    }

  return false;
}
/* Callback data for maybe_set_dependence_info.  */

struct msdi_data {
  tree ptr;			/* The restrict pointer being processed.  */
  unsigned short *clique;	/* In/out: current clique id.  */
  unsigned short *last_ruid;	/* In/out: last restrict uid handed out.  */
  varinfo_t restrict_var;	/* The single restrict var PTR points to.  */
};
/* If BASE is a MEM_REF then assign a clique, base pair to it, updating
   CLIQUE, *RESTRICT_VAR and LAST_RUID as passed via DATA.
   Return whether dependence info was assigned to BASE.  */

static bool
maybe_set_dependence_info (gimple *, tree base, tree, void *data)
{
  tree ptr = ((msdi_data *)data)->ptr;
  unsigned short &clique = *((msdi_data *)data)->clique;
  unsigned short &last_ruid = *((msdi_data *)data)->last_ruid;
  varinfo_t restrict_var = ((msdi_data *)data)->restrict_var;
  if ((TREE_CODE (base) == MEM_REF
       || TREE_CODE (base) == TARGET_MEM_REF)
      && TREE_OPERAND (base, 0) == ptr)
    {
      /* Do not overwrite existing cliques.  This avoids overwriting dependence
	 info inlined from a function with restrict parameters inlined
	 into a function with restrict parameters.  This usually means we
	 prefer to be precise in innermost loops.  */
      if (MR_DEPENDENCE_CLIQUE (base) == 0)
	{
	  /* Lazily allocate the clique and the restrict uid on first
	     use.  */
	  if (clique == 0)
	    {
	      if (cfun->last_clique == 0)
		cfun->last_clique = 1;
	      clique = 1;
	    }
	  if (restrict_var->ruid == 0)
	    restrict_var->ruid = ++last_ruid;
	  MR_DEPENDENCE_CLIQUE (base) = clique;
	  MR_DEPENDENCE_BASE (base) = restrict_var->ruid;
	  return true;
	}
    }

  return false;
}
7529 /* Clear dependence info for the clique DATA. */
7531 static bool
7532 clear_dependence_clique (gimple *, tree base, tree, void *data)
7534 unsigned short clique = (uintptr_t)data;
7535 if ((TREE_CODE (base) == MEM_REF
7536 || TREE_CODE (base) == TARGET_MEM_REF)
7537 && MR_DEPENDENCE_CLIQUE (base) == clique)
7539 MR_DEPENDENCE_CLIQUE (base) = 0;
7540 MR_DEPENDENCE_BASE (base) = 0;
7543 return false;
/* Compute the set of independend memory references based on restrict
   tags and their conservative propagation to the points-to sets.  */

static void
compute_dependence_clique (void)
{
  /* First clear the special "local" clique.  */
  basic_block bb;
  if (cfun->last_clique != 0)
    FOR_EACH_BB_FN (bb, cfun)
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	   !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  walk_stmt_load_store_ops (stmt, (void *)(uintptr_t) 1,
				    clear_dependence_clique,
				    clear_dependence_clique);
	}

  unsigned short clique = 0;
  unsigned short last_ruid = 0;
  bitmap rvars = BITMAP_ALLOC (NULL);
  bool escaped_p = false;
  for (unsigned i = 0; i < num_ssa_names; ++i)
    {
      tree ptr = ssa_name (i);
      if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
	continue;

      /* Avoid all this when ptr is not dereferenced?  */
      tree p = ptr;
      if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	  && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
	      || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
	p = SSA_NAME_VAR (ptr);
      varinfo_t vi = lookup_vi_for_tree (p);
      if (!vi)
	continue;
      vi = get_varinfo (find (vi->id));
      bitmap_iterator bi;
      unsigned j;
      varinfo_t restrict_var = NULL;
      /* See whether the points-to solution contains exactly one
	 restrict tag (plus possibly NULL).  */
      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
	{
	  varinfo_t oi = get_varinfo (j);
	  /* Fields of a var share the restrict property of its head.  */
	  if (oi->head != j)
	    oi = get_varinfo (oi->head);
	  if (oi->is_restrict_var)
	    {
	      if (restrict_var
		  && restrict_var != oi)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "found restrict pointed-to "
			       "for ");
		      print_generic_expr (dump_file, ptr);
		      fprintf (dump_file, " but not exclusively\n");
		    }
		  restrict_var = NULL;
		  break;
		}
	      restrict_var = oi;
	    }
	  /* NULL is the only other valid points-to entry.  */
	  else if (oi->id != nothing_id)
	    {
	      restrict_var = NULL;
	      break;
	    }
	}
      /* Ok, found that ptr must(!) point to a single(!) restrict
	 variable.  */
      /* ???  PTA isn't really a proper propagation engine to compute
	 this property.
	 ???  We could handle merging of two restricts by unifying them.  */
      if (restrict_var)
	{
	  /* Now look at possible dereferences of ptr.  */
	  imm_use_iterator ui;
	  gimple *use_stmt;
	  bool used = false;
	  msdi_data data = { ptr, &clique, &last_ruid, restrict_var };
	  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
	    used |= walk_stmt_load_store_ops (use_stmt, &data,
					      maybe_set_dependence_info,
					      maybe_set_dependence_info);
	  if (used)
	    {
	      /* Add all subvars to the set of restrict pointed-to set.  */
	      for (unsigned sv = restrict_var->head; sv != 0;
		   sv = get_varinfo (sv)->next)
		bitmap_set_bit (rvars, sv);
	      varinfo_t escaped = get_varinfo (find (escaped_id));
	      if (bitmap_bit_p (escaped->solution, restrict_var->id))
		escaped_p = true;
	    }
	}
    }

  if (clique != 0)
    {
      /* Assign the BASE id zero to all accesses not based on a restrict
	 pointer.  That way they get disambiguated against restrict
	 accesses but not against each other.  */
      /* ???  For restricts derived from globals (thus not incoming
	 parameters) we can't restrict scoping properly thus the following
	 is too aggressive there.  For now we have excluded those globals from
	 getting into the MR_DEPENDENCE machinery.  */
      vls_data data = { clique, escaped_p, rvars };
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
	for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	     !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    gimple *stmt = gsi_stmt (gsi);
	    walk_stmt_load_store_ops (stmt, &data,
				      visit_loadstore, visit_loadstore);
	  }
    }

  BITMAP_FREE (rvars);
}
/* Compute points-to information for every SSA_NAME pointer in the
   current function and compute the transitive closure of escaped
   variables to re-initialize the call-clobber states of local variables.
   Main entry point driven by TODO_rebuild_alias; always returns 0.  */

unsigned int
compute_may_aliases (void)
{
  /* When IPA PTA already computed (more precise) information do not
     overwrite it with a local recomputation.  */
  if (cfun->gimple_df->ipa_pta)
    {
      if (dump_file)
	{
	  fprintf (dump_file, "\nNot re-computing points-to information "
		   "because IPA points-to information is available.\n\n");

	  /* But still dump what we have remaining it.  */
	  dump_alias_info (dump_file);
	}

      return 0;
    }

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  Compute the reachability set of escaped and call-used
     variables.  */
  compute_points_to_sets ();

  /* Debugging dumps.  */
  if (dump_file)
    dump_alias_info (dump_file);

  /* Compute restrict-based memory disambiguations.  */
  compute_dependence_clique ();

  /* Deallocate memory used by aliasing data structures and the internal
     points-to solution.  */
  delete_points_to_sets ();

  gcc_assert (!need_ssa_update_p (cfun));

  return 0;
}
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  */

namespace {

const pass_data pass_data_build_alias =
{
  GIMPLE_PASS, /* type */
  "alias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_alias : public gimple_opt_pass
{
public:
  pass_build_alias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_alias, ctxt)
  {}

  /* opt_pass methods: */
  /* Only run when PTA is enabled (-ftree-pta).  */
  virtual bool gate (function *) { return flag_tree_pta; }

}; // class pass_build_alias

} // anon namespace

gimple_opt_pass *
make_pass_build_alias (gcc::context *ctxt)
{
  return new pass_build_alias (ctxt);
}
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  Early-optimization variant of pass_build_alias.  */

namespace {

const pass_data pass_data_build_ealias =
{
  GIMPLE_PASS, /* type */
  "ealias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_ealias : public gimple_opt_pass
{
public:
  pass_build_ealias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_ealias, ctxt)
  {}

  /* opt_pass methods: */
  /* Only run when PTA is enabled (-ftree-pta).  */
  virtual bool gate (function *) { return flag_tree_pta; }

}; // class pass_build_ealias

} // anon namespace

gimple_opt_pass *
make_pass_build_ealias (gcc::context *ctxt)
{
  return new pass_build_ealias (ctxt);
}
/* IPA PTA solutions for ESCAPED.  Starts out fully conservative
   (anything = true) until IPA PTA computes a real solution.  */
struct pt_solution ipa_escaped_pt
  = { true, false, false, false, false,
      false, false, false, false, false, NULL };
/* Associate node with varinfo DATA.  Worker for
   cgraph_for_symbol_thunks_and_aliases.  Returns false so the
   traversal visits all aliases.  */
static bool
associate_varinfo_to_alias (struct cgraph_node *node, void *data)
{
  /* Only analyzed aliases and non-inlined thunks share the varinfo;
     ifunc resolvers must not, as the resolved function differs.  */
  if ((node->alias
       || (node->thunk.thunk_p
	   && ! node->global.inlined_to))
      && node->analyzed
      && !node->ifunc_resolver)
    insert_vi_for_tree (node->decl, (varinfo_t)data);
  return false;
}
7808 /* Dump varinfo VI to FILE. */
7810 static void
7811 dump_varinfo (FILE *file, varinfo_t vi)
7813 if (vi == NULL)
7814 return;
7816 fprintf (file, "%u: %s\n", vi->id, vi->name);
7818 const char *sep = " ";
7819 if (vi->is_artificial_var)
7820 fprintf (file, "%sartificial", sep);
7821 if (vi->is_special_var)
7822 fprintf (file, "%sspecial", sep);
7823 if (vi->is_unknown_size_var)
7824 fprintf (file, "%sunknown-size", sep);
7825 if (vi->is_full_var)
7826 fprintf (file, "%sfull", sep);
7827 if (vi->is_heap_var)
7828 fprintf (file, "%sheap", sep);
7829 if (vi->may_have_pointers)
7830 fprintf (file, "%smay-have-pointers", sep);
7831 if (vi->only_restrict_pointers)
7832 fprintf (file, "%sonly-restrict-pointers", sep);
7833 if (vi->is_restrict_var)
7834 fprintf (file, "%sis-restrict-var", sep);
7835 if (vi->is_global_var)
7836 fprintf (file, "%sglobal", sep);
7837 if (vi->is_ipa_escape_point)
7838 fprintf (file, "%sipa-escape-point", sep);
7839 if (vi->is_fn_info)
7840 fprintf (file, "%sfn-info", sep);
7841 if (vi->ruid)
7842 fprintf (file, "%srestrict-uid:%u", sep, vi->ruid);
7843 if (vi->next)
7844 fprintf (file, "%snext:%u", sep, vi->next);
7845 if (vi->head != vi->id)
7846 fprintf (file, "%shead:%u", sep, vi->head);
7847 if (vi->offset)
7848 fprintf (file, "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset);
7849 if (vi->size != ~(unsigned HOST_WIDE_INT)0)
7850 fprintf (file, "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size);
7851 if (vi->fullsize != ~(unsigned HOST_WIDE_INT)0
7852 && vi->fullsize != vi->size)
7853 fprintf (file, "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep,
7854 vi->fullsize);
7855 fprintf (file, "\n");
7857 if (vi->solution && !bitmap_empty_p (vi->solution))
7859 bitmap_iterator bi;
7860 unsigned i;
7861 fprintf (file, " solution: {");
7862 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
7863 fprintf (file, " %u", i);
7864 fprintf (file, " }\n");
7867 if (vi->oldsolution && !bitmap_empty_p (vi->oldsolution)
7868 && !bitmap_equal_p (vi->solution, vi->oldsolution))
7870 bitmap_iterator bi;
7871 unsigned i;
7872 fprintf (file, " oldsolution: {");
7873 EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi)
7874 fprintf (file, " %u", i);
7875 fprintf (file, " }\n");
7879 /* Dump varinfo VI to stderr. */
7881 DEBUG_FUNCTION void
7882 debug_varinfo (varinfo_t vi)
7884 dump_varinfo (stderr, vi);
7887 /* Dump varmap to FILE. */
7889 static void
7890 dump_varmap (FILE *file)
7892 if (varmap.length () == 0)
7893 return;
7895 fprintf (file, "variables:\n");
7897 for (unsigned int i = 0; i < varmap.length (); ++i)
7899 varinfo_t vi = get_varinfo (i);
7900 dump_varinfo (file, vi);
7903 fprintf (file, "\n");
7906 /* Dump varmap to stderr. */
7908 DEBUG_FUNCTION void
7909 debug_varmap (void)
7911 dump_varmap (stderr);
7914 /* Compute whether node is refered to non-locally. Worker for
7915 cgraph_for_symbol_thunks_and_aliases. */
7916 static bool
7917 refered_from_nonlocal_fn (struct cgraph_node *node, void *data)
7919 bool *nonlocal_p = (bool *)data;
7920 *nonlocal_p |= (node->used_from_other_partition
7921 || node->externally_visible
7922 || node->force_output
7923 || lookup_attribute ("noipa", DECL_ATTRIBUTES (node->decl)));
7924 return false;
7927 /* Same for varpool nodes. */
7928 static bool
7929 refered_from_nonlocal_var (struct varpool_node *node, void *data)
7931 bool *nonlocal_p = (bool *)data;
7932 *nonlocal_p |= (node->used_from_other_partition
7933 || node->externally_visible
7934 || node->force_output);
7935 return false;
7938 /* Execute the driver for IPA PTA. */
7939 static unsigned int
7940 ipa_pta_execute (void)
7942 struct cgraph_node *node;
7943 varpool_node *var;
7944 unsigned int from = 0;
7946 in_ipa_mode = 1;
7948 init_alias_vars ();
7950 if (dump_file && (dump_flags & TDF_DETAILS))
7952 symtab->dump (dump_file);
7953 fprintf (dump_file, "\n");
7956 if (dump_file)
7958 fprintf (dump_file, "Generating generic constraints\n\n");
7959 dump_constraints (dump_file, from);
7960 fprintf (dump_file, "\n");
7961 from = constraints.length ();
7964 /* Build the constraints. */
7965 FOR_EACH_DEFINED_FUNCTION (node)
7967 varinfo_t vi;
7968 /* Nodes without a body are not interesting. Especially do not
7969 visit clones at this point for now - we get duplicate decls
7970 there for inline clones at least. */
7971 if (!node->has_gimple_body_p () || node->global.inlined_to)
7972 continue;
7973 node->get_body ();
7975 gcc_assert (!node->clone_of);
7977 /* For externally visible or attribute used annotated functions use
7978 local constraints for their arguments.
7979 For local functions we see all callers and thus do not need initial
7980 constraints for parameters. */
7981 bool nonlocal_p = (node->used_from_other_partition
7982 || node->externally_visible
7983 || node->force_output
7984 || lookup_attribute ("noipa",
7985 DECL_ATTRIBUTES (node->decl)));
7986 node->call_for_symbol_thunks_and_aliases (refered_from_nonlocal_fn,
7987 &nonlocal_p, true);
7989 vi = create_function_info_for (node->decl,
7990 alias_get_name (node->decl), false,
7991 nonlocal_p);
7992 if (dump_file
7993 && from != constraints.length ())
7995 fprintf (dump_file,
7996 "Generating intial constraints for %s", node->name ());
7997 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
7998 fprintf (dump_file, " (%s)",
7999 IDENTIFIER_POINTER
8000 (DECL_ASSEMBLER_NAME (node->decl)));
8001 fprintf (dump_file, "\n\n");
8002 dump_constraints (dump_file, from);
8003 fprintf (dump_file, "\n");
8005 from = constraints.length ();
8008 node->call_for_symbol_thunks_and_aliases
8009 (associate_varinfo_to_alias, vi, true);
8012 /* Create constraints for global variables and their initializers. */
8013 FOR_EACH_VARIABLE (var)
8015 if (var->alias && var->analyzed)
8016 continue;
8018 varinfo_t vi = get_vi_for_tree (var->decl);
8020 /* For the purpose of IPA PTA unit-local globals are not
8021 escape points. */
8022 bool nonlocal_p = (var->used_from_other_partition
8023 || var->externally_visible
8024 || var->force_output);
8025 var->call_for_symbol_and_aliases (refered_from_nonlocal_var,
8026 &nonlocal_p, true);
8027 if (nonlocal_p)
8028 vi->is_ipa_escape_point = true;
8031 if (dump_file
8032 && from != constraints.length ())
8034 fprintf (dump_file,
8035 "Generating constraints for global initializers\n\n");
8036 dump_constraints (dump_file, from);
8037 fprintf (dump_file, "\n");
8038 from = constraints.length ();
8041 FOR_EACH_DEFINED_FUNCTION (node)
8043 struct function *func;
8044 basic_block bb;
8046 /* Nodes without a body are not interesting. */
8047 if (!node->has_gimple_body_p () || node->clone_of)
8048 continue;
8050 if (dump_file)
8052 fprintf (dump_file,
8053 "Generating constraints for %s", node->name ());
8054 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
8055 fprintf (dump_file, " (%s)",
8056 IDENTIFIER_POINTER
8057 (DECL_ASSEMBLER_NAME (node->decl)));
8058 fprintf (dump_file, "\n");
8061 func = DECL_STRUCT_FUNCTION (node->decl);
8062 gcc_assert (cfun == NULL);
8064 /* Build constriants for the function body. */
8065 FOR_EACH_BB_FN (bb, func)
8067 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
8068 gsi_next (&gsi))
8070 gphi *phi = gsi.phi ();
8072 if (! virtual_operand_p (gimple_phi_result (phi)))
8073 find_func_aliases (func, phi);
8076 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
8077 gsi_next (&gsi))
8079 gimple *stmt = gsi_stmt (gsi);
8081 find_func_aliases (func, stmt);
8082 find_func_clobbers (func, stmt);
8086 if (dump_file)
8088 fprintf (dump_file, "\n");
8089 dump_constraints (dump_file, from);
8090 fprintf (dump_file, "\n");
8091 from = constraints.length ();
8095 /* From the constraints compute the points-to sets. */
8096 solve_constraints ();
8098 /* Now post-process solutions to handle locals from different
8099 runtime instantiations coming in through recursive invocations. */
8100 unsigned shadow_var_cnt = 0;
8101 for (unsigned i = 1; i < varmap.length (); ++i)
8103 varinfo_t fi = get_varinfo (i);
8104 if (fi->is_fn_info
8105 && fi->decl)
8106 /* Automatic variables pointed to by their containing functions
8107 parameters need this treatment. */
8108 for (varinfo_t ai = first_vi_for_offset (fi, fi_parm_base);
8109 ai; ai = vi_next (ai))
8111 varinfo_t vi = get_varinfo (find (ai->id));
8112 bitmap_iterator bi;
8113 unsigned j;
8114 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8116 varinfo_t pt = get_varinfo (j);
8117 if (pt->shadow_var_uid == 0
8118 && pt->decl
8119 && auto_var_in_fn_p (pt->decl, fi->decl))
8121 pt->shadow_var_uid = allocate_decl_uid ();
8122 shadow_var_cnt++;
8126 /* As well as global variables which are another way of passing
8127 arguments to recursive invocations. */
8128 else if (fi->is_global_var)
8130 for (varinfo_t ai = fi; ai; ai = vi_next (ai))
8132 varinfo_t vi = get_varinfo (find (ai->id));
8133 bitmap_iterator bi;
8134 unsigned j;
8135 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8137 varinfo_t pt = get_varinfo (j);
8138 if (pt->shadow_var_uid == 0
8139 && pt->decl
8140 && auto_var_p (pt->decl))
8142 pt->shadow_var_uid = allocate_decl_uid ();
8143 shadow_var_cnt++;
8149 if (shadow_var_cnt && dump_file && (dump_flags & TDF_DETAILS))
8150 fprintf (dump_file, "Allocated %u shadow variables for locals "
8151 "maybe leaking into recursive invocations of their containing "
8152 "functions\n", shadow_var_cnt);
8154 /* Compute the global points-to sets for ESCAPED.
8155 ??? Note that the computed escape set is not correct
8156 for the whole unit as we fail to consider graph edges to
8157 externally visible functions. */
8158 ipa_escaped_pt = find_what_var_points_to (NULL, get_varinfo (escaped_id));
8160 /* Make sure the ESCAPED solution (which is used as placeholder in
8161 other solutions) does not reference itself. This simplifies
8162 points-to solution queries. */
8163 ipa_escaped_pt.ipa_escaped = 0;
8165 /* Assign the points-to sets to the SSA names in the unit. */
8166 FOR_EACH_DEFINED_FUNCTION (node)
8168 tree ptr;
8169 struct function *fn;
8170 unsigned i;
8171 basic_block bb;
8173 /* Nodes without a body are not interesting. */
8174 if (!node->has_gimple_body_p () || node->clone_of)
8175 continue;
8177 fn = DECL_STRUCT_FUNCTION (node->decl);
8179 /* Compute the points-to sets for pointer SSA_NAMEs. */
8180 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
8182 if (ptr
8183 && POINTER_TYPE_P (TREE_TYPE (ptr)))
8184 find_what_p_points_to (node->decl, ptr);
8187 /* Compute the call-use and call-clobber sets for indirect calls
8188 and calls to external functions. */
8189 FOR_EACH_BB_FN (bb, fn)
8191 gimple_stmt_iterator gsi;
8193 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
8195 gcall *stmt;
8196 struct pt_solution *pt;
8197 varinfo_t vi, fi;
8198 tree decl;
8200 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
8201 if (!stmt)
8202 continue;
8204 /* Handle direct calls to functions with body. */
8205 decl = gimple_call_fndecl (stmt);
8208 tree called_decl = NULL_TREE;
8209 if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
8210 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
8211 else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
8212 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
8214 if (called_decl != NULL_TREE
8215 && !fndecl_maybe_in_other_partition (called_decl))
8216 decl = called_decl;
8219 if (decl
8220 && (fi = lookup_vi_for_tree (decl))
8221 && fi->is_fn_info)
8223 *gimple_call_clobber_set (stmt)
8224 = find_what_var_points_to
8225 (node->decl, first_vi_for_offset (fi, fi_clobbers));
8226 *gimple_call_use_set (stmt)
8227 = find_what_var_points_to
8228 (node->decl, first_vi_for_offset (fi, fi_uses));
8230 /* Handle direct calls to external functions. */
8231 else if (decl && (!fi || fi->decl))
8233 pt = gimple_call_use_set (stmt);
8234 if (gimple_call_flags (stmt) & ECF_CONST)
8235 memset (pt, 0, sizeof (struct pt_solution));
8236 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
8238 *pt = find_what_var_points_to (node->decl, vi);
8239 /* Escaped (and thus nonlocal) variables are always
8240 implicitly used by calls. */
8241 /* ??? ESCAPED can be empty even though NONLOCAL
8242 always escaped. */
8243 pt->nonlocal = 1;
8244 pt->ipa_escaped = 1;
8246 else
8248 /* If there is nothing special about this call then
8249 we have made everything that is used also escape. */
8250 *pt = ipa_escaped_pt;
8251 pt->nonlocal = 1;
8254 pt = gimple_call_clobber_set (stmt);
8255 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
8256 memset (pt, 0, sizeof (struct pt_solution));
8257 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
8259 *pt = find_what_var_points_to (node->decl, vi);
8260 /* Escaped (and thus nonlocal) variables are always
8261 implicitly clobbered by calls. */
8262 /* ??? ESCAPED can be empty even though NONLOCAL
8263 always escaped. */
8264 pt->nonlocal = 1;
8265 pt->ipa_escaped = 1;
8267 else
8269 /* If there is nothing special about this call then
8270 we have made everything that is used also escape. */
8271 *pt = ipa_escaped_pt;
8272 pt->nonlocal = 1;
8275 /* Handle indirect calls. */
8276 else if ((fi = get_fi_for_callee (stmt)))
8278 /* We need to accumulate all clobbers/uses of all possible
8279 callees. */
8280 fi = get_varinfo (find (fi->id));
8281 /* If we cannot constrain the set of functions we'll end up
8282 calling we end up using/clobbering everything. */
8283 if (bitmap_bit_p (fi->solution, anything_id)
8284 || bitmap_bit_p (fi->solution, nonlocal_id)
8285 || bitmap_bit_p (fi->solution, escaped_id))
8287 pt_solution_reset (gimple_call_clobber_set (stmt));
8288 pt_solution_reset (gimple_call_use_set (stmt));
8290 else
8292 bitmap_iterator bi;
8293 unsigned i;
8294 struct pt_solution *uses, *clobbers;
8296 uses = gimple_call_use_set (stmt);
8297 clobbers = gimple_call_clobber_set (stmt);
8298 memset (uses, 0, sizeof (struct pt_solution));
8299 memset (clobbers, 0, sizeof (struct pt_solution));
8300 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
8302 struct pt_solution sol;
8304 vi = get_varinfo (i);
8305 if (!vi->is_fn_info)
8307 /* ??? We could be more precise here? */
8308 uses->nonlocal = 1;
8309 uses->ipa_escaped = 1;
8310 clobbers->nonlocal = 1;
8311 clobbers->ipa_escaped = 1;
8312 continue;
8315 if (!uses->anything)
8317 sol = find_what_var_points_to
8318 (node->decl,
8319 first_vi_for_offset (vi, fi_uses));
8320 pt_solution_ior_into (uses, &sol);
8322 if (!clobbers->anything)
8324 sol = find_what_var_points_to
8325 (node->decl,
8326 first_vi_for_offset (vi, fi_clobbers));
8327 pt_solution_ior_into (clobbers, &sol);
8332 else
8333 gcc_unreachable ();
8337 fn->gimple_df->ipa_pta = true;
8339 /* We have to re-set the final-solution cache after each function
8340 because what is a "global" is dependent on function context. */
8341 final_solutions->empty ();
8342 obstack_free (&final_solutions_obstack, NULL);
8343 gcc_obstack_init (&final_solutions_obstack);
8346 delete_points_to_sets ();
8348 in_ipa_mode = 0;
8350 return 0;
8353 namespace {
8355 const pass_data pass_data_ipa_pta =
8357 SIMPLE_IPA_PASS, /* type */
8358 "pta", /* name */
8359 OPTGROUP_NONE, /* optinfo_flags */
8360 TV_IPA_PTA, /* tv_id */
8361 0, /* properties_required */
8362 0, /* properties_provided */
8363 0, /* properties_destroyed */
8364 0, /* todo_flags_start */
8365 0, /* todo_flags_finish */
8368 class pass_ipa_pta : public simple_ipa_opt_pass
8370 public:
8371 pass_ipa_pta (gcc::context *ctxt)
8372 : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
8375 /* opt_pass methods: */
8376 virtual bool gate (function *)
8378 return (optimize
8379 && flag_ipa_pta
8380 /* Don't bother doing anything if the program has errors. */
8381 && !seen_error ());
8384 opt_pass * clone () { return new pass_ipa_pta (m_ctxt); }
8386 virtual unsigned int execute (function *) { return ipa_pta_execute (); }
8388 }; // class pass_ipa_pta
8390 } // anon namespace
8392 simple_ipa_opt_pass *
8393 make_pass_ipa_pta (gcc::context *ctxt)
8395 return new pass_ipa_pta (ctxt);