[i386] Fold __builtin_ia32_shufpd to VEC_PERM_EXPR
[official-gcc.git] / gcc / tree-ssa-structalias.c
blobd8ff2ce6dfe393205ecad7cd61fcd8f3061d91c1
1 /* Tree based points-to analysis
2 Copyright (C) 2005-2019 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "tree-pass.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "diagnostic-core.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "stmt.h"
37 #include "gimple-iterator.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "params.h"
41 #include "gimple-walk.h"
42 #include "varasm.h"
43 #include "stringpool.h"
44 #include "attribs.h"
45 #include "tree-ssa.h"
47 /* The idea behind this analyzer is to generate set constraints from the
48 program, then solve the resulting constraints in order to generate the
49 points-to sets.
51 Set constraints are a way of modeling program analysis problems that
52 involve sets. They consist of an inclusion constraint language,
53 describing the variables (each variable is a set) and operations that
54 are involved on the variables, and a set of rules that derive facts
55 from these operations. To solve a system of set constraints, you derive
56 all possible facts under the rules, which gives you the correct sets
57 as a consequence.
59 See "Efficient Field-sensitive pointer analysis for C" by "David
60 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
61 http://citeseer.ist.psu.edu/pearce04efficient.html
63 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
64 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
65 http://citeseer.ist.psu.edu/heintze01ultrafast.html
67 There are three types of real constraint expressions, DEREF,
68 ADDRESSOF, and SCALAR. Each constraint expression consists
69 of a constraint type, a variable, and an offset.
71 SCALAR is a constraint expression type used to represent x, whether
72 it appears on the LHS or the RHS of a statement.
73 DEREF is a constraint expression type used to represent *x, whether
74 it appears on the LHS or the RHS of a statement.
75 ADDRESSOF is a constraint expression used to represent &x, whether
76 it appears on the LHS or the RHS of a statement.
78 Each pointer variable in the program is assigned an integer id, and
79 each field of a structure variable is assigned an integer id as well.
81 Structure variables are linked to their list of fields through a "next
82 field" in each variable that points to the next field in offset
83 order.
84 Each variable for a structure field has
86 1. "size", that tells the size in bits of that field.
87 2. "fullsize", that tells the size in bits of the entire structure.
88 3. "offset", that tells the offset in bits from the beginning of the
89 structure to this field.
91 Thus,
92 struct f
94 int a;
95 int b;
96 } foo;
97 int *bar;
99 looks like
101 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
102 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
103 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
106 In order to solve the system of set constraints, the following is
107 done:
109 1. Each constraint variable x has a solution set associated with it,
110 Sol(x).
112 2. Constraints are separated into direct, copy, and complex.
113 Direct constraints are ADDRESSOF constraints that require no extra
114 processing, such as P = &Q
115 Copy constraints are those of the form P = Q.
116 Complex constraints are all the constraints involving dereferences
117 and offsets (including offsetted copies).
119 3. All direct constraints of the form P = &Q are processed, such
120 that Q is added to Sol(P)
122 4. All complex constraints for a given constraint variable are stored in a
123 linked list attached to that variable's node.
125 5. A directed graph is built out of the copy constraints. Each
126 constraint variable is a node in the graph, and an edge from
127 Q to P is added for each copy constraint of the form P = Q
129 6. The graph is then walked, and solution sets are
130 propagated along the copy edges, such that an edge from Q to P
131 causes Sol(P) <- Sol(P) union Sol(Q).
133 7. As we visit each node, all complex constraints associated with
134 that node are processed by adding appropriate copy edges to the graph, or the
135 appropriate variables to the solution set.
137 8. The process of walking the graph is iterated until no solution
138 sets change.
140 Prior to walking the graph in steps 6 and 7, We perform static
141 cycle elimination on the constraint graph, as well
142 as off-line variable substitution.
144 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
145 on and turned into anything), but isn't. You can just see what offset
146 inside the pointed-to struct it's going to access.
148 TODO: Constant bounded arrays can be handled as if they were structs of the
149 same number of elements.
151 TODO: Modeling heap and incoming pointers becomes much better if we
152 add fields to them as we discover them, which we could do.
154 TODO: We could handle unions, but to be honest, it's probably not
155 worth the pain or slowdown. */
157 /* IPA-PTA optimizations possible.
159 When the indirect function called is ANYTHING we can add disambiguation
160 based on the function signatures (or simply the parameter count which
161 is the varinfo size). We also do not need to consider functions that
162 do not have their address taken.
164 The is_global_var bit which marks escape points is overly conservative
165 in IPA mode. Split it to is_escape_point and is_global_var - only
166 externally visible globals are escape points in IPA mode.
167 There is now is_ipa_escape_point but this is only used in a few
168 selected places.
170 The way we introduce DECL_PT_UID to avoid fixing up all points-to
171 sets in the translation unit when we copy a DECL during inlining
172 pessimizes precision. The advantage is that the DECL_PT_UID keeps
173 compile-time and memory usage overhead low - the points-to sets
174 do not grow or get unshared as they would during a fixup phase.
175 An alternative solution is to delay IPA PTA until after all
176 inlining transformations have been applied.
178 The way we propagate clobber/use information isn't optimized.
179 It should use a new complex constraint that properly filters
180 out local variables of the callee (though that would make
181 the sets invalid after inlining). OTOH we might as well
182 admit defeat to WHOPR and simply do all the clobber/use analysis
183 and propagation after PTA finished but before we threw away
184 points-to information for memory variables. WHOPR and PTA
185 do not play along well anyway - the whole constraint solving
186 would need to be done in WPA phase and it will be very interesting
187 to apply the results to local SSA names during LTRANS phase.
189 We probably should compute a per-function unit-ESCAPE solution
190 propagating it simply like the clobber / uses solutions. The
191 solution can go alongside the non-IPA escaped solution and be
192 used to query which vars escape the unit through a function.
193 This is also required to make the escaped-HEAP trick work in IPA mode.
195 We never put function decls in points-to sets so we do not
196 keep the set of called functions for indirect calls.
198 And probably more. */
200 static bool use_field_sensitive = true;
201 static int in_ipa_mode = 0;
203 /* Used for predecessor bitmaps. */
204 static bitmap_obstack predbitmap_obstack;
206 /* Used for points-to sets. */
207 static bitmap_obstack pta_obstack;
209 /* Used for oldsolution members of variables. */
210 static bitmap_obstack oldpta_obstack;
212 /* Used for per-solver-iteration bitmaps. */
213 static bitmap_obstack iteration_obstack;
215 static unsigned int create_variable_info_for (tree, const char *, bool);
216 typedef struct constraint_graph *constraint_graph_t;
217 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
219 struct constraint;
220 typedef struct constraint *constraint_t;
223 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
224 if (a) \
225 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
/* Counters collected while building and solving the constraint system,
   reported when dumping is enabled.  */

static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;
239 struct variable_info
241 /* ID of this variable */
242 unsigned int id;
244 /* True if this is a variable created by the constraint analysis, such as
245 heap variables and constraints we had to break up. */
246 unsigned int is_artificial_var : 1;
248 /* True if this is a special variable whose solution set should not be
249 changed. */
250 unsigned int is_special_var : 1;
252 /* True for variables whose size is not known or variable. */
253 unsigned int is_unknown_size_var : 1;
255 /* True for (sub-)fields that represent a whole variable. */
256 unsigned int is_full_var : 1;
258 /* True if this is a heap variable. */
259 unsigned int is_heap_var : 1;
261 /* True if this is a register variable. */
262 unsigned int is_reg_var : 1;
264 /* True if this field may contain pointers. */
265 unsigned int may_have_pointers : 1;
267 /* True if this field has only restrict qualified pointers. */
268 unsigned int only_restrict_pointers : 1;
270 /* True if this represents a heap var created for a restrict qualified
271 pointer. */
272 unsigned int is_restrict_var : 1;
274 /* True if this represents a global variable. */
275 unsigned int is_global_var : 1;
277 /* True if this represents a module escape point for IPA analysis. */
278 unsigned int is_ipa_escape_point : 1;
280 /* True if this represents a IPA function info. */
281 unsigned int is_fn_info : 1;
283 /* ??? Store somewhere better. */
284 unsigned short ruid;
286 /* The ID of the variable for the next field in this structure
287 or zero for the last field in this structure. */
288 unsigned next;
290 /* The ID of the variable for the first field in this structure. */
291 unsigned head;
293 /* Offset of this variable, in bits, from the base variable */
294 unsigned HOST_WIDE_INT offset;
296 /* Size of the variable, in bits. */
297 unsigned HOST_WIDE_INT size;
299 /* Full size of the base variable, in bits. */
300 unsigned HOST_WIDE_INT fullsize;
302 /* In IPA mode the shadow UID in case the variable needs to be duplicated in
303 the final points-to solution because it reaches its containing
304 function recursively. Zero if none is needed. */
305 unsigned int shadow_var_uid;
307 /* Name of this variable */
308 const char *name;
310 /* Tree that this variable is associated with. */
311 tree decl;
313 /* Points-to set for this variable. */
314 bitmap solution;
316 /* Old points-to set for this variable. */
317 bitmap oldsolution;
319 typedef struct variable_info *varinfo_t;
321 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
322 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
323 unsigned HOST_WIDE_INT);
324 static varinfo_t lookup_vi_for_tree (tree);
325 static inline bool type_can_have_subvars (const_tree);
326 static void make_param_constraints (varinfo_t);
328 /* Pool of variable info structures. */
329 static object_allocator<variable_info> variable_info_pool
330 ("Variable info pool");
332 /* Map varinfo to final pt_solution. */
333 static hash_map<varinfo_t, pt_solution *> *final_solutions;
334 struct obstack final_solutions_obstack;
336 /* Table of variable info structures for constraint variables.
337 Indexed directly by variable info id. */
338 static vec<varinfo_t> varmap;
340 /* Return the varmap element N */
342 static inline varinfo_t
343 get_varinfo (unsigned int n)
345 return varmap[n];
348 /* Return the next variable in the list of sub-variables of VI
349 or NULL if VI is the last sub-variable. */
351 static inline varinfo_t
352 vi_next (varinfo_t vi)
354 return get_varinfo (vi->next);
357 /* Static IDs for the special variables. Variable ID zero is unused
358 and used as terminator for the sub-variable chain. */
359 enum { nothing_id = 1, anything_id = 2, string_id = 3,
360 escaped_id = 4, nonlocal_id = 5,
361 storedanything_id = 6, integer_id = 7 };
363 /* Return a new variable info structure consisting for a variable
364 named NAME, and using constraint graph node NODE. Append it
365 to the vector of variable info structures. */
367 static varinfo_t
368 new_var_info (tree t, const char *name, bool add_id)
370 unsigned index = varmap.length ();
371 varinfo_t ret = variable_info_pool.allocate ();
373 if (dump_file && add_id)
375 char *tempname = xasprintf ("%s(%d)", name, index);
376 name = ggc_strdup (tempname);
377 free (tempname);
380 ret->id = index;
381 ret->name = name;
382 ret->decl = t;
383 /* Vars without decl are artificial and do not have sub-variables. */
384 ret->is_artificial_var = (t == NULL_TREE);
385 ret->is_special_var = false;
386 ret->is_unknown_size_var = false;
387 ret->is_full_var = (t == NULL_TREE);
388 ret->is_heap_var = false;
389 ret->may_have_pointers = true;
390 ret->only_restrict_pointers = false;
391 ret->is_restrict_var = false;
392 ret->ruid = 0;
393 ret->is_global_var = (t == NULL_TREE);
394 ret->is_ipa_escape_point = false;
395 ret->is_fn_info = false;
396 if (t && DECL_P (t))
397 ret->is_global_var = (is_global_var (t)
398 /* We have to treat even local register variables
399 as escape points. */
400 || (VAR_P (t) && DECL_HARD_REGISTER (t)));
401 ret->is_reg_var = (t && TREE_CODE (t) == SSA_NAME);
402 ret->solution = BITMAP_ALLOC (&pta_obstack);
403 ret->oldsolution = NULL;
404 ret->next = 0;
405 ret->shadow_var_uid = 0;
406 ret->head = ret->id;
408 stats.total_vars++;
410 varmap.safe_push (ret);
412 return ret;
415 /* A map mapping call statements to per-stmt variables for uses
416 and clobbers specific to the call. */
417 static hash_map<gimple *, varinfo_t> *call_stmt_vars;
419 /* Lookup or create the variable for the call statement CALL. */
421 static varinfo_t
422 get_call_vi (gcall *call)
424 varinfo_t vi, vi2;
426 bool existed;
427 varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
428 if (existed)
429 return *slot_p;
431 vi = new_var_info (NULL_TREE, "CALLUSED", true);
432 vi->offset = 0;
433 vi->size = 1;
434 vi->fullsize = 2;
435 vi->is_full_var = true;
436 vi->is_reg_var = true;
438 vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED", true);
439 vi2->offset = 1;
440 vi2->size = 1;
441 vi2->fullsize = 2;
442 vi2->is_full_var = true;
443 vi2->is_reg_var = true;
445 vi->next = vi2->id;
447 *slot_p = vi;
448 return vi;
451 /* Lookup the variable for the call statement CALL representing
452 the uses. Returns NULL if there is nothing special about this call. */
454 static varinfo_t
455 lookup_call_use_vi (gcall *call)
457 varinfo_t *slot_p = call_stmt_vars->get (call);
458 if (slot_p)
459 return *slot_p;
461 return NULL;
464 /* Lookup the variable for the call statement CALL representing
465 the clobbers. Returns NULL if there is nothing special about this call. */
467 static varinfo_t
468 lookup_call_clobber_vi (gcall *call)
470 varinfo_t uses = lookup_call_use_vi (call);
471 if (!uses)
472 return NULL;
474 return vi_next (uses);
477 /* Lookup or create the variable for the call statement CALL representing
478 the uses. */
480 static varinfo_t
481 get_call_use_vi (gcall *call)
483 return get_call_vi (call);
486 /* Lookup or create the variable for the call statement CALL representing
487 the clobbers. */
489 static varinfo_t ATTRIBUTE_UNUSED
490 get_call_clobber_vi (gcall *call)
492 return vi_next (get_call_vi (call));
496 enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};
498 /* An expression that appears in a constraint. */
500 struct constraint_expr
502 /* Constraint type. */
503 constraint_expr_type type;
505 /* Variable we are referring to in the constraint. */
506 unsigned int var;
508 /* Offset, in bits, of this constraint from the beginning of
509 variables it ends up referring to.
511 IOW, in a deref constraint, we would deref, get the result set,
512 then add OFFSET to each member. */
513 HOST_WIDE_INT offset;
516 /* Use 0x8000... as special unknown offset. */
517 #define UNKNOWN_OFFSET HOST_WIDE_INT_MIN
519 typedef struct constraint_expr ce_s;
520 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
521 static void get_constraint_for (tree, vec<ce_s> *);
522 static void get_constraint_for_rhs (tree, vec<ce_s> *);
523 static void do_deref (vec<ce_s> *);
525 /* Our set constraints are made up of two constraint expressions, one
526 LHS, and one RHS.
528 As described in the introduction, our set constraints each represent an
529 operation between set valued variables.
531 struct constraint
533 struct constraint_expr lhs;
534 struct constraint_expr rhs;
537 /* List of constraints that we use to build the constraint graph from. */
539 static vec<constraint_t> constraints;
540 static object_allocator<constraint> constraint_pool ("Constraint pool");
542 /* The constraint graph is represented as an array of bitmaps
543 containing successor nodes. */
545 struct constraint_graph
547 /* Size of this graph, which may be different than the number of
548 nodes in the variable map. */
549 unsigned int size;
551 /* Explicit successors of each node. */
552 bitmap *succs;
554 /* Implicit predecessors of each node (Used for variable
555 substitution). */
556 bitmap *implicit_preds;
558 /* Explicit predecessors of each node (Used for variable substitution). */
559 bitmap *preds;
561 /* Indirect cycle representatives, or -1 if the node has no indirect
562 cycles. */
563 int *indirect_cycles;
565 /* Representative node for a node. rep[a] == a unless the node has
566 been unified. */
567 unsigned int *rep;
569 /* Equivalence class representative for a label. This is used for
570 variable substitution. */
571 int *eq_rep;
573 /* Pointer equivalence label for a node. All nodes with the same
574 pointer equivalence label can be unified together at some point
575 (either during constraint optimization or after the constraint
576 graph is built). */
577 unsigned int *pe;
579 /* Pointer equivalence representative for a label. This is used to
580 handle nodes that are pointer equivalent but not location
581 equivalent. We can unite these once the addressof constraints
582 are transformed into initial points-to sets. */
583 int *pe_rep;
585 /* Pointer equivalence label for each node, used during variable
586 substitution. */
587 unsigned int *pointer_label;
589 /* Location equivalence label for each node, used during location
590 equivalence finding. */
591 unsigned int *loc_label;
593 /* Pointed-by set for each node, used during location equivalence
594 finding. This is pointed-by rather than pointed-to, because it
595 is constructed using the predecessor graph. */
596 bitmap *pointed_by;
598 /* Points to sets for pointer equivalence. This is *not* the actual
599 points-to sets for nodes. */
600 bitmap *points_to;
602 /* Bitmap of nodes where the bit is set if the node is a direct
603 node. Used for variable substitution. */
604 sbitmap direct_nodes;
606 /* Bitmap of nodes where the bit is set if the node is address
607 taken. Used for variable substitution. */
608 bitmap address_taken;
610 /* Vector of complex constraints for each graph node. Complex
611 constraints are those involving dereferences or offsets that are
612 not 0. */
613 vec<constraint_t> *complex;
616 static constraint_graph_t graph;
618 /* During variable substitution and the offline version of indirect
619 cycle finding, we create nodes to represent dereferences and
620 address taken constraints. These represent where these start and
621 end. */
622 #define FIRST_REF_NODE (varmap).length ()
623 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
625 /* Return the representative node for NODE, if NODE has been unioned
626 with another NODE.
627 This function performs path compression along the way to finding
628 the representative. */
630 static unsigned int
631 find (unsigned int node)
633 gcc_checking_assert (node < graph->size);
634 if (graph->rep[node] != node)
635 return graph->rep[node] = find (graph->rep[node]);
636 return node;
639 /* Union the TO and FROM nodes to the TO nodes.
640 Note that at some point in the future, we may want to do
641 union-by-rank, in which case we are going to have to return the
642 node we unified to. */
644 static bool
645 unite (unsigned int to, unsigned int from)
647 gcc_checking_assert (to < graph->size && from < graph->size);
648 if (to != from && graph->rep[from] != to)
650 graph->rep[from] = to;
651 return true;
653 return false;
656 /* Create a new constraint consisting of LHS and RHS expressions. */
658 static constraint_t
659 new_constraint (const struct constraint_expr lhs,
660 const struct constraint_expr rhs)
662 constraint_t ret = constraint_pool.allocate ();
663 ret->lhs = lhs;
664 ret->rhs = rhs;
665 return ret;
668 /* Print out constraint C to FILE. */
670 static void
671 dump_constraint (FILE *file, constraint_t c)
673 if (c->lhs.type == ADDRESSOF)
674 fprintf (file, "&");
675 else if (c->lhs.type == DEREF)
676 fprintf (file, "*");
677 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
678 if (c->lhs.offset == UNKNOWN_OFFSET)
679 fprintf (file, " + UNKNOWN");
680 else if (c->lhs.offset != 0)
681 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
682 fprintf (file, " = ");
683 if (c->rhs.type == ADDRESSOF)
684 fprintf (file, "&");
685 else if (c->rhs.type == DEREF)
686 fprintf (file, "*");
687 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
688 if (c->rhs.offset == UNKNOWN_OFFSET)
689 fprintf (file, " + UNKNOWN");
690 else if (c->rhs.offset != 0)
691 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
695 void debug_constraint (constraint_t);
696 void debug_constraints (void);
697 void debug_constraint_graph (void);
698 void debug_solution_for_var (unsigned int);
699 void debug_sa_points_to_info (void);
700 void debug_varinfo (varinfo_t);
701 void debug_varmap (void);
703 /* Print out constraint C to stderr. */
705 DEBUG_FUNCTION void
706 debug_constraint (constraint_t c)
708 dump_constraint (stderr, c);
709 fprintf (stderr, "\n");
712 /* Print out all constraints to FILE */
714 static void
715 dump_constraints (FILE *file, int from)
717 int i;
718 constraint_t c;
719 for (i = from; constraints.iterate (i, &c); i++)
720 if (c)
722 dump_constraint (file, c);
723 fprintf (file, "\n");
727 /* Print out all constraints to stderr. */
729 DEBUG_FUNCTION void
730 debug_constraints (void)
732 dump_constraints (stderr, 0);
735 /* Print the constraint graph in dot format. */
737 static void
738 dump_constraint_graph (FILE *file)
740 unsigned int i;
742 /* Only print the graph if it has already been initialized: */
743 if (!graph)
744 return;
746 /* Prints the header of the dot file: */
747 fprintf (file, "strict digraph {\n");
748 fprintf (file, " node [\n shape = box\n ]\n");
749 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
750 fprintf (file, "\n // List of nodes and complex constraints in "
751 "the constraint graph:\n");
753 /* The next lines print the nodes in the graph together with the
754 complex constraints attached to them. */
755 for (i = 1; i < graph->size; i++)
757 if (i == FIRST_REF_NODE)
758 continue;
759 if (find (i) != i)
760 continue;
761 if (i < FIRST_REF_NODE)
762 fprintf (file, "\"%s\"", get_varinfo (i)->name);
763 else
764 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
765 if (graph->complex[i].exists ())
767 unsigned j;
768 constraint_t c;
769 fprintf (file, " [label=\"\\N\\n");
770 for (j = 0; graph->complex[i].iterate (j, &c); ++j)
772 dump_constraint (file, c);
773 fprintf (file, "\\l");
775 fprintf (file, "\"]");
777 fprintf (file, ";\n");
780 /* Go over the edges. */
781 fprintf (file, "\n // Edges in the constraint graph:\n");
782 for (i = 1; i < graph->size; i++)
784 unsigned j;
785 bitmap_iterator bi;
786 if (find (i) != i)
787 continue;
788 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
790 unsigned to = find (j);
791 if (i == to)
792 continue;
793 if (i < FIRST_REF_NODE)
794 fprintf (file, "\"%s\"", get_varinfo (i)->name);
795 else
796 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
797 fprintf (file, " -> ");
798 if (to < FIRST_REF_NODE)
799 fprintf (file, "\"%s\"", get_varinfo (to)->name);
800 else
801 fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
802 fprintf (file, ";\n");
806 /* Prints the tail of the dot file. */
807 fprintf (file, "}\n");
810 /* Print out the constraint graph to stderr. */
812 DEBUG_FUNCTION void
813 debug_constraint_graph (void)
815 dump_constraint_graph (stderr);
818 /* SOLVER FUNCTIONS
820 The solver is a simple worklist solver, that works on the following
821 algorithm:
823 sbitmap changed_nodes = all zeroes;
824 changed_count = 0;
825 For each node that is not already collapsed:
826 changed_count++;
827 set bit in changed nodes
829 while (changed_count > 0)
831 compute topological ordering for constraint graph
833 find and collapse cycles in the constraint graph (updating
834 changed if necessary)
836 for each node (n) in the graph in topological order:
837 changed_count--;
839 Process each complex constraint associated with the node,
840 updating changed if necessary.
842 For each outgoing edge from n, propagate the solution from n to
843 the destination of the edge, updating changed as necessary.
845 } */
847 /* Return true if two constraint expressions A and B are equal. */
849 static bool
850 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
852 return a.type == b.type && a.var == b.var && a.offset == b.offset;
855 /* Return true if constraint expression A is less than constraint expression
856 B. This is just arbitrary, but consistent, in order to give them an
857 ordering. */
859 static bool
860 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
862 if (a.type == b.type)
864 if (a.var == b.var)
865 return a.offset < b.offset;
866 else
867 return a.var < b.var;
869 else
870 return a.type < b.type;
873 /* Return true if constraint A is less than constraint B. This is just
874 arbitrary, but consistent, in order to give them an ordering. */
876 static bool
877 constraint_less (const constraint_t &a, const constraint_t &b)
879 if (constraint_expr_less (a->lhs, b->lhs))
880 return true;
881 else if (constraint_expr_less (b->lhs, a->lhs))
882 return false;
883 else
884 return constraint_expr_less (a->rhs, b->rhs);
887 /* Return true if two constraints A and B are equal. */
889 static bool
890 constraint_equal (struct constraint a, struct constraint b)
892 return constraint_expr_equal (a.lhs, b.lhs)
893 && constraint_expr_equal (a.rhs, b.rhs);
897 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
899 static constraint_t
900 constraint_vec_find (vec<constraint_t> vec,
901 struct constraint lookfor)
903 unsigned int place;
904 constraint_t found;
906 if (!vec.exists ())
907 return NULL;
909 place = vec.lower_bound (&lookfor, constraint_less);
910 if (place >= vec.length ())
911 return NULL;
912 found = vec[place];
913 if (!constraint_equal (*found, lookfor))
914 return NULL;
915 return found;
918 /* Union two constraint vectors, TO and FROM. Put the result in TO.
919 Returns true of TO set is changed. */
921 static bool
922 constraint_set_union (vec<constraint_t> *to,
923 vec<constraint_t> *from)
925 int i;
926 constraint_t c;
927 bool any_change = false;
929 FOR_EACH_VEC_ELT (*from, i, c)
931 if (constraint_vec_find (*to, *c) == NULL)
933 unsigned int place = to->lower_bound (c, constraint_less);
934 to->safe_insert (place, c);
935 any_change = true;
938 return any_change;
941 /* Expands the solution in SET to all sub-fields of variables included. */
943 static bitmap
944 solution_set_expand (bitmap set, bitmap *expanded)
946 bitmap_iterator bi;
947 unsigned j;
949 if (*expanded)
950 return *expanded;
952 *expanded = BITMAP_ALLOC (&iteration_obstack);
954 /* In a first pass expand to the head of the variables we need to
955 add all sub-fields off. This avoids quadratic behavior. */
956 EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
958 varinfo_t v = get_varinfo (j);
959 if (v->is_artificial_var
960 || v->is_full_var)
961 continue;
962 bitmap_set_bit (*expanded, v->head);
965 /* In the second pass now expand all head variables with subfields. */
966 EXECUTE_IF_SET_IN_BITMAP (*expanded, 0, j, bi)
968 varinfo_t v = get_varinfo (j);
969 if (v->head != j)
970 continue;
971 for (v = vi_next (v); v != NULL; v = vi_next (v))
972 bitmap_set_bit (*expanded, v->id);
975 /* And finally set the rest of the bits from SET. */
976 bitmap_ior_into (*expanded, set);
978 return *expanded;
981 /* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
982 process. */
984 static bool
985 set_union_with_increment (bitmap to, bitmap delta, HOST_WIDE_INT inc,
986 bitmap *expanded_delta)
988 bool changed = false;
989 bitmap_iterator bi;
990 unsigned int i;
992 /* If the solution of DELTA contains anything it is good enough to transfer
993 this to TO. */
994 if (bitmap_bit_p (delta, anything_id))
995 return bitmap_set_bit (to, anything_id);
997 /* If the offset is unknown we have to expand the solution to
998 all subfields. */
999 if (inc == UNKNOWN_OFFSET)
1001 delta = solution_set_expand (delta, expanded_delta);
1002 changed |= bitmap_ior_into (to, delta);
1003 return changed;
1006 /* For non-zero offset union the offsetted solution into the destination. */
1007 EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
1009 varinfo_t vi = get_varinfo (i);
1011 /* If this is a variable with just one field just set its bit
1012 in the result. */
1013 if (vi->is_artificial_var
1014 || vi->is_unknown_size_var
1015 || vi->is_full_var)
1016 changed |= bitmap_set_bit (to, i);
1017 else
1019 HOST_WIDE_INT fieldoffset = vi->offset + inc;
1020 unsigned HOST_WIDE_INT size = vi->size;
1022 /* If the offset makes the pointer point to before the
1023 variable use offset zero for the field lookup. */
1024 if (fieldoffset < 0)
1025 vi = get_varinfo (vi->head);
1026 else
1027 vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
1031 changed |= bitmap_set_bit (to, vi->id);
1032 if (vi->is_full_var
1033 || vi->next == 0)
1034 break;
1036 /* We have to include all fields that overlap the current field
1037 shifted by inc. */
1038 vi = vi_next (vi);
1040 while (vi->offset < fieldoffset + size);
1044 return changed;
1047 /* Insert constraint C into the list of complex constraints for graph
1048 node VAR. */
1050 static void
1051 insert_into_complex (constraint_graph_t graph,
1052 unsigned int var, constraint_t c)
1054 vec<constraint_t> complex = graph->complex[var];
1055 unsigned int place = complex.lower_bound (c, constraint_less);
1057 /* Only insert constraints that do not already exist. */
1058 if (place >= complex.length ()
1059 || !constraint_equal (*c, *complex[place]))
1060 graph->complex[var].safe_insert (place, c);
1064 /* Condense two variable nodes into a single variable node, by moving
1065 all associated info from FROM to TO. Returns true if TO node's
1066 constraint set changes after the merge. */
1068 static bool
1069 merge_node_constraints (constraint_graph_t graph, unsigned int to,
1070 unsigned int from)
1072 unsigned int i;
1073 constraint_t c;
1074 bool any_change = false;
1076 gcc_checking_assert (find (from) == to);
1078 /* Move all complex constraints from src node into to node */
1079 FOR_EACH_VEC_ELT (graph->complex[from], i, c)
1081 /* In complex constraints for node FROM, we may have either
1082 a = *FROM, and *FROM = a, or an offseted constraint which are
1083 always added to the rhs node's constraints. */
1085 if (c->rhs.type == DEREF)
1086 c->rhs.var = to;
1087 else if (c->lhs.type == DEREF)
1088 c->lhs.var = to;
1089 else
1090 c->rhs.var = to;
1093 any_change = constraint_set_union (&graph->complex[to],
1094 &graph->complex[from]);
1095 graph->complex[from].release ();
1096 return any_change;
/* Remove edges involving NODE from GRAPH.  */

static void
clear_edges_for_node (constraint_graph_t graph, unsigned int node)
{
  /* Only successor edges are represented explicitly here, so releasing
     NODE's successor bitmap removes all of its edges.  */
  if (graph->succs[node])
    BITMAP_FREE (graph->succs[node]);
}
1109 /* Merge GRAPH nodes FROM and TO into node TO. */
1111 static void
1112 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1113 unsigned int from)
1115 if (graph->indirect_cycles[from] != -1)
1117 /* If we have indirect cycles with the from node, and we have
1118 none on the to node, the to node has indirect cycles from the
1119 from node now that they are unified.
1120 If indirect cycles exist on both, unify the nodes that they
1121 are in a cycle with, since we know they are in a cycle with
1122 each other. */
1123 if (graph->indirect_cycles[to] == -1)
1124 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1127 /* Merge all the successor edges. */
1128 if (graph->succs[from])
1130 if (!graph->succs[to])
1131 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1132 bitmap_ior_into (graph->succs[to],
1133 graph->succs[from]);
1136 clear_edges_for_node (graph, from);
1140 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1141 it doesn't exist in the graph already. */
1143 static void
1144 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1145 unsigned int from)
1147 if (to == from)
1148 return;
1150 if (!graph->implicit_preds[to])
1151 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1153 if (bitmap_set_bit (graph->implicit_preds[to], from))
1154 stats.num_implicit_edges++;
1157 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1158 it doesn't exist in the graph already.
1159 Return false if the edge already existed, true otherwise. */
1161 static void
1162 add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1163 unsigned int from)
1165 if (!graph->preds[to])
1166 graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1167 bitmap_set_bit (graph->preds[to], from);
1170 /* Add a graph edge to GRAPH, going from FROM to TO if
1171 it doesn't exist in the graph already.
1172 Return false if the edge already existed, true otherwise. */
1174 static bool
1175 add_graph_edge (constraint_graph_t graph, unsigned int to,
1176 unsigned int from)
1178 if (to == from)
1180 return false;
1182 else
1184 bool r = false;
1186 if (!graph->succs[from])
1187 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1188 if (bitmap_set_bit (graph->succs[from], to))
1190 r = true;
1191 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1192 stats.num_edges++;
1194 return r;
1199 /* Initialize the constraint graph structure to contain SIZE nodes. */
1201 static void
1202 init_graph (unsigned int size)
1204 unsigned int j;
1206 graph = XCNEW (struct constraint_graph);
1207 graph->size = size;
1208 graph->succs = XCNEWVEC (bitmap, graph->size);
1209 graph->indirect_cycles = XNEWVEC (int, graph->size);
1210 graph->rep = XNEWVEC (unsigned int, graph->size);
1211 /* ??? Macros do not support template types with multiple arguments,
1212 so we use a typedef to work around it. */
1213 typedef vec<constraint_t> vec_constraint_t_heap;
1214 graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1215 graph->pe = XCNEWVEC (unsigned int, graph->size);
1216 graph->pe_rep = XNEWVEC (int, graph->size);
1218 for (j = 0; j < graph->size; j++)
1220 graph->rep[j] = j;
1221 graph->pe_rep[j] = -1;
1222 graph->indirect_cycles[j] = -1;
/* Build the constraint graph, adding only predecessor edges right now.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  /* Allocate the per-node arrays used by offline variable
     substitution.  */
  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  /* All non-special variables start out as direct nodes; constraints
     below may demote them.  */
  for (j = 1; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  /* Translate each constraint into predecessor (and implicit
     predecessor) edges.  */
  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    /* An offsetted or non-scalar load makes the destination's
	       points-to set unpredictable offline.  */
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y.  */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes.  */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
	  v = get_varinfo (rhsvar);
	  if (!v->is_full_var)
	    {
	      /* Taking the address of one field exposes every field of
		 the underlying variable; demote them all.  */
	      v = get_varinfo (v->head);
	      do
		{
		  bitmap_clear_bit (graph->direct_nodes, v->id);
		  v = vi_next (v);
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y.  */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Offsetted copies make the affected side's solution depend
	     on field layout, so it cannot remain a direct node.  */
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}
1329 /* Build the constraint graph, adding successor edges. */
1331 static void
1332 build_succ_graph (void)
1334 unsigned i, t;
1335 constraint_t c;
1337 FOR_EACH_VEC_ELT (constraints, i, c)
1339 struct constraint_expr lhs;
1340 struct constraint_expr rhs;
1341 unsigned int lhsvar;
1342 unsigned int rhsvar;
1344 if (!c)
1345 continue;
1347 lhs = c->lhs;
1348 rhs = c->rhs;
1349 lhsvar = find (lhs.var);
1350 rhsvar = find (rhs.var);
1352 if (lhs.type == DEREF)
1354 if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
1355 add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
1357 else if (rhs.type == DEREF)
1359 if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
1360 add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
1362 else if (rhs.type == ADDRESSOF)
1364 /* x = &y */
1365 gcc_checking_assert (find (rhs.var) == rhs.var);
1366 bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
1368 else if (lhsvar > anything_id
1369 && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
1371 add_graph_edge (graph, lhsvar, rhsvar);
1375 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1376 receive pointers. */
1377 t = find (storedanything_id);
1378 for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
1380 if (!bitmap_bit_p (graph->direct_nodes, i)
1381 && get_varinfo (i)->may_have_pointers)
1382 add_graph_edge (graph, find (i), t);
1385 /* Everything stored to ANYTHING also potentially escapes. */
1386 add_graph_edge (graph, find (escaped_id), t);
/* Changed variables on the last iteration.  */
static bitmap changed;

/* Strongly Connected Component visitation info.  */

struct scc_info
{
  scc_info (size_t size);
  ~scc_info ();

  /* Nodes already reached by the DFS.  */
  auto_sbitmap visited;
  /* Nodes whose SCC has been completely processed.  */
  auto_sbitmap deleted;
  /* Per-node DFS discovery numbers.  */
  unsigned int *dfs;
  /* Maps a node to the representative of the component it was
     collapsed into (identity until collapsing happens).  */
  unsigned int *node_mapping;
  /* Next DFS number to hand out.  */
  int current_index;
  /* Stack of non-root nodes, per Nuutila's variant of Tarjan's SCC
     algorithm.  */
  auto_vec<unsigned> scc_stack;
};
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   connected components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      /* Successors beyond the REF-node range are not graph nodes;
	 the bitmap is sorted, so we can stop at the first one.  */
      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	scc_visit (graph, si, w);

      /* Propagate the lowest reachable DFS number upward; W may have
	 been unified during the recursion, hence the re-find.  */
      unsigned int t = find (w);
      gcc_checking_assert (find (n) == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      if (si->scc_stack.length () > 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  /* N is the root of a non-trivial SCC; pop its members off
	     the stack and collapse them.  */
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  while (si->scc_stack.length () != 0
		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
	    {
	      unsigned int w = si->scc_stack.pop ();

	      bitmap_set_bit (scc, w);
	    }

	  /* Collapse onto the member with the lowest id, which must be
	     a non-REF node.  */
	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  /* REF nodes in the cycle are recorded as indirect
		     cycles rather than unified structurally.  */
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    /* N is not a root; keep it on the stack for its root to collect.  */
    si->scc_stack.safe_push (n);
}
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  gcc_checking_assert (to != from && find (to) == to);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  /* Move edges and complex constraints from FROM to TO; if TO gained
     constraints it needs to be reconsidered by the solver.  */
  merge_graph_nodes (graph, to, from);
  if (merge_node_constraints (graph, to, from))
    {
      if (update_changed)
	bitmap_set_bit (changed, to);
    }

  /* Mark TO as changed if FROM was changed.  If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_clear_bit (changed, from))
    bitmap_set_bit (changed, to);
  varinfo_t fromvi = get_varinfo (from);
  if (fromvi->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      varinfo_t tovi = get_varinfo (to);
      if (bitmap_ior_into (tovi->solution, fromvi->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      /* FROM's solutions are dead once merged.  */
      BITMAP_FREE (fromvi->solution);
      if (fromvi->oldsolution)
	BITMAP_FREE (fromvi->oldsolution);

      /* Once the solver has iterated, TO's old solution no longer
	 reflects its (merged) current solution, so drop it.  */
      if (stats.iterations > 0
	  && tovi->oldsolution)
	BITMAP_FREE (tovi->oldsolution);
    }
  /* A self edge on TO is useless for propagation; remove it.  */
  if (graph->succs[to])
    bitmap_clear_bit (graph->succs[to], to);
}
/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse*.  */
  vec<unsigned> topo_order;
};
1565 /* Initialize and return a topological info structure. */
1567 static struct topo_info *
1568 init_topo_info (void)
1570 size_t size = graph->size;
1571 struct topo_info *ti = XNEW (struct topo_info);
1572 ti->visited = sbitmap_alloc (size);
1573 bitmap_clear (ti->visited);
1574 ti->topo_order.create (1);
1575 return ti;
1579 /* Free the topological sort info pointed to by TI. */
1581 static void
1582 free_topo_info (struct topo_info *ti)
1584 sbitmap_free (ti->visited);
1585 ti->topo_order.release ();
1586 free (ti);
1589 /* Visit the graph in topological order, and store the order in the
1590 topo_info structure. */
1592 static void
1593 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1594 unsigned int n)
1596 bitmap_iterator bi;
1597 unsigned int j;
1599 bitmap_set_bit (ti->visited, n);
1601 if (graph->succs[n])
1602 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1604 if (!bitmap_bit_p (ti->visited, j))
1605 topo_visit (graph, ti, j);
1608 ti->topo_order.safe_push (n);
/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta, bitmap *expanded_delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned HOST_WIDE_INT size = v->size;
      unsigned int t;

      if (v->is_full_var)
	;
      else if (roffset != 0)
	{
	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by roffset.  */
      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      bitmap_set_bit (changed, lhs);
    }
}
/* Process a constraint C that represents *(x + off) = y using DELTA
   as the starting solution for x.  */

static void
do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    bitmap_set_bit (changed, t);
	}
      return;
    }

  /* If we do not know at which offset the lhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;
      unsigned HOST_WIDE_INT size = v->size;

      if (v->is_full_var)
	;
      else if (loff != 0)
	{
	  /* A store before the start of the variable is treated as a
	     store to its first field.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by loff.  */
      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
		bitmap_set_bit (changed, t);
	    }

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }
}
1801 /* Handle a non-simple (simple meaning requires no iteration),
1802 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1804 static void
1805 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
1806 bitmap *expanded_delta)
1808 if (c->lhs.type == DEREF)
1810 if (c->rhs.type == ADDRESSOF)
1812 gcc_unreachable ();
1814 else
1816 /* *x = y */
1817 do_ds_constraint (c, delta, expanded_delta);
1820 else if (c->rhs.type == DEREF)
1822 /* x = *y */
1823 if (!(get_varinfo (c->lhs.var)->is_special_var))
1824 do_sd_constraint (graph, c, delta, expanded_delta);
1826 else
1828 bitmap tmp;
1829 bool flag = false;
1831 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
1832 && c->rhs.offset != 0 && c->lhs.offset == 0);
1833 tmp = get_varinfo (c->lhs.var)->solution;
1835 flag = set_union_with_increment (tmp, delta, c->rhs.offset,
1836 expanded_delta);
1838 if (flag)
1839 bitmap_set_bit (changed, c->lhs.var);
1843 /* Initialize and return a new SCC info structure. */
1845 scc_info::scc_info (size_t size) :
1846 visited (size), deleted (size), current_index (0), scc_stack (1)
1848 bitmap_clear (visited);
1849 bitmap_clear (deleted);
1850 node_mapping = XNEWVEC (unsigned int, size);
1851 dfs = XCNEWVEC (unsigned int, size);
1853 for (size_t i = 0; i < size; i++)
1854 node_mapping[i] = i;
/* Free an SCC info structure pointed to by SI */

scc_info::~scc_info ()
{
  /* The auto_sbitmap / auto_vec members release themselves; only the
     plain arrays need explicit freeing.  */
  free (node_mapping);
  free (dfs);
}
1866 /* Find indirect cycles in GRAPH that occur, using strongly connected
1867 components, and note them in the indirect cycles map.
1869 This technique comes from Ben Hardekopf and Calvin Lin,
1870 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1871 Lines of Code", submitted to PLDI 2007. */
1873 static void
1874 find_indirect_cycles (constraint_graph_t graph)
1876 unsigned int i;
1877 unsigned int size = graph->size;
1878 scc_info si (size);
1880 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1881 if (!bitmap_bit_p (si.visited, i) && find (i) == i)
1882 scc_visit (graph, &si, i);
1885 /* Compute a topological ordering for GRAPH, and store the result in the
1886 topo_info structure TI. */
1888 static void
1889 compute_topo_order (constraint_graph_t graph,
1890 struct topo_info *ti)
1892 unsigned int i;
1893 unsigned int size = graph->size;
1895 for (i = 0; i != size; ++i)
1896 if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
1897 topo_visit (graph, ti, i);
/* Structure used for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
{
  /* Cached hash of LABELS, used for hash-table lookups.  */
  hashval_t hashcode;
  /* The equivalence class number assigned to LABELS; 0 means not yet
     assigned.  */
  unsigned int equivalence_class;
  /* The set of labels this entry represents.  */
  bitmap labels;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;
/* Equiv_class_label hashtable helpers.  */

struct equiv_class_hasher : free_ptr_hash <equiv_class_label>
{
  static inline hashval_t hash (const equiv_class_label *);
  static inline bool equal (const equiv_class_label *,
			    const equiv_class_label *);
};
/* Hash function for a equiv_class_label_t */

inline hashval_t
equiv_class_hasher::hash (const equiv_class_label *ecl)
{
  /* The hash is computed once when the entry is created.  */
  return ecl->hashcode;
}
1928 /* Equality function for two equiv_class_label_t's. */
1930 inline bool
1931 equiv_class_hasher::equal (const equiv_class_label *eql1,
1932 const equiv_class_label *eql2)
1934 return (eql1->hashcode == eql2->hashcode
1935 && bitmap_equal_p (eql1->labels, eql2->labels));
/* A hashtable for mapping a bitmap of labels->pointer equivalence
   classes.  */
static hash_table<equiv_class_hasher> *pointer_equiv_class_table;

/* A hashtable for mapping a bitmap of labels->location equivalence
   classes.  */
static hash_table<equiv_class_hasher> *location_equiv_class_table;
1946 /* Lookup a equivalence class in TABLE by the bitmap of LABELS with
1947 hash HAS it contains. Sets *REF_LABELS to the bitmap LABELS
1948 is equivalent to. */
1950 static equiv_class_label *
1951 equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
1952 bitmap labels)
1954 equiv_class_label **slot;
1955 equiv_class_label ecl;
1957 ecl.labels = labels;
1958 ecl.hashcode = bitmap_hash (labels);
1959 slot = table->find_slot (&ecl, INSERT);
1960 if (!*slot)
1962 *slot = XNEW (struct equiv_class_label);
1963 (*slot)->labels = labels;
1964 (*slot)->hashcode = ecl.hashcode;
1965 (*slot)->equivalence_class = 0;
1968 return *slot;
1971 /* Perform offline variable substitution.
1973 This is a worst case quadratic time way of identifying variables
1974 that must have equivalent points-to sets, including those caused by
1975 static cycles, and single entry subgraphs, in the constraint graph.
1977 The technique is described in "Exploiting Pointer and Location
1978 Equivalence to Optimize Pointer Analysis. In the 14th International
1979 Static Analysis Symposium (SAS), August 2007." It is known as the
1980 "HU" algorithm, and is equivalent to value numbering the collapsed
1981 constraint graph including evaluating unions.
1983 The general method of finding equivalence classes is as follows:
1984 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1985 Initialize all non-REF nodes to be direct nodes.
1986 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1987 variable}
1988 For each constraint containing the dereference, we also do the same
1989 thing.
1991 We then compute SCC's in the graph and unify nodes in the same SCC,
1992 including pts sets.
1994 For each non-collapsed node x:
1995 Visit all unvisited explicit incoming edges.
1996 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
1997 where y->x.
1998 Lookup the equivalence class for pts(x).
1999 If we found one, equivalence_class(x) = found class.
2000 Otherwise, equivalence_class(x) = new class, and new_class is
2001 added to the lookup table.
2003 All direct nodes with the same equivalence class can be replaced
2004 with a single representative node.
2005 All unlabeled nodes (label == 0) are not pointers and all edges
2006 involving them can be eliminated.
2007 We perform these optimizations during rewrite_constraints
2009 In addition to pointer equivalence class finding, we also perform
2010 location equivalence class finding. This is the set of variables
2011 that always appear together in points-to sets. We use this to
2012 compress the size of the points-to sets. */
/* Current maximum pointer equivalence class id; the next class created
   gets this value.  */
static int pointer_equiv_class;

/* Current maximum location equivalence class id.  */
static int location_equiv_class;
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  Works on the predecessor
   (and implicit predecessor) edges, collapsing each SCC onto its
   root via SI->node_mapping.  */

static void
condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  gcc_checking_assert (si->node_mapping[n] == n);
  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      /* W may have been collapsed during the recursion; re-map and
	 propagate the lowest reachable DFS number.  */
      unsigned int t = si->node_mapping[w];
      gcc_checking_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      unsigned int t = si->node_mapping[w];
      gcc_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      /* N is an SCC root; collapse all stacked members with DFS
	 numbers inside this component onto N.  */
      while (si->scc_stack.length () != 0
	     && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  unsigned int w = si->scc_stack.pop ();
	  si->node_mapping[w] = n;

	  /* An indirect member makes the whole component indirect.  */
	  if (!bitmap_bit_p (graph->direct_nodes, w))
	    bitmap_clear_bit (graph->direct_nodes, n);

	  /* Unify our nodes.  */
	  if (graph->preds[w])
	    {
	      if (!graph->preds[n])
		graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->preds[n], graph->preds[w]);
	    }
	  if (graph->implicit_preds[w])
	    {
	      if (!graph->implicit_preds[n])
		graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->implicit_preds[n],
			       graph->implicit_preds[w]);
	    }
	  if (graph->points_to[w])
	    {
	      if (!graph->points_to[n])
		graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->points_to[n],
			       graph->points_to[w]);
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    /* Not a root; leave N for its root to collect.  */
    si->scc_stack.safe_push (n);
}
/* Label pointer equivalences.

   This performs a value numbering of the constraint graph to
   discover which variables will always have the same points-to sets
   under the current set of constraints.

   The way it value numbers is to store the set of points-to bits
   generated by the constraints and graph edges.  This is just used as a
   hash and equality comparison.  The *actual set of points-to bits* is
   completely irrelevant, in that we don't care about being able to
   extract them later.

   The equality values (currently bitmaps) just have to satisfy a few
   constraints, the main ones being:
   1. The combining operation must be order independent.
   2. The end result of a given set of operations must be unique iff the
      combination of input values is unique
   3. Hashable.  */

static void
label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i, first_pred;
  bitmap_iterator bi;

  bitmap_set_bit (si->visited, n);

  /* Label and union our incoming edges's points to sets.  */
  first_pred = -1U;
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      if (!bitmap_bit_p (si->visited, w))
	label_visit (graph, si, w);

      /* Skip unused edges  */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	{
	  if (!graph->points_to[n])
	    {
	      /* Defer allocating N's set until a second non-empty
		 predecessor shows up; a single predecessor's set can
		 simply be shared (see below).  */
	      if (first_pred == -1U)
		first_pred = w;
	      else
		{
		  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
		  bitmap_ior (graph->points_to[n],
			      graph->points_to[first_pred],
			      graph->points_to[w]);
		}
	    }
	  else
	    bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
	}
    }

  /* Indirect nodes get fresh variables and a new pointer equiv class.  */
  if (!bitmap_bit_p (graph->direct_nodes, n))
    {
      if (!graph->points_to[n])
	{
	  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	  if (first_pred != -1U)
	    bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
	}
      /* The fresh variable FIRST_REF_NODE + n makes this node's label
	 unique.  */
      bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
      graph->pointer_label[n] = pointer_equiv_class++;
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      ecl->equivalence_class = graph->pointer_label[n];
      return;
    }

  /* If there was only a single non-empty predecessor the pointer equiv
     class is the same.  */
  if (!graph->points_to[n])
    {
      if (first_pred != -1U)
	{
	  graph->pointer_label[n] = graph->pointer_label[first_pred];
	  graph->points_to[n] = graph->points_to[first_pred];
	}
      return;
    }

  if (!bitmap_empty_p (graph->points_to[n]))
    {
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = pointer_equiv_class++;
      else
	{
	  /* Share the canonical bitmap of an existing class.  */
	  BITMAP_FREE (graph->points_to[n]);
	  graph->points_to[n] = ecl->labels;
	}
      graph->pointer_label[n] = ecl->equivalence_class;
    }
}
/* Print the pred graph in dot format, using SI's node mapping so that
   collapsed nodes appear once.  */

static void
dump_pred_graph (struct scc_info *si, FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      /* Skip nodes collapsed into another representative.  */
      if (si->node_mapping[i] != i)
	continue;
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->points_to[i]
	  && !bitmap_empty_p (graph->points_to[i]))
	{
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
	  else
	    fprintf (file, "[label=\"*%s = {",
		     get_varinfo (i - FIRST_REF_NODE)->name);
	  unsigned j;
	  bitmap_iterator bi;
	  EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
	    fprintf (file, " %d", j);
	  fprintf (file, " }\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (si->node_mapping[i] != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
	{
	  unsigned from = si->node_mapping[j];
	  if (from < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (from)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
2288 /* Perform offline variable substitution, discovering equivalence
2289 classes, and eliminating non-pointer variables. */
2291 static struct scc_info *
2292 perform_var_substitution (constraint_graph_t graph)
2294 unsigned int i;
2295 unsigned int size = graph->size;
2296 scc_info *si = new scc_info (size);
/* Equivalence-class numbering starts at 1; label 0 is reserved to
   mean "not a pointer" (see the elimination loop at the end).  */
2298 bitmap_obstack_initialize (&iteration_obstack);
2299 pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
2300 location_equiv_class_table
2301 = new hash_table<equiv_class_hasher> (511);
2302 pointer_equiv_class = 1;
2303 location_equiv_class = 1;
2305 /* Condense the nodes, which means to find SCC's, count incoming
2306 predecessors, and unite nodes in SCC's. */
2307 for (i = 1; i < FIRST_REF_NODE; i++)
2308 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2309 condense_visit (graph, si, si->node_mapping[i]);
2311 if (dump_file && (dump_flags & TDF_GRAPH))
2313 fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
2314 "in dot format:\n");
2315 dump_pred_graph (si, dump_file);
2316 fprintf (dump_file, "\n\n");
/* The visited bitmap is reused for the second DFS pass.  */
2319 bitmap_clear (si->visited);
2320 /* Actually the label the nodes for pointer equivalences */
2321 for (i = 1; i < FIRST_REF_NODE; i++)
2322 if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
2323 label_visit (graph, si, si->node_mapping[i]);
2325 /* Calculate location equivalence labels. */
2326 for (i = 1; i < FIRST_REF_NODE; i++)
2328 bitmap pointed_by;
2329 bitmap_iterator bi;
2330 unsigned int j;
2332 if (!graph->pointed_by[i])
2333 continue;
2334 pointed_by = BITMAP_ALLOC (&iteration_obstack);
2336 /* Translate the pointed-by mapping for pointer equivalence
2337 labels. */
2338 EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
2340 bitmap_set_bit (pointed_by,
2341 graph->pointer_label[si->node_mapping[j]]);
2343 /* The original pointed_by is now dead. */
2344 BITMAP_FREE (graph->pointed_by[i]);
2346 /* Look up the location equivalence label if one exists, or make
2347 one otherwise. */
2348 equiv_class_label_t ecl;
2349 ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
2350 if (ecl->equivalence_class == 0)
2351 ecl->equivalence_class = location_equiv_class++;
2352 else
/* An existing class was found; POINTED_BY duplicates the bitmap
   already stored in the table, so release it.  */
2354 if (dump_file && (dump_flags & TDF_DETAILS))
2355 fprintf (dump_file, "Found location equivalence for node %s\n",
2356 get_varinfo (i)->name);
2357 BITMAP_FREE (pointed_by);
2359 graph->loc_label[i] = ecl->equivalence_class;
2363 if (dump_file && (dump_flags & TDF_DETAILS))
2364 for (i = 1; i < FIRST_REF_NODE; i++)
2366 unsigned j = si->node_mapping[i];
2367 if (j != i)
2369 fprintf (dump_file, "%s node id %d ",
2370 bitmap_bit_p (graph->direct_nodes, i)
2371 ? "Direct" : "Indirect", i);
2372 if (i < FIRST_REF_NODE)
2373 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2374 else
2375 fprintf (dump_file, "\"*%s\"",
2376 get_varinfo (i - FIRST_REF_NODE)->name);
2377 fprintf (dump_file, " mapped to SCC leader node id %d ", j);
2378 if (j < FIRST_REF_NODE)
2379 fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
2380 else
2381 fprintf (dump_file, "\"*%s\"\n",
2382 get_varinfo (j - FIRST_REF_NODE)->name);
2384 else
2386 fprintf (dump_file,
2387 "Equivalence classes for %s node id %d ",
2388 bitmap_bit_p (graph->direct_nodes, i)
2389 ? "direct" : "indirect", i);
2390 if (i < FIRST_REF_NODE)
2391 fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
2392 else
2393 fprintf (dump_file, "\"*%s\"",
2394 get_varinfo (i - FIRST_REF_NODE)->name);
2395 fprintf (dump_file,
2396 ": pointer %d, location %d\n",
2397 graph->pointer_label[i], graph->loc_label[i]);
2401 /* Quickly eliminate our non-pointer variables. */
/* A pointer label of 0 means label_visit proved the node can never
   point to anything, so its edges can be dropped entirely.  */
2403 for (i = 1; i < FIRST_REF_NODE; i++)
2405 unsigned int node = si->node_mapping[i];
2407 if (graph->pointer_label[node] == 0)
2409 if (dump_file && (dump_flags & TDF_DETAILS))
2410 fprintf (dump_file,
2411 "%s is a non-pointer variable, eliminating edges.\n",
2412 get_varinfo (node)->name);
2413 stats.nonpointer_vars++;
2414 clear_edges_for_node (graph, node);
2418 return si;
2421 /* Free information that was only necessary for variable
2422 substitution. */
2424 static void
2425 free_var_substitution_info (struct scc_info *si)
2427 delete si;
/* The per-node label/equivalence arrays are only used during offline
   substitution; the main solver does not read them.  */
2428 free (graph->pointer_label);
2429 free (graph->loc_label);
2430 free (graph->pointed_by);
2431 free (graph->points_to);
2432 free (graph->eq_rep);
2433 sbitmap_free (graph->direct_nodes);
2434 delete pointer_equiv_class_table;
2435 pointer_equiv_class_table = NULL;
2436 delete location_equiv_class_table;
2437 location_equiv_class_table = NULL;
/* Releasing the obstack frees all bitmaps allocated on it in bulk.  */
2438 bitmap_obstack_release (&iteration_obstack);
2441 /* Return an existing node that is equivalent to NODE, which has
2442 equivalence class LABEL, if one exists. Return NODE otherwise. */
2444 static unsigned int
2445 find_equivalent_node (constraint_graph_t graph,
2446 unsigned int node, unsigned int label)
2448 /* If the address version of this variable is unused, we can
2449 substitute it for anything else with the same label.
2450 Otherwise, we know the pointers are equivalent, but not the
2451 locations, and we can unite them later. */
2453 if (!bitmap_bit_p (graph->address_taken, node))
2455 gcc_checking_assert (label < graph->size);
/* eq_rep[LABEL] caches the first non-address-taken node seen with
   LABEL; all later ones are unified into it immediately.  */
2457 if (graph->eq_rep[label] != -1)
2459 /* Unify the two variables since we know they are equivalent. */
2460 if (unite (graph->eq_rep[label], node))
2461 unify_nodes (graph, graph->eq_rep[label], node, false);
2462 return graph->eq_rep[label];
2464 else
2466 graph->eq_rep[label] = node;
2467 graph->pe_rep[label] = node;
2470 else
2472 gcc_checking_assert (label < graph->size);
/* Address-taken nodes only record the pointer-equivalence label;
   unite_pointer_equivalences performs the deferred unification.  */
2473 graph->pe[node] = label;
2474 if (graph->pe_rep[label] == -1)
2475 graph->pe_rep[label] = node;
2478 return node;
2481 /* Unite pointer equivalent but not location equivalent nodes in
2482 GRAPH. This may only be performed once variable substitution is
2483 finished. */
2485 static void
2486 unite_pointer_equivalences (constraint_graph_t graph)
2488 unsigned int i;
2490 /* Go through the pointer equivalences and unite them to their
2491 representative, if they aren't already. */
2492 for (i = 1; i < FIRST_REF_NODE; i++)
2494 unsigned int label = graph->pe[i];
2495 if (label)
2497 int label_rep = graph->pe_rep[label];
2499 if (label_rep == -1)
2500 continue;
2502 label_rep = find (label_rep);
2503 if (label_rep >= 0 && unite (label_rep, find (i)))
2504 unify_nodes (graph, label_rep, i, false);
2509 /* Move complex constraints to the GRAPH nodes they belong to. */
2511 static void
2512 move_complex_constraints (constraint_graph_t graph)
2514 int i;
2515 constraint_t c;
2517 FOR_EACH_VEC_ELT (constraints, i, c)
2519 if (c)
2521 struct constraint_expr lhs = c->lhs;
2522 struct constraint_expr rhs = c->rhs;
2524 if (lhs.type == DEREF)
2526 insert_into_complex (graph, lhs.var, c);
2528 else if (rhs.type == DEREF)
2530 if (!(get_varinfo (lhs.var)->is_special_var))
2531 insert_into_complex (graph, rhs.var, c);
2533 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2534 && (lhs.offset != 0 || rhs.offset != 0))
2536 insert_into_complex (graph, rhs.var, c);
2543 /* Optimize and rewrite complex constraints while performing
2544 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2545 result of perform_variable_substitution. */
2547 static void
2548 rewrite_constraints (constraint_graph_t graph,
2549 struct scc_info *si)
2551 int i;
2552 constraint_t c;
2554 if (flag_checking)
/* At this point no unifications may have happened yet, so every
   node must still be its own union-find representative.  */
2556 for (unsigned int j = 0; j < graph->size; j++)
2557 gcc_assert (find (j) == j);
2560 FOR_EACH_VEC_ELT (constraints, i, c)
2562 struct constraint_expr lhs = c->lhs;
2563 struct constraint_expr rhs = c->rhs;
2564 unsigned int lhsvar = find (lhs.var);
2565 unsigned int rhsvar = find (rhs.var);
2566 unsigned int lhsnode, rhsnode;
2567 unsigned int lhslabel, rhslabel;
2569 lhsnode = si->node_mapping[lhsvar];
2570 rhsnode = si->node_mapping[rhsvar];
2571 lhslabel = graph->pointer_label[lhsnode];
2572 rhslabel = graph->pointer_label[rhsnode];
2574 /* See if it is really a non-pointer variable, and if so, ignore
2575 the constraint. */
/* Label 0 means "never a pointer"; the whole constraint is dropped
   by NULLing its slot so later passes skip it.  */
2576 if (lhslabel == 0)
2578 if (dump_file && (dump_flags & TDF_DETAILS))
2581 fprintf (dump_file, "%s is a non-pointer variable, "
2582 "ignoring constraint:",
2583 get_varinfo (lhs.var)->name);
2584 dump_constraint (dump_file, c);
2585 fprintf (dump_file, "\n");
2587 constraints[i] = NULL;
2588 continue;
2591 if (rhslabel == 0)
2593 if (dump_file && (dump_flags & TDF_DETAILS))
2596 fprintf (dump_file, "%s is a non-pointer variable, "
2597 "ignoring constraint:",
2598 get_varinfo (rhs.var)->name);
2599 dump_constraint (dump_file, c);
2600 fprintf (dump_file, "\n");
2602 constraints[i] = NULL;
2603 continue;
/* Replace both sides with the representative of their pointer
   equivalence class (possibly uniting nodes as a side effect).  */
2606 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2607 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2608 c->lhs.var = lhsvar;
2609 c->rhs.var = rhsvar;
2613 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2614 part of an SCC, false otherwise. */
2616 static bool
2617 eliminate_indirect_cycles (unsigned int node)
2619 if (graph->indirect_cycles[node] != -1
2620 && !bitmap_empty_p (get_varinfo (node)->solution)
2622 unsigned int i;
2623 auto_vec<unsigned> queue;
2624 int queuepos;
2625 unsigned int to = find (graph->indirect_cycles[node]);
2626 bitmap_iterator bi;
2628 /* We can't touch the solution set and call unify_nodes
2629 at the same time, because unify_nodes is going to do
2630 bitmap unions into it. */
/* First pass: only record union-find merges (unite) and queue the
   nodes; the solution-modifying unify_nodes calls are deferred.  */
2632 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2634 if (find (i) == i && i != to)
2636 if (unite (to, i))
2637 queue.safe_push (i)
2641 for (queuepos = 0;
2642 queue.iterate (queuepos, &i);
2643 queuepos++)
2645 unify_nodes (graph, to, i, true);
2647 return true;
2649 return false;
2652 /* Solve the constraint graph GRAPH using our worklist solver.
2653 This is based on the PW* family of solvers from the "Efficient Field
2654 Sensitive Pointer Analysis for C" paper.
2655 It works by iterating over all the graph nodes, processing the complex
2656 constraints and propagating the copy constraints, until everything stops
2657 changed. This corresponds to steps 6-8 in the solving list given above. */
2659 static void
2660 solve_graph (constraint_graph_t graph)
2662 unsigned int size = graph->size;
2663 unsigned int i;
2664 bitmap pts;
2666 changed = BITMAP_ALLOC (NULL);
2668 /* Mark all initial non-collapsed nodes as changed. */
/* A node needs processing only if it is its own representative, has a
   non-empty solution, and has successors or complex constraints to
   propagate into.  */
2669 for (i = 1; i < size; i++)
2671 varinfo_t ivi = get_varinfo (i);
2672 if (find (i) == i && !bitmap_empty_p (ivi->solution)
2673 && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
2674 || graph->complex[i].length () > 0))
2675 bitmap_set_bit (changed, i);
2678 /* Allocate a bitmap to be used to store the changed bits. */
2679 pts = BITMAP_ALLOC (&pta_obstack);
2681 while (!bitmap_empty_p (changed))
2683 unsigned int i;
2684 struct topo_info *ti = init_topo_info ();
2685 stats.iterations++;
2687 bitmap_obstack_initialize (&iteration_obstack);
2689 compute_topo_order (graph, ti);
2691 while (ti->topo_order.length () != 0)
2694 i = ti->topo_order.pop ();
2696 /* If this variable is not a representative, skip it. */
2697 if (find (i) != i)
2698 continue;
2700 /* In certain indirect cycle cases, we may merge this
2701 variable to another. */
2702 if (eliminate_indirect_cycles (i) && find (i) != i)
2703 continue;
2705 /* If the node has changed, we need to process the
2706 complex constraints and outgoing edges again. */
2707 if (bitmap_clear_bit (changed, i))
2709 unsigned int j;
2710 constraint_t c;
2711 bitmap solution;
2712 vec<constraint_t> complex = graph->complex[i];
2713 varinfo_t vi = get_varinfo (i);
2714 bool solution_empty;
2716 /* Compute the changed set of solution bits. If anything
2717 is in the solution just propagate that. */
2718 if (bitmap_bit_p (vi->solution, anything_id))
2720 /* If anything is also in the old solution there is
2721 nothing to do.
2722 ??? But we shouldn't ended up with "changed" set ... */
2723 if (vi->oldsolution
2724 && bitmap_bit_p (vi->oldsolution, anything_id))
2725 continue;
2726 bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
/* PTS becomes the delta: bits in the solution that were not yet in
   oldsolution.  Only the delta is propagated below.  */
2728 else if (vi->oldsolution)
2729 bitmap_and_compl (pts, vi->solution, vi->oldsolution);
2730 else
2731 bitmap_copy (pts, vi->solution);
2733 if (bitmap_empty_p (pts))
2734 continue;
2736 if (vi->oldsolution)
2737 bitmap_ior_into (vi->oldsolution, pts);
2738 else
2740 vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
2741 bitmap_copy (vi->oldsolution, pts);
2744 solution = vi->solution;
2745 solution_empty = bitmap_empty_p (solution);
2747 /* Process the complex constraints */
2748 bitmap expanded_pts = NULL;
2749 FOR_EACH_VEC_ELT (complex, j, c)
2751 /* XXX: This is going to unsort the constraints in
2752 some cases, which will occasionally add duplicate
2753 constraints during unification. This does not
2754 affect correctness. */
2755 c->lhs.var = find (c->lhs.var);
2756 c->rhs.var = find (c->rhs.var);
2758 /* The only complex constraint that can change our
2759 solution to non-empty, given an empty solution,
2760 is a constraint where the lhs side is receiving
2761 some set from elsewhere. */
2762 if (!solution_empty || c->lhs.type != DEREF)
2763 do_complex_constraint (graph, c, pts, &expanded_pts);
2765 BITMAP_FREE (expanded_pts);
2767 solution_empty = bitmap_empty_p (solution);
2769 if (!solution_empty)
2771 bitmap_iterator bi;
2772 unsigned eff_escaped_id = find (escaped_id);
2774 /* Propagate solution to all successors. */
/* TO_REMOVE defers bitmap_clear_bit on the successor set until the
   next iteration step, since clearing bits while iterating over the
   same bitmap is not safe.  */
2775 unsigned to_remove = ~0U;
2776 EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
2777 0, j, bi)
2779 if (to_remove != ~0U)
2781 bitmap_clear_bit (graph->succs[i], to_remove);
2782 to_remove = ~0U;
2784 unsigned int to = find (j);
2785 if (to != j)
2787 /* Update the succ graph, avoiding duplicate
2788 work. */
2789 to_remove = j;
2790 if (! bitmap_set_bit (graph->succs[i], to))
2791 continue;
2792 /* We eventually end up processing 'to' twice
2793 as it is undefined whether bitmap iteration
2794 iterates over bits set during iteration.
2795 Play safe instead of doing tricks. */
2797 /* Don't try to propagate to ourselves. */
2798 if (to == i)
2799 continue;
2801 bitmap tmp = get_varinfo (to)->solution;
2802 bool flag = false;
2804 /* If we propagate from ESCAPED use ESCAPED as
2805 placeholder. */
2806 if (i == eff_escaped_id)
2807 flag = bitmap_set_bit (tmp, escaped_id);
2808 else
2809 flag = bitmap_ior_into (tmp, pts);
2811 if (flag)
2812 bitmap_set_bit (changed, to);
2814 if (to_remove != ~0U)
2815 bitmap_clear_bit (graph->succs[i], to_remove);
2819 free_topo_info (ti);
2820 bitmap_obstack_release (&iteration_obstack);
2823 BITMAP_FREE (pts);
2824 BITMAP_FREE (changed);
2825 bitmap_obstack_release (&oldpta_obstack);
2828 /* Map from trees to variable infos.  Populated lazily: get_vi_for_tree
   creates an entry via create_variable_info_for on first lookup, and
   insert_vi_for_tree asserts that a tree is never registered twice.  */
2829 static hash_map<tree, varinfo_t> *vi_for_tree;
2832 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2834 static void
2835 insert_vi_for_tree (tree t, varinfo_t vi)
2837 gcc_assert (vi);
2838 gcc_assert (!vi_for_tree->put (t, vi));
2841 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2842 exist in the map, return NULL, otherwise, return the varinfo we found. */
2844 static varinfo_t
2845 lookup_vi_for_tree (tree t)
2847 varinfo_t *slot = vi_for_tree->get (t);
2848 if (slot == NULL)
2849 return NULL;
2851 return *slot;
2854 /* Return a printable name for DECL */
2856 static const char *
2857 alias_get_name (tree decl)
/* Building names is only worthwhile when dumping; otherwise return the
   constant placeholder "NULL" without any allocation.  */
2859 const char *res = "NULL";
2860 if (dump_file)
2862 char *temp = NULL;
2863 if (TREE_CODE (decl) == SSA_NAME)
2865 res = get_name (decl);
2866 temp = xasprintf ("%s_%u", res ? res : "", SSA_NAME_VERSION (decl));
2868 else if (HAS_DECL_ASSEMBLER_NAME_P (decl)
2869 && DECL_ASSEMBLER_NAME_SET_P (decl))
2870 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl));
2871 else if (DECL_P (decl))
2873 res = get_name (decl);
2874 if (!res)
2875 temp = xasprintf ("D.%u", DECL_UID (decl));
/* xasprintf results are heap-allocated; copy them into GC memory so
   the returned pointer stays valid, then free the temporary.  */
2878 if (temp)
2880 res = ggc_strdup (temp);
2881 free (temp);
2885 return res;
2888 /* Find the variable id for tree T in the map.
2889 If T doesn't exist in the map, create an entry for it and return it. */
2891 static varinfo_t
2892 get_vi_for_tree (tree t)
2894 varinfo_t *slot = vi_for_tree->get (t);
2895 if (slot == NULL)
2897 unsigned int id = create_variable_info_for (t, alias_get_name (t), false);
2898 return get_varinfo (id);
2901 return *slot;
2904 /* Get a scalar constraint expression for a new temporary variable. */
2906 static struct constraint_expr
2907 new_scalar_tmp_constraint_exp (const char *name, bool add_id)
2909 struct constraint_expr tmp;
2910 varinfo_t vi;
2912 vi = new_var_info (NULL_TREE, name, add_id);
2913 vi->offset = 0;
2914 vi->size = -1;
2915 vi->fullsize = -1;
2916 vi->is_full_var = 1;
2917 vi->is_reg_var = 1;
2919 tmp.var = vi->id;
2920 tmp.type = SCALAR;
2921 tmp.offset = 0;
2923 return tmp;
2926 /* Get a constraint expression vector from an SSA_VAR_P node.
2927 If address_p is true, the result will be taken its address of. */
2929 static void
2930 get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
2932 struct constraint_expr cexpr;
2933 varinfo_t vi;
2935 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2936 gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));
2938 if (TREE_CODE (t) == SSA_NAME
2939 && SSA_NAME_IS_DEFAULT_DEF (t))
2941 /* For parameters, get at the points-to set for the actual parm
2942 decl. */
2943 if (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
2944 || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
2946 get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
2947 return;
2949 /* For undefined SSA names return nothing. */
/* nothing_id expresses "points to nothing at all".  */
2950 else if (!ssa_defined_default_def_p (t))
2952 cexpr.var = nothing_id;
2953 cexpr.type = SCALAR;
2954 cexpr.offset = 0;
2955 results->safe_push (cexpr);
2956 return;
2960 /* For global variables resort to the alias target. */
2961 if (VAR_P (t) && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
2963 varpool_node *node = varpool_node::get (t);
2964 if (node && node->alias && node->analyzed)
2966 node = node->ultimate_alias_target ();
2967 /* Canonicalize the PT uid of all aliases to the ultimate target.
2968 ??? Hopefully the set of aliases can't change in a way that
2969 changes the ultimate alias target. */
2970 gcc_assert ((! DECL_PT_UID_SET_P (node->decl)
2971 || DECL_PT_UID (node->decl) == DECL_UID (node->decl))
2972 && (! DECL_PT_UID_SET_P (t)
2973 || DECL_PT_UID (t) == DECL_UID (node->decl)));
2974 DECL_PT_UID (t) = DECL_UID (node->decl);
2975 t = node->decl;
2978 /* If this is decl may bind to NULL note that. */
/* Emitted in addition to the variable itself below, so &decl may
   also be NULL in the points-to sense.  */
2979 if (address_p
2980 && (! node || ! node->nonzero_address ()))
2982 cexpr.var = nothing_id;
2983 cexpr.type = SCALAR;
2984 cexpr.offset = 0;
2985 results->safe_push (cexpr);
2989 vi = get_vi_for_tree (t);
2990 cexpr.var = vi->id;
2991 cexpr.type = SCALAR;
2992 cexpr.offset = 0;
2994 /* If we are not taking the address of the constraint expr, add all
2995 sub-fiels of the variable as well. */
2996 if (!address_p
2997 && !vi->is_full_var)
2999 for (; vi; vi = vi_next (vi))
3001 cexpr.var = vi->id;
3002 results->safe_push (cexpr);
3004 return;
3007 results->safe_push (cexpr);
3010 /* Process constraint T, performing various simplifications and then
3011 adding it to our list of overall constraints. */
3013 static void
3014 process_constraint (constraint_t t)
3016 struct constraint_expr rhs = t->rhs;
3017 struct constraint_expr lhs = t->lhs;
3019 gcc_assert (rhs.var < varmap.length ());
3020 gcc_assert (lhs.var < varmap.length ());
3022 /* If we didn't get any useful constraint from the lhs we get
3023 &ANYTHING as fallback from get_constraint_for. Deal with
3024 it here by turning it into *ANYTHING. */
3025 if (lhs.type == ADDRESSOF
3026 && lhs.var == anything_id)
3027 lhs.type = DEREF;
3029 /* ADDRESSOF on the lhs is invalid. */
3030 gcc_assert (lhs.type != ADDRESSOF);
3032 /* We shouldn't add constraints from things that cannot have pointers.
3033 It's not completely trivial to avoid in the callers, so do it here. */
3034 if (rhs.type != ADDRESSOF
3035 && !get_varinfo (rhs.var)->may_have_pointers)
3036 return;
3038 /* Likewise adding to the solution of a non-pointer var isn't useful. */
3039 if (!get_varinfo (lhs.var)->may_have_pointers)
3040 return;
3042 /* This can happen in our IR with things like n->a = *p */
/* Both of the splitting cases below recurse with strictly simpler
   constraints (one side replaced by a fresh scalar temporary), so the
   recursion terminates after at most two levels.  */
3043 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
3045 /* Split into tmp = *rhs, *lhs = tmp */
3046 struct constraint_expr tmplhs;
3047 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp", true);
3048 process_constraint (new_constraint (tmplhs, rhs));
3049 process_constraint (new_constraint (lhs, tmplhs));
3051 else if ((rhs.type != SCALAR || rhs.offset != 0) && lhs.type == DEREF)
3053 /* Split into tmp = &rhs, *lhs = tmp */
3054 struct constraint_expr tmplhs;
3055 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp", true);
3056 process_constraint (new_constraint (tmplhs, rhs));
3057 process_constraint (new_constraint (lhs, tmplhs));
3059 else
3061 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
3062 constraints.safe_push (t);
3067 /* Return the position, in bits, of FIELD_DECL from the beginning of its
3068 structure. */
3070 static HOST_WIDE_INT
3071 bitpos_of_field (const tree fdecl)
3073 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3074 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3075 return -1;
3077 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3078 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
3082 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
3083 resulting constraint expressions in *RESULTS. */
3085 static void
3086 get_constraint_for_ptr_offset (tree ptr, tree offset,
3087 vec<ce_s> *results)
3089 struct constraint_expr c;
3090 unsigned int j, n;
3091 HOST_WIDE_INT rhsoffset;
3093 /* If we do not do field-sensitive PTA adding offsets to pointers
3094 does not change the points-to solution. */
3095 if (!use_field_sensitive)
3097 get_constraint_for_rhs (ptr, results);
3098 return;
3101 /* If the offset is not a non-negative integer constant that fits
3102 in a HOST_WIDE_INT, we have to fall back to a conservative
3103 solution which includes all sub-fields of all pointed-to
3104 variables of ptr. */
3105 if (offset == NULL_TREE
3106 || TREE_CODE (offset) != INTEGER_CST)
3107 rhsoffset = UNKNOWN_OFFSET;
3108 else
3110 /* Sign-extend the offset. */
3111 offset_int soffset = offset_int::from (wi::to_wide (offset), SIGNED);
3112 if (!wi::fits_shwi_p (soffset))
3113 rhsoffset = UNKNOWN_OFFSET;
3114 else
3116 /* Make sure the bit-offset also fits. */
/* The division check detects overflow of the byte-to-bit
   multiplication above.  */
3117 HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
3118 rhsoffset = rhsunitoffset * (unsigned HOST_WIDE_INT) BITS_PER_UNIT;
3119 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
3120 rhsoffset = UNKNOWN_OFFSET;
3124 get_constraint_for_rhs (ptr, results);
3125 if (rhsoffset == 0)
3126 return;
3128 /* As we are eventually appending to the solution do not use
3129 vec::iterate here. */
3130 n = results->length ();
3131 for (j = 0; j < n; j++)
3133 varinfo_t curr;
3134 c = (*results)[j];
3135 curr = get_varinfo (c.var);
3137 if (c.type == ADDRESSOF
3138 /* If this varinfo represents a full variable just use it. */
3139 && curr->is_full_var)
3141 else if (c.type == ADDRESSOF
3142 /* If we do not know the offset add all subfields. */
3143 && rhsoffset == UNKNOWN_OFFSET)
3145 varinfo_t temp = get_varinfo (curr->head)
3148 struct constraint_expr c2;
3149 c2.var = temp->id;
3150 c2.type = ADDRESSOF;
3151 c2.offset = 0;
3152 if (c2.var != c.var)
3153 results->safe_push (c2);
3154 temp = vi_next (temp);
3156 while (temp);
3158 else if (c.type == ADDRESSOF)
3160 varinfo_t temp;
3161 unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;
3163 /* If curr->offset + rhsoffset is less than zero adjust it. */
/* The comparison works because a negative sum wraps around in the
   unsigned addition above, making it larger than curr->offset.  */
3164 if (rhsoffset < 0
3165 && curr->offset < offset)
3166 offset = 0;
3168 /* We have to include all fields that overlap the current
3169 field shifted by rhsoffset. And we include at least
3170 the last or the first field of the variable to represent
3171 reachability of off-bound addresses, in particular &object + 1,
3172 conservatively correct. */
3173 temp = first_or_preceding_vi_for_offset (curr, offset);
3174 c.var = temp->id;
3175 c.offset = 0;
3176 temp = vi_next (temp);
3177 while (temp
3178 && temp->offset < offset + curr->size)
3180 struct constraint_expr c2;
3181 c2.var = temp->id;
3182 c2.type = ADDRESSOF;
3183 c2.offset = 0;
3184 results->safe_push (c2);
3185 temp = vi_next (temp);
3188 else if (c.type == SCALAR)
/* For SCALAR constraints the offset is applied lazily by the solver,
   so just record it.  */
3190 gcc_assert (c.offset == 0);
3191 c.offset = rhsoffset;
3193 else
3194 /* We shouldn't get any DEREFs here. */
3195 gcc_unreachable ();
3197 (*results)[j] = c;
3202 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3203 If address_p is true the result will be taken its address of.
3204 If lhs_p is true then the constraint expression is assumed to be used
3205 as the lhs. */
3207 static void
3208 get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3209 bool address_p, bool lhs_p)
3211 tree orig_t = t;
3212 poly_int64 bitsize = -1;
3213 poly_int64 bitmaxsize = -1;
3214 poly_int64 bitpos;
3215 bool reverse;
3216 tree forzero;
3218 /* Some people like to do cute things like take the address of
3219 &0->a.b */
/* Strip all component refs / dereferences; if the base is a literal
   zero the whole access collapses to the INTEGER special var.  */
3220 forzero = t;
3221 while (handled_component_p (forzero)
3222 || INDIRECT_REF_P (forzero)
3223 || TREE_CODE (forzero) == MEM_REF)
3224 forzero = TREE_OPERAND (forzero, 0);
3226 if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3228 struct constraint_expr temp;
3230 temp.offset = 0;
3231 temp.var = integer_id;
3232 temp.type = SCALAR;
3233 results->safe_push (temp);
3234 return;
3237 t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse);
3239 /* We can end up here for component references on a
3240 VIEW_CONVERT_EXPR <>(&foobar) or things like a
3241 BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>. So for
3242 symbolic constants simply give up. */
3243 if (TREE_CODE (t) == ADDR_EXPR)
3245 constraint_expr result;
3246 result.type = SCALAR;
3247 result.var = anything_id;
3248 result.offset = 0;
3249 results->safe_push (result);
3250 return;
3253 /* Pretend to take the address of the base, we'll take care of
3254 adding the required subset of sub-fields below. */
3255 get_constraint_for_1 (t, results, true, lhs_p);
3256 /* Strip off nothing_id. */
3257 if (results->length () == 2)
3259 gcc_assert ((*results)[0].var == nothing_id);
3260 results->unordered_remove (0);
3262 gcc_assert (results->length () == 1);
3263 struct constraint_expr &result = results->last ();
3265 if (result.type == SCALAR
3266 && get_varinfo (result.var)->is_full_var)
3267 /* For single-field vars do not bother about the offset. */
3268 result.offset = 0;
3269 else if (result.type == SCALAR)
3271 /* In languages like C, you can access one past the end of an
3272 array. You aren't allowed to dereference it, so we can
3273 ignore this constraint. When we handle pointer subtraction,
3274 we may have to do something cute here. */
3276 if (maybe_lt (poly_uint64 (bitpos), get_varinfo (result.var)->fullsize)
3277 && maybe_ne (bitmaxsize, 0))
3279 /* It's also not true that the constraint will actually start at the
3280 right offset, it may start in some padding. We only care about
3281 setting the constraint to the first actual field it touches, so
3282 walk to find it. */
3283 struct constraint_expr cexpr = result;
3284 varinfo_t curr;
3285 results->pop ();
3286 cexpr.offset = 0;
/* Push one constraint per subfield overlapping the accessed
   [bitpos, bitpos+bitmaxsize) range; for address-taking one field
   is enough.  */
3287 for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3289 if (ranges_maybe_overlap_p (poly_int64 (curr->offset),
3290 curr->size, bitpos, bitmaxsize))
3292 cexpr.var = curr->id;
3293 results->safe_push (cexpr);
3294 if (address_p)
3295 break;
3298 /* If we are going to take the address of this field then
3299 to be able to compute reachability correctly add at least
3300 the last field of the variable. */
3301 if (address_p && results->length () == 0)
3303 curr = get_varinfo (cexpr.var);
3304 while (curr->next != 0)
3305 curr = vi_next (curr);
3306 cexpr.var = curr->id;
3307 results->safe_push (cexpr);
3309 else if (results->length () == 0)
3310 /* Assert that we found *some* field there. The user couldn't be
3311 accessing *only* padding. */
3312 /* Still the user could access one past the end of an array
3313 embedded in a struct resulting in accessing *only* padding. */
3314 /* Or accessing only padding via type-punning to a type
3315 that has a filed just in padding space. */
3317 cexpr.type = SCALAR;
3318 cexpr.var = anything_id;
3319 cexpr.offset = 0;
3320 results->safe_push (cexpr);
3323 else if (known_eq (bitmaxsize, 0))
3325 if (dump_file && (dump_flags & TDF_DETAILS))
3326 fprintf (dump_file, "Access to zero-sized part of variable, "
3327 "ignoring\n");
3329 else
3330 if (dump_file && (dump_flags & TDF_DETAILS))
3331 fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3333 else if (result.type == DEREF)
3335 /* If we do not know exactly where the access goes say so. Note
3336 that only for non-structure accesses we know that we access
3337 at most one subfiled of any variable. */
3338 HOST_WIDE_INT const_bitpos;
3339 if (!bitpos.is_constant (&const_bitpos)
3340 || const_bitpos == -1
3341 || maybe_ne (bitsize, bitmaxsize)
3342 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3343 || result.offset == UNKNOWN_OFFSET)
3344 result.offset = UNKNOWN_OFFSET;
3345 else
3346 result.offset += const_bitpos;
3348 else if (result.type == ADDRESSOF)
3350 /* We can end up here for component references on constants like
3351 VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i]. */
3352 result.type = SCALAR;
3353 result.var = anything_id;
3354 result.offset = 0;
3356 else
3357 gcc_unreachable ();
/* Dereference the constraint expression CONS, and return the result.
   DEREF (ADDRESSOF) = SCALAR
   DEREF (SCALAR) = DEREF
   DEREF (DEREF) = (temp = DEREF1; result = DEREF (temp))
   This is needed so that we can handle dereferencing DEREF constraints.
   Each element of *CONSTRAINTS is rewritten in place.  */

static void
do_deref (vec<ce_s> *constraints)
{
  struct constraint_expr *c;
  unsigned int i = 0;

  FOR_EACH_VEC_ELT (*constraints, i, c)
    {
      if (c->type == SCALAR)
	c->type = DEREF;
      else if (c->type == ADDRESSOF)
	c->type = SCALAR;
      else if (c->type == DEREF)
	{
	  /* A double dereference cannot be expressed directly; introduce
	     a scalar temporary T with T = *c and dereference T instead.  */
	  struct constraint_expr tmplhs;
	  tmplhs = new_scalar_tmp_constraint_exp ("dereftmp", true);
	  process_constraint (new_constraint (tmplhs, *c));
	  c->var = tmplhs.var;
	}
      else
	gcc_unreachable ();
    }
}
3391 /* Given a tree T, return the constraint expression for taking the
3392 address of it. */
3394 static void
3395 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3397 struct constraint_expr *c;
3398 unsigned int i;
3400 get_constraint_for_1 (t, results, true, true);
3402 FOR_EACH_VEC_ELT (*results, i, c)
3404 if (c->type == DEREF)
3405 c->type = SCALAR;
3406 else
3407 c->type = ADDRESSOF;
/* Given a tree T, return the constraint expression for it.  The
   constraints are appended to *RESULTS.  ADDRESS_P is true when the
   address of T is ultimately taken (see get_constraint_for_address_of);
   LHS_P is true when T is a store destination
   (see get_constraint_for / get_constraint_for_rhs).  */

static void
get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
		      bool lhs_p)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case.  The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.

     Do not do that if -fno-delete-null-pointer-checks though, because
     in that case *NULL does not fail, so it _should_ alias *anything.
     It is not worth adding a new option or renaming the existing one,
     since this case is relatively obscure.  */
  if ((TREE_CODE (t) == INTEGER_CST
       && integer_zerop (t))
      /* The only valid CONSTRUCTORs in gimple with pointer typed
	 elements are zero-initializer.  But in IPA mode we also
	 process global initializers, so verify at least.  */
      || (TREE_CODE (t) == CONSTRUCTOR
	  && CONSTRUCTOR_NELTS (t) == 0))
    {
      if (flag_delete_null_pointer_checks)
	temp.var = nothing_id;
      else
	temp.var = nonlocal_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* String constants are read-only, ideally we'd have a CONST_DECL
     for those.  */
  if (TREE_CODE (t) == STRING_CST)
    {
      temp.var = string_id;
      temp.type = SCALAR;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
	    return;
	  default:;
	  }
	break;
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case MEM_REF:
	    {
	      struct constraint_expr cs;
	      varinfo_t vi, curr;
	      get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
					     TREE_OPERAND (t, 1), results);
	      do_deref (results);

	      /* If we are not taking the address then make sure to process
		 all subvariables we might access.  */
	      if (address_p)
		return;

	      cs = results->last ();
	      if (cs.type == DEREF
		  && type_can_have_subvars (TREE_TYPE (t)))
		{
		  /* For dereferences this means we have to defer it
		     to solving time.  */
		  results->last ().offset = UNKNOWN_OFFSET;
		  return;
		}
	      if (cs.type != SCALAR)
		return;

	      /* Push constraints for all subvariables that the access
		 of size TYPE_SIZE may touch, starting at the one the
		 offset-based lookup produced.  */
	      vi = get_varinfo (cs.var);
	      curr = vi_next (vi);
	      if (!vi->is_full_var
		  && curr)
		{
		  unsigned HOST_WIDE_INT size;
		  if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
		    size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
		  else
		    size = -1;  /* Unknown size: covers all following fields.  */
		  for (; curr; curr = vi_next (curr))
		    {
		      if (curr->offset - vi->offset < size)
			{
			  cs.var = curr->id;
			  results->safe_push (cs);
			}
		      else
			break;
		    }
		}
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	  case IMAGPART_EXPR:
	  case REALPART_EXPR:
	  case BIT_FIELD_REF:
	    get_constraint_for_component_ref (t, results, address_p, lhs_p);
	    return;
	  case VIEW_CONVERT_EXPR:
	    /* A view-convert does not change the underlying storage.  */
	    get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
				  lhs_p);
	    return;
	  /* We are missing handling for TARGET_MEM_REF here.  */
	  default:;
	  }
	break;
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case SSA_NAME:
	    {
	      get_constraint_for_ssa_var (t, results, address_p);
	      return;
	    }
	  case CONSTRUCTOR:
	    {
	      /* Accumulate the constraints of every initializer value.  */
	      unsigned int i;
	      tree val;
	      auto_vec<ce_s> tmp;
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
		{
		  struct constraint_expr *rhsp;
		  unsigned j;
		  get_constraint_for_1 (val, &tmp, address_p, lhs_p);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    results->safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      /* We do not know whether the constructor was complete,
		 so technically we have to add &NOTHING or &ANYTHING
		 like we do for an empty constructor as well.  */
	      return;
	    }
	  default:;
	  }
	break;
      }
    case tcc_declaration:
      {
	get_constraint_for_ssa_var (t, results, address_p);
	return;
      }
    case tcc_constant:
      {
	/* We cannot refer to automatic variables through constants.  */
	temp.type = ADDRESSOF;
	temp.var = nonlocal_id;
	temp.offset = 0;
	results->safe_push (temp);
	return;
      }
    default:;
    }

  /* The default fallback is a constraint from anything.  */
  temp.type = ADDRESSOF;
  temp.var = anything_id;
  temp.offset = 0;
  results->safe_push (temp);
}
3599 /* Given a gimple tree T, return the constraint expression vector for it. */
3601 static void
3602 get_constraint_for (tree t, vec<ce_s> *results)
3604 gcc_assert (results->length () == 0);
3606 get_constraint_for_1 (t, results, false, true);
3609 /* Given a gimple tree T, return the constraint expression vector for it
3610 to be used as the rhs of a constraint. */
3612 static void
3613 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3615 gcc_assert (results->length () == 0);
3617 get_constraint_for_1 (t, results, false, false);
3621 /* Efficiently generates constraints from all entries in *RHSC to all
3622 entries in *LHSC. */
3624 static void
3625 process_all_all_constraints (vec<ce_s> lhsc,
3626 vec<ce_s> rhsc)
3628 struct constraint_expr *lhsp, *rhsp;
3629 unsigned i, j;
3631 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3633 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3634 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3635 process_constraint (new_constraint (*lhsp, *rhsp));
3637 else
3639 struct constraint_expr tmp;
3640 tmp = new_scalar_tmp_constraint_exp ("allalltmp", true);
3641 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3642 process_constraint (new_constraint (tmp, *rhsp));
3643 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3644 process_constraint (new_constraint (*lhsp, tmp));
/* Handle aggregate copies by expanding into copies of the respective
   fields of the structures.  LHSOP and RHSOP are the gimple operands
   of the assignment.  */

static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr *lhsp, *rhsp;
  auto_vec<ce_s> lhsc;
  auto_vec<ce_s> rhsc;
  unsigned j;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for_rhs (rhsop, &rhsc);
  lhsp = &lhsc[0];
  rhsp = &rhsc[0];
  if (lhsp->type == DEREF
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
      || rhsp->type == DEREF)
    {
      /* Either side goes through a pointer: we cannot match up fields,
	 so use an unknown offset and connect everything to everything.  */
      if (lhsp->type == DEREF)
	{
	  gcc_assert (lhsc.length () == 1);
	  lhsp->offset = UNKNOWN_OFFSET;
	}
      if (rhsp->type == DEREF)
	{
	  gcc_assert (rhsc.length () == 1);
	  rhsp->offset = UNKNOWN_OFFSET;
	}
      process_all_all_constraints (lhsc, rhsc);
    }
  else if (lhsp->type == SCALAR
	   && (rhsp->type == SCALAR
	       || rhsp->type == ADDRESSOF))
    {
      HOST_WIDE_INT lhssize, lhsoffset;
      HOST_WIDE_INT rhssize, rhsoffset;
      bool reverse;
      unsigned k = 0;
      if (!get_ref_base_and_extent_hwi (lhsop, &lhsoffset, &lhssize, &reverse)
	  || !get_ref_base_and_extent_hwi (rhsop, &rhsoffset, &rhssize,
					   &reverse))
	{
	  /* Variable extents: fall back to the conservative cross
	     product.  */
	  process_all_all_constraints (lhsc, rhsc);
	  return;
	}
      /* Walk the destination subfields (index J) and source subfields
	 (index K) in lockstep, emitting a copy wherever the accessed
	 ranges overlap.  Note the access offsets are added crosswise,
	 which compares each subvariable's position relative to the
	 accessed sub-region of its side.  */
      for (j = 0; lhsc.iterate (j, &lhsp);)
	{
	  varinfo_t lhsv, rhsv;
	  rhsp = &rhsc[k];
	  lhsv = get_varinfo (lhsp->var);
	  rhsv = get_varinfo (rhsp->var);
	  if (lhsv->may_have_pointers
	      && (lhsv->is_full_var
		  || rhsv->is_full_var
		  || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
				       rhsv->offset + lhsoffset, rhsv->size)))
	    process_constraint (new_constraint (*lhsp, *rhsp));
	  /* Advance the source cursor when the destination field extends
	     past the current source field, otherwise the destination.  */
	  if (!rhsv->is_full_var
	      && (lhsv->is_full_var
		  || (lhsv->offset + rhsoffset + lhsv->size
		      > rhsv->offset + lhsoffset + rhsv->size)))
	    {
	      ++k;
	      if (k >= rhsc.length ())
		break;
	    }
	  else
	    ++j;
	}
    }
  else
    gcc_unreachable ();
}
3723 /* Create constraints ID = { rhsc }. */
3725 static void
3726 make_constraints_to (unsigned id, vec<ce_s> rhsc)
3728 struct constraint_expr *c;
3729 struct constraint_expr includes;
3730 unsigned int j;
3732 includes.var = id;
3733 includes.offset = 0;
3734 includes.type = SCALAR;
3736 FOR_EACH_VEC_ELT (rhsc, j, c)
3737 process_constraint (new_constraint (includes, *c));
/* Create a constraint ID = OP, where OP is a gimple operand whose
   rhs constraints are computed first.  */

static void
make_constraint_to (unsigned id, tree op)
{
  auto_vec<ce_s> rhsc;
  get_constraint_for_rhs (op, &rhsc);
  make_constraints_to (id, rhsc);
}
3750 /* Create a constraint ID = &FROM. */
3752 static void
3753 make_constraint_from (varinfo_t vi, int from)
3755 struct constraint_expr lhs, rhs;
3757 lhs.var = vi->id;
3758 lhs.offset = 0;
3759 lhs.type = SCALAR;
3761 rhs.var = from;
3762 rhs.offset = 0;
3763 rhs.type = ADDRESSOF;
3764 process_constraint (new_constraint (lhs, rhs));
3767 /* Create a constraint ID = FROM. */
3769 static void
3770 make_copy_constraint (varinfo_t vi, int from)
3772 struct constraint_expr lhs, rhs;
3774 lhs.var = vi->id;
3775 lhs.offset = 0;
3776 lhs.type = SCALAR;
3778 rhs.var = from;
3779 rhs.offset = 0;
3780 rhs.type = SCALAR;
3781 process_constraint (new_constraint (lhs, rhs));
/* Make constraints necessary to make OP escape, i.e. add OP's
   points-to solution to ESCAPED.  */

static void
make_escape_constraint (tree op)
{
  make_constraint_to (escaped_id, op);
}
3792 /* Add constraints to that the solution of VI is transitively closed. */
3794 static void
3795 make_transitive_closure_constraints (varinfo_t vi)
3797 struct constraint_expr lhs, rhs;
3799 /* VAR = *(VAR + UNKNOWN); */
3800 lhs.type = SCALAR;
3801 lhs.var = vi->id;
3802 lhs.offset = 0;
3803 rhs.type = DEREF;
3804 rhs.var = vi->id;
3805 rhs.offset = UNKNOWN_OFFSET;
3806 process_constraint (new_constraint (lhs, rhs));
3809 /* Add constraints to that the solution of VI has all subvariables added. */
3811 static void
3812 make_any_offset_constraints (varinfo_t vi)
3814 struct constraint_expr lhs, rhs;
3816 /* VAR = VAR + UNKNOWN; */
3817 lhs.type = SCALAR;
3818 lhs.var = vi->id;
3819 lhs.offset = 0;
3820 rhs.type = SCALAR;
3821 rhs.var = vi->id;
3822 rhs.offset = UNKNOWN_OFFSET;
3823 process_constraint (new_constraint (lhs, rhs));
/* Temporary storage for fake var decls; filled by build_fake_var_decl
   and released wholesale rather than per-decl.  */
struct obstack fake_var_decl_obstack;
/* Build a fake VAR_DECL of TYPE acting as referrer to a DECL_UID.
   The decl is allocated on fake_var_decl_obstack, so it is never
   freed individually.  */

static tree
build_fake_var_decl (tree type)
{
  tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
  memset (decl, 0, sizeof (struct tree_var_decl));
  TREE_SET_CODE (decl, VAR_DECL);
  TREE_TYPE (decl) = type;
  DECL_UID (decl) = allocate_decl_uid ();
  /* Mark the points-to UID as unset (-1); see SET_DECL_PT_UID usage
     elsewhere for how real decls get theirs.  */
  SET_DECL_PT_UID (decl, -1);
  layout_decl (decl, 0);
  return decl;
}
/* Create a new artificial heap variable with NAME.  ADD_ID says whether
   to append an id to NAME for dump purposes (see new_var_info).
   Return the created variable.  */

static varinfo_t
make_heapvar (const char *name, bool add_id)
{
  varinfo_t vi;
  tree heapvar;

  heapvar = build_fake_var_decl (ptr_type_node);
  DECL_EXTERNAL (heapvar) = 1;

  vi = new_var_info (heapvar, name, add_id);
  vi->is_artificial_var = true;
  vi->is_heap_var = true;
  /* Heap storage has no known extent; treat it as a single variable
     of unknown size.  */
  vi->is_unknown_size_var = true;
  vi->offset = 0;
  vi->fullsize = ~0;
  vi->size = ~0;
  vi->is_full_var = true;
  insert_vi_for_tree (heapvar, vi);

  return vi;
}
3869 /* Create a new artificial heap variable with NAME and make a
3870 constraint from it to LHS. Set flags according to a tag used
3871 for tracking restrict pointers. */
3873 static varinfo_t
3874 make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id)
3876 varinfo_t vi = make_heapvar (name, add_id);
3877 vi->is_restrict_var = 1;
3878 vi->is_global_var = 1;
3879 vi->may_have_pointers = 1;
3880 make_constraint_from (lhs, vi->id);
3881 return vi;
/* Create a new artificial heap variable with NAME and make a
   constraint from it to LHS.  Set flags according to a tag used
   for tracking restrict pointers and make the artificial heap
   point to global memory (NONLOCAL).  */

static varinfo_t
make_constraint_from_global_restrict (varinfo_t lhs, const char *name,
				      bool add_id)
{
  varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id);
  make_copy_constraint (vi, nonlocal_id);
  return vi;
}
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  The values below are the
   offsets of those parts within the function info (see
   get_function_part_constraint).  */

enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
/* Get a constraint for the requested part of a function designator FI
   when operating in IPA mode.  PART is one of the fi_* offsets above
   (or fi_parm_base + N for parameter N).  */

static struct constraint_expr
get_function_part_constraint (varinfo_t fi, unsigned part)
{
  struct constraint_expr c;

  gcc_assert (in_ipa_mode);

  if (fi->id == anything_id)
    {
      /* ??? We probably should have a ANYFN special variable.  */
      c.var = anything_id;
      c.offset = 0;
      c.type = SCALAR;
    }
  else if (fi->decl && TREE_CODE (fi->decl) == FUNCTION_DECL)
    {
      /* A known function: look up the subvariable at offset PART.  */
      varinfo_t ai = first_vi_for_offset (fi, part);
      if (ai)
	c.var = ai->id;
      else
	c.var = anything_id;
      c.offset = 0;
      c.type = SCALAR;
    }
  else
    {
      /* An indirect designator: defer the part selection to solving
	 time via a DEREF at offset PART.  */
      c.var = fi->id;
      c.offset = part;
      c.type = DEREF;
    }

  return c;
}
/* For non-IPA mode, generate constraints necessary for a call on the
   RHS of STMT.  The constraints for what the return value may point to
   are pushed to *RESULTS.  */

static void
handle_rhs_call (gcall *stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned i;
  bool returns_uses = false;

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      int flags = gimple_call_arg_flags (stmt, i);

      /* If the argument is not used we can ignore it.  */
      if (flags & EAF_UNUSED)
	continue;

      /* As we compute ESCAPED context-insensitive we do not gain
	 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
	 set.  The argument would still get clobbered through the
	 escape solution.  */
      if ((flags & EAF_NOCLOBBER)
	  && (flags & EAF_NOESCAPE))
	{
	  /* Read-only, non-escaping argument: only the call's uses
	     solution grows.  */
	  varinfo_t uses = get_call_use_vi (stmt);
	  varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
	  tem->is_reg_var = true;
	  make_constraint_to (tem->id, arg);
	  make_any_offset_constraints (tem);
	  if (!(flags & EAF_DIRECT))
	    /* Indirect use: also everything reachable from the argument.  */
	    make_transitive_closure_constraints (tem);
	  make_copy_constraint (uses, tem->id);
	  returns_uses = true;
	}
      else if (flags & EAF_NOESCAPE)
	{
	  /* Non-escaping but possibly clobbered argument: add it to
	     both the uses and clobbers solutions.  */
	  struct constraint_expr lhs, rhs;
	  varinfo_t uses = get_call_use_vi (stmt);
	  varinfo_t clobbers = get_call_clobber_vi (stmt);
	  varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
	  tem->is_reg_var = true;
	  make_constraint_to (tem->id, arg);
	  make_any_offset_constraints (tem);
	  if (!(flags & EAF_DIRECT))
	    make_transitive_closure_constraints (tem);
	  make_copy_constraint (uses, tem->id);
	  make_copy_constraint (clobbers, tem->id);
	  /* Add *tem = nonlocal, do not add *tem = callused as
	     EAF_NOESCAPE parameters do not escape to other parameters
	     and all other uses appear in NONLOCAL as well.  */
	  lhs.type = DEREF;
	  lhs.var = tem->id;
	  lhs.offset = 0;
	  rhs.type = SCALAR;
	  rhs.var = nonlocal_id;
	  rhs.offset = 0;
	  process_constraint (new_constraint (lhs, rhs));
	  returns_uses = true;
	}
      else
	make_escape_constraint (arg);
    }

  /* If we added to the calls uses solution make sure we account for
     pointers to it to be returned.  */
  if (returns_uses)
    {
      rhsc.var = get_call_use_vi (stmt)->id;
      rhsc.offset = UNKNOWN_OFFSET;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }

  /* The static chain escapes as well.  */
  if (gimple_call_chain (stmt))
    make_escape_constraint (gimple_call_chain (stmt));

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      auto_vec<ce_s> tmpc;
      struct constraint_expr lhsc, *c;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      lhsc.var = escaped_id;
      lhsc.offset = 0;
      lhsc.type = SCALAR;
      FOR_EACH_VEC_ELT (tmpc, i, c)
	process_constraint (new_constraint (lhsc, *c));
    }

  /* Regular functions return nonlocal memory.  */
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = SCALAR;
  results->safe_push (rhsc);
}
/* For non-IPA mode, generate constraints necessary for a call
   that returns a pointer and assigns it to LHS.  This simply makes
   the LHS point to global and escaped variables.  FLAGS are the
   call return flags (ERF_*); RHSC are precomputed rhs constraints
   and FNDECL the called function's decl, if known.  */

static void
handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> rhsc,
		 tree fndecl)
{
  auto_vec<ce_s> lhsc;

  get_constraint_for (lhs, &lhsc);
  /* If the store is to a global decl make sure to
     add proper escape constraints.  */
  lhs = get_base_address (lhs);
  if (lhs
      && DECL_P (lhs)
      && is_global_var (lhs))
    {
      struct constraint_expr tmpc;
      tmpc.var = escaped_id;
      tmpc.offset = 0;
      tmpc.type = SCALAR;
      lhsc.safe_push (tmpc);
    }

  /* If the call returns an argument unmodified override the rhs
     constraints.  */
  if (flags & ERF_RETURNS_ARG
      && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
    {
      tree arg;
      rhsc.create (0);
      arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
      get_constraint_for (arg, &rhsc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else if (flags & ERF_NOALIAS)
    {
      /* A malloc-like call: the result points to fresh heap storage.  */
      varinfo_t vi;
      struct constraint_expr tmpc;
      rhsc.create (0);
      vi = make_heapvar ("HEAP", true);
      /* We are marking allocated storage local, we deal with it becoming
	 global by escaping and setting of vars_contains_escaped_heap.  */
      DECL_EXTERNAL (vi->decl) = 0;
      vi->is_global_var = 0;
      /* If this is not a real malloc call assume the memory was
	 initialized and thus may point to global memory.  All
	 builtin functions with the malloc attribute behave in a sane way.  */
      if (!fndecl
	  || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	make_constraint_from (vi, nonlocal_id);
      tmpc.var = vi->id;
      tmpc.offset = 0;
      tmpc.type = ADDRESSOF;
      rhsc.safe_push (tmpc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else
    process_all_all_constraints (lhsc, rhsc);
}
/* For non-IPA mode, generate constraints necessary for a call of a
   const function that returns a pointer in the statement STMT.
   The constraints for the return value are pushed to *RESULTS.  */

static void
handle_const_call (gcall *stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned int k;
  bool need_uses = false;

  /* Treat nested const functions the same as pure functions as far
     as the static chain is concerned.  */
  if (gimple_call_chain (stmt))
    {
      varinfo_t uses = get_call_use_vi (stmt);
      make_constraint_to (uses->id, gimple_call_chain (stmt));
      need_uses = true;
    }

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      varinfo_t uses = get_call_use_vi (stmt);
      auto_vec<ce_s> tmpc;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      make_constraints_to (uses->id, tmpc);
      need_uses = true;
    }

  if (need_uses)
    {
      /* Close the uses solution and make the return value possibly
	 point into it.  */
      varinfo_t uses = get_call_use_vi (stmt);
      make_any_offset_constraints (uses);
      make_transitive_closure_constraints (uses);
      rhsc.var = uses->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }

  /* May return offsetted arguments.  */
  varinfo_t tem = NULL;
  if (gimple_call_num_args (stmt) != 0)
    {
      /* One temporary collects all arguments.  */
      tem = new_var_info (NULL_TREE, "callarg", true);
      tem->is_reg_var = true;
    }
  for (k = 0; k < gimple_call_num_args (stmt); ++k)
    {
      tree arg = gimple_call_arg (stmt, k);
      auto_vec<ce_s> argc;
      get_constraint_for_rhs (arg, &argc);
      make_constraints_to (tem->id, argc);
    }
  if (tem)
    {
      ce_s ce;
      ce.type = SCALAR;
      ce.var = tem->id;
      /* UNKNOWN_OFFSET as the result may point at any offset from an
	 argument.  */
      ce.offset = UNKNOWN_OFFSET;
      results->safe_push (ce);
    }

  /* May return addresses of globals.  */
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = ADDRESSOF;
  results->safe_push (rhsc);
}
/* For non-IPA mode, generate constraints necessary for a call to a
   pure function in statement STMT.  The constraints for the return
   value are pushed to *RESULTS.  */

static void
handle_pure_call (gcall *stmt, vec<ce_s> *results)
{
  struct constraint_expr rhsc;
  unsigned i;
  varinfo_t uses = NULL;

  /* Memory reached from pointer arguments is call-used.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      if (!uses)
	{
	  /* Lazily set up the uses solution on the first argument.  */
	  uses = get_call_use_vi (stmt);
	  make_any_offset_constraints (uses);
	  make_transitive_closure_constraints (uses);
	}
      make_constraint_to (uses->id, arg);
    }

  /* The static chain is used as well.  */
  if (gimple_call_chain (stmt))
    {
      if (!uses)
	{
	  uses = get_call_use_vi (stmt);
	  make_any_offset_constraints (uses);
	  make_transitive_closure_constraints (uses);
	}
      make_constraint_to (uses->id, gimple_call_chain (stmt));
    }

  /* And if we applied NRV the address of the return slot.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      if (!uses)
	{
	  uses = get_call_use_vi (stmt);
	  make_any_offset_constraints (uses);
	  make_transitive_closure_constraints (uses);
	}
      auto_vec<ce_s> tmpc;
      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
      make_constraints_to (uses->id, tmpc);
    }

  /* Pure functions may return call-used and nonlocal memory.  */
  if (uses)
    {
      rhsc.var = uses->id;
      rhsc.offset = 0;
      rhsc.type = SCALAR;
      results->safe_push (rhsc);
    }
  rhsc.var = nonlocal_id;
  rhsc.offset = 0;
  rhsc.type = SCALAR;
  results->safe_push (rhsc);
}
/* Return the varinfo for the callee of CALL.  */

static varinfo_t
get_fi_for_callee (gcall *call)
{
  tree decl, fn = gimple_call_fn (call);

  if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
    fn = OBJ_TYPE_REF_EXPR (fn);

  /* If we can directly resolve the function being called, do so.
     Otherwise, it must be some sort of indirect expression that
     we should still be able to handle.  */
  decl = gimple_call_addr_fndecl (fn);
  if (decl)
    return get_vi_for_tree (decl);

  /* If the function is anything other than a SSA name pointer we have no
     clue and should be getting ANYFN (well, ANYTHING for now).  */
  if (!fn || TREE_CODE (fn) != SSA_NAME)
    return get_varinfo (anything_id);

  /* For a default-def SSA name, use the underlying PARM_DECL or
     RESULT_DECL as the designator instead of the SSA name.  */
  if (SSA_NAME_IS_DEFAULT_DEF (fn)
      && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
    fn = SSA_NAME_VAR (fn);

  return get_vi_for_tree (fn);
}
4277 /* Create constraints for assigning call argument ARG to the incoming parameter
4278 INDEX of function FI. */
4280 static void
4281 find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg)
4283 struct constraint_expr lhs;
4284 lhs = get_function_part_constraint (fi, fi_parm_base + index);
4286 auto_vec<ce_s, 2> rhsc;
4287 get_constraint_for_rhs (arg, &rhsc);
4289 unsigned j;
4290 struct constraint_expr *rhsp;
4291 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4292 process_constraint (new_constraint (lhs, *rhsp));
4295 /* Return true if FNDECL may be part of another lto partition. */
4297 static bool
4298 fndecl_maybe_in_other_partition (tree fndecl)
4300 cgraph_node *fn_node = cgraph_node::get (fndecl);
4301 if (fn_node == NULL)
4302 return true;
4304 return fn_node->in_other_partition;
4307 /* Create constraints for the builtin call T. Return true if the call
4308 was handled, otherwise false. */
4310 static bool
4311 find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
4313 tree fndecl = gimple_call_fndecl (t);
4314 auto_vec<ce_s, 2> lhsc;
4315 auto_vec<ce_s, 4> rhsc;
4316 varinfo_t fi;
4318 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
4319 /* ??? All builtins that are handled here need to be handled
4320 in the alias-oracle query functions explicitly! */
4321 switch (DECL_FUNCTION_CODE (fndecl))
4323 /* All the following functions return a pointer to the same object
4324 as their first argument points to. The functions do not add
4325 to the ESCAPED solution. The functions make the first argument
4326 pointed to memory point to what the second argument pointed to
4327 memory points to. */
4328 case BUILT_IN_STRCPY:
4329 case BUILT_IN_STRNCPY:
4330 case BUILT_IN_BCOPY:
4331 case BUILT_IN_MEMCPY:
4332 case BUILT_IN_MEMMOVE:
4333 case BUILT_IN_MEMPCPY:
4334 case BUILT_IN_STPCPY:
4335 case BUILT_IN_STPNCPY:
4336 case BUILT_IN_STRCAT:
4337 case BUILT_IN_STRNCAT:
4338 case BUILT_IN_STRCPY_CHK:
4339 case BUILT_IN_STRNCPY_CHK:
4340 case BUILT_IN_MEMCPY_CHK:
4341 case BUILT_IN_MEMMOVE_CHK:
4342 case BUILT_IN_MEMPCPY_CHK:
4343 case BUILT_IN_STPCPY_CHK:
4344 case BUILT_IN_STPNCPY_CHK:
4345 case BUILT_IN_STRCAT_CHK:
4346 case BUILT_IN_STRNCAT_CHK:
4347 case BUILT_IN_TM_MEMCPY:
4348 case BUILT_IN_TM_MEMMOVE:
4350 tree res = gimple_call_lhs (t);
4351 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4352 == BUILT_IN_BCOPY ? 1 : 0));
4353 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
4354 == BUILT_IN_BCOPY ? 0 : 1));
4355 if (res != NULL_TREE)
4357 get_constraint_for (res, &lhsc);
4358 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
4359 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
4360 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
4361 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
4362 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
4363 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
4364 get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
4365 else
4366 get_constraint_for (dest, &rhsc);
4367 process_all_all_constraints (lhsc, rhsc);
4368 lhsc.truncate (0);
4369 rhsc.truncate (0);
4371 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4372 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4373 do_deref (&lhsc);
4374 do_deref (&rhsc);
4375 process_all_all_constraints (lhsc, rhsc);
4376 return true;
4378 case BUILT_IN_MEMSET:
4379 case BUILT_IN_MEMSET_CHK:
4380 case BUILT_IN_TM_MEMSET:
4382 tree res = gimple_call_lhs (t);
4383 tree dest = gimple_call_arg (t, 0);
4384 unsigned i;
4385 ce_s *lhsp;
4386 struct constraint_expr ac;
4387 if (res != NULL_TREE)
4389 get_constraint_for (res, &lhsc);
4390 get_constraint_for (dest, &rhsc);
4391 process_all_all_constraints (lhsc, rhsc);
4392 lhsc.truncate (0);
4394 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
4395 do_deref (&lhsc);
4396 if (flag_delete_null_pointer_checks
4397 && integer_zerop (gimple_call_arg (t, 1)))
4399 ac.type = ADDRESSOF;
4400 ac.var = nothing_id;
4402 else
4404 ac.type = SCALAR;
4405 ac.var = integer_id;
4407 ac.offset = 0;
4408 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4409 process_constraint (new_constraint (*lhsp, ac));
4410 return true;
4412 case BUILT_IN_POSIX_MEMALIGN:
4414 tree ptrptr = gimple_call_arg (t, 0);
4415 get_constraint_for (ptrptr, &lhsc);
4416 do_deref (&lhsc);
4417 varinfo_t vi = make_heapvar ("HEAP", true);
4418 /* We are marking allocated storage local, we deal with it becoming
4419 global by escaping and setting of vars_contains_escaped_heap. */
4420 DECL_EXTERNAL (vi->decl) = 0;
4421 vi->is_global_var = 0;
4422 struct constraint_expr tmpc;
4423 tmpc.var = vi->id;
4424 tmpc.offset = 0;
4425 tmpc.type = ADDRESSOF;
4426 rhsc.safe_push (tmpc);
4427 process_all_all_constraints (lhsc, rhsc);
4428 return true;
4430 case BUILT_IN_ASSUME_ALIGNED:
4432 tree res = gimple_call_lhs (t);
4433 tree dest = gimple_call_arg (t, 0);
4434 if (res != NULL_TREE)
4436 get_constraint_for (res, &lhsc);
4437 get_constraint_for (dest, &rhsc);
4438 process_all_all_constraints (lhsc, rhsc);
4440 return true;
4442 /* All the following functions do not return pointers, do not
4443 modify the points-to sets of memory reachable from their
4444 arguments and do not add to the ESCAPED solution. */
4445 case BUILT_IN_SINCOS:
4446 case BUILT_IN_SINCOSF:
4447 case BUILT_IN_SINCOSL:
4448 case BUILT_IN_FREXP:
4449 case BUILT_IN_FREXPF:
4450 case BUILT_IN_FREXPL:
4451 case BUILT_IN_GAMMA_R:
4452 case BUILT_IN_GAMMAF_R:
4453 case BUILT_IN_GAMMAL_R:
4454 case BUILT_IN_LGAMMA_R:
4455 case BUILT_IN_LGAMMAF_R:
4456 case BUILT_IN_LGAMMAL_R:
4457 case BUILT_IN_MODF:
4458 case BUILT_IN_MODFF:
4459 case BUILT_IN_MODFL:
4460 case BUILT_IN_REMQUO:
4461 case BUILT_IN_REMQUOF:
4462 case BUILT_IN_REMQUOL:
4463 case BUILT_IN_FREE:
4464 return true;
4465 case BUILT_IN_STRDUP:
4466 case BUILT_IN_STRNDUP:
4467 case BUILT_IN_REALLOC:
4468 if (gimple_call_lhs (t))
4470 handle_lhs_call (t, gimple_call_lhs (t),
4471 gimple_call_return_flags (t) | ERF_NOALIAS,
4472 vNULL, fndecl);
4473 get_constraint_for_ptr_offset (gimple_call_lhs (t),
4474 NULL_TREE, &lhsc);
4475 get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
4476 NULL_TREE, &rhsc);
4477 do_deref (&lhsc);
4478 do_deref (&rhsc);
4479 process_all_all_constraints (lhsc, rhsc);
4480 lhsc.truncate (0);
4481 rhsc.truncate (0);
4482 /* For realloc the resulting pointer can be equal to the
4483 argument as well. But only doing this wouldn't be
4484 correct because with ptr == 0 realloc behaves like malloc. */
4485 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
4487 get_constraint_for (gimple_call_lhs (t), &lhsc);
4488 get_constraint_for (gimple_call_arg (t, 0), &rhsc);
4489 process_all_all_constraints (lhsc, rhsc);
4491 return true;
4493 break;
4494 /* String / character search functions return a pointer into the
4495 source string or NULL. */
4496 case BUILT_IN_INDEX:
4497 case BUILT_IN_STRCHR:
4498 case BUILT_IN_STRRCHR:
4499 case BUILT_IN_MEMCHR:
4500 case BUILT_IN_STRSTR:
4501 case BUILT_IN_STRPBRK:
4502 if (gimple_call_lhs (t))
4504 tree src = gimple_call_arg (t, 0);
4505 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
4506 constraint_expr nul;
4507 nul.var = nothing_id;
4508 nul.offset = 0;
4509 nul.type = ADDRESSOF;
4510 rhsc.safe_push (nul);
4511 get_constraint_for (gimple_call_lhs (t), &lhsc);
4512 process_all_all_constraints (lhsc, rhsc);
4514 return true;
4515 /* Pure functions that return something not based on any object and
4516 that use the memory pointed to by their arguments (but not
4517 transitively). */
4518 case BUILT_IN_STRCMP:
4519 case BUILT_IN_STRCMP_EQ:
4520 case BUILT_IN_STRNCMP:
4521 case BUILT_IN_STRNCMP_EQ:
4522 case BUILT_IN_STRCASECMP:
4523 case BUILT_IN_STRNCASECMP:
4524 case BUILT_IN_MEMCMP:
4525 case BUILT_IN_BCMP:
4526 case BUILT_IN_STRSPN:
4527 case BUILT_IN_STRCSPN:
4529 varinfo_t uses = get_call_use_vi (t);
4530 make_any_offset_constraints (uses);
4531 make_constraint_to (uses->id, gimple_call_arg (t, 0));
4532 make_constraint_to (uses->id, gimple_call_arg (t, 1));
4533 /* No constraints are necessary for the return value. */
4534 return true;
4536 case BUILT_IN_STRLEN:
4538 varinfo_t uses = get_call_use_vi (t);
4539 make_any_offset_constraints (uses);
4540 make_constraint_to (uses->id, gimple_call_arg (t, 0));
4541 /* No constraints are necessary for the return value. */
4542 return true;
4544 case BUILT_IN_OBJECT_SIZE:
4545 case BUILT_IN_CONSTANT_P:
4547 /* No constraints are necessary for the return value or the
4548 arguments. */
4549 return true;
4551 /* Trampolines are special - they set up passing the static
4552 frame. */
4553 case BUILT_IN_INIT_TRAMPOLINE:
4555 tree tramp = gimple_call_arg (t, 0);
4556 tree nfunc = gimple_call_arg (t, 1);
4557 tree frame = gimple_call_arg (t, 2);
4558 unsigned i;
4559 struct constraint_expr lhs, *rhsp;
4560 if (in_ipa_mode)
4562 varinfo_t nfi = NULL;
4563 gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
4564 nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
4565 if (nfi)
4567 lhs = get_function_part_constraint (nfi, fi_static_chain);
4568 get_constraint_for (frame, &rhsc);
4569 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4570 process_constraint (new_constraint (lhs, *rhsp));
4571 rhsc.truncate (0);
4573 /* Make the frame point to the function for
4574 the trampoline adjustment call. */
4575 get_constraint_for (tramp, &lhsc);
4576 do_deref (&lhsc);
4577 get_constraint_for (nfunc, &rhsc);
4578 process_all_all_constraints (lhsc, rhsc);
4580 return true;
4583 /* Else fallthru to generic handling which will let
4584 the frame escape. */
4585 break;
4587 case BUILT_IN_ADJUST_TRAMPOLINE:
4589 tree tramp = gimple_call_arg (t, 0);
4590 tree res = gimple_call_lhs (t);
4591 if (in_ipa_mode && res)
4593 get_constraint_for (res, &lhsc);
4594 get_constraint_for (tramp, &rhsc);
4595 do_deref (&rhsc);
4596 process_all_all_constraints (lhsc, rhsc);
4598 return true;
4600 CASE_BUILT_IN_TM_STORE (1):
4601 CASE_BUILT_IN_TM_STORE (2):
4602 CASE_BUILT_IN_TM_STORE (4):
4603 CASE_BUILT_IN_TM_STORE (8):
4604 CASE_BUILT_IN_TM_STORE (FLOAT):
4605 CASE_BUILT_IN_TM_STORE (DOUBLE):
4606 CASE_BUILT_IN_TM_STORE (LDOUBLE):
4607 CASE_BUILT_IN_TM_STORE (M64):
4608 CASE_BUILT_IN_TM_STORE (M128):
4609 CASE_BUILT_IN_TM_STORE (M256):
4611 tree addr = gimple_call_arg (t, 0);
4612 tree src = gimple_call_arg (t, 1);
4614 get_constraint_for (addr, &lhsc);
4615 do_deref (&lhsc);
4616 get_constraint_for (src, &rhsc);
4617 process_all_all_constraints (lhsc, rhsc);
4618 return true;
4620 CASE_BUILT_IN_TM_LOAD (1):
4621 CASE_BUILT_IN_TM_LOAD (2):
4622 CASE_BUILT_IN_TM_LOAD (4):
4623 CASE_BUILT_IN_TM_LOAD (8):
4624 CASE_BUILT_IN_TM_LOAD (FLOAT):
4625 CASE_BUILT_IN_TM_LOAD (DOUBLE):
4626 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
4627 CASE_BUILT_IN_TM_LOAD (M64):
4628 CASE_BUILT_IN_TM_LOAD (M128):
4629 CASE_BUILT_IN_TM_LOAD (M256):
4631 tree dest = gimple_call_lhs (t);
4632 tree addr = gimple_call_arg (t, 0);
4634 get_constraint_for (dest, &lhsc);
4635 get_constraint_for (addr, &rhsc);
4636 do_deref (&rhsc);
4637 process_all_all_constraints (lhsc, rhsc);
4638 return true;
4640 /* Variadic argument handling needs to be handled in IPA
4641 mode as well. */
4642 case BUILT_IN_VA_START:
4644 tree valist = gimple_call_arg (t, 0);
4645 struct constraint_expr rhs, *lhsp;
4646 unsigned i;
4647 get_constraint_for_ptr_offset (valist, NULL_TREE, &lhsc);
4648 do_deref (&lhsc);
4649 /* The va_list gets access to pointers in variadic
4650 arguments. Which we know in the case of IPA analysis
4651 and otherwise are just all nonlocal variables. */
4652 if (in_ipa_mode)
4654 fi = lookup_vi_for_tree (fn->decl);
4655 rhs = get_function_part_constraint (fi, ~0);
4656 rhs.type = ADDRESSOF;
4658 else
4660 rhs.var = nonlocal_id;
4661 rhs.type = ADDRESSOF;
4662 rhs.offset = 0;
4664 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
4665 process_constraint (new_constraint (*lhsp, rhs));
4666 /* va_list is clobbered. */
4667 make_constraint_to (get_call_clobber_vi (t)->id, valist);
4668 return true;
4670 /* va_end doesn't have any effect that matters. */
4671 case BUILT_IN_VA_END:
4672 return true;
4673 /* Alternate return. Simply give up for now. */
4674 case BUILT_IN_RETURN:
4676 fi = NULL;
4677 if (!in_ipa_mode
4678 || !(fi = get_vi_for_tree (fn->decl)))
4679 make_constraint_from (get_varinfo (escaped_id), anything_id);
4680 else if (in_ipa_mode
4681 && fi != NULL)
4683 struct constraint_expr lhs, rhs;
4684 lhs = get_function_part_constraint (fi, fi_result);
4685 rhs.var = anything_id;
4686 rhs.offset = 0;
4687 rhs.type = SCALAR;
4688 process_constraint (new_constraint (lhs, rhs));
4690 return true;
4692 case BUILT_IN_GOMP_PARALLEL:
4693 case BUILT_IN_GOACC_PARALLEL:
4695 if (in_ipa_mode)
4697 unsigned int fnpos, argpos;
4698 switch (DECL_FUNCTION_CODE (fndecl))
4700 case BUILT_IN_GOMP_PARALLEL:
4701 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
4702 fnpos = 0;
4703 argpos = 1;
4704 break;
4705 case BUILT_IN_GOACC_PARALLEL:
4706 /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
4707 sizes, kinds, ...). */
4708 fnpos = 1;
4709 argpos = 3;
4710 break;
4711 default:
4712 gcc_unreachable ();
4715 tree fnarg = gimple_call_arg (t, fnpos);
4716 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
4717 tree fndecl = TREE_OPERAND (fnarg, 0);
4718 if (fndecl_maybe_in_other_partition (fndecl))
4719 /* Fallthru to general call handling. */
4720 break;
4722 tree arg = gimple_call_arg (t, argpos);
4724 varinfo_t fi = get_vi_for_tree (fndecl);
4725 find_func_aliases_for_call_arg (fi, 0, arg);
4726 return true;
4728 /* Else fallthru to generic call handling. */
4729 break;
4731 /* printf-style functions may have hooks to set pointers to
4732 point to somewhere into the generated string. Leave them
4733 for a later exercise... */
4734 default:
4735 /* Fallthru to general call handling. */;
4738 return false;
4741 /* Create constraints for the call T.  FN is the function containing
   the call.  Builtins with a dedicated handler are fully modeled by
   find_func_aliases_for_builtin_call; we only fall through to the
   generic handling below when that handler declines (returns false).  */
4743 static void
4744 find_func_aliases_for_call (struct function *fn, gcall *t)
4746 tree fndecl = gimple_call_fndecl (t);
4747 varinfo_t fi;
4749 if (fndecl != NULL_TREE
4750 && fndecl_built_in_p (fndecl)
4751 && find_func_aliases_for_builtin_call (fn, t))
4752 return;
4754 fi = get_fi_for_callee (t);
4755 if (!in_ipa_mode
/* A callee with a decl but no IPA function info is treated exactly
   like the non-IPA case below.  */
4756 || (fi->decl && fndecl && !fi->is_fn_info))
4758 auto_vec<ce_s, 16> rhsc;
4759 int flags = gimple_call_flags (t);
4761 /* Const functions can return their arguments and addresses
4762 of global memory but not of escaped memory. */
4763 if (flags & (ECF_CONST|ECF_NOVOPS))
4765 if (gimple_call_lhs (t))
4766 handle_const_call (t, &rhsc);
4768 /* Pure functions can return addresses in and of memory
4769 reachable from their arguments, but they are not an escape
4770 point for reachable memory of their arguments. */
4771 else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
4772 handle_pure_call (t, &rhsc);
4773 else
4774 handle_rhs_call (t, &rhsc);
4775 if (gimple_call_lhs (t))
4776 handle_lhs_call (t, gimple_call_lhs (t),
4777 gimple_call_return_flags (t), rhsc, fndecl);
4779 else
/* IPA mode with function info available: wire up arguments, the
   returned value and the static chain to the callee's function-info
   parts.  */
4781 auto_vec<ce_s, 2> rhsc;
4782 tree lhsop;
4783 unsigned j;
4785 /* Assign all the passed arguments to the appropriate incoming
4786 parameters of the function. */
4787 for (j = 0; j < gimple_call_num_args (t); j++)
4789 tree arg = gimple_call_arg (t, j);
4790 find_func_aliases_for_call_arg (fi, j, arg);
4793 /* If we are returning a value, assign it to the result. */
4794 lhsop = gimple_call_lhs (t);
4795 if (lhsop)
4797 auto_vec<ce_s, 2> lhsc;
4798 struct constraint_expr rhs;
4799 struct constraint_expr *lhsp;
4800 bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (t));
4802 get_constraint_for (lhsop, &lhsc);
4803 rhs = get_function_part_constraint (fi, fi_result);
4804 if (aggr_p)
/* An aggregate return is passed by reference, so the value stored
   into the LHS is what fi_result points to, not fi_result itself.  */
4806 auto_vec<ce_s, 2> tem;
4807 tem.quick_push (rhs);
4808 do_deref (&tem);
4809 gcc_checking_assert (tem.length () == 1);
4810 rhs = tem[0];
4812 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
4813 process_constraint (new_constraint (*lhsp, rhs));
4815 /* If we pass the result decl by reference, honor that. */
4816 if (aggr_p)
4818 struct constraint_expr lhs;
4819 struct constraint_expr *rhsp;
4821 get_constraint_for_address_of (lhsop, &rhsc);
4822 lhs = get_function_part_constraint (fi, fi_result);
4823 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4824 process_constraint (new_constraint (lhs, *rhsp));
4825 rhsc.truncate (0);
4829 /* If we use a static chain, pass it along. */
4830 if (gimple_call_chain (t))
4832 struct constraint_expr lhs;
4833 struct constraint_expr *rhsp;
4835 get_constraint_for (gimple_call_chain (t), &rhsc);
4836 lhs = get_function_part_constraint (fi, fi_static_chain);
4837 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4838 process_constraint (new_constraint (lhs, *rhsp));
4843 /* Walk statement T setting up aliasing constraints according to the
4844 references found in T. This function is the main part of the
4845 constraint builder.  FN is the function containing T.  (An earlier
   revision took an AI auxiliary-alias-info argument; that parameter
   no longer exists.)  */
4848 static void
4849 find_func_aliases (struct function *fn, gimple *origt)
4851 gimple *t = origt;
4852 auto_vec<ce_s, 16> lhsc;
4853 auto_vec<ce_s, 16> rhsc;
4854 varinfo_t fi;
4856 /* Now build constraints expressions. */
4857 if (gimple_code (t) == GIMPLE_PHI)
4859 /* For a phi node, assign all the arguments to
4860 the result. */
4861 get_constraint_for (gimple_phi_result (t), &lhsc)
4862 for (unsigned i = 0; i < gimple_phi_num_args (t); i++)
4864 get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
4865 process_all_all_constraints (lhsc, rhsc);
4866 rhsc.truncate (0);
4869 /* In IPA mode, we need to generate constraints to pass call
4870 arguments through their calls. There are two cases,
4871 either a GIMPLE_CALL returning a value, or just a plain
4872 GIMPLE_CALL when we are not.
4874 In non-ipa mode, we need to generate constraints for each
4875 pointer passed by address. */
4876 else if (is_gimple_call (t))
4877 find_func_aliases_for_call (fn, as_a <gcall *> (t));
4879 /* Otherwise, just a regular assignment statement. Only care about
4880 operations with pointer result, others are dealt with as escape
4881 points if they have pointer operands. */
4882 else if (is_gimple_assign (t))
4884 /* Otherwise, just a regular assignment statement. */
4885 tree lhsop = gimple_assign_lhs (t);
/* gimple_num_ops == 2 means a single-rhs assignment; otherwise RHSOP
   stays NULL and the multi-operand cases below apply.  */
4886 tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
4888 if (rhsop && TREE_CLOBBER_P (rhsop))
4889 /* Ignore clobbers, they don't actually store anything into
4890 the LHS. */
4892 else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
4893 do_structure_copy (lhsop, rhsop);
4894 else
4896 enum tree_code code = gimple_assign_rhs_code (t);
4898 get_constraint_for (lhsop, &lhsc);
4900 if (code == POINTER_PLUS_EXPR)
4901 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4902 gimple_assign_rhs2 (t), &rhsc);
4903 else if (code == POINTER_DIFF_EXPR)
4904 /* The result is not a pointer (part). */
4906 else if (code == BIT_AND_EXPR
4907 && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
4909 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
4910 the pointer. Handle it by offsetting it by UNKNOWN. */
4911 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
4912 NULL_TREE, &rhsc);
4914 else if (code == TRUNC_DIV_EXPR
4915 || code == CEIL_DIV_EXPR
4916 || code == FLOOR_DIV_EXPR
4917 || code == ROUND_DIV_EXPR
4918 || code == EXACT_DIV_EXPR
4919 || code == TRUNC_MOD_EXPR
4920 || code == CEIL_MOD_EXPR
4921 || code == FLOOR_MOD_EXPR
4922 || code == ROUND_MOD_EXPR)
4923 /* Division and modulo transfer the pointer from the LHS. */
4924 get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
/* Conversions transfer points-to info unless they create a pointer
   out of a non-pointer.  */
4925 else if ((CONVERT_EXPR_CODE_P (code)
4926 && !(POINTER_TYPE_P (gimple_expr_type (t))
4927 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
4928 || gimple_assign_single_p (t))
4929 get_constraint_for_rhs (rhsop, &rhsc);
4930 else if (code == COND_EXPR)
4932 /* The result is a merge of both COND_EXPR arms. */
4933 auto_vec<ce_s, 2> tmp;
4934 struct constraint_expr *rhsp;
4935 unsigned i;
4936 get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
4937 get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
4938 FOR_EACH_VEC_ELT (tmp, i, rhsp)
4939 rhsc.safe_push (*rhsp);
4941 else if (truth_value_p (code))
4942 /* Truth value results are not pointer (parts). Or at least
4943 very unreasonable obfuscation of a part. */
4945 else
4947 /* All other operations are merges. */
4948 auto_vec<ce_s, 4> tmp;
4949 struct constraint_expr *rhsp;
4950 unsigned i, j;
4951 get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
4952 for (i = 2; i < gimple_num_ops (t); ++i)
4954 get_constraint_for_rhs (gimple_op (t, i), &tmp);
4955 FOR_EACH_VEC_ELT (tmp, j, rhsp)
4956 rhsc.safe_push (*rhsp);
4957 tmp.truncate (0);
4960 process_all_all_constraints (lhsc, rhsc);
4962 /* If there is a store to a global variable the rhs escapes. */
4963 if ((lhsop = get_base_address (lhsop)) != NULL_TREE
4964 && DECL_P (lhsop))
4966 varinfo_t vi = get_vi_for_tree (lhsop);
4967 if ((! in_ipa_mode && vi->is_global_var)
4968 || vi->is_ipa_escape_point)
4969 make_escape_constraint (rhsop);
4972 /* Handle escapes through return. */
4973 else if (gimple_code (t) == GIMPLE_RETURN
4974 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
4976 greturn *return_stmt = as_a <greturn *> (t);
4977 fi = NULL;
4978 if (!in_ipa_mode
4979 || !(fi = get_vi_for_tree (fn->decl)))
4980 make_escape_constraint (gimple_return_retval (return_stmt));
4981 else if (in_ipa_mode)
4983 struct constraint_expr lhs ;
4984 struct constraint_expr *rhsp;
4985 unsigned i;
4987 lhs = get_function_part_constraint (fi, fi_result);
4988 get_constraint_for_rhs (gimple_return_retval (return_stmt), &rhsc);
4989 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
4990 process_constraint (new_constraint (lhs, *rhsp));
4993 /* Handle asms conservatively by adding escape constraints to everything. */
4994 else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
4996 unsigned i, noutputs;
4997 const char **oconstraints;
4998 const char *constraint;
4999 bool allows_mem, allows_reg, is_inout;
5001 noutputs = gimple_asm_noutputs (asm_stmt);
5002 oconstraints = XALLOCAVEC (const char *, noutputs);
5004 for (i = 0; i < noutputs; ++i)
5006 tree link = gimple_asm_output_op (asm_stmt, i);
5007 tree op = TREE_VALUE (link);
5009 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5010 oconstraints[i] = constraint;
5011 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
5012 &allows_reg, &is_inout);
5014 /* A memory constraint makes the address of the operand escape. */
5015 if (!allows_reg && allows_mem)
5016 make_escape_constraint (build_fold_addr_expr (op));
5018 /* The asm may read global memory, so outputs may point to
5019 any global memory. */
5020 if (op)
5022 auto_vec<ce_s, 2> lhsc;
5023 struct constraint_expr rhsc, *lhsp;
5024 unsigned j;
5025 get_constraint_for (op, &lhsc);
5026 rhsc.var = nonlocal_id;
5027 rhsc.offset = 0;
5028 rhsc.type = SCALAR;
5029 FOR_EACH_VEC_ELT (lhsc, j, lhsp)
5030 process_constraint (new_constraint (*lhsp, rhsc));
5033 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5035 tree link = gimple_asm_input_op (asm_stmt, i);
5036 tree op = TREE_VALUE (link);
5038 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5040 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
5041 &allows_mem, &allows_reg);
5043 /* A memory constraint makes the address of the operand escape. */
5044 if (!allows_reg && allows_mem)
5045 make_escape_constraint (build_fold_addr_expr (op));
5046 /* Strictly we'd only need the constraint to ESCAPED if
5047 the asm clobbers memory, otherwise using something
5048 along the lines of per-call clobbers/uses would be enough. */
5049 else if (op)
5050 make_escape_constraint (op);
5056 /* Create a constraint adding to the clobber set of FI the memory
5057 pointed to by PTR. */
5059 static void
5060 process_ipa_clobber (varinfo_t fi, tree ptr)
5062 vec<ce_s> ptrc = vNULL;
5063 struct constraint_expr *c, lhs;
5064 unsigned i;
5065 get_constraint_for_rhs (ptr, &ptrc);
5066 lhs = get_function_part_constraint (fi, fi_clobbers);
5067 FOR_EACH_VEC_ELT (ptrc, i, c)
5068 process_constraint (new_constraint (lhs, *c));
5069 ptrc.release ();
5072 /* Walk statement T setting up clobber and use constraints according to the
5073 references found in T. This function is a main part of the
5074 IPA constraint builder.  FN is the function containing T.  */
5076 static void
5077 find_func_clobbers (struct function *fn, gimple *origt)
5079 gimple *t = origt;
5080 auto_vec<ce_s, 16> lhsc;
5081 auto_vec<ce_s, 16> rhsc;
5082 varinfo_t fi;
5084 /* Add constraints for clobbered/used in IPA mode.
5085 We are not interested in what automatic variables are clobbered
5086 or used as we only use the information in the caller to which
5087 they do not escape. */
5088 gcc_assert (in_ipa_mode);
5090 /* If the stmt refers to memory in any way it better had a VUSE. */
5091 if (gimple_vuse (t) == NULL_TREE)
5092 return;
5094 /* We'd better have function information for the current function. */
5095 fi = lookup_vi_for_tree (fn->decl);
5096 gcc_assert (fi != NULL);
5098 /* Account for stores in assignments and calls. */
5099 if (gimple_vdef (t) != NULL_TREE
5100 && gimple_has_lhs (t))
5102 tree lhs = gimple_get_lhs (t);
5103 tree tem = lhs;
/* Strip handled components to find the base object of the store.  */
5104 while (handled_component_p (tem))
5105 tem = TREE_OPERAND (tem, 0);
/* Only stores whose base is not an automatic variable of FN are
   interesting to callers.  */
5106 if ((DECL_P (tem)
5107 && !auto_var_in_fn_p (tem, fn->decl))
5108 || INDIRECT_REF_P (tem)
5109 || (TREE_CODE (tem) == MEM_REF
5110 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5111 && auto_var_in_fn_p
5112 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5114 struct constraint_expr lhsc, *rhsp;
5115 unsigned i;
5116 lhsc = get_function_part_constraint (fi, fi_clobbers);
5117 get_constraint_for_address_of (lhs, &rhsc);
5118 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5119 process_constraint (new_constraint (lhsc, *rhsp));
5120 rhsc.truncate (0);
5124 /* Account for uses in assigments and returns. */
5125 if (gimple_assign_single_p (t)
5126 || (gimple_code (t) == GIMPLE_RETURN
5127 && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
5129 tree rhs = (gimple_assign_single_p (t)
5130 ? gimple_assign_rhs1 (t)
5131 : gimple_return_retval (as_a <greturn *> (t)));
5132 tree tem = rhs;
5133 while (handled_component_p (tem))
5134 tem = TREE_OPERAND (tem, 0);
/* Same base-object filtering as for stores above.  */
5135 if ((DECL_P (tem)
5136 && !auto_var_in_fn_p (tem, fn->decl))
5137 || INDIRECT_REF_P (tem)
5138 || (TREE_CODE (tem) == MEM_REF
5139 && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
5140 && auto_var_in_fn_p
5141 (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
5143 struct constraint_expr lhs, *rhsp;
5144 unsigned i;
5145 lhs = get_function_part_constraint (fi, fi_uses);
5146 get_constraint_for_address_of (rhs, &rhsc);
5147 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5148 process_constraint (new_constraint (lhs, *rhsp));
5149 rhsc.truncate (0);
5153 if (gcall *call_stmt = dyn_cast <gcall *> (t))
5155 varinfo_t cfi = NULL;
5156 tree decl = gimple_call_fndecl (t);
5157 struct constraint_expr lhs, rhs;
5158 unsigned i, j;
5160 /* For builtins we do not have separate function info. For those
5161 we do not generate escapes for we have to generate clobbers/uses. */
5162 if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
5163 switch (DECL_FUNCTION_CODE (decl))
5165 /* The following functions use and clobber memory pointed to
5166 by their arguments. */
5167 case BUILT_IN_STRCPY:
5168 case BUILT_IN_STRNCPY:
5169 case BUILT_IN_BCOPY:
5170 case BUILT_IN_MEMCPY:
5171 case BUILT_IN_MEMMOVE:
5172 case BUILT_IN_MEMPCPY:
5173 case BUILT_IN_STPCPY:
5174 case BUILT_IN_STPNCPY:
5175 case BUILT_IN_STRCAT:
5176 case BUILT_IN_STRNCAT:
5177 case BUILT_IN_STRCPY_CHK:
5178 case BUILT_IN_STRNCPY_CHK:
5179 case BUILT_IN_MEMCPY_CHK:
5180 case BUILT_IN_MEMMOVE_CHK:
5181 case BUILT_IN_MEMPCPY_CHK:
5182 case BUILT_IN_STPCPY_CHK:
5183 case BUILT_IN_STPNCPY_CHK:
5184 case BUILT_IN_STRCAT_CHK:
5185 case BUILT_IN_STRNCAT_CHK:
/* bcopy takes (src, dest, n); all the others take (dest, src, ...).  */
5187 tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5188 == BUILT_IN_BCOPY ? 1 : 0));
5189 tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
5190 == BUILT_IN_BCOPY ? 0 : 1));
5191 unsigned i;
5192 struct constraint_expr *rhsp, *lhsp;
5193 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5194 lhs = get_function_part_constraint (fi, fi_clobbers);
5195 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5196 process_constraint (new_constraint (lhs, *lhsp));
5197 get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
5198 lhs = get_function_part_constraint (fi, fi_uses);
5199 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5200 process_constraint (new_constraint (lhs, *rhsp));
5201 return;
5203 /* The following function clobbers memory pointed to by
5204 its argument. */
5205 case BUILT_IN_MEMSET:
5206 case BUILT_IN_MEMSET_CHK:
5207 case BUILT_IN_POSIX_MEMALIGN:
5209 tree dest = gimple_call_arg (t, 0);
5210 unsigned i;
5211 ce_s *lhsp;
5212 get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
5213 lhs = get_function_part_constraint (fi, fi_clobbers);
5214 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
5215 process_constraint (new_constraint (lhs, *lhsp));
5216 return;
5218 /* The following functions clobber their second and third
5219 arguments. */
5220 case BUILT_IN_SINCOS:
5221 case BUILT_IN_SINCOSF:
5222 case BUILT_IN_SINCOSL:
5224 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5225 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5226 return;
5228 /* The following functions clobber their second argument. */
5229 case BUILT_IN_FREXP:
5230 case BUILT_IN_FREXPF:
5231 case BUILT_IN_FREXPL:
5232 case BUILT_IN_LGAMMA_R:
5233 case BUILT_IN_LGAMMAF_R:
5234 case BUILT_IN_LGAMMAL_R:
5235 case BUILT_IN_GAMMA_R:
5236 case BUILT_IN_GAMMAF_R:
5237 case BUILT_IN_GAMMAL_R:
5238 case BUILT_IN_MODF:
5239 case BUILT_IN_MODFF:
5240 case BUILT_IN_MODFL:
5242 process_ipa_clobber (fi, gimple_call_arg (t, 1));
5243 return;
5245 /* The following functions clobber their third argument. */
5246 case BUILT_IN_REMQUO:
5247 case BUILT_IN_REMQUOF:
5248 case BUILT_IN_REMQUOL:
5250 process_ipa_clobber (fi, gimple_call_arg (t, 2));
5251 return;
5253 /* The following functions neither read nor clobber memory. */
5254 case BUILT_IN_ASSUME_ALIGNED:
5255 case BUILT_IN_FREE:
5256 return;
5257 /* Trampolines are of no interest to us. */
5258 case BUILT_IN_INIT_TRAMPOLINE:
5259 case BUILT_IN_ADJUST_TRAMPOLINE:
5260 return;
5261 case BUILT_IN_VA_START:
5262 case BUILT_IN_VA_END:
5263 return;
5264 case BUILT_IN_GOMP_PARALLEL:
5265 case BUILT_IN_GOACC_PARALLEL:
5267 unsigned int fnpos, argpos;
5268 unsigned int implicit_use_args[2];
5269 unsigned int num_implicit_use_args = 0;
5270 switch (DECL_FUNCTION_CODE (decl))
5272 case BUILT_IN_GOMP_PARALLEL:
5273 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
5274 fnpos = 0;
5275 argpos = 1;
5276 break;
5277 case BUILT_IN_GOACC_PARALLEL:
5278 /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
5279 sizes, kinds, ...). */
5280 fnpos = 1;
5281 argpos = 3;
5282 implicit_use_args[num_implicit_use_args++] = 4;
5283 implicit_use_args[num_implicit_use_args++] = 5;
5284 break;
5285 default:
5286 gcc_unreachable ();
5289 tree fnarg = gimple_call_arg (t, fnpos);
5290 gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
5291 tree fndecl = TREE_OPERAND (fnarg, 0);
5292 if (fndecl_maybe_in_other_partition (fndecl))
5293 /* Fallthru to general call handling. */
5294 break;
5296 varinfo_t cfi = get_vi_for_tree (fndecl);
5298 tree arg = gimple_call_arg (t, argpos);
5300 /* Parameter passed by value is used. */
5301 lhs = get_function_part_constraint (fi, fi_uses);
5302 struct constraint_expr *rhsp;
5303 get_constraint_for (arg, &rhsc);
5304 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5305 process_constraint (new_constraint (lhs, *rhsp));
5306 rhsc.truncate (0);
5308 /* Handle parameters used by the call, but not used in cfi, as
5309 implicitly used by cfi. */
5310 lhs = get_function_part_constraint (cfi, fi_uses);
5311 for (unsigned i = 0; i < num_implicit_use_args; ++i)
5313 tree arg = gimple_call_arg (t, implicit_use_args[i]);
5314 get_constraint_for (arg, &rhsc);
5315 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5316 process_constraint (new_constraint (lhs, *rhsp));
5317 rhsc.truncate (0);
5320 /* The caller clobbers what the callee does. */
5321 lhs = get_function_part_constraint (fi, fi_clobbers);
5322 rhs = get_function_part_constraint (cfi, fi_clobbers);
5323 process_constraint (new_constraint (lhs, rhs));
5325 /* The caller uses what the callee does. */
5326 lhs = get_function_part_constraint (fi, fi_uses);
5327 rhs = get_function_part_constraint (cfi, fi_uses);
5328 process_constraint (new_constraint (lhs, rhs));
5330 return;
5332 /* printf-style functions may have hooks to set pointers to
5333 point to somewhere into the generated string. Leave them
5334 for a later exercise... */
5335 default:
5336 /* Fallthru to general call handling. */;
5339 /* Parameters passed by value are used. */
5340 lhs = get_function_part_constraint (fi, fi_uses);
5341 for (i = 0; i < gimple_call_num_args (t); i++)
5343 struct constraint_expr *rhsp;
5344 tree arg = gimple_call_arg (t, i);
/* SSA names and invariants are not memory; only aggregates passed
   by reference matter here.  */
5346 if (TREE_CODE (arg) == SSA_NAME
5347 || is_gimple_min_invariant (arg))
5348 continue;
5350 get_constraint_for_address_of (arg, &rhsc);
5351 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
5352 process_constraint (new_constraint (lhs, *rhsp));
5353 rhsc.truncate (0);
5356 /* Build constraints for propagating clobbers/uses along the
5357 callgraph edges. */
5358 cfi = get_fi_for_callee (call_stmt);
5359 if (cfi->id == anything_id)
5361 if (gimple_vdef (t))
5362 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5363 anything_id);
5364 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5365 anything_id);
5366 return;
5369 /* For callees without function info (that's external functions),
5370 ESCAPED is clobbered and used. */
5371 if (cfi->decl
5372 && TREE_CODE (cfi->decl) == FUNCTION_DECL
5373 && !cfi->is_fn_info)
5375 varinfo_t vi;
5377 if (gimple_vdef (t))
5378 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5379 escaped_id);
5380 make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
5382 /* Also honor the call statement use/clobber info. */
5383 if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
5384 make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
5385 vi->id);
5386 if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
5387 make_copy_constraint (first_vi_for_offset (fi, fi_uses),
5388 vi->id);
5389 return;
5392 /* Otherwise the caller clobbers and uses what the callee does.
5393 ??? This should use a new complex constraint that filters
5394 local variables of the callee. */
5395 if (gimple_vdef (t))
5397 lhs = get_function_part_constraint (fi, fi_clobbers);
5398 rhs = get_function_part_constraint (cfi, fi_clobbers);
5399 process_constraint (new_constraint (lhs, rhs));
5401 lhs = get_function_part_constraint (fi, fi_uses);
5402 rhs = get_function_part_constraint (cfi, fi_uses);
5403 process_constraint (new_constraint (lhs, rhs));
5405 else if (gimple_code (t) == GIMPLE_ASM)
5407 /* ??? Ick. We can do better. */
5408 if (gimple_vdef (t))
5409 make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
5410 anything_id);
5411 make_constraint_from (first_vi_for_offset (fi, fi_uses),
5412 anything_id);
5417 /* Find the first varinfo in the same variable as START that overlaps with
5418 OFFSET. Return NULL if we can't find one. */
5420 static varinfo_t
5421 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5423 /* If the offset is outside of the variable, bail out. */
5424 if (offset >= start->fullsize)
5425 return NULL;
5427 /* If we cannot reach offset from start, lookup the first field
5428 and start from there. */
5429 if (start->offset > offset)
5430 start = get_varinfo (start->head);
5432 while (start)
5434 /* We may not find a variable in the field list with the actual
5435 offset when we have glommed a structure to a variable.
5436 In that case, however, offset should still be within the size
5437 of the variable. */
5438 if (offset >= start->offset
5439 && (offset - start->offset) < start->size)
5440 return start;
5442 start = vi_next (start);
5445 return NULL;
5448 /* Find the first varinfo in the same variable as START that overlaps with
5449 OFFSET. If there is no such varinfo the varinfo directly preceding
5450 OFFSET is returned. */
5452 static varinfo_t
5453 first_or_preceding_vi_for_offset (varinfo_t start,
5454 unsigned HOST_WIDE_INT offset)
5456 /* If we cannot reach offset from start, lookup the first field
5457 and start from there. */
5458 if (start->offset > offset)
5459 start = get_varinfo (start->head);
5461 /* We may not find a variable in the field list with the actual
5462 offset when we have glommed a structure to a variable.
5463 In that case, however, offset should still be within the size
5464 of the variable.
5465 If we got beyond the offset we look for return the field
5466 directly preceding offset which may be the last field. */
5467 while (start->next
5468 && offset >= start->offset
5469 && !((offset - start->offset) < start->size))
5470 start = vi_next (start);
5472 return start;
5476 /* This structure is used during pushing fields onto the fieldstack
5477 to track the offset of the field, since bitpos_of_field gives it
5478 relative to its immediate containing type, and we want it relative
5479 to the ultimate containing object. */
5481 struct fieldoff
5483 /* Offset from the base of the base containing object to this field. */
5484 HOST_WIDE_INT offset;
5486 /* Size, in bits, of the field. */
5487 unsigned HOST_WIDE_INT size;
/* True when the field's DECL_SIZE is absent or does not fit an
   unsigned HOST_WIDE_INT (see push_fields_onto_fieldstack).  */
5489 unsigned has_unknown_size : 1;
/* True when the field's type is known to contain pointers.  */
5491 unsigned must_have_pointers : 1;
/* True when the field may contain pointers.  */
5493 unsigned may_have_pointers : 1;
/* NOTE(review): presumably set when the field consists only of
   restrict-qualified pointers -- confirm against the users.  */
5495 unsigned only_restrict_pointers : 1;
/* NOTE(review): the pointed-to type for restrict-only fields,
   NULL_TREE otherwise -- confirm against the users.  */
5497 tree restrict_pointed_type;
5499 typedef struct fieldoff fieldoff_s;
5502 /* qsort comparison function for two fieldoff's PA and PB */
5504 static int
5505 fieldoff_compare (const void *pa, const void *pb)
5507 const fieldoff_s *foa = (const fieldoff_s *)pa;
5508 const fieldoff_s *fob = (const fieldoff_s *)pb;
5509 unsigned HOST_WIDE_INT foasize, fobsize;
5511 if (foa->offset < fob->offset)
5512 return -1;
5513 else if (foa->offset > fob->offset)
5514 return 1;
5516 foasize = foa->size;
5517 fobsize = fob->size;
5518 if (foasize < fobsize)
5519 return -1;
5520 else if (foasize > fobsize)
5521 return 1;
5522 return 0;
5525 /* Sort a fieldstack according to the field offset and sizes. */
5526 static void
5527 sort_fieldstack (vec<fieldoff_s> fieldstack)
/* vec::qsort with fieldoff_compare orders by offset, then by size.  */
5529 fieldstack.qsort (fieldoff_compare);
5532 /* Return true if T is a type that can have subvars. */
5534 static inline bool
5535 type_can_have_subvars (const_tree t)
5537 /* Aggregates without overlapping fields can have subvars. */
/* Unions and QUAL_UNION_TYPEs have overlapping members, so only
   RECORD_TYPEs qualify (see also push_fields_onto_fieldstack).  */
5538 return TREE_CODE (t) == RECORD_TYPE;
5541 /* Return true if V is a tree that we can have subvars for.
5542 Normally, this is any aggregate type. Also complex
5543 types which are not gimple registers can have subvars. */
5545 static inline bool
5546 var_can_have_subvars (const_tree v)
5548 /* Volatile variables should never have subvars. */
5549 if (TREE_THIS_VOLATILE (v))
5550 return false;
5552 /* Non decls or memory tags can never have subvars. */
5553 if (!DECL_P (v))
5554 return false;
5556 return type_can_have_subvars (TREE_TYPE (v));
5559 /* Return true if T is a type that does contain pointers. */
5561 static bool
5562 type_must_have_pointers (tree type)
5564 if (POINTER_TYPE_P (type))
5565 return true;
5567 if (TREE_CODE (type) == ARRAY_TYPE)
5568 return type_must_have_pointers (TREE_TYPE (type));
5570 /* A function or method can have pointers as arguments, so track
5571 those separately. */
5572 if (TREE_CODE (type) == FUNCTION_TYPE
5573 || TREE_CODE (type) == METHOD_TYPE)
5574 return true;
5576 return false;
/* Return true if the type of field T must contain pointers.  */
5579 static bool
5580 field_must_have_pointers (tree t)
5582 return type_must_have_pointers (TREE_TYPE (t));
5585 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5586 the fields of TYPE onto fieldstack, recording their offsets along
5587 the way.
5589 OFFSET is used to keep track of the offset in this entire
5590 structure, rather than just the immediately containing structure.
5591 Returns false if the caller is supposed to handle the field we
5592 recursed for. */
5594 static bool
5595 push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
5596 HOST_WIDE_INT offset)
5598 tree field;
5599 bool empty_p = true;
5601 if (TREE_CODE (type) != RECORD_TYPE)
5602 return false;
5604 /* If the vector of fields is growing too big, bail out early.
5605 Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
5606 sure this fails. */
5607 if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
5608 return false;
5610 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5611 if (TREE_CODE (field) == FIELD_DECL)
5613 bool push = false;
5614 HOST_WIDE_INT foff = bitpos_of_field (field);
5615 tree field_type = TREE_TYPE (field);
/* Fields that cannot have subvars themselves (and unions, which have
   overlapping members) become leaves; otherwise recurse into the
   field's record type.  */
5617 if (!var_can_have_subvars (field)
5618 || TREE_CODE (field_type) == QUAL_UNION_TYPE
5619 || TREE_CODE (field_type) == UNION_TYPE)
5620 push = true;
5621 else if (!push_fields_onto_fieldstack
5622 (field_type, fieldstack, offset + foff)
5623 && (DECL_SIZE (field)
5624 && !integer_zerop (DECL_SIZE (field))))
5625 /* Empty structures may have actual size, like in C++. So
5626 see if we didn't push any subfields and the size is
5627 nonzero, push the field onto the stack. */
5628 push = true;
5630 if (push)
5632 fieldoff_s *pair = NULL;
5633 bool has_unknown_size = false;
5634 bool must_have_pointers_p;
5636 if (!fieldstack->is_empty ())
5637 pair = &fieldstack->last ();
5639 /* If there isn't anything at offset zero, create sth. */
5640 if (!pair
5641 && offset + foff != 0)
5643 fieldoff_s e
5644 = {0, offset + foff, false, false, true, false, NULL_TREE};
5645 pair = fieldstack->safe_push (e);
5648 if (!DECL_SIZE (field)
5649 || !tree_fits_uhwi_p (DECL_SIZE (field)))
5650 has_unknown_size = true;
5652 /* If adjacent fields do not contain pointers merge them. */
5653 must_have_pointers_p = field_must_have_pointers (field);
5654 if (pair
5655 && !has_unknown_size
5656 && !must_have_pointers_p
5657 && !pair->must_have_pointers
5658 && !pair->has_unknown_size
5659 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5661 pair->size += tree_to_uhwi (DECL_SIZE (field));
5663 else
5665 fieldoff_s e;
5666 e.offset = offset + foff;
5667 e.has_unknown_size = has_unknown_size;
5668 if (!has_unknown_size)
5669 e.size = tree_to_uhwi (DECL_SIZE (field));
5670 else
5671 e.size = -1;
5672 e.must_have_pointers = must_have_pointers_p;
5673 e.may_have_pointers = true;
/* Restrict-qualified pointer fields get a tag variable later;
   remember the pointed-to type for that.  */
5674 e.only_restrict_pointers
5675 = (!has_unknown_size
5676 && POINTER_TYPE_P (field_type)
5677 && TYPE_RESTRICT (field_type))
5678 if (e.only_restrict_pointers)
5679 e.restrict_pointed_type = TREE_TYPE (field_type);
5680 fieldstack->safe_push (e);
5684 empty_p = false;
/* Return true if TYPE had at least one FIELD_DECL.  */
5687 return !empty_p;
5690 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5691 if it is a varargs function. */
5693 static unsigned int
5694 count_num_arguments (tree decl, bool *is_varargs)
5696 unsigned int num = 0;
5697 tree t;
5699 /* Capture named arguments for K&R functions. They do not
5700 have a prototype and thus no TYPE_ARG_TYPES. */
5701 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5702 ++num;
5704 /* Check if the function has variadic arguments. */
5705 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5706 if (TREE_VALUE (t) == void_type_node)
5707 break;
/* If the argument-type list did not terminate with void_type_node
   the function is variadic.  Note this also fires for unprototyped
   (K&R) functions where TYPE_ARG_TYPES is NULL, which are thus
   conservatively treated as varargs.  */
5708 if (!t)
5709 *is_varargs = true;
5711 return num;
5714 /* Create the function info for DECL, using NAME, and return the
5715 varinfo we've created for the function. If NONLOCAL_P, create
5716 initial constraints from NONLOCAL for the pieces. */
5718 static varinfo_t
5719 create_function_info_for (tree decl, const char *name, bool add_id,
5720 bool nonlocal_p)
5722 struct function *fn = DECL_STRUCT_FUNCTION (decl);
5723 varinfo_t vi, prev_vi;
5724 tree arg;
5725 unsigned int i;
5726 bool is_varargs = false;
5727 unsigned int num_args = count_num_arguments (decl, &is_varargs);
5729 /* Create the variable info. */
5731 vi = new_var_info (decl, name, add_id);
5732 vi->offset = 0;
5733 vi->size = 1;
/* The function info is laid out as a structured variable: slots at
   offsets fi_clobbers, fi_uses, fi_static_chain, fi_result and
   fi_parm_base + i for the parameters, chained via ->next below.  */
5734 vi->fullsize = fi_parm_base + num_args;
5735 vi->is_fn_info = 1;
5736 vi->may_have_pointers = false;
/* For varargs the number of effective argument slots is unknown.  */
5737 if (is_varargs)
5738 vi->fullsize = ~0;
5739 insert_vi_for_tree (vi->decl, vi);
5741 prev_vi = vi;
5743 /* Create a variable for things the function clobbers and one for
5744 things the function uses. */
5746 varinfo_t clobbervi, usevi;
5747 const char *newname;
5748 char *tempname;
5750 tempname = xasprintf ("%s.clobber", name);
5751 newname = ggc_strdup (tempname);
5752 free (tempname);
5754 clobbervi = new_var_info (NULL, newname, false);
5755 clobbervi->offset = fi_clobbers;
5756 clobbervi->size = 1;
5757 clobbervi->fullsize = vi->fullsize;
5758 clobbervi->is_full_var = true;
5759 clobbervi->is_global_var = false;
5760 clobbervi->is_reg_var = true;
5762 gcc_assert (prev_vi->offset < clobbervi->offset);
5763 prev_vi->next = clobbervi->id;
5764 prev_vi = clobbervi;
5766 tempname = xasprintf ("%s.use", name);
5767 newname = ggc_strdup (tempname);
5768 free (tempname);
5770 usevi = new_var_info (NULL, newname, false);
5771 usevi->offset = fi_uses;
5772 usevi->size = 1;
5773 usevi->fullsize = vi->fullsize;
5774 usevi->is_full_var = true;
5775 usevi->is_global_var = false;
5776 usevi->is_reg_var = true;
5778 gcc_assert (prev_vi->offset < usevi->offset);
5779 prev_vi->next = usevi->id;
5780 prev_vi = usevi;
5783 /* And one for the static chain. */
5784 if (fn->static_chain_decl != NULL_TREE)
5786 varinfo_t chainvi;
5787 const char *newname;
5788 char *tempname;
5790 tempname = xasprintf ("%s.chain", name);
5791 newname = ggc_strdup (tempname);
5792 free (tempname);
5794 chainvi = new_var_info (fn->static_chain_decl, newname, false);
5795 chainvi->offset = fi_static_chain;
5796 chainvi->size = 1;
5797 chainvi->fullsize = vi->fullsize;
5798 chainvi->is_full_var = true;
5799 chainvi->is_global_var = false;
5801 insert_vi_for_tree (fn->static_chain_decl, chainvi);
5803 if (nonlocal_p
5804 && chainvi->may_have_pointers)
5805 make_constraint_from (chainvi, nonlocal_id);
5807 gcc_assert (prev_vi->offset < chainvi->offset);
5808 prev_vi->next = chainvi->id;
5809 prev_vi = chainvi;
5812 /* Create a variable for the return var. */
5813 if (DECL_RESULT (decl) != NULL
5814 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
5816 varinfo_t resultvi;
5817 const char *newname;
5818 char *tempname;
5819 tree resultdecl = decl;
5821 if (DECL_RESULT (decl))
5822 resultdecl = DECL_RESULT (decl);
5824 tempname = xasprintf ("%s.result", name);
5825 newname = ggc_strdup (tempname);
5826 free (tempname);
5828 resultvi = new_var_info (resultdecl, newname, false);
5829 resultvi->offset = fi_result;
5830 resultvi->size = 1;
5831 resultvi->fullsize = vi->fullsize;
5832 resultvi->is_full_var = true;
5833 if (DECL_RESULT (decl))
5834 resultvi->may_have_pointers = true;
5836 if (DECL_RESULT (decl))
5837 insert_vi_for_tree (DECL_RESULT (decl), resultvi);
/* A by-reference result is initialized from the caller, i.e. from
   NONLOCAL memory.  */
5839 if (nonlocal_p
5840 && DECL_RESULT (decl)
5841 && DECL_BY_REFERENCE (DECL_RESULT (decl)))
5842 make_constraint_from (resultvi, nonlocal_id);
5844 gcc_assert (prev_vi->offset < resultvi->offset);
5845 prev_vi->next = resultvi->id;
5846 prev_vi = resultvi;
5849 /* We also need to make function return values escape. Nothing
5850 escapes by returning from main though. */
5851 if (nonlocal_p
5852 && !MAIN_NAME_P (DECL_NAME (decl)))
5854 varinfo_t fi, rvi;
5855 fi = lookup_vi_for_tree (decl);
5856 rvi = first_vi_for_offset (fi, fi_result);
5857 if (rvi && rvi->offset == fi_result)
5858 make_copy_constraint (get_varinfo (escaped_id), rvi->id);
5861 /* Set up variables for each argument. */
5862 arg = DECL_ARGUMENTS (decl);
5863 for (i = 0; i < num_args; i++)
5865 varinfo_t argvi;
5866 const char *newname;
5867 char *tempname;
5868 tree argdecl = decl;
/* ARG may run out before NUM_ARGS (no PARM_DECLs); fall back to
   DECL itself as the underlying decl in that case.  */
5870 if (arg)
5871 argdecl = arg;
5873 tempname = xasprintf ("%s.arg%d", name, i);
5874 newname = ggc_strdup (tempname);
5875 free (tempname);
5877 argvi = new_var_info (argdecl, newname, false);
5878 argvi->offset = fi_parm_base + i;
5879 argvi->size = 1;
5880 argvi->is_full_var = true;
5881 argvi->fullsize = vi->fullsize;
5882 if (arg)
5883 argvi->may_have_pointers = true;
5885 if (arg)
5886 insert_vi_for_tree (arg, argvi);
5888 if (nonlocal_p
5889 && argvi->may_have_pointers)
5890 make_constraint_from (argvi, nonlocal_id);
5892 gcc_assert (prev_vi->offset < argvi->offset);
5893 prev_vi->next = argvi->id;
5894 prev_vi = argvi;
5895 if (arg)
5896 arg = DECL_CHAIN (arg);
5899 /* Add one representative for all further args. */
5900 if (is_varargs)
5902 varinfo_t argvi;
5903 const char *newname;
5904 char *tempname;
5905 tree decl;
5907 tempname = xasprintf ("%s.varargs", name);
5908 newname = ggc_strdup (tempname);
5909 free (tempname);
5911 /* We need sth that can be pointed to for va_start. */
5912 decl = build_fake_var_decl (ptr_type_node);
5914 argvi = new_var_info (decl, newname, false);
5915 argvi->offset = fi_parm_base + num_args;
5916 argvi->size = ~0;
5917 argvi->is_full_var = true;
5918 argvi->is_heap_var = true;
5919 argvi->fullsize = vi->fullsize;
5921 if (nonlocal_p
5922 && argvi->may_have_pointers)
5923 make_constraint_from (argvi, nonlocal_id);
5925 gcc_assert (prev_vi->offset < argvi->offset);
5926 prev_vi->next = argvi->id;
5927 prev_vi = argvi;
5930 return vi;
5934 /* Return true if FIELDSTACK contains fields that overlap.
5935 FIELDSTACK is assumed to be sorted by offset. */
5937 static bool
5938 check_for_overlaps (vec<fieldoff_s> fieldstack)
5940 fieldoff_s *fo = NULL;
5941 unsigned int i;
5942 HOST_WIDE_INT lastoffset = -1;
5944 FOR_EACH_VEC_ELT (fieldstack, i, fo)
5946 if (fo->offset == lastoffset)
5947 return true;
5948 lastoffset = fo->offset;
5950 return false;
5953 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
5954 This will also create any varinfo structures necessary for fields
5955 of DECL. DECL is a function parameter if HANDLE_PARAM is set.
5956 HANDLED_STRUCT_TYPE is used to register struct types reached by following
5957 restrict pointers. This is needed to prevent infinite recursion.
5958 If ADD_RESTRICT, pretend that the pointer NAME is restrict even if DECL
5959 does not advertise it. */
5961 static varinfo_t
5962 create_variable_info_for_1 (tree decl, const char *name, bool add_id,
5963 bool handle_param, bitmap handled_struct_type,
5964 bool add_restrict = false)
5966 varinfo_t vi, newvi;
5967 tree decl_type = TREE_TYPE (decl);
5968 tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
5969 auto_vec<fieldoff_s> fieldstack;
5970 fieldoff_s *fo;
5971 unsigned int i;
/* Variables of unknown or non-constant size get a single varinfo
   covering "everything".  */
5973 if (!declsize
5974 || !tree_fits_uhwi_p (declsize))
5976 vi = new_var_info (decl, name, add_id);
5977 vi->offset = 0;
5978 vi->size = ~0;
5979 vi->fullsize = ~0;
5980 vi->is_unknown_size_var = true;
5981 vi->is_full_var = true;
5982 vi->may_have_pointers = true;
5983 return vi;
5986 /* Collect field information. */
5987 if (use_field_sensitive
5988 && var_can_have_subvars (decl)
5989 /* ??? Force us to not use subfields for globals in IPA mode.
5990 Else we'd have to parse arbitrary initializers. */
5991 && !(in_ipa_mode
5992 && is_global_var (decl)))
5994 fieldoff_s *fo = NULL;
5995 bool notokay = false;
5996 unsigned int i;
5998 push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
6000 for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
6001 if (fo->has_unknown_size
6002 || fo->offset < 0)
6004 notokay = true;
6005 break;
6008 /* We can't sort them if we have a field with a variable sized type,
6009 which will make notokay = true. In that case, we are going to return
6010 without creating varinfos for the fields anyway, so sorting them is a
6011 waste to boot. */
6012 if (!notokay)
6014 sort_fieldstack (fieldstack);
6015 /* Due to some C++ FE issues, like PR 22488, we might end up
6016 what appear to be overlapping fields even though they,
6017 in reality, do not overlap. Until the C++ FE is fixed,
6018 we will simply disable field-sensitivity for these cases. */
6019 notokay = check_for_overlaps (fieldstack);
6022 if (notokay)
6023 fieldstack.release ();
6026 /* If we didn't end up collecting sub-variables create a full
6027 variable for the decl. */
6028 if (fieldstack.length () == 0
6029 || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
6031 vi = new_var_info (decl, name, add_id);
6032 vi->offset = 0;
6033 vi->may_have_pointers = true;
6034 vi->fullsize = tree_to_uhwi (declsize);
6035 vi->size = vi->fullsize;
6036 vi->is_full_var = true;
6037 if (POINTER_TYPE_P (decl_type)
6038 && (TYPE_RESTRICT (decl_type) || add_restrict))
6039 vi->only_restrict_pointers = 1;
/* For restrict-qualified pointer parameters build a representative
   heap variable for what they point to; HANDLED_STRUCT_TYPE guards
   against recursing into the same struct type forever.  */
6040 if (vi->only_restrict_pointers
6041 && !type_contains_placeholder_p (TREE_TYPE (decl_type))
6042 && handle_param
6043 && !bitmap_bit_p (handled_struct_type,
6044 TYPE_UID (TREE_TYPE (decl_type))))
6046 varinfo_t rvi;
6047 tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type));
6048 DECL_EXTERNAL (heapvar) = 1;
6049 if (var_can_have_subvars (heapvar))
6050 bitmap_set_bit (handled_struct_type,
6051 TYPE_UID (TREE_TYPE (decl_type)));
6052 rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
6053 true, handled_struct_type);
6054 if (var_can_have_subvars (heapvar))
6055 bitmap_clear_bit (handled_struct_type,
6056 TYPE_UID (TREE_TYPE (decl_type)));
6057 rvi->is_restrict_var = 1;
6058 insert_vi_for_tree (heapvar, rvi);
6059 make_constraint_from (vi, rvi->id);
6060 make_param_constraints (rvi);
6062 fieldstack.release ();
6063 return vi;
/* Otherwise create one varinfo per collected field, chained via
   ->next in offset order.  */
6066 vi = new_var_info (decl, name, add_id);
6067 vi->fullsize = tree_to_uhwi (declsize);
6068 if (fieldstack.length () == 1)
6069 vi->is_full_var = true;
6070 for (i = 0, newvi = vi;
6071 fieldstack.iterate (i, &fo);
6072 ++i, newvi = vi_next (newvi))
6074 const char *newname = NULL;
6075 char *tempname;
6077 if (dump_file)
6079 if (fieldstack.length () != 1)
6081 tempname
6082 = xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
6083 "+" HOST_WIDE_INT_PRINT_DEC, name,
6084 fo->offset, fo->size);
6085 newname = ggc_strdup (tempname);
6086 free (tempname);
6089 else
6090 newname = "NULL";
6092 if (newname)
6093 newvi->name = newname;
6094 newvi->offset = fo->offset;
6095 newvi->size = fo->size;
6096 newvi->fullsize = vi->fullsize;
6097 newvi->may_have_pointers = fo->may_have_pointers;
6098 newvi->only_restrict_pointers = fo->only_restrict_pointers;
6099 if (handle_param
6100 && newvi->only_restrict_pointers
6101 && !type_contains_placeholder_p (fo->restrict_pointed_type)
6102 && !bitmap_bit_p (handled_struct_type,
6103 TYPE_UID (fo->restrict_pointed_type)))
6105 varinfo_t rvi;
6106 tree heapvar = build_fake_var_decl (fo->restrict_pointed_type);
6107 DECL_EXTERNAL (heapvar) = 1;
6108 if (var_can_have_subvars (heapvar))
6109 bitmap_set_bit (handled_struct_type,
6110 TYPE_UID (fo->restrict_pointed_type));
6111 rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
6112 true, handled_struct_type);
6113 if (var_can_have_subvars (heapvar))
6114 bitmap_clear_bit (handled_struct_type,
6115 TYPE_UID (fo->restrict_pointed_type));
6116 rvi->is_restrict_var = 1;
6117 insert_vi_for_tree (heapvar, rvi);
6118 make_constraint_from (newvi, rvi->id);
6119 make_param_constraints (rvi);
6121 if (i + 1 < fieldstack.length ())
6123 varinfo_t tem = new_var_info (decl, name, false);
6124 newvi->next = tem->id;
6125 tem->head = vi->id;
6129 return vi;
/* Create the varinfo(s) for DECL with NAME, register them in the
   tree->varinfo map and create initial constraints for globals.
   Return the id of the main varinfo.  */
6132 static unsigned int
6133 create_variable_info_for (tree decl, const char *name, bool add_id)
6135 /* First see if we are dealing with an ifunc resolver call and
6136 associate that with a call to the resolver function result. */
6137 cgraph_node *node;
6138 if (in_ipa_mode
6139 && TREE_CODE (decl) == FUNCTION_DECL
6140 && (node = cgraph_node::get (decl))
6141 && node->ifunc_resolver)
6143 varinfo_t fi = get_vi_for_tree (node->get_alias_target ()->decl);
6144 constraint_expr rhs
6145 = get_function_part_constraint (fi, fi_result);
6146 fi = new_var_info (NULL_TREE, "ifuncres", true);
6147 fi->is_reg_var = true;
6148 constraint_expr lhs;
6149 lhs.type = SCALAR;
6150 lhs.var = fi->id;
6151 lhs.offset = 0;
6152 process_constraint (new_constraint (lhs, rhs));
6153 insert_vi_for_tree (decl, fi);
6154 return fi->id;
6157 varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, false, NULL);
6158 unsigned int id = vi->id;
6160 insert_vi_for_tree (decl, vi);
6162 if (!VAR_P (decl))
6163 return id;
6165 /* Create initial constraints for globals. */
6166 for (; vi; vi = vi_next (vi))
6168 if (!vi->may_have_pointers
6169 || !vi->is_global_var)
6170 continue;
6172 /* Mark global restrict qualified pointers. */
6173 if ((POINTER_TYPE_P (TREE_TYPE (decl))
6174 && TYPE_RESTRICT (TREE_TYPE (decl)))
6175 || vi->only_restrict_pointers)
6177 varinfo_t rvi
6178 = make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT",
6179 true);
6180 /* ??? For now exclude reads from globals as restrict sources
6181 if those are not (indirectly) from incoming parameters. */
6182 rvi->is_restrict_var = false;
6183 continue;
6186 /* In non-IPA mode the initializer from nonlocal is all we need. */
6187 if (!in_ipa_mode
6188 || DECL_HARD_REGISTER (decl))
6189 make_copy_constraint (vi, nonlocal_id);
6191 /* In IPA mode parse the initializer and generate proper constraints
6192 for it. */
6193 else
6195 varpool_node *vnode = varpool_node::get (decl);
6197 /* For escaped variables initialize them from nonlocal. */
6198 if (!vnode->all_refs_explicit_p ())
6199 make_copy_constraint (vi, nonlocal_id);
6201 /* If this is a global variable with an initializer and we are in
6202 IPA mode generate constraints for it. */
6203 ipa_ref *ref;
6204 for (unsigned idx = 0; vnode->iterate_reference (idx, ref); ++idx)
6206 auto_vec<ce_s> rhsc;
6207 struct constraint_expr lhs, *rhsp;
6208 unsigned i;
6209 get_constraint_for_address_of (ref->referred->decl, &rhsc);
6210 lhs.var = vi->id;
6211 lhs.offset = 0;
6212 lhs.type = SCALAR;
6213 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6214 process_constraint (new_constraint (lhs, *rhsp));
6215 /* If this is a variable that escapes from the unit
6216 the initializer escapes as well. */
6217 if (!vnode->all_refs_explicit_p ())
6219 lhs.var = escaped_id;
6220 lhs.offset = 0;
6221 lhs.type = SCALAR;
6222 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6223 process_constraint (new_constraint (lhs, *rhsp));
6229 return id;
6232 /* Print out the points-to solution for VAR to FILE. */
6234 static void
6235 dump_solution_for_var (FILE *file, unsigned int var)
6237 varinfo_t vi = get_varinfo (var);
6238 unsigned int i;
6239 bitmap_iterator bi;
6241 /* Dump the solution for unified vars anyway, this avoids difficulties
6242 in scanning dumps in the testsuite. */
6243 fprintf (file, "%s = { ", vi->name);
6244 vi = get_varinfo (find (var));
6245 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6246 fprintf (file, "%s ", get_varinfo (i)->name);
6247 fprintf (file, "}");
6249 /* But note when the variable was unified. */
6250 if (vi->id != var)
6251 fprintf (file, " same as %s", vi->name);
6253 fprintf (file, "\n");
6256 /* Print the points-to solution for VAR to stderr.  Convenience
wrapper around dump_solution_for_var for use from the debugger. */
6258 DEBUG_FUNCTION void
6259 debug_solution_for_var (unsigned int var)
6261 dump_solution_for_var (stderr, var);
6264 /* Register the constraints for function parameter related VI. */
6266 static void
6267 make_param_constraints (varinfo_t vi)
6269 for (; vi; vi = vi_next (vi))
/* Restrict-qualified pointer fields are handled by the caller, which
   already created a restrict tag for them, so skip them here.
   NOTE(review): the branch body (an empty statement ';' in the
   upstream source, line 6272) is absent from this excerpt -- verify
   against the original file.  */
6271 if (vi->only_restrict_pointers)
6273 else if (vi->may_have_pointers)
6274 make_constraint_from (vi, nonlocal_id);
/* A full variable covers the whole decl; no further subfields.  */
6276 if (vi->is_full_var)
6277 break;
6281 /* Create varinfo structures for all of the variables in the
6282 function for intraprocedural mode. */
6284 static void
6285 intra_create_variable_infos (struct function *fn)
6287 tree t;
6288 bitmap handled_struct_type = NULL;
/* The 'this' parameter of a C++ constructor may be treated as
   restrict (ADD_RESTRICT below); only the first parameter gets it.  */
6289 bool this_parm_in_ctor = DECL_CXX_CONSTRUCTOR_P (fn->decl);
6291 /* For each incoming pointer argument arg, create the constraint ARG
6292 = NONLOCAL or a dummy variable if it is a restrict qualified
6293 passed-by-reference argument. */
6294 for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
6296 if (handled_struct_type == NULL)
6297 handled_struct_type = BITMAP_ALLOC (NULL);
6299 varinfo_t p
6300 = create_variable_info_for_1 (t, alias_get_name (t), false, true,
6301 handled_struct_type, this_parm_in_ctor);
6302 insert_vi_for_tree (t, p);
6304 make_param_constraints (p);
6306 this_parm_in_ctor = false;
6309 if (handled_struct_type != NULL)
6310 BITMAP_FREE (handled_struct_type);
6312 /* Add a constraint for a result decl that is passed by reference. */
6313 if (DECL_RESULT (fn->decl)
6314 && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
6316 varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));
6318 for (p = result_vi; p; p = vi_next (p))
6319 make_constraint_from (p, nonlocal_id);
6322 /* Add a constraint for the incoming static chain parameter. */
6323 if (fn->static_chain_decl != NULL_TREE)
6325 varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);
6327 for (p = chain_vi; p; p = vi_next (p))
6328 make_constraint_from (p, nonlocal_id);
6332 /* Structure used to put solution bitmaps in a hashtable so they can
6333 be shared among variables with the same points-to set. */
6335 typedef struct shared_bitmap_info
6337 bitmap pt_vars; /* The shared points-to variable bitmap.  */
6338 hashval_t hashcode; /* Cached bitmap_hash of PT_VARS.  */
6339 } *shared_bitmap_info_t;
6340 typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
6342 /* Shared_bitmap hashtable helpers.  Provides the hash/equality
callbacks hash_table needs for shared_bitmap_info entries. */
6344 struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
6346 static inline hashval_t hash (const shared_bitmap_info *);
6347 static inline bool equal (const shared_bitmap_info *,
6348 const shared_bitmap_info *);
6351 /* Hash function for a shared_bitmap_info_t; simply returns the
hash value cached at insertion time. */
6353 inline hashval_t
6354 shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
6356 return bi->hashcode;
6359 /* Equality function for two shared_bitmap_info_t's: compares the
bitmap contents, not the cached hash codes. */
6361 inline bool
6362 shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
6363 const shared_bitmap_info *sbi2)
6365 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
6368 /* Shared_bitmap hashtable, mapping a points-to bitmap to its unique
shared instance. */
6370 static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
6372 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
6373 existing instance if there is one, NULL otherwise. */
6375 static bitmap
6376 shared_bitmap_lookup (bitmap pt_vars)
6378 shared_bitmap_info **slot;
6379 struct shared_bitmap_info sbi;
6381 sbi.pt_vars = pt_vars;
6382 sbi.hashcode = bitmap_hash (pt_vars);
6384 slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
6385 if (!slot)
6386 return NULL;
6387 else
6388 return (*slot)->pt_vars;
6392 /* Add a bitmap to the shared bitmap hashtable. */
6394 static void
6395 shared_bitmap_add (bitmap pt_vars)
6397 shared_bitmap_info **slot;
6398 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
6400 sbi->pt_vars = pt_vars;
6401 sbi->hashcode = bitmap_hash (pt_vars);
6403 slot = shared_bitmap_table->find_slot (sbi, INSERT);
6404 gcc_assert (!*slot);
6405 *slot = sbi;
6409 /* Set bits in INTO corresponding to the variable uids in solution set
FROM, updating the flags of PT accordingly.  FNDECL is the function
the solution is computed for (may be NULL). */
6411 static void
6412 set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
6413 tree fndecl)
6415 unsigned int i;
6416 bitmap_iterator bi;
6417 varinfo_t escaped_vi = get_varinfo (find (escaped_id));
6418 bool everything_escaped
6419 = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);
6421 EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
6423 varinfo_t vi = get_varinfo (i);
6425 /* The only artificial variables that are allowed in a may-alias
6426 set are heap variables. */
6427 if (vi->is_artificial_var && !vi->is_heap_var)
6428 continue;
6430 if (everything_escaped
6431 || (escaped_vi->solution
6432 && bitmap_bit_p (escaped_vi->solution, i)))
6434 pt->vars_contains_escaped = true;
6435 pt->vars_contains_escaped_heap |= vi->is_heap_var;
6438 if (vi->is_restrict_var)
6439 pt->vars_contains_restrict = true;
6441 if (VAR_P (vi->decl)
6442 || TREE_CODE (vi->decl) == PARM_DECL
6443 || TREE_CODE (vi->decl) == RESULT_DECL)
6445 /* If we are in IPA mode we will not recompute points-to
6446 sets after inlining so make sure they stay valid. */
6447 if (in_ipa_mode
6448 && !DECL_PT_UID_SET_P (vi->decl))
6449 SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));
6451 /* Add the decl to the points-to set. Note that the points-to
6452 set contains global variables. */
6453 bitmap_set_bit (into, DECL_PT_UID (vi->decl));
6454 if (vi->is_global_var
6455 /* In IPA mode the escaped_heap trick doesn't work as
6456 ESCAPED is escaped from the unit but
6457 pt_solution_includes_global needs to answer true for
6458 all variables not automatic within a function.
6459 For the same reason is_global_var is not the
6460 correct flag to track - local variables from other
6461 functions also need to be considered global.
6462 Conveniently all HEAP vars are not put in function
6463 scope. */
6464 || (in_ipa_mode
6465 && fndecl
6466 && ! auto_var_in_fn_p (vi->decl, fndecl)))
6467 pt->vars_contains_nonlocal = true;
6469 /* If we have a variable that is interposable record that fact
6470 for pointer comparison simplification. */
6471 if (VAR_P (vi->decl)
6472 && (TREE_STATIC (vi->decl) || DECL_EXTERNAL (vi->decl))
6473 && ! decl_binds_to_current_def_p (vi->decl))
6474 pt->vars_contains_interposable = true;
6476 /* If this is a local variable we can have overlapping lifetime
6477 of different function invocations through recursion duplicate
6478 it with its shadow variable. */
6479 if (in_ipa_mode
6480 && vi->shadow_var_uid != 0)
6482 bitmap_set_bit (into, vi->shadow_var_uid);
6483 pt->vars_contains_nonlocal = true;
6487 else if (TREE_CODE (vi->decl) == FUNCTION_DECL
6488 || TREE_CODE (vi->decl) == LABEL_DECL)
6490 /* Nothing should read/write from/to code so we can
6491 save bits by not including them in the points-to bitmaps.
6492 Still mark the points-to set as containing global memory
6493 to make code-patching possible - see PR70128. */
6494 pt->vars_contains_nonlocal = true;
6500 /* Compute and return the points-to solution for the variable ORIG_VI,
caching the result.  FNDECL is the function ORIG_VI is used in. */
6502 static struct pt_solution
6503 find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
6505 unsigned int i;
6506 bitmap_iterator bi;
6507 bitmap finished_solution;
6508 bitmap result;
6509 varinfo_t vi;
6510 struct pt_solution *pt;
6512 /* This variable may have been collapsed, let's get the real
6513 variable. */
6514 vi = get_varinfo (find (orig_vi->id));
6516 /* See if we have already computed the solution and return it. */
6517 pt_solution **slot = &final_solutions->get_or_insert (vi);
6518 if (*slot != NULL)
6519 return **slot;
6521 *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
6522 memset (pt, 0, sizeof (struct pt_solution));
6524 /* Translate artificial variables into SSA_NAME_PTR_INFO
6525 attributes. */
6526 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6528 varinfo_t vi = get_varinfo (i);
6530 if (vi->is_artificial_var)
6532 if (vi->id == nothing_id)
6533 pt->null = 1;
6534 else if (vi->id == escaped_id)
6536 if (in_ipa_mode)
6537 pt->ipa_escaped = 1;
6538 else
6539 pt->escaped = 1;
6540 /* Expand some special vars of ESCAPED in-place here. */
6541 varinfo_t evi = get_varinfo (find (escaped_id));
6542 if (bitmap_bit_p (evi->solution, nonlocal_id))
6543 pt->nonlocal = 1;
6545 else if (vi->id == nonlocal_id)
6546 pt->nonlocal = 1;
6547 else if (vi->is_heap_var)
6548 /* We represent heapvars in the points-to set properly. */
6550 else if (vi->id == string_id)
6551 /* Nobody cares - STRING_CSTs are read-only entities. */
6553 else if (vi->id == anything_id
6554 || vi->id == integer_id)
6555 pt->anything = 1;
6559 /* Instead of doing extra work, simply do not create
6560 elaborate points-to information for pt_anything pointers. */
6561 if (pt->anything)
6562 return *pt;
6564 /* Share the final set of variables when possible. */
6565 finished_solution = BITMAP_GGC_ALLOC ();
6566 stats.points_to_sets_created++;
6568 set_uids_in_ptset (finished_solution, vi->solution, pt, fndecl);
6569 result = shared_bitmap_lookup (finished_solution);
6570 if (!result)
6572 shared_bitmap_add (finished_solution);
6573 pt->vars = finished_solution;
6575 else
/* An identical set already exists; reuse it and discard ours.  */
6577 pt->vars = result;
6578 bitmap_clear (finished_solution);
6581 return *pt;
6584 /* Given a pointer variable P, fill in its points-to set. */
6586 static void
6587 find_what_p_points_to (tree fndecl, tree p)
6589 struct ptr_info_def *pi;
6590 tree lookup_p = p;
6591 varinfo_t vi;
/* Remember the nonnull property before pi->pt is overwritten.  */
6592 bool nonnull = get_ptr_nonnull (p);
6594 /* For parameters, get at the points-to set for the actual parm
6595 decl. */
6596 if (TREE_CODE (p) == SSA_NAME
6597 && SSA_NAME_IS_DEFAULT_DEF (p)
6598 && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
6599 || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
6600 lookup_p = SSA_NAME_VAR (p);
6602 vi = lookup_vi_for_tree (lookup_p);
6603 if (!vi)
6604 return;
6606 pi = get_ptr_info (p);
6607 pi->pt = find_what_var_points_to (fndecl, vi);
6608 /* Conservatively set to NULL from PTA (to true). */
6609 pi->pt.null = 1;
6610 /* Preserve pointer nonnull computed by VRP. See get_ptr_nonnull
6611 in gcc/tree-ssanames.c for more information. */
6612 if (nonnull)
6613 set_ptr_nonnull (p);
6617 /* Query statistics for points-to solutions.  Counts how many alias
queries could be disambiguated versus how many could not. */
6619 static struct {
6620 unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
6621 unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
6622 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
6623 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
6624 } pta_stats;
6626 void
6627 dump_pta_stats (FILE *s)
6629 fprintf (s, "\nPTA query stats:\n");
6630 fprintf (s, " pt_solution_includes: "
6631 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6632 HOST_WIDE_INT_PRINT_DEC" queries\n",
6633 pta_stats.pt_solution_includes_no_alias,
6634 pta_stats.pt_solution_includes_no_alias
6635 + pta_stats.pt_solution_includes_may_alias);
6636 fprintf (s, " pt_solutions_intersect: "
6637 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6638 HOST_WIDE_INT_PRINT_DEC" queries\n",
6639 pta_stats.pt_solutions_intersect_no_alias,
6640 pta_stats.pt_solutions_intersect_no_alias
6641 + pta_stats.pt_solutions_intersect_may_alias);
6645 /* Reset the points-to solution *PT to a conservative default
6646 (point to anything). */
6648 void
6649 pt_solution_reset (struct pt_solution *pt)
6651 memset (pt, 0, sizeof (struct pt_solution));
6652 pt->anything = true;
6653 pt->null = true;
6656 /* Set the points-to solution *PT to point only to the variables
6657 in VARS.  VARS_CONTAINS_NONLOCAL specifies whether VARS contains
6658 global variables.  The escaped flag is derived from whether VARS
6659 intersects the current function's escaped solution. */
6661 void
6662 pt_solution_set (struct pt_solution *pt, bitmap vars,
6663 bool vars_contains_nonlocal)
6665 memset (pt, 0, sizeof (struct pt_solution));
6666 pt->vars = vars;
6667 pt->vars_contains_nonlocal = vars_contains_nonlocal;
6668 pt->vars_contains_escaped
6669 = (cfun->gimple_df->escaped.anything
6670 || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
6673 /* Set the points-to solution *PT to point only to the variable VAR. */
6675 void
6676 pt_solution_set_var (struct pt_solution *pt, tree var)
6678 memset (pt, 0, sizeof (struct pt_solution));
6679 pt->vars = BITMAP_GGC_ALLOC ();
6680 bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6681 pt->vars_contains_nonlocal = is_global_var (var);
6682 pt->vars_contains_escaped
6683 = (cfun->gimple_df->escaped.anything
6684 || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
6687 /* Computes the union of the points-to solutions *DEST and *SRC and
6688 stores the result in *DEST. This changes the points-to bitmap
6689 of *DEST and thus may not be used if that might be shared.
6690 The points-to bitmap of *SRC and *DEST will not be shared after
6691 this function if they were not before. */
6693 static void
6694 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6696 dest->anything |= src->anything;
6697 if (dest->anything)
6699 pt_solution_reset (dest);
6700 return;
6703 dest->nonlocal |= src->nonlocal;
6704 dest->escaped |= src->escaped;
6705 dest->ipa_escaped |= src->ipa_escaped;
6706 dest->null |= src->null;
6707 dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
6708 dest->vars_contains_escaped |= src->vars_contains_escaped;
6709 dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
6710 if (!src->vars)
6711 return;
6713 if (!dest->vars)
6714 dest->vars = BITMAP_GGC_ALLOC ();
6715 bitmap_ior_into (dest->vars, src->vars);
6718 /* Return true if the points-to solution *PT is empty. */
6720 bool
6721 pt_solution_empty_p (struct pt_solution *pt)
6723 if (pt->anything
6724 || pt->nonlocal)
6725 return false;
6727 if (pt->vars
6728 && !bitmap_empty_p (pt->vars))
6729 return false;
6731 /* If the solution includes ESCAPED, check if that is empty. */
6732 if (pt->escaped
6733 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6734 return false;
6736 /* If the solution includes ESCAPED, check if that is empty. */
6737 if (pt->ipa_escaped
6738 && !pt_solution_empty_p (&ipa_escaped_pt))
6739 return false;
6741 return true;
6744 /* Return true if the points-to solution *PT only point to a single var, and
6745 return the var uid in *UID. */
6747 bool
6748 pt_solution_singleton_or_null_p (struct pt_solution *pt, unsigned *uid)
6750 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6751 || pt->vars == NULL
6752 || !bitmap_single_bit_set_p (pt->vars))
6753 return false;
6755 *uid = bitmap_first_set_bit (pt->vars);
6756 return true;
6759 /* Return true if the points-to solution *PT includes global memory. */
6761 bool
6762 pt_solution_includes_global (struct pt_solution *pt)
6764 if (pt->anything
6765 || pt->nonlocal
6766 || pt->vars_contains_nonlocal
6767 /* The following is a hack to make the malloc escape hack work.
6768 In reality we'd need different sets for escaped-through-return
6769 and escaped-to-callees and passes would need to be updated. */
6770 || pt->vars_contains_escaped_heap)
6771 return true;
6773 /* 'escaped' is also a placeholder so we have to look into it. */
6774 if (pt->escaped)
6775 return pt_solution_includes_global (&cfun->gimple_df->escaped);
6777 if (pt->ipa_escaped)
6778 return pt_solution_includes_global (&ipa_escaped_pt);
6780 return false;
6783 /* Return true if the points-to solution *PT includes the variable
6784 declaration DECL. */
6786 static bool
6787 pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
6789 if (pt->anything)
6790 return true;
6792 if (pt->nonlocal
6793 && is_global_var (decl))
6794 return true;
6796 if (pt->vars
6797 && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
6798 return true;
6800 /* If the solution includes ESCAPED, check it. */
6801 if (pt->escaped
6802 && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
6803 return true;
6805 /* If the solution includes ESCAPED, check it. */
6806 if (pt->ipa_escaped
6807 && pt_solution_includes_1 (&ipa_escaped_pt, decl))
6808 return true;
6810 return false;
6813 bool
6814 pt_solution_includes (struct pt_solution *pt, const_tree decl)
6816 bool res = pt_solution_includes_1 (pt, decl);
6817 if (res)
6818 ++pta_stats.pt_solution_includes_may_alias;
6819 else
6820 ++pta_stats.pt_solution_includes_no_alias;
6821 return res;
6824 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
6825 intersection. */
6827 static bool
6828 pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
6830 if (pt1->anything || pt2->anything)
6831 return true;
6833 /* If either points to unknown global memory and the other points to
6834 any global memory they alias. */
6835 if ((pt1->nonlocal
6836 && (pt2->nonlocal
6837 || pt2->vars_contains_nonlocal))
6838 || (pt2->nonlocal
6839 && pt1->vars_contains_nonlocal))
6840 return true;
6842 /* If either points to all escaped memory and the other points to
6843 any escaped memory they alias. */
6844 if ((pt1->escaped
6845 && (pt2->escaped
6846 || pt2->vars_contains_escaped))
6847 || (pt2->escaped
6848 && pt1->vars_contains_escaped))
6849 return true;
6851 /* Check the escaped solution if required.
6852 ??? Do we need to check the local against the IPA escaped sets? */
6853 if ((pt1->ipa_escaped || pt2->ipa_escaped)
6854 && !pt_solution_empty_p (&ipa_escaped_pt))
6856 /* If both point to escaped memory and that solution
6857 is not empty they alias. */
6858 if (pt1->ipa_escaped && pt2->ipa_escaped)
6859 return true;
6861 /* If either points to escaped memory see if the escaped solution
6862 intersects with the other. */
6863 if ((pt1->ipa_escaped
6864 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
6865 || (pt2->ipa_escaped
6866 && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
6867 return true;
6870 /* Now both pointers alias if their points-to solution intersects. */
6871 return (pt1->vars
6872 && pt2->vars
6873 && bitmap_intersect_p (pt1->vars, pt2->vars));
6876 bool
6877 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
6879 bool res = pt_solutions_intersect_1 (pt1, pt2);
6880 if (res)
6881 ++pta_stats.pt_solutions_intersect_may_alias;
6882 else
6883 ++pta_stats.pt_solutions_intersect_no_alias;
6884 return res;
6888 /* Dump points-to information to OUTFILE. */
6890 static void
6891 dump_sa_points_to_info (FILE *outfile)
6893 unsigned int i;
6895 fprintf (outfile, "\nPoints-to sets\n\n");
6897 if (dump_flags & TDF_STATS)
6899 fprintf (outfile, "Stats:\n");
6900 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
6901 fprintf (outfile, "Non-pointer vars: %d\n",
6902 stats.nonpointer_vars);
6903 fprintf (outfile, "Statically unified vars: %d\n",
6904 stats.unified_vars_static);
6905 fprintf (outfile, "Dynamically unified vars: %d\n",
6906 stats.unified_vars_dynamic);
6907 fprintf (outfile, "Iterations: %d\n", stats.iterations);
6908 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
6909 fprintf (outfile, "Number of implicit edges: %d\n",
6910 stats.num_implicit_edges);
6913 for (i = 1; i < varmap.length (); i++)
6915 varinfo_t vi = get_varinfo (i);
6916 if (!vi->may_have_pointers)
6917 continue;
6918 dump_solution_for_var (outfile, i);
6923 /* Debug points-to information to stderr. */
6925 DEBUG_FUNCTION void
6926 debug_sa_points_to_info (void)
6928 dump_sa_points_to_info (stderr);
6932 /* Initialize the always-existing constraint variables for NULL
6933 ANYTHING, READONLY, and INTEGER */
6935 static void
6936 init_base_vars (void)
6938 struct constraint_expr lhs, rhs;
6939 varinfo_t var_anything;
6940 varinfo_t var_nothing;
6941 varinfo_t var_string;
6942 varinfo_t var_escaped;
6943 varinfo_t var_nonlocal;
6944 varinfo_t var_storedanything;
6945 varinfo_t var_integer;
6947 /* Variable ID zero is reserved and should be NULL. */
6948 varmap.safe_push (NULL);
6950 /* Create the NULL variable, used to represent that a variable points
6951 to NULL. */
6952 var_nothing = new_var_info (NULL_TREE, "NULL", false);
6953 gcc_assert (var_nothing->id == nothing_id);
6954 var_nothing->is_artificial_var = 1;
6955 var_nothing->offset = 0;
6956 var_nothing->size = ~0;
6957 var_nothing->fullsize = ~0;
6958 var_nothing->is_special_var = 1;
6959 var_nothing->may_have_pointers = 0;
6960 var_nothing->is_global_var = 0;
6962 /* Create the ANYTHING variable, used to represent that a variable
6963 points to some unknown piece of memory. */
6964 var_anything = new_var_info (NULL_TREE, "ANYTHING", false);
6965 gcc_assert (var_anything->id == anything_id);
6966 var_anything->is_artificial_var = 1;
6967 var_anything->size = ~0;
6968 var_anything->offset = 0;
6969 var_anything->fullsize = ~0;
6970 var_anything->is_special_var = 1;
6972 /* Anything points to anything. This makes deref constraints just
6973 work in the presence of linked list and other p = *p type loops,
6974 by saying that *ANYTHING = ANYTHING. */
6975 lhs.type = SCALAR;
6976 lhs.var = anything_id;
6977 lhs.offset = 0;
6978 rhs.type = ADDRESSOF;
6979 rhs.var = anything_id;
6980 rhs.offset = 0;
6982 /* This specifically does not use process_constraint because
6983 process_constraint ignores all anything = anything constraints, since all
6984 but this one are redundant. */
6985 constraints.safe_push (new_constraint (lhs, rhs));
6987 /* Create the STRING variable, used to represent that a variable
6988 points to a string literal. String literals don't contain
6989 pointers so STRING doesn't point to anything. */
6990 var_string = new_var_info (NULL_TREE, "STRING", false);
6991 gcc_assert (var_string->id == string_id);
6992 var_string->is_artificial_var = 1;
6993 var_string->offset = 0;
6994 var_string->size = ~0;
6995 var_string->fullsize = ~0;
6996 var_string->is_special_var = 1;
6997 var_string->may_have_pointers = 0;
6999 /* Create the ESCAPED variable, used to represent the set of escaped
7000 memory. */
7001 var_escaped = new_var_info (NULL_TREE, "ESCAPED", false);
7002 gcc_assert (var_escaped->id == escaped_id);
7003 var_escaped->is_artificial_var = 1;
7004 var_escaped->offset = 0;
7005 var_escaped->size = ~0;
7006 var_escaped->fullsize = ~0;
7007 var_escaped->is_special_var = 0;
7009 /* Create the NONLOCAL variable, used to represent the set of nonlocal
7010 memory. */
7011 var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL", false);
7012 gcc_assert (var_nonlocal->id == nonlocal_id);
7013 var_nonlocal->is_artificial_var = 1;
7014 var_nonlocal->offset = 0;
7015 var_nonlocal->size = ~0;
7016 var_nonlocal->fullsize = ~0;
7017 var_nonlocal->is_special_var = 1;
7019 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
7020 lhs.type = SCALAR;
7021 lhs.var = escaped_id;
7022 lhs.offset = 0;
7023 rhs.type = DEREF;
7024 rhs.var = escaped_id;
7025 rhs.offset = 0;
7026 process_constraint (new_constraint (lhs, rhs));
7028 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
7029 whole variable escapes. */
7030 lhs.type = SCALAR;
7031 lhs.var = escaped_id;
7032 lhs.offset = 0;
7033 rhs.type = SCALAR;
7034 rhs.var = escaped_id;
7035 rhs.offset = UNKNOWN_OFFSET;
7036 process_constraint (new_constraint (lhs, rhs));
7038 /* *ESCAPED = NONLOCAL. This is true because we have to assume
7039 everything pointed to by escaped points to what global memory can
7040 point to. */
7041 lhs.type = DEREF;
7042 lhs.var = escaped_id;
7043 lhs.offset = 0;
7044 rhs.type = SCALAR;
7045 rhs.var = nonlocal_id;
7046 rhs.offset = 0;
7047 process_constraint (new_constraint (lhs, rhs));
7049 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
7050 global memory may point to global memory and escaped memory. */
7051 lhs.type = SCALAR;
7052 lhs.var = nonlocal_id;
7053 lhs.offset = 0;
7054 rhs.type = ADDRESSOF;
7055 rhs.var = nonlocal_id;
7056 rhs.offset = 0;
7057 process_constraint (new_constraint (lhs, rhs));
7058 rhs.type = ADDRESSOF;
7059 rhs.var = escaped_id;
7060 rhs.offset = 0;
7061 process_constraint (new_constraint (lhs, rhs));
7063 /* Create the STOREDANYTHING variable, used to represent the set of
7064 variables stored to *ANYTHING. */
7065 var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING", false);
7066 gcc_assert (var_storedanything->id == storedanything_id);
7067 var_storedanything->is_artificial_var = 1;
7068 var_storedanything->offset = 0;
7069 var_storedanything->size = ~0;
7070 var_storedanything->fullsize = ~0;
7071 var_storedanything->is_special_var = 0;
7073 /* Create the INTEGER variable, used to represent that a variable points
7074 to what an INTEGER "points to". */
7075 var_integer = new_var_info (NULL_TREE, "INTEGER", false);
7076 gcc_assert (var_integer->id == integer_id);
7077 var_integer->is_artificial_var = 1;
7078 var_integer->size = ~0;
7079 var_integer->fullsize = ~0;
7080 var_integer->offset = 0;
7081 var_integer->is_special_var = 1;
7083 /* INTEGER = ANYTHING, because we don't know where a dereference of
7084 a random integer will point to. */
7085 lhs.type = SCALAR;
7086 lhs.var = integer_id;
7087 lhs.offset = 0;
7088 rhs.type = ADDRESSOF;
7089 rhs.var = anything_id;
7090 rhs.offset = 0;
7091 process_constraint (new_constraint (lhs, rhs));
7094 /* Initialize things necessary to perform PTA */
7096 static void
7097 init_alias_vars (void)
7099 use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);
7101 bitmap_obstack_initialize (&pta_obstack);
7102 bitmap_obstack_initialize (&oldpta_obstack);
7103 bitmap_obstack_initialize (&predbitmap_obstack);
7105 constraints.create (8);
7106 varmap.create (8);
7107 vi_for_tree = new hash_map<tree, varinfo_t>;
7108 call_stmt_vars = new hash_map<gimple *, varinfo_t>;
7110 memset (&stats, 0, sizeof (stats));
7111 shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
7112 init_base_vars ();
7114 gcc_obstack_init (&fake_var_decl_obstack);
7116 final_solutions = new hash_map<varinfo_t, pt_solution *>;
7117 gcc_obstack_init (&final_solutions_obstack);
7120 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
7121 predecessor edges. */
7123 static void
7124 remove_preds_and_fake_succs (constraint_graph_t graph)
7126 unsigned int i;
7128 /* Clear the implicit ref and address nodes from the successor
7129 lists. */
7130 for (i = 1; i < FIRST_REF_NODE; i++)
7132 if (graph->succs[i])
7133 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
7134 FIRST_REF_NODE * 2);
7137 /* Free the successor list for the non-ref nodes. */
7138 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
7140 if (graph->succs[i])
7141 BITMAP_FREE (graph->succs[i]);
7144 /* Now reallocate the size of the successor list as, and blow away
7145 the predecessor bitmaps. */
7146 graph->size = varmap.length ();
7147 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
7149 free (graph->implicit_preds);
7150 graph->implicit_preds = NULL;
7151 free (graph->preds);
7152 graph->preds = NULL;
7153 bitmap_obstack_release (&predbitmap_obstack);
7156 /* Solve the constraint set. */
7158 static void
7159 solve_constraints (void)
7161 struct scc_info *si;
7163 /* Sort varinfos so that ones that cannot be pointed to are last.
7164 This makes bitmaps more efficient. */
7165 unsigned int *map = XNEWVEC (unsigned int, varmap.length ());
7166 for (unsigned i = 0; i < integer_id + 1; ++i)
7167 map[i] = i;
7168 /* Start with non-register vars (as possibly address-taken), followed
7169 by register vars as conservative set of vars never appearing in
7170 the points-to solution bitmaps. */
7171 unsigned j = integer_id + 1;
7172 for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
7173 if (! varmap[i]->is_reg_var)
7174 map[i] = j++;
7175 for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
7176 if (varmap[i]->is_reg_var)
7177 map[i] = j++;
7178 /* Shuffle varmap according to map. */
7179 for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
7181 while (map[varmap[i]->id] != i)
7182 std::swap (varmap[i], varmap[map[varmap[i]->id]]);
7183 gcc_assert (bitmap_empty_p (varmap[i]->solution));
7184 varmap[i]->id = i;
7185 varmap[i]->next = map[varmap[i]->next];
7186 varmap[i]->head = map[varmap[i]->head];
7188 /* Finally rewrite constraints. */
7189 for (unsigned i = 0; i < constraints.length (); ++i)
7191 constraints[i]->lhs.var = map[constraints[i]->lhs.var];
7192 constraints[i]->rhs.var = map[constraints[i]->rhs.var];
7194 free (map);
7196 if (dump_file)
7197 fprintf (dump_file,
7198 "\nCollapsing static cycles and doing variable "
7199 "substitution\n");
7201 init_graph (varmap.length () * 2);
7203 if (dump_file)
7204 fprintf (dump_file, "Building predecessor graph\n");
7205 build_pred_graph ();
7207 if (dump_file)
7208 fprintf (dump_file, "Detecting pointer and location "
7209 "equivalences\n");
7210 si = perform_var_substitution (graph);
7212 if (dump_file)
7213 fprintf (dump_file, "Rewriting constraints and unifying "
7214 "variables\n");
7215 rewrite_constraints (graph, si);
7217 build_succ_graph ();
7219 free_var_substitution_info (si);
7221 /* Attach complex constraints to graph nodes. */
7222 move_complex_constraints (graph);
7224 if (dump_file)
7225 fprintf (dump_file, "Uniting pointer but not location equivalent "
7226 "variables\n");
7227 unite_pointer_equivalences (graph);
7229 if (dump_file)
7230 fprintf (dump_file, "Finding indirect cycles\n");
7231 find_indirect_cycles (graph);
7233 /* Implicit nodes and predecessors are no longer necessary at this
7234 point. */
7235 remove_preds_and_fake_succs (graph);
7237 if (dump_file && (dump_flags & TDF_GRAPH))
7239 fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
7240 "in dot format:\n");
7241 dump_constraint_graph (dump_file);
7242 fprintf (dump_file, "\n\n");
7245 if (dump_file)
7246 fprintf (dump_file, "Solving graph\n");
7248 solve_graph (graph);
7250 if (dump_file && (dump_flags & TDF_GRAPH))
7252 fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
7253 "in dot format:\n");
7254 dump_constraint_graph (dump_file);
7255 fprintf (dump_file, "\n\n");
7258 if (dump_file)
7259 dump_sa_points_to_info (dump_file);
7262 /* Create points-to sets for the current function. See the comments
7263 at the start of the file for an algorithmic overview. */
7265 static void
7266 compute_points_to_sets (void)
7268 basic_block bb;
7269 varinfo_t vi;
7271 timevar_push (TV_TREE_PTA);
7273 init_alias_vars ();
7275 intra_create_variable_infos (cfun);
7277 /* Now walk all statements and build the constraint set. */
7278 FOR_EACH_BB_FN (bb, cfun)
7280 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7281 gsi_next (&gsi))
7283 gphi *phi = gsi.phi ();
7285 if (! virtual_operand_p (gimple_phi_result (phi)))
7286 find_func_aliases (cfun, phi);
7289 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
7290 gsi_next (&gsi))
7292 gimple *stmt = gsi_stmt (gsi);
7294 find_func_aliases (cfun, stmt);
7298 if (dump_file)
7300 fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
7301 dump_constraints (dump_file, 0);
7304 /* From the constraints compute the points-to sets. */
7305 solve_constraints ();
7307 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
7308 cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl,
7309 get_varinfo (escaped_id));
7311 /* Make sure the ESCAPED solution (which is used as placeholder in
7312 other solutions) does not reference itself. This simplifies
7313 points-to solution queries. */
7314 cfun->gimple_df->escaped.escaped = 0;
7316 /* Compute the points-to sets for pointer SSA_NAMEs. */
7317 unsigned i;
7318 tree ptr;
7320 FOR_EACH_SSA_NAME (i, ptr, cfun)
7322 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
7323 find_what_p_points_to (cfun->decl, ptr);
7326 /* Compute the call-used/clobbered sets. */
7327 FOR_EACH_BB_FN (bb, cfun)
7329 gimple_stmt_iterator gsi;
7331 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
7333 gcall *stmt;
7334 struct pt_solution *pt;
7336 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
7337 if (!stmt)
7338 continue;
7340 pt = gimple_call_use_set (stmt);
7341 if (gimple_call_flags (stmt) & ECF_CONST)
7342 memset (pt, 0, sizeof (struct pt_solution));
7343 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
7345 *pt = find_what_var_points_to (cfun->decl, vi);
7346 /* Escaped (and thus nonlocal) variables are always
7347 implicitly used by calls. */
7348 /* ??? ESCAPED can be empty even though NONLOCAL
7349 always escaped. */
7350 pt->nonlocal = 1;
7351 pt->escaped = 1;
7353 else
7355 /* If there is nothing special about this call then
7356 we have made everything that is used also escape. */
7357 *pt = cfun->gimple_df->escaped;
7358 pt->nonlocal = 1;
7361 pt = gimple_call_clobber_set (stmt);
7362 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
7363 memset (pt, 0, sizeof (struct pt_solution));
7364 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
7366 *pt = find_what_var_points_to (cfun->decl, vi);
7367 /* Escaped (and thus nonlocal) variables are always
7368 implicitly clobbered by calls. */
7369 /* ??? ESCAPED can be empty even though NONLOCAL
7370 always escaped. */
7371 pt->nonlocal = 1;
7372 pt->escaped = 1;
7374 else
7376 /* If there is nothing special about this call then
7377 we have made everything that is used also escape. */
7378 *pt = cfun->gimple_df->escaped;
7379 pt->nonlocal = 1;
7384 timevar_pop (TV_TREE_PTA);
7388 /* Delete created points-to sets. */
7390 static void
7391 delete_points_to_sets (void)
7393 unsigned int i;
7395 delete shared_bitmap_table;
7396 shared_bitmap_table = NULL;
7397 if (dump_file && (dump_flags & TDF_STATS))
7398 fprintf (dump_file, "Points to sets created:%d\n",
7399 stats.points_to_sets_created);
7401 delete vi_for_tree;
7402 delete call_stmt_vars;
7403 bitmap_obstack_release (&pta_obstack);
7404 constraints.release ();
7406 for (i = 0; i < graph->size; i++)
7407 graph->complex[i].release ();
7408 free (graph->complex);
7410 free (graph->rep);
7411 free (graph->succs);
7412 free (graph->pe);
7413 free (graph->pe_rep);
7414 free (graph->indirect_cycles);
7415 free (graph);
7417 varmap.release ();
7418 variable_info_pool.release ();
7419 constraint_pool.release ();
7421 obstack_free (&fake_var_decl_obstack, NULL);
7423 delete final_solutions;
7424 obstack_free (&final_solutions_obstack, NULL);
7427 struct vls_data
7429 unsigned short clique;
7430 bool escaped_p;
7431 bitmap rvars;
7434 /* Mark "other" loads and stores as belonging to CLIQUE and with
7435 base zero. */
7437 static bool
7438 visit_loadstore (gimple *, tree base, tree ref, void *data)
7440 unsigned short clique = ((vls_data *) data)->clique;
7441 bitmap rvars = ((vls_data *) data)->rvars;
7442 bool escaped_p = ((vls_data *) data)->escaped_p;
7443 if (TREE_CODE (base) == MEM_REF
7444 || TREE_CODE (base) == TARGET_MEM_REF)
7446 tree ptr = TREE_OPERAND (base, 0);
7447 if (TREE_CODE (ptr) == SSA_NAME)
7449 /* For parameters, get at the points-to set for the actual parm
7450 decl. */
7451 if (SSA_NAME_IS_DEFAULT_DEF (ptr)
7452 && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
7453 || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
7454 ptr = SSA_NAME_VAR (ptr);
7456 /* We need to make sure 'ptr' doesn't include any of
7457 the restrict tags we added bases for in its points-to set. */
7458 varinfo_t vi = lookup_vi_for_tree (ptr);
7459 if (! vi)
7460 return false;
7462 vi = get_varinfo (find (vi->id));
7463 if (bitmap_intersect_p (rvars, vi->solution)
7464 || (escaped_p && bitmap_bit_p (vi->solution, escaped_id)))
7465 return false;
7468 /* Do not overwrite existing cliques (that includes clique, base
7469 pairs we just set). */
7470 if (MR_DEPENDENCE_CLIQUE (base) == 0)
7472 MR_DEPENDENCE_CLIQUE (base) = clique;
7473 MR_DEPENDENCE_BASE (base) = 0;
7477 /* For plain decl accesses see whether they are accesses to globals
7478 and rewrite them to MEM_REFs with { clique, 0 }. */
7479 if (VAR_P (base)
7480 && is_global_var (base)
7481 /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
7482 ops callback. */
7483 && base != ref)
7485 tree *basep = &ref;
7486 while (handled_component_p (*basep))
7487 basep = &TREE_OPERAND (*basep, 0);
7488 gcc_assert (VAR_P (*basep));
7489 tree ptr = build_fold_addr_expr (*basep);
7490 tree zero = build_int_cst (TREE_TYPE (ptr), 0);
7491 *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
7492 MR_DEPENDENCE_CLIQUE (*basep) = clique;
7493 MR_DEPENDENCE_BASE (*basep) = 0;
7496 return false;
7499 struct msdi_data {
7500 tree ptr;
7501 unsigned short *clique;
7502 unsigned short *last_ruid;
7503 varinfo_t restrict_var;
7506 /* If BASE is a MEM_REF then assign a clique, base pair to it, updating
7507 CLIQUE, *RESTRICT_VAR and LAST_RUID as passed via DATA.
7508 Return whether dependence info was assigned to BASE. */
7510 static bool
7511 maybe_set_dependence_info (gimple *, tree base, tree, void *data)
7513 tree ptr = ((msdi_data *)data)->ptr;
7514 unsigned short &clique = *((msdi_data *)data)->clique;
7515 unsigned short &last_ruid = *((msdi_data *)data)->last_ruid;
7516 varinfo_t restrict_var = ((msdi_data *)data)->restrict_var;
7517 if ((TREE_CODE (base) == MEM_REF
7518 || TREE_CODE (base) == TARGET_MEM_REF)
7519 && TREE_OPERAND (base, 0) == ptr)
7521 /* Do not overwrite existing cliques. This avoids overwriting dependence
7522 info inlined from a function with restrict parameters inlined
7523 into a function with restrict parameters. This usually means we
7524 prefer to be precise in innermost loops. */
7525 if (MR_DEPENDENCE_CLIQUE (base) == 0)
7527 if (clique == 0)
7529 if (cfun->last_clique == 0)
7530 cfun->last_clique = 1;
7531 clique = 1;
7533 if (restrict_var->ruid == 0)
7534 restrict_var->ruid = ++last_ruid;
7535 MR_DEPENDENCE_CLIQUE (base) = clique;
7536 MR_DEPENDENCE_BASE (base) = restrict_var->ruid;
7537 return true;
7540 return false;
7543 /* Clear dependence info for the clique DATA. */
7545 static bool
7546 clear_dependence_clique (gimple *, tree base, tree, void *data)
7548 unsigned short clique = (uintptr_t)data;
7549 if ((TREE_CODE (base) == MEM_REF
7550 || TREE_CODE (base) == TARGET_MEM_REF)
7551 && MR_DEPENDENCE_CLIQUE (base) == clique)
7553 MR_DEPENDENCE_CLIQUE (base) = 0;
7554 MR_DEPENDENCE_BASE (base) = 0;
7557 return false;
7560 /* Compute the set of independend memory references based on restrict
7561 tags and their conservative propagation to the points-to sets. */
7563 static void
7564 compute_dependence_clique (void)
7566 /* First clear the special "local" clique. */
7567 basic_block bb;
7568 if (cfun->last_clique != 0)
7569 FOR_EACH_BB_FN (bb, cfun)
7570 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7571 !gsi_end_p (gsi); gsi_next (&gsi))
7573 gimple *stmt = gsi_stmt (gsi);
7574 walk_stmt_load_store_ops (stmt, (void *)(uintptr_t) 1,
7575 clear_dependence_clique,
7576 clear_dependence_clique);
7579 unsigned short clique = 0;
7580 unsigned short last_ruid = 0;
7581 bitmap rvars = BITMAP_ALLOC (NULL);
7582 bool escaped_p = false;
7583 for (unsigned i = 0; i < num_ssa_names; ++i)
7585 tree ptr = ssa_name (i);
7586 if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
7587 continue;
7589 /* Avoid all this when ptr is not dereferenced? */
7590 tree p = ptr;
7591 if (SSA_NAME_IS_DEFAULT_DEF (ptr)
7592 && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
7593 || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
7594 p = SSA_NAME_VAR (ptr);
7595 varinfo_t vi = lookup_vi_for_tree (p);
7596 if (!vi)
7597 continue;
7598 vi = get_varinfo (find (vi->id));
7599 bitmap_iterator bi;
7600 unsigned j;
7601 varinfo_t restrict_var = NULL;
7602 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
7604 varinfo_t oi = get_varinfo (j);
7605 if (oi->head != j)
7606 oi = get_varinfo (oi->head);
7607 if (oi->is_restrict_var)
7609 if (restrict_var
7610 && restrict_var != oi)
7612 if (dump_file && (dump_flags & TDF_DETAILS))
7614 fprintf (dump_file, "found restrict pointed-to "
7615 "for ");
7616 print_generic_expr (dump_file, ptr);
7617 fprintf (dump_file, " but not exclusively\n");
7619 restrict_var = NULL;
7620 break;
7622 restrict_var = oi;
7624 /* NULL is the only other valid points-to entry. */
7625 else if (oi->id != nothing_id)
7627 restrict_var = NULL;
7628 break;
7631 /* Ok, found that ptr must(!) point to a single(!) restrict
7632 variable. */
7633 /* ??? PTA isn't really a proper propagation engine to compute
7634 this property.
7635 ??? We could handle merging of two restricts by unifying them. */
7636 if (restrict_var)
7638 /* Now look at possible dereferences of ptr. */
7639 imm_use_iterator ui;
7640 gimple *use_stmt;
7641 bool used = false;
7642 msdi_data data = { ptr, &clique, &last_ruid, restrict_var };
7643 FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
7644 used |= walk_stmt_load_store_ops (use_stmt, &data,
7645 maybe_set_dependence_info,
7646 maybe_set_dependence_info);
7647 if (used)
7649 /* Add all subvars to the set of restrict pointed-to set. */
7650 for (unsigned sv = restrict_var->head; sv != 0;
7651 sv = get_varinfo (sv)->next)
7652 bitmap_set_bit (rvars, sv);
7653 varinfo_t escaped = get_varinfo (find (escaped_id));
7654 if (bitmap_bit_p (escaped->solution, restrict_var->id))
7655 escaped_p = true;
7660 if (clique != 0)
7662 /* Assign the BASE id zero to all accesses not based on a restrict
7663 pointer. That way they get disambiguated against restrict
7664 accesses but not against each other. */
7665 /* ??? For restricts derived from globals (thus not incoming
7666 parameters) we can't restrict scoping properly thus the following
7667 is too aggressive there. For now we have excluded those globals from
7668 getting into the MR_DEPENDENCE machinery. */
7669 vls_data data = { clique, escaped_p, rvars };
7670 basic_block bb;
7671 FOR_EACH_BB_FN (bb, cfun)
7672 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7673 !gsi_end_p (gsi); gsi_next (&gsi))
7675 gimple *stmt = gsi_stmt (gsi);
7676 walk_stmt_load_store_ops (stmt, &data,
7677 visit_loadstore, visit_loadstore);
7681 BITMAP_FREE (rvars);
7684 /* Compute points-to information for every SSA_NAME pointer in the
7685 current function and compute the transitive closure of escaped
7686 variables to re-initialize the call-clobber states of local variables. */
7688 unsigned int
7689 compute_may_aliases (void)
7691 if (cfun->gimple_df->ipa_pta)
7693 if (dump_file)
7695 fprintf (dump_file, "\nNot re-computing points-to information "
7696 "because IPA points-to information is available.\n\n");
7698 /* But still dump what we have remaining it. */
7699 dump_alias_info (dump_file);
7702 return 0;
7705 /* For each pointer P_i, determine the sets of variables that P_i may
7706 point-to. Compute the reachability set of escaped and call-used
7707 variables. */
7708 compute_points_to_sets ();
7710 /* Debugging dumps. */
7711 if (dump_file)
7712 dump_alias_info (dump_file);
7714 /* Compute restrict-based memory disambiguations. */
7715 compute_dependence_clique ();
7717 /* Deallocate memory used by aliasing data structures and the internal
7718 points-to solution. */
7719 delete_points_to_sets ();
7721 gcc_assert (!need_ssa_update_p (cfun));
7723 return 0;
7726 /* A dummy pass to cause points-to information to be computed via
7727 TODO_rebuild_alias. */
7729 namespace {
7731 const pass_data pass_data_build_alias =
7733 GIMPLE_PASS, /* type */
7734 "alias", /* name */
7735 OPTGROUP_NONE, /* optinfo_flags */
7736 TV_NONE, /* tv_id */
7737 ( PROP_cfg | PROP_ssa ), /* properties_required */
7738 0, /* properties_provided */
7739 0, /* properties_destroyed */
7740 0, /* todo_flags_start */
7741 TODO_rebuild_alias, /* todo_flags_finish */
7744 class pass_build_alias : public gimple_opt_pass
7746 public:
7747 pass_build_alias (gcc::context *ctxt)
7748 : gimple_opt_pass (pass_data_build_alias, ctxt)
7751 /* opt_pass methods: */
7752 virtual bool gate (function *) { return flag_tree_pta; }
7754 }; // class pass_build_alias
7756 } // anon namespace
7758 gimple_opt_pass *
7759 make_pass_build_alias (gcc::context *ctxt)
7761 return new pass_build_alias (ctxt);
7764 /* A dummy pass to cause points-to information to be computed via
7765 TODO_rebuild_alias. */
7767 namespace {
7769 const pass_data pass_data_build_ealias =
7771 GIMPLE_PASS, /* type */
7772 "ealias", /* name */
7773 OPTGROUP_NONE, /* optinfo_flags */
7774 TV_NONE, /* tv_id */
7775 ( PROP_cfg | PROP_ssa ), /* properties_required */
7776 0, /* properties_provided */
7777 0, /* properties_destroyed */
7778 0, /* todo_flags_start */
7779 TODO_rebuild_alias, /* todo_flags_finish */
7782 class pass_build_ealias : public gimple_opt_pass
7784 public:
7785 pass_build_ealias (gcc::context *ctxt)
7786 : gimple_opt_pass (pass_data_build_ealias, ctxt)
7789 /* opt_pass methods: */
7790 virtual bool gate (function *) { return flag_tree_pta; }
7792 }; // class pass_build_ealias
7794 } // anon namespace
7796 gimple_opt_pass *
7797 make_pass_build_ealias (gcc::context *ctxt)
7799 return new pass_build_ealias (ctxt);
7803 /* IPA PTA solutions for ESCAPED. */
7804 struct pt_solution ipa_escaped_pt
7805 = { true, false, false, false, false,
7806 false, false, false, false, false, NULL };
7808 /* Associate node with varinfo DATA. Worker for
7809 cgraph_for_symbol_thunks_and_aliases. */
7810 static bool
7811 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
7813 if ((node->alias
7814 || (node->thunk.thunk_p
7815 && ! node->global.inlined_to))
7816 && node->analyzed
7817 && !node->ifunc_resolver)
7818 insert_vi_for_tree (node->decl, (varinfo_t)data);
7819 return false;
7822 /* Dump varinfo VI to FILE. */
7824 static void
7825 dump_varinfo (FILE *file, varinfo_t vi)
7827 if (vi == NULL)
7828 return;
7830 fprintf (file, "%u: %s\n", vi->id, vi->name);
7832 const char *sep = " ";
7833 if (vi->is_artificial_var)
7834 fprintf (file, "%sartificial", sep);
7835 if (vi->is_special_var)
7836 fprintf (file, "%sspecial", sep);
7837 if (vi->is_unknown_size_var)
7838 fprintf (file, "%sunknown-size", sep);
7839 if (vi->is_full_var)
7840 fprintf (file, "%sfull", sep);
7841 if (vi->is_heap_var)
7842 fprintf (file, "%sheap", sep);
7843 if (vi->may_have_pointers)
7844 fprintf (file, "%smay-have-pointers", sep);
7845 if (vi->only_restrict_pointers)
7846 fprintf (file, "%sonly-restrict-pointers", sep);
7847 if (vi->is_restrict_var)
7848 fprintf (file, "%sis-restrict-var", sep);
7849 if (vi->is_global_var)
7850 fprintf (file, "%sglobal", sep);
7851 if (vi->is_ipa_escape_point)
7852 fprintf (file, "%sipa-escape-point", sep);
7853 if (vi->is_fn_info)
7854 fprintf (file, "%sfn-info", sep);
7855 if (vi->ruid)
7856 fprintf (file, "%srestrict-uid:%u", sep, vi->ruid);
7857 if (vi->next)
7858 fprintf (file, "%snext:%u", sep, vi->next);
7859 if (vi->head != vi->id)
7860 fprintf (file, "%shead:%u", sep, vi->head);
7861 if (vi->offset)
7862 fprintf (file, "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset);
7863 if (vi->size != ~(unsigned HOST_WIDE_INT)0)
7864 fprintf (file, "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size);
7865 if (vi->fullsize != ~(unsigned HOST_WIDE_INT)0
7866 && vi->fullsize != vi->size)
7867 fprintf (file, "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep,
7868 vi->fullsize);
7869 fprintf (file, "\n");
7871 if (vi->solution && !bitmap_empty_p (vi->solution))
7873 bitmap_iterator bi;
7874 unsigned i;
7875 fprintf (file, " solution: {");
7876 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
7877 fprintf (file, " %u", i);
7878 fprintf (file, " }\n");
7881 if (vi->oldsolution && !bitmap_empty_p (vi->oldsolution)
7882 && !bitmap_equal_p (vi->solution, vi->oldsolution))
7884 bitmap_iterator bi;
7885 unsigned i;
7886 fprintf (file, " oldsolution: {");
7887 EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi)
7888 fprintf (file, " %u", i);
7889 fprintf (file, " }\n");
7893 /* Dump varinfo VI to stderr. */
7895 DEBUG_FUNCTION void
7896 debug_varinfo (varinfo_t vi)
7898 dump_varinfo (stderr, vi);
7901 /* Dump varmap to FILE. */
7903 static void
7904 dump_varmap (FILE *file)
7906 if (varmap.length () == 0)
7907 return;
7909 fprintf (file, "variables:\n");
7911 for (unsigned int i = 0; i < varmap.length (); ++i)
7913 varinfo_t vi = get_varinfo (i);
7914 dump_varinfo (file, vi);
7917 fprintf (file, "\n");
7920 /* Dump varmap to stderr. */
7922 DEBUG_FUNCTION void
7923 debug_varmap (void)
7925 dump_varmap (stderr);
7928 /* Compute whether node is refered to non-locally. Worker for
7929 cgraph_for_symbol_thunks_and_aliases. */
7930 static bool
7931 refered_from_nonlocal_fn (struct cgraph_node *node, void *data)
7933 bool *nonlocal_p = (bool *)data;
7934 *nonlocal_p |= (node->used_from_other_partition
7935 || node->externally_visible
7936 || node->force_output
7937 || lookup_attribute ("noipa", DECL_ATTRIBUTES (node->decl)));
7938 return false;
7941 /* Same for varpool nodes. */
7942 static bool
7943 refered_from_nonlocal_var (struct varpool_node *node, void *data)
7945 bool *nonlocal_p = (bool *)data;
7946 *nonlocal_p |= (node->used_from_other_partition
7947 || node->externally_visible
7948 || node->force_output);
7949 return false;
7952 /* Execute the driver for IPA PTA. */
7953 static unsigned int
7954 ipa_pta_execute (void)
7956 struct cgraph_node *node;
7957 varpool_node *var;
7958 unsigned int from = 0;
7960 in_ipa_mode = 1;
7962 init_alias_vars ();
7964 if (dump_file && (dump_flags & TDF_DETAILS))
7966 symtab->dump (dump_file);
7967 fprintf (dump_file, "\n");
7970 if (dump_file)
7972 fprintf (dump_file, "Generating generic constraints\n\n");
7973 dump_constraints (dump_file, from);
7974 fprintf (dump_file, "\n");
7975 from = constraints.length ();
7978 /* Build the constraints. */
7979 FOR_EACH_DEFINED_FUNCTION (node)
7981 varinfo_t vi;
7982 /* Nodes without a body are not interesting. Especially do not
7983 visit clones at this point for now - we get duplicate decls
7984 there for inline clones at least. */
7985 if (!node->has_gimple_body_p () || node->global.inlined_to)
7986 continue;
7987 node->get_body ();
7989 gcc_assert (!node->clone_of);
7991 /* For externally visible or attribute used annotated functions use
7992 local constraints for their arguments.
7993 For local functions we see all callers and thus do not need initial
7994 constraints for parameters. */
7995 bool nonlocal_p = (node->used_from_other_partition
7996 || node->externally_visible
7997 || node->force_output
7998 || lookup_attribute ("noipa",
7999 DECL_ATTRIBUTES (node->decl)));
8000 node->call_for_symbol_thunks_and_aliases (refered_from_nonlocal_fn,
8001 &nonlocal_p, true);
8003 vi = create_function_info_for (node->decl,
8004 alias_get_name (node->decl), false,
8005 nonlocal_p);
8006 if (dump_file
8007 && from != constraints.length ())
8009 fprintf (dump_file,
8010 "Generating intial constraints for %s", node->name ());
8011 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
8012 fprintf (dump_file, " (%s)",
8013 IDENTIFIER_POINTER
8014 (DECL_ASSEMBLER_NAME (node->decl)));
8015 fprintf (dump_file, "\n\n");
8016 dump_constraints (dump_file, from);
8017 fprintf (dump_file, "\n");
8019 from = constraints.length ();
8022 node->call_for_symbol_thunks_and_aliases
8023 (associate_varinfo_to_alias, vi, true);
8026 /* Create constraints for global variables and their initializers. */
8027 FOR_EACH_VARIABLE (var)
8029 if (var->alias && var->analyzed)
8030 continue;
8032 varinfo_t vi = get_vi_for_tree (var->decl);
8034 /* For the purpose of IPA PTA unit-local globals are not
8035 escape points. */
8036 bool nonlocal_p = (var->used_from_other_partition
8037 || var->externally_visible
8038 || var->force_output);
8039 var->call_for_symbol_and_aliases (refered_from_nonlocal_var,
8040 &nonlocal_p, true);
8041 if (nonlocal_p)
8042 vi->is_ipa_escape_point = true;
8045 if (dump_file
8046 && from != constraints.length ())
8048 fprintf (dump_file,
8049 "Generating constraints for global initializers\n\n");
8050 dump_constraints (dump_file, from);
8051 fprintf (dump_file, "\n");
8052 from = constraints.length ();
8055 FOR_EACH_DEFINED_FUNCTION (node)
8057 struct function *func;
8058 basic_block bb;
8060 /* Nodes without a body are not interesting. */
8061 if (!node->has_gimple_body_p () || node->clone_of)
8062 continue;
8064 if (dump_file)
8066 fprintf (dump_file,
8067 "Generating constraints for %s", node->name ());
8068 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
8069 fprintf (dump_file, " (%s)",
8070 IDENTIFIER_POINTER
8071 (DECL_ASSEMBLER_NAME (node->decl)));
8072 fprintf (dump_file, "\n");
8075 func = DECL_STRUCT_FUNCTION (node->decl);
8076 gcc_assert (cfun == NULL);
8078 /* Build constriants for the function body. */
8079 FOR_EACH_BB_FN (bb, func)
8081 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
8082 gsi_next (&gsi))
8084 gphi *phi = gsi.phi ();
8086 if (! virtual_operand_p (gimple_phi_result (phi)))
8087 find_func_aliases (func, phi);
8090 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
8091 gsi_next (&gsi))
8093 gimple *stmt = gsi_stmt (gsi);
8095 find_func_aliases (func, stmt);
8096 find_func_clobbers (func, stmt);
8100 if (dump_file)
8102 fprintf (dump_file, "\n");
8103 dump_constraints (dump_file, from);
8104 fprintf (dump_file, "\n");
8105 from = constraints.length ();
8109 /* From the constraints compute the points-to sets. */
8110 solve_constraints ();
8112 /* Now post-process solutions to handle locals from different
8113 runtime instantiations coming in through recursive invocations. */
8114 unsigned shadow_var_cnt = 0;
8115 for (unsigned i = 1; i < varmap.length (); ++i)
8117 varinfo_t fi = get_varinfo (i);
8118 if (fi->is_fn_info
8119 && fi->decl)
8120 /* Automatic variables pointed to by their containing functions
8121 parameters need this treatment. */
8122 for (varinfo_t ai = first_vi_for_offset (fi, fi_parm_base);
8123 ai; ai = vi_next (ai))
8125 varinfo_t vi = get_varinfo (find (ai->id));
8126 bitmap_iterator bi;
8127 unsigned j;
8128 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8130 varinfo_t pt = get_varinfo (j);
8131 if (pt->shadow_var_uid == 0
8132 && pt->decl
8133 && auto_var_in_fn_p (pt->decl, fi->decl))
8135 pt->shadow_var_uid = allocate_decl_uid ();
8136 shadow_var_cnt++;
8140 /* As well as global variables which are another way of passing
8141 arguments to recursive invocations. */
8142 else if (fi->is_global_var)
8144 for (varinfo_t ai = fi; ai; ai = vi_next (ai))
8146 varinfo_t vi = get_varinfo (find (ai->id));
8147 bitmap_iterator bi;
8148 unsigned j;
8149 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8151 varinfo_t pt = get_varinfo (j);
8152 if (pt->shadow_var_uid == 0
8153 && pt->decl
8154 && auto_var_p (pt->decl))
8156 pt->shadow_var_uid = allocate_decl_uid ();
8157 shadow_var_cnt++;
8163 if (shadow_var_cnt && dump_file && (dump_flags & TDF_DETAILS))
8164 fprintf (dump_file, "Allocated %u shadow variables for locals "
8165 "maybe leaking into recursive invocations of their containing "
8166 "functions\n", shadow_var_cnt);
8168 /* Compute the global points-to sets for ESCAPED.
8169 ??? Note that the computed escape set is not correct
8170 for the whole unit as we fail to consider graph edges to
8171 externally visible functions. */
8172 ipa_escaped_pt = find_what_var_points_to (NULL, get_varinfo (escaped_id));
8174 /* Make sure the ESCAPED solution (which is used as placeholder in
8175 other solutions) does not reference itself. This simplifies
8176 points-to solution queries. */
8177 ipa_escaped_pt.ipa_escaped = 0;
8179 /* Assign the points-to sets to the SSA names in the unit. */
8180 FOR_EACH_DEFINED_FUNCTION (node)
8182 tree ptr;
8183 struct function *fn;
8184 unsigned i;
8185 basic_block bb;
8187 /* Nodes without a body are not interesting. */
8188 if (!node->has_gimple_body_p () || node->clone_of)
8189 continue;
8191 fn = DECL_STRUCT_FUNCTION (node->decl);
8193 /* Compute the points-to sets for pointer SSA_NAMEs. */
8194 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
8196 if (ptr
8197 && POINTER_TYPE_P (TREE_TYPE (ptr)))
8198 find_what_p_points_to (node->decl, ptr);
8201 /* Compute the call-use and call-clobber sets for indirect calls
8202 and calls to external functions. */
8203 FOR_EACH_BB_FN (bb, fn)
8205 gimple_stmt_iterator gsi;
8207 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
8209 gcall *stmt;
8210 struct pt_solution *pt;
8211 varinfo_t vi, fi;
8212 tree decl;
8214 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
8215 if (!stmt)
8216 continue;
8218 /* Handle direct calls to functions with body. */
8219 decl = gimple_call_fndecl (stmt);
8222 tree called_decl = NULL_TREE;
8223 if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
8224 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
8225 else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
8226 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
8228 if (called_decl != NULL_TREE
8229 && !fndecl_maybe_in_other_partition (called_decl))
8230 decl = called_decl;
8233 if (decl
8234 && (fi = lookup_vi_for_tree (decl))
8235 && fi->is_fn_info)
8237 *gimple_call_clobber_set (stmt)
8238 = find_what_var_points_to
8239 (node->decl, first_vi_for_offset (fi, fi_clobbers));
8240 *gimple_call_use_set (stmt)
8241 = find_what_var_points_to
8242 (node->decl, first_vi_for_offset (fi, fi_uses));
8244 /* Handle direct calls to external functions. */
8245 else if (decl && (!fi || fi->decl))
8247 pt = gimple_call_use_set (stmt);
8248 if (gimple_call_flags (stmt) & ECF_CONST)
8249 memset (pt, 0, sizeof (struct pt_solution));
8250 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
8252 *pt = find_what_var_points_to (node->decl, vi);
8253 /* Escaped (and thus nonlocal) variables are always
8254 implicitly used by calls. */
8255 /* ??? ESCAPED can be empty even though NONLOCAL
8256 always escaped. */
8257 pt->nonlocal = 1;
8258 pt->ipa_escaped = 1;
8260 else
8262 /* If there is nothing special about this call then
8263 we have made everything that is used also escape. */
8264 *pt = ipa_escaped_pt;
8265 pt->nonlocal = 1;
8268 pt = gimple_call_clobber_set (stmt);
8269 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
8270 memset (pt, 0, sizeof (struct pt_solution));
8271 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
8273 *pt = find_what_var_points_to (node->decl, vi);
8274 /* Escaped (and thus nonlocal) variables are always
8275 implicitly clobbered by calls. */
8276 /* ??? ESCAPED can be empty even though NONLOCAL
8277 always escaped. */
8278 pt->nonlocal = 1;
8279 pt->ipa_escaped = 1;
8281 else
8283 /* If there is nothing special about this call then
8284 we have made everything that is used also escape. */
8285 *pt = ipa_escaped_pt;
8286 pt->nonlocal = 1;
8289 /* Handle indirect calls. */
8290 else if ((fi = get_fi_for_callee (stmt)))
8292 /* We need to accumulate all clobbers/uses of all possible
8293 callees. */
8294 fi = get_varinfo (find (fi->id));
8295 /* If we cannot constrain the set of functions we'll end up
8296 calling we end up using/clobbering everything. */
8297 if (bitmap_bit_p (fi->solution, anything_id)
8298 || bitmap_bit_p (fi->solution, nonlocal_id)
8299 || bitmap_bit_p (fi->solution, escaped_id))
8301 pt_solution_reset (gimple_call_clobber_set (stmt));
8302 pt_solution_reset (gimple_call_use_set (stmt));
8304 else
8306 bitmap_iterator bi;
8307 unsigned i;
8308 struct pt_solution *uses, *clobbers;
8310 uses = gimple_call_use_set (stmt);
8311 clobbers = gimple_call_clobber_set (stmt);
8312 memset (uses, 0, sizeof (struct pt_solution));
8313 memset (clobbers, 0, sizeof (struct pt_solution));
8314 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
8316 struct pt_solution sol;
8318 vi = get_varinfo (i);
8319 if (!vi->is_fn_info)
8321 /* ??? We could be more precise here? */
8322 uses->nonlocal = 1;
8323 uses->ipa_escaped = 1;
8324 clobbers->nonlocal = 1;
8325 clobbers->ipa_escaped = 1;
8326 continue;
8329 if (!uses->anything)
8331 sol = find_what_var_points_to
8332 (node->decl,
8333 first_vi_for_offset (vi, fi_uses));
8334 pt_solution_ior_into (uses, &sol);
8336 if (!clobbers->anything)
8338 sol = find_what_var_points_to
8339 (node->decl,
8340 first_vi_for_offset (vi, fi_clobbers));
8341 pt_solution_ior_into (clobbers, &sol);
8346 else
8347 gcc_unreachable ();
8351 fn->gimple_df->ipa_pta = true;
8353 /* We have to re-set the final-solution cache after each function
8354 because what is a "global" is dependent on function context. */
8355 final_solutions->empty ();
8356 obstack_free (&final_solutions_obstack, NULL);
8357 gcc_obstack_init (&final_solutions_obstack);
8360 delete_points_to_sets ();
8362 in_ipa_mode = 0;
8364 return 0;
8367 namespace {
8369 const pass_data pass_data_ipa_pta =
8371 SIMPLE_IPA_PASS, /* type */
8372 "pta", /* name */
8373 OPTGROUP_NONE, /* optinfo_flags */
8374 TV_IPA_PTA, /* tv_id */
8375 0, /* properties_required */
8376 0, /* properties_provided */
8377 0, /* properties_destroyed */
8378 0, /* todo_flags_start */
8379 0, /* todo_flags_finish */
8382 class pass_ipa_pta : public simple_ipa_opt_pass
8384 public:
8385 pass_ipa_pta (gcc::context *ctxt)
8386 : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
8389 /* opt_pass methods: */
8390 virtual bool gate (function *)
8392 return (optimize
8393 && flag_ipa_pta
8394 /* Don't bother doing anything if the program has errors. */
8395 && !seen_error ());
8398 opt_pass * clone () { return new pass_ipa_pta (m_ctxt); }
8400 virtual unsigned int execute (function *) { return ipa_pta_execute (); }
8402 }; // class pass_ipa_pta
8404 } // anon namespace
8406 simple_ipa_opt_pass *
8407 make_pass_ipa_pta (gcc::context *ctxt)
8409 return new pass_ipa_pta (ctxt);