c++: wrong looser excep spec for dep noexcept [PR113158]
[official-gcc.git] / gcc / tree-ssa-sccvn.cc
blob38b806649cd92ffad8bfcbe553167c85228a958a
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2024 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "splay-tree.h"
25 #include "backend.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-iterator.h"
43 #include "gimple-fold.h"
44 #include "tree-eh.h"
45 #include "gimplify.h"
46 #include "flags.h"
47 #include "dojump.h"
48 #include "explow.h"
49 #include "calls.h"
50 #include "varasm.h"
51 #include "stmt.h"
52 #include "expr.h"
53 #include "tree-dfa.h"
54 #include "tree-ssa.h"
55 #include "dumpfile.h"
56 #include "cfgloop.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-match.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "tree-pass.h"
64 #include "statistics.h"
65 #include "langhooks.h"
66 #include "ipa-utils.h"
67 #include "dbgcnt.h"
68 #include "tree-cfgcleanup.h"
69 #include "tree-ssa-loop.h"
70 #include "tree-scalar-evolution.h"
71 #include "tree-ssa-loop-niter.h"
72 #include "builtins.h"
73 #include "fold-const-call.h"
74 #include "ipa-modref-tree.h"
75 #include "ipa-modref.h"
76 #include "tree-ssa-sccvn.h"
77 #include "alloc-pool.h"
78 #include "symbol-summary.h"
79 #include "ipa-prop.h"
80 #include "target.h"
82 /* This algorithm is based on the SCC algorithm presented by Keith
83 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
84 (http://citeseer.ist.psu.edu/41805.html). In
85 straight line code, it is equivalent to a regular hash based value
86 numbering that is performed in reverse postorder.
88 For code with cycles, there are two alternatives, both of which
89 require keeping the hashtables separate from the actual list of
90 value numbers for SSA names.
92 1. Iterate value numbering in an RPO walk of the blocks, removing
93 all the entries from the hashtable after each iteration (but
94 keeping the SSA name->value number mapping between iterations).
95 Iterate until it does not change.
97 2. Perform value numbering as part of an SCC walk on the SSA graph,
98 iterating only the cycles in the SSA graph until they do not change
99 (using a separate, optimistic hashtable for value numbering the SCC
100 operands).
102 The second is not just faster in practice (because most SSA graph
103 cycles do not involve all the variables in the graph), it also has
104 some nice properties.
106 One of these nice properties is that when we pop an SCC off the
107 stack, we are guaranteed to have processed all the operands coming from
108 *outside of that SCC*, so we do not need to do anything special to
109 ensure they have value numbers.
111 Another nice property is that the SCC walk is done as part of a DFS
112 of the SSA graph, which makes it easy to perform combining and
113 simplifying operations at the same time.
115 The code below is deliberately written in a way that makes it easy
116 to separate the SCC walk from the other work it does.
118 In order to propagate constants through the code, we track which
119 expressions contain constants, and use those while folding. In
120 theory, we could also track expressions whose value numbers are
121 replaced, in case we end up folding based on expression
122 identities.
124 In order to value number memory, we assign value numbers to vuses.
125 This enables us to note that, for example, stores to the same
126 address of the same value from the same starting memory states are
127 equivalent.
128 TODO:
130 1. We can iterate only the changing portions of the SCC's, but
131 I have not seen an SCC big enough for this to be a win.
132 2. If you differentiate between phi nodes for loops and phi nodes
133 for if-then-else, you can properly consider phi nodes in different
134 blocks for equivalence.
135 3. We could value number vuses in more cases, particularly, whole
136 structure copies.
139 /* There's no BB_EXECUTABLE but we can use BB_VISITED. */
140 #define BB_EXECUTABLE BB_VISITED
142 static vn_lookup_kind default_vn_walk_kind;
144 /* vn_nary_op hashtable helpers. */
146 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
148 typedef vn_nary_op_s *compare_type;
149 static inline hashval_t hash (const vn_nary_op_s *);
150 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
153 /* Return the computed hashcode for nary operation P1. */
155 inline hashval_t
156 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
158 return vno1->hashcode;
161 /* Compare nary operations P1 and P2 and return true if they are
162 equivalent. */
164 inline bool
165 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
167 return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
170 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
171 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
174 /* vn_phi hashtable helpers. */
176 static int
177 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
179 struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
181 static inline hashval_t hash (const vn_phi_s *);
182 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
185 /* Return the computed hashcode for phi operation P1. */
187 inline hashval_t
188 vn_phi_hasher::hash (const vn_phi_s *vp1)
190 return vp1->hashcode;
193 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
195 inline bool
196 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
198 return vp1 == vp2 || vn_phi_eq (vp1, vp2);
201 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
202 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
205 /* Compare two reference operands P1 and P2 for equality. Return true if
206 they are equal, and false otherwise. */
208 static int
209 vn_reference_op_eq (const void *p1, const void *p2)
211 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
212 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
214 return (vro1->opcode == vro2->opcode
215 /* We do not care for differences in type qualification. */
216 && (vro1->type == vro2->type
217 || (vro1->type && vro2->type
218 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
219 TYPE_MAIN_VARIANT (vro2->type))))
220 && expressions_equal_p (vro1->op0, vro2->op0)
221 && expressions_equal_p (vro1->op1, vro2->op1)
222 && expressions_equal_p (vro1->op2, vro2->op2)
223 && (vro1->opcode != CALL_EXPR || vro1->clique == vro2->clique));
226 /* Free a reference operation structure VP. */
228 static inline void
229 free_reference (vn_reference_s *vr)
231 vr->operands.release ();
235 /* vn_reference hashtable helpers. */
237 struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
239 static inline hashval_t hash (const vn_reference_s *);
240 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
243 /* Return the hashcode for a given reference operation P1. */
245 inline hashval_t
246 vn_reference_hasher::hash (const vn_reference_s *vr1)
248 return vr1->hashcode;
251 inline bool
252 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
254 return v == c || vn_reference_eq (v, c);
257 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
258 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
260 /* Pretty-print OPS to OUTFILE. */
262 void
263 print_vn_reference_ops (FILE *outfile, const vec<vn_reference_op_s> ops)
265 vn_reference_op_t vro;
266 unsigned int i;
267 fprintf (outfile, "{");
268 for (i = 0; ops.iterate (i, &vro); i++)
270 bool closebrace = false;
271 if (vro->opcode != SSA_NAME
272 && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
274 fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
275 if (vro->op0 || vro->opcode == CALL_EXPR)
277 fprintf (outfile, "<");
278 closebrace = true;
281 if (vro->op0 || vro->opcode == CALL_EXPR)
283 if (!vro->op0)
284 fprintf (outfile, internal_fn_name ((internal_fn)vro->clique));
285 else
286 print_generic_expr (outfile, vro->op0);
287 if (vro->op1)
289 fprintf (outfile, ",");
290 print_generic_expr (outfile, vro->op1);
292 if (vro->op2)
294 fprintf (outfile, ",");
295 print_generic_expr (outfile, vro->op2);
298 if (closebrace)
299 fprintf (outfile, ">");
300 if (i != ops.length () - 1)
301 fprintf (outfile, ",");
303 fprintf (outfile, "}");
306 DEBUG_FUNCTION void
307 debug_vn_reference_ops (const vec<vn_reference_op_s> ops)
309 print_vn_reference_ops (stderr, ops);
310 fputc ('\n', stderr);
313 /* The set of VN hashtables. */
315 typedef struct vn_tables_s
317 vn_nary_op_table_type *nary;
318 vn_phi_table_type *phis;
319 vn_reference_table_type *references;
320 } *vn_tables_t;
323 /* vn_constant hashtable helpers. */
325 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
327 static inline hashval_t hash (const vn_constant_s *);
328 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
331 /* Hash table hash function for vn_constant_t. */
333 inline hashval_t
334 vn_constant_hasher::hash (const vn_constant_s *vc1)
336 return vc1->hashcode;
339 /* Hash table equality function for vn_constant_t. */
341 inline bool
342 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
344 if (vc1->hashcode != vc2->hashcode)
345 return false;
347 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
350 static hash_table<vn_constant_hasher> *constant_to_value_id;
353 /* Obstack we allocate the vn-tables elements from. */
354 static obstack vn_tables_obstack;
355 /* Special obstack we never unwind. */
356 static obstack vn_tables_insert_obstack;
358 static vn_reference_t last_inserted_ref;
359 static vn_phi_t last_inserted_phi;
360 static vn_nary_op_t last_inserted_nary;
361 static vn_ssa_aux_t last_pushed_avail;
363 /* Valid hashtables storing information we have proven to be
364 correct. */
365 static vn_tables_t valid_info;
368 /* Valueization hook for simplify_replace_tree. Valueize NAME if it is
369 an SSA name, otherwise just return it. */
370 tree (*vn_valueize) (tree);
371 static tree
372 vn_valueize_for_srt (tree t, void* context ATTRIBUTE_UNUSED)
374 basic_block saved_vn_context_bb = vn_context_bb;
375 /* Look for sth available at the definition block of the argument.
376 This avoids inconsistencies between availability there which
377 decides if the stmt can be removed and availability at the
378 use site. The SSA property ensures that things available
379 at the definition are also available at uses. */
380 if (!SSA_NAME_IS_DEFAULT_DEF (t))
381 vn_context_bb = gimple_bb (SSA_NAME_DEF_STMT (t));
382 tree res = vn_valueize (t);
383 vn_context_bb = saved_vn_context_bb;
384 return res;
388 /* This represents the top of the VN lattice, which is the universal
389 value. */
391 tree VN_TOP;
393 /* Unique counter for our value ids. */
395 static unsigned int next_value_id;
396 static int next_constant_value_id;
399 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
400 are allocated on an obstack for locality reasons, and to free them
401 without looping over the vec. */
403 struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
405 typedef vn_ssa_aux_t value_type;
406 typedef tree compare_type;
407 static inline hashval_t hash (const value_type &);
408 static inline bool equal (const value_type &, const compare_type &);
409 static inline void mark_deleted (value_type &) {}
410 static const bool empty_zero_p = true;
411 static inline void mark_empty (value_type &e) { e = NULL; }
412 static inline bool is_deleted (value_type &) { return false; }
413 static inline bool is_empty (value_type &e) { return e == NULL; }
416 hashval_t
417 vn_ssa_aux_hasher::hash (const value_type &entry)
419 return SSA_NAME_VERSION (entry->name);
422 bool
423 vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
425 return name == entry->name;
428 static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
429 typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
430 static struct obstack vn_ssa_aux_obstack;
432 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
433 static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
434 vn_nary_op_table_type *);
435 static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
436 enum tree_code, tree, tree *);
437 static tree vn_lookup_simplify_result (gimple_match_op *);
438 static vn_reference_t vn_reference_lookup_or_insert_for_pieces
439 (tree, alias_set_type, alias_set_type, tree,
440 vec<vn_reference_op_s, va_heap>, tree);
442 /* Return whether there is value numbering information for a given SSA name. */
444 bool
445 has_VN_INFO (tree name)
447 return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
450 vn_ssa_aux_t
451 VN_INFO (tree name)
453 vn_ssa_aux_t *res
454 = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
455 INSERT);
456 if (*res != NULL)
457 return *res;
459 vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
460 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
461 newinfo->name = name;
462 newinfo->valnum = VN_TOP;
463 /* We are using the visited flag to handle uses with defs not within the
464 region being value-numbered. */
465 newinfo->visited = false;
467 /* Given we create the VN_INFOs on-demand now we have to do initialization
468 different than VN_TOP here. */
469 if (SSA_NAME_IS_DEFAULT_DEF (name))
470 switch (TREE_CODE (SSA_NAME_VAR (name)))
472 case VAR_DECL:
473 /* All undefined vars are VARYING. */
474 newinfo->valnum = name;
475 newinfo->visited = true;
476 break;
478 case PARM_DECL:
479 /* Parameters are VARYING but we can record a condition
480 if we know it is a non-NULL pointer. */
481 newinfo->visited = true;
482 newinfo->valnum = name;
483 if (POINTER_TYPE_P (TREE_TYPE (name))
484 && nonnull_arg_p (SSA_NAME_VAR (name)))
486 tree ops[2];
487 ops[0] = name;
488 ops[1] = build_int_cst (TREE_TYPE (name), 0);
489 vn_nary_op_t nary;
490 /* Allocate from non-unwinding stack. */
491 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
492 init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
493 boolean_type_node, ops);
494 nary->predicated_values = 0;
495 nary->u.result = boolean_true_node;
496 vn_nary_op_insert_into (nary, valid_info->nary);
497 gcc_assert (nary->unwind_to == NULL);
498 /* Also do not link it into the undo chain. */
499 last_inserted_nary = nary->next;
500 nary->next = (vn_nary_op_t)(void *)-1;
501 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
502 init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
503 boolean_type_node, ops);
504 nary->predicated_values = 0;
505 nary->u.result = boolean_false_node;
506 vn_nary_op_insert_into (nary, valid_info->nary);
507 gcc_assert (nary->unwind_to == NULL);
508 last_inserted_nary = nary->next;
509 nary->next = (vn_nary_op_t)(void *)-1;
510 if (dump_file && (dump_flags & TDF_DETAILS))
512 fprintf (dump_file, "Recording ");
513 print_generic_expr (dump_file, name, TDF_SLIM);
514 fprintf (dump_file, " != 0\n");
517 break;
519 case RESULT_DECL:
520 /* If the result is passed by invisible reference the default
521 def is initialized, otherwise it's uninitialized. Still
522 undefined is varying. */
523 newinfo->visited = true;
524 newinfo->valnum = name;
525 break;
527 default:
528 gcc_unreachable ();
530 return newinfo;
533 /* Return the SSA value of X. */
535 inline tree
536 SSA_VAL (tree x, bool *visited = NULL)
538 vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
539 if (visited)
540 *visited = tem && tem->visited;
541 return tem && tem->visited ? tem->valnum : x;
544 /* Return the SSA value of the VUSE x, supporting released VDEFs
545 during elimination which will value-number the VDEF to the
546 associated VUSE (but not substitute in the whole lattice). */
548 static inline tree
549 vuse_ssa_val (tree x)
551 if (!x)
552 return NULL_TREE;
556 x = SSA_VAL (x);
557 gcc_assert (x != VN_TOP);
559 while (SSA_NAME_IN_FREE_LIST (x));
561 return x;
564 /* Similar to the above but used as callback for walk_non_aliased_vuses
565 and thus should stop at unvisited VUSE to not walk across region
566 boundaries. */
568 static tree
569 vuse_valueize (tree vuse)
573 bool visited;
574 vuse = SSA_VAL (vuse, &visited);
575 if (!visited)
576 return NULL_TREE;
577 gcc_assert (vuse != VN_TOP);
579 while (SSA_NAME_IN_FREE_LIST (vuse));
580 return vuse;
584 /* Return the vn_kind the expression computed by the stmt should be
585 associated with. */
587 enum vn_kind
588 vn_get_stmt_kind (gimple *stmt)
590 switch (gimple_code (stmt))
592 case GIMPLE_CALL:
593 return VN_REFERENCE;
594 case GIMPLE_PHI:
595 return VN_PHI;
596 case GIMPLE_ASSIGN:
598 enum tree_code code = gimple_assign_rhs_code (stmt);
599 tree rhs1 = gimple_assign_rhs1 (stmt);
600 switch (get_gimple_rhs_class (code))
602 case GIMPLE_UNARY_RHS:
603 case GIMPLE_BINARY_RHS:
604 case GIMPLE_TERNARY_RHS:
605 return VN_NARY;
606 case GIMPLE_SINGLE_RHS:
607 switch (TREE_CODE_CLASS (code))
609 case tcc_reference:
610 /* VOP-less references can go through unary case. */
611 if ((code == REALPART_EXPR
612 || code == IMAGPART_EXPR
613 || code == VIEW_CONVERT_EXPR
614 || code == BIT_FIELD_REF)
615 && (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME
616 || is_gimple_min_invariant (TREE_OPERAND (rhs1, 0))))
617 return VN_NARY;
619 /* Fallthrough. */
620 case tcc_declaration:
621 return VN_REFERENCE;
623 case tcc_constant:
624 return VN_CONSTANT;
626 default:
627 if (code == ADDR_EXPR)
628 return (is_gimple_min_invariant (rhs1)
629 ? VN_CONSTANT : VN_REFERENCE);
630 else if (code == CONSTRUCTOR)
631 return VN_NARY;
632 return VN_NONE;
634 default:
635 return VN_NONE;
638 default:
639 return VN_NONE;
643 /* Lookup a value id for CONSTANT and return it. If it does not
644 exist returns 0. */
646 unsigned int
647 get_constant_value_id (tree constant)
649 vn_constant_s **slot;
650 struct vn_constant_s vc;
652 vc.hashcode = vn_hash_constant_with_type (constant);
653 vc.constant = constant;
654 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
655 if (slot)
656 return (*slot)->value_id;
657 return 0;
660 /* Lookup a value id for CONSTANT, and if it does not exist, create a
661 new one and return it. If it does exist, return it. */
663 unsigned int
664 get_or_alloc_constant_value_id (tree constant)
666 vn_constant_s **slot;
667 struct vn_constant_s vc;
668 vn_constant_t vcp;
670 /* If the hashtable isn't initialized we're not running from PRE and thus
671 do not need value-ids. */
672 if (!constant_to_value_id)
673 return 0;
675 vc.hashcode = vn_hash_constant_with_type (constant);
676 vc.constant = constant;
677 slot = constant_to_value_id->find_slot (&vc, INSERT);
678 if (*slot)
679 return (*slot)->value_id;
681 vcp = XNEW (struct vn_constant_s);
682 vcp->hashcode = vc.hashcode;
683 vcp->constant = constant;
684 vcp->value_id = get_next_constant_value_id ();
685 *slot = vcp;
686 return vcp->value_id;
689 /* Compute the hash for a reference operand VRO1. */
691 static void
692 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
694 hstate.add_int (vro1->opcode);
695 if (vro1->opcode == CALL_EXPR && !vro1->op0)
696 hstate.add_int (vro1->clique);
697 if (vro1->op0)
698 inchash::add_expr (vro1->op0, hstate);
699 if (vro1->op1)
700 inchash::add_expr (vro1->op1, hstate);
701 if (vro1->op2)
702 inchash::add_expr (vro1->op2, hstate);
705 /* Compute a hash for the reference operation VR1 and return it. */
707 static hashval_t
708 vn_reference_compute_hash (const vn_reference_t vr1)
710 inchash::hash hstate;
711 hashval_t result;
712 int i;
713 vn_reference_op_t vro;
714 poly_int64 off = -1;
715 bool deref = false;
717 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
719 if (vro->opcode == MEM_REF)
720 deref = true;
721 else if (vro->opcode != ADDR_EXPR)
722 deref = false;
723 if (maybe_ne (vro->off, -1))
725 if (known_eq (off, -1))
726 off = 0;
727 off += vro->off;
729 else
731 if (maybe_ne (off, -1)
732 && maybe_ne (off, 0))
733 hstate.add_poly_int (off);
734 off = -1;
735 if (deref
736 && vro->opcode == ADDR_EXPR)
738 if (vro->op0)
740 tree op = TREE_OPERAND (vro->op0, 0);
741 hstate.add_int (TREE_CODE (op));
742 inchash::add_expr (op, hstate);
745 else
746 vn_reference_op_compute_hash (vro, hstate);
749 result = hstate.end ();
750 /* ??? We would ICE later if we hash instead of adding that in. */
751 if (vr1->vuse)
752 result += SSA_NAME_VERSION (vr1->vuse);
754 return result;
757 /* Return true if reference operations VR1 and VR2 are equivalent. This
758 means they have the same set of operands and vuses. */
760 bool
761 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
763 unsigned i, j;
765 /* Early out if this is not a hash collision. */
766 if (vr1->hashcode != vr2->hashcode)
767 return false;
769 /* The VOP needs to be the same. */
770 if (vr1->vuse != vr2->vuse)
771 return false;
773 /* If the operands are the same we are done. */
774 if (vr1->operands == vr2->operands)
775 return true;
777 if (!vr1->type || !vr2->type)
779 if (vr1->type != vr2->type)
780 return false;
782 else if (vr1->type == vr2->type)
784 else if (COMPLETE_TYPE_P (vr1->type) != COMPLETE_TYPE_P (vr2->type)
785 || (COMPLETE_TYPE_P (vr1->type)
786 && !expressions_equal_p (TYPE_SIZE (vr1->type),
787 TYPE_SIZE (vr2->type))))
788 return false;
789 else if (vr1->operands[0].opcode == CALL_EXPR
790 && !types_compatible_p (vr1->type, vr2->type))
791 return false;
792 else if (INTEGRAL_TYPE_P (vr1->type)
793 && INTEGRAL_TYPE_P (vr2->type))
795 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
796 return false;
798 else if (INTEGRAL_TYPE_P (vr1->type)
799 && (TYPE_PRECISION (vr1->type)
800 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
801 return false;
802 else if (INTEGRAL_TYPE_P (vr2->type)
803 && (TYPE_PRECISION (vr2->type)
804 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
805 return false;
806 else if (VECTOR_BOOLEAN_TYPE_P (vr1->type)
807 && VECTOR_BOOLEAN_TYPE_P (vr2->type))
809 /* Vector boolean types can have padding, verify we are dealing with
810 the same number of elements, aka the precision of the types.
811 For example, In most architecture the precision_size of vbool*_t
812 types are caculated like below:
813 precision_size = type_size * 8
815 Unfortunately, the RISC-V will adjust the precision_size for the
816 vbool*_t in order to align the ISA as below:
817 type_size = [1, 1, 1, 1, 2, 4, 8]
818 precision_size = [1, 2, 4, 8, 16, 32, 64]
820 Then the precision_size of RISC-V vbool*_t will not be the multiple
821 of the type_size. We take care of this case consolidated here. */
822 if (maybe_ne (TYPE_VECTOR_SUBPARTS (vr1->type),
823 TYPE_VECTOR_SUBPARTS (vr2->type)))
824 return false;
827 i = 0;
828 j = 0;
831 poly_int64 off1 = 0, off2 = 0;
832 vn_reference_op_t vro1, vro2;
833 vn_reference_op_s tem1, tem2;
834 bool deref1 = false, deref2 = false;
835 bool reverse1 = false, reverse2 = false;
836 for (; vr1->operands.iterate (i, &vro1); i++)
838 if (vro1->opcode == MEM_REF)
839 deref1 = true;
840 /* Do not look through a storage order barrier. */
841 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
842 return false;
843 reverse1 |= vro1->reverse;
844 if (known_eq (vro1->off, -1))
845 break;
846 off1 += vro1->off;
848 for (; vr2->operands.iterate (j, &vro2); j++)
850 if (vro2->opcode == MEM_REF)
851 deref2 = true;
852 /* Do not look through a storage order barrier. */
853 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
854 return false;
855 reverse2 |= vro2->reverse;
856 if (known_eq (vro2->off, -1))
857 break;
858 off2 += vro2->off;
860 if (maybe_ne (off1, off2) || reverse1 != reverse2)
861 return false;
862 if (deref1 && vro1->opcode == ADDR_EXPR)
864 memset (&tem1, 0, sizeof (tem1));
865 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
866 tem1.type = TREE_TYPE (tem1.op0);
867 tem1.opcode = TREE_CODE (tem1.op0);
868 vro1 = &tem1;
869 deref1 = false;
871 if (deref2 && vro2->opcode == ADDR_EXPR)
873 memset (&tem2, 0, sizeof (tem2));
874 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
875 tem2.type = TREE_TYPE (tem2.op0);
876 tem2.opcode = TREE_CODE (tem2.op0);
877 vro2 = &tem2;
878 deref2 = false;
880 if (deref1 != deref2)
881 return false;
882 if (!vn_reference_op_eq (vro1, vro2))
883 return false;
884 ++j;
885 ++i;
887 while (vr1->operands.length () != i
888 || vr2->operands.length () != j);
890 return true;
893 /* Copy the operations present in load/store REF into RESULT, a vector of
894 vn_reference_op_s's. */
896 static void
897 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
899 /* For non-calls, store the information that makes up the address. */
900 tree orig = ref;
901 unsigned start = result->length ();
902 bool seen_variable_array_ref = false;
903 while (ref)
905 vn_reference_op_s temp;
907 memset (&temp, 0, sizeof (temp));
908 temp.type = TREE_TYPE (ref);
909 temp.opcode = TREE_CODE (ref);
910 temp.off = -1;
912 switch (temp.opcode)
914 case MODIFY_EXPR:
915 temp.op0 = TREE_OPERAND (ref, 1);
916 break;
917 case WITH_SIZE_EXPR:
918 temp.op0 = TREE_OPERAND (ref, 1);
919 temp.off = 0;
920 break;
921 case MEM_REF:
922 /* The base address gets its own vn_reference_op_s structure. */
923 temp.op0 = TREE_OPERAND (ref, 1);
924 if (!mem_ref_offset (ref).to_shwi (&temp.off))
925 temp.off = -1;
926 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
927 temp.base = MR_DEPENDENCE_BASE (ref);
928 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
929 break;
930 case TARGET_MEM_REF:
931 /* The base address gets its own vn_reference_op_s structure. */
932 temp.op0 = TMR_INDEX (ref);
933 temp.op1 = TMR_STEP (ref);
934 temp.op2 = TMR_OFFSET (ref);
935 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
936 temp.base = MR_DEPENDENCE_BASE (ref);
937 result->safe_push (temp);
938 memset (&temp, 0, sizeof (temp));
939 temp.type = NULL_TREE;
940 temp.opcode = ERROR_MARK;
941 temp.op0 = TMR_INDEX2 (ref);
942 temp.off = -1;
943 break;
944 case BIT_FIELD_REF:
945 /* Record bits, position and storage order. */
946 temp.op0 = TREE_OPERAND (ref, 1);
947 temp.op1 = TREE_OPERAND (ref, 2);
948 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
949 temp.off = -1;
950 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
951 break;
952 case COMPONENT_REF:
953 /* The field decl is enough to unambiguously specify the field,
954 so use its type here. */
955 temp.type = TREE_TYPE (TREE_OPERAND (ref, 1));
956 temp.op0 = TREE_OPERAND (ref, 1);
957 temp.op1 = TREE_OPERAND (ref, 2);
958 temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
959 && TYPE_REVERSE_STORAGE_ORDER
960 (TREE_TYPE (TREE_OPERAND (ref, 0))));
962 tree this_offset = component_ref_field_offset (ref);
963 if (this_offset
964 && poly_int_tree_p (this_offset))
966 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
967 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
969 poly_offset_int off
970 = (wi::to_poly_offset (this_offset)
971 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
972 /* Probibit value-numbering zero offset components
973 of addresses the same before the pass folding
974 __builtin_object_size had a chance to run. */
975 if (TREE_CODE (orig) != ADDR_EXPR
976 || maybe_ne (off, 0)
977 || (cfun->curr_properties & PROP_objsz))
978 off.to_shwi (&temp.off);
982 break;
983 case ARRAY_RANGE_REF:
984 case ARRAY_REF:
986 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
987 /* Record index as operand. */
988 temp.op0 = TREE_OPERAND (ref, 1);
989 /* When the index is not constant we have to apply the same
990 logic as get_ref_base_and_extent which eventually uses
991 global ranges to refine the overall ref extent. Record
992 we've seen such a case, fixup below. */
993 if (TREE_CODE (temp.op0) == SSA_NAME)
994 seen_variable_array_ref = true;
995 /* Always record lower bounds and element size. */
996 temp.op1 = array_ref_low_bound (ref);
997 /* But record element size in units of the type alignment. */
998 temp.op2 = TREE_OPERAND (ref, 3);
999 temp.align = eltype->type_common.align;
1000 if (! temp.op2)
1001 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
1002 size_int (TYPE_ALIGN_UNIT (eltype)));
1003 if (poly_int_tree_p (temp.op0)
1004 && poly_int_tree_p (temp.op1)
1005 && TREE_CODE (temp.op2) == INTEGER_CST)
1007 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
1008 - wi::to_poly_offset (temp.op1))
1009 * wi::to_offset (temp.op2)
1010 * vn_ref_op_align_unit (&temp));
1011 off.to_shwi (&temp.off);
1013 temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
1014 && TYPE_REVERSE_STORAGE_ORDER
1015 (TREE_TYPE (TREE_OPERAND (ref, 0))));
1017 break;
1018 case VAR_DECL:
1019 if (DECL_HARD_REGISTER (ref))
1021 temp.op0 = ref;
1022 break;
1024 /* Fallthru. */
1025 case PARM_DECL:
1026 case CONST_DECL:
1027 case RESULT_DECL:
1028 /* Canonicalize decls to MEM[&decl] which is what we end up with
1029 when valueizing MEM[ptr] with ptr = &decl. */
1030 temp.opcode = MEM_REF;
1031 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
1032 temp.off = 0;
1033 result->safe_push (temp);
1034 temp.opcode = ADDR_EXPR;
1035 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
1036 temp.type = TREE_TYPE (temp.op0);
1037 temp.off = -1;
1038 break;
1039 case STRING_CST:
1040 case INTEGER_CST:
1041 case POLY_INT_CST:
1042 case COMPLEX_CST:
1043 case VECTOR_CST:
1044 case REAL_CST:
1045 case FIXED_CST:
1046 case CONSTRUCTOR:
1047 case SSA_NAME:
1048 temp.op0 = ref;
1049 break;
1050 case ADDR_EXPR:
1051 if (is_gimple_min_invariant (ref))
1053 temp.op0 = ref;
1054 break;
1056 break;
1057 /* These are only interesting for their operands, their
1058 existence, and their type. They will never be the last
1059 ref in the chain of references (IE they require an
1060 operand), so we don't have to put anything
1061 for op* as it will be handled by the iteration */
1062 case REALPART_EXPR:
1063 temp.off = 0;
1064 break;
1065 case VIEW_CONVERT_EXPR:
1066 temp.off = 0;
1067 temp.reverse = storage_order_barrier_p (ref);
1068 break;
1069 case IMAGPART_EXPR:
1070 /* This is only interesting for its constant offset. */
1071 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
1072 break;
1073 default:
1074 gcc_unreachable ();
1076 result->safe_push (temp);
1078 if (REFERENCE_CLASS_P (ref)
1079 || TREE_CODE (ref) == MODIFY_EXPR
1080 || TREE_CODE (ref) == WITH_SIZE_EXPR
1081 || (TREE_CODE (ref) == ADDR_EXPR
1082 && !is_gimple_min_invariant (ref)))
1083 ref = TREE_OPERAND (ref, 0);
1084 else
1085 ref = NULL_TREE;
1087 poly_int64 offset, size, max_size;
1088 tree base;
1089 bool rev;
1090 if (seen_variable_array_ref
1091 && handled_component_p (orig)
1092 && (base = get_ref_base_and_extent (orig,
1093 &offset, &size, &max_size, &rev))
1094 && known_size_p (max_size)
1095 && known_eq (size, max_size))
1097 poly_int64 orig_offset = offset;
1098 poly_int64 tem;
1099 if (TREE_CODE (base) == MEM_REF
1100 && mem_ref_offset (base).to_shwi (&tem))
1101 offset += tem * BITS_PER_UNIT;
1102 HOST_WIDE_INT coffset = offset.to_constant ();
1103 /* When get_ref_base_and_extent computes an offset constrained to
1104 a constant position we have to fixup variable array indexes in
1105 the ref to avoid the situation where based on context we'd have
1106 to value-number the same vn_reference ops differently. Make
1107 the vn_reference ops differ by adjusting those indexes to
1108 appropriate constants. */
1109 poly_int64 off = 0;
1110 bool oob_index = false;
1111 for (unsigned i = result->length (); i > start; --i)
1113 auto &op = (*result)[i-1];
1114 if (flag_checking
1115 && op.opcode == ARRAY_REF
1116 && TREE_CODE (op.op0) == INTEGER_CST)
1118 /* The verifier below chokes on inconsistencies of handling
1119 out-of-bound accesses so disable it in that case. */
1120 tree atype = (*result)[i].type;
1121 if (TREE_CODE (atype) == ARRAY_TYPE)
1122 if (tree dom = TYPE_DOMAIN (atype))
1123 if ((TYPE_MIN_VALUE (dom)
1124 && TREE_CODE (TYPE_MIN_VALUE (dom)) == INTEGER_CST
1125 && (wi::to_widest (op.op0)
1126 < wi::to_widest (TYPE_MIN_VALUE (dom))))
1127 || (TYPE_MAX_VALUE (dom)
1128 && TREE_CODE (TYPE_MAX_VALUE (dom)) == INTEGER_CST
1129 && (wi::to_widest (op.op0)
1130 > wi::to_widest (TYPE_MAX_VALUE (dom)))))
1131 oob_index = true;
1133 if ((op.opcode == ARRAY_REF
1134 || op.opcode == ARRAY_RANGE_REF)
1135 && TREE_CODE (op.op0) == SSA_NAME)
1137 /* There's a single constant index that get's 'off' closer
1138 to 'offset'. */
1139 unsigned HOST_WIDE_INT elsz
1140 = tree_to_uhwi (op.op2) * vn_ref_op_align_unit (&op);
1141 unsigned HOST_WIDE_INT idx
1142 = (coffset - off.to_constant ()) / BITS_PER_UNIT / elsz;
1143 if (idx == 0)
1144 op.op0 = op.op1;
1145 else
1146 op.op0 = wide_int_to_tree (TREE_TYPE (op.op0),
1147 wi::to_poly_wide (op.op1) + idx);
1148 op.off = idx * elsz;
1149 off += op.off * BITS_PER_UNIT;
1151 else
1153 if (op.opcode == ERROR_MARK)
1154 /* two-ops codes have the offset in the first op. */
1156 else if (op.opcode == ADDR_EXPR
1157 || op.opcode == SSA_NAME
1158 || op.opcode == CONSTRUCTOR
1159 || TREE_CODE_CLASS (op.opcode) == tcc_declaration
1160 || TREE_CODE_CLASS (op.opcode) == tcc_constant)
1161 /* end-of ref. */
1162 gcc_assert (i == result->length ());
1163 else if (op.opcode == COMPONENT_REF)
1165 /* op.off is tracked in bytes, re-do it manually
1166 because of bitfields. */
1167 tree field = op.op0;
1168 /* We do not have a complete COMPONENT_REF tree here so we
1169 cannot use component_ref_field_offset. Do the interesting
1170 parts manually. */
1171 tree this_offset = DECL_FIELD_OFFSET (field);
1172 if (op.op1 || !poly_int_tree_p (this_offset))
1173 gcc_unreachable ();
1174 else
1176 poly_offset_int woffset
1177 = (wi::to_poly_offset (this_offset)
1178 << LOG2_BITS_PER_UNIT);
1179 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1180 off += woffset.force_shwi ();
1183 else
1185 gcc_assert (known_ne (op.off, -1)
1186 /* Out-of-bound indices can compute to
1187 a known -1 offset. */
1188 || ((op.opcode == ARRAY_REF
1189 || op.opcode == ARRAY_RANGE_REF)
1190 && poly_int_tree_p (op.op0)
1191 && poly_int_tree_p (op.op1)
1192 && TREE_CODE (op.op2) == INTEGER_CST));
1193 off += op.off * BITS_PER_UNIT;
1197 if (flag_checking && !oob_index)
1199 ao_ref r;
1200 if (start != 0)
1202 else if (ao_ref_init_from_vn_reference (&r, 0, 0, TREE_TYPE (orig),
1203 *result))
1204 gcc_assert (known_eq (r.offset, orig_offset)
1205 && known_eq (r.size, size)
1206 && known_eq (r.max_size, max_size));
1207 else
1208 gcc_unreachable ();
1213 /* Build a alias-oracle reference abstraction in *REF from the vn_reference
1214 operands in *OPS, the reference alias set SET and the reference type TYPE.
1215 Return true if something useful was produced. */
1217 bool
1218 ao_ref_init_from_vn_reference (ao_ref *ref,
1219 alias_set_type set, alias_set_type base_set,
1220 tree type, const vec<vn_reference_op_s> &ops)
1222 unsigned i;
1223 tree base = NULL_TREE;
1224 tree *op0_p = &base;
1225 poly_offset_int offset = 0;
1226 poly_offset_int max_size;
1227 poly_offset_int size = -1;
1228 tree size_tree = NULL_TREE;
1230 /* We don't handle calls. */
1231 if (!type)
1232 return false;
1234 machine_mode mode = TYPE_MODE (type);
1235 if (mode == BLKmode)
1236 size_tree = TYPE_SIZE (type);
1237 else
1238 size = GET_MODE_BITSIZE (mode);
1239 if (size_tree != NULL_TREE
1240 && poly_int_tree_p (size_tree))
1241 size = wi::to_poly_offset (size_tree);
1243 /* Lower the final access size from the outermost expression. */
1244 const_vn_reference_op_t cst_op = &ops[0];
1245 /* Cast away constness for the sake of the const-unsafe
1246 FOR_EACH_VEC_ELT(). */
1247 vn_reference_op_t op = const_cast<vn_reference_op_t>(cst_op);
1248 size_tree = NULL_TREE;
1249 if (op->opcode == COMPONENT_REF)
1250 size_tree = DECL_SIZE (op->op0);
1251 else if (op->opcode == BIT_FIELD_REF)
1252 size_tree = op->op0;
1253 if (size_tree != NULL_TREE
1254 && poly_int_tree_p (size_tree)
1255 && (!known_size_p (size)
1256 || known_lt (wi::to_poly_offset (size_tree), size)))
1257 size = wi::to_poly_offset (size_tree);
1259 /* Initially, maxsize is the same as the accessed element size.
1260 In the following it will only grow (or become -1). */
1261 max_size = size;
1263 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1264 and find the ultimate containing object. */
1265 FOR_EACH_VEC_ELT (ops, i, op)
1267 switch (op->opcode)
1269 /* These may be in the reference ops, but we cannot do anything
1270 sensible with them here. */
1271 case ADDR_EXPR:
1272 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1273 if (base != NULL_TREE
1274 && TREE_CODE (base) == MEM_REF
1275 && op->op0
1276 && DECL_P (TREE_OPERAND (op->op0, 0)))
1278 const_vn_reference_op_t pop = &ops[i-1];
1279 base = TREE_OPERAND (op->op0, 0);
1280 if (known_eq (pop->off, -1))
1282 max_size = -1;
1283 offset = 0;
1285 else
1286 offset += pop->off * BITS_PER_UNIT;
1287 op0_p = NULL;
1288 break;
1290 /* Fallthru. */
1291 case CALL_EXPR:
1292 return false;
1294 /* Record the base objects. */
1295 case MEM_REF:
1296 *op0_p = build2 (MEM_REF, op->type,
1297 NULL_TREE, op->op0);
1298 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1299 MR_DEPENDENCE_BASE (*op0_p) = op->base;
1300 op0_p = &TREE_OPERAND (*op0_p, 0);
1301 break;
1303 case VAR_DECL:
1304 case PARM_DECL:
1305 case RESULT_DECL:
1306 case SSA_NAME:
1307 *op0_p = op->op0;
1308 op0_p = NULL;
1309 break;
1311 /* And now the usual component-reference style ops. */
1312 case BIT_FIELD_REF:
1313 offset += wi::to_poly_offset (op->op1);
1314 break;
1316 case COMPONENT_REF:
1318 tree field = op->op0;
1319 /* We do not have a complete COMPONENT_REF tree here so we
1320 cannot use component_ref_field_offset. Do the interesting
1321 parts manually. */
1322 tree this_offset = DECL_FIELD_OFFSET (field);
1324 if (op->op1 || !poly_int_tree_p (this_offset))
1325 max_size = -1;
1326 else
1328 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1329 << LOG2_BITS_PER_UNIT);
1330 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1331 offset += woffset;
1333 break;
1336 case ARRAY_RANGE_REF:
1337 case ARRAY_REF:
1338 /* Use the recorded constant offset. */
1339 if (maybe_eq (op->off, -1))
1340 max_size = -1;
1341 else
1342 offset += op->off << LOG2_BITS_PER_UNIT;
1343 break;
1345 case REALPART_EXPR:
1346 break;
1348 case IMAGPART_EXPR:
1349 offset += size;
1350 break;
1352 case VIEW_CONVERT_EXPR:
1353 break;
1355 case STRING_CST:
1356 case INTEGER_CST:
1357 case COMPLEX_CST:
1358 case VECTOR_CST:
1359 case REAL_CST:
1360 case CONSTRUCTOR:
1361 case CONST_DECL:
1362 return false;
1364 default:
1365 return false;
1369 if (base == NULL_TREE)
1370 return false;
1372 ref->ref = NULL_TREE;
1373 ref->base = base;
1374 ref->ref_alias_set = set;
1375 ref->base_alias_set = base_set;
1376 /* We discount volatiles from value-numbering elsewhere. */
1377 ref->volatile_p = false;
1379 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1381 ref->offset = 0;
1382 ref->size = -1;
1383 ref->max_size = -1;
1384 return true;
1387 if (!offset.to_shwi (&ref->offset))
1389 ref->offset = 0;
1390 ref->max_size = -1;
1391 return true;
1394 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1395 ref->max_size = -1;
1397 return true;
1400 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1401 vn_reference_op_s's. */
1403 static void
1404 copy_reference_ops_from_call (gcall *call,
1405 vec<vn_reference_op_s> *result)
1407 vn_reference_op_s temp;
1408 unsigned i;
1409 tree lhs = gimple_call_lhs (call);
1410 int lr;
1412 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1413 different. By adding the lhs here in the vector, we ensure that the
1414 hashcode is different, guaranteeing a different value number. */
1415 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1417 memset (&temp, 0, sizeof (temp));
1418 temp.opcode = MODIFY_EXPR;
1419 temp.type = TREE_TYPE (lhs);
1420 temp.op0 = lhs;
1421 temp.off = -1;
1422 result->safe_push (temp);
1425 /* Copy the type, opcode, function, static chain and EH region, if any. */
1426 memset (&temp, 0, sizeof (temp));
1427 temp.type = gimple_call_fntype (call);
1428 temp.opcode = CALL_EXPR;
1429 temp.op0 = gimple_call_fn (call);
1430 if (gimple_call_internal_p (call))
1431 temp.clique = gimple_call_internal_fn (call);
1432 temp.op1 = gimple_call_chain (call);
1433 if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1434 temp.op2 = size_int (lr);
1435 temp.off = -1;
1436 result->safe_push (temp);
1438 /* Copy the call arguments. As they can be references as well,
1439 just chain them together. */
1440 for (i = 0; i < gimple_call_num_args (call); ++i)
1442 tree callarg = gimple_call_arg (call, i);
1443 copy_reference_ops_from_ref (callarg, result);
1447 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1448 *I_P to point to the last element of the replacement. */
1449 static bool
1450 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1451 unsigned int *i_p)
1453 unsigned int i = *i_p;
1454 vn_reference_op_t op = &(*ops)[i];
1455 vn_reference_op_t mem_op = &(*ops)[i - 1];
1456 tree addr_base;
1457 poly_int64 addr_offset = 0;
1459 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1460 from .foo.bar to the preceding MEM_REF offset and replace the
1461 address with &OBJ. */
1462 addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
1463 &addr_offset, vn_valueize);
1464 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1465 if (addr_base != TREE_OPERAND (op->op0, 0))
1467 poly_offset_int off
1468 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1469 SIGNED)
1470 + addr_offset);
1471 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1472 op->op0 = build_fold_addr_expr (addr_base);
1473 if (tree_fits_shwi_p (mem_op->op0))
1474 mem_op->off = tree_to_shwi (mem_op->op0);
1475 else
1476 mem_op->off = -1;
1477 return true;
1479 return false;
1482 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1483 *I_P to point to the last element of the replacement. */
1484 static bool
1485 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1486 unsigned int *i_p)
1488 bool changed = false;
1489 vn_reference_op_t op;
1493 unsigned int i = *i_p;
1494 op = &(*ops)[i];
1495 vn_reference_op_t mem_op = &(*ops)[i - 1];
1496 gimple *def_stmt;
1497 enum tree_code code;
1498 poly_offset_int off;
1500 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1501 if (!is_gimple_assign (def_stmt))
1502 return changed;
1504 code = gimple_assign_rhs_code (def_stmt);
1505 if (code != ADDR_EXPR
1506 && code != POINTER_PLUS_EXPR)
1507 return changed;
1509 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1511 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1512 from .foo.bar to the preceding MEM_REF offset and replace the
1513 address with &OBJ. */
1514 if (code == ADDR_EXPR)
1516 tree addr, addr_base;
1517 poly_int64 addr_offset;
1519 addr = gimple_assign_rhs1 (def_stmt);
1520 addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
1521 &addr_offset,
1522 vn_valueize);
1523 /* If that didn't work because the address isn't invariant propagate
1524 the reference tree from the address operation in case the current
1525 dereference isn't offsetted. */
1526 if (!addr_base
1527 && *i_p == ops->length () - 1
1528 && known_eq (off, 0)
1529 /* This makes us disable this transform for PRE where the
1530 reference ops might be also used for code insertion which
1531 is invalid. */
1532 && default_vn_walk_kind == VN_WALKREWRITE)
1534 auto_vec<vn_reference_op_s, 32> tem;
1535 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1536 /* Make sure to preserve TBAA info. The only objects not
1537 wrapped in MEM_REFs that can have their address taken are
1538 STRING_CSTs. */
1539 if (tem.length () >= 2
1540 && tem[tem.length () - 2].opcode == MEM_REF)
1542 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1543 new_mem_op->op0
1544 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1545 wi::to_poly_wide (new_mem_op->op0));
1547 else
1548 gcc_assert (tem.last ().opcode == STRING_CST);
1549 ops->pop ();
1550 ops->pop ();
1551 ops->safe_splice (tem);
1552 --*i_p;
1553 return true;
1555 if (!addr_base
1556 || TREE_CODE (addr_base) != MEM_REF
1557 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1558 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
1559 0))))
1560 return changed;
1562 off += addr_offset;
1563 off += mem_ref_offset (addr_base);
1564 op->op0 = TREE_OPERAND (addr_base, 0);
1566 else
1568 tree ptr, ptroff;
1569 ptr = gimple_assign_rhs1 (def_stmt);
1570 ptroff = gimple_assign_rhs2 (def_stmt);
1571 if (TREE_CODE (ptr) != SSA_NAME
1572 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1573 /* Make sure to not endlessly recurse.
1574 See gcc.dg/tree-ssa/20040408-1.c for an example. Can easily
1575 happen when we value-number a PHI to its backedge value. */
1576 || SSA_VAL (ptr) == op->op0
1577 || !poly_int_tree_p (ptroff))
1578 return changed;
1580 off += wi::to_poly_offset (ptroff);
1581 op->op0 = ptr;
1584 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1585 if (tree_fits_shwi_p (mem_op->op0))
1586 mem_op->off = tree_to_shwi (mem_op->op0);
1587 else
1588 mem_op->off = -1;
1589 /* ??? Can end up with endless recursion here!?
1590 gcc.c-torture/execute/strcmp-1.c */
1591 if (TREE_CODE (op->op0) == SSA_NAME)
1592 op->op0 = SSA_VAL (op->op0);
1593 if (TREE_CODE (op->op0) != SSA_NAME)
1594 op->opcode = TREE_CODE (op->op0);
1596 changed = true;
1598 /* Tail-recurse. */
1599 while (TREE_CODE (op->op0) == SSA_NAME);
1601 /* Fold a remaining *&. */
1602 if (TREE_CODE (op->op0) == ADDR_EXPR)
1603 vn_reference_fold_indirect (ops, i_p);
1605 return changed;
1608 /* Optimize the reference REF to a constant if possible or return
1609 NULL_TREE if not. */
1611 tree
1612 fully_constant_vn_reference_p (vn_reference_t ref)
1614 vec<vn_reference_op_s> operands = ref->operands;
1615 vn_reference_op_t op;
1617 /* Try to simplify the translated expression if it is
1618 a call to a builtin function with at most two arguments. */
1619 op = &operands[0];
1620 if (op->opcode == CALL_EXPR
1621 && (!op->op0
1622 || (TREE_CODE (op->op0) == ADDR_EXPR
1623 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1624 && fndecl_built_in_p (TREE_OPERAND (op->op0, 0),
1625 BUILT_IN_NORMAL)))
1626 && operands.length () >= 2
1627 && operands.length () <= 3)
1629 vn_reference_op_t arg0, arg1 = NULL;
1630 bool anyconst = false;
1631 arg0 = &operands[1];
1632 if (operands.length () > 2)
1633 arg1 = &operands[2];
1634 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1635 || (arg0->opcode == ADDR_EXPR
1636 && is_gimple_min_invariant (arg0->op0)))
1637 anyconst = true;
1638 if (arg1
1639 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1640 || (arg1->opcode == ADDR_EXPR
1641 && is_gimple_min_invariant (arg1->op0))))
1642 anyconst = true;
1643 if (anyconst)
1645 combined_fn fn;
1646 if (op->op0)
1647 fn = as_combined_fn (DECL_FUNCTION_CODE
1648 (TREE_OPERAND (op->op0, 0)));
1649 else
1650 fn = as_combined_fn ((internal_fn) op->clique);
1651 tree folded;
1652 if (arg1)
1653 folded = fold_const_call (fn, ref->type, arg0->op0, arg1->op0);
1654 else
1655 folded = fold_const_call (fn, ref->type, arg0->op0);
1656 if (folded
1657 && is_gimple_min_invariant (folded))
1658 return folded;
1662 /* Simplify reads from constants or constant initializers. */
1663 else if (BITS_PER_UNIT == 8
1664 && ref->type
1665 && COMPLETE_TYPE_P (ref->type)
1666 && is_gimple_reg_type (ref->type))
1668 poly_int64 off = 0;
1669 HOST_WIDE_INT size;
1670 if (INTEGRAL_TYPE_P (ref->type))
1671 size = TYPE_PRECISION (ref->type);
1672 else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
1673 size = tree_to_shwi (TYPE_SIZE (ref->type));
1674 else
1675 return NULL_TREE;
1676 if (size % BITS_PER_UNIT != 0
1677 || size > MAX_BITSIZE_MODE_ANY_MODE)
1678 return NULL_TREE;
1679 size /= BITS_PER_UNIT;
1680 unsigned i;
1681 for (i = 0; i < operands.length (); ++i)
1683 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1685 ++i;
1686 break;
1688 if (known_eq (operands[i].off, -1))
1689 return NULL_TREE;
1690 off += operands[i].off;
1691 if (operands[i].opcode == MEM_REF)
1693 ++i;
1694 break;
1697 vn_reference_op_t base = &operands[--i];
1698 tree ctor = error_mark_node;
1699 tree decl = NULL_TREE;
1700 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1701 ctor = base->op0;
1702 else if (base->opcode == MEM_REF
1703 && base[1].opcode == ADDR_EXPR
1704 && (VAR_P (TREE_OPERAND (base[1].op0, 0))
1705 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1706 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1708 decl = TREE_OPERAND (base[1].op0, 0);
1709 if (TREE_CODE (decl) == STRING_CST)
1710 ctor = decl;
1711 else
1712 ctor = ctor_for_folding (decl);
1714 if (ctor == NULL_TREE)
1715 return build_zero_cst (ref->type);
1716 else if (ctor != error_mark_node)
1718 HOST_WIDE_INT const_off;
1719 if (decl)
1721 tree res = fold_ctor_reference (ref->type, ctor,
1722 off * BITS_PER_UNIT,
1723 size * BITS_PER_UNIT, decl);
1724 if (res)
1726 STRIP_USELESS_TYPE_CONVERSION (res);
1727 if (is_gimple_min_invariant (res))
1728 return res;
1731 else if (off.is_constant (&const_off))
1733 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1734 int len = native_encode_expr (ctor, buf, size, const_off);
1735 if (len > 0)
1736 return native_interpret_expr (ref->type, buf, len);
1741 return NULL_TREE;
1744 /* Return true if OPS contain a storage order barrier. */
1746 static bool
1747 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1749 vn_reference_op_t op;
1750 unsigned i;
1752 FOR_EACH_VEC_ELT (ops, i, op)
1753 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1754 return true;
1756 return false;
1759 /* Return true if OPS represent an access with reverse storage order. */
1761 static bool
1762 reverse_storage_order_for_component_p (vec<vn_reference_op_s> ops)
1764 unsigned i = 0;
1765 if (ops[i].opcode == REALPART_EXPR || ops[i].opcode == IMAGPART_EXPR)
1766 ++i;
1767 switch (ops[i].opcode)
1769 case ARRAY_REF:
1770 case COMPONENT_REF:
1771 case BIT_FIELD_REF:
1772 case MEM_REF:
1773 return ops[i].reverse;
1774 default:
1775 return false;
1779 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1780 structures into their value numbers. This is done in-place, and
1781 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1782 whether any operands were valueized. */
1784 static void
1785 valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything,
1786 bool with_avail = false)
1788 *valueized_anything = false;
1790 for (unsigned i = 0; i < orig->length (); ++i)
1792 re_valueize:
1793 vn_reference_op_t vro = &(*orig)[i];
1794 if (vro->opcode == SSA_NAME
1795 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1797 tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1798 if (tem != vro->op0)
1800 *valueized_anything = true;
1801 vro->op0 = tem;
1803 /* If it transforms from an SSA_NAME to a constant, update
1804 the opcode. */
1805 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1806 vro->opcode = TREE_CODE (vro->op0);
1808 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1810 tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1811 if (tem != vro->op1)
1813 *valueized_anything = true;
1814 vro->op1 = tem;
1817 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1819 tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1820 if (tem != vro->op2)
1822 *valueized_anything = true;
1823 vro->op2 = tem;
1826 /* If it transforms from an SSA_NAME to an address, fold with
1827 a preceding indirect reference. */
1828 if (i > 0
1829 && vro->op0
1830 && TREE_CODE (vro->op0) == ADDR_EXPR
1831 && (*orig)[i - 1].opcode == MEM_REF)
1833 if (vn_reference_fold_indirect (orig, &i))
1834 *valueized_anything = true;
1836 else if (i > 0
1837 && vro->opcode == SSA_NAME
1838 && (*orig)[i - 1].opcode == MEM_REF)
1840 if (vn_reference_maybe_forwprop_address (orig, &i))
1842 *valueized_anything = true;
1843 /* Re-valueize the current operand. */
1844 goto re_valueize;
1847 /* If it transforms a non-constant ARRAY_REF into a constant
1848 one, adjust the constant offset. */
1849 else if ((vro->opcode == ARRAY_REF
1850 || vro->opcode == ARRAY_RANGE_REF)
1851 && known_eq (vro->off, -1)
1852 && poly_int_tree_p (vro->op0)
1853 && poly_int_tree_p (vro->op1)
1854 && TREE_CODE (vro->op2) == INTEGER_CST)
1856 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1857 - wi::to_poly_offset (vro->op1))
1858 * wi::to_offset (vro->op2)
1859 * vn_ref_op_align_unit (vro));
1860 off.to_shwi (&vro->off);
1865 static void
1866 valueize_refs (vec<vn_reference_op_s> *orig)
1868 bool tem;
1869 valueize_refs_1 (orig, &tem);
1872 static vec<vn_reference_op_s> shared_lookup_references;
1874 /* Create a vector of vn_reference_op_s structures from REF, a
1875 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1876 this function. *VALUEIZED_ANYTHING will specify whether any
1877 operands were valueized. */
1879 static vec<vn_reference_op_s>
1880 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1882 if (!ref)
1883 return vNULL;
1884 shared_lookup_references.truncate (0);
1885 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1886 valueize_refs_1 (&shared_lookup_references, valueized_anything);
1887 return shared_lookup_references;
1890 /* Create a vector of vn_reference_op_s structures from CALL, a
1891 call statement. The vector is shared among all callers of
1892 this function. */
1894 static vec<vn_reference_op_s>
1895 valueize_shared_reference_ops_from_call (gcall *call)
1897 if (!call)
1898 return vNULL;
1899 shared_lookup_references.truncate (0);
1900 copy_reference_ops_from_call (call, &shared_lookup_references);
1901 valueize_refs (&shared_lookup_references);
1902 return shared_lookup_references;
1905 /* Lookup a SCCVN reference operation VR in the current hash table.
1906 Returns the resulting value number if it exists in the hash table,
1907 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1908 vn_reference_t stored in the hashtable if something is found. */
1910 static tree
1911 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1913 vn_reference_s **slot;
1914 hashval_t hash;
1916 hash = vr->hashcode;
1917 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1918 if (slot)
1920 if (vnresult)
1921 *vnresult = (vn_reference_t)*slot;
1922 return ((vn_reference_t)*slot)->result;
1925 return NULL_TREE;
1929 /* Partial definition tracking support. */
1931 struct pd_range
1933 HOST_WIDE_INT offset;
1934 HOST_WIDE_INT size;
1937 struct pd_data
1939 tree rhs;
1940 HOST_WIDE_INT rhs_off;
1941 HOST_WIDE_INT offset;
1942 HOST_WIDE_INT size;
1945 /* Context for alias walking. */
1947 struct vn_walk_cb_data
1949 vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
1950 vn_lookup_kind vn_walk_kind_, bool tbaa_p_, tree mask_,
1951 bool redundant_store_removal_p_)
1952 : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
1953 mask (mask_), masked_result (NULL_TREE), same_val (NULL_TREE),
1954 vn_walk_kind (vn_walk_kind_),
1955 tbaa_p (tbaa_p_), redundant_store_removal_p (redundant_store_removal_p_),
1956 saved_operands (vNULL), first_set (-2), first_base_set (-2),
1957 known_ranges (NULL)
1959 if (!last_vuse_ptr)
1960 last_vuse_ptr = &last_vuse;
1961 ao_ref_init (&orig_ref, orig_ref_);
1962 if (mask)
1964 wide_int w = wi::to_wide (mask);
1965 unsigned int pos = 0, prec = w.get_precision ();
1966 pd_data pd;
1967 pd.rhs = build_constructor (NULL_TREE, NULL);
1968 pd.rhs_off = 0;
1969 /* When bitwise and with a constant is done on a memory load,
1970 we don't really need all the bits to be defined or defined
1971 to constants, we don't really care what is in the position
1972 corresponding to 0 bits in the mask.
1973 So, push the ranges of those 0 bits in the mask as artificial
1974 zero stores and let the partial def handling code do the
1975 rest. */
1976 while (pos < prec)
1978 int tz = wi::ctz (w);
1979 if (pos + tz > prec)
1980 tz = prec - pos;
1981 if (tz)
1983 if (BYTES_BIG_ENDIAN)
1984 pd.offset = prec - pos - tz;
1985 else
1986 pd.offset = pos;
1987 pd.size = tz;
1988 void *r = push_partial_def (pd, 0, 0, 0, prec);
1989 gcc_assert (r == NULL_TREE);
1991 pos += tz;
1992 if (pos == prec)
1993 break;
1994 w = wi::lrshift (w, tz);
1995 tz = wi::ctz (wi::bit_not (w));
1996 if (pos + tz > prec)
1997 tz = prec - pos;
1998 pos += tz;
1999 w = wi::lrshift (w, tz);
2003 ~vn_walk_cb_data ();
2004 void *finish (alias_set_type, alias_set_type, tree);
2005 void *push_partial_def (pd_data pd,
2006 alias_set_type, alias_set_type, HOST_WIDE_INT,
2007 HOST_WIDE_INT);
2009 vn_reference_t vr;
2010 ao_ref orig_ref;
2011 tree *last_vuse_ptr;
2012 tree last_vuse;
2013 tree mask;
2014 tree masked_result;
2015 tree same_val;
2016 vn_lookup_kind vn_walk_kind;
2017 bool tbaa_p;
2018 bool redundant_store_removal_p;
2019 vec<vn_reference_op_s> saved_operands;
2021 /* The VDEFs of partial defs we come along. */
2022 auto_vec<pd_data, 2> partial_defs;
2023 /* The first defs range to avoid splay tree setup in most cases. */
2024 pd_range first_range;
2025 alias_set_type first_set;
2026 alias_set_type first_base_set;
2027 splay_tree known_ranges;
2028 obstack ranges_obstack;
2029 static constexpr HOST_WIDE_INT bufsize = 64;
2032 vn_walk_cb_data::~vn_walk_cb_data ()
2034 if (known_ranges)
2036 splay_tree_delete (known_ranges);
2037 obstack_free (&ranges_obstack, NULL);
2039 saved_operands.release ();
2042 void *
2043 vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
2045 if (first_set != -2)
2047 set = first_set;
2048 base_set = first_base_set;
2050 if (mask)
2052 masked_result = val;
2053 return (void *) -1;
2055 if (same_val && !operand_equal_p (val, same_val))
2056 return (void *) -1;
2057 vec<vn_reference_op_s> &operands
2058 = saved_operands.exists () ? saved_operands : vr->operands;
2059 return vn_reference_lookup_or_insert_for_pieces (last_vuse, set, base_set,
2060 vr->type, operands, val);
2063 /* pd_range splay-tree helpers. */
2065 static int
2066 pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
2068 HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
2069 HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
2070 if (offset1 < offset2)
2071 return -1;
2072 else if (offset1 > offset2)
2073 return 1;
2074 return 0;
2077 static void *
2078 pd_tree_alloc (int size, void *data_)
2080 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2081 return obstack_alloc (&data->ranges_obstack, size);
2084 static void
2085 pd_tree_dealloc (void *, void *)
2089 /* Push PD to the vector of partial definitions returning a
2090 value when we are ready to combine things with VUSE, SET and MAXSIZEI,
2091 NULL when we want to continue looking for partial defs or -1
2092 on failure. */
2094 void *
2095 vn_walk_cb_data::push_partial_def (pd_data pd,
2096 alias_set_type set, alias_set_type base_set,
2097 HOST_WIDE_INT offseti,
2098 HOST_WIDE_INT maxsizei)
2100 /* We're using a fixed buffer for encoding so fail early if the object
2101 we want to interpret is bigger. */
2102 if (maxsizei > bufsize * BITS_PER_UNIT
2103 || CHAR_BIT != 8
2104 || BITS_PER_UNIT != 8
2105 /* Not prepared to handle PDP endian. */
2106 || BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
2107 return (void *)-1;
2109 /* Turn too large constant stores into non-constant stores. */
2110 if (CONSTANT_CLASS_P (pd.rhs) && pd.size > bufsize * BITS_PER_UNIT)
2111 pd.rhs = error_mark_node;
2113 /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at
2114 most a partial byte before and/or after the region. */
2115 if (!CONSTANT_CLASS_P (pd.rhs))
2117 if (pd.offset < offseti)
2119 HOST_WIDE_INT o = ROUND_DOWN (offseti - pd.offset, BITS_PER_UNIT);
2120 gcc_assert (pd.size > o);
2121 pd.size -= o;
2122 pd.offset += o;
2124 if (pd.size > maxsizei)
2125 pd.size = maxsizei + ((pd.size - maxsizei) % BITS_PER_UNIT);
2128 pd.offset -= offseti;
2130 bool pd_constant_p = (TREE_CODE (pd.rhs) == CONSTRUCTOR
2131 || CONSTANT_CLASS_P (pd.rhs));
2132 pd_range *r;
2133 if (partial_defs.is_empty ())
2135 /* If we get a clobber upfront, fail. */
2136 if (TREE_CLOBBER_P (pd.rhs))
2137 return (void *)-1;
2138 if (!pd_constant_p)
2139 return (void *)-1;
2140 partial_defs.safe_push (pd);
2141 first_range.offset = pd.offset;
2142 first_range.size = pd.size;
2143 first_set = set;
2144 first_base_set = base_set;
2145 last_vuse_ptr = NULL;
2146 r = &first_range;
2147 /* Go check if the first partial definition was a full one in case
2148 the caller didn't optimize for this. */
2150 else
2152 if (!known_ranges)
2154 /* ??? Optimize the case where the 2nd partial def completes
2155 things. */
2156 gcc_obstack_init (&ranges_obstack);
2157 known_ranges = splay_tree_new_with_allocator (pd_range_compare, 0, 0,
2158 pd_tree_alloc,
2159 pd_tree_dealloc, this);
2160 splay_tree_insert (known_ranges,
2161 (splay_tree_key)&first_range.offset,
2162 (splay_tree_value)&first_range);
2165 pd_range newr = { pd.offset, pd.size };
2166 splay_tree_node n;
2167 /* Lookup the predecessor of offset + 1 and see if we need to merge. */
2168 HOST_WIDE_INT loffset = newr.offset + 1;
2169 if ((n = splay_tree_predecessor (known_ranges, (splay_tree_key)&loffset))
2170 && ((r = (pd_range *)n->value), true)
2171 && ranges_known_overlap_p (r->offset, r->size + 1,
2172 newr.offset, newr.size))
2174 /* Ignore partial defs already covered. Here we also drop shadowed
2175 clobbers arriving here at the floor. */
2176 if (known_subrange_p (newr.offset, newr.size, r->offset, r->size))
2177 return NULL;
2178 r->size
2179 = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
2181 else
2183 /* newr.offset wasn't covered yet, insert the range. */
2184 r = XOBNEW (&ranges_obstack, pd_range);
2185 *r = newr;
2186 splay_tree_insert (known_ranges, (splay_tree_key)&r->offset,
2187 (splay_tree_value)r);
2189 /* Merge r which now contains newr and is a member of the splay tree with
2190 adjacent overlapping ranges. */
2191 pd_range *rafter;
2192 while ((n = splay_tree_successor (known_ranges,
2193 (splay_tree_key)&r->offset))
2194 && ((rafter = (pd_range *)n->value), true)
2195 && ranges_known_overlap_p (r->offset, r->size + 1,
2196 rafter->offset, rafter->size))
2198 r->size = MAX (r->offset + r->size,
2199 rafter->offset + rafter->size) - r->offset;
2200 splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset);
2202 /* If we get a clobber, fail. */
2203 if (TREE_CLOBBER_P (pd.rhs))
2204 return (void *)-1;
2205 /* Non-constants are OK as long as they are shadowed by a constant. */
2206 if (!pd_constant_p)
2207 return (void *)-1;
2208 partial_defs.safe_push (pd);
2211 /* Now we have merged newr into the range tree. When we have covered
2212 [offseti, sizei] then the tree will contain exactly one node which has
2213 the desired properties and it will be 'r'. */
2214 if (!known_subrange_p (0, maxsizei, r->offset, r->size))
2215 /* Continue looking for partial defs. */
2216 return NULL;
2218 /* Now simply native encode all partial defs in reverse order. */
2219 unsigned ndefs = partial_defs.length ();
2220 /* We support up to 512-bit values (for V8DFmode). */
2221 unsigned char buffer[bufsize + 1];
2222 unsigned char this_buffer[bufsize + 1];
2223 int len;
2225 memset (buffer, 0, bufsize + 1);
2226 unsigned needed_len = ROUND_UP (maxsizei, BITS_PER_UNIT) / BITS_PER_UNIT;
2227 while (!partial_defs.is_empty ())
2229 pd_data pd = partial_defs.pop ();
2230 unsigned int amnt;
2231 if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
2233 /* Empty CONSTRUCTOR. */
2234 if (pd.size >= needed_len * BITS_PER_UNIT)
2235 len = needed_len;
2236 else
2237 len = ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT;
2238 memset (this_buffer, 0, len);
2240 else if (pd.rhs_off >= 0)
2242 len = native_encode_expr (pd.rhs, this_buffer, bufsize,
2243 (MAX (0, -pd.offset)
2244 + pd.rhs_off) / BITS_PER_UNIT);
2245 if (len <= 0
2246 || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
2247 - MAX (0, -pd.offset) / BITS_PER_UNIT))
2249 if (dump_file && (dump_flags & TDF_DETAILS))
2250 fprintf (dump_file, "Failed to encode %u "
2251 "partial definitions\n", ndefs);
2252 return (void *)-1;
2255 else /* negative pd.rhs_off indicates we want to chop off first bits */
2257 if (-pd.rhs_off >= bufsize)
2258 return (void *)-1;
2259 len = native_encode_expr (pd.rhs,
2260 this_buffer + -pd.rhs_off / BITS_PER_UNIT,
2261 bufsize - -pd.rhs_off / BITS_PER_UNIT,
2262 MAX (0, -pd.offset) / BITS_PER_UNIT);
2263 if (len <= 0
2264 || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
2265 - MAX (0, -pd.offset) / BITS_PER_UNIT))
2267 if (dump_file && (dump_flags & TDF_DETAILS))
2268 fprintf (dump_file, "Failed to encode %u "
2269 "partial definitions\n", ndefs);
2270 return (void *)-1;
2274 unsigned char *p = buffer;
2275 HOST_WIDE_INT size = pd.size;
2276 if (pd.offset < 0)
2277 size -= ROUND_DOWN (-pd.offset, BITS_PER_UNIT);
2278 this_buffer[len] = 0;
2279 if (BYTES_BIG_ENDIAN)
2281 /* LSB of this_buffer[len - 1] byte should be at
2282 pd.offset + pd.size - 1 bits in buffer. */
2283 amnt = ((unsigned HOST_WIDE_INT) pd.offset
2284 + pd.size) % BITS_PER_UNIT;
2285 if (amnt)
2286 shift_bytes_in_array_right (this_buffer, len + 1, amnt);
2287 unsigned char *q = this_buffer;
2288 unsigned int off = 0;
2289 if (pd.offset >= 0)
2291 unsigned int msk;
2292 off = pd.offset / BITS_PER_UNIT;
2293 gcc_assert (off < needed_len);
2294 p = buffer + off;
2295 if (size <= amnt)
2297 msk = ((1 << size) - 1) << (BITS_PER_UNIT - amnt);
2298 *p = (*p & ~msk) | (this_buffer[len] & msk);
2299 size = 0;
2301 else
2303 if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2304 q = (this_buffer + len
2305 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2306 / BITS_PER_UNIT));
2307 if (pd.offset % BITS_PER_UNIT)
2309 msk = -1U << (BITS_PER_UNIT
2310 - (pd.offset % BITS_PER_UNIT));
2311 *p = (*p & msk) | (*q & ~msk);
2312 p++;
2313 q++;
2314 off++;
2315 size -= BITS_PER_UNIT - (pd.offset % BITS_PER_UNIT);
2316 gcc_assert (size >= 0);
2320 else if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2322 q = (this_buffer + len
2323 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2324 / BITS_PER_UNIT));
2325 if (pd.offset % BITS_PER_UNIT)
2327 q++;
2328 size -= BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) pd.offset
2329 % BITS_PER_UNIT);
2330 gcc_assert (size >= 0);
2333 if ((unsigned HOST_WIDE_INT) size / BITS_PER_UNIT + off
2334 > needed_len)
2335 size = (needed_len - off) * BITS_PER_UNIT;
2336 memcpy (p, q, size / BITS_PER_UNIT);
2337 if (size % BITS_PER_UNIT)
2339 unsigned int msk
2340 = -1U << (BITS_PER_UNIT - (size % BITS_PER_UNIT));
2341 p += size / BITS_PER_UNIT;
2342 q += size / BITS_PER_UNIT;
2343 *p = (*q & msk) | (*p & ~msk);
2346 else
2348 if (pd.offset >= 0)
2350 /* LSB of this_buffer[0] byte should be at pd.offset bits
2351 in buffer. */
2352 unsigned int msk;
2353 size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2354 amnt = pd.offset % BITS_PER_UNIT;
2355 if (amnt)
2356 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2357 unsigned int off = pd.offset / BITS_PER_UNIT;
2358 gcc_assert (off < needed_len);
2359 size = MIN (size,
2360 (HOST_WIDE_INT) (needed_len - off) * BITS_PER_UNIT);
2361 p = buffer + off;
2362 if (amnt + size < BITS_PER_UNIT)
2364 /* Low amnt bits come from *p, then size bits
2365 from this_buffer[0] and the remaining again from
2366 *p. */
2367 msk = ((1 << size) - 1) << amnt;
2368 *p = (*p & ~msk) | (this_buffer[0] & msk);
2369 size = 0;
2371 else if (amnt)
2373 msk = -1U << amnt;
2374 *p = (*p & ~msk) | (this_buffer[0] & msk);
2375 p++;
2376 size -= (BITS_PER_UNIT - amnt);
2379 else
2381 amnt = (unsigned HOST_WIDE_INT) pd.offset % BITS_PER_UNIT;
2382 if (amnt)
2383 size -= BITS_PER_UNIT - amnt;
2384 size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2385 if (amnt)
2386 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2388 memcpy (p, this_buffer + (amnt != 0), size / BITS_PER_UNIT);
2389 p += size / BITS_PER_UNIT;
2390 if (size % BITS_PER_UNIT)
2392 unsigned int msk = -1U << (size % BITS_PER_UNIT);
2393 *p = (this_buffer[(amnt != 0) + size / BITS_PER_UNIT]
2394 & ~msk) | (*p & msk);
2399 tree type = vr->type;
2400 /* Make sure to interpret in a type that has a range covering the whole
2401 access size. */
2402 if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
2403 type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
2404 tree val;
2405 if (BYTES_BIG_ENDIAN)
2407 unsigned sz = needed_len;
2408 if (maxsizei % BITS_PER_UNIT)
2409 shift_bytes_in_array_right (buffer, needed_len,
2410 BITS_PER_UNIT
2411 - (maxsizei % BITS_PER_UNIT));
2412 if (INTEGRAL_TYPE_P (type))
2414 if (TYPE_MODE (type) != BLKmode)
2415 sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
2416 else
2417 sz = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
2419 if (sz > needed_len)
2421 memcpy (this_buffer + (sz - needed_len), buffer, needed_len);
2422 val = native_interpret_expr (type, this_buffer, sz);
2424 else
2425 val = native_interpret_expr (type, buffer, needed_len);
2427 else
2428 val = native_interpret_expr (type, buffer, bufsize);
2429 /* If we chop off bits because the types precision doesn't match the memory
2430 access size this is ok when optimizing reads but not when called from
2431 the DSE code during elimination. */
2432 if (val && type != vr->type)
2434 if (! int_fits_type_p (val, vr->type))
2435 val = NULL_TREE;
2436 else
2437 val = fold_convert (vr->type, val);
2440 if (val)
2442 if (dump_file && (dump_flags & TDF_DETAILS))
2443 fprintf (dump_file,
2444 "Successfully combined %u partial definitions\n", ndefs);
2445 /* We are using the alias-set of the first store we encounter which
2446 should be appropriate here. */
2447 return finish (first_set, first_base_set, val);
2449 else
2451 if (dump_file && (dump_flags & TDF_DETAILS))
2452 fprintf (dump_file,
2453 "Failed to interpret %u encoded partial definitions\n", ndefs);
2454 return (void *)-1;
2458 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
2459 with the current VUSE and performs the expression lookup. */
2461 static void *
2462 vn_reference_lookup_2 (ao_ref *op, tree vuse, void *data_)
2464 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2465 vn_reference_t vr = data->vr;
2466 vn_reference_s **slot;
2467 hashval_t hash;
2469 /* If we have partial definitions recorded we have to go through
2470 vn_reference_lookup_3. */
2471 if (!data->partial_defs.is_empty ())
2472 return NULL;
2474 if (data->last_vuse_ptr)
2476 *data->last_vuse_ptr = vuse;
2477 data->last_vuse = vuse;
2480 /* Fixup vuse and hash. */
2481 if (vr->vuse)
2482 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
2483 vr->vuse = vuse_ssa_val (vuse);
2484 if (vr->vuse)
2485 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
2487 hash = vr->hashcode;
2488 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
2489 if (slot)
2491 if ((*slot)->result && data->saved_operands.exists ())
2492 return data->finish (vr->set, vr->base_set, (*slot)->result);
2493 return *slot;
2496 if (SSA_NAME_IS_DEFAULT_DEF (vuse))
2498 HOST_WIDE_INT op_offset, op_size;
2499 tree v = NULL_TREE;
2500 tree base = ao_ref_base (op);
2502 if (base
2503 && op->offset.is_constant (&op_offset)
2504 && op->size.is_constant (&op_size)
2505 && op->max_size_known_p ()
2506 && known_eq (op->size, op->max_size))
2508 if (TREE_CODE (base) == PARM_DECL)
2509 v = ipcp_get_aggregate_const (cfun, base, false, op_offset,
2510 op_size);
2511 else if (TREE_CODE (base) == MEM_REF
2512 && integer_zerop (TREE_OPERAND (base, 1))
2513 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
2514 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
2515 && (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (base, 0)))
2516 == PARM_DECL))
2517 v = ipcp_get_aggregate_const (cfun,
2518 SSA_NAME_VAR (TREE_OPERAND (base, 0)),
2519 true, op_offset, op_size);
2521 if (v)
2522 return data->finish (vr->set, vr->base_set, v);
2525 return NULL;
2528 /* Lookup an existing or insert a new vn_reference entry into the
2529 value table for the VUSE, SET, TYPE, OPERANDS reference which
2530 has the value VALUE which is either a constant or an SSA name. */
2532 static vn_reference_t
2533 vn_reference_lookup_or_insert_for_pieces (tree vuse,
2534 alias_set_type set,
2535 alias_set_type base_set,
2536 tree type,
2537 vec<vn_reference_op_s,
2538 va_heap> operands,
2539 tree value)
2541 vn_reference_s vr1;
2542 vn_reference_t result;
2543 unsigned value_id;
2544 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2545 vr1.operands = operands;
2546 vr1.type = type;
2547 vr1.set = set;
2548 vr1.base_set = base_set;
2549 vr1.hashcode = vn_reference_compute_hash (&vr1);
2550 if (vn_reference_lookup_1 (&vr1, &result))
2551 return result;
2552 if (TREE_CODE (value) == SSA_NAME)
2553 value_id = VN_INFO (value)->value_id;
2554 else
2555 value_id = get_or_alloc_constant_value_id (value);
2556 return vn_reference_insert_pieces (vuse, set, base_set, type,
2557 operands.copy (), value, value_id);
2560 /* Return a value-number for RCODE OPS... either by looking up an existing
2561 value-number for the possibly simplified result or by inserting the
2562 operation if INSERT is true. If SIMPLIFY is false, return a value
2563 number for the unsimplified expression. */
2565 static tree
2566 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert,
2567 bool simplify)
2569 tree result = NULL_TREE;
2570 /* We will be creating a value number for
2571 RCODE (OPS...).
2572 So first simplify and lookup this expression to see if it
2573 is already available. */
2574 /* For simplification valueize. */
2575 unsigned i = 0;
2576 if (simplify)
2577 for (i = 0; i < res_op->num_ops; ++i)
2578 if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
2580 tree tem = vn_valueize (res_op->ops[i]);
2581 if (!tem)
2582 break;
2583 res_op->ops[i] = tem;
2585 /* If valueization of an operand fails (it is not available), skip
2586 simplification. */
2587 bool res = false;
2588 if (i == res_op->num_ops)
2590 mprts_hook = vn_lookup_simplify_result;
2591 res = res_op->resimplify (NULL, vn_valueize);
2592 mprts_hook = NULL;
2594 gimple *new_stmt = NULL;
2595 if (res
2596 && gimple_simplified_result_is_gimple_val (res_op))
2598 /* The expression is already available. */
2599 result = res_op->ops[0];
2600 /* Valueize it, simplification returns sth in AVAIL only. */
2601 if (TREE_CODE (result) == SSA_NAME)
2602 result = SSA_VAL (result);
2604 else
2606 tree val = vn_lookup_simplify_result (res_op);
2607 if (!val && insert)
2609 gimple_seq stmts = NULL;
2610 result = maybe_push_res_to_seq (res_op, &stmts);
2611 if (result)
2613 gcc_assert (gimple_seq_singleton_p (stmts));
2614 new_stmt = gimple_seq_first_stmt (stmts);
2617 else
2618 /* The expression is already available. */
2619 result = val;
2621 if (new_stmt)
2623 /* The expression is not yet available, value-number lhs to
2624 the new SSA_NAME we created. */
2625 /* Initialize value-number information properly. */
2626 vn_ssa_aux_t result_info = VN_INFO (result);
2627 result_info->valnum = result;
2628 result_info->value_id = get_next_value_id ();
2629 result_info->visited = 1;
2630 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
2631 new_stmt);
2632 result_info->needs_insertion = true;
2633 /* ??? PRE phi-translation inserts NARYs without corresponding
2634 SSA name result. Re-use those but set their result according
2635 to the stmt we just built. */
2636 vn_nary_op_t nary = NULL;
2637 vn_nary_op_lookup_stmt (new_stmt, &nary);
2638 if (nary)
2640 gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
2641 nary->u.result = gimple_assign_lhs (new_stmt);
2643 /* As all "inserted" statements are singleton SCCs, insert
2644 to the valid table. This is strictly needed to
2645 avoid re-generating new value SSA_NAMEs for the same
2646 expression during SCC iteration over and over (the
2647 optimistic table gets cleared after each iteration).
2648 We do not need to insert into the optimistic table, as
2649 lookups there will fall back to the valid table. */
2650 else
2652 unsigned int length = vn_nary_length_from_stmt (new_stmt);
2653 vn_nary_op_t vno1
2654 = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
2655 vno1->value_id = result_info->value_id;
2656 vno1->length = length;
2657 vno1->predicated_values = 0;
2658 vno1->u.result = result;
2659 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (new_stmt));
2660 vn_nary_op_insert_into (vno1, valid_info->nary);
2661 /* Also do not link it into the undo chain. */
2662 last_inserted_nary = vno1->next;
2663 vno1->next = (vn_nary_op_t)(void *)-1;
2665 if (dump_file && (dump_flags & TDF_DETAILS))
2667 fprintf (dump_file, "Inserting name ");
2668 print_generic_expr (dump_file, result);
2669 fprintf (dump_file, " for expression ");
2670 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
2671 fprintf (dump_file, "\n");
2674 return result;
2677 /* Return a value-number for RCODE OPS... either by looking up an existing
2678 value-number for the simplified result or by inserting the operation. */
2680 static tree
2681 vn_nary_build_or_lookup (gimple_match_op *res_op)
2683 return vn_nary_build_or_lookup_1 (res_op, true, true);
2686 /* Try to simplify the expression RCODE OPS... of type TYPE and return
2687 its value if present. */
2689 tree
2690 vn_nary_simplify (vn_nary_op_t nary)
2692 if (nary->length > gimple_match_op::MAX_NUM_OPS)
2693 return NULL_TREE;
2694 gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
2695 nary->type, nary->length);
2696 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
2697 return vn_nary_build_or_lookup_1 (&op, false, true);
2700 /* Elimination engine. */
2702 class eliminate_dom_walker : public dom_walker
2704 public:
2705 eliminate_dom_walker (cdi_direction, bitmap);
2706 ~eliminate_dom_walker ();
2708 edge before_dom_children (basic_block) final override;
2709 void after_dom_children (basic_block) final override;
2711 virtual tree eliminate_avail (basic_block, tree op);
2712 virtual void eliminate_push_avail (basic_block, tree op);
2713 tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
2715 void eliminate_stmt (basic_block, gimple_stmt_iterator *);
2717 unsigned eliminate_cleanup (bool region_p = false);
2719 bool do_pre;
2720 unsigned int el_todo;
2721 unsigned int eliminations;
2722 unsigned int insertions;
2724 /* SSA names that had their defs inserted by PRE if do_pre. */
2725 bitmap inserted_exprs;
2727 /* Blocks with statements that have had their EH properties changed. */
2728 bitmap need_eh_cleanup;
2730 /* Blocks with statements that have had their AB properties changed. */
2731 bitmap need_ab_cleanup;
2733 /* Local state for the eliminate domwalk. */
2734 auto_vec<gimple *> to_remove;
2735 auto_vec<gimple *> to_fixup;
2736 auto_vec<tree> avail;
2737 auto_vec<tree> avail_stack;
2740 /* Adaptor to the elimination engine using RPO availability. */
2742 class rpo_elim : public eliminate_dom_walker
2744 public:
2745 rpo_elim(basic_block entry_)
2746 : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
2747 m_avail_freelist (NULL) {}
2749 tree eliminate_avail (basic_block, tree op) final override;
2751 void eliminate_push_avail (basic_block, tree) final override;
2753 basic_block entry;
2754 /* Freelist of avail entries which are allocated from the vn_ssa_aux
2755 obstack. */
2756 vn_avail *m_avail_freelist;
2759 /* Global RPO state for access from hooks. */
2760 static eliminate_dom_walker *rpo_avail;
2761 basic_block vn_context_bb;
2763 /* Return true if BASE1 and BASE2 can be adjusted so they have the
2764 same address and adjust *OFFSET1 and *OFFSET2 accordingly.
2765 Otherwise return false. */
2767 static bool
2768 adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
2769 tree base2, poly_int64 *offset2)
2771 poly_int64 soff;
2772 if (TREE_CODE (base1) == MEM_REF
2773 && TREE_CODE (base2) == MEM_REF)
2775 if (mem_ref_offset (base1).to_shwi (&soff))
2777 base1 = TREE_OPERAND (base1, 0);
2778 *offset1 += soff * BITS_PER_UNIT;
2780 if (mem_ref_offset (base2).to_shwi (&soff))
2782 base2 = TREE_OPERAND (base2, 0);
2783 *offset2 += soff * BITS_PER_UNIT;
2785 return operand_equal_p (base1, base2, 0);
2787 return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
2790 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
2791 from the statement defining VUSE and if not successful tries to
2792 translate *REFP and VR_ through an aggregate copy at the definition
2793 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
2794 of *REF and *VR. If only disambiguation was performed then
2795 *DISAMBIGUATE_ONLY is set to true. */
2797 static void *
2798 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
2799 translate_flags *disambiguate_only)
2801 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2802 vn_reference_t vr = data->vr;
2803 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2804 tree base = ao_ref_base (ref);
2805 HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
2806 static vec<vn_reference_op_s> lhs_ops;
2807 ao_ref lhs_ref;
2808 bool lhs_ref_ok = false;
2809 poly_int64 copy_size;
2811 /* First try to disambiguate after value-replacing in the definitions LHS. */
2812 if (is_gimple_assign (def_stmt))
2814 tree lhs = gimple_assign_lhs (def_stmt);
2815 bool valueized_anything = false;
2816 /* Avoid re-allocation overhead. */
2817 lhs_ops.truncate (0);
2818 basic_block saved_rpo_bb = vn_context_bb;
2819 vn_context_bb = gimple_bb (def_stmt);
2820 if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
2822 copy_reference_ops_from_ref (lhs, &lhs_ops);
2823 valueize_refs_1 (&lhs_ops, &valueized_anything, true);
2825 vn_context_bb = saved_rpo_bb;
2826 ao_ref_init (&lhs_ref, lhs);
2827 lhs_ref_ok = true;
2828 if (valueized_anything
2829 && ao_ref_init_from_vn_reference
2830 (&lhs_ref, ao_ref_alias_set (&lhs_ref),
2831 ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
2832 && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
2834 *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2835 return NULL;
2838 /* When the def is a CLOBBER we can optimistically disambiguate
2839 against it since any overlap it would be undefined behavior.
2840 Avoid this for obvious must aliases to save compile-time though.
2841 We also may not do this when the query is used for redundant
2842 store removal. */
2843 if (!data->redundant_store_removal_p
2844 && gimple_clobber_p (def_stmt)
2845 && !operand_equal_p (ao_ref_base (&lhs_ref), base, OEP_ADDRESS_OF))
2847 *disambiguate_only = TR_DISAMBIGUATE;
2848 return NULL;
2851 /* Besides valueizing the LHS we can also use access-path based
2852 disambiguation on the original non-valueized ref. */
2853 if (!ref->ref
2854 && lhs_ref_ok
2855 && data->orig_ref.ref)
2857 /* We want to use the non-valueized LHS for this, but avoid redundant
2858 work. */
2859 ao_ref *lref = &lhs_ref;
2860 ao_ref lref_alt;
2861 if (valueized_anything)
2863 ao_ref_init (&lref_alt, lhs);
2864 lref = &lref_alt;
2866 if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
2868 *disambiguate_only = (valueized_anything
2869 ? TR_VALUEIZE_AND_DISAMBIGUATE
2870 : TR_DISAMBIGUATE);
2871 return NULL;
2875 /* If we reach a clobbering statement try to skip it and see if
2876 we find a VN result with exactly the same value as the
2877 possible clobber. In this case we can ignore the clobber
2878 and return the found value. */
2879 if (is_gimple_reg_type (TREE_TYPE (lhs))
2880 && types_compatible_p (TREE_TYPE (lhs), vr->type)
2881 && (ref->ref || data->orig_ref.ref)
2882 && !data->mask
2883 && data->partial_defs.is_empty ()
2884 && multiple_p (get_object_alignment
2885 (ref->ref ? ref->ref : data->orig_ref.ref),
2886 ref->size)
2887 && multiple_p (get_object_alignment (lhs), ref->size))
2889 tree rhs = gimple_assign_rhs1 (def_stmt);
2890 /* ??? We may not compare to ahead values which might be from
2891 a different loop iteration but only to loop invariants. Use
2892 CONSTANT_CLASS_P (unvalueized!) as conservative approximation.
2893 The one-hop lookup below doesn't have this issue since there's
2894 a virtual PHI before we ever reach a backedge to cross.
2895 We can skip multiple defs as long as they are from the same
2896 value though. */
2897 if (data->same_val
2898 && !operand_equal_p (data->same_val, rhs))
2900 else if (CONSTANT_CLASS_P (rhs))
2902 if (dump_file && (dump_flags & TDF_DETAILS))
2904 fprintf (dump_file,
2905 "Skipping possible redundant definition ");
2906 print_gimple_stmt (dump_file, def_stmt, 0);
2908 /* Delay the actual compare of the values to the end of the walk
2909 but do not update last_vuse from here. */
2910 data->last_vuse_ptr = NULL;
2911 data->same_val = rhs;
2912 return NULL;
2914 else
2916 tree saved_vuse = vr->vuse;
2917 hashval_t saved_hashcode = vr->hashcode;
2918 if (vr->vuse)
2919 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
2920 vr->vuse = vuse_ssa_val (gimple_vuse (def_stmt));
2921 if (vr->vuse)
2922 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
2923 vn_reference_t vnresult = NULL;
2924 /* Do not use vn_reference_lookup_2 since that might perform
2925 expression hashtable insertion but this lookup crosses
2926 a possible may-alias making such insertion conditionally
2927 invalid. */
2928 vn_reference_lookup_1 (vr, &vnresult);
2929 /* Need to restore vr->vuse and vr->hashcode. */
2930 vr->vuse = saved_vuse;
2931 vr->hashcode = saved_hashcode;
2932 if (vnresult)
2934 if (TREE_CODE (rhs) == SSA_NAME)
2935 rhs = SSA_VAL (rhs);
2936 if (vnresult->result
2937 && operand_equal_p (vnresult->result, rhs, 0))
2938 return vnresult;
2943 else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
2944 && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2945 && gimple_call_num_args (def_stmt) <= 4)
2947 /* For builtin calls valueize its arguments and call the
2948 alias oracle again. Valueization may improve points-to
2949 info of pointers and constify size and position arguments.
2950 Originally this was motivated by PR61034 which has
2951 conditional calls to free falsely clobbering ref because
2952 of imprecise points-to info of the argument. */
2953 tree oldargs[4];
2954 bool valueized_anything = false;
2955 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2957 oldargs[i] = gimple_call_arg (def_stmt, i);
2958 tree val = vn_valueize (oldargs[i]);
2959 if (val != oldargs[i])
2961 gimple_call_set_arg (def_stmt, i, val);
2962 valueized_anything = true;
2965 if (valueized_anything)
2967 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2968 ref, data->tbaa_p);
2969 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2970 gimple_call_set_arg (def_stmt, i, oldargs[i]);
2971 if (!res)
2973 *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2974 return NULL;
2979 if (*disambiguate_only > TR_TRANSLATE)
2980 return (void *)-1;
2982 /* If we cannot constrain the size of the reference we cannot
2983 test if anything kills it. */
2984 if (!ref->max_size_known_p ())
2985 return (void *)-1;
2987 poly_int64 offset = ref->offset;
2988 poly_int64 maxsize = ref->max_size;
2990 /* def_stmt may-defs *ref. See if we can derive a value for *ref
2991 from that definition.
2992 1) Memset. */
2993 if (is_gimple_reg_type (vr->type)
2994 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2995 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET_CHK))
2996 && (integer_zerop (gimple_call_arg (def_stmt, 1))
2997 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2998 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2999 && CHAR_BIT == 8
3000 && BITS_PER_UNIT == 8
3001 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
3002 && offset.is_constant (&offseti)
3003 && ref->size.is_constant (&sizei)
3004 && (offseti % BITS_PER_UNIT == 0
3005 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
3006 && (poly_int_tree_p (gimple_call_arg (def_stmt, 2))
3007 || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
3008 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)))))
3009 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
3010 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
3012 tree base2;
3013 poly_int64 offset2, size2, maxsize2;
3014 bool reverse;
3015 tree ref2 = gimple_call_arg (def_stmt, 0);
3016 if (TREE_CODE (ref2) == SSA_NAME)
3018 ref2 = SSA_VAL (ref2);
3019 if (TREE_CODE (ref2) == SSA_NAME
3020 && (TREE_CODE (base) != MEM_REF
3021 || TREE_OPERAND (base, 0) != ref2))
3023 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
3024 if (gimple_assign_single_p (def_stmt)
3025 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
3026 ref2 = gimple_assign_rhs1 (def_stmt);
3029 if (TREE_CODE (ref2) == ADDR_EXPR)
3031 ref2 = TREE_OPERAND (ref2, 0);
3032 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
3033 &reverse);
3034 if (!known_size_p (maxsize2)
3035 || !known_eq (maxsize2, size2)
3036 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
3037 return (void *)-1;
3039 else if (TREE_CODE (ref2) == SSA_NAME)
3041 poly_int64 soff;
3042 if (TREE_CODE (base) != MEM_REF
3043 || !(mem_ref_offset (base)
3044 << LOG2_BITS_PER_UNIT).to_shwi (&soff))
3045 return (void *)-1;
3046 offset += soff;
3047 offset2 = 0;
3048 if (TREE_OPERAND (base, 0) != ref2)
3050 gimple *def = SSA_NAME_DEF_STMT (ref2);
3051 if (is_gimple_assign (def)
3052 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
3053 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
3054 && poly_int_tree_p (gimple_assign_rhs2 (def)))
3056 tree rhs2 = gimple_assign_rhs2 (def);
3057 if (!(poly_offset_int::from (wi::to_poly_wide (rhs2),
3058 SIGNED)
3059 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
3060 return (void *)-1;
3061 ref2 = gimple_assign_rhs1 (def);
3062 if (TREE_CODE (ref2) == SSA_NAME)
3063 ref2 = SSA_VAL (ref2);
3065 else
3066 return (void *)-1;
3069 else
3070 return (void *)-1;
3071 tree len = gimple_call_arg (def_stmt, 2);
3072 HOST_WIDE_INT leni, offset2i;
3073 if (TREE_CODE (len) == SSA_NAME)
3074 len = SSA_VAL (len);
3075 /* Sometimes the above trickery is smarter than alias analysis. Take
3076 advantage of that. */
3077 if (!ranges_maybe_overlap_p (offset, maxsize, offset2,
3078 (wi::to_poly_offset (len)
3079 << LOG2_BITS_PER_UNIT)))
3080 return NULL;
3081 if (data->partial_defs.is_empty ()
3082 && known_subrange_p (offset, maxsize, offset2,
3083 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
3085 tree val;
3086 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
3087 val = build_zero_cst (vr->type);
3088 else if (INTEGRAL_TYPE_P (vr->type)
3089 && known_eq (ref->size, 8)
3090 && offseti % BITS_PER_UNIT == 0)
3092 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
3093 vr->type, gimple_call_arg (def_stmt, 1));
3094 val = vn_nary_build_or_lookup (&res_op);
3095 if (!val
3096 || (TREE_CODE (val) == SSA_NAME
3097 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
3098 return (void *)-1;
3100 else
3102 unsigned buflen
3103 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
3104 if (INTEGRAL_TYPE_P (vr->type)
3105 && TYPE_MODE (vr->type) != BLKmode)
3106 buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
3107 unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
3108 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
3109 buflen);
3110 if (BYTES_BIG_ENDIAN)
3112 unsigned int amnt
3113 = (((unsigned HOST_WIDE_INT) offseti + sizei)
3114 % BITS_PER_UNIT);
3115 if (amnt)
3117 shift_bytes_in_array_right (buf, buflen,
3118 BITS_PER_UNIT - amnt);
3119 buf++;
3120 buflen--;
3123 else if (offseti % BITS_PER_UNIT != 0)
3125 unsigned int amnt
3126 = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
3127 % BITS_PER_UNIT);
3128 shift_bytes_in_array_left (buf, buflen, amnt);
3129 buf++;
3130 buflen--;
3132 val = native_interpret_expr (vr->type, buf, buflen);
3133 if (!val)
3134 return (void *)-1;
3136 return data->finish (0, 0, val);
3138 /* For now handle clearing memory with partial defs. */
3139 else if (known_eq (ref->size, maxsize)
3140 && integer_zerop (gimple_call_arg (def_stmt, 1))
3141 && tree_fits_poly_int64_p (len)
3142 && tree_to_poly_int64 (len).is_constant (&leni)
3143 && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
3144 && offset.is_constant (&offseti)
3145 && offset2.is_constant (&offset2i)
3146 && maxsize.is_constant (&maxsizei)
3147 && ranges_known_overlap_p (offseti, maxsizei, offset2i,
3148 leni << LOG2_BITS_PER_UNIT))
3150 pd_data pd;
3151 pd.rhs = build_constructor (NULL_TREE, NULL);
3152 pd.rhs_off = 0;
3153 pd.offset = offset2i;
3154 pd.size = leni << LOG2_BITS_PER_UNIT;
3155 return data->push_partial_def (pd, 0, 0, offseti, maxsizei);
3159 /* 2) Assignment from an empty CONSTRUCTOR. */
3160 else if (is_gimple_reg_type (vr->type)
3161 && gimple_assign_single_p (def_stmt)
3162 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
3163 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
3165 tree base2;
3166 poly_int64 offset2, size2, maxsize2;
3167 HOST_WIDE_INT offset2i, size2i;
3168 gcc_assert (lhs_ref_ok);
3169 base2 = ao_ref_base (&lhs_ref);
3170 offset2 = lhs_ref.offset;
3171 size2 = lhs_ref.size;
3172 maxsize2 = lhs_ref.max_size;
3173 if (known_size_p (maxsize2)
3174 && known_eq (maxsize2, size2)
3175 && adjust_offsets_for_equal_base_address (base, &offset,
3176 base2, &offset2))
3178 if (data->partial_defs.is_empty ()
3179 && known_subrange_p (offset, maxsize, offset2, size2))
3181 /* While technically undefined behavior do not optimize
3182 a full read from a clobber. */
3183 if (gimple_clobber_p (def_stmt))
3184 return (void *)-1;
3185 tree val = build_zero_cst (vr->type);
3186 return data->finish (ao_ref_alias_set (&lhs_ref),
3187 ao_ref_base_alias_set (&lhs_ref), val);
3189 else if (known_eq (ref->size, maxsize)
3190 && maxsize.is_constant (&maxsizei)
3191 && offset.is_constant (&offseti)
3192 && offset2.is_constant (&offset2i)
3193 && size2.is_constant (&size2i)
3194 && ranges_known_overlap_p (offseti, maxsizei,
3195 offset2i, size2i))
3197 /* Let clobbers be consumed by the partial-def tracker
3198 which can choose to ignore them if they are shadowed
3199 by a later def. */
3200 pd_data pd;
3201 pd.rhs = gimple_assign_rhs1 (def_stmt);
3202 pd.rhs_off = 0;
3203 pd.offset = offset2i;
3204 pd.size = size2i;
3205 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3206 ao_ref_base_alias_set (&lhs_ref),
3207 offseti, maxsizei);
3212 /* 3) Assignment from a constant. We can use folds native encode/interpret
3213 routines to extract the assigned bits. */
3214 else if (known_eq (ref->size, maxsize)
3215 && is_gimple_reg_type (vr->type)
3216 && !reverse_storage_order_for_component_p (vr->operands)
3217 && !contains_storage_order_barrier_p (vr->operands)
3218 && gimple_assign_single_p (def_stmt)
3219 && CHAR_BIT == 8
3220 && BITS_PER_UNIT == 8
3221 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
3222 /* native_encode and native_decode operate on arrays of bytes
3223 and so fundamentally need a compile-time size and offset. */
3224 && maxsize.is_constant (&maxsizei)
3225 && offset.is_constant (&offseti)
3226 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
3227 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
3228 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
3230 tree lhs = gimple_assign_lhs (def_stmt);
3231 tree base2;
3232 poly_int64 offset2, size2, maxsize2;
3233 HOST_WIDE_INT offset2i, size2i;
3234 bool reverse;
3235 gcc_assert (lhs_ref_ok);
3236 base2 = ao_ref_base (&lhs_ref);
3237 offset2 = lhs_ref.offset;
3238 size2 = lhs_ref.size;
3239 maxsize2 = lhs_ref.max_size;
3240 reverse = reverse_storage_order_for_component_p (lhs);
3241 if (base2
3242 && !reverse
3243 && !storage_order_barrier_p (lhs)
3244 && known_eq (maxsize2, size2)
3245 && adjust_offsets_for_equal_base_address (base, &offset,
3246 base2, &offset2)
3247 && offset.is_constant (&offseti)
3248 && offset2.is_constant (&offset2i)
3249 && size2.is_constant (&size2i))
3251 if (data->partial_defs.is_empty ()
3252 && known_subrange_p (offseti, maxsizei, offset2, size2))
3254 /* We support up to 512-bit values (for V8DFmode). */
3255 unsigned char buffer[65];
3256 int len;
3258 tree rhs = gimple_assign_rhs1 (def_stmt);
3259 if (TREE_CODE (rhs) == SSA_NAME)
3260 rhs = SSA_VAL (rhs);
3261 len = native_encode_expr (rhs,
3262 buffer, sizeof (buffer) - 1,
3263 (offseti - offset2i) / BITS_PER_UNIT);
3264 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
3266 tree type = vr->type;
3267 unsigned char *buf = buffer;
3268 unsigned int amnt = 0;
3269 /* Make sure to interpret in a type that has a range
3270 covering the whole access size. */
3271 if (INTEGRAL_TYPE_P (vr->type)
3272 && maxsizei != TYPE_PRECISION (vr->type))
3273 type = build_nonstandard_integer_type (maxsizei,
3274 TYPE_UNSIGNED (type));
3275 if (BYTES_BIG_ENDIAN)
3277 /* For big-endian native_encode_expr stored the rhs
3278 such that the LSB of it is the LSB of buffer[len - 1].
3279 That bit is stored into memory at position
3280 offset2 + size2 - 1, i.e. in byte
3281 base + (offset2 + size2 - 1) / BITS_PER_UNIT.
3282 E.g. for offset2 1 and size2 14, rhs -1 and memory
3283 previously cleared that is:
3285 01111111|11111110
3286 Now, if we want to extract offset 2 and size 12 from
3287 it using native_interpret_expr (which actually works
3288 for integral bitfield types in terms of byte size of
3289 the mode), the native_encode_expr stored the value
3290 into buffer as
3291 XX111111|11111111
3292 and returned len 2 (the X bits are outside of
3293 precision).
3294 Let sz be maxsize / BITS_PER_UNIT if not extracting
3295 a bitfield, and GET_MODE_SIZE otherwise.
3296 We need to align the LSB of the value we want to
3297 extract as the LSB of buf[sz - 1].
3298 The LSB from memory we need to read is at position
3299 offset + maxsize - 1. */
3300 HOST_WIDE_INT sz = maxsizei / BITS_PER_UNIT;
3301 if (INTEGRAL_TYPE_P (type))
3303 if (TYPE_MODE (type) != BLKmode)
3304 sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
3305 else
3306 sz = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
3308 amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
3309 - offseti - maxsizei) % BITS_PER_UNIT;
3310 if (amnt)
3311 shift_bytes_in_array_right (buffer, len, amnt);
3312 amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
3313 - offseti - maxsizei - amnt) / BITS_PER_UNIT;
3314 if ((unsigned HOST_WIDE_INT) sz + amnt > (unsigned) len)
3315 len = 0;
3316 else
3318 buf = buffer + len - sz - amnt;
3319 len -= (buf - buffer);
3322 else
3324 amnt = ((unsigned HOST_WIDE_INT) offset2i
3325 - offseti) % BITS_PER_UNIT;
3326 if (amnt)
3328 buffer[len] = 0;
3329 shift_bytes_in_array_left (buffer, len + 1, amnt);
3330 buf = buffer + 1;
3333 tree val = native_interpret_expr (type, buf, len);
3334 /* If we chop off bits because the types precision doesn't
3335 match the memory access size this is ok when optimizing
3336 reads but not when called from the DSE code during
3337 elimination. */
3338 if (val
3339 && type != vr->type)
3341 if (! int_fits_type_p (val, vr->type))
3342 val = NULL_TREE;
3343 else
3344 val = fold_convert (vr->type, val);
3347 if (val)
3348 return data->finish (ao_ref_alias_set (&lhs_ref),
3349 ao_ref_base_alias_set (&lhs_ref), val);
3352 else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
3353 size2i))
3355 pd_data pd;
3356 tree rhs = gimple_assign_rhs1 (def_stmt);
3357 if (TREE_CODE (rhs) == SSA_NAME)
3358 rhs = SSA_VAL (rhs);
3359 pd.rhs = rhs;
3360 pd.rhs_off = 0;
3361 pd.offset = offset2i;
3362 pd.size = size2i;
3363 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3364 ao_ref_base_alias_set (&lhs_ref),
3365 offseti, maxsizei);
3370 /* 4) Assignment from an SSA name which definition we may be able
3371 to access pieces from or we can combine to a larger entity. */
3372 else if (known_eq (ref->size, maxsize)
3373 && is_gimple_reg_type (vr->type)
3374 && !reverse_storage_order_for_component_p (vr->operands)
3375 && !contains_storage_order_barrier_p (vr->operands)
3376 && gimple_assign_single_p (def_stmt)
3377 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
3379 tree lhs = gimple_assign_lhs (def_stmt);
3380 tree base2;
3381 poly_int64 offset2, size2, maxsize2;
3382 HOST_WIDE_INT offset2i, size2i, offseti;
3383 bool reverse;
3384 gcc_assert (lhs_ref_ok);
3385 base2 = ao_ref_base (&lhs_ref);
3386 offset2 = lhs_ref.offset;
3387 size2 = lhs_ref.size;
3388 maxsize2 = lhs_ref.max_size;
3389 reverse = reverse_storage_order_for_component_p (lhs);
3390 tree def_rhs = gimple_assign_rhs1 (def_stmt);
3391 if (!reverse
3392 && !storage_order_barrier_p (lhs)
3393 && known_size_p (maxsize2)
3394 && known_eq (maxsize2, size2)
3395 && adjust_offsets_for_equal_base_address (base, &offset,
3396 base2, &offset2))
3398 if (data->partial_defs.is_empty ()
3399 && known_subrange_p (offset, maxsize, offset2, size2)
3400 /* ??? We can't handle bitfield precision extracts without
3401 either using an alternate type for the BIT_FIELD_REF and
3402 then doing a conversion or possibly adjusting the offset
3403 according to endianness. */
3404 && (! INTEGRAL_TYPE_P (vr->type)
3405 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
3406 && multiple_p (ref->size, BITS_PER_UNIT))
3408 tree val = NULL_TREE;
3409 if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
3410 || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
3412 gimple_match_op op (gimple_match_cond::UNCOND,
3413 BIT_FIELD_REF, vr->type,
3414 SSA_VAL (def_rhs),
3415 bitsize_int (ref->size),
3416 bitsize_int (offset - offset2));
3417 val = vn_nary_build_or_lookup (&op);
3419 else if (known_eq (ref->size, size2))
3421 gimple_match_op op (gimple_match_cond::UNCOND,
3422 VIEW_CONVERT_EXPR, vr->type,
3423 SSA_VAL (def_rhs));
3424 val = vn_nary_build_or_lookup (&op);
3426 if (val
3427 && (TREE_CODE (val) != SSA_NAME
3428 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
3429 return data->finish (ao_ref_alias_set (&lhs_ref),
3430 ao_ref_base_alias_set (&lhs_ref), val);
3432 else if (maxsize.is_constant (&maxsizei)
3433 && offset.is_constant (&offseti)
3434 && offset2.is_constant (&offset2i)
3435 && size2.is_constant (&size2i)
3436 && ranges_known_overlap_p (offset, maxsize, offset2, size2))
3438 pd_data pd;
3439 pd.rhs = SSA_VAL (def_rhs);
3440 pd.rhs_off = 0;
3441 pd.offset = offset2i;
3442 pd.size = size2i;
3443 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3444 ao_ref_base_alias_set (&lhs_ref),
3445 offseti, maxsizei);
3450 /* 4b) Assignment done via one of the vectorizer internal store
3451 functions where we may be able to access pieces from or we can
3452 combine to a larger entity. */
3453 else if (known_eq (ref->size, maxsize)
3454 && is_gimple_reg_type (vr->type)
3455 && !reverse_storage_order_for_component_p (vr->operands)
3456 && !contains_storage_order_barrier_p (vr->operands)
3457 && is_gimple_call (def_stmt)
3458 && gimple_call_internal_p (def_stmt)
3459 && internal_store_fn_p (gimple_call_internal_fn (def_stmt)))
3461 gcall *call = as_a <gcall *> (def_stmt);
3462 internal_fn fn = gimple_call_internal_fn (call);
3464 tree mask = NULL_TREE, len = NULL_TREE, bias = NULL_TREE;
3465 switch (fn)
3467 case IFN_MASK_STORE:
3468 mask = gimple_call_arg (call, internal_fn_mask_index (fn));
3469 mask = vn_valueize (mask);
3470 if (TREE_CODE (mask) != VECTOR_CST)
3471 return (void *)-1;
3472 break;
3473 case IFN_LEN_STORE:
3475 int len_index = internal_fn_len_index (fn);
3476 len = gimple_call_arg (call, len_index);
3477 bias = gimple_call_arg (call, len_index + 1);
3478 if (!tree_fits_uhwi_p (len) || !tree_fits_shwi_p (bias))
3479 return (void *) -1;
3480 break;
3482 default:
3483 return (void *)-1;
3485 tree def_rhs = gimple_call_arg (call,
3486 internal_fn_stored_value_index (fn));
3487 def_rhs = vn_valueize (def_rhs);
3488 if (TREE_CODE (def_rhs) != VECTOR_CST)
3489 return (void *)-1;
3491 ao_ref_init_from_ptr_and_size (&lhs_ref,
3492 vn_valueize (gimple_call_arg (call, 0)),
3493 TYPE_SIZE_UNIT (TREE_TYPE (def_rhs)));
3494 tree base2;
3495 poly_int64 offset2, size2, maxsize2;
3496 HOST_WIDE_INT offset2i, size2i, offseti;
3497 base2 = ao_ref_base (&lhs_ref);
3498 offset2 = lhs_ref.offset;
3499 size2 = lhs_ref.size;
3500 maxsize2 = lhs_ref.max_size;
3501 if (known_size_p (maxsize2)
3502 && known_eq (maxsize2, size2)
3503 && adjust_offsets_for_equal_base_address (base, &offset,
3504 base2, &offset2)
3505 && maxsize.is_constant (&maxsizei)
3506 && offset.is_constant (&offseti)
3507 && offset2.is_constant (&offset2i)
3508 && size2.is_constant (&size2i))
3510 if (!ranges_maybe_overlap_p (offset, maxsize, offset2, size2))
3511 /* Poor-mans disambiguation. */
3512 return NULL;
3513 else if (ranges_known_overlap_p (offset, maxsize, offset2, size2))
3515 pd_data pd;
3516 pd.rhs = def_rhs;
3517 tree aa = gimple_call_arg (call, 1);
3518 alias_set_type set = get_deref_alias_set (TREE_TYPE (aa));
3519 tree vectype = TREE_TYPE (def_rhs);
3520 unsigned HOST_WIDE_INT elsz
3521 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (vectype)));
3522 if (mask)
3524 HOST_WIDE_INT start = 0, length = 0;
3525 unsigned mask_idx = 0;
3528 if (integer_zerop (VECTOR_CST_ELT (mask, mask_idx)))
3530 if (length != 0)
3532 pd.rhs_off = start;
3533 pd.offset = offset2i + start;
3534 pd.size = length;
3535 if (ranges_known_overlap_p
3536 (offset, maxsize, pd.offset, pd.size))
3538 void *res = data->push_partial_def
3539 (pd, set, set, offseti, maxsizei);
3540 if (res != NULL)
3541 return res;
3544 start = (mask_idx + 1) * elsz;
3545 length = 0;
3547 else
3548 length += elsz;
3549 mask_idx++;
3551 while (known_lt (mask_idx, TYPE_VECTOR_SUBPARTS (vectype)));
3552 if (length != 0)
3554 pd.rhs_off = start;
3555 pd.offset = offset2i + start;
3556 pd.size = length;
3557 if (ranges_known_overlap_p (offset, maxsize,
3558 pd.offset, pd.size))
3559 return data->push_partial_def (pd, set, set,
3560 offseti, maxsizei);
3563 else if (fn == IFN_LEN_STORE)
3565 pd.offset = offset2i;
3566 pd.size = (tree_to_uhwi (len)
3567 + -tree_to_shwi (bias)) * BITS_PER_UNIT;
3568 if (BYTES_BIG_ENDIAN)
3569 pd.rhs_off = pd.size - tree_to_uhwi (TYPE_SIZE (vectype));
3570 else
3571 pd.rhs_off = 0;
3572 if (ranges_known_overlap_p (offset, maxsize,
3573 pd.offset, pd.size))
3574 return data->push_partial_def (pd, set, set,
3575 offseti, maxsizei);
3577 else
3578 gcc_unreachable ();
3579 return NULL;
3584 /* 5) For aggregate copies translate the reference through them if
3585 the copy kills ref. */
3586 else if (data->vn_walk_kind == VN_WALKREWRITE
3587 && gimple_assign_single_p (def_stmt)
3588 && (DECL_P (gimple_assign_rhs1 (def_stmt))
3589 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
3590 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
3592 tree base2;
3593 int i, j, k;
3594 auto_vec<vn_reference_op_s> rhs;
3595 vn_reference_op_t vro;
3596 ao_ref r;
3598 gcc_assert (lhs_ref_ok);
3600 /* See if the assignment kills REF. */
3601 base2 = ao_ref_base (&lhs_ref);
3602 if (!lhs_ref.max_size_known_p ()
3603 || (base != base2
3604 && (TREE_CODE (base) != MEM_REF
3605 || TREE_CODE (base2) != MEM_REF
3606 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
3607 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
3608 TREE_OPERAND (base2, 1))))
3609 || !stmt_kills_ref_p (def_stmt, ref))
3610 return (void *)-1;
3612 /* Find the common base of ref and the lhs. lhs_ops already
3613 contains valueized operands for the lhs. */
3614 i = vr->operands.length () - 1;
3615 j = lhs_ops.length () - 1;
3616 while (j >= 0 && i >= 0
3617 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
3619 i--;
3620 j--;
3623 /* ??? The innermost op should always be a MEM_REF and we already
3624 checked that the assignment to the lhs kills vr. Thus for
3625 aggregate copies using char[] types the vn_reference_op_eq
3626 may fail when comparing types for compatibility. But we really
3627 don't care here - further lookups with the rewritten operands
3628 will simply fail if we messed up types too badly. */
3629 poly_int64 extra_off = 0;
3630 if (j == 0 && i >= 0
3631 && lhs_ops[0].opcode == MEM_REF
3632 && maybe_ne (lhs_ops[0].off, -1))
3634 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
3635 i--, j--;
3636 else if (vr->operands[i].opcode == MEM_REF
3637 && maybe_ne (vr->operands[i].off, -1))
3639 extra_off = vr->operands[i].off - lhs_ops[0].off;
3640 i--, j--;
3644 /* i now points to the first additional op.
3645 ??? LHS may not be completely contained in VR, one or more
3646 VIEW_CONVERT_EXPRs could be in its way. We could at least
3647 try handling outermost VIEW_CONVERT_EXPRs. */
3648 if (j != -1)
3649 return (void *)-1;
3651 /* Punt if the additional ops contain a storage order barrier. */
3652 for (k = i; k >= 0; k--)
3654 vro = &vr->operands[k];
3655 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
3656 return (void *)-1;
3659 /* Now re-write REF to be based on the rhs of the assignment. */
3660 tree rhs1 = gimple_assign_rhs1 (def_stmt);
3661 copy_reference_ops_from_ref (rhs1, &rhs);
3663 /* Apply an extra offset to the inner MEM_REF of the RHS. */
3664 bool force_no_tbaa = false;
3665 if (maybe_ne (extra_off, 0))
3667 if (rhs.length () < 2)
3668 return (void *)-1;
3669 int ix = rhs.length () - 2;
3670 if (rhs[ix].opcode != MEM_REF
3671 || known_eq (rhs[ix].off, -1))
3672 return (void *)-1;
3673 rhs[ix].off += extra_off;
3674 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
3675 build_int_cst (TREE_TYPE (rhs[ix].op0),
3676 extra_off));
3677 /* When we have offsetted the RHS, reading only parts of it,
3678 we can no longer use the original TBAA type, force alias-set
3679 zero. */
3680 force_no_tbaa = true;
3683 /* Save the operands since we need to use the original ones for
3684 the hash entry we use. */
3685 if (!data->saved_operands.exists ())
3686 data->saved_operands = vr->operands.copy ();
3688 /* We need to pre-pend vr->operands[0..i] to rhs. */
3689 vec<vn_reference_op_s> old = vr->operands;
3690 if (i + 1 + rhs.length () > vr->operands.length ())
3691 vr->operands.safe_grow (i + 1 + rhs.length (), true);
3692 else
3693 vr->operands.truncate (i + 1 + rhs.length ());
3694 FOR_EACH_VEC_ELT (rhs, j, vro)
3695 vr->operands[i + 1 + j] = *vro;
3696 valueize_refs (&vr->operands);
3697 if (old == shared_lookup_references)
3698 shared_lookup_references = vr->operands;
3699 vr->hashcode = vn_reference_compute_hash (vr);
3701 /* Try folding the new reference to a constant. */
3702 tree val = fully_constant_vn_reference_p (vr);
3703 if (val)
3705 if (data->partial_defs.is_empty ())
3706 return data->finish (ao_ref_alias_set (&lhs_ref),
3707 ao_ref_base_alias_set (&lhs_ref), val);
3708 /* This is the only interesting case for partial-def handling
3709 coming from targets that like to gimplify init-ctors as
3710 aggregate copies from constant data like aarch64 for
3711 PR83518. */
3712 if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
3714 pd_data pd;
3715 pd.rhs = val;
3716 pd.rhs_off = 0;
3717 pd.offset = 0;
3718 pd.size = maxsizei;
3719 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3720 ao_ref_base_alias_set (&lhs_ref),
3721 0, maxsizei);
3725 /* Continuing with partial defs isn't easily possible here, we
3726 have to find a full def from further lookups from here. Probably
3727 not worth the special-casing everywhere. */
3728 if (!data->partial_defs.is_empty ())
3729 return (void *)-1;
3731 /* Adjust *ref from the new operands. */
3732 ao_ref rhs1_ref;
3733 ao_ref_init (&rhs1_ref, rhs1);
3734 if (!ao_ref_init_from_vn_reference (&r,
3735 force_no_tbaa ? 0
3736 : ao_ref_alias_set (&rhs1_ref),
3737 force_no_tbaa ? 0
3738 : ao_ref_base_alias_set (&rhs1_ref),
3739 vr->type, vr->operands))
3740 return (void *)-1;
3741 /* This can happen with bitfields. */
3742 if (maybe_ne (ref->size, r.size))
3744 /* If the access lacks some subsetting simply apply that by
3745 shortening it. That in the end can only be successful
3746 if we can pun the lookup result which in turn requires
3747 exact offsets. */
3748 if (known_eq (r.size, r.max_size)
3749 && known_lt (ref->size, r.size))
3750 r.size = r.max_size = ref->size;
3751 else
3752 return (void *)-1;
3754 *ref = r;
3756 /* Do not update last seen VUSE after translating. */
3757 data->last_vuse_ptr = NULL;
3758 /* Invalidate the original access path since it now contains
3759 the wrong base. */
3760 data->orig_ref.ref = NULL_TREE;
3761 /* Use the alias-set of this LHS for recording an eventual result. */
3762 if (data->first_set == -2)
3764 data->first_set = ao_ref_alias_set (&lhs_ref);
3765 data->first_base_set = ao_ref_base_alias_set (&lhs_ref);
3768 /* Keep looking for the adjusted *REF / VR pair. */
3769 return NULL;
3772 /* 6) For memcpy copies translate the reference through them if the copy
3773 kills ref. But we cannot (easily) do this translation if the memcpy is
3774 a storage order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that
3775 can modify the storage order of objects (see storage_order_barrier_p). */
3776 else if (data->vn_walk_kind == VN_WALKREWRITE
3777 && is_gimple_reg_type (vr->type)
3778 /* ??? Handle BCOPY as well. */
3779 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
3780 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY_CHK)
3781 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
3782 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY_CHK)
3783 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)
3784 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE_CHK))
3785 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
3786 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
3787 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
3788 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
3789 && (poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
3790 || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
3791 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)),
3792 &copy_size)))
3793 /* Handling this is more complicated, give up for now. */
3794 && data->partial_defs.is_empty ())
3796 tree lhs, rhs;
3797 ao_ref r;
3798 poly_int64 rhs_offset, lhs_offset;
3799 vn_reference_op_s op;
3800 poly_uint64 mem_offset;
3801 poly_int64 at, byte_maxsize;
3803 /* Only handle non-variable, addressable refs. */
3804 if (maybe_ne (ref->size, maxsize)
3805 || !multiple_p (offset, BITS_PER_UNIT, &at)
3806 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
3807 return (void *)-1;
3809 /* Extract a pointer base and an offset for the destination. */
3810 lhs = gimple_call_arg (def_stmt, 0);
3811 lhs_offset = 0;
3812 if (TREE_CODE (lhs) == SSA_NAME)
3814 lhs = vn_valueize (lhs);
3815 if (TREE_CODE (lhs) == SSA_NAME)
3817 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
3818 if (gimple_assign_single_p (def_stmt)
3819 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
3820 lhs = gimple_assign_rhs1 (def_stmt);
3823 if (TREE_CODE (lhs) == ADDR_EXPR)
3825 if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (lhs)))
3826 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (lhs))))
3827 return (void *)-1;
3828 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
3829 &lhs_offset);
3830 if (!tem)
3831 return (void *)-1;
3832 if (TREE_CODE (tem) == MEM_REF
3833 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3835 lhs = TREE_OPERAND (tem, 0);
3836 if (TREE_CODE (lhs) == SSA_NAME)
3837 lhs = vn_valueize (lhs);
3838 lhs_offset += mem_offset;
3840 else if (DECL_P (tem))
3841 lhs = build_fold_addr_expr (tem);
3842 else
3843 return (void *)-1;
3845 if (TREE_CODE (lhs) != SSA_NAME
3846 && TREE_CODE (lhs) != ADDR_EXPR)
3847 return (void *)-1;
3849 /* Extract a pointer base and an offset for the source. */
3850 rhs = gimple_call_arg (def_stmt, 1);
3851 rhs_offset = 0;
3852 if (TREE_CODE (rhs) == SSA_NAME)
3853 rhs = vn_valueize (rhs);
3854 if (TREE_CODE (rhs) == ADDR_EXPR)
3856 if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhs)))
3857 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (rhs))))
3858 return (void *)-1;
3859 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
3860 &rhs_offset);
3861 if (!tem)
3862 return (void *)-1;
3863 if (TREE_CODE (tem) == MEM_REF
3864 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3866 rhs = TREE_OPERAND (tem, 0);
3867 rhs_offset += mem_offset;
3869 else if (DECL_P (tem)
3870 || TREE_CODE (tem) == STRING_CST)
3871 rhs = build_fold_addr_expr (tem);
3872 else
3873 return (void *)-1;
3875 if (TREE_CODE (rhs) == SSA_NAME)
3876 rhs = SSA_VAL (rhs);
3877 else if (TREE_CODE (rhs) != ADDR_EXPR)
3878 return (void *)-1;
3880 /* The bases of the destination and the references have to agree. */
3881 if (TREE_CODE (base) == MEM_REF)
3883 if (TREE_OPERAND (base, 0) != lhs
3884 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
3885 return (void *) -1;
3886 at += mem_offset;
3888 else if (!DECL_P (base)
3889 || TREE_CODE (lhs) != ADDR_EXPR
3890 || TREE_OPERAND (lhs, 0) != base)
3891 return (void *)-1;
3893 /* If the access is completely outside of the memcpy destination
3894 area there is no aliasing. */
3895 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
3896 return NULL;
3897 /* And the access has to be contained within the memcpy destination. */
3898 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
3899 return (void *)-1;
3901 /* Save the operands since we need to use the original ones for
3902 the hash entry we use. */
3903 if (!data->saved_operands.exists ())
3904 data->saved_operands = vr->operands.copy ();
3906 /* Make room for 2 operands in the new reference. */
3907 if (vr->operands.length () < 2)
3909 vec<vn_reference_op_s> old = vr->operands;
3910 vr->operands.safe_grow_cleared (2, true);
3911 if (old == shared_lookup_references)
3912 shared_lookup_references = vr->operands;
3914 else
3915 vr->operands.truncate (2);
3917 /* The looked-through reference is a simple MEM_REF. */
3918 memset (&op, 0, sizeof (op));
3919 op.type = vr->type;
3920 op.opcode = MEM_REF;
3921 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
3922 op.off = at - lhs_offset + rhs_offset;
3923 vr->operands[0] = op;
3924 op.type = TREE_TYPE (rhs);
3925 op.opcode = TREE_CODE (rhs);
3926 op.op0 = rhs;
3927 op.off = -1;
3928 vr->operands[1] = op;
3929 vr->hashcode = vn_reference_compute_hash (vr);
3931 /* Try folding the new reference to a constant. */
3932 tree val = fully_constant_vn_reference_p (vr);
3933 if (val)
3934 return data->finish (0, 0, val);
3936 /* Adjust *ref from the new operands. */
3937 if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
3938 return (void *)-1;
3939 /* This can happen with bitfields. */
3940 if (maybe_ne (ref->size, r.size))
3941 return (void *)-1;
3942 *ref = r;
3944 /* Do not update last seen VUSE after translating. */
3945 data->last_vuse_ptr = NULL;
3946 /* Invalidate the original access path since it now contains
3947 the wrong base. */
3948 data->orig_ref.ref = NULL_TREE;
3949 /* Use the alias-set of this stmt for recording an eventual result. */
3950 if (data->first_set == -2)
3952 data->first_set = 0;
3953 data->first_base_set = 0;
3956 /* Keep looking for the adjusted *REF / VR pair. */
3957 return NULL;
3960 /* Bail out and stop walking. */
3961 return (void *)-1;
3964 /* Return a reference op vector from OP that can be used for
3965 vn_reference_lookup_pieces. The caller is responsible for releasing
3966 the vector. */
3968 vec<vn_reference_op_s>
3969 vn_reference_operands_for_lookup (tree op)
3971 bool valueized;
3972 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
3975 /* Lookup a reference operation by it's parts, in the current hash table.
3976 Returns the resulting value number if it exists in the hash table,
3977 NULL_TREE otherwise. VNRESULT will be filled in with the actual
3978 vn_reference_t stored in the hashtable if something is found. */
3980 tree
3981 vn_reference_lookup_pieces (tree vuse, alias_set_type set,
3982 alias_set_type base_set, tree type,
3983 vec<vn_reference_op_s> operands,
3984 vn_reference_t *vnresult, vn_lookup_kind kind)
3986 struct vn_reference_s vr1;
3987 vn_reference_t tmp;
3988 tree cst;
3990 if (!vnresult)
3991 vnresult = &tmp;
3992 *vnresult = NULL;
3994 vr1.vuse = vuse_ssa_val (vuse);
3995 shared_lookup_references.truncate (0);
3996 shared_lookup_references.safe_grow (operands.length (), true);
3997 memcpy (shared_lookup_references.address (),
3998 operands.address (),
3999 sizeof (vn_reference_op_s)
4000 * operands.length ());
4001 bool valueized_p;
4002 valueize_refs_1 (&shared_lookup_references, &valueized_p);
4003 vr1.operands = shared_lookup_references;
4004 vr1.type = type;
4005 vr1.set = set;
4006 vr1.base_set = base_set;
4007 vr1.hashcode = vn_reference_compute_hash (&vr1);
4008 if ((cst = fully_constant_vn_reference_p (&vr1)))
4009 return cst;
4011 vn_reference_lookup_1 (&vr1, vnresult);
4012 if (!*vnresult
4013 && kind != VN_NOWALK
4014 && vr1.vuse)
4016 ao_ref r;
4017 unsigned limit = param_sccvn_max_alias_queries_per_access;
4018 vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE,
4019 false);
4020 vec<vn_reference_op_s> ops_for_ref;
4021 if (!valueized_p)
4022 ops_for_ref = vr1.operands;
4023 else
4025 /* For ao_ref_from_mem we have to ensure only available SSA names
4026 end up in base and the only convenient way to make this work
4027 for PRE is to re-valueize with that in mind. */
4028 ops_for_ref.create (operands.length ());
4029 ops_for_ref.quick_grow (operands.length ());
4030 memcpy (ops_for_ref.address (),
4031 operands.address (),
4032 sizeof (vn_reference_op_s)
4033 * operands.length ());
4034 valueize_refs_1 (&ops_for_ref, &valueized_p, true);
4036 if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
4037 ops_for_ref))
4038 *vnresult
4039 = ((vn_reference_t)
4040 walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
4041 vn_reference_lookup_3, vuse_valueize,
4042 limit, &data));
4043 if (ops_for_ref != shared_lookup_references)
4044 ops_for_ref.release ();
4045 gcc_checking_assert (vr1.operands == shared_lookup_references);
4046 if (*vnresult
4047 && data.same_val
4048 && (!(*vnresult)->result
4049 || !operand_equal_p ((*vnresult)->result, data.same_val)))
4051 *vnresult = NULL;
4052 return NULL_TREE;
4056 if (*vnresult)
4057 return (*vnresult)->result;
4059 return NULL_TREE;
4062 /* Lookup OP in the current hash table, and return the resulting value
4063 number if it exists in the hash table. Return NULL_TREE if it does
4064 not exist in the hash table or if the result field of the structure
4065 was NULL.. VNRESULT will be filled in with the vn_reference_t
4066 stored in the hashtable if one exists. When TBAA_P is false assume
4067 we are looking up a store and treat it as having alias-set zero.
4068 *LAST_VUSE_PTR will be updated with the VUSE the value lookup succeeded.
4069 MASK is either NULL_TREE, or can be an INTEGER_CST if the result of the
4070 load is bitwise anded with MASK and so we are only interested in a subset
4071 of the bits and can ignore if the other bits are uninitialized or
4072 not initialized with constants. When doing redundant store removal
4073 the caller has to set REDUNDANT_STORE_REMOVAL_P. */
4075 tree
4076 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
4077 vn_reference_t *vnresult, bool tbaa_p,
4078 tree *last_vuse_ptr, tree mask,
4079 bool redundant_store_removal_p)
4081 vec<vn_reference_op_s> operands;
4082 struct vn_reference_s vr1;
4083 bool valueized_anything;
4085 if (vnresult)
4086 *vnresult = NULL;
4088 vr1.vuse = vuse_ssa_val (vuse);
4089 vr1.operands = operands
4090 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
4092 /* Handle &MEM[ptr + 5].b[1].c as POINTER_PLUS_EXPR. Avoid doing
4093 this before the pass folding __builtin_object_size had a chance to run. */
4094 if ((cfun->curr_properties & PROP_objsz)
4095 && operands[0].opcode == ADDR_EXPR
4096 && operands.last ().opcode == SSA_NAME)
4098 poly_int64 off = 0;
4099 vn_reference_op_t vro;
4100 unsigned i;
4101 for (i = 1; operands.iterate (i, &vro); ++i)
4103 if (vro->opcode == SSA_NAME)
4104 break;
4105 else if (known_eq (vro->off, -1))
4106 break;
4107 off += vro->off;
4109 if (i == operands.length () - 1
4110 /* Make sure we the offset we accumulated in a 64bit int
4111 fits the address computation carried out in target
4112 offset precision. */
4113 && (off.coeffs[0]
4114 == sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
4116 gcc_assert (operands[i-1].opcode == MEM_REF);
4117 tree ops[2];
4118 ops[0] = operands[i].op0;
4119 ops[1] = wide_int_to_tree (sizetype, off);
4120 tree res = vn_nary_op_lookup_pieces (2, POINTER_PLUS_EXPR,
4121 TREE_TYPE (op), ops, NULL);
4122 if (res)
4123 return res;
4124 return NULL_TREE;
4128 vr1.type = TREE_TYPE (op);
4129 ao_ref op_ref;
4130 ao_ref_init (&op_ref, op);
4131 vr1.set = ao_ref_alias_set (&op_ref);
4132 vr1.base_set = ao_ref_base_alias_set (&op_ref);
4133 vr1.hashcode = vn_reference_compute_hash (&vr1);
4134 if (mask == NULL_TREE)
4135 if (tree cst = fully_constant_vn_reference_p (&vr1))
4136 return cst;
4138 if (kind != VN_NOWALK && vr1.vuse)
4140 vn_reference_t wvnresult;
4141 ao_ref r;
4142 unsigned limit = param_sccvn_max_alias_queries_per_access;
4143 auto_vec<vn_reference_op_s> ops_for_ref;
4144 if (valueized_anything)
4146 copy_reference_ops_from_ref (op, &ops_for_ref);
4147 bool tem;
4148 valueize_refs_1 (&ops_for_ref, &tem, true);
4150 /* Make sure to use a valueized reference if we valueized anything.
4151 Otherwise preserve the full reference for advanced TBAA. */
4152 if (!valueized_anything
4153 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
4154 vr1.type, ops_for_ref))
4155 ao_ref_init (&r, op);
4156 vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
4157 last_vuse_ptr, kind, tbaa_p, mask,
4158 redundant_store_removal_p);
4160 wvnresult
4161 = ((vn_reference_t)
4162 walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2,
4163 vn_reference_lookup_3, vuse_valueize, limit,
4164 &data));
4165 gcc_checking_assert (vr1.operands == shared_lookup_references);
4166 if (wvnresult)
4168 gcc_assert (mask == NULL_TREE);
4169 if (data.same_val
4170 && (!wvnresult->result
4171 || !operand_equal_p (wvnresult->result, data.same_val)))
4172 return NULL_TREE;
4173 if (vnresult)
4174 *vnresult = wvnresult;
4175 return wvnresult->result;
4177 else if (mask)
4178 return data.masked_result;
4180 return NULL_TREE;
4183 if (last_vuse_ptr)
4184 *last_vuse_ptr = vr1.vuse;
4185 if (mask)
4186 return NULL_TREE;
4187 return vn_reference_lookup_1 (&vr1, vnresult);
4190 /* Lookup CALL in the current hash table and return the entry in
4191 *VNRESULT if found. Populates *VR for the hashtable lookup. */
4193 void
4194 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
4195 vn_reference_t vr)
4197 if (vnresult)
4198 *vnresult = NULL;
4200 tree vuse = gimple_vuse (call);
4202 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
4203 vr->operands = valueize_shared_reference_ops_from_call (call);
4204 tree lhs = gimple_call_lhs (call);
4205 /* For non-SSA return values the referece ops contain the LHS. */
4206 vr->type = ((lhs && TREE_CODE (lhs) == SSA_NAME)
4207 ? TREE_TYPE (lhs) : NULL_TREE);
4208 vr->punned = false;
4209 vr->set = 0;
4210 vr->base_set = 0;
4211 vr->hashcode = vn_reference_compute_hash (vr);
4212 vn_reference_lookup_1 (vr, vnresult);
4215 /* Insert OP into the current hash table with a value number of RESULT. */
4217 static void
4218 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
4220 vn_reference_s **slot;
4221 vn_reference_t vr1;
4222 bool tem;
4224 vec<vn_reference_op_s> operands
4225 = valueize_shared_reference_ops_from_ref (op, &tem);
4226 /* Handle &MEM[ptr + 5].b[1].c as POINTER_PLUS_EXPR. Avoid doing this
4227 before the pass folding __builtin_object_size had a chance to run. */
4228 if ((cfun->curr_properties & PROP_objsz)
4229 && operands[0].opcode == ADDR_EXPR
4230 && operands.last ().opcode == SSA_NAME)
4232 poly_int64 off = 0;
4233 vn_reference_op_t vro;
4234 unsigned i;
4235 for (i = 1; operands.iterate (i, &vro); ++i)
4237 if (vro->opcode == SSA_NAME)
4238 break;
4239 else if (known_eq (vro->off, -1))
4240 break;
4241 off += vro->off;
4243 if (i == operands.length () - 1
4244 /* Make sure we the offset we accumulated in a 64bit int
4245 fits the address computation carried out in target
4246 offset precision. */
4247 && (off.coeffs[0]
4248 == sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
4250 gcc_assert (operands[i-1].opcode == MEM_REF);
4251 tree ops[2];
4252 ops[0] = operands[i].op0;
4253 ops[1] = wide_int_to_tree (sizetype, off);
4254 vn_nary_op_insert_pieces (2, POINTER_PLUS_EXPR,
4255 TREE_TYPE (op), ops, result,
4256 VN_INFO (result)->value_id);
4257 return;
4261 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
4262 if (TREE_CODE (result) == SSA_NAME)
4263 vr1->value_id = VN_INFO (result)->value_id;
4264 else
4265 vr1->value_id = get_or_alloc_constant_value_id (result);
4266 vr1->vuse = vuse_ssa_val (vuse);
4267 vr1->operands = operands.copy ();
4268 vr1->type = TREE_TYPE (op);
4269 vr1->punned = false;
4270 ao_ref op_ref;
4271 ao_ref_init (&op_ref, op);
4272 vr1->set = ao_ref_alias_set (&op_ref);
4273 vr1->base_set = ao_ref_base_alias_set (&op_ref);
4274 vr1->hashcode = vn_reference_compute_hash (vr1);
4275 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
4276 vr1->result_vdef = vdef;
4278 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
4279 INSERT);
4281 /* Because IL walking on reference lookup can end up visiting
4282 a def that is only to be visited later in iteration order
4283 when we are about to make an irreducible region reducible
4284 the def can be effectively processed and its ref being inserted
4285 by vn_reference_lookup_3 already. So we cannot assert (!*slot)
4286 but save a lookup if we deal with already inserted refs here. */
4287 if (*slot)
4289 /* We cannot assert that we have the same value either because
4290 when disentangling an irreducible region we may end up visiting
4291 a use before the corresponding def. That's a missed optimization
4292 only though. See gcc.dg/tree-ssa/pr87126.c for example. */
4293 if (dump_file && (dump_flags & TDF_DETAILS)
4294 && !operand_equal_p ((*slot)->result, vr1->result, 0))
4296 fprintf (dump_file, "Keeping old value ");
4297 print_generic_expr (dump_file, (*slot)->result);
4298 fprintf (dump_file, " because of collision\n");
4300 free_reference (vr1);
4301 obstack_free (&vn_tables_obstack, vr1);
4302 return;
4305 *slot = vr1;
4306 vr1->next = last_inserted_ref;
4307 last_inserted_ref = vr1;
4310 /* Insert a reference by it's pieces into the current hash table with
4311 a value number of RESULT. Return the resulting reference
4312 structure we created. */
4314 vn_reference_t
4315 vn_reference_insert_pieces (tree vuse, alias_set_type set,
4316 alias_set_type base_set, tree type,
4317 vec<vn_reference_op_s> operands,
4318 tree result, unsigned int value_id)
4321 vn_reference_s **slot;
4322 vn_reference_t vr1;
4324 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
4325 vr1->value_id = value_id;
4326 vr1->vuse = vuse_ssa_val (vuse);
4327 vr1->operands = operands;
4328 valueize_refs (&vr1->operands);
4329 vr1->type = type;
4330 vr1->punned = false;
4331 vr1->set = set;
4332 vr1->base_set = base_set;
4333 vr1->hashcode = vn_reference_compute_hash (vr1);
4334 if (result && TREE_CODE (result) == SSA_NAME)
4335 result = SSA_VAL (result);
4336 vr1->result = result;
4337 vr1->result_vdef = NULL_TREE;
4339 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
4340 INSERT);
4342 /* At this point we should have all the things inserted that we have
4343 seen before, and we should never try inserting something that
4344 already exists. */
4345 gcc_assert (!*slot);
4347 *slot = vr1;
4348 vr1->next = last_inserted_ref;
4349 last_inserted_ref = vr1;
4350 return vr1;
4353 /* Compute and return the hash value for nary operation VBO1. */
4355 hashval_t
4356 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
4358 inchash::hash hstate;
4359 unsigned i;
4361 if (((vno1->length == 2
4362 && commutative_tree_code (vno1->opcode))
4363 || (vno1->length == 3
4364 && commutative_ternary_tree_code (vno1->opcode)))
4365 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
4366 std::swap (vno1->op[0], vno1->op[1]);
4367 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
4368 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
4370 std::swap (vno1->op[0], vno1->op[1]);
4371 vno1->opcode = swap_tree_comparison (vno1->opcode);
4374 hstate.add_int (vno1->opcode);
4375 for (i = 0; i < vno1->length; ++i)
4376 inchash::add_expr (vno1->op[i], hstate);
4378 return hstate.end ();
4381 /* Compare nary operations VNO1 and VNO2 and return true if they are
4382 equivalent. */
4384 bool
4385 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
4387 unsigned i;
4389 if (vno1->hashcode != vno2->hashcode)
4390 return false;
4392 if (vno1->length != vno2->length)
4393 return false;
4395 if (vno1->opcode != vno2->opcode
4396 || !types_compatible_p (vno1->type, vno2->type))
4397 return false;
4399 for (i = 0; i < vno1->length; ++i)
4400 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
4401 return false;
4403 /* BIT_INSERT_EXPR has an implict operand as the type precision
4404 of op1. Need to check to make sure they are the same. */
4405 if (vno1->opcode == BIT_INSERT_EXPR
4406 && TREE_CODE (vno1->op[1]) == INTEGER_CST
4407 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
4408 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
4409 return false;
4411 return true;
4414 /* Initialize VNO from the pieces provided. */
4416 static void
4417 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
4418 enum tree_code code, tree type, tree *ops)
4420 vno->opcode = code;
4421 vno->length = length;
4422 vno->type = type;
4423 memcpy (&vno->op[0], ops, sizeof (tree) * length);
4426 /* Return the number of operands for a vn_nary ops structure from STMT. */
4428 unsigned int
4429 vn_nary_length_from_stmt (gimple *stmt)
4431 switch (gimple_assign_rhs_code (stmt))
4433 case REALPART_EXPR:
4434 case IMAGPART_EXPR:
4435 case VIEW_CONVERT_EXPR:
4436 return 1;
4438 case BIT_FIELD_REF:
4439 return 3;
4441 case CONSTRUCTOR:
4442 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
4444 default:
4445 return gimple_num_ops (stmt) - 1;
4449 /* Initialize VNO from STMT. */
4451 void
4452 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gassign *stmt)
4454 unsigned i;
4456 vno->opcode = gimple_assign_rhs_code (stmt);
4457 vno->type = TREE_TYPE (gimple_assign_lhs (stmt));
4458 switch (vno->opcode)
4460 case REALPART_EXPR:
4461 case IMAGPART_EXPR:
4462 case VIEW_CONVERT_EXPR:
4463 vno->length = 1;
4464 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
4465 break;
4467 case BIT_FIELD_REF:
4468 vno->length = 3;
4469 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
4470 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
4471 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
4472 break;
4474 case CONSTRUCTOR:
4475 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
4476 for (i = 0; i < vno->length; ++i)
4477 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
4478 break;
4480 default:
4481 gcc_checking_assert (!gimple_assign_single_p (stmt));
4482 vno->length = gimple_num_ops (stmt) - 1;
4483 for (i = 0; i < vno->length; ++i)
4484 vno->op[i] = gimple_op (stmt, i + 1);
4488 /* Compute the hashcode for VNO and look for it in the hash table;
4489 return the resulting value number if it exists in the hash table.
4490 Return NULL_TREE if it does not exist in the hash table or if the
4491 result field of the operation is NULL. VNRESULT will contain the
4492 vn_nary_op_t from the hashtable if it exists. */
4494 static tree
4495 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
4497 vn_nary_op_s **slot;
4499 if (vnresult)
4500 *vnresult = NULL;
4502 for (unsigned i = 0; i < vno->length; ++i)
4503 if (TREE_CODE (vno->op[i]) == SSA_NAME)
4504 vno->op[i] = SSA_VAL (vno->op[i]);
4506 vno->hashcode = vn_nary_op_compute_hash (vno);
4507 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
4508 if (!slot)
4509 return NULL_TREE;
4510 if (vnresult)
4511 *vnresult = *slot;
4512 return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
4515 /* Lookup a n-ary operation by its pieces and return the resulting value
4516 number if it exists in the hash table. Return NULL_TREE if it does
4517 not exist in the hash table or if the result field of the operation
4518 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
4519 if it exists. */
4521 tree
4522 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
4523 tree type, tree *ops, vn_nary_op_t *vnresult)
4525 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
4526 sizeof_vn_nary_op (length));
4527 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4528 return vn_nary_op_lookup_1 (vno1, vnresult);
4531 /* Lookup the rhs of STMT in the current hash table, and return the resulting
4532 value number if it exists in the hash table. Return NULL_TREE if
4533 it does not exist in the hash table. VNRESULT will contain the
4534 vn_nary_op_t from the hashtable if it exists. */
4536 tree
4537 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
4539 vn_nary_op_t vno1
4540 = XALLOCAVAR (struct vn_nary_op_s,
4541 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
4542 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
4543 return vn_nary_op_lookup_1 (vno1, vnresult);
4546 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
4548 vn_nary_op_t
4549 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
4551 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
4554 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
4555 obstack. */
4557 static vn_nary_op_t
4558 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
4560 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
4562 vno1->value_id = value_id;
4563 vno1->length = length;
4564 vno1->predicated_values = 0;
4565 vno1->u.result = result;
4567 return vno1;
4570 /* Insert VNO into TABLE. */
4572 static vn_nary_op_t
4573 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table)
4575 vn_nary_op_s **slot;
4577 gcc_assert (! vno->predicated_values
4578 || (! vno->u.values->next
4579 && vno->u.values->n == 1));
4581 for (unsigned i = 0; i < vno->length; ++i)
4582 if (TREE_CODE (vno->op[i]) == SSA_NAME)
4583 vno->op[i] = SSA_VAL (vno->op[i]);
4585 vno->hashcode = vn_nary_op_compute_hash (vno);
4586 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
4587 vno->unwind_to = *slot;
4588 if (*slot)
4590 /* Prefer non-predicated values.
4591 ??? Only if those are constant, otherwise, with constant predicated
4592 value, turn them into predicated values with entry-block validity
4593 (??? but we always find the first valid result currently). */
4594 if ((*slot)->predicated_values
4595 && ! vno->predicated_values)
4597 /* ??? We cannot remove *slot from the unwind stack list.
4598 For the moment we deal with this by skipping not found
4599 entries but this isn't ideal ... */
4600 *slot = vno;
4601 /* ??? Maintain a stack of states we can unwind in
4602 vn_nary_op_s? But how far do we unwind? In reality
4603 we need to push change records somewhere... Or not
4604 unwind vn_nary_op_s and linking them but instead
4605 unwind the results "list", linking that, which also
4606 doesn't move on hashtable resize. */
4607 /* We can also have a ->unwind_to recording *slot there.
4608 That way we can make u.values a fixed size array with
4609 recording the number of entries but of course we then
4610 have always N copies for each unwind_to-state. Or we
4611 make sure to only ever append and each unwinding will
4612 pop off one entry (but how to deal with predicated
4613 replaced with non-predicated here?) */
4614 vno->next = last_inserted_nary;
4615 last_inserted_nary = vno;
4616 return vno;
4618 else if (vno->predicated_values
4619 && ! (*slot)->predicated_values)
4620 return *slot;
4621 else if (vno->predicated_values
4622 && (*slot)->predicated_values)
4624 /* ??? Factor this all into a insert_single_predicated_value
4625 routine. */
4626 gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
4627 basic_block vno_bb
4628 = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
4629 vn_pval *nval = vno->u.values;
4630 vn_pval **next = &vno->u.values;
4631 bool found = false;
4632 for (vn_pval *val = (*slot)->u.values; val; val = val->next)
4634 if (expressions_equal_p (val->result, nval->result))
4636 found = true;
4637 for (unsigned i = 0; i < val->n; ++i)
4639 basic_block val_bb
4640 = BASIC_BLOCK_FOR_FN (cfun,
4641 val->valid_dominated_by_p[i]);
4642 if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
4643 /* Value registered with more generic predicate. */
4644 return *slot;
4645 else if (flag_checking)
4646 /* Shouldn't happen, we insert in RPO order. */
4647 gcc_assert (!dominated_by_p (CDI_DOMINATORS,
4648 val_bb, vno_bb));
4650 /* Append value. */
4651 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4652 sizeof (vn_pval)
4653 + val->n * sizeof (int));
4654 (*next)->next = NULL;
4655 (*next)->result = val->result;
4656 (*next)->n = val->n + 1;
4657 memcpy ((*next)->valid_dominated_by_p,
4658 val->valid_dominated_by_p,
4659 val->n * sizeof (int));
4660 (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
4661 next = &(*next)->next;
4662 if (dump_file && (dump_flags & TDF_DETAILS))
4663 fprintf (dump_file, "Appending predicate to value.\n");
4664 continue;
4666 /* Copy other predicated values. */
4667 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4668 sizeof (vn_pval)
4669 + (val->n-1) * sizeof (int));
4670 memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
4671 (*next)->next = NULL;
4672 next = &(*next)->next;
4674 if (!found)
4675 *next = nval;
4677 *slot = vno;
4678 vno->next = last_inserted_nary;
4679 last_inserted_nary = vno;
4680 return vno;
4683 /* While we do not want to insert things twice it's awkward to
4684 avoid it in the case where visit_nary_op pattern-matches stuff
4685 and ends up simplifying the replacement to itself. We then
4686 get two inserts, one from visit_nary_op and one from
4687 vn_nary_build_or_lookup.
4688 So allow inserts with the same value number. */
4689 if ((*slot)->u.result == vno->u.result)
4690 return *slot;
4693 /* ??? There's also optimistic vs. previous commited state merging
4694 that is problematic for the case of unwinding. */
4696 /* ??? We should return NULL if we do not use 'vno' and have the
4697 caller release it. */
4698 gcc_assert (!*slot);
4700 *slot = vno;
4701 vno->next = last_inserted_nary;
4702 last_inserted_nary = vno;
4703 return vno;
4706 /* Insert a n-ary operation into the current hash table using it's
4707 pieces. Return the vn_nary_op_t structure we created and put in
4708 the hashtable. */
4710 vn_nary_op_t
4711 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
4712 tree type, tree *ops,
4713 tree result, unsigned int value_id)
4715 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
4716 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4717 return vn_nary_op_insert_into (vno1, valid_info->nary);
4720 /* Return whether we can track a predicate valid when PRED_E is executed. */
4722 static bool
4723 can_track_predicate_on_edge (edge pred_e)
4725 /* ??? As we are currently recording the destination basic-block index in
4726 vn_pval.valid_dominated_by_p and using dominance for the
4727 validity check we cannot track predicates on all edges. */
4728 if (single_pred_p (pred_e->dest))
4729 return true;
4730 /* Never record for backedges. */
4731 if (pred_e->flags & EDGE_DFS_BACK)
4732 return false;
4733 /* When there's more than one predecessor we cannot track
4734 predicate validity based on the destination block. The
4735 exception is when all other incoming edges sources are
4736 dominated by the destination block. */
4737 edge_iterator ei;
4738 edge e;
4739 FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
4740 if (e != pred_e && ! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4741 return false;
4742 return true;
4745 static vn_nary_op_t
4746 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
4747 tree type, tree *ops,
4748 tree result, unsigned int value_id,
4749 edge pred_e)
4751 gcc_assert (can_track_predicate_on_edge (pred_e));
4753 if (dump_file && (dump_flags & TDF_DETAILS)
4754 /* ??? Fix dumping, but currently we only get comparisons. */
4755 && TREE_CODE_CLASS (code) == tcc_comparison)
4757 fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
4758 pred_e->dest->index);
4759 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4760 fprintf (dump_file, " %s ", get_tree_code_name (code));
4761 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4762 fprintf (dump_file, " == %s\n",
4763 integer_zerop (result) ? "false" : "true");
4765 vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
4766 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4767 vno1->predicated_values = 1;
4768 vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4769 sizeof (vn_pval));
4770 vno1->u.values->next = NULL;
4771 vno1->u.values->result = result;
4772 vno1->u.values->n = 1;
4773 vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
4774 return vn_nary_op_insert_into (vno1, valid_info->nary);
4777 static bool
4778 dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool);
4780 static tree
4781 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb,
4782 edge e = NULL)
4784 if (! vno->predicated_values)
4785 return vno->u.result;
4786 for (vn_pval *val = vno->u.values; val; val = val->next)
4787 for (unsigned i = 0; i < val->n; ++i)
4789 basic_block cand
4790 = BASIC_BLOCK_FOR_FN (cfun, val->valid_dominated_by_p[i]);
4791 /* Do not handle backedge executability optimistically since
4792 when figuring out whether to iterate we do not consider
4793 changed predication.
4794 When asking for predicated values on an edge avoid looking
4795 at edge executability for edges forward in our iteration
4796 as well. */
4797 if (e && (e->flags & EDGE_DFS_BACK))
4799 if (dominated_by_p (CDI_DOMINATORS, bb, cand))
4800 return val->result;
4802 else if (dominated_by_p_w_unex (bb, cand, false))
4803 return val->result;
4805 return NULL_TREE;
4808 static tree
4809 vn_nary_op_get_predicated_value (vn_nary_op_t vno, edge e)
4811 return vn_nary_op_get_predicated_value (vno, e->src, e);
4814 /* Insert the rhs of STMT into the current hash table with a value number of
4815 RESULT. */
4817 static vn_nary_op_t
4818 vn_nary_op_insert_stmt (gimple *stmt, tree result)
4820 vn_nary_op_t vno1
4821 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
4822 result, VN_INFO (result)->value_id);
4823 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
4824 return vn_nary_op_insert_into (vno1, valid_info->nary);
4827 /* Compute a hashcode for PHI operation VP1 and return it. */
4829 static inline hashval_t
4830 vn_phi_compute_hash (vn_phi_t vp1)
4832 inchash::hash hstate;
4833 tree phi1op;
4834 tree type;
4835 edge e;
4836 edge_iterator ei;
4838 hstate.add_int (EDGE_COUNT (vp1->block->preds));
4839 switch (EDGE_COUNT (vp1->block->preds))
4841 case 1:
4842 break;
4843 case 2:
4844 /* When this is a PHI node subject to CSE for different blocks
4845 avoid hashing the block index. */
4846 if (vp1->cclhs)
4847 break;
4848 /* Fallthru. */
4849 default:
4850 hstate.add_int (vp1->block->index);
4853 /* If all PHI arguments are constants we need to distinguish
4854 the PHI node via its type. */
4855 type = vp1->type;
4856 hstate.merge_hash (vn_hash_type (type));
4858 FOR_EACH_EDGE (e, ei, vp1->block->preds)
4860 /* Don't hash backedge values they need to be handled as VN_TOP
4861 for optimistic value-numbering. */
4862 if (e->flags & EDGE_DFS_BACK)
4863 continue;
4865 phi1op = vp1->phiargs[e->dest_idx];
4866 if (phi1op == VN_TOP)
4867 continue;
4868 inchash::add_expr (phi1op, hstate);
4871 return hstate.end ();
4875 /* Return true if COND1 and COND2 represent the same condition, set
4876 *INVERTED_P if one needs to be inverted to make it the same as
4877 the other. */
4879 static bool
4880 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
4881 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
4883 enum tree_code code1 = gimple_cond_code (cond1);
4884 enum tree_code code2 = gimple_cond_code (cond2);
4886 *inverted_p = false;
4887 if (code1 == code2)
4889 else if (code1 == swap_tree_comparison (code2))
4890 std::swap (lhs2, rhs2);
4891 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
4892 *inverted_p = true;
4893 else if (code1 == invert_tree_comparison
4894 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
4896 std::swap (lhs2, rhs2);
4897 *inverted_p = true;
4899 else
4900 return false;
4902 return ((expressions_equal_p (lhs1, lhs2)
4903 && expressions_equal_p (rhs1, rhs2))
4904 || (commutative_tree_code (code1)
4905 && expressions_equal_p (lhs1, rhs2)
4906 && expressions_equal_p (rhs1, lhs2)));
4909 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
4911 static int
4912 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
4914 if (vp1->hashcode != vp2->hashcode)
4915 return false;
4917 if (vp1->block != vp2->block)
4919 if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
4920 return false;
4922 switch (EDGE_COUNT (vp1->block->preds))
4924 case 1:
4925 /* Single-arg PHIs are just copies. */
4926 break;
4928 case 2:
4930 /* Make sure both PHIs are classified as CSEable. */
4931 if (! vp1->cclhs || ! vp2->cclhs)
4932 return false;
4934 /* Rule out backedges into the PHI. */
4935 gcc_checking_assert
4936 (vp1->block->loop_father->header != vp1->block
4937 && vp2->block->loop_father->header != vp2->block);
4939 /* If the PHI nodes do not have compatible types
4940 they are not the same. */
4941 if (!types_compatible_p (vp1->type, vp2->type))
4942 return false;
4944 /* If the immediate dominator end in switch stmts multiple
4945 values may end up in the same PHI arg via intermediate
4946 CFG merges. */
4947 basic_block idom1
4948 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4949 basic_block idom2
4950 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
4951 gcc_checking_assert (EDGE_COUNT (idom1->succs) == 2
4952 && EDGE_COUNT (idom2->succs) == 2);
4954 /* Verify the controlling stmt is the same. */
4955 gcond *last1 = as_a <gcond *> (*gsi_last_bb (idom1));
4956 gcond *last2 = as_a <gcond *> (*gsi_last_bb (idom2));
4957 bool inverted_p;
4958 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
4959 last2, vp2->cclhs, vp2->ccrhs,
4960 &inverted_p))
4961 return false;
4963 /* Get at true/false controlled edges into the PHI. */
4964 edge te1, te2, fe1, fe2;
4965 if (! extract_true_false_controlled_edges (idom1, vp1->block,
4966 &te1, &fe1)
4967 || ! extract_true_false_controlled_edges (idom2, vp2->block,
4968 &te2, &fe2))
4969 return false;
4971 /* Swap edges if the second condition is the inverted of the
4972 first. */
4973 if (inverted_p)
4974 std::swap (te2, fe2);
4976 /* Since we do not know which edge will be executed we have
4977 to be careful when matching VN_TOP. Be conservative and
4978 only match VN_TOP == VN_TOP for now, we could allow
4979 VN_TOP on the not prevailing PHI though. See for example
4980 PR102920. */
4981 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
4982 vp2->phiargs[te2->dest_idx], false)
4983 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
4984 vp2->phiargs[fe2->dest_idx], false))
4985 return false;
4987 return true;
4990 default:
4991 return false;
4995 /* If the PHI nodes do not have compatible types
4996 they are not the same. */
4997 if (!types_compatible_p (vp1->type, vp2->type))
4998 return false;
5000 /* Any phi in the same block will have it's arguments in the
5001 same edge order, because of how we store phi nodes. */
5002 unsigned nargs = EDGE_COUNT (vp1->block->preds);
5003 for (unsigned i = 0; i < nargs; ++i)
5005 tree phi1op = vp1->phiargs[i];
5006 tree phi2op = vp2->phiargs[i];
5007 if (phi1op == phi2op)
5008 continue;
5009 if (!expressions_equal_p (phi1op, phi2op, false))
5010 return false;
5013 return true;
5016 /* Lookup PHI in the current hash table, and return the resulting
5017 value number if it exists in the hash table. Return NULL_TREE if
5018 it does not exist in the hash table. */
5020 static tree
5021 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
5023 vn_phi_s **slot;
5024 struct vn_phi_s *vp1;
5025 edge e;
5026 edge_iterator ei;
5028 vp1 = XALLOCAVAR (struct vn_phi_s,
5029 sizeof (struct vn_phi_s)
5030 + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
5032 /* Canonicalize the SSA_NAME's to their value number. */
5033 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
5035 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5036 if (TREE_CODE (def) == SSA_NAME
5037 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
5039 if (!virtual_operand_p (def)
5040 && ssa_undefined_value_p (def, false))
5041 def = VN_TOP;
5042 else
5043 def = SSA_VAL (def);
5045 vp1->phiargs[e->dest_idx] = def;
5047 vp1->type = TREE_TYPE (gimple_phi_result (phi));
5048 vp1->block = gimple_bb (phi);
5049 /* Extract values of the controlling condition. */
5050 vp1->cclhs = NULL_TREE;
5051 vp1->ccrhs = NULL_TREE;
5052 if (EDGE_COUNT (vp1->block->preds) == 2
5053 && vp1->block->loop_father->header != vp1->block)
5055 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
5056 if (EDGE_COUNT (idom1->succs) == 2)
5057 if (gcond *last1 = safe_dyn_cast <gcond *> (*gsi_last_bb (idom1)))
5059 /* ??? We want to use SSA_VAL here. But possibly not
5060 allow VN_TOP. */
5061 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
5062 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
5065 vp1->hashcode = vn_phi_compute_hash (vp1);
5066 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
5067 if (!slot)
5068 return NULL_TREE;
5069 return (*slot)->result;
5072 /* Insert PHI into the current hash table with a value number of
5073 RESULT. */
5075 static vn_phi_t
5076 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
5078 vn_phi_s **slot;
5079 vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
5080 sizeof (vn_phi_s)
5081 + ((gimple_phi_num_args (phi) - 1)
5082 * sizeof (tree)));
5083 edge e;
5084 edge_iterator ei;
5086 /* Canonicalize the SSA_NAME's to their value number. */
5087 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
5089 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5090 if (TREE_CODE (def) == SSA_NAME
5091 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
5093 if (!virtual_operand_p (def)
5094 && ssa_undefined_value_p (def, false))
5095 def = VN_TOP;
5096 else
5097 def = SSA_VAL (def);
5099 vp1->phiargs[e->dest_idx] = def;
5101 vp1->value_id = VN_INFO (result)->value_id;
5102 vp1->type = TREE_TYPE (gimple_phi_result (phi));
5103 vp1->block = gimple_bb (phi);
5104 /* Extract values of the controlling condition. */
5105 vp1->cclhs = NULL_TREE;
5106 vp1->ccrhs = NULL_TREE;
5107 if (EDGE_COUNT (vp1->block->preds) == 2
5108 && vp1->block->loop_father->header != vp1->block)
5110 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
5111 if (EDGE_COUNT (idom1->succs) == 2)
5112 if (gcond *last1 = safe_dyn_cast <gcond *> (*gsi_last_bb (idom1)))
5114 /* ??? We want to use SSA_VAL here. But possibly not
5115 allow VN_TOP. */
5116 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
5117 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
5120 vp1->result = result;
5121 vp1->hashcode = vn_phi_compute_hash (vp1);
5123 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
5124 gcc_assert (!*slot);
5126 *slot = vp1;
5127 vp1->next = last_inserted_phi;
5128 last_inserted_phi = vp1;
5129 return vp1;
5133 /* Return true if BB1 is dominated by BB2 taking into account edges
5134 that are not executable. When ALLOW_BACK is false consider not
5135 executable backedges as executable. */
5137 static bool
5138 dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool allow_back)
5140 edge_iterator ei;
5141 edge e;
5143 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
5144 return true;
5146 /* Before iterating we'd like to know if there exists a
5147 (executable) path from bb2 to bb1 at all, if not we can
5148 directly return false. For now simply iterate once. */
5150 /* Iterate to the single executable bb1 predecessor. */
5151 if (EDGE_COUNT (bb1->preds) > 1)
5153 edge prede = NULL;
5154 FOR_EACH_EDGE (e, ei, bb1->preds)
5155 if ((e->flags & EDGE_EXECUTABLE)
5156 || (!allow_back && (e->flags & EDGE_DFS_BACK)))
5158 if (prede)
5160 prede = NULL;
5161 break;
5163 prede = e;
5165 if (prede)
5167 bb1 = prede->src;
5169 /* Re-do the dominance check with changed bb1. */
5170 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
5171 return true;
5175 /* Iterate to the single executable bb2 successor. */
5176 if (EDGE_COUNT (bb2->succs) > 1)
5178 edge succe = NULL;
5179 FOR_EACH_EDGE (e, ei, bb2->succs)
5180 if ((e->flags & EDGE_EXECUTABLE)
5181 || (!allow_back && (e->flags & EDGE_DFS_BACK)))
5183 if (succe)
5185 succe = NULL;
5186 break;
5188 succe = e;
5190 if (succe)
5192 /* Verify the reached block is only reached through succe.
5193 If there is only one edge we can spare us the dominator
5194 check and iterate directly. */
5195 if (EDGE_COUNT (succe->dest->preds) > 1)
5197 FOR_EACH_EDGE (e, ei, succe->dest->preds)
5198 if (e != succe
5199 && ((e->flags & EDGE_EXECUTABLE)
5200 || (!allow_back && (e->flags & EDGE_DFS_BACK))))
5202 succe = NULL;
5203 break;
5206 if (succe)
5208 bb2 = succe->dest;
5210 /* Re-do the dominance check with changed bb2. */
5211 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
5212 return true;
5217 /* We could now iterate updating bb1 / bb2. */
5218 return false;
5221 /* Set the value number of FROM to TO, return true if it has changed
5222 as a result. */
5224 static inline bool
5225 set_ssa_val_to (tree from, tree to)
5227 vn_ssa_aux_t from_info = VN_INFO (from);
5228 tree currval = from_info->valnum; // SSA_VAL (from)
5229 poly_int64 toff, coff;
5230 bool curr_undefined = false;
5231 bool curr_invariant = false;
5233 /* The only thing we allow as value numbers are ssa_names
5234 and invariants. So assert that here. We don't allow VN_TOP
5235 as visiting a stmt should produce a value-number other than
5236 that.
5237 ??? Still VN_TOP can happen for unreachable code, so force
5238 it to varying in that case. Not all code is prepared to
5239 get VN_TOP on valueization. */
5240 if (to == VN_TOP)
5242 /* ??? When iterating and visiting PHI <undef, backedge-value>
5243 for the first time we rightfully get VN_TOP and we need to
5244 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
5245 With SCCVN we were simply lucky we iterated the other PHI
5246 cycles first and thus visited the backedge-value DEF. */
5247 if (currval == VN_TOP)
5248 goto set_and_exit;
5249 if (dump_file && (dump_flags & TDF_DETAILS))
5250 fprintf (dump_file, "Forcing value number to varying on "
5251 "receiving VN_TOP\n");
5252 to = from;
5255 gcc_checking_assert (to != NULL_TREE
5256 && ((TREE_CODE (to) == SSA_NAME
5257 && (to == from || SSA_VAL (to) == to))
5258 || is_gimple_min_invariant (to)));
5260 if (from != to)
5262 if (currval == from)
5264 if (dump_file && (dump_flags & TDF_DETAILS))
5266 fprintf (dump_file, "Not changing value number of ");
5267 print_generic_expr (dump_file, from);
5268 fprintf (dump_file, " from VARYING to ");
5269 print_generic_expr (dump_file, to);
5270 fprintf (dump_file, "\n");
5272 return false;
5274 curr_invariant = is_gimple_min_invariant (currval);
5275 curr_undefined = (TREE_CODE (currval) == SSA_NAME
5276 && !virtual_operand_p (currval)
5277 && ssa_undefined_value_p (currval, false));
5278 if (currval != VN_TOP
5279 && !curr_invariant
5280 && !curr_undefined
5281 && is_gimple_min_invariant (to))
5283 if (dump_file && (dump_flags & TDF_DETAILS))
5285 fprintf (dump_file, "Forcing VARYING instead of changing "
5286 "value number of ");
5287 print_generic_expr (dump_file, from);
5288 fprintf (dump_file, " from ");
5289 print_generic_expr (dump_file, currval);
5290 fprintf (dump_file, " (non-constant) to ");
5291 print_generic_expr (dump_file, to);
5292 fprintf (dump_file, " (constant)\n");
5294 to = from;
5296 else if (currval != VN_TOP
5297 && !curr_undefined
5298 && TREE_CODE (to) == SSA_NAME
5299 && !virtual_operand_p (to)
5300 && ssa_undefined_value_p (to, false))
5302 if (dump_file && (dump_flags & TDF_DETAILS))
5304 fprintf (dump_file, "Forcing VARYING instead of changing "
5305 "value number of ");
5306 print_generic_expr (dump_file, from);
5307 fprintf (dump_file, " from ");
5308 print_generic_expr (dump_file, currval);
5309 fprintf (dump_file, " (non-undefined) to ");
5310 print_generic_expr (dump_file, to);
5311 fprintf (dump_file, " (undefined)\n");
5313 to = from;
5315 else if (TREE_CODE (to) == SSA_NAME
5316 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
5317 to = from;
5320 set_and_exit:
5321 if (dump_file && (dump_flags & TDF_DETAILS))
5323 fprintf (dump_file, "Setting value number of ");
5324 print_generic_expr (dump_file, from);
5325 fprintf (dump_file, " to ");
5326 print_generic_expr (dump_file, to);
5329 if (currval != to
5330 && !operand_equal_p (currval, to, 0)
5331 /* Different undefined SSA names are not actually different. See
5332 PR82320 for a testcase were we'd otherwise not terminate iteration. */
5333 && !(curr_undefined
5334 && TREE_CODE (to) == SSA_NAME
5335 && !virtual_operand_p (to)
5336 && ssa_undefined_value_p (to, false))
5337 /* ??? For addresses involving volatile objects or types operand_equal_p
5338 does not reliably detect ADDR_EXPRs as equal. We know we are only
5339 getting invariant gimple addresses here, so can use
5340 get_addr_base_and_unit_offset to do this comparison. */
5341 && !(TREE_CODE (currval) == ADDR_EXPR
5342 && TREE_CODE (to) == ADDR_EXPR
5343 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
5344 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
5345 && known_eq (coff, toff)))
5347 if (to != from
5348 && currval != VN_TOP
5349 && !curr_undefined
5350 /* We do not want to allow lattice transitions from one value
5351 to another since that may lead to not terminating iteration
5352 (see PR95049). Since there's no convenient way to check
5353 for the allowed transition of VAL -> PHI (loop entry value,
5354 same on two PHIs, to same PHI result) we restrict the check
5355 to invariants. */
5356 && curr_invariant
5357 && is_gimple_min_invariant (to))
5359 if (dump_file && (dump_flags & TDF_DETAILS))
5360 fprintf (dump_file, " forced VARYING");
5361 to = from;
5363 if (dump_file && (dump_flags & TDF_DETAILS))
5364 fprintf (dump_file, " (changed)\n");
5365 from_info->valnum = to;
5366 return true;
5368 if (dump_file && (dump_flags & TDF_DETAILS))
5369 fprintf (dump_file, "\n");
5370 return false;
5373 /* Set all definitions in STMT to value number to themselves.
5374 Return true if a value number changed. */
5376 static bool
5377 defs_to_varying (gimple *stmt)
5379 bool changed = false;
5380 ssa_op_iter iter;
5381 def_operand_p defp;
5383 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
5385 tree def = DEF_FROM_PTR (defp);
5386 changed |= set_ssa_val_to (def, def);
5388 return changed;
5391 /* Visit a copy between LHS and RHS, return true if the value number
5392 changed. */
5394 static bool
5395 visit_copy (tree lhs, tree rhs)
5397 /* Valueize. */
5398 rhs = SSA_VAL (rhs);
5400 return set_ssa_val_to (lhs, rhs);
5403 /* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
5404 is the same. */
5406 static tree
5407 valueized_wider_op (tree wide_type, tree op, bool allow_truncate)
5409 if (TREE_CODE (op) == SSA_NAME)
5410 op = vn_valueize (op);
5412 /* Either we have the op widened available. */
5413 tree ops[3] = {};
5414 ops[0] = op;
5415 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
5416 wide_type, ops, NULL);
5417 if (tem)
5418 return tem;
5420 /* Or the op is truncated from some existing value. */
5421 if (allow_truncate && TREE_CODE (op) == SSA_NAME)
5423 gimple *def = SSA_NAME_DEF_STMT (op);
5424 if (is_gimple_assign (def)
5425 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
5427 tem = gimple_assign_rhs1 (def);
5428 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
5430 if (TREE_CODE (tem) == SSA_NAME)
5431 tem = vn_valueize (tem);
5432 return tem;
5437 /* For constants simply extend it. */
5438 if (TREE_CODE (op) == INTEGER_CST)
5439 return wide_int_to_tree (wide_type, wi::to_widest (op));
5441 return NULL_TREE;
5444 /* Visit a nary operator RHS, value number it, and return true if the
5445 value number of LHS has changed as a result. */
5447 static bool
5448 visit_nary_op (tree lhs, gassign *stmt)
5450 vn_nary_op_t vnresult;
5451 tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
5452 if (! result && vnresult)
5453 result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
5454 if (result)
5455 return set_ssa_val_to (lhs, result);
5457 /* Do some special pattern matching for redundancies of operations
5458 in different types. */
5459 enum tree_code code = gimple_assign_rhs_code (stmt);
5460 tree type = TREE_TYPE (lhs);
5461 tree rhs1 = gimple_assign_rhs1 (stmt);
5462 switch (code)
5464 CASE_CONVERT:
5465 /* Match arithmetic done in a different type where we can easily
5466 substitute the result from some earlier sign-changed or widened
5467 operation. */
5468 if (INTEGRAL_TYPE_P (type)
5469 && TREE_CODE (rhs1) == SSA_NAME
5470 /* We only handle sign-changes, zero-extension -> & mask or
5471 sign-extension if we know the inner operation doesn't
5472 overflow. */
5473 && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
5474 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
5475 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
5476 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
5477 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
5479 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
5480 if (def
5481 && (gimple_assign_rhs_code (def) == PLUS_EXPR
5482 || gimple_assign_rhs_code (def) == MINUS_EXPR
5483 || gimple_assign_rhs_code (def) == MULT_EXPR))
5485 tree ops[3] = {};
5486 /* When requiring a sign-extension we cannot model a
5487 previous truncation with a single op so don't bother. */
5488 bool allow_truncate = TYPE_UNSIGNED (TREE_TYPE (rhs1));
5489 /* Either we have the op widened available. */
5490 ops[0] = valueized_wider_op (type, gimple_assign_rhs1 (def),
5491 allow_truncate);
5492 if (ops[0])
5493 ops[1] = valueized_wider_op (type, gimple_assign_rhs2 (def),
5494 allow_truncate);
5495 if (ops[0] && ops[1])
5497 ops[0] = vn_nary_op_lookup_pieces
5498 (2, gimple_assign_rhs_code (def), type, ops, NULL);
5499 /* We have wider operation available. */
5500 if (ops[0]
5501 /* If the leader is a wrapping operation we can
5502 insert it for code hoisting w/o introducing
5503 undefined overflow. If it is not it has to
5504 be available. See PR86554. */
5505 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
5506 || (rpo_avail && vn_context_bb
5507 && rpo_avail->eliminate_avail (vn_context_bb,
5508 ops[0]))))
5510 unsigned lhs_prec = TYPE_PRECISION (type);
5511 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
5512 if (lhs_prec == rhs_prec
5513 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
5514 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
5516 gimple_match_op match_op (gimple_match_cond::UNCOND,
5517 NOP_EXPR, type, ops[0]);
5518 result = vn_nary_build_or_lookup (&match_op);
5519 if (result)
5521 bool changed = set_ssa_val_to (lhs, result);
5522 vn_nary_op_insert_stmt (stmt, result);
5523 return changed;
5526 else
5528 tree mask = wide_int_to_tree
5529 (type, wi::mask (rhs_prec, false, lhs_prec));
5530 gimple_match_op match_op (gimple_match_cond::UNCOND,
5531 BIT_AND_EXPR,
5532 TREE_TYPE (lhs),
5533 ops[0], mask);
5534 result = vn_nary_build_or_lookup (&match_op);
5535 if (result)
5537 bool changed = set_ssa_val_to (lhs, result);
5538 vn_nary_op_insert_stmt (stmt, result);
5539 return changed;
5546 break;
5547 case BIT_AND_EXPR:
5548 if (INTEGRAL_TYPE_P (type)
5549 && TREE_CODE (rhs1) == SSA_NAME
5550 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
5551 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
5552 && default_vn_walk_kind != VN_NOWALK
5553 && CHAR_BIT == 8
5554 && BITS_PER_UNIT == 8
5555 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
5556 && TYPE_PRECISION (type) <= vn_walk_cb_data::bufsize * BITS_PER_UNIT
5557 && !integer_all_onesp (gimple_assign_rhs2 (stmt))
5558 && !integer_zerop (gimple_assign_rhs2 (stmt)))
5560 gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
5561 if (ass
5562 && !gimple_has_volatile_ops (ass)
5563 && vn_get_stmt_kind (ass) == VN_REFERENCE)
5565 tree last_vuse = gimple_vuse (ass);
5566 tree op = gimple_assign_rhs1 (ass);
5567 tree result = vn_reference_lookup (op, gimple_vuse (ass),
5568 default_vn_walk_kind,
5569 NULL, true, &last_vuse,
5570 gimple_assign_rhs2 (stmt));
5571 if (result
5572 && useless_type_conversion_p (TREE_TYPE (result),
5573 TREE_TYPE (op)))
5574 return set_ssa_val_to (lhs, result);
5577 break;
5578 case TRUNC_DIV_EXPR:
5579 if (TYPE_UNSIGNED (type))
5580 break;
5581 /* Fallthru. */
5582 case RDIV_EXPR:
5583 case MULT_EXPR:
5584 /* Match up ([-]a){/,*}([-])b with v=a{/,*}b, replacing it with -v. */
5585 if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
5587 tree rhs[2];
5588 rhs[0] = rhs1;
5589 rhs[1] = gimple_assign_rhs2 (stmt);
5590 for (unsigned i = 0; i <= 1; ++i)
5592 unsigned j = i == 0 ? 1 : 0;
5593 tree ops[2];
5594 gimple_match_op match_op (gimple_match_cond::UNCOND,
5595 NEGATE_EXPR, type, rhs[i]);
5596 ops[i] = vn_nary_build_or_lookup_1 (&match_op, false, true);
5597 ops[j] = rhs[j];
5598 if (ops[i]
5599 && (ops[0] = vn_nary_op_lookup_pieces (2, code,
5600 type, ops, NULL)))
5602 gimple_match_op match_op (gimple_match_cond::UNCOND,
5603 NEGATE_EXPR, type, ops[0]);
5604 result = vn_nary_build_or_lookup_1 (&match_op, true, false);
5605 if (result)
5607 bool changed = set_ssa_val_to (lhs, result);
5608 vn_nary_op_insert_stmt (stmt, result);
5609 return changed;
5614 break;
5615 case LSHIFT_EXPR:
5616 /* For X << C, use the value number of X * (1 << C). */
5617 if (INTEGRAL_TYPE_P (type)
5618 && TYPE_OVERFLOW_WRAPS (type)
5619 && !TYPE_SATURATING (type))
5621 tree rhs2 = gimple_assign_rhs2 (stmt);
5622 if (TREE_CODE (rhs2) == INTEGER_CST
5623 && tree_fits_uhwi_p (rhs2)
5624 && tree_to_uhwi (rhs2) < TYPE_PRECISION (type))
5626 wide_int w = wi::set_bit_in_zero (tree_to_uhwi (rhs2),
5627 TYPE_PRECISION (type));
5628 gimple_match_op match_op (gimple_match_cond::UNCOND,
5629 MULT_EXPR, type, rhs1,
5630 wide_int_to_tree (type, w));
5631 result = vn_nary_build_or_lookup (&match_op);
5632 if (result)
5634 bool changed = set_ssa_val_to (lhs, result);
5635 if (TREE_CODE (result) == SSA_NAME)
5636 vn_nary_op_insert_stmt (stmt, result);
5637 return changed;
5641 break;
5642 default:
5643 break;
5646 bool changed = set_ssa_val_to (lhs, lhs);
5647 vn_nary_op_insert_stmt (stmt, lhs);
5648 return changed;
5651 /* Visit a call STMT storing into LHS. Return true if the value number
5652 of the LHS has changed as a result. */
5654 static bool
5655 visit_reference_op_call (tree lhs, gcall *stmt)
5657 bool changed = false;
5658 struct vn_reference_s vr1;
5659 vn_reference_t vnresult = NULL;
5660 tree vdef = gimple_vdef (stmt);
5661 modref_summary *summary;
5663 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
5664 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5665 lhs = NULL_TREE;
5667 vn_reference_lookup_call (stmt, &vnresult, &vr1);
5669 /* If the lookup did not succeed for pure functions try to use
5670 modref info to find a candidate to CSE to. */
5671 const unsigned accesses_limit = 8;
5672 if (!vnresult
5673 && !vdef
5674 && lhs
5675 && gimple_vuse (stmt)
5676 && (((summary = get_modref_function_summary (stmt, NULL))
5677 && !summary->global_memory_read
5678 && summary->load_accesses < accesses_limit)
5679 || gimple_call_flags (stmt) & ECF_CONST))
5681 /* First search if we can do someting useful and build a
5682 vector of all loads we have to check. */
5683 bool unknown_memory_access = false;
5684 auto_vec<ao_ref, accesses_limit> accesses;
5685 unsigned load_accesses = summary ? summary->load_accesses : 0;
5686 if (!unknown_memory_access)
5687 /* Add loads done as part of setting up the call arguments.
5688 That's also necessary for CONST functions which will
5689 not have a modref summary. */
5690 for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
5692 tree arg = gimple_call_arg (stmt, i);
5693 if (TREE_CODE (arg) != SSA_NAME
5694 && !is_gimple_min_invariant (arg))
5696 if (accesses.length () >= accesses_limit - load_accesses)
5698 unknown_memory_access = true;
5699 break;
5701 accesses.quick_grow (accesses.length () + 1);
5702 ao_ref_init (&accesses.last (), arg);
5705 if (summary && !unknown_memory_access)
5707 /* Add loads as analyzed by IPA modref. */
5708 for (auto base_node : summary->loads->bases)
5709 if (unknown_memory_access)
5710 break;
5711 else for (auto ref_node : base_node->refs)
5712 if (unknown_memory_access)
5713 break;
5714 else for (auto access_node : ref_node->accesses)
5716 accesses.quick_grow (accesses.length () + 1);
5717 ao_ref *r = &accesses.last ();
5718 if (!access_node.get_ao_ref (stmt, r))
5720 /* Initialize a ref based on the argument and
5721 unknown offset if possible. */
5722 tree arg = access_node.get_call_arg (stmt);
5723 if (arg && TREE_CODE (arg) == SSA_NAME)
5724 arg = SSA_VAL (arg);
5725 if (arg
5726 && TREE_CODE (arg) == ADDR_EXPR
5727 && (arg = get_base_address (arg))
5728 && DECL_P (arg))
5730 ao_ref_init (r, arg);
5731 r->ref = NULL_TREE;
5732 r->base = arg;
5734 else
5736 unknown_memory_access = true;
5737 break;
5740 r->base_alias_set = base_node->base;
5741 r->ref_alias_set = ref_node->ref;
5745 /* Walk the VUSE->VDEF chain optimistically trying to find an entry
5746 for the call in the hashtable. */
5747 unsigned limit = (unknown_memory_access
5749 : (param_sccvn_max_alias_queries_per_access
5750 / (accesses.length () + 1)));
5751 tree saved_vuse = vr1.vuse;
5752 hashval_t saved_hashcode = vr1.hashcode;
5753 while (limit > 0 && !vnresult && !SSA_NAME_IS_DEFAULT_DEF (vr1.vuse))
5755 vr1.hashcode = vr1.hashcode - SSA_NAME_VERSION (vr1.vuse);
5756 gimple *def = SSA_NAME_DEF_STMT (vr1.vuse);
5757 /* ??? We could use fancy stuff like in walk_non_aliased_vuses, but
5758 do not bother for now. */
5759 if (is_a <gphi *> (def))
5760 break;
5761 vr1.vuse = vuse_ssa_val (gimple_vuse (def));
5762 vr1.hashcode = vr1.hashcode + SSA_NAME_VERSION (vr1.vuse);
5763 vn_reference_lookup_1 (&vr1, &vnresult);
5764 limit--;
5767 /* If we found a candidate to CSE to verify it is valid. */
5768 if (vnresult && !accesses.is_empty ())
5770 tree vuse = vuse_ssa_val (gimple_vuse (stmt));
5771 while (vnresult && vuse != vr1.vuse)
5773 gimple *def = SSA_NAME_DEF_STMT (vuse);
5774 for (auto &ref : accesses)
5776 /* ??? stmt_may_clobber_ref_p_1 does per stmt constant
5777 analysis overhead that we might be able to cache. */
5778 if (stmt_may_clobber_ref_p_1 (def, &ref, true))
5780 vnresult = NULL;
5781 break;
5784 vuse = vuse_ssa_val (gimple_vuse (def));
5787 vr1.vuse = saved_vuse;
5788 vr1.hashcode = saved_hashcode;
5791 if (vnresult)
5793 if (vdef)
5795 if (vnresult->result_vdef)
5796 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
5797 else if (!lhs && gimple_call_lhs (stmt))
5798 /* If stmt has non-SSA_NAME lhs, value number the vdef to itself,
5799 as the call still acts as a lhs store. */
5800 changed |= set_ssa_val_to (vdef, vdef);
5801 else
5802 /* If the call was discovered to be pure or const reflect
5803 that as far as possible. */
5804 changed |= set_ssa_val_to (vdef,
5805 vuse_ssa_val (gimple_vuse (stmt)));
5808 if (!vnresult->result && lhs)
5809 vnresult->result = lhs;
5811 if (vnresult->result && lhs)
5812 changed |= set_ssa_val_to (lhs, vnresult->result);
5814 else
5816 vn_reference_t vr2;
5817 vn_reference_s **slot;
5818 tree vdef_val = vdef;
5819 if (vdef)
5821 /* If we value numbered an indirect functions function to
5822 one not clobbering memory value number its VDEF to its
5823 VUSE. */
5824 tree fn = gimple_call_fn (stmt);
5825 if (fn && TREE_CODE (fn) == SSA_NAME)
5827 fn = SSA_VAL (fn);
5828 if (TREE_CODE (fn) == ADDR_EXPR
5829 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5830 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
5831 & (ECF_CONST | ECF_PURE))
5832 /* If stmt has non-SSA_NAME lhs, value number the
5833 vdef to itself, as the call still acts as a lhs
5834 store. */
5835 && (lhs || gimple_call_lhs (stmt) == NULL_TREE))
5836 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
5838 changed |= set_ssa_val_to (vdef, vdef_val);
5840 if (lhs)
5841 changed |= set_ssa_val_to (lhs, lhs);
5842 vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
5843 vr2->vuse = vr1.vuse;
5844 /* As we are not walking the virtual operand chain we know the
5845 shared_lookup_references are still original so we can re-use
5846 them here. */
5847 vr2->operands = vr1.operands.copy ();
5848 vr2->type = vr1.type;
5849 vr2->punned = vr1.punned;
5850 vr2->set = vr1.set;
5851 vr2->base_set = vr1.base_set;
5852 vr2->hashcode = vr1.hashcode;
5853 vr2->result = lhs;
5854 vr2->result_vdef = vdef_val;
5855 vr2->value_id = 0;
5856 slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
5857 INSERT);
5858 gcc_assert (!*slot);
5859 *slot = vr2;
5860 vr2->next = last_inserted_ref;
5861 last_inserted_ref = vr2;
5864 return changed;
5867 /* Visit a load from a reference operator RHS, part of STMT, value number it,
5868 and return true if the value number of the LHS has changed as a result. */
5870 static bool
5871 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
5873 bool changed = false;
5874 tree result;
5875 vn_reference_t res;
5877 tree vuse = gimple_vuse (stmt);
5878 tree last_vuse = vuse;
5879 result = vn_reference_lookup (op, vuse, default_vn_walk_kind, &res, true, &last_vuse);
5881 /* We handle type-punning through unions by value-numbering based
5882 on offset and size of the access. Be prepared to handle a
5883 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
5884 if (result
5885 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
5887 /* Avoid the type punning in case the result mode has padding where
5888 the op we lookup has not. */
5889 if (TYPE_MODE (TREE_TYPE (result)) != BLKmode
5890 && maybe_lt (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (result))),
5891 GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (op)))))
5892 result = NULL_TREE;
5893 else if (CONSTANT_CLASS_P (result))
5894 result = const_unop (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
5895 else
5897 /* We will be setting the value number of lhs to the value number
5898 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
5899 So first simplify and lookup this expression to see if it
5900 is already available. */
5901 gimple_match_op res_op (gimple_match_cond::UNCOND,
5902 VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
5903 result = vn_nary_build_or_lookup (&res_op);
5904 if (result
5905 && TREE_CODE (result) == SSA_NAME
5906 && VN_INFO (result)->needs_insertion)
5907 /* Track whether this is the canonical expression for different
5908 typed loads. We use that as a stopgap measure for code
5909 hoisting when dealing with floating point loads. */
5910 res->punned = true;
5913 /* When building the conversion fails avoid inserting the reference
5914 again. */
5915 if (!result)
5916 return set_ssa_val_to (lhs, lhs);
5919 if (result)
5920 changed = set_ssa_val_to (lhs, result);
5921 else
5923 changed = set_ssa_val_to (lhs, lhs);
5924 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
5925 if (vuse && SSA_VAL (last_vuse) != SSA_VAL (vuse))
5927 if (dump_file && (dump_flags & TDF_DETAILS))
5929 fprintf (dump_file, "Using extra use virtual operand ");
5930 print_generic_expr (dump_file, last_vuse);
5931 fprintf (dump_file, "\n");
5933 vn_reference_insert (op, lhs, vuse, NULL_TREE);
5937 return changed;
5941 /* Visit a store to a reference operator LHS, part of STMT, value number it,
5942 and return true if the value number of the LHS has changed as a result. */
5944 static bool
5945 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
5947 bool changed = false;
5948 vn_reference_t vnresult = NULL;
5949 tree assign;
5950 bool resultsame = false;
5951 tree vuse = gimple_vuse (stmt);
5952 tree vdef = gimple_vdef (stmt);
5954 if (TREE_CODE (op) == SSA_NAME)
5955 op = SSA_VAL (op);
5957 /* First we want to lookup using the *vuses* from the store and see
5958 if there the last store to this location with the same address
5959 had the same value.
5961 The vuses represent the memory state before the store. If the
5962 memory state, address, and value of the store is the same as the
5963 last store to this location, then this store will produce the
5964 same memory state as that store.
5966 In this case the vdef versions for this store are value numbered to those
5967 vuse versions, since they represent the same memory state after
5968 this store.
5970 Otherwise, the vdefs for the store are used when inserting into
5971 the table, since the store generates a new memory state. */
5973 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
5974 if (vnresult
5975 && vnresult->result)
5977 tree result = vnresult->result;
5978 gcc_checking_assert (TREE_CODE (result) != SSA_NAME
5979 || result == SSA_VAL (result));
5980 resultsame = expressions_equal_p (result, op);
5981 if (resultsame)
5983 /* If the TBAA state isn't compatible for downstream reads
5984 we cannot value-number the VDEFs the same. */
5985 ao_ref lhs_ref;
5986 ao_ref_init (&lhs_ref, lhs);
5987 alias_set_type set = ao_ref_alias_set (&lhs_ref);
5988 alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
5989 if ((vnresult->set != set
5990 && ! alias_set_subset_of (set, vnresult->set))
5991 || (vnresult->base_set != base_set
5992 && ! alias_set_subset_of (base_set, vnresult->base_set)))
5993 resultsame = false;
5997 if (!resultsame)
5999 if (dump_file && (dump_flags & TDF_DETAILS))
6001 fprintf (dump_file, "No store match\n");
6002 fprintf (dump_file, "Value numbering store ");
6003 print_generic_expr (dump_file, lhs);
6004 fprintf (dump_file, " to ");
6005 print_generic_expr (dump_file, op);
6006 fprintf (dump_file, "\n");
6008 /* Have to set value numbers before insert, since insert is
6009 going to valueize the references in-place. */
6010 if (vdef)
6011 changed |= set_ssa_val_to (vdef, vdef);
6013 /* Do not insert structure copies into the tables. */
6014 if (is_gimple_min_invariant (op)
6015 || is_gimple_reg (op))
6016 vn_reference_insert (lhs, op, vdef, NULL);
6018 /* Only perform the following when being called from PRE
6019 which embeds tail merging. */
6020 if (default_vn_walk_kind == VN_WALK)
6022 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
6023 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
6024 if (!vnresult)
6025 vn_reference_insert (assign, lhs, vuse, vdef);
6028 else
6030 /* We had a match, so value number the vdef to have the value
6031 number of the vuse it came from. */
6033 if (dump_file && (dump_flags & TDF_DETAILS))
6034 fprintf (dump_file, "Store matched earlier value, "
6035 "value numbering store vdefs to matching vuses.\n");
6037 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
6040 return changed;
6043 /* Visit and value number PHI, return true if the value number
6044 changed. When BACKEDGES_VARYING_P is true then assume all
6045 backedge values are varying. When INSERTED is not NULL then
6046 this is just a ahead query for a possible iteration, set INSERTED
6047 to true if we'd insert into the hashtable. */
6049 static bool
6050 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
6052 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
6053 bool seen_undef_visited = false;
6054 tree backedge_val = NULL_TREE;
6055 bool seen_non_backedge = false;
6056 tree sameval_base = NULL_TREE;
6057 poly_int64 soff, doff;
6058 unsigned n_executable = 0;
6059 edge_iterator ei;
6060 edge e, sameval_e = NULL;
6062 /* TODO: We could check for this in initialization, and replace this
6063 with a gcc_assert. */
6064 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
6065 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
6067 /* We track whether a PHI was CSEd to to avoid excessive iterations
6068 that would be necessary only because the PHI changed arguments
6069 but not value. */
6070 if (!inserted)
6071 gimple_set_plf (phi, GF_PLF_1, false);
6073 /* See if all non-TOP arguments have the same value. TOP is
6074 equivalent to everything, so we can ignore it. */
6075 basic_block bb = gimple_bb (phi);
6076 FOR_EACH_EDGE (e, ei, bb->preds)
6077 if (e->flags & EDGE_EXECUTABLE)
6079 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6081 if (def == PHI_RESULT (phi))
6082 continue;
6083 ++n_executable;
6084 bool visited = true;
6085 if (TREE_CODE (def) == SSA_NAME)
6087 tree val = SSA_VAL (def, &visited);
6088 if (SSA_NAME_IS_DEFAULT_DEF (def))
6089 visited = true;
6090 if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
6091 def = val;
6092 if (e->flags & EDGE_DFS_BACK)
6093 backedge_val = def;
6095 if (!(e->flags & EDGE_DFS_BACK))
6096 seen_non_backedge = true;
6097 if (def == VN_TOP)
6099 /* Ignore undefined defs for sameval but record one. */
6100 else if (TREE_CODE (def) == SSA_NAME
6101 && ! virtual_operand_p (def)
6102 && ssa_undefined_value_p (def, false))
6104 if (!seen_undef
6105 /* Avoid having not visited undefined defs if we also have
6106 a visited one. */
6107 || (!seen_undef_visited && visited))
6109 seen_undef = def;
6110 seen_undef_visited = visited;
6113 else if (sameval == VN_TOP)
6115 sameval = def;
6116 sameval_e = e;
6118 else if (expressions_equal_p (def, sameval))
6119 sameval_e = NULL;
6120 else if (virtual_operand_p (def))
6122 sameval = NULL_TREE;
6123 break;
6125 else
6127 /* We know we're arriving only with invariant addresses here,
6128 try harder comparing them. We can do some caching here
6129 which we cannot do in expressions_equal_p. */
6130 if (TREE_CODE (def) == ADDR_EXPR
6131 && TREE_CODE (sameval) == ADDR_EXPR
6132 && sameval_base != (void *)-1)
6134 if (!sameval_base)
6135 sameval_base = get_addr_base_and_unit_offset
6136 (TREE_OPERAND (sameval, 0), &soff);
6137 if (!sameval_base)
6138 sameval_base = (tree)(void *)-1;
6139 else if ((get_addr_base_and_unit_offset
6140 (TREE_OPERAND (def, 0), &doff) == sameval_base)
6141 && known_eq (soff, doff))
6142 continue;
6144 /* There's also the possibility to use equivalences. */
6145 if (!FLOAT_TYPE_P (TREE_TYPE (def))
6146 /* But only do this if we didn't force any of sameval or
6147 val to VARYING because of backedge processing rules. */
6148 && (TREE_CODE (sameval) != SSA_NAME
6149 || SSA_VAL (sameval) == sameval)
6150 && (TREE_CODE (def) != SSA_NAME || SSA_VAL (def) == def))
6152 vn_nary_op_t vnresult;
6153 tree ops[2];
6154 ops[0] = def;
6155 ops[1] = sameval;
6156 tree val = vn_nary_op_lookup_pieces (2, EQ_EXPR,
6157 boolean_type_node,
6158 ops, &vnresult);
6159 if (! val && vnresult && vnresult->predicated_values)
6161 val = vn_nary_op_get_predicated_value (vnresult, e);
6162 if (val && integer_truep (val)
6163 && !(sameval_e && (sameval_e->flags & EDGE_DFS_BACK)))
6165 if (dump_file && (dump_flags & TDF_DETAILS))
6167 fprintf (dump_file, "Predication says ");
6168 print_generic_expr (dump_file, def, TDF_NONE);
6169 fprintf (dump_file, " and ");
6170 print_generic_expr (dump_file, sameval, TDF_NONE);
6171 fprintf (dump_file, " are equal on edge %d -> %d\n",
6172 e->src->index, e->dest->index);
6174 continue;
6176 /* If on all previous edges the value was equal to def
6177 we can change sameval to def. */
6178 if (EDGE_COUNT (bb->preds) == 2
6179 && (val = vn_nary_op_get_predicated_value
6180 (vnresult, EDGE_PRED (bb, 0)))
6181 && integer_truep (val)
6182 && !(e->flags & EDGE_DFS_BACK))
6184 if (dump_file && (dump_flags & TDF_DETAILS))
6186 fprintf (dump_file, "Predication says ");
6187 print_generic_expr (dump_file, def, TDF_NONE);
6188 fprintf (dump_file, " and ");
6189 print_generic_expr (dump_file, sameval, TDF_NONE);
6190 fprintf (dump_file, " are equal on edge %d -> %d\n",
6191 EDGE_PRED (bb, 0)->src->index,
6192 EDGE_PRED (bb, 0)->dest->index);
6194 sameval = def;
6195 continue;
6199 sameval = NULL_TREE;
6200 break;
6204 /* If the value we want to use is flowing over the backedge and we
6205 should take it as VARYING but it has a non-VARYING value drop to
6206 VARYING.
6207 If we value-number a virtual operand never value-number to the
6208 value from the backedge as that confuses the alias-walking code.
6209 See gcc.dg/torture/pr87176.c. If the value is the same on a
6210 non-backedge everything is OK though. */
6211 bool visited_p;
6212 if ((backedge_val
6213 && !seen_non_backedge
6214 && TREE_CODE (backedge_val) == SSA_NAME
6215 && sameval == backedge_val
6216 && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
6217 || SSA_VAL (backedge_val) != backedge_val))
6218 /* Do not value-number a virtual operand to sth not visited though
6219 given that allows us to escape a region in alias walking. */
6220 || (sameval
6221 && TREE_CODE (sameval) == SSA_NAME
6222 && !SSA_NAME_IS_DEFAULT_DEF (sameval)
6223 && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
6224 && (SSA_VAL (sameval, &visited_p), !visited_p)))
6225 /* Note this just drops to VARYING without inserting the PHI into
6226 the hashes. */
6227 result = PHI_RESULT (phi);
6228 /* If none of the edges was executable keep the value-number at VN_TOP,
6229 if only a single edge is exectuable use its value. */
6230 else if (n_executable <= 1)
6231 result = seen_undef ? seen_undef : sameval;
6232 /* If we saw only undefined values and VN_TOP use one of the
6233 undefined values. */
6234 else if (sameval == VN_TOP)
6235 result = (seen_undef && seen_undef_visited) ? seen_undef : sameval;
6236 /* First see if it is equivalent to a phi node in this block. We prefer
6237 this as it allows IV elimination - see PRs 66502 and 67167. */
6238 else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
6240 if (!inserted
6241 && TREE_CODE (result) == SSA_NAME
6242 && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
6244 gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
6245 if (dump_file && (dump_flags & TDF_DETAILS))
6247 fprintf (dump_file, "Marking CSEd to PHI node ");
6248 print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
6249 0, TDF_SLIM);
6250 fprintf (dump_file, "\n");
6254 /* If all values are the same use that, unless we've seen undefined
6255 values as well and the value isn't constant.
6256 CCP/copyprop have the same restriction to not remove uninit warnings. */
6257 else if (sameval
6258 && (! seen_undef || is_gimple_min_invariant (sameval)))
6259 result = sameval;
6260 else
6262 result = PHI_RESULT (phi);
6263 /* Only insert PHIs that are varying, for constant value numbers
6264 we mess up equivalences otherwise as we are only comparing
6265 the immediate controlling predicates. */
6266 vn_phi_insert (phi, result, backedges_varying_p);
6267 if (inserted)
6268 *inserted = true;
6271 return set_ssa_val_to (PHI_RESULT (phi), result);
6274 /* Try to simplify RHS using equivalences and constant folding. */
6276 static tree
6277 try_to_simplify (gassign *stmt)
6279 enum tree_code code = gimple_assign_rhs_code (stmt);
6280 tree tem;
6282 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
6283 in this case, there is no point in doing extra work. */
6284 if (code == SSA_NAME)
6285 return NULL_TREE;
6287 /* First try constant folding based on our current lattice. */
6288 mprts_hook = vn_lookup_simplify_result;
6289 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
6290 mprts_hook = NULL;
6291 if (tem
6292 && (TREE_CODE (tem) == SSA_NAME
6293 || is_gimple_min_invariant (tem)))
6294 return tem;
6296 return NULL_TREE;
6299 /* Visit and value number STMT, return true if the value number
6300 changed. */
6302 static bool
6303 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
6305 bool changed = false;
6307 if (dump_file && (dump_flags & TDF_DETAILS))
6309 fprintf (dump_file, "Value numbering stmt = ");
6310 print_gimple_stmt (dump_file, stmt, 0);
6313 if (gimple_code (stmt) == GIMPLE_PHI)
6314 changed = visit_phi (stmt, NULL, backedges_varying_p);
6315 else if (gimple_has_volatile_ops (stmt))
6316 changed = defs_to_varying (stmt);
6317 else if (gassign *ass = dyn_cast <gassign *> (stmt))
6319 enum tree_code code = gimple_assign_rhs_code (ass);
6320 tree lhs = gimple_assign_lhs (ass);
6321 tree rhs1 = gimple_assign_rhs1 (ass);
6322 tree simplified;
6324 /* Shortcut for copies. Simplifying copies is pointless,
6325 since we copy the expression and value they represent. */
6326 if (code == SSA_NAME
6327 && TREE_CODE (lhs) == SSA_NAME)
6329 changed = visit_copy (lhs, rhs1);
6330 goto done;
6332 simplified = try_to_simplify (ass);
6333 if (simplified)
6335 if (dump_file && (dump_flags & TDF_DETAILS))
6337 fprintf (dump_file, "RHS ");
6338 print_gimple_expr (dump_file, ass, 0);
6339 fprintf (dump_file, " simplified to ");
6340 print_generic_expr (dump_file, simplified);
6341 fprintf (dump_file, "\n");
6344 /* Setting value numbers to constants will occasionally
6345 screw up phi congruence because constants are not
6346 uniquely associated with a single ssa name that can be
6347 looked up. */
6348 if (simplified
6349 && is_gimple_min_invariant (simplified)
6350 && TREE_CODE (lhs) == SSA_NAME)
6352 changed = set_ssa_val_to (lhs, simplified);
6353 goto done;
6355 else if (simplified
6356 && TREE_CODE (simplified) == SSA_NAME
6357 && TREE_CODE (lhs) == SSA_NAME)
6359 changed = visit_copy (lhs, simplified);
6360 goto done;
6363 if ((TREE_CODE (lhs) == SSA_NAME
6364 /* We can substitute SSA_NAMEs that are live over
6365 abnormal edges with their constant value. */
6366 && !(gimple_assign_copy_p (ass)
6367 && is_gimple_min_invariant (rhs1))
6368 && !(simplified
6369 && is_gimple_min_invariant (simplified))
6370 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
6371 /* Stores or copies from SSA_NAMEs that are live over
6372 abnormal edges are a problem. */
6373 || (code == SSA_NAME
6374 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
6375 changed = defs_to_varying (ass);
6376 else if (REFERENCE_CLASS_P (lhs)
6377 || DECL_P (lhs))
6378 changed = visit_reference_op_store (lhs, rhs1, ass);
6379 else if (TREE_CODE (lhs) == SSA_NAME)
6381 if ((gimple_assign_copy_p (ass)
6382 && is_gimple_min_invariant (rhs1))
6383 || (simplified
6384 && is_gimple_min_invariant (simplified)))
6386 if (simplified)
6387 changed = set_ssa_val_to (lhs, simplified);
6388 else
6389 changed = set_ssa_val_to (lhs, rhs1);
6391 else
6393 /* Visit the original statement. */
6394 switch (vn_get_stmt_kind (ass))
6396 case VN_NARY:
6397 changed = visit_nary_op (lhs, ass);
6398 break;
6399 case VN_REFERENCE:
6400 changed = visit_reference_op_load (lhs, rhs1, ass);
6401 break;
6402 default:
6403 changed = defs_to_varying (ass);
6404 break;
6408 else
6409 changed = defs_to_varying (ass);
6411 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
6413 tree lhs = gimple_call_lhs (call_stmt);
6414 if (lhs && TREE_CODE (lhs) == SSA_NAME)
6416 /* Try constant folding based on our current lattice. */
6417 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
6418 vn_valueize);
6419 if (simplified)
6421 if (dump_file && (dump_flags & TDF_DETAILS))
6423 fprintf (dump_file, "call ");
6424 print_gimple_expr (dump_file, call_stmt, 0);
6425 fprintf (dump_file, " simplified to ");
6426 print_generic_expr (dump_file, simplified);
6427 fprintf (dump_file, "\n");
6430 /* Setting value numbers to constants will occasionally
6431 screw up phi congruence because constants are not
6432 uniquely associated with a single ssa name that can be
6433 looked up. */
6434 if (simplified
6435 && is_gimple_min_invariant (simplified))
6437 changed = set_ssa_val_to (lhs, simplified);
6438 if (gimple_vdef (call_stmt))
6439 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
6440 SSA_VAL (gimple_vuse (call_stmt)));
6441 goto done;
6443 else if (simplified
6444 && TREE_CODE (simplified) == SSA_NAME)
6446 changed = visit_copy (lhs, simplified);
6447 if (gimple_vdef (call_stmt))
6448 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
6449 SSA_VAL (gimple_vuse (call_stmt)));
6450 goto done;
6452 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
6454 changed = defs_to_varying (call_stmt);
6455 goto done;
6459 /* Pick up flags from a devirtualization target. */
6460 tree fn = gimple_call_fn (stmt);
6461 int extra_fnflags = 0;
6462 if (fn && TREE_CODE (fn) == SSA_NAME)
6464 fn = SSA_VAL (fn);
6465 if (TREE_CODE (fn) == ADDR_EXPR
6466 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
6467 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
6469 if ((/* Calls to the same function with the same vuse
6470 and the same operands do not necessarily return the same
6471 value, unless they're pure or const. */
6472 ((gimple_call_flags (call_stmt) | extra_fnflags)
6473 & (ECF_PURE | ECF_CONST))
6474 /* If calls have a vdef, subsequent calls won't have
6475 the same incoming vuse. So, if 2 calls with vdef have the
6476 same vuse, we know they're not subsequent.
6477 We can value number 2 calls to the same function with the
6478 same vuse and the same operands which are not subsequent
6479 the same, because there is no code in the program that can
6480 compare the 2 values... */
6481 || (gimple_vdef (call_stmt)
6482 /* ... unless the call returns a pointer which does
6483 not alias with anything else. In which case the
6484 information that the values are distinct are encoded
6485 in the IL. */
6486 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
6487 /* Only perform the following when being called from PRE
6488 which embeds tail merging. */
6489 && default_vn_walk_kind == VN_WALK))
6490 /* Do not process .DEFERRED_INIT since that confuses uninit
6491 analysis. */
6492 && !gimple_call_internal_p (call_stmt, IFN_DEFERRED_INIT))
6493 changed = visit_reference_op_call (lhs, call_stmt);
6494 else
6495 changed = defs_to_varying (call_stmt);
6497 else
6498 changed = defs_to_varying (stmt);
6499 done:
6500 return changed;
6504 /* Allocate a value number table. */
6506 static void
6507 allocate_vn_table (vn_tables_t table, unsigned size)
6509 table->phis = new vn_phi_table_type (size);
6510 table->nary = new vn_nary_op_table_type (size);
6511 table->references = new vn_reference_table_type (size);
6514 /* Free a value number table. */
6516 static void
6517 free_vn_table (vn_tables_t table)
6519 /* Walk over elements and release vectors. */
6520 vn_reference_iterator_type hir;
6521 vn_reference_t vr;
6522 FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
6523 vr->operands.release ();
6524 delete table->phis;
6525 table->phis = NULL;
6526 delete table->nary;
6527 table->nary = NULL;
6528 delete table->references;
6529 table->references = NULL;
6532 /* Set *ID according to RESULT. */
6534 static void
6535 set_value_id_for_result (tree result, unsigned int *id)
6537 if (result && TREE_CODE (result) == SSA_NAME)
6538 *id = VN_INFO (result)->value_id;
6539 else if (result && is_gimple_min_invariant (result))
6540 *id = get_or_alloc_constant_value_id (result);
6541 else
6542 *id = get_next_value_id ();
6545 /* Set the value ids in the valid hash tables. */
6547 static void
6548 set_hashtable_value_ids (void)
6550 vn_nary_op_iterator_type hin;
6551 vn_phi_iterator_type hip;
6552 vn_reference_iterator_type hir;
6553 vn_nary_op_t vno;
6554 vn_reference_t vr;
6555 vn_phi_t vp;
6557 /* Now set the value ids of the things we had put in the hash
6558 table. */
6560 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
6561 if (! vno->predicated_values)
6562 set_value_id_for_result (vno->u.result, &vno->value_id);
6564 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
6565 set_value_id_for_result (vp->result, &vp->value_id);
6567 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
6568 hir)
6569 set_value_id_for_result (vr->result, &vr->value_id);
6572 /* Return the maximum value id we have ever seen. */
6574 unsigned int
6575 get_max_value_id (void)
6577 return next_value_id;
6580 /* Return the maximum constant value id we have ever seen. */
6582 unsigned int
6583 get_max_constant_value_id (void)
6585 return -next_constant_value_id;
6588 /* Return the next unique value id. */
6590 unsigned int
6591 get_next_value_id (void)
6593 gcc_checking_assert ((int)next_value_id > 0);
6594 return next_value_id++;
6597 /* Return the next unique value id for constants. */
6599 unsigned int
6600 get_next_constant_value_id (void)
6602 gcc_checking_assert (next_constant_value_id < 0);
6603 return next_constant_value_id--;
6607 /* Compare two expressions E1 and E2 and return true if they are equal.
6608 If match_vn_top_optimistically is true then VN_TOP is equal to anything,
6609 otherwise VN_TOP only matches VN_TOP. */
6611 bool
6612 expressions_equal_p (tree e1, tree e2, bool match_vn_top_optimistically)
6614 /* The obvious case. */
6615 if (e1 == e2)
6616 return true;
6618 /* If either one is VN_TOP consider them equal. */
6619 if (match_vn_top_optimistically
6620 && (e1 == VN_TOP || e2 == VN_TOP))
6621 return true;
6623 /* If only one of them is null, they cannot be equal. While in general
6624 this should not happen for operations like TARGET_MEM_REF some
6625 operands are optional and an identity value we could substitute
6626 has differing semantics. */
6627 if (!e1 || !e2)
6628 return false;
6630 /* SSA_NAME compare pointer equal. */
6631 if (TREE_CODE (e1) == SSA_NAME || TREE_CODE (e2) == SSA_NAME)
6632 return false;
6634 /* Now perform the actual comparison. */
6635 if (TREE_CODE (e1) == TREE_CODE (e2)
6636 && operand_equal_p (e1, e2, OEP_PURE_SAME))
6637 return true;
6639 return false;
6643 /* Return true if the nary operation NARY may trap. This is a copy
6644 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
6646 bool
6647 vn_nary_may_trap (vn_nary_op_t nary)
6649 tree type;
6650 tree rhs2 = NULL_TREE;
6651 bool honor_nans = false;
6652 bool honor_snans = false;
6653 bool fp_operation = false;
6654 bool honor_trapv = false;
6655 bool handled, ret;
6656 unsigned i;
6658 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
6659 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
6660 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
6662 type = nary->type;
6663 fp_operation = FLOAT_TYPE_P (type);
6664 if (fp_operation)
6666 honor_nans = flag_trapping_math && !flag_finite_math_only;
6667 honor_snans = flag_signaling_nans != 0;
6669 else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type))
6670 honor_trapv = true;
6672 if (nary->length >= 2)
6673 rhs2 = nary->op[1];
6674 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
6675 honor_trapv, honor_nans, honor_snans,
6676 rhs2, &handled);
6677 if (handled && ret)
6678 return true;
6680 for (i = 0; i < nary->length; ++i)
6681 if (tree_could_trap_p (nary->op[i]))
6682 return true;
6684 return false;
6687 /* Return true if the reference operation REF may trap. */
6689 bool
6690 vn_reference_may_trap (vn_reference_t ref)
6692 switch (ref->operands[0].opcode)
6694 case MODIFY_EXPR:
6695 case CALL_EXPR:
6696 /* We do not handle calls. */
6697 return true;
6698 case ADDR_EXPR:
6699 /* And toplevel address computations never trap. */
6700 return false;
6701 default:;
6704 vn_reference_op_t op;
6705 unsigned i;
6706 FOR_EACH_VEC_ELT (ref->operands, i, op)
6708 switch (op->opcode)
6710 case WITH_SIZE_EXPR:
6711 case TARGET_MEM_REF:
6712 /* Always variable. */
6713 return true;
6714 case COMPONENT_REF:
6715 if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
6716 return true;
6717 break;
6718 case ARRAY_RANGE_REF:
6719 if (TREE_CODE (op->op0) == SSA_NAME)
6720 return true;
6721 break;
6722 case ARRAY_REF:
6724 if (TREE_CODE (op->op0) != INTEGER_CST)
6725 return true;
6727 /* !in_array_bounds */
6728 tree domain_type = TYPE_DOMAIN (ref->operands[i+1].type);
6729 if (!domain_type)
6730 return true;
6732 tree min = op->op1;
6733 tree max = TYPE_MAX_VALUE (domain_type);
6734 if (!min
6735 || !max
6736 || TREE_CODE (min) != INTEGER_CST
6737 || TREE_CODE (max) != INTEGER_CST)
6738 return true;
6740 if (tree_int_cst_lt (op->op0, min)
6741 || tree_int_cst_lt (max, op->op0))
6742 return true;
6744 break;
6746 case MEM_REF:
6747 /* Nothing interesting in itself, the base is separate. */
6748 break;
6749 /* The following are the address bases. */
6750 case SSA_NAME:
6751 return true;
6752 case ADDR_EXPR:
6753 if (op->op0)
6754 return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
6755 return false;
6756 default:;
6759 return false;
6762 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
6763 bitmap inserted_exprs_)
6764 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
6765 el_todo (0), eliminations (0), insertions (0),
6766 inserted_exprs (inserted_exprs_)
6768 need_eh_cleanup = BITMAP_ALLOC (NULL);
6769 need_ab_cleanup = BITMAP_ALLOC (NULL);
6772 eliminate_dom_walker::~eliminate_dom_walker ()
6774 BITMAP_FREE (need_eh_cleanup);
6775 BITMAP_FREE (need_ab_cleanup);
6778 /* Return a leader for OP that is available at the current point of the
6779 eliminate domwalk. */
6781 tree
6782 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
6784 tree valnum = VN_INFO (op)->valnum;
6785 if (TREE_CODE (valnum) == SSA_NAME)
6787 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
6788 return valnum;
6789 if (avail.length () > SSA_NAME_VERSION (valnum))
6791 tree av = avail[SSA_NAME_VERSION (valnum)];
6792 /* When PRE discovers a new redundancy there's no way to unite
6793 the value classes so it instead inserts a copy old-val = new-val.
6794 Look through such copies here, providing one more level of
6795 simplification at elimination time. */
6796 gassign *ass;
6797 if (av && (ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (av))))
6798 if (gimple_assign_rhs_class (ass) == GIMPLE_SINGLE_RHS)
6800 tree rhs1 = gimple_assign_rhs1 (ass);
6801 if (CONSTANT_CLASS_P (rhs1)
6802 || (TREE_CODE (rhs1) == SSA_NAME
6803 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
6804 av = rhs1;
6806 return av;
6809 else if (is_gimple_min_invariant (valnum))
6810 return valnum;
6811 return NULL_TREE;
6814 /* At the current point of the eliminate domwalk make OP available. */
6816 void
6817 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
6819 tree valnum = VN_INFO (op)->valnum;
6820 if (TREE_CODE (valnum) == SSA_NAME)
6822 if (avail.length () <= SSA_NAME_VERSION (valnum))
6823 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1, true);
6824 tree pushop = op;
6825 if (avail[SSA_NAME_VERSION (valnum)])
6826 pushop = avail[SSA_NAME_VERSION (valnum)];
6827 avail_stack.safe_push (pushop);
6828 avail[SSA_NAME_VERSION (valnum)] = op;
6832 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
6833 the leader for the expression if insertion was successful. */
6835 tree
6836 eliminate_dom_walker::eliminate_insert (basic_block bb,
6837 gimple_stmt_iterator *gsi, tree val)
6839 /* We can insert a sequence with a single assignment only. */
6840 gimple_seq stmts = VN_INFO (val)->expr;
6841 if (!gimple_seq_singleton_p (stmts))
6842 return NULL_TREE;
6843 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
6844 if (!stmt
6845 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
6846 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
6847 && gimple_assign_rhs_code (stmt) != NEGATE_EXPR
6848 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
6849 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
6850 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
6851 return NULL_TREE;
6853 tree op = gimple_assign_rhs1 (stmt);
6854 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
6855 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
6856 op = TREE_OPERAND (op, 0);
6857 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
6858 if (!leader)
6859 return NULL_TREE;
6861 tree res;
6862 stmts = NULL;
6863 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
6864 res = gimple_build (&stmts, BIT_FIELD_REF,
6865 TREE_TYPE (val), leader,
6866 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
6867 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
6868 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
6869 res = gimple_build (&stmts, BIT_AND_EXPR,
6870 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
6871 else
6872 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
6873 TREE_TYPE (val), leader);
6874 if (TREE_CODE (res) != SSA_NAME
6875 || SSA_NAME_IS_DEFAULT_DEF (res)
6876 || gimple_bb (SSA_NAME_DEF_STMT (res)))
6878 gimple_seq_discard (stmts);
6880 /* During propagation we have to treat SSA info conservatively
6881 and thus we can end up simplifying the inserted expression
6882 at elimination time to sth not defined in stmts. */
6883 /* But then this is a redundancy we failed to detect. Which means
6884 res now has two values. That doesn't play well with how
6885 we track availability here, so give up. */
6886 if (dump_file && (dump_flags & TDF_DETAILS))
6888 if (TREE_CODE (res) == SSA_NAME)
6889 res = eliminate_avail (bb, res);
6890 if (res)
6892 fprintf (dump_file, "Failed to insert expression for value ");
6893 print_generic_expr (dump_file, val);
6894 fprintf (dump_file, " which is really fully redundant to ");
6895 print_generic_expr (dump_file, res);
6896 fprintf (dump_file, "\n");
6900 return NULL_TREE;
6902 else
6904 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
6905 vn_ssa_aux_t vn_info = VN_INFO (res);
6906 vn_info->valnum = val;
6907 vn_info->visited = true;
6910 insertions++;
6911 if (dump_file && (dump_flags & TDF_DETAILS))
6913 fprintf (dump_file, "Inserted ");
6914 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
6917 return res;
6920 void
6921 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
6923 tree sprime = NULL_TREE;
6924 gimple *stmt = gsi_stmt (*gsi);
6925 tree lhs = gimple_get_lhs (stmt);
6926 if (lhs && TREE_CODE (lhs) == SSA_NAME
6927 && !gimple_has_volatile_ops (stmt)
6928 /* See PR43491. Do not replace a global register variable when
6929 it is a the RHS of an assignment. Do replace local register
6930 variables since gcc does not guarantee a local variable will
6931 be allocated in register.
6932 ??? The fix isn't effective here. This should instead
6933 be ensured by not value-numbering them the same but treating
6934 them like volatiles? */
6935 && !(gimple_assign_single_p (stmt)
6936 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
6937 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
6938 && is_global_var (gimple_assign_rhs1 (stmt)))))
6940 sprime = eliminate_avail (b, lhs);
6941 if (!sprime)
6943 /* If there is no existing usable leader but SCCVN thinks
6944 it has an expression it wants to use as replacement,
6945 insert that. */
6946 tree val = VN_INFO (lhs)->valnum;
6947 vn_ssa_aux_t vn_info;
6948 if (val != VN_TOP
6949 && TREE_CODE (val) == SSA_NAME
6950 && (vn_info = VN_INFO (val), true)
6951 && vn_info->needs_insertion
6952 && vn_info->expr != NULL
6953 && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
6954 eliminate_push_avail (b, sprime);
6957 /* If this now constitutes a copy duplicate points-to
6958 and range info appropriately. This is especially
6959 important for inserted code. See tree-ssa-copy.cc
6960 for similar code. */
6961 if (sprime
6962 && TREE_CODE (sprime) == SSA_NAME)
6964 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
6965 if (POINTER_TYPE_P (TREE_TYPE (lhs))
6966 && SSA_NAME_PTR_INFO (lhs)
6967 && ! SSA_NAME_PTR_INFO (sprime))
6969 duplicate_ssa_name_ptr_info (sprime,
6970 SSA_NAME_PTR_INFO (lhs));
6971 if (b != sprime_b)
6972 reset_flow_sensitive_info (sprime);
6974 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6975 && SSA_NAME_RANGE_INFO (lhs)
6976 && ! SSA_NAME_RANGE_INFO (sprime)
6977 && b == sprime_b)
6978 duplicate_ssa_name_range_info (sprime, lhs);
6981 /* Inhibit the use of an inserted PHI on a loop header when
6982 the address of the memory reference is a simple induction
6983 variable. In other cases the vectorizer won't do anything
6984 anyway (either it's loop invariant or a complicated
6985 expression). */
6986 if (sprime
6987 && TREE_CODE (sprime) == SSA_NAME
6988 && do_pre
6989 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
6990 && loop_outer (b->loop_father)
6991 && has_zero_uses (sprime)
6992 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
6993 && gimple_assign_load_p (stmt))
6995 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
6996 basic_block def_bb = gimple_bb (def_stmt);
6997 if (gimple_code (def_stmt) == GIMPLE_PHI
6998 && def_bb->loop_father->header == def_bb)
7000 loop_p loop = def_bb->loop_father;
7001 ssa_op_iter iter;
7002 tree op;
7003 bool found = false;
7004 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
7006 affine_iv iv;
7007 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
7008 if (def_bb
7009 && flow_bb_inside_loop_p (loop, def_bb)
7010 && simple_iv (loop, loop, op, &iv, true))
7012 found = true;
7013 break;
7016 if (found)
7018 if (dump_file && (dump_flags & TDF_DETAILS))
7020 fprintf (dump_file, "Not replacing ");
7021 print_gimple_expr (dump_file, stmt, 0);
7022 fprintf (dump_file, " with ");
7023 print_generic_expr (dump_file, sprime);
7024 fprintf (dump_file, " which would add a loop"
7025 " carried dependence to loop %d\n",
7026 loop->num);
7028 /* Don't keep sprime available. */
7029 sprime = NULL_TREE;
7034 if (sprime)
7036 /* If we can propagate the value computed for LHS into
7037 all uses don't bother doing anything with this stmt. */
7038 if (may_propagate_copy (lhs, sprime))
7040 /* Mark it for removal. */
7041 to_remove.safe_push (stmt);
7043 /* ??? Don't count copy/constant propagations. */
7044 if (gimple_assign_single_p (stmt)
7045 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
7046 || gimple_assign_rhs1 (stmt) == sprime))
7047 return;
7049 if (dump_file && (dump_flags & TDF_DETAILS))
7051 fprintf (dump_file, "Replaced ");
7052 print_gimple_expr (dump_file, stmt, 0);
7053 fprintf (dump_file, " with ");
7054 print_generic_expr (dump_file, sprime);
7055 fprintf (dump_file, " in all uses of ");
7056 print_gimple_stmt (dump_file, stmt, 0);
7059 eliminations++;
7060 return;
7063 /* If this is an assignment from our leader (which
7064 happens in the case the value-number is a constant)
7065 then there is nothing to do. Likewise if we run into
7066 inserted code that needed a conversion because of
7067 our type-agnostic value-numbering of loads. */
7068 if ((gimple_assign_single_p (stmt)
7069 || (is_gimple_assign (stmt)
7070 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
7071 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)))
7072 && sprime == gimple_assign_rhs1 (stmt))
7073 return;
7075 /* Else replace its RHS. */
7076 if (dump_file && (dump_flags & TDF_DETAILS))
7078 fprintf (dump_file, "Replaced ");
7079 print_gimple_expr (dump_file, stmt, 0);
7080 fprintf (dump_file, " with ");
7081 print_generic_expr (dump_file, sprime);
7082 fprintf (dump_file, " in ");
7083 print_gimple_stmt (dump_file, stmt, 0);
7085 eliminations++;
7087 bool can_make_abnormal_goto = (is_gimple_call (stmt)
7088 && stmt_can_make_abnormal_goto (stmt));
7089 gimple *orig_stmt = stmt;
7090 if (!useless_type_conversion_p (TREE_TYPE (lhs),
7091 TREE_TYPE (sprime)))
7093 /* We preserve conversions to but not from function or method
7094 types. This asymmetry makes it necessary to re-instantiate
7095 conversions here. */
7096 if (POINTER_TYPE_P (TREE_TYPE (lhs))
7097 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
7098 sprime = fold_convert (TREE_TYPE (lhs), sprime);
7099 else
7100 gcc_unreachable ();
7102 tree vdef = gimple_vdef (stmt);
7103 tree vuse = gimple_vuse (stmt);
7104 propagate_tree_value_into_stmt (gsi, sprime);
7105 stmt = gsi_stmt (*gsi);
7106 update_stmt (stmt);
7107 /* In case the VDEF on the original stmt was released, value-number
7108 it to the VUSE. This is to make vuse_ssa_val able to skip
7109 released virtual operands. */
7110 if (vdef != gimple_vdef (stmt))
7112 gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
7113 VN_INFO (vdef)->valnum = vuse;
7116 /* If we removed EH side-effects from the statement, clean
7117 its EH information. */
7118 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
7120 bitmap_set_bit (need_eh_cleanup,
7121 gimple_bb (stmt)->index);
7122 if (dump_file && (dump_flags & TDF_DETAILS))
7123 fprintf (dump_file, " Removed EH side-effects.\n");
7126 /* Likewise for AB side-effects. */
7127 if (can_make_abnormal_goto
7128 && !stmt_can_make_abnormal_goto (stmt))
7130 bitmap_set_bit (need_ab_cleanup,
7131 gimple_bb (stmt)->index);
7132 if (dump_file && (dump_flags & TDF_DETAILS))
7133 fprintf (dump_file, " Removed AB side-effects.\n");
7136 return;
7140 /* If the statement is a scalar store, see if the expression
7141 has the same value number as its rhs. If so, the store is
7142 dead. */
7143 if (gimple_assign_single_p (stmt)
7144 && !gimple_has_volatile_ops (stmt)
7145 && !is_gimple_reg (gimple_assign_lhs (stmt))
7146 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
7147 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
7149 tree rhs = gimple_assign_rhs1 (stmt);
7150 vn_reference_t vnresult;
7151 /* ??? gcc.dg/torture/pr91445.c shows that we lookup a boolean
7152 typed load of a byte known to be 0x11 as 1 so a store of
7153 a boolean 1 is detected as redundant. Because of this we
7154 have to make sure to lookup with a ref where its size
7155 matches the precision. */
7156 tree lookup_lhs = lhs;
7157 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
7158 && (TREE_CODE (lhs) != COMPONENT_REF
7159 || !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
7160 && !type_has_mode_precision_p (TREE_TYPE (lhs)))
7162 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
7163 && TYPE_PRECISION (TREE_TYPE (lhs)) > MAX_FIXED_MODE_SIZE)
7164 lookup_lhs = NULL_TREE;
7165 else if (TREE_CODE (lhs) == COMPONENT_REF
7166 || TREE_CODE (lhs) == MEM_REF)
7168 tree ltype = build_nonstandard_integer_type
7169 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs))),
7170 TYPE_UNSIGNED (TREE_TYPE (lhs)));
7171 if (TREE_CODE (lhs) == COMPONENT_REF)
7173 tree foff = component_ref_field_offset (lhs);
7174 tree f = TREE_OPERAND (lhs, 1);
7175 if (!poly_int_tree_p (foff))
7176 lookup_lhs = NULL_TREE;
7177 else
7178 lookup_lhs = build3 (BIT_FIELD_REF, ltype,
7179 TREE_OPERAND (lhs, 0),
7180 TYPE_SIZE (TREE_TYPE (lhs)),
7181 bit_from_pos
7182 (foff, DECL_FIELD_BIT_OFFSET (f)));
7184 else
7185 lookup_lhs = build2 (MEM_REF, ltype,
7186 TREE_OPERAND (lhs, 0),
7187 TREE_OPERAND (lhs, 1));
7189 else
7190 lookup_lhs = NULL_TREE;
7192 tree val = NULL_TREE;
7193 if (lookup_lhs)
7194 val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt),
7195 VN_WALKREWRITE, &vnresult, false,
7196 NULL, NULL_TREE, true);
7197 if (TREE_CODE (rhs) == SSA_NAME)
7198 rhs = VN_INFO (rhs)->valnum;
7199 if (val
7200 && (operand_equal_p (val, rhs, 0)
7201 /* Due to the bitfield lookups above we can get bit
7202 interpretations of the same RHS as values here. Those
7203 are redundant as well. */
7204 || (TREE_CODE (val) == SSA_NAME
7205 && gimple_assign_single_p (SSA_NAME_DEF_STMT (val))
7206 && (val = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (val)))
7207 && TREE_CODE (val) == VIEW_CONVERT_EXPR
7208 && TREE_OPERAND (val, 0) == rhs)))
7210 /* We can only remove the later store if the former aliases
7211 at least all accesses the later one does or if the store
7212 was to readonly memory storing the same value. */
7213 ao_ref lhs_ref;
7214 ao_ref_init (&lhs_ref, lhs);
7215 alias_set_type set = ao_ref_alias_set (&lhs_ref);
7216 alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
7217 if (! vnresult
7218 || ((vnresult->set == set
7219 || alias_set_subset_of (set, vnresult->set))
7220 && (vnresult->base_set == base_set
7221 || alias_set_subset_of (base_set, vnresult->base_set))))
7223 if (dump_file && (dump_flags & TDF_DETAILS))
7225 fprintf (dump_file, "Deleted redundant store ");
7226 print_gimple_stmt (dump_file, stmt, 0);
7229 /* Queue stmt for removal. */
7230 to_remove.safe_push (stmt);
7231 return;
7236 /* If this is a control statement value numbering left edges
7237 unexecuted on force the condition in a way consistent with
7238 that. */
7239 if (gcond *cond = dyn_cast <gcond *> (stmt))
7241 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
7242 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
7244 if (dump_file && (dump_flags & TDF_DETAILS))
7246 fprintf (dump_file, "Removing unexecutable edge from ");
7247 print_gimple_stmt (dump_file, stmt, 0);
7249 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
7250 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
7251 gimple_cond_make_true (cond);
7252 else
7253 gimple_cond_make_false (cond);
7254 update_stmt (cond);
7255 el_todo |= TODO_cleanup_cfg;
7256 return;
7260 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
7261 bool was_noreturn = (is_gimple_call (stmt)
7262 && gimple_call_noreturn_p (stmt));
7263 tree vdef = gimple_vdef (stmt);
7264 tree vuse = gimple_vuse (stmt);
7266 /* If we didn't replace the whole stmt (or propagate the result
7267 into all uses), replace all uses on this stmt with their
7268 leaders. */
7269 bool modified = false;
7270 use_operand_p use_p;
7271 ssa_op_iter iter;
7272 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
7274 tree use = USE_FROM_PTR (use_p);
7275 /* ??? The call code above leaves stmt operands un-updated. */
7276 if (TREE_CODE (use) != SSA_NAME)
7277 continue;
7278 tree sprime;
7279 if (SSA_NAME_IS_DEFAULT_DEF (use))
7280 /* ??? For default defs BB shouldn't matter, but we have to
7281 solve the inconsistency between rpo eliminate and
7282 dom eliminate avail valueization first. */
7283 sprime = eliminate_avail (b, use);
7284 else
7285 /* Look for sth available at the definition block of the argument.
7286 This avoids inconsistencies between availability there which
7287 decides if the stmt can be removed and availability at the
7288 use site. The SSA property ensures that things available
7289 at the definition are also available at uses. */
7290 sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
7291 if (sprime && sprime != use
7292 && may_propagate_copy (use, sprime, true)
7293 /* We substitute into debug stmts to avoid excessive
7294 debug temporaries created by removed stmts, but we need
7295 to avoid doing so for inserted sprimes as we never want
7296 to create debug temporaries for them. */
7297 && (!inserted_exprs
7298 || TREE_CODE (sprime) != SSA_NAME
7299 || !is_gimple_debug (stmt)
7300 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
7302 propagate_value (use_p, sprime);
7303 modified = true;
7307 /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
7308 into which is a requirement for the IPA devirt machinery. */
7309 gimple *old_stmt = stmt;
7310 if (modified)
7312 /* If a formerly non-invariant ADDR_EXPR is turned into an
7313 invariant one it was on a separate stmt. */
7314 if (gimple_assign_single_p (stmt)
7315 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
7316 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
7317 gimple_stmt_iterator prev = *gsi;
7318 gsi_prev (&prev);
7319 if (fold_stmt (gsi, follow_all_ssa_edges))
7321 /* fold_stmt may have created new stmts inbetween
7322 the previous stmt and the folded stmt. Mark
7323 all defs created there as varying to not confuse
7324 the SCCVN machinery as we're using that even during
7325 elimination. */
7326 if (gsi_end_p (prev))
7327 prev = gsi_start_bb (b);
7328 else
7329 gsi_next (&prev);
7330 if (gsi_stmt (prev) != gsi_stmt (*gsi))
7333 tree def;
7334 ssa_op_iter dit;
7335 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
7336 dit, SSA_OP_ALL_DEFS)
7337 /* As existing DEFs may move between stmts
7338 only process new ones. */
7339 if (! has_VN_INFO (def))
7341 vn_ssa_aux_t vn_info = VN_INFO (def);
7342 vn_info->valnum = def;
7343 vn_info->visited = true;
7345 if (gsi_stmt (prev) == gsi_stmt (*gsi))
7346 break;
7347 gsi_next (&prev);
7349 while (1);
7351 stmt = gsi_stmt (*gsi);
7352 /* In case we folded the stmt away schedule the NOP for removal. */
7353 if (gimple_nop_p (stmt))
7354 to_remove.safe_push (stmt);
7357 /* Visit indirect calls and turn them into direct calls if
7358 possible using the devirtualization machinery. Do this before
7359 checking for required EH/abnormal/noreturn cleanup as devird
7360 may expose more of those. */
7361 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
7363 tree fn = gimple_call_fn (call_stmt);
7364 if (fn
7365 && flag_devirtualize
7366 && virtual_method_call_p (fn))
7368 tree otr_type = obj_type_ref_class (fn);
7369 unsigned HOST_WIDE_INT otr_tok
7370 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
7371 tree instance;
7372 ipa_polymorphic_call_context context (current_function_decl,
7373 fn, stmt, &instance);
7374 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
7375 otr_type, stmt, NULL);
7376 bool final;
7377 vec <cgraph_node *> targets
7378 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
7379 otr_tok, context, &final);
7380 if (dump_file)
7381 dump_possible_polymorphic_call_targets (dump_file,
7382 obj_type_ref_class (fn),
7383 otr_tok, context);
7384 if (final && targets.length () <= 1 && dbg_cnt (devirt))
7386 tree fn;
7387 if (targets.length () == 1)
7388 fn = targets[0]->decl;
7389 else
7390 fn = builtin_decl_unreachable ();
7391 if (dump_enabled_p ())
7393 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
7394 "converting indirect call to "
7395 "function %s\n",
7396 lang_hooks.decl_printable_name (fn, 2));
7398 gimple_call_set_fndecl (call_stmt, fn);
7399 /* If changing the call to __builtin_unreachable
7400 or similar noreturn function, adjust gimple_call_fntype
7401 too. */
7402 if (gimple_call_noreturn_p (call_stmt)
7403 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
7404 && TYPE_ARG_TYPES (TREE_TYPE (fn))
7405 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
7406 == void_type_node))
7407 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
7408 maybe_remove_unused_call_args (cfun, call_stmt);
7409 modified = true;
7414 if (modified)
7416 /* When changing a call into a noreturn call, cfg cleanup
7417 is needed to fix up the noreturn call. */
7418 if (!was_noreturn
7419 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
7420 to_fixup.safe_push (stmt);
7421 /* When changing a condition or switch into one we know what
7422 edge will be executed, schedule a cfg cleanup. */
7423 if ((gimple_code (stmt) == GIMPLE_COND
7424 && (gimple_cond_true_p (as_a <gcond *> (stmt))
7425 || gimple_cond_false_p (as_a <gcond *> (stmt))))
7426 || (gimple_code (stmt) == GIMPLE_SWITCH
7427 && TREE_CODE (gimple_switch_index
7428 (as_a <gswitch *> (stmt))) == INTEGER_CST))
7429 el_todo |= TODO_cleanup_cfg;
7430 /* If we removed EH side-effects from the statement, clean
7431 its EH information. */
7432 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
7434 bitmap_set_bit (need_eh_cleanup,
7435 gimple_bb (stmt)->index);
7436 if (dump_file && (dump_flags & TDF_DETAILS))
7437 fprintf (dump_file, " Removed EH side-effects.\n");
7439 /* Likewise for AB side-effects. */
7440 if (can_make_abnormal_goto
7441 && !stmt_can_make_abnormal_goto (stmt))
7443 bitmap_set_bit (need_ab_cleanup,
7444 gimple_bb (stmt)->index);
7445 if (dump_file && (dump_flags & TDF_DETAILS))
7446 fprintf (dump_file, " Removed AB side-effects.\n");
7448 update_stmt (stmt);
7449 /* In case the VDEF on the original stmt was released, value-number
7450 it to the VUSE. This is to make vuse_ssa_val able to skip
7451 released virtual operands. */
7452 if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
7453 VN_INFO (vdef)->valnum = vuse;
7456 /* Make new values available - for fully redundant LHS we
7457 continue with the next stmt above and skip this.
7458 But avoid picking up dead defs. */
7459 tree def;
7460 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
7461 if (! has_zero_uses (def)
7462 || (inserted_exprs
7463 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (def))))
7464 eliminate_push_avail (b, def);
7467 /* Perform elimination for the basic-block B during the domwalk. */
7469 edge
7470 eliminate_dom_walker::before_dom_children (basic_block b)
7472 /* Mark new bb. */
7473 avail_stack.safe_push (NULL_TREE);
7475 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
7476 if (!(b->flags & BB_EXECUTABLE))
7477 return NULL;
7479 vn_context_bb = b;
7481 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
7483 gphi *phi = gsi.phi ();
7484 tree res = PHI_RESULT (phi);
7486 if (virtual_operand_p (res))
7488 gsi_next (&gsi);
7489 continue;
7492 tree sprime = eliminate_avail (b, res);
7493 if (sprime
7494 && sprime != res)
7496 if (dump_file && (dump_flags & TDF_DETAILS))
7498 fprintf (dump_file, "Replaced redundant PHI node defining ");
7499 print_generic_expr (dump_file, res);
7500 fprintf (dump_file, " with ");
7501 print_generic_expr (dump_file, sprime);
7502 fprintf (dump_file, "\n");
7505 /* If we inserted this PHI node ourself, it's not an elimination. */
7506 if (! inserted_exprs
7507 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
7508 eliminations++;
7510 /* If we will propagate into all uses don't bother to do
7511 anything. */
7512 if (may_propagate_copy (res, sprime))
7514 /* Mark the PHI for removal. */
7515 to_remove.safe_push (phi);
7516 gsi_next (&gsi);
7517 continue;
7520 remove_phi_node (&gsi, false);
7522 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
7523 sprime = fold_convert (TREE_TYPE (res), sprime);
7524 gimple *stmt = gimple_build_assign (res, sprime);
7525 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
7526 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
7527 continue;
7530 eliminate_push_avail (b, res);
7531 gsi_next (&gsi);
7534 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
7535 !gsi_end_p (gsi);
7536 gsi_next (&gsi))
7537 eliminate_stmt (b, &gsi);
7539 /* Replace destination PHI arguments. */
7540 edge_iterator ei;
7541 edge e;
7542 FOR_EACH_EDGE (e, ei, b->succs)
7543 if (e->flags & EDGE_EXECUTABLE)
7544 for (gphi_iterator gsi = gsi_start_phis (e->dest);
7545 !gsi_end_p (gsi);
7546 gsi_next (&gsi))
7548 gphi *phi = gsi.phi ();
7549 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
7550 tree arg = USE_FROM_PTR (use_p);
7551 if (TREE_CODE (arg) != SSA_NAME
7552 || virtual_operand_p (arg))
7553 continue;
7554 tree sprime = eliminate_avail (b, arg);
7555 if (sprime && may_propagate_copy (arg, sprime,
7556 !(e->flags & EDGE_ABNORMAL)))
7557 propagate_value (use_p, sprime);
7560 vn_context_bb = NULL;
7562 return NULL;
7565 /* Make no longer available leaders no longer available. */
7567 void
7568 eliminate_dom_walker::after_dom_children (basic_block)
7570 tree entry;
7571 while ((entry = avail_stack.pop ()) != NULL_TREE)
7573 tree valnum = VN_INFO (entry)->valnum;
7574 tree old = avail[SSA_NAME_VERSION (valnum)];
7575 if (old == entry)
7576 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
7577 else
7578 avail[SSA_NAME_VERSION (valnum)] = entry;
7582 /* Remove queued stmts and perform delayed cleanups. */
7584 unsigned
7585 eliminate_dom_walker::eliminate_cleanup (bool region_p)
7587 statistics_counter_event (cfun, "Eliminated", eliminations);
7588 statistics_counter_event (cfun, "Insertions", insertions);
7590 /* We cannot remove stmts during BB walk, especially not release SSA
7591 names there as this confuses the VN machinery. The stmts ending
7592 up in to_remove are either stores or simple copies.
7593 Remove stmts in reverse order to make debug stmt creation possible. */
7594 while (!to_remove.is_empty ())
7596 bool do_release_defs = true;
7597 gimple *stmt = to_remove.pop ();
7599 /* When we are value-numbering a region we do not require exit PHIs to
7600 be present so we have to make sure to deal with uses outside of the
7601 region of stmts that we thought are eliminated.
7602 ??? Note we may be confused by uses in dead regions we didn't run
7603 elimination on. Rather than checking individual uses we accept
7604 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
7605 contains such example). */
7606 if (region_p)
7608 if (gphi *phi = dyn_cast <gphi *> (stmt))
7610 tree lhs = gimple_phi_result (phi);
7611 if (!has_zero_uses (lhs))
7613 if (dump_file && (dump_flags & TDF_DETAILS))
7614 fprintf (dump_file, "Keeping eliminated stmt live "
7615 "as copy because of out-of-region uses\n");
7616 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
7617 gimple *copy = gimple_build_assign (lhs, sprime);
7618 gimple_stmt_iterator gsi
7619 = gsi_after_labels (gimple_bb (stmt));
7620 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
7621 do_release_defs = false;
7624 else if (tree lhs = gimple_get_lhs (stmt))
7625 if (TREE_CODE (lhs) == SSA_NAME
7626 && !has_zero_uses (lhs))
7628 if (dump_file && (dump_flags & TDF_DETAILS))
7629 fprintf (dump_file, "Keeping eliminated stmt live "
7630 "as copy because of out-of-region uses\n");
7631 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
7632 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
7633 if (is_gimple_assign (stmt))
7635 gimple_assign_set_rhs_from_tree (&gsi, sprime);
7636 stmt = gsi_stmt (gsi);
7637 update_stmt (stmt);
7638 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
7639 bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
7640 continue;
7642 else
7644 gimple *copy = gimple_build_assign (lhs, sprime);
7645 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
7646 do_release_defs = false;
7651 if (dump_file && (dump_flags & TDF_DETAILS))
7653 fprintf (dump_file, "Removing dead stmt ");
7654 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
7657 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
7658 if (gimple_code (stmt) == GIMPLE_PHI)
7659 remove_phi_node (&gsi, do_release_defs);
7660 else
7662 basic_block bb = gimple_bb (stmt);
7663 unlink_stmt_vdef (stmt);
7664 if (gsi_remove (&gsi, true))
7665 bitmap_set_bit (need_eh_cleanup, bb->index);
7666 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
7667 bitmap_set_bit (need_ab_cleanup, bb->index);
7668 if (do_release_defs)
7669 release_defs (stmt);
7672 /* Removing a stmt may expose a forwarder block. */
7673 el_todo |= TODO_cleanup_cfg;
7676 /* Fixup stmts that became noreturn calls. This may require splitting
7677 blocks and thus isn't possible during the dominator walk. Do this
7678 in reverse order so we don't inadvertedly remove a stmt we want to
7679 fixup by visiting a dominating now noreturn call first. */
7680 while (!to_fixup.is_empty ())
7682 gimple *stmt = to_fixup.pop ();
7684 if (dump_file && (dump_flags & TDF_DETAILS))
7686 fprintf (dump_file, "Fixing up noreturn call ");
7687 print_gimple_stmt (dump_file, stmt, 0);
7690 if (fixup_noreturn_call (stmt))
7691 el_todo |= TODO_cleanup_cfg;
7694 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
7695 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
7697 if (do_eh_cleanup)
7698 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
7700 if (do_ab_cleanup)
7701 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
7703 if (do_eh_cleanup || do_ab_cleanup)
7704 el_todo |= TODO_cleanup_cfg;
7706 return el_todo;
7709 /* Eliminate fully redundant computations. */
7711 unsigned
7712 eliminate_with_rpo_vn (bitmap inserted_exprs)
7714 eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
7716 eliminate_dom_walker *saved_rpo_avail = rpo_avail;
7717 rpo_avail = &walker;
7718 walker.walk (cfun->cfg->x_entry_block_ptr);
7719 rpo_avail = saved_rpo_avail;
7721 return walker.eliminate_cleanup ();
7724 static unsigned
7725 do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
7726 bool iterate, bool eliminate, bool skip_entry_phis,
7727 vn_lookup_kind kind);
7729 void
7730 run_rpo_vn (vn_lookup_kind kind)
7732 do_rpo_vn_1 (cfun, NULL, NULL, true, false, false, kind);
7734 /* ??? Prune requirement of these. */
7735 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
7737 /* Initialize the value ids and prune out remaining VN_TOPs
7738 from dead code. */
7739 tree name;
7740 unsigned i;
7741 FOR_EACH_SSA_NAME (i, name, cfun)
7743 vn_ssa_aux_t info = VN_INFO (name);
7744 if (!info->visited
7745 || info->valnum == VN_TOP)
7746 info->valnum = name;
7747 if (info->valnum == name)
7748 info->value_id = get_next_value_id ();
7749 else if (is_gimple_min_invariant (info->valnum))
7750 info->value_id = get_or_alloc_constant_value_id (info->valnum);
7753 /* Propagate. */
7754 FOR_EACH_SSA_NAME (i, name, cfun)
7756 vn_ssa_aux_t info = VN_INFO (name);
7757 if (TREE_CODE (info->valnum) == SSA_NAME
7758 && info->valnum != name
7759 && info->value_id != VN_INFO (info->valnum)->value_id)
7760 info->value_id = VN_INFO (info->valnum)->value_id;
7763 set_hashtable_value_ids ();
7765 if (dump_file && (dump_flags & TDF_DETAILS))
7767 fprintf (dump_file, "Value numbers:\n");
7768 FOR_EACH_SSA_NAME (i, name, cfun)
7770 if (VN_INFO (name)->visited
7771 && SSA_VAL (name) != name)
7773 print_generic_expr (dump_file, name);
7774 fprintf (dump_file, " = ");
7775 print_generic_expr (dump_file, SSA_VAL (name));
7776 fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
7782 /* Free VN associated data structures. */
7784 void
7785 free_rpo_vn (void)
7787 free_vn_table (valid_info);
7788 XDELETE (valid_info);
7789 obstack_free (&vn_tables_obstack, NULL);
7790 obstack_free (&vn_tables_insert_obstack, NULL);
7792 vn_ssa_aux_iterator_type it;
7793 vn_ssa_aux_t info;
7794 FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
7795 if (info->needs_insertion)
7796 release_ssa_name (info->name);
7797 obstack_free (&vn_ssa_aux_obstack, NULL);
7798 delete vn_ssa_aux_hash;
7800 delete constant_to_value_id;
7801 constant_to_value_id = NULL;
7804 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
7806 static tree
7807 vn_lookup_simplify_result (gimple_match_op *res_op)
7809 if (!res_op->code.is_tree_code ())
7810 return NULL_TREE;
7811 tree *ops = res_op->ops;
7812 unsigned int length = res_op->num_ops;
7813 if (res_op->code == CONSTRUCTOR
7814 /* ??? We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
7815 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree. */
7816 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
7818 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
7819 ops = XALLOCAVEC (tree, length);
7820 for (unsigned i = 0; i < length; ++i)
7821 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
7823 vn_nary_op_t vnresult = NULL;
7824 tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
7825 res_op->type, ops, &vnresult);
7826 /* If this is used from expression simplification make sure to
7827 return an available expression. */
7828 if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
7829 res = rpo_avail->eliminate_avail (vn_context_bb, res);
7830 return res;
7833 /* Return a leader for OPs value that is valid at BB. */
7835 tree
7836 rpo_elim::eliminate_avail (basic_block bb, tree op)
7838 bool visited;
7839 tree valnum = SSA_VAL (op, &visited);
7840 /* If we didn't visit OP then it must be defined outside of the
7841 region we process and also dominate it. So it is available. */
7842 if (!visited)
7843 return op;
7844 if (TREE_CODE (valnum) == SSA_NAME)
7846 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
7847 return valnum;
7848 vn_ssa_aux_t valnum_info = VN_INFO (valnum);
7849 vn_avail *av = valnum_info->avail;
7850 if (!av)
7852 /* See above. But when there's availability info prefer
7853 what we recorded there for example to preserve LC SSA. */
7854 if (!valnum_info->visited)
7855 return valnum;
7856 return NULL_TREE;
7858 if (av->location == bb->index)
7859 /* On tramp3d 90% of the cases are here. */
7860 return ssa_name (av->leader);
7863 basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
7864 /* ??? During elimination we have to use availability at the
7865 definition site of a use we try to replace. This
7866 is required to not run into inconsistencies because
7867 of dominated_by_p_w_unex behavior and removing a definition
7868 while not replacing all uses.
7869 ??? We could try to consistently walk dominators
7870 ignoring non-executable regions. The nearest common
7871 dominator of bb and abb is where we can stop walking. We
7872 may also be able to "pre-compute" (bits of) the next immediate
7873 (non-)dominator during the RPO walk when marking edges as
7874 executable. */
7875 if (dominated_by_p_w_unex (bb, abb, true))
7877 tree leader = ssa_name (av->leader);
7878 /* Prevent eliminations that break loop-closed SSA. */
7879 if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
7880 && ! SSA_NAME_IS_DEFAULT_DEF (leader)
7881 && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
7882 (leader))->loop_father,
7883 bb))
7884 return NULL_TREE;
7885 if (dump_file && (dump_flags & TDF_DETAILS))
7887 print_generic_expr (dump_file, leader);
7888 fprintf (dump_file, " is available for ");
7889 print_generic_expr (dump_file, valnum);
7890 fprintf (dump_file, "\n");
7892 /* On tramp3d 99% of the _remaining_ cases succeed at
7893 the first enty. */
7894 return leader;
7896 /* ??? Can we somehow skip to the immediate dominator
7897 RPO index (bb_to_rpo)? Again, maybe not worth, on
7898 tramp3d the worst number of elements in the vector is 9. */
7899 av = av->next;
7901 while (av);
7902 /* While we prefer avail we have to fallback to using the value
7903 directly if defined outside of the region when none of the
7904 available defs suit. */
7905 if (!valnum_info->visited)
7906 return valnum;
7908 else if (valnum != VN_TOP)
7909 /* valnum is is_gimple_min_invariant. */
7910 return valnum;
7911 return NULL_TREE;
7914 /* Make LEADER a leader for its value at BB. */
7916 void
7917 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
7919 tree valnum = VN_INFO (leader)->valnum;
7920 if (valnum == VN_TOP
7921 || is_gimple_min_invariant (valnum))
7922 return;
7923 if (dump_file && (dump_flags & TDF_DETAILS))
7925 fprintf (dump_file, "Making available beyond BB%d ", bb->index);
7926 print_generic_expr (dump_file, leader);
7927 fprintf (dump_file, " for value ");
7928 print_generic_expr (dump_file, valnum);
7929 fprintf (dump_file, "\n");
7931 vn_ssa_aux_t value = VN_INFO (valnum);
7932 vn_avail *av;
7933 if (m_avail_freelist)
7935 av = m_avail_freelist;
7936 m_avail_freelist = m_avail_freelist->next;
7938 else
7939 av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
7940 av->location = bb->index;
7941 av->leader = SSA_NAME_VERSION (leader);
7942 av->next = value->avail;
7943 av->next_undo = last_pushed_avail;
7944 last_pushed_avail = value;
7945 value->avail = av;
7948 /* Valueization hook for RPO VN plus required state. */
7950 tree
7951 rpo_vn_valueize (tree name)
7953 if (TREE_CODE (name) == SSA_NAME)
7955 vn_ssa_aux_t val = VN_INFO (name);
7956 if (val)
7958 tree tem = val->valnum;
7959 if (tem != VN_TOP && tem != name)
7961 if (TREE_CODE (tem) != SSA_NAME)
7962 return tem;
7963 /* For all values we only valueize to an available leader
7964 which means we can use SSA name info without restriction. */
7965 tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
7966 if (tem)
7967 return tem;
7971 return name;
7974 /* Insert on PRED_E predicates derived from CODE OPS being true besides the
7975 inverted condition. */
7977 static void
7978 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
7980 switch (code)
7982 case LT_EXPR:
7983 /* a < b -> a {!,<}= b */
7984 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
7985 ops, boolean_true_node, 0, pred_e);
7986 vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
7987 ops, boolean_true_node, 0, pred_e);
7988 /* a < b -> ! a {>,=} b */
7989 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
7990 ops, boolean_false_node, 0, pred_e);
7991 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
7992 ops, boolean_false_node, 0, pred_e);
7993 break;
7994 case GT_EXPR:
7995 /* a > b -> a {!,>}= b */
7996 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
7997 ops, boolean_true_node, 0, pred_e);
7998 vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
7999 ops, boolean_true_node, 0, pred_e);
8000 /* a > b -> ! a {<,=} b */
8001 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
8002 ops, boolean_false_node, 0, pred_e);
8003 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
8004 ops, boolean_false_node, 0, pred_e);
8005 break;
8006 case EQ_EXPR:
8007 /* a == b -> ! a {<,>} b */
8008 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
8009 ops, boolean_false_node, 0, pred_e);
8010 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
8011 ops, boolean_false_node, 0, pred_e);
8012 break;
8013 case LE_EXPR:
8014 case GE_EXPR:
8015 case NE_EXPR:
8016 /* Nothing besides inverted condition. */
8017 break;
8018 default:;
8022 /* Main stmt worker for RPO VN, process BB. */
8024 static unsigned
8025 process_bb (rpo_elim &avail, basic_block bb,
8026 bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
8027 bool do_region, bitmap exit_bbs, bool skip_phis)
8029 unsigned todo = 0;
8030 edge_iterator ei;
8031 edge e;
8033 vn_context_bb = bb;
8035 /* If we are in loop-closed SSA preserve this state. This is
8036 relevant when called on regions from outside of FRE/PRE. */
8037 bool lc_phi_nodes = false;
8038 if (!skip_phis
8039 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
8040 FOR_EACH_EDGE (e, ei, bb->preds)
8041 if (e->src->loop_father != e->dest->loop_father
8042 && flow_loop_nested_p (e->dest->loop_father,
8043 e->src->loop_father))
8045 lc_phi_nodes = true;
8046 break;
8049 /* When we visit a loop header substitute into loop info. */
8050 if (!iterate && eliminate && bb->loop_father->header == bb)
8052 /* Keep fields in sync with substitute_in_loop_info. */
8053 if (bb->loop_father->nb_iterations)
8054 bb->loop_father->nb_iterations
8055 = simplify_replace_tree (bb->loop_father->nb_iterations,
8056 NULL_TREE, NULL_TREE, &vn_valueize_for_srt);
8059 /* Value-number all defs in the basic-block. */
8060 if (!skip_phis)
8061 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
8062 gsi_next (&gsi))
8064 gphi *phi = gsi.phi ();
8065 tree res = PHI_RESULT (phi);
8066 vn_ssa_aux_t res_info = VN_INFO (res);
8067 if (!bb_visited)
8069 gcc_assert (!res_info->visited);
8070 res_info->valnum = VN_TOP;
8071 res_info->visited = true;
8074 /* When not iterating force backedge values to varying. */
8075 visit_stmt (phi, !iterate_phis);
8076 if (virtual_operand_p (res))
8077 continue;
8079 /* Eliminate */
8080 /* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
8081 how we handle backedges and availability.
8082 And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization. */
8083 tree val = res_info->valnum;
8084 if (res != val && !iterate && eliminate)
8086 if (tree leader = avail.eliminate_avail (bb, res))
8088 if (leader != res
8089 /* Preserve loop-closed SSA form. */
8090 && (! lc_phi_nodes
8091 || is_gimple_min_invariant (leader)))
8093 if (dump_file && (dump_flags & TDF_DETAILS))
8095 fprintf (dump_file, "Replaced redundant PHI node "
8096 "defining ");
8097 print_generic_expr (dump_file, res);
8098 fprintf (dump_file, " with ");
8099 print_generic_expr (dump_file, leader);
8100 fprintf (dump_file, "\n");
8102 avail.eliminations++;
8104 if (may_propagate_copy (res, leader))
8106 /* Schedule for removal. */
8107 avail.to_remove.safe_push (phi);
8108 continue;
8110 /* ??? Else generate a copy stmt. */
8114 /* Only make defs available that not already are. But make
8115 sure loop-closed SSA PHI node defs are picked up for
8116 downstream uses. */
8117 if (lc_phi_nodes
8118 || res == val
8119 || ! avail.eliminate_avail (bb, res))
8120 avail.eliminate_push_avail (bb, res);
8123 /* For empty BBs mark outgoing edges executable. For non-empty BBs
8124 we do this when processing the last stmt as we have to do this
8125 before elimination which otherwise forces GIMPLE_CONDs to
8126 if (1 != 0) style when seeing non-executable edges. */
8127 if (gsi_end_p (gsi_start_bb (bb)))
8129 FOR_EACH_EDGE (e, ei, bb->succs)
8131 if (!(e->flags & EDGE_EXECUTABLE))
8133 if (dump_file && (dump_flags & TDF_DETAILS))
8134 fprintf (dump_file,
8135 "marking outgoing edge %d -> %d executable\n",
8136 e->src->index, e->dest->index);
8137 e->flags |= EDGE_EXECUTABLE;
8138 e->dest->flags |= BB_EXECUTABLE;
8140 else if (!(e->dest->flags & BB_EXECUTABLE))
8142 if (dump_file && (dump_flags & TDF_DETAILS))
8143 fprintf (dump_file,
8144 "marking destination block %d reachable\n",
8145 e->dest->index);
8146 e->dest->flags |= BB_EXECUTABLE;
8150 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
8151 !gsi_end_p (gsi); gsi_next (&gsi))
8153 ssa_op_iter i;
8154 tree op;
8155 if (!bb_visited)
8157 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
8159 vn_ssa_aux_t op_info = VN_INFO (op);
8160 gcc_assert (!op_info->visited);
8161 op_info->valnum = VN_TOP;
8162 op_info->visited = true;
8165 /* We somehow have to deal with uses that are not defined
8166 in the processed region. Forcing unvisited uses to
8167 varying here doesn't play well with def-use following during
8168 expression simplification, so we deal with this by checking
8169 the visited flag in SSA_VAL. */
8172 visit_stmt (gsi_stmt (gsi));
8174 gimple *last = gsi_stmt (gsi);
8175 e = NULL;
8176 switch (gimple_code (last))
8178 case GIMPLE_SWITCH:
8179 e = find_taken_edge (bb, vn_valueize (gimple_switch_index
8180 (as_a <gswitch *> (last))));
8181 break;
8182 case GIMPLE_COND:
8184 tree lhs = vn_valueize (gimple_cond_lhs (last));
8185 tree rhs = vn_valueize (gimple_cond_rhs (last));
8186 tree val = gimple_simplify (gimple_cond_code (last),
8187 boolean_type_node, lhs, rhs,
8188 NULL, vn_valueize);
8189 /* If the condition didn't simplfy see if we have recorded
8190 an expression from sofar taken edges. */
8191 if (! val || TREE_CODE (val) != INTEGER_CST)
8193 vn_nary_op_t vnresult;
8194 tree ops[2];
8195 ops[0] = lhs;
8196 ops[1] = rhs;
8197 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
8198 boolean_type_node, ops,
8199 &vnresult);
8200 /* Did we get a predicated value? */
8201 if (! val && vnresult && vnresult->predicated_values)
8203 val = vn_nary_op_get_predicated_value (vnresult, bb);
8204 if (val && dump_file && (dump_flags & TDF_DETAILS))
8206 fprintf (dump_file, "Got predicated value ");
8207 print_generic_expr (dump_file, val, TDF_NONE);
8208 fprintf (dump_file, " for ");
8209 print_gimple_stmt (dump_file, last, TDF_SLIM);
8213 if (val)
8214 e = find_taken_edge (bb, val);
8215 if (! e)
8217 /* If we didn't manage to compute the taken edge then
8218 push predicated expressions for the condition itself
8219 and related conditions to the hashtables. This allows
8220 simplification of redundant conditions which is
8221 important as early cleanup. */
8222 edge true_e, false_e;
8223 extract_true_false_edges_from_block (bb, &true_e, &false_e);
8224 enum tree_code code = gimple_cond_code (last);
8225 enum tree_code icode
8226 = invert_tree_comparison (code, HONOR_NANS (lhs));
8227 tree ops[2];
8228 ops[0] = lhs;
8229 ops[1] = rhs;
8230 if ((do_region && bitmap_bit_p (exit_bbs, true_e->dest->index))
8231 || !can_track_predicate_on_edge (true_e))
8232 true_e = NULL;
8233 if ((do_region && bitmap_bit_p (exit_bbs, false_e->dest->index))
8234 || !can_track_predicate_on_edge (false_e))
8235 false_e = NULL;
8236 if (true_e)
8237 vn_nary_op_insert_pieces_predicated
8238 (2, code, boolean_type_node, ops,
8239 boolean_true_node, 0, true_e);
8240 if (false_e)
8241 vn_nary_op_insert_pieces_predicated
8242 (2, code, boolean_type_node, ops,
8243 boolean_false_node, 0, false_e);
8244 if (icode != ERROR_MARK)
8246 if (true_e)
8247 vn_nary_op_insert_pieces_predicated
8248 (2, icode, boolean_type_node, ops,
8249 boolean_false_node, 0, true_e);
8250 if (false_e)
8251 vn_nary_op_insert_pieces_predicated
8252 (2, icode, boolean_type_node, ops,
8253 boolean_true_node, 0, false_e);
8255 /* Relax for non-integers, inverted condition handled
8256 above. */
8257 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
8259 if (true_e)
8260 insert_related_predicates_on_edge (code, ops, true_e);
8261 if (false_e)
8262 insert_related_predicates_on_edge (icode, ops, false_e);
8265 break;
8267 case GIMPLE_GOTO:
8268 e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
8269 break;
8270 default:
8271 e = NULL;
8273 if (e)
8275 todo = TODO_cleanup_cfg;
8276 if (!(e->flags & EDGE_EXECUTABLE))
8278 if (dump_file && (dump_flags & TDF_DETAILS))
8279 fprintf (dump_file,
8280 "marking known outgoing %sedge %d -> %d executable\n",
8281 e->flags & EDGE_DFS_BACK ? "back-" : "",
8282 e->src->index, e->dest->index);
8283 e->flags |= EDGE_EXECUTABLE;
8284 e->dest->flags |= BB_EXECUTABLE;
8286 else if (!(e->dest->flags & BB_EXECUTABLE))
8288 if (dump_file && (dump_flags & TDF_DETAILS))
8289 fprintf (dump_file,
8290 "marking destination block %d reachable\n",
8291 e->dest->index);
8292 e->dest->flags |= BB_EXECUTABLE;
8295 else if (gsi_one_before_end_p (gsi))
8297 FOR_EACH_EDGE (e, ei, bb->succs)
8299 if (!(e->flags & EDGE_EXECUTABLE))
8301 if (dump_file && (dump_flags & TDF_DETAILS))
8302 fprintf (dump_file,
8303 "marking outgoing edge %d -> %d executable\n",
8304 e->src->index, e->dest->index);
8305 e->flags |= EDGE_EXECUTABLE;
8306 e->dest->flags |= BB_EXECUTABLE;
8308 else if (!(e->dest->flags & BB_EXECUTABLE))
8310 if (dump_file && (dump_flags & TDF_DETAILS))
8311 fprintf (dump_file,
8312 "marking destination block %d reachable\n",
8313 e->dest->index);
8314 e->dest->flags |= BB_EXECUTABLE;
8319 /* Eliminate. That also pushes to avail. */
8320 if (eliminate && ! iterate)
8321 avail.eliminate_stmt (bb, &gsi);
8322 else
8323 /* If not eliminating, make all not already available defs
8324 available. But avoid picking up dead defs. */
8325 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
8326 if (! has_zero_uses (op)
8327 && ! avail.eliminate_avail (bb, op))
8328 avail.eliminate_push_avail (bb, op);
8331 /* Eliminate in destination PHI arguments. Always substitute in dest
8332 PHIs, even for non-executable edges. This handles region
8333 exits PHIs. */
8334 if (!iterate && eliminate)
8335 FOR_EACH_EDGE (e, ei, bb->succs)
8336 for (gphi_iterator gsi = gsi_start_phis (e->dest);
8337 !gsi_end_p (gsi); gsi_next (&gsi))
8339 gphi *phi = gsi.phi ();
8340 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
8341 tree arg = USE_FROM_PTR (use_p);
8342 if (TREE_CODE (arg) != SSA_NAME
8343 || virtual_operand_p (arg))
8344 continue;
8345 tree sprime;
8346 if (SSA_NAME_IS_DEFAULT_DEF (arg))
8348 sprime = SSA_VAL (arg);
8349 gcc_assert (TREE_CODE (sprime) != SSA_NAME
8350 || SSA_NAME_IS_DEFAULT_DEF (sprime));
8352 else
8353 /* Look for sth available at the definition block of the argument.
8354 This avoids inconsistencies between availability there which
8355 decides if the stmt can be removed and availability at the
8356 use site. The SSA property ensures that things available
8357 at the definition are also available at uses. */
8358 sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
8359 arg);
8360 if (sprime
8361 && sprime != arg
8362 && may_propagate_copy (arg, sprime, !(e->flags & EDGE_ABNORMAL)))
8363 propagate_value (use_p, sprime);
8366 vn_context_bb = NULL;
8367 return todo;
8370 /* Unwind state per basic-block. */
8372 struct unwind_state
8374 /* Times this block has been visited. */
8375 unsigned visited;
8376 /* Whether to handle this as iteration point or whether to treat
8377 incoming backedge PHI values as varying. */
8378 bool iterate;
8379 /* Maximum RPO index this block is reachable from. */
8380 int max_rpo;
8381 /* Unwind state. */
8382 void *ob_top;
8383 vn_reference_t ref_top;
8384 vn_phi_t phi_top;
8385 vn_nary_op_t nary_top;
8386 vn_avail *avail_top;
8389 /* Unwind the RPO VN state for iteration. */
8391 static void
8392 do_unwind (unwind_state *to, rpo_elim &avail)
8394 gcc_assert (to->iterate);
8395 for (; last_inserted_nary != to->nary_top;
8396 last_inserted_nary = last_inserted_nary->next)
8398 vn_nary_op_t *slot;
8399 slot = valid_info->nary->find_slot_with_hash
8400 (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
8401 /* Predication causes the need to restore previous state. */
8402 if ((*slot)->unwind_to)
8403 *slot = (*slot)->unwind_to;
8404 else
8405 valid_info->nary->clear_slot (slot);
8407 for (; last_inserted_phi != to->phi_top;
8408 last_inserted_phi = last_inserted_phi->next)
8410 vn_phi_t *slot;
8411 slot = valid_info->phis->find_slot_with_hash
8412 (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
8413 valid_info->phis->clear_slot (slot);
8415 for (; last_inserted_ref != to->ref_top;
8416 last_inserted_ref = last_inserted_ref->next)
8418 vn_reference_t *slot;
8419 slot = valid_info->references->find_slot_with_hash
8420 (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
8421 (*slot)->operands.release ();
8422 valid_info->references->clear_slot (slot);
8424 obstack_free (&vn_tables_obstack, to->ob_top);
8426 /* Prune [rpo_idx, ] from avail. */
8427 for (; last_pushed_avail && last_pushed_avail->avail != to->avail_top;)
8429 vn_ssa_aux_t val = last_pushed_avail;
8430 vn_avail *av = val->avail;
8431 val->avail = av->next;
8432 last_pushed_avail = av->next_undo;
8433 av->next = avail.m_avail_freelist;
8434 avail.m_avail_freelist = av;
8438 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
8439 If ITERATE is true then treat backedges optimistically as not
8440 executed and iterate. If ELIMINATE is true then perform
8441 elimination, otherwise leave that to the caller. If SKIP_ENTRY_PHIS
8442 is true then force PHI nodes in ENTRY->dest to VARYING. */
8444 static unsigned
8445 do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
8446 bool iterate, bool eliminate, bool skip_entry_phis,
8447 vn_lookup_kind kind)
8449 unsigned todo = 0;
8450 default_vn_walk_kind = kind;
8452 /* We currently do not support region-based iteration when
8453 elimination is requested. */
8454 gcc_assert (!entry || !iterate || !eliminate);
8455 /* When iterating we need loop info up-to-date. */
8456 gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
8458 bool do_region = entry != NULL;
8459 if (!do_region)
8461 entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
8462 exit_bbs = BITMAP_ALLOC (NULL);
8463 bitmap_set_bit (exit_bbs, EXIT_BLOCK);
8466 /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
8467 re-mark those that are contained in the region. */
8468 edge_iterator ei;
8469 edge e;
8470 FOR_EACH_EDGE (e, ei, entry->dest->preds)
8471 e->flags &= ~EDGE_DFS_BACK;
8473 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
8474 auto_vec<std::pair<int, int> > toplevel_scc_extents;
8475 int n = rev_post_order_and_mark_dfs_back_seme
8476 (fn, entry, exit_bbs, true, rpo, !iterate ? &toplevel_scc_extents : NULL);
8478 if (!do_region)
8479 BITMAP_FREE (exit_bbs);
8481 /* If there are any non-DFS_BACK edges into entry->dest skip
8482 processing PHI nodes for that block. This supports
8483 value-numbering loop bodies w/o the actual loop. */
8484 FOR_EACH_EDGE (e, ei, entry->dest->preds)
8485 if (e != entry
8486 && !(e->flags & EDGE_DFS_BACK))
8487 break;
8488 if (e != NULL && dump_file && (dump_flags & TDF_DETAILS))
8489 fprintf (dump_file, "Region does not contain all edges into "
8490 "the entry block, skipping its PHIs.\n");
8491 skip_entry_phis |= e != NULL;
8493 int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
8494 for (int i = 0; i < n; ++i)
8495 bb_to_rpo[rpo[i]] = i;
8497 unwind_state *rpo_state = XNEWVEC (unwind_state, n);
8499 rpo_elim avail (entry->dest);
8500 rpo_avail = &avail;
8502 /* Verify we have no extra entries into the region. */
8503 if (flag_checking && do_region)
8505 auto_bb_flag bb_in_region (fn);
8506 for (int i = 0; i < n; ++i)
8508 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
8509 bb->flags |= bb_in_region;
8511 /* We can't merge the first two loops because we cannot rely
8512 on EDGE_DFS_BACK for edges not within the region. But if
8513 we decide to always have the bb_in_region flag we can
8514 do the checking during the RPO walk itself (but then it's
8515 also easy to handle MEME conservatively). */
8516 for (int i = 0; i < n; ++i)
8518 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
8519 edge e;
8520 edge_iterator ei;
8521 FOR_EACH_EDGE (e, ei, bb->preds)
8522 gcc_assert (e == entry
8523 || (skip_entry_phis && bb == entry->dest)
8524 || (e->src->flags & bb_in_region));
8526 for (int i = 0; i < n; ++i)
8528 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
8529 bb->flags &= ~bb_in_region;
8533 /* Create the VN state. For the initial size of the various hashtables
8534 use a heuristic based on region size and number of SSA names. */
8535 unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
8536 / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
8537 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
8538 next_value_id = 1;
8539 next_constant_value_id = -1;
8541 vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
8542 gcc_obstack_init (&vn_ssa_aux_obstack);
8544 gcc_obstack_init (&vn_tables_obstack);
8545 gcc_obstack_init (&vn_tables_insert_obstack);
8546 valid_info = XCNEW (struct vn_tables_s);
8547 allocate_vn_table (valid_info, region_size);
8548 last_inserted_ref = NULL;
8549 last_inserted_phi = NULL;
8550 last_inserted_nary = NULL;
8551 last_pushed_avail = NULL;
8553 vn_valueize = rpo_vn_valueize;
8555 /* Initialize the unwind state and edge/BB executable state. */
8556 unsigned curr_scc = 0;
8557 for (int i = 0; i < n; ++i)
8559 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
8560 rpo_state[i].visited = 0;
8561 rpo_state[i].max_rpo = i;
8562 if (!iterate && curr_scc < toplevel_scc_extents.length ())
8564 if (i >= toplevel_scc_extents[curr_scc].first
8565 && i <= toplevel_scc_extents[curr_scc].second)
8566 rpo_state[i].max_rpo = toplevel_scc_extents[curr_scc].second;
8567 if (i == toplevel_scc_extents[curr_scc].second)
8568 curr_scc++;
8570 bb->flags &= ~BB_EXECUTABLE;
8571 bool has_backedges = false;
8572 edge e;
8573 edge_iterator ei;
8574 FOR_EACH_EDGE (e, ei, bb->preds)
8576 if (e->flags & EDGE_DFS_BACK)
8577 has_backedges = true;
8578 e->flags &= ~EDGE_EXECUTABLE;
8579 if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
8580 continue;
8582 rpo_state[i].iterate = iterate && has_backedges;
8584 entry->flags |= EDGE_EXECUTABLE;
8585 entry->dest->flags |= BB_EXECUTABLE;
8587 /* As heuristic to improve compile-time we handle only the N innermost
8588 loops and the outermost one optimistically. */
8589 if (iterate)
8591 unsigned max_depth = param_rpo_vn_max_loop_depth;
8592 for (auto loop : loops_list (cfun, LI_ONLY_INNERMOST))
8593 if (loop_depth (loop) > max_depth)
8594 for (unsigned i = 2;
8595 i < loop_depth (loop) - max_depth; ++i)
8597 basic_block header = superloop_at_depth (loop, i)->header;
8598 bool non_latch_backedge = false;
8599 edge e;
8600 edge_iterator ei;
8601 FOR_EACH_EDGE (e, ei, header->preds)
8602 if (e->flags & EDGE_DFS_BACK)
8604 /* There can be a non-latch backedge into the header
8605 which is part of an outer irreducible region. We
8606 cannot avoid iterating this block then. */
8607 if (!dominated_by_p (CDI_DOMINATORS,
8608 e->src, e->dest))
8610 if (dump_file && (dump_flags & TDF_DETAILS))
8611 fprintf (dump_file, "non-latch backedge %d -> %d "
8612 "forces iteration of loop %d\n",
8613 e->src->index, e->dest->index, loop->num);
8614 non_latch_backedge = true;
8616 else
8617 e->flags |= EDGE_EXECUTABLE;
8619 rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
8623 uint64_t nblk = 0;
8624 int idx = 0;
8625 if (iterate)
8626 /* Go and process all blocks, iterating as necessary. */
8629 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
8631 /* If the block has incoming backedges remember unwind state. This
8632 is required even for non-executable blocks since in irreducible
8633 regions we might reach them via the backedge and re-start iterating
8634 from there.
8635 Note we can individually mark blocks with incoming backedges to
8636 not iterate where we then handle PHIs conservatively. We do that
8637 heuristically to reduce compile-time for degenerate cases. */
8638 if (rpo_state[idx].iterate)
8640 rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
8641 rpo_state[idx].ref_top = last_inserted_ref;
8642 rpo_state[idx].phi_top = last_inserted_phi;
8643 rpo_state[idx].nary_top = last_inserted_nary;
8644 rpo_state[idx].avail_top
8645 = last_pushed_avail ? last_pushed_avail->avail : NULL;
8648 if (!(bb->flags & BB_EXECUTABLE))
8650 if (dump_file && (dump_flags & TDF_DETAILS))
8651 fprintf (dump_file, "Block %d: BB%d found not executable\n",
8652 idx, bb->index);
8653 idx++;
8654 continue;
8657 if (dump_file && (dump_flags & TDF_DETAILS))
8658 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
8659 nblk++;
8660 todo |= process_bb (avail, bb,
8661 rpo_state[idx].visited != 0,
8662 rpo_state[idx].iterate,
8663 iterate, eliminate, do_region, exit_bbs, false);
8664 rpo_state[idx].visited++;
8666 /* Verify if changed values flow over executable outgoing backedges
8667 and those change destination PHI values (that's the thing we
8668 can easily verify). Reduce over all such edges to the farthest
8669 away PHI. */
8670 int iterate_to = -1;
8671 edge_iterator ei;
8672 edge e;
8673 FOR_EACH_EDGE (e, ei, bb->succs)
8674 if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
8675 == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
8676 && rpo_state[bb_to_rpo[e->dest->index]].iterate)
8678 int destidx = bb_to_rpo[e->dest->index];
8679 if (!rpo_state[destidx].visited)
8681 if (dump_file && (dump_flags & TDF_DETAILS))
8682 fprintf (dump_file, "Unvisited destination %d\n",
8683 e->dest->index);
8684 if (iterate_to == -1 || destidx < iterate_to)
8685 iterate_to = destidx;
8686 continue;
8688 if (dump_file && (dump_flags & TDF_DETAILS))
8689 fprintf (dump_file, "Looking for changed values of backedge"
8690 " %d->%d destination PHIs\n",
8691 e->src->index, e->dest->index);
8692 vn_context_bb = e->dest;
8693 gphi_iterator gsi;
8694 for (gsi = gsi_start_phis (e->dest);
8695 !gsi_end_p (gsi); gsi_next (&gsi))
8697 bool inserted = false;
8698 /* While we'd ideally just iterate on value changes
8699 we CSE PHIs and do that even across basic-block
8700 boundaries. So even hashtable state changes can
8701 be important (which is roughly equivalent to
8702 PHI argument value changes). To not excessively
8703 iterate because of that we track whether a PHI
8704 was CSEd to with GF_PLF_1. */
8705 bool phival_changed;
8706 if ((phival_changed = visit_phi (gsi.phi (),
8707 &inserted, false))
8708 || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
8710 if (!phival_changed
8711 && dump_file && (dump_flags & TDF_DETAILS))
8712 fprintf (dump_file, "PHI was CSEd and hashtable "
8713 "state (changed)\n");
8714 if (iterate_to == -1 || destidx < iterate_to)
8715 iterate_to = destidx;
8716 break;
8719 vn_context_bb = NULL;
8721 if (iterate_to != -1)
8723 do_unwind (&rpo_state[iterate_to], avail);
8724 idx = iterate_to;
8725 if (dump_file && (dump_flags & TDF_DETAILS))
8726 fprintf (dump_file, "Iterating to %d BB%d\n",
8727 iterate_to, rpo[iterate_to]);
8728 continue;
8731 idx++;
8733 while (idx < n);
8735 else /* !iterate */
8737 /* Process all blocks greedily with a worklist that enforces RPO
8738 processing of reachable blocks. */
8739 auto_bitmap worklist;
8740 bitmap_set_bit (worklist, 0);
8741 while (!bitmap_empty_p (worklist))
8743 int idx = bitmap_clear_first_set_bit (worklist);
8744 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
8745 gcc_assert ((bb->flags & BB_EXECUTABLE)
8746 && !rpo_state[idx].visited);
8748 if (dump_file && (dump_flags & TDF_DETAILS))
8749 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
8751 /* When we run into predecessor edges where we cannot trust its
8752 executable state mark them executable so PHI processing will
8753 be conservative.
8754 ??? Do we need to force arguments flowing over that edge
8755 to be varying or will they even always be? */
8756 edge_iterator ei;
8757 edge e;
8758 FOR_EACH_EDGE (e, ei, bb->preds)
8759 if (!(e->flags & EDGE_EXECUTABLE)
8760 && (bb == entry->dest
8761 || (!rpo_state[bb_to_rpo[e->src->index]].visited
8762 && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
8763 >= (int)idx))))
8765 if (dump_file && (dump_flags & TDF_DETAILS))
8766 fprintf (dump_file, "Cannot trust state of predecessor "
8767 "edge %d -> %d, marking executable\n",
8768 e->src->index, e->dest->index);
8769 e->flags |= EDGE_EXECUTABLE;
8772 nblk++;
8773 todo |= process_bb (avail, bb, false, false, false, eliminate,
8774 do_region, exit_bbs,
8775 skip_entry_phis && bb == entry->dest);
8776 rpo_state[idx].visited++;
8778 FOR_EACH_EDGE (e, ei, bb->succs)
8779 if ((e->flags & EDGE_EXECUTABLE)
8780 && e->dest->index != EXIT_BLOCK
8781 && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
8782 && !rpo_state[bb_to_rpo[e->dest->index]].visited)
8783 bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
8787 /* If statistics or dump file active. */
8788 int nex = 0;
8789 unsigned max_visited = 1;
8790 for (int i = 0; i < n; ++i)
8792 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
8793 if (bb->flags & BB_EXECUTABLE)
8794 nex++;
8795 statistics_histogram_event (cfun, "RPO block visited times",
8796 rpo_state[i].visited);
8797 if (rpo_state[i].visited > max_visited)
8798 max_visited = rpo_state[i].visited;
8800 unsigned nvalues = 0, navail = 0;
8801 for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
8802 i != vn_ssa_aux_hash->end (); ++i)
8804 nvalues++;
8805 vn_avail *av = (*i)->avail;
8806 while (av)
8808 navail++;
8809 av = av->next;
8812 statistics_counter_event (cfun, "RPO blocks", n);
8813 statistics_counter_event (cfun, "RPO blocks visited", nblk);
8814 statistics_counter_event (cfun, "RPO blocks executable", nex);
8815 statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
8816 statistics_histogram_event (cfun, "RPO num values", nvalues);
8817 statistics_histogram_event (cfun, "RPO num avail", navail);
8818 statistics_histogram_event (cfun, "RPO num lattice",
8819 vn_ssa_aux_hash->elements ());
8820 if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
8822 fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
8823 " blocks in total discovering %d executable blocks iterating "
8824 "%d.%d times, a block was visited max. %u times\n",
8825 n, nblk, nex,
8826 (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
8827 max_visited);
8828 fprintf (dump_file, "RPO tracked %d values available at %d locations "
8829 "and %" PRIu64 " lattice elements\n",
8830 nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
8833 if (eliminate)
8835 /* When !iterate we already performed elimination during the RPO
8836 walk. */
8837 if (iterate)
8839 /* Elimination for region-based VN needs to be done within the
8840 RPO walk. */
8841 gcc_assert (! do_region);
8842 /* Note we can't use avail.walk here because that gets confused
8843 by the existing availability and it will be less efficient
8844 as well. */
8845 todo |= eliminate_with_rpo_vn (NULL);
8847 else
8848 todo |= avail.eliminate_cleanup (do_region);
8851 vn_valueize = NULL;
8852 rpo_avail = NULL;
8854 XDELETEVEC (bb_to_rpo);
8855 XDELETEVEC (rpo);
8856 XDELETEVEC (rpo_state);
8858 return todo;
8861 /* Region-based entry for RPO VN. Performs value-numbering and elimination
8862 on the SEME region specified by ENTRY and EXIT_BBS. If ENTRY is not
8863 the only edge into the region at ENTRY->dest PHI nodes in ENTRY->dest
8864 are not considered.
8865 If ITERATE is true then treat backedges optimistically as not
8866 executed and iterate. If ELIMINATE is true then perform
8867 elimination, otherwise leave that to the caller.
8868 If SKIP_ENTRY_PHIS is true then force PHI nodes in ENTRY->dest to VARYING.
8869 KIND specifies the amount of work done for handling memory operations. */
8871 unsigned
8872 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
8873 bool iterate, bool eliminate, bool skip_entry_phis,
8874 vn_lookup_kind kind)
8876 auto_timevar tv (TV_TREE_RPO_VN);
8877 unsigned todo = do_rpo_vn_1 (fn, entry, exit_bbs, iterate, eliminate,
8878 skip_entry_phis, kind);
8879 free_rpo_vn ();
8880 return todo;
8884 namespace {
8886 const pass_data pass_data_fre =
8888 GIMPLE_PASS, /* type */
8889 "fre", /* name */
8890 OPTGROUP_NONE, /* optinfo_flags */
8891 TV_TREE_FRE, /* tv_id */
8892 ( PROP_cfg | PROP_ssa ), /* properties_required */
8893 0, /* properties_provided */
8894 0, /* properties_destroyed */
8895 0, /* todo_flags_start */
8896 0, /* todo_flags_finish */
8899 class pass_fre : public gimple_opt_pass
8901 public:
8902 pass_fre (gcc::context *ctxt)
8903 : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
8906 /* opt_pass methods: */
8907 opt_pass * clone () final override { return new pass_fre (m_ctxt); }
8908 void set_pass_param (unsigned int n, bool param) final override
8910 gcc_assert (n == 0);
8911 may_iterate = param;
8913 bool gate (function *) final override
8915 return flag_tree_fre != 0 && (may_iterate || optimize > 1);
8917 unsigned int execute (function *) final override;
8919 private:
8920 bool may_iterate;
8921 }; // class pass_fre
8923 unsigned int
8924 pass_fre::execute (function *fun)
8926 unsigned todo = 0;
8928 /* At -O[1g] use the cheap non-iterating mode. */
8929 bool iterate_p = may_iterate && (optimize > 1);
8930 calculate_dominance_info (CDI_DOMINATORS);
8931 if (iterate_p)
8932 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
8934 todo = do_rpo_vn_1 (fun, NULL, NULL, iterate_p, true, false, VN_WALKREWRITE);
8935 free_rpo_vn ();
8937 if (iterate_p)
8938 loop_optimizer_finalize ();
8940 if (scev_initialized_p ())
8941 scev_reset_htab ();
8943 /* For late FRE after IVOPTs and unrolling, see if we can
8944 remove some TREE_ADDRESSABLE and rewrite stuff into SSA. */
8945 if (!may_iterate)
8946 todo |= TODO_update_address_taken;
8948 return todo;
8951 } // anon namespace
8953 gimple_opt_pass *
8954 make_pass_fre (gcc::context *ctxt)
8956 return new pass_fre (ctxt);
8959 #undef BB_EXECUTABLE