gcc/tree-ssa-sccvn.cc (official-gcc.git)
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2024 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "splay-tree.h"
25 #include "backend.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-iterator.h"
43 #include "gimple-fold.h"
44 #include "tree-eh.h"
45 #include "gimplify.h"
46 #include "flags.h"
47 #include "dojump.h"
48 #include "explow.h"
49 #include "calls.h"
50 #include "varasm.h"
51 #include "stmt.h"
52 #include "expr.h"
53 #include "tree-dfa.h"
54 #include "tree-ssa.h"
55 #include "dumpfile.h"
56 #include "cfgloop.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-match.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "tree-pass.h"
64 #include "statistics.h"
65 #include "langhooks.h"
66 #include "ipa-utils.h"
67 #include "dbgcnt.h"
68 #include "tree-cfgcleanup.h"
69 #include "tree-ssa-loop.h"
70 #include "tree-scalar-evolution.h"
71 #include "tree-ssa-loop-niter.h"
72 #include "builtins.h"
73 #include "fold-const-call.h"
74 #include "ipa-modref-tree.h"
75 #include "ipa-modref.h"
76 #include "tree-ssa-sccvn.h"
77 #include "alloc-pool.h"
78 #include "symbol-summary.h"
79 #include "sreal.h"
80 #include "ipa-cp.h"
81 #include "ipa-prop.h"
82 #include "target.h"
84 /* This algorithm is based on the SCC algorithm presented by Keith
85 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
86 (http://citeseer.ist.psu.edu/41805.html). In
87 straight line code, it is equivalent to a regular hash based value
88 numbering that is performed in reverse postorder.
90 For code with cycles, there are two alternatives, both of which
91 require keeping the hashtables separate from the actual list of
92 value numbers for SSA names.
94 1. Iterate value numbering in an RPO walk of the blocks, removing
95 all the entries from the hashtable after each iteration (but
96 keeping the SSA name->value number mapping between iterations).
97 Iterate until it does not change.
99 2. Perform value numbering as part of an SCC walk on the SSA graph,
100 iterating only the cycles in the SSA graph until they do not change
101 (using a separate, optimistic hashtable for value numbering the SCC
102 operands).
104 The second is not just faster in practice (because most SSA graph
105 cycles do not involve all the variables in the graph), it also has
106 some nice properties.
108 One of these nice properties is that when we pop an SCC off the
109 stack, we are guaranteed to have processed all the operands coming from
110 *outside of that SCC*, so we do not need to do anything special to
111 ensure they have value numbers.
113 Another nice property is that the SCC walk is done as part of a DFS
114 of the SSA graph, which makes it easy to perform combining and
115 simplifying operations at the same time.
117 The code below is deliberately written in a way that makes it easy
118 to separate the SCC walk from the other work it does.
120 In order to propagate constants through the code, we track which
121 expressions contain constants, and use those while folding. In
122 theory, we could also track expressions whose value numbers are
123 replaced, in case we end up folding based on expression
124 identities.
126 In order to value number memory, we assign value numbers to vuses.
127 This enables us to note that, for example, stores to the same
128 address of the same value from the same starting memory states are
129 equivalent.
130 TODO:
132 1. We can iterate only the changing portions of the SCC's, but
133 I have not seen an SCC big enough for this to be a win.
134 2. If you differentiate between phi nodes for loops and phi nodes
135 for if-then-else, you can properly consider phi nodes in different
136 blocks for equivalence.
137 3. We could value number vuses in more cases, particularly, whole
138 structure copies.
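/* As a small, purely illustrative example of the optimistic iteration
   described above (the SSA names are made up), consider the cycle

     i_1 = PHI <0 (preheader), i_3 (latch)>
     j_2 = PHI <0 (preheader), j_4 (latch)>
     i_3 = i_1 + 1;
     j_4 = j_2 + 1;

   Optimistically assuming i_1 and j_2 are equal gives i_3 and j_4 the
   same value number, and re-iterating the SCC confirms that assumption,
   so i and j are discovered to be equal without unrolling the cycle.  */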
141 /* There's no BB_EXECUTABLE but we can use BB_VISITED. */
142 #define BB_EXECUTABLE BB_VISITED
144 static vn_lookup_kind default_vn_walk_kind;
146 /* vn_nary_op hashtable helpers. */
148 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
150 typedef vn_nary_op_s *compare_type;
151 static inline hashval_t hash (const vn_nary_op_s *);
152 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
155 /* Return the computed hashcode for nary operation VNO1. */
157 inline hashval_t
158 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
160 return vno1->hashcode;
163 /* Compare nary operations VNO1 and VNO2 and return true if they are
164 equivalent. */
166 inline bool
167 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
169 return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
172 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
173 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
176 /* vn_phi hashtable helpers. */
178 static int
179 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
181 struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
183 static inline hashval_t hash (const vn_phi_s *);
184 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
187 /* Return the computed hashcode for phi operation VP1. */
189 inline hashval_t
190 vn_phi_hasher::hash (const vn_phi_s *vp1)
192 return vp1->hashcode;
195 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
197 inline bool
198 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
200 return vp1 == vp2 || vn_phi_eq (vp1, vp2);
203 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
204 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
207 /* Compare two reference operands P1 and P2 for equality. Return true if
208 they are equal, and false otherwise. */
210 static int
211 vn_reference_op_eq (const void *p1, const void *p2)
213 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
214 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
216 return (vro1->opcode == vro2->opcode
217 /* We do not care for differences in type qualification. */
218 && (vro1->type == vro2->type
219 || (vro1->type && vro2->type
220 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
221 TYPE_MAIN_VARIANT (vro2->type))))
222 && expressions_equal_p (vro1->op0, vro2->op0)
223 && expressions_equal_p (vro1->op1, vro2->op1)
224 && expressions_equal_p (vro1->op2, vro2->op2)
225 && (vro1->opcode != CALL_EXPR || vro1->clique == vro2->clique));
228 /* Release the operands of the reference structure VR. */
230 static inline void
231 free_reference (vn_reference_s *vr)
233 vr->operands.release ();
237 /* vn_reference hashtable helpers. */
239 struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
241 static inline hashval_t hash (const vn_reference_s *);
242 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
245 /* Return the hashcode for a given reference operation VR1. */
247 inline hashval_t
248 vn_reference_hasher::hash (const vn_reference_s *vr1)
250 return vr1->hashcode;
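/* Return true if the reference entries V and C are equal.  */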
253 inline bool
254 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
256 return v == c || vn_reference_eq (v, c);
259 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
260 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
262 /* Pretty-print OPS to OUTFILE. */
264 void
265 print_vn_reference_ops (FILE *outfile, const vec<vn_reference_op_s> ops)
267 vn_reference_op_t vro;
268 unsigned int i;
269 fprintf (outfile, "{");
270 for (i = 0; ops.iterate (i, &vro); i++)
272 bool closebrace = false;
273 if (vro->opcode != SSA_NAME
274 && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
276 fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
277 if (vro->op0 || vro->opcode == CALL_EXPR)
279 fprintf (outfile, "<");
280 closebrace = true;
283 if (vro->op0 || vro->opcode == CALL_EXPR)
285 if (!vro->op0)
286 fprintf (outfile, internal_fn_name ((internal_fn)vro->clique));
287 else
288 print_generic_expr (outfile, vro->op0);
289 if (vro->op1)
291 fprintf (outfile, ",");
292 print_generic_expr (outfile, vro->op1);
294 if (vro->op2)
296 fprintf (outfile, ",");
297 print_generic_expr (outfile, vro->op2);
300 if (closebrace)
301 fprintf (outfile, ">");
302 if (i != ops.length () - 1)
303 fprintf (outfile, ",");
305 fprintf (outfile, "}");
308 DEBUG_FUNCTION void
309 debug_vn_reference_ops (const vec<vn_reference_op_s> ops)
311 print_vn_reference_ops (stderr, ops);
312 fputc ('\n', stderr);
315 /* The set of VN hashtables. */
317 typedef struct vn_tables_s
319 vn_nary_op_table_type *nary;
320 vn_phi_table_type *phis;
321 vn_reference_table_type *references;
322 } *vn_tables_t;
325 /* vn_constant hashtable helpers. */
327 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
329 static inline hashval_t hash (const vn_constant_s *);
330 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
333 /* Hash table hash function for vn_constant_t. */
335 inline hashval_t
336 vn_constant_hasher::hash (const vn_constant_s *vc1)
338 return vc1->hashcode;
341 /* Hash table equality function for vn_constant_t. */
343 inline bool
344 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
346 if (vc1->hashcode != vc2->hashcode)
347 return false;
349 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
352 static hash_table<vn_constant_hasher> *constant_to_value_id;
355 /* Obstack we allocate the vn-tables elements from. */
356 static obstack vn_tables_obstack;
357 /* Special obstack we never unwind. */
358 static obstack vn_tables_insert_obstack;
360 static vn_reference_t last_inserted_ref;
361 static vn_phi_t last_inserted_phi;
362 static vn_nary_op_t last_inserted_nary;
363 static vn_ssa_aux_t last_pushed_avail;
365 /* Valid hashtables storing information we have proven to be
366 correct. */
367 static vn_tables_t valid_info;
370 /* Valueization hook for simplify_replace_tree. Valueize NAME if it is
371 an SSA name, otherwise just return it. */
372 tree (*vn_valueize) (tree);
373 static tree
374 vn_valueize_for_srt (tree t, void* context ATTRIBUTE_UNUSED)
376 basic_block saved_vn_context_bb = vn_context_bb;
377 /* Look for something available at the definition block of the argument.
378 This avoids inconsistencies between availability there which
379 decides if the stmt can be removed and availability at the
380 use site. The SSA property ensures that things available
381 at the definition are also available at uses. */
382 if (!SSA_NAME_IS_DEFAULT_DEF (t))
383 vn_context_bb = gimple_bb (SSA_NAME_DEF_STMT (t));
384 tree res = vn_valueize (t);
385 vn_context_bb = saved_vn_context_bb;
386 return res;
390 /* This represents the top of the VN lattice, which is the universal
391 value. */
393 tree VN_TOP;
395 /* Unique counter for our value ids. */
397 static unsigned int next_value_id;
398 static int next_constant_value_id;
401 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
402 are allocated on an obstack for locality reasons, and to free them
403 without looping over the vec. */
405 struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
407 typedef vn_ssa_aux_t value_type;
408 typedef tree compare_type;
409 static inline hashval_t hash (const value_type &);
410 static inline bool equal (const value_type &, const compare_type &);
411 static inline void mark_deleted (value_type &) {}
412 static const bool empty_zero_p = true;
413 static inline void mark_empty (value_type &e) { e = NULL; }
414 static inline bool is_deleted (value_type &) { return false; }
415 static inline bool is_empty (value_type &e) { return e == NULL; }
418 hashval_t
419 vn_ssa_aux_hasher::hash (const value_type &entry)
421 return SSA_NAME_VERSION (entry->name);
424 bool
425 vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
427 return name == entry->name;
430 static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
431 typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
432 static struct obstack vn_ssa_aux_obstack;
434 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
435 static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
436 vn_nary_op_table_type *);
437 static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
438 enum tree_code, tree, tree *);
439 static tree vn_lookup_simplify_result (gimple_match_op *);
440 static vn_reference_t vn_reference_lookup_or_insert_for_pieces
441 (tree, alias_set_type, alias_set_type, poly_int64, poly_int64, tree,
442 vec<vn_reference_op_s, va_heap>, tree);
444 /* Return whether there is value numbering information for a given SSA name. */
446 bool
447 has_VN_INFO (tree name)
449 return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
452 vn_ssa_aux_t
453 VN_INFO (tree name)
455 vn_ssa_aux_t *res
456 = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
457 INSERT);
458 if (*res != NULL)
459 return *res;
461 vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
462 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
463 newinfo->name = name;
464 newinfo->valnum = VN_TOP;
465 /* We are using the visited flag to handle uses with defs not within the
466 region being value-numbered. */
467 newinfo->visited = false;
469 /* Since we now create the VN_INFOs on demand we have to do initialization
470 different from VN_TOP here. */
471 if (SSA_NAME_IS_DEFAULT_DEF (name))
472 switch (TREE_CODE (SSA_NAME_VAR (name)))
474 case VAR_DECL:
475 /* All undefined vars are VARYING. */
476 newinfo->valnum = name;
477 newinfo->visited = true;
478 break;
480 case PARM_DECL:
481 /* Parameters are VARYING but we can record a condition
482 if we know it is a non-NULL pointer. */
483 newinfo->visited = true;
484 newinfo->valnum = name;
485 if (POINTER_TYPE_P (TREE_TYPE (name))
486 && nonnull_arg_p (SSA_NAME_VAR (name)))
488 tree ops[2];
489 ops[0] = name;
490 ops[1] = build_int_cst (TREE_TYPE (name), 0);
491 vn_nary_op_t nary;
492 /* Allocate from non-unwinding stack. */
493 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
494 init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
495 boolean_type_node, ops);
496 nary->predicated_values = 0;
497 nary->u.result = boolean_true_node;
498 vn_nary_op_insert_into (nary, valid_info->nary);
499 gcc_assert (nary->unwind_to == NULL);
500 /* Also do not link it into the undo chain. */
501 last_inserted_nary = nary->next;
502 nary->next = (vn_nary_op_t)(void *)-1;
503 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
504 init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
505 boolean_type_node, ops);
506 nary->predicated_values = 0;
507 nary->u.result = boolean_false_node;
508 vn_nary_op_insert_into (nary, valid_info->nary);
509 gcc_assert (nary->unwind_to == NULL);
510 last_inserted_nary = nary->next;
511 nary->next = (vn_nary_op_t)(void *)-1;
512 if (dump_file && (dump_flags & TDF_DETAILS))
514 fprintf (dump_file, "Recording ");
515 print_generic_expr (dump_file, name, TDF_SLIM);
516 fprintf (dump_file, " != 0\n");
519 break;
521 case RESULT_DECL:
522 /* If the result is passed by invisible reference the default
523 def is initialized, otherwise it's uninitialized. Still
524 undefined is varying. */
525 newinfo->visited = true;
526 newinfo->valnum = name;
527 break;
529 default:
530 gcc_unreachable ();
532 return newinfo;
535 /* Return the SSA value of X. */
537 inline tree
538 SSA_VAL (tree x, bool *visited = NULL)
540 vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
541 if (visited)
542 *visited = tem && tem->visited;
543 return tem && tem->visited ? tem->valnum : x;
546 /* Return the SSA value of the VUSE x, supporting released VDEFs
547 during elimination which will value-number the VDEF to the
548 associated VUSE (but not substitute in the whole lattice). */
550 static inline tree
551 vuse_ssa_val (tree x)
553 if (!x)
554 return NULL_TREE;
558 x = SSA_VAL (x);
559 gcc_assert (x != VN_TOP);
561 while (SSA_NAME_IN_FREE_LIST (x));
563 return x;
566 /* Similar to the above but used as callback for walk_non_aliased_vuses
567 and thus should stop at unvisited VUSE to not walk across region
568 boundaries. */
570 static tree
571 vuse_valueize (tree vuse)
575 bool visited;
576 vuse = SSA_VAL (vuse, &visited);
577 if (!visited)
578 return NULL_TREE;
579 gcc_assert (vuse != VN_TOP);
581 while (SSA_NAME_IN_FREE_LIST (vuse));
582 return vuse;
586 /* Return the vn_kind the expression computed by the stmt should be
587 associated with. */
589 enum vn_kind
590 vn_get_stmt_kind (gimple *stmt)
592 switch (gimple_code (stmt))
594 case GIMPLE_CALL:
595 return VN_REFERENCE;
596 case GIMPLE_PHI:
597 return VN_PHI;
598 case GIMPLE_ASSIGN:
600 enum tree_code code = gimple_assign_rhs_code (stmt);
601 tree rhs1 = gimple_assign_rhs1 (stmt);
602 switch (get_gimple_rhs_class (code))
604 case GIMPLE_UNARY_RHS:
605 case GIMPLE_BINARY_RHS:
606 case GIMPLE_TERNARY_RHS:
607 return VN_NARY;
608 case GIMPLE_SINGLE_RHS:
609 switch (TREE_CODE_CLASS (code))
611 case tcc_reference:
612 /* VOP-less references can go through unary case. */
613 if ((code == REALPART_EXPR
614 || code == IMAGPART_EXPR
615 || code == VIEW_CONVERT_EXPR
616 || code == BIT_FIELD_REF)
617 && (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME
618 || is_gimple_min_invariant (TREE_OPERAND (rhs1, 0))))
619 return VN_NARY;
621 /* Fallthrough. */
622 case tcc_declaration:
623 return VN_REFERENCE;
625 case tcc_constant:
626 return VN_CONSTANT;
628 default:
629 if (code == ADDR_EXPR)
630 return (is_gimple_min_invariant (rhs1)
631 ? VN_CONSTANT : VN_REFERENCE);
632 else if (code == CONSTRUCTOR)
633 return VN_NARY;
634 return VN_NONE;
636 default:
637 return VN_NONE;
640 default:
641 return VN_NONE;
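/* For illustration (made-up SSA names): a load tmp_1 = a.b is
   VN_REFERENCE, tmp_2 = tmp_1 + 1 is VN_NARY, a PHI is VN_PHI, and
   __real of an SSA name is VN_NARY even though REALPART_EXPR is a
   tcc_reference code, per the VOP-less special-casing above.  */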
645 /* Lookup a value id for CONSTANT and return it. If it does not
646 exist, return 0. */
648 unsigned int
649 get_constant_value_id (tree constant)
651 vn_constant_s **slot;
652 struct vn_constant_s vc;
654 vc.hashcode = vn_hash_constant_with_type (constant);
655 vc.constant = constant;
656 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
657 if (slot)
658 return (*slot)->value_id;
659 return 0;
662 /* Lookup a value id for CONSTANT, and if it does not exist, create a
663 new one and return it. If it does exist, return it. */
665 unsigned int
666 get_or_alloc_constant_value_id (tree constant)
668 vn_constant_s **slot;
669 struct vn_constant_s vc;
670 vn_constant_t vcp;
672 /* If the hashtable isn't initialized we're not running from PRE and thus
673 do not need value-ids. */
674 if (!constant_to_value_id)
675 return 0;
677 vc.hashcode = vn_hash_constant_with_type (constant);
678 vc.constant = constant;
679 slot = constant_to_value_id->find_slot (&vc, INSERT);
680 if (*slot)
681 return (*slot)->value_id;
683 vcp = XNEW (struct vn_constant_s);
684 vcp->hashcode = vc.hashcode;
685 vcp->constant = constant;
686 vcp->value_id = get_next_constant_value_id ();
687 *slot = vcp;
688 return vcp->value_id;
691 /* Compute the hash for a reference operand VRO1. */
693 static void
694 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
696 hstate.add_int (vro1->opcode);
697 if (vro1->opcode == CALL_EXPR && !vro1->op0)
698 hstate.add_int (vro1->clique);
699 if (vro1->op0)
700 inchash::add_expr (vro1->op0, hstate);
701 if (vro1->op1)
702 inchash::add_expr (vro1->op1, hstate);
703 if (vro1->op2)
704 inchash::add_expr (vro1->op2, hstate);
707 /* Compute a hash for the reference operation VR1 and return it. */
709 static hashval_t
710 vn_reference_compute_hash (const vn_reference_t vr1)
712 inchash::hash hstate;
713 hashval_t result;
714 int i;
715 vn_reference_op_t vro;
716 poly_int64 off = -1;
717 bool deref = false;
719 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
721 if (vro->opcode == MEM_REF)
722 deref = true;
723 else if (vro->opcode != ADDR_EXPR)
724 deref = false;
725 if (maybe_ne (vro->off, -1))
727 if (known_eq (off, -1))
728 off = 0;
729 off += vro->off;
731 else
733 if (maybe_ne (off, -1)
734 && maybe_ne (off, 0))
735 hstate.add_poly_int (off);
736 off = -1;
737 if (deref
738 && vro->opcode == ADDR_EXPR)
740 if (vro->op0)
742 tree op = TREE_OPERAND (vro->op0, 0);
743 hstate.add_int (TREE_CODE (op));
744 inchash::add_expr (op, hstate);
747 else
748 vn_reference_op_compute_hash (vro, hstate);
751 /* Do not hash vr1->offset or vr1->max_size; we want to get collisions
752 to be able to identify compatible results. */
753 result = hstate.end ();
754 /* ??? We would ICE later if we hash instead of adding that in. */
755 if (vr1->vuse)
756 result += SSA_NAME_VERSION (vr1->vuse);
758 return result;
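/* For example (illustrative), MEM[&a + 8] and a reference to a field of
   a at byte offset 8 both accumulate the constant offset 8 and hash the
   decl a itself rather than its address, so the two land in the same
   bucket and vn_reference_eq can identify them as compatible.  */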
761 /* Return true if reference operations VR1 and VR2 are equivalent. This
762 means they have the same set of operands and vuses. */
764 bool
765 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
767 unsigned i, j;
769 /* Early out if this is not a hash collision. */
770 if (vr1->hashcode != vr2->hashcode)
771 return false;
773 /* The VOP needs to be the same. */
774 if (vr1->vuse != vr2->vuse)
775 return false;
777 /* The offset/max_size used for the ao_ref during lookup has to be
778 the same. */
779 if (maybe_ne (vr1->offset, vr2->offset)
780 || maybe_ne (vr1->max_size, vr2->max_size))
782 /* But a prevailing entry with nothing known is OK to be used. */
783 if (maybe_ne (vr1->offset, 0) || known_size_p (vr1->max_size))
784 return false;
787 /* If the operands are the same we are done. */
788 if (vr1->operands == vr2->operands)
789 return true;
791 if (!vr1->type || !vr2->type)
793 if (vr1->type != vr2->type)
794 return false;
796 else if (vr1->type == vr2->type)
798 else if (COMPLETE_TYPE_P (vr1->type) != COMPLETE_TYPE_P (vr2->type)
799 || (COMPLETE_TYPE_P (vr1->type)
800 && !expressions_equal_p (TYPE_SIZE (vr1->type),
801 TYPE_SIZE (vr2->type))))
802 return false;
803 else if (vr1->operands[0].opcode == CALL_EXPR
804 && !types_compatible_p (vr1->type, vr2->type))
805 return false;
806 else if (INTEGRAL_TYPE_P (vr1->type)
807 && INTEGRAL_TYPE_P (vr2->type))
809 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
810 return false;
812 else if (INTEGRAL_TYPE_P (vr1->type)
813 && (TYPE_PRECISION (vr1->type)
814 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
815 return false;
816 else if (INTEGRAL_TYPE_P (vr2->type)
817 && (TYPE_PRECISION (vr2->type)
818 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
819 return false;
820 else if (VECTOR_BOOLEAN_TYPE_P (vr1->type)
821 && VECTOR_BOOLEAN_TYPE_P (vr2->type))
823 /* Vector boolean types can have padding, verify we are dealing with
824 the same number of elements, aka the precision of the types.
825 For example, on most architectures the precision_size of vbool*_t
826 types is calculated like below:
827 precision_size = type_size * 8
829 Unfortunately, RISC-V adjusts the precision_size of
830 vbool*_t in order to align with the ISA, as below:
831 type_size = [1, 1, 1, 1, 2, 4, 8]
832 precision_size = [1, 2, 4, 8, 16, 32, 64]
834 Then the precision_size of RISC-V vbool*_t is not a multiple
835 of the type_size. We take care of that case here. */
836 if (maybe_ne (TYPE_VECTOR_SUBPARTS (vr1->type),
837 TYPE_VECTOR_SUBPARTS (vr2->type)))
838 return false;
841 i = 0;
842 j = 0;
845 poly_int64 off1 = 0, off2 = 0;
846 vn_reference_op_t vro1, vro2;
847 vn_reference_op_s tem1, tem2;
848 bool deref1 = false, deref2 = false;
849 bool reverse1 = false, reverse2 = false;
850 for (; vr1->operands.iterate (i, &vro1); i++)
852 if (vro1->opcode == MEM_REF)
853 deref1 = true;
854 /* Do not look through a storage order barrier. */
855 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
856 return false;
857 reverse1 |= vro1->reverse;
858 if (known_eq (vro1->off, -1))
859 break;
860 off1 += vro1->off;
862 for (; vr2->operands.iterate (j, &vro2); j++)
864 if (vro2->opcode == MEM_REF)
865 deref2 = true;
866 /* Do not look through a storage order barrier. */
867 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
868 return false;
869 reverse2 |= vro2->reverse;
870 if (known_eq (vro2->off, -1))
871 break;
872 off2 += vro2->off;
874 if (maybe_ne (off1, off2) || reverse1 != reverse2)
875 return false;
876 if (deref1 && vro1->opcode == ADDR_EXPR)
878 memset (&tem1, 0, sizeof (tem1));
879 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
880 tem1.type = TREE_TYPE (tem1.op0);
881 tem1.opcode = TREE_CODE (tem1.op0);
882 vro1 = &tem1;
883 deref1 = false;
885 if (deref2 && vro2->opcode == ADDR_EXPR)
887 memset (&tem2, 0, sizeof (tem2));
888 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
889 tem2.type = TREE_TYPE (tem2.op0);
890 tem2.opcode = TREE_CODE (tem2.op0);
891 vro2 = &tem2;
892 deref2 = false;
894 if (deref1 != deref2)
895 return false;
896 if (!vn_reference_op_eq (vro1, vro2))
897 return false;
898 ++j;
899 ++i;
901 while (vr1->operands.length () != i
902 || vr2->operands.length () != j);
904 return true;
907 /* Copy the operations present in load/store REF into RESULT, a vector of
908 vn_reference_op_s's. */
910 static void
911 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
913 /* For non-calls, store the information that makes up the address. */
914 tree orig = ref;
915 while (ref)
917 vn_reference_op_s temp;
919 memset (&temp, 0, sizeof (temp));
920 temp.type = TREE_TYPE (ref);
921 temp.opcode = TREE_CODE (ref);
922 temp.off = -1;
924 switch (temp.opcode)
926 case MODIFY_EXPR:
927 temp.op0 = TREE_OPERAND (ref, 1);
928 break;
929 case WITH_SIZE_EXPR:
930 temp.op0 = TREE_OPERAND (ref, 1);
931 temp.off = 0;
932 break;
933 case MEM_REF:
934 /* The base address gets its own vn_reference_op_s structure. */
935 temp.op0 = TREE_OPERAND (ref, 1);
936 if (!mem_ref_offset (ref).to_shwi (&temp.off))
937 temp.off = -1;
938 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
939 temp.base = MR_DEPENDENCE_BASE (ref);
940 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
941 break;
942 case TARGET_MEM_REF:
943 /* The base address gets its own vn_reference_op_s structure. */
944 temp.op0 = TMR_INDEX (ref);
945 temp.op1 = TMR_STEP (ref);
946 temp.op2 = TMR_OFFSET (ref);
947 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
948 temp.base = MR_DEPENDENCE_BASE (ref);
949 result->safe_push (temp);
950 memset (&temp, 0, sizeof (temp));
951 temp.type = NULL_TREE;
952 temp.opcode = ERROR_MARK;
953 temp.op0 = TMR_INDEX2 (ref);
954 temp.off = -1;
955 break;
956 case BIT_FIELD_REF:
957 /* Record bits, position and storage order. */
958 temp.op0 = TREE_OPERAND (ref, 1);
959 temp.op1 = TREE_OPERAND (ref, 2);
960 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
961 temp.off = -1;
962 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
963 break;
964 case COMPONENT_REF:
965 /* The field decl is enough to unambiguously specify the field,
966 so use its type here. */
967 temp.type = TREE_TYPE (TREE_OPERAND (ref, 1));
968 temp.op0 = TREE_OPERAND (ref, 1);
969 temp.op1 = TREE_OPERAND (ref, 2);
970 temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
971 && TYPE_REVERSE_STORAGE_ORDER
972 (TREE_TYPE (TREE_OPERAND (ref, 0))));
974 tree this_offset = component_ref_field_offset (ref);
975 if (this_offset
976 && poly_int_tree_p (this_offset))
978 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
979 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
981 poly_offset_int off
982 = (wi::to_poly_offset (this_offset)
983 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
984 /* Prohibit value-numbering zero offset components
985 of addresses the same before the pass folding
986 __builtin_object_size has had a chance to run. */
987 if (TREE_CODE (orig) != ADDR_EXPR
988 || maybe_ne (off, 0)
989 || (cfun->curr_properties & PROP_objsz))
990 off.to_shwi (&temp.off);
994 break;
995 case ARRAY_RANGE_REF:
996 case ARRAY_REF:
998 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
999 /* Record index as operand. */
1000 temp.op0 = TREE_OPERAND (ref, 1);
1001 /* Always record lower bounds and element size. */
1002 temp.op1 = array_ref_low_bound (ref);
1003 /* But record element size in units of the type alignment. */
1004 temp.op2 = TREE_OPERAND (ref, 3);
1005 temp.align = eltype->type_common.align;
1006 if (! temp.op2)
1007 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
1008 size_int (TYPE_ALIGN_UNIT (eltype)));
1009 if (poly_int_tree_p (temp.op0)
1010 && poly_int_tree_p (temp.op1)
1011 && TREE_CODE (temp.op2) == INTEGER_CST)
1013 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
1014 - wi::to_poly_offset (temp.op1))
1015 * wi::to_offset (temp.op2)
1016 * vn_ref_op_align_unit (&temp));
1017 off.to_shwi (&temp.off);
1019 temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
1020 && TYPE_REVERSE_STORAGE_ORDER
1021 (TREE_TYPE (TREE_OPERAND (ref, 0))));
1023 break;
1024 case VAR_DECL:
1025 if (DECL_HARD_REGISTER (ref))
1027 temp.op0 = ref;
1028 break;
1030 /* Fallthru. */
1031 case PARM_DECL:
1032 case CONST_DECL:
1033 case RESULT_DECL:
1034 /* Canonicalize decls to MEM[&decl] which is what we end up with
1035 when valueizing MEM[ptr] with ptr = &decl. */
1036 temp.opcode = MEM_REF;
1037 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
1038 temp.off = 0;
1039 result->safe_push (temp);
1040 temp.opcode = ADDR_EXPR;
1041 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
1042 temp.type = TREE_TYPE (temp.op0);
1043 temp.off = -1;
1044 break;
1045 case STRING_CST:
1046 case INTEGER_CST:
1047 case POLY_INT_CST:
1048 case COMPLEX_CST:
1049 case VECTOR_CST:
1050 case REAL_CST:
1051 case FIXED_CST:
1052 case CONSTRUCTOR:
1053 case SSA_NAME:
1054 temp.op0 = ref;
1055 break;
1056 case ADDR_EXPR:
1057 if (is_gimple_min_invariant (ref))
1059 temp.op0 = ref;
1060 break;
1062 break;
1063 /* These are only interesting for their operands, their
1064 existence, and their type. They will never be the last
1065 ref in the chain of references (i.e. they require an
1066 operand), so we don't have to put anything
1067 for op* as it will be handled by the iteration. */
1068 case REALPART_EXPR:
1069 temp.off = 0;
1070 break;
1071 case VIEW_CONVERT_EXPR:
1072 temp.off = 0;
1073 temp.reverse = storage_order_barrier_p (ref);
1074 break;
1075 case IMAGPART_EXPR:
1076 /* This is only interesting for its constant offset. */
1077 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
1078 break;
1079 default:
1080 gcc_unreachable ();
1082 result->safe_push (temp);
1084 if (REFERENCE_CLASS_P (ref)
1085 || TREE_CODE (ref) == MODIFY_EXPR
1086 || TREE_CODE (ref) == WITH_SIZE_EXPR
1087 || (TREE_CODE (ref) == ADDR_EXPR
1088 && !is_gimple_min_invariant (ref)))
1089 ref = TREE_OPERAND (ref, 0);
1090 else
1091 ref = NULL_TREE;
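/* As an illustration (names are made up), a load from a.b[i_1] where a
   is a plain VAR_DECL is decomposed, outermost operation first, into
   roughly

     ARRAY_REF      <op0 = i_1, op1 = lower bound, op2 = element size>
     COMPONENT_REF  <op0 = FIELD_DECL b>
     MEM_REF        <op0 = 0>
     ADDR_EXPR      <op0 = &a>

   with the decl canonicalized to MEM[&a] as described above.  */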
1095 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
1096 operands in *OPS, the reference alias set SET, the base alias set BASE_SET
1097 and the reference type TYPE. Return true if something useful was produced. */
1099 bool
1100 ao_ref_init_from_vn_reference (ao_ref *ref,
1101 alias_set_type set, alias_set_type base_set,
1102 tree type, const vec<vn_reference_op_s> &ops)
1104 unsigned i;
1105 tree base = NULL_TREE;
1106 tree *op0_p = &base;
1107 poly_offset_int offset = 0;
1108 poly_offset_int max_size;
1109 poly_offset_int size = -1;
1110 tree size_tree = NULL_TREE;
1112 /* We don't handle calls. */
1113 if (!type)
1114 return false;
1116 machine_mode mode = TYPE_MODE (type);
1117 if (mode == BLKmode)
1118 size_tree = TYPE_SIZE (type);
1119 else
1120 size = GET_MODE_BITSIZE (mode);
1121 if (size_tree != NULL_TREE
1122 && poly_int_tree_p (size_tree))
1123 size = wi::to_poly_offset (size_tree);
1125 /* Lower the final access size from the outermost expression. */
1126 const_vn_reference_op_t cst_op = &ops[0];
1127 /* Cast away constness for the sake of the const-unsafe
1128 FOR_EACH_VEC_ELT(). */
1129 vn_reference_op_t op = const_cast<vn_reference_op_t>(cst_op);
1130 size_tree = NULL_TREE;
1131 if (op->opcode == COMPONENT_REF)
1132 size_tree = DECL_SIZE (op->op0);
1133 else if (op->opcode == BIT_FIELD_REF)
1134 size_tree = op->op0;
1135 if (size_tree != NULL_TREE
1136 && poly_int_tree_p (size_tree)
1137 && (!known_size_p (size)
1138 || known_lt (wi::to_poly_offset (size_tree), size)))
1139 size = wi::to_poly_offset (size_tree);
1141 /* Initially, maxsize is the same as the accessed element size.
1142 In the following it will only grow (or become -1). */
1143 max_size = size;
1145 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1146 and find the ultimate containing object. */
1147 FOR_EACH_VEC_ELT (ops, i, op)
1149 switch (op->opcode)
1151 /* These may be in the reference ops, but we cannot do anything
1152 sensible with them here. */
1153 case ADDR_EXPR:
1154 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1155 if (base != NULL_TREE
1156 && TREE_CODE (base) == MEM_REF
1157 && op->op0
1158 && DECL_P (TREE_OPERAND (op->op0, 0)))
1160 const_vn_reference_op_t pop = &ops[i-1];
1161 base = TREE_OPERAND (op->op0, 0);
1162 if (known_eq (pop->off, -1))
1164 max_size = -1;
1165 offset = 0;
1167 else
1168 offset += pop->off * BITS_PER_UNIT;
1169 op0_p = NULL;
1170 break;
1172 /* Fallthru. */
1173 case CALL_EXPR:
1174 return false;
1176 /* Record the base objects. */
1177 case MEM_REF:
1178 *op0_p = build2 (MEM_REF, op->type,
1179 NULL_TREE, op->op0);
1180 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1181 MR_DEPENDENCE_BASE (*op0_p) = op->base;
1182 op0_p = &TREE_OPERAND (*op0_p, 0);
1183 break;
1185 case VAR_DECL:
1186 case PARM_DECL:
1187 case RESULT_DECL:
1188 case SSA_NAME:
1189 *op0_p = op->op0;
1190 op0_p = NULL;
1191 break;
1193 /* And now the usual component-reference style ops. */
1194 case BIT_FIELD_REF:
1195 offset += wi::to_poly_offset (op->op1);
1196 break;
1198 case COMPONENT_REF:
1200 tree field = op->op0;
1201 /* We do not have a complete COMPONENT_REF tree here so we
1202 cannot use component_ref_field_offset. Do the interesting
1203 parts manually. */
1204 tree this_offset = DECL_FIELD_OFFSET (field);
1206 if (op->op1 || !poly_int_tree_p (this_offset))
1207 max_size = -1;
1208 else
1210 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1211 << LOG2_BITS_PER_UNIT);
1212 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1213 offset += woffset;
1215 break;
1218 case ARRAY_RANGE_REF:
1219 case ARRAY_REF:
1220 /* Use the recorded constant offset. */
1221 if (maybe_eq (op->off, -1))
1222 max_size = -1;
1223 else
1224 offset += op->off * BITS_PER_UNIT;
1225 break;
1227 case REALPART_EXPR:
1228 break;
1230 case IMAGPART_EXPR:
1231 offset += size;
1232 break;
1234 case VIEW_CONVERT_EXPR:
1235 break;
1237 case STRING_CST:
1238 case INTEGER_CST:
1239 case COMPLEX_CST:
1240 case VECTOR_CST:
1241 case REAL_CST:
1242 case CONSTRUCTOR:
1243 case CONST_DECL:
1244 return false;
1246 default:
1247 return false;
1251 if (base == NULL_TREE)
1252 return false;
1254 ref->ref = NULL_TREE;
1255 ref->base = base;
1256 ref->ref_alias_set = set;
1257 ref->base_alias_set = base_set;
1258 /* We discount volatiles from value-numbering elsewhere. */
1259 ref->volatile_p = false;
1261 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1263 ref->offset = 0;
1264 ref->size = -1;
1265 ref->max_size = -1;
1266 return true;
1269 if (!offset.to_shwi (&ref->offset))
1271 ref->offset = 0;
1272 ref->max_size = -1;
1273 return true;
1276 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1277 ref->max_size = -1;
1279 return true;
1282 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1283 vn_reference_op_s's. */
1285 static void
1286 copy_reference_ops_from_call (gcall *call,
1287 vec<vn_reference_op_s> *result)
1289 vn_reference_op_s temp;
1290 unsigned i;
1291 tree lhs = gimple_call_lhs (call);
1292 int lr;
1294 /* If 2 calls have a different non-SSA lhs, vdef value numbers should be
1295 different. By adding the lhs to the vector here, we ensure that the
1296 hashcode is different, guaranteeing a different value number. */
1297 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1299 memset (&temp, 0, sizeof (temp));
1300 temp.opcode = MODIFY_EXPR;
1301 temp.type = TREE_TYPE (lhs);
1302 temp.op0 = lhs;
1303 temp.off = -1;
1304 result->safe_push (temp);
1307 /* Copy the type, opcode, function, static chain and EH region, if any. */
1308 memset (&temp, 0, sizeof (temp));
1309 temp.type = gimple_call_fntype (call);
1310 temp.opcode = CALL_EXPR;
1311 temp.op0 = gimple_call_fn (call);
1312 if (gimple_call_internal_p (call))
1313 temp.clique = gimple_call_internal_fn (call);
1314 temp.op1 = gimple_call_chain (call);
1315 if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1316 temp.op2 = size_int (lr);
1317 temp.off = -1;
1318 result->safe_push (temp);
1320 /* Copy the call arguments. As they can be references as well,
1321 just chain them together. */
1322 for (i = 0; i < gimple_call_num_args (call); ++i)
1324 tree callarg = gimple_call_arg (call, i);
1325 copy_reference_ops_from_ref (callarg, result);
1329 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1330 *I_P to point to the last element of the replacement. */
1331 static bool
1332 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1333 unsigned int *i_p)
1335 unsigned int i = *i_p;
1336 vn_reference_op_t op = &(*ops)[i];
1337 vn_reference_op_t mem_op = &(*ops)[i - 1];
1338 tree addr_base;
1339 poly_int64 addr_offset = 0;
1341 /* The only thing we have to do is, for &OBJ.foo.bar, add the offset
1342 of .foo.bar to the preceding MEM_REF offset and replace the
1343 address with &OBJ. */
1344 addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
1345 &addr_offset, vn_valueize);
1346 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1347 if (addr_base != TREE_OPERAND (op->op0, 0))
1349 poly_offset_int off
1350 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1351 SIGNED)
1352 + addr_offset);
1353 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1354 op->op0 = build_fold_addr_expr (addr_base);
1355 if (tree_fits_shwi_p (mem_op->op0))
1356 mem_op->off = tree_to_shwi (mem_op->op0);
1357 else
1358 mem_op->off = -1;
1359 return true;
1361 return false;
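/* For example (illustrative), with b at byte offset 8 in a, the pair
   { MEM_REF with offset 4, ADDR_EXPR <&a.b> } is rewritten by the
   function above to { MEM_REF with offset 12, ADDR_EXPR <&a> }.  */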
1364 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1365 *I_P to point to the last element of the replacement. */
1366 static bool
1367 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1368 unsigned int *i_p)
1370 bool changed = false;
1371 vn_reference_op_t op;
1375 unsigned int i = *i_p;
1376 op = &(*ops)[i];
1377 vn_reference_op_t mem_op = &(*ops)[i - 1];
1378 gimple *def_stmt;
1379 enum tree_code code;
1380 poly_offset_int off;
1382 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1383 if (!is_gimple_assign (def_stmt))
1384 return changed;
1386 code = gimple_assign_rhs_code (def_stmt);
1387 if (code != ADDR_EXPR
1388 && code != POINTER_PLUS_EXPR)
1389 return changed;
1391 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1393 /* The only thing we have to do is, for &OBJ.foo.bar, add the offset
1394 of .foo.bar to the preceding MEM_REF offset and replace the
1395 address with &OBJ. */
1396 if (code == ADDR_EXPR)
1398 tree addr, addr_base;
1399 poly_int64 addr_offset;
1401 addr = gimple_assign_rhs1 (def_stmt);
1402 addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
1403 &addr_offset,
1404 vn_valueize);
1405 /* If that didn't work because the address isn't invariant, propagate
1406 the reference tree from the address operation in case the current
1407 dereference isn't offsetted. */
1408 if (!addr_base
1409 && *i_p == ops->length () - 1
1410 && known_eq (off, 0)
1411 /* This makes us disable this transform for PRE where the
1412 reference ops might be also used for code insertion which
1413 is invalid. */
1414 && default_vn_walk_kind == VN_WALKREWRITE)
1416 auto_vec<vn_reference_op_s, 32> tem;
1417 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1418 /* Make sure to preserve TBAA info. The only objects not
1419 wrapped in MEM_REFs that can have their address taken are
1420 STRING_CSTs. */
1421 if (tem.length () >= 2
1422 && tem[tem.length () - 2].opcode == MEM_REF)
1424 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1425 new_mem_op->op0
1426 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1427 wi::to_poly_wide (new_mem_op->op0));
1429 else
1430 gcc_assert (tem.last ().opcode == STRING_CST);
1431 ops->pop ();
1432 ops->pop ();
1433 ops->safe_splice (tem);
1434 --*i_p;
1435 return true;
1437 if (!addr_base
1438 || TREE_CODE (addr_base) != MEM_REF
1439 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1440 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
1441 0))))
1442 return changed;
1444 off += addr_offset;
1445 off += mem_ref_offset (addr_base);
1446 op->op0 = TREE_OPERAND (addr_base, 0);
1448 else
1450 tree ptr, ptroff;
1451 ptr = gimple_assign_rhs1 (def_stmt);
1452 ptroff = gimple_assign_rhs2 (def_stmt);
1453 if (TREE_CODE (ptr) != SSA_NAME
1454 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1455 /* Make sure to not endlessly recurse.
1456 See gcc.dg/tree-ssa/20040408-1.c for an example. Can easily
1457 happen when we value-number a PHI to its backedge value. */
1458 || SSA_VAL (ptr) == op->op0
1459 || !poly_int_tree_p (ptroff))
1460 return changed;
1462 off += wi::to_poly_offset (ptroff);
1463 op->op0 = ptr;
1466 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1467 if (tree_fits_shwi_p (mem_op->op0))
1468 mem_op->off = tree_to_shwi (mem_op->op0);
1469 else
1470 mem_op->off = -1;
1471 /* ??? Can end up with endless recursion here!?
1472 gcc.c-torture/execute/strcmp-1.c */
1473 if (TREE_CODE (op->op0) == SSA_NAME)
1474 op->op0 = SSA_VAL (op->op0);
1475 if (TREE_CODE (op->op0) != SSA_NAME)
1476 op->opcode = TREE_CODE (op->op0);
1478 changed = true;
1480 /* Tail-recurse. */
1481 while (TREE_CODE (op->op0) == SSA_NAME);
1483 /* Fold a remaining *&. */
1484 if (TREE_CODE (op->op0) == ADDR_EXPR)
1485 vn_reference_fold_indirect (ops, i_p);
1487 return changed;
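/* For example (illustrative), if p_1 is defined as q_2 p+ 16 and the
   ops contain { MEM_REF with offset 4, SSA_NAME p_1 }, the address is
   forward-propagated to { MEM_REF with offset 20, SSA_NAME q_2 }.  */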
1490 /* Optimize the reference REF to a constant if possible or return
1491 NULL_TREE if not. */
1493 tree
1494 fully_constant_vn_reference_p (vn_reference_t ref)
1496 vec<vn_reference_op_s> operands = ref->operands;
1497 vn_reference_op_t op;
1499 /* Try to simplify the translated expression if it is
1500 a call to a builtin function with at most two arguments. */
1501 op = &operands[0];
1502 if (op->opcode == CALL_EXPR
1503 && (!op->op0
1504 || (TREE_CODE (op->op0) == ADDR_EXPR
1505 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1506 && fndecl_built_in_p (TREE_OPERAND (op->op0, 0),
1507 BUILT_IN_NORMAL)))
1508 && operands.length () >= 2
1509 && operands.length () <= 3)
1511 vn_reference_op_t arg0, arg1 = NULL;
1512 bool anyconst = false;
1513 arg0 = &operands[1];
1514 if (operands.length () > 2)
1515 arg1 = &operands[2];
1516 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1517 || (arg0->opcode == ADDR_EXPR
1518 && is_gimple_min_invariant (arg0->op0)))
1519 anyconst = true;
1520 if (arg1
1521 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1522 || (arg1->opcode == ADDR_EXPR
1523 && is_gimple_min_invariant (arg1->op0))))
1524 anyconst = true;
1525 if (anyconst)
1527 combined_fn fn;
1528 if (op->op0)
1529 fn = as_combined_fn (DECL_FUNCTION_CODE
1530 (TREE_OPERAND (op->op0, 0)));
1531 else
1532 fn = as_combined_fn ((internal_fn) op->clique);
1533 tree folded;
1534 if (arg1)
1535 folded = fold_const_call (fn, ref->type, arg0->op0, arg1->op0);
1536 else
1537 folded = fold_const_call (fn, ref->type, arg0->op0);
1538 if (folded
1539 && is_gimple_min_invariant (folded))
1540 return folded;
1544 /* Simplify reads from constants or constant initializers. */
1545 else if (BITS_PER_UNIT == 8
1546 && ref->type
1547 && COMPLETE_TYPE_P (ref->type)
1548 && is_gimple_reg_type (ref->type))
1550 poly_int64 off = 0;
1551 HOST_WIDE_INT size;
1552 if (INTEGRAL_TYPE_P (ref->type))
1553 size = TYPE_PRECISION (ref->type);
1554 else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
1555 size = tree_to_shwi (TYPE_SIZE (ref->type));
1556 else
1557 return NULL_TREE;
1558 if (size % BITS_PER_UNIT != 0
1559 || size > MAX_BITSIZE_MODE_ANY_MODE)
1560 return NULL_TREE;
1561 size /= BITS_PER_UNIT;
1562 unsigned i;
1563 for (i = 0; i < operands.length (); ++i)
1565 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1567 ++i;
1568 break;
1570 if (known_eq (operands[i].off, -1))
1571 return NULL_TREE;
1572 off += operands[i].off;
1573 if (operands[i].opcode == MEM_REF)
1575 ++i;
1576 break;
1579 vn_reference_op_t base = &operands[--i];
1580 tree ctor = error_mark_node;
1581 tree decl = NULL_TREE;
1582 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1583 ctor = base->op0;
1584 else if (base->opcode == MEM_REF
1585 && base[1].opcode == ADDR_EXPR
1586 && (VAR_P (TREE_OPERAND (base[1].op0, 0))
1587 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1588 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1590 decl = TREE_OPERAND (base[1].op0, 0);
1591 if (TREE_CODE (decl) == STRING_CST)
1592 ctor = decl;
1593 else
1594 ctor = ctor_for_folding (decl);
1596 if (ctor == NULL_TREE)
1597 return build_zero_cst (ref->type);
1598 else if (ctor != error_mark_node)
1600 HOST_WIDE_INT const_off;
1601 if (decl)
1603 tree res = fold_ctor_reference (ref->type, ctor,
1604 off * BITS_PER_UNIT,
1605 size * BITS_PER_UNIT, decl);
1606 if (res)
1608 STRIP_USELESS_TYPE_CONVERSION (res);
1609 if (is_gimple_min_invariant (res))
1610 return res;
1613 else if (off.is_constant (&const_off))
1615 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1616 int len = native_encode_expr (ctor, buf, size, const_off);
1617 if (len > 0)
1618 return native_interpret_expr (ref->type, buf, len);
1623 return NULL_TREE;
1626 /* Return true if OPS contain a storage order barrier. */
1628 static bool
1629 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1631 vn_reference_op_t op;
1632 unsigned i;
1634 FOR_EACH_VEC_ELT (ops, i, op)
1635 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1636 return true;
1638 return false;
1641 /* Return true if OPS represent an access with reverse storage order. */
1643 static bool
1644 reverse_storage_order_for_component_p (vec<vn_reference_op_s> ops)
1646 unsigned i = 0;
1647 if (ops[i].opcode == REALPART_EXPR || ops[i].opcode == IMAGPART_EXPR)
1648 ++i;
1649 switch (ops[i].opcode)
1651 case ARRAY_REF:
1652 case COMPONENT_REF:
1653 case BIT_FIELD_REF:
1654 case MEM_REF:
1655 return ops[i].reverse;
1656 default:
1657 return false;
1661 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1662 structures into their value numbers. This is done in-place, and
1663 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1664 whether any operands were valueized. */
1666 static void
1667 valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything,
1668 bool with_avail = false)
1670 *valueized_anything = false;
1672 for (unsigned i = 0; i < orig->length (); ++i)
1674 re_valueize:
1675 vn_reference_op_t vro = &(*orig)[i];
1676 if (vro->opcode == SSA_NAME
1677 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1679 tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1680 if (tem != vro->op0)
1682 *valueized_anything = true;
1683 vro->op0 = tem;
1685 /* If it transforms from an SSA_NAME to a constant, update
1686 the opcode. */
1687 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1688 vro->opcode = TREE_CODE (vro->op0);
1690 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1692 tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1693 if (tem != vro->op1)
1695 *valueized_anything = true;
1696 vro->op1 = tem;
1699 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1701 tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1702 if (tem != vro->op2)
1704 *valueized_anything = true;
1705 vro->op2 = tem;
1708 /* If it transforms from an SSA_NAME to an address, fold with
1709 a preceding indirect reference. */
1710 if (i > 0
1711 && vro->op0
1712 && TREE_CODE (vro->op0) == ADDR_EXPR
1713 && (*orig)[i - 1].opcode == MEM_REF)
1715 if (vn_reference_fold_indirect (orig, &i))
1716 *valueized_anything = true;
1718 else if (i > 0
1719 && vro->opcode == SSA_NAME
1720 && (*orig)[i - 1].opcode == MEM_REF)
1722 if (vn_reference_maybe_forwprop_address (orig, &i))
1724 *valueized_anything = true;
1725 /* Re-valueize the current operand. */
1726 goto re_valueize;
1729 /* If it transforms a non-constant ARRAY_REF into a constant
1730 one, adjust the constant offset. */
1731 else if ((vro->opcode == ARRAY_REF
1732 || vro->opcode == ARRAY_RANGE_REF)
1733 && known_eq (vro->off, -1)
1734 && poly_int_tree_p (vro->op0)
1735 && poly_int_tree_p (vro->op1)
1736 && TREE_CODE (vro->op2) == INTEGER_CST)
1738 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1739 - wi::to_poly_offset (vro->op1))
1740 * wi::to_offset (vro->op2)
1741 * vn_ref_op_align_unit (vro));
1742 off.to_shwi (&vro->off);
1747 static void
1748 valueize_refs (vec<vn_reference_op_s> *orig)
1750 bool tem;
1751 valueize_refs_1 (orig, &tem);
1754 static vec<vn_reference_op_s> shared_lookup_references;
1756 /* Create a vector of vn_reference_op_s structures from REF, a
1757 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1758 this function. *VALUEIZED_ANYTHING will specify whether any
1759 operands were valueized. */
1761 static vec<vn_reference_op_s>
1762 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1764 if (!ref)
1765 return vNULL;
1766 shared_lookup_references.truncate (0);
1767 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1768 valueize_refs_1 (&shared_lookup_references, valueized_anything);
1769 return shared_lookup_references;
1772 /* Create a vector of vn_reference_op_s structures from CALL, a
1773 call statement. The vector is shared among all callers of
1774 this function. */
1776 static vec<vn_reference_op_s>
1777 valueize_shared_reference_ops_from_call (gcall *call)
1779 if (!call)
1780 return vNULL;
1781 shared_lookup_references.truncate (0);
1782 copy_reference_ops_from_call (call, &shared_lookup_references);
1783 valueize_refs (&shared_lookup_references);
1784 return shared_lookup_references;
1787 /* Lookup a SCCVN reference operation VR in the current hash table.
1788 Returns the resulting value number if it exists in the hash table,
1789 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1790 vn_reference_t stored in the hashtable if something is found. */
1792 static tree
1793 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1795 vn_reference_s **slot;
1796 hashval_t hash;
1798 hash = vr->hashcode;
1799 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1800 if (slot)
1802 if (vnresult)
1803 *vnresult = (vn_reference_t)*slot;
1804 return ((vn_reference_t)*slot)->result;
1807 return NULL_TREE;
1811 /* Partial definition tracking support. */
1813 struct pd_range
1815 HOST_WIDE_INT offset;
1816 HOST_WIDE_INT size;
1819 struct pd_data
1821 tree rhs;
1822 HOST_WIDE_INT rhs_off;
1823 HOST_WIDE_INT offset;
1824 HOST_WIDE_INT size;
1827 /* Context for alias walking. */
1829 struct vn_walk_cb_data
1831 vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
1832 vn_lookup_kind vn_walk_kind_, bool tbaa_p_, tree mask_,
1833 bool redundant_store_removal_p_)
1834 : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
1835 mask (mask_), masked_result (NULL_TREE), same_val (NULL_TREE),
1836 vn_walk_kind (vn_walk_kind_),
1837 tbaa_p (tbaa_p_), redundant_store_removal_p (redundant_store_removal_p_),
1838 saved_operands (vNULL), first_set (-2), first_base_set (-2),
1839 known_ranges (NULL)
1841 if (!last_vuse_ptr)
1842 last_vuse_ptr = &last_vuse;
1843 ao_ref_init (&orig_ref, orig_ref_);
1844 if (mask)
1846 wide_int w = wi::to_wide (mask);
1847 unsigned int pos = 0, prec = w.get_precision ();
1848 pd_data pd;
1849 pd.rhs = build_constructor (NULL_TREE, NULL);
1850 pd.rhs_off = 0;
1851 /* When a bitwise AND with a constant is done on a memory load,
1852 we don't really need all the bits to be defined or defined
1853 to constants; we don't really care what is in the positions
1854 corresponding to 0 bits in the mask.
1855 So, push the ranges of those 0 bits in the mask as artificial
1856 zero stores and let the partial def handling code do the
1857 rest. */
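/* As a concrete (illustrative) case: for a mask of 0xf0 on an 8-bit
   little-endian load, the loop below pushes one artificial zero store
   covering bits [0, 4), so only bits [4, 8) still need to be found
   defined by real partial defs.  */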
1858 while (pos < prec)
1860 int tz = wi::ctz (w);
1861 if (pos + tz > prec)
1862 tz = prec - pos;
1863 if (tz)
1865 if (BYTES_BIG_ENDIAN)
1866 pd.offset = prec - pos - tz;
1867 else
1868 pd.offset = pos;
1869 pd.size = tz;
1870 void *r = push_partial_def (pd, 0, 0, 0, prec);
1871 gcc_assert (r == NULL_TREE);
1873 pos += tz;
1874 if (pos == prec)
1875 break;
1876 w = wi::lrshift (w, tz);
1877 tz = wi::ctz (wi::bit_not (w));
1878 if (pos + tz > prec)
1879 tz = prec - pos;
1880 pos += tz;
1881 w = wi::lrshift (w, tz);
1885 ~vn_walk_cb_data ();
1886 void *finish (alias_set_type, alias_set_type, tree);
1887 void *push_partial_def (pd_data pd,
1888 alias_set_type, alias_set_type, HOST_WIDE_INT,
1889 HOST_WIDE_INT);
1891 vn_reference_t vr;
1892 ao_ref orig_ref;
1893 tree *last_vuse_ptr;
1894 tree last_vuse;
1895 tree mask;
1896 tree masked_result;
1897 tree same_val;
1898 vn_lookup_kind vn_walk_kind;
1899 bool tbaa_p;
1900 bool redundant_store_removal_p;
1901 vec<vn_reference_op_s> saved_operands;
1903 /* The VDEFs of partial defs we come along. */
1904 auto_vec<pd_data, 2> partial_defs;
1905 /* The first def's range, to avoid splay tree setup in most cases. */
1906 pd_range first_range;
1907 alias_set_type first_set;
1908 alias_set_type first_base_set;
1909 splay_tree known_ranges;
1910 obstack ranges_obstack;
1911 static constexpr HOST_WIDE_INT bufsize = 64;
1914 vn_walk_cb_data::~vn_walk_cb_data ()
1916 if (known_ranges)
1918 splay_tree_delete (known_ranges);
1919 obstack_free (&ranges_obstack, NULL);
1921 saved_operands.release ();
1924 void *
1925 vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
1927 if (first_set != -2)
1929 set = first_set;
1930 base_set = first_base_set;
1932 if (mask)
1934 masked_result = val;
1935 return (void *) -1;
1937 if (same_val && !operand_equal_p (val, same_val))
1938 return (void *) -1;
1939 vec<vn_reference_op_s> &operands
1940 = saved_operands.exists () ? saved_operands : vr->operands;
1941 return vn_reference_lookup_or_insert_for_pieces (last_vuse, set, base_set,
1942 vr->offset, vr->max_size,
1943 vr->type, operands, val);
1946 /* pd_range splay-tree helpers. */
1948 static int
1949 pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
1951 HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
1952 HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
1953 if (offset1 < offset2)
1954 return -1;
1955 else if (offset1 > offset2)
1956 return 1;
1957 return 0;
1960 static void *
1961 pd_tree_alloc (int size, void *data_)
1963 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1964 return obstack_alloc (&data->ranges_obstack, size);
1967 static void
1968 pd_tree_dealloc (void *, void *)
1972 /* Push PD to the vector of partial definitions. Return a
1973 value when we are ready to combine things with VUSE, SET and MAXSIZEI,
1974 NULL when we want to continue looking for partial defs, or -1
1975 on failure. */
1977 void *
1978 vn_walk_cb_data::push_partial_def (pd_data pd,
1979 alias_set_type set, alias_set_type base_set,
1980 HOST_WIDE_INT offseti,
1981 HOST_WIDE_INT maxsizei)
1983 /* We're using a fixed buffer for encoding so fail early if the object
1984 we want to interpret is bigger. */
1985 if (maxsizei > bufsize * BITS_PER_UNIT
1986 || CHAR_BIT != 8
1987 || BITS_PER_UNIT != 8
1988 /* Not prepared to handle PDP endian. */
1989 || BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
1990 return (void *)-1;
1992 /* Turn too large constant stores into non-constant stores. */
1993 if (CONSTANT_CLASS_P (pd.rhs) && pd.size > bufsize * BITS_PER_UNIT)
1994 pd.rhs = error_mark_node;
1996 /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at
1997 most a partial byte before and/or after the region. */
1998 if (!CONSTANT_CLASS_P (pd.rhs))
2000 if (pd.offset < offseti)
2002 HOST_WIDE_INT o = ROUND_DOWN (offseti - pd.offset, BITS_PER_UNIT);
2003 gcc_assert (pd.size > o);
2004 pd.size -= o;
2005 pd.offset += o;
2007 if (pd.size > maxsizei)
2008 pd.size = maxsizei + ((pd.size - maxsizei) % BITS_PER_UNIT);
2011 pd.offset -= offseti;
2013 bool pd_constant_p = (TREE_CODE (pd.rhs) == CONSTRUCTOR
2014 || CONSTANT_CLASS_P (pd.rhs));
2015 pd_range *r;
2016 if (partial_defs.is_empty ())
2018 /* If we get a clobber upfront, fail. */
2019 if (TREE_CLOBBER_P (pd.rhs))
2020 return (void *)-1;
2021 if (!pd_constant_p)
2022 return (void *)-1;
2023 partial_defs.safe_push (pd);
2024 first_range.offset = pd.offset;
2025 first_range.size = pd.size;
2026 first_set = set;
2027 first_base_set = base_set;
2028 last_vuse_ptr = NULL;
2029 r = &first_range;
2030 /* Go check if the first partial definition was a full one in case
2031 the caller didn't optimize for this. */
2033 else
2035 if (!known_ranges)
2037 /* ??? Optimize the case where the 2nd partial def completes
2038 things. */
2039 gcc_obstack_init (&ranges_obstack);
2040 known_ranges = splay_tree_new_with_allocator (pd_range_compare, 0, 0,
2041 pd_tree_alloc,
2042 pd_tree_dealloc, this);
2043 splay_tree_insert (known_ranges,
2044 (splay_tree_key)&first_range.offset,
2045 (splay_tree_value)&first_range);
2048 pd_range newr = { pd.offset, pd.size };
2049 splay_tree_node n;
2050 /* Lookup the predecessor of offset + 1 and see if we need to merge. */
2051 HOST_WIDE_INT loffset = newr.offset + 1;
2052 if ((n = splay_tree_predecessor (known_ranges, (splay_tree_key)&loffset))
2053 && ((r = (pd_range *)n->value), true)
2054 && ranges_known_overlap_p (r->offset, r->size + 1,
2055 newr.offset, newr.size))
2057 /* Ignore partial defs already covered. This also drops shadowed
2058 clobbers arriving here on the floor. */
2059 if (known_subrange_p (newr.offset, newr.size, r->offset, r->size))
2060 return NULL;
2061 r->size
2062 = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
2064 else
2066 /* newr.offset wasn't covered yet, insert the range. */
2067 r = XOBNEW (&ranges_obstack, pd_range);
2068 *r = newr;
2069 splay_tree_insert (known_ranges, (splay_tree_key)&r->offset,
2070 (splay_tree_value)r);
2072 /* Merge r which now contains newr and is a member of the splay tree with
2073 adjacent overlapping ranges. */
2074 pd_range *rafter;
2075 while ((n = splay_tree_successor (known_ranges,
2076 (splay_tree_key)&r->offset))
2077 && ((rafter = (pd_range *)n->value), true)
2078 && ranges_known_overlap_p (r->offset, r->size + 1,
2079 rafter->offset, rafter->size))
2081 r->size = MAX (r->offset + r->size,
2082 rafter->offset + rafter->size) - r->offset;
2083 splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset);
2085 /* If we get a clobber, fail. */
2086 if (TREE_CLOBBER_P (pd.rhs))
2087 return (void *)-1;
2088 /* Non-constants are OK as long as they are shadowed by a constant. */
2089 if (!pd_constant_p)
2090 return (void *)-1;
2091 partial_defs.safe_push (pd);
2094 /* Now we have merged newr into the range tree. When we have covered
2095 the whole [0, maxsizei] range the tree will contain exactly one node
2096 which has the desired properties, and it will be 'r'. */
2097 if (!known_subrange_p (0, maxsizei, r->offset, r->size))
2098 /* Continue looking for partial defs. */
2099 return NULL;
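/* E.g. (illustrative): with MAXSIZEI 64, a first constant store of 32 bits
   at offset 0 is recorded in first_range.  A second store of 32 bits at
   offset 32 then looks up the predecessor of offset 33, finds the range
   (0, 32) and, because ranges_known_overlap_p is called with size + 1 so
   that adjacency counts as overlap, merges the two into (0, 64).  At that
   point known_subrange_p (0, 64, 0, 64) holds and we fall through to the
   encoding step below instead of returning NULL.  */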
2101 /* Now simply native encode all partial defs in reverse order. */
2102 unsigned ndefs = partial_defs.length ();
2103 /* We support up to 512-bit values (for V8DFmode). */
2104 unsigned char buffer[bufsize + 1];
2105 unsigned char this_buffer[bufsize + 1];
2106 int len;
2108 memset (buffer, 0, bufsize + 1);
2109 unsigned needed_len = ROUND_UP (maxsizei, BITS_PER_UNIT) / BITS_PER_UNIT;
2110 while (!partial_defs.is_empty ())
2112 pd_data pd = partial_defs.pop ();
2113 unsigned int amnt;
2114 if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
2116 /* Empty CONSTRUCTOR. */
2117 if (pd.size >= needed_len * BITS_PER_UNIT)
2118 len = needed_len;
2119 else
2120 len = ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT;
2121 memset (this_buffer, 0, len);
2123 else if (pd.rhs_off >= 0)
2125 len = native_encode_expr (pd.rhs, this_buffer, bufsize,
2126 (MAX (0, -pd.offset)
2127 + pd.rhs_off) / BITS_PER_UNIT);
2128 if (len <= 0
2129 || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
2130 - MAX (0, -pd.offset) / BITS_PER_UNIT))
2132 if (dump_file && (dump_flags & TDF_DETAILS))
2133 fprintf (dump_file, "Failed to encode %u "
2134 "partial definitions\n", ndefs);
2135 return (void *)-1;
2138 else /* A negative pd.rhs_off indicates chopping off the first bits. */
2140 if (-pd.rhs_off >= bufsize)
2141 return (void *)-1;
2142 len = native_encode_expr (pd.rhs,
2143 this_buffer + -pd.rhs_off / BITS_PER_UNIT,
2144 bufsize - -pd.rhs_off / BITS_PER_UNIT,
2145 MAX (0, -pd.offset) / BITS_PER_UNIT);
2146 if (len <= 0
2147 || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
2148 - MAX (0, -pd.offset) / BITS_PER_UNIT))
2150 if (dump_file && (dump_flags & TDF_DETAILS))
2151 fprintf (dump_file, "Failed to encode %u "
2152 "partial definitions\n", ndefs);
2153 return (void *)-1;
2157 unsigned char *p = buffer;
2158 HOST_WIDE_INT size = pd.size;
2159 if (pd.offset < 0)
2160 size -= ROUND_DOWN (-pd.offset, BITS_PER_UNIT);
2161 this_buffer[len] = 0;
2162 if (BYTES_BIG_ENDIAN)
2164 /* LSB of this_buffer[len - 1] byte should be at
2165 pd.offset + pd.size - 1 bits in buffer. */
2166 amnt = ((unsigned HOST_WIDE_INT) pd.offset
2167 + pd.size) % BITS_PER_UNIT;
2168 if (amnt)
2169 shift_bytes_in_array_right (this_buffer, len + 1, amnt);
2170 unsigned char *q = this_buffer;
2171 unsigned int off = 0;
2172 if (pd.offset >= 0)
2174 unsigned int msk;
2175 off = pd.offset / BITS_PER_UNIT;
2176 gcc_assert (off < needed_len);
2177 p = buffer + off;
2178 if (size <= amnt)
2180 msk = ((1 << size) - 1) << (BITS_PER_UNIT - amnt);
2181 *p = (*p & ~msk) | (this_buffer[len] & msk);
2182 size = 0;
2184 else
2186 if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2187 q = (this_buffer + len
2188 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2189 / BITS_PER_UNIT));
2190 if (pd.offset % BITS_PER_UNIT)
2192 msk = -1U << (BITS_PER_UNIT
2193 - (pd.offset % BITS_PER_UNIT));
2194 *p = (*p & msk) | (*q & ~msk);
2195 p++;
2196 q++;
2197 off++;
2198 size -= BITS_PER_UNIT - (pd.offset % BITS_PER_UNIT);
2199 gcc_assert (size >= 0);
2203 else if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2205 q = (this_buffer + len
2206 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2207 / BITS_PER_UNIT));
2208 if (pd.offset % BITS_PER_UNIT)
2210 q++;
2211 size -= BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) pd.offset
2212 % BITS_PER_UNIT);
2213 gcc_assert (size >= 0);
2216 if ((unsigned HOST_WIDE_INT) size / BITS_PER_UNIT + off
2217 > needed_len)
2218 size = (needed_len - off) * BITS_PER_UNIT;
2219 memcpy (p, q, size / BITS_PER_UNIT);
2220 if (size % BITS_PER_UNIT)
2222 unsigned int msk
2223 = -1U << (BITS_PER_UNIT - (size % BITS_PER_UNIT));
2224 p += size / BITS_PER_UNIT;
2225 q += size / BITS_PER_UNIT;
2226 *p = (*q & msk) | (*p & ~msk);
2229 else
2231 if (pd.offset >= 0)
2233 /* LSB of this_buffer[0] byte should be at pd.offset bits
2234 in buffer. */
2235 unsigned int msk;
2236 size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2237 amnt = pd.offset % BITS_PER_UNIT;
2238 if (amnt)
2239 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2240 unsigned int off = pd.offset / BITS_PER_UNIT;
2241 gcc_assert (off < needed_len);
2242 size = MIN (size,
2243 (HOST_WIDE_INT) (needed_len - off) * BITS_PER_UNIT);
2244 p = buffer + off;
2245 if (amnt + size < BITS_PER_UNIT)
2247 /* Low amnt bits come from *p, then size bits
2248 from this_buffer[0] and the remaining again from
2249 *p. */
2250 msk = ((1 << size) - 1) << amnt;
2251 *p = (*p & ~msk) | (this_buffer[0] & msk);
2252 size = 0;
2254 else if (amnt)
2256 msk = -1U << amnt;
2257 *p = (*p & ~msk) | (this_buffer[0] & msk);
2258 p++;
2259 size -= (BITS_PER_UNIT - amnt);
2262 else
2264 amnt = (unsigned HOST_WIDE_INT) pd.offset % BITS_PER_UNIT;
2265 if (amnt)
2266 size -= BITS_PER_UNIT - amnt;
2267 size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2268 if (amnt)
2269 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2271 memcpy (p, this_buffer + (amnt != 0), size / BITS_PER_UNIT);
2272 p += size / BITS_PER_UNIT;
2273 if (size % BITS_PER_UNIT)
2275 unsigned int msk = -1U << (size % BITS_PER_UNIT);
2276 *p = (this_buffer[(amnt != 0) + size / BITS_PER_UNIT]
2277 & ~msk) | (*p & msk);
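/* E.g. (illustrative, little-endian): two 16-bit constant partial defs at
   bit offsets 0 and 16 relative to OFFSETI are popped one at a time; each
   is encoded into this_buffer by native_encode_expr and, since both
   offsets are byte-aligned (amnt == 0), simply memcpy'd into buffer at
   byte offsets 0 and 2.  The merged four bytes are then interpreted as a
   single 32-bit value below.  */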
2282 tree type = vr->type;
2283 /* Make sure to interpret in a type that has a range covering the whole
2284 access size. */
2285 if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
2286 type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
2287 tree val;
2288 if (BYTES_BIG_ENDIAN)
2290 unsigned sz = needed_len;
2291 if (maxsizei % BITS_PER_UNIT)
2292 shift_bytes_in_array_right (buffer, needed_len,
2293 BITS_PER_UNIT
2294 - (maxsizei % BITS_PER_UNIT));
2295 if (INTEGRAL_TYPE_P (type))
2297 if (TYPE_MODE (type) != BLKmode)
2298 sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
2299 else
2300 sz = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
2302 if (sz > needed_len)
2304 memcpy (this_buffer + (sz - needed_len), buffer, needed_len);
2305 val = native_interpret_expr (type, this_buffer, sz);
2307 else
2308 val = native_interpret_expr (type, buffer, needed_len);
2310 else
2311 val = native_interpret_expr (type, buffer, bufsize);
2312 /* If we chop off bits because the type's precision doesn't match the memory
2313 access size, this is ok when optimizing reads but not when called from
2314 the DSE code during elimination. */
2315 if (val && type != vr->type)
2317 if (! int_fits_type_p (val, vr->type))
2318 val = NULL_TREE;
2319 else
2320 val = fold_convert (vr->type, val);
2323 if (val)
2325 if (dump_file && (dump_flags & TDF_DETAILS))
2326 fprintf (dump_file,
2327 "Successfully combined %u partial definitions\n", ndefs);
2328 /* We are using the alias-set of the first store we encounter which
2329 should be appropriate here. */
2330 return finish (first_set, first_base_set, val);
2332 else
2334 if (dump_file && (dump_flags & TDF_DETAILS))
2335 fprintf (dump_file,
2336 "Failed to interpret %u encoded partial definitions\n", ndefs);
2337 return (void *)-1;
2341 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
2342 with the current VUSE and performs the expression lookup. */
2344 static void *
2345 vn_reference_lookup_2 (ao_ref *op, tree vuse, void *data_)
2347 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2348 vn_reference_t vr = data->vr;
2349 vn_reference_s **slot;
2350 hashval_t hash;
2352 /* If we have partial definitions recorded we have to go through
2353 vn_reference_lookup_3. */
2354 if (!data->partial_defs.is_empty ())
2355 return NULL;
2357 if (data->last_vuse_ptr)
2359 *data->last_vuse_ptr = vuse;
2360 data->last_vuse = vuse;
2363 /* Fixup vuse and hash. */
2364 if (vr->vuse)
2365 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
2366 vr->vuse = vuse_ssa_val (vuse);
2367 if (vr->vuse)
2368 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
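/* Note the reference hash mixes in SSA_NAME_VERSION of the VUSE
   additively, so switching to a new VUSE only needs subtracting the old
   version and adding the new one instead of recomputing the hash.  */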
2370 hash = vr->hashcode;
2371 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
2372 if (slot)
2374 if ((*slot)->result && data->saved_operands.exists ())
2375 return data->finish (vr->set, vr->base_set, (*slot)->result);
2376 return *slot;
2379 if (SSA_NAME_IS_DEFAULT_DEF (vuse))
2381 HOST_WIDE_INT op_offset, op_size;
2382 tree v = NULL_TREE;
2383 tree base = ao_ref_base (op);
2385 if (base
2386 && op->offset.is_constant (&op_offset)
2387 && op->size.is_constant (&op_size)
2388 && op->max_size_known_p ()
2389 && known_eq (op->size, op->max_size))
2391 if (TREE_CODE (base) == PARM_DECL)
2392 v = ipcp_get_aggregate_const (cfun, base, false, op_offset,
2393 op_size);
2394 else if (TREE_CODE (base) == MEM_REF
2395 && integer_zerop (TREE_OPERAND (base, 1))
2396 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
2397 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
2398 && (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (base, 0)))
2399 == PARM_DECL))
2400 v = ipcp_get_aggregate_const (cfun,
2401 SSA_NAME_VAR (TREE_OPERAND (base, 0)),
2402 true, op_offset, op_size);
2404 if (v)
2405 return data->finish (vr->set, vr->base_set, v);
2408 return NULL;
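/* Reaching a default definition VUSE means the walk got to function entry
   without hitting an aliasing store.  In that case a load from a PARM_DECL
   (or from dereferencing a default-def parameter SSA name) may still be
   resolvable from aggregate constants propagated by IPA-CP, which is what
   the ipcp_get_aggregate_const calls above attempt.  */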
2411 /* Look up an existing or insert a new vn_reference entry into the
2412 value table for the VUSE, SET, TYPE, OPERANDS reference which
2413 has the value VALUE, either a constant or an SSA name. */
2415 static vn_reference_t
2416 vn_reference_lookup_or_insert_for_pieces (tree vuse,
2417 alias_set_type set,
2418 alias_set_type base_set,
2419 poly_int64 offset,
2420 poly_int64 max_size,
2421 tree type,
2422 vec<vn_reference_op_s,
2423 va_heap> operands,
2424 tree value)
2426 vn_reference_s vr1;
2427 vn_reference_t result;
2428 unsigned value_id;
2429 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2430 vr1.operands = operands;
2431 vr1.type = type;
2432 vr1.set = set;
2433 vr1.base_set = base_set;
2434 vr1.offset = offset;
2435 vr1.max_size = max_size;
2436 vr1.hashcode = vn_reference_compute_hash (&vr1);
2437 if (vn_reference_lookup_1 (&vr1, &result))
2438 return result;
2440 if (TREE_CODE (value) == SSA_NAME)
2441 value_id = VN_INFO (value)->value_id;
2442 else
2443 value_id = get_or_alloc_constant_value_id (value);
2444 return vn_reference_insert_pieces (vuse, set, base_set, offset, max_size,
2445 type, operands.copy (), value, value_id);
2448 /* Return a value-number for RCODE OPS... either by looking up an existing
2449 value-number for the possibly simplified result or by inserting the
2450 operation if INSERT is true. If SIMPLIFY is false, return a value
2451 number for the unsimplified expression. */
2453 static tree
2454 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert,
2455 bool simplify)
2457 tree result = NULL_TREE;
2458 /* We will be creating a value number for
2459 RCODE (OPS...).
2460 So first simplify and lookup this expression to see if it
2461 is already available. */
2462 /* For simplification valueize. */
2463 unsigned i = 0;
2464 if (simplify)
2465 for (i = 0; i < res_op->num_ops; ++i)
2466 if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
2468 tree tem = vn_valueize (res_op->ops[i]);
2469 if (!tem)
2470 break;
2471 res_op->ops[i] = tem;
2473 /* If valueization of an operand fails (it is not available), skip
2474 simplification. */
2475 bool res = false;
2476 if (i == res_op->num_ops)
2478 mprts_hook = vn_lookup_simplify_result;
2479 res = res_op->resimplify (NULL, vn_valueize);
2480 mprts_hook = NULL;
2482 gimple *new_stmt = NULL;
2483 if (res
2484 && gimple_simplified_result_is_gimple_val (res_op))
2486 /* The expression is already available. */
2487 result = res_op->ops[0];
2488 /* Valueize it; simplification returns something in AVAIL only. */
2489 if (TREE_CODE (result) == SSA_NAME)
2490 result = SSA_VAL (result);
2492 else
2494 tree val = vn_lookup_simplify_result (res_op);
2495 if (!val && insert)
2497 gimple_seq stmts = NULL;
2498 result = maybe_push_res_to_seq (res_op, &stmts);
2499 if (result)
2501 gcc_assert (gimple_seq_singleton_p (stmts));
2502 new_stmt = gimple_seq_first_stmt (stmts);
2505 else
2506 /* The expression is already available. */
2507 result = val;
2509 if (new_stmt)
2511 /* The expression is not yet available, value-number lhs to
2512 the new SSA_NAME we created. */
2513 /* Initialize value-number information properly. */
2514 vn_ssa_aux_t result_info = VN_INFO (result);
2515 result_info->valnum = result;
2516 result_info->value_id = get_next_value_id ();
2517 result_info->visited = 1;
2518 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
2519 new_stmt);
2520 result_info->needs_insertion = true;
2521 /* ??? PRE phi-translation inserts NARYs without corresponding
2522 SSA name result. Re-use those but set their result according
2523 to the stmt we just built. */
2524 vn_nary_op_t nary = NULL;
2525 vn_nary_op_lookup_stmt (new_stmt, &nary);
2526 if (nary)
2528 gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
2529 nary->u.result = gimple_assign_lhs (new_stmt);
2531 /* As all "inserted" statements are singleton SCCs, insert
2532 to the valid table. This is strictly needed to
2533 avoid re-generating new value SSA_NAMEs for the same
2534 expression during SCC iteration over and over (the
2535 optimistic table gets cleared after each iteration).
2536 We do not need to insert into the optimistic table, as
2537 lookups there will fall back to the valid table. */
2538 else
2540 unsigned int length = vn_nary_length_from_stmt (new_stmt);
2541 vn_nary_op_t vno1
2542 = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
2543 vno1->value_id = result_info->value_id;
2544 vno1->length = length;
2545 vno1->predicated_values = 0;
2546 vno1->u.result = result;
2547 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (new_stmt));
2548 vn_nary_op_insert_into (vno1, valid_info->nary);
2549 /* Also do not link it into the undo chain. */
2550 last_inserted_nary = vno1->next;
2551 vno1->next = (vn_nary_op_t)(void *)-1;
2553 if (dump_file && (dump_flags & TDF_DETAILS))
2555 fprintf (dump_file, "Inserting name ");
2556 print_generic_expr (dump_file, result);
2557 fprintf (dump_file, " for expression ");
2558 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
2559 fprintf (dump_file, "\n");
2562 return result;
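/* E.g. (illustrative): a caller wanting a value number for x_1 + 0 can
   build
     gimple_match_op op (gimple_match_cond::UNCOND, PLUS_EXPR,
                         TREE_TYPE (x_1), x_1,
                         build_zero_cst (TREE_TYPE (x_1)));
   and hand it to vn_nary_build_or_lookup below; resimplification reduces
   it to x_1 and its SSA_VAL is returned without inserting anything.  */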
2565 /* Return a value-number for RCODE OPS... either by looking up an existing
2566 value-number for the simplified result or by inserting the operation. */
2568 static tree
2569 vn_nary_build_or_lookup (gimple_match_op *res_op)
2571 return vn_nary_build_or_lookup_1 (res_op, true, true);
2574 /* Try to simplify the expression encoded by NARY and return
2575 its value if present. */
2577 tree
2578 vn_nary_simplify (vn_nary_op_t nary)
2580 if (nary->length > gimple_match_op::MAX_NUM_OPS)
2581 return NULL_TREE;
2582 gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
2583 nary->type, nary->length);
2584 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
2585 return vn_nary_build_or_lookup_1 (&op, false, true);
2588 /* Elimination engine. */
2590 class eliminate_dom_walker : public dom_walker
2592 public:
2593 eliminate_dom_walker (cdi_direction, bitmap);
2594 ~eliminate_dom_walker ();
2596 edge before_dom_children (basic_block) final override;
2597 void after_dom_children (basic_block) final override;
2599 virtual tree eliminate_avail (basic_block, tree op);
2600 virtual void eliminate_push_avail (basic_block, tree op);
2601 tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
2603 void eliminate_stmt (basic_block, gimple_stmt_iterator *);
2605 unsigned eliminate_cleanup (bool region_p = false);
2607 bool do_pre;
2608 unsigned int el_todo;
2609 unsigned int eliminations;
2610 unsigned int insertions;
2612 /* SSA names that had their defs inserted by PRE if do_pre. */
2613 bitmap inserted_exprs;
2615 /* Blocks with statements that have had their EH properties changed. */
2616 bitmap need_eh_cleanup;
2618 /* Blocks with statements that have had their AB properties changed. */
2619 bitmap need_ab_cleanup;
2621 /* Local state for the eliminate domwalk. */
2622 auto_vec<gimple *> to_remove;
2623 auto_vec<gimple *> to_fixup;
2624 auto_vec<tree> avail;
2625 auto_vec<tree> avail_stack;
2628 /* Adaptor to the elimination engine using RPO availability. */
2630 class rpo_elim : public eliminate_dom_walker
2632 public:
2633 rpo_elim(basic_block entry_)
2634 : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
2635 m_avail_freelist (NULL) {}
2637 tree eliminate_avail (basic_block, tree op) final override;
2639 void eliminate_push_avail (basic_block, tree) final override;
2641 basic_block entry;
2642 /* Freelist of avail entries which are allocated from the vn_ssa_aux
2643 obstack. */
2644 vn_avail *m_avail_freelist;
2647 /* Global RPO state for access from hooks. */
2648 static eliminate_dom_walker *rpo_avail;
2649 basic_block vn_context_bb;
2651 /* Return true if BASE1 and BASE2 can be adjusted so they have the
2652 same address and adjust *OFFSET1 and *OFFSET2 accordingly.
2653 Otherwise return false. */
2655 static bool
2656 adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
2657 tree base2, poly_int64 *offset2)
2659 poly_int64 soff;
2660 if (TREE_CODE (base1) == MEM_REF
2661 && TREE_CODE (base2) == MEM_REF)
2663 if (mem_ref_offset (base1).to_shwi (&soff))
2665 base1 = TREE_OPERAND (base1, 0);
2666 *offset1 += soff * BITS_PER_UNIT;
2668 if (mem_ref_offset (base2).to_shwi (&soff))
2670 base2 = TREE_OPERAND (base2, 0);
2671 *offset2 += soff * BITS_PER_UNIT;
2673 return operand_equal_p (base1, base2, 0);
2675 return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
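/* E.g. (illustrative): for bases MEM_REF[p_1 + 4] and MEM_REF[p_1 + 8]
   both MEM_REFs are stripped, 32 and 64 bits are added to *OFFSET1 and
   *OFFSET2 respectively, and the remaining bases p_1 compare equal, so
   the function returns true.  */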
2678 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
2679 from the statement defining VUSE and if not successful tries to
2680 translate *REF and *VR through an aggregate copy at the definition
2681 of VUSE. If *DISAMBIGUATE_ONLY is not TR_TRANSLATE then do not perform
2682 translation of *REF and *VR. If only disambiguation was performed then
2683 *DISAMBIGUATE_ONLY is set to the kind of disambiguation performed. */
2685 static void *
2686 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
2687 translate_flags *disambiguate_only)
2689 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2690 vn_reference_t vr = data->vr;
2691 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2692 tree base = ao_ref_base (ref);
2693 HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
2694 static vec<vn_reference_op_s> lhs_ops;
2695 ao_ref lhs_ref;
2696 bool lhs_ref_ok = false;
2697 poly_int64 copy_size;
2699 /* First try to disambiguate after value-replacing in the definitions LHS. */
2700 if (is_gimple_assign (def_stmt))
2702 tree lhs = gimple_assign_lhs (def_stmt);
2703 bool valueized_anything = false;
2704 /* Avoid re-allocation overhead. */
2705 lhs_ops.truncate (0);
2706 basic_block saved_rpo_bb = vn_context_bb;
2707 vn_context_bb = gimple_bb (def_stmt);
2708 if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
2710 copy_reference_ops_from_ref (lhs, &lhs_ops);
2711 valueize_refs_1 (&lhs_ops, &valueized_anything, true);
2713 vn_context_bb = saved_rpo_bb;
2714 ao_ref_init (&lhs_ref, lhs);
2715 lhs_ref_ok = true;
2716 if (valueized_anything
2717 && ao_ref_init_from_vn_reference
2718 (&lhs_ref, ao_ref_alias_set (&lhs_ref),
2719 ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
2720 && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
2722 *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2723 return NULL;
2726 /* When the def is a CLOBBER we can optimistically disambiguate
2727 against it since any overlap would be undefined behavior.
2728 Avoid this for obvious must-aliases to save compile time though.
2729 We also may not do this when the query is used for redundant
2730 store removal. */
2731 if (!data->redundant_store_removal_p
2732 && gimple_clobber_p (def_stmt)
2733 && !operand_equal_p (ao_ref_base (&lhs_ref), base, OEP_ADDRESS_OF))
2735 *disambiguate_only = TR_DISAMBIGUATE;
2736 return NULL;
2739 /* Besides valueizing the LHS we can also use access-path based
2740 disambiguation on the original non-valueized ref. */
2741 if (!ref->ref
2742 && lhs_ref_ok
2743 && data->orig_ref.ref)
2745 /* We want to use the non-valueized LHS for this, but avoid redundant
2746 work. */
2747 ao_ref *lref = &lhs_ref;
2748 ao_ref lref_alt;
2749 if (valueized_anything)
2751 ao_ref_init (&lref_alt, lhs);
2752 lref = &lref_alt;
2754 if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
2756 *disambiguate_only = (valueized_anything
2757 ? TR_VALUEIZE_AND_DISAMBIGUATE
2758 : TR_DISAMBIGUATE);
2759 return NULL;
2763 /* If we reach a clobbering statement try to skip it and see if
2764 we find a VN result with exactly the same value as the
2765 possible clobber. In this case we can ignore the clobber
2766 and return the found value. */
2767 if (is_gimple_reg_type (TREE_TYPE (lhs))
2768 && types_compatible_p (TREE_TYPE (lhs), vr->type)
2769 && (ref->ref || data->orig_ref.ref)
2770 && !data->mask
2771 && data->partial_defs.is_empty ()
2772 && multiple_p (get_object_alignment
2773 (ref->ref ? ref->ref : data->orig_ref.ref),
2774 ref->size)
2775 && multiple_p (get_object_alignment (lhs), ref->size))
2777 tree rhs = gimple_assign_rhs1 (def_stmt);
2778 /* ??? We may not compare against values defined further ahead,
2779 which might be from a different loop iteration, but only against
2780 loop invariants. Use CONSTANT_CLASS_P (unvalueized!) as a
2781 conservative approximation. The one-hop lookup below doesn't
2782 have this issue since there's a virtual PHI before we ever reach
2783 a backedge to cross. We can skip multiple defs as long as they
2784 are from the same value though. */
2785 if (data->same_val
2786 && !operand_equal_p (data->same_val, rhs))
2788 else if (CONSTANT_CLASS_P (rhs))
2790 if (dump_file && (dump_flags & TDF_DETAILS))
2792 fprintf (dump_file,
2793 "Skipping possible redundant definition ");
2794 print_gimple_stmt (dump_file, def_stmt, 0);
2796 /* Delay the actual compare of the values to the end of the walk
2797 but do not update last_vuse from here. */
2798 data->last_vuse_ptr = NULL;
2799 data->same_val = rhs;
2800 return NULL;
2802 else
2804 tree saved_vuse = vr->vuse;
2805 hashval_t saved_hashcode = vr->hashcode;
2806 if (vr->vuse)
2807 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
2808 vr->vuse = vuse_ssa_val (gimple_vuse (def_stmt));
2809 if (vr->vuse)
2810 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
2811 vn_reference_t vnresult = NULL;
2812 /* Do not use vn_reference_lookup_2 since that might perform
2813 expression hashtable insertion but this lookup crosses
2814 a possible may-alias making such insertion conditionally
2815 invalid. */
2816 vn_reference_lookup_1 (vr, &vnresult);
2817 /* Need to restore vr->vuse and vr->hashcode. */
2818 vr->vuse = saved_vuse;
2819 vr->hashcode = saved_hashcode;
2820 if (vnresult)
2822 if (TREE_CODE (rhs) == SSA_NAME)
2823 rhs = SSA_VAL (rhs);
2824 if (vnresult->result
2825 && operand_equal_p (vnresult->result, rhs, 0))
2826 return vnresult;
2831 else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
2832 && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2833 && gimple_call_num_args (def_stmt) <= 4)
2835 /* For builtin calls valueize their arguments and call the
2836 alias oracle again. Valueization may improve points-to
2837 info of pointers and constify size and position arguments.
2838 Originally this was motivated by PR61034 which has
2839 conditional calls to free falsely clobbering ref because
2840 of imprecise points-to info of the argument. */
2841 tree oldargs[4];
2842 bool valueized_anything = false;
2843 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2845 oldargs[i] = gimple_call_arg (def_stmt, i);
2846 tree val = vn_valueize (oldargs[i]);
2847 if (val != oldargs[i])
2849 gimple_call_set_arg (def_stmt, i, val);
2850 valueized_anything = true;
2853 if (valueized_anything)
2855 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2856 ref, data->tbaa_p);
2857 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2858 gimple_call_set_arg (def_stmt, i, oldargs[i]);
2859 if (!res)
2861 *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2862 return NULL;
2867 if (*disambiguate_only > TR_TRANSLATE)
2868 return (void *)-1;
2870 /* If we cannot constrain the size of the reference we cannot
2871 test if anything kills it. */
2872 if (!ref->max_size_known_p ())
2873 return (void *)-1;
2875 poly_int64 offset = ref->offset;
2876 poly_int64 maxsize = ref->max_size;
2878 /* def_stmt may-defs *ref. See if we can derive a value for *ref
2879 from that definition.
2880 1) Memset. */
2881 if (is_gimple_reg_type (vr->type)
2882 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2883 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET_CHK))
2884 && (integer_zerop (gimple_call_arg (def_stmt, 1))
2885 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2886 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2887 && CHAR_BIT == 8
2888 && BITS_PER_UNIT == 8
2889 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2890 && offset.is_constant (&offseti)
2891 && ref->size.is_constant (&sizei)
2892 && (offseti % BITS_PER_UNIT == 0
2893 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
2894 && (poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2895 || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
2896 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)))))
2897 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2898 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2900 tree base2;
2901 poly_int64 offset2, size2, maxsize2;
2902 bool reverse;
2903 tree ref2 = gimple_call_arg (def_stmt, 0);
2904 if (TREE_CODE (ref2) == SSA_NAME)
2906 ref2 = SSA_VAL (ref2);
2907 if (TREE_CODE (ref2) == SSA_NAME
2908 && (TREE_CODE (base) != MEM_REF
2909 || TREE_OPERAND (base, 0) != ref2))
2911 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2912 if (gimple_assign_single_p (def_stmt)
2913 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2914 ref2 = gimple_assign_rhs1 (def_stmt);
2917 if (TREE_CODE (ref2) == ADDR_EXPR)
2919 ref2 = TREE_OPERAND (ref2, 0);
2920 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2921 &reverse);
2922 if (!known_size_p (maxsize2)
2923 || !known_eq (maxsize2, size2)
2924 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2925 return (void *)-1;
2927 else if (TREE_CODE (ref2) == SSA_NAME)
2929 poly_int64 soff;
2930 if (TREE_CODE (base) != MEM_REF
2931 || !(mem_ref_offset (base)
2932 << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2933 return (void *)-1;
2934 offset += soff;
2935 offset2 = 0;
2936 if (TREE_OPERAND (base, 0) != ref2)
2938 gimple *def = SSA_NAME_DEF_STMT (ref2);
2939 if (is_gimple_assign (def)
2940 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2941 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2942 && poly_int_tree_p (gimple_assign_rhs2 (def)))
2944 tree rhs2 = gimple_assign_rhs2 (def);
2945 if (!(poly_offset_int::from (wi::to_poly_wide (rhs2),
2946 SIGNED)
2947 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2948 return (void *)-1;
2949 ref2 = gimple_assign_rhs1 (def);
2950 if (TREE_CODE (ref2) == SSA_NAME)
2951 ref2 = SSA_VAL (ref2);
2953 else
2954 return (void *)-1;
2957 else
2958 return (void *)-1;
2959 tree len = gimple_call_arg (def_stmt, 2);
2960 HOST_WIDE_INT leni, offset2i;
2961 if (TREE_CODE (len) == SSA_NAME)
2962 len = SSA_VAL (len);
2963 /* Sometimes the above trickery is smarter than alias analysis. Take
2964 advantage of that. */
2965 if (!ranges_maybe_overlap_p (offset, maxsize, offset2,
2966 (wi::to_poly_offset (len)
2967 << LOG2_BITS_PER_UNIT)))
2968 return NULL;
2969 if (data->partial_defs.is_empty ()
2970 && known_subrange_p (offset, maxsize, offset2,
2971 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2973 tree val;
2974 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2975 val = build_zero_cst (vr->type);
2976 else if (INTEGRAL_TYPE_P (vr->type)
2977 && known_eq (ref->size, 8)
2978 && offseti % BITS_PER_UNIT == 0)
2980 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2981 vr->type, gimple_call_arg (def_stmt, 1));
2982 val = vn_nary_build_or_lookup (&res_op);
2983 if (!val
2984 || (TREE_CODE (val) == SSA_NAME
2985 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2986 return (void *)-1;
2988 else
2990 unsigned buflen
2991 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
2992 if (INTEGRAL_TYPE_P (vr->type)
2993 && TYPE_MODE (vr->type) != BLKmode)
2994 buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
2995 unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
2996 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2997 buflen);
2998 if (BYTES_BIG_ENDIAN)
3000 unsigned int amnt
3001 = (((unsigned HOST_WIDE_INT) offseti + sizei)
3002 % BITS_PER_UNIT);
3003 if (amnt)
3005 shift_bytes_in_array_right (buf, buflen,
3006 BITS_PER_UNIT - amnt);
3007 buf++;
3008 buflen--;
3011 else if (offseti % BITS_PER_UNIT != 0)
3013 unsigned int amnt
3014 = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
3015 % BITS_PER_UNIT);
3016 shift_bytes_in_array_left (buf, buflen, amnt);
3017 buf++;
3018 buflen--;
3020 val = native_interpret_expr (vr->type, buf, buflen);
3021 if (!val)
3022 return (void *)-1;
3024 return data->finish (0, 0, val);
3026 /* For now handle clearing memory with partial defs. */
3027 else if (known_eq (ref->size, maxsize)
3028 && integer_zerop (gimple_call_arg (def_stmt, 1))
3029 && tree_fits_poly_int64_p (len)
3030 && tree_to_poly_int64 (len).is_constant (&leni)
3031 && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
3032 && offset.is_constant (&offseti)
3033 && offset2.is_constant (&offset2i)
3034 && maxsize.is_constant (&maxsizei)
3035 && ranges_known_overlap_p (offseti, maxsizei, offset2i,
3036 leni << LOG2_BITS_PER_UNIT))
3038 pd_data pd;
3039 pd.rhs = build_constructor (NULL_TREE, NULL);
3040 pd.rhs_off = 0;
3041 pd.offset = offset2i;
3042 pd.size = leni << LOG2_BITS_PER_UNIT;
3043 return data->push_partial_def (pd, 0, 0, offseti, maxsizei);
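/* E.g. (illustrative) for case 1) above: after memset (&a, 0, 16) a
   4-byte read that lies fully within the cleared 16 bytes is resolved via
   the known_subrange_p path to build_zero_cst (vr->type), while a memset
   of zero that only partially covers the read is recorded as an artificial
   zero store (empty CONSTRUCTOR) for the partial-def tracker.  */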
3047 /* 2) Assignment from an empty CONSTRUCTOR. */
3048 else if (is_gimple_reg_type (vr->type)
3049 && gimple_assign_single_p (def_stmt)
3050 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
3051 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
3053 tree base2;
3054 poly_int64 offset2, size2, maxsize2;
3055 HOST_WIDE_INT offset2i, size2i;
3056 gcc_assert (lhs_ref_ok);
3057 base2 = ao_ref_base (&lhs_ref);
3058 offset2 = lhs_ref.offset;
3059 size2 = lhs_ref.size;
3060 maxsize2 = lhs_ref.max_size;
3061 if (known_size_p (maxsize2)
3062 && known_eq (maxsize2, size2)
3063 && adjust_offsets_for_equal_base_address (base, &offset,
3064 base2, &offset2))
3066 if (data->partial_defs.is_empty ()
3067 && known_subrange_p (offset, maxsize, offset2, size2))
3069 /* While technically undefined behavior, do not optimize
3070 a full read from a clobber. */
3071 if (gimple_clobber_p (def_stmt))
3072 return (void *)-1;
3073 tree val = build_zero_cst (vr->type);
3074 return data->finish (ao_ref_alias_set (&lhs_ref),
3075 ao_ref_base_alias_set (&lhs_ref), val);
3077 else if (known_eq (ref->size, maxsize)
3078 && maxsize.is_constant (&maxsizei)
3079 && offset.is_constant (&offseti)
3080 && offset2.is_constant (&offset2i)
3081 && size2.is_constant (&size2i)
3082 && ranges_known_overlap_p (offseti, maxsizei,
3083 offset2i, size2i))
3085 /* Let clobbers be consumed by the partial-def tracker
3086 which can choose to ignore them if they are shadowed
3087 by a later def. */
3088 pd_data pd;
3089 pd.rhs = gimple_assign_rhs1 (def_stmt);
3090 pd.rhs_off = 0;
3091 pd.offset = offset2i;
3092 pd.size = size2i;
3093 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3094 ao_ref_base_alias_set (&lhs_ref),
3095 offseti, maxsizei);
3100 /* 3) Assignment from a constant. We can use folds native encode/interpret
3101 routines to extract the assigned bits. */
3102 else if (known_eq (ref->size, maxsize)
3103 && is_gimple_reg_type (vr->type)
3104 && !reverse_storage_order_for_component_p (vr->operands)
3105 && !contains_storage_order_barrier_p (vr->operands)
3106 && gimple_assign_single_p (def_stmt)
3107 && CHAR_BIT == 8
3108 && BITS_PER_UNIT == 8
3109 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
3110 /* native_encode and native_decode operate on arrays of bytes
3111 and so fundamentally need a compile-time size and offset. */
3112 && maxsize.is_constant (&maxsizei)
3113 && offset.is_constant (&offseti)
3114 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
3115 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
3116 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
3118 tree lhs = gimple_assign_lhs (def_stmt);
3119 tree base2;
3120 poly_int64 offset2, size2, maxsize2;
3121 HOST_WIDE_INT offset2i, size2i;
3122 bool reverse;
3123 gcc_assert (lhs_ref_ok);
3124 base2 = ao_ref_base (&lhs_ref);
3125 offset2 = lhs_ref.offset;
3126 size2 = lhs_ref.size;
3127 maxsize2 = lhs_ref.max_size;
3128 reverse = reverse_storage_order_for_component_p (lhs);
3129 if (base2
3130 && !reverse
3131 && !storage_order_barrier_p (lhs)
3132 && known_eq (maxsize2, size2)
3133 && adjust_offsets_for_equal_base_address (base, &offset,
3134 base2, &offset2)
3135 && offset.is_constant (&offseti)
3136 && offset2.is_constant (&offset2i)
3137 && size2.is_constant (&size2i))
3139 if (data->partial_defs.is_empty ()
3140 && known_subrange_p (offseti, maxsizei, offset2, size2))
3142 /* We support up to 512-bit values (for V8DFmode). */
3143 unsigned char buffer[65];
3144 int len;
3146 tree rhs = gimple_assign_rhs1 (def_stmt);
3147 if (TREE_CODE (rhs) == SSA_NAME)
3148 rhs = SSA_VAL (rhs);
3149 len = native_encode_expr (rhs,
3150 buffer, sizeof (buffer) - 1,
3151 (offseti - offset2i) / BITS_PER_UNIT);
3152 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
3154 tree type = vr->type;
3155 unsigned char *buf = buffer;
3156 unsigned int amnt = 0;
3157 /* Make sure to interpret in a type that has a range
3158 covering the whole access size. */
3159 if (INTEGRAL_TYPE_P (vr->type)
3160 && maxsizei != TYPE_PRECISION (vr->type))
3161 type = build_nonstandard_integer_type (maxsizei,
3162 TYPE_UNSIGNED (type));
3163 if (BYTES_BIG_ENDIAN)
3165 /* For big-endian native_encode_expr stored the rhs
3166 such that the LSB of it is the LSB of buffer[len - 1].
3167 That bit is stored into memory at position
3168 offset2 + size2 - 1, i.e. in byte
3169 base + (offset2 + size2 - 1) / BITS_PER_UNIT.
3170 E.g. for offset2 1 and size2 14, rhs -1 and memory
3171 previously cleared that is:
3173 01111111|11111110
3174 Now, if we want to extract offset 2 and size 12 from
3175 it using native_interpret_expr (which actually works
3176 for integral bitfield types in terms of byte size of
3177 the mode), the native_encode_expr stored the value
3178 into buffer as
3179 XX111111|11111111
3180 and returned len 2 (the X bits are outside of
3181 precision).
3182 Let sz be maxsize / BITS_PER_UNIT if not extracting
3183 a bitfield, and GET_MODE_SIZE otherwise.
3184 We need to align the LSB of the value we want to
3185 extract as the LSB of buf[sz - 1].
3186 The LSB from memory we need to read is at position
3187 offset + maxsize - 1. */
3188 HOST_WIDE_INT sz = maxsizei / BITS_PER_UNIT;
3189 if (INTEGRAL_TYPE_P (type))
3191 if (TYPE_MODE (type) != BLKmode)
3192 sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
3193 else
3194 sz = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
3196 amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
3197 - offseti - maxsizei) % BITS_PER_UNIT;
3198 if (amnt)
3199 shift_bytes_in_array_right (buffer, len, amnt);
3200 amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
3201 - offseti - maxsizei - amnt) / BITS_PER_UNIT;
3202 if ((unsigned HOST_WIDE_INT) sz + amnt > (unsigned) len)
3203 len = 0;
3204 else
3206 buf = buffer + len - sz - amnt;
3207 len -= (buf - buffer);
3210 else
3212 amnt = ((unsigned HOST_WIDE_INT) offset2i
3213 - offseti) % BITS_PER_UNIT;
3214 if (amnt)
3216 buffer[len] = 0;
3217 shift_bytes_in_array_left (buffer, len + 1, amnt);
3218 buf = buffer + 1;
3221 tree val = native_interpret_expr (type, buf, len);
3222 /* If we chop off bits because the type's precision doesn't
3223 match the memory access size, this is ok when optimizing
3224 reads but not when called from the DSE code during
3225 elimination. */
3226 if (val
3227 && type != vr->type)
3229 if (! int_fits_type_p (val, vr->type))
3230 val = NULL_TREE;
3231 else
3232 val = fold_convert (vr->type, val);
3235 if (val)
3236 return data->finish (ao_ref_alias_set (&lhs_ref),
3237 ao_ref_base_alias_set (&lhs_ref), val);
3240 else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
3241 size2i))
3243 pd_data pd;
3244 tree rhs = gimple_assign_rhs1 (def_stmt);
3245 if (TREE_CODE (rhs) == SSA_NAME)
3246 rhs = SSA_VAL (rhs);
3247 pd.rhs = rhs;
3248 pd.rhs_off = 0;
3249 pd.offset = offset2i;
3250 pd.size = size2i;
3251 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3252 ao_ref_base_alias_set (&lhs_ref),
3253 offseti, maxsizei);
3258 /* 4) Assignment from an SSA name whose definition we may be able
3259 to access pieces from or combine to a larger entity. */
3260 else if (known_eq (ref->size, maxsize)
3261 && is_gimple_reg_type (vr->type)
3262 && !reverse_storage_order_for_component_p (vr->operands)
3263 && !contains_storage_order_barrier_p (vr->operands)
3264 && gimple_assign_single_p (def_stmt)
3265 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
3267 tree lhs = gimple_assign_lhs (def_stmt);
3268 tree base2;
3269 poly_int64 offset2, size2, maxsize2;
3270 HOST_WIDE_INT offset2i, size2i, offseti;
3271 bool reverse;
3272 gcc_assert (lhs_ref_ok);
3273 base2 = ao_ref_base (&lhs_ref);
3274 offset2 = lhs_ref.offset;
3275 size2 = lhs_ref.size;
3276 maxsize2 = lhs_ref.max_size;
3277 reverse = reverse_storage_order_for_component_p (lhs);
3278 tree def_rhs = gimple_assign_rhs1 (def_stmt);
3279 if (!reverse
3280 && !storage_order_barrier_p (lhs)
3281 && known_size_p (maxsize2)
3282 && known_eq (maxsize2, size2)
3283 && adjust_offsets_for_equal_base_address (base, &offset,
3284 base2, &offset2))
3286 if (data->partial_defs.is_empty ()
3287 && known_subrange_p (offset, maxsize, offset2, size2)
3288 /* ??? We can't handle bitfield precision extracts without
3289 either using an alternate type for the BIT_FIELD_REF and
3290 then doing a conversion or possibly adjusting the offset
3291 according to endianness. */
3292 && (! INTEGRAL_TYPE_P (vr->type)
3293 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
3294 && multiple_p (ref->size, BITS_PER_UNIT))
3296 tree val = NULL_TREE;
3297 if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
3298 || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
3300 gimple_match_op op (gimple_match_cond::UNCOND,
3301 BIT_FIELD_REF, vr->type,
3302 SSA_VAL (def_rhs),
3303 bitsize_int (ref->size),
3304 bitsize_int (offset - offset2));
3305 val = vn_nary_build_or_lookup (&op);
3307 else if (known_eq (ref->size, size2))
3309 gimple_match_op op (gimple_match_cond::UNCOND,
3310 VIEW_CONVERT_EXPR, vr->type,
3311 SSA_VAL (def_rhs));
3312 val = vn_nary_build_or_lookup (&op);
3314 if (val
3315 && (TREE_CODE (val) != SSA_NAME
3316 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
3317 return data->finish (ao_ref_alias_set (&lhs_ref),
3318 ao_ref_base_alias_set (&lhs_ref), val);
3320 else if (maxsize.is_constant (&maxsizei)
3321 && offset.is_constant (&offseti)
3322 && offset2.is_constant (&offset2i)
3323 && size2.is_constant (&size2i)
3324 && ranges_known_overlap_p (offset, maxsize, offset2, size2))
3326 pd_data pd;
3327 pd.rhs = SSA_VAL (def_rhs);
3328 pd.rhs_off = 0;
3329 pd.offset = offset2i;
3330 pd.size = size2i;
3331 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3332 ao_ref_base_alias_set (&lhs_ref),
3333 offseti, maxsizei);
3338 /* 4b) Assignment done via one of the vectorizer internal store
3339 functions, from which we may be able to access pieces or which
3340 we can combine to a larger entity. */
3341 else if (known_eq (ref->size, maxsize)
3342 && is_gimple_reg_type (vr->type)
3343 && !reverse_storage_order_for_component_p (vr->operands)
3344 && !contains_storage_order_barrier_p (vr->operands)
3345 && is_gimple_call (def_stmt)
3346 && gimple_call_internal_p (def_stmt)
3347 && internal_store_fn_p (gimple_call_internal_fn (def_stmt)))
3349 gcall *call = as_a <gcall *> (def_stmt);
3350 internal_fn fn = gimple_call_internal_fn (call);
3352 tree mask = NULL_TREE, len = NULL_TREE, bias = NULL_TREE;
3353 switch (fn)
3355 case IFN_MASK_STORE:
3356 mask = gimple_call_arg (call, internal_fn_mask_index (fn));
3357 mask = vn_valueize (mask);
3358 if (TREE_CODE (mask) != VECTOR_CST)
3359 return (void *)-1;
3360 break;
3361 case IFN_LEN_STORE:
3363 int len_index = internal_fn_len_index (fn);
3364 len = gimple_call_arg (call, len_index);
3365 bias = gimple_call_arg (call, len_index + 1);
3366 if (!tree_fits_uhwi_p (len) || !tree_fits_shwi_p (bias))
3367 return (void *) -1;
3368 break;
3370 default:
3371 return (void *)-1;
3373 tree def_rhs = gimple_call_arg (call,
3374 internal_fn_stored_value_index (fn));
3375 def_rhs = vn_valueize (def_rhs);
3376 if (TREE_CODE (def_rhs) != VECTOR_CST)
3377 return (void *)-1;
3379 ao_ref_init_from_ptr_and_size (&lhs_ref,
3380 vn_valueize (gimple_call_arg (call, 0)),
3381 TYPE_SIZE_UNIT (TREE_TYPE (def_rhs)));
3382 tree base2;
3383 poly_int64 offset2, size2, maxsize2;
3384 HOST_WIDE_INT offset2i, size2i, offseti;
3385 base2 = ao_ref_base (&lhs_ref);
3386 offset2 = lhs_ref.offset;
3387 size2 = lhs_ref.size;
3388 maxsize2 = lhs_ref.max_size;
3389 if (known_size_p (maxsize2)
3390 && known_eq (maxsize2, size2)
3391 && adjust_offsets_for_equal_base_address (base, &offset,
3392 base2, &offset2)
3393 && maxsize.is_constant (&maxsizei)
3394 && offset.is_constant (&offseti)
3395 && offset2.is_constant (&offset2i)
3396 && size2.is_constant (&size2i))
3398 if (!ranges_maybe_overlap_p (offset, maxsize, offset2, size2))
3399 /* Poor man's disambiguation. */
3400 return NULL;
3401 else if (ranges_known_overlap_p (offset, maxsize, offset2, size2))
3403 pd_data pd;
3404 pd.rhs = def_rhs;
3405 tree aa = gimple_call_arg (call, 1);
3406 alias_set_type set = get_deref_alias_set (TREE_TYPE (aa));
3407 tree vectype = TREE_TYPE (def_rhs);
3408 unsigned HOST_WIDE_INT elsz
3409 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (vectype)));
3410 if (mask)
3412 HOST_WIDE_INT start = 0, length = 0;
3413 unsigned mask_idx = 0;
3416 if (integer_zerop (VECTOR_CST_ELT (mask, mask_idx)))
3418 if (length != 0)
3420 pd.rhs_off = start;
3421 pd.offset = offset2i + start;
3422 pd.size = length;
3423 if (ranges_known_overlap_p
3424 (offset, maxsize, pd.offset, pd.size))
3426 void *res = data->push_partial_def
3427 (pd, set, set, offseti, maxsizei);
3428 if (res != NULL)
3429 return res;
3432 start = (mask_idx + 1) * elsz;
3433 length = 0;
3435 else
3436 length += elsz;
3437 mask_idx++;
3439 while (known_lt (mask_idx, TYPE_VECTOR_SUBPARTS (vectype)));
3440 if (length != 0)
3442 pd.rhs_off = start;
3443 pd.offset = offset2i + start;
3444 pd.size = length;
3445 if (ranges_known_overlap_p (offset, maxsize,
3446 pd.offset, pd.size))
3447 return data->push_partial_def (pd, set, set,
3448 offseti, maxsizei);
3451 else if (fn == IFN_LEN_STORE)
3453 pd.offset = offset2i;
3454 pd.size = (tree_to_uhwi (len)
3455 + -tree_to_shwi (bias)) * BITS_PER_UNIT;
3456 if (BYTES_BIG_ENDIAN)
3457 pd.rhs_off = pd.size - tree_to_uhwi (TYPE_SIZE (vectype));
3458 else
3459 pd.rhs_off = 0;
3460 if (ranges_known_overlap_p (offset, maxsize,
3461 pd.offset, pd.size))
3462 return data->push_partial_def (pd, set, set,
3463 offseti, maxsizei);
3465 else
3466 gcc_unreachable ();
3467 return NULL;
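/* E.g. (illustrative) for an IFN_MASK_STORE of a V4SI VECTOR_CST with
   mask {1, 1, 0, 1} and 32-bit elements: the loop above pushes one
   partial def for the first two lanes (rhs_off 0, size 64) when it
   reaches the zero lane and another for the last lane (rhs_off 96,
   size 32) after the loop, both placed at their lane offsets relative
   to OFFSET2I.  */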
3472 /* 5) For aggregate copies translate the reference through them if
3473 the copy kills ref. */
3474 else if (data->vn_walk_kind == VN_WALKREWRITE
3475 && gimple_assign_single_p (def_stmt)
3476 && (DECL_P (gimple_assign_rhs1 (def_stmt))
3477 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
3478 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
3480 tree base2;
3481 int i, j, k;
3482 auto_vec<vn_reference_op_s> rhs;
3483 vn_reference_op_t vro;
3484 ao_ref r;
3486 gcc_assert (lhs_ref_ok);
3488 /* See if the assignment kills REF. */
3489 base2 = ao_ref_base (&lhs_ref);
3490 if (!lhs_ref.max_size_known_p ()
3491 || (base != base2
3492 && (TREE_CODE (base) != MEM_REF
3493 || TREE_CODE (base2) != MEM_REF
3494 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
3495 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
3496 TREE_OPERAND (base2, 1))))
3497 || !stmt_kills_ref_p (def_stmt, ref))
3498 return (void *)-1;
3500 /* Find the common base of ref and the lhs. lhs_ops already
3501 contains valueized operands for the lhs. */
3502 i = vr->operands.length () - 1;
3503 j = lhs_ops.length () - 1;
3504 while (j >= 0 && i >= 0
3505 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
3507 i--;
3508 j--;
3511 /* ??? The innermost op should always be a MEM_REF and we already
3512 checked that the assignment to the lhs kills vr. Thus for
3513 aggregate copies using char[] types the vn_reference_op_eq
3514 may fail when comparing types for compatibility. But we really
3515 don't care here - further lookups with the rewritten operands
3516 will simply fail if we messed up types too badly. */
3517 poly_int64 extra_off = 0;
3518 if (j == 0 && i >= 0
3519 && lhs_ops[0].opcode == MEM_REF
3520 && maybe_ne (lhs_ops[0].off, -1))
3522 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
3523 i--, j--;
3524 else if (vr->operands[i].opcode == MEM_REF
3525 && maybe_ne (vr->operands[i].off, -1))
3527 extra_off = vr->operands[i].off - lhs_ops[0].off;
3528 i--, j--;
3532 /* i now points to the first additional op.
3533 ??? LHS may not be completely contained in VR; one or more
3534 VIEW_CONVERT_EXPRs could be in its way. We could at least
3535 try handling outermost VIEW_CONVERT_EXPRs. */
3536 if (j != -1)
3537 return (void *)-1;
3539 /* Punt if the additional ops contain a storage order barrier. */
3540 for (k = i; k >= 0; k--)
3542 vro = &vr->operands[k];
3543 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
3544 return (void *)-1;
3547 /* Now re-write REF to be based on the rhs of the assignment. */
3548 tree rhs1 = gimple_assign_rhs1 (def_stmt);
3549 copy_reference_ops_from_ref (rhs1, &rhs);
3551 /* Apply an extra offset to the inner MEM_REF of the RHS. */
3552 bool force_no_tbaa = false;
3553 if (maybe_ne (extra_off, 0))
3555 if (rhs.length () < 2)
3556 return (void *)-1;
3557 int ix = rhs.length () - 2;
3558 if (rhs[ix].opcode != MEM_REF
3559 || known_eq (rhs[ix].off, -1))
3560 return (void *)-1;
3561 rhs[ix].off += extra_off;
3562 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
3563 build_int_cst (TREE_TYPE (rhs[ix].op0),
3564 extra_off));
3565 /* When we have offsetted the RHS, reading only parts of it,
3566 we can no longer use the original TBAA type, force alias-set
3567 zero. */
3568 force_no_tbaa = true;
3571 /* Save the operands since we need to use the original ones for
3572 the hash entry we use. */
3573 if (!data->saved_operands.exists ())
3574 data->saved_operands = vr->operands.copy ();
3576 /* We need to pre-pend vr->operands[0..i] to rhs. */
3577 vec<vn_reference_op_s> old = vr->operands;
3578 if (i + 1 + rhs.length () > vr->operands.length ())
3579 vr->operands.safe_grow (i + 1 + rhs.length (), true);
3580 else
3581 vr->operands.truncate (i + 1 + rhs.length ());
3582 FOR_EACH_VEC_ELT (rhs, j, vro)
3583 vr->operands[i + 1 + j] = *vro;
3584 valueize_refs (&vr->operands);
3585 if (old == shared_lookup_references)
3586 shared_lookup_references = vr->operands;
3587 vr->hashcode = vn_reference_compute_hash (vr);
3589 /* Try folding the new reference to a constant. */
3590 tree val = fully_constant_vn_reference_p (vr);
3591 if (val)
3593 if (data->partial_defs.is_empty ())
3594 return data->finish (ao_ref_alias_set (&lhs_ref),
3595 ao_ref_base_alias_set (&lhs_ref), val);
3596 /* This is the only interesting case for partial-def handling
3597 coming from targets that like to gimplify init-ctors as
3598 aggregate copies from constant data like aarch64 for
3599 PR83518. */
3600 if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
3602 pd_data pd;
3603 pd.rhs = val;
3604 pd.rhs_off = 0;
3605 pd.offset = 0;
3606 pd.size = maxsizei;
3607 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3608 ao_ref_base_alias_set (&lhs_ref),
3609 0, maxsizei);
3613 /* Continuing with partial defs isn't easily possible here; we
3614 have to find a full def via further lookups. Probably
3615 not worth special-casing everywhere. */
3616 if (!data->partial_defs.is_empty ())
3617 return (void *)-1;
3619 /* Adjust *ref from the new operands. */
3620 ao_ref rhs1_ref;
3621 ao_ref_init (&rhs1_ref, rhs1);
3622 if (!ao_ref_init_from_vn_reference (&r,
3623 force_no_tbaa ? 0
3624 : ao_ref_alias_set (&rhs1_ref),
3625 force_no_tbaa ? 0
3626 : ao_ref_base_alias_set (&rhs1_ref),
3627 vr->type, vr->operands))
3628 return (void *)-1;
3629 /* This can happen with bitfields. */
3630 if (maybe_ne (ref->size, r.size))
3632 /* If the access lacks some subsetting simply apply that by
3633 shortening it. That in the end can only be successful
3634 if we can pun the lookup result which in turn requires
3635 exact offsets. */
3636 if (known_eq (r.size, r.max_size)
3637 && known_lt (ref->size, r.size))
3638 r.size = r.max_size = ref->size;
3639 else
3640 return (void *)-1;
3642 *ref = r;
3643 vr->offset = r.offset;
3644 vr->max_size = r.max_size;
3646 /* Do not update last seen VUSE after translating. */
3647 data->last_vuse_ptr = NULL;
3648 /* Invalidate the original access path since it now contains
3649 the wrong base. */
3650 data->orig_ref.ref = NULL_TREE;
3651 /* Use the alias-set of this LHS for recording an eventual result. */
3652 if (data->first_set == -2)
3654 data->first_set = ao_ref_alias_set (&lhs_ref);
3655 data->first_base_set = ao_ref_base_alias_set (&lhs_ref);
3658 /* Keep looking for the adjusted *REF / VR pair. */
3659 return NULL;
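/* E.g. (illustrative) for
     a = b;
     ... = a.x.y;
   case 5) finds the trailing operands the access path shares with the
   store's LHS (the 'a' part), keeps the remaining component ops (.x.y)
   and re-bases them on the operands of the RHS, so the walk continues by
   looking up the equivalent of b.x.y at the older VUSE.  */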
3662 /* 6) For memcpy copies translate the reference through them if the copy
3663 kills ref. But we cannot (easily) do this translation if the memcpy is
3664 a storage order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that
3665 can modify the storage order of objects (see storage_order_barrier_p). */
3666 else if (data->vn_walk_kind == VN_WALKREWRITE
3667 && is_gimple_reg_type (vr->type)
3668 /* ??? Handle BCOPY as well. */
3669 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
3670 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY_CHK)
3671 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
3672 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY_CHK)
3673 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)
3674 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE_CHK))
3675 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
3676 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
3677 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
3678 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
3679 && (poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
3680 || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
3681 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)),
3682 &copy_size)))
3683 /* Handling this is more complicated, give up for now. */
3684 && data->partial_defs.is_empty ())
3686 tree lhs, rhs;
3687 ao_ref r;
3688 poly_int64 rhs_offset, lhs_offset;
3689 vn_reference_op_s op;
3690 poly_uint64 mem_offset;
3691 poly_int64 at, byte_maxsize;
3693 /* Only handle non-variable, addressable refs. */
3694 if (maybe_ne (ref->size, maxsize)
3695 || !multiple_p (offset, BITS_PER_UNIT, &at)
3696 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
3697 return (void *)-1;
3699 /* Extract a pointer base and an offset for the destination. */
3700 lhs = gimple_call_arg (def_stmt, 0);
3701 lhs_offset = 0;
3702 if (TREE_CODE (lhs) == SSA_NAME)
3704 lhs = vn_valueize (lhs);
3705 if (TREE_CODE (lhs) == SSA_NAME)
3707 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
3708 if (gimple_assign_single_p (def_stmt)
3709 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
3710 lhs = gimple_assign_rhs1 (def_stmt);
3713 if (TREE_CODE (lhs) == ADDR_EXPR)
3715 if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (lhs)))
3716 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (lhs))))
3717 return (void *)-1;
3718 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
3719 &lhs_offset);
3720 if (!tem)
3721 return (void *)-1;
3722 if (TREE_CODE (tem) == MEM_REF
3723 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3725 lhs = TREE_OPERAND (tem, 0);
3726 if (TREE_CODE (lhs) == SSA_NAME)
3727 lhs = vn_valueize (lhs);
3728 lhs_offset += mem_offset;
3730 else if (DECL_P (tem))
3731 lhs = build_fold_addr_expr (tem);
3732 else
3733 return (void *)-1;
3735 if (TREE_CODE (lhs) != SSA_NAME
3736 && TREE_CODE (lhs) != ADDR_EXPR)
3737 return (void *)-1;
3739 /* Extract a pointer base and an offset for the source. */
3740 rhs = gimple_call_arg (def_stmt, 1);
3741 rhs_offset = 0;
3742 if (TREE_CODE (rhs) == SSA_NAME)
3743 rhs = vn_valueize (rhs);
3744 if (TREE_CODE (rhs) == ADDR_EXPR)
3746 if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhs)))
3747 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (rhs))))
3748 return (void *)-1;
3749 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
3750 &rhs_offset);
3751 if (!tem)
3752 return (void *)-1;
3753 if (TREE_CODE (tem) == MEM_REF
3754 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3756 rhs = TREE_OPERAND (tem, 0);
3757 rhs_offset += mem_offset;
3759 else if (DECL_P (tem)
3760 || TREE_CODE (tem) == STRING_CST)
3761 rhs = build_fold_addr_expr (tem);
3762 else
3763 return (void *)-1;
3765 if (TREE_CODE (rhs) == SSA_NAME)
3766 rhs = SSA_VAL (rhs);
3767 else if (TREE_CODE (rhs) != ADDR_EXPR)
3768 return (void *)-1;
 3770       /* The bases of the destination and the reference have to agree. */
3771 if (TREE_CODE (base) == MEM_REF)
3773 if (TREE_OPERAND (base, 0) != lhs
3774 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
3775 return (void *) -1;
3776 at += mem_offset;
3778 else if (!DECL_P (base)
3779 || TREE_CODE (lhs) != ADDR_EXPR
3780 || TREE_OPERAND (lhs, 0) != base)
3781 return (void *)-1;
3783 /* If the access is completely outside of the memcpy destination
3784 area there is no aliasing. */
3785 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
3786 return NULL;
3787 /* And the access has to be contained within the memcpy destination. */
3788 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
3789 return (void *)-1;
3791 /* Save the operands since we need to use the original ones for
3792 the hash entry we use. */
3793 if (!data->saved_operands.exists ())
3794 data->saved_operands = vr->operands.copy ();
3796 /* Make room for 2 operands in the new reference. */
3797 if (vr->operands.length () < 2)
3799 vec<vn_reference_op_s> old = vr->operands;
3800 vr->operands.safe_grow_cleared (2, true);
3801 if (old == shared_lookup_references)
3802 shared_lookup_references = vr->operands;
3804 else
3805 vr->operands.truncate (2);
3807 /* The looked-through reference is a simple MEM_REF. */
3808 memset (&op, 0, sizeof (op));
3809 op.type = vr->type;
3810 op.opcode = MEM_REF;
3811 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
3812 op.off = at - lhs_offset + rhs_offset;
3813 vr->operands[0] = op;
3814 op.type = TREE_TYPE (rhs);
3815 op.opcode = TREE_CODE (rhs);
3816 op.op0 = rhs;
3817 op.off = -1;
3818 vr->operands[1] = op;
3819 vr->hashcode = vn_reference_compute_hash (vr);
3821 /* Try folding the new reference to a constant. */
3822 tree val = fully_constant_vn_reference_p (vr);
3823 if (val)
3824 return data->finish (0, 0, val);
3826 /* Adjust *ref from the new operands. */
3827 if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
3828 return (void *)-1;
3829 /* This can happen with bitfields. */
3830 if (maybe_ne (ref->size, r.size))
3831 return (void *)-1;
3832 *ref = r;
3833 vr->offset = r.offset;
3834 vr->max_size = r.max_size;
3836 /* Do not update last seen VUSE after translating. */
3837 data->last_vuse_ptr = NULL;
3838 /* Invalidate the original access path since it now contains
3839 the wrong base. */
3840 data->orig_ref.ref = NULL_TREE;
3841 /* Use the alias-set of this stmt for recording an eventual result. */
3842 if (data->first_set == -2)
3844 data->first_set = 0;
3845 data->first_base_set = 0;
3848 /* Keep looking for the adjusted *REF / VR pair. */
3849 return NULL;
3852 /* Bail out and stop walking. */
3853 return (void *)-1;
3856 /* Return a reference op vector from OP that can be used for
3857 vn_reference_lookup_pieces. The caller is responsible for releasing
3858 the vector. */
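/* A minimal usage sketch (REF is a hypothetical reference tree):

     vec<vn_reference_op_s> ops = vn_reference_operands_for_lookup (ref);
     ... feed OPS to vn_reference_lookup_pieces below ...
     ops.release ();  */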
3860 vec<vn_reference_op_s>
3861 vn_reference_operands_for_lookup (tree op)
3863 bool valueized;
3864 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
 3867 /* Lookup a reference operation by its parts, in the current hash table.
3868 Returns the resulting value number if it exists in the hash table,
3869 NULL_TREE otherwise. VNRESULT will be filled in with the actual
3870 vn_reference_t stored in the hashtable if something is found. */
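/* Hedged example of a by-pieces lookup; OPS comes from the sketch above,
   VUSE is the reference's virtual operand, and using get_alias_set for
   both SET and BASE_SET is a simplification for illustration only:

     vn_reference_t vnres;
     tree val = vn_reference_lookup_pieces (vuse, get_alias_set (ref),
					    get_alias_set (ref),
					    TREE_TYPE (ref), ops,
					    &vnres, VN_WALK);  */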
3872 tree
3873 vn_reference_lookup_pieces (tree vuse, alias_set_type set,
3874 alias_set_type base_set, tree type,
3875 vec<vn_reference_op_s> operands,
3876 vn_reference_t *vnresult, vn_lookup_kind kind)
3878 struct vn_reference_s vr1;
3879 vn_reference_t tmp;
3880 tree cst;
3882 if (!vnresult)
3883 vnresult = &tmp;
3884 *vnresult = NULL;
3886 vr1.vuse = vuse_ssa_val (vuse);
3887 shared_lookup_references.truncate (0);
3888 shared_lookup_references.safe_grow (operands.length (), true);
3889 memcpy (shared_lookup_references.address (),
3890 operands.address (),
3891 sizeof (vn_reference_op_s)
3892 * operands.length ());
3893 bool valueized_p;
3894 valueize_refs_1 (&shared_lookup_references, &valueized_p);
3895 vr1.operands = shared_lookup_references;
3896 vr1.type = type;
3897 vr1.set = set;
3898 vr1.base_set = base_set;
 3899   /* We can pretend there's no extra info fed in since the ao_ref's offset
3900 and max_size are computed only from the VN reference ops. */
3901 vr1.offset = 0;
3902 vr1.max_size = -1;
3903 vr1.hashcode = vn_reference_compute_hash (&vr1);
3904 if ((cst = fully_constant_vn_reference_p (&vr1)))
3905 return cst;
3907 vn_reference_lookup_1 (&vr1, vnresult);
3908 if (!*vnresult
3909 && kind != VN_NOWALK
3910 && vr1.vuse)
3912 ao_ref r;
3913 unsigned limit = param_sccvn_max_alias_queries_per_access;
3914 vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE,
3915 false);
3916 vec<vn_reference_op_s> ops_for_ref;
3917 if (!valueized_p)
3918 ops_for_ref = vr1.operands;
3919 else
3921 /* For ao_ref_from_mem we have to ensure only available SSA names
3922 end up in base and the only convenient way to make this work
3923 for PRE is to re-valueize with that in mind. */
3924 ops_for_ref.create (operands.length ());
3925 ops_for_ref.quick_grow (operands.length ());
3926 memcpy (ops_for_ref.address (),
3927 operands.address (),
3928 sizeof (vn_reference_op_s)
3929 * operands.length ());
3930 valueize_refs_1 (&ops_for_ref, &valueized_p, true);
3932 if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
3933 ops_for_ref))
3934 *vnresult
3935 = ((vn_reference_t)
3936 walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
3937 vn_reference_lookup_3, vuse_valueize,
3938 limit, &data));
3939 if (ops_for_ref != shared_lookup_references)
3940 ops_for_ref.release ();
3941 gcc_checking_assert (vr1.operands == shared_lookup_references);
3942 if (*vnresult
3943 && data.same_val
3944 && (!(*vnresult)->result
3945 || !operand_equal_p ((*vnresult)->result, data.same_val)))
3947 *vnresult = NULL;
3948 return NULL_TREE;
3952 if (*vnresult)
3953 return (*vnresult)->result;
3955 return NULL_TREE;
3958 /* Lookup OP in the current hash table, and return the resulting value
3959 number if it exists in the hash table. Return NULL_TREE if it does
3960 not exist in the hash table or if the result field of the structure
 3961    was NULL. VNRESULT will be filled in with the vn_reference_t
3962 stored in the hashtable if one exists. When TBAA_P is false assume
3963 we are looking up a store and treat it as having alias-set zero.
 3964    *LAST_VUSE_PTR will be updated with the VUSE with which the value lookup succeeded.
3965 MASK is either NULL_TREE, or can be an INTEGER_CST if the result of the
3966 load is bitwise anded with MASK and so we are only interested in a subset
3967 of the bits and can ignore if the other bits are uninitialized or
3968 not initialized with constants. When doing redundant store removal
3969 the caller has to set REDUNDANT_STORE_REMOVAL_P. */
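/* A hedged usage sketch mirroring the BIT_AND_EXPR case in visit_nary_op
   below, where OP is the loaded reference, ASS its defining assignment and
   MASK the INTEGER_CST mask (names used only for this sketch); false is
   passed for REDUNDANT_STORE_REMOVAL_P since this is a load lookup:

     tree last_vuse = gimple_vuse (ass);
     tree val = vn_reference_lookup (op, gimple_vuse (ass),
				     default_vn_walk_kind, NULL, true,
				     &last_vuse, mask, false);  */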
3971 tree
3972 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
3973 vn_reference_t *vnresult, bool tbaa_p,
3974 tree *last_vuse_ptr, tree mask,
3975 bool redundant_store_removal_p)
3977 vec<vn_reference_op_s> operands;
3978 struct vn_reference_s vr1;
3979 bool valueized_anything;
3981 if (vnresult)
3982 *vnresult = NULL;
3984 vr1.vuse = vuse_ssa_val (vuse);
3985 vr1.operands = operands
3986 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
3988 /* Handle &MEM[ptr + 5].b[1].c as POINTER_PLUS_EXPR. Avoid doing
3989 this before the pass folding __builtin_object_size had a chance to run. */
3990 if ((cfun->curr_properties & PROP_objsz)
3991 && operands[0].opcode == ADDR_EXPR
3992 && operands.last ().opcode == SSA_NAME)
3994 poly_int64 off = 0;
3995 vn_reference_op_t vro;
3996 unsigned i;
3997 for (i = 1; operands.iterate (i, &vro); ++i)
3999 if (vro->opcode == SSA_NAME)
4000 break;
4001 else if (known_eq (vro->off, -1))
4002 break;
4003 off += vro->off;
4005 if (i == operands.length () - 1
 4006 	  /* Make sure the offset we accumulated in a 64bit int
4007 fits the address computation carried out in target
4008 offset precision. */
4009 && (off.coeffs[0]
4010 == sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
4012 gcc_assert (operands[i-1].opcode == MEM_REF);
4013 tree ops[2];
4014 ops[0] = operands[i].op0;
4015 ops[1] = wide_int_to_tree (sizetype, off);
4016 tree res = vn_nary_op_lookup_pieces (2, POINTER_PLUS_EXPR,
4017 TREE_TYPE (op), ops, NULL);
4018 if (res)
4019 return res;
4020 return NULL_TREE;
4024 vr1.type = TREE_TYPE (op);
4025 ao_ref op_ref;
4026 ao_ref_init (&op_ref, op);
4027 vr1.set = ao_ref_alias_set (&op_ref);
4028 vr1.base_set = ao_ref_base_alias_set (&op_ref);
4029 vr1.offset = 0;
4030 vr1.max_size = -1;
4031 vr1.hashcode = vn_reference_compute_hash (&vr1);
4032 if (mask == NULL_TREE)
4033 if (tree cst = fully_constant_vn_reference_p (&vr1))
4034 return cst;
4036 if (kind != VN_NOWALK && vr1.vuse)
4038 vn_reference_t wvnresult;
4039 ao_ref r;
4040 unsigned limit = param_sccvn_max_alias_queries_per_access;
4041 auto_vec<vn_reference_op_s> ops_for_ref;
4042 if (valueized_anything)
4044 copy_reference_ops_from_ref (op, &ops_for_ref);
4045 bool tem;
4046 valueize_refs_1 (&ops_for_ref, &tem, true);
4048 /* Make sure to use a valueized reference if we valueized anything.
4049 Otherwise preserve the full reference for advanced TBAA. */
4050 if (!valueized_anything
4051 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
4052 vr1.type, ops_for_ref))
4054 ao_ref_init (&r, op);
4055 /* Record the extra info we're getting from the full ref. */
4056 ao_ref_base (&r);
4057 vr1.offset = r.offset;
4058 vr1.max_size = r.max_size;
4060 vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
4061 last_vuse_ptr, kind, tbaa_p, mask,
4062 redundant_store_removal_p);
4064 wvnresult
4065 = ((vn_reference_t)
4066 walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2,
4067 vn_reference_lookup_3, vuse_valueize, limit,
4068 &data));
4069 gcc_checking_assert (vr1.operands == shared_lookup_references);
4070 if (wvnresult)
4072 gcc_assert (mask == NULL_TREE);
4073 if (data.same_val
4074 && (!wvnresult->result
4075 || !operand_equal_p (wvnresult->result, data.same_val)))
4076 return NULL_TREE;
4077 if (vnresult)
4078 *vnresult = wvnresult;
4079 return wvnresult->result;
4081 else if (mask)
4082 return data.masked_result;
4084 return NULL_TREE;
4087 if (last_vuse_ptr)
4088 *last_vuse_ptr = vr1.vuse;
4089 if (mask)
4090 return NULL_TREE;
4091 return vn_reference_lookup_1 (&vr1, vnresult);
4094 /* Lookup CALL in the current hash table and return the entry in
4095 *VNRESULT if found. Populates *VR for the hashtable lookup. */
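/* Sketch of the calling pattern used by visit_reference_op_call below;
   CALL_STMT stands in for the gcall being visited and VR1 provides the
   storage for the hash entry built for the lookup:

     struct vn_reference_s vr1;
     vn_reference_t vnresult = NULL;
     vn_reference_lookup_call (call_stmt, &vnresult, &vr1);
     if (vnresult)
       ... reuse vnresult->result ...  */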
4097 void
4098 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
4099 vn_reference_t vr)
4101 if (vnresult)
4102 *vnresult = NULL;
4104 tree vuse = gimple_vuse (call);
4106 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
4107 vr->operands = valueize_shared_reference_ops_from_call (call);
4108 tree lhs = gimple_call_lhs (call);
 4109   /* For non-SSA return values the reference ops contain the LHS. */
4110 vr->type = ((lhs && TREE_CODE (lhs) == SSA_NAME)
4111 ? TREE_TYPE (lhs) : NULL_TREE);
4112 vr->punned = false;
4113 vr->set = 0;
4114 vr->base_set = 0;
4115 vr->offset = 0;
4116 vr->max_size = -1;
4117 vr->hashcode = vn_reference_compute_hash (vr);
4118 vn_reference_lookup_1 (vr, vnresult);
4121 /* Insert OP into the current hash table with a value number of RESULT. */
4123 static void
4124 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
4126 vn_reference_s **slot;
4127 vn_reference_t vr1;
4128 bool tem;
4130 vec<vn_reference_op_s> operands
4131 = valueize_shared_reference_ops_from_ref (op, &tem);
4132 /* Handle &MEM[ptr + 5].b[1].c as POINTER_PLUS_EXPR. Avoid doing this
4133 before the pass folding __builtin_object_size had a chance to run. */
4134 if ((cfun->curr_properties & PROP_objsz)
4135 && operands[0].opcode == ADDR_EXPR
4136 && operands.last ().opcode == SSA_NAME)
4138 poly_int64 off = 0;
4139 vn_reference_op_t vro;
4140 unsigned i;
4141 for (i = 1; operands.iterate (i, &vro); ++i)
4143 if (vro->opcode == SSA_NAME)
4144 break;
4145 else if (known_eq (vro->off, -1))
4146 break;
4147 off += vro->off;
4149 if (i == operands.length () - 1
 4150 	  /* Make sure the offset we accumulated in a 64bit int
4151 fits the address computation carried out in target
4152 offset precision. */
4153 && (off.coeffs[0]
4154 == sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
4156 gcc_assert (operands[i-1].opcode == MEM_REF);
4157 tree ops[2];
4158 ops[0] = operands[i].op0;
4159 ops[1] = wide_int_to_tree (sizetype, off);
4160 vn_nary_op_insert_pieces (2, POINTER_PLUS_EXPR,
4161 TREE_TYPE (op), ops, result,
4162 VN_INFO (result)->value_id);
4163 return;
4167 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
4168 if (TREE_CODE (result) == SSA_NAME)
4169 vr1->value_id = VN_INFO (result)->value_id;
4170 else
4171 vr1->value_id = get_or_alloc_constant_value_id (result);
4172 vr1->vuse = vuse_ssa_val (vuse);
4173 vr1->operands = operands.copy ();
4174 vr1->type = TREE_TYPE (op);
4175 vr1->punned = false;
4176 ao_ref op_ref;
4177 ao_ref_init (&op_ref, op);
4178 vr1->set = ao_ref_alias_set (&op_ref);
4179 vr1->base_set = ao_ref_base_alias_set (&op_ref);
4180 /* Specifically use an unknown extent here, we're not doing any lookup
4181 and assume the caller didn't either (or it went VARYING). */
4182 vr1->offset = 0;
4183 vr1->max_size = -1;
4184 vr1->hashcode = vn_reference_compute_hash (vr1);
4185 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
4186 vr1->result_vdef = vdef;
4188 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
4189 INSERT);
4191 /* Because IL walking on reference lookup can end up visiting
4192 a def that is only to be visited later in iteration order
4193 when we are about to make an irreducible region reducible
 4194      the def can be effectively processed and its ref inserted
4195 by vn_reference_lookup_3 already. So we cannot assert (!*slot)
4196 but save a lookup if we deal with already inserted refs here. */
4197 if (*slot)
4199 /* We cannot assert that we have the same value either because
4200 when disentangling an irreducible region we may end up visiting
4201 a use before the corresponding def. That's a missed optimization
4202 only though. See gcc.dg/tree-ssa/pr87126.c for example. */
4203 if (dump_file && (dump_flags & TDF_DETAILS)
4204 && !operand_equal_p ((*slot)->result, vr1->result, 0))
4206 fprintf (dump_file, "Keeping old value ");
4207 print_generic_expr (dump_file, (*slot)->result);
4208 fprintf (dump_file, " because of collision\n");
4210 free_reference (vr1);
4211 obstack_free (&vn_tables_obstack, vr1);
4212 return;
4215 *slot = vr1;
4216 vr1->next = last_inserted_ref;
4217 last_inserted_ref = vr1;
 4220 /* Insert a reference by its pieces into the current hash table with
4221 a value number of RESULT. Return the resulting reference
4222 structure we created. */
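/* Hedged example; 0 / -1 for OFFSET / MAX_SIZE record an unknown extent
   as in vn_reference_insert above, and the vector is stored (and
   valueized) directly, so callers usually hand over a copy they give up:

     vn_reference_insert_pieces (vuse, set, base_set, 0, -1, type,
				 operands.copy (), result, value_id);  */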
4224 vn_reference_t
4225 vn_reference_insert_pieces (tree vuse, alias_set_type set,
4226 alias_set_type base_set,
4227 poly_int64 offset, poly_int64 max_size, tree type,
4228 vec<vn_reference_op_s> operands,
4229 tree result, unsigned int value_id)
4232 vn_reference_s **slot;
4233 vn_reference_t vr1;
4235 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
4236 vr1->value_id = value_id;
4237 vr1->vuse = vuse_ssa_val (vuse);
4238 vr1->operands = operands;
4239 valueize_refs (&vr1->operands);
4240 vr1->type = type;
4241 vr1->punned = false;
4242 vr1->set = set;
4243 vr1->base_set = base_set;
4244 vr1->offset = offset;
4245 vr1->max_size = max_size;
4246 vr1->hashcode = vn_reference_compute_hash (vr1);
4247 if (result && TREE_CODE (result) == SSA_NAME)
4248 result = SSA_VAL (result);
4249 vr1->result = result;
4250 vr1->result_vdef = NULL_TREE;
4252 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
4253 INSERT);
4255 /* At this point we should have all the things inserted that we have
4256 seen before, and we should never try inserting something that
4257 already exists. */
4258 gcc_assert (!*slot);
4260 *slot = vr1;
4261 vr1->next = last_inserted_ref;
4262 last_inserted_ref = vr1;
4263 return vr1;
 4266 /* Compute and return the hash value for nary operation VNO1. */
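/* As an illustration, the canonicalization below makes a + b and b + a
   (and, via swap_tree_comparison, a < b and b > a) hash identically so
   they can be unified by vn_nary_op_eq.  */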
4268 hashval_t
4269 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
4271 inchash::hash hstate;
4272 unsigned i;
4274 if (((vno1->length == 2
4275 && commutative_tree_code (vno1->opcode))
4276 || (vno1->length == 3
4277 && commutative_ternary_tree_code (vno1->opcode)))
4278 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
4279 std::swap (vno1->op[0], vno1->op[1]);
4280 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
4281 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
4283 std::swap (vno1->op[0], vno1->op[1]);
4284 vno1->opcode = swap_tree_comparison (vno1->opcode);
4287 hstate.add_int (vno1->opcode);
4288 for (i = 0; i < vno1->length; ++i)
4289 inchash::add_expr (vno1->op[i], hstate);
4291 return hstate.end ();
4294 /* Compare nary operations VNO1 and VNO2 and return true if they are
4295 equivalent. */
4297 bool
4298 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
4300 unsigned i;
4302 if (vno1->hashcode != vno2->hashcode)
4303 return false;
4305 if (vno1->length != vno2->length)
4306 return false;
4308 if (vno1->opcode != vno2->opcode
4309 || !types_compatible_p (vno1->type, vno2->type))
4310 return false;
4312 for (i = 0; i < vno1->length; ++i)
4313 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
4314 return false;
 4316   /* BIT_INSERT_EXPR has an implicit operand as the type precision
4317 of op1. Need to check to make sure they are the same. */
4318 if (vno1->opcode == BIT_INSERT_EXPR
4319 && TREE_CODE (vno1->op[1]) == INTEGER_CST
4320 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
4321 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
4322 return false;
4324 return true;
4327 /* Initialize VNO from the pieces provided. */
4329 static void
4330 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
4331 enum tree_code code, tree type, tree *ops)
4333 vno->opcode = code;
4334 vno->length = length;
4335 vno->type = type;
4336 memcpy (&vno->op[0], ops, sizeof (tree) * length);
4339 /* Return the number of operands for a vn_nary ops structure from STMT. */
4341 unsigned int
4342 vn_nary_length_from_stmt (gimple *stmt)
4344 switch (gimple_assign_rhs_code (stmt))
4346 case REALPART_EXPR:
4347 case IMAGPART_EXPR:
4348 case VIEW_CONVERT_EXPR:
4349 return 1;
4351 case BIT_FIELD_REF:
4352 return 3;
4354 case CONSTRUCTOR:
4355 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
4357 default:
4358 return gimple_num_ops (stmt) - 1;
4362 /* Initialize VNO from STMT. */
4364 void
4365 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gassign *stmt)
4367 unsigned i;
4369 vno->opcode = gimple_assign_rhs_code (stmt);
4370 vno->type = TREE_TYPE (gimple_assign_lhs (stmt));
4371 switch (vno->opcode)
4373 case REALPART_EXPR:
4374 case IMAGPART_EXPR:
4375 case VIEW_CONVERT_EXPR:
4376 vno->length = 1;
4377 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
4378 break;
4380 case BIT_FIELD_REF:
4381 vno->length = 3;
4382 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
4383 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
4384 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
4385 break;
4387 case CONSTRUCTOR:
4388 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
4389 for (i = 0; i < vno->length; ++i)
4390 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
4391 break;
4393 default:
4394 gcc_checking_assert (!gimple_assign_single_p (stmt));
4395 vno->length = gimple_num_ops (stmt) - 1;
4396 for (i = 0; i < vno->length; ++i)
4397 vno->op[i] = gimple_op (stmt, i + 1);
4401 /* Compute the hashcode for VNO and look for it in the hash table;
4402 return the resulting value number if it exists in the hash table.
4403 Return NULL_TREE if it does not exist in the hash table or if the
4404 result field of the operation is NULL. VNRESULT will contain the
4405 vn_nary_op_t from the hashtable if it exists. */
4407 static tree
4408 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
4410 vn_nary_op_s **slot;
4412 if (vnresult)
4413 *vnresult = NULL;
4415 for (unsigned i = 0; i < vno->length; ++i)
4416 if (TREE_CODE (vno->op[i]) == SSA_NAME)
4417 vno->op[i] = SSA_VAL (vno->op[i]);
4419 vno->hashcode = vn_nary_op_compute_hash (vno);
4420 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
4421 if (!slot)
4422 return NULL_TREE;
4423 if (vnresult)
4424 *vnresult = *slot;
4425 return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
 4428 /* Lookup an n-ary operation by its pieces and return the resulting value
4429 number if it exists in the hash table. Return NULL_TREE if it does
4430 not exist in the hash table or if the result field of the operation
4431 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
4432 if it exists. */
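/* A hedged sketch mirroring the POINTER_PLUS_EXPR lookup in
   vn_reference_lookup above (ptr_op and off_cst are stand-ins for the
   actual operands):

     tree ops[2] = { ptr_op, off_cst };
     tree val = vn_nary_op_lookup_pieces (2, POINTER_PLUS_EXPR, type,
					  ops, NULL);  */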
4434 tree
4435 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
4436 tree type, tree *ops, vn_nary_op_t *vnresult)
4438 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
4439 sizeof_vn_nary_op (length));
4440 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4441 return vn_nary_op_lookup_1 (vno1, vnresult);
4444 /* Lookup the rhs of STMT in the current hash table, and return the resulting
4445 value number if it exists in the hash table. Return NULL_TREE if
4446 it does not exist in the hash table. VNRESULT will contain the
4447 vn_nary_op_t from the hashtable if it exists. */
4449 tree
4450 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
4452 vn_nary_op_t vno1
4453 = XALLOCAVAR (struct vn_nary_op_s,
4454 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
4455 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
4456 return vn_nary_op_lookup_1 (vno1, vnresult);
4459 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
4461 vn_nary_op_t
4462 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
4464 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
 4467 /* Allocate and initialize a vn_nary_op_t on the VN tables
 4468    obstack. */
4470 static vn_nary_op_t
4471 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
4473 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
4475 vno1->value_id = value_id;
4476 vno1->length = length;
4477 vno1->predicated_values = 0;
4478 vno1->u.result = result;
4480 return vno1;
4483 /* Insert VNO into TABLE. */
4485 static vn_nary_op_t
4486 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table)
4488 vn_nary_op_s **slot;
4490 gcc_assert (! vno->predicated_values
4491 || (! vno->u.values->next
4492 && vno->u.values->n == 1));
4494 for (unsigned i = 0; i < vno->length; ++i)
4495 if (TREE_CODE (vno->op[i]) == SSA_NAME)
4496 vno->op[i] = SSA_VAL (vno->op[i]);
4498 vno->hashcode = vn_nary_op_compute_hash (vno);
4499 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
4500 vno->unwind_to = *slot;
4501 if (*slot)
4503 /* Prefer non-predicated values.
4504 ??? Only if those are constant, otherwise, with constant predicated
4505 value, turn them into predicated values with entry-block validity
4506 (??? but we always find the first valid result currently). */
4507 if ((*slot)->predicated_values
4508 && ! vno->predicated_values)
4510 /* ??? We cannot remove *slot from the unwind stack list.
4511 For the moment we deal with this by skipping not found
4512 entries but this isn't ideal ... */
4513 *slot = vno;
4514 /* ??? Maintain a stack of states we can unwind in
4515 vn_nary_op_s? But how far do we unwind? In reality
4516 we need to push change records somewhere... Or not
4517 unwind vn_nary_op_s and linking them but instead
4518 unwind the results "list", linking that, which also
4519 doesn't move on hashtable resize. */
4520 /* We can also have a ->unwind_to recording *slot there.
4521 That way we can make u.values a fixed size array with
4522 recording the number of entries but of course we then
4523 have always N copies for each unwind_to-state. Or we
4524 make sure to only ever append and each unwinding will
4525 pop off one entry (but how to deal with predicated
4526 replaced with non-predicated here?) */
4527 vno->next = last_inserted_nary;
4528 last_inserted_nary = vno;
4529 return vno;
4531 else if (vno->predicated_values
4532 && ! (*slot)->predicated_values)
4533 return *slot;
4534 else if (vno->predicated_values
4535 && (*slot)->predicated_values)
 4537 	  /* ??? Factor this all into an insert_single_predicated_value
4538 routine. */
4539 gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
4540 basic_block vno_bb
4541 = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
4542 vn_pval *nval = vno->u.values;
4543 vn_pval **next = &vno->u.values;
4544 bool found = false;
4545 for (vn_pval *val = (*slot)->u.values; val; val = val->next)
4547 if (expressions_equal_p (val->result, nval->result))
4549 found = true;
4550 for (unsigned i = 0; i < val->n; ++i)
4552 basic_block val_bb
4553 = BASIC_BLOCK_FOR_FN (cfun,
4554 val->valid_dominated_by_p[i]);
4555 if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
4556 /* Value registered with more generic predicate. */
4557 return *slot;
4558 else if (flag_checking)
4559 /* Shouldn't happen, we insert in RPO order. */
4560 gcc_assert (!dominated_by_p (CDI_DOMINATORS,
4561 val_bb, vno_bb));
4563 /* Append value. */
4564 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4565 sizeof (vn_pval)
4566 + val->n * sizeof (int));
4567 (*next)->next = NULL;
4568 (*next)->result = val->result;
4569 (*next)->n = val->n + 1;
4570 memcpy ((*next)->valid_dominated_by_p,
4571 val->valid_dominated_by_p,
4572 val->n * sizeof (int));
4573 (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
4574 next = &(*next)->next;
4575 if (dump_file && (dump_flags & TDF_DETAILS))
4576 fprintf (dump_file, "Appending predicate to value.\n");
4577 continue;
4579 /* Copy other predicated values. */
4580 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4581 sizeof (vn_pval)
4582 + (val->n-1) * sizeof (int));
4583 memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
4584 (*next)->next = NULL;
4585 next = &(*next)->next;
4587 if (!found)
4588 *next = nval;
4590 *slot = vno;
4591 vno->next = last_inserted_nary;
4592 last_inserted_nary = vno;
4593 return vno;
4596 /* While we do not want to insert things twice it's awkward to
4597 avoid it in the case where visit_nary_op pattern-matches stuff
4598 and ends up simplifying the replacement to itself. We then
4599 get two inserts, one from visit_nary_op and one from
4600 vn_nary_build_or_lookup.
4601 So allow inserts with the same value number. */
4602 if ((*slot)->u.result == vno->u.result)
4603 return *slot;
 4606   /* ??? There's also optimistic vs. previous committed state merging
4607 that is problematic for the case of unwinding. */
4609 /* ??? We should return NULL if we do not use 'vno' and have the
4610 caller release it. */
4611 gcc_assert (!*slot);
4613 *slot = vno;
4614 vno->next = last_inserted_nary;
4615 last_inserted_nary = vno;
4616 return vno;
 4619 /* Insert an n-ary operation into the current hash table using its
4620 pieces. Return the vn_nary_op_t structure we created and put in
4621 the hashtable. */
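/* Counterpart to the lookup sketch above; it mirrors the POINTER_PLUS_EXPR
   insertion done in vn_reference_insert earlier in this file:

     vn_nary_op_insert_pieces (2, POINTER_PLUS_EXPR, TREE_TYPE (op), ops,
			       result, VN_INFO (result)->value_id);  */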
4623 vn_nary_op_t
4624 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
4625 tree type, tree *ops,
4626 tree result, unsigned int value_id)
4628 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
4629 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4630 return vn_nary_op_insert_into (vno1, valid_info->nary);
4633 /* Return whether we can track a predicate valid when PRED_E is executed. */
4635 static bool
4636 can_track_predicate_on_edge (edge pred_e)
4638 /* ??? As we are currently recording the destination basic-block index in
4639 vn_pval.valid_dominated_by_p and using dominance for the
4640 validity check we cannot track predicates on all edges. */
4641 if (single_pred_p (pred_e->dest))
4642 return true;
4643 /* Never record for backedges. */
4644 if (pred_e->flags & EDGE_DFS_BACK)
4645 return false;
4646 /* When there's more than one predecessor we cannot track
4647 predicate validity based on the destination block. The
 4648      exception is when all other incoming edges' sources are
4649 dominated by the destination block. */
4650 edge_iterator ei;
4651 edge e;
4652 FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
4653 if (e != pred_e && ! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4654 return false;
4655 return true;
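/* Insert an n-ary operation as above, but record RESULT as a predicated
   value that is only valid in blocks dominated by the destination of
   PRED_E.  For example, for a condition if (x != 0) the true edge records
   the value true for x != 0, matching the dump output below.  */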
4658 static vn_nary_op_t
4659 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
4660 tree type, tree *ops,
4661 tree result, unsigned int value_id,
4662 edge pred_e)
4664 gcc_assert (can_track_predicate_on_edge (pred_e));
4666 if (dump_file && (dump_flags & TDF_DETAILS)
4667 /* ??? Fix dumping, but currently we only get comparisons. */
4668 && TREE_CODE_CLASS (code) == tcc_comparison)
4670 fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
4671 pred_e->dest->index);
4672 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4673 fprintf (dump_file, " %s ", get_tree_code_name (code));
4674 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4675 fprintf (dump_file, " == %s\n",
4676 integer_zerop (result) ? "false" : "true");
4678 vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
4679 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4680 vno1->predicated_values = 1;
4681 vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4682 sizeof (vn_pval));
4683 vno1->u.values->next = NULL;
4684 vno1->u.values->result = result;
4685 vno1->u.values->n = 1;
4686 vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
4687 return vn_nary_op_insert_into (vno1, valid_info->nary);
4690 static bool
4691 dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool);
4693 static tree
4694 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb,
4695 edge e = NULL)
4697 if (! vno->predicated_values)
4698 return vno->u.result;
4699 for (vn_pval *val = vno->u.values; val; val = val->next)
4700 for (unsigned i = 0; i < val->n; ++i)
4702 basic_block cand
4703 = BASIC_BLOCK_FOR_FN (cfun, val->valid_dominated_by_p[i]);
4704 /* Do not handle backedge executability optimistically since
4705 when figuring out whether to iterate we do not consider
4706 changed predication.
4707 When asking for predicated values on an edge avoid looking
4708 at edge executability for edges forward in our iteration
4709 as well. */
4710 if (e && (e->flags & EDGE_DFS_BACK))
4712 if (dominated_by_p (CDI_DOMINATORS, bb, cand))
4713 return val->result;
4715 else if (dominated_by_p_w_unex (bb, cand, false))
4716 return val->result;
4718 return NULL_TREE;
4721 static tree
4722 vn_nary_op_get_predicated_value (vn_nary_op_t vno, edge e)
4724 return vn_nary_op_get_predicated_value (vno, e->src, e);
4727 /* Insert the rhs of STMT into the current hash table with a value number of
4728 RESULT. */
4730 static vn_nary_op_t
4731 vn_nary_op_insert_stmt (gimple *stmt, tree result)
4733 vn_nary_op_t vno1
4734 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
4735 result, VN_INFO (result)->value_id);
4736 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
4737 return vn_nary_op_insert_into (vno1, valid_info->nary);
4740 /* Compute a hashcode for PHI operation VP1 and return it. */
4742 static inline hashval_t
4743 vn_phi_compute_hash (vn_phi_t vp1)
4745 inchash::hash hstate;
4746 tree phi1op;
4747 tree type;
4748 edge e;
4749 edge_iterator ei;
4751 hstate.add_int (EDGE_COUNT (vp1->block->preds));
4752 switch (EDGE_COUNT (vp1->block->preds))
4754 case 1:
4755 break;
4756 case 2:
4757 /* When this is a PHI node subject to CSE for different blocks
4758 avoid hashing the block index. */
4759 if (vp1->cclhs)
4760 break;
4761 /* Fallthru. */
4762 default:
4763 hstate.add_int (vp1->block->index);
4766 /* If all PHI arguments are constants we need to distinguish
4767 the PHI node via its type. */
4768 type = vp1->type;
4769 hstate.merge_hash (vn_hash_type (type));
4771 FOR_EACH_EDGE (e, ei, vp1->block->preds)
4773 /* Don't hash backedge values they need to be handled as VN_TOP
4774 for optimistic value-numbering. */
4775 if (e->flags & EDGE_DFS_BACK)
4776 continue;
4778 phi1op = vp1->phiargs[e->dest_idx];
4779 if (phi1op == VN_TOP)
4780 continue;
4781 inchash::add_expr (phi1op, hstate);
4784 return hstate.end ();
4788 /* Return true if COND1 and COND2 represent the same condition, set
4789 *INVERTED_P if one needs to be inverted to make it the same as
4790 the other. */
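/* For example, a < b and b > a are the same condition with operands
   swapped, while a < b and a >= b are inverses and set *INVERTED_P,
   subject to the HONOR_NANS checks below.  */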
4792 static bool
4793 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
4794 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
4796 enum tree_code code1 = gimple_cond_code (cond1);
4797 enum tree_code code2 = gimple_cond_code (cond2);
4799 *inverted_p = false;
4800 if (code1 == code2)
4802 else if (code1 == swap_tree_comparison (code2))
4803 std::swap (lhs2, rhs2);
4804 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
4805 *inverted_p = true;
4806 else if (code1 == invert_tree_comparison
4807 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
4809 std::swap (lhs2, rhs2);
4810 *inverted_p = true;
4812 else
4813 return false;
4815 return ((expressions_equal_p (lhs1, lhs2)
4816 && expressions_equal_p (rhs1, rhs2))
4817 || (commutative_tree_code (code1)
4818 && expressions_equal_p (lhs1, rhs2)
4819 && expressions_equal_p (rhs1, lhs2)));
4822 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
4824 static int
4825 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
4827 if (vp1->hashcode != vp2->hashcode)
4828 return false;
4830 if (vp1->block != vp2->block)
4832 if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
4833 return false;
4835 switch (EDGE_COUNT (vp1->block->preds))
4837 case 1:
4838 /* Single-arg PHIs are just copies. */
4839 break;
4841 case 2:
4843 /* Make sure both PHIs are classified as CSEable. */
4844 if (! vp1->cclhs || ! vp2->cclhs)
4845 return false;
4847 /* Rule out backedges into the PHI. */
4848 gcc_checking_assert
4849 (vp1->block->loop_father->header != vp1->block
4850 && vp2->block->loop_father->header != vp2->block);
4852 /* If the PHI nodes do not have compatible types
4853 they are not the same. */
4854 if (!types_compatible_p (vp1->type, vp2->type))
4855 return false;
 4857 	/* If the immediate dominators end in switch stmts, multiple
4858 values may end up in the same PHI arg via intermediate
4859 CFG merges. */
4860 basic_block idom1
4861 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4862 basic_block idom2
4863 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
4864 gcc_checking_assert (EDGE_COUNT (idom1->succs) == 2
4865 && EDGE_COUNT (idom2->succs) == 2);
4867 /* Verify the controlling stmt is the same. */
4868 gcond *last1 = as_a <gcond *> (*gsi_last_bb (idom1));
4869 gcond *last2 = as_a <gcond *> (*gsi_last_bb (idom2));
4870 bool inverted_p;
4871 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
4872 last2, vp2->cclhs, vp2->ccrhs,
4873 &inverted_p))
4874 return false;
4876 /* Get at true/false controlled edges into the PHI. */
4877 edge te1, te2, fe1, fe2;
4878 if (! extract_true_false_controlled_edges (idom1, vp1->block,
4879 &te1, &fe1)
4880 || ! extract_true_false_controlled_edges (idom2, vp2->block,
4881 &te2, &fe2))
4882 return false;
4884 /* Swap edges if the second condition is the inverted of the
4885 first. */
4886 if (inverted_p)
4887 std::swap (te2, fe2);
4889 /* Since we do not know which edge will be executed we have
4890 to be careful when matching VN_TOP. Be conservative and
 4891 	   only match VN_TOP == VN_TOP for now; we could allow
 4892 	   VN_TOP on the non-prevailing PHI though. See for example
4893 PR102920. */
4894 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
4895 vp2->phiargs[te2->dest_idx], false)
4896 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
4897 vp2->phiargs[fe2->dest_idx], false))
4898 return false;
4900 return true;
4903 default:
4904 return false;
4908 /* If the PHI nodes do not have compatible types
4909 they are not the same. */
4910 if (!types_compatible_p (vp1->type, vp2->type))
4911 return false;
 4913   /* Any phi in the same block will have its arguments in the
4914 same edge order, because of how we store phi nodes. */
4915 unsigned nargs = EDGE_COUNT (vp1->block->preds);
4916 for (unsigned i = 0; i < nargs; ++i)
4918 tree phi1op = vp1->phiargs[i];
4919 tree phi2op = vp2->phiargs[i];
4920 if (phi1op == phi2op)
4921 continue;
4922 if (!expressions_equal_p (phi1op, phi2op, false))
4923 return false;
4926 return true;
4929 /* Lookup PHI in the current hash table, and return the resulting
4930 value number if it exists in the hash table. Return NULL_TREE if
4931 it does not exist in the hash table. */
4933 static tree
4934 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
4936 vn_phi_s **slot;
4937 struct vn_phi_s *vp1;
4938 edge e;
4939 edge_iterator ei;
4941 vp1 = XALLOCAVAR (struct vn_phi_s,
4942 sizeof (struct vn_phi_s)
4943 + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
4945 /* Canonicalize the SSA_NAME's to their value number. */
4946 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4948 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4949 if (TREE_CODE (def) == SSA_NAME
4950 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4952 if (!virtual_operand_p (def)
4953 && ssa_undefined_value_p (def, false))
4954 def = VN_TOP;
4955 else
4956 def = SSA_VAL (def);
4958 vp1->phiargs[e->dest_idx] = def;
4960 vp1->type = TREE_TYPE (gimple_phi_result (phi));
4961 vp1->block = gimple_bb (phi);
4962 /* Extract values of the controlling condition. */
4963 vp1->cclhs = NULL_TREE;
4964 vp1->ccrhs = NULL_TREE;
4965 if (EDGE_COUNT (vp1->block->preds) == 2
4966 && vp1->block->loop_father->header != vp1->block)
4968 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4969 if (EDGE_COUNT (idom1->succs) == 2)
4970 if (gcond *last1 = safe_dyn_cast <gcond *> (*gsi_last_bb (idom1)))
4972 /* ??? We want to use SSA_VAL here. But possibly not
4973 allow VN_TOP. */
4974 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4975 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4978 vp1->hashcode = vn_phi_compute_hash (vp1);
4979 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
4980 if (!slot)
4981 return NULL_TREE;
4982 return (*slot)->result;
4985 /* Insert PHI into the current hash table with a value number of
4986 RESULT. */
4988 static vn_phi_t
4989 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
4991 vn_phi_s **slot;
4992 vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
4993 sizeof (vn_phi_s)
4994 + ((gimple_phi_num_args (phi) - 1)
4995 * sizeof (tree)));
4996 edge e;
4997 edge_iterator ei;
4999 /* Canonicalize the SSA_NAME's to their value number. */
5000 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
5002 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5003 if (TREE_CODE (def) == SSA_NAME
5004 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
5006 if (!virtual_operand_p (def)
5007 && ssa_undefined_value_p (def, false))
5008 def = VN_TOP;
5009 else
5010 def = SSA_VAL (def);
5012 vp1->phiargs[e->dest_idx] = def;
5014 vp1->value_id = VN_INFO (result)->value_id;
5015 vp1->type = TREE_TYPE (gimple_phi_result (phi));
5016 vp1->block = gimple_bb (phi);
5017 /* Extract values of the controlling condition. */
5018 vp1->cclhs = NULL_TREE;
5019 vp1->ccrhs = NULL_TREE;
5020 if (EDGE_COUNT (vp1->block->preds) == 2
5021 && vp1->block->loop_father->header != vp1->block)
5023 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
5024 if (EDGE_COUNT (idom1->succs) == 2)
5025 if (gcond *last1 = safe_dyn_cast <gcond *> (*gsi_last_bb (idom1)))
5027 /* ??? We want to use SSA_VAL here. But possibly not
5028 allow VN_TOP. */
5029 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
5030 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
5033 vp1->result = result;
5034 vp1->hashcode = vn_phi_compute_hash (vp1);
5036 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
5037 gcc_assert (!*slot);
5039 *slot = vp1;
5040 vp1->next = last_inserted_phi;
5041 last_inserted_phi = vp1;
5042 return vp1;
5046 /* Return true if BB1 is dominated by BB2 taking into account edges
5047 that are not executable. When ALLOW_BACK is false consider not
5048 executable backedges as executable. */
5050 static bool
5051 dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool allow_back)
5053 edge_iterator ei;
5054 edge e;
5056 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
5057 return true;
 5059   /* Before iterating we'd like to know if there exists an
 5060      (executable) path from bb2 to bb1 at all; if not we can
5061 directly return false. For now simply iterate once. */
5063 /* Iterate to the single executable bb1 predecessor. */
5064 if (EDGE_COUNT (bb1->preds) > 1)
5066 edge prede = NULL;
5067 FOR_EACH_EDGE (e, ei, bb1->preds)
5068 if ((e->flags & EDGE_EXECUTABLE)
5069 || (!allow_back && (e->flags & EDGE_DFS_BACK)))
5071 if (prede)
5073 prede = NULL;
5074 break;
5076 prede = e;
5078 if (prede)
5080 bb1 = prede->src;
5082 /* Re-do the dominance check with changed bb1. */
5083 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
5084 return true;
5088 /* Iterate to the single executable bb2 successor. */
5089 if (EDGE_COUNT (bb2->succs) > 1)
5091 edge succe = NULL;
5092 FOR_EACH_EDGE (e, ei, bb2->succs)
5093 if ((e->flags & EDGE_EXECUTABLE)
5094 || (!allow_back && (e->flags & EDGE_DFS_BACK)))
5096 if (succe)
5098 succe = NULL;
5099 break;
5101 succe = e;
5103 if (succe)
5105 /* Verify the reached block is only reached through succe.
5106 If there is only one edge we can spare us the dominator
5107 check and iterate directly. */
5108 if (EDGE_COUNT (succe->dest->preds) > 1)
5110 FOR_EACH_EDGE (e, ei, succe->dest->preds)
5111 if (e != succe
5112 && ((e->flags & EDGE_EXECUTABLE)
5113 || (!allow_back && (e->flags & EDGE_DFS_BACK))))
5115 succe = NULL;
5116 break;
5119 if (succe)
5121 bb2 = succe->dest;
5123 /* Re-do the dominance check with changed bb2. */
5124 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
5125 return true;
5130 /* We could now iterate updating bb1 / bb2. */
5131 return false;
5134 /* Set the value number of FROM to TO, return true if it has changed
5135 as a result. */
5137 static inline bool
5138 set_ssa_val_to (tree from, tree to)
5140 vn_ssa_aux_t from_info = VN_INFO (from);
5141 tree currval = from_info->valnum; // SSA_VAL (from)
5142 poly_int64 toff, coff;
5143 bool curr_undefined = false;
5144 bool curr_invariant = false;
5146 /* The only thing we allow as value numbers are ssa_names
5147 and invariants. So assert that here. We don't allow VN_TOP
5148 as visiting a stmt should produce a value-number other than
5149 that.
5150 ??? Still VN_TOP can happen for unreachable code, so force
5151 it to varying in that case. Not all code is prepared to
5152 get VN_TOP on valueization. */
5153 if (to == VN_TOP)
5155 /* ??? When iterating and visiting PHI <undef, backedge-value>
5156 for the first time we rightfully get VN_TOP and we need to
5157 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
5158 With SCCVN we were simply lucky we iterated the other PHI
5159 cycles first and thus visited the backedge-value DEF. */
5160 if (currval == VN_TOP)
5161 goto set_and_exit;
5162 if (dump_file && (dump_flags & TDF_DETAILS))
5163 fprintf (dump_file, "Forcing value number to varying on "
5164 "receiving VN_TOP\n");
5165 to = from;
5168 gcc_checking_assert (to != NULL_TREE
5169 && ((TREE_CODE (to) == SSA_NAME
5170 && (to == from || SSA_VAL (to) == to))
5171 || is_gimple_min_invariant (to)));
5173 if (from != to)
5175 if (currval == from)
5177 if (dump_file && (dump_flags & TDF_DETAILS))
5179 fprintf (dump_file, "Not changing value number of ");
5180 print_generic_expr (dump_file, from);
5181 fprintf (dump_file, " from VARYING to ");
5182 print_generic_expr (dump_file, to);
5183 fprintf (dump_file, "\n");
5185 return false;
5187 curr_invariant = is_gimple_min_invariant (currval);
5188 curr_undefined = (TREE_CODE (currval) == SSA_NAME
5189 && !virtual_operand_p (currval)
5190 && ssa_undefined_value_p (currval, false));
5191 if (currval != VN_TOP
5192 && !curr_invariant
5193 && !curr_undefined
5194 && is_gimple_min_invariant (to))
5196 if (dump_file && (dump_flags & TDF_DETAILS))
5198 fprintf (dump_file, "Forcing VARYING instead of changing "
5199 "value number of ");
5200 print_generic_expr (dump_file, from);
5201 fprintf (dump_file, " from ");
5202 print_generic_expr (dump_file, currval);
5203 fprintf (dump_file, " (non-constant) to ");
5204 print_generic_expr (dump_file, to);
5205 fprintf (dump_file, " (constant)\n");
5207 to = from;
5209 else if (currval != VN_TOP
5210 && !curr_undefined
5211 && TREE_CODE (to) == SSA_NAME
5212 && !virtual_operand_p (to)
5213 && ssa_undefined_value_p (to, false))
5215 if (dump_file && (dump_flags & TDF_DETAILS))
5217 fprintf (dump_file, "Forcing VARYING instead of changing "
5218 "value number of ");
5219 print_generic_expr (dump_file, from);
5220 fprintf (dump_file, " from ");
5221 print_generic_expr (dump_file, currval);
5222 fprintf (dump_file, " (non-undefined) to ");
5223 print_generic_expr (dump_file, to);
5224 fprintf (dump_file, " (undefined)\n");
5226 to = from;
5228 else if (TREE_CODE (to) == SSA_NAME
5229 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
5230 to = from;
5233 set_and_exit:
5234 if (dump_file && (dump_flags & TDF_DETAILS))
5236 fprintf (dump_file, "Setting value number of ");
5237 print_generic_expr (dump_file, from);
5238 fprintf (dump_file, " to ");
5239 print_generic_expr (dump_file, to);
5242 if (currval != to
5243 && !operand_equal_p (currval, to, 0)
5244 /* Different undefined SSA names are not actually different. See
 5245 	 PR82320 for a testcase where we'd otherwise not terminate iteration. */
5246 && !(curr_undefined
5247 && TREE_CODE (to) == SSA_NAME
5248 && !virtual_operand_p (to)
5249 && ssa_undefined_value_p (to, false))
5250 /* ??? For addresses involving volatile objects or types operand_equal_p
5251 does not reliably detect ADDR_EXPRs as equal. We know we are only
5252 getting invariant gimple addresses here, so can use
5253 get_addr_base_and_unit_offset to do this comparison. */
5254 && !(TREE_CODE (currval) == ADDR_EXPR
5255 && TREE_CODE (to) == ADDR_EXPR
5256 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
5257 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
5258 && known_eq (coff, toff)))
5260 if (to != from
5261 && currval != VN_TOP
5262 && !curr_undefined
5263 /* We do not want to allow lattice transitions from one value
5264 to another since that may lead to not terminating iteration
5265 (see PR95049). Since there's no convenient way to check
5266 for the allowed transition of VAL -> PHI (loop entry value,
5267 same on two PHIs, to same PHI result) we restrict the check
5268 to invariants. */
5269 && curr_invariant
5270 && is_gimple_min_invariant (to))
5272 if (dump_file && (dump_flags & TDF_DETAILS))
5273 fprintf (dump_file, " forced VARYING");
5274 to = from;
5276 if (dump_file && (dump_flags & TDF_DETAILS))
5277 fprintf (dump_file, " (changed)\n");
5278 from_info->valnum = to;
5279 return true;
5281 if (dump_file && (dump_flags & TDF_DETAILS))
5282 fprintf (dump_file, "\n");
5283 return false;
5286 /* Set all definitions in STMT to value number to themselves.
5287 Return true if a value number changed. */
5289 static bool
5290 defs_to_varying (gimple *stmt)
5292 bool changed = false;
5293 ssa_op_iter iter;
5294 def_operand_p defp;
5296 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
5298 tree def = DEF_FROM_PTR (defp);
5299 changed |= set_ssa_val_to (def, def);
5301 return changed;
5304 /* Visit a copy between LHS and RHS, return true if the value number
5305 changed. */
5307 static bool
5308 visit_copy (tree lhs, tree rhs)
5310 /* Valueize. */
5311 rhs = SSA_VAL (rhs);
5313 return set_ssa_val_to (lhs, rhs);
5316 /* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
5317 is the same. */
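/* For instance, with OP of type short and WIDE_TYPE int this finds an
   already valueized (int) OP, the source X of a truncation OP = (short) X
   when ALLOW_TRUNCATE is set (and X already has type int), or, for a
   constant OP, simply the constant extended to int.  */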
5319 static tree
5320 valueized_wider_op (tree wide_type, tree op, bool allow_truncate)
5322 if (TREE_CODE (op) == SSA_NAME)
5323 op = vn_valueize (op);
5325 /* Either we have the op widened available. */
5326 tree ops[3] = {};
5327 ops[0] = op;
5328 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
5329 wide_type, ops, NULL);
5330 if (tem)
5331 return tem;
5333 /* Or the op is truncated from some existing value. */
5334 if (allow_truncate && TREE_CODE (op) == SSA_NAME)
5336 gimple *def = SSA_NAME_DEF_STMT (op);
5337 if (is_gimple_assign (def)
5338 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
5340 tem = gimple_assign_rhs1 (def);
5341 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
5343 if (TREE_CODE (tem) == SSA_NAME)
5344 tem = vn_valueize (tem);
5345 return tem;
5350 /* For constants simply extend it. */
5351 if (TREE_CODE (op) == INTEGER_CST)
5352 return wide_int_to_tree (wide_type, wi::to_widest (op));
5354 return NULL_TREE;
5357 /* Visit a nary operator RHS, value number it, and return true if the
5358 value number of LHS has changed as a result. */
5360 static bool
5361 visit_nary_op (tree lhs, gassign *stmt)
5363 vn_nary_op_t vnresult;
5364 tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
5365 if (! result && vnresult)
5366 result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
5367 if (result)
5368 return set_ssa_val_to (lhs, result);
5370 /* Do some special pattern matching for redundancies of operations
5371 in different types. */
5372 enum tree_code code = gimple_assign_rhs_code (stmt);
5373 tree type = TREE_TYPE (lhs);
5374 tree rhs1 = gimple_assign_rhs1 (stmt);
5375 switch (code)
5377 CASE_CONVERT:
5378 /* Match arithmetic done in a different type where we can easily
5379 substitute the result from some earlier sign-changed or widened
5380 operation. */
5381 if (INTEGRAL_TYPE_P (type)
5382 && TREE_CODE (rhs1) == SSA_NAME
5383 /* We only handle sign-changes, zero-extension -> & mask or
5384 sign-extension if we know the inner operation doesn't
5385 overflow. */
5386 && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
5387 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
5388 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
5389 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
5390 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
5392 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
5393 if (def
5394 && (gimple_assign_rhs_code (def) == PLUS_EXPR
5395 || gimple_assign_rhs_code (def) == MINUS_EXPR
5396 || gimple_assign_rhs_code (def) == MULT_EXPR))
5398 tree ops[3] = {};
5399 /* When requiring a sign-extension we cannot model a
5400 previous truncation with a single op so don't bother. */
5401 bool allow_truncate = TYPE_UNSIGNED (TREE_TYPE (rhs1));
5402 /* Either we have the op widened available. */
5403 ops[0] = valueized_wider_op (type, gimple_assign_rhs1 (def),
5404 allow_truncate);
5405 if (ops[0])
5406 ops[1] = valueized_wider_op (type, gimple_assign_rhs2 (def),
5407 allow_truncate);
5408 if (ops[0] && ops[1])
5410 ops[0] = vn_nary_op_lookup_pieces
5411 (2, gimple_assign_rhs_code (def), type, ops, NULL);
5412 /* We have wider operation available. */
5413 if (ops[0]
5414 /* If the leader is a wrapping operation we can
5415 insert it for code hoisting w/o introducing
5416 undefined overflow. If it is not it has to
5417 be available. See PR86554. */
5418 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
5419 || (rpo_avail && vn_context_bb
5420 && rpo_avail->eliminate_avail (vn_context_bb,
5421 ops[0]))))
5423 unsigned lhs_prec = TYPE_PRECISION (type);
5424 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
5425 if (lhs_prec == rhs_prec
5426 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
5427 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
5429 gimple_match_op match_op (gimple_match_cond::UNCOND,
5430 NOP_EXPR, type, ops[0]);
5431 result = vn_nary_build_or_lookup (&match_op);
5432 if (result)
5434 bool changed = set_ssa_val_to (lhs, result);
5435 vn_nary_op_insert_stmt (stmt, result);
5436 return changed;
5439 else
5441 tree mask = wide_int_to_tree
5442 (type, wi::mask (rhs_prec, false, lhs_prec));
5443 gimple_match_op match_op (gimple_match_cond::UNCOND,
5444 BIT_AND_EXPR,
5445 TREE_TYPE (lhs),
5446 ops[0], mask);
5447 result = vn_nary_build_or_lookup (&match_op);
5448 if (result)
5450 bool changed = set_ssa_val_to (lhs, result);
5451 vn_nary_op_insert_stmt (stmt, result);
5452 return changed;
5459 break;
5460 case BIT_AND_EXPR:
5461 if (INTEGRAL_TYPE_P (type)
5462 && TREE_CODE (rhs1) == SSA_NAME
5463 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
5464 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
5465 && default_vn_walk_kind != VN_NOWALK
5466 && CHAR_BIT == 8
5467 && BITS_PER_UNIT == 8
5468 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
5469 && TYPE_PRECISION (type) <= vn_walk_cb_data::bufsize * BITS_PER_UNIT
5470 && !integer_all_onesp (gimple_assign_rhs2 (stmt))
5471 && !integer_zerop (gimple_assign_rhs2 (stmt)))
5473 gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
5474 if (ass
5475 && !gimple_has_volatile_ops (ass)
5476 && vn_get_stmt_kind (ass) == VN_REFERENCE)
5478 tree last_vuse = gimple_vuse (ass);
5479 tree op = gimple_assign_rhs1 (ass);
5480 tree result = vn_reference_lookup (op, gimple_vuse (ass),
5481 default_vn_walk_kind,
5482 NULL, true, &last_vuse,
5483 gimple_assign_rhs2 (stmt));
5484 if (result
5485 && useless_type_conversion_p (TREE_TYPE (result),
5486 TREE_TYPE (op)))
5487 return set_ssa_val_to (lhs, result);
5490 break;
5491 case TRUNC_DIV_EXPR:
5492 if (TYPE_UNSIGNED (type))
5493 break;
5494 /* Fallthru. */
5495 case RDIV_EXPR:
5496 case MULT_EXPR:
5497 /* Match up ([-]a){/,*}([-])b with v=a{/,*}b, replacing it with -v. */
5498 if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
5500 tree rhs[2];
5501 rhs[0] = rhs1;
5502 rhs[1] = gimple_assign_rhs2 (stmt);
5503 for (unsigned i = 0; i <= 1; ++i)
5505 unsigned j = i == 0 ? 1 : 0;
5506 tree ops[2];
5507 gimple_match_op match_op (gimple_match_cond::UNCOND,
5508 NEGATE_EXPR, type, rhs[i]);
5509 ops[i] = vn_nary_build_or_lookup_1 (&match_op, false, true);
5510 ops[j] = rhs[j];
5511 if (ops[i]
5512 && (ops[0] = vn_nary_op_lookup_pieces (2, code,
5513 type, ops, NULL)))
5515 gimple_match_op match_op (gimple_match_cond::UNCOND,
5516 NEGATE_EXPR, type, ops[0]);
5517 result = vn_nary_build_or_lookup_1 (&match_op, true, false);
5518 if (result)
5520 bool changed = set_ssa_val_to (lhs, result);
5521 vn_nary_op_insert_stmt (stmt, result);
5522 return changed;
5527 break;
5528 case LSHIFT_EXPR:
5529 /* For X << C, use the value number of X * (1 << C). */
5530 if (INTEGRAL_TYPE_P (type)
5531 && TYPE_OVERFLOW_WRAPS (type)
5532 && !TYPE_SATURATING (type))
5534 tree rhs2 = gimple_assign_rhs2 (stmt);
5535 if (TREE_CODE (rhs2) == INTEGER_CST
5536 && tree_fits_uhwi_p (rhs2)
5537 && tree_to_uhwi (rhs2) < TYPE_PRECISION (type))
5539 wide_int w = wi::set_bit_in_zero (tree_to_uhwi (rhs2),
5540 TYPE_PRECISION (type));
5541 gimple_match_op match_op (gimple_match_cond::UNCOND,
5542 MULT_EXPR, type, rhs1,
5543 wide_int_to_tree (type, w));
5544 result = vn_nary_build_or_lookup (&match_op);
5545 if (result)
5547 bool changed = set_ssa_val_to (lhs, result);
5548 if (TREE_CODE (result) == SSA_NAME)
5549 vn_nary_op_insert_stmt (stmt, result);
5550 return changed;
5554 break;
5555 default:
5556 break;
5559 bool changed = set_ssa_val_to (lhs, lhs);
5560 vn_nary_op_insert_stmt (stmt, lhs);
5561 return changed;
5564 /* Visit a call STMT storing into LHS. Return true if the value number
5565 of the LHS has changed as a result. */
5567 static bool
5568 visit_reference_op_call (tree lhs, gcall *stmt)
5570 bool changed = false;
5571 struct vn_reference_s vr1;
5572 vn_reference_t vnresult = NULL;
5573 tree vdef = gimple_vdef (stmt);
5574 modref_summary *summary;
5576 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
5577 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5578 lhs = NULL_TREE;
5580 vn_reference_lookup_call (stmt, &vnresult, &vr1);
5582 /* If the lookup did not succeed, for pure functions try to use
5583 modref info to find a candidate to CSE to. */
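/* The idea: when the callee only reads a small, known set of locations,
   walk the virtual use->def chain past intervening stores and CSE to an
   earlier identical call, provided none of those stores may clobber
   what the call reads (verified further below).  */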
5584 const unsigned accesses_limit = 8;
5585 if (!vnresult
5586 && !vdef
5587 && lhs
5588 && gimple_vuse (stmt)
5589 && (((summary = get_modref_function_summary (stmt, NULL))
5590 && !summary->global_memory_read
5591 && summary->load_accesses < accesses_limit)
5592 || gimple_call_flags (stmt) & ECF_CONST))
5594 /* First search if we can do something useful and build a
5595 vector of all loads we have to check. */
5596 bool unknown_memory_access = false;
5597 auto_vec<ao_ref, accesses_limit> accesses;
5598 unsigned load_accesses = summary ? summary->load_accesses : 0;
5599 if (!unknown_memory_access)
5600 /* Add loads done as part of setting up the call arguments.
5601 That's also necessary for CONST functions which will
5602 not have a modref summary. */
5603 for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
5605 tree arg = gimple_call_arg (stmt, i);
5606 if (TREE_CODE (arg) != SSA_NAME
5607 && !is_gimple_min_invariant (arg))
5609 if (accesses.length () >= accesses_limit - load_accesses)
5611 unknown_memory_access = true;
5612 break;
5614 accesses.quick_grow (accesses.length () + 1);
5615 ao_ref_init (&accesses.last (), arg);
5618 if (summary && !unknown_memory_access)
5620 /* Add loads as analyzed by IPA modref. */
5621 for (auto base_node : summary->loads->bases)
5622 if (unknown_memory_access)
5623 break;
5624 else for (auto ref_node : base_node->refs)
5625 if (unknown_memory_access)
5626 break;
5627 else for (auto access_node : ref_node->accesses)
5629 accesses.quick_grow (accesses.length () + 1);
5630 ao_ref *r = &accesses.last ();
5631 if (!access_node.get_ao_ref (stmt, r))
5633 /* Initialize a ref based on the argument and
5634 unknown offset if possible. */
5635 tree arg = access_node.get_call_arg (stmt);
5636 if (arg && TREE_CODE (arg) == SSA_NAME)
5637 arg = SSA_VAL (arg);
5638 if (arg
5639 && TREE_CODE (arg) == ADDR_EXPR
5640 && (arg = get_base_address (arg))
5641 && DECL_P (arg))
5643 ao_ref_init (r, arg);
5644 r->ref = NULL_TREE;
5645 r->base = arg;
5647 else
5649 unknown_memory_access = true;
5650 break;
5653 r->base_alias_set = base_node->base;
5654 r->ref_alias_set = ref_node->ref;
5658 /* Walk the VUSE->VDEF chain optimistically trying to find an entry
5659 for the call in the hashtable. */
5660 unsigned limit = (unknown_memory_access
5661 ? 0
5662 : (param_sccvn_max_alias_queries_per_access
5663 / (accesses.length () + 1)));
5664 tree saved_vuse = vr1.vuse;
5665 hashval_t saved_hashcode = vr1.hashcode;
5666 while (limit > 0 && !vnresult && !SSA_NAME_IS_DEFAULT_DEF (vr1.vuse))
5668 vr1.hashcode = vr1.hashcode - SSA_NAME_VERSION (vr1.vuse);
5669 gimple *def = SSA_NAME_DEF_STMT (vr1.vuse);
5670 /* ??? We could use fancy stuff like in walk_non_aliased_vuses, but
5671 do not bother for now. */
5672 if (is_a <gphi *> (def))
5673 break;
5674 vr1.vuse = vuse_ssa_val (gimple_vuse (def));
5675 vr1.hashcode = vr1.hashcode + SSA_NAME_VERSION (vr1.vuse);
5676 vn_reference_lookup_1 (&vr1, &vnresult);
5677 limit--;
5680 /* If we found a candidate to CSE to, verify it is valid. */
5681 if (vnresult && !accesses.is_empty ())
5683 tree vuse = vuse_ssa_val (gimple_vuse (stmt));
5684 while (vnresult && vuse != vr1.vuse)
5686 gimple *def = SSA_NAME_DEF_STMT (vuse);
5687 for (auto &ref : accesses)
5689 /* ??? stmt_may_clobber_ref_p_1 does per stmt constant
5690 analysis overhead that we might be able to cache. */
5691 if (stmt_may_clobber_ref_p_1 (def, &ref, true))
5693 vnresult = NULL;
5694 break;
5697 vuse = vuse_ssa_val (gimple_vuse (def));
5700 vr1.vuse = saved_vuse;
5701 vr1.hashcode = saved_hashcode;
5704 if (vnresult)
5706 if (vdef)
5708 if (vnresult->result_vdef)
5709 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
5710 else if (!lhs && gimple_call_lhs (stmt))
5711 /* If stmt has non-SSA_NAME lhs, value number the vdef to itself,
5712 as the call still acts as a lhs store. */
5713 changed |= set_ssa_val_to (vdef, vdef);
5714 else
5715 /* If the call was discovered to be pure or const reflect
5716 that as far as possible. */
5717 changed |= set_ssa_val_to (vdef,
5718 vuse_ssa_val (gimple_vuse (stmt)));
5721 if (!vnresult->result && lhs)
5722 vnresult->result = lhs;
5724 if (vnresult->result && lhs)
5725 changed |= set_ssa_val_to (lhs, vnresult->result);
5727 else
5729 vn_reference_t vr2;
5730 vn_reference_s **slot;
5731 tree vdef_val = vdef;
5732 if (vdef)
5734 /* If we value numbered an indirect call's callee to a
5735 function not clobbering memory, value number its VDEF to
5736 its VUSE. */
5737 tree fn = gimple_call_fn (stmt);
5738 if (fn && TREE_CODE (fn) == SSA_NAME)
5740 fn = SSA_VAL (fn);
5741 if (TREE_CODE (fn) == ADDR_EXPR
5742 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5743 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
5744 & (ECF_CONST | ECF_PURE))
5745 /* If stmt has non-SSA_NAME lhs, value number the
5746 vdef to itself, as the call still acts as a lhs
5747 store. */
5748 && (lhs || gimple_call_lhs (stmt) == NULL_TREE))
5749 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
5751 changed |= set_ssa_val_to (vdef, vdef_val);
5753 if (lhs)
5754 changed |= set_ssa_val_to (lhs, lhs);
5755 vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
5756 vr2->vuse = vr1.vuse;
5757 /* As we are not walking the virtual operand chain we know the
5758 shared_lookup_references are still original so we can re-use
5759 them here. */
5760 vr2->operands = vr1.operands.copy ();
5761 vr2->type = vr1.type;
5762 vr2->punned = vr1.punned;
5763 vr2->set = vr1.set;
5764 vr2->offset = vr1.offset;
5765 vr2->max_size = vr1.max_size;
5766 vr2->base_set = vr1.base_set;
5767 vr2->hashcode = vr1.hashcode;
5768 vr2->result = lhs;
5769 vr2->result_vdef = vdef_val;
5770 vr2->value_id = 0;
5771 slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
5772 INSERT);
5773 gcc_assert (!*slot);
5774 *slot = vr2;
5775 vr2->next = last_inserted_ref;
5776 last_inserted_ref = vr2;
5779 return changed;
5782 /* Visit a load from a reference operator RHS, part of STMT, value number it,
5783 and return true if the value number of the LHS has changed as a result. */
5785 static bool
5786 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
5788 bool changed = false;
5789 tree result;
5790 vn_reference_t res;
5792 tree vuse = gimple_vuse (stmt);
5793 tree last_vuse = vuse;
5794 result = vn_reference_lookup (op, vuse, default_vn_walk_kind, &res, true, &last_vuse);
5796 /* We handle type-punning through unions by value-numbering based
5797 on offset and size of the access. Be prepared to handle a
5798 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
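/* For example, a float load from a location whose current contents were
   stored as a same-sized integer gets value-numbered as
   VIEW_CONVERT_EXPR <float> of that integer value.  */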
5799 if (result
5800 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
5802 /* Avoid the type punning in case the result mode has padding where
5803 the op we look up has not. */
5804 if (TYPE_MODE (TREE_TYPE (result)) != BLKmode
5805 && maybe_lt (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (result))),
5806 GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (op)))))
5807 result = NULL_TREE;
5808 else if (CONSTANT_CLASS_P (result))
5809 result = const_unop (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
5810 else
5812 /* We will be setting the value number of lhs to the value number
5813 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
5814 So first simplify and lookup this expression to see if it
5815 is already available. */
5816 gimple_match_op res_op (gimple_match_cond::UNCOND,
5817 VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
5818 result = vn_nary_build_or_lookup (&res_op);
5819 if (result
5820 && TREE_CODE (result) == SSA_NAME
5821 && VN_INFO (result)->needs_insertion)
5822 /* Track whether this is the canonical expression for different
5823 typed loads. We use that as a stopgap measure for code
5824 hoisting when dealing with floating point loads. */
5825 res->punned = true;
5828 /* When building the conversion fails, avoid inserting the reference
5829 again. */
5830 if (!result)
5831 return set_ssa_val_to (lhs, lhs);
5834 if (result)
5835 changed = set_ssa_val_to (lhs, result);
5836 else
5838 changed = set_ssa_val_to (lhs, lhs);
5839 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
5840 if (vuse && SSA_VAL (last_vuse) != SSA_VAL (vuse))
5842 if (dump_file && (dump_flags & TDF_DETAILS))
5844 fprintf (dump_file, "Using extra use virtual operand ");
5845 print_generic_expr (dump_file, last_vuse);
5846 fprintf (dump_file, "\n");
5848 vn_reference_insert (op, lhs, vuse, NULL_TREE);
5852 return changed;
5856 /* Visit a store to a reference operator LHS, part of STMT, value number it,
5857 and return true if the value number of the LHS has changed as a result. */
5859 static bool
5860 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
5862 bool changed = false;
5863 vn_reference_t vnresult = NULL;
5864 tree assign;
5865 bool resultsame = false;
5866 tree vuse = gimple_vuse (stmt);
5867 tree vdef = gimple_vdef (stmt);
5869 if (TREE_CODE (op) == SSA_NAME)
5870 op = SSA_VAL (op);
5872 /* First we want to lookup using the *vuses* from the store and see
5873 whether, in that state, the last store to this location with the
5874 same address had the same value.
5876 The vuses represent the memory state before the store. If the
5877 memory state, address, and value of the store is the same as the
5878 last store to this location, then this store will produce the
5879 same memory state as that store.
5881 In this case the vdef versions for this store are value numbered to those
5882 vuse versions, since they represent the same memory state after
5883 this store.
5885 Otherwise, the vdefs for the store are used when inserting into
5886 the table, since the store generates a new memory state. */
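/* E.g. a store *p = x whose location already holds x from the last store
   does not change the memory state; its VDEF is then value numbered to
   its VUSE and loads keep CSEing across it.  */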
5888 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
5889 if (vnresult
5890 && vnresult->result)
5892 tree result = vnresult->result;
5893 gcc_checking_assert (TREE_CODE (result) != SSA_NAME
5894 || result == SSA_VAL (result));
5895 resultsame = expressions_equal_p (result, op);
5896 if (resultsame)
5898 /* If the TBAA state isn't compatible for downstream reads
5899 we cannot value-number the VDEFs the same. */
5900 ao_ref lhs_ref;
5901 ao_ref_init (&lhs_ref, lhs);
5902 alias_set_type set = ao_ref_alias_set (&lhs_ref);
5903 alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
5904 if ((vnresult->set != set
5905 && ! alias_set_subset_of (set, vnresult->set))
5906 || (vnresult->base_set != base_set
5907 && ! alias_set_subset_of (base_set, vnresult->base_set)))
5908 resultsame = false;
5912 if (!resultsame)
5914 if (dump_file && (dump_flags & TDF_DETAILS))
5916 fprintf (dump_file, "No store match\n");
5917 fprintf (dump_file, "Value numbering store ");
5918 print_generic_expr (dump_file, lhs);
5919 fprintf (dump_file, " to ");
5920 print_generic_expr (dump_file, op);
5921 fprintf (dump_file, "\n");
5923 /* Have to set value numbers before insert, since insert is
5924 going to valueize the references in-place. */
5925 if (vdef)
5926 changed |= set_ssa_val_to (vdef, vdef);
5928 /* Do not insert structure copies into the tables. */
5929 if (is_gimple_min_invariant (op)
5930 || is_gimple_reg (op))
5931 vn_reference_insert (lhs, op, vdef, NULL);
5933 /* Only perform the following when being called from PRE
5934 which embeds tail merging. */
5935 if (default_vn_walk_kind == VN_WALK)
5937 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
5938 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
5939 if (!vnresult)
5940 vn_reference_insert (assign, lhs, vuse, vdef);
5943 else
5945 /* We had a match, so value number the vdef to have the value
5946 number of the vuse it came from. */
5948 if (dump_file && (dump_flags & TDF_DETAILS))
5949 fprintf (dump_file, "Store matched earlier value, "
5950 "value numbering store vdefs to matching vuses.\n");
5952 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
5955 return changed;
5958 /* Visit and value number PHI, return true if the value number
5959 changed. When BACKEDGES_VARYING_P is true then assume all
5960 backedge values are varying. When INSERTED is not NULL then
5961 this is just an ahead query for a possible iteration, set INSERTED
5962 to true if we'd insert into the hashtable. */
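/* Overall scheme: scan the executable incoming edges; if all arguments
   agree on one value (possibly via recorded equivalences) use it,
   otherwise try to CSE the PHI against an equivalent PHI in this block,
   and as a last resort value number the result to itself (VARYING).  */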
5964 static bool
5965 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
5967 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
5968 bool seen_undef_visited = false;
5969 tree backedge_val = NULL_TREE;
5970 bool seen_non_backedge = false;
5971 tree sameval_base = NULL_TREE;
5972 poly_int64 soff, doff;
5973 unsigned n_executable = 0;
5974 edge_iterator ei;
5975 edge e, sameval_e = NULL;
5977 /* TODO: We could check for this in initialization, and replace this
5978 with a gcc_assert. */
5979 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
5980 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
5982 /* We track whether a PHI was CSEd to, to avoid excessive iterations
5983 that would be necessary only because the PHI changed arguments
5984 but not value. */
5985 if (!inserted)
5986 gimple_set_plf (phi, GF_PLF_1, false);
5988 /* See if all non-TOP arguments have the same value. TOP is
5989 equivalent to everything, so we can ignore it. */
5990 basic_block bb = gimple_bb (phi);
5991 FOR_EACH_EDGE (e, ei, bb->preds)
5992 if (e->flags & EDGE_EXECUTABLE)
5994 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5996 if (def == PHI_RESULT (phi))
5997 continue;
5998 ++n_executable;
5999 bool visited = true;
6000 if (TREE_CODE (def) == SSA_NAME)
6002 tree val = SSA_VAL (def, &visited);
6003 if (SSA_NAME_IS_DEFAULT_DEF (def))
6004 visited = true;
6005 if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
6006 def = val;
6007 if (e->flags & EDGE_DFS_BACK)
6008 backedge_val = def;
6010 if (!(e->flags & EDGE_DFS_BACK))
6011 seen_non_backedge = true;
6012 if (def == VN_TOP)
6013 ;
6014 /* Ignore undefined defs for sameval but record one. */
6015 else if (TREE_CODE (def) == SSA_NAME
6016 && ! virtual_operand_p (def)
6017 && ssa_undefined_value_p (def, false))
6019 if (!seen_undef
6020 /* Avoid having not visited undefined defs if we also have
6021 a visited one. */
6022 || (!seen_undef_visited && visited))
6024 seen_undef = def;
6025 seen_undef_visited = visited;
6028 else if (sameval == VN_TOP)
6030 sameval = def;
6031 sameval_e = e;
6033 else if (expressions_equal_p (def, sameval))
6034 sameval_e = NULL;
6035 else if (virtual_operand_p (def))
6037 sameval = NULL_TREE;
6038 break;
6040 else
6042 /* We know we're arriving only with invariant addresses here,
6043 try harder comparing them. We can do some caching here
6044 which we cannot do in expressions_equal_p. */
6045 if (TREE_CODE (def) == ADDR_EXPR
6046 && TREE_CODE (sameval) == ADDR_EXPR
6047 && sameval_base != (void *)-1)
6049 if (!sameval_base)
6050 sameval_base = get_addr_base_and_unit_offset
6051 (TREE_OPERAND (sameval, 0), &soff);
6052 if (!sameval_base)
6053 sameval_base = (tree)(void *)-1;
6054 else if ((get_addr_base_and_unit_offset
6055 (TREE_OPERAND (def, 0), &doff) == sameval_base)
6056 && known_eq (soff, doff))
6057 continue;
6059 /* There's also the possibility to use equivalences. */
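/* I.e., if a recorded predicate tells us def == sameval holds on this
   incoming edge, treat the argument as equal to sameval.  */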
6060 if (!FLOAT_TYPE_P (TREE_TYPE (def))
6061 /* But only do this if we didn't force any of sameval or
6062 val to VARYING because of backedge processing rules. */
6063 && (TREE_CODE (sameval) != SSA_NAME
6064 || SSA_VAL (sameval) == sameval)
6065 && (TREE_CODE (def) != SSA_NAME || SSA_VAL (def) == def))
6067 vn_nary_op_t vnresult;
6068 tree ops[2];
6069 ops[0] = def;
6070 ops[1] = sameval;
6071 tree val = vn_nary_op_lookup_pieces (2, EQ_EXPR,
6072 boolean_type_node,
6073 ops, &vnresult);
6074 if (! val && vnresult && vnresult->predicated_values)
6076 val = vn_nary_op_get_predicated_value (vnresult, e);
6077 if (val && integer_truep (val)
6078 && !(sameval_e && (sameval_e->flags & EDGE_DFS_BACK)))
6080 if (dump_file && (dump_flags & TDF_DETAILS))
6082 fprintf (dump_file, "Predication says ");
6083 print_generic_expr (dump_file, def, TDF_NONE);
6084 fprintf (dump_file, " and ");
6085 print_generic_expr (dump_file, sameval, TDF_NONE);
6086 fprintf (dump_file, " are equal on edge %d -> %d\n",
6087 e->src->index, e->dest->index);
6089 continue;
6091 /* If on all previous edges the value was equal to def
6092 we can change sameval to def. */
6093 if (EDGE_COUNT (bb->preds) == 2
6094 && (val = vn_nary_op_get_predicated_value
6095 (vnresult, EDGE_PRED (bb, 0)))
6096 && integer_truep (val)
6097 && !(e->flags & EDGE_DFS_BACK))
6099 if (dump_file && (dump_flags & TDF_DETAILS))
6101 fprintf (dump_file, "Predication says ");
6102 print_generic_expr (dump_file, def, TDF_NONE);
6103 fprintf (dump_file, " and ");
6104 print_generic_expr (dump_file, sameval, TDF_NONE);
6105 fprintf (dump_file, " are equal on edge %d -> %d\n",
6106 EDGE_PRED (bb, 0)->src->index,
6107 EDGE_PRED (bb, 0)->dest->index);
6109 sameval = def;
6110 continue;
6114 sameval = NULL_TREE;
6115 break;
6119 /* If the value we want to use is flowing over the backedge and we
6120 should take it as VARYING but it has a non-VARYING value drop to
6121 VARYING.
6122 If we value-number a virtual operand never value-number to the
6123 value from the backedge as that confuses the alias-walking code.
6124 See gcc.dg/torture/pr87176.c. If the value is the same on a
6125 non-backedge everything is OK though. */
6126 bool visited_p;
6127 if ((backedge_val
6128 && !seen_non_backedge
6129 && TREE_CODE (backedge_val) == SSA_NAME
6130 && sameval == backedge_val
6131 && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
6132 || SSA_VAL (backedge_val) != backedge_val))
6133 /* Do not value-number a virtual operand to something not visited,
6134 though, given that allows us to escape a region in alias walking. */
6135 || (sameval
6136 && TREE_CODE (sameval) == SSA_NAME
6137 && !SSA_NAME_IS_DEFAULT_DEF (sameval)
6138 && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
6139 && (SSA_VAL (sameval, &visited_p), !visited_p)))
6140 /* Note this just drops to VARYING without inserting the PHI into
6141 the hashes. */
6142 result = PHI_RESULT (phi);
6143 /* If none of the edges was executable keep the value-number at VN_TOP,
6144 if only a single edge is executable use its value. */
6145 else if (n_executable <= 1)
6146 result = seen_undef ? seen_undef : sameval;
6147 /* If we saw only undefined values and VN_TOP use one of the
6148 undefined values. */
6149 else if (sameval == VN_TOP)
6150 result = (seen_undef && seen_undef_visited) ? seen_undef : sameval;
6151 /* First see if it is equivalent to a phi node in this block. We prefer
6152 this as it allows IV elimination - see PRs 66502 and 67167. */
6153 else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
6155 if (!inserted
6156 && TREE_CODE (result) == SSA_NAME
6157 && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
6159 gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
6160 if (dump_file && (dump_flags & TDF_DETAILS))
6162 fprintf (dump_file, "Marking CSEd to PHI node ");
6163 print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
6164 0, TDF_SLIM);
6165 fprintf (dump_file, "\n");
6169 /* If all values are the same use that, unless we've seen undefined
6170 values as well and the value isn't constant.
6171 CCP/copyprop have the same restriction to not remove uninit warnings. */
6172 else if (sameval
6173 && (! seen_undef || is_gimple_min_invariant (sameval)))
6174 result = sameval;
6175 else
6177 result = PHI_RESULT (phi);
6178 /* Only insert PHIs that are varying; for constant value numbers
6179 we would otherwise mess up equivalences, as we are only comparing
6180 the immediate controlling predicates. */
6181 vn_phi_insert (phi, result, backedges_varying_p);
6182 if (inserted)
6183 *inserted = true;
6186 return set_ssa_val_to (PHI_RESULT (phi), result);
6189 /* Try to simplify RHS using equivalences and constant folding. */
6191 static tree
6192 try_to_simplify (gassign *stmt)
6194 enum tree_code code = gimple_assign_rhs_code (stmt);
6195 tree tem;
6197 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
6198 in this case; there is no point in doing extra work. */
6199 if (code == SSA_NAME)
6200 return NULL_TREE;
6202 /* First try constant folding based on our current lattice. */
6203 mprts_hook = vn_lookup_simplify_result;
6204 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
6205 mprts_hook = NULL;
6206 if (tem
6207 && (TREE_CODE (tem) == SSA_NAME
6208 || is_gimple_min_invariant (tem)))
6209 return tem;
6211 return NULL_TREE;
6214 /* Visit and value number STMT, return true if the value number
6215 changed. */
6217 static bool
6218 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
6220 bool changed = false;
6222 if (dump_file && (dump_flags & TDF_DETAILS))
6224 fprintf (dump_file, "Value numbering stmt = ");
6225 print_gimple_stmt (dump_file, stmt, 0);
6228 if (gimple_code (stmt) == GIMPLE_PHI)
6229 changed = visit_phi (stmt, NULL, backedges_varying_p);
6230 else if (gimple_has_volatile_ops (stmt))
6231 changed = defs_to_varying (stmt);
6232 else if (gassign *ass = dyn_cast <gassign *> (stmt))
6234 enum tree_code code = gimple_assign_rhs_code (ass);
6235 tree lhs = gimple_assign_lhs (ass);
6236 tree rhs1 = gimple_assign_rhs1 (ass);
6237 tree simplified;
6239 /* Shortcut for copies. Simplifying copies is pointless,
6240 since we copy the expression and value they represent. */
6241 if (code == SSA_NAME
6242 && TREE_CODE (lhs) == SSA_NAME)
6244 changed = visit_copy (lhs, rhs1);
6245 goto done;
6247 simplified = try_to_simplify (ass);
6248 if (simplified)
6250 if (dump_file && (dump_flags & TDF_DETAILS))
6252 fprintf (dump_file, "RHS ");
6253 print_gimple_expr (dump_file, ass, 0);
6254 fprintf (dump_file, " simplified to ");
6255 print_generic_expr (dump_file, simplified);
6256 fprintf (dump_file, "\n");
6259 /* Setting value numbers to constants will occasionally
6260 screw up phi congruence because constants are not
6261 uniquely associated with a single ssa name that can be
6262 looked up. */
6263 if (simplified
6264 && is_gimple_min_invariant (simplified)
6265 && TREE_CODE (lhs) == SSA_NAME)
6267 changed = set_ssa_val_to (lhs, simplified);
6268 goto done;
6270 else if (simplified
6271 && TREE_CODE (simplified) == SSA_NAME
6272 && TREE_CODE (lhs) == SSA_NAME)
6274 changed = visit_copy (lhs, simplified);
6275 goto done;
6278 if ((TREE_CODE (lhs) == SSA_NAME
6279 /* We can substitute SSA_NAMEs that are live over
6280 abnormal edges with their constant value. */
6281 && !(gimple_assign_copy_p (ass)
6282 && is_gimple_min_invariant (rhs1))
6283 && !(simplified
6284 && is_gimple_min_invariant (simplified))
6285 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
6286 /* Stores or copies from SSA_NAMEs that are live over
6287 abnormal edges are a problem. */
6288 || (code == SSA_NAME
6289 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
6290 changed = defs_to_varying (ass);
6291 else if (REFERENCE_CLASS_P (lhs)
6292 || DECL_P (lhs))
6293 changed = visit_reference_op_store (lhs, rhs1, ass);
6294 else if (TREE_CODE (lhs) == SSA_NAME)
6296 if ((gimple_assign_copy_p (ass)
6297 && is_gimple_min_invariant (rhs1))
6298 || (simplified
6299 && is_gimple_min_invariant (simplified)))
6301 if (simplified)
6302 changed = set_ssa_val_to (lhs, simplified);
6303 else
6304 changed = set_ssa_val_to (lhs, rhs1);
6306 else
6308 /* Visit the original statement. */
6309 switch (vn_get_stmt_kind (ass))
6311 case VN_NARY:
6312 changed = visit_nary_op (lhs, ass);
6313 break;
6314 case VN_REFERENCE:
6315 changed = visit_reference_op_load (lhs, rhs1, ass);
6316 break;
6317 default:
6318 changed = defs_to_varying (ass);
6319 break;
6323 else
6324 changed = defs_to_varying (ass);
6326 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
6328 tree lhs = gimple_call_lhs (call_stmt);
6329 if (lhs && TREE_CODE (lhs) == SSA_NAME)
6331 /* Try constant folding based on our current lattice. */
6332 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
6333 vn_valueize);
6334 if (simplified)
6336 if (dump_file && (dump_flags & TDF_DETAILS))
6338 fprintf (dump_file, "call ");
6339 print_gimple_expr (dump_file, call_stmt, 0);
6340 fprintf (dump_file, " simplified to ");
6341 print_generic_expr (dump_file, simplified);
6342 fprintf (dump_file, "\n");
6345 /* Setting value numbers to constants will occasionally
6346 screw up phi congruence because constants are not
6347 uniquely associated with a single ssa name that can be
6348 looked up. */
6349 if (simplified
6350 && is_gimple_min_invariant (simplified))
6352 changed = set_ssa_val_to (lhs, simplified);
6353 if (gimple_vdef (call_stmt))
6354 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
6355 SSA_VAL (gimple_vuse (call_stmt)));
6356 goto done;
6358 else if (simplified
6359 && TREE_CODE (simplified) == SSA_NAME)
6361 changed = visit_copy (lhs, simplified);
6362 if (gimple_vdef (call_stmt))
6363 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
6364 SSA_VAL (gimple_vuse (call_stmt)));
6365 goto done;
6367 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
6369 changed = defs_to_varying (call_stmt);
6370 goto done;
6374 /* Pick up flags from a devirtualization target. */
6375 tree fn = gimple_call_fn (stmt);
6376 int extra_fnflags = 0;
6377 if (fn && TREE_CODE (fn) == SSA_NAME)
6379 fn = SSA_VAL (fn);
6380 if (TREE_CODE (fn) == ADDR_EXPR
6381 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
6382 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
6384 if ((/* Calls to the same function with the same vuse
6385 and the same operands do not necessarily return the same
6386 value, unless they're pure or const. */
6387 ((gimple_call_flags (call_stmt) | extra_fnflags)
6388 & (ECF_PURE | ECF_CONST))
6389 /* If calls have a vdef, subsequent calls won't have
6390 the same incoming vuse. So, if 2 calls with vdef have the
6391 same vuse, we know they're not subsequent.
6392 We can value number 2 calls to the same function with the
6393 same vuse and the same operands which are not subsequent
6394 the same, because there is no code in the program that can
6395 compare the 2 values... */
6396 || (gimple_vdef (call_stmt)
6397 /* ... unless the call returns a pointer which does
6398 not alias with anything else, in which case the
6399 information that the values are distinct is encoded
6400 in the IL. */
6401 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
6402 /* Only perform the following when being called from PRE
6403 which embeds tail merging. */
6404 && default_vn_walk_kind == VN_WALK))
6405 /* Do not process .DEFERRED_INIT since that confuses uninit
6406 analysis. */
6407 && !gimple_call_internal_p (call_stmt, IFN_DEFERRED_INIT))
6408 changed = visit_reference_op_call (lhs, call_stmt);
6409 else
6410 changed = defs_to_varying (call_stmt);
6412 else
6413 changed = defs_to_varying (stmt);
6414 done:
6415 return changed;
6419 /* Allocate a value number table. */
6421 static void
6422 allocate_vn_table (vn_tables_t table, unsigned size)
6424 table->phis = new vn_phi_table_type (size);
6425 table->nary = new vn_nary_op_table_type (size);
6426 table->references = new vn_reference_table_type (size);
6429 /* Free a value number table. */
6431 static void
6432 free_vn_table (vn_tables_t table)
6434 /* Walk over elements and release vectors. */
6435 vn_reference_iterator_type hir;
6436 vn_reference_t vr;
6437 FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
6438 vr->operands.release ();
6439 delete table->phis;
6440 table->phis = NULL;
6441 delete table->nary;
6442 table->nary = NULL;
6443 delete table->references;
6444 table->references = NULL;
6447 /* Set *ID according to RESULT. */
6449 static void
6450 set_value_id_for_result (tree result, unsigned int *id)
6452 if (result && TREE_CODE (result) == SSA_NAME)
6453 *id = VN_INFO (result)->value_id;
6454 else if (result && is_gimple_min_invariant (result))
6455 *id = get_or_alloc_constant_value_id (result);
6456 else
6457 *id = get_next_value_id ();
6460 /* Set the value ids in the valid hash tables. */
6462 static void
6463 set_hashtable_value_ids (void)
6465 vn_nary_op_iterator_type hin;
6466 vn_phi_iterator_type hip;
6467 vn_reference_iterator_type hir;
6468 vn_nary_op_t vno;
6469 vn_reference_t vr;
6470 vn_phi_t vp;
6472 /* Now set the value ids of the things we had put in the hash
6473 table. */
6475 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
6476 if (! vno->predicated_values)
6477 set_value_id_for_result (vno->u.result, &vno->value_id);
6479 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
6480 set_value_id_for_result (vp->result, &vp->value_id);
6482 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
6483 hir)
6484 set_value_id_for_result (vr->result, &vr->value_id);
6487 /* Return the maximum value id we have ever seen. */
6489 unsigned int
6490 get_max_value_id (void)
6492 return next_value_id;
6495 /* Return the maximum constant value id we have ever seen. */
6497 unsigned int
6498 get_max_constant_value_id (void)
6500 return -next_constant_value_id;
6503 /* Return the next unique value id. */
6505 unsigned int
6506 get_next_value_id (void)
6508 gcc_checking_assert ((int)next_value_id > 0);
6509 return next_value_id++;
6512 /* Return the next unique value id for constants. */
6514 unsigned int
6515 get_next_constant_value_id (void)
6517 gcc_checking_assert (next_constant_value_id < 0);
6518 return next_constant_value_id--;
6522 /* Compare two expressions E1 and E2 and return true if they are equal.
6523 If match_vn_top_optimistically is true then VN_TOP is equal to anything,
6524 otherwise VN_TOP only matches VN_TOP. */
6526 bool
6527 expressions_equal_p (tree e1, tree e2, bool match_vn_top_optimistically)
6529 /* The obvious case. */
6530 if (e1 == e2)
6531 return true;
6533 /* If either one is VN_TOP consider them equal. */
6534 if (match_vn_top_optimistically
6535 && (e1 == VN_TOP || e2 == VN_TOP))
6536 return true;
6538 /* If only one of them is null, they cannot be equal. While in general
6539 this should not happen for operations like TARGET_MEM_REF some
6540 operands are optional and an identity value we could substitute
6541 has differing semantics. */
6542 if (!e1 || !e2)
6543 return false;
6545 /* SSA_NAMEs compare pointer equal. */
6546 if (TREE_CODE (e1) == SSA_NAME || TREE_CODE (e2) == SSA_NAME)
6547 return false;
6549 /* Now perform the actual comparison. */
6550 if (TREE_CODE (e1) == TREE_CODE (e2)
6551 && operand_equal_p (e1, e2, OEP_PURE_SAME))
6552 return true;
6554 return false;
6558 /* Return true if the nary operation NARY may trap. This is a copy
6559 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
6561 bool
6562 vn_nary_may_trap (vn_nary_op_t nary)
6564 tree type;
6565 tree rhs2 = NULL_TREE;
6566 bool honor_nans = false;
6567 bool honor_snans = false;
6568 bool fp_operation = false;
6569 bool honor_trapv = false;
6570 bool handled, ret;
6571 unsigned i;
6573 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
6574 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
6575 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
6577 type = nary->type;
6578 fp_operation = FLOAT_TYPE_P (type);
6579 if (fp_operation)
6581 honor_nans = flag_trapping_math && !flag_finite_math_only;
6582 honor_snans = flag_signaling_nans != 0;
6584 else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type))
6585 honor_trapv = true;
6587 if (nary->length >= 2)
6588 rhs2 = nary->op[1];
6589 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
6590 honor_trapv, honor_nans, honor_snans,
6591 rhs2, &handled);
6592 if (handled && ret)
6593 return true;
6595 for (i = 0; i < nary->length; ++i)
6596 if (tree_could_trap_p (nary->op[i]))
6597 return true;
6599 return false;
6602 /* Return true if the reference operation REF may trap. */
6604 bool
6605 vn_reference_may_trap (vn_reference_t ref)
6607 switch (ref->operands[0].opcode)
6609 case MODIFY_EXPR:
6610 case CALL_EXPR:
6611 /* We do not handle calls. */
6612 return true;
6613 case ADDR_EXPR:
6614 /* And toplevel address computations never trap. */
6615 return false;
6616 default:;
6619 vn_reference_op_t op;
6620 unsigned i;
6621 FOR_EACH_VEC_ELT (ref->operands, i, op)
6623 switch (op->opcode)
6625 case WITH_SIZE_EXPR:
6626 case TARGET_MEM_REF:
6627 /* Always variable. */
6628 return true;
6629 case COMPONENT_REF:
6630 if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
6631 return true;
6632 break;
6633 case ARRAY_RANGE_REF:
6634 if (TREE_CODE (op->op0) == SSA_NAME)
6635 return true;
6636 break;
6637 case ARRAY_REF:
6639 if (TREE_CODE (op->op0) != INTEGER_CST)
6640 return true;
6642 /* !in_array_bounds */
6643 tree domain_type = TYPE_DOMAIN (ref->operands[i+1].type);
6644 if (!domain_type)
6645 return true;
6647 tree min = op->op1;
6648 tree max = TYPE_MAX_VALUE (domain_type);
6649 if (!min
6650 || !max
6651 || TREE_CODE (min) != INTEGER_CST
6652 || TREE_CODE (max) != INTEGER_CST)
6653 return true;
6655 if (tree_int_cst_lt (op->op0, min)
6656 || tree_int_cst_lt (max, op->op0))
6657 return true;
6659 break;
6661 case MEM_REF:
6662 /* Nothing interesting in itself, the base is separate. */
6663 break;
6664 /* The following are the address bases. */
6665 case SSA_NAME:
6666 return true;
6667 case ADDR_EXPR:
6668 if (op->op0)
6669 return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
6670 return false;
6671 default:;
6674 return false;
6677 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
6678 bitmap inserted_exprs_)
6679 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
6680 el_todo (0), eliminations (0), insertions (0),
6681 inserted_exprs (inserted_exprs_)
6683 need_eh_cleanup = BITMAP_ALLOC (NULL);
6684 need_ab_cleanup = BITMAP_ALLOC (NULL);
6687 eliminate_dom_walker::~eliminate_dom_walker ()
6689 BITMAP_FREE (need_eh_cleanup);
6690 BITMAP_FREE (need_ab_cleanup);
6693 /* Return a leader for OP that is available at the current point of the
6694 eliminate domwalk. */
6696 tree
6697 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
6699 tree valnum = VN_INFO (op)->valnum;
6700 if (TREE_CODE (valnum) == SSA_NAME)
6702 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
6703 return valnum;
6704 if (avail.length () > SSA_NAME_VERSION (valnum))
6706 tree av = avail[SSA_NAME_VERSION (valnum)];
6707 /* When PRE discovers a new redundancy there's no way to unite
6708 the value classes so it instead inserts a copy old-val = new-val.
6709 Look through such copies here, providing one more level of
6710 simplification at elimination time. */
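/* E.g. if the leader's defining statement is such an inserted copy
   tmp_3 = val_1 (or tmp_3 = CST), hand back val_1 or the constant
   directly; tmp_3 and val_1 are purely illustrative names.  */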
6711 gassign *ass;
6712 if (av && (ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (av))))
6713 if (gimple_assign_rhs_class (ass) == GIMPLE_SINGLE_RHS)
6715 tree rhs1 = gimple_assign_rhs1 (ass);
6716 if (CONSTANT_CLASS_P (rhs1)
6717 || (TREE_CODE (rhs1) == SSA_NAME
6718 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
6719 av = rhs1;
6721 return av;
6724 else if (is_gimple_min_invariant (valnum))
6725 return valnum;
6726 return NULL_TREE;
6729 /* At the current point of the eliminate domwalk make OP available. */
6731 void
6732 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
6734 tree valnum = VN_INFO (op)->valnum;
6735 if (TREE_CODE (valnum) == SSA_NAME)
6737 if (avail.length () <= SSA_NAME_VERSION (valnum))
6738 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1, true);
6739 tree pushop = op;
6740 if (avail[SSA_NAME_VERSION (valnum)])
6741 pushop = avail[SSA_NAME_VERSION (valnum)];
6742 avail_stack.safe_push (pushop);
6743 avail[SSA_NAME_VERSION (valnum)] = op;
6747 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
6748 the leader for the expression if insertion was successful. */
6750 tree
6751 eliminate_dom_walker::eliminate_insert (basic_block bb,
6752 gimple_stmt_iterator *gsi, tree val)
6754 /* We can insert a sequence with a single assignment only. */
6755 gimple_seq stmts = VN_INFO (val)->expr;
6756 if (!gimple_seq_singleton_p (stmts))
6757 return NULL_TREE;
6758 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
6759 if (!stmt
6760 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
6761 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
6762 && gimple_assign_rhs_code (stmt) != NEGATE_EXPR
6763 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
6764 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
6765 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
6766 return NULL_TREE;
6768 tree op = gimple_assign_rhs1 (stmt);
6769 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
6770 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
6771 op = TREE_OPERAND (op, 0);
6772 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
6773 if (!leader)
6774 return NULL_TREE;
6776 tree res;
6777 stmts = NULL;
6778 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
6779 res = gimple_build (&stmts, BIT_FIELD_REF,
6780 TREE_TYPE (val), leader,
6781 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
6782 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
6783 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
6784 res = gimple_build (&stmts, BIT_AND_EXPR,
6785 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
6786 else
6787 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
6788 TREE_TYPE (val), leader);
6789 if (TREE_CODE (res) != SSA_NAME
6790 || SSA_NAME_IS_DEFAULT_DEF (res)
6791 || gimple_bb (SSA_NAME_DEF_STMT (res)))
6793 gimple_seq_discard (stmts);
6795 /* During propagation we have to treat SSA info conservatively
6796 and thus we can end up simplifying the inserted expression
6797 at elimination time to something not defined in stmts. */
6798 /* But then this is a redundancy we failed to detect. Which means
6799 res now has two values. That doesn't play well with how
6800 we track availability here, so give up. */
6801 if (dump_file && (dump_flags & TDF_DETAILS))
6803 if (TREE_CODE (res) == SSA_NAME)
6804 res = eliminate_avail (bb, res);
6805 if (res)
6807 fprintf (dump_file, "Failed to insert expression for value ");
6808 print_generic_expr (dump_file, val);
6809 fprintf (dump_file, " which is really fully redundant to ");
6810 print_generic_expr (dump_file, res);
6811 fprintf (dump_file, "\n");
6815 return NULL_TREE;
6817 else
6819 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
6820 vn_ssa_aux_t vn_info = VN_INFO (res);
6821 vn_info->valnum = val;
6822 vn_info->visited = true;
6825 insertions++;
6826 if (dump_file && (dump_flags & TDF_DETAILS))
6828 fprintf (dump_file, "Inserted ");
6829 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
6832 return res;
6835 void
6836 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
6838 tree sprime = NULL_TREE;
6839 gimple *stmt = gsi_stmt (*gsi);
6840 tree lhs = gimple_get_lhs (stmt);
6841 if (lhs && TREE_CODE (lhs) == SSA_NAME
6842 && !gimple_has_volatile_ops (stmt)
6843 /* See PR43491. Do not replace a global register variable when
6844 it is the RHS of an assignment. Do replace local register
6845 variables since gcc does not guarantee a local variable will
6846 be allocated in a register.
6847 ??? The fix isn't effective here. This should instead
6848 be ensured by not value-numbering them the same but treating
6849 them like volatiles? */
6850 && !(gimple_assign_single_p (stmt)
6851 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
6852 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
6853 && is_global_var (gimple_assign_rhs1 (stmt)))))
6855 sprime = eliminate_avail (b, lhs);
6856 if (!sprime)
6858 /* If there is no existing usable leader but SCCVN thinks
6859 it has an expression it wants to use as replacement,
6860 insert that. */
6861 tree val = VN_INFO (lhs)->valnum;
6862 vn_ssa_aux_t vn_info;
6863 if (val != VN_TOP
6864 && TREE_CODE (val) == SSA_NAME
6865 && (vn_info = VN_INFO (val), true)
6866 && vn_info->needs_insertion
6867 && vn_info->expr != NULL
6868 && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
6869 eliminate_push_avail (b, sprime);
6872 /* If this now constitutes a copy, duplicate points-to
6873 and range info appropriately. This is especially
6874 important for inserted code. See tree-ssa-copy.cc
6875 for similar code. */
6876 if (sprime
6877 && TREE_CODE (sprime) == SSA_NAME)
6879 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
6880 if (POINTER_TYPE_P (TREE_TYPE (lhs))
6881 && SSA_NAME_PTR_INFO (lhs)
6882 && ! SSA_NAME_PTR_INFO (sprime))
6884 duplicate_ssa_name_ptr_info (sprime,
6885 SSA_NAME_PTR_INFO (lhs));
6886 if (b != sprime_b)
6887 reset_flow_sensitive_info (sprime);
6889 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6890 && SSA_NAME_RANGE_INFO (lhs)
6891 && ! SSA_NAME_RANGE_INFO (sprime)
6892 && b == sprime_b)
6893 duplicate_ssa_name_range_info (sprime, lhs);
6896 /* Inhibit the use of an inserted PHI on a loop header when
6897 the address of the memory reference is a simple induction
6898 variable. In other cases the vectorizer won't do anything
6899 anyway (either it's loop invariant or a complicated
6900 expression). */
6901 if (sprime
6902 && TREE_CODE (sprime) == SSA_NAME
6903 && do_pre
6904 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
6905 && loop_outer (b->loop_father)
6906 && has_zero_uses (sprime)
6907 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
6908 && gimple_assign_load_p (stmt))
6910 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
6911 basic_block def_bb = gimple_bb (def_stmt);
6912 if (gimple_code (def_stmt) == GIMPLE_PHI
6913 && def_bb->loop_father->header == def_bb)
6915 loop_p loop = def_bb->loop_father;
6916 ssa_op_iter iter;
6917 tree op;
6918 bool found = false;
6919 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
6921 affine_iv iv;
6922 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
6923 if (def_bb
6924 && flow_bb_inside_loop_p (loop, def_bb)
6925 && simple_iv (loop, loop, op, &iv, true))
6927 found = true;
6928 break;
6931 if (found)
6933 if (dump_file && (dump_flags & TDF_DETAILS))
6935 fprintf (dump_file, "Not replacing ");
6936 print_gimple_expr (dump_file, stmt, 0);
6937 fprintf (dump_file, " with ");
6938 print_generic_expr (dump_file, sprime);
6939 fprintf (dump_file, " which would add a loop"
6940 " carried dependence to loop %d\n",
6941 loop->num);
6943 /* Don't keep sprime available. */
6944 sprime = NULL_TREE;
6949 if (sprime)
6951 /* If we can propagate the value computed for LHS into
6952 all uses don't bother doing anything with this stmt. */
6953 if (may_propagate_copy (lhs, sprime))
6955 /* Mark it for removal. */
6956 to_remove.safe_push (stmt);
6958 /* ??? Don't count copy/constant propagations. */
6959 if (gimple_assign_single_p (stmt)
6960 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
6961 || gimple_assign_rhs1 (stmt) == sprime))
6962 return;
6964 if (dump_file && (dump_flags & TDF_DETAILS))
6966 fprintf (dump_file, "Replaced ");
6967 print_gimple_expr (dump_file, stmt, 0);
6968 fprintf (dump_file, " with ");
6969 print_generic_expr (dump_file, sprime);
6970 fprintf (dump_file, " in all uses of ");
6971 print_gimple_stmt (dump_file, stmt, 0);
6974 eliminations++;
6975 return;
6978 /* If this is an assignment from our leader (which
6979 happens in the case the value-number is a constant)
6980 then there is nothing to do. Likewise if we run into
6981 inserted code that needed a conversion because of
6982 our type-agnostic value-numbering of loads. */
6983 if ((gimple_assign_single_p (stmt)
6984 || (is_gimple_assign (stmt)
6985 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
6986 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)))
6987 && sprime == gimple_assign_rhs1 (stmt))
6988 return;
6990 /* Else replace its RHS. */
6991 if (dump_file && (dump_flags & TDF_DETAILS))
6993 fprintf (dump_file, "Replaced ");
6994 print_gimple_expr (dump_file, stmt, 0);
6995 fprintf (dump_file, " with ");
6996 print_generic_expr (dump_file, sprime);
6997 fprintf (dump_file, " in ");
6998 print_gimple_stmt (dump_file, stmt, 0);
7000 eliminations++;
7002 bool can_make_abnormal_goto = (is_gimple_call (stmt)
7003 && stmt_can_make_abnormal_goto (stmt));
7004 gimple *orig_stmt = stmt;
7005 if (!useless_type_conversion_p (TREE_TYPE (lhs),
7006 TREE_TYPE (sprime)))
7008 /* We preserve conversions to but not from function or method
7009 types. This asymmetry makes it necessary to re-instantiate
7010 conversions here. */
7011 if (POINTER_TYPE_P (TREE_TYPE (lhs))
7012 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
7013 sprime = fold_convert (TREE_TYPE (lhs), sprime);
7014 else
7015 gcc_unreachable ();
7017 tree vdef = gimple_vdef (stmt);
7018 tree vuse = gimple_vuse (stmt);
7019 propagate_tree_value_into_stmt (gsi, sprime);
7020 stmt = gsi_stmt (*gsi);
7021 update_stmt (stmt);
7022 /* In case the VDEF on the original stmt was released, value-number
7023 it to the VUSE. This is to make vuse_ssa_val able to skip
7024 released virtual operands. */
7025 if (vdef != gimple_vdef (stmt))
7027 gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
7028 VN_INFO (vdef)->valnum = vuse;
7031 /* If we removed EH side-effects from the statement, clean
7032 its EH information. */
7033 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
7035 bitmap_set_bit (need_eh_cleanup,
7036 gimple_bb (stmt)->index);
7037 if (dump_file && (dump_flags & TDF_DETAILS))
7038 fprintf (dump_file, " Removed EH side-effects.\n");
7041 /* Likewise for AB side-effects. */
7042 if (can_make_abnormal_goto
7043 && !stmt_can_make_abnormal_goto (stmt))
7045 bitmap_set_bit (need_ab_cleanup,
7046 gimple_bb (stmt)->index);
7047 if (dump_file && (dump_flags & TDF_DETAILS))
7048 fprintf (dump_file, " Removed AB side-effects.\n");
7051 return;
7055 /* If the statement is a scalar store, see if the expression
7056 has the same value number as its rhs. If so, the store is
7057 dead. */
7058 if (gimple_assign_single_p (stmt)
7059 && !gimple_has_volatile_ops (stmt)
7060 && !is_gimple_reg (gimple_assign_lhs (stmt))
7061 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
7062 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
7064 tree rhs = gimple_assign_rhs1 (stmt);
7065 vn_reference_t vnresult;
7066 /* ??? gcc.dg/torture/pr91445.c shows that we lookup a boolean
7067 typed load of a byte known to be 0x11 as 1 so a store of
7068 a boolean 1 is detected as redundant. Because of this we
7069 have to make sure to lookup with a ref where its size
7070 matches the precision. */
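/* E.g. for a _Bool store the code below looks up with an integer type of
   the lhs's full size (an 8-bit BIT_FIELD_REF or MEM_REF) instead of the
   _Bool typed lhs, so a byte holding 0x11 is not confused with boolean 1.  */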
7071 tree lookup_lhs = lhs;
7072 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
7073 && (TREE_CODE (lhs) != COMPONENT_REF
7074 || !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
7075 && !type_has_mode_precision_p (TREE_TYPE (lhs)))
7077 if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
7078 && TYPE_PRECISION (TREE_TYPE (lhs)) > MAX_FIXED_MODE_SIZE)
7079 lookup_lhs = NULL_TREE;
7080 else if (TREE_CODE (lhs) == COMPONENT_REF
7081 || TREE_CODE (lhs) == MEM_REF)
7083 tree ltype = build_nonstandard_integer_type
7084 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs))),
7085 TYPE_UNSIGNED (TREE_TYPE (lhs)));
7086 if (TREE_CODE (lhs) == COMPONENT_REF)
7088 tree foff = component_ref_field_offset (lhs);
7089 tree f = TREE_OPERAND (lhs, 1);
7090 if (!poly_int_tree_p (foff))
7091 lookup_lhs = NULL_TREE;
7092 else
7093 lookup_lhs = build3 (BIT_FIELD_REF, ltype,
7094 TREE_OPERAND (lhs, 0),
7095 TYPE_SIZE (TREE_TYPE (lhs)),
7096 bit_from_pos
7097 (foff, DECL_FIELD_BIT_OFFSET (f)));
7099 else
7100 lookup_lhs = build2 (MEM_REF, ltype,
7101 TREE_OPERAND (lhs, 0),
7102 TREE_OPERAND (lhs, 1));
7104 else
7105 lookup_lhs = NULL_TREE;
7107 tree val = NULL_TREE;
7108 if (lookup_lhs)
7109 val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt),
7110 VN_WALKREWRITE, &vnresult, false,
7111 NULL, NULL_TREE, true);
7112 if (TREE_CODE (rhs) == SSA_NAME)
7113 rhs = VN_INFO (rhs)->valnum;
7114 if (val
7115 && (operand_equal_p (val, rhs, 0)
7116 /* Due to the bitfield lookups above we can get bit
7117 interpretations of the same RHS as values here. Those
7118 are redundant as well. */
7119 || (TREE_CODE (val) == SSA_NAME
7120 && gimple_assign_single_p (SSA_NAME_DEF_STMT (val))
7121 && (val = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (val)))
7122 && TREE_CODE (val) == VIEW_CONVERT_EXPR
7123 && TREE_OPERAND (val, 0) == rhs)))
7125 /* We can only remove the later store if the former aliases
7126 at least all accesses the later one does or if the store
7127 was to readonly memory storing the same value. */
7128 ao_ref lhs_ref;
7129 ao_ref_init (&lhs_ref, lhs);
7130 alias_set_type set = ao_ref_alias_set (&lhs_ref);
7131 alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
7132 if (! vnresult
7133 || ((vnresult->set == set
7134 || alias_set_subset_of (set, vnresult->set))
7135 && (vnresult->base_set == base_set
7136 || alias_set_subset_of (base_set, vnresult->base_set))))
7138 if (dump_file && (dump_flags & TDF_DETAILS))
7140 fprintf (dump_file, "Deleted redundant store ");
7141 print_gimple_stmt (dump_file, stmt, 0);
7144 /* Queue stmt for removal. */
7145 to_remove.safe_push (stmt);
7146 return;
7151 /* If this is a control statement for which value numbering left
7152 one of its outgoing edges unexecuted, force the condition in a
7153 way consistent with that. */
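/* E.g. if only the true edge was found executable, fold the condition to
   constant true; CFG cleanup then removes the dead edge.  */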
7154 if (gcond *cond = dyn_cast <gcond *> (stmt))
7156 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
7157 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
7159 if (dump_file && (dump_flags & TDF_DETAILS))
7161 fprintf (dump_file, "Removing unexecutable edge from ");
7162 print_gimple_stmt (dump_file, stmt, 0);
7164 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
7165 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
7166 gimple_cond_make_true (cond);
7167 else
7168 gimple_cond_make_false (cond);
7169 update_stmt (cond);
7170 el_todo |= TODO_cleanup_cfg;
7171 return;
7175 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
7176 bool was_noreturn = (is_gimple_call (stmt)
7177 && gimple_call_noreturn_p (stmt));
7178 tree vdef = gimple_vdef (stmt);
7179 tree vuse = gimple_vuse (stmt);
7181 /* If we didn't replace the whole stmt (or propagate the result
7182 into all uses), replace all uses on this stmt with their
7183 leaders. */
7184 bool modified = false;
7185 use_operand_p use_p;
7186 ssa_op_iter iter;
7187 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
7189 tree use = USE_FROM_PTR (use_p);
7190 /* ??? The call code above leaves stmt operands un-updated. */
7191 if (TREE_CODE (use) != SSA_NAME)
7192 continue;
7193 tree sprime;
7194 if (SSA_NAME_IS_DEFAULT_DEF (use))
7195 /* ??? For default defs BB shouldn't matter, but we have to
7196 solve the inconsistency between rpo eliminate and
7197 dom eliminate avail valueization first. */
7198 sprime = eliminate_avail (b, use);
7199 else
7200 /* Look for something available at the definition block of the argument.
7201 This avoids inconsistencies between availability there which
7202 decides if the stmt can be removed and availability at the
7203 use site. The SSA property ensures that things available
7204 at the definition are also available at uses. */
7205 sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
7206 if (sprime && sprime != use
7207 && may_propagate_copy (use, sprime, true)
7208 /* We substitute into debug stmts to avoid excessive
7209 debug temporaries created by removed stmts, but we need
7210 to avoid doing so for inserted sprimes as we never want
7211 to create debug temporaries for them. */
7212 && (!inserted_exprs
7213 || TREE_CODE (sprime) != SSA_NAME
7214 || !is_gimple_debug (stmt)
7215 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
7217 propagate_value (use_p, sprime);
7218 modified = true;
7222 /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
7223 into, which is a requirement for the IPA devirt machinery. */
7224 gimple *old_stmt = stmt;
7225 if (modified)
7227 /* If a formerly non-invariant ADDR_EXPR is turned into an
7228 invariant one it was on a separate stmt. */
7229 if (gimple_assign_single_p (stmt)
7230 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
7231 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
7232 gimple_stmt_iterator prev = *gsi;
7233 gsi_prev (&prev);
7234 if (fold_stmt (gsi, follow_all_ssa_edges))
7236 /* fold_stmt may have created new stmts in between
7237 the previous stmt and the folded stmt. Mark
7238 all defs created there as varying to not confuse
7239 the SCCVN machinery as we're using that even during
7240 elimination. */
7241 if (gsi_end_p (prev))
7242 prev = gsi_start_bb (b);
7243 else
7244 gsi_next (&prev);
7245 if (gsi_stmt (prev) != gsi_stmt (*gsi))
7248 tree def;
7249 ssa_op_iter dit;
7250 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
7251 dit, SSA_OP_ALL_DEFS)
7252 /* As existing DEFs may move between stmts
7253 only process new ones. */
7254 if (! has_VN_INFO (def))
7256 vn_ssa_aux_t vn_info = VN_INFO (def);
7257 vn_info->valnum = def;
7258 vn_info->visited = true;
7260 if (gsi_stmt (prev) == gsi_stmt (*gsi))
7261 break;
7262 gsi_next (&prev);
7264 while (1);
7266 stmt = gsi_stmt (*gsi);
7267 /* In case we folded the stmt away schedule the NOP for removal. */
7268 if (gimple_nop_p (stmt))
7269 to_remove.safe_push (stmt);
7272 /* Visit indirect calls and turn them into direct calls if
7273 possible using the devirtualization machinery. Do this before
7274 checking for required EH/abnormal/noreturn cleanup as devirt
7275 may expose more of those. */
7276 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
7278 tree fn = gimple_call_fn (call_stmt);
7279 if (fn
7280 && flag_devirtualize
7281 && virtual_method_call_p (fn))
7283 tree otr_type = obj_type_ref_class (fn);
7284 unsigned HOST_WIDE_INT otr_tok
7285 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
7286 tree instance;
7287 ipa_polymorphic_call_context context (current_function_decl,
7288 fn, stmt, &instance);
7289 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
7290 otr_type, stmt, NULL);
7291 bool final;
7292 vec <cgraph_node *> targets
7293 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
7294 otr_tok, context, &final);
7295 if (dump_file)
7296 dump_possible_polymorphic_call_targets (dump_file,
7297 obj_type_ref_class (fn),
7298 otr_tok, context);
7299 if (final && targets.length () <= 1 && dbg_cnt (devirt))
7301 tree fn;
7302 if (targets.length () == 1)
7303 fn = targets[0]->decl;
7304 else
7305 fn = builtin_decl_unreachable ();
7306 if (dump_enabled_p ())
7308 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
7309 "converting indirect call to "
7310 "function %s\n",
7311 lang_hooks.decl_printable_name (fn, 2));
7313 gimple_call_set_fndecl (call_stmt, fn);
7314 /* If changing the call to __builtin_unreachable
7315 or similar noreturn function, adjust gimple_call_fntype
7316 too. */
7317 if (gimple_call_noreturn_p (call_stmt)
7318 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
7319 && TYPE_ARG_TYPES (TREE_TYPE (fn))
7320 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
7321 == void_type_node))
7322 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
7323 maybe_remove_unused_call_args (cfun, call_stmt);
7324 modified = true;
7329 if (modified)
7331 /* When changing a call into a noreturn call, cfg cleanup
7332 is needed to fix up the noreturn call. */
7333 if (!was_noreturn
7334 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
7335 to_fixup.safe_push (stmt);
7336 /* When changing a condition or switch into one we know what
7337 edge will be executed, schedule a cfg cleanup. */
7338 if ((gimple_code (stmt) == GIMPLE_COND
7339 && (gimple_cond_true_p (as_a <gcond *> (stmt))
7340 || gimple_cond_false_p (as_a <gcond *> (stmt))))
7341 || (gimple_code (stmt) == GIMPLE_SWITCH
7342 && TREE_CODE (gimple_switch_index
7343 (as_a <gswitch *> (stmt))) == INTEGER_CST))
7344 el_todo |= TODO_cleanup_cfg;
7345 /* If we removed EH side-effects from the statement, clean
7346 its EH information. */
7347 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
7349 bitmap_set_bit (need_eh_cleanup,
7350 gimple_bb (stmt)->index);
7351 if (dump_file && (dump_flags & TDF_DETAILS))
7352 fprintf (dump_file, " Removed EH side-effects.\n");
7354 /* Likewise for AB side-effects. */
7355 if (can_make_abnormal_goto
7356 && !stmt_can_make_abnormal_goto (stmt))
7358 bitmap_set_bit (need_ab_cleanup,
7359 gimple_bb (stmt)->index);
7360 if (dump_file && (dump_flags & TDF_DETAILS))
7361 fprintf (dump_file, " Removed AB side-effects.\n");
7363 update_stmt (stmt);
7364 /* In case the VDEF on the original stmt was released, value-number
7365 it to the VUSE. This is to make vuse_ssa_val able to skip
7366 released virtual operands. */
7367 if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
7368 VN_INFO (vdef)->valnum = vuse;
7371 /* Make new values available - for fully redundant LHS we
7372 continue with the next stmt above and skip this.
7373 But avoid picking up dead defs. */
7374 tree def;
7375 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
7376 if (! has_zero_uses (def)
7377 || (inserted_exprs
7378 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (def))))
7379 eliminate_push_avail (b, def);
7382 /* Perform elimination for the basic-block B during the domwalk. */
7384 edge
7385 eliminate_dom_walker::before_dom_children (basic_block b)
7387 /* Mark new bb. */
7388 avail_stack.safe_push (NULL_TREE);
7390 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
7391 if (!(b->flags & BB_EXECUTABLE))
7392 return NULL;
7394 vn_context_bb = b;
7396 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
7398 gphi *phi = gsi.phi ();
7399 tree res = PHI_RESULT (phi);
7401 if (virtual_operand_p (res))
7403 gsi_next (&gsi);
7404 continue;
7407 tree sprime = eliminate_avail (b, res);
7408 if (sprime
7409 && sprime != res)
7411 if (dump_file && (dump_flags & TDF_DETAILS))
7413 fprintf (dump_file, "Replaced redundant PHI node defining ");
7414 print_generic_expr (dump_file, res);
7415 fprintf (dump_file, " with ");
7416 print_generic_expr (dump_file, sprime);
7417 fprintf (dump_file, "\n");
7420          /* If we inserted this PHI node ourselves, it's not an elimination.  */
7421 if (! inserted_exprs
7422 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
7423 eliminations++;
7425 /* If we will propagate into all uses don't bother to do
7426 anything. */
7427 if (may_propagate_copy (res, sprime))
7429 /* Mark the PHI for removal. */
7430 to_remove.safe_push (phi);
7431 gsi_next (&gsi);
7432 continue;
7435 remove_phi_node (&gsi, false);
7437 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
7438 sprime = fold_convert (TREE_TYPE (res), sprime);
7439 gimple *stmt = gimple_build_assign (res, sprime);
7440 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
7441 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
7442 continue;
7445 eliminate_push_avail (b, res);
7446 gsi_next (&gsi);
7449 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
7450 !gsi_end_p (gsi);
7451 gsi_next (&gsi))
7452 eliminate_stmt (b, &gsi);
7454 /* Replace destination PHI arguments. */
7455 edge_iterator ei;
7456 edge e;
7457 FOR_EACH_EDGE (e, ei, b->succs)
7458 if (e->flags & EDGE_EXECUTABLE)
7459 for (gphi_iterator gsi = gsi_start_phis (e->dest);
7460 !gsi_end_p (gsi);
7461 gsi_next (&gsi))
7463 gphi *phi = gsi.phi ();
7464 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
7465 tree arg = USE_FROM_PTR (use_p);
7466 if (TREE_CODE (arg) != SSA_NAME
7467 || virtual_operand_p (arg))
7468 continue;
7469 tree sprime = eliminate_avail (b, arg);
7470 if (sprime && may_propagate_copy (arg, sprime,
7471 !(e->flags & EDGE_ABNORMAL)))
7472 propagate_value (use_p, sprime);
7475 vn_context_bb = NULL;
7477 return NULL;
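/* A small sketch (hypothetical input, not from the testsuite) of a PHI the
   walk above replaces: both arguments of the PHI for y carry the value of x,
   which is available at the merge point, so the PHI is redundant.

     int
     f (int a, int c)
     {
       int x = a + 1, y;
       if (c)
	 y = a + 1;	// value-numbered to x
       else
	 y = x;
       return y;	// the PHI merging the two arms is replaced by x
     }  */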
7480 /* Make no longer available leaders no longer available. */
7482 void
7483 eliminate_dom_walker::after_dom_children (basic_block)
7485 tree entry;
7486 while ((entry = avail_stack.pop ()) != NULL_TREE)
7488 tree valnum = VN_INFO (entry)->valnum;
7489 tree old = avail[SSA_NAME_VERSION (valnum)];
7490 if (old == entry)
7491 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
7492 else
7493 avail[SSA_NAME_VERSION (valnum)] = entry;
7497 /* Remove queued stmts and perform delayed cleanups. */
7499 unsigned
7500 eliminate_dom_walker::eliminate_cleanup (bool region_p)
7502 statistics_counter_event (cfun, "Eliminated", eliminations);
7503 statistics_counter_event (cfun, "Insertions", insertions);
7505   /* We cannot remove stmts during the BB walk, especially not release SSA
7506 names there as this confuses the VN machinery. The stmts ending
7507 up in to_remove are either stores or simple copies.
7508 Remove stmts in reverse order to make debug stmt creation possible. */
7509 while (!to_remove.is_empty ())
7511 bool do_release_defs = true;
7512 gimple *stmt = to_remove.pop ();
7514       /* When we are value-numbering a region we do not require exit PHIs to
7515          be present, so we have to make sure to deal with out-of-region uses
7516          of stmts that we thought were eliminated.
7517 ??? Note we may be confused by uses in dead regions we didn't run
7518 elimination on. Rather than checking individual uses we accept
7519 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
7520          contains such an example).  */
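      /* For illustration (hypothetical region, not from the testsuite): when
	 only the loop body below is value-numbered as a region, t is
	 redundant with s inside the region but still used after the loop, so
	 instead of removing its definition we keep a copy "t = s" alive.

	   s = a + b;
	   for (i = 0; i < n; ++i)
	     t = a + b;		// redundant within the region
	   return t;		// out-of-region use  */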
7521 if (region_p)
7523 if (gphi *phi = dyn_cast <gphi *> (stmt))
7525 tree lhs = gimple_phi_result (phi);
7526 if (!has_zero_uses (lhs))
7528 if (dump_file && (dump_flags & TDF_DETAILS))
7529 fprintf (dump_file, "Keeping eliminated stmt live "
7530 "as copy because of out-of-region uses\n");
7531 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
7532 gimple *copy = gimple_build_assign (lhs, sprime);
7533 gimple_stmt_iterator gsi
7534 = gsi_after_labels (gimple_bb (stmt));
7535 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
7536 do_release_defs = false;
7539 else if (tree lhs = gimple_get_lhs (stmt))
7540 if (TREE_CODE (lhs) == SSA_NAME
7541 && !has_zero_uses (lhs))
7543 if (dump_file && (dump_flags & TDF_DETAILS))
7544 fprintf (dump_file, "Keeping eliminated stmt live "
7545 "as copy because of out-of-region uses\n");
7546 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
7547 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
7548 if (is_gimple_assign (stmt))
7550 gimple_assign_set_rhs_from_tree (&gsi, sprime);
7551 stmt = gsi_stmt (gsi);
7552 update_stmt (stmt);
7553 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
7554 bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
7555 continue;
7557 else
7559 gimple *copy = gimple_build_assign (lhs, sprime);
7560 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
7561 do_release_defs = false;
7566 if (dump_file && (dump_flags & TDF_DETAILS))
7568 fprintf (dump_file, "Removing dead stmt ");
7569 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
7572 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
7573 if (gimple_code (stmt) == GIMPLE_PHI)
7574 remove_phi_node (&gsi, do_release_defs);
7575 else
7577 basic_block bb = gimple_bb (stmt);
7578 unlink_stmt_vdef (stmt);
7579 if (gsi_remove (&gsi, true))
7580 bitmap_set_bit (need_eh_cleanup, bb->index);
7581 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
7582 bitmap_set_bit (need_ab_cleanup, bb->index);
7583 if (do_release_defs)
7584 release_defs (stmt);
7587 /* Removing a stmt may expose a forwarder block. */
7588 el_todo |= TODO_cleanup_cfg;
7591 /* Fixup stmts that became noreturn calls. This may require splitting
7592 blocks and thus isn't possible during the dominator walk. Do this
7593      in reverse order so we don't inadvertently remove a stmt we want to
7594 fixup by visiting a dominating now noreturn call first. */
7595 while (!to_fixup.is_empty ())
7597 gimple *stmt = to_fixup.pop ();
7599 if (dump_file && (dump_flags & TDF_DETAILS))
7601 fprintf (dump_file, "Fixing up noreturn call ");
7602 print_gimple_stmt (dump_file, stmt, 0);
7605 if (fixup_noreturn_call (stmt))
7606 el_todo |= TODO_cleanup_cfg;
7609 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
7610 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
7612 if (do_eh_cleanup)
7613 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
7615 if (do_ab_cleanup)
7616 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
7618 if (do_eh_cleanup || do_ab_cleanup)
7619 el_todo |= TODO_cleanup_cfg;
7621 return el_todo;
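/* For illustration (hypothetical input, not from the testsuite) of the
   noreturn fixup above: when the virtual call below is found to have no
   possible target it is replaced by a call to __builtin_unreachable, the
   following statements become dead and the block has to be split and
   cleaned up.

     p->f ();		// becomes __builtin_unreachable ()
     return x;		// now unreachable  */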
7624 /* Eliminate fully redundant computations. */
7626 unsigned
7627 eliminate_with_rpo_vn (bitmap inserted_exprs)
7629 eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
7631 eliminate_dom_walker *saved_rpo_avail = rpo_avail;
7632 rpo_avail = &walker;
7633 walker.walk (cfun->cfg->x_entry_block_ptr);
7634 rpo_avail = saved_rpo_avail;
7636 return walker.eliminate_cleanup ();
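/* A minimal example (hypothetical input, not from the testsuite) of a full
   redundancy removed by this elimination: the second a + b computes the same
   value on every path reaching it and is replaced by the available result.

     int
     f (int a, int b)
     {
       int x = a + b;
       int y = a + b;	// fully redundant, effectively becomes y = x
       return x + y;
     }  */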
7639 static unsigned
7640 do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
7641 bool iterate, bool eliminate, bool skip_entry_phis,
7642 vn_lookup_kind kind);
7644 void
7645 run_rpo_vn (vn_lookup_kind kind)
7647 do_rpo_vn_1 (cfun, NULL, NULL, true, false, false, kind);
7649 /* ??? Prune requirement of these. */
7650 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
7652 /* Initialize the value ids and prune out remaining VN_TOPs
7653 from dead code. */
7654 tree name;
7655 unsigned i;
7656 FOR_EACH_SSA_NAME (i, name, cfun)
7658 vn_ssa_aux_t info = VN_INFO (name);
7659 if (!info->visited
7660 || info->valnum == VN_TOP)
7661 info->valnum = name;
7662 if (info->valnum == name)
7663 info->value_id = get_next_value_id ();
7664 else if (is_gimple_min_invariant (info->valnum))
7665 info->value_id = get_or_alloc_constant_value_id (info->valnum);
7668 /* Propagate. */
7669 FOR_EACH_SSA_NAME (i, name, cfun)
7671 vn_ssa_aux_t info = VN_INFO (name);
7672 if (TREE_CODE (info->valnum) == SSA_NAME
7673 && info->valnum != name
7674 && info->value_id != VN_INFO (info->valnum)->value_id)
7675 info->value_id = VN_INFO (info->valnum)->value_id;
7678 set_hashtable_value_ids ();
7680 if (dump_file && (dump_flags & TDF_DETAILS))
7682 fprintf (dump_file, "Value numbers:\n");
7683 FOR_EACH_SSA_NAME (i, name, cfun)
7685 if (VN_INFO (name)->visited
7686 && SSA_VAL (name) != name)
7688 print_generic_expr (dump_file, name);
7689 fprintf (dump_file, " = ");
7690 print_generic_expr (dump_file, SSA_VAL (name));
7691 fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
7697 /* Free VN associated data structures. */
7699 void
7700 free_rpo_vn (void)
7702 free_vn_table (valid_info);
7703 XDELETE (valid_info);
7704 obstack_free (&vn_tables_obstack, NULL);
7705 obstack_free (&vn_tables_insert_obstack, NULL);
7707 vn_ssa_aux_iterator_type it;
7708 vn_ssa_aux_t info;
7709 FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
7710 if (info->needs_insertion)
7711 release_ssa_name (info->name);
7712 obstack_free (&vn_ssa_aux_obstack, NULL);
7713 delete vn_ssa_aux_hash;
7715 delete constant_to_value_id;
7716 constant_to_value_id = NULL;
7719 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
7721 static tree
7722 vn_lookup_simplify_result (gimple_match_op *res_op)
7724 if (!res_op->code.is_tree_code ())
7725 return NULL_TREE;
7726 tree *ops = res_op->ops;
7727 unsigned int length = res_op->num_ops;
7728 if (res_op->code == CONSTRUCTOR
7729       /* ??? We're arriving here with SCCVN's view, a decomposed CONSTRUCTOR,
7730          while GIMPLE's match-and-simplify sees the CONSTRUCTOR as a GENERIC tree.  */
7731 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
7733 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
7734 ops = XALLOCAVEC (tree, length);
7735 for (unsigned i = 0; i < length; ++i)
7736 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
7738 vn_nary_op_t vnresult = NULL;
7739 tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
7740 res_op->type, ops, &vnresult);
7741 /* If this is used from expression simplification make sure to
7742 return an available expression. */
7743 if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
7744 res = rpo_avail->eliminate_avail (vn_context_bb, res);
7745 return res;
7748 /* Return a leader for OPs value that is valid at BB. */
7750 tree
7751 rpo_elim::eliminate_avail (basic_block bb, tree op)
7753 bool visited;
7754 tree valnum = SSA_VAL (op, &visited);
7755 /* If we didn't visit OP then it must be defined outside of the
7756 region we process and also dominate it. So it is available. */
7757 if (!visited)
7758 return op;
7759 if (TREE_CODE (valnum) == SSA_NAME)
7761 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
7762 return valnum;
7763 vn_ssa_aux_t valnum_info = VN_INFO (valnum);
7764 vn_avail *av = valnum_info->avail;
7765 if (!av)
7767 /* See above. But when there's availability info prefer
7768              what we recorded there, for example to preserve LC SSA.  */
7769 if (!valnum_info->visited)
7770 return valnum;
7771 return NULL_TREE;
7773 if (av->location == bb->index)
7774 /* On tramp3d 90% of the cases are here. */
7775 return ssa_name (av->leader);
7778 basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
7779 /* ??? During elimination we have to use availability at the
7780 definition site of a use we try to replace. This
7781 is required to not run into inconsistencies because
7782 of dominated_by_p_w_unex behavior and removing a definition
7783 while not replacing all uses.
7784 ??? We could try to consistently walk dominators
7785 ignoring non-executable regions. The nearest common
7786 dominator of bb and abb is where we can stop walking. We
7787 may also be able to "pre-compute" (bits of) the next immediate
7788 (non-)dominator during the RPO walk when marking edges as
7789 executable. */
7790 if (dominated_by_p_w_unex (bb, abb, true))
7792 tree leader = ssa_name (av->leader);
7793 /* Prevent eliminations that break loop-closed SSA. */
7794 if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
7795 && ! SSA_NAME_IS_DEFAULT_DEF (leader)
7796 && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
7797 (leader))->loop_father,
7798 bb))
7799 return NULL_TREE;
7800 if (dump_file && (dump_flags & TDF_DETAILS))
7802 print_generic_expr (dump_file, leader);
7803 fprintf (dump_file, " is available for ");
7804 print_generic_expr (dump_file, valnum);
7805 fprintf (dump_file, "\n");
7807 /* On tramp3d 99% of the _remaining_ cases succeed at
7808              the first entry.  */
7809 return leader;
7811 /* ??? Can we somehow skip to the immediate dominator
7812 RPO index (bb_to_rpo)? Again, maybe not worth, on
7813 tramp3d the worst number of elements in the vector is 9. */
7814 av = av->next;
7816 while (av);
7817       /* While we prefer avail we have to fall back to using the value
7818 directly if defined outside of the region when none of the
7819 available defs suit. */
7820 if (!valnum_info->visited)
7821 return valnum;
7823 else if (valnum != VN_TOP)
7824 /* valnum is is_gimple_min_invariant. */
7825 return valnum;
7826 return NULL_TREE;
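/* For illustration (hypothetical input, not from the testsuite): availability
   of a leader is governed by dominance of the block it was recorded in.

     x = a + b;
     if (c)
       y = a + b;	// x is a valid leader, its block dominates this one
     if (d)
       {
	 p = q + r;
       }
     z = q + r;		// p is no leader here, its block does not dominate  */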
7829 /* Make LEADER a leader for its value at BB. */
7831 void
7832 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
7834 tree valnum = VN_INFO (leader)->valnum;
7835 if (valnum == VN_TOP
7836 || is_gimple_min_invariant (valnum))
7837 return;
7838 if (dump_file && (dump_flags & TDF_DETAILS))
7840 fprintf (dump_file, "Making available beyond BB%d ", bb->index);
7841 print_generic_expr (dump_file, leader);
7842 fprintf (dump_file, " for value ");
7843 print_generic_expr (dump_file, valnum);
7844 fprintf (dump_file, "\n");
7846 vn_ssa_aux_t value = VN_INFO (valnum);
7847 vn_avail *av;
7848 if (m_avail_freelist)
7850 av = m_avail_freelist;
7851 m_avail_freelist = m_avail_freelist->next;
7853 else
7854 av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
7855 av->location = bb->index;
7856 av->leader = SSA_NAME_VERSION (leader);
7857 av->next = value->avail;
7858 av->next_undo = last_pushed_avail;
7859 last_pushed_avail = value;
7860 value->avail = av;
7863 /* Valueization hook for RPO VN plus required state. */
7865 tree
7866 rpo_vn_valueize (tree name)
7868 if (TREE_CODE (name) == SSA_NAME)
7870 vn_ssa_aux_t val = VN_INFO (name);
7871 if (val)
7873 tree tem = val->valnum;
7874 if (tem != VN_TOP && tem != name)
7876 if (TREE_CODE (tem) != SSA_NAME)
7877 return tem;
7878 /* For all values we only valueize to an available leader
7879 which means we can use SSA name info without restriction. */
7880 tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
7881 if (tem)
7882 return tem;
7886 return name;
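/* A small sketch of what this valueization hook provides to the gimple
   match-and-simplify machinery (hypothetical SSA names): given

     x_1 = a_2 + 1;
     y_3 = a_2 + 1;

   y_3 valueizes to the available leader x_1, so a later expression like
   y_3 - x_1 can simplify to 0 without inserting new statements.  */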
7889 /* Insert on PRED_E predicates derived from CODE OPS being true besides the
7890 inverted condition. */
7892 static void
7893 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
7895 switch (code)
7897 case LT_EXPR:
7898 /* a < b -> a {!,<}= b */
7899 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
7900 ops, boolean_true_node, 0, pred_e);
7901 vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
7902 ops, boolean_true_node, 0, pred_e);
7903 /* a < b -> ! a {>,=} b */
7904 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
7905 ops, boolean_false_node, 0, pred_e);
7906 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
7907 ops, boolean_false_node, 0, pred_e);
7908 break;
7909 case GT_EXPR:
7910 /* a > b -> a {!,>}= b */
7911 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
7912 ops, boolean_true_node, 0, pred_e);
7913 vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
7914 ops, boolean_true_node, 0, pred_e);
7915 /* a > b -> ! a {<,=} b */
7916 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
7917 ops, boolean_false_node, 0, pred_e);
7918 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
7919 ops, boolean_false_node, 0, pred_e);
7920 break;
7921 case EQ_EXPR:
7922 /* a == b -> ! a {<,>} b */
7923 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
7924 ops, boolean_false_node, 0, pred_e);
7925 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
7926 ops, boolean_false_node, 0, pred_e);
7927 break;
7928 case LE_EXPR:
7929 case GE_EXPR:
7930 case NE_EXPR:
7931 /* Nothing besides inverted condition. */
7932 break;
7933 default:;
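/* For illustration (hypothetical input, not from the testsuite): for integer
   a and b the predicates recorded above let related conditions dominated by
   the a < b edge fold without any extra pass.

     if (a < b)
       {
	 if (a == b)	// recorded as false on this edge
	   ...
	 if (a <= b)	// recorded as true on this edge
	   ...
       }  */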
7937 /* Main stmt worker for RPO VN, process BB. */
7939 static unsigned
7940 process_bb (rpo_elim &avail, basic_block bb,
7941 bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
7942 bool do_region, bitmap exit_bbs, bool skip_phis)
7944 unsigned todo = 0;
7945 edge_iterator ei;
7946 edge e;
7948 vn_context_bb = bb;
7950 /* If we are in loop-closed SSA preserve this state. This is
7951 relevant when called on regions from outside of FRE/PRE. */
7952 bool lc_phi_nodes = false;
7953 if (!skip_phis
7954 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
7955 FOR_EACH_EDGE (e, ei, bb->preds)
7956 if (e->src->loop_father != e->dest->loop_father
7957 && flow_loop_nested_p (e->dest->loop_father,
7958 e->src->loop_father))
7960 lc_phi_nodes = true;
7961 break;
7964 /* When we visit a loop header substitute into loop info. */
7965 if (!iterate && eliminate && bb->loop_father->header == bb)
7967 /* Keep fields in sync with substitute_in_loop_info. */
7968 if (bb->loop_father->nb_iterations)
7969 bb->loop_father->nb_iterations
7970 = simplify_replace_tree (bb->loop_father->nb_iterations,
7971 NULL_TREE, NULL_TREE, &vn_valueize_for_srt);
7974 /* Value-number all defs in the basic-block. */
7975 if (!skip_phis)
7976 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7977 gsi_next (&gsi))
7979 gphi *phi = gsi.phi ();
7980 tree res = PHI_RESULT (phi);
7981 vn_ssa_aux_t res_info = VN_INFO (res);
7982 if (!bb_visited)
7984 gcc_assert (!res_info->visited);
7985 res_info->valnum = VN_TOP;
7986 res_info->visited = true;
7989 /* When not iterating force backedge values to varying. */
7990 visit_stmt (phi, !iterate_phis);
7991 if (virtual_operand_p (res))
7992 continue;
7994 /* Eliminate */
7995       /* The interesting case is gcc.dg/tree-ssa/pr22230.c for the correctness
7996          of how we handle backedges and availability,
7997          and gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization.  */
7998 tree val = res_info->valnum;
7999 if (res != val && !iterate && eliminate)
8001 if (tree leader = avail.eliminate_avail (bb, res))
8003 if (leader != res
8004 /* Preserve loop-closed SSA form. */
8005 && (! lc_phi_nodes
8006 || is_gimple_min_invariant (leader)))
8008 if (dump_file && (dump_flags & TDF_DETAILS))
8010 fprintf (dump_file, "Replaced redundant PHI node "
8011 "defining ");
8012 print_generic_expr (dump_file, res);
8013 fprintf (dump_file, " with ");
8014 print_generic_expr (dump_file, leader);
8015 fprintf (dump_file, "\n");
8017 avail.eliminations++;
8019 if (may_propagate_copy (res, leader))
8021 /* Schedule for removal. */
8022 avail.to_remove.safe_push (phi);
8023 continue;
8025 /* ??? Else generate a copy stmt. */
8029       /* Only make defs available that are not already.  But make
8030 sure loop-closed SSA PHI node defs are picked up for
8031 downstream uses. */
8032 if (lc_phi_nodes
8033 || res == val
8034 || ! avail.eliminate_avail (bb, res))
8035 avail.eliminate_push_avail (bb, res);
8038 /* For empty BBs mark outgoing edges executable. For non-empty BBs
8039      we do this when processing the last stmt, as we have to do this
8040      before elimination, which otherwise forces GIMPLE_CONDs into
8041      if (1 != 0) style when seeing non-executable edges.  */
8042 if (gsi_end_p (gsi_start_bb (bb)))
8044 FOR_EACH_EDGE (e, ei, bb->succs)
8046 if (!(e->flags & EDGE_EXECUTABLE))
8048 if (dump_file && (dump_flags & TDF_DETAILS))
8049 fprintf (dump_file,
8050 "marking outgoing edge %d -> %d executable\n",
8051 e->src->index, e->dest->index);
8052 e->flags |= EDGE_EXECUTABLE;
8053 e->dest->flags |= BB_EXECUTABLE;
8055 else if (!(e->dest->flags & BB_EXECUTABLE))
8057 if (dump_file && (dump_flags & TDF_DETAILS))
8058 fprintf (dump_file,
8059 "marking destination block %d reachable\n",
8060 e->dest->index);
8061 e->dest->flags |= BB_EXECUTABLE;
8065 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
8066 !gsi_end_p (gsi); gsi_next (&gsi))
8068 ssa_op_iter i;
8069 tree op;
8070 if (!bb_visited)
8072 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
8074 vn_ssa_aux_t op_info = VN_INFO (op);
8075 gcc_assert (!op_info->visited);
8076 op_info->valnum = VN_TOP;
8077 op_info->visited = true;
8080 /* We somehow have to deal with uses that are not defined
8081 in the processed region. Forcing unvisited uses to
8082 varying here doesn't play well with def-use following during
8083 expression simplification, so we deal with this by checking
8084 the visited flag in SSA_VAL. */
8087 visit_stmt (gsi_stmt (gsi));
8089 gimple *last = gsi_stmt (gsi);
8090 e = NULL;
8091 switch (gimple_code (last))
8093 case GIMPLE_SWITCH:
8094 e = find_taken_edge (bb, vn_valueize (gimple_switch_index
8095 (as_a <gswitch *> (last))));
8096 break;
8097 case GIMPLE_COND:
8099 tree lhs = vn_valueize (gimple_cond_lhs (last));
8100 tree rhs = vn_valueize (gimple_cond_rhs (last));
8101 tree val = gimple_simplify (gimple_cond_code (last),
8102 boolean_type_node, lhs, rhs,
8103 NULL, vn_valueize);
8104           /* If the condition didn't simplify see if we have recorded
8105              an expression from edges taken so far.  */
8106 if (! val || TREE_CODE (val) != INTEGER_CST)
8108 vn_nary_op_t vnresult;
8109 tree ops[2];
8110 ops[0] = lhs;
8111 ops[1] = rhs;
8112 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
8113 boolean_type_node, ops,
8114 &vnresult);
8115 /* Did we get a predicated value? */
8116 if (! val && vnresult && vnresult->predicated_values)
8118 val = vn_nary_op_get_predicated_value (vnresult, bb);
8119 if (val && dump_file && (dump_flags & TDF_DETAILS))
8121 fprintf (dump_file, "Got predicated value ");
8122 print_generic_expr (dump_file, val, TDF_NONE);
8123 fprintf (dump_file, " for ");
8124 print_gimple_stmt (dump_file, last, TDF_SLIM);
8128 if (val)
8129 e = find_taken_edge (bb, val);
8130 if (! e)
8132 /* If we didn't manage to compute the taken edge then
8133 push predicated expressions for the condition itself
8134 and related conditions to the hashtables. This allows
8135 simplification of redundant conditions which is
8136 important as early cleanup. */
8137 edge true_e, false_e;
8138 extract_true_false_edges_from_block (bb, &true_e, &false_e);
8139 enum tree_code code = gimple_cond_code (last);
8140 enum tree_code icode
8141 = invert_tree_comparison (code, HONOR_NANS (lhs));
8142 tree ops[2];
8143 ops[0] = lhs;
8144 ops[1] = rhs;
8145 if ((do_region && bitmap_bit_p (exit_bbs, true_e->dest->index))
8146 || !can_track_predicate_on_edge (true_e))
8147 true_e = NULL;
8148 if ((do_region && bitmap_bit_p (exit_bbs, false_e->dest->index))
8149 || !can_track_predicate_on_edge (false_e))
8150 false_e = NULL;
8151 if (true_e)
8152 vn_nary_op_insert_pieces_predicated
8153 (2, code, boolean_type_node, ops,
8154 boolean_true_node, 0, true_e);
8155 if (false_e)
8156 vn_nary_op_insert_pieces_predicated
8157 (2, code, boolean_type_node, ops,
8158 boolean_false_node, 0, false_e);
8159 if (icode != ERROR_MARK)
8161 if (true_e)
8162 vn_nary_op_insert_pieces_predicated
8163 (2, icode, boolean_type_node, ops,
8164 boolean_false_node, 0, true_e);
8165 if (false_e)
8166 vn_nary_op_insert_pieces_predicated
8167 (2, icode, boolean_type_node, ops,
8168 boolean_true_node, 0, false_e);
8170 /* Relax for non-integers, inverted condition handled
8171 above. */
8172 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
8174 if (true_e)
8175 insert_related_predicates_on_edge (code, ops, true_e);
8176 if (false_e)
8177 insert_related_predicates_on_edge (icode, ops, false_e);
8180 break;
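	      /* A short sketch (hypothetical input, not from the testsuite)
		 of the effect of computing the taken edge above: once x_1
		 valueizes to 1 the condition folds, only the false edge is
		 marked executable and the call below is never reached.

		   int x = 1;
		   if (x != 1)
		     foo ();	// block stays non-executable  */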
8182 case GIMPLE_GOTO:
8183 e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
8184 break;
8185 default:
8186 e = NULL;
8188 if (e)
8190 todo = TODO_cleanup_cfg;
8191 if (!(e->flags & EDGE_EXECUTABLE))
8193 if (dump_file && (dump_flags & TDF_DETAILS))
8194 fprintf (dump_file,
8195 "marking known outgoing %sedge %d -> %d executable\n",
8196 e->flags & EDGE_DFS_BACK ? "back-" : "",
8197 e->src->index, e->dest->index);
8198 e->flags |= EDGE_EXECUTABLE;
8199 e->dest->flags |= BB_EXECUTABLE;
8201 else if (!(e->dest->flags & BB_EXECUTABLE))
8203 if (dump_file && (dump_flags & TDF_DETAILS))
8204 fprintf (dump_file,
8205 "marking destination block %d reachable\n",
8206 e->dest->index);
8207 e->dest->flags |= BB_EXECUTABLE;
8210 else if (gsi_one_before_end_p (gsi))
8212 FOR_EACH_EDGE (e, ei, bb->succs)
8214 if (!(e->flags & EDGE_EXECUTABLE))
8216 if (dump_file && (dump_flags & TDF_DETAILS))
8217 fprintf (dump_file,
8218 "marking outgoing edge %d -> %d executable\n",
8219 e->src->index, e->dest->index);
8220 e->flags |= EDGE_EXECUTABLE;
8221 e->dest->flags |= BB_EXECUTABLE;
8223 else if (!(e->dest->flags & BB_EXECUTABLE))
8225 if (dump_file && (dump_flags & TDF_DETAILS))
8226 fprintf (dump_file,
8227 "marking destination block %d reachable\n",
8228 e->dest->index);
8229 e->dest->flags |= BB_EXECUTABLE;
8234 /* Eliminate. That also pushes to avail. */
8235 if (eliminate && ! iterate)
8236 avail.eliminate_stmt (bb, &gsi);
8237 else
8238 /* If not eliminating, make all not already available defs
8239 available. But avoid picking up dead defs. */
8240 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
8241 if (! has_zero_uses (op)
8242 && ! avail.eliminate_avail (bb, op))
8243 avail.eliminate_push_avail (bb, op);
8246 /* Eliminate in destination PHI arguments. Always substitute in dest
8247 PHIs, even for non-executable edges. This handles region
8248 exits PHIs. */
8249 if (!iterate && eliminate)
8250 FOR_EACH_EDGE (e, ei, bb->succs)
8251 for (gphi_iterator gsi = gsi_start_phis (e->dest);
8252 !gsi_end_p (gsi); gsi_next (&gsi))
8254 gphi *phi = gsi.phi ();
8255 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
8256 tree arg = USE_FROM_PTR (use_p);
8257 if (TREE_CODE (arg) != SSA_NAME
8258 || virtual_operand_p (arg))
8259 continue;
8260 tree sprime;
8261 if (SSA_NAME_IS_DEFAULT_DEF (arg))
8263 sprime = SSA_VAL (arg);
8264 gcc_assert (TREE_CODE (sprime) != SSA_NAME
8265 || SSA_NAME_IS_DEFAULT_DEF (sprime));
8267 else
8268 /* Look for sth available at the definition block of the argument.
8269 This avoids inconsistencies between availability there which
8270 decides if the stmt can be removed and availability at the
8271 use site. The SSA property ensures that things available
8272 at the definition are also available at uses. */
8273 sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
8274 arg);
8275 if (sprime
8276 && sprime != arg
8277 && may_propagate_copy (arg, sprime, !(e->flags & EDGE_ABNORMAL)))
8278 propagate_value (use_p, sprime);
8281 vn_context_bb = NULL;
8282 return todo;
8285 /* Unwind state per basic-block. */
8287 struct unwind_state
8289 /* Times this block has been visited. */
8290 unsigned visited;
8291 /* Whether to handle this as iteration point or whether to treat
8292 incoming backedge PHI values as varying. */
8293 bool iterate;
8294 /* Maximum RPO index this block is reachable from. */
8295 int max_rpo;
8296 /* Unwind state. */
8297 void *ob_top;
8298 vn_reference_t ref_top;
8299 vn_phi_t phi_top;
8300 vn_nary_op_t nary_top;
8301 vn_avail *avail_top;
8304 /* Unwind the RPO VN state for iteration. */
8306 static void
8307 do_unwind (unwind_state *to, rpo_elim &avail)
8309 gcc_assert (to->iterate);
8310 for (; last_inserted_nary != to->nary_top;
8311 last_inserted_nary = last_inserted_nary->next)
8313 vn_nary_op_t *slot;
8314 slot = valid_info->nary->find_slot_with_hash
8315 (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
8316 /* Predication causes the need to restore previous state. */
8317 if ((*slot)->unwind_to)
8318 *slot = (*slot)->unwind_to;
8319 else
8320 valid_info->nary->clear_slot (slot);
8322 for (; last_inserted_phi != to->phi_top;
8323 last_inserted_phi = last_inserted_phi->next)
8325 vn_phi_t *slot;
8326 slot = valid_info->phis->find_slot_with_hash
8327 (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
8328 valid_info->phis->clear_slot (slot);
8330 for (; last_inserted_ref != to->ref_top;
8331 last_inserted_ref = last_inserted_ref->next)
8333 vn_reference_t *slot;
8334 slot = valid_info->references->find_slot_with_hash
8335 (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
8336 (*slot)->operands.release ();
8337 valid_info->references->clear_slot (slot);
8339 obstack_free (&vn_tables_obstack, to->ob_top);
8341 /* Prune [rpo_idx, ] from avail. */
8342 for (; last_pushed_avail && last_pushed_avail->avail != to->avail_top;)
8344 vn_ssa_aux_t val = last_pushed_avail;
8345 vn_avail *av = val->avail;
8346 val->avail = av->next;
8347 last_pushed_avail = av->next_undo;
8348 av->next = avail.m_avail_freelist;
8349 avail.m_avail_freelist = av;
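/* A rough sketch (hypothetical input, not from the testsuite) of when this
   unwinding triggers: on the first visit the backedge is treated
   optimistically and the PHI for i is value-numbered as if i were 0
   throughout the body; once the latch shows the PHI value changes over the
   backedge, the state recorded at the header is unwound and the body is
   value-numbered again with the corrected PHI value.

     i = 0;
     do
       i = i + 1;
     while (i < n);  */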
8353 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
8354 If ITERATE is true then treat backedges optimistically as not
8355 executed and iterate. If ELIMINATE is true then perform
8356 elimination, otherwise leave that to the caller. If SKIP_ENTRY_PHIS
8357 is true then force PHI nodes in ENTRY->dest to VARYING. */
8359 static unsigned
8360 do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
8361 bool iterate, bool eliminate, bool skip_entry_phis,
8362 vn_lookup_kind kind)
8364 unsigned todo = 0;
8365 default_vn_walk_kind = kind;
8367 /* We currently do not support region-based iteration when
8368 elimination is requested. */
8369 gcc_assert (!entry || !iterate || !eliminate);
8370 /* When iterating we need loop info up-to-date. */
8371 gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
8373 bool do_region = entry != NULL;
8374 if (!do_region)
8376 entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
8377 exit_bbs = BITMAP_ALLOC (NULL);
8378 bitmap_set_bit (exit_bbs, EXIT_BLOCK);
8381   /* Clear EDGE_DFS_BACK on "all" entry edges; the RPO order compute will
8382      re-mark those that are contained in the region.  */
8383 edge_iterator ei;
8384 edge e;
8385 FOR_EACH_EDGE (e, ei, entry->dest->preds)
8386 e->flags &= ~EDGE_DFS_BACK;
8388 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
8389 auto_vec<std::pair<int, int> > toplevel_scc_extents;
8390 int n = rev_post_order_and_mark_dfs_back_seme
8391 (fn, entry, exit_bbs, true, rpo, !iterate ? &toplevel_scc_extents : NULL);
8393 if (!do_region)
8394 BITMAP_FREE (exit_bbs);
8396 /* If there are any non-DFS_BACK edges into entry->dest skip
8397 processing PHI nodes for that block. This supports
8398 value-numbering loop bodies w/o the actual loop. */
8399 FOR_EACH_EDGE (e, ei, entry->dest->preds)
8400 if (e != entry
8401 && !(e->flags & EDGE_DFS_BACK))
8402 break;
8403 if (e != NULL && dump_file && (dump_flags & TDF_DETAILS))
8404 fprintf (dump_file, "Region does not contain all edges into "
8405 "the entry block, skipping its PHIs.\n");
8406 skip_entry_phis |= e != NULL;
8408 int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
8409 for (int i = 0; i < n; ++i)
8410 bb_to_rpo[rpo[i]] = i;
8412 unwind_state *rpo_state = XNEWVEC (unwind_state, n);
8414 rpo_elim avail (entry->dest);
8415 rpo_avail = &avail;
8417 /* Verify we have no extra entries into the region. */
8418 if (flag_checking && do_region)
8420 auto_bb_flag bb_in_region (fn);
8421 for (int i = 0; i < n; ++i)
8423 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
8424 bb->flags |= bb_in_region;
8426 /* We can't merge the first two loops because we cannot rely
8427 on EDGE_DFS_BACK for edges not within the region. But if
8428 we decide to always have the bb_in_region flag we can
8429 do the checking during the RPO walk itself (but then it's
8430 also easy to handle MEME conservatively). */
8431 for (int i = 0; i < n; ++i)
8433 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
8434 edge e;
8435 edge_iterator ei;
8436 FOR_EACH_EDGE (e, ei, bb->preds)
8437 gcc_assert (e == entry
8438 || (skip_entry_phis && bb == entry->dest)
8439 || (e->src->flags & bb_in_region));
8441 for (int i = 0; i < n; ++i)
8443 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
8444 bb->flags &= ~bb_in_region;
8448 /* Create the VN state. For the initial size of the various hashtables
8449 use a heuristic based on region size and number of SSA names. */
8450 unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
8451 / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
8452 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
8453 next_value_id = 1;
8454 next_constant_value_id = -1;
8456 vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
8457 gcc_obstack_init (&vn_ssa_aux_obstack);
8459 gcc_obstack_init (&vn_tables_obstack);
8460 gcc_obstack_init (&vn_tables_insert_obstack);
8461 valid_info = XCNEW (struct vn_tables_s);
8462 allocate_vn_table (valid_info, region_size);
8463 last_inserted_ref = NULL;
8464 last_inserted_phi = NULL;
8465 last_inserted_nary = NULL;
8466 last_pushed_avail = NULL;
8468 vn_valueize = rpo_vn_valueize;
8470 /* Initialize the unwind state and edge/BB executable state. */
8471 unsigned curr_scc = 0;
8472 for (int i = 0; i < n; ++i)
8474 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
8475 rpo_state[i].visited = 0;
8476 rpo_state[i].max_rpo = i;
8477 if (!iterate && curr_scc < toplevel_scc_extents.length ())
8479 if (i >= toplevel_scc_extents[curr_scc].first
8480 && i <= toplevel_scc_extents[curr_scc].second)
8481 rpo_state[i].max_rpo = toplevel_scc_extents[curr_scc].second;
8482 if (i == toplevel_scc_extents[curr_scc].second)
8483 curr_scc++;
8485 bb->flags &= ~BB_EXECUTABLE;
8486 bool has_backedges = false;
8487 edge e;
8488 edge_iterator ei;
8489 FOR_EACH_EDGE (e, ei, bb->preds)
8491 if (e->flags & EDGE_DFS_BACK)
8492 has_backedges = true;
8493 e->flags &= ~EDGE_EXECUTABLE;
8494 if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
8495 continue;
8497 rpo_state[i].iterate = iterate && has_backedges;
8499 entry->flags |= EDGE_EXECUTABLE;
8500 entry->dest->flags |= BB_EXECUTABLE;
8502   /* As a heuristic to improve compile-time we handle only the N innermost
8503 loops and the outermost one optimistically. */
8504 if (iterate)
8506 unsigned max_depth = param_rpo_vn_max_loop_depth;
8507 for (auto loop : loops_list (cfun, LI_ONLY_INNERMOST))
8508 if (loop_depth (loop) > max_depth)
8509 for (unsigned i = 2;
8510 i < loop_depth (loop) - max_depth; ++i)
8512 basic_block header = superloop_at_depth (loop, i)->header;
8513 bool non_latch_backedge = false;
8514 edge e;
8515 edge_iterator ei;
8516 FOR_EACH_EDGE (e, ei, header->preds)
8517 if (e->flags & EDGE_DFS_BACK)
8519 /* There can be a non-latch backedge into the header
8520 which is part of an outer irreducible region. We
8521 cannot avoid iterating this block then. */
8522 if (!dominated_by_p (CDI_DOMINATORS,
8523 e->src, e->dest))
8525 if (dump_file && (dump_flags & TDF_DETAILS))
8526 fprintf (dump_file, "non-latch backedge %d -> %d "
8527 "forces iteration of loop %d\n",
8528 e->src->index, e->dest->index, loop->num);
8529 non_latch_backedge = true;
8531 else
8532 e->flags |= EDGE_EXECUTABLE;
8534 rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
8538 uint64_t nblk = 0;
8539 int idx = 0;
8540 if (iterate)
8541 /* Go and process all blocks, iterating as necessary. */
8544 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
8546 /* If the block has incoming backedges remember unwind state. This
8547 is required even for non-executable blocks since in irreducible
8548 regions we might reach them via the backedge and re-start iterating
8549 from there.
8550 Note we can individually mark blocks with incoming backedges to
8551 not iterate where we then handle PHIs conservatively. We do that
8552 heuristically to reduce compile-time for degenerate cases. */
8553 if (rpo_state[idx].iterate)
8555 rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
8556 rpo_state[idx].ref_top = last_inserted_ref;
8557 rpo_state[idx].phi_top = last_inserted_phi;
8558 rpo_state[idx].nary_top = last_inserted_nary;
8559 rpo_state[idx].avail_top
8560 = last_pushed_avail ? last_pushed_avail->avail : NULL;
8563 if (!(bb->flags & BB_EXECUTABLE))
8565 if (dump_file && (dump_flags & TDF_DETAILS))
8566 fprintf (dump_file, "Block %d: BB%d found not executable\n",
8567 idx, bb->index);
8568 idx++;
8569 continue;
8572 if (dump_file && (dump_flags & TDF_DETAILS))
8573 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
8574 nblk++;
8575 todo |= process_bb (avail, bb,
8576 rpo_state[idx].visited != 0,
8577 rpo_state[idx].iterate,
8578 iterate, eliminate, do_region, exit_bbs, false);
8579 rpo_state[idx].visited++;
8581 /* Verify if changed values flow over executable outgoing backedges
8582 and those change destination PHI values (that's the thing we
8583 can easily verify). Reduce over all such edges to the farthest
8584 away PHI. */
8585 int iterate_to = -1;
8586 edge_iterator ei;
8587 edge e;
8588 FOR_EACH_EDGE (e, ei, bb->succs)
8589 if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
8590 == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
8591 && rpo_state[bb_to_rpo[e->dest->index]].iterate)
8593 int destidx = bb_to_rpo[e->dest->index];
8594 if (!rpo_state[destidx].visited)
8596 if (dump_file && (dump_flags & TDF_DETAILS))
8597 fprintf (dump_file, "Unvisited destination %d\n",
8598 e->dest->index);
8599 if (iterate_to == -1 || destidx < iterate_to)
8600 iterate_to = destidx;
8601 continue;
8603 if (dump_file && (dump_flags & TDF_DETAILS))
8604 fprintf (dump_file, "Looking for changed values of backedge"
8605 " %d->%d destination PHIs\n",
8606 e->src->index, e->dest->index);
8607 vn_context_bb = e->dest;
8608 gphi_iterator gsi;
8609 for (gsi = gsi_start_phis (e->dest);
8610 !gsi_end_p (gsi); gsi_next (&gsi))
8612 bool inserted = false;
8613 /* While we'd ideally just iterate on value changes
8614 we CSE PHIs and do that even across basic-block
8615 boundaries. So even hashtable state changes can
8616 be important (which is roughly equivalent to
8617 PHI argument value changes). To not excessively
8618 iterate because of that we track whether a PHI
8619 was CSEd to with GF_PLF_1. */
8620 bool phival_changed;
8621 if ((phival_changed = visit_phi (gsi.phi (),
8622 &inserted, false))
8623 || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
8625 if (!phival_changed
8626 && dump_file && (dump_flags & TDF_DETAILS))
8627 fprintf (dump_file, "PHI was CSEd and hashtable "
8628 "state (changed)\n");
8629 if (iterate_to == -1 || destidx < iterate_to)
8630 iterate_to = destidx;
8631 break;
8634 vn_context_bb = NULL;
8636 if (iterate_to != -1)
8638 do_unwind (&rpo_state[iterate_to], avail);
8639 idx = iterate_to;
8640 if (dump_file && (dump_flags & TDF_DETAILS))
8641 fprintf (dump_file, "Iterating to %d BB%d\n",
8642 iterate_to, rpo[iterate_to]);
8643 continue;
8646 idx++;
8648 while (idx < n);
8650 else /* !iterate */
8652 /* Process all blocks greedily with a worklist that enforces RPO
8653 processing of reachable blocks. */
8654 auto_bitmap worklist;
8655 bitmap_set_bit (worklist, 0);
8656 while (!bitmap_empty_p (worklist))
8658 int idx = bitmap_clear_first_set_bit (worklist);
8659 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
8660 gcc_assert ((bb->flags & BB_EXECUTABLE)
8661 && !rpo_state[idx].visited);
8663 if (dump_file && (dump_flags & TDF_DETAILS))
8664 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
8666           /* When we run into predecessor edges where we cannot trust their
8667              executable state, mark them executable so PHI processing will
8668 be conservative.
8669 ??? Do we need to force arguments flowing over that edge
8670 to be varying or will they even always be? */
8671 edge_iterator ei;
8672 edge e;
8673 FOR_EACH_EDGE (e, ei, bb->preds)
8674 if (!(e->flags & EDGE_EXECUTABLE)
8675 && (bb == entry->dest
8676 || (!rpo_state[bb_to_rpo[e->src->index]].visited
8677 && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
8678 >= (int)idx))))
8680 if (dump_file && (dump_flags & TDF_DETAILS))
8681 fprintf (dump_file, "Cannot trust state of predecessor "
8682 "edge %d -> %d, marking executable\n",
8683 e->src->index, e->dest->index);
8684 e->flags |= EDGE_EXECUTABLE;
8687 nblk++;
8688 todo |= process_bb (avail, bb, false, false, false, eliminate,
8689 do_region, exit_bbs,
8690 skip_entry_phis && bb == entry->dest);
8691 rpo_state[idx].visited++;
8693 FOR_EACH_EDGE (e, ei, bb->succs)
8694 if ((e->flags & EDGE_EXECUTABLE)
8695 && e->dest->index != EXIT_BLOCK
8696 && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
8697 && !rpo_state[bb_to_rpo[e->dest->index]].visited)
8698 bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
8702   /* Gather statistics; only interesting with statistics counters or a dump file active.  */
8703 int nex = 0;
8704 unsigned max_visited = 1;
8705 for (int i = 0; i < n; ++i)
8707 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
8708 if (bb->flags & BB_EXECUTABLE)
8709 nex++;
8710 statistics_histogram_event (cfun, "RPO block visited times",
8711 rpo_state[i].visited);
8712 if (rpo_state[i].visited > max_visited)
8713 max_visited = rpo_state[i].visited;
8715 unsigned nvalues = 0, navail = 0;
8716 for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
8717 i != vn_ssa_aux_hash->end (); ++i)
8719 nvalues++;
8720 vn_avail *av = (*i)->avail;
8721 while (av)
8723 navail++;
8724 av = av->next;
8727 statistics_counter_event (cfun, "RPO blocks", n);
8728 statistics_counter_event (cfun, "RPO blocks visited", nblk);
8729 statistics_counter_event (cfun, "RPO blocks executable", nex);
8730 statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
8731 statistics_histogram_event (cfun, "RPO num values", nvalues);
8732 statistics_histogram_event (cfun, "RPO num avail", navail);
8733 statistics_histogram_event (cfun, "RPO num lattice",
8734 vn_ssa_aux_hash->elements ());
8735 if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
8737 fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
8738 " blocks in total discovering %d executable blocks iterating "
8739 "%d.%d times, a block was visited max. %u times\n",
8740 n, nblk, nex,
8741 (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
8742 max_visited);
8743 fprintf (dump_file, "RPO tracked %d values available at %d locations "
8744 "and %" PRIu64 " lattice elements\n",
8745 nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
8748 if (eliminate)
8750 /* When !iterate we already performed elimination during the RPO
8751 walk. */
8752 if (iterate)
8754 /* Elimination for region-based VN needs to be done within the
8755 RPO walk. */
8756 gcc_assert (! do_region);
8757 /* Note we can't use avail.walk here because that gets confused
8758 by the existing availability and it will be less efficient
8759 as well. */
8760 todo |= eliminate_with_rpo_vn (NULL);
8762 else
8763 todo |= avail.eliminate_cleanup (do_region);
8766 vn_valueize = NULL;
8767 rpo_avail = NULL;
8769 XDELETEVEC (bb_to_rpo);
8770 XDELETEVEC (rpo);
8771 XDELETEVEC (rpo_state);
8773 return todo;
8776 /* Region-based entry for RPO VN. Performs value-numbering and elimination
8777 on the SEME region specified by ENTRY and EXIT_BBS. If ENTRY is not
8778 the only edge into the region at ENTRY->dest PHI nodes in ENTRY->dest
8779 are not considered.
8780 If ITERATE is true then treat backedges optimistically as not
8781 executed and iterate. If ELIMINATE is true then perform
8782 elimination, otherwise leave that to the caller.
8783 If SKIP_ENTRY_PHIS is true then force PHI nodes in ENTRY->dest to VARYING.
8784 KIND specifies the amount of work done for handling memory operations. */
8786 unsigned
8787 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
8788 bool iterate, bool eliminate, bool skip_entry_phis,
8789 vn_lookup_kind kind)
8791 auto_timevar tv (TV_TREE_RPO_VN);
8792 unsigned todo = do_rpo_vn_1 (fn, entry, exit_bbs, iterate, eliminate,
8793 skip_entry_phis, kind);
8794 free_rpo_vn ();
8795 return todo;
8799 namespace {
8801 const pass_data pass_data_fre =
8803 GIMPLE_PASS, /* type */
8804 "fre", /* name */
8805 OPTGROUP_NONE, /* optinfo_flags */
8806 TV_TREE_FRE, /* tv_id */
8807 ( PROP_cfg | PROP_ssa ), /* properties_required */
8808 0, /* properties_provided */
8809 0, /* properties_destroyed */
8810 0, /* todo_flags_start */
8811 0, /* todo_flags_finish */
8814 class pass_fre : public gimple_opt_pass
8816 public:
8817 pass_fre (gcc::context *ctxt)
8818 : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
8821 /* opt_pass methods: */
8822 opt_pass * clone () final override { return new pass_fre (m_ctxt); }
8823 void set_pass_param (unsigned int n, bool param) final override
8825 gcc_assert (n == 0);
8826 may_iterate = param;
8828 bool gate (function *) final override
8830 return flag_tree_fre != 0 && (may_iterate || optimize > 1);
8832 unsigned int execute (function *) final override;
8834 private:
8835 bool may_iterate;
8836 }; // class pass_fre
8838 unsigned int
8839 pass_fre::execute (function *fun)
8841 unsigned todo = 0;
8843 /* At -O[1g] use the cheap non-iterating mode. */
8844 bool iterate_p = may_iterate && (optimize > 1);
8845 calculate_dominance_info (CDI_DOMINATORS);
8846 if (iterate_p)
8847 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
8849 todo = do_rpo_vn_1 (fun, NULL, NULL, iterate_p, true, false, VN_WALKREWRITE);
8850 free_rpo_vn ();
8852 if (iterate_p)
8853 loop_optimizer_finalize ();
8855 if (scev_initialized_p ())
8856 scev_reset_htab ();
8858 /* For late FRE after IVOPTs and unrolling, see if we can
8859 remove some TREE_ADDRESSABLE and rewrite stuff into SSA. */
8860 if (!may_iterate)
8861 todo |= TODO_update_address_taken;
8863 return todo;
8866 } // anon namespace
8868 gimple_opt_pass *
8869 make_pass_fre (gcc::context *ctxt)
8871 return new pass_fre (ctxt);
8874 #undef BB_EXECUTABLE