/* gcc/tree-ssa-sccvn.c  */
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2020 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "splay-tree.h"
25 #include "backend.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-cfg.h"
58 #include "domwalk.h"
59 #include "gimple-iterator.h"
60 #include "gimple-match.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "tree-pass.h"
64 #include "statistics.h"
65 #include "langhooks.h"
66 #include "ipa-utils.h"
67 #include "dbgcnt.h"
68 #include "tree-cfgcleanup.h"
69 #include "tree-ssa-loop.h"
70 #include "tree-scalar-evolution.h"
71 #include "tree-ssa-loop-niter.h"
72 #include "builtins.h"
73 #include "tree-ssa-sccvn.h"
75 /* This algorithm is based on the SCC algorithm presented by Keith
76 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
77 (http://citeseer.ist.psu.edu/41805.html). In
78 straight-line code, it is equivalent to a regular hash-based value
79 numbering that is performed in reverse postorder.
81 For code with cycles, there are two alternatives, both of which
82 require keeping the hashtables separate from the actual list of
83 value numbers for SSA names.
85 1. Iterate value numbering in an RPO walk of the blocks, removing
86 all the entries from the hashtable after each iteration (but
87 keeping the SSA name->value number mapping between iterations).
88 Iterate until it does not change.
90 2. Perform value numbering as part of an SCC walk on the SSA graph,
91 iterating only the cycles in the SSA graph until they do not change
92 (using a separate, optimistic hashtable for value numbering the SCC
93 operands).
95 The second is not just faster in practice (because most SSA graph
96 cycles do not involve all the variables in the graph), it also has
97 some nice properties.
99 One of these nice properties is that when we pop an SCC off the
100 stack, we are guaranteed to have processed all the operands coming from
101 *outside of that SCC*, so we do not need to do anything special to
102 ensure they have value numbers.
104 Another nice property is that the SCC walk is done as part of a DFS
105 of the SSA graph, which makes it easy to perform combining and
106 simplifying operations at the same time.
108 The code below is deliberately written in a way that makes it easy
109 to separate the SCC walk from the other work it does.
111 In order to propagate constants through the code, we track which
112 expressions contain constants, and use those while folding. In
113 theory, we could also track expressions whose value numbers are
114 replaced, in case we end up folding based on expression
115 identities.
117 In order to value number memory, we assign value numbers to vuses.
118 This enables us to note that, for example, stores to the same
119 address of the same value from the same starting memory states are
120 equivalent.
121 TODO:
123 1. We can iterate only the changing portions of the SCCs, but
124 I have not seen an SCC big enough for this to be a win.
125 2. If you differentiate between phi nodes for loops and phi nodes
126 for if-then-else, you can properly consider phi nodes in different
127 blocks for equivalence.
128 3. We could value number vuses in more cases, particularly whole
129 structure copies.  */
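/* As a concrete illustration of the hash-based value numbering described
   above, the following is a minimal, self-contained sketch (guarded out;
   the toy_* names are invented for this example and are not GCC code).
   Syntactically identical expressions over operands with the same value
   numbers receive the same value number, which is what makes a = x + y
   and b = x + y redundant with each other in straight-line code.  */
#if 0
#include <stdio.h>

/* A toy expression: an opcode and the value numbers of its operands.  */
struct toy_expr { char op; int vn0, vn1; };

static struct toy_expr toy_table[64];
static int toy_value[64];
static int toy_nexprs, toy_next_vn = 1;

/* Return the value number for (OP, VN0, VN1), creating a new one if the
   expression was not seen before.  A real implementation hashes; a linear
   search keeps the sketch short.  */
static int
toy_value_number (char op, int vn0, int vn1)
{
  for (int i = 0; i < toy_nexprs; i++)
    if (toy_table[i].op == op
	&& toy_table[i].vn0 == vn0
	&& toy_table[i].vn1 == vn1)
      return toy_value[i];
  toy_table[toy_nexprs].op = op;
  toy_table[toy_nexprs].vn0 = vn0;
  toy_table[toy_nexprs].vn1 = vn1;
  return toy_value[toy_nexprs++] = toy_next_vn++;
}

int
main (void)
{
  int vn_x = toy_next_vn++;				/* input x */
  int vn_y = toy_next_vn++;				/* input y */
  int vn_a = toy_value_number ('+', vn_x, vn_y);	/* a = x + y */
  int vn_b = toy_value_number ('+', vn_x, vn_y);	/* b = x + y */
  /* a and b get the same value number, so b is redundant.  */
  printf ("vn(a)=%d vn(b)=%d equal=%d\n", vn_a, vn_b, vn_a == vn_b);
  return 0;
}
#endif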
132 /* There's no BB_EXECUTABLE but we can use BB_VISITED. */
133 #define BB_EXECUTABLE BB_VISITED
135 static vn_lookup_kind default_vn_walk_kind;
137 /* vn_nary_op hashtable helpers. */
139 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
141 typedef vn_nary_op_s *compare_type;
142 static inline hashval_t hash (const vn_nary_op_s *);
143 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
146 /* Return the computed hashcode for nary operation P1. */
148 inline hashval_t
149 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
151 return vno1->hashcode;
154 /* Compare nary operations P1 and P2 and return true if they are
155 equivalent. */
157 inline bool
158 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
160 return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
163 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
167 /* vn_phi hashtable helpers. */
169 static int
170 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
172 struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
174 static inline hashval_t hash (const vn_phi_s *);
175 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
178 /* Return the computed hashcode for phi operation P1. */
180 inline hashval_t
181 vn_phi_hasher::hash (const vn_phi_s *vp1)
183 return vp1->hashcode;
186 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
188 inline bool
189 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
191 return vp1 == vp2 || vn_phi_eq (vp1, vp2);
194 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
195 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
198 /* Compare two reference operands P1 and P2 for equality. Return true if
199 they are equal, and false otherwise. */
201 static int
202 vn_reference_op_eq (const void *p1, const void *p2)
204 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
205 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
207 return (vro1->opcode == vro2->opcode
208 /* We do not care for differences in type qualification. */
209 && (vro1->type == vro2->type
210 || (vro1->type && vro2->type
211 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
212 TYPE_MAIN_VARIANT (vro2->type))))
213 && expressions_equal_p (vro1->op0, vro2->op0)
214 && expressions_equal_p (vro1->op1, vro2->op1)
215 && expressions_equal_p (vro1->op2, vro2->op2));
218 /* Free a reference operation structure VP. */
220 static inline void
221 free_reference (vn_reference_s *vr)
223 vr->operands.release ();
227 /* vn_reference hashtable helpers. */
229 struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
231 static inline hashval_t hash (const vn_reference_s *);
232 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
235 /* Return the hashcode for a given reference operation P1. */
237 inline hashval_t
238 vn_reference_hasher::hash (const vn_reference_s *vr1)
240 return vr1->hashcode;
243 inline bool
244 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
246 return v == c || vn_reference_eq (v, c);
249 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
250 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
253 /* The set of VN hashtables. */
255 typedef struct vn_tables_s
257 vn_nary_op_table_type *nary;
258 vn_phi_table_type *phis;
259 vn_reference_table_type *references;
260 } *vn_tables_t;
263 /* vn_constant hashtable helpers. */
265 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
267 static inline hashval_t hash (const vn_constant_s *);
268 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
271 /* Hash table hash function for vn_constant_t. */
273 inline hashval_t
274 vn_constant_hasher::hash (const vn_constant_s *vc1)
276 return vc1->hashcode;
279 /* Hash table equality function for vn_constant_t. */
281 inline bool
282 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
284 if (vc1->hashcode != vc2->hashcode)
285 return false;
287 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
290 static hash_table<vn_constant_hasher> *constant_to_value_id;
293 /* Obstack we allocate the vn-tables elements from. */
294 static obstack vn_tables_obstack;
295 /* Special obstack we never unwind. */
296 static obstack vn_tables_insert_obstack;
298 static vn_reference_t last_inserted_ref;
299 static vn_phi_t last_inserted_phi;
300 static vn_nary_op_t last_inserted_nary;
302 /* Valid hashtables storing information we have proven to be
303 correct. */
304 static vn_tables_t valid_info;
307 /* Valueization hook. Valueize NAME if it is an SSA name, otherwise
308 just return it. */
309 tree (*vn_valueize) (tree);
310 tree vn_valueize_wrapper (tree t, void* context ATTRIBUTE_UNUSED)
312 return vn_valueize (t);
316 /* This represents the top of the VN lattice, which is the universal
317 value. */
319 tree VN_TOP;
321 /* Unique counter for our value ids. */
323 static unsigned int next_value_id;
324 static int next_constant_value_id;
327 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
328 are allocated on an obstack for locality reasons, and to free them
329 without looping over the vec. */
331 struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
333 typedef vn_ssa_aux_t value_type;
334 typedef tree compare_type;
335 static inline hashval_t hash (const value_type &);
336 static inline bool equal (const value_type &, const compare_type &);
337 static inline void mark_deleted (value_type &) {}
338 static const bool empty_zero_p = true;
339 static inline void mark_empty (value_type &e) { e = NULL; }
340 static inline bool is_deleted (value_type &) { return false; }
341 static inline bool is_empty (value_type &e) { return e == NULL; }
344 hashval_t
345 vn_ssa_aux_hasher::hash (const value_type &entry)
347 return SSA_NAME_VERSION (entry->name);
350 bool
351 vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
353 return name == entry->name;
356 static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
357 typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
358 static struct obstack vn_ssa_aux_obstack;
360 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
361 static unsigned int vn_nary_length_from_stmt (gimple *);
362 static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
363 static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
364 vn_nary_op_table_type *, bool);
365 static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
366 static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
367 enum tree_code, tree, tree *);
368 static tree vn_lookup_simplify_result (gimple_match_op *);
369 static vn_reference_t vn_reference_lookup_or_insert_for_pieces
370 (tree, alias_set_type, alias_set_type, tree,
371 vec<vn_reference_op_s, va_heap>, tree);
373 /* Return whether there is value numbering information for a given SSA name. */
375 bool
376 has_VN_INFO (tree name)
378 return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
381 vn_ssa_aux_t
382 VN_INFO (tree name)
384 vn_ssa_aux_t *res
385 = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
386 INSERT);
387 if (*res != NULL)
388 return *res;
390 vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
391 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
392 newinfo->name = name;
393 newinfo->valnum = VN_TOP;
394 /* We are using the visited flag to handle uses with defs not within the
395 region being value-numbered. */
396 newinfo->visited = false;
398 /* Given we create the VN_INFOs on-demand now, we have to do
399 initialization different from plain VN_TOP here. */
400 if (SSA_NAME_IS_DEFAULT_DEF (name))
401 switch (TREE_CODE (SSA_NAME_VAR (name)))
403 case VAR_DECL:
404 /* All undefined vars are VARYING. */
405 newinfo->valnum = name;
406 newinfo->visited = true;
407 break;
409 case PARM_DECL:
410 /* Parameters are VARYING but we can record a condition
411 if we know it is a non-NULL pointer. */
412 newinfo->visited = true;
413 newinfo->valnum = name;
414 if (POINTER_TYPE_P (TREE_TYPE (name))
415 && nonnull_arg_p (SSA_NAME_VAR (name)))
417 tree ops[2];
418 ops[0] = name;
419 ops[1] = build_int_cst (TREE_TYPE (name), 0);
420 vn_nary_op_t nary;
421 /* Allocate from non-unwinding stack. */
422 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
423 init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
424 boolean_type_node, ops);
425 nary->predicated_values = 0;
426 nary->u.result = boolean_true_node;
427 vn_nary_op_insert_into (nary, valid_info->nary, true);
428 gcc_assert (nary->unwind_to == NULL);
429 /* Also do not link it into the undo chain. */
430 last_inserted_nary = nary->next;
431 nary->next = (vn_nary_op_t)(void *)-1;
432 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
433 init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
434 boolean_type_node, ops);
435 nary->predicated_values = 0;
436 nary->u.result = boolean_false_node;
437 vn_nary_op_insert_into (nary, valid_info->nary, true);
438 gcc_assert (nary->unwind_to == NULL);
439 last_inserted_nary = nary->next;
440 nary->next = (vn_nary_op_t)(void *)-1;
441 if (dump_file && (dump_flags & TDF_DETAILS))
443 fprintf (dump_file, "Recording ");
444 print_generic_expr (dump_file, name, TDF_SLIM);
445 fprintf (dump_file, " != 0\n");
448 break;
450 case RESULT_DECL:
451 /* If the result is passed by invisible reference the default
452 def is initialized, otherwise it's uninitialized. Either way,
453 even undefined, it is VARYING. */
454 newinfo->visited = true;
455 newinfo->valnum = name;
456 break;
458 default:
459 gcc_unreachable ();
461 return newinfo;
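/* A hedged source-level illustration of the PARM_DECL handling above (the
   function and names below are hypothetical, not GCC code): for a pointer
   parameter covered by the nonnull attribute, the two nary records inserted
   above make "p != 0" look up as true and "p == 0" as false, so a later
   NULL test against the parameter's default definition can be folded.  */
#if 0
extern void use (int);

__attribute__ ((nonnull)) void
example (int *p)
{
  if (p == 0)		/* Expected to be folded away as always false.  */
    use (0);
  else
    use (*p);
}
#endif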
464 /* Return the SSA value of X. */
466 inline tree
467 SSA_VAL (tree x, bool *visited = NULL)
469 vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
470 if (visited)
471 *visited = tem && tem->visited;
472 return tem && tem->visited ? tem->valnum : x;
475 /* Return the SSA value of the VUSE x, supporting released VDEFs
476 during elimination which will value-number the VDEF to the
477 associated VUSE (but not substitute in the whole lattice). */
479 static inline tree
480 vuse_ssa_val (tree x)
482 if (!x)
483 return NULL_TREE;
487 x = SSA_VAL (x);
488 gcc_assert (x != VN_TOP);
490 while (SSA_NAME_IN_FREE_LIST (x));
492 return x;
495 /* Similar to the above but used as callback for walk_non_aliases_vuses
496 and thus should stop at unvisited VUSE to not walk across region
497 boundaries. */
499 static tree
500 vuse_valueize (tree vuse)
504 bool visited;
505 vuse = SSA_VAL (vuse, &visited);
506 if (!visited)
507 return NULL_TREE;
508 gcc_assert (vuse != VN_TOP);
510 while (SSA_NAME_IN_FREE_LIST (vuse));
511 return vuse;
515 /* Return the vn_kind the expression computed by the stmt should be
516 associated with. */
518 enum vn_kind
519 vn_get_stmt_kind (gimple *stmt)
521 switch (gimple_code (stmt))
523 case GIMPLE_CALL:
524 return VN_REFERENCE;
525 case GIMPLE_PHI:
526 return VN_PHI;
527 case GIMPLE_ASSIGN:
529 enum tree_code code = gimple_assign_rhs_code (stmt);
530 tree rhs1 = gimple_assign_rhs1 (stmt);
531 switch (get_gimple_rhs_class (code))
533 case GIMPLE_UNARY_RHS:
534 case GIMPLE_BINARY_RHS:
535 case GIMPLE_TERNARY_RHS:
536 return VN_NARY;
537 case GIMPLE_SINGLE_RHS:
538 switch (TREE_CODE_CLASS (code))
540 case tcc_reference:
541 /* VOP-less references can go through unary case. */
542 if ((code == REALPART_EXPR
543 || code == IMAGPART_EXPR
544 || code == VIEW_CONVERT_EXPR
545 || code == BIT_FIELD_REF)
546 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
547 return VN_NARY;
549 /* Fallthrough. */
550 case tcc_declaration:
551 return VN_REFERENCE;
553 case tcc_constant:
554 return VN_CONSTANT;
556 default:
557 if (code == ADDR_EXPR)
558 return (is_gimple_min_invariant (rhs1)
559 ? VN_CONSTANT : VN_REFERENCE);
560 else if (code == CONSTRUCTOR)
561 return VN_NARY;
562 return VN_NONE;
564 default:
565 return VN_NONE;
568 default:
569 return VN_NONE;
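/* Illustrative examples of the mapping above: a GIMPLE_PHI is VN_PHI, a call
   is VN_REFERENCE, "x_1 = a_2 + b_3" is VN_NARY, "x_1 = MEM[p_4]" is
   VN_REFERENCE, and "x_1 = 42" is VN_CONSTANT.  */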
573 /* Lookup a value id for CONSTANT and return it. If it does not
574 exist returns 0. */
576 unsigned int
577 get_constant_value_id (tree constant)
579 vn_constant_s **slot;
580 struct vn_constant_s vc;
582 vc.hashcode = vn_hash_constant_with_type (constant);
583 vc.constant = constant;
584 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
585 if (slot)
586 return (*slot)->value_id;
587 return 0;
590 /* Lookup a value id for CONSTANT, and if it does not exist, create a
591 new one and return it. If it does exist, return it. */
593 unsigned int
594 get_or_alloc_constant_value_id (tree constant)
596 vn_constant_s **slot;
597 struct vn_constant_s vc;
598 vn_constant_t vcp;
600 /* If the hashtable isn't initialized we're not running from PRE and thus
601 do not need value-ids. */
602 if (!constant_to_value_id)
603 return 0;
605 vc.hashcode = vn_hash_constant_with_type (constant);
606 vc.constant = constant;
607 slot = constant_to_value_id->find_slot (&vc, INSERT);
608 if (*slot)
609 return (*slot)->value_id;
611 vcp = XNEW (struct vn_constant_s);
612 vcp->hashcode = vc.hashcode;
613 vcp->constant = constant;
614 vcp->value_id = get_next_constant_value_id ();
615 *slot = vcp;
616 return vcp->value_id;
619 /* Compute the hash for a reference operand VRO1. */
621 static void
622 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
624 hstate.add_int (vro1->opcode);
625 if (vro1->op0)
626 inchash::add_expr (vro1->op0, hstate);
627 if (vro1->op1)
628 inchash::add_expr (vro1->op1, hstate);
629 if (vro1->op2)
630 inchash::add_expr (vro1->op2, hstate);
633 /* Compute a hash for the reference operation VR1 and return it. */
635 static hashval_t
636 vn_reference_compute_hash (const vn_reference_t vr1)
638 inchash::hash hstate;
639 hashval_t result;
640 int i;
641 vn_reference_op_t vro;
642 poly_int64 off = -1;
643 bool deref = false;
645 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
647 if (vro->opcode == MEM_REF)
648 deref = true;
649 else if (vro->opcode != ADDR_EXPR)
650 deref = false;
651 if (maybe_ne (vro->off, -1))
653 if (known_eq (off, -1))
654 off = 0;
655 off += vro->off;
657 else
659 if (maybe_ne (off, -1)
660 && maybe_ne (off, 0))
661 hstate.add_poly_int (off);
662 off = -1;
663 if (deref
664 && vro->opcode == ADDR_EXPR)
666 if (vro->op0)
668 tree op = TREE_OPERAND (vro->op0, 0);
669 hstate.add_int (TREE_CODE (op));
670 inchash::add_expr (op, hstate);
673 else
674 vn_reference_op_compute_hash (vro, hstate);
677 result = hstate.end ();
678 /* ??? We would ICE later if we hash instead of adding that in. */
679 if (vr1->vuse)
680 result += SSA_NAME_VERSION (vr1->vuse);
682 return result;
685 /* Return true if reference operations VR1 and VR2 are equivalent. This
686 means they have the same set of operands and vuses. */
688 bool
689 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
691 unsigned i, j;
693 /* Early out if this is not a hash collision. */
694 if (vr1->hashcode != vr2->hashcode)
695 return false;
697 /* The VOP needs to be the same. */
698 if (vr1->vuse != vr2->vuse)
699 return false;
701 /* If the operands are the same we are done. */
702 if (vr1->operands == vr2->operands)
703 return true;
705 if (COMPLETE_TYPE_P (vr1->type) != COMPLETE_TYPE_P (vr2->type)
706 || (COMPLETE_TYPE_P (vr1->type)
707 && !expressions_equal_p (TYPE_SIZE (vr1->type),
708 TYPE_SIZE (vr2->type))))
709 return false;
711 if (INTEGRAL_TYPE_P (vr1->type)
712 && INTEGRAL_TYPE_P (vr2->type))
714 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
715 return false;
717 else if (INTEGRAL_TYPE_P (vr1->type)
718 && (TYPE_PRECISION (vr1->type)
719 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
720 return false;
721 else if (INTEGRAL_TYPE_P (vr2->type)
722 && (TYPE_PRECISION (vr2->type)
723 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
724 return false;
726 i = 0;
727 j = 0;
730 poly_int64 off1 = 0, off2 = 0;
731 vn_reference_op_t vro1, vro2;
732 vn_reference_op_s tem1, tem2;
733 bool deref1 = false, deref2 = false;
734 for (; vr1->operands.iterate (i, &vro1); i++)
736 if (vro1->opcode == MEM_REF)
737 deref1 = true;
738 /* Do not look through a storage order barrier. */
739 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
740 return false;
741 if (known_eq (vro1->off, -1))
742 break;
743 off1 += vro1->off;
745 for (; vr2->operands.iterate (j, &vro2); j++)
747 if (vro2->opcode == MEM_REF)
748 deref2 = true;
749 /* Do not look through a storage order barrier. */
750 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
751 return false;
752 if (known_eq (vro2->off, -1))
753 break;
754 off2 += vro2->off;
756 if (maybe_ne (off1, off2))
757 return false;
758 if (deref1 && vro1->opcode == ADDR_EXPR)
760 memset (&tem1, 0, sizeof (tem1));
761 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
762 tem1.type = TREE_TYPE (tem1.op0);
763 tem1.opcode = TREE_CODE (tem1.op0);
764 vro1 = &tem1;
765 deref1 = false;
767 if (deref2 && vro2->opcode == ADDR_EXPR)
769 memset (&tem2, 0, sizeof (tem2));
770 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
771 tem2.type = TREE_TYPE (tem2.op0);
772 tem2.opcode = TREE_CODE (tem2.op0);
773 vro2 = &tem2;
774 deref2 = false;
776 if (deref1 != deref2)
777 return false;
778 if (!vn_reference_op_eq (vro1, vro2))
779 return false;
780 ++j;
781 ++i;
783 while (vr1->operands.length () != i
784 || vr2->operands.length () != j);
786 return true;
789 /* Copy the operations present in load/store REF into RESULT, a vector of
790 vn_reference_op_s's. */
792 static void
793 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
795 /* For non-calls, store the information that makes up the address. */
796 tree orig = ref;
797 while (ref)
799 vn_reference_op_s temp;
801 memset (&temp, 0, sizeof (temp));
802 temp.type = TREE_TYPE (ref);
803 temp.opcode = TREE_CODE (ref);
804 temp.off = -1;
806 switch (temp.opcode)
808 case MODIFY_EXPR:
809 temp.op0 = TREE_OPERAND (ref, 1);
810 break;
811 case WITH_SIZE_EXPR:
812 temp.op0 = TREE_OPERAND (ref, 1);
813 temp.off = 0;
814 break;
815 case MEM_REF:
816 /* The base address gets its own vn_reference_op_s structure. */
817 temp.op0 = TREE_OPERAND (ref, 1);
818 if (!mem_ref_offset (ref).to_shwi (&temp.off))
819 temp.off = -1;
820 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
821 temp.base = MR_DEPENDENCE_BASE (ref);
822 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
823 break;
824 case TARGET_MEM_REF:
825 /* The base address gets its own vn_reference_op_s structure. */
826 temp.op0 = TMR_INDEX (ref);
827 temp.op1 = TMR_STEP (ref);
828 temp.op2 = TMR_OFFSET (ref);
829 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
830 temp.base = MR_DEPENDENCE_BASE (ref);
831 result->safe_push (temp);
832 memset (&temp, 0, sizeof (temp));
833 temp.type = NULL_TREE;
834 temp.opcode = ERROR_MARK;
835 temp.op0 = TMR_INDEX2 (ref);
836 temp.off = -1;
837 break;
838 case BIT_FIELD_REF:
839 /* Record bits, position and storage order. */
840 temp.op0 = TREE_OPERAND (ref, 1);
841 temp.op1 = TREE_OPERAND (ref, 2);
842 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
843 temp.off = -1;
844 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
845 break;
846 case COMPONENT_REF:
847 /* The field decl is enough to unambiguously specify the field,
848 a matching type is not necessary and a mismatching type
849 is always a spurious difference. */
850 temp.type = NULL_TREE;
851 temp.op0 = TREE_OPERAND (ref, 1);
852 temp.op1 = TREE_OPERAND (ref, 2);
854 tree this_offset = component_ref_field_offset (ref);
855 if (this_offset
856 && poly_int_tree_p (this_offset))
858 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
859 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
861 poly_offset_int off
862 = (wi::to_poly_offset (this_offset)
863 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
864 /* Prohibit value-numbering zero offset components
865 of addresses the same before the pass folding
866 __builtin_object_size had a chance to run
867 (checking cfun->after_inlining does the
868 trick here). */
869 if (TREE_CODE (orig) != ADDR_EXPR
870 || maybe_ne (off, 0)
871 || cfun->after_inlining)
872 off.to_shwi (&temp.off);
876 break;
877 case ARRAY_RANGE_REF:
878 case ARRAY_REF:
880 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
881 /* Record index as operand. */
882 temp.op0 = TREE_OPERAND (ref, 1);
883 /* Always record lower bounds and element size. */
884 temp.op1 = array_ref_low_bound (ref);
885 /* But record element size in units of the type alignment. */
886 temp.op2 = TREE_OPERAND (ref, 3);
887 temp.align = eltype->type_common.align;
888 if (! temp.op2)
889 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
890 size_int (TYPE_ALIGN_UNIT (eltype)));
891 if (poly_int_tree_p (temp.op0)
892 && poly_int_tree_p (temp.op1)
893 && TREE_CODE (temp.op2) == INTEGER_CST)
895 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
896 - wi::to_poly_offset (temp.op1))
897 * wi::to_offset (temp.op2)
898 * vn_ref_op_align_unit (&temp));
899 off.to_shwi (&temp.off);
902 break;
903 case VAR_DECL:
904 if (DECL_HARD_REGISTER (ref))
906 temp.op0 = ref;
907 break;
909 /* Fallthru. */
910 case PARM_DECL:
911 case CONST_DECL:
912 case RESULT_DECL:
913 /* Canonicalize decls to MEM[&decl] which is what we end up with
914 when valueizing MEM[ptr] with ptr = &decl. */
915 temp.opcode = MEM_REF;
916 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
917 temp.off = 0;
918 result->safe_push (temp);
919 temp.opcode = ADDR_EXPR;
920 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
921 temp.type = TREE_TYPE (temp.op0);
922 temp.off = -1;
923 break;
924 case STRING_CST:
925 case INTEGER_CST:
926 case POLY_INT_CST:
927 case COMPLEX_CST:
928 case VECTOR_CST:
929 case REAL_CST:
930 case FIXED_CST:
931 case CONSTRUCTOR:
932 case SSA_NAME:
933 temp.op0 = ref;
934 break;
935 case ADDR_EXPR:
936 if (is_gimple_min_invariant (ref))
938 temp.op0 = ref;
939 break;
941 break;
942 /* These are only interesting for their operands, their
943 existence, and their type. They will never be the last
944 ref in the chain of references (i.e. they require an
945 operand), so we don't have to put anything
946 for op* as it will be handled by the iteration. */
947 case REALPART_EXPR:
948 temp.off = 0;
949 break;
950 case VIEW_CONVERT_EXPR:
951 temp.off = 0;
952 temp.reverse = storage_order_barrier_p (ref);
953 break;
954 case IMAGPART_EXPR:
955 /* This is only interesting for its constant offset. */
956 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
957 break;
958 default:
959 gcc_unreachable ();
961 result->safe_push (temp);
963 if (REFERENCE_CLASS_P (ref)
964 || TREE_CODE (ref) == MODIFY_EXPR
965 || TREE_CODE (ref) == WITH_SIZE_EXPR
966 || (TREE_CODE (ref) == ADDR_EXPR
967 && !is_gimple_min_invariant (ref)))
968 ref = TREE_OPERAND (ref, 0);
969 else
970 ref = NULL_TREE;
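/* For illustration (a rough sketch, not an actual dump format): for a load
   from x.a[i].b, where x is a plain VAR_DECL, the loop above pushes roughly
   the following vn_reference_op_s entries, outermost first:
     { COMPONENT_REF, op0 = FIELD_DECL b }
     { ARRAY_REF,     op0 = i, op1 = lower bound, op2 = scaled element size }
     { COMPONENT_REF, op0 = FIELD_DECL a }
     { MEM_REF,       op0 = 0, off = 0 }
     { ADDR_EXPR,     op0 = &x }
   i.e. the decl itself is canonicalized to MEM[&x + 0], matching what
   valueizing MEM[ptr] with ptr = &x would produce.  */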
974 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
975 operands in *OPS, the reference alias set SET, the base alias set BASE_SET
976 and the reference type TYPE. Return true if something useful was produced. */
978 bool
979 ao_ref_init_from_vn_reference (ao_ref *ref,
980 alias_set_type set, alias_set_type base_set,
981 tree type, vec<vn_reference_op_s> ops)
983 vn_reference_op_t op;
984 unsigned i;
985 tree base = NULL_TREE;
986 tree *op0_p = &base;
987 poly_offset_int offset = 0;
988 poly_offset_int max_size;
989 poly_offset_int size = -1;
990 tree size_tree = NULL_TREE;
992 /* First get the final access size from just the outermost expression. */
993 op = &ops[0];
994 if (op->opcode == COMPONENT_REF)
995 size_tree = DECL_SIZE (op->op0);
996 else if (op->opcode == BIT_FIELD_REF)
997 size_tree = op->op0;
998 else
1000 machine_mode mode = TYPE_MODE (type);
1001 if (mode == BLKmode)
1002 size_tree = TYPE_SIZE (type);
1003 else
1004 size = GET_MODE_BITSIZE (mode);
1006 if (size_tree != NULL_TREE
1007 && poly_int_tree_p (size_tree))
1008 size = wi::to_poly_offset (size_tree);
1010 /* Initially, maxsize is the same as the accessed element size.
1011 In the following it will only grow (or become -1). */
1012 max_size = size;
1014 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1015 and find the ultimate containing object. */
1016 FOR_EACH_VEC_ELT (ops, i, op)
1018 switch (op->opcode)
1020 /* These may be in the reference ops, but we cannot do anything
1021 sensible with them here. */
1022 case ADDR_EXPR:
1023 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1024 if (base != NULL_TREE
1025 && TREE_CODE (base) == MEM_REF
1026 && op->op0
1027 && DECL_P (TREE_OPERAND (op->op0, 0)))
1029 vn_reference_op_t pop = &ops[i-1];
1030 base = TREE_OPERAND (op->op0, 0);
1031 if (known_eq (pop->off, -1))
1033 max_size = -1;
1034 offset = 0;
1036 else
1037 offset += pop->off * BITS_PER_UNIT;
1038 op0_p = NULL;
1039 break;
1041 /* Fallthru. */
1042 case CALL_EXPR:
1043 return false;
1045 /* Record the base objects. */
1046 case MEM_REF:
1047 *op0_p = build2 (MEM_REF, op->type,
1048 NULL_TREE, op->op0);
1049 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1050 MR_DEPENDENCE_BASE (*op0_p) = op->base;
1051 op0_p = &TREE_OPERAND (*op0_p, 0);
1052 break;
1054 case VAR_DECL:
1055 case PARM_DECL:
1056 case RESULT_DECL:
1057 case SSA_NAME:
1058 *op0_p = op->op0;
1059 op0_p = NULL;
1060 break;
1062 /* And now the usual component-reference style ops. */
1063 case BIT_FIELD_REF:
1064 offset += wi::to_poly_offset (op->op1);
1065 break;
1067 case COMPONENT_REF:
1069 tree field = op->op0;
1070 /* We do not have a complete COMPONENT_REF tree here so we
1071 cannot use component_ref_field_offset. Do the interesting
1072 parts manually. */
1073 tree this_offset = DECL_FIELD_OFFSET (field);
1075 if (op->op1 || !poly_int_tree_p (this_offset))
1076 max_size = -1;
1077 else
1079 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1080 << LOG2_BITS_PER_UNIT);
1081 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1082 offset += woffset;
1084 break;
1087 case ARRAY_RANGE_REF:
1088 case ARRAY_REF:
1089 /* We recorded the lower bound and the element size. */
1090 if (!poly_int_tree_p (op->op0)
1091 || !poly_int_tree_p (op->op1)
1092 || TREE_CODE (op->op2) != INTEGER_CST)
1093 max_size = -1;
1094 else
1096 poly_offset_int woffset
1097 = wi::sext (wi::to_poly_offset (op->op0)
1098 - wi::to_poly_offset (op->op1),
1099 TYPE_PRECISION (TREE_TYPE (op->op0)));
1100 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1101 woffset <<= LOG2_BITS_PER_UNIT;
1102 offset += woffset;
1104 break;
1106 case REALPART_EXPR:
1107 break;
1109 case IMAGPART_EXPR:
1110 offset += size;
1111 break;
1113 case VIEW_CONVERT_EXPR:
1114 break;
1116 case STRING_CST:
1117 case INTEGER_CST:
1118 case COMPLEX_CST:
1119 case VECTOR_CST:
1120 case REAL_CST:
1121 case CONSTRUCTOR:
1122 case CONST_DECL:
1123 return false;
1125 default:
1126 return false;
1130 if (base == NULL_TREE)
1131 return false;
1133 ref->ref = NULL_TREE;
1134 ref->base = base;
1135 ref->ref_alias_set = set;
1136 ref->base_alias_set = base_set;
1137 /* We discount volatiles from value-numbering elsewhere. */
1138 ref->volatile_p = false;
1140 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1142 ref->offset = 0;
1143 ref->size = -1;
1144 ref->max_size = -1;
1145 return true;
1148 if (!offset.to_shwi (&ref->offset))
1150 ref->offset = 0;
1151 ref->max_size = -1;
1152 return true;
1155 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1156 ref->max_size = -1;
1158 return true;
1161 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1162 vn_reference_op_s's. */
1164 static void
1165 copy_reference_ops_from_call (gcall *call,
1166 vec<vn_reference_op_s> *result)
1168 vn_reference_op_s temp;
1169 unsigned i;
1170 tree lhs = gimple_call_lhs (call);
1171 int lr;
1173 /* If two calls have different non-SSA lhs, their vdef value numbers should
1174 be different. By adding the lhs to the vector here, we ensure that the
1175 hashcode is different, guaranteeing a different value number. */
1176 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1178 memset (&temp, 0, sizeof (temp));
1179 temp.opcode = MODIFY_EXPR;
1180 temp.type = TREE_TYPE (lhs);
1181 temp.op0 = lhs;
1182 temp.off = -1;
1183 result->safe_push (temp);
1186 /* Copy the type, opcode, function, static chain and EH region, if any. */
1187 memset (&temp, 0, sizeof (temp));
1188 temp.type = gimple_call_fntype (call);
1189 temp.opcode = CALL_EXPR;
1190 temp.op0 = gimple_call_fn (call);
1191 temp.op1 = gimple_call_chain (call);
1192 if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1193 temp.op2 = size_int (lr);
1194 temp.off = -1;
1195 result->safe_push (temp);
1197 /* Copy the call arguments. As they can be references as well,
1198 just chain them together. */
1199 for (i = 0; i < gimple_call_num_args (call); ++i)
1201 tree callarg = gimple_call_arg (call, i);
1202 copy_reference_ops_from_ref (callarg, result);
1206 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1207 *I_P to point to the last element of the replacement. */
1208 static bool
1209 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1210 unsigned int *i_p)
1212 unsigned int i = *i_p;
1213 vn_reference_op_t op = &(*ops)[i];
1214 vn_reference_op_t mem_op = &(*ops)[i - 1];
1215 tree addr_base;
1216 poly_int64 addr_offset = 0;
1218 /* The only thing we have to do is, for &OBJ.foo.bar, add the offset
1219 of .foo.bar to the preceding MEM_REF offset and replace the
1220 address with &OBJ. */
1221 addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
1222 &addr_offset, vn_valueize);
1223 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1224 if (addr_base != TREE_OPERAND (op->op0, 0))
1226 poly_offset_int off
1227 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1228 SIGNED)
1229 + addr_offset);
1230 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1231 op->op0 = build_fold_addr_expr (addr_base);
1232 if (tree_fits_shwi_p (mem_op->op0))
1233 mem_op->off = tree_to_shwi (mem_op->op0);
1234 else
1235 mem_op->off = -1;
1236 return true;
1238 return false;
1241 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1242 *I_P to point to the last element of the replacement. */
1243 static bool
1244 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1245 unsigned int *i_p)
1247 bool changed = false;
1248 vn_reference_op_t op;
1252 unsigned int i = *i_p;
1253 op = &(*ops)[i];
1254 vn_reference_op_t mem_op = &(*ops)[i - 1];
1255 gimple *def_stmt;
1256 enum tree_code code;
1257 poly_offset_int off;
1259 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1260 if (!is_gimple_assign (def_stmt))
1261 return changed;
1263 code = gimple_assign_rhs_code (def_stmt);
1264 if (code != ADDR_EXPR
1265 && code != POINTER_PLUS_EXPR)
1266 return changed;
1268 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1270 /* The only thing we have to do is, for &OBJ.foo.bar, add the offset
1271 of .foo.bar to the preceding MEM_REF offset and replace the
1272 address with &OBJ. */
1273 if (code == ADDR_EXPR)
1275 tree addr, addr_base;
1276 poly_int64 addr_offset;
1278 addr = gimple_assign_rhs1 (def_stmt);
1279 addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
1280 &addr_offset,
1281 vn_valueize);
1282 /* If that didn't work because the address isn't invariant, propagate
1283 the reference tree from the address operation in case the current
1284 dereference has no offset. */
1285 if (!addr_base
1286 && *i_p == ops->length () - 1
1287 && known_eq (off, 0)
1288 /* This makes us disable this transform for PRE, where the
1289 reference ops might also be used for code insertion, which
1290 would be invalid. */
1291 && default_vn_walk_kind == VN_WALKREWRITE)
1293 auto_vec<vn_reference_op_s, 32> tem;
1294 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1295 /* Make sure to preserve TBAA info. The only objects not
1296 wrapped in MEM_REFs that can have their address taken are
1297 STRING_CSTs. */
1298 if (tem.length () >= 2
1299 && tem[tem.length () - 2].opcode == MEM_REF)
1301 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1302 new_mem_op->op0
1303 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1304 wi::to_poly_wide (new_mem_op->op0));
1306 else
1307 gcc_assert (tem.last ().opcode == STRING_CST);
1308 ops->pop ();
1309 ops->pop ();
1310 ops->safe_splice (tem);
1311 --*i_p;
1312 return true;
1314 if (!addr_base
1315 || TREE_CODE (addr_base) != MEM_REF
1316 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1317 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
1318 0))))
1319 return changed;
1321 off += addr_offset;
1322 off += mem_ref_offset (addr_base);
1323 op->op0 = TREE_OPERAND (addr_base, 0);
1325 else
1327 tree ptr, ptroff;
1328 ptr = gimple_assign_rhs1 (def_stmt);
1329 ptroff = gimple_assign_rhs2 (def_stmt);
1330 if (TREE_CODE (ptr) != SSA_NAME
1331 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1332 /* Make sure to not endlessly recurse.
1333 See gcc.dg/tree-ssa/20040408-1.c for an example. Can easily
1334 happen when we value-number a PHI to its backedge value. */
1335 || SSA_VAL (ptr) == op->op0
1336 || !poly_int_tree_p (ptroff))
1337 return changed;
1339 off += wi::to_poly_offset (ptroff);
1340 op->op0 = ptr;
1343 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1344 if (tree_fits_shwi_p (mem_op->op0))
1345 mem_op->off = tree_to_shwi (mem_op->op0);
1346 else
1347 mem_op->off = -1;
1348 /* ??? Can end up with endless recursion here!?
1349 gcc.c-torture/execute/strcmp-1.c */
1350 if (TREE_CODE (op->op0) == SSA_NAME)
1351 op->op0 = SSA_VAL (op->op0);
1352 if (TREE_CODE (op->op0) != SSA_NAME)
1353 op->opcode = TREE_CODE (op->op0);
1355 changed = true;
1357 /* Tail-recurse. */
1358 while (TREE_CODE (op->op0) == SSA_NAME);
1360 /* Fold a remaining *&. */
1361 if (TREE_CODE (op->op0) == ADDR_EXPR)
1362 vn_reference_fold_indirect (ops, i_p);
1364 return changed;
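/* A rough illustration of the forward propagation above: given
     p_1 = q_2 + 4;		(POINTER_PLUS_EXPR)
     ... = MEM[p_1 + 8];
   the definition of p_1 is folded into the preceding MEM_REF operand so the
   reference becomes MEM[q_2 + 12], letting it value-number the same as other
   accesses spelled directly against q_2.  */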
1367 /* Optimize the reference REF to a constant if possible or return
1368 NULL_TREE if not. */
1370 tree
1371 fully_constant_vn_reference_p (vn_reference_t ref)
1373 vec<vn_reference_op_s> operands = ref->operands;
1374 vn_reference_op_t op;
1376 /* Try to simplify the translated expression if it is
1377 a call to a builtin function with at most two arguments. */
1378 op = &operands[0];
1379 if (op->opcode == CALL_EXPR
1380 && TREE_CODE (op->op0) == ADDR_EXPR
1381 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1382 && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
1383 && operands.length () >= 2
1384 && operands.length () <= 3)
1386 vn_reference_op_t arg0, arg1 = NULL;
1387 bool anyconst = false;
1388 arg0 = &operands[1];
1389 if (operands.length () > 2)
1390 arg1 = &operands[2];
1391 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1392 || (arg0->opcode == ADDR_EXPR
1393 && is_gimple_min_invariant (arg0->op0)))
1394 anyconst = true;
1395 if (arg1
1396 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1397 || (arg1->opcode == ADDR_EXPR
1398 && is_gimple_min_invariant (arg1->op0))))
1399 anyconst = true;
1400 if (anyconst)
1402 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1403 arg1 ? 2 : 1,
1404 arg0->op0,
1405 arg1 ? arg1->op0 : NULL);
1406 if (folded
1407 && TREE_CODE (folded) == NOP_EXPR)
1408 folded = TREE_OPERAND (folded, 0);
1409 if (folded
1410 && is_gimple_min_invariant (folded))
1411 return folded;
1415 /* Simplify reads from constants or constant initializers. */
1416 else if (BITS_PER_UNIT == 8
1417 && COMPLETE_TYPE_P (ref->type)
1418 && is_gimple_reg_type (ref->type))
1420 poly_int64 off = 0;
1421 HOST_WIDE_INT size;
1422 if (INTEGRAL_TYPE_P (ref->type))
1423 size = TYPE_PRECISION (ref->type);
1424 else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
1425 size = tree_to_shwi (TYPE_SIZE (ref->type));
1426 else
1427 return NULL_TREE;
1428 if (size % BITS_PER_UNIT != 0
1429 || size > MAX_BITSIZE_MODE_ANY_MODE)
1430 return NULL_TREE;
1431 size /= BITS_PER_UNIT;
1432 unsigned i;
1433 for (i = 0; i < operands.length (); ++i)
1435 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1437 ++i;
1438 break;
1440 if (known_eq (operands[i].off, -1))
1441 return NULL_TREE;
1442 off += operands[i].off;
1443 if (operands[i].opcode == MEM_REF)
1445 ++i;
1446 break;
1449 vn_reference_op_t base = &operands[--i];
1450 tree ctor = error_mark_node;
1451 tree decl = NULL_TREE;
1452 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1453 ctor = base->op0;
1454 else if (base->opcode == MEM_REF
1455 && base[1].opcode == ADDR_EXPR
1456 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1457 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1458 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1460 decl = TREE_OPERAND (base[1].op0, 0);
1461 if (TREE_CODE (decl) == STRING_CST)
1462 ctor = decl;
1463 else
1464 ctor = ctor_for_folding (decl);
1466 if (ctor == NULL_TREE)
1467 return build_zero_cst (ref->type);
1468 else if (ctor != error_mark_node)
1470 HOST_WIDE_INT const_off;
1471 if (decl)
1473 tree res = fold_ctor_reference (ref->type, ctor,
1474 off * BITS_PER_UNIT,
1475 size * BITS_PER_UNIT, decl);
1476 if (res)
1478 STRIP_USELESS_TYPE_CONVERSION (res);
1479 if (is_gimple_min_invariant (res))
1480 return res;
1483 else if (off.is_constant (&const_off))
1485 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1486 int len = native_encode_expr (ctor, buf, size, const_off);
1487 if (len > 0)
1488 return native_interpret_expr (ref->type, buf, len);
1493 return NULL_TREE;
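/* A hedged source-level example of the constant-initializer path above (the
   identifiers are made up for illustration): the load below reads at a known
   offset from a readonly CONSTRUCTOR, so fully_constant_vn_reference_p can
   replace it by the constant 3.  */
#if 0
static const int tbl[4] = { 1, 2, 3, 5 };

int
read_tbl (void)
{
  return tbl[2];	/* Expected to simplify to 3.  */
}
#endif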
1496 /* Return true if OPS contain a storage order barrier. */
1498 static bool
1499 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1501 vn_reference_op_t op;
1502 unsigned i;
1504 FOR_EACH_VEC_ELT (ops, i, op)
1505 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1506 return true;
1508 return false;
1511 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1512 structures into their value numbers. This is done in-place, and
1513 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1514 whether any operands were valueized. */
1516 static vec<vn_reference_op_s>
1517 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
1518 bool with_avail = false)
1520 vn_reference_op_t vro;
1521 unsigned int i;
1523 *valueized_anything = false;
1525 FOR_EACH_VEC_ELT (orig, i, vro)
1527 if (vro->opcode == SSA_NAME
1528 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1530 tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1531 if (tem != vro->op0)
1533 *valueized_anything = true;
1534 vro->op0 = tem;
1536 /* If it transforms from an SSA_NAME to a constant, update
1537 the opcode. */
1538 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1539 vro->opcode = TREE_CODE (vro->op0);
1541 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1543 tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1544 if (tem != vro->op1)
1546 *valueized_anything = true;
1547 vro->op1 = tem;
1550 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1552 tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1553 if (tem != vro->op2)
1555 *valueized_anything = true;
1556 vro->op2 = tem;
1559 /* If it transforms from an SSA_NAME to an address, fold with
1560 a preceding indirect reference. */
1561 if (i > 0
1562 && vro->op0
1563 && TREE_CODE (vro->op0) == ADDR_EXPR
1564 && orig[i - 1].opcode == MEM_REF)
1566 if (vn_reference_fold_indirect (&orig, &i))
1567 *valueized_anything = true;
1569 else if (i > 0
1570 && vro->opcode == SSA_NAME
1571 && orig[i - 1].opcode == MEM_REF)
1573 if (vn_reference_maybe_forwprop_address (&orig, &i))
1574 *valueized_anything = true;
1576 /* If it transforms a non-constant ARRAY_REF into a constant
1577 one, adjust the constant offset. */
1578 else if (vro->opcode == ARRAY_REF
1579 && known_eq (vro->off, -1)
1580 && poly_int_tree_p (vro->op0)
1581 && poly_int_tree_p (vro->op1)
1582 && TREE_CODE (vro->op2) == INTEGER_CST)
1584 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1585 - wi::to_poly_offset (vro->op1))
1586 * wi::to_offset (vro->op2)
1587 * vn_ref_op_align_unit (vro));
1588 off.to_shwi (&vro->off);
1592 return orig;
1595 static vec<vn_reference_op_s>
1596 valueize_refs (vec<vn_reference_op_s> orig)
1598 bool tem;
1599 return valueize_refs_1 (orig, &tem);
1602 static vec<vn_reference_op_s> shared_lookup_references;
1604 /* Create a vector of vn_reference_op_s structures from REF, a
1605 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1606 this function. *VALUEIZED_ANYTHING will specify whether any
1607 operands were valueized. */
1609 static vec<vn_reference_op_s>
1610 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1612 if (!ref)
1613 return vNULL;
1614 shared_lookup_references.truncate (0);
1615 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1616 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1617 valueized_anything);
1618 return shared_lookup_references;
1621 /* Create a vector of vn_reference_op_s structures from CALL, a
1622 call statement. The vector is shared among all callers of
1623 this function. */
1625 static vec<vn_reference_op_s>
1626 valueize_shared_reference_ops_from_call (gcall *call)
1628 if (!call)
1629 return vNULL;
1630 shared_lookup_references.truncate (0);
1631 copy_reference_ops_from_call (call, &shared_lookup_references);
1632 shared_lookup_references = valueize_refs (shared_lookup_references);
1633 return shared_lookup_references;
1636 /* Lookup a SCCVN reference operation VR in the current hash table.
1637 Returns the resulting value number if it exists in the hash table,
1638 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1639 vn_reference_t stored in the hashtable if something is found. */
1641 static tree
1642 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1644 vn_reference_s **slot;
1645 hashval_t hash;
1647 hash = vr->hashcode;
1648 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1649 if (slot)
1651 if (vnresult)
1652 *vnresult = (vn_reference_t)*slot;
1653 return ((vn_reference_t)*slot)->result;
1656 return NULL_TREE;
1660 /* Partial definition tracking support. */
1662 struct pd_range
1664 HOST_WIDE_INT offset;
1665 HOST_WIDE_INT size;
1668 struct pd_data
1670 tree rhs;
1671 HOST_WIDE_INT offset;
1672 HOST_WIDE_INT size;
1675 /* Context for alias walking. */
1677 struct vn_walk_cb_data
1679 vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
1680 vn_lookup_kind vn_walk_kind_, bool tbaa_p_, tree mask_)
1681 : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
1682 mask (mask_), masked_result (NULL_TREE), vn_walk_kind (vn_walk_kind_),
1683 tbaa_p (tbaa_p_), saved_operands (vNULL), first_set (-2),
1684 first_base_set (-2), known_ranges (NULL)
1686 if (!last_vuse_ptr)
1687 last_vuse_ptr = &last_vuse;
1688 ao_ref_init (&orig_ref, orig_ref_);
1689 if (mask)
1691 wide_int w = wi::to_wide (mask);
1692 unsigned int pos = 0, prec = w.get_precision ();
1693 pd_data pd;
1694 pd.rhs = build_constructor (NULL_TREE, NULL);
1695 /* When a bitwise AND with a constant is done on a memory load,
1696 we don't really need all the bits to be defined or defined
1697 to constants; we don't care what is in the positions
1698 corresponding to 0 bits in the mask.
1699 So, push the ranges of those 0 bits in the mask as artificial
1700 zero stores and let the partial-def handling code do the
1701 rest. */
1702 while (pos < prec)
1704 int tz = wi::ctz (w);
1705 if (pos + tz > prec)
1706 tz = prec - pos;
1707 if (tz)
1709 if (BYTES_BIG_ENDIAN)
1710 pd.offset = prec - pos - tz;
1711 else
1712 pd.offset = pos;
1713 pd.size = tz;
1714 void *r = push_partial_def (pd, 0, 0, 0, prec);
1715 gcc_assert (r == NULL_TREE);
1717 pos += tz;
1718 if (pos == prec)
1719 break;
1720 w = wi::lrshift (w, tz);
1721 tz = wi::ctz (wi::bit_not (w));
1722 if (pos + tz > prec)
1723 tz = prec - pos;
1724 pos += tz;
1725 w = wi::lrshift (w, tz);
1729 ~vn_walk_cb_data ();
1730 void *finish (alias_set_type, alias_set_type, tree);
1731 void *push_partial_def (pd_data pd,
1732 alias_set_type, alias_set_type, HOST_WIDE_INT,
1733 HOST_WIDE_INT);
1735 vn_reference_t vr;
1736 ao_ref orig_ref;
1737 tree *last_vuse_ptr;
1738 tree last_vuse;
1739 tree mask;
1740 tree masked_result;
1741 vn_lookup_kind vn_walk_kind;
1742 bool tbaa_p;
1743 vec<vn_reference_op_s> saved_operands;
1745 /* The VDEFs of partial defs we come along. */
1746 auto_vec<pd_data, 2> partial_defs;
1747 /* The first defs range to avoid splay tree setup in most cases. */
1748 pd_range first_range;
1749 alias_set_type first_set;
1750 alias_set_type first_base_set;
1751 splay_tree known_ranges;
1752 obstack ranges_obstack;
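/* The constructor above decomposes MASK into runs of zero bits and pushes
   each run as an artificial zero store.  Below is a self-contained sketch of
   that decomposition for up to 64 bits, assuming little-endian bit numbering
   (the toy_* names are invented; the real code also handles BYTES_BIG_ENDIAN
   by mirroring the offsets).  */
#if 0
#include <stdio.h>

/* Print the ranges of zero bits in MASK, which is PREC bits wide.  */
static void
toy_zero_ranges (unsigned long long mask, unsigned prec)
{
  unsigned long long w = mask;
  unsigned pos = 0;
  while (pos < prec)
    {
      /* Length of the run of zero bits starting at POS.  */
      unsigned tz = w ? (unsigned) __builtin_ctzll (w) : prec - pos;
      if (pos + tz > prec)
	tz = prec - pos;
      if (tz)
	printf ("zero bits at [%u, %u)\n", pos, pos + tz);
      pos += tz;
      if (pos == prec)
	break;
      w >>= tz;
      /* Skip the following run of one bits.  */
      unsigned to = ~w ? (unsigned) __builtin_ctzll (~w) : prec - pos;
      if (pos + to > prec)
	to = prec - pos;
      pos += to;
      if (pos >= prec)
	break;
      w >>= to;
    }
}

int
main (void)
{
  toy_zero_ranges (0x00ffff00ULL, 32);	/* Prints [0, 8) and [24, 32).  */
  return 0;
}
#endif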
1755 vn_walk_cb_data::~vn_walk_cb_data ()
1757 if (known_ranges)
1759 splay_tree_delete (known_ranges);
1760 obstack_free (&ranges_obstack, NULL);
1762 saved_operands.release ();
1765 void *
1766 vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
1768 if (first_set != -2)
1770 set = first_set;
1771 base_set = first_base_set;
1773 if (mask)
1775 masked_result = val;
1776 return (void *) -1;
1778 vec<vn_reference_op_s> &operands
1779 = saved_operands.exists () ? saved_operands : vr->operands;
1780 return vn_reference_lookup_or_insert_for_pieces (last_vuse, set, base_set,
1781 vr->type, operands, val);
1784 /* pd_range splay-tree helpers. */
1786 static int
1787 pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
1789 HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
1790 HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
1791 if (offset1 < offset2)
1792 return -1;
1793 else if (offset1 > offset2)
1794 return 1;
1795 return 0;
1798 static void *
1799 pd_tree_alloc (int size, void *data_)
1801 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1802 return obstack_alloc (&data->ranges_obstack, size);
1805 static void
1806 pd_tree_dealloc (void *, void *)
1810 /* Push PD to the vector of partial definitions. Return a
1811 value when we are ready to combine things with SET, BASE_SET, OFFSETI
1812 and MAXSIZEI, NULL when we want to continue looking for partial defs,
1813 or (void *)-1 on failure. */
1815 void *
1816 vn_walk_cb_data::push_partial_def (pd_data pd,
1817 alias_set_type set, alias_set_type base_set,
1818 HOST_WIDE_INT offseti,
1819 HOST_WIDE_INT maxsizei)
1821 const HOST_WIDE_INT bufsize = 64;
1822 /* We're using a fixed buffer for encoding so fail early if the object
1823 we want to interpret is bigger. */
1824 if (maxsizei > bufsize * BITS_PER_UNIT
1825 || CHAR_BIT != 8
1826 || BITS_PER_UNIT != 8
1827 /* Not prepared to handle PDP endian. */
1828 || BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
1829 return (void *)-1;
1831 /* Turn too large constant stores into non-constant stores. */
1832 if (CONSTANT_CLASS_P (pd.rhs) && pd.size > bufsize * BITS_PER_UNIT)
1833 pd.rhs = error_mark_node;
1835 /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at
1836 most a partial byte before and/or after the region. */
1837 if (!CONSTANT_CLASS_P (pd.rhs))
1839 if (pd.offset < offseti)
1841 HOST_WIDE_INT o = ROUND_DOWN (offseti - pd.offset, BITS_PER_UNIT);
1842 gcc_assert (pd.size > o);
1843 pd.size -= o;
1844 pd.offset += o;
1846 if (pd.size > maxsizei)
1847 pd.size = maxsizei + ((pd.size - maxsizei) % BITS_PER_UNIT);
1850 pd.offset -= offseti;
1852 bool pd_constant_p = (TREE_CODE (pd.rhs) == CONSTRUCTOR
1853 || CONSTANT_CLASS_P (pd.rhs));
1854 if (partial_defs.is_empty ())
1856 /* If we get a clobber upfront, fail. */
1857 if (TREE_CLOBBER_P (pd.rhs))
1858 return (void *)-1;
1859 if (!pd_constant_p)
1860 return (void *)-1;
1861 partial_defs.safe_push (pd);
1862 first_range.offset = pd.offset;
1863 first_range.size = pd.size;
1864 first_set = set;
1865 first_base_set = base_set;
1866 last_vuse_ptr = NULL;
1867 /* Continue looking for partial defs. */
1868 return NULL;
1871 if (!known_ranges)
1873 /* ??? Optimize the case where the 2nd partial def completes things. */
1874 gcc_obstack_init (&ranges_obstack);
1875 known_ranges = splay_tree_new_with_allocator (pd_range_compare, 0, 0,
1876 pd_tree_alloc,
1877 pd_tree_dealloc, this);
1878 splay_tree_insert (known_ranges,
1879 (splay_tree_key)&first_range.offset,
1880 (splay_tree_value)&first_range);
1883 pd_range newr = { pd.offset, pd.size };
1884 splay_tree_node n;
1885 pd_range *r;
1886 /* Lookup the predecessor of offset + 1 and see if we need to merge. */
1887 HOST_WIDE_INT loffset = newr.offset + 1;
1888 if ((n = splay_tree_predecessor (known_ranges, (splay_tree_key)&loffset))
1889 && ((r = (pd_range *)n->value), true)
1890 && ranges_known_overlap_p (r->offset, r->size + 1,
1891 newr.offset, newr.size))
1893 /* Ignore partial defs already covered. Here we also drop shadowed
1894 clobbers on the floor. */
1895 if (known_subrange_p (newr.offset, newr.size, r->offset, r->size))
1896 return NULL;
1897 r->size = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
1899 else
1901 /* newr.offset wasn't covered yet, insert the range. */
1902 r = XOBNEW (&ranges_obstack, pd_range);
1903 *r = newr;
1904 splay_tree_insert (known_ranges, (splay_tree_key)&r->offset,
1905 (splay_tree_value)r);
1907 /* Merge r, which now contains newr and is a member of the splay tree,
1908 with adjacent overlapping ranges. */
1909 pd_range *rafter;
1910 while ((n = splay_tree_successor (known_ranges, (splay_tree_key)&r->offset))
1911 && ((rafter = (pd_range *)n->value), true)
1912 && ranges_known_overlap_p (r->offset, r->size + 1,
1913 rafter->offset, rafter->size))
1915 r->size = MAX (r->offset + r->size,
1916 rafter->offset + rafter->size) - r->offset;
1917 splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset);
1919 /* If we get a clobber, fail. */
1920 if (TREE_CLOBBER_P (pd.rhs))
1921 return (void *)-1;
1922 /* Non-constants are OK as long as they are shadowed by a constant. */
1923 if (!pd_constant_p)
1924 return (void *)-1;
1925 partial_defs.safe_push (pd);
1927 /* Now we have merged newr into the range tree. When we have covered
1928 [offseti, maxsizei] then the tree will contain exactly one node which has
1929 the desired properties and it will be 'r'. */
1930 if (!known_subrange_p (0, maxsizei, r->offset, r->size))
1931 /* Continue looking for partial defs. */
1932 return NULL;
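/* Below is a simplified, self-contained sketch of the coverage bookkeeping
   used above, with a small sorted array standing in for the splay tree and
   invented toy_* names (the real code additionally records the partial
   definitions themselves and their alias sets).  Abutting or overlapping
   ranges are merged, and success is reported once [0, needed) is covered.  */
#if 0
#include <stdio.h>

struct toy_range { long offset, size; };

static struct toy_range toy_ranges[16];
static int toy_nranges;

/* Record [OFFSET, OFFSET + SIZE), merge with neighbours, and return 1 once
   [0, NEEDED) is fully covered.  */
static int
toy_cover (long offset, long size, long needed)
{
  /* Insert, keeping the array sorted by offset.  */
  int i = toy_nranges++;
  while (i > 0 && toy_ranges[i - 1].offset > offset)
    {
      toy_ranges[i] = toy_ranges[i - 1];
      i--;
    }
  toy_ranges[i].offset = offset;
  toy_ranges[i].size = size;
  /* Merge neighbours that touch or overlap.  */
  for (i = 0; i + 1 < toy_nranges; )
    if (toy_ranges[i].offset + toy_ranges[i].size >= toy_ranges[i + 1].offset)
      {
	long end = toy_ranges[i + 1].offset + toy_ranges[i + 1].size;
	if (end > toy_ranges[i].offset + toy_ranges[i].size)
	  toy_ranges[i].size = end - toy_ranges[i].offset;
	for (int j = i + 1; j + 1 < toy_nranges; j++)
	  toy_ranges[j] = toy_ranges[j + 1];
	toy_nranges--;
      }
    else
      i++;
  return (toy_nranges == 1
	  && toy_ranges[0].offset <= 0
	  && toy_ranges[0].offset + toy_ranges[0].size >= needed);
}

int
main (void)
{
  printf ("%d\n", toy_cover (0, 8, 32));	/* 0: only [0, 8) covered.  */
  printf ("%d\n", toy_cover (16, 16, 32));	/* 0: gap at [8, 16).  */
  printf ("%d\n", toy_cover (8, 8, 32));	/* 1: [0, 32) covered.  */
  return 0;
}
#endif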
1934 /* Now simply native encode all partial defs in reverse order. */
1935 unsigned ndefs = partial_defs.length ();
1936 /* We support up to 512-bit values (for V8DFmode). */
1937 unsigned char buffer[bufsize + 1];
1938 unsigned char this_buffer[bufsize + 1];
1939 int len;
1941 memset (buffer, 0, bufsize + 1);
1942 unsigned needed_len = ROUND_UP (maxsizei, BITS_PER_UNIT) / BITS_PER_UNIT;
1943 while (!partial_defs.is_empty ())
1945 pd_data pd = partial_defs.pop ();
1946 unsigned int amnt;
1947 if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
1949 /* Empty CONSTRUCTOR. */
1950 if (pd.size >= needed_len * BITS_PER_UNIT)
1951 len = needed_len;
1952 else
1953 len = ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT;
1954 memset (this_buffer, 0, len);
1956 else
1958 len = native_encode_expr (pd.rhs, this_buffer, bufsize,
1959 MAX (0, -pd.offset) / BITS_PER_UNIT);
1960 if (len <= 0
1961 || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
1962 - MAX (0, -pd.offset) / BITS_PER_UNIT))
1964 if (dump_file && (dump_flags & TDF_DETAILS))
1965 fprintf (dump_file, "Failed to encode %u "
1966 "partial definitions\n", ndefs);
1967 return (void *)-1;
1971 unsigned char *p = buffer;
1972 HOST_WIDE_INT size = pd.size;
1973 if (pd.offset < 0)
1974 size -= ROUND_DOWN (-pd.offset, BITS_PER_UNIT);
1975 this_buffer[len] = 0;
1976 if (BYTES_BIG_ENDIAN)
1978 /* LSB of this_buffer[len - 1] byte should be at
1979 pd.offset + pd.size - 1 bits in buffer. */
1980 amnt = ((unsigned HOST_WIDE_INT) pd.offset
1981 + pd.size) % BITS_PER_UNIT;
1982 if (amnt)
1983 shift_bytes_in_array_right (this_buffer, len + 1, amnt);
1984 unsigned char *q = this_buffer;
1985 unsigned int off = 0;
1986 if (pd.offset >= 0)
1988 unsigned int msk;
1989 off = pd.offset / BITS_PER_UNIT;
1990 gcc_assert (off < needed_len);
1991 p = buffer + off;
1992 if (size <= amnt)
1994 msk = ((1 << size) - 1) << (BITS_PER_UNIT - amnt);
1995 *p = (*p & ~msk) | (this_buffer[len] & msk);
1996 size = 0;
1998 else
2000 if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2001 q = (this_buffer + len
2002 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2003 / BITS_PER_UNIT));
2004 if (pd.offset % BITS_PER_UNIT)
2006 msk = -1U << (BITS_PER_UNIT
2007 - (pd.offset % BITS_PER_UNIT));
2008 *p = (*p & msk) | (*q & ~msk);
2009 p++;
2010 q++;
2011 off++;
2012 size -= BITS_PER_UNIT - (pd.offset % BITS_PER_UNIT);
2013 gcc_assert (size >= 0);
2017 else if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2019 q = (this_buffer + len
2020 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2021 / BITS_PER_UNIT));
2022 if (pd.offset % BITS_PER_UNIT)
2024 q++;
2025 size -= BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) pd.offset
2026 % BITS_PER_UNIT);
2027 gcc_assert (size >= 0);
2030 if ((unsigned HOST_WIDE_INT) size / BITS_PER_UNIT + off
2031 > needed_len)
2032 size = (needed_len - off) * BITS_PER_UNIT;
2033 memcpy (p, q, size / BITS_PER_UNIT);
2034 if (size % BITS_PER_UNIT)
2036 unsigned int msk
2037 = -1U << (BITS_PER_UNIT - (size % BITS_PER_UNIT));
2038 p += size / BITS_PER_UNIT;
2039 q += size / BITS_PER_UNIT;
2040 *p = (*q & msk) | (*p & ~msk);
2043 else
2045 if (pd.offset >= 0)
2047 /* LSB of this_buffer[0] byte should be at pd.offset bits
2048 in buffer. */
2049 unsigned int msk;
2050 size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2051 amnt = pd.offset % BITS_PER_UNIT;
2052 if (amnt)
2053 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2054 unsigned int off = pd.offset / BITS_PER_UNIT;
2055 gcc_assert (off < needed_len);
2056 size = MIN (size,
2057 (HOST_WIDE_INT) (needed_len - off) * BITS_PER_UNIT);
2058 p = buffer + off;
2059 if (amnt + size < BITS_PER_UNIT)
2061 /* Low amnt bits come from *p, then size bits
2062 from this_buffer[0] and the remaining again from
2063 *p. */
2064 msk = ((1 << size) - 1) << amnt;
2065 *p = (*p & ~msk) | (this_buffer[0] & msk);
2066 size = 0;
2068 else if (amnt)
2070 msk = -1U << amnt;
2071 *p = (*p & ~msk) | (this_buffer[0] & msk);
2072 p++;
2073 size -= (BITS_PER_UNIT - amnt);
2076 else
2078 amnt = (unsigned HOST_WIDE_INT) pd.offset % BITS_PER_UNIT;
2079 if (amnt)
2080 size -= BITS_PER_UNIT - amnt;
2081 size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2082 if (amnt)
2083 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2085 memcpy (p, this_buffer + (amnt != 0), size / BITS_PER_UNIT);
2086 p += size / BITS_PER_UNIT;
2087 if (size % BITS_PER_UNIT)
2089 unsigned int msk = -1U << (size % BITS_PER_UNIT);
2090 *p = (this_buffer[(amnt != 0) + size / BITS_PER_UNIT]
2091 & ~msk) | (*p & msk);
2096 tree type = vr->type;
2097 /* Make sure to interpret in a type that has a range covering the whole
2098 access size. */
2099 if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
2100 type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
2101 tree val;
2102 if (BYTES_BIG_ENDIAN)
2104 unsigned sz = needed_len;
2105 if (maxsizei % BITS_PER_UNIT)
2106 shift_bytes_in_array_right (buffer, needed_len,
2107 BITS_PER_UNIT
2108 - (maxsizei % BITS_PER_UNIT));
2109 if (INTEGRAL_TYPE_P (type))
2110 sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
2111 if (sz > needed_len)
2113 memcpy (this_buffer + (sz - needed_len), buffer, needed_len);
2114 val = native_interpret_expr (type, this_buffer, sz);
2116 else
2117 val = native_interpret_expr (type, buffer, needed_len);
2119 else
2120 val = native_interpret_expr (type, buffer, bufsize);
2121 /* If we chop off bits because the type's precision doesn't match the memory
2122 access size, this is OK when optimizing reads but not when called from
2123 the DSE code during elimination. */
2124 if (val && type != vr->type)
2126 if (! int_fits_type_p (val, vr->type))
2127 val = NULL_TREE;
2128 else
2129 val = fold_convert (vr->type, val);
2132 if (val)
2134 if (dump_file && (dump_flags & TDF_DETAILS))
2135 fprintf (dump_file,
2136 "Successfully combined %u partial definitions\n", ndefs);
2137 /* We are using the alias-set of the first store we encounter, which
2138 should be appropriate here. */
2139 return finish (first_set, first_base_set, val);
2141 else
2143 if (dump_file && (dump_flags & TDF_DETAILS))
2144 fprintf (dump_file,
2145 "Failed to interpret %u encoded partial definitions\n", ndefs);
2146 return (void *)-1;
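/* A byte-aligned, little-endian model of the combination performed above,
   with made-up values (the real code handles arbitrary bit offsets,
   big-endian byte order and CONSTRUCTOR defs; combine_demo is a
   hypothetical helper, not part of this file, and is kept out of the
   build).  */
#if 0
static unsigned int
combine_demo (void)
{
  unsigned char buffer[4] = { 0, 0, 0, 0 };
  /* Older partial def: stores 0x11223344 to bytes [0, 4).  */
  unsigned int oldest = 0x11223344;
  memcpy (buffer, &oldest, 4);
  /* Newer partial def shadows bytes [2, 4) with 0xaabb.  */
  unsigned short newest = 0xaabb;
  memcpy (buffer + 2, &newest, 2);
  /* "Native interpret" the combined bytes in the type of the read.  */
  unsigned int val;
  memcpy (&val, buffer, 4);
  return val;   /* 0xaabb3344 on a little-endian host.  */
}
#endif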
2150 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR in
2151 DATA_ with the current VUSE and performs the expression lookup. */
2153 static void *
2154 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
2156 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2157 vn_reference_t vr = data->vr;
2158 vn_reference_s **slot;
2159 hashval_t hash;
2161 /* If we have partial definitions recorded we have to go through
2162 vn_reference_lookup_3. */
2163 if (!data->partial_defs.is_empty ())
2164 return NULL;
2166 if (data->last_vuse_ptr)
2168 *data->last_vuse_ptr = vuse;
2169 data->last_vuse = vuse;
2172 /* Fix up vuse and hash. */
2173 if (vr->vuse)
2174 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
2175 vr->vuse = vuse_ssa_val (vuse);
2176 if (vr->vuse)
2177 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
2179 hash = vr->hashcode;
2180 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
2181 if (slot)
2183 if ((*slot)->result && data->saved_operands.exists ())
2184 return data->finish (vr->set, vr->base_set, (*slot)->result);
2185 return *slot;
2188 return NULL;
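/* The VUSE contributes to the reference hash as a plain additive term,
   which is what makes the in-place fixup above possible: subtract the old
   SSA version, add the new one, and the hash stays consistent without
   rehashing the operands.  A sketch (rehash_for_new_vuse is a hypothetical
   helper, not part of this file):  */
#if 0
static hashval_t
rehash_for_new_vuse (hashval_t hashcode, tree old_vuse, tree new_vuse)
{
  if (old_vuse)
    hashcode -= SSA_NAME_VERSION (old_vuse);
  if (new_vuse)
    hashcode += SSA_NAME_VERSION (new_vuse);
  return hashcode;
}
#endif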
2191 /* Lookup an existing or insert a new vn_reference entry into the
2192 value table for the VUSE, SET, TYPE, OPERANDS reference which
2193 has the value VALUE, which is either a constant or an SSA name. */
2195 static vn_reference_t
2196 vn_reference_lookup_or_insert_for_pieces (tree vuse,
2197 alias_set_type set,
2198 alias_set_type base_set,
2199 tree type,
2200 vec<vn_reference_op_s,
2201 va_heap> operands,
2202 tree value)
2204 vn_reference_s vr1;
2205 vn_reference_t result;
2206 unsigned value_id;
2207 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2208 vr1.operands = operands;
2209 vr1.type = type;
2210 vr1.set = set;
2211 vr1.base_set = base_set;
2212 vr1.hashcode = vn_reference_compute_hash (&vr1);
2213 if (vn_reference_lookup_1 (&vr1, &result))
2214 return result;
2215 if (TREE_CODE (value) == SSA_NAME)
2216 value_id = VN_INFO (value)->value_id;
2217 else
2218 value_id = get_or_alloc_constant_value_id (value);
2219 return vn_reference_insert_pieces (vuse, set, base_set, type,
2220 operands.copy (), value, value_id);
2223 /* Return a value-number for RCODE OPS... either by looking up an existing
2224 value-number for the simplified result or by inserting the operation if
2225 INSERT is true. */
2227 static tree
2228 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
2230 tree result = NULL_TREE;
2231 /* We will be creating a value number for
2232 RCODE (OPS...).
2233 So first simplify and look up this expression to see if it
2234 is already available. */
2235 /* For simplification valueize. */
2236 unsigned i;
2237 for (i = 0; i < res_op->num_ops; ++i)
2238 if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
2240 tree tem = vn_valueize (res_op->ops[i]);
2241 if (!tem)
2242 break;
2243 res_op->ops[i] = tem;
2245 /* If valueization of an operand fails (it is not available), skip
2246 simplification. */
2247 bool res = false;
2248 if (i == res_op->num_ops)
2250 mprts_hook = vn_lookup_simplify_result;
2251 res = res_op->resimplify (NULL, vn_valueize);
2252 mprts_hook = NULL;
2254 gimple *new_stmt = NULL;
2255 if (res
2256 && gimple_simplified_result_is_gimple_val (res_op))
2258 /* The expression is already available. */
2259 result = res_op->ops[0];
2260 /* Valueize it, simplification returns something in AVAIL only. */
2261 if (TREE_CODE (result) == SSA_NAME)
2262 result = SSA_VAL (result);
2264 else
2266 tree val = vn_lookup_simplify_result (res_op);
2267 if (!val && insert)
2269 gimple_seq stmts = NULL;
2270 result = maybe_push_res_to_seq (res_op, &stmts);
2271 if (result)
2273 gcc_assert (gimple_seq_singleton_p (stmts));
2274 new_stmt = gimple_seq_first_stmt (stmts);
2277 else
2278 /* The expression is already available. */
2279 result = val;
2281 if (new_stmt)
2283 /* The expression is not yet available, value-number lhs to
2284 the new SSA_NAME we created. */
2285 /* Initialize value-number information properly. */
2286 vn_ssa_aux_t result_info = VN_INFO (result);
2287 result_info->valnum = result;
2288 result_info->value_id = get_next_value_id ();
2289 result_info->visited = 1;
2290 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
2291 new_stmt);
2292 result_info->needs_insertion = true;
2293 /* ??? PRE phi-translation inserts NARYs without corresponding
2294 SSA name result. Re-use those but set their result according
2295 to the stmt we just built. */
2296 vn_nary_op_t nary = NULL;
2297 vn_nary_op_lookup_stmt (new_stmt, &nary);
2298 if (nary)
2300 gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
2301 nary->u.result = gimple_assign_lhs (new_stmt);
2303 /* As all "inserted" statements are singleton SCCs, insert
2304 to the valid table. This is strictly needed to
2305 avoid re-generating new value SSA_NAMEs for the same
2306 expression during SCC iteration over and over (the
2307 optimistic table gets cleared after each iteration).
2308 We do not need to insert into the optimistic table, as
2309 lookups there will fall back to the valid table. */
2310 else
2312 unsigned int length = vn_nary_length_from_stmt (new_stmt);
2313 vn_nary_op_t vno1
2314 = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
2315 vno1->value_id = result_info->value_id;
2316 vno1->length = length;
2317 vno1->predicated_values = 0;
2318 vno1->u.result = result;
2319 init_vn_nary_op_from_stmt (vno1, new_stmt);
2320 vn_nary_op_insert_into (vno1, valid_info->nary, true);
2321 /* Also do not link it into the undo chain. */
2322 last_inserted_nary = vno1->next;
2323 vno1->next = (vn_nary_op_t)(void *)-1;
2325 if (dump_file && (dump_flags & TDF_DETAILS))
2327 fprintf (dump_file, "Inserting name ");
2328 print_generic_expr (dump_file, result);
2329 fprintf (dump_file, " for expression ");
2330 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
2331 fprintf (dump_file, "\n");
2334 return result;
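/* A stripped-down model of the "simplify, then look up or insert" scheme
   implemented above, using small integers for value ids and a linear table
   instead of the real hash tables and SSA names (every name below is
   hypothetical and for illustration only; not compiled).  */
#if 0
struct toy_nary { int opcode, op0, op1, value; };
static struct toy_nary toy_table[64];
static int toy_entries;
static int toy_next_value = 100;

/* Return the value number for OPCODE (OP0, OP1), creating a fresh one if
   the expression was not seen before.  Commutative operands are
   canonicalized so that a + b and b + a share a value number.  */
static int
toy_nary_lookup_or_insert (int opcode, int op0, int op1, int commutative)
{
  if (commutative && op0 > op1)
    {
      int tmp = op0;
      op0 = op1;
      op1 = tmp;
    }
  for (int i = 0; i < toy_entries; ++i)
    if (toy_table[i].opcode == opcode
        && toy_table[i].op0 == op0
        && toy_table[i].op1 == op1)
      return toy_table[i].value;
  struct toy_nary e = { opcode, op0, op1, toy_next_value++ };
  toy_table[toy_entries++] = e;
  return e.value;
}
#endif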
2337 /* Return a value-number for RCODE OPS... either by looking up an existing
2338 value-number for the simplified result or by inserting the operation. */
2340 static tree
2341 vn_nary_build_or_lookup (gimple_match_op *res_op)
2343 return vn_nary_build_or_lookup_1 (res_op, true);
2346 /* Try to simplify the n-ary operation NARY and return its value if
2347 present. */
2349 tree
2350 vn_nary_simplify (vn_nary_op_t nary)
2352 if (nary->length > gimple_match_op::MAX_NUM_OPS)
2353 return NULL_TREE;
2354 gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
2355 nary->type, nary->length);
2356 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
2357 return vn_nary_build_or_lookup_1 (&op, false);
2360 /* Elimination engine. */
2362 class eliminate_dom_walker : public dom_walker
2364 public:
2365 eliminate_dom_walker (cdi_direction, bitmap);
2366 ~eliminate_dom_walker ();
2368 virtual edge before_dom_children (basic_block);
2369 virtual void after_dom_children (basic_block);
2371 virtual tree eliminate_avail (basic_block, tree op);
2372 virtual void eliminate_push_avail (basic_block, tree op);
2373 tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
2375 void eliminate_stmt (basic_block, gimple_stmt_iterator *);
2377 unsigned eliminate_cleanup (bool region_p = false);
2379 bool do_pre;
2380 unsigned int el_todo;
2381 unsigned int eliminations;
2382 unsigned int insertions;
2384 /* SSA names that had their defs inserted by PRE if do_pre. */
2385 bitmap inserted_exprs;
2387 /* Blocks with statements that have had their EH properties changed. */
2388 bitmap need_eh_cleanup;
2390 /* Blocks with statements that have had their AB properties changed. */
2391 bitmap need_ab_cleanup;
2393 /* Local state for the eliminate domwalk. */
2394 auto_vec<gimple *> to_remove;
2395 auto_vec<gimple *> to_fixup;
2396 auto_vec<tree> avail;
2397 auto_vec<tree> avail_stack;
2400 /* Adaptor to the elimination engine using RPO availability. */
2402 class rpo_elim : public eliminate_dom_walker
2404 public:
2405 rpo_elim(basic_block entry_)
2406 : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
2407 m_avail_freelist (NULL) {}
2409 virtual tree eliminate_avail (basic_block, tree op);
2411 virtual void eliminate_push_avail (basic_block, tree);
2413 basic_block entry;
2414 /* Freelist of avail entries which are allocated from the vn_ssa_aux
2415 obstack. */
2416 vn_avail *m_avail_freelist;
2419 /* Global RPO state for access from hooks. */
2420 static eliminate_dom_walker *rpo_avail;
2421 basic_block vn_context_bb;
2423 /* Return true if BASE1 and BASE2 can be adjusted so they have the
2424 same address and adjust *OFFSET1 and *OFFSET2 accordingly.
2425 Otherwise return false. */
2427 static bool
2428 adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
2429 tree base2, poly_int64 *offset2)
2431 poly_int64 soff;
2432 if (TREE_CODE (base1) == MEM_REF
2433 && TREE_CODE (base2) == MEM_REF)
2435 if (mem_ref_offset (base1).to_shwi (&soff))
2437 base1 = TREE_OPERAND (base1, 0);
2438 *offset1 += soff * BITS_PER_UNIT;
2440 if (mem_ref_offset (base2).to_shwi (&soff))
2442 base2 = TREE_OPERAND (base2, 0);
2443 *offset2 += soff * BITS_PER_UNIT;
2445 return operand_equal_p (base1, base2, 0);
2447 return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
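/* A condensed model of the adjustment above: when both bases are MEM_REFs,
   their constant byte offsets are folded into the bit offsets so that only
   the underlying addresses need to compare equal.  Plain pointers and
   long long offsets stand in for trees and poly_int64 here; the helper
   name is hypothetical and the block is kept out of the build.  */
#if 0
static bool
equal_base_demo (void *ptr1, long long memref_byte_off1, long long *bitoff1,
                 void *ptr2, long long memref_byte_off2, long long *bitoff2)
{
  *bitoff1 += memref_byte_off1 * 8;   /* BITS_PER_UNIT on common targets.  */
  *bitoff2 += memref_byte_off2 * 8;
  return ptr1 == ptr2;
}
#endif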
2450 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
2451 from the statement defining VUSE and if not successful tries to
2452 translate *REF and *VR through an aggregate copy at the definition
2453 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
2454 of *REF and *VR. If only disambiguation was performed then
2455 *DISAMBIGUATE_ONLY is set to true. */
2457 static void *
2458 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
2459 translate_flags *disambiguate_only)
2461 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2462 vn_reference_t vr = data->vr;
2463 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2464 tree base = ao_ref_base (ref);
2465 HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
2466 static vec<vn_reference_op_s> lhs_ops;
2467 ao_ref lhs_ref;
2468 bool lhs_ref_ok = false;
2469 poly_int64 copy_size;
2471 /* First try to disambiguate after value-replacing in the definitions LHS. */
2472 if (is_gimple_assign (def_stmt))
2474 tree lhs = gimple_assign_lhs (def_stmt);
2475 bool valueized_anything = false;
2476 /* Avoid re-allocation overhead. */
2477 lhs_ops.truncate (0);
2478 basic_block saved_rpo_bb = vn_context_bb;
2479 vn_context_bb = gimple_bb (def_stmt);
2480 if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
2482 copy_reference_ops_from_ref (lhs, &lhs_ops);
2483 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
2485 vn_context_bb = saved_rpo_bb;
2486 ao_ref_init (&lhs_ref, lhs);
2487 lhs_ref_ok = true;
2488 if (valueized_anything
2489 && ao_ref_init_from_vn_reference
2490 (&lhs_ref, ao_ref_alias_set (&lhs_ref),
2491 ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
2492 && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
2494 *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2495 return NULL;
2498 /* Besides valueizing the LHS we can also use access-path based
2499 disambiguation on the original non-valueized ref. */
2500 if (!ref->ref
2501 && lhs_ref_ok
2502 && data->orig_ref.ref)
2504 /* We want to use the non-valueized LHS for this, but avoid redundant
2505 work. */
2506 ao_ref *lref = &lhs_ref;
2507 ao_ref lref_alt;
2508 if (valueized_anything)
2510 ao_ref_init (&lref_alt, lhs);
2511 lref = &lref_alt;
2513 if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
2515 *disambiguate_only = (valueized_anything
2516 ? TR_VALUEIZE_AND_DISAMBIGUATE
2517 : TR_DISAMBIGUATE);
2518 return NULL;
2522 /* If we reach a clobbering statement try to skip it and see if
2523 we find a VN result with exactly the same value as the
2524 possible clobber. In this case we can ignore the clobber
2525 and return the found value. */
2526 if (is_gimple_reg_type (TREE_TYPE (lhs))
2527 && types_compatible_p (TREE_TYPE (lhs), vr->type)
2528 && (ref->ref || data->orig_ref.ref))
2530 tree *saved_last_vuse_ptr = data->last_vuse_ptr;
2531 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
2532 data->last_vuse_ptr = NULL;
2533 tree saved_vuse = vr->vuse;
2534 hashval_t saved_hashcode = vr->hashcode;
2535 void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
2536 /* Need to restore vr->vuse and vr->hashcode. */
2537 vr->vuse = saved_vuse;
2538 vr->hashcode = saved_hashcode;
2539 data->last_vuse_ptr = saved_last_vuse_ptr;
2540 if (res && res != (void *)-1)
2542 vn_reference_t vnresult = (vn_reference_t) res;
2543 tree rhs = gimple_assign_rhs1 (def_stmt);
2544 if (TREE_CODE (rhs) == SSA_NAME)
2545 rhs = SSA_VAL (rhs);
2546 if (vnresult->result
2547 && operand_equal_p (vnresult->result, rhs, 0)
2548 /* We have to honor our promise about union type punning
2549 and also support arbitrary overlaps with
2550 -fno-strict-aliasing. So simply resort to alignment to
2551 rule out overlaps. Do this check last because it is
2552 quite expensive compared to the hash-lookup above. */
2553 && multiple_p (get_object_alignment
2554 (ref->ref ? ref->ref : data->orig_ref.ref),
2555 ref->size)
2556 && multiple_p (get_object_alignment (lhs), ref->size))
2557 return res;
2561 else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
2562 && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2563 && gimple_call_num_args (def_stmt) <= 4)
2565 /* For builtin calls valueize their arguments and call the
2566 alias oracle again. Valueization may improve points-to
2567 info of pointers and constify size and position arguments.
2568 Originally this was motivated by PR61034 which has
2569 conditional calls to free falsely clobbering ref because
2570 of imprecise points-to info of the argument. */
2571 tree oldargs[4];
2572 bool valueized_anything = false;
2573 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2575 oldargs[i] = gimple_call_arg (def_stmt, i);
2576 tree val = vn_valueize (oldargs[i]);
2577 if (val != oldargs[i])
2579 gimple_call_set_arg (def_stmt, i, val);
2580 valueized_anything = true;
2583 if (valueized_anything)
2585 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2586 ref, data->tbaa_p);
2587 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2588 gimple_call_set_arg (def_stmt, i, oldargs[i]);
2589 if (!res)
2591 *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2592 return NULL;
2597 if (*disambiguate_only > TR_TRANSLATE)
2598 return (void *)-1;
2600 /* If we cannot constrain the size of the reference we cannot
2601 test if anything kills it. */
2602 if (!ref->max_size_known_p ())
2603 return (void *)-1;
2605 poly_int64 offset = ref->offset;
2606 poly_int64 maxsize = ref->max_size;
2608 /* def_stmt may-defs *ref. See if we can derive a value for *ref
2609 from that definition.
2610 1) Memset. */
2611 if (is_gimple_reg_type (vr->type)
2612 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2613 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET_CHK))
2614 && (integer_zerop (gimple_call_arg (def_stmt, 1))
2615 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2616 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2617 && CHAR_BIT == 8
2618 && BITS_PER_UNIT == 8
2619 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2620 && offset.is_constant (&offseti)
2621 && ref->size.is_constant (&sizei)
2622 && (offseti % BITS_PER_UNIT == 0
2623 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
2624 && (poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2625 || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
2626 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)))))
2627 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2628 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2630 tree base2;
2631 poly_int64 offset2, size2, maxsize2;
2632 bool reverse;
2633 tree ref2 = gimple_call_arg (def_stmt, 0);
2634 if (TREE_CODE (ref2) == SSA_NAME)
2636 ref2 = SSA_VAL (ref2);
2637 if (TREE_CODE (ref2) == SSA_NAME
2638 && (TREE_CODE (base) != MEM_REF
2639 || TREE_OPERAND (base, 0) != ref2))
2641 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2642 if (gimple_assign_single_p (def_stmt)
2643 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2644 ref2 = gimple_assign_rhs1 (def_stmt);
2647 if (TREE_CODE (ref2) == ADDR_EXPR)
2649 ref2 = TREE_OPERAND (ref2, 0);
2650 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2651 &reverse);
2652 if (!known_size_p (maxsize2)
2653 || !known_eq (maxsize2, size2)
2654 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2655 return (void *)-1;
2657 else if (TREE_CODE (ref2) == SSA_NAME)
2659 poly_int64 soff;
2660 if (TREE_CODE (base) != MEM_REF
2661 || !(mem_ref_offset (base)
2662 << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2663 return (void *)-1;
2664 offset += soff;
2665 offset2 = 0;
2666 if (TREE_OPERAND (base, 0) != ref2)
2668 gimple *def = SSA_NAME_DEF_STMT (ref2);
2669 if (is_gimple_assign (def)
2670 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2671 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2672 && poly_int_tree_p (gimple_assign_rhs2 (def)))
2674 tree rhs2 = gimple_assign_rhs2 (def);
2675 if (!(poly_offset_int::from (wi::to_poly_wide (rhs2),
2676 SIGNED)
2677 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2678 return (void *)-1;
2679 ref2 = gimple_assign_rhs1 (def);
2680 if (TREE_CODE (ref2) == SSA_NAME)
2681 ref2 = SSA_VAL (ref2);
2683 else
2684 return (void *)-1;
2687 else
2688 return (void *)-1;
2689 tree len = gimple_call_arg (def_stmt, 2);
2690 HOST_WIDE_INT leni, offset2i;
2691 if (TREE_CODE (len) == SSA_NAME)
2692 len = SSA_VAL (len);
2693 /* Sometimes the above trickery is smarter than alias analysis. Take
2694 advantage of that. */
2695 if (!ranges_maybe_overlap_p (offset, maxsize, offset2,
2696 (wi::to_poly_offset (len)
2697 << LOG2_BITS_PER_UNIT)))
2698 return NULL;
2699 if (data->partial_defs.is_empty ()
2700 && known_subrange_p (offset, maxsize, offset2,
2701 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2703 tree val;
2704 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2705 val = build_zero_cst (vr->type);
2706 else if (INTEGRAL_TYPE_P (vr->type)
2707 && known_eq (ref->size, 8)
2708 && offseti % BITS_PER_UNIT == 0)
2710 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2711 vr->type, gimple_call_arg (def_stmt, 1));
2712 val = vn_nary_build_or_lookup (&res_op);
2713 if (!val
2714 || (TREE_CODE (val) == SSA_NAME
2715 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2716 return (void *)-1;
2718 else
2720 unsigned buflen = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
2721 if (INTEGRAL_TYPE_P (vr->type))
2722 buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
2723 unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
2724 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2725 buflen);
2726 if (BYTES_BIG_ENDIAN)
2728 unsigned int amnt
2729 = (((unsigned HOST_WIDE_INT) offseti + sizei)
2730 % BITS_PER_UNIT);
2731 if (amnt)
2733 shift_bytes_in_array_right (buf, buflen,
2734 BITS_PER_UNIT - amnt);
2735 buf++;
2736 buflen--;
2739 else if (offseti % BITS_PER_UNIT != 0)
2741 unsigned int amnt
2742 = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
2743 % BITS_PER_UNIT);
2744 shift_bytes_in_array_left (buf, buflen, amnt);
2745 buf++;
2746 buflen--;
2748 val = native_interpret_expr (vr->type, buf, buflen);
2749 if (!val)
2750 return (void *)-1;
2752 return data->finish (0, 0, val);
2754 /* For now handle clearing memory with partial defs. */
2755 else if (known_eq (ref->size, maxsize)
2756 && integer_zerop (gimple_call_arg (def_stmt, 1))
2757 && tree_fits_poly_int64_p (len)
2758 && tree_to_poly_int64 (len).is_constant (&leni)
2759 && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
2760 && offset.is_constant (&offseti)
2761 && offset2.is_constant (&offset2i)
2762 && maxsize.is_constant (&maxsizei)
2763 && ranges_known_overlap_p (offseti, maxsizei, offset2i,
2764 leni << LOG2_BITS_PER_UNIT))
2766 pd_data pd;
2767 pd.rhs = build_constructor (NULL_TREE, NULL);
2768 pd.offset = offset2i;
2769 pd.size = leni << LOG2_BITS_PER_UNIT;
2770 return data->push_partial_def (pd, 0, 0, offseti, maxsizei);
2774 /* 2) Assignment from an empty CONSTRUCTOR. */
2775 else if (is_gimple_reg_type (vr->type)
2776 && gimple_assign_single_p (def_stmt)
2777 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2778 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2780 tree base2;
2781 poly_int64 offset2, size2, maxsize2;
2782 HOST_WIDE_INT offset2i, size2i;
2783 gcc_assert (lhs_ref_ok);
2784 base2 = ao_ref_base (&lhs_ref);
2785 offset2 = lhs_ref.offset;
2786 size2 = lhs_ref.size;
2787 maxsize2 = lhs_ref.max_size;
2788 if (known_size_p (maxsize2)
2789 && known_eq (maxsize2, size2)
2790 && adjust_offsets_for_equal_base_address (base, &offset,
2791 base2, &offset2))
2793 if (data->partial_defs.is_empty ()
2794 && known_subrange_p (offset, maxsize, offset2, size2))
2796 /* While technically undefined behavior, do not optimize
2797 a full read from a clobber. */
2798 if (gimple_clobber_p (def_stmt))
2799 return (void *)-1;
2800 tree val = build_zero_cst (vr->type);
2801 return data->finish (ao_ref_alias_set (&lhs_ref),
2802 ao_ref_base_alias_set (&lhs_ref), val);
2804 else if (known_eq (ref->size, maxsize)
2805 && maxsize.is_constant (&maxsizei)
2806 && offset.is_constant (&offseti)
2807 && offset2.is_constant (&offset2i)
2808 && size2.is_constant (&size2i)
2809 && ranges_known_overlap_p (offseti, maxsizei,
2810 offset2i, size2i))
2812 /* Let clobbers be consumed by the partial-def tracker
2813 which can choose to ignore them if they are shadowed
2814 by a later def. */
2815 pd_data pd;
2816 pd.rhs = gimple_assign_rhs1 (def_stmt);
2817 pd.offset = offset2i;
2818 pd.size = size2i;
2819 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
2820 ao_ref_base_alias_set (&lhs_ref),
2821 offseti, maxsizei);
2826 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2827 routines to extract the assigned bits. */
2828 else if (known_eq (ref->size, maxsize)
2829 && is_gimple_reg_type (vr->type)
2830 && !contains_storage_order_barrier_p (vr->operands)
2831 && gimple_assign_single_p (def_stmt)
2832 && CHAR_BIT == 8
2833 && BITS_PER_UNIT == 8
2834 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2835 /* native_encode and native_decode operate on arrays of bytes
2836 and so fundamentally need a compile-time size and offset. */
2837 && maxsize.is_constant (&maxsizei)
2838 && offset.is_constant (&offseti)
2839 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2840 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2841 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2843 tree lhs = gimple_assign_lhs (def_stmt);
2844 tree base2;
2845 poly_int64 offset2, size2, maxsize2;
2846 HOST_WIDE_INT offset2i, size2i;
2847 bool reverse;
2848 gcc_assert (lhs_ref_ok);
2849 base2 = ao_ref_base (&lhs_ref);
2850 offset2 = lhs_ref.offset;
2851 size2 = lhs_ref.size;
2852 maxsize2 = lhs_ref.max_size;
2853 reverse = reverse_storage_order_for_component_p (lhs);
2854 if (base2
2855 && !reverse
2856 && !storage_order_barrier_p (lhs)
2857 && known_eq (maxsize2, size2)
2858 && adjust_offsets_for_equal_base_address (base, &offset,
2859 base2, &offset2)
2860 && offset.is_constant (&offseti)
2861 && offset2.is_constant (&offset2i)
2862 && size2.is_constant (&size2i))
2864 if (data->partial_defs.is_empty ()
2865 && known_subrange_p (offseti, maxsizei, offset2, size2))
2867 /* We support up to 512-bit values (for V8DFmode). */
2868 unsigned char buffer[65];
2869 int len;
2871 tree rhs = gimple_assign_rhs1 (def_stmt);
2872 if (TREE_CODE (rhs) == SSA_NAME)
2873 rhs = SSA_VAL (rhs);
2874 len = native_encode_expr (rhs,
2875 buffer, sizeof (buffer) - 1,
2876 (offseti - offset2i) / BITS_PER_UNIT);
2877 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2879 tree type = vr->type;
2880 unsigned char *buf = buffer;
2881 unsigned int amnt = 0;
2882 /* Make sure to interpret in a type that has a range
2883 covering the whole access size. */
2884 if (INTEGRAL_TYPE_P (vr->type)
2885 && maxsizei != TYPE_PRECISION (vr->type))
2886 type = build_nonstandard_integer_type (maxsizei,
2887 TYPE_UNSIGNED (type));
2888 if (BYTES_BIG_ENDIAN)
2890 /* For big-endian native_encode_expr stored the rhs
2891 such that the LSB of it is the LSB of buffer[len - 1].
2892 That bit is stored into memory at position
2893 offset2 + size2 - 1, i.e. in byte
2894 base + (offset2 + size2 - 1) / BITS_PER_UNIT.
2895 E.g. for offset2 1 and size2 14, rhs -1 and memory
2896 previously cleared that is:
2898 01111111|11111110
2899 Now, if we want to extract offset 2 and size 12 from
2900 it using native_interpret_expr (which actually works
2901 for integral bitfield types in terms of byte size of
2902 the mode), the native_encode_expr stored the value
2903 into buffer as
2904 XX111111|11111111
2905 and returned len 2 (the X bits are outside of
2906 precision).
2907 Let sz be maxsize / BITS_PER_UNIT if not extracting
2908 a bitfield, and GET_MODE_SIZE otherwise.
2909 We need to align the LSB of the value we want to
2910 extract as the LSB of buf[sz - 1].
2911 The LSB from memory we need to read is at position
2912 offset + maxsize - 1. */
2913 HOST_WIDE_INT sz = maxsizei / BITS_PER_UNIT;
2914 if (INTEGRAL_TYPE_P (type))
2915 sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
2916 amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
2917 - offseti - maxsizei) % BITS_PER_UNIT;
2918 if (amnt)
2919 shift_bytes_in_array_right (buffer, len, amnt);
2920 amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
2921 - offseti - maxsizei - amnt) / BITS_PER_UNIT;
2922 if ((unsigned HOST_WIDE_INT) sz + amnt > (unsigned) len)
2923 len = 0;
2924 else
2926 buf = buffer + len - sz - amnt;
2927 len -= (buf - buffer);
2930 else
2932 amnt = ((unsigned HOST_WIDE_INT) offset2i
2933 - offseti) % BITS_PER_UNIT;
2934 if (amnt)
2936 buffer[len] = 0;
2937 shift_bytes_in_array_left (buffer, len + 1, amnt);
2938 buf = buffer + 1;
2941 tree val = native_interpret_expr (type, buf, len);
2942 /* If we chop off bits because the type's precision doesn't
2943 match the memory access size, this is OK when optimizing
2944 reads but not when called from the DSE code during
2945 elimination. */
2946 if (val
2947 && type != vr->type)
2949 if (! int_fits_type_p (val, vr->type))
2950 val = NULL_TREE;
2951 else
2952 val = fold_convert (vr->type, val);
2955 if (val)
2956 return data->finish (ao_ref_alias_set (&lhs_ref),
2957 ao_ref_base_alias_set (&lhs_ref), val);
2960 else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
2961 size2i))
2963 pd_data pd;
2964 tree rhs = gimple_assign_rhs1 (def_stmt);
2965 if (TREE_CODE (rhs) == SSA_NAME)
2966 rhs = SSA_VAL (rhs);
2967 pd.rhs = rhs;
2968 pd.offset = offset2i;
2969 pd.size = size2i;
2970 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
2971 ao_ref_base_alias_set (&lhs_ref),
2972 offseti, maxsizei);
2977 /* 4) Assignment from an SSA name whose definition we may be able
2978 to access pieces from or combine into a larger entity. */
2979 else if (known_eq (ref->size, maxsize)
2980 && is_gimple_reg_type (vr->type)
2981 && !contains_storage_order_barrier_p (vr->operands)
2982 && gimple_assign_single_p (def_stmt)
2983 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2985 tree lhs = gimple_assign_lhs (def_stmt);
2986 tree base2;
2987 poly_int64 offset2, size2, maxsize2;
2988 HOST_WIDE_INT offset2i, size2i, offseti;
2989 bool reverse;
2990 gcc_assert (lhs_ref_ok);
2991 base2 = ao_ref_base (&lhs_ref);
2992 offset2 = lhs_ref.offset;
2993 size2 = lhs_ref.size;
2994 maxsize2 = lhs_ref.max_size;
2995 reverse = reverse_storage_order_for_component_p (lhs);
2996 tree def_rhs = gimple_assign_rhs1 (def_stmt);
2997 if (!reverse
2998 && !storage_order_barrier_p (lhs)
2999 && known_size_p (maxsize2)
3000 && known_eq (maxsize2, size2)
3001 && adjust_offsets_for_equal_base_address (base, &offset,
3002 base2, &offset2))
3004 if (data->partial_defs.is_empty ()
3005 && known_subrange_p (offset, maxsize, offset2, size2)
3006 /* ??? We can't handle bitfield precision extracts without
3007 either using an alternate type for the BIT_FIELD_REF and
3008 then doing a conversion or possibly adjusting the offset
3009 according to endianness. */
3010 && (! INTEGRAL_TYPE_P (vr->type)
3011 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
3012 && multiple_p (ref->size, BITS_PER_UNIT))
3014 tree val = NULL_TREE;
3015 if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
3016 || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
3018 gimple_match_op op (gimple_match_cond::UNCOND,
3019 BIT_FIELD_REF, vr->type,
3020 SSA_VAL (def_rhs),
3021 bitsize_int (ref->size),
3022 bitsize_int (offset - offset2));
3023 val = vn_nary_build_or_lookup (&op);
3025 else if (known_eq (ref->size, size2))
3027 gimple_match_op op (gimple_match_cond::UNCOND,
3028 VIEW_CONVERT_EXPR, vr->type,
3029 SSA_VAL (def_rhs));
3030 val = vn_nary_build_or_lookup (&op);
3032 if (val
3033 && (TREE_CODE (val) != SSA_NAME
3034 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
3035 return data->finish (ao_ref_alias_set (&lhs_ref),
3036 ao_ref_base_alias_set (&lhs_ref), val);
3038 else if (maxsize.is_constant (&maxsizei)
3039 && offset.is_constant (&offseti)
3040 && offset2.is_constant (&offset2i)
3041 && size2.is_constant (&size2i)
3042 && ranges_known_overlap_p (offset, maxsize, offset2, size2))
3044 pd_data pd;
3045 pd.rhs = SSA_VAL (def_rhs);
3046 pd.offset = offset2i;
3047 pd.size = size2i;
3048 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3049 ao_ref_base_alias_set (&lhs_ref),
3050 offseti, maxsizei);
3055 /* 5) For aggregate copies translate the reference through them if
3056 the copy kills ref. */
3057 else if (data->vn_walk_kind == VN_WALKREWRITE
3058 && gimple_assign_single_p (def_stmt)
3059 && (DECL_P (gimple_assign_rhs1 (def_stmt))
3060 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
3061 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
3063 tree base2;
3064 int i, j, k;
3065 auto_vec<vn_reference_op_s> rhs;
3066 vn_reference_op_t vro;
3067 ao_ref r;
3069 gcc_assert (lhs_ref_ok);
3071 /* See if the assignment kills REF. */
3072 base2 = ao_ref_base (&lhs_ref);
3073 if (!lhs_ref.max_size_known_p ()
3074 || (base != base2
3075 && (TREE_CODE (base) != MEM_REF
3076 || TREE_CODE (base2) != MEM_REF
3077 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
3078 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
3079 TREE_OPERAND (base2, 1))))
3080 || !stmt_kills_ref_p (def_stmt, ref))
3081 return (void *)-1;
3083 /* Find the common base of ref and the lhs. lhs_ops already
3084 contains valueized operands for the lhs. */
3085 i = vr->operands.length () - 1;
3086 j = lhs_ops.length () - 1;
3087 while (j >= 0 && i >= 0
3088 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
3090 i--;
3091 j--;
3094 /* ??? The innermost op should always be a MEM_REF and we already
3095 checked that the assignment to the lhs kills vr. Thus for
3096 aggregate copies using char[] types the vn_reference_op_eq
3097 may fail when comparing types for compatibility. But we really
3098 don't care here - further lookups with the rewritten operands
3099 will simply fail if we messed up types too badly. */
3100 poly_int64 extra_off = 0;
3101 if (j == 0 && i >= 0
3102 && lhs_ops[0].opcode == MEM_REF
3103 && maybe_ne (lhs_ops[0].off, -1))
3105 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
3106 i--, j--;
3107 else if (vr->operands[i].opcode == MEM_REF
3108 && maybe_ne (vr->operands[i].off, -1))
3110 extra_off = vr->operands[i].off - lhs_ops[0].off;
3111 i--, j--;
3115 /* i now points to the first additional op.
3116 ??? LHS may not be completely contained in VR, one or more
3117 VIEW_CONVERT_EXPRs could be in its way. We could at least
3118 try handling outermost VIEW_CONVERT_EXPRs. */
3119 if (j != -1)
3120 return (void *)-1;
3122 /* Punt if the additional ops contain a storage order barrier. */
3123 for (k = i; k >= 0; k--)
3125 vro = &vr->operands[k];
3126 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
3127 return (void *)-1;
3130 /* Now re-write REF to be based on the rhs of the assignment. */
3131 tree rhs1 = gimple_assign_rhs1 (def_stmt);
3132 copy_reference_ops_from_ref (rhs1, &rhs);
3134 /* Apply an extra offset to the inner MEM_REF of the RHS. */
3135 if (maybe_ne (extra_off, 0))
3137 if (rhs.length () < 2)
3138 return (void *)-1;
3139 int ix = rhs.length () - 2;
3140 if (rhs[ix].opcode != MEM_REF
3141 || known_eq (rhs[ix].off, -1))
3142 return (void *)-1;
3143 rhs[ix].off += extra_off;
3144 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
3145 build_int_cst (TREE_TYPE (rhs[ix].op0),
3146 extra_off));
3149 /* Save the operands since we need to use the original ones for
3150 the hash entry we use. */
3151 if (!data->saved_operands.exists ())
3152 data->saved_operands = vr->operands.copy ();
3154 /* We need to prepend vr->operands[0..i] to rhs. */
3155 vec<vn_reference_op_s> old = vr->operands;
3156 if (i + 1 + rhs.length () > vr->operands.length ())
3157 vr->operands.safe_grow (i + 1 + rhs.length (), true);
3158 else
3159 vr->operands.truncate (i + 1 + rhs.length ());
3160 FOR_EACH_VEC_ELT (rhs, j, vro)
3161 vr->operands[i + 1 + j] = *vro;
3162 vr->operands = valueize_refs (vr->operands);
3163 if (old == shared_lookup_references)
3164 shared_lookup_references = vr->operands;
3165 vr->hashcode = vn_reference_compute_hash (vr);
3167 /* Try folding the new reference to a constant. */
3168 tree val = fully_constant_vn_reference_p (vr);
3169 if (val)
3171 if (data->partial_defs.is_empty ())
3172 return data->finish (ao_ref_alias_set (&lhs_ref),
3173 ao_ref_base_alias_set (&lhs_ref), val);
3174 /* This is the only interesting case for partial-def handling
3175 coming from targets that like to gimplify init-ctors as
3176 aggregate copies from constant data like aarch64 for
3177 PR83518. */
3178 if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
3180 pd_data pd;
3181 pd.rhs = val;
3182 pd.offset = 0;
3183 pd.size = maxsizei;
3184 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3185 ao_ref_base_alias_set (&lhs_ref),
3186 0, maxsizei);
3190 /* Continuing with partial defs isn't easily possible here; we
3191 have to find a full def from further lookups from here. Probably
3192 not worth special-casing everywhere. */
3193 if (!data->partial_defs.is_empty ())
3194 return (void *)-1;
3196 /* Adjust *ref from the new operands. */
3197 ao_ref rhs1_ref;
3198 ao_ref_init (&rhs1_ref, rhs1);
3199 if (!ao_ref_init_from_vn_reference (&r, ao_ref_alias_set (&rhs1_ref),
3200 ao_ref_base_alias_set (&rhs1_ref),
3201 vr->type, vr->operands))
3202 return (void *)-1;
3203 /* This can happen with bitfields. */
3204 if (maybe_ne (ref->size, r.size))
3205 return (void *)-1;
3206 *ref = r;
3208 /* Do not update last seen VUSE after translating. */
3209 data->last_vuse_ptr = NULL;
3210 /* Invalidate the original access path since it now contains
3211 the wrong base. */
3212 data->orig_ref.ref = NULL_TREE;
3213 /* Use the alias-set of this LHS for recording an eventual result. */
3214 if (data->first_set == -2)
3216 data->first_set = ao_ref_alias_set (&lhs_ref);
3217 data->first_base_set = ao_ref_base_alias_set (&lhs_ref);
3220 /* Keep looking for the adjusted *REF / VR pair. */
3221 return NULL;
3224 /* 6) For memcpy copies translate the reference through them if the copy
3225 kills ref. But we cannot (easily) do this translation if the memcpy is
3226 a storage order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that
3227 can modify the storage order of objects (see storage_order_barrier_p). */
3228 else if (data->vn_walk_kind == VN_WALKREWRITE
3229 && is_gimple_reg_type (vr->type)
3230 /* ??? Handle BCOPY as well. */
3231 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
3232 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY_CHK)
3233 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
3234 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY_CHK)
3235 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)
3236 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE_CHK))
3237 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
3238 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
3239 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
3240 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
3241 && (poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
3242 || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
3243 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)),
3244 &copy_size)))
3245 /* Handling this is more complicated, so give up for now. */
3246 && data->partial_defs.is_empty ())
3248 tree lhs, rhs;
3249 ao_ref r;
3250 poly_int64 rhs_offset, lhs_offset;
3251 vn_reference_op_s op;
3252 poly_uint64 mem_offset;
3253 poly_int64 at, byte_maxsize;
3255 /* Only handle non-variable, addressable refs. */
3256 if (maybe_ne (ref->size, maxsize)
3257 || !multiple_p (offset, BITS_PER_UNIT, &at)
3258 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
3259 return (void *)-1;
3261 /* Extract a pointer base and an offset for the destination. */
3262 lhs = gimple_call_arg (def_stmt, 0);
3263 lhs_offset = 0;
3264 if (TREE_CODE (lhs) == SSA_NAME)
3266 lhs = vn_valueize (lhs);
3267 if (TREE_CODE (lhs) == SSA_NAME)
3269 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
3270 if (gimple_assign_single_p (def_stmt)
3271 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
3272 lhs = gimple_assign_rhs1 (def_stmt);
3275 if (TREE_CODE (lhs) == ADDR_EXPR)
3277 if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (lhs)))
3278 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (lhs))))
3279 return (void *)-1;
3280 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
3281 &lhs_offset);
3282 if (!tem)
3283 return (void *)-1;
3284 if (TREE_CODE (tem) == MEM_REF
3285 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3287 lhs = TREE_OPERAND (tem, 0);
3288 if (TREE_CODE (lhs) == SSA_NAME)
3289 lhs = vn_valueize (lhs);
3290 lhs_offset += mem_offset;
3292 else if (DECL_P (tem))
3293 lhs = build_fold_addr_expr (tem);
3294 else
3295 return (void *)-1;
3297 if (TREE_CODE (lhs) != SSA_NAME
3298 && TREE_CODE (lhs) != ADDR_EXPR)
3299 return (void *)-1;
3301 /* Extract a pointer base and an offset for the source. */
3302 rhs = gimple_call_arg (def_stmt, 1);
3303 rhs_offset = 0;
3304 if (TREE_CODE (rhs) == SSA_NAME)
3305 rhs = vn_valueize (rhs);
3306 if (TREE_CODE (rhs) == ADDR_EXPR)
3308 if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhs)))
3309 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (rhs))))
3310 return (void *)-1;
3311 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
3312 &rhs_offset);
3313 if (!tem)
3314 return (void *)-1;
3315 if (TREE_CODE (tem) == MEM_REF
3316 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3318 rhs = TREE_OPERAND (tem, 0);
3319 rhs_offset += mem_offset;
3321 else if (DECL_P (tem)
3322 || TREE_CODE (tem) == STRING_CST)
3323 rhs = build_fold_addr_expr (tem);
3324 else
3325 return (void *)-1;
3327 if (TREE_CODE (rhs) == SSA_NAME)
3328 rhs = SSA_VAL (rhs);
3329 else if (TREE_CODE (rhs) != ADDR_EXPR)
3330 return (void *)-1;
3332 /* The bases of the destination and the reference have to agree. */
3333 if (TREE_CODE (base) == MEM_REF)
3335 if (TREE_OPERAND (base, 0) != lhs
3336 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
3337 return (void *) -1;
3338 at += mem_offset;
3340 else if (!DECL_P (base)
3341 || TREE_CODE (lhs) != ADDR_EXPR
3342 || TREE_OPERAND (lhs, 0) != base)
3343 return (void *)-1;
3345 /* If the access is completely outside of the memcpy destination
3346 area there is no aliasing. */
3347 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
3348 return NULL;
3349 /* And the access has to be contained within the memcpy destination. */
3350 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
3351 return (void *)-1;
3353 /* Save the operands since we need to use the original ones for
3354 the hash entry we use. */
3355 if (!data->saved_operands.exists ())
3356 data->saved_operands = vr->operands.copy ();
3358 /* Make room for 2 operands in the new reference. */
3359 if (vr->operands.length () < 2)
3361 vec<vn_reference_op_s> old = vr->operands;
3362 vr->operands.safe_grow_cleared (2, true);
3363 if (old == shared_lookup_references)
3364 shared_lookup_references = vr->operands;
3366 else
3367 vr->operands.truncate (2);
3369 /* The looked-through reference is a simple MEM_REF. */
3370 memset (&op, 0, sizeof (op));
3371 op.type = vr->type;
3372 op.opcode = MEM_REF;
3373 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
3374 op.off = at - lhs_offset + rhs_offset;
3375 vr->operands[0] = op;
3376 op.type = TREE_TYPE (rhs);
3377 op.opcode = TREE_CODE (rhs);
3378 op.op0 = rhs;
3379 op.off = -1;
3380 vr->operands[1] = op;
3381 vr->hashcode = vn_reference_compute_hash (vr);
3383 /* Try folding the new reference to a constant. */
3384 tree val = fully_constant_vn_reference_p (vr);
3385 if (val)
3386 return data->finish (0, 0, val);
3388 /* Adjust *ref from the new operands. */
3389 if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
3390 return (void *)-1;
3391 /* This can happen with bitfields. */
3392 if (maybe_ne (ref->size, r.size))
3393 return (void *)-1;
3394 *ref = r;
3396 /* Do not update last seen VUSE after translating. */
3397 data->last_vuse_ptr = NULL;
3398 /* Invalidate the original access path since it now contains
3399 the wrong base. */
3400 data->orig_ref.ref = NULL_TREE;
3401 /* Use the alias-set of this stmt for recording an eventual result. */
3402 if (data->first_set == -2)
3404 data->first_set = 0;
3405 data->first_base_set = 0;
3408 /* Keep looking for the adjusted *REF / VR pair. */
3409 return NULL;
3412 /* Bail out and stop walking. */
3413 return (void *)-1;
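/* A byte-aligned model of case 1) above: when the read is fully covered by
   memset (p, c, n), its value can be recomputed by filling a scratch buffer
   with the memset byte and re-interpreting it in the type of the read
   (memset_value_demo is a hypothetical helper and not compiled; the real
   code also handles unaligned bit offsets, big-endian layouts and partial
   coverage).  */
#if 0
static unsigned int
memset_value_demo (unsigned char c)
{
  unsigned char buf[sizeof (unsigned int)];
  memset (buf, c, sizeof buf);
  unsigned int val;
  memcpy (&val, buf, sizeof val);
  return val;   /* E.g. c == 0xab yields 0xabababab.  */
}
#endif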
3416 /* Return a reference op vector from OP that can be used for
3417 vn_reference_lookup_pieces. The caller is responsible for releasing
3418 the vector. */
3420 vec<vn_reference_op_s>
3421 vn_reference_operands_for_lookup (tree op)
3423 bool valueized;
3424 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
3427 /* Look up a reference operation by its parts in the current hash table.
3428 Returns the resulting value number if it exists in the hash table,
3429 NULL_TREE otherwise. VNRESULT will be filled in with the actual
3430 vn_reference_t stored in the hashtable if something is found. */
3432 tree
3433 vn_reference_lookup_pieces (tree vuse, alias_set_type set,
3434 alias_set_type base_set, tree type,
3435 vec<vn_reference_op_s> operands,
3436 vn_reference_t *vnresult, vn_lookup_kind kind)
3438 struct vn_reference_s vr1;
3439 vn_reference_t tmp;
3440 tree cst;
3442 if (!vnresult)
3443 vnresult = &tmp;
3444 *vnresult = NULL;
3446 vr1.vuse = vuse_ssa_val (vuse);
3447 shared_lookup_references.truncate (0);
3448 shared_lookup_references.safe_grow (operands.length (), true);
3449 memcpy (shared_lookup_references.address (),
3450 operands.address (),
3451 sizeof (vn_reference_op_s)
3452 * operands.length ());
3453 vr1.operands = operands = shared_lookup_references
3454 = valueize_refs (shared_lookup_references);
3455 vr1.type = type;
3456 vr1.set = set;
3457 vr1.base_set = base_set;
3458 vr1.hashcode = vn_reference_compute_hash (&vr1);
3459 if ((cst = fully_constant_vn_reference_p (&vr1)))
3460 return cst;
3462 vn_reference_lookup_1 (&vr1, vnresult);
3463 if (!*vnresult
3464 && kind != VN_NOWALK
3465 && vr1.vuse)
3467 ao_ref r;
3468 unsigned limit = param_sccvn_max_alias_queries_per_access;
3469 vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE);
3470 if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
3471 vr1.operands))
3472 *vnresult
3473 = ((vn_reference_t)
3474 walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
3475 vn_reference_lookup_3, vuse_valueize,
3476 limit, &data));
3477 gcc_checking_assert (vr1.operands == shared_lookup_references);
3480 if (*vnresult)
3481 return (*vnresult)->result;
3483 return NULL_TREE;
3486 /* Look up OP in the current hash table, and return the resulting value
3487 number if it exists in the hash table. Return NULL_TREE if it does
3488 not exist in the hash table or if the result field of the structure
3489 was NULL. VNRESULT will be filled in with the vn_reference_t
3490 stored in the hashtable if one exists. When TBAA_P is false assume
3491 we are looking up a store and treat it as having alias-set zero.
3492 *LAST_VUSE_PTR will be updated with the VUSE for which the value lookup succeeded.
3493 MASK is either NULL_TREE, or can be an INTEGER_CST if the result of the
3494 load is bitwise anded with MASK and so we are only interested in a subset
3495 of the bits and can ignore if the other bits are uninitialized or
3496 not initialized with constants. */
3498 tree
3499 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
3500 vn_reference_t *vnresult, bool tbaa_p,
3501 tree *last_vuse_ptr, tree mask)
3503 vec<vn_reference_op_s> operands;
3504 struct vn_reference_s vr1;
3505 bool valueized_anything;
3507 if (vnresult)
3508 *vnresult = NULL;
3510 vr1.vuse = vuse_ssa_val (vuse);
3511 vr1.operands = operands
3512 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
3513 vr1.type = TREE_TYPE (op);
3514 ao_ref op_ref;
3515 ao_ref_init (&op_ref, op);
3516 vr1.set = ao_ref_alias_set (&op_ref);
3517 vr1.base_set = ao_ref_base_alias_set (&op_ref);
3518 vr1.hashcode = vn_reference_compute_hash (&vr1);
3519 if (mask == NULL_TREE)
3520 if (tree cst = fully_constant_vn_reference_p (&vr1))
3521 return cst;
3523 if (kind != VN_NOWALK && vr1.vuse)
3525 vn_reference_t wvnresult;
3526 ao_ref r;
3527 unsigned limit = param_sccvn_max_alias_queries_per_access;
3528 /* Make sure to use a valueized reference if we valueized anything.
3529 Otherwise preserve the full reference for advanced TBAA. */
3530 if (!valueized_anything
3531 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
3532 vr1.type, vr1.operands))
3533 ao_ref_init (&r, op);
3534 vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
3535 last_vuse_ptr, kind, tbaa_p, mask);
3537 wvnresult
3538 = ((vn_reference_t)
3539 walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2,
3540 vn_reference_lookup_3, vuse_valueize, limit,
3541 &data));
3542 gcc_checking_assert (vr1.operands == shared_lookup_references);
3543 if (wvnresult)
3545 gcc_assert (mask == NULL_TREE);
3546 if (vnresult)
3547 *vnresult = wvnresult;
3548 return wvnresult->result;
3550 else if (mask)
3551 return data.masked_result;
3553 return NULL_TREE;
3556 if (last_vuse_ptr)
3557 *last_vuse_ptr = vr1.vuse;
3558 if (mask)
3559 return NULL_TREE;
3560 return vn_reference_lookup_1 (&vr1, vnresult);
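/* A usage sketch for the entry point above: querying the value of a load,
   assuming LOAD_STMT is some GIMPLE assignment whose RHS is the memory
   reference of interest (this fragment only illustrates the calling
   convention shown above; it is not a standalone function and is kept out
   of the build).  */
#if 0
  vn_reference_t vnresult = NULL;
  tree val = vn_reference_lookup (gimple_assign_rhs1 (load_stmt),
                                  gimple_vuse (load_stmt), VN_WALKREWRITE,
                                  &vnresult, true, NULL, NULL_TREE);
  if (val)
    {
      /* The load was value-numbered to VAL (a constant or an SSA name);
         an eliminator could now replace the loaded value with VAL.  */
    }
#endif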
3563 /* Look up CALL in the current hash table and return the entry in
3564 *VNRESULT if found. Populates *VR for the hashtable lookup. */
3566 void
3567 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
3568 vn_reference_t vr)
3570 if (vnresult)
3571 *vnresult = NULL;
3573 tree vuse = gimple_vuse (call);
3575 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
3576 vr->operands = valueize_shared_reference_ops_from_call (call);
3577 vr->type = gimple_expr_type (call);
3578 vr->punned = false;
3579 vr->set = 0;
3580 vr->base_set = 0;
3581 vr->hashcode = vn_reference_compute_hash (vr);
3582 vn_reference_lookup_1 (vr, vnresult);
3585 /* Insert OP into the current hash table with a value number of RESULT. */
3587 static void
3588 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
3590 vn_reference_s **slot;
3591 vn_reference_t vr1;
3592 bool tem;
3594 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3595 if (TREE_CODE (result) == SSA_NAME)
3596 vr1->value_id = VN_INFO (result)->value_id;
3597 else
3598 vr1->value_id = get_or_alloc_constant_value_id (result);
3599 vr1->vuse = vuse_ssa_val (vuse);
3600 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
3601 vr1->type = TREE_TYPE (op);
3602 vr1->punned = false;
3603 ao_ref op_ref;
3604 ao_ref_init (&op_ref, op);
3605 vr1->set = ao_ref_alias_set (&op_ref);
3606 vr1->base_set = ao_ref_base_alias_set (&op_ref);
3607 vr1->hashcode = vn_reference_compute_hash (vr1);
3608 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
3609 vr1->result_vdef = vdef;
3611 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3612 INSERT);
3614 /* Because IL walking on reference lookup can end up visiting
3615 a def that is only to be visited later in iteration order
3616 when we are about to make an irreducible region reducible,
3617 the def can effectively be processed and its ref inserted
3618 by vn_reference_lookup_3 already. So we cannot assert (!*slot),
3619 but we do save a lookup if we deal with already inserted refs here. */
3620 if (*slot)
3622 /* We cannot assert that we have the same value either because
3623 when disentangling an irreducible region we may end up visiting
3624 a use before the corresponding def. That's a missed optimization
3625 only though. See gcc.dg/tree-ssa/pr87126.c for example. */
3626 if (dump_file && (dump_flags & TDF_DETAILS)
3627 && !operand_equal_p ((*slot)->result, vr1->result, 0))
3629 fprintf (dump_file, "Keeping old value ");
3630 print_generic_expr (dump_file, (*slot)->result);
3631 fprintf (dump_file, " because of collision\n");
3633 free_reference (vr1);
3634 obstack_free (&vn_tables_obstack, vr1);
3635 return;
3638 *slot = vr1;
3639 vr1->next = last_inserted_ref;
3640 last_inserted_ref = vr1;
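/* A minimal model of the collision policy above: when a slot for the
   reference already exists, the previously recorded result is kept and the
   new one is dropped (probing and memory management are omitted; all names
   are hypothetical and the block is not compiled).  */
#if 0
struct toy_ref { unsigned hash; int result; int valid; };
static struct toy_ref toy_refs[64];

static void
toy_ref_insert (unsigned hash, int result)
{
  struct toy_ref *slot = &toy_refs[hash % 64];
  if (slot->valid)
    /* Keep the old value, as done above.  */
    return;
  slot->hash = hash;
  slot->result = result;
  slot->valid = 1;
}
#endif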
3643 /* Insert a reference by its pieces into the current hash table with
3644 a value number of RESULT. Return the resulting reference
3645 structure we created. */
3647 vn_reference_t
3648 vn_reference_insert_pieces (tree vuse, alias_set_type set,
3649 alias_set_type base_set, tree type,
3650 vec<vn_reference_op_s> operands,
3651 tree result, unsigned int value_id)
3654 vn_reference_s **slot;
3655 vn_reference_t vr1;
3657 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3658 vr1->value_id = value_id;
3659 vr1->vuse = vuse_ssa_val (vuse);
3660 vr1->operands = valueize_refs (operands);
3661 vr1->type = type;
3662 vr1->punned = false;
3663 vr1->set = set;
3664 vr1->base_set = base_set;
3665 vr1->hashcode = vn_reference_compute_hash (vr1);
3666 if (result && TREE_CODE (result) == SSA_NAME)
3667 result = SSA_VAL (result);
3668 vr1->result = result;
3670 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3671 INSERT);
3673 /* At this point we should have all the things inserted that we have
3674 seen before, and we should never try inserting something that
3675 already exists. */
3676 gcc_assert (!*slot);
3678 *slot = vr1;
3679 vr1->next = last_inserted_ref;
3680 last_inserted_ref = vr1;
3681 return vr1;
3684 /* Compute and return the hash value for nary operation VNO1. */
3686 static hashval_t
3687 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
3689 inchash::hash hstate;
3690 unsigned i;
3692 for (i = 0; i < vno1->length; ++i)
3693 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
3694 vno1->op[i] = SSA_VAL (vno1->op[i]);
3696 if (((vno1->length == 2
3697 && commutative_tree_code (vno1->opcode))
3698 || (vno1->length == 3
3699 && commutative_ternary_tree_code (vno1->opcode)))
3700 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3701 std::swap (vno1->op[0], vno1->op[1]);
3702 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
3703 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3705 std::swap (vno1->op[0], vno1->op[1]);
3706 vno1->opcode = swap_tree_comparison (vno1->opcode);
3709 hstate.add_int (vno1->opcode);
3710 for (i = 0; i < vno1->length; ++i)
3711 inchash::add_expr (vno1->op[i], hstate);
3713 return hstate.end ();
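/* The hash above canonicalizes operand order first so that commutative
   expressions (and swapped comparisons) fall into the same bucket.  A
   minimal model with integer operand ids; hash_commutative_demo is a
   hypothetical helper using an arbitrary mixing constant and is kept out
   of the build.  */
#if 0
static unsigned
hash_commutative_demo (int opcode, int op0, int op1)
{
  if (op0 > op1)
    {
      int tmp = op0;   /* Canonicalize a OP b vs. b OP a.  */
      op0 = op1;
      op1 = tmp;
    }
  unsigned h = (unsigned) opcode;
  h = h * 31 + (unsigned) op0;
  h = h * 31 + (unsigned) op1;
  return h;
}
#endif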
3716 /* Compare nary operations VNO1 and VNO2 and return true if they are
3717 equivalent. */
3719 bool
3720 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
3722 unsigned i;
3724 if (vno1->hashcode != vno2->hashcode)
3725 return false;
3727 if (vno1->length != vno2->length)
3728 return false;
3730 if (vno1->opcode != vno2->opcode
3731 || !types_compatible_p (vno1->type, vno2->type))
3732 return false;
3734 for (i = 0; i < vno1->length; ++i)
3735 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
3736 return false;
3738 /* BIT_INSERT_EXPR has an implicit operand as the type precision
3739 of op1. Need to check to make sure they are the same. */
3740 if (vno1->opcode == BIT_INSERT_EXPR
3741 && TREE_CODE (vno1->op[1]) == INTEGER_CST
3742 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
3743 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
3744 return false;
3746 return true;
3749 /* Initialize VNO from the pieces provided. */
3751 static void
3752 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
3753 enum tree_code code, tree type, tree *ops)
3755 vno->opcode = code;
3756 vno->length = length;
3757 vno->type = type;
3758 memcpy (&vno->op[0], ops, sizeof (tree) * length);
3761 /* Return the number of operands for a vn_nary ops structure from STMT. */
3763 static unsigned int
3764 vn_nary_length_from_stmt (gimple *stmt)
3766 switch (gimple_assign_rhs_code (stmt))
3768 case REALPART_EXPR:
3769 case IMAGPART_EXPR:
3770 case VIEW_CONVERT_EXPR:
3771 return 1;
3773 case BIT_FIELD_REF:
3774 return 3;
3776 case CONSTRUCTOR:
3777 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3779 default:
3780 return gimple_num_ops (stmt) - 1;
3784 /* Initialize VNO from STMT. */
3786 static void
3787 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
3789 unsigned i;
3791 vno->opcode = gimple_assign_rhs_code (stmt);
3792 vno->type = gimple_expr_type (stmt);
3793 switch (vno->opcode)
3795 case REALPART_EXPR:
3796 case IMAGPART_EXPR:
3797 case VIEW_CONVERT_EXPR:
3798 vno->length = 1;
3799 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3800 break;
3802 case BIT_FIELD_REF:
3803 vno->length = 3;
3804 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3805 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
3806 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
3807 break;
3809 case CONSTRUCTOR:
3810 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3811 for (i = 0; i < vno->length; ++i)
3812 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
3813 break;
3815 default:
3816 gcc_checking_assert (!gimple_assign_single_p (stmt));
3817 vno->length = gimple_num_ops (stmt) - 1;
3818 for (i = 0; i < vno->length; ++i)
3819 vno->op[i] = gimple_op (stmt, i + 1);
3823 /* Compute the hashcode for VNO and look for it in the hash table;
3824 return the resulting value number if it exists in the hash table.
3825 Return NULL_TREE if it does not exist in the hash table or if the
3826 result field of the operation is NULL. VNRESULT will contain the
3827 vn_nary_op_t from the hashtable if it exists. */
3829 static tree
3830 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
3832 vn_nary_op_s **slot;
3834 if (vnresult)
3835 *vnresult = NULL;
3837 vno->hashcode = vn_nary_op_compute_hash (vno);
3838 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
3839 if (!slot)
3840 return NULL_TREE;
3841 if (vnresult)
3842 *vnresult = *slot;
3843 return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
3846 /* Lookup an n-ary operation by its pieces and return the resulting value
3847 number if it exists in the hash table. Return NULL_TREE if it does
3848 not exist in the hash table or if the result field of the operation
3849 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3850 if it exists. */
3852 tree
3853 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
3854 tree type, tree *ops, vn_nary_op_t *vnresult)
3856 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
3857 sizeof_vn_nary_op (length));
3858 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3859 return vn_nary_op_lookup_1 (vno1, vnresult);
3862 /* Lookup the rhs of STMT in the current hash table, and return the resulting
3863 value number if it exists in the hash table. Return NULL_TREE if
3864 it does not exist in the hash table. VNRESULT will contain the
3865 vn_nary_op_t from the hashtable if it exists. */
3867 tree
3868 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
3870 vn_nary_op_t vno1
3871 = XALLOCAVAR (struct vn_nary_op_s,
3872 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
3873 init_vn_nary_op_from_stmt (vno1, stmt);
3874 return vn_nary_op_lookup_1 (vno1, vnresult);
3877 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
3879 static vn_nary_op_t
3880 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
3882 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
3885 /* Allocate and initialize a vn_nary_op_t on the VN tables
3886 obstack. */
3888 static vn_nary_op_t
3889 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
3891 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
3893 vno1->value_id = value_id;
3894 vno1->length = length;
3895 vno1->predicated_values = 0;
3896 vno1->u.result = result;
3898 return vno1;
3901 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
3902 VNO->HASHCODE first. */
3904 static vn_nary_op_t
3905 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
3906 bool compute_hash)
3908 vn_nary_op_s **slot;
3910 if (compute_hash)
3912 vno->hashcode = vn_nary_op_compute_hash (vno);
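/* At this point a predicated value to be inserted has to be a single
   result valid in a single block. */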
3913 gcc_assert (! vno->predicated_values
3914 || (! vno->u.values->next
3915 && vno->u.values->n == 1));
3918 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
3919 vno->unwind_to = *slot;
3920 if (*slot)
3922 /* Prefer non-predicated values.
3923 ??? Only if those are constant, otherwise, with constant predicated
3924 value, turn them into predicated values with entry-block validity
3925 (??? but we always find the first valid result currently). */
3926 if ((*slot)->predicated_values
3927 && ! vno->predicated_values)
3929 /* ??? We cannot remove *slot from the unwind stack list.
3930 For the moment we deal with this by skipping not found
3931 entries but this isn't ideal ... */
3932 *slot = vno;
3933 /* ??? Maintain a stack of states we can unwind in
3934 vn_nary_op_s? But how far do we unwind? In reality
3935 we need to push change records somewhere... Or do not
3936 unwind and link vn_nary_op_s at all but instead
3937 unwind the results "list", linking that, which also
3938 doesn't move on hashtable resize. */
3939 /* We can also have a ->unwind_to recording *slot there.
3940 That way we can make u.values a fixed size array with
3941 recording the number of entries but of course we then
3942 have always N copies for each unwind_to-state. Or we
3943 make sure to only ever append and each unwinding will
3944 pop off one entry (but how to deal with predicated
3945 replaced with non-predicated here?) */
3946 vno->next = last_inserted_nary;
3947 last_inserted_nary = vno;
3948 return vno;
3950 else if (vno->predicated_values
3951 && ! (*slot)->predicated_values)
3952 return *slot;
3953 else if (vno->predicated_values
3954 && (*slot)->predicated_values)
3956 /* ??? Factor this all into an insert_single_predicated_value
3957 routine. */
3958 gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
3959 basic_block vno_bb
3960 = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
3961 vn_pval *nval = vno->u.values;
3962 vn_pval **next = &vno->u.values;
3963 bool found = false;
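/* Rebuild the list of predicated values: entries with a different
   result are copied unchanged, an entry with the same result gets
   VNO's block appended to its validity set, and if no entry matched
   the new value is appended at the end. */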
3964 for (vn_pval *val = (*slot)->u.values; val; val = val->next)
3966 if (expressions_equal_p (val->result, vno->u.values->result))
3968 found = true;
3969 for (unsigned i = 0; i < val->n; ++i)
3971 basic_block val_bb
3972 = BASIC_BLOCK_FOR_FN (cfun,
3973 val->valid_dominated_by_p[i]);
3974 if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
3975 /* Value registered with more generic predicate. */
3976 return *slot;
3977 else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
3978 /* Shouldn't happen, we insert in RPO order. */
3979 gcc_unreachable ();
3981 /* Append value. */
3982 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3983 sizeof (vn_pval)
3984 + val->n * sizeof (int));
3985 (*next)->next = NULL;
3986 (*next)->result = val->result;
3987 (*next)->n = val->n + 1;
3988 memcpy ((*next)->valid_dominated_by_p,
3989 val->valid_dominated_by_p,
3990 val->n * sizeof (int));
3991 (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
3992 next = &(*next)->next;
3993 if (dump_file && (dump_flags & TDF_DETAILS))
3994 fprintf (dump_file, "Appending predicate to value.\n");
3995 continue;
3997 /* Copy other predicated values. */
3998 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3999 sizeof (vn_pval)
4000 + (val->n-1) * sizeof (int));
4001 memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
4002 (*next)->next = NULL;
4003 next = &(*next)->next;
4005 if (!found)
4006 *next = nval;
4008 *slot = vno;
4009 vno->next = last_inserted_nary;
4010 last_inserted_nary = vno;
4011 return vno;
4014 /* While we do not want to insert things twice, it's awkward to
4015 avoid it in the case where visit_nary_op pattern-matches stuff
4016 and ends up simplifying the replacement to itself. We then
4017 get two inserts, one from visit_nary_op and one from
4018 vn_nary_build_or_lookup.
4019 So allow inserts with the same value number. */
4020 if ((*slot)->u.result == vno->u.result)
4021 return *slot;
4024 /* ??? There's also optimistic vs. previous committed state merging
4025 that is problematic for the case of unwinding. */
4027 /* ??? We should return NULL if we do not use 'vno' and have the
4028 caller release it. */
4029 gcc_assert (!*slot);
4031 *slot = vno;
4032 vno->next = last_inserted_nary;
4033 last_inserted_nary = vno;
4034 return vno;
4037 /* Insert an n-ary operation into the current hash table using its
4038 pieces. Return the vn_nary_op_t structure we created and put in
4039 the hashtable. */
4041 vn_nary_op_t
4042 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
4043 tree type, tree *ops,
4044 tree result, unsigned int value_id)
4046 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
4047 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4048 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
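/* Insert the n-ary operation given by LENGTH, CODE, TYPE and OPS as a
   predicated value with RESULT, valid only in the destination block of
   edge PRED_E, using VALUE_ID as its value id. Return NULL when the
   predicate cannot be recorded, that is, when PRED_E is a backedge or
   its destination has more than one non-backedge predecessor. */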
4051 static vn_nary_op_t
4052 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
4053 tree type, tree *ops,
4054 tree result, unsigned int value_id,
4055 edge pred_e)
4057 /* ??? Currently tracking BBs. */
4058 if (! single_pred_p (pred_e->dest))
4060 /* Never record for backedges. */
4061 if (pred_e->flags & EDGE_DFS_BACK)
4062 return NULL;
4063 edge_iterator ei;
4064 edge e;
4065 int cnt = 0;
4066 /* Ignore backedges. */
4067 FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
4068 if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4069 cnt++;
4070 if (cnt != 1)
4071 return NULL;
4073 if (dump_file && (dump_flags & TDF_DETAILS)
4074 /* ??? Fix dumping, but currently we only get comparisons. */
4075 && TREE_CODE_CLASS (code) == tcc_comparison)
4077 fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
4078 pred_e->dest->index);
4079 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4080 fprintf (dump_file, " %s ", get_tree_code_name (code));
4081 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4082 fprintf (dump_file, " == %s\n",
4083 integer_zerop (result) ? "false" : "true");
4085 vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
4086 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4087 vno1->predicated_values = 1;
4088 vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4089 sizeof (vn_pval));
4090 vno1->u.values->next = NULL;
4091 vno1->u.values->result = result;
4092 vno1->u.values->n = 1;
4093 vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
4094 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4097 static bool
4098 dominated_by_p_w_unex (basic_block bb1, basic_block bb2);
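/* Return the recorded result of VNO valid in basic block BB: the plain
   result if VNO is not predicated, otherwise the result of the first
   predicated value whose recorded block dominates BB (taking
   non-executable edges into account), or NULL_TREE if there is none. */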
4100 static tree
4101 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
4103 if (! vno->predicated_values)
4104 return vno->u.result;
4105 for (vn_pval *val = vno->u.values; val; val = val->next)
4106 for (unsigned i = 0; i < val->n; ++i)
4107 if (dominated_by_p_w_unex (bb,
4108 BASIC_BLOCK_FOR_FN
4109 (cfun, val->valid_dominated_by_p[i])))
4110 return val->result;
4111 return NULL_TREE;
4114 /* Insert the rhs of STMT into the current hash table with a value number of
4115 RESULT. */
4117 static vn_nary_op_t
4118 vn_nary_op_insert_stmt (gimple *stmt, tree result)
4120 vn_nary_op_t vno1
4121 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
4122 result, VN_INFO (result)->value_id);
4123 init_vn_nary_op_from_stmt (vno1, stmt);
4124 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4127 /* Compute a hashcode for PHI operation VP1 and return it. */
4129 static inline hashval_t
4130 vn_phi_compute_hash (vn_phi_t vp1)
4132 inchash::hash hstate;
4133 tree phi1op;
4134 tree type;
4135 edge e;
4136 edge_iterator ei;
4138 hstate.add_int (EDGE_COUNT (vp1->block->preds));
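/* Only mix in the block index for PHIs with more than two predecessors
   and for two-predecessor PHIs at loop headers; single-predecessor PHIs
   and other two-predecessor PHIs are hashed without it so they can be
   CSEd across blocks via their controlling condition (see vn_phi_eq). */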
4139 switch (EDGE_COUNT (vp1->block->preds))
4141 case 1:
4142 break;
4143 case 2:
4144 if (vp1->block->loop_father->header == vp1->block)
4146 else
4147 break;
4148 /* Fallthru. */
4149 default:
4150 hstate.add_int (vp1->block->index);
4153 /* If all PHI arguments are constants we need to distinguish
4154 the PHI node via its type. */
4155 type = vp1->type;
4156 hstate.merge_hash (vn_hash_type (type));
4158 FOR_EACH_EDGE (e, ei, vp1->block->preds)
4160 /* Don't hash backedge values; they need to be handled as VN_TOP
4161 for optimistic value-numbering. */
4162 if (e->flags & EDGE_DFS_BACK)
4163 continue;
4165 phi1op = vp1->phiargs[e->dest_idx];
4166 if (phi1op == VN_TOP)
4167 continue;
4168 inchash::add_expr (phi1op, hstate);
4171 return hstate.end ();
4175 /* Return true if COND1 and COND2 represent the same condition, set
4176 *INVERTED_P if one needs to be inverted to make it the same as
4177 the other. */
4179 static bool
4180 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
4181 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
4183 enum tree_code code1 = gimple_cond_code (cond1);
4184 enum tree_code code2 = gimple_cond_code (cond2);
4186 *inverted_p = false;
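/* Try to match the two condition codes directly, by swapping the
   operands of one comparison, by inverting it, or by both, and
   remember whether an inversion was needed. */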
4187 if (code1 == code2)
4189 else if (code1 == swap_tree_comparison (code2))
4190 std::swap (lhs2, rhs2);
4191 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
4192 *inverted_p = true;
4193 else if (code1 == invert_tree_comparison
4194 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
4196 std::swap (lhs2, rhs2);
4197 *inverted_p = true;
4199 else
4200 return false;
4202 return ((expressions_equal_p (lhs1, lhs2)
4203 && expressions_equal_p (rhs1, rhs2))
4204 || (commutative_tree_code (code1)
4205 && expressions_equal_p (lhs1, rhs2)
4206 && expressions_equal_p (rhs1, lhs2)));
4209 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
4211 static int
4212 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
4214 if (vp1->hashcode != vp2->hashcode)
4215 return false;
4217 if (vp1->block != vp2->block)
4219 if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
4220 return false;
4222 switch (EDGE_COUNT (vp1->block->preds))
4224 case 1:
4225 /* Single-arg PHIs are just copies. */
4226 break;
4228 case 2:
4230 /* Rule out backedges into the PHI. */
4231 if (vp1->block->loop_father->header == vp1->block
4232 || vp2->block->loop_father->header == vp2->block)
4233 return false;
4235 /* If the PHI nodes do not have compatible types
4236 they are not the same. */
4237 if (!types_compatible_p (vp1->type, vp2->type))
4238 return false;
4240 basic_block idom1
4241 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4242 basic_block idom2
4243 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
4244 /* If the immediate dominators end in switch stmts, multiple
4245 values may end up in the same PHI arg via intermediate
4246 CFG merges. */
4247 if (EDGE_COUNT (idom1->succs) != 2
4248 || EDGE_COUNT (idom2->succs) != 2)
4249 return false;
4251 /* Verify the controlling stmt is the same. */
4252 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
4253 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
4254 if (! last1 || ! last2)
4255 return false;
4256 bool inverted_p;
4257 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
4258 last2, vp2->cclhs, vp2->ccrhs,
4259 &inverted_p))
4260 return false;
4262 /* Get at true/false controlled edges into the PHI. */
4263 edge te1, te2, fe1, fe2;
4264 if (! extract_true_false_controlled_edges (idom1, vp1->block,
4265 &te1, &fe1)
4266 || ! extract_true_false_controlled_edges (idom2, vp2->block,
4267 &te2, &fe2))
4268 return false;
4270 /* Swap edges if the second condition is the inverse of the
4271 first. */
4272 if (inverted_p)
4273 std::swap (te2, fe2);
4275 /* ??? Handle VN_TOP specially. */
4276 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
4277 vp2->phiargs[te2->dest_idx])
4278 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
4279 vp2->phiargs[fe2->dest_idx]))
4280 return false;
4282 return true;
4285 default:
4286 return false;
4290 /* If the PHI nodes do not have compatible types
4291 they are not the same. */
4292 if (!types_compatible_p (vp1->type, vp2->type))
4293 return false;
4295 /* Any phi in the same block will have its arguments in the
4296 same edge order, because of how we store phi nodes. */
4297 unsigned nargs = EDGE_COUNT (vp1->block->preds);
4298 for (unsigned i = 0; i < nargs; ++i)
4300 tree phi1op = vp1->phiargs[i];
4301 tree phi2op = vp2->phiargs[i];
4302 if (phi1op == phi2op)
4303 continue;
4304 if (!expressions_equal_p (phi1op, phi2op))
4305 return false;
4308 return true;
4311 /* Lookup PHI in the current hash table, and return the resulting
4312 value number if it exists in the hash table. Return NULL_TREE if
4313 it does not exist in the hash table. */
4315 static tree
4316 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
4318 vn_phi_s **slot;
4319 struct vn_phi_s *vp1;
4320 edge e;
4321 edge_iterator ei;
4323 vp1 = XALLOCAVAR (struct vn_phi_s,
4324 sizeof (struct vn_phi_s)
4325 + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
4327 /* Canonicalize the SSA_NAME's to their value number. */
4328 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4330 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4331 if (TREE_CODE (def) == SSA_NAME
4332 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4333 def = SSA_VAL (def);
4334 vp1->phiargs[e->dest_idx] = def;
4336 vp1->type = TREE_TYPE (gimple_phi_result (phi));
4337 vp1->block = gimple_bb (phi);
4338 /* Extract values of the controlling condition. */
4339 vp1->cclhs = NULL_TREE;
4340 vp1->ccrhs = NULL_TREE;
4341 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4342 if (EDGE_COUNT (idom1->succs) == 2)
4343 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4345 /* ??? We want to use SSA_VAL here. But possibly not
4346 allow VN_TOP. */
4347 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4348 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4350 vp1->hashcode = vn_phi_compute_hash (vp1);
4351 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
4352 if (!slot)
4353 return NULL_TREE;
4354 return (*slot)->result;
4357 /* Insert PHI into the current hash table with a value number of
4358 RESULT. */
4360 static vn_phi_t
4361 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
4363 vn_phi_s **slot;
4364 vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
4365 sizeof (vn_phi_s)
4366 + ((gimple_phi_num_args (phi) - 1)
4367 * sizeof (tree)));
4368 edge e;
4369 edge_iterator ei;
4371 /* Canonicalize the SSA_NAME's to their value number. */
4372 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4374 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4375 if (TREE_CODE (def) == SSA_NAME
4376 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4377 def = SSA_VAL (def);
4378 vp1->phiargs[e->dest_idx] = def;
4380 vp1->value_id = VN_INFO (result)->value_id;
4381 vp1->type = TREE_TYPE (gimple_phi_result (phi));
4382 vp1->block = gimple_bb (phi);
4383 /* Extract values of the controlling condition. */
4384 vp1->cclhs = NULL_TREE;
4385 vp1->ccrhs = NULL_TREE;
4386 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4387 if (EDGE_COUNT (idom1->succs) == 2)
4388 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4390 /* ??? We want to use SSA_VAL here. But possibly not
4391 allow VN_TOP. */
4392 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4393 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4395 vp1->result = result;
4396 vp1->hashcode = vn_phi_compute_hash (vp1);
4398 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
4399 gcc_assert (!*slot);
4401 *slot = vp1;
4402 vp1->next = last_inserted_phi;
4403 last_inserted_phi = vp1;
4404 return vp1;
4408 /* Return true if BB1 is dominated by BB2 taking into account edges
4409 that are not executable. */
4411 static bool
4412 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
4414 edge_iterator ei;
4415 edge e;
4417 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4418 return true;
4420 /* Before iterating we'd like to know whether there exists an
4421 (executable) path from bb2 to bb1 at all; if not we can
4422 directly return false. For now simply iterate once. */
4424 /* Iterate to the single executable bb1 predecessor. */
4425 if (EDGE_COUNT (bb1->preds) > 1)
4427 edge prede = NULL;
4428 FOR_EACH_EDGE (e, ei, bb1->preds)
4429 if (e->flags & EDGE_EXECUTABLE)
4431 if (prede)
4433 prede = NULL;
4434 break;
4436 prede = e;
4438 if (prede)
4440 bb1 = prede->src;
4442 /* Re-do the dominance check with changed bb1. */
4443 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4444 return true;
4448 /* Iterate to the single executable bb2 successor. */
4449 edge succe = NULL;
4450 FOR_EACH_EDGE (e, ei, bb2->succs)
4451 if (e->flags & EDGE_EXECUTABLE)
4453 if (succe)
4455 succe = NULL;
4456 break;
4458 succe = e;
4460 if (succe)
4462 /* Verify the reached block is only reached through succe.
4463 If there is only one edge we can spare us the dominator
4464 check and iterate directly. */
4465 if (EDGE_COUNT (succe->dest->preds) > 1)
4467 FOR_EACH_EDGE (e, ei, succe->dest->preds)
4468 if (e != succe
4469 && (e->flags & EDGE_EXECUTABLE))
4471 succe = NULL;
4472 break;
4475 if (succe)
4477 bb2 = succe->dest;
4479 /* Re-do the dominance check with changed bb2. */
4480 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4481 return true;
4485 /* We could now iterate updating bb1 / bb2. */
4486 return false;
4489 /* Set the value number of FROM to TO, return true if it has changed
4490 as a result. */
4492 static inline bool
4493 set_ssa_val_to (tree from, tree to)
4495 vn_ssa_aux_t from_info = VN_INFO (from);
4496 tree currval = from_info->valnum; // SSA_VAL (from)
4497 poly_int64 toff, coff;
4498 bool curr_undefined = false;
4499 bool curr_invariant = false;
4501 /* The only things we allow as value numbers are ssa_names
4502 and invariants. So assert that here. We don't allow VN_TOP
4503 as visiting a stmt should produce a value-number other than
4504 that.
4505 ??? Still VN_TOP can happen for unreachable code, so force
4506 it to varying in that case. Not all code is prepared to
4507 get VN_TOP on valueization. */
4508 if (to == VN_TOP)
4510 /* ??? When iterating and visiting PHI <undef, backedge-value>
4511 for the first time we rightfully get VN_TOP and we need to
4512 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
4513 With SCCVN we were simply lucky we iterated the other PHI
4514 cycles first and thus visited the backedge-value DEF. */
4515 if (currval == VN_TOP)
4516 goto set_and_exit;
4517 if (dump_file && (dump_flags & TDF_DETAILS))
4518 fprintf (dump_file, "Forcing value number to varying on "
4519 "receiving VN_TOP\n");
4520 to = from;
4523 gcc_checking_assert (to != NULL_TREE
4524 && ((TREE_CODE (to) == SSA_NAME
4525 && (to == from || SSA_VAL (to) == to))
4526 || is_gimple_min_invariant (to)));
4528 if (from != to)
4530 if (currval == from)
4532 if (dump_file && (dump_flags & TDF_DETAILS))
4534 fprintf (dump_file, "Not changing value number of ");
4535 print_generic_expr (dump_file, from);
4536 fprintf (dump_file, " from VARYING to ");
4537 print_generic_expr (dump_file, to);
4538 fprintf (dump_file, "\n");
4540 return false;
4542 curr_invariant = is_gimple_min_invariant (currval);
4543 curr_undefined = (TREE_CODE (currval) == SSA_NAME
4544 && ssa_undefined_value_p (currval, false));
4545 if (currval != VN_TOP
4546 && !curr_invariant
4547 && !curr_undefined
4548 && is_gimple_min_invariant (to))
4550 if (dump_file && (dump_flags & TDF_DETAILS))
4552 fprintf (dump_file, "Forcing VARYING instead of changing "
4553 "value number of ");
4554 print_generic_expr (dump_file, from);
4555 fprintf (dump_file, " from ");
4556 print_generic_expr (dump_file, currval);
4557 fprintf (dump_file, " (non-constant) to ");
4558 print_generic_expr (dump_file, to);
4559 fprintf (dump_file, " (constant)\n");
4561 to = from;
4563 else if (currval != VN_TOP
4564 && !curr_undefined
4565 && TREE_CODE (to) == SSA_NAME
4566 && ssa_undefined_value_p (to, false))
4568 if (dump_file && (dump_flags & TDF_DETAILS))
4570 fprintf (dump_file, "Forcing VARYING instead of changing "
4571 "value number of ");
4572 print_generic_expr (dump_file, from);
4573 fprintf (dump_file, " from ");
4574 print_generic_expr (dump_file, currval);
4575 fprintf (dump_file, " (non-undefined) to ");
4576 print_generic_expr (dump_file, to);
4577 fprintf (dump_file, " (undefined)\n");
4579 to = from;
4581 else if (TREE_CODE (to) == SSA_NAME
4582 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
4583 to = from;
4586 set_and_exit:
4587 if (dump_file && (dump_flags & TDF_DETAILS))
4589 fprintf (dump_file, "Setting value number of ");
4590 print_generic_expr (dump_file, from);
4591 fprintf (dump_file, " to ");
4592 print_generic_expr (dump_file, to);
4595 if (currval != to
4596 && !operand_equal_p (currval, to, 0)
4597 /* Different undefined SSA names are not actually different. See
4598 PR82320 for a testcase where we'd otherwise not terminate iteration. */
4599 && !(curr_undefined
4600 && TREE_CODE (to) == SSA_NAME
4601 && ssa_undefined_value_p (to, false))
4602 /* ??? For addresses involving volatile objects or types operand_equal_p
4603 does not reliably detect ADDR_EXPRs as equal. We know we are only
4604 getting invariant gimple addresses here, so can use
4605 get_addr_base_and_unit_offset to do this comparison. */
4606 && !(TREE_CODE (currval) == ADDR_EXPR
4607 && TREE_CODE (to) == ADDR_EXPR
4608 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
4609 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
4610 && known_eq (coff, toff)))
4612 if (to != from
4613 && currval != VN_TOP
4614 && !curr_undefined
4615 /* We do not want to allow lattice transitions from one value
4616 to another since that may lead to not terminating iteration
4617 (see PR95049). Since there's no convenient way to check
4618 for the allowed transition of VAL -> PHI (loop entry value,
4619 same on two PHIs, to same PHI result) we restrict the check
4620 to invariants. */
4621 && curr_invariant
4622 && is_gimple_min_invariant (to))
4624 if (dump_file && (dump_flags & TDF_DETAILS))
4625 fprintf (dump_file, " forced VARYING");
4626 to = from;
4628 if (dump_file && (dump_flags & TDF_DETAILS))
4629 fprintf (dump_file, " (changed)\n");
4630 from_info->valnum = to;
4631 return true;
4633 if (dump_file && (dump_flags & TDF_DETAILS))
4634 fprintf (dump_file, "\n");
4635 return false;
4638 /* Value number all definitions in STMT to themselves.
4639 Return true if a value number changed. */
4641 static bool
4642 defs_to_varying (gimple *stmt)
4644 bool changed = false;
4645 ssa_op_iter iter;
4646 def_operand_p defp;
4648 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
4650 tree def = DEF_FROM_PTR (defp);
4651 changed |= set_ssa_val_to (def, def);
4653 return changed;
4656 /* Visit a copy between LHS and RHS, return true if the value number
4657 changed. */
4659 static bool
4660 visit_copy (tree lhs, tree rhs)
4662 /* Valueize. */
4663 rhs = SSA_VAL (rhs);
4665 return set_ssa_val_to (lhs, rhs);
4668 /* Lookup a value for OP in type WIDE_TYPE, i.e. a value of type WIDE_TYPE
4669 that converted to the type of OP is the same as OP, or NULL_TREE. */
4671 static tree
4672 valueized_wider_op (tree wide_type, tree op)
4674 if (TREE_CODE (op) == SSA_NAME)
4675 op = vn_valueize (op);
4677 /* Either we have the op widened available. */
4678 tree ops[3] = {};
4679 ops[0] = op;
4680 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
4681 wide_type, ops, NULL);
4682 if (tem)
4683 return tem;
4685 /* Or the op is truncated from some existing value. */
4686 if (TREE_CODE (op) == SSA_NAME)
4688 gimple *def = SSA_NAME_DEF_STMT (op);
4689 if (is_gimple_assign (def)
4690 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
4692 tem = gimple_assign_rhs1 (def);
4693 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
4695 if (TREE_CODE (tem) == SSA_NAME)
4696 tem = vn_valueize (tem);
4697 return tem;
4702 /* For constants simply extend it. */
4703 if (TREE_CODE (op) == INTEGER_CST)
4704 return wide_int_to_tree (wide_type, wi::to_wide (op));
4706 return NULL_TREE;
4709 /* Visit a nary operator RHS, value number it, and return true if the
4710 value number of LHS has changed as a result. */
4712 static bool
4713 visit_nary_op (tree lhs, gassign *stmt)
4715 vn_nary_op_t vnresult;
4716 tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
4717 if (! result && vnresult)
4718 result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
4719 if (result)
4720 return set_ssa_val_to (lhs, result);
4722 /* Do some special pattern matching for redundancies of operations
4723 in different types. */
4724 enum tree_code code = gimple_assign_rhs_code (stmt);
4725 tree type = TREE_TYPE (lhs);
4726 tree rhs1 = gimple_assign_rhs1 (stmt);
4727 switch (code)
4729 CASE_CONVERT:
4730 /* Match arithmetic done in a different type where we can easily
4731 substitute the result from some earlier sign-changed or widened
4732 operation. */
4733 if (INTEGRAL_TYPE_P (type)
4734 && TREE_CODE (rhs1) == SSA_NAME
4735 /* We only handle sign-changes, zero-extension -> & mask or
4736 sign-extension if we know the inner operation doesn't
4737 overflow. */
4738 && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
4739 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4740 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4741 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
4742 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
4744 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4745 if (def
4746 && (gimple_assign_rhs_code (def) == PLUS_EXPR
4747 || gimple_assign_rhs_code (def) == MINUS_EXPR
4748 || gimple_assign_rhs_code (def) == MULT_EXPR))
4750 tree ops[3] = {};
4751 /* Either we have the op widened available. */
4752 ops[0] = valueized_wider_op (type,
4753 gimple_assign_rhs1 (def));
4754 if (ops[0])
4755 ops[1] = valueized_wider_op (type,
4756 gimple_assign_rhs2 (def));
4757 if (ops[0] && ops[1])
4759 ops[0] = vn_nary_op_lookup_pieces
4760 (2, gimple_assign_rhs_code (def), type, ops, NULL);
4761 /* We have the wider operation available. */
4762 if (ops[0]
4763 /* If the leader is a wrapping operation we can
4764 insert it for code hoisting w/o introducing
4765 undefined overflow. If it is not it has to
4766 be available. See PR86554. */
4767 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
4768 || (rpo_avail && vn_context_bb
4769 && rpo_avail->eliminate_avail (vn_context_bb,
4770 ops[0]))))
4772 unsigned lhs_prec = TYPE_PRECISION (type);
4773 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
4774 if (lhs_prec == rhs_prec
4775 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4776 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4778 gimple_match_op match_op (gimple_match_cond::UNCOND,
4779 NOP_EXPR, type, ops[0]);
4780 result = vn_nary_build_or_lookup (&match_op);
4781 if (result)
4783 bool changed = set_ssa_val_to (lhs, result);
4784 vn_nary_op_insert_stmt (stmt, result);
4785 return changed;
4788 else
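/* Otherwise the conversion zero-extends from RHS1's precision, so
   express it as masking the wider operation with the low rhs_prec
   bits. */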
4790 tree mask = wide_int_to_tree
4791 (type, wi::mask (rhs_prec, false, lhs_prec));
4792 gimple_match_op match_op (gimple_match_cond::UNCOND,
4793 BIT_AND_EXPR,
4794 TREE_TYPE (lhs),
4795 ops[0], mask);
4796 result = vn_nary_build_or_lookup (&match_op);
4797 if (result)
4799 bool changed = set_ssa_val_to (lhs, result);
4800 vn_nary_op_insert_stmt (stmt, result);
4801 return changed;
4808 break;
4809 case BIT_AND_EXPR:
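/* For a load masked with an integer constant, hand the mask to
   vn_reference_lookup so it can value-number the masked load even when
   only the bits covered by the mask are available. */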
4810 if (INTEGRAL_TYPE_P (type)
4811 && TREE_CODE (rhs1) == SSA_NAME
4812 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
4813 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
4814 && default_vn_walk_kind != VN_NOWALK
4815 && CHAR_BIT == 8
4816 && BITS_PER_UNIT == 8
4817 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
4818 && !integer_all_onesp (gimple_assign_rhs2 (stmt))
4819 && !integer_zerop (gimple_assign_rhs2 (stmt)))
4821 gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4822 if (ass
4823 && !gimple_has_volatile_ops (ass)
4824 && vn_get_stmt_kind (ass) == VN_REFERENCE)
4826 tree last_vuse = gimple_vuse (ass);
4827 tree op = gimple_assign_rhs1 (ass);
4828 tree result = vn_reference_lookup (op, gimple_vuse (ass),
4829 default_vn_walk_kind,
4830 NULL, true, &last_vuse,
4831 gimple_assign_rhs2 (stmt));
4832 if (result
4833 && useless_type_conversion_p (TREE_TYPE (result),
4834 TREE_TYPE (op)))
4835 return set_ssa_val_to (lhs, result);
4838 break;
4839 case TRUNC_DIV_EXPR:
4840 if (TYPE_UNSIGNED (type))
4841 break;
4842 /* Fallthru. */
4843 case RDIV_EXPR:
4844 case MULT_EXPR:
4845 /* Match up ([-]a){/,*}([-])b with v=a{/,*}b, replacing it with -v. */
4846 if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
4848 tree rhs[2];
4849 rhs[0] = rhs1;
4850 rhs[1] = gimple_assign_rhs2 (stmt);
4851 for (unsigned i = 0; i <= 1; ++i)
4853 unsigned j = i == 0 ? 1 : 0;
4854 tree ops[2];
4855 gimple_match_op match_op (gimple_match_cond::UNCOND,
4856 NEGATE_EXPR, type, rhs[i]);
4857 ops[i] = vn_nary_build_or_lookup_1 (&match_op, false);
4858 ops[j] = rhs[j];
4859 if (ops[i]
4860 && (ops[0] = vn_nary_op_lookup_pieces (2, code,
4861 type, ops, NULL)))
4863 gimple_match_op match_op (gimple_match_cond::UNCOND,
4864 NEGATE_EXPR, type, ops[0]);
4865 result = vn_nary_build_or_lookup (&match_op);
4866 if (result)
4868 bool changed = set_ssa_val_to (lhs, result);
4869 vn_nary_op_insert_stmt (stmt, result);
4870 return changed;
4875 break;
4876 default:
4877 break;
4880 bool changed = set_ssa_val_to (lhs, lhs);
4881 vn_nary_op_insert_stmt (stmt, lhs);
4882 return changed;
4885 /* Visit a call STMT storing into LHS. Return true if the value number
4886 of the LHS has changed as a result. */
4888 static bool
4889 visit_reference_op_call (tree lhs, gcall *stmt)
4891 bool changed = false;
4892 struct vn_reference_s vr1;
4893 vn_reference_t vnresult = NULL;
4894 tree vdef = gimple_vdef (stmt);
4896 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
4897 if (lhs && TREE_CODE (lhs) != SSA_NAME)
4898 lhs = NULL_TREE;
4900 vn_reference_lookup_call (stmt, &vnresult, &vr1);
4901 if (vnresult)
4903 if (vnresult->result_vdef && vdef)
4904 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
4905 else if (vdef)
4906 /* If the call was discovered to be pure or const reflect
4907 that as far as possible. */
4908 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
4910 if (!vnresult->result && lhs)
4911 vnresult->result = lhs;
4913 if (vnresult->result && lhs)
4914 changed |= set_ssa_val_to (lhs, vnresult->result);
4916 else
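/* The call was not found in the table. Value-number its defs
   conservatively, turning the VDEF into a copy of the VUSE when the
   callee was discovered to be const or pure, and record the call so a
   later identical call can be CSEd against it. */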
4918 vn_reference_t vr2;
4919 vn_reference_s **slot;
4920 tree vdef_val = vdef;
4921 if (vdef)
4923 /* If we value numbered the function of an indirect call to
4924 one not clobbering memory, value number its VDEF to its
4925 VUSE. */
4926 tree fn = gimple_call_fn (stmt);
4927 if (fn && TREE_CODE (fn) == SSA_NAME)
4929 fn = SSA_VAL (fn);
4930 if (TREE_CODE (fn) == ADDR_EXPR
4931 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
4932 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
4933 & (ECF_CONST | ECF_PURE)))
4934 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
4936 changed |= set_ssa_val_to (vdef, vdef_val);
4938 if (lhs)
4939 changed |= set_ssa_val_to (lhs, lhs);
4940 vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
4941 vr2->vuse = vr1.vuse;
4942 /* As we are not walking the virtual operand chain we know the
4943 shared_lookup_references are still original so we can re-use
4944 them here. */
4945 vr2->operands = vr1.operands.copy ();
4946 vr2->type = vr1.type;
4947 vr2->punned = vr1.punned;
4948 vr2->set = vr1.set;
4949 vr2->base_set = vr1.base_set;
4950 vr2->hashcode = vr1.hashcode;
4951 vr2->result = lhs;
4952 vr2->result_vdef = vdef_val;
4953 vr2->value_id = 0;
4954 slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
4955 INSERT);
4956 gcc_assert (!*slot);
4957 *slot = vr2;
4958 vr2->next = last_inserted_ref;
4959 last_inserted_ref = vr2;
4962 return changed;
4965 /* Visit a load from a reference operator RHS, part of STMT, value number it,
4966 and return true if the value number of the LHS has changed as a result. */
4968 static bool
4969 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
4971 bool changed = false;
4972 tree last_vuse;
4973 tree result;
4974 vn_reference_t res;
4976 last_vuse = gimple_vuse (stmt);
4977 result = vn_reference_lookup (op, gimple_vuse (stmt),
4978 default_vn_walk_kind, &res, true, &last_vuse);
4980 /* We handle type-punning through unions by value-numbering based
4981 on offset and size of the access. Be prepared to handle a
4982 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
4983 if (result
4984 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
4986 /* Avoid the type punning in case the result mode has padding where
4987 the op we look up has not. */
4988 if (maybe_lt (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (result))),
4989 GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (op)))))
4990 result = NULL_TREE;
4991 else
4993 /* We will be setting the value number of lhs to the value number
4994 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
4995 So first simplify and lookup this expression to see if it
4996 is already available. */
4997 gimple_match_op res_op (gimple_match_cond::UNCOND,
4998 VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
4999 result = vn_nary_build_or_lookup (&res_op);
5000 if (result
5001 && TREE_CODE (result) == SSA_NAME
5002 && VN_INFO (result)->needs_insertion)
5003 /* Track whether this is the canonical expression for different
5004 typed loads. We use that as a stopgap measure for code
5005 hoisting when dealing with floating point loads. */
5006 res->punned = true;
5009 /* When building the conversion fails, avoid inserting the reference
5010 again. */
5011 if (!result)
5012 return set_ssa_val_to (lhs, lhs);
5015 if (result)
5016 changed = set_ssa_val_to (lhs, result);
5017 else
5019 changed = set_ssa_val_to (lhs, lhs);
5020 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
5023 return changed;
5027 /* Visit a store to a reference operator LHS, part of STMT, value number it,
5028 and return true if the value number of the LHS has changed as a result. */
5030 static bool
5031 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
5033 bool changed = false;
5034 vn_reference_t vnresult = NULL;
5035 tree assign;
5036 bool resultsame = false;
5037 tree vuse = gimple_vuse (stmt);
5038 tree vdef = gimple_vdef (stmt);
5040 if (TREE_CODE (op) == SSA_NAME)
5041 op = SSA_VAL (op);
5043 /* First we want to lookup using the *vuses* from the store and see
5044 whether the last store to this location with the same address
5045 had the same value.
5047 The vuses represent the memory state before the store. If the
5048 memory state, address, and value of the store is the same as the
5049 last store to this location, then this store will produce the
5050 same memory state as that store.
5052 In this case the vdef versions for this store are value numbered to those
5053 vuse versions, since they represent the same memory state after
5054 this store.
5056 Otherwise, the vdefs for the store are used when inserting into
5057 the table, since the store generates a new memory state. */
5059 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
5060 if (vnresult
5061 && vnresult->result)
5063 tree result = vnresult->result;
5064 gcc_checking_assert (TREE_CODE (result) != SSA_NAME
5065 || result == SSA_VAL (result));
5066 resultsame = expressions_equal_p (result, op);
5067 if (resultsame)
5069 /* If the TBAA state isn't compatible for downstream reads
5070 we cannot value-number the VDEFs the same. */
5071 ao_ref lhs_ref;
5072 ao_ref_init (&lhs_ref, lhs);
5073 alias_set_type set = ao_ref_alias_set (&lhs_ref);
5074 alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
5075 if ((vnresult->set != set
5076 && ! alias_set_subset_of (set, vnresult->set))
5077 || (vnresult->base_set != base_set
5078 && ! alias_set_subset_of (base_set, vnresult->base_set)))
5079 resultsame = false;
5083 if (!resultsame)
5085 /* Only perform the following when being called from PRE
5086 which embeds tail merging. */
5087 if (default_vn_walk_kind == VN_WALK)
5089 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
5090 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
5091 if (vnresult)
5093 VN_INFO (vdef)->visited = true;
5094 return set_ssa_val_to (vdef, vnresult->result_vdef);
5098 if (dump_file && (dump_flags & TDF_DETAILS))
5100 fprintf (dump_file, "No store match\n");
5101 fprintf (dump_file, "Value numbering store ");
5102 print_generic_expr (dump_file, lhs);
5103 fprintf (dump_file, " to ");
5104 print_generic_expr (dump_file, op);
5105 fprintf (dump_file, "\n");
5107 /* Have to set value numbers before insert, since insert is
5108 going to valueize the references in-place. */
5109 if (vdef)
5110 changed |= set_ssa_val_to (vdef, vdef);
5112 /* Do not insert structure copies into the tables. */
5113 if (is_gimple_min_invariant (op)
5114 || is_gimple_reg (op))
5115 vn_reference_insert (lhs, op, vdef, NULL);
5117 /* Only perform the following when being called from PRE
5118 which embeds tail merging. */
5119 if (default_vn_walk_kind == VN_WALK)
5121 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
5122 vn_reference_insert (assign, lhs, vuse, vdef);
5125 else
5127 /* We had a match, so value number the vdef to have the value
5128 number of the vuse it came from. */
5130 if (dump_file && (dump_flags & TDF_DETAILS))
5131 fprintf (dump_file, "Store matched earlier value, "
5132 "value numbering store vdefs to matching vuses.\n");
5134 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
5137 return changed;
5140 /* Visit and value number PHI, return true if the value number
5141 changed. When BACKEDGES_VARYING_P is true then assume all
5142 backedge values are varying. When INSERTED is not NULL then
5143 this is just a lookahead query for a possible iteration; set INSERTED
5144 to true if we'd insert into the hashtable. */
5146 static bool
5147 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
5149 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
5150 tree backedge_val = NULL_TREE;
5151 bool seen_non_backedge = false;
5152 tree sameval_base = NULL_TREE;
5153 poly_int64 soff, doff;
5154 unsigned n_executable = 0;
5155 edge_iterator ei;
5156 edge e;
5158 /* TODO: We could check for this in initialization, and replace this
5159 with a gcc_assert. */
5160 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
5161 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
5163 /* We track whether a PHI was CSEd to, to avoid excessive iterations
5164 that would be necessary only because the PHI changed arguments
5165 but not value. */
5166 if (!inserted)
5167 gimple_set_plf (phi, GF_PLF_1, false);
5169 /* See if all non-TOP arguments have the same value. TOP is
5170 equivalent to everything, so we can ignore it. */
5171 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
5172 if (e->flags & EDGE_EXECUTABLE)
5174 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5176 ++n_executable;
5177 if (TREE_CODE (def) == SSA_NAME)
5179 if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
5180 def = SSA_VAL (def);
5181 if (e->flags & EDGE_DFS_BACK)
5182 backedge_val = def;
5184 if (!(e->flags & EDGE_DFS_BACK))
5185 seen_non_backedge = true;
5186 if (def == VN_TOP)
5188 /* Ignore undefined defs for sameval but record one. */
5189 else if (TREE_CODE (def) == SSA_NAME
5190 && ! virtual_operand_p (def)
5191 && ssa_undefined_value_p (def, false))
5192 seen_undef = def;
5193 else if (sameval == VN_TOP)
5194 sameval = def;
5195 else if (!expressions_equal_p (def, sameval))
5197 /* We know we're arriving only with invariant addresses here,
5198 try harder comparing them. We can do some caching here
5199 which we cannot do in expressions_equal_p. */
5200 if (TREE_CODE (def) == ADDR_EXPR
5201 && TREE_CODE (sameval) == ADDR_EXPR
5202 && sameval_base != (void *)-1)
5204 if (!sameval_base)
5205 sameval_base = get_addr_base_and_unit_offset
5206 (TREE_OPERAND (sameval, 0), &soff);
5207 if (!sameval_base)
5208 sameval_base = (tree)(void *)-1;
5209 else if ((get_addr_base_and_unit_offset
5210 (TREE_OPERAND (def, 0), &doff) == sameval_base)
5211 && known_eq (soff, doff))
5212 continue;
5214 sameval = NULL_TREE;
5215 break;
5219 /* If the value we want to use is flowing over the backedge and we
5220 should take it as VARYING but it has a non-VARYING value, drop to
5221 VARYING.
5222 If we value-number a virtual operand never value-number to the
5223 value from the backedge as that confuses the alias-walking code.
5224 See gcc.dg/torture/pr87176.c. If the value is the same on a
5225 non-backedge everything is OK though. */
5226 bool visited_p;
5227 if ((backedge_val
5228 && !seen_non_backedge
5229 && TREE_CODE (backedge_val) == SSA_NAME
5230 && sameval == backedge_val
5231 && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
5232 || SSA_VAL (backedge_val) != backedge_val))
5233 /* Do not value-number a virtual operand to something not visited
5234 though, given that this allows us to escape a region in alias walking. */
5235 || (sameval
5236 && TREE_CODE (sameval) == SSA_NAME
5237 && !SSA_NAME_IS_DEFAULT_DEF (sameval)
5238 && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
5239 && (SSA_VAL (sameval, &visited_p), !visited_p)))
5240 /* Note this just drops to VARYING without inserting the PHI into
5241 the hashes. */
5242 result = PHI_RESULT (phi);
5243 /* If none of the edges was executable keep the value-number at VN_TOP,
5244 if only a single edge is executable use its value. */
5245 else if (n_executable <= 1)
5246 result = seen_undef ? seen_undef : sameval;
5247 /* If we saw only undefined values and VN_TOP use one of the
5248 undefined values. */
5249 else if (sameval == VN_TOP)
5250 result = seen_undef ? seen_undef : sameval;
5251 /* First see if it is equivalent to a phi node in this block. We prefer
5252 this as it allows IV elimination - see PRs 66502 and 67167. */
5253 else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
5255 if (!inserted
5256 && TREE_CODE (result) == SSA_NAME
5257 && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
5259 gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
5260 if (dump_file && (dump_flags & TDF_DETAILS))
5262 fprintf (dump_file, "Marking CSEd to PHI node ");
5263 print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
5264 0, TDF_SLIM);
5265 fprintf (dump_file, "\n");
5269 /* If all values are the same use that, unless we've seen undefined
5270 values as well and the value isn't constant.
5271 CCP/copyprop have the same restriction to not remove uninit warnings. */
5272 else if (sameval
5273 && (! seen_undef || is_gimple_min_invariant (sameval)))
5274 result = sameval;
5275 else
5277 result = PHI_RESULT (phi);
5278 /* Only insert PHIs that are varying; for constant value numbers
5279 we'd mess up equivalences otherwise, as we are only comparing
5280 the immediate controlling predicates. */
5281 vn_phi_insert (phi, result, backedges_varying_p);
5282 if (inserted)
5283 *inserted = true;
5286 return set_ssa_val_to (PHI_RESULT (phi), result);
5289 /* Try to simplify RHS using equivalences and constant folding. */
5291 static tree
5292 try_to_simplify (gassign *stmt)
5294 enum tree_code code = gimple_assign_rhs_code (stmt);
5295 tree tem;
5297 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
5298 in this case; there is no point in doing extra work. */
5299 if (code == SSA_NAME)
5300 return NULL_TREE;
5302 /* First try constant folding based on our current lattice. */
5303 mprts_hook = vn_lookup_simplify_result;
5304 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
5305 mprts_hook = NULL;
5306 if (tem
5307 && (TREE_CODE (tem) == SSA_NAME
5308 || is_gimple_min_invariant (tem)))
5309 return tem;
5311 return NULL_TREE;
5314 /* Visit and value number STMT, return true if the value number
5315 changed. */
5317 static bool
5318 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
5320 bool changed = false;
5322 if (dump_file && (dump_flags & TDF_DETAILS))
5324 fprintf (dump_file, "Value numbering stmt = ");
5325 print_gimple_stmt (dump_file, stmt, 0);
5328 if (gimple_code (stmt) == GIMPLE_PHI)
5329 changed = visit_phi (stmt, NULL, backedges_varying_p);
5330 else if (gimple_has_volatile_ops (stmt))
5331 changed = defs_to_varying (stmt);
5332 else if (gassign *ass = dyn_cast <gassign *> (stmt))
5334 enum tree_code code = gimple_assign_rhs_code (ass);
5335 tree lhs = gimple_assign_lhs (ass);
5336 tree rhs1 = gimple_assign_rhs1 (ass);
5337 tree simplified;
5339 /* Shortcut for copies. Simplifying copies is pointless,
5340 since we copy the expression and value they represent. */
5341 if (code == SSA_NAME
5342 && TREE_CODE (lhs) == SSA_NAME)
5344 changed = visit_copy (lhs, rhs1);
5345 goto done;
5347 simplified = try_to_simplify (ass);
5348 if (simplified)
5350 if (dump_file && (dump_flags & TDF_DETAILS))
5352 fprintf (dump_file, "RHS ");
5353 print_gimple_expr (dump_file, ass, 0);
5354 fprintf (dump_file, " simplified to ");
5355 print_generic_expr (dump_file, simplified);
5356 fprintf (dump_file, "\n");
5359 /* Setting value numbers to constants will occasionally
5360 screw up phi congruence because constants are not
5361 uniquely associated with a single ssa name that can be
5362 looked up. */
5363 if (simplified
5364 && is_gimple_min_invariant (simplified)
5365 && TREE_CODE (lhs) == SSA_NAME)
5367 changed = set_ssa_val_to (lhs, simplified);
5368 goto done;
5370 else if (simplified
5371 && TREE_CODE (simplified) == SSA_NAME
5372 && TREE_CODE (lhs) == SSA_NAME)
5374 changed = visit_copy (lhs, simplified);
5375 goto done;
5378 if ((TREE_CODE (lhs) == SSA_NAME
5379 /* We can substitute SSA_NAMEs that are live over
5380 abnormal edges with their constant value. */
5381 && !(gimple_assign_copy_p (ass)
5382 && is_gimple_min_invariant (rhs1))
5383 && !(simplified
5384 && is_gimple_min_invariant (simplified))
5385 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
5386 /* Stores or copies from SSA_NAMEs that are live over
5387 abnormal edges are a problem. */
5388 || (code == SSA_NAME
5389 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
5390 changed = defs_to_varying (ass);
5391 else if (REFERENCE_CLASS_P (lhs)
5392 || DECL_P (lhs))
5393 changed = visit_reference_op_store (lhs, rhs1, ass);
5394 else if (TREE_CODE (lhs) == SSA_NAME)
5396 if ((gimple_assign_copy_p (ass)
5397 && is_gimple_min_invariant (rhs1))
5398 || (simplified
5399 && is_gimple_min_invariant (simplified)))
5401 if (simplified)
5402 changed = set_ssa_val_to (lhs, simplified);
5403 else
5404 changed = set_ssa_val_to (lhs, rhs1);
5406 else
5408 /* Visit the original statement. */
5409 switch (vn_get_stmt_kind (ass))
5411 case VN_NARY:
5412 changed = visit_nary_op (lhs, ass);
5413 break;
5414 case VN_REFERENCE:
5415 changed = visit_reference_op_load (lhs, rhs1, ass);
5416 break;
5417 default:
5418 changed = defs_to_varying (ass);
5419 break;
5423 else
5424 changed = defs_to_varying (ass);
5426 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5428 tree lhs = gimple_call_lhs (call_stmt);
5429 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5431 /* Try constant folding based on our current lattice. */
5432 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
5433 vn_valueize);
5434 if (simplified)
5436 if (dump_file && (dump_flags & TDF_DETAILS))
5438 fprintf (dump_file, "call ");
5439 print_gimple_expr (dump_file, call_stmt, 0);
5440 fprintf (dump_file, " simplified to ");
5441 print_generic_expr (dump_file, simplified);
5442 fprintf (dump_file, "\n");
5445 /* Setting value numbers to constants will occasionally
5446 screw up phi congruence because constants are not
5447 uniquely associated with a single ssa name that can be
5448 looked up. */
5449 if (simplified
5450 && is_gimple_min_invariant (simplified))
5452 changed = set_ssa_val_to (lhs, simplified);
5453 if (gimple_vdef (call_stmt))
5454 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
5455 SSA_VAL (gimple_vuse (call_stmt)));
5456 goto done;
5458 else if (simplified
5459 && TREE_CODE (simplified) == SSA_NAME)
5461 changed = visit_copy (lhs, simplified);
5462 if (gimple_vdef (call_stmt))
5463 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
5464 SSA_VAL (gimple_vuse (call_stmt)));
5465 goto done;
5467 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
5469 changed = defs_to_varying (call_stmt);
5470 goto done;
5474 /* Pick up flags from a devirtualization target. */
5475 tree fn = gimple_call_fn (stmt);
5476 int extra_fnflags = 0;
5477 if (fn && TREE_CODE (fn) == SSA_NAME)
5479 fn = SSA_VAL (fn);
5480 if (TREE_CODE (fn) == ADDR_EXPR
5481 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
5482 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
5484 if (!gimple_call_internal_p (call_stmt)
5485 && (/* Calls to the same function with the same vuse
5486 and the same operands do not necessarily return the same
5487 value, unless they're pure or const. */
5488 ((gimple_call_flags (call_stmt) | extra_fnflags)
5489 & (ECF_PURE | ECF_CONST))
5490 /* If calls have a vdef, subsequent calls won't have
5491 the same incoming vuse. So, if 2 calls with vdef have the
5492 same vuse, we know they're not subsequent.
5493 We can value number 2 non-subsequent calls to the same
5494 function with the same vuse and the same operands as the
5495 same, because there is no code in the program that can
5496 compare the 2 values... */
5497 || (gimple_vdef (call_stmt)
5498 /* ... unless the call returns a pointer which does
5499 not alias with anything else. In which case the
5500 information that the values are distinct is encoded
5501 in the IL. */
5502 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
5503 /* Only perform the following when being called from PRE
5504 which embeds tail merging. */
5505 && default_vn_walk_kind == VN_WALK)))
5506 changed = visit_reference_op_call (lhs, call_stmt);
5507 else
5508 changed = defs_to_varying (call_stmt);
5510 else
5511 changed = defs_to_varying (stmt);
5512 done:
5513 return changed;
5517 /* Allocate a value number table. */
5519 static void
5520 allocate_vn_table (vn_tables_t table, unsigned size)
5522 table->phis = new vn_phi_table_type (size);
5523 table->nary = new vn_nary_op_table_type (size);
5524 table->references = new vn_reference_table_type (size);
5527 /* Free a value number table. */
5529 static void
5530 free_vn_table (vn_tables_t table)
5532 /* Walk over elements and release vectors. */
5533 vn_reference_iterator_type hir;
5534 vn_reference_t vr;
5535 FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
5536 vr->operands.release ();
5537 delete table->phis;
5538 table->phis = NULL;
5539 delete table->nary;
5540 table->nary = NULL;
5541 delete table->references;
5542 table->references = NULL;
5545 /* Set *ID according to RESULT. */
5547 static void
5548 set_value_id_for_result (tree result, unsigned int *id)
5550 if (result && TREE_CODE (result) == SSA_NAME)
5551 *id = VN_INFO (result)->value_id;
5552 else if (result && is_gimple_min_invariant (result))
5553 *id = get_or_alloc_constant_value_id (result);
5554 else
5555 *id = get_next_value_id ();
5558 /* Set the value ids in the valid hash tables. */
5560 static void
5561 set_hashtable_value_ids (void)
5563 vn_nary_op_iterator_type hin;
5564 vn_phi_iterator_type hip;
5565 vn_reference_iterator_type hir;
5566 vn_nary_op_t vno;
5567 vn_reference_t vr;
5568 vn_phi_t vp;
5570 /* Now set the value ids of the things we had put in the hash
5571 table. */
5573 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
5574 if (! vno->predicated_values)
5575 set_value_id_for_result (vno->u.result, &vno->value_id);
5577 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
5578 set_value_id_for_result (vp->result, &vp->value_id);
5580 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
5581 hir)
5582 set_value_id_for_result (vr->result, &vr->value_id);
5585 /* Return the maximum value id we have ever seen. */
5587 unsigned int
5588 get_max_value_id (void)
5590 return next_value_id;
5593 /* Return the maximum constant value id we have ever seen. */
5595 unsigned int
5596 get_max_constant_value_id (void)
5598 return -next_constant_value_id;
5601 /* Return the next unique value id. */
5603 unsigned int
5604 get_next_value_id (void)
5606 gcc_checking_assert ((int)next_value_id > 0);
5607 return next_value_id++;
5610 /* Return the next unique value id for constants. */
5612 unsigned int
5613 get_next_constant_value_id (void)
5615 gcc_checking_assert (next_constant_value_id < 0);
5616 return next_constant_value_id--;
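/* Constant value ids are handed out as negative numbers counting down
   from -1 while SSA value ids count up from 1, so the two ranges never
   collide; this is why the maximum constant id above is the negation
   of the counter.  */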
5620 /* Compare two expressions E1 and E2 and return true if they are equal. */
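/* A rough sketch of the behavior: two separately built INTEGER_CSTs
   with the same value compare equal via operand_equal_p, VN_TOP
   compares equal to anything, and two distinct SSA_NAMEs never compare
   equal here because equal SSA names are already pointer-equal.  */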
5622 bool
5623 expressions_equal_p (tree e1, tree e2)
5625 /* The obvious case. */
5626 if (e1 == e2)
5627 return true;
5629 /* If either one is VN_TOP consider them equal. */
5630 if (e1 == VN_TOP || e2 == VN_TOP)
5631 return true;
5633 /* SSA_NAMEs compare pointer equal. */
5634 if (TREE_CODE (e1) == SSA_NAME || TREE_CODE (e2) == SSA_NAME)
5635 return false;
5637 /* Now perform the actual comparison. */
5638 if (TREE_CODE (e1) == TREE_CODE (e2)
5639 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5640 return true;
5642 return false;
5646 /* Return true if the nary operation NARY may trap. This is a copy
5647 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
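/* For example, a TRUNC_DIV_EXPR whose divisor is not a known non-zero
   constant may trap, as may signed arithmetic when TYPE_OVERFLOW_TRAPS
   is set (-ftrapv), while plain unsigned wrapping arithmetic does not
   trap by itself.  */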
5649 bool
5650 vn_nary_may_trap (vn_nary_op_t nary)
5652 tree type;
5653 tree rhs2 = NULL_TREE;
5654 bool honor_nans = false;
5655 bool honor_snans = false;
5656 bool fp_operation = false;
5657 bool honor_trapv = false;
5658 bool handled, ret;
5659 unsigned i;
5661 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5662 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5663 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5665 type = nary->type;
5666 fp_operation = FLOAT_TYPE_P (type);
5667 if (fp_operation)
5669 honor_nans = flag_trapping_math && !flag_finite_math_only;
5670 honor_snans = flag_signaling_nans != 0;
5672 else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type))
5673 honor_trapv = true;
5675 if (nary->length >= 2)
5676 rhs2 = nary->op[1];
5677 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5678 honor_trapv, honor_nans, honor_snans,
5679 rhs2, &handled);
5680 if (handled && ret)
5681 return true;
5683 for (i = 0; i < nary->length; ++i)
5684 if (tree_could_trap_p (nary->op[i]))
5685 return true;
5687 return false;
5690 /* Return true if the reference operation REF may trap. */
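/* For example, an ARRAY_REF indexed by an SSA_NAME or an access whose
   base is an SSA_NAME pointer may trap, while a COMPONENT_REF at a
   constant position adds no trap possibility by itself.  */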
5692 bool
5693 vn_reference_may_trap (vn_reference_t ref)
5695 switch (ref->operands[0].opcode)
5697 case MODIFY_EXPR:
5698 case CALL_EXPR:
5699 /* We do not handle calls. */
5700 case ADDR_EXPR:
5701 /* And toplevel address computations never trap. */
5702 return false;
5703 default:;
5706 vn_reference_op_t op;
5707 unsigned i;
5708 FOR_EACH_VEC_ELT (ref->operands, i, op)
5710 switch (op->opcode)
5712 case WITH_SIZE_EXPR:
5713 case TARGET_MEM_REF:
5714 /* Always variable. */
5715 return true;
5716 case COMPONENT_REF:
5717 if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
5718 return true;
5719 break;
5720 case ARRAY_RANGE_REF:
5721 case ARRAY_REF:
5722 if (TREE_CODE (op->op0) == SSA_NAME)
5723 return true;
5724 break;
5725 case MEM_REF:
5726 /* Nothing interesting in itself, the base is separate. */
5727 break;
5728 /* The following are the address bases. */
5729 case SSA_NAME:
5730 return true;
5731 case ADDR_EXPR:
5732 if (op->op0)
5733 return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
5734 return false;
5735 default:;
5738 return false;
5741 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5742 bitmap inserted_exprs_)
5743 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5744 el_todo (0), eliminations (0), insertions (0),
5745 inserted_exprs (inserted_exprs_)
5747 need_eh_cleanup = BITMAP_ALLOC (NULL);
5748 need_ab_cleanup = BITMAP_ALLOC (NULL);
5751 eliminate_dom_walker::~eliminate_dom_walker ()
5753 BITMAP_FREE (need_eh_cleanup);
5754 BITMAP_FREE (need_ab_cleanup);
5757 /* Return a leader for OP that is available at the current point of the
5758 eliminate domwalk. */
5760 tree
5761 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
5763 tree valnum = VN_INFO (op)->valnum;
5764 if (TREE_CODE (valnum) == SSA_NAME)
5766 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5767 return valnum;
5768 if (avail.length () > SSA_NAME_VERSION (valnum))
5769 return avail[SSA_NAME_VERSION (valnum)];
5771 else if (is_gimple_min_invariant (valnum))
5772 return valnum;
5773 return NULL_TREE;
5776 /* At the current point of the eliminate domwalk make OP available. */
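/* Availability is tracked in AVAIL, indexed by the SSA_NAME_VERSION of
   the value number.  AVAIL_STACK records what to restore, the previous
   leader if there was one and the new leader otherwise, so that
   after_dom_children can pop the entries of a finished block and
   either restore the old leader or clear the slot.  Leaders therefore
   stay valid exactly within their dominating region.  */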
5778 void
5779 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
5781 tree valnum = VN_INFO (op)->valnum;
5782 if (TREE_CODE (valnum) == SSA_NAME)
5784 if (avail.length () <= SSA_NAME_VERSION (valnum))
5785 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1, true);
5786 tree pushop = op;
5787 if (avail[SSA_NAME_VERSION (valnum)])
5788 pushop = avail[SSA_NAME_VERSION (valnum)];
5789 avail_stack.safe_push (pushop);
5790 avail[SSA_NAME_VERSION (valnum)] = op;
5794 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5795 the leader for the expression if insertion was successful. */
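/* For example, if VAL is recorded as computed by the single statement
   val = (int) x_1 and x_1 (or its leader) is available at *GSI, a
   fresh SSA name defined as the same conversion of that leader is
   inserted and returned.  The names here are for illustration only.  */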
5797 tree
5798 eliminate_dom_walker::eliminate_insert (basic_block bb,
5799 gimple_stmt_iterator *gsi, tree val)
5801 /* We can insert a sequence with a single assignment only. */
5802 gimple_seq stmts = VN_INFO (val)->expr;
5803 if (!gimple_seq_singleton_p (stmts))
5804 return NULL_TREE;
5805 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5806 if (!stmt
5807 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5808 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5809 && gimple_assign_rhs_code (stmt) != NEGATE_EXPR
5810 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5811 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5812 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5813 return NULL_TREE;
5815 tree op = gimple_assign_rhs1 (stmt);
5816 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5817 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5818 op = TREE_OPERAND (op, 0);
5819 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
5820 if (!leader)
5821 return NULL_TREE;
5823 tree res;
5824 stmts = NULL;
5825 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5826 res = gimple_build (&stmts, BIT_FIELD_REF,
5827 TREE_TYPE (val), leader,
5828 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5829 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5830 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5831 res = gimple_build (&stmts, BIT_AND_EXPR,
5832 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5833 else
5834 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5835 TREE_TYPE (val), leader);
5836 if (TREE_CODE (res) != SSA_NAME
5837 || SSA_NAME_IS_DEFAULT_DEF (res)
5838 || gimple_bb (SSA_NAME_DEF_STMT (res)))
5840 gimple_seq_discard (stmts);
5842 /* During propagation we have to treat SSA info conservatively
5843 and thus we can end up simplifying the inserted expression
5844 at elimination time to something not defined in stmts. */
5845 /* But then this is a redundancy we failed to detect, which means
5846 res now has two values. That doesn't play well with how
5847 we track availability here, so give up. */
5848 if (dump_file && (dump_flags & TDF_DETAILS))
5850 if (TREE_CODE (res) == SSA_NAME)
5851 res = eliminate_avail (bb, res);
5852 if (res)
5854 fprintf (dump_file, "Failed to insert expression for value ");
5855 print_generic_expr (dump_file, val);
5856 fprintf (dump_file, " which is really fully redundant to ");
5857 print_generic_expr (dump_file, res);
5858 fprintf (dump_file, "\n");
5862 return NULL_TREE;
5864 else
5866 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5867 vn_ssa_aux_t vn_info = VN_INFO (res);
5868 vn_info->valnum = val;
5869 vn_info->visited = true;
5872 insertions++;
5873 if (dump_file && (dump_flags & TDF_DETAILS))
5875 fprintf (dump_file, "Inserted ");
5876 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5879 return res;
5882 void
5883 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
5885 tree sprime = NULL_TREE;
5886 gimple *stmt = gsi_stmt (*gsi);
5887 tree lhs = gimple_get_lhs (stmt);
5888 if (lhs && TREE_CODE (lhs) == SSA_NAME
5889 && !gimple_has_volatile_ops (stmt)
5890 /* See PR43491. Do not replace a global register variable when
5891 it is the RHS of an assignment. Do replace local register
5892 variables since gcc does not guarantee a local variable will
5893 be allocated in a register.
5894 ??? The fix isn't effective here. This should instead
5895 be ensured by not value-numbering them the same but treating
5896 them like volatiles? */
5897 && !(gimple_assign_single_p (stmt)
5898 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5899 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5900 && is_global_var (gimple_assign_rhs1 (stmt)))))
5902 sprime = eliminate_avail (b, lhs);
5903 if (!sprime)
5905 /* If there is no existing usable leader but SCCVN thinks
5906 it has an expression it wants to use as replacement,
5907 insert that. */
5908 tree val = VN_INFO (lhs)->valnum;
5909 vn_ssa_aux_t vn_info;
5910 if (val != VN_TOP
5911 && TREE_CODE (val) == SSA_NAME
5912 && (vn_info = VN_INFO (val), true)
5913 && vn_info->needs_insertion
5914 && vn_info->expr != NULL
5915 && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
5916 eliminate_push_avail (b, sprime);
5919 /* If this now constitutes a copy duplicate points-to
5920 and range info appropriately. This is especially
5921 important for inserted code. See tree-ssa-copy.c
5922 for similar code. */
5923 if (sprime
5924 && TREE_CODE (sprime) == SSA_NAME)
5926 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5927 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5928 && SSA_NAME_PTR_INFO (lhs)
5929 && ! SSA_NAME_PTR_INFO (sprime))
5931 duplicate_ssa_name_ptr_info (sprime,
5932 SSA_NAME_PTR_INFO (lhs));
5933 if (b != sprime_b)
5934 reset_flow_sensitive_info (sprime);
5936 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5937 && SSA_NAME_RANGE_INFO (lhs)
5938 && ! SSA_NAME_RANGE_INFO (sprime)
5939 && b == sprime_b)
5940 duplicate_ssa_name_range_info (sprime,
5941 SSA_NAME_RANGE_TYPE (lhs),
5942 SSA_NAME_RANGE_INFO (lhs));
5945 /* Inhibit the use of an inserted PHI on a loop header when
5946 the address of the memory reference is a simple induction
5947 variable. In other cases the vectorizer won't do anything
5948 anyway (either it's loop invariant or a complicated
5949 expression). */
5950 if (sprime
5951 && TREE_CODE (sprime) == SSA_NAME
5952 && do_pre
5953 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5954 && loop_outer (b->loop_father)
5955 && has_zero_uses (sprime)
5956 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5957 && gimple_assign_load_p (stmt))
5959 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5960 basic_block def_bb = gimple_bb (def_stmt);
5961 if (gimple_code (def_stmt) == GIMPLE_PHI
5962 && def_bb->loop_father->header == def_bb)
5964 loop_p loop = def_bb->loop_father;
5965 ssa_op_iter iter;
5966 tree op;
5967 bool found = false;
5968 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5970 affine_iv iv;
5971 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5972 if (def_bb
5973 && flow_bb_inside_loop_p (loop, def_bb)
5974 && simple_iv (loop, loop, op, &iv, true))
5976 found = true;
5977 break;
5980 if (found)
5982 if (dump_file && (dump_flags & TDF_DETAILS))
5984 fprintf (dump_file, "Not replacing ");
5985 print_gimple_expr (dump_file, stmt, 0);
5986 fprintf (dump_file, " with ");
5987 print_generic_expr (dump_file, sprime);
5988 fprintf (dump_file, " which would add a loop"
5989 " carried dependence to loop %d\n",
5990 loop->num);
5992 /* Don't keep sprime available. */
5993 sprime = NULL_TREE;
5998 if (sprime)
6000 /* If we can propagate the value computed for LHS into
6001 all uses don't bother doing anything with this stmt. */
6002 if (may_propagate_copy (lhs, sprime))
6004 /* Mark it for removal. */
6005 to_remove.safe_push (stmt);
6007 /* ??? Don't count copy/constant propagations. */
6008 if (gimple_assign_single_p (stmt)
6009 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
6010 || gimple_assign_rhs1 (stmt) == sprime))
6011 return;
6013 if (dump_file && (dump_flags & TDF_DETAILS))
6015 fprintf (dump_file, "Replaced ");
6016 print_gimple_expr (dump_file, stmt, 0);
6017 fprintf (dump_file, " with ");
6018 print_generic_expr (dump_file, sprime);
6019 fprintf (dump_file, " in all uses of ");
6020 print_gimple_stmt (dump_file, stmt, 0);
6023 eliminations++;
6024 return;
6027 /* If this is an assignment from our leader (which
6028 happens in the case the value-number is a constant)
6029 then there is nothing to do. Likewise if we run into
6030 inserted code that needed a conversion because of
6031 our type-agnostic value-numbering of loads. */
6032 if ((gimple_assign_single_p (stmt)
6033 || (is_gimple_assign (stmt)
6034 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
6035 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)))
6036 && sprime == gimple_assign_rhs1 (stmt))
6037 return;
6039 /* Else replace its RHS. */
6040 if (dump_file && (dump_flags & TDF_DETAILS))
6042 fprintf (dump_file, "Replaced ");
6043 print_gimple_expr (dump_file, stmt, 0);
6044 fprintf (dump_file, " with ");
6045 print_generic_expr (dump_file, sprime);
6046 fprintf (dump_file, " in ");
6047 print_gimple_stmt (dump_file, stmt, 0);
6049 eliminations++;
6051 bool can_make_abnormal_goto = (is_gimple_call (stmt)
6052 && stmt_can_make_abnormal_goto (stmt));
6053 gimple *orig_stmt = stmt;
6054 if (!useless_type_conversion_p (TREE_TYPE (lhs),
6055 TREE_TYPE (sprime)))
6057 /* We preserve conversions to but not from function or method
6058 types. This asymmetry makes it necessary to re-instantiate
6059 conversions here. */
6060 if (POINTER_TYPE_P (TREE_TYPE (lhs))
6061 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
6062 sprime = fold_convert (TREE_TYPE (lhs), sprime);
6063 else
6064 gcc_unreachable ();
6066 tree vdef = gimple_vdef (stmt);
6067 tree vuse = gimple_vuse (stmt);
6068 propagate_tree_value_into_stmt (gsi, sprime);
6069 stmt = gsi_stmt (*gsi);
6070 update_stmt (stmt);
6071 /* In case the VDEF on the original stmt was released, value-number
6072 it to the VUSE. This is to make vuse_ssa_val able to skip
6073 released virtual operands. */
6074 if (vdef != gimple_vdef (stmt))
6076 gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
6077 VN_INFO (vdef)->valnum = vuse;
6080 /* If we removed EH side-effects from the statement, clean
6081 its EH information. */
6082 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
6084 bitmap_set_bit (need_eh_cleanup,
6085 gimple_bb (stmt)->index);
6086 if (dump_file && (dump_flags & TDF_DETAILS))
6087 fprintf (dump_file, " Removed EH side-effects.\n");
6090 /* Likewise for AB side-effects. */
6091 if (can_make_abnormal_goto
6092 && !stmt_can_make_abnormal_goto (stmt))
6094 bitmap_set_bit (need_ab_cleanup,
6095 gimple_bb (stmt)->index);
6096 if (dump_file && (dump_flags & TDF_DETAILS))
6097 fprintf (dump_file, " Removed AB side-effects.\n");
6100 return;
6104 /* If the statement is a scalar store, see if the expression
6105 has the same value number as its rhs. If so, the store is
6106 dead. */
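/* An illustrative sketch (names invented): in

     tmp_1 = *p_2;
     ...
     *p_2 = tmp_1;

   the store writes back the value the location is already known to
   hold, so if no intervening store changed it the lookup below finds
   the same value and the store can be queued for removal.  */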
6107 if (gimple_assign_single_p (stmt)
6108 && !gimple_has_volatile_ops (stmt)
6109 && !is_gimple_reg (gimple_assign_lhs (stmt))
6110 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
6111 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
6113 tree rhs = gimple_assign_rhs1 (stmt);
6114 vn_reference_t vnresult;
6115 /* ??? gcc.dg/torture/pr91445.c shows that we look up a boolean
6116 typed load of a byte known to be 0x11 as 1, so a store of
6117 a boolean 1 is detected as redundant. Because of this we
6118 have to make sure to look up with a ref whose size
6119 matches the precision. */
6120 tree lookup_lhs = lhs;
6121 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6122 && (TREE_CODE (lhs) != COMPONENT_REF
6123 || !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
6124 && !type_has_mode_precision_p (TREE_TYPE (lhs)))
6126 if (TREE_CODE (lhs) == COMPONENT_REF
6127 || TREE_CODE (lhs) == MEM_REF)
6129 tree ltype = build_nonstandard_integer_type
6130 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs))),
6131 TYPE_UNSIGNED (TREE_TYPE (lhs)));
6132 if (TREE_CODE (lhs) == COMPONENT_REF)
6134 tree foff = component_ref_field_offset (lhs);
6135 tree f = TREE_OPERAND (lhs, 1);
6136 if (!poly_int_tree_p (foff))
6137 lookup_lhs = NULL_TREE;
6138 else
6139 lookup_lhs = build3 (BIT_FIELD_REF, ltype,
6140 TREE_OPERAND (lhs, 0),
6141 TYPE_SIZE (TREE_TYPE (lhs)),
6142 bit_from_pos
6143 (foff, DECL_FIELD_BIT_OFFSET (f)));
6145 else
6146 lookup_lhs = build2 (MEM_REF, ltype,
6147 TREE_OPERAND (lhs, 0),
6148 TREE_OPERAND (lhs, 1));
6150 else
6151 lookup_lhs = NULL_TREE;
6153 tree val = NULL_TREE;
6154 if (lookup_lhs)
6155 val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt),
6156 VN_WALKREWRITE, &vnresult, false);
6157 if (TREE_CODE (rhs) == SSA_NAME)
6158 rhs = VN_INFO (rhs)->valnum;
6159 if (val
6160 && (operand_equal_p (val, rhs, 0)
6161 /* Due to the bitfield lookups above we can get bit
6162 interpretations of the same RHS as values here. Those
6163 are redundant as well. */
6164 || (TREE_CODE (val) == SSA_NAME
6165 && gimple_assign_single_p (SSA_NAME_DEF_STMT (val))
6166 && (val = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (val)))
6167 && TREE_CODE (val) == VIEW_CONVERT_EXPR
6168 && TREE_OPERAND (val, 0) == rhs)))
6170 /* We can only remove the later store if the former aliases
6171 at least all accesses the later one does or if the store
6172 was to readonly memory storing the same value. */
6173 ao_ref lhs_ref;
6174 ao_ref_init (&lhs_ref, lhs);
6175 alias_set_type set = ao_ref_alias_set (&lhs_ref);
6176 alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
6177 if (! vnresult
6178 || ((vnresult->set == set
6179 || alias_set_subset_of (set, vnresult->set))
6180 && (vnresult->base_set == base_set
6181 || alias_set_subset_of (base_set, vnresult->base_set))))
6183 if (dump_file && (dump_flags & TDF_DETAILS))
6185 fprintf (dump_file, "Deleted redundant store ");
6186 print_gimple_stmt (dump_file, stmt, 0);
6189 /* Queue stmt for removal. */
6190 to_remove.safe_push (stmt);
6191 return;
6196 /* If this is a control statement for which value numbering left
6197 one of the outgoing edges marked not executable, force the
6198 condition in a way consistent with that. */
6199 if (gcond *cond = dyn_cast <gcond *> (stmt))
6201 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
6202 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
6204 if (dump_file && (dump_flags & TDF_DETAILS))
6206 fprintf (dump_file, "Removing unexecutable edge from ");
6207 print_gimple_stmt (dump_file, stmt, 0);
6209 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
6210 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
6211 gimple_cond_make_true (cond);
6212 else
6213 gimple_cond_make_false (cond);
6214 update_stmt (cond);
6215 el_todo |= TODO_cleanup_cfg;
6216 return;
6220 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
6221 bool was_noreturn = (is_gimple_call (stmt)
6222 && gimple_call_noreturn_p (stmt));
6223 tree vdef = gimple_vdef (stmt);
6224 tree vuse = gimple_vuse (stmt);
6226 /* If we didn't replace the whole stmt (or propagate the result
6227 into all uses), replace all uses on this stmt with their
6228 leaders. */
6229 bool modified = false;
6230 use_operand_p use_p;
6231 ssa_op_iter iter;
6232 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
6234 tree use = USE_FROM_PTR (use_p);
6235 /* ??? The call code above leaves stmt operands un-updated. */
6236 if (TREE_CODE (use) != SSA_NAME)
6237 continue;
6238 tree sprime;
6239 if (SSA_NAME_IS_DEFAULT_DEF (use))
6240 /* ??? For default defs BB shouldn't matter, but we have to
6241 solve the inconsistency between rpo eliminate and
6242 dom eliminate avail valueization first. */
6243 sprime = eliminate_avail (b, use);
6244 else
6245 /* Look for something available at the definition block of the argument.
6246 This avoids inconsistencies between availability there which
6247 decides if the stmt can be removed and availability at the
6248 use site. The SSA property ensures that things available
6249 at the definition are also available at uses. */
6250 sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
6251 if (sprime && sprime != use
6252 && may_propagate_copy (use, sprime)
6253 /* We substitute into debug stmts to avoid excessive
6254 debug temporaries created by removed stmts, but we need
6255 to avoid doing so for inserted sprimes as we never want
6256 to create debug temporaries for them. */
6257 && (!inserted_exprs
6258 || TREE_CODE (sprime) != SSA_NAME
6259 || !is_gimple_debug (stmt)
6260 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
6262 propagate_value (use_p, sprime);
6263 modified = true;
6267 /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
6268 into, which is a requirement for the IPA devirt machinery. */
6269 gimple *old_stmt = stmt;
6270 if (modified)
6272 /* If a formerly non-invariant ADDR_EXPR is turned into an
6273 invariant one it was on a separate stmt. */
6274 if (gimple_assign_single_p (stmt)
6275 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
6276 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
6277 gimple_stmt_iterator prev = *gsi;
6278 gsi_prev (&prev);
6279 if (fold_stmt (gsi))
6281 /* fold_stmt may have created new stmts in between
6282 the previous stmt and the folded stmt. Mark
6283 all defs created there as varying to not confuse
6284 the SCCVN machinery as we're using that even during
6285 elimination. */
6286 if (gsi_end_p (prev))
6287 prev = gsi_start_bb (b);
6288 else
6289 gsi_next (&prev);
6290 if (gsi_stmt (prev) != gsi_stmt (*gsi))
6293 tree def;
6294 ssa_op_iter dit;
6295 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
6296 dit, SSA_OP_ALL_DEFS)
6297 /* As existing DEFs may move between stmts
6298 only process new ones. */
6299 if (! has_VN_INFO (def))
6301 vn_ssa_aux_t vn_info = VN_INFO (def);
6302 vn_info->valnum = def;
6303 vn_info->visited = true;
6305 if (gsi_stmt (prev) == gsi_stmt (*gsi))
6306 break;
6307 gsi_next (&prev);
6309 while (1);
6311 stmt = gsi_stmt (*gsi);
6312 /* In case we folded the stmt away schedule the NOP for removal. */
6313 if (gimple_nop_p (stmt))
6314 to_remove.safe_push (stmt);
6317 /* Visit indirect calls and turn them into direct calls if
6318 possible using the devirtualization machinery. Do this before
6319 checking for required EH/abnormal/noreturn cleanup as devirt
6320 may expose more of those. */
6321 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
6323 tree fn = gimple_call_fn (call_stmt);
6324 if (fn
6325 && flag_devirtualize
6326 && virtual_method_call_p (fn))
6328 tree otr_type = obj_type_ref_class (fn);
6329 unsigned HOST_WIDE_INT otr_tok
6330 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
6331 tree instance;
6332 ipa_polymorphic_call_context context (current_function_decl,
6333 fn, stmt, &instance);
6334 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
6335 otr_type, stmt, NULL);
6336 bool final;
6337 vec <cgraph_node *> targets
6338 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
6339 otr_tok, context, &final);
6340 if (dump_file)
6341 dump_possible_polymorphic_call_targets (dump_file,
6342 obj_type_ref_class (fn),
6343 otr_tok, context);
6344 if (final && targets.length () <= 1 && dbg_cnt (devirt))
6346 tree fn;
6347 if (targets.length () == 1)
6348 fn = targets[0]->decl;
6349 else
6350 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6351 if (dump_enabled_p ())
6353 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
6354 "converting indirect call to "
6355 "function %s\n",
6356 lang_hooks.decl_printable_name (fn, 2));
6358 gimple_call_set_fndecl (call_stmt, fn);
6359 /* If changing the call to __builtin_unreachable
6360 or similar noreturn function, adjust gimple_call_fntype
6361 too. */
6362 if (gimple_call_noreturn_p (call_stmt)
6363 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
6364 && TYPE_ARG_TYPES (TREE_TYPE (fn))
6365 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
6366 == void_type_node))
6367 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
6368 maybe_remove_unused_call_args (cfun, call_stmt);
6369 modified = true;
6374 if (modified)
6376 /* When changing a call into a noreturn call, cfg cleanup
6377 is needed to fix up the noreturn call. */
6378 if (!was_noreturn
6379 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
6380 to_fixup.safe_push (stmt);
6381 /* When changing a condition or switch into one we know what
6382 edge will be executed, schedule a cfg cleanup. */
6383 if ((gimple_code (stmt) == GIMPLE_COND
6384 && (gimple_cond_true_p (as_a <gcond *> (stmt))
6385 || gimple_cond_false_p (as_a <gcond *> (stmt))))
6386 || (gimple_code (stmt) == GIMPLE_SWITCH
6387 && TREE_CODE (gimple_switch_index
6388 (as_a <gswitch *> (stmt))) == INTEGER_CST))
6389 el_todo |= TODO_cleanup_cfg;
6390 /* If we removed EH side-effects from the statement, clean
6391 its EH information. */
6392 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
6394 bitmap_set_bit (need_eh_cleanup,
6395 gimple_bb (stmt)->index);
6396 if (dump_file && (dump_flags & TDF_DETAILS))
6397 fprintf (dump_file, " Removed EH side-effects.\n");
6399 /* Likewise for AB side-effects. */
6400 if (can_make_abnormal_goto
6401 && !stmt_can_make_abnormal_goto (stmt))
6403 bitmap_set_bit (need_ab_cleanup,
6404 gimple_bb (stmt)->index);
6405 if (dump_file && (dump_flags & TDF_DETAILS))
6406 fprintf (dump_file, " Removed AB side-effects.\n");
6408 update_stmt (stmt);
6409 /* In case the VDEF on the original stmt was released, value-number
6410 it to the VUSE. This is to make vuse_ssa_val able to skip
6411 released virtual operands. */
6412 if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
6413 VN_INFO (vdef)->valnum = vuse;
6416 /* Make new values available - for fully redundant LHS we
6417 continue with the next stmt above and skip this. */
6418 def_operand_p defp;
6419 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
6420 eliminate_push_avail (b, DEF_FROM_PTR (defp));
6423 /* Perform elimination for the basic-block B during the domwalk. */
6425 edge
6426 eliminate_dom_walker::before_dom_children (basic_block b)
6428 /* Mark new bb. */
6429 avail_stack.safe_push (NULL_TREE);
6431 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
6432 if (!(b->flags & BB_EXECUTABLE))
6433 return NULL;
6435 vn_context_bb = b;
6437 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
6439 gphi *phi = gsi.phi ();
6440 tree res = PHI_RESULT (phi);
6442 if (virtual_operand_p (res))
6444 gsi_next (&gsi);
6445 continue;
6448 tree sprime = eliminate_avail (b, res);
6449 if (sprime
6450 && sprime != res)
6452 if (dump_file && (dump_flags & TDF_DETAILS))
6454 fprintf (dump_file, "Replaced redundant PHI node defining ");
6455 print_generic_expr (dump_file, res);
6456 fprintf (dump_file, " with ");
6457 print_generic_expr (dump_file, sprime);
6458 fprintf (dump_file, "\n");
6461 /* If we inserted this PHI node ourselves, it's not an elimination. */
6462 if (! inserted_exprs
6463 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
6464 eliminations++;
6466 /* If we will propagate into all uses don't bother to do
6467 anything. */
6468 if (may_propagate_copy (res, sprime))
6470 /* Mark the PHI for removal. */
6471 to_remove.safe_push (phi);
6472 gsi_next (&gsi);
6473 continue;
6476 remove_phi_node (&gsi, false);
6478 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
6479 sprime = fold_convert (TREE_TYPE (res), sprime);
6480 gimple *stmt = gimple_build_assign (res, sprime);
6481 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
6482 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
6483 continue;
6486 eliminate_push_avail (b, res);
6487 gsi_next (&gsi);
6490 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
6491 !gsi_end_p (gsi);
6492 gsi_next (&gsi))
6493 eliminate_stmt (b, &gsi);
6495 /* Replace destination PHI arguments. */
6496 edge_iterator ei;
6497 edge e;
6498 FOR_EACH_EDGE (e, ei, b->succs)
6499 if (e->flags & EDGE_EXECUTABLE)
6500 for (gphi_iterator gsi = gsi_start_phis (e->dest);
6501 !gsi_end_p (gsi);
6502 gsi_next (&gsi))
6504 gphi *phi = gsi.phi ();
6505 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
6506 tree arg = USE_FROM_PTR (use_p);
6507 if (TREE_CODE (arg) != SSA_NAME
6508 || virtual_operand_p (arg))
6509 continue;
6510 tree sprime = eliminate_avail (b, arg);
6511 if (sprime && may_propagate_copy (arg, sprime))
6512 propagate_value (use_p, sprime);
6515 vn_context_bb = NULL;
6517 return NULL;
6520 /* Make no longer available leaders no longer available. */
6522 void
6523 eliminate_dom_walker::after_dom_children (basic_block)
6525 tree entry;
6526 while ((entry = avail_stack.pop ()) != NULL_TREE)
6528 tree valnum = VN_INFO (entry)->valnum;
6529 tree old = avail[SSA_NAME_VERSION (valnum)];
6530 if (old == entry)
6531 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
6532 else
6533 avail[SSA_NAME_VERSION (valnum)] = entry;
6537 /* Remove queued stmts and perform delayed cleanups. */
6539 unsigned
6540 eliminate_dom_walker::eliminate_cleanup (bool region_p)
6542 statistics_counter_event (cfun, "Eliminated", eliminations);
6543 statistics_counter_event (cfun, "Insertions", insertions);
6545 /* We cannot remove stmts during BB walk, especially not release SSA
6546 names there as this confuses the VN machinery. The stmts ending
6547 up in to_remove are either stores or simple copies.
6548 Remove stmts in reverse order to make debug stmt creation possible. */
6549 while (!to_remove.is_empty ())
6551 bool do_release_defs = true;
6552 gimple *stmt = to_remove.pop ();
6554 /* When we are value-numbering a region we do not require exit PHIs to
6555 be present so we have to make sure to deal with uses outside of the
6556 region of stmts that we thought were eliminated.
6557 ??? Note we may be confused by uses in dead regions we didn't run
6558 elimination on. Rather than checking individual uses we accept
6559 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
6560 contains such an example). */
6561 if (region_p)
6563 if (gphi *phi = dyn_cast <gphi *> (stmt))
6565 tree lhs = gimple_phi_result (phi);
6566 if (!has_zero_uses (lhs))
6568 if (dump_file && (dump_flags & TDF_DETAILS))
6569 fprintf (dump_file, "Keeping eliminated stmt live "
6570 "as copy because of out-of-region uses\n");
6571 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
6572 gimple *copy = gimple_build_assign (lhs, sprime);
6573 gimple_stmt_iterator gsi
6574 = gsi_after_labels (gimple_bb (stmt));
6575 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
6576 do_release_defs = false;
6579 else if (tree lhs = gimple_get_lhs (stmt))
6580 if (TREE_CODE (lhs) == SSA_NAME
6581 && !has_zero_uses (lhs))
6583 if (dump_file && (dump_flags & TDF_DETAILS))
6584 fprintf (dump_file, "Keeping eliminated stmt live "
6585 "as copy because of out-of-region uses\n");
6586 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
6587 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6588 if (is_gimple_assign (stmt))
6590 gimple_assign_set_rhs_from_tree (&gsi, sprime);
6591 stmt = gsi_stmt (gsi);
6592 update_stmt (stmt);
6593 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
6594 bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
6595 continue;
6597 else
6599 gimple *copy = gimple_build_assign (lhs, sprime);
6600 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
6601 do_release_defs = false;
6606 if (dump_file && (dump_flags & TDF_DETAILS))
6608 fprintf (dump_file, "Removing dead stmt ");
6609 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
6612 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6613 if (gimple_code (stmt) == GIMPLE_PHI)
6614 remove_phi_node (&gsi, do_release_defs);
6615 else
6617 basic_block bb = gimple_bb (stmt);
6618 unlink_stmt_vdef (stmt);
6619 if (gsi_remove (&gsi, true))
6620 bitmap_set_bit (need_eh_cleanup, bb->index);
6621 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
6622 bitmap_set_bit (need_ab_cleanup, bb->index);
6623 if (do_release_defs)
6624 release_defs (stmt);
6627 /* Removing a stmt may expose a forwarder block. */
6628 el_todo |= TODO_cleanup_cfg;
6631 /* Fix up stmts that became noreturn calls. This may require splitting
6632 blocks and thus isn't possible during the dominator walk. Do this
6633 in reverse order so we don't inadvertently remove a stmt we want to
6634 fix up by visiting a dominating now-noreturn call first. */
6635 while (!to_fixup.is_empty ())
6637 gimple *stmt = to_fixup.pop ();
6639 if (dump_file && (dump_flags & TDF_DETAILS))
6641 fprintf (dump_file, "Fixing up noreturn call ");
6642 print_gimple_stmt (dump_file, stmt, 0);
6645 if (fixup_noreturn_call (stmt))
6646 el_todo |= TODO_cleanup_cfg;
6649 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
6650 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
6652 if (do_eh_cleanup)
6653 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
6655 if (do_ab_cleanup)
6656 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
6658 if (do_eh_cleanup || do_ab_cleanup)
6659 el_todo |= TODO_cleanup_cfg;
6661 return el_todo;
6664 /* Eliminate fully redundant computations. */
6666 unsigned
6667 eliminate_with_rpo_vn (bitmap inserted_exprs)
6669 eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
6671 eliminate_dom_walker *saved_rpo_avail = rpo_avail;
6672 rpo_avail = &walker;
6673 walker.walk (cfun->cfg->x_entry_block_ptr);
6674 rpo_avail = saved_rpo_avail;
6676 return walker.eliminate_cleanup ();
6679 static unsigned
6680 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6681 bool iterate, bool eliminate);
6683 void
6684 run_rpo_vn (vn_lookup_kind kind)
6686 default_vn_walk_kind = kind;
6687 do_rpo_vn (cfun, NULL, NULL, true, false);
6689 /* ??? Prune requirement of these. */
6690 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
6692 /* Initialize the value ids and prune out remaining VN_TOPs
6693 from dead code. */
6694 tree name;
6695 unsigned i;
6696 FOR_EACH_SSA_NAME (i, name, cfun)
6698 vn_ssa_aux_t info = VN_INFO (name);
6699 if (!info->visited
6700 || info->valnum == VN_TOP)
6701 info->valnum = name;
6702 if (info->valnum == name)
6703 info->value_id = get_next_value_id ();
6704 else if (is_gimple_min_invariant (info->valnum))
6705 info->value_id = get_or_alloc_constant_value_id (info->valnum);
6708 /* Propagate. */
6709 FOR_EACH_SSA_NAME (i, name, cfun)
6711 vn_ssa_aux_t info = VN_INFO (name);
6712 if (TREE_CODE (info->valnum) == SSA_NAME
6713 && info->valnum != name
6714 && info->value_id != VN_INFO (info->valnum)->value_id)
6715 info->value_id = VN_INFO (info->valnum)->value_id;
6718 set_hashtable_value_ids ();
6720 if (dump_file && (dump_flags & TDF_DETAILS))
6722 fprintf (dump_file, "Value numbers:\n");
6723 FOR_EACH_SSA_NAME (i, name, cfun)
6725 if (VN_INFO (name)->visited
6726 && SSA_VAL (name) != name)
6728 print_generic_expr (dump_file, name);
6729 fprintf (dump_file, " = ");
6730 print_generic_expr (dump_file, SSA_VAL (name));
6731 fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
6737 /* Free VN associated data structures. */
6739 void
6740 free_rpo_vn (void)
6742 free_vn_table (valid_info);
6743 XDELETE (valid_info);
6744 obstack_free (&vn_tables_obstack, NULL);
6745 obstack_free (&vn_tables_insert_obstack, NULL);
6747 vn_ssa_aux_iterator_type it;
6748 vn_ssa_aux_t info;
6749 FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
6750 if (info->needs_insertion)
6751 release_ssa_name (info->name);
6752 obstack_free (&vn_ssa_aux_obstack, NULL);
6753 delete vn_ssa_aux_hash;
6755 delete constant_to_value_id;
6756 constant_to_value_id = NULL;
6759 /* Hook for maybe_push_res_to_seq, look up the expression in the VN tables. */
6761 static tree
6762 vn_lookup_simplify_result (gimple_match_op *res_op)
6764 if (!res_op->code.is_tree_code ())
6765 return NULL_TREE;
6766 tree *ops = res_op->ops;
6767 unsigned int length = res_op->num_ops;
6768 if (res_op->code == CONSTRUCTOR
6769 /* ??? We're arriving here with SCCVN's view, a decomposed CONSTRUCTOR,
6770 while GIMPLE / match-and-simplify uses CONSTRUCTOR as a GENERIC tree. */
6771 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
6773 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
6774 ops = XALLOCAVEC (tree, length);
6775 for (unsigned i = 0; i < length; ++i)
6776 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
6778 vn_nary_op_t vnresult = NULL;
6779 tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
6780 res_op->type, ops, &vnresult);
6781 /* If this is used from expression simplification make sure to
6782 return an available expression. */
6783 if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
6784 res = rpo_avail->eliminate_avail (vn_context_bb, res);
6785 return res;
6788 /* Return a leader for OP's value that is valid at BB. */
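/* The leader is found by walking the chain of availability records
   hanging off the value's VN info and picking the first one whose
   recording block dominates BB, modulo non-executable regions as
   handled by dominated_by_p_w_unex.  */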
6790 tree
6791 rpo_elim::eliminate_avail (basic_block bb, tree op)
6793 bool visited;
6794 tree valnum = SSA_VAL (op, &visited);
6795 /* If we didn't visit OP then it must be defined outside of the
6796 region we process and also dominate it. So it is available. */
6797 if (!visited)
6798 return op;
6799 if (TREE_CODE (valnum) == SSA_NAME)
6801 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
6802 return valnum;
6803 vn_avail *av = VN_INFO (valnum)->avail;
6804 if (!av)
6805 return NULL_TREE;
6806 if (av->location == bb->index)
6807 /* On tramp3d 90% of the cases are here. */
6808 return ssa_name (av->leader);
6811 basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
6812 /* ??? During elimination we have to use availability at the
6813 definition site of a use we try to replace. This
6814 is required to not run into inconsistencies because
6815 of dominated_by_p_w_unex behavior and removing a definition
6816 while not replacing all uses.
6817 ??? We could try to consistently walk dominators
6818 ignoring non-executable regions. The nearest common
6819 dominator of bb and abb is where we can stop walking. We
6820 may also be able to "pre-compute" (bits of) the next immediate
6821 (non-)dominator during the RPO walk when marking edges as
6822 executable. */
6823 if (dominated_by_p_w_unex (bb, abb))
6825 tree leader = ssa_name (av->leader);
6826 /* Prevent eliminations that break loop-closed SSA. */
6827 if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
6828 && ! SSA_NAME_IS_DEFAULT_DEF (leader)
6829 && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
6830 (leader))->loop_father,
6831 bb))
6832 return NULL_TREE;
6833 if (dump_file && (dump_flags & TDF_DETAILS))
6835 print_generic_expr (dump_file, leader);
6836 fprintf (dump_file, " is available for ");
6837 print_generic_expr (dump_file, valnum);
6838 fprintf (dump_file, "\n");
6840 /* On tramp3d 99% of the _remaining_ cases succeed at
6841 the first entry. */
6842 return leader;
6844 /* ??? Can we somehow skip to the immediate dominator
6845 RPO index (bb_to_rpo)? Again, maybe not worth it; on
6846 tramp3d the worst number of elements in the vector is 9. */
6847 av = av->next;
6849 while (av);
6851 else if (valnum != VN_TOP)
6852 /* valnum is is_gimple_min_invariant. */
6853 return valnum;
6854 return NULL_TREE;
6857 /* Make LEADER a leader for its value at BB. */
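/* Availability is recorded per value as a chain of (block, leader)
   entries on the value's VN info, prepended so the most recently made
   leader is found first; do_unwind prunes entries again when iteration
   rolls back beyond their block.  */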
6859 void
6860 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
6862 tree valnum = VN_INFO (leader)->valnum;
6863 if (valnum == VN_TOP
6864 || is_gimple_min_invariant (valnum))
6865 return;
6866 if (dump_file && (dump_flags & TDF_DETAILS))
6868 fprintf (dump_file, "Making available beyond BB%d ", bb->index);
6869 print_generic_expr (dump_file, leader);
6870 fprintf (dump_file, " for value ");
6871 print_generic_expr (dump_file, valnum);
6872 fprintf (dump_file, "\n");
6874 vn_ssa_aux_t value = VN_INFO (valnum);
6875 vn_avail *av;
6876 if (m_avail_freelist)
6878 av = m_avail_freelist;
6879 m_avail_freelist = m_avail_freelist->next;
6881 else
6882 av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
6883 av->location = bb->index;
6884 av->leader = SSA_NAME_VERSION (leader);
6885 av->next = value->avail;
6886 value->avail = av;
6889 /* Valueization hook for RPO VN plus required state. */
6891 tree
6892 rpo_vn_valueize (tree name)
6894 if (TREE_CODE (name) == SSA_NAME)
6896 vn_ssa_aux_t val = VN_INFO (name);
6897 if (val)
6899 tree tem = val->valnum;
6900 if (tem != VN_TOP && tem != name)
6902 if (TREE_CODE (tem) != SSA_NAME)
6903 return tem;
6904 /* For all values we only valueize to an available leader
6905 which means we can use SSA name info without restriction. */
6906 tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
6907 if (tem)
6908 return tem;
6912 return name;
6915 /* Insert on PRED_E predicates derived from CODE OPS being true besides the
6916 inverted condition. */
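/* For example, on the true edge of if (a_1 < b_2) this additionally
   records a_1 != b_2 and a_1 <= b_2 as true and a_1 > b_2 and
   a_1 == b_2 as false, so that a dominated if (a_1 != b_2) can be
   folded without iterating.  The names are for illustration only.  */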
6918 static void
6919 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
6921 switch (code)
6923 case LT_EXPR:
6924 /* a < b -> a {!,<}= b */
6925 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
6926 ops, boolean_true_node, 0, pred_e);
6927 vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
6928 ops, boolean_true_node, 0, pred_e);
6929 /* a < b -> ! a {>,=} b */
6930 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
6931 ops, boolean_false_node, 0, pred_e);
6932 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
6933 ops, boolean_false_node, 0, pred_e);
6934 break;
6935 case GT_EXPR:
6936 /* a > b -> a {!,>}= b */
6937 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
6938 ops, boolean_true_node, 0, pred_e);
6939 vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
6940 ops, boolean_true_node, 0, pred_e);
6941 /* a > b -> ! a {<,=} b */
6942 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
6943 ops, boolean_false_node, 0, pred_e);
6944 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
6945 ops, boolean_false_node, 0, pred_e);
6946 break;
6947 case EQ_EXPR:
6948 /* a == b -> ! a {<,>} b */
6949 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
6950 ops, boolean_false_node, 0, pred_e);
6951 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
6952 ops, boolean_false_node, 0, pred_e);
6953 break;
6954 case LE_EXPR:
6955 case GE_EXPR:
6956 case NE_EXPR:
6957 /* Nothing besides inverted condition. */
6958 break;
6959 default:;
6963 /* Main stmt worker for RPO VN, process BB. */
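/* Roughly: value number the PHIs and stmts of BB, compute which
   outgoing edges are known to be executable (possibly using predicated
   values recorded on previously taken edges), and, when not iterating
   and elimination is requested, also eliminate and substitute into
   successor PHI arguments.  */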
6965 static unsigned
6966 process_bb (rpo_elim &avail, basic_block bb,
6967 bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
6968 bool do_region, bitmap exit_bbs, bool skip_phis)
6970 unsigned todo = 0;
6971 edge_iterator ei;
6972 edge e;
6974 vn_context_bb = bb;
6976 /* If we are in loop-closed SSA preserve this state. This is
6977 relevant when called on regions from outside of FRE/PRE. */
6978 bool lc_phi_nodes = false;
6979 if (!skip_phis
6980 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
6981 FOR_EACH_EDGE (e, ei, bb->preds)
6982 if (e->src->loop_father != e->dest->loop_father
6983 && flow_loop_nested_p (e->dest->loop_father,
6984 e->src->loop_father))
6986 lc_phi_nodes = true;
6987 break;
6990 /* When we visit a loop header substitute into loop info. */
6991 if (!iterate && eliminate && bb->loop_father->header == bb)
6993 /* Keep fields in sync with substitute_in_loop_info. */
6994 if (bb->loop_father->nb_iterations)
6995 bb->loop_father->nb_iterations
6996 = simplify_replace_tree (bb->loop_father->nb_iterations,
6997 NULL_TREE, NULL_TREE, &vn_valueize_wrapper);
7000 /* Value-number all defs in the basic-block. */
7001 if (!skip_phis)
7002 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7003 gsi_next (&gsi))
7005 gphi *phi = gsi.phi ();
7006 tree res = PHI_RESULT (phi);
7007 vn_ssa_aux_t res_info = VN_INFO (res);
7008 if (!bb_visited)
7010 gcc_assert (!res_info->visited);
7011 res_info->valnum = VN_TOP;
7012 res_info->visited = true;
7015 /* When not iterating force backedge values to varying. */
7016 visit_stmt (phi, !iterate_phis);
7017 if (virtual_operand_p (res))
7018 continue;
7020 /* Eliminate */
7021 /* The interesting case for the correctness of how we handle backedges
7022 and availability is gcc.dg/tree-ssa/pr22230.c,
7023 and gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization. */
7024 tree val = res_info->valnum;
7025 if (res != val && !iterate && eliminate)
7027 if (tree leader = avail.eliminate_avail (bb, res))
7029 if (leader != res
7030 /* Preserve loop-closed SSA form. */
7031 && (! lc_phi_nodes
7032 || is_gimple_min_invariant (leader)))
7034 if (dump_file && (dump_flags & TDF_DETAILS))
7036 fprintf (dump_file, "Replaced redundant PHI node "
7037 "defining ");
7038 print_generic_expr (dump_file, res);
7039 fprintf (dump_file, " with ");
7040 print_generic_expr (dump_file, leader);
7041 fprintf (dump_file, "\n");
7043 avail.eliminations++;
7045 if (may_propagate_copy (res, leader))
7047 /* Schedule for removal. */
7048 avail.to_remove.safe_push (phi);
7049 continue;
7051 /* ??? Else generate a copy stmt. */
7055 /* Only make defs available that are not already. But make
7056 sure loop-closed SSA PHI node defs are picked up for
7057 downstream uses. */
7058 if (lc_phi_nodes
7059 || res == val
7060 || ! avail.eliminate_avail (bb, res))
7061 avail.eliminate_push_avail (bb, res);
7064 /* For empty BBs mark outgoing edges executable. For non-empty BBs
7065 we do this when processing the last stmt, as it has to happen
7066 before elimination, which would otherwise rewrite GIMPLE_CONDs to
7067 an if (1 != 0) style when seeing non-executable edges. */
7068 if (gsi_end_p (gsi_start_bb (bb)))
7070 FOR_EACH_EDGE (e, ei, bb->succs)
7072 if (!(e->flags & EDGE_EXECUTABLE))
7074 if (dump_file && (dump_flags & TDF_DETAILS))
7075 fprintf (dump_file,
7076 "marking outgoing edge %d -> %d executable\n",
7077 e->src->index, e->dest->index);
7078 e->flags |= EDGE_EXECUTABLE;
7079 e->dest->flags |= BB_EXECUTABLE;
7081 else if (!(e->dest->flags & BB_EXECUTABLE))
7083 if (dump_file && (dump_flags & TDF_DETAILS))
7084 fprintf (dump_file,
7085 "marking destination block %d reachable\n",
7086 e->dest->index);
7087 e->dest->flags |= BB_EXECUTABLE;
7091 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7092 !gsi_end_p (gsi); gsi_next (&gsi))
7094 ssa_op_iter i;
7095 tree op;
7096 if (!bb_visited)
7098 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
7100 vn_ssa_aux_t op_info = VN_INFO (op);
7101 gcc_assert (!op_info->visited);
7102 op_info->valnum = VN_TOP;
7103 op_info->visited = true;
7106 /* We somehow have to deal with uses that are not defined
7107 in the processed region. Forcing unvisited uses to
7108 varying here doesn't play well with def-use following during
7109 expression simplification, so we deal with this by checking
7110 the visited flag in SSA_VAL. */
7113 visit_stmt (gsi_stmt (gsi));
7115 gimple *last = gsi_stmt (gsi);
7116 e = NULL;
7117 switch (gimple_code (last))
7119 case GIMPLE_SWITCH:
7120 e = find_taken_edge (bb, vn_valueize (gimple_switch_index
7121 (as_a <gswitch *> (last))));
7122 break;
7123 case GIMPLE_COND:
7125 tree lhs = vn_valueize (gimple_cond_lhs (last));
7126 tree rhs = vn_valueize (gimple_cond_rhs (last));
7127 tree val = gimple_simplify (gimple_cond_code (last),
7128 boolean_type_node, lhs, rhs,
7129 NULL, vn_valueize);
7130 /* If the condition didn't simplify, see if we have recorded
7131 an expression from the edges taken so far. */
7132 if (! val || TREE_CODE (val) != INTEGER_CST)
7134 vn_nary_op_t vnresult;
7135 tree ops[2];
7136 ops[0] = lhs;
7137 ops[1] = rhs;
7138 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
7139 boolean_type_node, ops,
7140 &vnresult);
7141 /* Did we get a predicated value? */
7142 if (! val && vnresult && vnresult->predicated_values)
7144 val = vn_nary_op_get_predicated_value (vnresult, bb);
7145 if (val && dump_file && (dump_flags & TDF_DETAILS))
7147 fprintf (dump_file, "Got predicated value ");
7148 print_generic_expr (dump_file, val, TDF_NONE);
7149 fprintf (dump_file, " for ");
7150 print_gimple_stmt (dump_file, last, TDF_SLIM);
7154 if (val)
7155 e = find_taken_edge (bb, val);
7156 if (! e)
7158 /* If we didn't manage to compute the taken edge then
7159 push predicated expressions for the condition itself
7160 and related conditions to the hashtables. This allows
7161 simplification of redundant conditions which is
7162 important as early cleanup. */
7163 edge true_e, false_e;
7164 extract_true_false_edges_from_block (bb, &true_e, &false_e);
7165 enum tree_code code = gimple_cond_code (last);
7166 enum tree_code icode
7167 = invert_tree_comparison (code, HONOR_NANS (lhs));
7168 tree ops[2];
7169 ops[0] = lhs;
7170 ops[1] = rhs;
7171 if (do_region
7172 && bitmap_bit_p (exit_bbs, true_e->dest->index))
7173 true_e = NULL;
7174 if (do_region
7175 && bitmap_bit_p (exit_bbs, false_e->dest->index))
7176 false_e = NULL;
7177 if (true_e)
7178 vn_nary_op_insert_pieces_predicated
7179 (2, code, boolean_type_node, ops,
7180 boolean_true_node, 0, true_e);
7181 if (false_e)
7182 vn_nary_op_insert_pieces_predicated
7183 (2, code, boolean_type_node, ops,
7184 boolean_false_node, 0, false_e);
7185 if (icode != ERROR_MARK)
7187 if (true_e)
7188 vn_nary_op_insert_pieces_predicated
7189 (2, icode, boolean_type_node, ops,
7190 boolean_false_node, 0, true_e);
7191 if (false_e)
7192 vn_nary_op_insert_pieces_predicated
7193 (2, icode, boolean_type_node, ops,
7194 boolean_true_node, 0, false_e);
7196 /* Relax for non-integers, inverted condition handled
7197 above. */
7198 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
7200 if (true_e)
7201 insert_related_predicates_on_edge (code, ops, true_e);
7202 if (false_e)
7203 insert_related_predicates_on_edge (icode, ops, false_e);
7206 break;
7208 case GIMPLE_GOTO:
7209 e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
7210 break;
7211 default:
7212 e = NULL;
7214 if (e)
7216 todo = TODO_cleanup_cfg;
7217 if (!(e->flags & EDGE_EXECUTABLE))
7219 if (dump_file && (dump_flags & TDF_DETAILS))
7220 fprintf (dump_file,
7221 "marking known outgoing %sedge %d -> %d executable\n",
7222 e->flags & EDGE_DFS_BACK ? "back-" : "",
7223 e->src->index, e->dest->index);
7224 e->flags |= EDGE_EXECUTABLE;
7225 e->dest->flags |= BB_EXECUTABLE;
7227 else if (!(e->dest->flags & BB_EXECUTABLE))
7229 if (dump_file && (dump_flags & TDF_DETAILS))
7230 fprintf (dump_file,
7231 "marking destination block %d reachable\n",
7232 e->dest->index);
7233 e->dest->flags |= BB_EXECUTABLE;
7236 else if (gsi_one_before_end_p (gsi))
7238 FOR_EACH_EDGE (e, ei, bb->succs)
7240 if (!(e->flags & EDGE_EXECUTABLE))
7242 if (dump_file && (dump_flags & TDF_DETAILS))
7243 fprintf (dump_file,
7244 "marking outgoing edge %d -> %d executable\n",
7245 e->src->index, e->dest->index);
7246 e->flags |= EDGE_EXECUTABLE;
7247 e->dest->flags |= BB_EXECUTABLE;
7249 else if (!(e->dest->flags & BB_EXECUTABLE))
7251 if (dump_file && (dump_flags & TDF_DETAILS))
7252 fprintf (dump_file,
7253 "marking destination block %d reachable\n",
7254 e->dest->index);
7255 e->dest->flags |= BB_EXECUTABLE;
7260 /* Eliminate. That also pushes to avail. */
7261 if (eliminate && ! iterate)
7262 avail.eliminate_stmt (bb, &gsi);
7263 else
7264 /* If not eliminating, make all not already available defs
7265 available. */
7266 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
7267 if (! avail.eliminate_avail (bb, op))
7268 avail.eliminate_push_avail (bb, op);
7271 /* Eliminate in destination PHI arguments. Always substitute in dest
7272 PHIs, even for non-executable edges. This handles region
7273 exit PHIs. */
7274 if (!iterate && eliminate)
7275 FOR_EACH_EDGE (e, ei, bb->succs)
7276 for (gphi_iterator gsi = gsi_start_phis (e->dest);
7277 !gsi_end_p (gsi); gsi_next (&gsi))
7279 gphi *phi = gsi.phi ();
7280 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
7281 tree arg = USE_FROM_PTR (use_p);
7282 if (TREE_CODE (arg) != SSA_NAME
7283 || virtual_operand_p (arg))
7284 continue;
7285 tree sprime;
7286 if (SSA_NAME_IS_DEFAULT_DEF (arg))
7288 sprime = SSA_VAL (arg);
7289 gcc_assert (TREE_CODE (sprime) != SSA_NAME
7290 || SSA_NAME_IS_DEFAULT_DEF (sprime));
7292 else
7293 /* Look for something available at the definition block of the argument.
7294 This avoids inconsistencies between availability there which
7295 decides if the stmt can be removed and availability at the
7296 use site. The SSA property ensures that things available
7297 at the definition are also available at uses. */
7298 sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
7299 arg);
7300 if (sprime
7301 && sprime != arg
7302 && may_propagate_copy (arg, sprime))
7303 propagate_value (use_p, sprime);
7306 vn_context_bb = NULL;
7307 return todo;
7310 /* Unwind state per basic-block. */
7312 struct unwind_state
7314 /* Times this block has been visited. */
7315 unsigned visited;
7316 /* Whether to handle this as iteration point or whether to treat
7317 incoming backedge PHI values as varying. */
7318 bool iterate;
7319 /* Maximum RPO index this block is reachable from. */
7320 int max_rpo;
7321 /* Unwind state. */
7322 void *ob_top;
7323 vn_reference_t ref_top;
7324 vn_phi_t phi_top;
7325 vn_nary_op_t nary_top;
7328 /* Unwind the RPO VN state for iteration. */
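/* This removes the nary, phi and reference hashtable entries inserted
   since the recorded tops (restoring predicated nary entries to their
   previous state), frees obstack memory allocated since then and
   prunes availability records made in blocks at or after RPO_IDX,
   returning them to the freelist.  */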
7330 static void
7331 do_unwind (unwind_state *to, int rpo_idx, rpo_elim &avail, int *bb_to_rpo)
7333 gcc_assert (to->iterate);
7334 for (; last_inserted_nary != to->nary_top;
7335 last_inserted_nary = last_inserted_nary->next)
7337 vn_nary_op_t *slot;
7338 slot = valid_info->nary->find_slot_with_hash
7339 (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
7340 /* Predication causes the need to restore previous state. */
7341 if ((*slot)->unwind_to)
7342 *slot = (*slot)->unwind_to;
7343 else
7344 valid_info->nary->clear_slot (slot);
7346 for (; last_inserted_phi != to->phi_top;
7347 last_inserted_phi = last_inserted_phi->next)
7349 vn_phi_t *slot;
7350 slot = valid_info->phis->find_slot_with_hash
7351 (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
7352 valid_info->phis->clear_slot (slot);
7354 for (; last_inserted_ref != to->ref_top;
7355 last_inserted_ref = last_inserted_ref->next)
7357 vn_reference_t *slot;
7358 slot = valid_info->references->find_slot_with_hash
7359 (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
7360 (*slot)->operands.release ();
7361 valid_info->references->clear_slot (slot);
7363 obstack_free (&vn_tables_obstack, to->ob_top);
7365 /* Prune [rpo_idx, ] from avail. */
7366 /* ??? This is O(number-of-values-in-region) which is
7367 O(region-size) rather than O(iteration-piece). */
7368 for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
7369 i != vn_ssa_aux_hash->end (); ++i)
7371 while ((*i)->avail)
7373 if (bb_to_rpo[(*i)->avail->location] < rpo_idx)
7374 break;
7375 vn_avail *av = (*i)->avail;
7376 (*i)->avail = (*i)->avail->next;
7377 av->next = avail.m_avail_freelist;
7378 avail.m_avail_freelist = av;
7383 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
7384 If ITERATE is true then treat backedges optimistically as not
7385 executed and iterate. If ELIMINATE is true then perform
7386 elimination, otherwise leave that to the caller. */
7388 static unsigned
7389 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
7390 bool iterate, bool eliminate)
7392 unsigned todo = 0;
7394 /* We currently do not support region-based iteration when
7395 elimination is requested. */
7396 gcc_assert (!entry || !iterate || !eliminate);
7397 /* When iterating we need loop info up-to-date. */
7398 gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
7400 bool do_region = entry != NULL;
7401 if (!do_region)
7403 entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
7404 exit_bbs = BITMAP_ALLOC (NULL);
7405 bitmap_set_bit (exit_bbs, EXIT_BLOCK);
7408 /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
7409 re-mark those that are contained in the region. */
7410 edge_iterator ei;
7411 edge e;
7412 FOR_EACH_EDGE (e, ei, entry->dest->preds)
7413 e->flags &= ~EDGE_DFS_BACK;
7415 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
7416 auto_vec<std::pair<int, int> > toplevel_scc_extents;
7417 int n = rev_post_order_and_mark_dfs_back_seme
7418 (fn, entry, exit_bbs, true, rpo, !iterate ? &toplevel_scc_extents : NULL);
7420 if (!do_region)
7421 BITMAP_FREE (exit_bbs);
7423 /* If there are any non-DFS_BACK edges into entry->dest, skip
7424 processing PHI nodes for that block.  This supports
7425 value-numbering loop bodies without the actual loop.  */
7426 FOR_EACH_EDGE (e, ei, entry->dest->preds)
7427 if (e != entry
7428 && !(e->flags & EDGE_DFS_BACK))
7429 break;
7430 bool skip_entry_phis = e != NULL;
7431 if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
7432 fprintf (dump_file, "Region does not contain all edges into "
7433 "the entry block, skipping its PHIs.\n");
7435 int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
7436 for (int i = 0; i < n; ++i)
7437 bb_to_rpo[rpo[i]] = i;
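/* Note on the inverse mapping just built: bb_to_rpo[rpo[i]] == i for every
   i in [0, n), so bb_to_rpo translates a basic-block index back to its
   position in the RPO order.  For example, with a hypothetical order
   rpo = {2, 3, 5, 4} we would have bb_to_rpo[5] == 2.  */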
7439 unwind_state *rpo_state = XNEWVEC (unwind_state, n);
7441 rpo_elim avail (entry->dest);
7442 rpo_avail = &avail;
7444 /* Verify we have no extra entries into the region. */
7445 if (flag_checking && do_region)
7447 auto_bb_flag bb_in_region (fn);
7448 for (int i = 0; i < n; ++i)
7450 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7451 bb->flags |= bb_in_region;
7453 /* We can't merge the first two loops because we cannot rely
7454 on EDGE_DFS_BACK for edges not within the region.  But if
7455 we decide to always have the bb_in_region flag we can
7456 do the checking during the RPO walk itself (but then it's
7457 also easy to handle MEME, i.e. multi-entry/multi-exit regions, conservatively).  */
7458 for (int i = 0; i < n; ++i)
7460 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7461 edge e;
7462 edge_iterator ei;
7463 FOR_EACH_EDGE (e, ei, bb->preds)
7464 gcc_assert (e == entry
7465 || (skip_entry_phis && bb == entry->dest)
7466 || (e->src->flags & bb_in_region));
7468 for (int i = 0; i < n; ++i)
7470 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7471 bb->flags &= ~bb_in_region;
7475 /* Create the VN state. For the initial size of the various hashtables
7476 use a heuristic based on region size and number of SSA names. */
7477 unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
7478 / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
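/* Worked example with made-up numbers for the heuristic above: a region of
   n == 50 blocks in a function with 400 real basic blocks and 4000 SSA
   names gives region_size == 50 * 4000 / 400 == 500, roughly the number
   of SSA names expected to be seen inside the region.  */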
7479 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
7480 next_value_id = 1;
7481 next_constant_value_id = -1;
7483 vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
7484 gcc_obstack_init (&vn_ssa_aux_obstack);
7486 gcc_obstack_init (&vn_tables_obstack);
7487 gcc_obstack_init (&vn_tables_insert_obstack);
7488 valid_info = XCNEW (struct vn_tables_s);
7489 allocate_vn_table (valid_info, region_size);
7490 last_inserted_ref = NULL;
7491 last_inserted_phi = NULL;
7492 last_inserted_nary = NULL;
7494 vn_valueize = rpo_vn_valueize;
7496 /* Initialize the unwind state and edge/BB executable state. */
7497 unsigned curr_scc = 0;
7498 for (int i = 0; i < n; ++i)
7500 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7501 rpo_state[i].visited = 0;
7502 rpo_state[i].max_rpo = i;
7503 if (!iterate && curr_scc < toplevel_scc_extents.length ())
7505 if (i >= toplevel_scc_extents[curr_scc].first
7506 && i <= toplevel_scc_extents[curr_scc].second)
7507 rpo_state[i].max_rpo = toplevel_scc_extents[curr_scc].second;
7508 if (i == toplevel_scc_extents[curr_scc].second)
7509 curr_scc++;
7511 bb->flags &= ~BB_EXECUTABLE;
7512 bool has_backedges = false;
7513 edge e;
7514 edge_iterator ei;
7515 FOR_EACH_EDGE (e, ei, bb->preds)
7517 if (e->flags & EDGE_DFS_BACK)
7518 has_backedges = true;
7519 e->flags &= ~EDGE_EXECUTABLE;
7520 if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
7521 continue;
7523 rpo_state[i].iterate = iterate && has_backedges;
7525 entry->flags |= EDGE_EXECUTABLE;
7526 entry->dest->flags |= BB_EXECUTABLE;
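/* Only the region entry edge and its destination are seeded as executable;
   every other edge and block has to be proven reachable during the walk
   below before its statements are value-numbered.  */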
7528 /* As a heuristic to improve compile-time we handle only the N innermost
7529 loops and the outermost one optimistically.  */
7530 if (iterate)
7532 loop_p loop;
7533 unsigned max_depth = param_rpo_vn_max_loop_depth;
7534 FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
7535 if (loop_depth (loop) > max_depth)
7536 for (unsigned i = 2;
7537 i < loop_depth (loop) - max_depth; ++i)
7539 basic_block header = superloop_at_depth (loop, i)->header;
7540 bool non_latch_backedge = false;
7541 edge e;
7542 edge_iterator ei;
7543 FOR_EACH_EDGE (e, ei, header->preds)
7544 if (e->flags & EDGE_DFS_BACK)
7546 /* There can be a non-latch backedge into the header
7547 which is part of an outer irreducible region.  In that
7548 case we cannot avoid iterating this block.  */
7549 if (!dominated_by_p (CDI_DOMINATORS,
7550 e->src, e->dest))
7552 if (dump_file && (dump_flags & TDF_DETAILS))
7553 fprintf (dump_file, "non-latch backedge %d -> %d "
7554 "forces iteration of loop %d\n",
7555 e->src->index, e->dest->index, loop->num);
7556 non_latch_backedge = true;
7558 else
7559 e->flags |= EDGE_EXECUTABLE;
7561 rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
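/* Worked example for the loop-depth heuristic above, assuming a
   hypothetical param_rpo_vn_max_loop_depth of 3 and an innermost loop at
   depth 7: the headers of the enclosing loops at depths 2 and 3 get their
   latch backedges marked executable up front and are not iterated
   (provided they have no non-latch backedges), while the outermost loop
   and the four innermost levels remain handled optimistically.  */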
7565 uint64_t nblk = 0;
7566 int idx = 0;
7567 if (iterate)
7568 /* Go and process all blocks, iterating as necessary. */
7571 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7573 /* If the block has incoming backedges, remember the unwind state.  This
7574 is required even for non-executable blocks since in irreducible
7575 regions we might reach them via a backedge and restart iterating
7576 from there.
7577 Note we can mark individual blocks with incoming backedges as
7578 non-iterating, in which case their PHIs are handled conservatively.
7579 We do that heuristically to reduce compile time for degenerate cases.  */
7580 if (rpo_state[idx].iterate)
7582 rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
7583 rpo_state[idx].ref_top = last_inserted_ref;
7584 rpo_state[idx].phi_top = last_inserted_phi;
7585 rpo_state[idx].nary_top = last_inserted_nary;
7588 if (!(bb->flags & BB_EXECUTABLE))
7590 if (dump_file && (dump_flags & TDF_DETAILS))
7591 fprintf (dump_file, "Block %d: BB%d found not executable\n",
7592 idx, bb->index);
7593 idx++;
7594 continue;
7597 if (dump_file && (dump_flags & TDF_DETAILS))
7598 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7599 nblk++;
7600 todo |= process_bb (avail, bb,
7601 rpo_state[idx].visited != 0,
7602 rpo_state[idx].iterate,
7603 iterate, eliminate, do_region, exit_bbs, false);
7604 rpo_state[idx].visited++;
7606 /* Check whether changed values flow over executable outgoing backedges
7607 and whether they change destination PHI values (that is what we
7608 can easily verify).  Reduce over all such edges to the destination
7609 farthest back in the RPO order.  */
7610 int iterate_to = -1;
7611 edge_iterator ei;
7612 edge e;
7613 FOR_EACH_EDGE (e, ei, bb->succs)
7614 if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
7615 == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
7616 && rpo_state[bb_to_rpo[e->dest->index]].iterate)
7618 int destidx = bb_to_rpo[e->dest->index];
7619 if (!rpo_state[destidx].visited)
7621 if (dump_file && (dump_flags & TDF_DETAILS))
7622 fprintf (dump_file, "Unvisited destination %d\n",
7623 e->dest->index);
7624 if (iterate_to == -1 || destidx < iterate_to)
7625 iterate_to = destidx;
7626 continue;
7628 if (dump_file && (dump_flags & TDF_DETAILS))
7629 fprintf (dump_file, "Looking for changed values of backedge"
7630 " %d->%d destination PHIs\n",
7631 e->src->index, e->dest->index);
7632 vn_context_bb = e->dest;
7633 gphi_iterator gsi;
7634 for (gsi = gsi_start_phis (e->dest);
7635 !gsi_end_p (gsi); gsi_next (&gsi))
7637 bool inserted = false;
7638 /* While we would ideally iterate only on value changes,
7639 we CSE PHIs and do that even across basic-block
7640 boundaries.  So even hashtable state changes can
7641 be important (which is roughly equivalent to
7642 PHI argument value changes).  To avoid excessive
7643 iteration because of that we track whether a PHI
7644 was CSEd to with GF_PLF_1.  */
7645 bool phival_changed;
7646 if ((phival_changed = visit_phi (gsi.phi (),
7647 &inserted, false))
7648 || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
7650 if (!phival_changed
7651 && dump_file && (dump_flags & TDF_DETAILS))
7652 fprintf (dump_file, "PHI was CSEd and hashtable "
7653 "state (changed)\n");
7654 if (iterate_to == -1 || destidx < iterate_to)
7655 iterate_to = destidx;
7656 break;
7659 vn_context_bb = NULL;
7661 if (iterate_to != -1)
7663 do_unwind (&rpo_state[iterate_to], iterate_to, avail, bb_to_rpo);
7664 idx = iterate_to;
7665 if (dump_file && (dump_flags & TDF_DETAILS))
7666 fprintf (dump_file, "Iterating to %d BB%d\n",
7667 iterate_to, rpo[iterate_to]);
7668 continue;
7671 idx++;
7673 while (idx < n);
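/* The iteration above terminates once idx walks past the end of the
   region without any executable backedge destination reporting a changed
   PHI, i.e. once the whole region has reached a fixed point.  */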
7675 else /* !iterate */
7677 /* Process all blocks greedily with a worklist that enforces RPO
7678 processing of reachable blocks. */
7679 auto_bitmap worklist;
7680 bitmap_set_bit (worklist, 0);
7681 while (!bitmap_empty_p (worklist))
7683 int idx = bitmap_first_set_bit (worklist);
7684 bitmap_clear_bit (worklist, idx);
7685 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7686 gcc_assert ((bb->flags & BB_EXECUTABLE)
7687 && !rpo_state[idx].visited);
7689 if (dump_file && (dump_flags & TDF_DETAILS))
7690 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7692 /* When we run into predecessor edges whose executable state we
7693 cannot trust, mark them executable so PHI processing will
7694 be conservative.
7695 ??? Do we need to force arguments flowing over such an edge
7696 to be varying or will they always be?  */
7697 edge_iterator ei;
7698 edge e;
7699 FOR_EACH_EDGE (e, ei, bb->preds)
7700 if (!(e->flags & EDGE_EXECUTABLE)
7701 && (bb == entry->dest
7702 || (!rpo_state[bb_to_rpo[e->src->index]].visited
7703 && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
7704 >= (int)idx))))
7706 if (dump_file && (dump_flags & TDF_DETAILS))
7707 fprintf (dump_file, "Cannot trust state of predecessor "
7708 "edge %d -> %d, marking executable\n",
7709 e->src->index, e->dest->index);
7710 e->flags |= EDGE_EXECUTABLE;
7713 nblk++;
7714 todo |= process_bb (avail, bb, false, false, false, eliminate,
7715 do_region, exit_bbs,
7716 skip_entry_phis && bb == entry->dest);
7717 rpo_state[idx].visited++;
7719 FOR_EACH_EDGE (e, ei, bb->succs)
7720 if ((e->flags & EDGE_EXECUTABLE)
7721 && e->dest->index != EXIT_BLOCK
7722 && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
7723 && !rpo_state[bb_to_rpo[e->dest->index]].visited)
7724 bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
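/* Popping the lowest set bit makes the worklist process pending blocks in
   RPO order; a block is pushed only when an executable edge reaches it and
   it has not been visited yet, so every reachable block is processed at
   most once.  */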
7728 /* Gather the following statistics if counters or a dump file are active.  */
7729 int nex = 0;
7730 unsigned max_visited = 1;
7731 for (int i = 0; i < n; ++i)
7733 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7734 if (bb->flags & BB_EXECUTABLE)
7735 nex++;
7736 statistics_histogram_event (cfun, "RPO block visited times",
7737 rpo_state[i].visited);
7738 if (rpo_state[i].visited > max_visited)
7739 max_visited = rpo_state[i].visited;
7741 unsigned nvalues = 0, navail = 0;
7742 for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
7743 i != vn_ssa_aux_hash->end (); ++i)
7745 nvalues++;
7746 vn_avail *av = (*i)->avail;
7747 while (av)
7749 navail++;
7750 av = av->next;
7753 statistics_counter_event (cfun, "RPO blocks", n);
7754 statistics_counter_event (cfun, "RPO blocks visited", nblk);
7755 statistics_counter_event (cfun, "RPO blocks executable", nex);
7756 statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
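/* The "RPO iterations" value stores the average number of visits per
   executable block in tenths; the dump below prints it as a fixed-point
   number with one decimal digit, so e.g. a value of 23 means 2.3 visits
   per executable block.  */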
7757 statistics_histogram_event (cfun, "RPO num values", nvalues);
7758 statistics_histogram_event (cfun, "RPO num avail", navail);
7759 statistics_histogram_event (cfun, "RPO num lattice",
7760 vn_ssa_aux_hash->elements ());
7761 if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
7763 fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
7764 " blocks in total discovering %d executable blocks iterating "
7765 "%d.%d times, a block was visited max. %u times\n",
7766 n, nblk, nex,
7767 (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
7768 max_visited);
7769 fprintf (dump_file, "RPO tracked %d values available at %d locations "
7770 "and %" PRIu64 " lattice elements\n",
7771 nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
7774 if (eliminate)
7776 /* When !iterate we already performed elimination during the RPO
7777 walk. */
7778 if (iterate)
7780 /* Elimination for region-based VN needs to be done within the
7781 RPO walk. */
7782 gcc_assert (! do_region);
7783 /* Note we can't use avail.walk here because that gets confused
7784 by the existing availability and it will be less efficient
7785 as well. */
7786 todo |= eliminate_with_rpo_vn (NULL);
7788 else
7789 todo |= avail.eliminate_cleanup (do_region);
7792 vn_valueize = NULL;
7793 rpo_avail = NULL;
7795 XDELETEVEC (bb_to_rpo);
7796 XDELETEVEC (rpo);
7797 XDELETEVEC (rpo_state);
7799 return todo;
7802 /* Region-based entry for RPO VN.  Performs value-numbering and elimination
7803 on the SEME region specified by ENTRY and EXIT_BBS.  If ENTRY is not
7804 the only edge into the region at ENTRY->dest, PHI nodes in ENTRY->dest
7805 are not considered.  */
7807 unsigned
7808 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
7810 default_vn_walk_kind = VN_WALKREWRITE;
7811 unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
7812 free_rpo_vn ();
7813 return todo;
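/* A minimal usage sketch (hypothetical caller; assumes loop and dominance
   info are available and LOOP has a preheader): value-number and eliminate
   within a single loop body, treating the destination of EXIT as the only
   region exit.  */
#if 0
static unsigned
rpo_vn_loop_body (loop_p loop, edge exit)
{
  bitmap exit_bbs = BITMAP_ALLOC (NULL);
  bitmap_set_bit (exit_bbs, exit->dest->index);
  unsigned todo = do_rpo_vn (cfun, loop_preheader_edge (loop), exit_bbs);
  BITMAP_FREE (exit_bbs);
  return todo;
}
#endif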
7817 namespace {
7819 const pass_data pass_data_fre =
7821 GIMPLE_PASS, /* type */
7822 "fre", /* name */
7823 OPTGROUP_NONE, /* optinfo_flags */
7824 TV_TREE_FRE, /* tv_id */
7825 ( PROP_cfg | PROP_ssa ), /* properties_required */
7826 0, /* properties_provided */
7827 0, /* properties_destroyed */
7828 0, /* todo_flags_start */
7829 0, /* todo_flags_finish */
7832 class pass_fre : public gimple_opt_pass
7834 public:
7835 pass_fre (gcc::context *ctxt)
7836 : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
7839 /* opt_pass methods: */
7840 opt_pass * clone () { return new pass_fre (m_ctxt); }
7841 void set_pass_param (unsigned int n, bool param)
7843 gcc_assert (n == 0);
7844 may_iterate = param;
7846 virtual bool gate (function *)
7848 return flag_tree_fre != 0 && (may_iterate || optimize > 1);
7850 virtual unsigned int execute (function *);
7852 private:
7853 bool may_iterate;
7854 }; // class pass_fre
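/* The FRE pass is instantiated several times (presumably from passes.def,
   e.g. something like NEXT_PASS (pass_fre, true) for early FRE and
   NEXT_PASS (pass_fre, false) for a late run); set_pass_param above
   receives that boolean, and a non-iterating late run additionally
   requests TODO_update_address_taken in execute below.  */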
7856 unsigned int
7857 pass_fre::execute (function *fun)
7859 unsigned todo = 0;
7861 /* At -O[1g] use the cheap non-iterating mode. */
7862 bool iterate_p = may_iterate && (optimize > 1);
7863 calculate_dominance_info (CDI_DOMINATORS);
7864 if (iterate_p)
7865 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
7867 default_vn_walk_kind = VN_WALKREWRITE;
7868 todo = do_rpo_vn (fun, NULL, NULL, iterate_p, true);
7869 free_rpo_vn ();
7871 if (iterate_p)
7872 loop_optimizer_finalize ();
7874 /* For late FRE after IVOPTs and unrolling, see if we can
7875 remove some TREE_ADDRESSABLE flags and rewrite the affected memory into SSA form.  */
7876 if (!may_iterate)
7877 todo |= TODO_update_address_taken;
7879 return todo;
7882 } // anon namespace
7884 gimple_opt_pass *
7885 make_pass_fre (gcc::context *ctxt)
7887 return new pass_fre (ctxt);
7890 #undef BB_EXECUTABLE